From 95c4a8ebd634e1e99114727a7b157eeeb9297ee9 Mon Sep 17 00:00:00 2001 From: Aoang Date: Thu, 17 Oct 2024 13:55:16 +0800 Subject: [PATCH 01/55] feat: add support type for net/netip.addr and net/netip.prefix (#1028) * feat(schema): add support type for net/netip.Addr and net/netip.Prefix * fix(schema): net.IPNet(not ptr) is not implement fmt.Stringer Edit: updated commit message to comply with commitlint [subject-case] rule. Original subject: "Add support type..." --- dialect/mssqldialect/dialect.go | 4 + dialect/mysqldialect/dialect.go | 4 + dialect/pgdialect/dialect.go | 4 + dialect/pgdialect/inspector.go | 242 +++++++++++++++++++++++++++++++ dialect/pgdialect/sqltype.go | 20 +++ dialect/sqlitedialect/dialect.go | 11 ++ internal/dbtest/db_test.go | 18 +++ internal/dbtest/inspect_test.go | 112 ++++++++++++++ internal/dbtest/migrate_test.go | 138 ++++++++++++++++++ migrate/auto.go | 212 +++++++++++++++++++++++++++ schema/dialect.go | 3 + schema/inspector.go | 76 ++++++++++ schema/inspector/dialect.go | 11 ++ schema/tables.go | 12 ++ 14 files changed, 867 insertions(+) create mode 100644 dialect/pgdialect/inspector.go create mode 100644 internal/dbtest/inspect_test.go create mode 100644 migrate/auto.go create mode 100644 schema/inspector.go create mode 100644 schema/inspector/dialect.go diff --git a/dialect/mssqldialect/dialect.go b/dialect/mssqldialect/dialect.go index a5c99a274..bde140963 100755 --- a/dialect/mssqldialect/dialect.go +++ b/dialect/mssqldialect/dialect.go @@ -141,6 +141,10 @@ func (d *Dialect) AppendSequence(b []byte, _ *schema.Table, _ *schema.Field) []b return append(b, " IDENTITY"...) 
} +func (d *Dialect) DefaultSchema() string { + return "dbo" +} + func sqlType(field *schema.Field) string { switch field.DiscoveredSQLType { case sqltype.Timestamp: diff --git a/dialect/mysqldialect/dialect.go b/dialect/mysqldialect/dialect.go index 881aa7ebf..9b4dfe87c 100644 --- a/dialect/mysqldialect/dialect.go +++ b/dialect/mysqldialect/dialect.go @@ -206,6 +206,10 @@ func (d *Dialect) AppendSequence(b []byte, _ *schema.Table, _ *schema.Field) []b return append(b, " AUTO_INCREMENT"...) } +func (d *Dialect) DefaultSchema() string { + return "mydb" +} + func sqlType(field *schema.Field) string { if field.DiscoveredSQLType == sqltype.Timestamp { return datetimeType diff --git a/dialect/pgdialect/dialect.go b/dialect/pgdialect/dialect.go index 358971f61..766aa1be4 100644 --- a/dialect/pgdialect/dialect.go +++ b/dialect/pgdialect/dialect.go @@ -11,6 +11,7 @@ import ( "github.com/uptrace/bun/dialect/feature" "github.com/uptrace/bun/dialect/sqltype" "github.com/uptrace/bun/schema" + "github.com/uptrace/bun/schema/inspector" ) var pgDialect = New() @@ -29,6 +30,9 @@ type Dialect struct { features feature.Feature } +var _ schema.Dialect = (*Dialect)(nil) +var _ inspector.Dialect = (*Dialect)(nil) + func New() *Dialect { d := new(Dialect) d.tables = schema.NewTables(d) diff --git a/dialect/pgdialect/inspector.go b/dialect/pgdialect/inspector.go new file mode 100644 index 000000000..418140855 --- /dev/null +++ b/dialect/pgdialect/inspector.go @@ -0,0 +1,242 @@ +package pgdialect + +import ( + "context" + "fmt" + "strings" + + "github.com/uptrace/bun" + "github.com/uptrace/bun/dialect/sqltype" + "github.com/uptrace/bun/schema" +) + +func (d *Dialect) Inspector(db *bun.DB) schema.Inspector { + return newDatabaseInspector(db) +} + +type DatabaseInspector struct { + db *bun.DB +} + +var _ schema.Inspector = (*DatabaseInspector)(nil) + +func newDatabaseInspector(db *bun.DB) *DatabaseInspector { + return &DatabaseInspector{db: db} +} + +func (di *DatabaseInspector) Inspect(ctx 
context.Context) (schema.State, error) { + var state schema.State + var tables []*InformationSchemaTable + if err := di.db.NewRaw(sqlInspectTables).Scan(ctx, &tables); err != nil { + return state, err + } + + for _, table := range tables { + var columns []*InformationSchemaColumn + if err := di.db.NewRaw(sqlInspectColumnsQuery, table.Schema, table.Name).Scan(ctx, &columns); err != nil { + return state, err + } + colDefs := make(map[string]schema.ColumnDef) + for _, c := range columns { + dataType := fromDatabaseType(c.DataType) + if strings.EqualFold(dataType, sqltype.VarChar) && c.VarcharLen > 0 { + dataType = fmt.Sprintf("%s(%d)", dataType, c.VarcharLen) + } + + def := c.Default + if c.IsSerial || c.IsIdentity { + def = "" + } + + colDefs[c.Name] = schema.ColumnDef{ + SQLType: strings.ToLower(dataType), + IsPK: c.IsPK, + IsNullable: c.IsNullable, + IsAutoIncrement: c.IsSerial, + IsIdentity: c.IsIdentity, + DefaultValue: def, + } + } + + state.Tables = append(state.Tables, schema.TableDef{ + Schema: table.Schema, + Name: table.Name, + Columns: colDefs, + }) + } + return state, nil +} + +type InformationSchemaTable struct { + bun.BaseModel + + Schema string `bun:"table_schema,pk"` + Name string `bun:"table_name,pk"` + + Columns []*InformationSchemaColumn `bun:"rel:has-many,join:table_schema=table_schema,join:table_name=table_name"` +} + +type InformationSchemaColumn struct { + bun.BaseModel + + Schema string `bun:"table_schema"` + Table string `bun:"table_name"` + Name string `bun:"column_name"` + DataType string `bun:"data_type"` + VarcharLen int `bun:"varchar_len"` + IsArray bool `bun:"is_array"` + ArrayDims int `bun:"array_dims"` + Default string `bun:"default"` + IsPK bool `bun:"is_pk"` + IsIdentity bool `bun:"is_identity"` + IndentityType string `bun:"identity_type"` + IsSerial bool `bun:"is_serial"` + IsNullable bool `bun:"is_nullable"` + IsUnique bool `bun:"is_unique"` + UniqueGroup []string `bun:"unique_group,array"` +} + +const ( + // sqlInspectTables 
retrieves all user-defined tables across all schemas. + // It excludes relations from Postgres's reserved "pg_" schemas and views from the "information_schema". + sqlInspectTables = ` +SELECT table_schema, table_name +FROM information_schema.tables +WHERE table_type = 'BASE TABLE' + AND table_schema <> 'information_schema' + AND table_schema NOT LIKE 'pg_%' + ` + + // sqlInspectColumnsQuery retrieves column definitions for the specified table. + // Unlike sqlInspectTables and sqlInspectSchema, it should be passed to bun.NewRaw + // with additional args for table_schema and table_name. + sqlInspectColumnsQuery = ` +SELECT + "c".table_schema, + "c".table_name, + "c".column_name, + "c".data_type, + "c".character_maximum_length::integer AS varchar_len, + "c".data_type = 'ARRAY' AS is_array, + COALESCE("c".array_dims, 0) AS array_dims, + CASE + WHEN "c".column_default ~ '^''.*''::.*$' THEN substring("c".column_default FROM '^''(.*)''::.*$') + ELSE "c".column_default + END AS "default", + 'p' = ANY("c".constraint_type) AS is_pk, + "c".is_identity = 'YES' AS is_identity, + "c".column_default = format('nextval(''%s_%s_seq''::regclass)', "c".table_name, "c".column_name) AS is_serial, + COALESCE("c".identity_type, '') AS identity_type, + "c".is_nullable = 'YES' AS is_nullable, + 'u' = ANY("c".constraint_type) AS is_unique, + "c"."constraint_name" AS unique_group +FROM ( + SELECT + "table_schema", + "table_name", + "column_name", + "c".data_type, + "c".character_maximum_length, + "c".column_default, + "c".is_identity, + "c".is_nullable, + att.array_dims, + att.identity_type, + att."constraint_name", + att."constraint_type" + FROM information_schema.columns "c" + LEFT JOIN ( + SELECT + s.nspname AS "table_schema", + "t".relname AS "table_name", + "c".attname AS "column_name", + "c".attndims AS array_dims, + "c".attidentity AS identity_type, + ARRAY_AGG(con.conname) AS "constraint_name", + ARRAY_AGG(con.contype) AS "constraint_type" + FROM ( + SELECT + conname, + contype, + 
connamespace, + conrelid, + conrelid AS attrelid, + UNNEST(conkey) AS attnum + FROM pg_constraint + ) con + LEFT JOIN pg_attribute "c" USING (attrelid, attnum) + LEFT JOIN pg_namespace s ON s.oid = con.connamespace + LEFT JOIN pg_class "t" ON "t".oid = con.conrelid + GROUP BY 1, 2, 3, 4, 5 + ) att USING ("table_schema", "table_name", "column_name") + ) "c" +WHERE "table_schema" = ? AND "table_name" = ? + ` + + // sqlInspectSchema retrieves column type definitions for all user-defined tables. + // Other relations, such as views and indices, as well as Posgres's internal relations are excluded. + sqlInspectSchema = ` +SELECT + "t"."table_schema", + "t".table_name, + "c".column_name, + "c".data_type, + "c".character_maximum_length::integer AS varchar_len, + "c".data_type = 'ARRAY' AS is_array, + COALESCE("c".array_dims, 0) AS array_dims, + CASE + WHEN "c".column_default ~ '^''.*''::.*$' THEN substring("c".column_default FROM '^''(.*)''::.*$') + ELSE "c".column_default + END AS "default", + "c".constraint_type = 'p' AS is_pk, + "c".is_identity = 'YES' AS is_identity, + "c".column_default = format('nextval(''%s_%s_seq''::regclass)', "t".table_name, "c".column_name) AS is_serial, + COALESCE("c".identity_type, '') AS identity_type, + "c".is_nullable = 'YES' AS is_nullable, + "c".constraint_type = 'u' AS is_unique, + "c"."constraint_name" AS unique_group +FROM information_schema.tables "t" + LEFT JOIN ( + SELECT + "table_schema", + "table_name", + "column_name", + "c".data_type, + "c".character_maximum_length, + "c".column_default, + "c".is_identity, + "c".is_nullable, + att.array_dims, + att.identity_type, + att."constraint_name", + att."constraint_type" + FROM information_schema.columns "c" + LEFT JOIN ( + SELECT + s.nspname AS table_schema, + "t".relname AS "table_name", + "c".attname AS "column_name", + "c".attndims AS array_dims, + "c".attidentity AS identity_type, + con.conname AS "constraint_name", + con.contype AS "constraint_type" + FROM ( + SELECT + conname, + 
contype, + connamespace, + conrelid, + conrelid AS attrelid, + UNNEST(conkey) AS attnum + FROM pg_constraint + ) con + LEFT JOIN pg_attribute "c" USING (attrelid, attnum) + LEFT JOIN pg_namespace s ON s.oid = con.connamespace + LEFT JOIN pg_class "t" ON "t".oid = con.conrelid + ) att USING (table_schema, "table_name", "column_name") + ) "c" USING (table_schema, "table_name") +WHERE table_type = 'BASE TABLE' + AND table_schema <> 'information_schema' + AND table_schema NOT LIKE 'pg_%' + ` +) diff --git a/dialect/pgdialect/sqltype.go b/dialect/pgdialect/sqltype.go index fad84209d..6b862b972 100644 --- a/dialect/pgdialect/sqltype.go +++ b/dialect/pgdialect/sqltype.go @@ -5,6 +5,7 @@ import ( "encoding/json" "net" "reflect" + "strings" "github.com/uptrace/bun/dialect/sqltype" "github.com/uptrace/bun/schema" @@ -28,6 +29,12 @@ const ( pgTypeSerial = "SERIAL" // 4 byte autoincrementing integer pgTypeBigSerial = "BIGSERIAL" // 8 byte autoincrementing integer + // Character Types + pgTypeChar = "CHAR" // fixed length string (blank padded) + pgTypeText = "TEXT" // variable length string without limit + pgTypeVarchar = "VARCHAR" // variable length string with optional limit + pgTypeCharacterVarying = "CHARACTER VARYING" // alias for VARCHAR + // Binary Data Types pgTypeBytea = "BYTEA" // binary string ) @@ -43,6 +50,10 @@ func (d *Dialect) DefaultVarcharLen() int { return 0 } +func (d *Dialect) DefaultSchema() string { + return "public" +} + func fieldSQLType(field *schema.Field) string { if field.UserSQLType != "" { return field.UserSQLType @@ -103,3 +114,12 @@ func sqlType(typ reflect.Type) string { return sqlType } + +// fromDatabaseType converts Postgres-specific type to a more generic `sqltype`. 
+func fromDatabaseType(dbType string) string { + switch strings.ToUpper(dbType) { + case pgTypeChar, pgTypeVarchar, pgTypeCharacterVarying, pgTypeText: + return sqltype.VarChar + } + return dbType +} diff --git a/dialect/sqlitedialect/dialect.go b/dialect/sqlitedialect/dialect.go index 3bfe500ff..c2c676d05 100644 --- a/dialect/sqlitedialect/dialect.go +++ b/dialect/sqlitedialect/dialect.go @@ -96,9 +96,13 @@ func (d *Dialect) DefaultVarcharLen() int { // AUTOINCREMENT is only valid for INTEGER PRIMARY KEY, and this method will be a noop for other columns. // // Because this is a valid construct: +// // CREATE TABLE ("id" INTEGER PRIMARY KEY AUTOINCREMENT); +// // and this is not: +// // CREATE TABLE ("id" INTEGER AUTOINCREMENT, PRIMARY KEY ("id")); +// // AppendSequence adds a primary key constraint as a *side-effect*. Callers should expect it to avoid building invalid SQL. // SQLite also [does not support] AUTOINCREMENT column in composite primary keys. // @@ -111,6 +115,13 @@ func (d *Dialect) AppendSequence(b []byte, table *schema.Table, field *schema.Fi return b } +// DefaultSchemaName is the "schema-name" of the main database. +// The details might differ from other dialects, but for all means and purposes +// "main" is the default schema in an SQLite database. 
+func (d *Dialect) DefaultSchema() string { + return "main" +} + func fieldSQLType(field *schema.Field) string { switch field.DiscoveredSQLType { case sqltype.SmallInt, sqltype.BigInt: diff --git a/internal/dbtest/db_test.go b/internal/dbtest/db_test.go index d44e48b52..5aed97480 100644 --- a/internal/dbtest/db_test.go +++ b/internal/dbtest/db_test.go @@ -24,6 +24,7 @@ import ( "github.com/uptrace/bun/driver/pgdriver" "github.com/uptrace/bun/driver/sqliteshim" "github.com/uptrace/bun/extra/bundebug" + "github.com/uptrace/bun/schema" _ "github.com/denisenkom/go-mssqldb" _ "github.com/go-sql-driver/mysql" @@ -53,6 +54,13 @@ var allDBs = map[string]func(tb testing.TB) *bun.DB{ mssql2019Name: mssql2019, } +var allDialects = []func() schema.Dialect{ + func() schema.Dialect { return pgdialect.New() }, + func() schema.Dialect { return mysqldialect.New() }, + func() schema.Dialect { return sqlitedialect.New() }, + func() schema.Dialect { return mssqldialect.New() }, +} + func pg(tb testing.TB) *bun.DB { dsn := os.Getenv("PG") if dsn == "" { @@ -216,6 +224,16 @@ func testEachDB(t *testing.T, f func(t *testing.T, dbName string, db *bun.DB)) { } } +// testEachDialect allows testing dialect-specific functionality that does not require database interactions. 
+func testEachDialect(t *testing.T, f func(t *testing.T, dialectName string, dialect func() schema.Dialect)) { + for _, newDialect := range allDialects { + name := newDialect().Name().String() + t.Run(name, func(t *testing.T) { + f(t, name, newDialect) + }) + } +} + func funcName(x interface{}) string { s := runtime.FuncForPC(reflect.ValueOf(x).Pointer()).Name() if i := strings.LastIndexByte(s, '.'); i >= 0 { diff --git a/internal/dbtest/inspect_test.go b/internal/dbtest/inspect_test.go new file mode 100644 index 000000000..9b092ef4d --- /dev/null +++ b/internal/dbtest/inspect_test.go @@ -0,0 +1,112 @@ +package dbtest_test + +import ( + "context" + "testing" + + "github.com/stretchr/testify/require" + "github.com/uptrace/bun" + "github.com/uptrace/bun/schema" + "github.com/uptrace/bun/schema/inspector" +) + +func TestDatabaseInspector_Inspect(t *testing.T) { + + type Book struct { + bun.BaseModel `bun:"table:books"` + + ISBN int `bun:",pk,identity"` + Author string `bun:",notnull,unique:title_author,default:'john doe'"` + Title string `bun:",notnull,unique:title_author"` + Locale string `bun:",type:varchar(5),default:'en-GB'"` + Pages int8 `bun:"page_count,notnull,default:1"` + Count int32 `bun:"book_count,autoincrement"` + } + + testEachDB(t, func(t *testing.T, dbName string, db *bun.DB) { + var dialect inspector.Dialect + dbDialect := db.Dialect() + + if id, ok := dbDialect.(inspector.Dialect); ok { + dialect = id + } else { + t.Skipf("%q dialect does not implement inspector.Dialect", dbDialect.Name()) + } + + ctx := context.Background() + createTableOrSkip(t, ctx, db, (*Book)(nil)) + + dbInspector := dialect.Inspector(db) + want := schema.State{ + Tables: []schema.TableDef{ + { + Schema: "public", + Name: "books", + Columns: map[string]schema.ColumnDef{ + "isbn": { + SQLType: "bigint", + IsPK: true, + IsNullable: false, + IsAutoIncrement: false, + IsIdentity: true, + DefaultValue: "", + }, + "author": { + SQLType: "varchar", + IsPK: false, + IsNullable: false, + 
IsAutoIncrement: false, + IsIdentity: false, + DefaultValue: "john doe", + }, + "title": { + SQLType: "varchar", + IsPK: false, + IsNullable: false, + IsAutoIncrement: false, + IsIdentity: false, + DefaultValue: "", + }, + "locale": { + SQLType: "varchar(5)", + IsPK: false, + IsNullable: true, + IsAutoIncrement: false, + IsIdentity: false, + DefaultValue: "en-GB", + }, + "page_count": { + SQLType: "smallint", + IsPK: false, + IsNullable: false, + IsAutoIncrement: false, + IsIdentity: false, + DefaultValue: "1", + }, + "book_count": { + SQLType: "integer", + IsPK: false, + IsNullable: false, + IsAutoIncrement: true, + IsIdentity: false, + DefaultValue: "", + }, + }, + }, + }, + } + + got, err := dbInspector.Inspect(ctx) + require.NoError(t, err) + require.Equal(t, want, got) + }) +} + +func getDatabaseInspectorOrSkip(tb testing.TB, db *bun.DB) schema.Inspector { + dialect := db.Dialect() + if id, ok := dialect.(inspector.Dialect); ok { + return id.Inspector(db) + } + tb.Skipf("%q dialect does not implement inspector.Dialect", dialect.Name()) + return nil +} diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index 74e33eab2..bab42e9b3 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -8,6 +8,7 @@ import ( "github.com/stretchr/testify/require" "github.com/uptrace/bun" "github.com/uptrace/bun/migrate" + "github.com/uptrace/bun/schema" ) const ( @@ -158,3 +159,140 @@ func testMigrateUpError(t *testing.T, db *bun.DB) { require.Len(t, group.Migrations, 2) require.Equal(t, []string{"down2", "down1"}, history) } + +func TestAutoMigrator_Migrate(t *testing.T) { + tests := []struct { + fn func(t *testing.T, db *bun.DB) + }{ + {testRenameTable}, + } + + testEachDB(t, func(t *testing.T, dbName string, db *bun.DB) { + for _, tt := range tests { + t.Run(funcName(tt.fn), func(t *testing.T) { + tt.fn(t, db) + }) + } + }) +} + +func testRenameTable(t *testing.T, db *bun.DB) { + type initial struct { + bun.BaseModel 
`bun:"table:initial"` + Foo int `bun:"foo,notnull"` + } + + type changed struct { + bun.BaseModel `bun:"table:changed"` + Foo int `bun:"foo,notnull"` + } + + // Arrange + ctx := context.Background() + di := getDatabaseInspectorOrSkip(t, db) + createTableOrSkip(t, ctx, db, (*initial)(nil)) + + m, err := migrate.NewAutoMigrator(db) + require.NoError(t, err) + m.SetModels((*changed)(nil)) + + // Act + err = m.Migrate(ctx) + require.NoError(t, err) + + // Assert + state, err := di.Inspect(ctx) + require.NoError(t, err) + + tables := state.Tables + require.Len(t, tables, 1) + require.Equal(t, "changed", tables[0].Name) +} + +func createTableOrSkip(tb testing.TB, ctx context.Context, db *bun.DB, model interface{}) { + tb.Helper() + if _, err := db.NewCreateTable().IfNotExists().Model(model).Exec(ctx); err != nil { + tb.Skip("setup failed:", err) + } + tb.Cleanup(func() { + if _, err := db.NewDropTable().IfExists().Model(model).Exec(ctx); err != nil { + tb.Log("cleanup:", err) + } + }) +} + +func TestDetector_Diff(t *testing.T) { + tests := []struct { + name string + states func(testing.TB, context.Context, func() schema.Dialect) (stateDb schema.State, stateModel schema.State) + operations []migrate.Operation + }{ + { + name: "find a renamed table", + states: renamedTableStates, + operations: []migrate.Operation{ + &migrate.RenameTable{ + From: "books", + To: "books_renamed", + }, + }, + }, + } + + testEachDialect(t, func(t *testing.T, dialectName string, dialect func() schema.Dialect) { + if dialectName != "pg" { + t.Skip() + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ctx := context.Background() + var d migrate.Detector + stateDb, stateModel := tt.states(t, ctx, dialect) + + diff := d.Diff(stateDb, stateModel) + + require.Equal(t, tt.operations, diff.Operations()) + }) + } + }) +} + +func renamedTableStates(tb testing.TB, ctx context.Context, dialect func() schema.Dialect) (s1, s2 schema.State) { + type Book struct { + bun.BaseModel + + ISBN 
string `bun:"isbn,pk"` + Title string `bun:"title,notnull"` + Pages int `bun:"page_count,notnull,default:0"` + } + + type Author struct { + bun.BaseModel + Name string `bun:"name"` + } + + type BookRenamed struct { + bun.BaseModel `bun:"table:books_renamed"` + + ISBN string `bun:"isbn,pk"` + Title string `bun:"title,notnull"` + Pages int `bun:"page_count,notnull,default:0"` + } + return getState(tb, ctx, dialect(), + (*Author)(nil), + (*Book)(nil), + ), getState(tb, ctx, dialect(), + (*Author)(nil), + (*BookRenamed)(nil), + ) +} + +func getState(tb testing.TB, ctx context.Context, dialect schema.Dialect, models ...interface{}) schema.State { + inspector := schema.NewInspector(dialect, models...) + state, err := inspector.Inspect(ctx) + if err != nil { + tb.Skip("get state: %w", err) + } + return state +} diff --git a/migrate/auto.go b/migrate/auto.go new file mode 100644 index 000000000..8453e069d --- /dev/null +++ b/migrate/auto.go @@ -0,0 +1,212 @@ +package migrate + +import ( + "context" + "fmt" + + "github.com/uptrace/bun" + "github.com/uptrace/bun/schema" + "github.com/uptrace/bun/schema/inspector" +) + +type AutoMigrator struct { + db *bun.DB + + // models limit the set of tables considered for the migration. + models []interface{} + + // dbInspector creates the current state for the target database. + dbInspector schema.Inspector + + // modelInspector creates the desired state based on the model definitions. 
+ modelInspector schema.Inspector +} + +func NewAutoMigrator(db *bun.DB) (*AutoMigrator, error) { + dialect := db.Dialect() + withInspector, ok := dialect.(inspector.Dialect) + if !ok { + return nil, fmt.Errorf("%q dialect does not implement inspector.Dialect", dialect.Name()) + } + + return &AutoMigrator{ + db: db, + dbInspector: withInspector.Inspector(db), + }, nil +} + +func (am *AutoMigrator) SetModels(models ...interface{}) { + am.models = models +} + +func (am *AutoMigrator) diff(ctx context.Context) (Changeset, error) { + var changes Changeset + var err error + + // TODO: do on "SetModels" + am.modelInspector = schema.NewInspector(am.db.Dialect(), am.models...) + + _, err = am.dbInspector.Inspect(ctx) + if err != nil { + return changes, err + } + + _, err = am.modelInspector.Inspect(ctx) + if err != nil { + return changes, err + } + return changes, nil +} + +func (am *AutoMigrator) Migrate(ctx context.Context) error { + return nil +} + +// INTERNAL ------------------------------------------------------------------- + +// Operation is an abstraction a level above a MigrationFunc. +// Apart from storing the function to execute the change, +// it knows how to *write* the corresponding code, and what the reverse operation is. +type Operation interface { + Func() MigrationFunc +} + +type RenameTable struct { + From string + To string +} + +func (rt *RenameTable) Func() MigrationFunc { + return func(ctx context.Context, db *bun.DB) error { + db.Dialect() + return nil + } +} + +// Changeset is a set of changes that alter database state. +type Changeset struct { + operations []Operation +} + +func (c Changeset) Operations() []Operation { + return c.operations +} + +func (c *Changeset) Add(op Operation) { + c.operations = append(c.operations, op) +} + +type Detector struct{} + +func (d *Detector) Diff(got, want schema.State) Changeset { + var changes Changeset + + // Detect renamed models + oldModels := newTableSet(got.Tables...) 
+ newModels := newTableSet(want.Tables...) + + addedModels := newModels.Sub(oldModels) + for _, added := range addedModels.Values() { + removedModels := oldModels.Sub(newModels) + for _, removed := range removedModels.Values() { + if !haveSameSignature(added, removed) { + continue + } + changes.Add(&RenameTable{ + From: removed.Name, + To: added.Name, + }) + } + } + + return changes +} + +// haveSameSignature determines if two tables have the same "signature". +func haveSameSignature(t1, t2 schema.TableDef) bool { + sig1 := newSignature(t1) + sig2 := newSignature(t2) + return sig1.Equals(sig2) +} + +// tableSet stores unique table definitions. +type tableSet struct { + underlying map[string]schema.TableDef +} + +func newTableSet(initial ...schema.TableDef) tableSet { + set := tableSet{ + underlying: make(map[string]schema.TableDef), + } + for _, t := range initial { + set.Add(t) + } + return set +} + +func (set tableSet) Add(t schema.TableDef) { + set.underlying[t.Name] = t +} + +func (set tableSet) Remove(s string) { + delete(set.underlying, s) +} + +func (set tableSet) Values() (tables []schema.TableDef) { + for _, t := range set.underlying { + tables = append(tables, t) + } + return +} + +func (set tableSet) Sub(other tableSet) tableSet { + res := set.clone() + for v := range other.underlying { + if _, ok := set.underlying[v]; ok { + res.Remove(v) + } + } + return res +} + +func (set tableSet) clone() tableSet { + res := newTableSet() + for _, t := range set.underlying { + res.Add(t) + } + return res +} + +// signature is a set of column definitions, which allows "relation/name-agnostic" comparison between them; +// meaning that two columns are considered equal if their types are the same. +type signature struct { + + // underlying stores the number of occurences for each unique column type. + // It helps to account for the fact that a table might have multiple columns that have the same type. 
+ underlying map[schema.ColumnDef]int +} + +func newSignature(t schema.TableDef) signature { + s := signature{ + underlying: make(map[schema.ColumnDef]int), + } + s.scan(t) + return s +} + +// scan iterates over table's field and counts occurrences of each unique column definition. +func (s *signature) scan(t schema.TableDef) { + for _, c := range t.Columns { + s.underlying[c]++ + } +} + +// Equals returns true if 2 signatures share an identical set of columns. +func (s *signature) Equals(other signature) bool { + for k, count := range s.underlying { + if countOther, ok := other.underlying[k]; !ok || countOther != count { + return false + } + } + return true +} diff --git a/schema/dialect.go b/schema/dialect.go index 330293444..a5e2afb4e 100644 --- a/schema/dialect.go +++ b/schema/dialect.go @@ -39,6 +39,9 @@ type Dialect interface { // is mandatory in queries that modify the schema (CREATE TABLE / ADD COLUMN, etc). // Dialects that do not have such requirement may return 0, which should be interpreted so by the caller. DefaultVarcharLen() int + + // DefaultSchema should returns the name of the default database schema. 
+ DefaultSchema() string } // ------------------------------------------------------------------------------ diff --git a/schema/inspector.go b/schema/inspector.go new file mode 100644 index 000000000..464cfa81f --- /dev/null +++ b/schema/inspector.go @@ -0,0 +1,76 @@ +package schema + +import ( + "context" + "strings" +) + +type Inspector interface { + Inspect(ctx context.Context) (State, error) +} + +type State struct { + Tables []TableDef +} + +type TableDef struct { + Schema string + Name string + Columns map[string]ColumnDef +} + +type ColumnDef struct { + SQLType string + DefaultValue string + IsPK bool + IsNullable bool + IsAutoIncrement bool + IsIdentity bool +} + +type SchemaInspector struct { + dialect Dialect +} + +var _ Inspector = (*SchemaInspector)(nil) + +func NewInspector(dialect Dialect, models ...interface{}) *SchemaInspector { + dialect.Tables().Register(models...) + return &SchemaInspector{ + dialect: dialect, + } +} + +func (si *SchemaInspector) Inspect(ctx context.Context) (State, error) { + var state State + for _, t := range si.dialect.Tables().All() { + columns := make(map[string]ColumnDef) + for _, f := range t.Fields { + columns[f.Name] = ColumnDef{ + SQLType: f.CreateTableSQLType, + DefaultValue: f.SQLDefault, + IsPK: f.IsPK, + IsNullable: !f.NotNull, + IsAutoIncrement: f.AutoIncrement, + IsIdentity: f.Identity, + } + } + + schema, table := splitTableNameTag(si.dialect, t.Name) + state.Tables = append(state.Tables, TableDef{ + Schema: schema, + Name: table, + Columns: columns, + }) + } + return state, nil +} + +// splitTableNameTag +func splitTableNameTag(d Dialect, nameTag string) (string, string) { + schema, table := d.DefaultSchema(), nameTag + if schemaTable := strings.Split(nameTag, "."); len(schemaTable) == 2 { + schema, table = schemaTable[0], schemaTable[1] + } + return schema, table +} \ No newline at end of file diff --git a/schema/inspector/dialect.go b/schema/inspector/dialect.go new file mode 100644 index 
000000000..701300da9 --- /dev/null +++ b/schema/inspector/dialect.go @@ -0,0 +1,11 @@ +package inspector + +import ( + "github.com/uptrace/bun" + "github.com/uptrace/bun/schema" +) + +type Dialect interface { + schema.Dialect + Inspector(db *bun.DB) schema.Inspector +} diff --git a/schema/tables.go b/schema/tables.go index 985093421..58c45cbee 100644 --- a/schema/tables.go +++ b/schema/tables.go @@ -77,6 +77,7 @@ func (t *Tables) InProgress(typ reflect.Type) *Table { return table } +// ByModel gets the table by its Go name. func (t *Tables) ByModel(name string) *Table { var found *Table t.tables.Range(func(typ reflect.Type, table *Table) bool { @@ -89,6 +90,7 @@ func (t *Tables) ByModel(name string) *Table { return found } +// ByName gets the table by its SQL name. func (t *Tables) ByName(name string) *Table { var found *Table t.tables.Range(func(typ reflect.Type, table *Table) bool { @@ -100,3 +102,13 @@ func (t *Tables) ByName(name string) *Table { }) return found } + +// All returns all registered tables. 
+func (t *Tables) All() []*Table { + var found []*Table + t.tables.Range(func(typ reflect.Type, table *Table) bool { + found = append(found, table) + return true + }) + return found +} From e04de83958e2c021aaadc9f5edd2179a3845c9fd Mon Sep 17 00:00:00 2001 From: bevzzz Date: Sat, 21 Oct 2023 22:44:05 +0200 Subject: [PATCH 02/55] test: allow passing go test flags to test.sh --- internal/dbtest/test.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/internal/dbtest/test.sh b/internal/dbtest/test.sh index 16c25ce7a..1f7e03b1c 100755 --- a/internal/dbtest/test.sh +++ b/internal/dbtest/test.sh @@ -3,5 +3,5 @@ trap 'docker-compose down -v' EXIT docker-compose down -v docker-compose up -d sleep 30 -CGO_ENABLED=0 TZ= go test -CGO_ENABLED=1 TZ= go test -tags cgosqlite +CGO_ENABLED=0 TZ= go test "$@" +CGO_ENABLED=1 TZ= go test -tags cgosqlite "$@" From 8857bab54b94170d218633f3b210d379e4e51a21 Mon Sep 17 00:00:00 2001 From: bevzzz Date: Sat, 21 Oct 2023 13:16:22 +0200 Subject: [PATCH 03/55] feat: detect renamed tables --- schema/dialect.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/schema/dialect.go b/schema/dialect.go index a5e2afb4e..bb40af62b 100644 --- a/schema/dialect.go +++ b/schema/dialect.go @@ -188,3 +188,7 @@ func (d *nopDialect) DefaultVarcharLen() int { func (d *nopDialect) AppendSequence(b []byte, _ *Table, _ *Field) []byte { return b } + +func (d *nopDialect) DefaultSchema() string { + return "nop" +} From 5060e47db13451a982e48d0f14055a58ba60b472 Mon Sep 17 00:00:00 2001 From: bevzzz Date: Tue, 31 Oct 2023 19:19:28 +0100 Subject: [PATCH 04/55] feat: implement fmt.Stringer queries - RawQuery - CreateTableQuery --- query_raw.go | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/query_raw.go b/query_raw.go index fda088a7c..1634d0e5b 100644 --- a/query_raw.go +++ b/query_raw.go @@ -96,3 +96,12 @@ func (q *RawQuery) AppendQuery(fmter schema.Formatter, b []byte) ([]byte, error) func (q *RawQuery) Operation() string { return 
"SELECT" } + +func (q *RawQuery) String() string { + buf, err := q.AppendQuery(q.db.Formatter(), nil) + if err != nil { + panic(err) + } + + return string(buf) +} From 6dfdf952e99c531ae22abc3ff77e2999c922fb72 Mon Sep 17 00:00:00 2001 From: bevzzz Date: Tue, 31 Oct 2023 19:22:31 +0100 Subject: [PATCH 05/55] test: cleanup test databases to avoid side-effects --- internal/dbtest/db_test.go | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/internal/dbtest/db_test.go b/internal/dbtest/db_test.go index 5aed97480..e93ce5843 100644 --- a/internal/dbtest/db_test.go +++ b/internal/dbtest/db_test.go @@ -1567,6 +1567,27 @@ func testEmbedModelPointer(t *testing.T, db *bun.DB) { require.Equal(t, *m1, m2) } +func testEmbedTypeField(t *testing.T, db *bun.DB) { + type Embed string + type Model struct { + Embed + } + + ctx := context.Background() + mustResetModel(t, ctx, db, (*Model)(nil)) + + m1 := &Model{ + Embed: Embed("foo"), + } + _, err := db.NewInsert().Model(m1).Exec(ctx) + require.NoError(t, err) + + var m2 Model + err = db.NewSelect().Model(&m2).Scan(ctx) + require.NoError(t, err) + require.Equal(t, *m1, m2) +} + type JSONField struct { Foo string `json:"foo"` } From c03938ff5e9fa2f653e4c60668b1368357d2de10 Mon Sep 17 00:00:00 2001 From: bevzzz Date: Tue, 31 Oct 2023 19:24:39 +0100 Subject: [PATCH 06/55] feat(automigrate): detect renamed tables --- dialect/pgdialect/alter_table.go | 26 ++++ dialect/pgdialect/dialect.go | 2 + dialect/pgdialect/inspector.go | 29 +++-- internal/dbtest/db_test.go | 7 +- internal/dbtest/inspect_test.go | 6 +- internal/dbtest/migrate_test.go | 51 +++----- migrate/auto.go | 204 +++++++++++++++++++++++++++---- migrate/migrator.go | 17 ++- migrate/sqlschema/migrate.go | 33 +++++ schema/inspector.go | 39 +++--- schema/inspector/dialect.go | 2 +- schema/table.go | 20 +++ 12 files changed, 335 insertions(+), 101 deletions(-) create mode 100644 dialect/pgdialect/alter_table.go create mode 100644 migrate/sqlschema/migrate.go diff 
--git a/dialect/pgdialect/alter_table.go b/dialect/pgdialect/alter_table.go new file mode 100644 index 000000000..f8341fbf7 --- /dev/null +++ b/dialect/pgdialect/alter_table.go @@ -0,0 +1,26 @@ +package pgdialect + +import ( + "context" + "database/sql" + "fmt" + + "github.com/uptrace/bun/migrate/sqlschema" +) + +func (d *Dialect) Migrator(sqldb *sql.DB) sqlschema.Migrator { + return &Migrator{sqldb: sqldb} +} + +type Migrator struct { + sqldb *sql.DB +} + +func (m *Migrator) RenameTable(ctx context.Context, oldName, newName string) error { + query := fmt.Sprintf("ALTER TABLE %s RENAME TO %s", oldName, newName) + _, err := m.sqldb.ExecContext(ctx, query) + if err != nil { + return err + } + return nil +} diff --git a/dialect/pgdialect/dialect.go b/dialect/pgdialect/dialect.go index 766aa1be4..022c74bd0 100644 --- a/dialect/pgdialect/dialect.go +++ b/dialect/pgdialect/dialect.go @@ -10,6 +10,7 @@ import ( "github.com/uptrace/bun/dialect" "github.com/uptrace/bun/dialect/feature" "github.com/uptrace/bun/dialect/sqltype" + "github.com/uptrace/bun/migrate/sqlschema" "github.com/uptrace/bun/schema" "github.com/uptrace/bun/schema/inspector" ) @@ -32,6 +33,7 @@ type Dialect struct { var _ schema.Dialect = (*Dialect)(nil) var _ inspector.Dialect = (*Dialect)(nil) +var _ sqlschema.MigratorDialect = (*Dialect)(nil) func New() *Dialect { d := new(Dialect) diff --git a/dialect/pgdialect/inspector.go b/dialect/pgdialect/inspector.go index 418140855..5aa2995f3 100644 --- a/dialect/pgdialect/inspector.go +++ b/dialect/pgdialect/inspector.go @@ -10,30 +10,38 @@ import ( "github.com/uptrace/bun/schema" ) -func (d *Dialect) Inspector(db *bun.DB) schema.Inspector { - return newDatabaseInspector(db) +func (d *Dialect) Inspector(db *bun.DB, excludeTables ...string) schema.Inspector { + return newInspector(db, excludeTables...) 
} -type DatabaseInspector struct { - db *bun.DB +type Inspector struct { + db *bun.DB + excludeTables []string } -var _ schema.Inspector = (*DatabaseInspector)(nil) +var _ schema.Inspector = (*Inspector)(nil) -func newDatabaseInspector(db *bun.DB) *DatabaseInspector { - return &DatabaseInspector{db: db} +func newInspector(db *bun.DB, excludeTables ...string) *Inspector { + return &Inspector{db: db, excludeTables: excludeTables} } -func (di *DatabaseInspector) Inspect(ctx context.Context) (schema.State, error) { +func (in *Inspector) Inspect(ctx context.Context) (schema.State, error) { var state schema.State + + exclude := in.excludeTables + if len(exclude) == 0 { + // Avoid getting NOT IN (NULL) if bun.In() is called with an empty slice. + exclude = []string{""} + } + var tables []*InformationSchemaTable - if err := di.db.NewRaw(sqlInspectTables).Scan(ctx, &tables); err != nil { + if err := in.db.NewRaw(sqlInspectTables, bun.In(exclude)).Scan(ctx, &tables); err != nil { return state, err } for _, table := range tables { var columns []*InformationSchemaColumn - if err := di.db.NewRaw(sqlInspectColumnsQuery, table.Schema, table.Name).Scan(ctx, &columns); err != nil { + if err := in.db.NewRaw(sqlInspectColumnsQuery, table.Schema, table.Name).Scan(ctx, &columns); err != nil { return state, err } colDefs := make(map[string]schema.ColumnDef) @@ -105,6 +113,7 @@ FROM information_schema.tables WHERE table_type = 'BASE TABLE' AND table_schema <> 'information_schema' AND table_schema NOT LIKE 'pg_%' + AND table_name NOT IN (?) ` // sqlInspectColumnsQuery retrieves column definitions for the specified table. 
diff --git a/internal/dbtest/db_test.go b/internal/dbtest/db_test.go index e93ce5843..c3ad08565 100644 --- a/internal/dbtest/db_test.go +++ b/internal/dbtest/db_test.go @@ -225,11 +225,12 @@ func testEachDB(t *testing.T, f func(t *testing.T, dbName string, db *bun.DB)) { } // testEachDialect allows testing dialect-specific functionality that does not require database interactions. -func testEachDialect(t *testing.T, f func(t *testing.T, dialectName string, dialect func() schema.Dialect)) { +func testEachDialect(t *testing.T, f func(t *testing.T, dialectName string, dialect schema.Dialect)) { for _, newDialect := range allDialects { - name := newDialect().Name().String() + d := newDialect() + name := d.Name().String() t.Run(name, func(t *testing.T) { - f(t, name, newDialect) + f(t, name, d) }) } } diff --git a/internal/dbtest/inspect_test.go b/internal/dbtest/inspect_test.go index 9b092ef4d..bd17a8a32 100644 --- a/internal/dbtest/inspect_test.go +++ b/internal/dbtest/inspect_test.go @@ -20,7 +20,7 @@ func TestDatabaseInspector_Inspect(t *testing.T) { Title string `bun:",notnull,unique:title_author"` Locale string `bun:",type:varchar(5),default:'en-GB'"` Pages int8 `bun:"page_count,notnull,default:1"` - Count int32 `bun:"book_count,autoincrement"` + Count int32 `bun:"book_count,autoincrement"` } testEachDB(t, func(t *testing.T, dbName string, db *bun.DB) { @@ -34,7 +34,7 @@ func TestDatabaseInspector_Inspect(t *testing.T) { } ctx := context.Background() - createTableOrSkip(t, ctx, db, (*Book)(nil)) + mustResetModel(t, ctx, db, (*Book)(nil)) dbInspector := dialect.Inspector(db) want := schema.State{ @@ -105,7 +105,7 @@ func TestDatabaseInspector_Inspect(t *testing.T) { func getDatabaseInspectorOrSkip(tb testing.TB, db *bun.DB) schema.Inspector { dialect := db.Dialect() if id, ok := dialect.(inspector.Dialect); ok { - return id.Inspector(db) + return id.Inspector(db, migrationsTable, migrationLocksTable) } tb.Skipf("%q dialect does not implement inspector.Dialect", 
dialect.Name()) return nil diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index bab42e9b3..c9b5197cf 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -160,7 +160,8 @@ func testMigrateUpError(t *testing.T, db *bun.DB) { require.Equal(t, []string{"down2", "down1"}, history) } -func TestAutoMigrator_Migrate(t *testing.T) { +func TestAutoMigrator_Run(t *testing.T) { + tests := []struct { fn func(t *testing.T, db *bun.DB) }{ @@ -190,14 +191,17 @@ func testRenameTable(t *testing.T, db *bun.DB) { // Arrange ctx := context.Background() di := getDatabaseInspectorOrSkip(t, db) - createTableOrSkip(t, ctx, db, (*initial)(nil)) + mustResetModel(t, ctx, db, (*initial)(nil)) + mustDropTableOnCleanup(t, ctx, db, (*changed)(nil)) - m, err := migrate.NewAutoMigrator(db) + m, err := migrate.NewAutoMigrator(db, + migrate.WithTableNameAuto(migrationsTable), + migrate.WithLocksTableNameAuto(migrationLocksTable), + migrate.WithModel((*changed)(nil))) require.NoError(t, err) - m.SetModels((*changed)(nil)) // Act - err = m.Migrate(ctx) + err = m.Run(ctx) require.NoError(t, err) // Assert @@ -209,27 +213,13 @@ func testRenameTable(t *testing.T, db *bun.DB) { require.Equal(t, "changed", tables[0].Name) } -func createTableOrSkip(tb testing.TB, ctx context.Context, db *bun.DB, model interface{}) { - tb.Helper() - if _, err := db.NewCreateTable().IfNotExists().Model(model).Exec(ctx); err != nil { - tb.Skip("setup failed:", err) - } - tb.Cleanup(func() { - if _, err := db.NewDropTable().IfExists().Model(model).Exec(ctx); err != nil { - tb.Log("cleanup:", err) - } - }) -} - func TestDetector_Diff(t *testing.T) { tests := []struct { - name string - states func(testing.TB, context.Context, func() schema.Dialect) (stateDb schema.State, stateModel schema.State) + states func(testing.TB, context.Context, schema.Dialect) (stateDb schema.State, stateModel schema.State) operations []migrate.Operation }{ { - name: "find a renamed table", 
- states: renamedTableStates, + states: testDetectRenamedTable, operations: []migrate.Operation{ &migrate.RenameTable{ From: "books", @@ -239,13 +229,9 @@ func TestDetector_Diff(t *testing.T) { }, } - testEachDialect(t, func(t *testing.T, dialectName string, dialect func() schema.Dialect) { - if dialectName != "pg" { - t.Skip() - } - + testEachDialect(t, func(t *testing.T, dialectName string, dialect schema.Dialect) { for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { + t.Run(funcName(tt.states), func(t *testing.T) { ctx := context.Background() var d migrate.Detector stateDb, stateModel := tt.states(t, ctx, dialect) @@ -258,7 +244,7 @@ func TestDetector_Diff(t *testing.T) { }) } -func renamedTableStates(tb testing.TB, ctx context.Context, dialect func() schema.Dialect) (s1, s2 schema.State) { +func testDetectRenamedTable(tb testing.TB, ctx context.Context, dialect schema.Dialect) (s1, s2 schema.State) { type Book struct { bun.BaseModel @@ -279,17 +265,20 @@ func renamedTableStates(tb testing.TB, ctx context.Context, dialect func() schem Title string `bun:"title,notnull"` Pages int `bun:"page_count,notnull,default:0"` } - return getState(tb, ctx, dialect(), + return getState(tb, ctx, dialect, (*Author)(nil), (*Book)(nil), - ), getState(tb, ctx, dialect(), + ), getState(tb, ctx, dialect, (*Author)(nil), (*BookRenamed)(nil), ) } func getState(tb testing.TB, ctx context.Context, dialect schema.Dialect, models ...interface{}) schema.State { - inspector := schema.NewInspector(dialect, models...) + tables := schema.NewTables(dialect) + tables.Register(models...) 
+ + inspector := schema.NewInspector(tables) state, err := inspector.Inspect(ctx) if err != nil { tb.Skip("get state: %w", err) diff --git a/migrate/auto.go b/migrate/auto.go index 8453e069d..fac7dcf97 100644 --- a/migrate/auto.go +++ b/migrate/auto.go @@ -3,62 +3,167 @@ package migrate import ( "context" "fmt" + "strings" "github.com/uptrace/bun" + "github.com/uptrace/bun/migrate/sqlschema" "github.com/uptrace/bun/schema" "github.com/uptrace/bun/schema/inspector" ) +type AutoMigratorOption func(m *AutoMigrator) + +// WithModel adds a bun.Model to the scope of migrations. +func WithModel(models ...interface{}) AutoMigratorOption { + return func(m *AutoMigrator) { + m.includeModels = append(m.includeModels, models...) + } +} + +// WithExcludeTable tells the AutoMigrator to ignore a table in the database. +func WithExcludeTable(tables ...string) AutoMigratorOption { + return func(m *AutoMigrator) { + m.excludeTables = append(m.excludeTables, tables...) + } +} + +// WithTableNameAuto overrides default migrations table name. +func WithTableNameAuto(table string) AutoMigratorOption { + return func(m *AutoMigrator) { + m.table = table + m.migratorOpts = append(m.migratorOpts, WithTableName(table)) + } +} + +// WithLocksTableNameAuto overrides default migration locks table name. +func WithLocksTableNameAuto(table string) AutoMigratorOption { + return func(m *AutoMigrator) { + m.locksTable = table + m.migratorOpts = append(m.migratorOpts, WithLocksTableName(table)) + } +} + +// WithMarkAppliedOnSuccessAuto sets the migrator to only mark migrations as applied/unapplied +// when their up/down is successful. +func WithMarkAppliedOnSuccessAuto(enabled bool) AutoMigratorOption { + return func(m *AutoMigrator) { + m.migratorOpts = append(m.migratorOpts, WithMarkAppliedOnSuccess(enabled)) + } +} + type AutoMigrator struct { db *bun.DB - // models limit the set of tables considered for the migration. 
- models []interface{} - // dbInspector creates the current state for the target database. dbInspector schema.Inspector // modelInspector creates the desired state based on the model definitions. modelInspector schema.Inspector + + // schemaMigrator executes ALTER TABLE queries. + schemaMigrator sqlschema.Migrator + + table string + locksTable string + + // includeModels define the migration scope. + includeModels []interface{} + + // excludeTables are excluded from database inspection. + excludeTables []string + + // migratorOpts are passed to Migrator constructor. + migratorOpts []MigratorOption } -func NewAutoMigrator(db *bun.DB) (*AutoMigrator, error) { +func NewAutoMigrator(db *bun.DB, opts ...AutoMigratorOption) (*AutoMigrator, error) { + am := &AutoMigrator{ + db: db, + table: defaultTable, + locksTable: defaultLocksTable, + } + + for _, opt := range opts { + opt(am) + } + am.excludeTables = append(am.excludeTables, am.table, am.locksTable) + + schemaMigrator, err := sqlschema.NewMigrator(db) + if err != nil { + return nil, err + } + am.schemaMigrator = schemaMigrator + + tables := schema.NewTables(db.Dialect()) + tables.Register(am.includeModels...) + am.modelInspector = schema.NewInspector(tables) + dialect := db.Dialect() - withInspector, ok := dialect.(inspector.Dialect) + inspectorDialect, ok := dialect.(inspector.Dialect) if !ok { return nil, fmt.Errorf("%q dialect does not implement inspector.Dialect", dialect.Name()) } + am.dbInspector = inspectorDialect.Inspector(db, am.excludeTables...) - return &AutoMigrator{ - db: db, - dbInspector: withInspector.Inspector(db), - }, nil -} - -func (am *AutoMigrator) SetModels(models ...interface{}) { - am.models = models + return am, nil } func (am *AutoMigrator) diff(ctx context.Context) (Changeset, error) { + var detector Detector var changes Changeset var err error - // TODO: do on "SetModels" - am.modelInspector = schema.NewInspector(am.db.Dialect(), am.models...) 
- - _, err = am.dbInspector.Inspect(ctx) + got, err := am.dbInspector.Inspect(ctx) if err != nil { return changes, err } - _, err = am.modelInspector.Inspect(ctx) + want, err := am.modelInspector.Inspect(ctx) if err != nil { return changes, err } - return changes, nil + return detector.Diff(got, want), nil +} + +// Migrate writes required changes to a new migration file and runs the migration. +// This will create an entry in the migrations table, making it possible to revert +// the changes with Migrator.Rollback(). +func (am *AutoMigrator) Migrate(ctx context.Context, opts ...MigrationOption) error { + changeset, err := am.diff(ctx) + if err != nil { + return fmt.Errorf("auto migrate: %w", err) + } + + migrations := NewMigrations() + name, _ := genMigrationName("auto") + migrations.Add(Migration{ + Name: name, + Up: changeset.Up(am.schemaMigrator), + Down: changeset.Down(am.schemaMigrator), + Comment: "Changes detected by bun.migrate.AutoMigrator", + }) + + migrator := NewMigrator(am.db, migrations, am.migratorOpts...) + if err := migrator.Init(ctx); err != nil { + return fmt.Errorf("auto migrate: %w", err) + } + + if _, err := migrator.Migrate(ctx, opts...); err != nil { + return fmt.Errorf("auto migrate: %w", err) + } + return nil } -func (am *AutoMigrator) Migrate(ctx context.Context) error { +// Run runs required migrations in-place and without creating a database entry. +func (am *AutoMigrator) Run(ctx context.Context) error { + changeset, err := am.diff(ctx) + if err != nil { + return fmt.Errorf("run auto migrate: %w", err) + } + up := changeset.Up(am.schemaMigrator) + if err := up(ctx, am.db); err != nil { + return fmt.Errorf("run auto migrate: %w", err) + } return nil } @@ -68,7 +173,9 @@ func (am *AutoMigrator) Migrate(ctx context.Context) error { // Apart from storing the function to execute the change, // it knows how to *write* the corresponding code, and what the reverse operation is.
type Operation interface { - Func() MigrationFunc + Func(sqlschema.Migrator) MigrationFunc + // GetReverse returns an operation that can revert the current one. + GetReverse() Operation } type RenameTable struct { @@ -76,10 +183,16 @@ type RenameTable struct { To string } -func (rt *RenameTable) Func() MigrationFunc { +func (rt *RenameTable) Func(m sqlschema.Migrator) MigrationFunc { return func(ctx context.Context, db *bun.DB) error { - db.Dialect() - return nil + return m.RenameTable(ctx, rt.From, rt.To) + } +} + +func (rt *RenameTable) GetReverse() Operation { + return &RenameTable{ + From: rt.To, + To: rt.From, } } @@ -88,6 +201,8 @@ type Changeset struct { operations []Operation } +var _ Operation = (*Changeset)(nil) + func (c Changeset) Operations() []Operation { return c.operations } @@ -96,6 +211,36 @@ func (c *Changeset) Add(op Operation) { c.operations = append(c.operations, op) } +func (c *Changeset) Func(m sqlschema.Migrator) MigrationFunc { + return func(ctx context.Context, db *bun.DB) error { + for _, op := range c.operations { + fn := op.Func(m) + if err := fn(ctx, db); err != nil { + return err + } + } + return nil + } +} + +func (c *Changeset) GetReverse() Operation { + var reverse Changeset + for _, op := range c.operations { + reverse.Add(op.GetReverse()) + } + return &reverse +} + +// Up is syntactic sugar. +func (c *Changeset) Up(m sqlschema.Migrator) MigrationFunc { + return c.Func(m) +} + +// Down is syntactic sugar. 
+func (c *Changeset) Down(m sqlschema.Migrator) MigrationFunc { + return c.GetReverse().Func(m) +} + type Detector struct{} func (d *Detector) Diff(got, want schema.State) Changeset { @@ -177,6 +322,17 @@ func (set tableSet) clone() tableSet { return res } +func (set tableSet) String() string { + var s strings.Builder + for k := range set.underlying { + if s.Len() > 0 { + s.WriteString(", ") + } + s.WriteString(k) + } + return s.String() +} + // signature is a set of column definitions, which allows "relation/name-agnostic" comparison between them; // meaning that two columns are considered equal if their types are the same. type signature struct { diff --git a/migrate/migrator.go b/migrate/migrator.go index e6d70e39f..b14ad64ca 100644 --- a/migrate/migrator.go +++ b/migrate/migrator.go @@ -12,14 +12,21 @@ import ( "github.com/uptrace/bun" ) +const ( + defaultTable = "bun_migrations" + defaultLocksTable = "bun_migration_locks" +) + type MigratorOption func(m *Migrator) +// WithTableName overrides default migrations table name. func WithTableName(table string) MigratorOption { return func(m *Migrator) { m.table = table } } +// WithLocksTableName overrides default migration locks table name. func WithLocksTableName(table string) MigratorOption { return func(m *Migrator) { m.locksTable = table @@ -27,7 +34,7 @@ func WithLocksTableName(table string) MigratorOption { } // WithMarkAppliedOnSuccess sets the migrator to only mark migrations as applied/unapplied -// when their up/down is successful +// when their up/down is successful. 
func WithMarkAppliedOnSuccess(enabled bool) MigratorOption { return func(m *Migrator) { m.markAppliedOnSuccess = enabled @@ -52,8 +59,8 @@ func NewMigrator(db *bun.DB, migrations *Migrations, opts ...MigratorOption) *Mi ms: migrations.ms, - table: "bun_migrations", - locksTable: "bun_migration_locks", + table: defaultTable, + locksTable: defaultLocksTable, } for _, opt := range opts { opt(m) @@ -246,7 +253,7 @@ func (m *Migrator) CreateGoMigration( opt(cfg) } - name, err := m.genMigrationName(name) + name, err := genMigrationName(name) if err != nil { return nil, err } @@ -329,7 +336,7 @@ func (m *Migrator) createSQL(ctx context.Context, fname string, transactional bo var nameRE = regexp.MustCompile(`^[0-9a-z_\-]+$`) -func (m *Migrator) genMigrationName(name string) (string, error) { +func genMigrationName(name string) (string, error) { const timeFormat = "20060102150405" if name == "" { diff --git a/migrate/sqlschema/migrate.go b/migrate/sqlschema/migrate.go new file mode 100644 index 000000000..0b51e2b02 --- /dev/null +++ b/migrate/sqlschema/migrate.go @@ -0,0 +1,33 @@ +package sqlschema + +import ( + "context" + "database/sql" + "fmt" + + "github.com/uptrace/bun" + "github.com/uptrace/bun/schema" +) + +type MigratorDialect interface { + schema.Dialect + Migrator(*sql.DB) Migrator +} + +type Migrator interface { + RenameTable(ctx context.Context, oldName, newName string) error +} + +type migrator struct { + Migrator +} + +func NewMigrator(db *bun.DB) (Migrator, error) { + md, ok := db.Dialect().(MigratorDialect) + if !ok { + return nil, fmt.Errorf("%q dialect does not implement sqlschema.Migrator", db.Dialect().Name()) + } + return &migrator{ + Migrator: md.Migrator(db.DB), + }, nil +} diff --git a/schema/inspector.go b/schema/inspector.go index 464cfa81f..e767742a8 100644 --- a/schema/inspector.go +++ b/schema/inspector.go @@ -14,8 +14,8 @@ type State struct { } type TableDef struct { - Schema string - Name string + Schema string + Name string Columns 
map[string]ColumnDef } @@ -29,48 +29,39 @@ type ColumnDef struct { } type SchemaInspector struct { - dialect Dialect + tables *Tables } var _ Inspector = (*SchemaInspector)(nil) -func NewInspector(dialect Dialect, models ...interface{}) *SchemaInspector { - dialect.Tables().Register(models...) +func NewInspector(tables *Tables) *SchemaInspector { return &SchemaInspector{ - dialect: dialect, + tables: tables, } } +// Inspect creates the current project state from the passed bun.Models. +// Do not recycle SchemaInspector for different sets of models, as older models will not be de-registered before the next run. func (si *SchemaInspector) Inspect(ctx context.Context) (State, error) { var state State - for _, t := range si.dialect.Tables().All() { + for _, t := range si.tables.All() { columns := make(map[string]ColumnDef) for _, f := range t.Fields { columns[f.Name] = ColumnDef{ - SQLType: f.CreateTableSQLType, - DefaultValue: f.SQLDefault, - IsPK: f.IsPK, - IsNullable: !f.NotNull, + SQLType: strings.ToLower(f.CreateTableSQLType), + DefaultValue: f.SQLDefault, + IsPK: f.IsPK, + IsNullable: !f.NotNull, IsAutoIncrement: f.AutoIncrement, - IsIdentity: f.Identity, + IsIdentity: f.Identity, } } - schema, table := splitTableNameTag(si.dialect, t.Name) state.Tables = append(state.Tables, TableDef{ - Schema: schema, - Name: table, + Schema: t.Schema, + Name: t.Name, Columns: columns, }) } return state, nil } - -// splitTableNameTag -func splitTableNameTag(d Dialect, nameTag string) (string, string) { - schema, table := d.DefaultSchema(), nameTag - if schemaTable := strings.Split(nameTag, "."); len(schemaTable) == 2 { - schema, table = schemaTable[0], schemaTable[1] - } - return schema, table -} \ No newline at end of file diff --git a/schema/inspector/dialect.go b/schema/inspector/dialect.go index 701300da9..beb23eea5 100644 --- a/schema/inspector/dialect.go +++ b/schema/inspector/dialect.go @@ -7,5 +7,5 @@ import ( type Dialect interface { schema.Dialect - Inspector(db
*bun.DB) schema.Inspector + Inspector(db *bun.DB, excludeTables ...string) schema.Inspector } diff --git a/schema/table.go b/schema/table.go index c8e71e38f..f770a2f76 100644 --- a/schema/table.go +++ b/schema/table.go @@ -45,6 +45,7 @@ type Table struct { TypeName string ModelName string + Schema string Name string SQLName Safe SQLNameForSelects Safe @@ -371,10 +372,18 @@ func (t *Table) processBaseModelField(f reflect.StructField) { } if tag.Name != "" { + schema, _ := t.schemaFromTagName(tag.Name) + t.Schema = schema + + // Eventually, we should only assign the "table" portion as the table name, + // which will also require a change in how the table name is appended to queries. + // Until that is done, set table name to tag.Name. t.setName(tag.Name) } if s, ok := tag.Option("table"); ok { + schema, _ := t.schemaFromTagName(tag.Name) + t.Schema = schema t.setName(s) } @@ -388,6 +397,17 @@ func (t *Table) processBaseModelField(f reflect.StructField) { } } +// schemaFromTagName splits the bun.BaseModel tag name into schema and table name +// in case it is specified in the "schema"."table" format. +// Assume default schema if one isn't explicitly specified. 
+func (t *Table) schemaFromTagName(name string) (string, string) { + schema, table := t.dialect.DefaultSchema(), name + if schemaTable := strings.Split(name, "."); len(schemaTable) == 2 { + schema, table = schemaTable[0], schemaTable[1] + } + return schema, table +} + // nolint func (t *Table) newField(sf reflect.StructField, tag tagparser.Tag) *Field { sqlName := internal.Underscore(sf.Name) From 27a5a2b701cb10fcc08b6d6af4f146f56b81b4e0 Mon Sep 17 00:00:00 2001 From: bevzzz Date: Wed, 1 Nov 2023 19:55:21 +0100 Subject: [PATCH 07/55] chore: re-organize packages --- dialect/pgdialect/dialect.go | 3 +- dialect/pgdialect/inspector.go | 16 ++-- internal/dbtest/inspect_test.go | 29 ++---- internal/dbtest/migrate_test.go | 16 ++-- migrate/auto.go | 89 +++++-------------- migrate/sqlschema/inspector.go | 71 +++++++++++++++ migrate/sqlschema/{migrate.go => migrator.go} | 0 migrate/sqlschema/state.go | 62 +++++++++++++ 8 files changed, 182 insertions(+), 104 deletions(-) create mode 100644 migrate/sqlschema/inspector.go rename migrate/sqlschema/{migrate.go => migrator.go} (100%) create mode 100644 migrate/sqlschema/state.go diff --git a/dialect/pgdialect/dialect.go b/dialect/pgdialect/dialect.go index 022c74bd0..73355b6c0 100644 --- a/dialect/pgdialect/dialect.go +++ b/dialect/pgdialect/dialect.go @@ -12,7 +12,6 @@ import ( "github.com/uptrace/bun/dialect/sqltype" "github.com/uptrace/bun/migrate/sqlschema" "github.com/uptrace/bun/schema" - "github.com/uptrace/bun/schema/inspector" ) var pgDialect = New() @@ -32,7 +31,7 @@ type Dialect struct { } var _ schema.Dialect = (*Dialect)(nil) -var _ inspector.Dialect = (*Dialect)(nil) +var _ sqlschema.InspectorDialect = (*Dialect)(nil) var _ sqlschema.MigratorDialect = (*Dialect)(nil) func New() *Dialect { diff --git a/dialect/pgdialect/inspector.go b/dialect/pgdialect/inspector.go index 5aa2995f3..db457de21 100644 --- a/dialect/pgdialect/inspector.go +++ b/dialect/pgdialect/inspector.go @@ -7,10 +7,10 @@ import ( 
"github.com/uptrace/bun" "github.com/uptrace/bun/dialect/sqltype" - "github.com/uptrace/bun/schema" + "github.com/uptrace/bun/migrate/sqlschema" ) -func (d *Dialect) Inspector(db *bun.DB, excludeTables ...string) schema.Inspector { +func (d *Dialect) Inspector(db *bun.DB, excludeTables ...string) sqlschema.Inspector { return newInspector(db, excludeTables...) } @@ -19,14 +19,14 @@ type Inspector struct { excludeTables []string } -var _ schema.Inspector = (*Inspector)(nil) +var _ sqlschema.Inspector = (*Inspector)(nil) func newInspector(db *bun.DB, excludeTables ...string) *Inspector { return &Inspector{db: db, excludeTables: excludeTables} } -func (in *Inspector) Inspect(ctx context.Context) (schema.State, error) { - var state schema.State +func (in *Inspector) Inspect(ctx context.Context) (sqlschema.State, error) { + var state sqlschema.State exclude := in.excludeTables if len(exclude) == 0 { @@ -44,7 +44,7 @@ func (in *Inspector) Inspect(ctx context.Context) (schema.State, error) { if err := in.db.NewRaw(sqlInspectColumnsQuery, table.Schema, table.Name).Scan(ctx, &columns); err != nil { return state, err } - colDefs := make(map[string]schema.ColumnDef) + colDefs := make(map[string]sqlschema.Column) for _, c := range columns { dataType := fromDatabaseType(c.DataType) if strings.EqualFold(dataType, sqltype.VarChar) && c.VarcharLen > 0 { @@ -56,7 +56,7 @@ func (in *Inspector) Inspect(ctx context.Context) (schema.State, error) { def = "" } - colDefs[c.Name] = schema.ColumnDef{ + colDefs[c.Name] = sqlschema.Column{ SQLType: strings.ToLower(dataType), IsPK: c.IsPK, IsNullable: c.IsNullable, @@ -66,7 +66,7 @@ func (in *Inspector) Inspect(ctx context.Context) (schema.State, error) { } } - state.Tables = append(state.Tables, schema.TableDef{ + state.Tables = append(state.Tables, sqlschema.Table{ Schema: table.Schema, Name: table.Name, Columns: colDefs, diff --git a/internal/dbtest/inspect_test.go b/internal/dbtest/inspect_test.go index bd17a8a32..53c8905cc 100644 --- 
a/internal/dbtest/inspect_test.go +++ b/internal/dbtest/inspect_test.go @@ -6,8 +6,7 @@ import ( "github.com/stretchr/testify/require" "github.com/uptrace/bun" - "github.com/uptrace/bun/schema" - "github.com/uptrace/bun/schema/inspector" + "github.com/uptrace/bun/migrate/sqlschema" ) func TestDatabaseInspector_Inspect(t *testing.T) { @@ -24,25 +23,20 @@ func TestDatabaseInspector_Inspect(t *testing.T) { } testEachDB(t, func(t *testing.T, dbName string, db *bun.DB) { - var dialect inspector.Dialect - dbDialect := db.Dialect() - - if id, ok := dbDialect.(inspector.Dialect); ok { - dialect = id - } else { - t.Skipf("%q dialect does not implement inspector.Dialect", dbDialect.Name()) + dbInspector, err := sqlschema.NewInspector(db) + if err != nil { + t.Skip(err) } ctx := context.Background() mustResetModel(t, ctx, db, (*Book)(nil)) - dbInspector := dialect.Inspector(db) - want := schema.State{ - Tables: []schema.TableDef{ + want := sqlschema.State{ + Tables: []sqlschema.Table{ { Schema: "public", Name: "books", - Columns: map[string]schema.ColumnDef{ + Columns: map[string]sqlschema.Column{ "isbn": { SQLType: "bigint", IsPK: true, @@ -101,12 +95,3 @@ func TestDatabaseInspector_Inspect(t *testing.T) { require.Equal(t, want, got) }) } - -func getDatabaseInspectorOrSkip(tb testing.TB, db *bun.DB) schema.Inspector { - dialect := db.Dialect() - if id, ok := dialect.(inspector.Dialect); ok { - return id.Inspector(db, migrationsTable, migrationLocksTable) - } - tb.Skipf("%q dialect does not implement inspector.Dialect", dialect.Name()) - return nil -} diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index c9b5197cf..12f310b36 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -8,6 +8,7 @@ import ( "github.com/stretchr/testify/require" "github.com/uptrace/bun" "github.com/uptrace/bun/migrate" + "github.com/uptrace/bun/migrate/sqlschema" "github.com/uptrace/bun/schema" ) @@ -190,7 +191,10 @@ func testRenameTable(t 
*testing.T, db *bun.DB) { // Arrange ctx := context.Background() - di := getDatabaseInspectorOrSkip(t, db) + dbInspector, err := sqlschema.NewInspector(db) + if err != nil { + t.Skip(err) + } mustResetModel(t, ctx, db, (*initial)(nil)) mustDropTableOnCleanup(t, ctx, db, (*changed)(nil)) @@ -205,7 +209,7 @@ func testRenameTable(t *testing.T, db *bun.DB) { require.NoError(t, err) // Assert - state, err := di.Inspect(ctx) + state, err := dbInspector.Inspect(ctx) require.NoError(t, err) tables := state.Tables @@ -215,7 +219,7 @@ func testRenameTable(t *testing.T, db *bun.DB) { func TestDetector_Diff(t *testing.T) { tests := []struct { - states func(testing.TB, context.Context, schema.Dialect) (stateDb schema.State, stateModel schema.State) + states func(testing.TB, context.Context, schema.Dialect) (stateDb sqlschema.State, stateModel sqlschema.State) operations []migrate.Operation }{ { @@ -244,7 +248,7 @@ func TestDetector_Diff(t *testing.T) { }) } -func testDetectRenamedTable(tb testing.TB, ctx context.Context, dialect schema.Dialect) (s1, s2 schema.State) { +func testDetectRenamedTable(tb testing.TB, ctx context.Context, dialect schema.Dialect) (s1, s2 sqlschema.State) { type Book struct { bun.BaseModel @@ -274,11 +278,11 @@ func testDetectRenamedTable(tb testing.TB, ctx context.Context, dialect schema.D ) } -func getState(tb testing.TB, ctx context.Context, dialect schema.Dialect, models ...interface{}) schema.State { +func getState(tb testing.TB, ctx context.Context, dialect schema.Dialect, models ...interface{}) sqlschema.State { tables := schema.NewTables(dialect) tables.Register(models...) 
- inspector := schema.NewInspector(tables) + inspector := sqlschema.NewSchemaInspector(tables) state, err := inspector.Inspect(ctx) if err != nil { tb.Skip("get state: %w", err) diff --git a/migrate/auto.go b/migrate/auto.go index fac7dcf97..c137d77e9 100644 --- a/migrate/auto.go +++ b/migrate/auto.go @@ -8,7 +8,6 @@ import ( "github.com/uptrace/bun" "github.com/uptrace/bun/migrate/sqlschema" "github.com/uptrace/bun/schema" - "github.com/uptrace/bun/schema/inspector" ) type AutoMigratorOption func(m *AutoMigrator) @@ -55,13 +54,13 @@ type AutoMigrator struct { db *bun.DB // dbInspector creates the current state for the target database. - dbInspector schema.Inspector + dbInspector sqlschema.Inspector // modelInspector creates the desired state based on the model definitions. - modelInspector schema.Inspector + modelInspector sqlschema.Inspector - // schemaMigrator executes ALTER TABLE queries. - schemaMigrator sqlschema.Migrator + // dbMigrator executes ALTER TABLE queries. + dbMigrator sqlschema.Migrator table string locksTable string @@ -88,22 +87,21 @@ func NewAutoMigrator(db *bun.DB, opts ...AutoMigratorOption) (*AutoMigrator, err } am.excludeTables = append(am.excludeTables, am.table, am.locksTable) - schemaMigrator, err := sqlschema.NewMigrator(db) + dbInspector, err := sqlschema.NewInspector(db, am.excludeTables...) if err != nil { return nil, err } - am.schemaMigrator = schemaMigrator + am.dbInspector = dbInspector + + dbMigrator, err := sqlschema.NewMigrator(db) + if err != nil { + return nil, err + } + am.dbMigrator = dbMigrator tables := schema.NewTables(db.Dialect()) tables.Register(am.includeModels...) - am.modelInspector = schema.NewInspector(tables) - - dialect := db.Dialect() - inspectorDialect, ok := dialect.(inspector.Dialect) - if !ok { - return nil, fmt.Errorf("%q dialect does not implement inspector.Dialect", dialect.Name()) - } - am.dbInspector = inspectorDialect.Inspector(db, am.excludeTables...) 
+ am.modelInspector = sqlschema.NewSchemaInspector(tables) return am, nil } @@ -138,8 +136,8 @@ func (am *AutoMigrator) Migrate(ctx context.Context, opts ...MigrationOption) er name, _ := genMigrationName("auto") migrations.Add(Migration{ Name: name, - Up: changeset.Up(am.schemaMigrator), - Down: changeset.Down(am.schemaMigrator), + Up: changeset.Up(am.dbMigrator), + Down: changeset.Down(am.dbMigrator), Comment: "Changes detected by bun.migrate.AutoMigrator", }) @@ -160,7 +158,7 @@ func (am *AutoMigrator) Run(ctx context.Context) error { if err != nil { return fmt.Errorf("run auto migrate: %w", err) } - up := changeset.Up(am.schemaMigrator) + up := changeset.Up(am.dbMigrator) if err := up(ctx, am.db); err != nil { return fmt.Errorf("run auto migrate: %w", err) } @@ -243,7 +241,7 @@ func (c *Changeset) Down(m sqlschema.Migrator) MigrationFunc { type Detector struct{} -func (d *Detector) Diff(got, want schema.State) Changeset { +func (d *Detector) Diff(got, want sqlschema.State) Changeset { var changes Changeset // Detect renamed models @@ -254,7 +252,7 @@ func (d *Detector) Diff(got, want schema.State) Changeset { for _, added := range addedModels.Values() { removedModels := oldModels.Sub(newModels) for _, removed := range removedModels.Values() { - if !haveSameSignature(added, removed) { + if !sqlschema.EqualSignatures(added, removed) { continue } changes.Add(&RenameTable{ @@ -267,21 +265,14 @@ func (d *Detector) Diff(got, want schema.State) Changeset { return changes } -// haveSameSignature determines if two tables have the same "signature". -func haveSameSignature(t1, t2 schema.TableDef) bool { - sig1 := newSignature(t1) - sig2 := newSignature(t2) - return sig1.Equals(sig2) -} - // tableSet stores unique table definitions. 
type tableSet struct { - underlying map[string]schema.TableDef + underlying map[string]sqlschema.Table } -func newTableSet(initial ...schema.TableDef) tableSet { +func newTableSet(initial ...sqlschema.Table) tableSet { set := tableSet{ - underlying: make(map[string]schema.TableDef), + underlying: make(map[string]sqlschema.Table), } for _, t := range initial { set.Add(t) @@ -289,7 +280,7 @@ func newTableSet(initial ...schema.TableDef) tableSet { return set } -func (set tableSet) Add(t schema.TableDef) { +func (set tableSet) Add(t sqlschema.Table) { set.underlying[t.Name] = t } @@ -297,7 +288,7 @@ func (set tableSet) Remove(s string) { delete(set.underlying, s) } -func (set tableSet) Values() (tables []schema.TableDef) { +func (set tableSet) Values() (tables []sqlschema.Table) { for _, t := range set.underlying { tables = append(tables, t) } @@ -332,37 +323,3 @@ func (set tableSet) String() string { } return s.String() } - -// signature is a set of column definitions, which allows "relation/name-agnostic" comparison between them; -// meaning that two columns are considered equal if their types are the same. -type signature struct { - - // underlying stores the number of occurences for each unique column type. - // It helps to account for the fact that a table might have multiple columns that have the same type. - underlying map[schema.ColumnDef]int -} - -func newSignature(t schema.TableDef) signature { - s := signature{ - underlying: make(map[schema.ColumnDef]int), - } - s.scan(t) - return s -} - -// scan iterates over table's field and counts occurrences of each unique column definition. -func (s *signature) scan(t schema.TableDef) { - for _, c := range t.Columns { - s.underlying[c]++ - } -} - -// Equals returns true if 2 signatures share an identical set of columns. 
-func (s *signature) Equals(other signature) bool { - for k, count := range s.underlying { - if countOther, ok := other.underlying[k]; !ok || countOther != count { - return false - } - } - return true -} diff --git a/migrate/sqlschema/inspector.go b/migrate/sqlschema/inspector.go new file mode 100644 index 000000000..7974b0c25 --- /dev/null +++ b/migrate/sqlschema/inspector.go @@ -0,0 +1,71 @@ +package sqlschema + +import ( + "context" + "fmt" + "strings" + + "github.com/uptrace/bun" + "github.com/uptrace/bun/schema" +) + +type InspectorDialect interface { + schema.Dialect + Inspector(db *bun.DB, excludeTables ...string) Inspector +} + +type Inspector interface { + Inspect(ctx context.Context) (State, error) +} + +type inspector struct { + Inspector +} + +func NewInspector(db *bun.DB, excludeTables ...string) (Inspector, error) { + dialect, ok := (db.Dialect()).(InspectorDialect) + if !ok { + return nil, fmt.Errorf("%s does not implement sqlschema.Inspector", db.Dialect().Name()) + } + return &inspector{ + Inspector: dialect.Inspector(db, excludeTables...), + }, nil +} + +type SchemaInspector struct { + tables *schema.Tables +} + +var _ Inspector = (*SchemaInspector)(nil) + +func NewSchemaInspector(tables *schema.Tables) *SchemaInspector { + return &SchemaInspector{ + tables: tables, + } +} + +// Inspect creates the current project state from the passed bun.Models. +// Do not recycle SchemaInspector for different sets of models, as older models will not be de-registerred before the next run. 
+func (si *SchemaInspector) Inspect(ctx context.Context) (State, error) { + var state State + for _, t := range si.tables.All() { + columns := make(map[string]Column) + for _, f := range t.Fields { + columns[f.Name] = Column{ + SQLType: strings.ToLower(f.CreateTableSQLType), + DefaultValue: f.SQLDefault, + IsPK: f.IsPK, + IsNullable: !f.NotNull, + IsAutoIncrement: f.AutoIncrement, + IsIdentity: f.Identity, + } + } + + state.Tables = append(state.Tables, Table{ + Schema: t.Schema, + Name: t.Name, + Columns: columns, + }) + } + return state, nil +} diff --git a/migrate/sqlschema/migrate.go b/migrate/sqlschema/migrator.go similarity index 100% rename from migrate/sqlschema/migrate.go rename to migrate/sqlschema/migrator.go diff --git a/migrate/sqlschema/state.go b/migrate/sqlschema/state.go new file mode 100644 index 000000000..8b89368a4 --- /dev/null +++ b/migrate/sqlschema/state.go @@ -0,0 +1,62 @@ +package sqlschema + +type State struct { + Tables []Table +} + +type Table struct { + Schema string + Name string + Columns map[string]Column +} + +// Column stores attributes of a database column. +type Column struct { + SQLType string + DefaultValue string + IsPK bool + IsNullable bool + IsAutoIncrement bool + IsIdentity bool +} + +// EqualSignatures determines if two tables have the same "signature". +func EqualSignatures(t1, t2 Table) bool { + sig1 := newSignature(t1) + sig2 := newSignature(t2) + return sig1.Equals(sig2) +} + +// signature is a set of column definitions, which allows "relation/name-agnostic" comparison between them; +// meaning that two columns are considered equal if their types are the same. +type signature struct { + + // underlying stores the number of occurences for each unique column type. + // It helps to account for the fact that a table might have multiple columns that have the same type. 
+ underlying map[Column]int +} + +func newSignature(t Table) signature { + s := signature{ + underlying: make(map[Column]int), + } + s.scan(t) + return s +} + +// scan iterates over table's field and counts occurrences of each unique column definition. +func (s *signature) scan(t Table) { + for _, c := range t.Columns { + s.underlying[c]++ + } +} + +// Equals returns true if 2 signatures share an identical set of columns. +func (s *signature) Equals(other signature) bool { + for k, count := range s.underlying { + if countOther, ok := other.underlying[k]; !ok || countOther != count { + return false + } + } + return true +} From a8788bf62cbcc954a08532c299c774262de7a81d Mon Sep 17 00:00:00 2001 From: bevzzz Date: Wed, 1 Nov 2023 20:29:05 +0100 Subject: [PATCH 08/55] feat: use *bun.DB in MigratorDialect --- dialect/pgdialect/alter_table.go | 10 +++++----- migrate/sqlschema/migrator.go | 5 ++--- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/dialect/pgdialect/alter_table.go b/dialect/pgdialect/alter_table.go index f8341fbf7..450f12513 100644 --- a/dialect/pgdialect/alter_table.go +++ b/dialect/pgdialect/alter_table.go @@ -2,23 +2,23 @@ package pgdialect import ( "context" - "database/sql" "fmt" + "github.com/uptrace/bun" "github.com/uptrace/bun/migrate/sqlschema" ) -func (d *Dialect) Migrator(sqldb *sql.DB) sqlschema.Migrator { - return &Migrator{sqldb: sqldb} +func (d *Dialect) Migrator(db *bun.DB) sqlschema.Migrator { + return &Migrator{db: db} } type Migrator struct { - sqldb *sql.DB + db *bun.DB } func (m *Migrator) RenameTable(ctx context.Context, oldName, newName string) error { query := fmt.Sprintf("ALTER TABLE %s RENAME TO %s", oldName, newName) - _, err := m.sqldb.ExecContext(ctx, query) + _, err := m.db.ExecContext(ctx, query) if err != nil { return err } diff --git a/migrate/sqlschema/migrator.go b/migrate/sqlschema/migrator.go index 0b51e2b02..037c90e23 100644 --- a/migrate/sqlschema/migrator.go +++ b/migrate/sqlschema/migrator.go @@ -2,7 +2,6 
@@ package sqlschema import ( "context" - "database/sql" "fmt" "github.com/uptrace/bun" @@ -11,7 +10,7 @@ import ( type MigratorDialect interface { schema.Dialect - Migrator(*sql.DB) Migrator + Migrator(*bun.DB) Migrator } type Migrator interface { @@ -28,6 +27,6 @@ func NewMigrator(db *bun.DB) (Migrator, error) { return nil, fmt.Errorf("%q dialect does not implement sqlschema.Migrator", db.Dialect().Name()) } return &migrator{ - Migrator: md.Migrator(db.DB), + Migrator: md.Migrator(db), }, nil } From 75a6f2b4db85922d4101868467a88fdc2886d9aa Mon Sep 17 00:00:00 2001 From: bevzzz Date: Wed, 1 Nov 2023 19:55:21 +0100 Subject: [PATCH 09/55] chore: re-organize packages --- schema/inspector.go | 67 ------------------------------------- schema/inspector/dialect.go | 11 ------ 2 files changed, 78 deletions(-) delete mode 100644 schema/inspector.go delete mode 100644 schema/inspector/dialect.go diff --git a/schema/inspector.go b/schema/inspector.go deleted file mode 100644 index e767742a8..000000000 --- a/schema/inspector.go +++ /dev/null @@ -1,67 +0,0 @@ -package schema - -import ( - "context" - "strings" -) - -type Inspector interface { - Inspect(ctx context.Context) (State, error) -} - -type State struct { - Tables []TableDef -} - -type TableDef struct { - Schema string - Name string - Columns map[string]ColumnDef -} - -type ColumnDef struct { - SQLType string - DefaultValue string - IsPK bool - IsNullable bool - IsAutoIncrement bool - IsIdentity bool -} - -type SchemaInspector struct { - tables *Tables -} - -var _ Inspector = (*SchemaInspector)(nil) - -func NewInspector(tables *Tables) *SchemaInspector { - return &SchemaInspector{ - tables: tables, - } -} - -// Inspect creates the current project state from the passed bun.Models. -// Do not recycle SchemaInspector for different sets of models, as older models will not be de-registerred before the next run. 
-func (si *SchemaInspector) Inspect(ctx context.Context) (State, error) { - var state State - for _, t := range si.tables.All() { - columns := make(map[string]ColumnDef) - for _, f := range t.Fields { - columns[f.Name] = ColumnDef{ - SQLType: strings.ToLower(f.CreateTableSQLType), - DefaultValue: f.SQLDefault, - IsPK: f.IsPK, - IsNullable: !f.NotNull, - IsAutoIncrement: f.AutoIncrement, - IsIdentity: f.Identity, - } - } - - state.Tables = append(state.Tables, TableDef{ - Schema: t.Schema, - Name: t.Name, - Columns: columns, - }) - } - return state, nil -} diff --git a/schema/inspector/dialect.go b/schema/inspector/dialect.go deleted file mode 100644 index beb23eea5..000000000 --- a/schema/inspector/dialect.go +++ /dev/null @@ -1,11 +0,0 @@ -package inspector - -import ( - "github.com/uptrace/bun" - "github.com/uptrace/bun/schema" -) - -type Dialect interface { - schema.Dialect - Inspector(db *bun.DB, excludeTables ...string) schema.Inspector -} From 408859f07be38236b39a00909cdce55d49f6f824 Mon Sep 17 00:00:00 2001 From: bevzzz Date: Fri, 3 Nov 2023 16:05:28 +0100 Subject: [PATCH 10/55] feat: detect Create/Drop table --- dialect/pgdialect/alter_table.go | 4 +- internal/dbtest/migrate_test.go | 202 +++++++++++++++++++++++------- migrate/auto.go | 203 ++++++++++++++++++++++++------- migrate/sqlschema/inspector.go | 5 +- migrate/sqlschema/migrator.go | 28 +++++ migrate/sqlschema/state.go | 1 + query_table_drop.go | 9 ++ schema/table.go | 3 +- 8 files changed, 367 insertions(+), 88 deletions(-) diff --git a/dialect/pgdialect/alter_table.go b/dialect/pgdialect/alter_table.go index 450f12513..af103fe86 100644 --- a/dialect/pgdialect/alter_table.go +++ b/dialect/pgdialect/alter_table.go @@ -9,10 +9,12 @@ import ( ) func (d *Dialect) Migrator(db *bun.DB) sqlschema.Migrator { - return &Migrator{db: db} + return &Migrator{db: db, BaseMigrator: sqlschema.NewBaseMigrator(db)} } type Migrator struct { + *sqlschema.BaseMigrator + db *bun.DB } diff --git 
a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index 12f310b36..91bb59265 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -3,7 +3,9 @@ package dbtest_test import ( "context" "errors" + "sort" "testing" + "time" "github.com/stretchr/testify/require" "github.com/uptrace/bun" @@ -167,6 +169,7 @@ func TestAutoMigrator_Run(t *testing.T) { fn func(t *testing.T, db *bun.DB) }{ {testRenameTable}, + {testCreateDropTable}, } testEachDB(t, func(t *testing.T, dbName string, db *bun.DB) { @@ -217,68 +220,181 @@ func testRenameTable(t *testing.T, db *bun.DB) { require.Equal(t, "changed", tables[0].Name) } -func TestDetector_Diff(t *testing.T) { - tests := []struct { - states func(testing.TB, context.Context, schema.Dialect) (stateDb sqlschema.State, stateModel sqlschema.State) - operations []migrate.Operation - }{ - { - states: testDetectRenamedTable, - operations: []migrate.Operation{ - &migrate.RenameTable{ - From: "books", - To: "books_renamed", - }, - }, - }, +func testCreateDropTable(t *testing.T, db *bun.DB) { + type DropMe struct { + bun.BaseModel `bun:"table:dropme"` + Foo int `bun:"foo,identity"` + } + + type CreateMe struct { + bun.BaseModel `bun:"table:createme"` + Bar string `bun:",pk,default:gen_random_uuid()"` + Baz time.Time } + // Arrange + ctx := context.Background() + dbInspector, err := sqlschema.NewInspector(db) + if err != nil { + t.Skip(err) + } + mustResetModel(t, ctx, db, (*DropMe)(nil)) + mustDropTableOnCleanup(t, ctx, db, (*CreateMe)(nil)) + + m, err := migrate.NewAutoMigrator(db, + migrate.WithTableNameAuto(migrationsTable), + migrate.WithLocksTableNameAuto(migrationLocksTable), + migrate.WithModel((*CreateMe)(nil))) + require.NoError(t, err) + + // Act + err = m.Run(ctx) + require.NoError(t, err) + + // Assert + state, err := dbInspector.Inspect(ctx) + require.NoError(t, err) + + tables := state.Tables + require.Len(t, tables, 1) + require.Equal(t, "createme", tables[0].Name) +} + +type 
Journal struct { + ISBN string `bun:"isbn,pk"` + Title string `bun:"title,notnull"` + Pages int `bun:"page_count,notnull,default:0"` +} + +type Reader struct { + Username string `bun:",pk,default:gen_random_uuid()"` +} + +type ExternalUsers struct { + bun.BaseModel `bun:"external.users"` + Name string `bun:",pk"` +} + +func TestDetector_Diff(t *testing.T) { testEachDialect(t, func(t *testing.T, dialectName string, dialect schema.Dialect) { - for _, tt := range tests { + for _, tt := range []struct { + name string + states func(testing.TB, context.Context, schema.Dialect) (stateDb sqlschema.State, stateModel sqlschema.State) + want []migrate.Operation + }{ + { + name: "1 table renamed, 1 added, 2 dropped", + states: func(tb testing.TB, ctx context.Context, d schema.Dialect) (stateDb sqlschema.State, stateModel sqlschema.State) { + // Database state ------------- + type Subscription struct { + bun.BaseModel `bun:"table:billing.subscriptions"` + } + type Review struct{} + + type Author struct { + Name string `bun:"name"` + } + + // Model state ------------- + type JournalRenamed struct { + bun.BaseModel `bun:"table:journals_renamed"` + + ISBN string `bun:"isbn,pk"` + Title string `bun:"title,notnull"` + Pages int `bun:"page_count,notnull,default:0"` + } + + return getState(tb, ctx, d, + (*Author)(nil), + (*Journal)(nil), + (*Review)(nil), + (*Subscription)(nil), + ), getState(tb, ctx, d, + (*Author)(nil), + (*JournalRenamed)(nil), + (*Reader)(nil), + ) + }, + want: []migrate.Operation{ + &migrate.RenameTable{ + Schema: dialect.DefaultSchema(), + From: "journals", + To: "journals_renamed", + }, + &migrate.CreateTable{ + Model: &Reader{}, // (*Reader)(nil) would be more idiomatic, but schema.Tables + }, + &migrate.DropTable{ + Schema: "billing", + Name: "billing.subscriptions", // TODO: fix once schema is used correctly + }, + &migrate.DropTable{ + Schema: dialect.DefaultSchema(), + Name: "reviews", + }, + }, + }, + { + name: "renaming does not work across schemas", + 
states: func(tb testing.TB, ctx context.Context, d schema.Dialect) (stateDb sqlschema.State, stateModel sqlschema.State) { + // Users have the same columns as the "added" ExternalUsers. + // However, we should not recognize it as a RENAME, because only models in the same schema can be renamed. + // Instead, this is a DROP + CREATE case. + type Users struct { + bun.BaseModel `bun:"external_users"` + Name string `bun:",pk"` + } + + return getState(tb, ctx, d, + (*Users)(nil), + ), getState(t, ctx, d, + (*ExternalUsers)(nil), + ) + }, + want: []migrate.Operation{ + &migrate.DropTable{ + Schema: dialect.DefaultSchema(), + Name: "external_users", + }, + &migrate.CreateTable{ + Model: &ExternalUsers{}, + }, + }, + }, + } { t.Run(funcName(tt.states), func(t *testing.T) { ctx := context.Background() var d migrate.Detector stateDb, stateModel := tt.states(t, ctx, dialect) - diff := d.Diff(stateDb, stateModel) - - require.Equal(t, tt.operations, diff.Operations()) + got := d.Diff(stateDb, stateModel).Operations() + checkEqualChangeset(t, got, tt.want) }) } }) } -func testDetectRenamedTable(tb testing.TB, ctx context.Context, dialect schema.Dialect) (s1, s2 sqlschema.State) { - type Book struct { - bun.BaseModel +func checkEqualChangeset(tb testing.TB, got, want []migrate.Operation) { + tb.Helper() - ISBN string `bun:"isbn,pk"` - Title string `bun:"title,notnull"` - Pages int `bun:"page_count,notnull,default:0"` - } - - type Author struct { - bun.BaseModel - Name string `bun:"name"` - } + // Sort alphabetically to ensure we don't fail because of the wrong order + sort.Slice(got, func(i, j int) bool { + return got[i].String() < got[j].String() + }) + sort.Slice(want, func(i, j int) bool { + return want[i].String() < want[j].String() + }) - type BookRenamed struct { - bun.BaseModel `bun:"table:books_renamed"` + var cgot, cwant migrate.Changeset + cgot.Add(got...) + cwant.Add(want...) 
- ISBN string `bun:"isbn,pk"` - Title string `bun:"title,notnull"` - Pages int `bun:"page_count,notnull,default:0"` - } - return getState(tb, ctx, dialect, - (*Author)(nil), - (*Book)(nil), - ), getState(tb, ctx, dialect, - (*Author)(nil), - (*BookRenamed)(nil), - ) + require.Equal(tb, cwant.String(), cgot.String()) } func getState(tb testing.TB, ctx context.Context, dialect schema.Dialect, models ...interface{}) sqlschema.State { + tb.Helper() + tables := schema.NewTables(dialect) tables.Register(models...) diff --git a/migrate/auto.go b/migrate/auto.go index c137d77e9..c58e243d9 100644 --- a/migrate/auto.go +++ b/migrate/auto.go @@ -167,31 +167,56 @@ func (am *AutoMigrator) Run(ctx context.Context) error { // INTERNAL ------------------------------------------------------------------- -// Operation is an abstraction a level above a MigrationFunc. -// Apart from storing the function to execute the change, -// it knows how to *write* the corresponding code, and what the reverse operation is. -type Operation interface { - Func(sqlschema.Migrator) MigrationFunc - // GetReverse returns an operation that can revert the current one. - GetReverse() Operation -} +type Detector struct{} -type RenameTable struct { - From string - To string -} +func (d *Detector) Diff(got, want sqlschema.State) Changeset { + var changes Changeset -func (rt *RenameTable) Func(m sqlschema.Migrator) MigrationFunc { - return func(ctx context.Context, db *bun.DB) error { - return m.RenameTable(ctx, rt.From, rt.To) + oldModels := newTableSet(got.Tables...) + newModels := newTableSet(want.Tables...) + + addedModels := newModels.Sub(oldModels) + +AddedLoop: + for _, added := range addedModels.Values() { + removedModels := oldModels.Sub(newModels) + for _, removed := range removedModels.Values() { + if d.canRename(added, removed) { + changes.Add(&RenameTable{ + Schema: removed.Schema, + From: removed.Name, + To: added.Name, + }) + + // TODO: check for altered columns. 
+ + // Do not check this model further, we know it was renamed. + oldModels.Remove(removed.Name) + continue AddedLoop + } + } + // If a new table did not appear because of the rename operation, then it must've been created. + changes.Add(&CreateTable{ + Schema: added.Schema, + Name: added.Name, + Model: added.Model, + }) } -} -func (rt *RenameTable) GetReverse() Operation { - return &RenameTable{ - From: rt.To, - To: rt.From, + // Tables that aren't present anymore and weren't renamed were deleted. + for _, t := range oldModels.Sub(newModels).Values() { + changes.Add(&DropTable{ + Schema: t.Schema, + Name: t.Name, + }) } + + return changes +} + +// canRename checks if t1 can be renamed to t2. +func (d Detector) canRename(t1, t2 sqlschema.Table) bool { + return t1.Schema == t2.Schema && sqlschema.EqualSignatures(t1, t2) } // Changeset is a set of changes that alter database state. @@ -201,14 +226,24 @@ type Changeset struct { var _ Operation = (*Changeset)(nil) +func (c Changeset) String() string { + var ops []string + for _, op := range c.operations { + ops = append(ops, op.String()) + } + return strings.Join(ops, "\n") +} + func (c Changeset) Operations() []Operation { return c.operations } -func (c *Changeset) Add(op Operation) { - c.operations = append(c.operations, op) +// Add new operations to the changeset. +func (c *Changeset) Add(op ...Operation) { + c.operations = append(c.operations, op...) } +// Func chains all underlying operations in a single MigrationFunc. func (c *Changeset) Func(m sqlschema.Migrator) MigrationFunc { return func(ctx context.Context, db *bun.DB) error { for _, op := range c.operations { @@ -239,32 +274,118 @@ func (c *Changeset) Down(m sqlschema.Migrator) MigrationFunc { return c.GetReverse().Func(m) } -type Detector struct{} +// Operation is an abstraction a level above a MigrationFunc. +// Apart from storing the function to execute the change, +// it knows how to *write* the corresponding code, and what the reverse operation is. 
+type Operation interface { + fmt.Stringer -func (d *Detector) Diff(got, want sqlschema.State) Changeset { - var changes Changeset + Func(sqlschema.Migrator) MigrationFunc + // GetReverse returns an operation that can revert the current one. + GetReverse() Operation +} - // Detect renamed models - oldModels := newTableSet(got.Tables...) - newModels := newTableSet(want.Tables...) +// noop is a migration that doesn't change the schema. +type noop struct{} - addedModels := newModels.Sub(oldModels) - for _, added := range addedModels.Values() { - removedModels := oldModels.Sub(newModels) - for _, removed := range removedModels.Values() { - if !sqlschema.EqualSignatures(added, removed) { - continue - } - changes.Add(&RenameTable{ - From: removed.Name, - To: added.Name, - }) - } +var _ Operation = (*noop)(nil) + +func (*noop) String() string { return "noop" } +func (*noop) Func(m sqlschema.Migrator) MigrationFunc { + return func(ctx context.Context, db *bun.DB) error { return nil } +} +func (*noop) GetReverse() Operation { return &noop{} } + +type RenameTable struct { + Schema string + From string + To string +} + +var _ Operation = (*RenameTable)(nil) + +func (op RenameTable) String() string { + return fmt.Sprintf( + "Rename table %q.%q to %q.%q", + op.Schema, trimSchema(op.From), op.Schema, trimSchema(op.To), + ) +} + +func (op *RenameTable) Func(m sqlschema.Migrator) MigrationFunc { + return func(ctx context.Context, db *bun.DB) error { + return m.RenameTable(ctx, op.From, op.To) } +} - return changes +func (op *RenameTable) GetReverse() Operation { + return &RenameTable{ + From: op.To, + To: op.From, + } +} + +type CreateTable struct { + Schema string + Name string + Model interface{} } +var _ Operation = (*CreateTable)(nil) + +func (op CreateTable) String() string { + return fmt.Sprintf("CreateTable %T", op.Model) +} + +func (op *CreateTable) Func(m sqlschema.Migrator) MigrationFunc { + return func(ctx context.Context, db *bun.DB) error { + return m.CreateTable(ctx, 
op.Model) + } +} + +func (op *CreateTable) GetReverse() Operation { + return &DropTable{ + Schema: op.Schema, + Name: op.Name, + } +} + +type DropTable struct { + Schema string + Name string +} + +var _ Operation = (*DropTable)(nil) + +func (op DropTable) String() string { + return fmt.Sprintf("DropTable %q.%q", op.Schema, trimSchema(op.Name)) +} + +func (op *DropTable) Func(m sqlschema.Migrator) MigrationFunc { + return func(ctx context.Context, db *bun.DB) error { + return m.DropTable(ctx, op.Schema, op.Name) + } +} + +// GetReverse for a DropTable returns a no-op migration. Logically, CreateTable is the reverse, +// but DropTable does not have the table's definition to create one. +// +// TODO: we can fetch table definitions for deleted tables +// from the database engine and execute them as a raw query. +func (op *DropTable) GetReverse() Operation { + return &noop{} +} + +// trimSchema drops schema name from the table name. +// This is a workaroud until schema.Table.Schema is fully integrated with other bun packages. +func trimSchema(name string) string { + if strings.Contains(name, ".") { + return strings.Split(name, ".")[1] + } + return name +} + +// sqlschema utils ------------------------------------------------------------ + // tableSet stores unique table definitions. type tableSet struct { underlying map[string]sqlschema.Table diff --git a/migrate/sqlschema/inspector.go b/migrate/sqlschema/inspector.go index 7974b0c25..2f44f93c5 100644 --- a/migrate/sqlschema/inspector.go +++ b/migrate/sqlschema/inspector.go @@ -32,6 +32,8 @@ func NewInspector(db *bun.DB, excludeTables ...string) (Inspector, error) { }, nil } +// SchemaInspector creates the current project state from the passed bun.Models. +// Do not recycle SchemaInspector for different sets of models, as older models will not be de-registerred before the next run. 
type SchemaInspector struct { tables *schema.Tables } @@ -44,8 +46,6 @@ func NewSchemaInspector(tables *schema.Tables) *SchemaInspector { } } -// Inspect creates the current project state from the passed bun.Models. -// Do not recycle SchemaInspector for different sets of models, as older models will not be de-registerred before the next run. func (si *SchemaInspector) Inspect(ctx context.Context) (State, error) { var state State for _, t := range si.tables.All() { @@ -64,6 +64,7 @@ func (si *SchemaInspector) Inspect(ctx context.Context) (State, error) { state.Tables = append(state.Tables, Table{ Schema: t.Schema, Name: t.Name, + Model: t.ZeroIface, Columns: columns, }) } diff --git a/migrate/sqlschema/migrator.go b/migrate/sqlschema/migrator.go index 037c90e23..41b481f77 100644 --- a/migrate/sqlschema/migrator.go +++ b/migrate/sqlschema/migrator.go @@ -15,8 +15,11 @@ type MigratorDialect interface { type Migrator interface { RenameTable(ctx context.Context, oldName, newName string) error + CreateTable(ctx context.Context, model interface{}) error + DropTable(ctx context.Context, schema, table string) error } +// Migrator is a dialect-agnostic wrapper for sqlschema.Dialect type migrator struct { Migrator } @@ -30,3 +33,28 @@ func NewMigrator(db *bun.DB) (Migrator, error) { Migrator: md.Migrator(db), }, nil } + +// BaseMigrator can be embeded by dialect's Migrator implementations to re-use some of the existing bun queries. 
+type BaseMigrator struct { + db *bun.DB +} + +func NewBaseMigrator(db *bun.DB) *BaseMigrator { + return &BaseMigrator{db: db} +} + +func (m *BaseMigrator) CreateTable(ctx context.Context, model interface{}) error { + _, err := m.db.NewCreateTable().Model(model).Exec(ctx) + if err != nil { + return err + } + return nil +} + +func (m *BaseMigrator) DropTable(ctx context.Context, schema, name string) error { + _, err := m.db.NewDropTable().TableExpr("?.?", bun.Ident(schema), bun.Ident(name)).Exec(ctx) + if err != nil { + return err + } + return nil +} diff --git a/migrate/sqlschema/state.go b/migrate/sqlschema/state.go index 8b89368a4..8f7e96b0d 100644 --- a/migrate/sqlschema/state.go +++ b/migrate/sqlschema/state.go @@ -7,6 +7,7 @@ type State struct { type Table struct { Schema string Name string + Model interface{} Columns map[string]Column } diff --git a/query_table_drop.go b/query_table_drop.go index e4447a8d2..a92014515 100644 --- a/query_table_drop.go +++ b/query_table_drop.go @@ -151,3 +151,12 @@ func (q *DropTableQuery) afterDropTableHook(ctx context.Context) error { } return nil } + +func (q *DropTableQuery) String() string { + buf, err := q.AppendQuery(q.db.Formatter(), nil) + if err != nil { + panic(err) + } + + return string(buf) +} diff --git a/schema/table.go b/schema/table.go index f770a2f76..e0c46fe07 100644 --- a/schema/table.go +++ b/schema/table.go @@ -86,6 +86,7 @@ func (table *Table) init(dialect Dialect, typ reflect.Type, canAddr bool) { table.setName(tableName) table.Alias = table.ModelName table.SQLAlias = table.quoteIdent(table.ModelName) + table.Schema = dialect.DefaultSchema() table.Fields = make([]*Field, 0, typ.NumField()) table.FieldMap = make(map[string]*Field, typ.NumField()) @@ -382,7 +383,7 @@ func (t *Table) processBaseModelField(f reflect.StructField) { } if s, ok := tag.Option("table"); ok { - schema, _ := t.schemaFromTagName(tag.Name) + schema, _ := t.schemaFromTagName(s) t.Schema = schema t.setName(s) } From 
a918dc472a33dd24c5fffd4d048bcf49f2e07a42 Mon Sep 17 00:00:00 2001 From: bevzzz Date: Sun, 5 Nov 2023 16:36:35 +0100 Subject: [PATCH 11/55] feat: detect modified relations --- dialect/pgdialect/inspector.go | 86 +++++++-- internal/dbtest/inspect_test.go | 310 +++++++++++++++++++++++------- internal/dbtest/migrate_test.go | 151 +++++++++++++-- internal/dbtest/sqlschema_test.go | 222 +++++++++++++++++++++ migrate/auto.go | 101 ++++++++-- migrate/sqlschema/inspector.go | 20 +- migrate/sqlschema/state.go | 217 +++++++++++++++++++++ 7 files changed, 1000 insertions(+), 107 deletions(-) create mode 100644 internal/dbtest/sqlschema_test.go diff --git a/dialect/pgdialect/inspector.go b/dialect/pgdialect/inspector.go index db457de21..95d2581b2 100644 --- a/dialect/pgdialect/inspector.go +++ b/dialect/pgdialect/inspector.go @@ -39,6 +39,12 @@ func (in *Inspector) Inspect(ctx context.Context) (sqlschema.State, error) { return state, err } + var fks []*ForeignKey + if err := in.db.NewRaw(sqlInspectForeignKeys, bun.In(exclude), bun.In(exclude)).Scan(ctx, &fks); err != nil { + return state, err + } + state.FKs = make(map[sqlschema.FK]string, len(fks)) + for _, table := range tables { var columns []*InformationSchemaColumn if err := in.db.NewRaw(sqlInspectColumnsQuery, table.Schema, table.Name).Scan(ctx, &columns); err != nil { @@ -72,12 +78,17 @@ func (in *Inspector) Inspect(ctx context.Context) (sqlschema.State, error) { Columns: colDefs, }) } + + for _, fk := range fks { + state.FKs[sqlschema.FK{ + From: sqlschema.C(fk.SourceSchema, fk.SourceTable, fk.SourceColumns...), + To: sqlschema.C(fk.TargetSchema, fk.TargetTable, fk.TargetColumns...), + }] = fk.ConstraintName + } return state, nil } type InformationSchemaTable struct { - bun.BaseModel - Schema string `bun:"table_schema,pk"` Name string `bun:"table_name,pk"` @@ -85,8 +96,6 @@ type InformationSchemaTable struct { } type InformationSchemaColumn struct { - bun.BaseModel - Schema string `bun:"table_schema"` Table string 
`bun:"table_name"` Name string `bun:"column_name"` @@ -104,17 +113,29 @@ type InformationSchemaColumn struct { UniqueGroup []string `bun:"unique_group,array"` } +type ForeignKey struct { + ConstraintName string `bun:"constraint_name"` + SourceSchema string `bun:"schema_name"` + SourceTable string `bun:"table_name"` + SourceColumns []string `bun:"columns,array"` + TargetSchema string `bun:"target_schema"` + TargetTable string `bun:"target_table"` + TargetColumns []string `bun:"target_columns,array"` +} + const ( // sqlInspectTables retrieves all user-defined tables across all schemas. // It excludes relations from Postgres's reserved "pg_" schemas and views from the "information_schema". + // Pass bun.In([]string{...}) to exclude tables from this inspection or bun.In([]string{''}) to include all results. sqlInspectTables = ` -SELECT table_schema, table_name +SELECT "table_schema", "table_name" FROM information_schema.tables WHERE table_type = 'BASE TABLE' - AND table_schema <> 'information_schema' - AND table_schema NOT LIKE 'pg_%' - AND table_name NOT IN (?) - ` + AND "table_schema" <> 'information_schema' + AND "table_schema" NOT LIKE 'pg_%' + AND "table_name" NOT IN (?) +ORDER BY "table_schema", "table_name" +` // sqlInspectColumnsQuery retrieves column definitions for the specified table. // Unlike sqlInspectTables and sqlInspectSchema, it should be passed to bun.NewRaw @@ -180,10 +201,13 @@ FROM ( ) att USING ("table_schema", "table_name", "column_name") ) "c" WHERE "table_schema" = ? AND "table_name" = ? - ` +ORDER BY "table_schema", "table_name", "column_name" +` // sqlInspectSchema retrieves column type definitions for all user-defined tables. // Other relations, such as views and indices, as well as Posgres's internal relations are excluded. + // + // TODO: implement scanning ORM relations for RawQuery too, so that one could scan this query directly to InformationSchemaTable. 
sqlInspectSchema = ` SELECT "t"."table_schema", @@ -247,5 +271,45 @@ FROM information_schema.tables "t" WHERE table_type = 'BASE TABLE' AND table_schema <> 'information_schema' AND table_schema NOT LIKE 'pg_%' - ` +ORDER BY table_schema, table_name +` + + // sqlInspectForeignKeys get FK definitions for user-defined tables. + // Pass bun.In([]string{...}) to exclude tables from this inspection or bun.In([]string{''}) to include all results. + sqlInspectForeignKeys = ` +WITH + "schemas" AS ( + SELECT oid, nspname + FROM pg_namespace + ), + "tables" AS ( + SELECT oid, relnamespace, relname, relkind + FROM pg_class + ), + "columns" AS ( + SELECT attrelid, attname, attnum + FROM pg_attribute + WHERE attisdropped = false + ) +SELECT DISTINCT + co.conname AS "constraint_name", + ss.nspname AS schema_name, + s.relname AS "table_name", + ARRAY_AGG(sc.attname) AS "columns", + ts.nspname AS target_schema, + "t".relname AS target_table, + ARRAY_AGG(tc.attname) AS target_columns +FROM pg_constraint co + LEFT JOIN "tables" s ON s.oid = co.conrelid + LEFT JOIN "schemas" ss ON ss.oid = s.relnamespace + LEFT JOIN "columns" sc ON sc.attrelid = s.oid AND sc.attnum = ANY(co.conkey) + LEFT JOIN "tables" t ON t.oid = co.confrelid + LEFT JOIN "schemas" ts ON ts.oid = "t".relnamespace + LEFT JOIN "columns" tc ON tc.attrelid = "t".oid AND tc.attnum = ANY(co.confkey) +WHERE co.contype = 'f' + AND co.conrelid IN (SELECT oid FROM pg_class WHERE relkind = 'r') + AND ARRAY_POSITION(co.conkey, sc.attnum) = ARRAY_POSITION(co.confkey, tc.attnum) + AND s.relname NOT IN (?) AND "t".relname NOT IN (?) 
+GROUP BY "constraint_name", "schema_name", "table_name", target_schema, target_table +` ) diff --git a/internal/dbtest/inspect_test.go b/internal/dbtest/inspect_test.go index 53c8905cc..42e200e2c 100644 --- a/internal/dbtest/inspect_test.go +++ b/internal/dbtest/inspect_test.go @@ -9,89 +9,265 @@ import ( "github.com/uptrace/bun/migrate/sqlschema" ) -func TestDatabaseInspector_Inspect(t *testing.T) { +type Article struct { + ISBN int `bun:",pk,identity"` + Editor string `bun:",notnull,unique:title_author,default:'john doe'"` + Title string `bun:",notnull,unique:title_author"` + Locale string `bun:",type:varchar(5),default:'en-GB'"` + Pages int8 `bun:"page_count,notnull,default:1"` + Count int32 `bun:"book_count,autoincrement"` + PublisherID string `bun:"publisher_id,notnull"` + AuthorID int `bun:"author_id,notnull"` - type Book struct { - bun.BaseModel `bun:"table:books"` + // Publisher that published this article. + Publisher *Publisher `bun:"rel:belongs-to,join:publisher_id=publisher_id"` - ISBN int `bun:",pk,identity"` - Author string `bun:",notnull,unique:title_author,default:'john doe'"` - Title string `bun:",notnull,unique:title_author"` - Locale string `bun:",type:varchar(5),default:'en-GB'"` - Pages int8 `bun:"page_count,notnull,default:1"` - Count int32 `bun:"book_count,autoincrement"` - } + // Author wrote this article. + Author *Journalist `bun:"rel:belongs-to,join:author_id=author_id"` +} + +type Office struct { + bun.BaseModel `bun:"table:admin.offices"` + Name string `bun:"office_name,pk"` + TennantID string `bun:"publisher_id"` + TennantName string `bun:"publisher_name"` + + Tennant *Publisher `bun:"rel:has-one,join:publisher_id=publisher_id,join:publisher_name=publisher_name"` +} + +type Publisher struct { + ID string `bun:"publisher_id,pk,default:gen_random_uuid(),unique:office_fk"` + Name string `bun:"publisher_name,unique,notnull,unique:office_fk"` + + // Writers write articles for this publisher. 
+ Writers []Journalist `bun:"m2m:publisher_to_journalists,join:Publisher=Author"` +} + +// PublisherToJournalist describes which journalist work with each publisher. +// One publisher can also work with many journalists. It's an N:N (or m2m) relation. +type PublisherToJournalist struct { + bun.BaseModel `bun:"table:publisher_to_journalists"` + PublisherID string `bun:"publisher_id,pk"` + AuthorID int `bun:"author_id,pk"` + + Publisher *Publisher `bun:"rel:belongs-to,join:publisher_id=publisher_id"` + Author *Journalist `bun:"rel:belongs-to,join:author_id=author_id"` +} + +type Journalist struct { + bun.BaseModel `bun:"table:authors"` + ID int `bun:"author_id,pk,identity"` + FirstName string `bun:",notnull"` + LastName string + + // Articles that this journalist has written. + Articles []*Article `bun:"rel:has-many,join:author_id=author_id"` +} + +func TestDatabaseInspector_Inspect(t *testing.T) { testEachDB(t, func(t *testing.T, dbName string, db *bun.DB) { + db.RegisterModel((*PublisherToJournalist)(nil)) + dbInspector, err := sqlschema.NewInspector(db) if err != nil { t.Skip(err) } ctx := context.Background() - mustResetModel(t, ctx, db, (*Book)(nil)) - - want := sqlschema.State{ - Tables: []sqlschema.Table{ - { - Schema: "public", - Name: "books", - Columns: map[string]sqlschema.Column{ - "isbn": { - SQLType: "bigint", - IsPK: true, - IsNullable: false, - IsAutoIncrement: false, - IsIdentity: true, - DefaultValue: "", - }, - "author": { - SQLType: "varchar", - IsPK: false, - IsNullable: false, - IsAutoIncrement: false, - IsIdentity: false, - DefaultValue: "john doe", - }, - "title": { - SQLType: "varchar", - IsPK: false, - IsNullable: false, - IsAutoIncrement: false, - IsIdentity: false, - DefaultValue: "", - }, - "locale": { - SQLType: "varchar(5)", - IsPK: false, - IsNullable: true, - IsAutoIncrement: false, - IsIdentity: false, - DefaultValue: "en-GB", - }, - "page_count": { - SQLType: "smallint", - IsPK: false, - IsNullable: false, - IsAutoIncrement: false, 
- IsIdentity: false, - DefaultValue: "1", - }, - "book_count": { - SQLType: "integer", - IsPK: false, - IsNullable: false, - IsAutoIncrement: true, - IsIdentity: false, - DefaultValue: "", - }, + mustCreateSchema(t, ctx, db, "admin") + mustCreateTableWithFKs(t, ctx, db, + // Order of creation matters: + (*Journalist)(nil), // does not reference other tables + (*Publisher)(nil), // does not reference other tables + (*Office)(nil), // references Publisher + (*PublisherToJournalist)(nil), // references Journalist and Publisher + (*Article)(nil), // references Journalist and Publisher + ) + defaultSchema := db.Dialect().DefaultSchema() + + // Tables come sorted alphabetically by schema and table. + wantTables := []sqlschema.Table{ + { + Schema: "admin", + Name: "offices", + Columns: map[string]sqlschema.Column{ + "office_name": { + SQLType: "varchar", + IsPK: true, + }, + "publisher_id": { + SQLType: "varchar", + IsNullable: true, + }, + "publisher_name": { + SQLType: "varchar", + IsNullable: true, + }, + }, + }, + { + Schema: defaultSchema, + Name: "articles", + Columns: map[string]sqlschema.Column{ + "isbn": { + SQLType: "bigint", + IsPK: true, + IsNullable: false, + IsAutoIncrement: false, + IsIdentity: true, + DefaultValue: "", + }, + "editor": { + SQLType: "varchar", + IsPK: false, + IsNullable: false, + IsAutoIncrement: false, + IsIdentity: false, + DefaultValue: "john doe", + }, + "title": { + SQLType: "varchar", + IsPK: false, + IsNullable: false, + IsAutoIncrement: false, + IsIdentity: false, + DefaultValue: "", + }, + "locale": { + SQLType: "varchar(5)", + IsPK: false, + IsNullable: true, + IsAutoIncrement: false, + IsIdentity: false, + DefaultValue: "en-GB", + }, + "page_count": { + SQLType: "smallint", + IsPK: false, + IsNullable: false, + IsAutoIncrement: false, + IsIdentity: false, + DefaultValue: "1", + }, + "book_count": { + SQLType: "integer", + IsPK: false, + IsNullable: false, + IsAutoIncrement: true, + IsIdentity: false, + DefaultValue: "", + }, + 
"publisher_id": { + SQLType: "varchar", + }, + "author_id": { + SQLType: "bigint", }, }, }, + { + Schema: defaultSchema, + Name: "authors", + Columns: map[string]sqlschema.Column{ + "author_id": { + SQLType: "bigint", + IsPK: true, + IsIdentity: true, + }, + "first_name": { + SQLType: "varchar", + }, + "last_name": { + SQLType: "varchar", + IsNullable: true, + }, + }, + }, + { + Schema: defaultSchema, + Name: "publisher_to_journalists", + Columns: map[string]sqlschema.Column{ + "publisher_id": { + SQLType: "varchar", + IsPK: true, + }, + "author_id": { + SQLType: "bigint", + IsPK: true, + }, + }, + }, + { + Schema: defaultSchema, + Name: "publishers", + Columns: map[string]sqlschema.Column{ + "publisher_id": { + SQLType: "varchar", + IsPK: true, + DefaultValue: "gen_random_uuid()", + }, + "publisher_name": { + SQLType: "varchar", + }, + }, + }, + } + + wantFKs := []sqlschema.FK{ + { // + From: sqlschema.C(defaultSchema, "articles", "publisher_id"), + To: sqlschema.C(defaultSchema, "publishers", "publisher_id"), + }, + { + From: sqlschema.C(defaultSchema, "articles", "author_id"), + To: sqlschema.C(defaultSchema, "authors", "author_id"), + }, + { // + From: sqlschema.C("admin", "offices", "publisher_name", "publisher_id"), + To: sqlschema.C(defaultSchema, "publishers", "publisher_name", "publisher_id"), + }, + { // + From: sqlschema.C(defaultSchema, "publisher_to_journalists", "publisher_id"), + To: sqlschema.C(defaultSchema, "publishers", "publisher_id"), + }, + { // + From: sqlschema.C(defaultSchema, "publisher_to_journalists", "author_id"), + To: sqlschema.C(defaultSchema, "authors", "author_id"), + }, } got, err := dbInspector.Inspect(ctx) require.NoError(t, err) - require.Equal(t, want, got) + + // State.FKs store their database names, which differ from dialect to dialect. + // Because of that we compare FKs and Tables separately. 
+ require.Equal(t, wantTables, got.Tables) + + var fks []sqlschema.FK + for fk := range got.FKs { + fks = append(fks, fk) + } + require.ElementsMatch(t, wantFKs, fks) + }) +} + +func mustCreateTableWithFKs(tb testing.TB, ctx context.Context, db *bun.DB, models ...interface{}) { + tb.Helper() + for _, model := range models { + create := db.NewCreateTable().Model(model).WithForeignKeys() + _, err := create.Exec(ctx) + require.NoError(tb, err, "must create table %q:", create.GetTableName()) + mustDropTableOnCleanup(tb, ctx, db, model) + } +} + +func mustCreateSchema(tb testing.TB, ctx context.Context, db *bun.DB, schema string) { + tb.Helper() + _, err := db.NewRaw("CREATE SCHEMA IF NOT EXISTS ?", bun.Ident(schema)).Exec(ctx) + require.NoError(tb, err, "create schema %q:", schema) + + tb.Cleanup(func() { + db.NewRaw("DROP SCHEMA IF EXISTS ?", bun.Ident(schema)).Exec(ctx) }) } diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index 91bb59265..d1da1ea34 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -260,22 +260,41 @@ func testCreateDropTable(t *testing.T, db *bun.DB) { require.Equal(t, "createme", tables[0].Name) } -type Journal struct { - ISBN string `bun:"isbn,pk"` - Title string `bun:"title,notnull"` - Pages int `bun:"page_count,notnull,default:0"` -} +func TestDetector_Diff(t *testing.T) { + type Journal struct { + ISBN string `bun:"isbn,pk"` + Title string `bun:"title,notnull"` + Pages int `bun:"page_count,notnull,default:0"` + } -type Reader struct { - Username string `bun:",pk,default:gen_random_uuid()"` -} + type Reader struct { + Username string `bun:",pk,default:gen_random_uuid()"` + } -type ExternalUsers struct { - bun.BaseModel `bun:"external.users"` - Name string `bun:",pk"` -} + type ExternalUsers struct { + bun.BaseModel `bun:"external.users"` + Name string `bun:",pk"` + } + + // ------------------------------------------------------------------------ + type ThingNoOwner struct { + 
bun.BaseModel `bun:"things"` + ID int64 `bun:"thing_id,pk"` + OwnerID int64 `bun:",notnull"` + } + + type Owner struct { + ID int64 `bun:",pk"` + } + + type Thing struct { + bun.BaseModel `bun:"things"` + ID int64 `bun:"thing_id,pk"` + OwnerID int64 `bun:",notnull"` + + Owner *Owner `bun:"rel:belongs-to,join:owner_id=id"` + } -func TestDetector_Diff(t *testing.T) { testEachDialect(t, func(t *testing.T, dialectName string, dialect schema.Dialect) { for _, tt := range []struct { name string @@ -283,7 +302,7 @@ func TestDetector_Diff(t *testing.T) { want []migrate.Operation }{ { - name: "1 table renamed, 1 added, 2 dropped", + name: "1 table renamed, 1 created, 2 dropped", states: func(tb testing.TB, ctx context.Context, d schema.Dialect) (stateDb sqlschema.State, stateModel sqlschema.State) { // Database state ------------- type Subscription struct { @@ -361,13 +380,111 @@ func TestDetector_Diff(t *testing.T) { }, }, }, + { + name: "detect new FKs on existing columns", + states: func(t testing.TB, ctx context.Context, d schema.Dialect) (stateDb sqlschema.State, stateModel sqlschema.State) { + // database state + type LonelyUser struct { + bun.BaseModel `bun:"table:users"` + Username string `bun:",pk"` + DreamPetKind string `bun:"pet_kind,notnull"` + DreamPetName string `bun:"pet_name,notnull"` + ImaginaryFriend string `bun:"friend"` + } + + type Pet struct { + Nickname string `bun:",pk"` + Kind string `bun:",pk"` + } + + // model state + type HappyUser struct { + bun.BaseModel `bun:"table:users"` + Username string `bun:",pk"` + PetKind string `bun:"pet_kind,notnull"` + PetName string `bun:"pet_name,notnull"` + Friend string `bun:"friend"` + + Pet *Pet `bun:"rel:has-one,join:pet_kind=kind,join:pet_name=nickname"` + BestFriend *HappyUser `bun:"rel:has-one,join:friend=username"` + } + + return getState(t, ctx, d, + (*LonelyUser)(nil), + (*Pet)(nil), + ), getState(t, ctx, d, + (*HappyUser)(nil), + (*Pet)(nil), + ) + }, + want: []migrate.Operation{ + 
&migrate.AddForeignKey{ + SourceTable: "users", + SourceColumns: []string{"pet_kind", "pet_name"}, + TargetTable: "pets", + TargetColums: []string{"kind", "nickname"}, + }, + &migrate.AddForeignKey{ + SourceTable: "users", + SourceColumns: []string{"friend"}, + TargetTable: "users", + TargetColums: []string{"username"}, + }, + }, + }, + { + name: "create FKs for new tables", // TODO: update test case to detect an added column too + states: func(t testing.TB, ctx context.Context, d schema.Dialect) (stateDb sqlschema.State, stateModel sqlschema.State) { + return getState(t, ctx, d, + (*ThingNoOwner)(nil), + ), getState(t, ctx, d, + (*Owner)(nil), + (*Thing)(nil), + ) + }, + want: []migrate.Operation{ + &migrate.CreateTable{ + Model: &Owner{}, + }, + &migrate.AddForeignKey{ + SourceTable: "things", + SourceColumns: []string{"owner_id"}, + TargetTable: "owners", + TargetColums: []string{"id"}, + }, + }, + }, + { + name: "drop FKs for dropped tables", // TODO: update test case to detect dropped columns too + states: func(t testing.TB, ctx context.Context, d schema.Dialect) (sqlschema.State, sqlschema.State) { + stateDb := getState(t, ctx, d, (*Owner)(nil), (*Thing)(nil)) + stateModel := getState(t, ctx, d, (*ThingNoOwner)(nil)) + + // Normally a database state will have the names of the constraints filled in, but we need to mimic that for the test. 
+ stateDb.FKs[sqlschema.FK{ + From: sqlschema.C(d.DefaultSchema(), "things", "owner_id"), + To: sqlschema.C(d.DefaultSchema(), "owners", "id"), + }] = "test_fkey" + return stateDb, stateModel + }, + want: []migrate.Operation{ + &migrate.DropTable{ + Schema: dialect.DefaultSchema(), + Name: "owners", + }, + &migrate.DropForeignKey{ + Schema: dialect.DefaultSchema(), + Table: "things", + ConstraintName: "test_fkey", + }, + }, + }, } { - t.Run(funcName(tt.states), func(t *testing.T) { + t.Run(tt.name, func(t *testing.T) { ctx := context.Background() - var d migrate.Detector stateDb, stateModel := tt.states(t, ctx, dialect) - got := d.Diff(stateDb, stateModel).Operations() + got := migrate.Diff(stateDb, stateModel).Operations() checkEqualChangeset(t, got, tt.want) }) } diff --git a/internal/dbtest/sqlschema_test.go b/internal/dbtest/sqlschema_test.go new file mode 100644 index 000000000..29f709e14 --- /dev/null +++ b/internal/dbtest/sqlschema_test.go @@ -0,0 +1,222 @@ +package dbtest_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + "github.com/uptrace/bun/migrate/sqlschema" +) + +func TestRefMap_Update(t *testing.T) { + for _, tt := range []struct { + name string + fks []sqlschema.FK + update func(rm sqlschema.RefMap) int + wantUpdated int + wantFKs []sqlschema.FK + }{ + { + name: "update table reference in all FKs that reference its columns", + fks: []sqlschema.FK{ + { + From: sqlschema.C("x", "y", "z"), + To: sqlschema.C("a", "b", "c"), + }, + { + From: sqlschema.C("m", "n", "o"), + To: sqlschema.C("a", "b", "d"), + }, + }, + update: func(rm sqlschema.RefMap) int { + return rm.UpdateT(sqlschema.T("a", "b"), sqlschema.T("a", "new_b")) + }, + wantUpdated: 2, + wantFKs: []sqlschema.FK{ // checking 1 of the 2 updated ones should be enough + { + From: sqlschema.C("x", "y", "z"), + To: sqlschema.C("a", "new_b", "c"), + }, + }, + }, + { + name: "update table reference in FK which points to the same table", + fks: []sqlschema.FK{ + { + From: 
sqlschema.C("a", "b", "child"), + To: sqlschema.C("a", "b", "parent"), + }, + }, + update: func(rm sqlschema.RefMap) int { + return rm.UpdateT(sqlschema.T("a", "b"), sqlschema.T("a", "new_b")) + }, + wantUpdated: 1, + wantFKs: []sqlschema.FK{ + { + From: sqlschema.C("a", "new_b", "child"), + To: sqlschema.C("a", "new_b", "parent"), + }, + }, + }, + { + name: "update column reference in all FKs which depend on it", + fks: []sqlschema.FK{ + { + From: sqlschema.C("x", "y", "z"), + To: sqlschema.C("a", "b", "c"), + }, + { + From: sqlschema.C("a", "b", "c"), + To: sqlschema.C("m", "n", "o"), + }, + }, + update: func(rm sqlschema.RefMap) int { + return rm.UpdateC(sqlschema.C("a", "b", "c"), "c_new") + }, + wantUpdated: 2, + wantFKs: []sqlschema.FK{ + { + From: sqlschema.C("x", "y", "z"), + To: sqlschema.C("a", "b", "c_new"), + }, + }, + }, + { + name: "foreign keys defined on multiple columns", + fks: []sqlschema.FK{ + { + From: sqlschema.C("a", "b", "c1", "c2"), + To: sqlschema.C("q", "r", "s1", "s2"), + }, + { + From: sqlschema.C("m", "n", "o", "p"), + To: sqlschema.C("a", "b", "c2"), + }, + }, + update: func(rm sqlschema.RefMap) int { + return rm.UpdateC(sqlschema.C("a", "b", "c2"), "x2") + }, + wantUpdated: 2, + wantFKs: []sqlschema.FK{ + { + From: sqlschema.C("a", "b", "c1", "x2"), + To: sqlschema.C("q", "r", "s1", "s2"), + }, + }, + }, + } { + t.Run(tt.name, func(t *testing.T) { + rm := sqlschema.NewRefMap(tt.fks...) + + n := tt.update(rm) + + require.Equal(t, tt.wantUpdated, n) + require.Equal(t, tt.wantUpdated, len(rm.Updated())) + checkHasFK(t, rm, tt.wantFKs...) 
+ }) + } +} + +func checkHasFK(tb testing.TB, rm sqlschema.RefMap, fks ...sqlschema.FK) { +outer: + for _, want := range fks { + for _, gotptr := range rm { + if got := *gotptr; got == want { + continue outer + } + } + tb.Fatalf("did not find FK%+v", want) + } +} + +func TestRefMap_Delete(t *testing.T) { + for _, tt := range []struct { + name string + fks []sqlschema.FK + del func(rm sqlschema.RefMap) int + wantDeleted []sqlschema.FK + }{ + { + name: "delete FKs that depend on the table", + fks: []sqlschema.FK{ + { + From: sqlschema.C("a", "b", "c"), + To: sqlschema.C("x", "y", "z"), + }, + { + From: sqlschema.C("m", "n", "o"), + To: sqlschema.C("a", "b", "d"), + }, + { + From: sqlschema.C("q", "r", "s"), + To: sqlschema.C("w", "w", "w"), + }, + }, + del: func(rm sqlschema.RefMap) int { + return rm.DeleteT(sqlschema.T("a", "b")) + }, + wantDeleted: []sqlschema.FK{ + { + From: sqlschema.C("a", "b", "c"), + To: sqlschema.C("x", "y", "z"), + }, + { + From: sqlschema.C("m", "n", "o"), + To: sqlschema.C("a", "b", "d"), + }, + }, + }, + { + name: "delete FKs that depend on the column", + fks: []sqlschema.FK{ + { + From: sqlschema.C("a", "b", "c"), + To: sqlschema.C("x", "y", "z"), + }, + { + From: sqlschema.C("q", "r", "s"), + To: sqlschema.C("w", "w", "w"), + }, + }, + del: func(rm sqlschema.RefMap) int { + return rm.DeleteC(sqlschema.C("a", "b", "c")) + }, + wantDeleted: []sqlschema.FK{ + { + From: sqlschema.C("a", "b", "c"), + To: sqlschema.C("x", "y", "z"), + }, + }, + }, + { + name: "foreign keys defined on multiple columns", + fks: []sqlschema.FK{ + { + From: sqlschema.C("a", "b", "c1", "c2"), + To: sqlschema.C("q", "r", "s1", "s2"), + }, + { + From: sqlschema.C("m", "n", "o", "p"), + To: sqlschema.C("a", "b", "c2"), + }, + }, + del: func(rm sqlschema.RefMap) int { + return rm.DeleteC(sqlschema.C("a", "b", "c1")) + }, + wantDeleted: []sqlschema.FK{ + { + From: sqlschema.C("a", "b", "c1", "c2"), + To: sqlschema.C("q", "r", "s1", "s2"), + }, + }, + }, + } { + 
t.Run(tt.name, func(t *testing.T) { + rm := sqlschema.NewRefMap(tt.fks...) + + n := tt.del(rm) + + require.Equal(t, len(tt.wantDeleted), n) + require.ElementsMatch(t, rm.Deleted(), tt.wantDeleted) + }) + } +} diff --git a/migrate/auto.go b/migrate/auto.go index c58e243d9..677ce6787 100644 --- a/migrate/auto.go +++ b/migrate/auto.go @@ -107,7 +107,6 @@ func NewAutoMigrator(db *bun.DB, opts ...AutoMigratorOption) (*AutoMigrator, err } func (am *AutoMigrator) diff(ctx context.Context) (Changeset, error) { - var detector Detector var changes Changeset var err error @@ -120,7 +119,7 @@ func (am *AutoMigrator) diff(ctx context.Context) (Changeset, error) { if err != nil { return changes, err } - return detector.Diff(got, want), nil + return Diff(got, want), nil } // Migrate writes required changes to a new migration file and runs the migration. @@ -167,12 +166,23 @@ func (am *AutoMigrator) Run(ctx context.Context) error { // INTERNAL ------------------------------------------------------------------- -type Detector struct{} +func Diff(got, want sqlschema.State) Changeset { + detector := newDetector() + return detector.DetectChanges(got, want) +} -func (d *Detector) Diff(got, want sqlschema.State) Changeset { - var changes Changeset +type detector struct { + changes Changeset +} + +func newDetector() *detector { + return &detector{} +} - oldModels := newTableSet(got.Tables...) +func (d *detector) DetectChanges(got, want sqlschema.State) Changeset { + + // TableSets for discovering CREATE/RENAME/DROP TABLE + oldModels := newTableSet(got.Tables...) // newModels := newTableSet(want.Tables...) 
addedModels := newModels.Sub(oldModels) @@ -182,7 +192,7 @@ AddedLoop: removedModels := oldModels.Sub(newModels) for _, removed := range removedModels.Values() { if d.canRename(added, removed) { - changes.Add(&RenameTable{ + d.changes.Add(&RenameTable{ Schema: removed.Schema, From: removed.Name, To: added.Name, @@ -196,7 +206,7 @@ AddedLoop: } } // If a new table did not appear because of the rename operation, then it must've been created. - changes.Add(&CreateTable{ + d.changes.Add(&CreateTable{ Schema: added.Schema, Name: added.Name, Model: added.Model, @@ -205,17 +215,39 @@ AddedLoop: // Tables that aren't present anymore and weren't renamed were deleted. for _, t := range oldModels.Sub(newModels).Values() { - changes.Add(&DropTable{ + d.changes.Add(&DropTable{ Schema: t.Schema, Name: t.Name, }) } - return changes + // Compare FKs + for fk /*, fkName */ := range want.FKs { + if _, ok := got.FKs[fk]; !ok { + d.changes.Add(&AddForeignKey{ + SourceTable: fk.From.Table, + SourceColumns: fk.From.Column.Split(), + TargetTable: fk.To.Table, + TargetColums: fk.To.Column.Split(), + }) + } + } + + for fk, fkName := range got.FKs { + if _, ok := want.FKs[fk]; !ok { + d.changes.Add(&DropForeignKey{ + Schema: fk.From.Schema, + Table: fk.From.Table, + ConstraintName: fkName, + }) + } + } + + return d.changes } // canRename checks if t1 can be renamed to t2. 
-func (d Detector) canRename(t1, t2 sqlschema.Table) bool { +func (d detector) canRename(t1, t2 sqlschema.Table) bool { return t1.Schema == t2.Schema && sqlschema.EqualSignatures(t1, t2) } @@ -231,6 +263,9 @@ func (c Changeset) String() string { for _, op := range c.operations { ops = append(ops, op.String()) } + if len(ops) == 0 { + return "" + } return strings.Join(ops, "\n") } @@ -384,6 +419,50 @@ func trimSchema(name string) string { return name } +type AddForeignKey struct { + SourceTable string + SourceColumns []string + TargetTable string + TargetColums []string +} + +var _ Operation = (*AddForeignKey)(nil) + +func (op AddForeignKey) String() string { + return fmt.Sprintf("AddForeignKey %s(%s) references %s(%s)", + op.SourceTable, strings.Join(op.SourceColumns, ","), + op.TargetTable, strings.Join(op.TargetColums, ","), + ) +} + +func (op *AddForeignKey) Func(m sqlschema.Migrator) MigrationFunc { + return nil +} + +func (op *AddForeignKey) GetReverse() Operation { + return nil +} + +type DropForeignKey struct { + Schema string + Table string + ConstraintName string +} + +var _ Operation = (*DropForeignKey)(nil) + +func (op *DropForeignKey) String() string { + return fmt.Sprintf("DropFK %q on table %q.%q", op.ConstraintName, op.Schema, op.Table) +} + +func (op *DropForeignKey) Func(m sqlschema.Migrator) MigrationFunc { + return nil +} + +func (op *DropForeignKey) GetReverse() Operation { + return nil +} + // sqlschema utils ------------------------------------------------------------ // tableSet stores unique table definitions. 
diff --git a/migrate/sqlschema/inspector.go b/migrate/sqlschema/inspector.go index 2f44f93c5..2060fef0c 100644 --- a/migrate/sqlschema/inspector.go +++ b/migrate/sqlschema/inspector.go @@ -47,7 +47,9 @@ func NewSchemaInspector(tables *schema.Tables) *SchemaInspector { } func (si *SchemaInspector) Inspect(ctx context.Context) (State, error) { - var state State + state := State{ + FKs: make(map[FK]string), + } for _, t := range si.tables.All() { columns := make(map[string]Column) for _, f := range t.Fields { @@ -67,6 +69,22 @@ func (si *SchemaInspector) Inspect(ctx context.Context) (State, error) { Model: t.ZeroIface, Columns: columns, }) + + for _, rel := range t.Relations { + var fromCols, toCols []string + for _, f := range rel.BaseFields { + fromCols = append(fromCols, f.Name) + } + for _, f := range rel.JoinFields { + toCols = append(toCols, f.Name) + } + + target := rel.JoinTable + state.FKs[FK{ + From: C(t.Schema, t.Name, fromCols...), + To: C(target.Schema, target.Name, toCols...), + }] = "" + } } return state, nil } diff --git a/migrate/sqlschema/state.go b/migrate/sqlschema/state.go index 8f7e96b0d..c57ea36d0 100644 --- a/migrate/sqlschema/state.go +++ b/migrate/sqlschema/state.go @@ -1,7 +1,10 @@ package sqlschema +import "strings" + type State struct { Tables []Table + FKs map[FK]string } type Table struct { @@ -61,3 +64,217 @@ func (s *signature) Equals(other signature) bool { } return true } + +// tFQN is a fully-qualified table name. +type tFQN struct { + Schema string + Table string +} + +func T(schema, table string) tFQN { return tFQN{Schema: schema, Table: table} } + +// cFQN is a fully-qualified column name. +type cFQN struct { + tFQN + Column composite +} + +func C(schema, table string, columns ...string) cFQN { + return cFQN{tFQN: T(schema, table), Column: newComposite(columns...)} +} + +// composite is a hashable representation of []string used to define FKs that depend on multiple columns. 
+// Although having duplicated column references in a FK is illegal, composite neither validate nor enforce this constraint on the caller. +type composite string + +// newComposite creates a composite column from a slice of column names. +func newComposite(columns ...string) composite { + return composite(strings.Join(columns, ",")) +} + +// Split returns a slice of column names that make up the composite. +func (c composite) Split() []string { + return strings.Split(string(c), ",") +} + +// Contains checks that a composite column contains every part of another composite. +func (c composite) Contains(other composite) bool { + var count int + checkColumns := other.Split() + wantCount := len(checkColumns) + + for _, check := range checkColumns { + for _, column := range c.Split() { + if check == column { + count++ + } + if count == wantCount { + return true + } + } + } + return count == wantCount +} + +// Replace renames a column if it is part of the composite. +// If a composite consists of multiple columns, only one column will be renamed. +func (c composite) Replace(oldColumn, newColumn string) composite { + columns := c.Split() + for i, column := range columns { + if column == oldColumn { + columns[i] = newColumn + return newComposite(columns...) + } + } + return c +} + +// T returns the FQN of the column's parent table. +func (c cFQN) T() tFQN { + return tFQN{Schema: c.Schema, Table: c.Table} +} + +// FK defines a foreign key constraint. +// FK depends on a column/table if their FQN is included in its definition. +// +// Example: +// +// FK{ +// From: C{"A", "B", "C"}, +// To: C{"X", "Y", "Z"}, +// } +// - depends on C{"A", "B", "C"} +// - depends on C{"X", "Y", "Z"} +// - depends on T{"A", "B"} and T{"X", "Y"} +// +// FIXME: current design does not allow for one column referencing multiple columns. Or does it? Think again. 
+// Consider: +// +// CONSTRAINT fk_customers FOREIGN KEY (customer_id) REFERENCES customers(id) +// CONSTRAINT fk_orders FOREIGN KEY (customer_id) REFERENCES orders(customer_id) +type FK struct { + From cFQN // From is the referencing column. + To cFQN // To is the referenced column. +} + +// DependsT checks if either part of the FK's definition mentions T +// and returns the columns that belong to T. Notice that *C allows modifying the column's FQN. +func (fk *FK) DependsT(t tFQN) (ok bool, cols []*cFQN) { + if c := &fk.From; c.T() == t { + ok = true + cols = append(cols, c) + } + if c := &fk.To; c.T() == t { + ok = true + cols = append(cols, c) + } + if !ok { + return false, nil + } + return +} + +// DependsC checks if the FK definition mentions C and returns a modifiable FQN of the matching column. +func (fk *FK) DependsC(c cFQN) (bool, *cFQN) { + switch { + case fk.From.Column.Contains(c.Column): + return true, &fk.From + case fk.To.Column.Contains(c.Column): + return true, &fk.To + } + return false, nil +} + +// RefMap helps detecting modified FK relations. +// It starts with an initial state and provides methods to update and delete +// foreign key relations based on the column or table they depend on. +type RefMap map[FK]*FK + +// deleted is a special value that RefMap uses to denote a deleted FK constraint. +var deleted FK + +// NewRefMap records the FK's initial state to a RefMap. +func NewRefMap(fks ...FK) RefMap { + ref := make(RefMap) + for _, fk := range fks { + copyfk := fk + ref[fk] = ©fk + } + return ref +} + +// UpdateT updates the table FQN in all FKs that depend on it, e.g. if a table is renamed or moved to a different schema. +// Returns the number of updated entries. 
+func (r RefMap) UpdateT(oldT, newT tFQN) (n int) {
+	for _, fk := range r {
+		ok, cols := fk.DependsT(oldT)
+		if !ok {
+			continue
+		}
+		for _, c := range cols {
+			c.Schema = newT.Schema
+			c.Table = newT.Table
+		}
+		n++
+	}
+	return
+}
+
+// UpdateC updates the column FQN in all FKs that depend on it, e.g. if a column is renamed,
+// and so, only the column-name part of the FQN can be updated. Returns the number of updated entries.
+func (r RefMap) UpdateC(oldC cFQN, newColumn string) (n int) {
+	for _, fk := range r {
+		if ok, col := fk.DependsC(oldC); ok {
+			oldColumns := oldC.Column.Split()
+			// UpdateC can only update 1 column at a time.
+			col.Column = col.Column.Replace(oldColumns[0], newColumn)
+			n++
+		}
+	}
+	return
+}
+
+// DeleteT marks all FKs that depend on the table as deleted.
+// Returns the number of deleted entries.
+func (r RefMap) DeleteT(t tFQN) (n int) {
+	for old, fk := range r {
+		if ok, _ := fk.DependsT(t); ok {
+			r[old] = &deleted
+			n++
+		}
+	}
+	return
+}
+
+// DeleteC marks all FKs that depend on the column as deleted.
+// Returns the number of deleted entries.
+func (r RefMap) DeleteC(c cFQN) (n int) {
+	for old, fk := range r {
+		if ok, _ := fk.DependsC(c); ok {
+			r[old] = &deleted
+			n++
+		}
+	}
+	return
+}
+
+// Updated returns FKs that were updated, both their old and new definitions.
+func (r RefMap) Updated() map[FK]FK {
+	fks := make(map[FK]FK)
+	for old, fk := range r {
+		if old != *fk {
+			fks[old] = *fk
+		}
+	}
+	return fks
+}
+
+// Deleted gets all FKs that were marked as deleted.
+func (r RefMap) Deleted() (fks []FK) { + for old, fk := range r { + if fk == &deleted { + fks = append(fks, old) + } + } + return +} From 4c1dfdbe99c73d0c0f2d7b1f8b11adf30c6a41f7 Mon Sep 17 00:00:00 2001 From: bevzzz Date: Thu, 9 Nov 2023 23:23:51 +0100 Subject: [PATCH 12/55] feat: migrate FKs --- dialect/pgdialect/alter_table.go | 27 +++++++++ internal/dbtest/db_test.go | 2 +- internal/dbtest/migrate_test.go | 94 +++++++++++++++++++++++++++++++- migrate/auto.go | 32 +++++++---- migrate/sqlschema/migrator.go | 2 + migrate/sqlschema/state.go | 17 +++--- 6 files changed, 153 insertions(+), 21 deletions(-) diff --git a/dialect/pgdialect/alter_table.go b/dialect/pgdialect/alter_table.go index af103fe86..192d9138f 100644 --- a/dialect/pgdialect/alter_table.go +++ b/dialect/pgdialect/alter_table.go @@ -18,6 +18,8 @@ type Migrator struct { db *bun.DB } +var _ sqlschema.Migrator = (*Migrator)(nil) + func (m *Migrator) RenameTable(ctx context.Context, oldName, newName string) error { query := fmt.Sprintf("ALTER TABLE %s RENAME TO %s", oldName, newName) _, err := m.db.ExecContext(ctx, query) @@ -26,3 +28,28 @@ func (m *Migrator) RenameTable(ctx context.Context, oldName, newName string) err } return nil } + +func (m *Migrator) AddContraint(ctx context.Context, fk sqlschema.FK, name string) error { + q := m.db.NewRaw( + "ALTER TABLE ?.? ADD CONSTRAINT ? FOREIGN KEY (?) REFERENCES ?.? (?)", + bun.Safe(fk.From.Schema), bun.Safe(fk.From.Table), bun.Safe(name), + bun.Safe(fk.From.Column.String()), + bun.Safe(fk.To.Schema), bun.Safe(fk.To.Table), + bun.Safe(fk.To.Column.String()), + ) + if _, err := q.Exec(ctx); err != nil { + return err + } + return nil +} + +func (m *Migrator) DropContraint(ctx context.Context, schema, table, name string) error { + q := m.db.NewRaw( + "ALTER TABLE ?.? 
DROP CONSTRAINT ?", + bun.Safe(schema), bun.Safe(table), bun.Safe(name), + ) + if _, err := q.Exec(ctx); err != nil { + return err + } + return nil +} diff --git a/internal/dbtest/db_test.go b/internal/dbtest/db_test.go index c3ad08565..ddc9d70a5 100644 --- a/internal/dbtest/db_test.go +++ b/internal/dbtest/db_test.go @@ -1784,7 +1784,7 @@ func mustResetModel(tb testing.TB, ctx context.Context, db *bun.DB, models ...in func mustDropTableOnCleanup(tb testing.TB, ctx context.Context, db *bun.DB, models ...interface{}) { tb.Cleanup(func() { for _, model := range models { - drop := db.NewDropTable().IfExists().Model(model) + drop := db.NewDropTable().IfExists().Cascade().Model(model) _, err := drop.Exec(ctx) require.NoError(tb, err, "must drop table: %q", drop.GetTableName()) } diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index d1da1ea34..b721ab644 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -170,6 +170,7 @@ func TestAutoMigrator_Run(t *testing.T) { }{ {testRenameTable}, {testCreateDropTable}, + {testAlterForeignKeys}, } testEachDB(t, func(t *testing.T, dbName string, db *bun.DB) { @@ -260,6 +261,87 @@ func testCreateDropTable(t *testing.T, db *bun.DB) { require.Equal(t, "createme", tables[0].Name) } +func testAlterForeignKeys(t *testing.T, db *bun.DB) { + // Initial state -- each thing has one owner + type OwnerExclusive struct { + bun.BaseModel `bun:"owners"` + ID int64 `bun:",pk"` + } + + type ThingExclusive struct { + bun.BaseModel `bun:"things"` + ID int64 `bun:",pk"` + OwnerID int64 `bun:",notnull"` + + Owner *OwnerExclusive `bun:"rel:belongs-to,join:owner_id=id"` + } + + // Change -- each thing has multiple owners + + type ThingCommon struct { + bun.BaseModel `bun:"things"` + ID int64 `bun:",pk"` + } + + type OwnerCommon struct { + bun.BaseModel `bun:"owners"` + ID int64 `bun:",pk"` + Things []*ThingCommon `bun:"m2m:things_to_owners,join:Owner=Thing"` + } + + type ThingsToOwner struct { 
+ OwnerID int64 `bun:",notnull"` + Owner *OwnerCommon `bun:"rel:belongs-to,join:owner_id=id"` + ThingID int64 `bun:",notnull"` + Thing *ThingCommon `bun:"rel:belongs-to,join:thing_id=id"` + } + + // Arrange + ctx := context.Background() + dbInspector, err := sqlschema.NewInspector(db) + if err != nil { + t.Skip(err) + } + db.RegisterModel((*ThingsToOwner)(nil)) + + mustCreateTableWithFKs(t, ctx, db, + (*OwnerExclusive)(nil), + (*ThingExclusive)(nil), + ) + mustDropTableOnCleanup(t, ctx, db, (*ThingsToOwner)(nil)) + + m, err := migrate.NewAutoMigrator(db, + migrate.WithTableNameAuto(migrationsTable), + migrate.WithLocksTableNameAuto(migrationLocksTable), + migrate.WithModel((*ThingCommon)(nil)), + migrate.WithModel((*OwnerCommon)(nil)), + migrate.WithModel((*ThingsToOwner)(nil)), + ) + require.NoError(t, err) + + // Act + err = m.Run(ctx) + require.NoError(t, err) + + // Assert + state, err := dbInspector.Inspect(ctx) + require.NoError(t, err) + + defaultSchema := db.Dialect().DefaultSchema() + require.Contains(t, state.FKs, sqlschema.FK{ + From: sqlschema.C(defaultSchema, "things_to_owners", "owner_id"), + To: sqlschema.C(defaultSchema, "owners", "id"), + }) + require.Contains(t, state.FKs, sqlschema.FK{ + From: sqlschema.C(defaultSchema, "things_to_owners", "thing_id"), + To: sqlschema.C(defaultSchema, "things", "id"), + }) + require.NotContains(t, state.FKs, sqlschema.FK{ + From: sqlschema.C(defaultSchema, "things", "owner_id"), + To: sqlschema.C(defaultSchema, "owners", "id"), + }) +} + func TestDetector_Diff(t *testing.T) { type Journal struct { ISBN string `bun:"isbn,pk"` @@ -419,16 +501,20 @@ func TestDetector_Diff(t *testing.T) { }, want: []migrate.Operation{ &migrate.AddForeignKey{ + SourceSchema: dialect.DefaultSchema(), SourceTable: "users", SourceColumns: []string{"pet_kind", "pet_name"}, + TargetSchema: dialect.DefaultSchema(), TargetTable: "pets", - TargetColums: []string{"kind", "nickname"}, + TargetColumns: []string{"kind", "nickname"}, }, 
&migrate.AddForeignKey{ + SourceSchema: dialect.DefaultSchema(), SourceTable: "users", SourceColumns: []string{"friend"}, + TargetSchema: dialect.DefaultSchema(), TargetTable: "users", - TargetColums: []string{"username"}, + TargetColumns: []string{"username"}, }, }, }, @@ -447,10 +533,12 @@ func TestDetector_Diff(t *testing.T) { Model: &Owner{}, }, &migrate.AddForeignKey{ + SourceSchema: dialect.DefaultSchema(), SourceTable: "things", SourceColumns: []string{"owner_id"}, + TargetSchema: dialect.DefaultSchema(), TargetTable: "owners", - TargetColums: []string{"id"}, + TargetColumns: []string{"id"}, }, }, }, diff --git a/migrate/auto.go b/migrate/auto.go index 677ce6787..37b295382 100644 --- a/migrate/auto.go +++ b/migrate/auto.go @@ -4,6 +4,7 @@ import ( "context" "fmt" "strings" + "time" "github.com/uptrace/bun" "github.com/uptrace/bun/migrate/sqlschema" @@ -172,7 +173,7 @@ func Diff(got, want sqlschema.State) Changeset { } type detector struct { - changes Changeset + changes Changeset } func newDetector() *detector { @@ -225,10 +226,12 @@ AddedLoop: for fk /*, fkName */ := range want.FKs { if _, ok := got.FKs[fk]; !ok { d.changes.Add(&AddForeignKey{ + SourceSchema: fk.From.Schema, SourceTable: fk.From.Table, SourceColumns: fk.From.Column.Split(), + TargetSchema: fk.To.Schema, TargetTable: fk.To.Table, - TargetColums: fk.To.Column.Split(), + TargetColumns: fk.To.Column.Split(), }) } } @@ -420,27 +423,34 @@ func trimSchema(name string) string { } type AddForeignKey struct { + SourceSchema string SourceTable string SourceColumns []string + TargetSchema string TargetTable string - TargetColums []string + TargetColumns []string } var _ Operation = (*AddForeignKey)(nil) func (op AddForeignKey) String() string { - return fmt.Sprintf("AddForeignKey %s(%s) references %s(%s)", - op.SourceTable, strings.Join(op.SourceColumns, ","), - op.TargetTable, strings.Join(op.TargetColums, ","), + return fmt.Sprintf("AddForeignKey %s.%s(%s) references %s.%s(%s)", + op.SourceSchema, 
op.SourceTable, strings.Join(op.SourceColumns, ","), + op.SourceTable, op.TargetTable, strings.Join(op.TargetColumns, ","), ) } func (op *AddForeignKey) Func(m sqlschema.Migrator) MigrationFunc { - return nil + return func(ctx context.Context, db *bun.DB) error { + return m.AddContraint(ctx, sqlschema.FK{ + From: sqlschema.C(op.SourceSchema, op.SourceTable, op.SourceColumns...), + To: sqlschema.C(op.TargetSchema, op.TargetTable, op.TargetColumns...), + }, "dummy_name_"+fmt.Sprint(time.Now().UnixNano())) + } } func (op *AddForeignKey) GetReverse() Operation { - return nil + return &noop{} // TODO: unless the WithFKNameFunc is specified, we cannot know what the constraint is called } type DropForeignKey struct { @@ -456,11 +466,13 @@ func (op *DropForeignKey) String() string { } func (op *DropForeignKey) Func(m sqlschema.Migrator) MigrationFunc { - return nil + return func(ctx context.Context, db *bun.DB) error { + return m.DropContraint(ctx, op.Schema, op.Table, op.ConstraintName) + } } func (op *DropForeignKey) GetReverse() Operation { - return nil + return &noop{} // TODO: store "OldFK" to recreate it } // sqlschema utils ------------------------------------------------------------ diff --git a/migrate/sqlschema/migrator.go b/migrate/sqlschema/migrator.go index 41b481f77..564e42a96 100644 --- a/migrate/sqlschema/migrator.go +++ b/migrate/sqlschema/migrator.go @@ -17,6 +17,8 @@ type Migrator interface { RenameTable(ctx context.Context, oldName, newName string) error CreateTable(ctx context.Context, model interface{}) error DropTable(ctx context.Context, schema, table string) error + AddContraint(ctx context.Context, fk FK, name string) error + DropContraint(ctx context.Context, schema, table, name string) error } // Migrator is a dialect-agnostic wrapper for sqlschema.Dialect diff --git a/migrate/sqlschema/state.go b/migrate/sqlschema/state.go index c57ea36d0..f48190c0f 100644 --- a/migrate/sqlschema/state.go +++ b/migrate/sqlschema/state.go @@ -92,9 +92,13 @@ func 
newComposite(columns ...string) composite { return composite(strings.Join(columns, ",")) } +func (c composite) String() string { + return string(c) +} + // Split returns a slice of column names that make up the composite. func (c composite) Split() []string { - return strings.Split(string(c), ",") + return strings.Split(c.String(), ",") } // Contains checks that a composite column contains every part of another composite. @@ -146,12 +150,6 @@ func (c cFQN) T() tFQN { // - depends on C{"A", "B", "C"} // - depends on C{"X", "Y", "Z"} // - depends on T{"A", "B"} and T{"X", "Y"} -// -// FIXME: current design does not allow for one column referencing multiple columns. Or does it? Think again. -// Consider: -// -// CONSTRAINT fk_customers FOREIGN KEY (customer_id) REFERENCES customers(id) -// CONSTRAINT fk_orders FOREIGN KEY (customer_id) REFERENCES orders(customer_id) type FK struct { From cFQN // From is the referencing column. To cFQN // To is the referenced column. @@ -188,6 +186,11 @@ func (fk *FK) DependsC(c cFQN) (bool, *cFQN) { // RefMap helps detecting modified FK relations. // It starts with an initial state and provides methods to update and delete // foreign key relations based on the column or table they depend on. +// +// Note: this is only important/necessary if we want to rename FKs instead of re-creating them. +// Most of the time it wouldn't make a difference, but there may be cases in which re-creating FKs could be costly +// and renaming them would be preferred. For that we could provided an options like WithRenameFKs(true) and +// WithRenameFKFunc(func(sqlschema.FK) string) to allow customizing the FK naming convention. type RefMap map[FK]*FK // deleted is a special value that RefMap uses to denote a deleted FK constraint. 
From a822fc5f8ae547b7cd41e1ca35609d519d78598b Mon Sep 17 00:00:00 2001 From: bevzzz Date: Sat, 11 Nov 2023 20:33:29 +0100 Subject: [PATCH 13/55] feat: improve FK handling Additionally: - allow custom FK names (limited applicability rn) - implement GetReverse for AddFK and DropFK - detect renamed foreign keys - EqualSignature handles empty models (no columns) --- dialect/pgdialect/alter_table.go | 32 ++-- internal/dbtest/migrate_test.go | 197 +++++++++++++++++++---- migrate/auto.go | 267 +++++++++++++++++++++++-------- migrate/sqlschema/migrator.go | 1 + migrate/sqlschema/state.go | 43 +++-- 5 files changed, 419 insertions(+), 121 deletions(-) diff --git a/dialect/pgdialect/alter_table.go b/dialect/pgdialect/alter_table.go index 192d9138f..f0b1c948b 100644 --- a/dialect/pgdialect/alter_table.go +++ b/dialect/pgdialect/alter_table.go @@ -2,7 +2,6 @@ package pgdialect import ( "context" - "fmt" "github.com/uptrace/bun" "github.com/uptrace/bun/migrate/sqlschema" @@ -20,15 +19,18 @@ type Migrator struct { var _ sqlschema.Migrator = (*Migrator)(nil) -func (m *Migrator) RenameTable(ctx context.Context, oldName, newName string) error { - query := fmt.Sprintf("ALTER TABLE %s RENAME TO %s", oldName, newName) - _, err := m.db.ExecContext(ctx, query) - if err != nil { +func (m *Migrator) exec(ctx context.Context, q *bun.RawQuery) error { + if _, err := q.Exec(ctx); err != nil { return err } return nil } +func (m *Migrator) RenameTable(ctx context.Context, oldName, newName string) error { + q := m.db.NewRaw("ALTER TABLE ? RENAME TO ?", bun.Ident(oldName), bun.Ident(newName)) + return m.exec(ctx, q) +} + func (m *Migrator) AddContraint(ctx context.Context, fk sqlschema.FK, name string) error { q := m.db.NewRaw( "ALTER TABLE ?.? ADD CONSTRAINT ? FOREIGN KEY (?) REFERENCES ?.? 
(?)", @@ -37,19 +39,21 @@ func (m *Migrator) AddContraint(ctx context.Context, fk sqlschema.FK, name strin bun.Safe(fk.To.Schema), bun.Safe(fk.To.Table), bun.Safe(fk.To.Column.String()), ) - if _, err := q.Exec(ctx); err != nil { - return err - } - return nil + return m.exec(ctx, q) } func (m *Migrator) DropContraint(ctx context.Context, schema, table, name string) error { q := m.db.NewRaw( "ALTER TABLE ?.? DROP CONSTRAINT ?", - bun.Safe(schema), bun.Safe(table), bun.Safe(name), + bun.Ident(schema), bun.Ident(table), bun.Ident(name), ) - if _, err := q.Exec(ctx); err != nil { - return err - } - return nil + return m.exec(ctx, q) +} + +func (m *Migrator) RenameConstraint(ctx context.Context, schema, table, oldName, newName string) error { + q := m.db.NewRaw( + "ALTER TABLE ?.? RENAME CONSTRAINT ? TO ?", + bun.Ident(schema), bun.Ident(table), bun.Ident(oldName), bun.Ident(newName), + ) + return m.exec(ctx, q) } diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index b721ab644..79c53e1cc 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -4,6 +4,7 @@ import ( "context" "errors" "sort" + "strings" "testing" "time" @@ -171,6 +172,8 @@ func TestAutoMigrator_Run(t *testing.T) { {testRenameTable}, {testCreateDropTable}, {testAlterForeignKeys}, + {testCustomFKNameFunc}, + {testForceRenameFK}, } testEachDB(t, func(t *testing.T, dbName string, db *bun.DB) { @@ -328,6 +331,8 @@ func testAlterForeignKeys(t *testing.T, db *bun.DB) { require.NoError(t, err) defaultSchema := db.Dialect().DefaultSchema() + + // Crated 2 new constraints require.Contains(t, state.FKs, sqlschema.FK{ From: sqlschema.C(defaultSchema, "things_to_owners", "owner_id"), To: sqlschema.C(defaultSchema, "owners", "id"), @@ -336,13 +341,146 @@ func testAlterForeignKeys(t *testing.T, db *bun.DB) { From: sqlschema.C(defaultSchema, "things_to_owners", "thing_id"), To: sqlschema.C(defaultSchema, "things", "id"), }) + + // Dropped the initial one 
require.NotContains(t, state.FKs, sqlschema.FK{ From: sqlschema.C(defaultSchema, "things", "owner_id"), To: sqlschema.C(defaultSchema, "owners", "id"), }) } -func TestDetector_Diff(t *testing.T) { +func testForceRenameFK(t *testing.T, db *bun.DB) { + // Database state + type Owner struct { + ID int64 `bun:",pk"` + } + + type OwnedThing struct { + bun.BaseModel `bun:"table:things"` + ID int64 `bun:",pk"` + OwnerID int64 `bun:"owner_id,notnull"` + + Owner *Owner `bun:"rel:belongs-to,join:owner_id=id"` + } + + // Model state + type Person struct { + ID int64 `bun:",pk"` + } + + type PersonalThing struct { + bun.BaseModel `bun:"table:things"` + ID int64 `bun:",pk"` + PersonID int64 `bun:"owner_id,notnull"` + + Owner *Person `bun:"rel:belongs-to,join:owner_id=id"` + } + + ctx := context.Background() + dbInspector, err := sqlschema.NewInspector(db) + if err != nil { + t.Skip(err) + } + + mustCreateTableWithFKs(t, ctx, db, + (*Owner)(nil), + (*OwnedThing)(nil), + ) + mustDropTableOnCleanup(t, ctx, db, (*Person)(nil)) + + m, err := migrate.NewAutoMigrator(db, + migrate.WithTableNameAuto(migrationsTable), + migrate.WithLocksTableNameAuto(migrationLocksTable), + migrate.WithModel( + (*Person)(nil), + (*PersonalThing)(nil), + ), + migrate.WithFKNameFunc(func(fk sqlschema.FK) string { + return strings.Join([]string{ + fk.From.Table, fk.To.Table, "fkey", + }, "_") + }), + migrate.WithRenameFK(true), + ) + require.NoError(t, err) + + // Act + err = m.Run(ctx) + require.NoError(t, err) + + // Assert + state, err := dbInspector.Inspect(ctx) + require.NoError(t, err) + + schema := db.Dialect().DefaultSchema() + wantName, ok := state.FKs[sqlschema.FK{ + From: sqlschema.C(schema, "things", "owner_id"), + To: sqlschema.C(schema, "people", "id"), + }] + require.True(t, ok, "expect state.FKs to contain things_people_fkey") + require.Equal(t, wantName, "things_people_fkey") +} + +func testCustomFKNameFunc(t *testing.T, db *bun.DB) { + // Database state + type Column struct { + OID int64 
`bun:",pk"` + RelID int64 `bun:"attrelid,notnull"` + } + type Table struct { + OID int64 `bun:",pk"` + } + + // Model state + type ColumnM struct { + bun.BaseModel `bun:"table:columns"` + OID int64 `bun:",pk"` + RelID int64 `bun:"attrelid,notnull"` + + Table *Table `bun:"rel:belongs-to,join:attrelid=oid"` + } + type TableM struct { + bun.BaseModel `bun:"table:tables"` + OID int64 `bun:",pk"` + } + + ctx := context.Background() + dbInspector, err := sqlschema.NewInspector(db) + if err != nil { + t.Skip(err) + } + + mustCreateTableWithFKs(t, ctx, db, + (*Table)(nil), + (*Column)(nil), + ) + + m, err := migrate.NewAutoMigrator(db, + migrate.WithTableNameAuto(migrationsTable), + migrate.WithLocksTableNameAuto(migrationLocksTable), + migrate.WithFKNameFunc(func(sqlschema.FK) string { return "test_fkey" }), + migrate.WithModel((*TableM)(nil)), + migrate.WithModel((*ColumnM)(nil)), + ) + require.NoError(t, err) + + // Act + err = m.Run(ctx) + require.NoError(t, err) + + // Assert + state, err := dbInspector.Inspect(ctx) + require.NoError(t, err) + + fkName := state.FKs[sqlschema.FK{ + From: sqlschema.C(db.Dialect().DefaultSchema(), "columns", "attrelid"), + To: sqlschema.C(db.Dialect().DefaultSchema(), "tables", "oid"), + }] + require.Equal(t, fkName, "test_fkey") +} + +// TODO: rewrite these tests into AutoMigrator tests, Diff should be moved to migrate/internal package +func TestDiff(t *testing.T) { type Journal struct { ISBN string `bun:"isbn,pk"` Title string `bun:"title,notnull"` @@ -378,6 +516,8 @@ func TestDetector_Diff(t *testing.T) { } testEachDialect(t, func(t *testing.T, dialectName string, dialect schema.Dialect) { + defaultSchema := dialect.DefaultSchema() + for _, tt := range []struct { name string states func(testing.TB, context.Context, schema.Dialect) (stateDb sqlschema.State, stateModel sqlschema.State) @@ -418,7 +558,7 @@ func TestDetector_Diff(t *testing.T) { }, want: []migrate.Operation{ &migrate.RenameTable{ - Schema: dialect.DefaultSchema(), + 
Schema: defaultSchema, From: "journals", To: "journals_renamed", }, @@ -430,7 +570,7 @@ func TestDetector_Diff(t *testing.T) { Name: "billing.subscriptions", // TODO: fix once schema is used correctly }, &migrate.DropTable{ - Schema: dialect.DefaultSchema(), + Schema: defaultSchema, Name: "reviews", }, }, @@ -454,7 +594,7 @@ func TestDetector_Diff(t *testing.T) { }, want: []migrate.Operation{ &migrate.DropTable{ - Schema: dialect.DefaultSchema(), + Schema: defaultSchema, Name: "external_users", }, &migrate.CreateTable{ @@ -500,21 +640,19 @@ func TestDetector_Diff(t *testing.T) { ) }, want: []migrate.Operation{ - &migrate.AddForeignKey{ - SourceSchema: dialect.DefaultSchema(), - SourceTable: "users", - SourceColumns: []string{"pet_kind", "pet_name"}, - TargetSchema: dialect.DefaultSchema(), - TargetTable: "pets", - TargetColumns: []string{"kind", "nickname"}, + &migrate.AddFK{ + FK: sqlschema.FK{ + From: sqlschema.C(defaultSchema, "users", "pet_kind", "pet_name"), + To: sqlschema.C(defaultSchema, "pets", "kind", "nickname"), + }, + ConstraintName: "users_pet_kind_pet_name_fkey", }, - &migrate.AddForeignKey{ - SourceSchema: dialect.DefaultSchema(), - SourceTable: "users", - SourceColumns: []string{"friend"}, - TargetSchema: dialect.DefaultSchema(), - TargetTable: "users", - TargetColumns: []string{"username"}, + &migrate.AddFK{ + FK: sqlschema.FK{ + From: sqlschema.C(defaultSchema, "users", "friend"), + To: sqlschema.C(defaultSchema, "users", "username"), + }, + ConstraintName: "users_friend_fkey", }, }, }, @@ -532,13 +670,12 @@ func TestDetector_Diff(t *testing.T) { &migrate.CreateTable{ Model: &Owner{}, }, - &migrate.AddForeignKey{ - SourceSchema: dialect.DefaultSchema(), - SourceTable: "things", - SourceColumns: []string{"owner_id"}, - TargetSchema: dialect.DefaultSchema(), - TargetTable: "owners", - TargetColumns: []string{"id"}, + &migrate.AddFK{ + FK: sqlschema.FK{ + From: sqlschema.C(defaultSchema, "things", "owner_id"), + To: sqlschema.C(defaultSchema, 
"owners", "id"), + }, + ConstraintName: "things_owner_id_fkey", }, }, }, @@ -557,12 +694,14 @@ func TestDetector_Diff(t *testing.T) { }, want: []migrate.Operation{ &migrate.DropTable{ - Schema: dialect.DefaultSchema(), + Schema: defaultSchema, Name: "owners", }, - &migrate.DropForeignKey{ - Schema: dialect.DefaultSchema(), - Table: "things", + &migrate.DropFK{ + FK: sqlschema.FK{ + From: sqlschema.C(defaultSchema, "things", "owner_id"), + To: sqlschema.C(defaultSchema, "owners", "id"), + }, ConstraintName: "test_fkey", }, }, diff --git a/migrate/auto.go b/migrate/auto.go index 37b295382..87be34f42 100644 --- a/migrate/auto.go +++ b/migrate/auto.go @@ -4,7 +4,6 @@ import ( "context" "fmt" "strings" - "time" "github.com/uptrace/bun" "github.com/uptrace/bun/migrate/sqlschema" @@ -27,6 +26,29 @@ func WithExcludeTable(tables ...string) AutoMigratorOption { } } +// WithFKNameFunc sets the function to build a new name for created or renamed FK constraints. +// +// Notice: this option is not supported in SQLite dialect and will have no effect. +// SQLite does not implement ADD CONSTRAINT, so adding or renaming a constraint will require re-creating the table. +// We need to support custom FKNameFunc in CreateTable to control how FKs are named. +// +// More generally, this option will have no effect whenever FKs are included in the CREATE TABLE definition, +// which is the default strategy. Perhaps it would make sense to allow disabling this and switching to separate (CreateTable + AddFK) +func WithFKNameFunc(f func(sqlschema.FK) string) AutoMigratorOption { + return func(m *AutoMigrator) { + m.diffOpts = append(m.diffOpts, FKNameFunc(f)) + } +} + +// WithRenameFK prevents AutoMigrator from recreating foreign keys when their dependent relations are renamed, +// and forces it to run a RENAME CONSTRAINT query instead. Creating an index on a large table can take a very long time, +// and in those cases simply renaming the FK makes a lot more sense. 
+func WithRenameFK(enabled bool) AutoMigratorOption { + return func(m *AutoMigrator) { + m.diffOpts = append(m.diffOpts, DetectRenamedFKs(enabled)) + } +} + // WithTableNameAuto overrides default migrations table name. func WithTableNameAuto(table string) AutoMigratorOption { return func(m *AutoMigrator) { @@ -63,8 +85,8 @@ type AutoMigrator struct { // dbMigrator executes ALTER TABLE queries. dbMigrator sqlschema.Migrator - table string - locksTable string + table string // Migrations table (excluded from database inspection) + locksTable string // Migration locks table (excluded from database inspection) // includeModels define the migration scope. includeModels []interface{} @@ -72,6 +94,9 @@ type AutoMigrator struct { // excludeTables are excluded from database inspection. excludeTables []string + // diffOpts are passed to Diff. + diffOpts []DiffOption + // migratorOpts are passed to Migrator constructor. migratorOpts []MigratorOption } @@ -120,7 +145,7 @@ func (am *AutoMigrator) diff(ctx context.Context) (Changeset, error) { if err != nil { return changes, err } - return Diff(got, want), nil + return Diff(got, want, am.diffOpts...), nil } // Migrate writes required changes to a new migration file and runs the migration. @@ -166,33 +191,85 @@ func (am *AutoMigrator) Run(ctx context.Context) error { } // INTERNAL ------------------------------------------------------------------- +// TODO: move to migrate/internal + +type DiffOption func(*detectorConfig) -func Diff(got, want sqlschema.State) Changeset { - detector := newDetector() - return detector.DetectChanges(got, want) +func FKNameFunc(f func(sqlschema.FK) string) DiffOption { + return func(cfg *detectorConfig) { + cfg.FKNameFunc = f + } +} + +func DetectRenamedFKs(enabled bool) DiffOption { + return func(cfg *detectorConfig) { + cfg.DetectRenamedFKs = enabled + } +} + +func Diff(got, want sqlschema.State, opts ...DiffOption) Changeset { + detector := newDetector(got, want, opts...) 
+ return detector.DetectChanges() +} + +// detectorConfig controls how differences in the model states are resolved. +type detectorConfig struct { + FKNameFunc func(sqlschema.FK) string + DetectRenamedFKs bool } type detector struct { + // current state represents the existing database schema. + current sqlschema.State + + // target state represents the database schema defined in bun models. + target sqlschema.State + changes Changeset -} + refMap sqlschema.RefMap + + // fkNameFunc builds the name for created/renamed FK contraints. + fkNameFunc func(sqlschema.FK) string -func newDetector() *detector { - return &detector{} + // detectRenemedFKS controls how FKs are treated when their references (table/column) are renamed. + detectRenamedFKs bool } -func (d *detector) DetectChanges(got, want sqlschema.State) Changeset { +func newDetector(got, want sqlschema.State, opts ...DiffOption) *detector { + cfg := &detectorConfig{ + FKNameFunc: defaultFKName, + DetectRenamedFKs: false, + } + for _, opt := range opts { + opt(cfg) + } + + var existingFKs []sqlschema.FK + for fk := range got.FKs { + existingFKs = append(existingFKs, fk) + } + + return &detector{ + current: got, + target: want, + refMap: sqlschema.NewRefMap(existingFKs...), + fkNameFunc: cfg.FKNameFunc, + detectRenamedFKs: cfg.DetectRenamedFKs, + } +} - // TableSets for discovering CREATE/RENAME/DROP TABLE - oldModels := newTableSet(got.Tables...) // - newModels := newTableSet(want.Tables...) +func (d *detector) DetectChanges() Changeset { - addedModels := newModels.Sub(oldModels) + // Discover CREATE/RENAME/DROP TABLE + targetTables := newTableSet(d.target.Tables...) + currentTables := newTableSet(d.current.Tables...) 
// keeps state (which models still need to be checked) + addedTables := targetTables.Sub(currentTables) AddedLoop: - for _, added := range addedModels.Values() { - removedModels := oldModels.Sub(newModels) - for _, removed := range removedModels.Values() { - if d.canRename(added, removed) { + for _, added := range addedTables.Values() { + removedTables := currentTables.Sub(targetTables) + for _, removed := range removedTables.Values() { + if d.canRename(removed, added) { d.changes.Add(&RenameTable{ Schema: removed.Schema, From: removed.Name, @@ -201,8 +278,13 @@ AddedLoop: // TODO: check for altered columns. + // Update referenced table in all related FKs + if d.detectRenamedFKs { + d.refMap.UpdateT(removed.T(), added.T()) + } + // Do not check this model further, we know it was renamed. - oldModels.Remove(removed.Name) + currentTables.Remove(removed.Name) continue AddedLoop } } @@ -214,33 +296,52 @@ AddedLoop: }) } - // Tables that aren't present anymore and weren't renamed were deleted. - for _, t := range oldModels.Sub(newModels).Values() { + // Tables that aren't present anymore and weren't renamed or left untouched were deleted. + for _, t := range currentTables.Sub(targetTables).Values() { d.changes.Add(&DropTable{ Schema: t.Schema, Name: t.Name, }) } - // Compare FKs - for fk /*, fkName */ := range want.FKs { - if _, ok := got.FKs[fk]; !ok { - d.changes.Add(&AddForeignKey{ - SourceSchema: fk.From.Schema, - SourceTable: fk.From.Table, - SourceColumns: fk.From.Column.Split(), - TargetSchema: fk.To.Schema, - TargetTable: fk.To.Table, - TargetColumns: fk.To.Column.Split(), + // Compare and update FKs + + currentFKs := make(map[sqlschema.FK]string) + for k, v := range d.current.FKs { + currentFKs[k] = v + } + + if d.detectRenamedFKs { + // Add RenameFK migrations for updated FKs. 
+ for old, renamed := range d.refMap.Updated() { + newName := d.fkNameFunc(renamed) + d.changes.Add(&RenameFK{ + FK: renamed, // TODO: make sure this is applied after the table/columns are renamed + From: d.current.FKs[old], + To: d.fkNameFunc(renamed), }) + + // Here we can add this fk to "current.FKs" to prevent it from firing in the next 2 for-loops. + currentFKs[renamed] = newName + delete(currentFKs, old) } } - for fk, fkName := range got.FKs { - if _, ok := want.FKs[fk]; !ok { - d.changes.Add(&DropForeignKey{ - Schema: fk.From.Schema, - Table: fk.From.Table, + // Add AddFK migrations for newly added FKs. + for fk := range d.target.FKs { + if _, ok := currentFKs[fk]; !ok { + d.changes.Add(&AddFK{ + FK: fk, + ConstraintName: d.fkNameFunc(fk), + }) + } + } + + // Add DropFK migrations for removed FKs. + for fk, fkName := range currentFKs { + if _, ok := d.target.FKs[fk]; !ok { + d.changes.Add(&DropFK{ + FK: fk, ConstraintName: fkName, }) } @@ -422,57 +523,91 @@ func trimSchema(name string) string { return name } -type AddForeignKey struct { - SourceSchema string - SourceTable string - SourceColumns []string - TargetSchema string - TargetTable string - TargetColumns []string +// defaultFKName returns a name for the FK constraint in the format {tablename}_{columnname(s)}_fkey, following the Postgres convention. 
+func defaultFKName(fk sqlschema.FK) string { + columnnames := strings.Join(fk.From.Column.Split(), "_") + return fmt.Sprintf("%s_%s_fkey", fk.From.Table, columnnames) +} + +type AddFK struct { + FK sqlschema.FK + ConstraintName string } -var _ Operation = (*AddForeignKey)(nil) +var _ Operation = (*AddFK)(nil) -func (op AddForeignKey) String() string { - return fmt.Sprintf("AddForeignKey %s.%s(%s) references %s.%s(%s)", - op.SourceSchema, op.SourceTable, strings.Join(op.SourceColumns, ","), - op.SourceTable, op.TargetTable, strings.Join(op.TargetColumns, ","), +func (op AddFK) String() string { + source, target := op.FK.From, op.FK.To + return fmt.Sprintf("AddForeignKey %q %s.%s(%s) references %s.%s(%s)", op.ConstraintName, + source.Schema, source.Table, strings.Join(source.Column.Split(), ","), + target.Schema, target.Table, strings.Join(target.Column.Split(), ","), ) } -func (op *AddForeignKey) Func(m sqlschema.Migrator) MigrationFunc { +func (op *AddFK) Func(m sqlschema.Migrator) MigrationFunc { return func(ctx context.Context, db *bun.DB) error { - return m.AddContraint(ctx, sqlschema.FK{ - From: sqlschema.C(op.SourceSchema, op.SourceTable, op.SourceColumns...), - To: sqlschema.C(op.TargetSchema, op.TargetTable, op.TargetColumns...), - }, "dummy_name_"+fmt.Sprint(time.Now().UnixNano())) + return m.AddContraint(ctx, op.FK, op.ConstraintName) } } -func (op *AddForeignKey) GetReverse() Operation { - return &noop{} // TODO: unless the WithFKNameFunc is specified, we cannot know what the constraint is called +func (op *AddFK) GetReverse() Operation { + return &DropFK{ + FK: op.FK, + ConstraintName: op.ConstraintName, + } } -type DropForeignKey struct { - Schema string - Table string +type DropFK struct { + FK sqlschema.FK ConstraintName string } -var _ Operation = (*DropForeignKey)(nil) +var _ Operation = (*DropFK)(nil) + +func (op *DropFK) String() string { + source := op.FK.From.T() + return fmt.Sprintf("DropFK %q on table %q.%q", op.ConstraintName, source.Schema, 
source.Table) +} + +func (op *DropFK) Func(m sqlschema.Migrator) MigrationFunc { + return func(ctx context.Context, db *bun.DB) error { + source := op.FK.From.T() + return m.DropContraint(ctx, source.Schema, source.Table, op.ConstraintName) + } +} + +func (op *DropFK) GetReverse() Operation { + return &AddFK{ + FK: op.FK, + ConstraintName: op.ConstraintName, + } +} -func (op *DropForeignKey) String() string { - return fmt.Sprintf("DropFK %q on table %q.%q", op.ConstraintName, op.Schema, op.Table) +type RenameFK struct { + FK sqlschema.FK + From string + To string } -func (op *DropForeignKey) Func(m sqlschema.Migrator) MigrationFunc { +var _ Operation = (*RenameFK)(nil) + +func (op *RenameFK) String() string { + return "RenameFK" +} + +func (op *RenameFK) Func(m sqlschema.Migrator) MigrationFunc { return func(ctx context.Context, db *bun.DB) error { - return m.DropContraint(ctx, op.Schema, op.Table, op.ConstraintName) + table := op.FK.From + return m.RenameConstraint(ctx, table.Schema, table.Table, op.From, op.To) } } -func (op *DropForeignKey) GetReverse() Operation { - return &noop{} // TODO: store "OldFK" to recreate it +func (op *RenameFK) GetReverse() Operation { + return &RenameFK{ + FK: op.FK, + From: op.From, + To: op.To, + } } // sqlschema utils ------------------------------------------------------------ diff --git a/migrate/sqlschema/migrator.go b/migrate/sqlschema/migrator.go index 564e42a96..d8b555a35 100644 --- a/migrate/sqlschema/migrator.go +++ b/migrate/sqlschema/migrator.go @@ -19,6 +19,7 @@ type Migrator interface { DropTable(ctx context.Context, schema, table string) error AddContraint(ctx context.Context, fk FK, name string) error DropContraint(ctx context.Context, schema, table, name string) error + RenameConstraint(ctx context.Context, schema, table, oldName, newName string) error } // Migrator is a dialect-agnostic wrapper for sqlschema.Dialect diff --git a/migrate/sqlschema/state.go b/migrate/sqlschema/state.go index f48190c0f..553634d90 
100644 --- a/migrate/sqlschema/state.go +++ b/migrate/sqlschema/state.go @@ -14,6 +14,10 @@ type Table struct { Columns map[string]Column } +func (t *Table) T() tFQN { + return T(t.Schema, t.Name) +} + // Column stores attributes of a database column. type Column struct { SQLType string @@ -57,6 +61,9 @@ func (s *signature) scan(t Table) { // Equals returns true if 2 signatures share an identical set of columns. func (s *signature) Equals(other signature) bool { + if len(s.underlying) != len(other.underlying) { + return false + } for k, count := range s.underlying { if countOther, ok := other.underlying[k]; !ok || countOther != count { return false @@ -83,6 +90,11 @@ func C(schema, table string, columns ...string) cFQN { return cFQN{tFQN: T(schema, table), Column: newComposite(columns...)} } +// T returns the FQN of the column's parent table. +func (c cFQN) T() tFQN { + return c.tFQN +} + // composite is a hashable representation of []string used to define FKs that depend on multiple columns. // Although having duplicated column references in a FK is illegal, composite neither validate nor enforce this constraint on the caller. type composite string @@ -133,23 +145,14 @@ func (c composite) Replace(oldColumn, newColumn string) composite { return c } -// T returns the FQN of the column's parent table. -func (c cFQN) T() tFQN { - return tFQN{Schema: c.Schema, Table: c.Table} -} - // FK defines a foreign key constraint. -// FK depends on a column/table if their FQN is included in its definition. // // Example: // -// FK{ -// From: C{"A", "B", "C"}, -// To: C{"X", "Y", "Z"}, +// fk := FK{ +// From: C("a", "b", "c_1", "c_2"), // supports multicolumn FKs +// To: C("w", "x", "y_1", "y_2") // } -// - depends on C{"A", "B", "C"} -// - depends on C{"X", "Y", "Z"} -// - depends on T{"A", "B"} and T{"X", "Y"} type FK struct { From cFQN // From is the referencing column. To cFQN // To is the referenced column. 
@@ -157,6 +160,14 @@ type FK struct { // DependsT checks if either part of the FK's definition mentions T // and returns the columns that belong to T. Notice that *C allows modifying the column's FQN. +// +// Example: +// +// FK{ +// From: C("a", "b", "c"), +// To: C("x", "y", "z"), +// } +// depends on T("a", "b") and T("x", "y") func (fk *FK) DependsT(t tFQN) (ok bool, cols []*cFQN) { if c := &fk.From; c.T() == t { ok = true @@ -173,6 +184,14 @@ func (fk *FK) DependsT(t tFQN) (ok bool, cols []*cFQN) { } // DependsC checks if the FK definition mentions C and returns a modifiable FQN of the matching column. +// +// Example: +// +// FK{ +// From: C("a", "b", "c_1", "c_2"), +// To: C("w", "x", "y_1", "y_2"), +// } +// depends on C("a", "b", "c_1"), C("a", "b", "c_2"), C("w", "x", "y_1"), and C("w", "x", "y_2") func (fk *FK) DependsC(c cFQN) (bool, *cFQN) { switch { case fk.From.Column.Contains(c.Column): From 886d0a5b18aba272f1c86af2a2cf68ce4c8879f2 Mon Sep 17 00:00:00 2001 From: bevzzz Date: Fri, 17 Nov 2023 16:42:00 +0100 Subject: [PATCH 14/55] feat: detect renamed columns --- dialect/pgdialect/alter_table.go | 8 ++ internal/dbtest/migrate_test.go | 239 ++++++++++++++++++++++--------- migrate/auto.go | 88 +++++++++++- migrate/sqlschema/migrator.go | 1 + 4 files changed, 259 insertions(+), 77 deletions(-) diff --git a/dialect/pgdialect/alter_table.go b/dialect/pgdialect/alter_table.go index f0b1c948b..71b090e46 100644 --- a/dialect/pgdialect/alter_table.go +++ b/dialect/pgdialect/alter_table.go @@ -57,3 +57,11 @@ func (m *Migrator) RenameConstraint(ctx context.Context, schema, table, oldName, ) return m.exec(ctx, q) } + +func (m *Migrator) RenameColumn(ctx context.Context, schema, table, oldName, newName string) error { + q := m.db.NewRaw( + "ALTER TABLE ?.? RENAME COLUMN ? 
TO ?", + bun.Ident(schema), bun.Ident(table), bun.Ident(oldName), bun.Ident(newName), + ) + return m.exec(ctx, q) +} diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index 79c53e1cc..0a2d60e15 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -164,6 +164,37 @@ func testMigrateUpError(t *testing.T, db *bun.DB) { require.Equal(t, []string{"down2", "down1"}, history) } +// newAutoMigrator creates an AutoMigrator configured to use test migrations/locks tables. +// If the dialect doesn't support schema inspections or migrations, the test will fail with the corresponding error. +func newAutoMigrator(tb testing.TB, db *bun.DB, opts ...migrate.AutoMigratorOption) *migrate.AutoMigrator { + tb.Helper() + + opts = append(opts, + migrate.WithTableNameAuto(migrationsTable), + migrate.WithLocksTableNameAuto(migrationLocksTable), + ) + + m, err := migrate.NewAutoMigrator(db, opts...) + require.NoError(tb, err) + return m +} + +// inspectDbOrSkip returns a function to inspect the current state of the database. +// It calls tb.Skip() if the current dialect doesn't support database inspection and +// fails the test if the inspector cannot successfully retrieve database state. 
+func inspectDbOrSkip(tb testing.TB, db *bun.DB) func(context.Context) sqlschema.State { + tb.Helper() + inspector, err := sqlschema.NewInspector(db) + if err != nil { + tb.Skip(err) + } + return func(ctx context.Context) sqlschema.State { + state, err := inspector.Inspect(ctx) + require.NoError(tb, err) + return state + } +} + func TestAutoMigrator_Run(t *testing.T) { tests := []struct { @@ -174,6 +205,8 @@ func TestAutoMigrator_Run(t *testing.T) { {testAlterForeignKeys}, {testCustomFKNameFunc}, {testForceRenameFK}, + {testRenamedColumns}, + {testRenameColumnRenamesFK}, } testEachDB(t, func(t *testing.T, dbName string, db *bun.DB) { @@ -198,28 +231,19 @@ func testRenameTable(t *testing.T, db *bun.DB) { // Arrange ctx := context.Background() - dbInspector, err := sqlschema.NewInspector(db) - if err != nil { - t.Skip(err) - } + inspect := inspectDbOrSkip(t, db) mustResetModel(t, ctx, db, (*initial)(nil)) mustDropTableOnCleanup(t, ctx, db, (*changed)(nil)) - - m, err := migrate.NewAutoMigrator(db, - migrate.WithTableNameAuto(migrationsTable), - migrate.WithLocksTableNameAuto(migrationLocksTable), - migrate.WithModel((*changed)(nil))) - require.NoError(t, err) + m := newAutoMigrator(t, db, migrate.WithModel((*changed)(nil))) // Act - err = m.Run(ctx) + err := m.Run(ctx) require.NoError(t, err) // Assert - state, err := dbInspector.Inspect(ctx) - require.NoError(t, err) - + state := inspect(ctx) tables := state.Tables + require.Len(t, tables, 1) require.Equal(t, "changed", tables[0].Name) } @@ -238,28 +262,19 @@ func testCreateDropTable(t *testing.T, db *bun.DB) { // Arrange ctx := context.Background() - dbInspector, err := sqlschema.NewInspector(db) - if err != nil { - t.Skip(err) - } + inspect := inspectDbOrSkip(t, db) mustResetModel(t, ctx, db, (*DropMe)(nil)) mustDropTableOnCleanup(t, ctx, db, (*CreateMe)(nil)) - - m, err := migrate.NewAutoMigrator(db, - migrate.WithTableNameAuto(migrationsTable), - migrate.WithLocksTableNameAuto(migrationLocksTable), - 
migrate.WithModel((*CreateMe)(nil))) - require.NoError(t, err) + m := newAutoMigrator(t, db, migrate.WithModel((*CreateMe)(nil))) // Act - err = m.Run(ctx) + err := m.Run(ctx) require.NoError(t, err) // Assert - state, err := dbInspector.Inspect(ctx) - require.NoError(t, err) - + state := inspect(ctx) tables := state.Tables + require.Len(t, tables, 1) require.Equal(t, "createme", tables[0].Name) } @@ -301,10 +316,7 @@ func testAlterForeignKeys(t *testing.T, db *bun.DB) { // Arrange ctx := context.Background() - dbInspector, err := sqlschema.NewInspector(db) - if err != nil { - t.Skip(err) - } + inspect := inspectDbOrSkip(t, db) db.RegisterModel((*ThingsToOwner)(nil)) mustCreateTableWithFKs(t, ctx, db, @@ -313,23 +325,18 @@ func testAlterForeignKeys(t *testing.T, db *bun.DB) { ) mustDropTableOnCleanup(t, ctx, db, (*ThingsToOwner)(nil)) - m, err := migrate.NewAutoMigrator(db, - migrate.WithTableNameAuto(migrationsTable), - migrate.WithLocksTableNameAuto(migrationLocksTable), - migrate.WithModel((*ThingCommon)(nil)), - migrate.WithModel((*OwnerCommon)(nil)), - migrate.WithModel((*ThingsToOwner)(nil)), - ) - require.NoError(t, err) + m := newAutoMigrator(t, db, migrate.WithModel( + (*ThingCommon)(nil), + (*OwnerCommon)(nil), + (*ThingsToOwner)(nil), + )) // Act - err = m.Run(ctx) + err := m.Run(ctx) require.NoError(t, err) // Assert - state, err := dbInspector.Inspect(ctx) - require.NoError(t, err) - + state := inspect(ctx) defaultSchema := db.Dialect().DefaultSchema() // Crated 2 new constraints @@ -377,10 +384,7 @@ func testForceRenameFK(t *testing.T, db *bun.DB) { } ctx := context.Background() - dbInspector, err := sqlschema.NewInspector(db) - if err != nil { - t.Skip(err) - } + inspect := inspectDbOrSkip(t, db) mustCreateTableWithFKs(t, ctx, db, (*Owner)(nil), @@ -388,31 +392,27 @@ func testForceRenameFK(t *testing.T, db *bun.DB) { ) mustDropTableOnCleanup(t, ctx, db, (*Person)(nil)) - m, err := migrate.NewAutoMigrator(db, - 
migrate.WithTableNameAuto(migrationsTable), - migrate.WithLocksTableNameAuto(migrationLocksTable), + m := newAutoMigrator(t, db, migrate.WithModel( (*Person)(nil), (*PersonalThing)(nil), ), + migrate.WithRenameFK(true), migrate.WithFKNameFunc(func(fk sqlschema.FK) string { return strings.Join([]string{ fk.From.Table, fk.To.Table, "fkey", }, "_") }), - migrate.WithRenameFK(true), ) - require.NoError(t, err) // Act - err = m.Run(ctx) + err := m.Run(ctx) require.NoError(t, err) // Assert - state, err := dbInspector.Inspect(ctx) - require.NoError(t, err) - + state := inspect(ctx) schema := db.Dialect().DefaultSchema() + wantName, ok := state.FKs[sqlschema.FK{ From: sqlschema.C(schema, "things", "owner_id"), To: sqlschema.C(schema, "people", "id"), @@ -445,33 +445,27 @@ func testCustomFKNameFunc(t *testing.T, db *bun.DB) { } ctx := context.Background() - dbInspector, err := sqlschema.NewInspector(db) - if err != nil { - t.Skip(err) - } + inspect := inspectDbOrSkip(t, db) mustCreateTableWithFKs(t, ctx, db, (*Table)(nil), (*Column)(nil), ) - m, err := migrate.NewAutoMigrator(db, - migrate.WithTableNameAuto(migrationsTable), - migrate.WithLocksTableNameAuto(migrationLocksTable), + m := newAutoMigrator(t, db, migrate.WithFKNameFunc(func(sqlschema.FK) string { return "test_fkey" }), - migrate.WithModel((*TableM)(nil)), - migrate.WithModel((*ColumnM)(nil)), + migrate.WithModel( + (*TableM)(nil), + (*ColumnM)(nil), + ), ) - require.NoError(t, err) // Act - err = m.Run(ctx) + err := m.Run(ctx) require.NoError(t, err) // Assert - state, err := dbInspector.Inspect(ctx) - require.NoError(t, err) - + state := inspect(ctx) fkName := state.FKs[sqlschema.FK{ From: sqlschema.C(db.Dialect().DefaultSchema(), "columns", "attrelid"), To: sqlschema.C(db.Dialect().DefaultSchema(), "tables", "oid"), @@ -479,6 +473,109 @@ func testCustomFKNameFunc(t *testing.T, db *bun.DB) { require.Equal(t, fkName, "test_fkey") } +func testRenamedColumns(t *testing.T, db *bun.DB) { + // Database state + type 
Original struct { + ID int64 `bun:",pk"` + } + + type Model1 struct { + bun.BaseModel `bun:"models"` + ID string `bun:",pk"` + DoNotRename string `bun:",default:2"` + ColumnTwo int `bun:",default:2"` + } + + // Model state + type Renamed struct { + bun.BaseModel `bun:"renamed"` + Count int64 `bun:",pk"` // renamed column in renamed model + } + + type Model2 struct { + bun.BaseModel `bun:"models"` + ID string `bun:",pk"` + DoNotRename string `bun:",default:2"` + SecondColumn int `bun:",default:2"` // renamed column + } + + ctx := context.Background() + inspect := inspectDbOrSkip(t, db) + mustResetModel(t, ctx, db, + (*Original)(nil), + (*Model1)(nil), + ) + mustDropTableOnCleanup(t, ctx, db, (*Renamed)(nil)) + m := newAutoMigrator(t, db, migrate.WithModel( + (*Renamed)(nil), + (*Model2)(nil), + )) + + // Act + err := m.Run(ctx) + require.NoError(t, err) + + // Assert + state := inspect(ctx) + + require.Len(t, state.Tables, 2) + + var renamed, model2 sqlschema.Table + for _, tbl := range state.Tables { + switch tbl.Name { + case "renamed": + renamed = tbl + case "models": + model2 = tbl + } + } + + require.Contains(t, renamed.Columns, "count") + require.Contains(t, model2.Columns, "second_column") + require.Contains(t, model2.Columns, "do_not_rename") +} + +func testRenameColumnRenamesFK(t *testing.T, db *bun.DB) { + type TennantBefore struct { + bun.BaseModel `bun:"table:tennants"` + ID int64 `bun:",pk,identity"` + Apartment int8 + NeighbourID int64 + + Neighbour *TennantBefore `bun:"rel:has-one,join:neighbour_id=id"` + } + + type TennantAfter struct { + bun.BaseModel `bun:"table:tennants"` + TennantID int64 `bun:",pk,identity"` + Apartment int8 + NeighbourID int64 `bun:"my_neighbour"` + + Neighbour *TennantAfter `bun:"rel:has-one,join:my_neighbour=tennant_id"` + } + + ctx := context.Background() + inspect := inspectDbOrSkip(t, db) + mustCreateTableWithFKs(t, ctx, db, (*TennantBefore)(nil)) + m := newAutoMigrator(t, db, + migrate.WithRenameFK(true), + 
migrate.WithModel((*TennantAfter)(nil)), + ) + + // Act + err := m.Run(ctx) + require.NoError(t, err) + + // Assert + state := inspect(ctx) + + fkName := state.FKs[sqlschema.FK{ + From: sqlschema.C(db.Dialect().DefaultSchema(), "tennants", "my_neighbour"), + To: sqlschema.C(db.Dialect().DefaultSchema(), "tennants", "tennant_id"), + }] + require.Equal(t, "tennants_my_neighbour_fkey", fkName) +} + // TODO: rewrite these tests into AutoMigrator tests, Diff should be moved to migrate/internal package func TestDiff(t *testing.T) { type Journal struct { diff --git a/migrate/auto.go b/migrate/auto.go index 87be34f42..5750cab00 100644 --- a/migrate/auto.go +++ b/migrate/auto.go @@ -259,11 +259,14 @@ func newDetector(got, want sqlschema.State, opts ...DiffOption) *detector { } func (d *detector) DetectChanges() Changeset { - // Discover CREATE/RENAME/DROP TABLE targetTables := newTableSet(d.target.Tables...) currentTables := newTableSet(d.current.Tables...) // keeps state (which models still need to be checked) + // These table sets record "updates" to the targetTables set. + created := newTableSet() + renamed := newTableSet() + addedTables := targetTables.Sub(currentTables) AddedLoop: for _, added := range addedTables.Values() { @@ -276,13 +279,15 @@ AddedLoop: To: added.Name, }) - // TODO: check for altered columns. + d.detectRenamedColumns(removed, added) // Update referenced table in all related FKs if d.detectRenamedFKs { d.refMap.UpdateT(removed.T(), added.T()) } + renamed.Add(added) + // Do not check this model further, we know it was renamed. currentTables.Remove(removed.Name) continue AddedLoop @@ -294,18 +299,36 @@ AddedLoop: Name: added.Name, Model: added.Model, }) + created.Add(added) } // Tables that aren't present anymore and weren't renamed or left untouched were deleted. 
- for _, t := range currentTables.Sub(targetTables).Values() { + dropped := currentTables.Sub(targetTables) + for _, t := range dropped.Values() { d.changes.Add(&DropTable{ Schema: t.Schema, Name: t.Name, }) } - // Compare and update FKs + // Detect changes in existing tables that weren't renamed + // TODO: here having State.Tables be a map[string]Table would be much more convenient. + // Then we can also retire tableSet, or at least simplify it to a certain extent. + curEx := currentTables.Sub(dropped) + tarEx := targetTables.Sub(created).Sub(renamed) + for _, target := range tarEx.Values() { + // This step is redundant if we have map[string]Table + var current sqlschema.Table + for _, cur := range curEx.Values() { + if cur.Name == target.Name { + current = cur + break + } + } + d.detectRenamedColumns(current, target) + } + // Compare and update FKs ---------------- currentFKs := make(map[sqlschema.FK]string) for k, v := range d.current.FKs { currentFKs[k] = v @@ -355,6 +378,28 @@ func (d detector) canRename(t1, t2 sqlschema.Table) bool { return t1.Schema == t2.Schema && sqlschema.EqualSignatures(t1, t2) } +func (d *detector) detectRenamedColumns(removed, added sqlschema.Table) { + for aName, aCol := range added.Columns { + // This column exists in the database, so it wasn't renamed + if _, ok := removed.Columns[aName]; ok { + continue + } + for rName, rCol := range removed.Columns { + if aCol != rCol { + continue + } + d.changes.Add(&RenameColumn{ + Schema: added.Schema, + Table: added.Name, + From: rName, + To: aName, + }) + delete(removed.Columns, rName) // no need to check this column again + d.refMap.UpdateC(sqlschema.C(added.Schema, added.Name, rName), aName) + } + } +} + // Changeset is a set of changes that alter database state. 
type Changeset struct { operations []Operation @@ -458,8 +503,9 @@ func (op *RenameTable) Func(m sqlschema.Migrator) MigrationFunc { func (op *RenameTable) GetReverse() Operation { return &RenameTable{ - From: op.To, - To: op.From, + Schema: op.Schema, + From: op.To, + To: op.From, } } @@ -583,6 +629,7 @@ func (op *DropFK) GetReverse() Operation { } } +// RenameFK type RenameFK struct { FK sqlschema.FK From string @@ -610,6 +657,35 @@ func (op *RenameFK) GetReverse() Operation { } } +// RenameColumn +type RenameColumn struct { + Schema string + Table string + From string + To string +} + +var _ Operation = (*RenameColumn)(nil) + +func (op RenameColumn) String() string { + return "" +} + +func (op *RenameColumn) Func(m sqlschema.Migrator) MigrationFunc { + return func(ctx context.Context, db *bun.DB) error { + return m.RenameColumn(ctx, op.Schema, op.Table, op.From, op.To) + } +} + +func (op *RenameColumn) GetReverse() Operation { + return &RenameColumn{ + Schema: op.Schema, + Table: op.Table, + From: op.To, + To: op.From, + } +} + // sqlschema utils ------------------------------------------------------------ // tableSet stores unique table definitions. diff --git a/migrate/sqlschema/migrator.go b/migrate/sqlschema/migrator.go index d8b555a35..befdb8ad5 100644 --- a/migrate/sqlschema/migrator.go +++ b/migrate/sqlschema/migrator.go @@ -20,6 +20,7 @@ type Migrator interface { AddContraint(ctx context.Context, fk FK, name string) error DropContraint(ctx context.Context, schema, table, name string) error RenameConstraint(ctx context.Context, schema, table, oldName, newName string) error + RenameColumn(ctx context.Context, schema, table, oldName, newName string) error } // Migrator is a dialect-agnostic wrapper for sqlschema.Dialect From 132289549dac28d7c60db72dd6fa22e1fe29c0cf Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Sat, 9 Dec 2023 15:45:59 +0100 Subject: [PATCH 15/55] chore: resolve dependencies This is a WIP commit. 
--- dialect/pgdialect/alter_table.go | 217 ++++++++++- internal/dbtest/db_test.go | 14 +- internal/dbtest/migrate_test.go | 616 ++++++++++++++++++------------- migrate/alt/operations.go | 263 +++++++++++++ migrate/auto.go | 597 ++---------------------------- migrate/diff.go | 390 +++++++++++++++++++ migrate/migrator.go | 4 +- migrate/sqlschema/inspector.go | 7 + migrate/sqlschema/migrator.go | 7 + schema/sqlfmt.go | 19 + schema/table.go | 2 + 11 files changed, 1280 insertions(+), 856 deletions(-) create mode 100644 migrate/alt/operations.go create mode 100644 migrate/diff.go diff --git a/dialect/pgdialect/alter_table.go b/dialect/pgdialect/alter_table.go index 71b090e46..15034d042 100644 --- a/dialect/pgdialect/alter_table.go +++ b/dialect/pgdialect/alter_table.go @@ -2,9 +2,13 @@ package pgdialect import ( "context" + "errors" + "fmt" "github.com/uptrace/bun" + "github.com/uptrace/bun/migrate/alt" "github.com/uptrace/bun/migrate/sqlschema" + "github.com/uptrace/bun/schema" ) func (d *Dialect) Migrator(db *bun.DB) sqlschema.Migrator { @@ -19,7 +23,7 @@ type Migrator struct { var _ sqlschema.Migrator = (*Migrator)(nil) -func (m *Migrator) exec(ctx context.Context, q *bun.RawQuery) error { +func (m *Migrator) execRaw(ctx context.Context, q *bun.RawQuery) error { if _, err := q.Exec(ctx); err != nil { return err } @@ -28,7 +32,7 @@ func (m *Migrator) exec(ctx context.Context, q *bun.RawQuery) error { func (m *Migrator) RenameTable(ctx context.Context, oldName, newName string) error { q := m.db.NewRaw("ALTER TABLE ? 
RENAME TO ?", bun.Ident(oldName), bun.Ident(newName)) - return m.exec(ctx, q) + return m.execRaw(ctx, q) } func (m *Migrator) AddContraint(ctx context.Context, fk sqlschema.FK, name string) error { @@ -39,7 +43,7 @@ func (m *Migrator) AddContraint(ctx context.Context, fk sqlschema.FK, name strin bun.Safe(fk.To.Schema), bun.Safe(fk.To.Table), bun.Safe(fk.To.Column.String()), ) - return m.exec(ctx, q) + return m.execRaw(ctx, q) } func (m *Migrator) DropContraint(ctx context.Context, schema, table, name string) error { @@ -47,7 +51,7 @@ func (m *Migrator) DropContraint(ctx context.Context, schema, table, name string "ALTER TABLE ?.? DROP CONSTRAINT ?", bun.Ident(schema), bun.Ident(table), bun.Ident(name), ) - return m.exec(ctx, q) + return m.execRaw(ctx, q) } func (m *Migrator) RenameConstraint(ctx context.Context, schema, table, oldName, newName string) error { @@ -55,7 +59,7 @@ func (m *Migrator) RenameConstraint(ctx context.Context, schema, table, oldName, "ALTER TABLE ?.? RENAME CONSTRAINT ? TO ?", bun.Ident(schema), bun.Ident(table), bun.Ident(oldName), bun.Ident(newName), ) - return m.exec(ctx, q) + return m.execRaw(ctx, q) } func (m *Migrator) RenameColumn(ctx context.Context, schema, table, oldName, newName string) error { @@ -63,5 +67,206 @@ func (m *Migrator) RenameColumn(ctx context.Context, schema, table, oldName, new "ALTER TABLE ?.? RENAME COLUMN ? TO ?", bun.Ident(schema), bun.Ident(table), bun.Ident(oldName), bun.Ident(newName), ) - return m.exec(ctx, q) + return m.execRaw(ctx, q) +} + +// ------------- + +func (m *Migrator) Apply(ctx context.Context, changes ...sqlschema.Operation) error { + if len(changes) == 0 { + return nil + } + + queries, err := m.buildQueries(changes...) 
+ if err != nil { + return fmt.Errorf("apply database schema changes: %w", err) + } + + for _, query := range queries { + var b []byte + if b, err = query.AppendQuery(m.db.Formatter(), b); err != nil { + return err + } + m.execRaw(ctx, m.db.NewRaw(string(b))) + } + + return nil +} + +// buildQueries combines schema changes to a number of ALTER TABLE queries. +func (m *Migrator) buildQueries(changes ...sqlschema.Operation) ([]*AlterTableQuery, error) { + var queries []*AlterTableQuery + + chain := func(change sqlschema.Operation) error { + for _, query := range queries { + if err := query.Chain(change); err != errCannotChain { + return err // either nil (successful) or non-nil (failed) + } + } + + // Create a new query for this change, since it cannot be chained to any of the existing ones. + q, err := newAlterTableQuery(change) + if err != nil { + return err + } + queries = append(queries, q.Sep()) + return nil + } + + for _, change := range changes { + if err := chain(change); err != nil { + return nil, err + } + } + return queries, nil +} + +type AlterTableQuery struct { + FQN schema.FQN + + RenameTable sqlschema.Operation + RenameColumn sqlschema.Operation + RenameConstraint sqlschema.Operation + Actions Actions + + separate bool +} + +type Actions []*Action + +var _ schema.QueryAppender = (*Actions)(nil) + +type Action struct { + AddColumn sqlschema.Operation + DropColumn sqlschema.Operation + AlterColumn sqlschema.Operation + AlterType sqlschema.Operation + SetDefault sqlschema.Operation + DropDefault sqlschema.Operation + SetNotNull sqlschema.Operation + DropNotNull sqlschema.Operation + AddGenerated sqlschema.Operation + AddConstraint sqlschema.Operation + DropConstraint sqlschema.Operation + Custom sqlschema.Operation +} + +var _ schema.QueryAppender = (*Action)(nil) + +func newAlterTableQuery(op sqlschema.Operation) (*AlterTableQuery, error) { + q := AlterTableQuery{ + FQN: op.FQN(), + } + switch op.(type) { + case *alt.RenameTable: + q.RenameTable = op + 
case *alt.RenameColumn: + q.RenameColumn = op + case *alt.RenameConstraint: + q.RenameConstraint = op + default: + q.Actions = append(q.Actions, newAction(op)) + } + return &q, nil +} + +func newAction(op sqlschema.Operation) *Action { + var a Action + return &a +} + +// errCannotChain is a sentinel error. To apply the change, callers should +// create a new AlterTableQuery instead and include it there. +var errCannotChain = errors.New("cannot chain change to the current query") + +func (q *AlterTableQuery) Chain(op sqlschema.Operation) error { + if op.FQN() != q.FQN { + return errCannotChain + } + + switch op.(type) { + default: + return fmt.Errorf("unsupported operation %T", op) + } +} + +func (q *AlterTableQuery) isEmpty() bool { + return q.RenameTable == nil && q.RenameColumn == nil && q.RenameConstraint == nil && len(q.Actions) == 0 +} + +// Sep appends a ";" separator at the end of the query. +func (q *AlterTableQuery) Sep() *AlterTableQuery { + q.separate = true + return q +} + +func (q *AlterTableQuery) AppendQuery(fmter schema.Formatter, b []byte) (_ []byte, err error) { + var op schema.QueryAppender + switch true { + case q.RenameTable != nil: + op = q.RenameTable + case q.RenameColumn != nil: + op = q.RenameColumn + case q.RenameConstraint != nil: + op = q.RenameConstraint + case len(q.Actions) > 0: + op = q.Actions + default: + return b, nil + } + b = append(b, "ALTER TABLE "...) + b, _ = q.FQN.AppendQuery(fmter, b) + b = append(b, " "...) + if b, err = op.AppendQuery(fmter, b); err != nil { + return b, err + } + + if q.separate { + b = append(b, ";"...) + } + return b, nil +} + +func (actions Actions) AppendQuery(fmter schema.Formatter, b []byte) (_ []byte, err error) { + for i, a := range actions { + if i > 0 { + b = append(b, ", "...) 
+ } + b, err = a.AppendQuery(fmter, b) + if err != nil { + return b, err + } + } + return b, nil +} + +func (a *Action) AppendQuery(fmter schema.Formatter, b []byte) ([]byte, error) { + var op schema.QueryAppender + switch true { + case a.AddColumn != nil: + op = a.AddColumn + case a.DropColumn != nil: + op = a.DropColumn + case a.AlterColumn != nil: + op = a.AlterColumn + case a.AlterType != nil: + op = a.AlterType + case a.SetDefault != nil: + op = a.SetDefault + case a.DropDefault != nil: + op = a.DropDefault + case a.SetNotNull != nil: + op = a.SetNotNull + case a.DropNotNull != nil: + op = a.DropNotNull + case a.AddGenerated != nil: + op = a.AddGenerated + case a.AddConstraint != nil: + op = a.AddConstraint + case a.DropConstraint != nil: + op = a.DropConstraint + default: + return b, nil + } + return op.AppendQuery(fmter, b) } diff --git a/internal/dbtest/db_test.go b/internal/dbtest/db_test.go index ddc9d70a5..b355efaee 100644 --- a/internal/dbtest/db_test.go +++ b/internal/dbtest/db_test.go @@ -45,13 +45,13 @@ const ( ) var allDBs = map[string]func(tb testing.TB) *bun.DB{ - pgName: pg, - pgxName: pgx, - mysql5Name: mysql5, - mysql8Name: mysql8, - mariadbName: mariadb, - sqliteName: sqlite, - mssql2019Name: mssql2019, + pgName: pg, + // pgxName: pgx, + // mysql5Name: mysql5, + // mysql8Name: mysql8, + // mariadbName: mariadb, + // sqliteName: sqlite, + // mssql2019Name: mssql2019, } var allDialects = []func() schema.Dialect{ diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index 0a2d60e15..037ef32dc 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -3,16 +3,15 @@ package dbtest_test import ( "context" "errors" - "sort" "strings" "testing" "time" "github.com/stretchr/testify/require" "github.com/uptrace/bun" + "github.com/uptrace/bun/dialect/sqltype" "github.com/uptrace/bun/migrate" "github.com/uptrace/bun/migrate/sqlschema" - "github.com/uptrace/bun/schema" ) const ( @@ -201,12 +200,13 @@ func 
TestAutoMigrator_Run(t *testing.T) { fn func(t *testing.T, db *bun.DB) }{ {testRenameTable}, - {testCreateDropTable}, - {testAlterForeignKeys}, - {testCustomFKNameFunc}, - {testForceRenameFK}, {testRenamedColumns}, + // {testCreateDropTable}, + // {testAlterForeignKeys}, + // {testCustomFKNameFunc}, + {testForceRenameFK}, {testRenameColumnRenamesFK}, + // {testChangeColumnType}, } testEachDB(t, func(t *testing.T, dbName string, db *bun.DB) { @@ -476,7 +476,8 @@ func testCustomFKNameFunc(t *testing.T, db *bun.DB) { func testRenamedColumns(t *testing.T, db *bun.DB) { // Database state type Original struct { - ID int64 `bun:",pk"` + bun.BaseModel `bun:"original"` + ID int64 `bun:",pk"` } type Model1 struct { @@ -507,8 +508,8 @@ func testRenamedColumns(t *testing.T, db *bun.DB) { ) mustDropTableOnCleanup(t, ctx, db, (*Renamed)(nil)) m := newAutoMigrator(t, db, migrate.WithModel( - (*Renamed)(nil), (*Model2)(nil), + (*Renamed)(nil), )) // Act @@ -576,273 +577,356 @@ func testRenameColumnRenamesFK(t *testing.T, db *bun.DB) { require.Equal(t, "tennants_my_neighbour_fkey", fkName) } -// TODO: rewrite these tests into AutoMigrator tests, Diff should be moved to migrate/internal package -func TestDiff(t *testing.T) { - type Journal struct { - ISBN string `bun:"isbn,pk"` - Title string `bun:"title,notnull"` - Pages int `bun:"page_count,notnull,default:0"` - } - - type Reader struct { - Username string `bun:",pk,default:gen_random_uuid()"` - } - - type ExternalUsers struct { - bun.BaseModel `bun:"external.users"` - Name string `bun:",pk"` - } - - // ------------------------------------------------------------------------ - type ThingNoOwner struct { - bun.BaseModel `bun:"things"` - ID int64 `bun:"thing_id,pk"` - OwnerID int64 `bun:",notnull"` - } - - type Owner struct { - ID int64 `bun:",pk"` - } - - type Thing struct { - bun.BaseModel `bun:"things"` - ID int64 `bun:"thing_id,pk"` - OwnerID int64 `bun:",notnull"` - - Owner *Owner `bun:"rel:belongs-to,join:owner_id=id"` - } - - 
testEachDialect(t, func(t *testing.T, dialectName string, dialect schema.Dialect) { - defaultSchema := dialect.DefaultSchema() - - for _, tt := range []struct { - name string - states func(testing.TB, context.Context, schema.Dialect) (stateDb sqlschema.State, stateModel sqlschema.State) - want []migrate.Operation - }{ - { - name: "1 table renamed, 1 created, 2 dropped", - states: func(tb testing.TB, ctx context.Context, d schema.Dialect) (stateDb sqlschema.State, stateModel sqlschema.State) { - // Database state ------------- - type Subscription struct { - bun.BaseModel `bun:"table:billing.subscriptions"` - } - type Review struct{} - - type Author struct { - Name string `bun:"name"` - } - - // Model state ------------- - type JournalRenamed struct { - bun.BaseModel `bun:"table:journals_renamed"` - - ISBN string `bun:"isbn,pk"` - Title string `bun:"title,notnull"` - Pages int `bun:"page_count,notnull,default:0"` - } - - return getState(tb, ctx, d, - (*Author)(nil), - (*Journal)(nil), - (*Review)(nil), - (*Subscription)(nil), - ), getState(tb, ctx, d, - (*Author)(nil), - (*JournalRenamed)(nil), - (*Reader)(nil), - ) - }, - want: []migrate.Operation{ - &migrate.RenameTable{ - Schema: defaultSchema, - From: "journals", - To: "journals_renamed", - }, - &migrate.CreateTable{ - Model: &Reader{}, // (*Reader)(nil) would be more idiomatic, but schema.Tables - }, - &migrate.DropTable{ - Schema: "billing", - Name: "billing.subscriptions", // TODO: fix once schema is used correctly - }, - &migrate.DropTable{ - Schema: defaultSchema, - Name: "reviews", - }, - }, - }, - { - name: "renaming does not work across schemas", - states: func(tb testing.TB, ctx context.Context, d schema.Dialect) (stateDb sqlschema.State, stateModel sqlschema.State) { - // Users have the same columns as the "added" ExternalUsers. - // However, we should not recognize it as a RENAME, because only models in the same schema can be renamed. - // Instead, this is a DROP + CREATE case. 
- type Users struct { - bun.BaseModel `bun:"external_users"` - Name string `bun:",pk"` - } - - return getState(tb, ctx, d, - (*Users)(nil), - ), getState(t, ctx, d, - (*ExternalUsers)(nil), - ) +func testChangeColumnType(t *testing.T, db *bun.DB) { + type TableBefore struct { + bun.BaseModel `bun:"table:table"` + + // NewPK int64 `bun:"new_pk,notnull,unique"` + PK int32 `bun:"old_pk,pk,identity"` + DefaultExpr string `bun:"default_expr,default:gen_random_uuid()"` + Timestamp time.Time `bun:"ts"` + StillNullable string `bun:"not_null"` + TypeOverride string `bun:"type:char(100)"` + Logical bool `bun:"default:false"` + // ManyValues []string `bun:",array"` + } + + type TableAfter struct { + bun.BaseModel `bun:"table:table"` + + // NewPK int64 `bun:",pk"` + PK int64 `bun:"old_pk,identity"` // ~~no longer PK (not identity)~~ (wip) + DefaultExpr string `bun:"default_expr,type:uuid,default:uuid_nil()"` // different default + type UUID + Timestamp time.Time `bun:"ts,default:current_timestamp"` // has default value now + NotNullable string `bun:"not_null,notnull"` // added NOT NULL + TypeOverride string `bun:"type:char(200)"` // new length + Logical uint8 `bun:"default:1"` // change type + different default + // ManyValues []string `bun:",array"` // did not change + } + + wantTables := []sqlschema.Table{ + { + Schema: db.Dialect().DefaultSchema(), + Name: "table", + Columns: map[string]sqlschema.Column{ + // "new_pk": { + // IsPK: true, + // SQLType: "bigint", + // }, + "old_pk": { + SQLType: "bigint", + IsPK: true, }, - want: []migrate.Operation{ - &migrate.DropTable{ - Schema: defaultSchema, - Name: "external_users", - }, - &migrate.CreateTable{ - Model: &ExternalUsers{}, - }, + "default_expr": { + SQLType: "uuid", + IsNullable: true, + DefaultValue: "uuid_nil()", }, - }, - { - name: "detect new FKs on existing columns", - states: func(t testing.TB, ctx context.Context, d schema.Dialect) (stateDb sqlschema.State, stateModel sqlschema.State) { - // database state - type 
LonelyUser struct { - bun.BaseModel `bun:"table:users"` - Username string `bun:",pk"` - DreamPetKind string `bun:"pet_kind,notnull"` - DreamPetName string `bun:"pet_name,notnull"` - ImaginaryFriend string `bun:"friend"` - } - - type Pet struct { - Nickname string `bun:",pk"` - Kind string `bun:",pk"` - } - - // model state - type HappyUser struct { - bun.BaseModel `bun:"table:users"` - Username string `bun:",pk"` - PetKind string `bun:"pet_kind,notnull"` - PetName string `bun:"pet_name,notnull"` - Friend string `bun:"friend"` - - Pet *Pet `bun:"rel:has-one,join:pet_kind=kind,join:pet_name=nickname"` - BestFriend *HappyUser `bun:"rel:has-one,join:friend=username"` - } - - return getState(t, ctx, d, - (*LonelyUser)(nil), - (*Pet)(nil), - ), getState(t, ctx, d, - (*HappyUser)(nil), - (*Pet)(nil), - ) - }, - want: []migrate.Operation{ - &migrate.AddFK{ - FK: sqlschema.FK{ - From: sqlschema.C(defaultSchema, "users", "pet_kind", "pet_name"), - To: sqlschema.C(defaultSchema, "pets", "kind", "nickname"), - }, - ConstraintName: "users_pet_kind_pet_name_fkey", - }, - &migrate.AddFK{ - FK: sqlschema.FK{ - From: sqlschema.C(defaultSchema, "users", "friend"), - To: sqlschema.C(defaultSchema, "users", "username"), - }, - ConstraintName: "users_friend_fkey", - }, - }, - }, - { - name: "create FKs for new tables", // TODO: update test case to detect an added column too - states: func(t testing.TB, ctx context.Context, d schema.Dialect) (stateDb sqlschema.State, stateModel sqlschema.State) { - return getState(t, ctx, d, - (*ThingNoOwner)(nil), - ), getState(t, ctx, d, - (*Owner)(nil), - (*Thing)(nil), - ) + "ts": { + SQLType: sqltype.Timestamp, + DefaultValue: "current_timestamp", + IsNullable: true, }, - want: []migrate.Operation{ - &migrate.CreateTable{ - Model: &Owner{}, - }, - &migrate.AddFK{ - FK: sqlschema.FK{ - From: sqlschema.C(defaultSchema, "things", "owner_id"), - To: sqlschema.C(defaultSchema, "owners", "id"), - }, - ConstraintName: "things_owner_id_fkey", - }, + 
"not_null": { + SQLType: "varchar", }, - }, - { - name: "drop FKs for dropped tables", // TODO: update test case to detect dropped columns too - states: func(t testing.TB, ctx context.Context, d schema.Dialect) (sqlschema.State, sqlschema.State) { - stateDb := getState(t, ctx, d, (*Owner)(nil), (*Thing)(nil)) - stateModel := getState(t, ctx, d, (*ThingNoOwner)(nil)) - - // Normally a database state will have the names of the constraints filled in, but we need to mimic that for the test. - stateDb.FKs[sqlschema.FK{ - From: sqlschema.C(d.DefaultSchema(), "things", "owner_id"), - To: sqlschema.C(d.DefaultSchema(), "owners", "id"), - }] = "test_fkey" - return stateDb, stateModel + "type_override": { + SQLType: "char(200)", + IsNullable: true, }, - want: []migrate.Operation{ - &migrate.DropTable{ - Schema: defaultSchema, - Name: "owners", - }, - &migrate.DropFK{ - FK: sqlschema.FK{ - From: sqlschema.C(defaultSchema, "things", "owner_id"), - To: sqlschema.C(defaultSchema, "owners", "id"), - }, - ConstraintName: "test_fkey", - }, + "logical": { + SQLType: "smallint", + DefaultValue: "1", + IsNullable: true, }, + // "many_values": { + // SQLType: "array", + // }, }, - } { - t.Run(tt.name, func(t *testing.T) { - ctx := context.Background() - stateDb, stateModel := tt.states(t, ctx, dialect) - - got := migrate.Diff(stateDb, stateModel).Operations() - checkEqualChangeset(t, got, tt.want) - }) - } - }) -} - -func checkEqualChangeset(tb testing.TB, got, want []migrate.Operation) { - tb.Helper() + }, + } - // Sort alphabetically to ensure we don't fail because of the wrong order - sort.Slice(got, func(i, j int) bool { - return got[i].String() < got[j].String() - }) - sort.Slice(want, func(i, j int) bool { - return want[i].String() < want[j].String() - }) + ctx := context.Background() + inspect := inspectDbOrSkip(t, db) + mustResetModel(t, ctx, db, (*TableBefore)(nil)) + m := newAutoMigrator(t, db, migrate.WithModel((*TableAfter)(nil))) - var cgot, cwant migrate.Changeset - 
cgot.Add(got...) - cwant.Add(want...) + // Act + err := m.Run(ctx) + require.NoError(t, err) - require.Equal(tb, cwant.String(), cgot.String()) + // Assert + state := inspect(ctx) + require.Equal(t, wantTables, state.Tables) } -func getState(tb testing.TB, ctx context.Context, dialect schema.Dialect, models ...interface{}) sqlschema.State { - tb.Helper() - - tables := schema.NewTables(dialect) - tables.Register(models...) - - inspector := sqlschema.NewSchemaInspector(tables) - state, err := inspector.Inspect(ctx) - if err != nil { - tb.Skip("get state: %w", err) - } - return state -} +// // TODO: rewrite these tests into AutoMigrator tests, Diff should be moved to migrate/internal package +// func TestDiff(t *testing.T) { +// type Journal struct { +// ISBN string `bun:"isbn,pk"` +// Title string `bun:"title,notnull"` +// Pages int `bun:"page_count,notnull,default:0"` +// } + +// type Reader struct { +// Username string `bun:",pk,default:gen_random_uuid()"` +// } + +// type ExternalUsers struct { +// bun.BaseModel `bun:"external.users"` +// Name string `bun:",pk"` +// } + +// // ------------------------------------------------------------------------ +// type ThingNoOwner struct { +// bun.BaseModel `bun:"things"` +// ID int64 `bun:"thing_id,pk"` +// OwnerID int64 `bun:",notnull"` +// } + +// type Owner struct { +// ID int64 `bun:",pk"` +// } + +// type Thing struct { +// bun.BaseModel `bun:"things"` +// ID int64 `bun:"thing_id,pk"` +// OwnerID int64 `bun:",notnull"` + +// Owner *Owner `bun:"rel:belongs-to,join:owner_id=id"` +// } + +// testEachDialect(t, func(t *testing.T, dialectName string, dialect schema.Dialect) { +// defaultSchema := dialect.DefaultSchema() + +// for _, tt := range []struct { +// name string +// states func(testing.TB, context.Context, schema.Dialect) (stateDb sqlschema.State, stateModel sqlschema.State) +// want []migrate.Operation +// }{ +// { +// name: "1 table renamed, 1 created, 2 dropped", +// states: func(tb testing.TB, ctx 
context.Context, d schema.Dialect) (stateDb sqlschema.State, stateModel sqlschema.State) { +// // Database state ------------- +// type Subscription struct { +// bun.BaseModel `bun:"table:billing.subscriptions"` +// } +// type Review struct{} + +// type Author struct { +// Name string `bun:"name"` +// } + +// // Model state ------------- +// type JournalRenamed struct { +// bun.BaseModel `bun:"table:journals_renamed"` + +// ISBN string `bun:"isbn,pk"` +// Title string `bun:"title,notnull"` +// Pages int `bun:"page_count,notnull,default:0"` +// } + +// return getState(tb, ctx, d, +// (*Author)(nil), +// (*Journal)(nil), +// (*Review)(nil), +// (*Subscription)(nil), +// ), getState(tb, ctx, d, +// (*Author)(nil), +// (*JournalRenamed)(nil), +// (*Reader)(nil), +// ) +// }, +// want: []migrate.Operation{ +// &migrate.RenameTable{ +// Schema: defaultSchema, +// From: "journals", +// To: "journals_renamed", +// }, +// &migrate.CreateTable{ +// Model: &Reader{}, // (*Reader)(nil) would be more idiomatic, but schema.Tables +// }, +// &migrate.DropTable{ +// Schema: "billing", +// Name: "billing.subscriptions", // TODO: fix once schema is used correctly +// }, +// &migrate.DropTable{ +// Schema: defaultSchema, +// Name: "reviews", +// }, +// }, +// }, +// { +// name: "renaming does not work across schemas", +// states: func(tb testing.TB, ctx context.Context, d schema.Dialect) (stateDb sqlschema.State, stateModel sqlschema.State) { +// // Users have the same columns as the "added" ExternalUsers. +// // However, we should not recognize it as a RENAME, because only models in the same schema can be renamed. +// // Instead, this is a DROP + CREATE case. 
+// type Users struct { +// bun.BaseModel `bun:"external_users"` +// Name string `bun:",pk"` +// } + +// return getState(tb, ctx, d, +// (*Users)(nil), +// ), getState(t, ctx, d, +// (*ExternalUsers)(nil), +// ) +// }, +// want: []migrate.Operation{ +// &migrate.DropTable{ +// Schema: defaultSchema, +// Name: "external_users", +// }, +// &migrate.CreateTable{ +// Model: &ExternalUsers{}, +// }, +// }, +// }, +// { +// name: "detect new FKs on existing columns", +// states: func(t testing.TB, ctx context.Context, d schema.Dialect) (stateDb sqlschema.State, stateModel sqlschema.State) { +// // database state +// type LonelyUser struct { +// bun.BaseModel `bun:"table:users"` +// Username string `bun:",pk"` +// DreamPetKind string `bun:"pet_kind,notnull"` +// DreamPetName string `bun:"pet_name,notnull"` +// ImaginaryFriend string `bun:"friend"` +// } + +// type Pet struct { +// Nickname string `bun:",pk"` +// Kind string `bun:",pk"` +// } + +// // model state +// type HappyUser struct { +// bun.BaseModel `bun:"table:users"` +// Username string `bun:",pk"` +// PetKind string `bun:"pet_kind,notnull"` +// PetName string `bun:"pet_name,notnull"` +// Friend string `bun:"friend"` + +// Pet *Pet `bun:"rel:has-one,join:pet_kind=kind,join:pet_name=nickname"` +// BestFriend *HappyUser `bun:"rel:has-one,join:friend=username"` +// } + +// return getState(t, ctx, d, +// (*LonelyUser)(nil), +// (*Pet)(nil), +// ), getState(t, ctx, d, +// (*HappyUser)(nil), +// (*Pet)(nil), +// ) +// }, +// want: []migrate.Operation{ +// &migrate.AddFK{ +// FK: sqlschema.FK{ +// From: sqlschema.C(defaultSchema, "users", "pet_kind", "pet_name"), +// To: sqlschema.C(defaultSchema, "pets", "kind", "nickname"), +// }, +// ConstraintName: "users_pet_kind_pet_name_fkey", +// }, +// &migrate.AddFK{ +// FK: sqlschema.FK{ +// From: sqlschema.C(defaultSchema, "users", "friend"), +// To: sqlschema.C(defaultSchema, "users", "username"), +// }, +// ConstraintName: "users_friend_fkey", +// }, +// }, +// }, +// { 
+// name: "create FKs for new tables", // TODO: update test case to detect an added column too +// states: func(t testing.TB, ctx context.Context, d schema.Dialect) (stateDb sqlschema.State, stateModel sqlschema.State) { +// return getState(t, ctx, d, +// (*ThingNoOwner)(nil), +// ), getState(t, ctx, d, +// (*Owner)(nil), +// (*Thing)(nil), +// ) +// }, +// want: []migrate.Operation{ +// &migrate.CreateTable{ +// Model: &Owner{}, +// }, +// &migrate.AddFK{ +// FK: sqlschema.FK{ +// From: sqlschema.C(defaultSchema, "things", "owner_id"), +// To: sqlschema.C(defaultSchema, "owners", "id"), +// }, +// ConstraintName: "things_owner_id_fkey", +// }, +// }, +// }, +// { +// name: "drop FKs for dropped tables", // TODO: update test case to detect dropped columns too +// states: func(t testing.TB, ctx context.Context, d schema.Dialect) (sqlschema.State, sqlschema.State) { +// stateDb := getState(t, ctx, d, (*Owner)(nil), (*Thing)(nil)) +// stateModel := getState(t, ctx, d, (*ThingNoOwner)(nil)) + +// // Normally a database state will have the names of the constraints filled in, but we need to mimic that for the test. 
+// stateDb.FKs[sqlschema.FK{ +// From: sqlschema.C(d.DefaultSchema(), "things", "owner_id"), +// To: sqlschema.C(d.DefaultSchema(), "owners", "id"), +// }] = "test_fkey" +// return stateDb, stateModel +// }, +// want: []migrate.Operation{ +// &migrate.DropTable{ +// Schema: defaultSchema, +// Name: "owners", +// }, +// &migrate.DropFK{ +// FK: sqlschema.FK{ +// From: sqlschema.C(defaultSchema, "things", "owner_id"), +// To: sqlschema.C(defaultSchema, "owners", "id"), +// }, +// ConstraintName: "test_fkey", +// }, +// }, +// }, +// } { +// t.Run(tt.name, func(t *testing.T) { +// ctx := context.Background() +// stateDb, stateModel := tt.states(t, ctx, dialect) + +// got := migrate.Diff(stateDb, stateModel).Operations() +// checkEqualChangeset(t, got, tt.want) +// }) +// } +// }) +// } + +// func checkEqualChangeset(tb testing.TB, got, want []migrate.Operation) { +// tb.Helper() + +// // Sort alphabetically to ensure we don't fail because of the wrong order +// sort.Slice(got, func(i, j int) bool { +// return got[i].String() < got[j].String() +// }) +// sort.Slice(want, func(i, j int) bool { +// return want[i].String() < want[j].String() +// }) + +// var cgot, cwant migrate.Changeset +// cgot.Add(got...) +// cwant.Add(want...) + +// require.Equal(tb, cwant.String(), cgot.String()) +// } + +// func getState(tb testing.TB, ctx context.Context, dialect schema.Dialect, models ...interface{}) sqlschema.State { +// tb.Helper() + +// tables := schema.NewTables(dialect) +// tables.Register(models...) 
+ +// inspector := sqlschema.NewSchemaInspector(tables) +// state, err := inspector.Inspect(ctx) +// if err != nil { +// tb.Skip("get state: %w", err) +// } +// return state +// } diff --git a/migrate/alt/operations.go b/migrate/alt/operations.go new file mode 100644 index 000000000..f7f1a8873 --- /dev/null +++ b/migrate/alt/operations.go @@ -0,0 +1,263 @@ +package alt + +import ( + "github.com/uptrace/bun" + "github.com/uptrace/bun/migrate/sqlschema" + "github.com/uptrace/bun/schema" +) + +// Operation encapsulates the request to change a database definition +// and knowns which operation can revert it. +type Operation interface { + GetReverse() Operation +} + +// CreateTable +type CreateTable struct { + Schema string + Name string + Model interface{} +} + +var _ Operation = (*CreateTable)(nil) + +func (op *CreateTable) FQN() schema.FQN { + return schema.FQN{ + Schema: op.Schema, + Table: op.Name, + } +} + +func (op *CreateTable) GetReverse() Operation { + return &DropTable{ + Schema: op.Schema, + Name: op.Name, + } +} + +type DropTable struct { + Schema string + Name string +} + +var _ Operation = (*DropTable)(nil) + +func (op *DropTable) FQN() schema.FQN { + return schema.FQN{ + Schema: op.Schema, + Table: op.Name, + } +} + +func (op *DropTable) DependsOn(another Operation) bool { + d, ok := another.(*DropConstraint) + return ok && ((d.FK.From.Schema == op.Schema && d.FK.From.Table == op.Name) || + (d.FK.To.Schema == op.Schema && d.FK.To.Table == op.Name)) +} + +// GetReverse for a DropTable returns a no-op migration. Logically, CreateTable is the reverse, +// but DropTable does not have the table's definition to create one. +// +// TODO: we can fetch table definitions for deleted tables +// from the database engine and execute them as a raw query. 
+func (op *DropTable) GetReverse() Operation { + return &noop{} +} + +type RenameTable struct { + Schema string + OldName string + NewName string +} + +var _ Operation = (*RenameTable)(nil) +var _ sqlschema.Operation = (*RenameTable)(nil) + +func (op *RenameTable) FQN() schema.FQN { + return schema.FQN{ + Schema: op.Schema, + Table: op.OldName, + } +} + +func (op *RenameTable) AppendQuery(fmter schema.Formatter, b []byte) ([]byte, error) { + return fmter.AppendQuery(b, "RENAME TO ?", bun.Ident(op.NewName)), nil +} + +func (op *RenameTable) GetReverse() Operation { + return &RenameTable{ + Schema: op.Schema, + OldName: op.NewName, + NewName: op.OldName, + } +} + +// RenameColumn. +type RenameColumn struct { + Schema string + Table string + OldName string + NewName string +} + +var _ Operation = (*RenameColumn)(nil) +var _ sqlschema.Operation = (*RenameColumn)(nil) + +func (op *RenameColumn) FQN() schema.FQN { + return schema.FQN{ + Schema: op.Schema, + Table: op.Table, + } +} + +func (op *RenameColumn) AppendQuery(fmter schema.Formatter, b []byte) ([]byte, error) { + return fmter.AppendQuery(b, "RENAME COLUMN ? TO ?", bun.Ident(op.OldName), bun.Ident(op.NewName)), nil +} + +func (op *RenameColumn) GetReverse() Operation { + return &RenameColumn{ + Schema: op.Schema, + Table: op.Table, + OldName: op.NewName, + NewName: op.OldName, + } +} + +func (op *RenameColumn) DependsOn(another Operation) bool { + rt, ok := another.(*RenameTable) + return ok && rt.Schema == op.Schema && rt.NewName == op.Table +} + +// RenameConstraint. 
+type RenameConstraint struct { + FK sqlschema.FK + OldName string + NewName string +} + +var _ Operation = (*RenameConstraint)(nil) +var _ sqlschema.Operation = (*RenameConstraint)(nil) + +func (op *RenameConstraint) FQN() schema.FQN { + return schema.FQN{ + Schema: op.FK.From.Schema, + Table: op.FK.From.Table, + } +} + +func (op *RenameConstraint) DependsOn(another Operation) bool { + rt, ok := another.(*RenameTable) + return ok && rt.Schema == op.FK.From.Schema && rt.NewName == op.FK.From.Table +} + +func (op *RenameConstraint) AppendQuery(fmter schema.Formatter, b []byte) ([]byte, error) { + return fmter.AppendQuery(b, "RENAME CONSTRAINT ? TO ?", bun.Ident(op.OldName), bun.Ident(op.NewName)), nil +} + +func (op *RenameConstraint) GetReverse() Operation { + return &RenameConstraint{ + FK: op.FK, + OldName: op.OldName, + NewName: op.NewName, + } +} + +type AddForeignKey struct { + FK sqlschema.FK + ConstraintName string +} + +var _ Operation = (*AddForeignKey)(nil) +var _ sqlschema.Operation = (*AddForeignKey)(nil) + +func (op *AddForeignKey) FQN() schema.FQN { + return schema.FQN{ + Schema: op.FK.From.Schema, + Table: op.FK.From.Table, + } +} + +func (op *AddForeignKey) DependsOn(another Operation) bool { + switch another := another.(type) { + case *RenameTable: + return another.Schema == op.FK.From.Schema && another.NewName == op.FK.From.Table + case *CreateTable: + return (another.Schema == op.FK.To.Schema && another.Name == op.FK.To.Table) || // either it's the referencing one + (another.Schema == op.FK.From.Schema && another.Name == op.FK.From.Table) // or the one being referenced + } + return false +} + +func (op *AddForeignKey) AppendQuery(fmter schema.Formatter, b []byte) ([]byte, error) { + fqn := schema.FQN{ + Schema: op.FK.To.Schema, + Table: op.FK.To.Table, + } + b = fmter.AppendQuery(b, + "ADD CONSTRAINT ? FOREIGN KEY (?) 
REFERENCES ", + bun.Ident(op.ConstraintName), bun.Safe(op.FK.From.Column), + ) + b, _ = fqn.AppendQuery(fmter, b) + return fmter.AppendQuery(b, " (?)", bun.Ident(op.FK.To.Column)), nil +} + +func (op *AddForeignKey) GetReverse() Operation { + return &DropConstraint{ + FK: op.FK, + ConstraintName: op.ConstraintName, + } +} + +// DropConstraint. +type DropConstraint struct { + FK sqlschema.FK + ConstraintName string +} + +var _ Operation = (*DropConstraint)(nil) +var _ sqlschema.Operation = (*DropConstraint)(nil) + +func (op *DropConstraint) FQN() schema.FQN { + return schema.FQN{ + Schema: op.FK.From.Schema, + Table: op.FK.From.Table, + } +} + +func (op *DropConstraint) AppendQuery(fmter schema.Formatter, b []byte) ([]byte, error) { + return fmter.AppendQuery(b, "DROP CONSTRAINT ?", bun.Ident(op.ConstraintName)), nil +} + +func (op *DropConstraint) GetReverse() Operation { + return &AddForeignKey{ + FK: op.FK, + ConstraintName: op.ConstraintName, + } +} + +type ChangeColumnType struct { + Schema string + Table string + Column string + From sqlschema.Column + To sqlschema.Column +} + +var _ Operation = (*ChangeColumnType)(nil) + +func (op *ChangeColumnType) GetReverse() Operation { + return &ChangeColumnType{ + Schema: op.Schema, + Table: op.Table, + Column: op.Column, + From: op.To, + To: op.From, + } +} + +// noop is a migration that doesn't change the schema. +type noop struct{} + +var _ Operation = (*noop)(nil) + +func (*noop) GetReverse() Operation { return &noop{} } diff --git a/migrate/auto.go b/migrate/auto.go index 5750cab00..edb8f9f77 100644 --- a/migrate/auto.go +++ b/migrate/auto.go @@ -3,7 +3,6 @@ package migrate import ( "context" "fmt" - "strings" "github.com/uptrace/bun" "github.com/uptrace/bun/migrate/sqlschema" @@ -36,7 +35,7 @@ func WithExcludeTable(tables ...string) AutoMigratorOption { // which is the default strategy. 
Perhaps it would make sense to allow disabling this and switching to separate (CreateTable + AddFK) func WithFKNameFunc(f func(sqlschema.FK) string) AutoMigratorOption { return func(m *AutoMigrator) { - m.diffOpts = append(m.diffOpts, FKNameFunc(f)) + m.diffOpts = append(m.diffOpts, fKNameFunc(f)) } } @@ -45,7 +44,7 @@ func WithFKNameFunc(f func(sqlschema.FK) string) AutoMigratorOption { // and in those cases simply renaming the FK makes a lot more sense. func WithRenameFK(enabled bool) AutoMigratorOption { return func(m *AutoMigrator) { - m.diffOpts = append(m.diffOpts, DetectRenamedFKs(enabled)) + m.diffOpts = append(m.diffOpts, detectRenamedFKs(enabled)) } } @@ -94,8 +93,8 @@ type AutoMigrator struct { // excludeTables are excluded from database inspection. excludeTables []string - // diffOpts are passed to Diff. - diffOpts []DiffOption + // diffOpts are passed to detector constructor. + diffOpts []diffOption // migratorOpts are passed to Migrator constructor. migratorOpts []MigratorOption @@ -132,27 +131,32 @@ func NewAutoMigrator(db *bun.DB, opts ...AutoMigratorOption) (*AutoMigrator, err return am, nil } -func (am *AutoMigrator) diff(ctx context.Context) (Changeset, error) { - var changes Changeset +func (am *AutoMigrator) plan(ctx context.Context) (*changeset, error) { var err error got, err := am.dbInspector.Inspect(ctx) if err != nil { - return changes, err + return nil, err } want, err := am.modelInspector.Inspect(ctx) if err != nil { - return changes, err + return nil, err + } + + detector := newDetector(got, want, am.diffOpts...) + changes := detector.Diff() + if err := changes.ResolveDependencies(); err != nil { + return nil, fmt.Errorf("plan migrations: %w", err) } - return Diff(got, want, am.diffOpts...), nil + return changes, nil } // Migrate writes required changes to a new migration file and runs the migration. // This will create and entry in the migrations table, making it possible to revert // the changes with Migrator.Rollback(). 
func (am *AutoMigrator) Migrate(ctx context.Context, opts ...MigrationOption) error { - changeset, err := am.diff(ctx) + changes, err := am.plan(ctx) if err != nil { return fmt.Errorf("auto migrate: %w", err) } @@ -161,8 +165,8 @@ func (am *AutoMigrator) Migrate(ctx context.Context, opts ...MigrationOption) er name, _ := genMigrationName("auto") migrations.Add(Migration{ Name: name, - Up: changeset.Up(am.dbMigrator), - Down: changeset.Down(am.dbMigrator), + Up: changes.Up(am.dbMigrator), + Down: changes.Down(am.dbMigrator), Comment: "Changes detected by bun.migrate.AutoMigrator", }) @@ -179,570 +183,13 @@ func (am *AutoMigrator) Migrate(ctx context.Context, opts ...MigrationOption) er // Run runs required migrations in-place and without creating a database entry. func (am *AutoMigrator) Run(ctx context.Context) error { - changeset, err := am.diff(ctx) + changes, err := am.plan(ctx) if err != nil { - return fmt.Errorf("run auto migrate: %w", err) + return fmt.Errorf("auto migrate: %w", err) } - up := changeset.Up(am.dbMigrator) + up := changes.Up(am.dbMigrator) if err := up(ctx, am.db); err != nil { - return fmt.Errorf("run auto migrate: %w", err) + return fmt.Errorf("auto migrate: %w", err) } return nil -} - -// INTERNAL ------------------------------------------------------------------- -// TODO: move to migrate/internal - -type DiffOption func(*detectorConfig) - -func FKNameFunc(f func(sqlschema.FK) string) DiffOption { - return func(cfg *detectorConfig) { - cfg.FKNameFunc = f - } -} - -func DetectRenamedFKs(enabled bool) DiffOption { - return func(cfg *detectorConfig) { - cfg.DetectRenamedFKs = enabled - } -} - -func Diff(got, want sqlschema.State, opts ...DiffOption) Changeset { - detector := newDetector(got, want, opts...) - return detector.DetectChanges() -} - -// detectorConfig controls how differences in the model states are resolved. 
-type detectorConfig struct { - FKNameFunc func(sqlschema.FK) string - DetectRenamedFKs bool -} - -type detector struct { - // current state represents the existing database schema. - current sqlschema.State - - // target state represents the database schema defined in bun models. - target sqlschema.State - - changes Changeset - refMap sqlschema.RefMap - - // fkNameFunc builds the name for created/renamed FK contraints. - fkNameFunc func(sqlschema.FK) string - - // detectRenemedFKS controls how FKs are treated when their references (table/column) are renamed. - detectRenamedFKs bool -} - -func newDetector(got, want sqlschema.State, opts ...DiffOption) *detector { - cfg := &detectorConfig{ - FKNameFunc: defaultFKName, - DetectRenamedFKs: false, - } - for _, opt := range opts { - opt(cfg) - } - - var existingFKs []sqlschema.FK - for fk := range got.FKs { - existingFKs = append(existingFKs, fk) - } - - return &detector{ - current: got, - target: want, - refMap: sqlschema.NewRefMap(existingFKs...), - fkNameFunc: cfg.FKNameFunc, - detectRenamedFKs: cfg.DetectRenamedFKs, - } -} - -func (d *detector) DetectChanges() Changeset { - // Discover CREATE/RENAME/DROP TABLE - targetTables := newTableSet(d.target.Tables...) - currentTables := newTableSet(d.current.Tables...) // keeps state (which models still need to be checked) - - // These table sets record "updates" to the targetTables set. 
- created := newTableSet() - renamed := newTableSet() - - addedTables := targetTables.Sub(currentTables) -AddedLoop: - for _, added := range addedTables.Values() { - removedTables := currentTables.Sub(targetTables) - for _, removed := range removedTables.Values() { - if d.canRename(removed, added) { - d.changes.Add(&RenameTable{ - Schema: removed.Schema, - From: removed.Name, - To: added.Name, - }) - - d.detectRenamedColumns(removed, added) - - // Update referenced table in all related FKs - if d.detectRenamedFKs { - d.refMap.UpdateT(removed.T(), added.T()) - } - - renamed.Add(added) - - // Do not check this model further, we know it was renamed. - currentTables.Remove(removed.Name) - continue AddedLoop - } - } - // If a new table did not appear because of the rename operation, then it must've been created. - d.changes.Add(&CreateTable{ - Schema: added.Schema, - Name: added.Name, - Model: added.Model, - }) - created.Add(added) - } - - // Tables that aren't present anymore and weren't renamed or left untouched were deleted. - dropped := currentTables.Sub(targetTables) - for _, t := range dropped.Values() { - d.changes.Add(&DropTable{ - Schema: t.Schema, - Name: t.Name, - }) - } - - // Detect changes in existing tables that weren't renamed - // TODO: here having State.Tables be a map[string]Table would be much more convenient. - // Then we can alse retire tableSet, or at least simplify it to a certain extent. 
- curEx := currentTables.Sub(dropped) - tarEx := targetTables.Sub(created).Sub(renamed) - for _, target := range tarEx.Values() { - // This step is redundant if we have map[string]Table - var current sqlschema.Table - for _, cur := range curEx.Values() { - if cur.Name == target.Name { - current = cur - break - } - } - d.detectRenamedColumns(current, target) - } - - // Compare and update FKs ---------------- - currentFKs := make(map[sqlschema.FK]string) - for k, v := range d.current.FKs { - currentFKs[k] = v - } - - if d.detectRenamedFKs { - // Add RenameFK migrations for updated FKs. - for old, renamed := range d.refMap.Updated() { - newName := d.fkNameFunc(renamed) - d.changes.Add(&RenameFK{ - FK: renamed, // TODO: make sure this is applied after the table/columns are renamed - From: d.current.FKs[old], - To: d.fkNameFunc(renamed), - }) - - // Here we can add this fk to "current.FKs" to prevent it from firing in the next 2 for-loops. - currentFKs[renamed] = newName - delete(currentFKs, old) - } - } - - // Add AddFK migrations for newly added FKs. - for fk := range d.target.FKs { - if _, ok := currentFKs[fk]; !ok { - d.changes.Add(&AddFK{ - FK: fk, - ConstraintName: d.fkNameFunc(fk), - }) - } - } - - // Add DropFK migrations for removed FKs. - for fk, fkName := range currentFKs { - if _, ok := d.target.FKs[fk]; !ok { - d.changes.Add(&DropFK{ - FK: fk, - ConstraintName: fkName, - }) - } - } - - return d.changes -} - -// canRename checks if t1 can be renamed to t2. 
-func (d detector) canRename(t1, t2 sqlschema.Table) bool { - return t1.Schema == t2.Schema && sqlschema.EqualSignatures(t1, t2) -} - -func (d *detector) detectRenamedColumns(removed, added sqlschema.Table) { - for aName, aCol := range added.Columns { - // This column exists in the database, so it wasn't renamed - if _, ok := removed.Columns[aName]; ok { - continue - } - for rName, rCol := range removed.Columns { - if aCol != rCol { - continue - } - d.changes.Add(&RenameColumn{ - Schema: added.Schema, - Table: added.Name, - From: rName, - To: aName, - }) - delete(removed.Columns, rName) // no need to check this column again - d.refMap.UpdateC(sqlschema.C(added.Schema, added.Name, rName), aName) - } - } -} - -// Changeset is a set of changes that alter database state. -type Changeset struct { - operations []Operation -} - -var _ Operation = (*Changeset)(nil) - -func (c Changeset) String() string { - var ops []string - for _, op := range c.operations { - ops = append(ops, op.String()) - } - if len(ops) == 0 { - return "" - } - return strings.Join(ops, "\n") -} - -func (c Changeset) Operations() []Operation { - return c.operations -} - -// Add new operations to the changeset. -func (c *Changeset) Add(op ...Operation) { - c.operations = append(c.operations, op...) -} - -// Func chains all underlying operations in a single MigrationFunc. -func (c *Changeset) Func(m sqlschema.Migrator) MigrationFunc { - return func(ctx context.Context, db *bun.DB) error { - for _, op := range c.operations { - fn := op.Func(m) - if err := fn(ctx, db); err != nil { - return err - } - } - return nil - } -} - -func (c *Changeset) GetReverse() Operation { - var reverse Changeset - for _, op := range c.operations { - reverse.Add(op.GetReverse()) - } - return &reverse -} - -// Up is syntactic sugar. -func (c *Changeset) Up(m sqlschema.Migrator) MigrationFunc { - return c.Func(m) -} - -// Down is syntactic sugar. 
-func (c *Changeset) Down(m sqlschema.Migrator) MigrationFunc { - return c.GetReverse().Func(m) -} - -// Operation is an abstraction a level above a MigrationFunc. -// Apart from storing the function to execute the change, -// it knows how to *write* the corresponding code, and what the reverse operation is. -type Operation interface { - fmt.Stringer - - Func(sqlschema.Migrator) MigrationFunc - // GetReverse returns an operation that can revert the current one. - GetReverse() Operation -} - -// noop is a migration that doesn't change the schema. -type noop struct{} - -var _ Operation = (*noop)(nil) - -func (*noop) String() string { return "noop" } -func (*noop) Func(m sqlschema.Migrator) MigrationFunc { - return func(ctx context.Context, db *bun.DB) error { return nil } -} -func (*noop) GetReverse() Operation { return &noop{} } - -type RenameTable struct { - Schema string - From string - To string -} - -var _ Operation = (*RenameTable)(nil) - -func (op RenameTable) String() string { - return fmt.Sprintf( - "Rename table %q.%q to %q.%q", - op.Schema, trimSchema(op.From), op.Schema, trimSchema(op.To), - ) -} - -func (op *RenameTable) Func(m sqlschema.Migrator) MigrationFunc { - return func(ctx context.Context, db *bun.DB) error { - return m.RenameTable(ctx, op.From, op.To) - } -} - -func (op *RenameTable) GetReverse() Operation { - return &RenameTable{ - Schema: op.Schema, - From: op.To, - To: op.From, - } -} - -type CreateTable struct { - Schema string - Name string - Model interface{} -} - -var _ Operation = (*CreateTable)(nil) - -func (op CreateTable) String() string { - return fmt.Sprintf("CreateTable %T", op.Model) -} - -func (op *CreateTable) Func(m sqlschema.Migrator) MigrationFunc { - return func(ctx context.Context, db *bun.DB) error { - return m.CreateTable(ctx, op.Model) - } -} - -func (op *CreateTable) GetReverse() Operation { - return &DropTable{ - Schema: op.Schema, - Name: op.Name, - } -} - -type DropTable struct { - Schema string - Name string -} - 
-var _ Operation = (*DropTable)(nil) - -func (op DropTable) String() string { - return fmt.Sprintf("DropTable %q.%q", op.Schema, trimSchema(op.Name)) -} - -func (op *DropTable) Func(m sqlschema.Migrator) MigrationFunc { - return func(ctx context.Context, db *bun.DB) error { - return m.DropTable(ctx, op.Schema, op.Name) - } -} - -// GetReverse for a DropTable returns a no-op migration. Logically, CreateTable is the reverse, -// but DropTable does not have the table's definition to create one. -// -// TODO: we can fetch table definitions for deleted tables -// from the database engine and execute them as a raw query. -func (op *DropTable) GetReverse() Operation { - return &noop{} -} - -// trimSchema drops schema name from the table name. -// This is a workaroud until schema.Table.Schema is fully integrated with other bun packages. -func trimSchema(name string) string { - if strings.Contains(name, ".") { - return strings.Split(name, ".")[1] - } - return name -} - -// defaultFKName returns a name for the FK constraint in the format {tablename}_{columnname(s)}_fkey, following the Postgres convention. 
-func defaultFKName(fk sqlschema.FK) string { - columnnames := strings.Join(fk.From.Column.Split(), "_") - return fmt.Sprintf("%s_%s_fkey", fk.From.Table, columnnames) -} - -type AddFK struct { - FK sqlschema.FK - ConstraintName string -} - -var _ Operation = (*AddFK)(nil) - -func (op AddFK) String() string { - source, target := op.FK.From, op.FK.To - return fmt.Sprintf("AddForeignKey %q %s.%s(%s) references %s.%s(%s)", op.ConstraintName, - source.Schema, source.Table, strings.Join(source.Column.Split(), ","), - target.Schema, target.Table, strings.Join(target.Column.Split(), ","), - ) -} - -func (op *AddFK) Func(m sqlschema.Migrator) MigrationFunc { - return func(ctx context.Context, db *bun.DB) error { - return m.AddContraint(ctx, op.FK, op.ConstraintName) - } -} - -func (op *AddFK) GetReverse() Operation { - return &DropFK{ - FK: op.FK, - ConstraintName: op.ConstraintName, - } -} - -type DropFK struct { - FK sqlschema.FK - ConstraintName string -} - -var _ Operation = (*DropFK)(nil) - -func (op *DropFK) String() string { - source := op.FK.From.T() - return fmt.Sprintf("DropFK %q on table %q.%q", op.ConstraintName, source.Schema, source.Table) -} - -func (op *DropFK) Func(m sqlschema.Migrator) MigrationFunc { - return func(ctx context.Context, db *bun.DB) error { - source := op.FK.From.T() - return m.DropContraint(ctx, source.Schema, source.Table, op.ConstraintName) - } -} - -func (op *DropFK) GetReverse() Operation { - return &AddFK{ - FK: op.FK, - ConstraintName: op.ConstraintName, - } -} - -// RenameFK -type RenameFK struct { - FK sqlschema.FK - From string - To string -} - -var _ Operation = (*RenameFK)(nil) - -func (op *RenameFK) String() string { - return "RenameFK" -} - -func (op *RenameFK) Func(m sqlschema.Migrator) MigrationFunc { - return func(ctx context.Context, db *bun.DB) error { - table := op.FK.From - return m.RenameConstraint(ctx, table.Schema, table.Table, op.From, op.To) - } -} - -func (op *RenameFK) GetReverse() Operation { - return &RenameFK{ 
- FK: op.FK, - From: op.From, - To: op.To, - } -} - -// RenameColumn -type RenameColumn struct { - Schema string - Table string - From string - To string -} - -var _ Operation = (*RenameColumn)(nil) - -func (op RenameColumn) String() string { - return "" -} - -func (op *RenameColumn) Func(m sqlschema.Migrator) MigrationFunc { - return func(ctx context.Context, db *bun.DB) error { - return m.RenameColumn(ctx, op.Schema, op.Table, op.From, op.To) - } -} - -func (op *RenameColumn) GetReverse() Operation { - return &RenameColumn{ - Schema: op.Schema, - Table: op.Table, - From: op.To, - To: op.From, - } -} - -// sqlschema utils ------------------------------------------------------------ - -// tableSet stores unique table definitions. -type tableSet struct { - underlying map[string]sqlschema.Table -} - -func newTableSet(initial ...sqlschema.Table) tableSet { - set := tableSet{ - underlying: make(map[string]sqlschema.Table), - } - for _, t := range initial { - set.Add(t) - } - return set -} - -func (set tableSet) Add(t sqlschema.Table) { - set.underlying[t.Name] = t -} - -func (set tableSet) Remove(s string) { - delete(set.underlying, s) -} - -func (set tableSet) Values() (tables []sqlschema.Table) { - for _, t := range set.underlying { - tables = append(tables, t) - } - return -} - -func (set tableSet) Sub(other tableSet) tableSet { - res := set.clone() - for v := range other.underlying { - if _, ok := set.underlying[v]; ok { - res.Remove(v) - } - } - return res -} - -func (set tableSet) clone() tableSet { - res := newTableSet() - for _, t := range set.underlying { - res.Add(t) - } - return res -} - -func (set tableSet) String() string { - var s strings.Builder - for k := range set.underlying { - if s.Len() > 0 { - s.WriteString(", ") - } - s.WriteString(k) - } - return s.String() -} +} \ No newline at end of file diff --git a/migrate/diff.go b/migrate/diff.go new file mode 100644 index 000000000..4c875975c --- /dev/null +++ b/migrate/diff.go @@ -0,0 +1,390 @@ +package 
migrate + +import ( + "context" + "errors" + "fmt" + "strings" + + "github.com/uptrace/bun" + "github.com/uptrace/bun/migrate/alt" + "github.com/uptrace/bun/migrate/sqlschema" +) + +// changeset is a set of changes to the database definition. +type changeset struct { + operations []alt.Operation +} + +// Add new operations to the changeset. +func (c *changeset) Add(op ...alt.Operation) { + c.operations = append(c.operations, op...) +} + +// Func creates a MigrationFunc that applies all operations all the changeset. +func (c *changeset) Func(m sqlschema.Migrator) MigrationFunc { + return func(ctx context.Context, db *bun.DB) error { + var operations []sqlschema.Operation + for _, op := range c.operations { + operations = append(operations, op.(sqlschema.Operation)) + } + return m.Apply(ctx, operations...) + } +} + +// Up is syntactic sugar. +func (c *changeset) Up(m sqlschema.Migrator) MigrationFunc { + return c.Func(m) +} + +// Down is syntactic sugar. +func (c *changeset) Down(m sqlschema.Migrator) MigrationFunc { + var reverse changeset + for i := len(c.operations) - 1; i >= 0; i-- { + reverse.Add(c.operations[i].GetReverse()) + } + return reverse.Func(m) +} + +func (c *changeset) ResolveDependencies() error { + if len(c.operations) <= 1 { + return nil + } + + const ( + unvisited = iota + current + visited + ) + + var resolved []alt.Operation + var visit func(op alt.Operation) error + + var nextOp alt.Operation + var next func() bool + + status := make(map[alt.Operation]int, len(c.operations)) + for _, op := range c.operations { + status[op] = unvisited + } + + next = func() bool { + for op, s := range status { + if s == unvisited { + nextOp = op + return true + } + } + return false + } + + // visit iterates over c.operations until it finds all operations that depend on the current one + // or runs into cirtular dependency, in which case it will return an error. 
+ visit = func(op alt.Operation) error { + switch status[op] { + case visited: + return nil + case current: + // TODO: add details (circle) to the error message + return errors.New("detected circular dependency") + } + + status[op] = current + + for _, another := range c.operations { + if dop, hasDeps := another.(interface { + DependsOn(alt.Operation) bool + }); another == op || !hasDeps || !dop.DependsOn(op) { + continue + } + if err := visit(another); err != nil { + return err + } + } + + status[op] = visited + + // Any dependent nodes would've already been added to the list by now, so we prepend. + resolved = append([]alt.Operation{op}, resolved...) + return nil + } + + for next() { + if err := visit(nextOp); err != nil { + return err + } + } + + c.operations = resolved + return nil +} + +type diffOption func(*detectorConfig) + +func fKNameFunc(f func(sqlschema.FK) string) diffOption { + return func(cfg *detectorConfig) { + cfg.FKNameFunc = f + } +} + +func detectRenamedFKs(enabled bool) diffOption { + return func(cfg *detectorConfig) { + cfg.DetectRenamedFKs = enabled + } +} + +// detectorConfig controls how differences in the model states are resolved. +type detectorConfig struct { + FKNameFunc func(sqlschema.FK) string + DetectRenamedFKs bool +} + +type detector struct { + // current state represents the existing database schema. + current sqlschema.State + + // target state represents the database schema defined in bun models. + target sqlschema.State + + changes changeset + refMap sqlschema.RefMap + + // fkNameFunc builds the name for created/renamed FK contraints. + fkNameFunc func(sqlschema.FK) string + + // detectRenemedFKS controls how FKs are treated when their references (table/column) are renamed. 
+ detectRenamedFKs bool +} + +func newDetector(got, want sqlschema.State, opts ...diffOption) *detector { + cfg := &detectorConfig{ + FKNameFunc: defaultFKName, + DetectRenamedFKs: false, + } + for _, opt := range opts { + opt(cfg) + } + + var existingFKs []sqlschema.FK + for fk := range got.FKs { + existingFKs = append(existingFKs, fk) + } + + return &detector{ + current: got, + target: want, + refMap: sqlschema.NewRefMap(existingFKs...), + fkNameFunc: cfg.FKNameFunc, + detectRenamedFKs: cfg.DetectRenamedFKs, + } +} + +func (d *detector) Diff() *changeset { + // Discover CREATE/RENAME/DROP TABLE + targetTables := newTableSet(d.target.Tables...) + currentTables := newTableSet(d.current.Tables...) // keeps state (which models still need to be checked) + + // These table sets record "updates" to the targetTables set. + created := newTableSet() + renamed := newTableSet() + + addedTables := targetTables.Sub(currentTables) +AddedLoop: + for _, added := range addedTables.Values() { + removedTables := currentTables.Sub(targetTables) + for _, removed := range removedTables.Values() { + if d.canRename(removed, added) { + d.changes.Add(&alt.RenameTable{ + Schema: removed.Schema, + OldName: removed.Name, + NewName: added.Name, + }) + + d.detectRenamedColumns(removed, added) + + // Update referenced table in all related FKs + if d.detectRenamedFKs { + d.refMap.UpdateT(removed.T(), added.T()) + } + + renamed.Add(added) + + // Do not check this model further, we know it was renamed. + currentTables.Remove(removed.Name) + continue AddedLoop + } + } + // If a new table did not appear because of the rename operation, then it must've been created. + d.changes.Add(&alt.CreateTable{ + Schema: added.Schema, + Name: added.Name, + Model: added.Model, + }) + created.Add(added) + } + + // Tables that aren't present anymore and weren't renamed or left untouched were deleted. 
+ dropped := currentTables.Sub(targetTables) + for _, t := range dropped.Values() { + d.changes.Add(&alt.DropTable{ + Schema: t.Schema, + Name: t.Name, + }) + } + + // Detect changes in existing tables that weren't renamed + // TODO: here having State.Tables be a map[string]Table would be much more convenient. + // Then we can alse retire tableSet, or at least simplify it to a certain extent. + curEx := currentTables.Sub(dropped) + tarEx := targetTables.Sub(created).Sub(renamed) + for _, target := range tarEx.Values() { + // This step is redundant if we have map[string]Table + var current sqlschema.Table + for _, cur := range curEx.Values() { + if cur.Name == target.Name { + current = cur + break + } + } + d.detectRenamedColumns(current, target) + } + + // Compare and update FKs ---------------- + currentFKs := make(map[sqlschema.FK]string) + for k, v := range d.current.FKs { + currentFKs[k] = v + } + + if d.detectRenamedFKs { + // Add RenameFK migrations for updated FKs. + for old, renamed := range d.refMap.Updated() { + newName := d.fkNameFunc(renamed) + d.changes.Add(&alt.RenameConstraint{ + FK: renamed, // TODO: make sure this is applied after the table/columns are renamed + OldName: d.current.FKs[old], + NewName: d.fkNameFunc(renamed), + }) + + // Here we can add this fk to "current.FKs" to prevent it from firing in the next 2 for-loops. + currentFKs[renamed] = newName + delete(currentFKs, old) + } + } + + // Add AddFK migrations for newly added FKs. + for fk := range d.target.FKs { + if _, ok := currentFKs[fk]; !ok { + d.changes.Add(&alt.AddForeignKey{ + FK: fk, + ConstraintName: d.fkNameFunc(fk), + }) + } + } + + // Add DropFK migrations for removed FKs. + for fk, fkName := range currentFKs { + if _, ok := d.target.FKs[fk]; !ok { + d.changes.Add(&alt.DropConstraint{ + FK: fk, + ConstraintName: fkName, + }) + } + } + + return &d.changes +} + +// canRename checks if t1 can be renamed to t2. 
+func (d detector) canRename(t1, t2 sqlschema.Table) bool { + return t1.Schema == t2.Schema && sqlschema.EqualSignatures(t1, t2) +} + +func (d *detector) detectRenamedColumns(current, added sqlschema.Table) { + for aName, aCol := range added.Columns { + // This column exists in the database, so it wasn't renamed + if _, ok := current.Columns[aName]; ok { + continue + } + for cName, cCol := range current.Columns { + if aCol != cCol { + continue + } + d.changes.Add(&alt.RenameColumn{ + Schema: added.Schema, + Table: added.Name, + OldName: cName, + NewName: aName, + }) + delete(current.Columns, cName) // no need to check this column again + d.refMap.UpdateC(sqlschema.C(added.Schema, added.Name, cName), aName) + break + } + } +} + +// sqlschema utils ------------------------------------------------------------ + +// tableSet stores unique table definitions. +type tableSet struct { + underlying map[string]sqlschema.Table +} + +func newTableSet(initial ...sqlschema.Table) tableSet { + set := tableSet{ + underlying: make(map[string]sqlschema.Table), + } + for _, t := range initial { + set.Add(t) + } + return set +} + +func (set tableSet) Add(t sqlschema.Table) { + set.underlying[t.Name] = t +} + +func (set tableSet) Remove(s string) { + delete(set.underlying, s) +} + +func (set tableSet) Values() (tables []sqlschema.Table) { + for _, t := range set.underlying { + tables = append(tables, t) + } + return +} + +func (set tableSet) Sub(other tableSet) tableSet { + res := set.clone() + for v := range other.underlying { + if _, ok := set.underlying[v]; ok { + res.Remove(v) + } + } + return res +} + +func (set tableSet) clone() tableSet { + res := newTableSet() + for _, t := range set.underlying { + res.Add(t) + } + return res +} + +func (set tableSet) String() string { + var s strings.Builder + for k := range set.underlying { + if s.Len() > 0 { + s.WriteString(", ") + } + s.WriteString(k) + } + return s.String() +} + +// defaultFKName returns a name for the FK constraint in the 
format {tablename}_{columnname(s)}_fkey, following the Postgres convention. +func defaultFKName(fk sqlschema.FK) string { + columnnames := strings.Join(fk.From.Column.Split(), "_") + return fmt.Sprintf("%s_%s_fkey", fk.From.Table, columnnames) +} diff --git a/migrate/migrator.go b/migrate/migrator.go index b14ad64ca..9f1b5222c 100644 --- a/migrate/migrator.go +++ b/migrate/migrator.go @@ -276,7 +276,7 @@ func (m *Migrator) CreateGoMigration( // CreateTxSQLMigration creates transactional up and down SQL migration files. func (m *Migrator) CreateTxSQLMigrations(ctx context.Context, name string) ([]*MigrationFile, error) { - name, err := m.genMigrationName(name) + name, err := genMigrationName(name) if err != nil { return nil, err } @@ -296,7 +296,7 @@ func (m *Migrator) CreateTxSQLMigrations(ctx context.Context, name string) ([]*M // CreateSQLMigrations creates up and down SQL migration files. func (m *Migrator) CreateSQLMigrations(ctx context.Context, name string) ([]*MigrationFile, error) { - name, err := m.genMigrationName(name) + name, err := genMigrationName(name) if err != nil { return nil, err } diff --git a/migrate/sqlschema/inspector.go b/migrate/sqlschema/inspector.go index 2060fef0c..53fc95a0f 100644 --- a/migrate/sqlschema/inspector.go +++ b/migrate/sqlschema/inspector.go @@ -71,6 +71,13 @@ func (si *SchemaInspector) Inspect(ctx context.Context) (State, error) { }) for _, rel := range t.Relations { + // These relations are nominal and do not need a foreign key to be declared in the current table. + // They will be either expressed as N:1 relations in an m2m mapping table, or will be referenced by the other table if it's a 1:N. 
+ if rel.Type == schema.ManyToManyRelation || + rel.Type == schema.HasManyRelation { + continue + } + var fromCols, toCols []string for _, f := range rel.BaseFields { fromCols = append(fromCols, f.Name) diff --git a/migrate/sqlschema/migrator.go b/migrate/sqlschema/migrator.go index befdb8ad5..3bdeb7e08 100644 --- a/migrate/sqlschema/migrator.go +++ b/migrate/sqlschema/migrator.go @@ -13,7 +13,14 @@ type MigratorDialect interface { Migrator(*bun.DB) Migrator } +type Operation interface { + schema.QueryAppender + FQN() schema.FQN +} + type Migrator interface { + Apply(ctx context.Context, changes ...Operation) error + RenameTable(ctx context.Context, oldName, newName string) error CreateTable(ctx context.Context, model interface{}) error DropTable(ctx context.Context, schema, table string) error diff --git a/schema/sqlfmt.go b/schema/sqlfmt.go index 7b4a9493f..11eabb13b 100644 --- a/schema/sqlfmt.go +++ b/schema/sqlfmt.go @@ -1,6 +1,7 @@ package schema import ( + "fmt" "strings" "github.com/uptrace/bun/internal" @@ -38,6 +39,24 @@ func (s Name) AppendQuery(fmter Formatter, b []byte) ([]byte, error) { //------------------------------------------------------------------------------ +// FQN represents a fully qualified table name. +type FQN struct { + Schema string + Table string +} + +var _ QueryAppender = (*FQN)(nil) + +func (fqn *FQN) AppendQuery(fmter Formatter, b []byte) ([]byte, error) { + return fmter.AppendQuery(b, "?.?", Ident(fqn.Schema), Ident(fqn.Table)), nil +} + +func (fqn *FQN) String() string { + return fmt.Sprintf("%s.%s", fqn.Schema, fqn.Table) +} + +//------------------------------------------------------------------------------ + // Ident represents a SQL identifier, for example, // a fully qualified column name such as `table_name.col_name`. 
type Ident string diff --git a/schema/table.go b/schema/table.go index e0c46fe07..e41a2c732 100644 --- a/schema/table.go +++ b/schema/table.go @@ -1057,3 +1057,5 @@ func makeIndex(a, b []int) []int { dest = append(dest, b...) return dest } + + From 94cecaf6cdadd2de4acd1d3bff769ad06a0fdc1d Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Sat, 3 Aug 2024 16:42:21 +0200 Subject: [PATCH 16/55] refactor: remove AlterTableQuery bloat and fix test errors - Do not implement AppendQuery on Operation level. This is a leaky abstraction as queries are dialect-specific and migrate package should not be concerned with how they are constructed. - AlterTableQuery also is an unnecessary abstraction. Now pgdialect will just build a simple string-query for each Operation. - Moved operations.go to migrate/ package and deleted alt/ package. - Minor clean-ups and documentation. testChangeColumnType is commented out because the implementation is missing. --- dialect/pgdialect/alter_table.go | 324 +++++++++++-------------------- internal/dbtest/migrate_test.go | 8 +- migrate/diff.go | 67 ++++--- migrate/{alt => }/operations.go | 33 +--- migrate/sqlschema/migrator.go | 21 +- migrate/sqlschema/state.go | 38 ++-- schema/sqlfmt.go | 2 +- 7 files changed, 187 insertions(+), 306 deletions(-) rename migrate/{alt => }/operations.go (80%) diff --git a/dialect/pgdialect/alter_table.go b/dialect/pgdialect/alter_table.go index 15034d042..5170ba316 100644 --- a/dialect/pgdialect/alter_table.go +++ b/dialect/pgdialect/alter_table.go @@ -2,271 +2,175 @@ package pgdialect import ( "context" - "errors" "fmt" "github.com/uptrace/bun" - "github.com/uptrace/bun/migrate/alt" + "github.com/uptrace/bun/internal" + "github.com/uptrace/bun/migrate" "github.com/uptrace/bun/migrate/sqlschema" "github.com/uptrace/bun/schema" ) func (d *Dialect) Migrator(db *bun.DB) sqlschema.Migrator { - return &Migrator{db: db, BaseMigrator: sqlschema.NewBaseMigrator(db)} + return &migrator{db: db, BaseMigrator: 
sqlschema.NewBaseMigrator(db)} } -type Migrator struct { +type migrator struct { *sqlschema.BaseMigrator db *bun.DB } -var _ sqlschema.Migrator = (*Migrator)(nil) +var _ sqlschema.Migrator = (*migrator)(nil) -func (m *Migrator) execRaw(ctx context.Context, q *bun.RawQuery) error { - if _, err := q.Exec(ctx); err != nil { - return err - } - return nil -} - -func (m *Migrator) RenameTable(ctx context.Context, oldName, newName string) error { - q := m.db.NewRaw("ALTER TABLE ? RENAME TO ?", bun.Ident(oldName), bun.Ident(newName)) - return m.execRaw(ctx, q) -} - -func (m *Migrator) AddContraint(ctx context.Context, fk sqlschema.FK, name string) error { - q := m.db.NewRaw( - "ALTER TABLE ?.? ADD CONSTRAINT ? FOREIGN KEY (?) REFERENCES ?.? (?)", - bun.Safe(fk.From.Schema), bun.Safe(fk.From.Table), bun.Safe(name), - bun.Safe(fk.From.Column.String()), - bun.Safe(fk.To.Schema), bun.Safe(fk.To.Table), - bun.Safe(fk.To.Column.String()), - ) - return m.execRaw(ctx, q) -} - -func (m *Migrator) DropContraint(ctx context.Context, schema, table, name string) error { - q := m.db.NewRaw( - "ALTER TABLE ?.? DROP CONSTRAINT ?", - bun.Ident(schema), bun.Ident(table), bun.Ident(name), - ) - return m.execRaw(ctx, q) -} - -func (m *Migrator) RenameConstraint(ctx context.Context, schema, table, oldName, newName string) error { - q := m.db.NewRaw( - "ALTER TABLE ?.? RENAME CONSTRAINT ? TO ?", - bun.Ident(schema), bun.Ident(table), bun.Ident(oldName), bun.Ident(newName), - ) - return m.execRaw(ctx, q) -} - -func (m *Migrator) RenameColumn(ctx context.Context, schema, table, oldName, newName string) error { - q := m.db.NewRaw( - "ALTER TABLE ?.? RENAME COLUMN ? 
TO ?", - bun.Ident(schema), bun.Ident(table), bun.Ident(oldName), bun.Ident(newName), - ) - return m.execRaw(ctx, q) -} - -// ------------- - -func (m *Migrator) Apply(ctx context.Context, changes ...sqlschema.Operation) error { +func (m *migrator) Apply(ctx context.Context, changes ...sqlschema.Operation) error { if len(changes) == 0 { return nil } + var conn bun.IConn + var err error - queries, err := m.buildQueries(changes...) - if err != nil { - return fmt.Errorf("apply database schema changes: %w", err) - } - - for _, query := range queries { - var b []byte - if b, err = query.AppendQuery(m.db.Formatter(), b); err != nil { - return err - } - m.execRaw(ctx, m.db.NewRaw(string(b))) + if conn, err = m.db.Conn(ctx); err != nil { + return err } - return nil -} - -// buildQueries combines schema changes to a number of ALTER TABLE queries. -func (m *Migrator) buildQueries(changes ...sqlschema.Operation) ([]*AlterTableQuery, error) { - var queries []*AlterTableQuery + fmter := m.db.Formatter() + for _, change := range changes { + var b []byte // TODO(dyma): call db.MakeQueryBytes - chain := func(change sqlschema.Operation) error { - for _, query := range queries { - if err := query.Chain(change); err != errCannotChain { - return err // either nil (successful) or non-nil (failed) + switch change := change.(type) { + case *migrate.CreateTable: + err = m.CreateTable(ctx, change.Model) + if err != nil { + return fmt.Errorf("apply changes: create table %s: %w", change.FQN(), err) + } + continue + case *migrate.DropTable: + err = m.DropTable(ctx, change.Schema, change.Name) + if err != nil { + return fmt.Errorf("apply changes: drop table %s: %w", change.FQN(), err) } + continue + case *migrate.RenameTable: + b, err = m.renameTable(fmter, b, change) + case *migrate.RenameColumn: + b, err = m.renameColumn(fmter, b, change) + case *migrate.DropConstraint: + b, err = m.dropContraint(fmter, b, change) + case *migrate.AddForeignKey: + b, err = m.addForeignKey(fmter, b, change) + 
case *migrate.RenameConstraint: + b, err = m.renameConstraint(fmter, b, change) + default: + return fmt.Errorf("apply changes: unknown operation %T", change) } - - // Create a new query for this change, since it cannot be chained to any of the existing ones. - q, err := newAlterTableQuery(change) if err != nil { - return err + return fmt.Errorf("apply changes: %w", err) } - queries = append(queries, q.Sep()) - return nil - } - for _, change := range changes { - if err := chain(change); err != nil { - return nil, err + query := internal.String(b) + // log.Println("exec query: " + query) + if _, err = conn.ExecContext(ctx, query); err != nil { + return fmt.Errorf("apply changes: %w", err) } } - return queries, nil -} - -type AlterTableQuery struct { - FQN schema.FQN - - RenameTable sqlschema.Operation - RenameColumn sqlschema.Operation - RenameConstraint sqlschema.Operation - Actions Actions - - separate bool + return nil } -type Actions []*Action - -var _ schema.QueryAppender = (*Actions)(nil) - -type Action struct { - AddColumn sqlschema.Operation - DropColumn sqlschema.Operation - AlterColumn sqlschema.Operation - AlterType sqlschema.Operation - SetDefault sqlschema.Operation - DropDefault sqlschema.Operation - SetNotNull sqlschema.Operation - DropNotNull sqlschema.Operation - AddGenerated sqlschema.Operation - AddConstraint sqlschema.Operation - DropConstraint sqlschema.Operation - Custom sqlschema.Operation +func (m *migrator) renameTable(fmter schema.Formatter, b []byte, rename *migrate.RenameTable) (_ []byte, err error) { + b = append(b, "ALTER TABLE "...) + fqn := rename.FQN() + if b, err = fqn.AppendQuery(fmter, b); err != nil { + return b, err + } + b = append(b, " RENAME TO "...) 
+ if b, err = bun.Ident(rename.NewName).AppendQuery(fmter, b); err != nil { + return b, err + } + return b, nil } -var _ schema.QueryAppender = (*Action)(nil) - -func newAlterTableQuery(op sqlschema.Operation) (*AlterTableQuery, error) { - q := AlterTableQuery{ - FQN: op.FQN(), +func (m *migrator) renameColumn(fmter schema.Formatter, b []byte, rename *migrate.RenameColumn) (_ []byte, err error) { + b = append(b, "ALTER TABLE "...) + fqn := rename.FQN() + if b, err = fqn.AppendQuery(fmter, b); err != nil { + return b, err } - switch op.(type) { - case *alt.RenameTable: - q.RenameTable = op - case *alt.RenameColumn: - q.RenameColumn = op - case *alt.RenameConstraint: - q.RenameConstraint = op - default: - q.Actions = append(q.Actions, newAction(op)) + + b = append(b, " RENAME COLUMN "...) + if b, err = bun.Ident(rename.OldName).AppendQuery(fmter, b); err != nil { + return b, err } - return &q, nil -} -func newAction(op sqlschema.Operation) *Action { - var a Action - return &a + b = append(b, " TO "...) + if b, err = bun.Ident(rename.NewName).AppendQuery(fmter, b); err != nil { + return b, err + } + return b, nil } -// errCannotChain is a sentinel error. To apply the change, callers should -// create a new AlterTableQuery instead and include it there. -var errCannotChain = errors.New("cannot chain change to the current query") +func (m *migrator) renameConstraint(fmter schema.Formatter, b []byte, rename *migrate.RenameConstraint) (_ []byte, err error) { + b = append(b, "ALTER TABLE "...) + fqn := rename.FQN() + if b, err = fqn.AppendQuery(fmter, b); err != nil { + return b, err + } -func (q *AlterTableQuery) Chain(op sqlschema.Operation) error { - if op.FQN() != q.FQN { - return errCannotChain + b = append(b, " RENAME CONSTRAINT "...) + if b, err = bun.Ident(rename.OldName).AppendQuery(fmter, b); err != nil { + return b, err } - switch op.(type) { - default: - return fmt.Errorf("unsupported operation %T", op) + b = append(b, " TO "...) 
+ if b, err = bun.Ident(rename.NewName).AppendQuery(fmter, b); err != nil { + return b, err } + return b, nil } -func (q *AlterTableQuery) isEmpty() bool { - return q.RenameTable == nil && q.RenameColumn == nil && q.RenameConstraint == nil && len(q.Actions) == 0 -} +func (m *migrator) dropContraint(fmter schema.Formatter, b []byte, drop *migrate.DropConstraint) (_ []byte, err error) { + b = append(b, "ALTER TABLE "...) + fqn := drop.FQN() + if b, err = fqn.AppendQuery(fmter, b); err != nil { + return b, err + } -// Sep appends a ";" separator at the end of the query. -func (q *AlterTableQuery) Sep() *AlterTableQuery { - q.separate = true - return q + b = append(b, " DROP CONSTRAINT "...) + if b, err = bun.Ident(drop.ConstraintName).AppendQuery(fmter, b); err != nil { + return b, err + } + return b, nil } -func (q *AlterTableQuery) AppendQuery(fmter schema.Formatter, b []byte) (_ []byte, err error) { - var op schema.QueryAppender - switch true { - case q.RenameTable != nil: - op = q.RenameTable - case q.RenameColumn != nil: - op = q.RenameColumn - case q.RenameConstraint != nil: - op = q.RenameConstraint - case len(q.Actions) > 0: - op = q.Actions - default: - return b, nil - } +func (m *migrator) addForeignKey(fmter schema.Formatter, b []byte, add *migrate.AddForeignKey) (_ []byte, err error) { b = append(b, "ALTER TABLE "...) - b, _ = q.FQN.AppendQuery(fmter, b) - b = append(b, " "...) - if b, err = op.AppendQuery(fmter, b); err != nil { + fqn := add.FQN() + if b, err = fqn.AppendQuery(fmter, b); err != nil { return b, err } - if q.separate { - b = append(b, ";"...) + b = append(b, " ADD CONSTRAINT "...) + if b, err = bun.Ident(add.ConstraintName).AppendQuery(fmter, b); err != nil { + return b, err } - return b, nil -} -func (actions Actions) AppendQuery(fmter schema.Formatter, b []byte) (_ []byte, err error) { - for i, a := range actions { - if i > 0 { - b = append(b, ", "...) 
- } - b, err = a.AppendQuery(fmter, b) - if err != nil { - return b, err - } + b = append(b, " FOREIGN KEY ("...) + if b, err = add.FK.From.Column.Safe().AppendQuery(fmter, b); err != nil { + return b, err + } + b = append(b, ") "...) + + other := schema.FQN{Schema: add.FK.To.Schema, Table: add.FK.To.Table} + b = append(b, " REFERENCES "...) + if b, err = other.AppendQuery(fmter, b); err != nil { + return b, err } - return b, nil -} -func (a *Action) AppendQuery(fmter schema.Formatter, b []byte) ([]byte, error) { - var op schema.QueryAppender - switch true { - case a.AddColumn != nil: - op = a.AddColumn - case a.DropColumn != nil: - op = a.DropColumn - case a.AlterColumn != nil: - op = a.AlterColumn - case a.AlterType != nil: - op = a.AlterType - case a.SetDefault != nil: - op = a.SetDefault - case a.DropDefault != nil: - op = a.DropDefault - case a.SetNotNull != nil: - op = a.SetNotNull - case a.DropNotNull != nil: - op = a.DropNotNull - case a.AddGenerated != nil: - op = a.AddGenerated - case a.AddConstraint != nil: - op = a.AddConstraint - case a.DropConstraint != nil: - op = a.DropConstraint - default: - return b, nil + b = append(b, " ("...) + if b, err = add.FK.To.Column.Safe().AppendQuery(fmter, b); err != nil { + return b, err } - return op.AppendQuery(fmter, b) + b = append(b, ")"...) 
+ + return b, nil } diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index 037ef32dc..e362db7d1 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -201,9 +201,9 @@ func TestAutoMigrator_Run(t *testing.T) { }{ {testRenameTable}, {testRenamedColumns}, - // {testCreateDropTable}, - // {testAlterForeignKeys}, - // {testCustomFKNameFunc}, + {testCreateDropTable}, + {testAlterForeignKeys}, + {testCustomFKNameFunc}, {testForceRenameFK}, {testRenameColumnRenamesFK}, // {testChangeColumnType}, @@ -470,7 +470,7 @@ func testCustomFKNameFunc(t *testing.T, db *bun.DB) { From: sqlschema.C(db.Dialect().DefaultSchema(), "columns", "attrelid"), To: sqlschema.C(db.Dialect().DefaultSchema(), "tables", "oid"), }] - require.Equal(t, fkName, "test_fkey") + require.Equal(t, "test_fkey", fkName) } func testRenamedColumns(t *testing.T, db *bun.DB) { diff --git a/migrate/diff.go b/migrate/diff.go index 4c875975c..3406bb76c 100644 --- a/migrate/diff.go +++ b/migrate/diff.go @@ -7,17 +7,16 @@ import ( "strings" "github.com/uptrace/bun" - "github.com/uptrace/bun/migrate/alt" "github.com/uptrace/bun/migrate/sqlschema" ) // changeset is a set of changes to the database definition. type changeset struct { - operations []alt.Operation + operations []Operation } // Add new operations to the changeset. -func (c *changeset) Add(op ...alt.Operation) { +func (c *changeset) Add(op ...Operation) { c.operations = append(c.operations, op...) 
} @@ -57,17 +56,17 @@ func (c *changeset) ResolveDependencies() error { visited ) - var resolved []alt.Operation - var visit func(op alt.Operation) error + var resolved []Operation + var visit func(op Operation) error - var nextOp alt.Operation + var nextOp Operation var next func() bool - status := make(map[alt.Operation]int, len(c.operations)) + status := make(map[Operation]int, len(c.operations)) for _, op := range c.operations { status[op] = unvisited } - + next = func() bool { for op, s := range status { if s == unvisited { @@ -80,7 +79,7 @@ func (c *changeset) ResolveDependencies() error { // visit iterates over c.operations until it finds all operations that depend on the current one // or runs into cirtular dependency, in which case it will return an error. - visit = func(op alt.Operation) error { + visit = func(op Operation) error { switch status[op] { case visited: return nil @@ -93,7 +92,7 @@ func (c *changeset) ResolveDependencies() error { for _, another := range c.operations { if dop, hasDeps := another.(interface { - DependsOn(alt.Operation) bool + DependsOn(Operation) bool }); another == op || !hasDeps || !dop.DependsOn(op) { continue } @@ -105,7 +104,7 @@ func (c *changeset) ResolveDependencies() error { status[op] = visited // Any dependent nodes would've already been added to the list by now, so we prepend. - resolved = append([]alt.Operation{op}, resolved...) + resolved = append([]Operation{op}, resolved...) return nil } @@ -184,7 +183,7 @@ func (d *detector) Diff() *changeset { targetTables := newTableSet(d.target.Tables...) currentTables := newTableSet(d.current.Tables...) // keeps state (which models still need to be checked) - // These table sets record "updates" to the targetTables set. + // These table-sets record changes to the targetTables set. 
created := newTableSet() renamed := newTableSet() @@ -194,15 +193,17 @@ AddedLoop: removedTables := currentTables.Sub(targetTables) for _, removed := range removedTables.Values() { if d.canRename(removed, added) { - d.changes.Add(&alt.RenameTable{ + d.changes.Add(&RenameTable{ Schema: removed.Schema, OldName: removed.Name, NewName: added.Name, }) + // Here we do not check for created / dropped columns,as well as column type changes, + // because it is only possible to detect a renamed table if its signature (see state.go) did not change. d.detectRenamedColumns(removed, added) - // Update referenced table in all related FKs + // Update referenced table in all related FKs. if d.detectRenamedFKs { d.refMap.UpdateT(removed.T(), added.T()) } @@ -215,7 +216,7 @@ AddedLoop: } } // If a new table did not appear because of the rename operation, then it must've been created. - d.changes.Add(&alt.CreateTable{ + d.changes.Add(&CreateTable{ Schema: added.Schema, Name: added.Name, Model: added.Model, @@ -226,19 +227,20 @@ AddedLoop: // Tables that aren't present anymore and weren't renamed or left untouched were deleted. dropped := currentTables.Sub(targetTables) for _, t := range dropped.Values() { - d.changes.Add(&alt.DropTable{ + d.changes.Add(&DropTable{ Schema: t.Schema, Name: t.Name, }) } - // Detect changes in existing tables that weren't renamed + // Detect changes in existing tables that weren't renamed. + // // TODO: here having State.Tables be a map[string]Table would be much more convenient. // Then we can alse retire tableSet, or at least simplify it to a certain extent. 
curEx := currentTables.Sub(dropped) tarEx := targetTables.Sub(created).Sub(renamed) for _, target := range tarEx.Values() { - // This step is redundant if we have map[string]Table + // TODO(dyma): step is redundant if we have map[string]Table var current sqlschema.Table for _, cur := range curEx.Values() { if cur.Name == target.Name { @@ -259,13 +261,13 @@ AddedLoop: // Add RenameFK migrations for updated FKs. for old, renamed := range d.refMap.Updated() { newName := d.fkNameFunc(renamed) - d.changes.Add(&alt.RenameConstraint{ + d.changes.Add(&RenameConstraint{ FK: renamed, // TODO: make sure this is applied after the table/columns are renamed OldName: d.current.FKs[old], - NewName: d.fkNameFunc(renamed), + NewName: newName, }) - // Here we can add this fk to "current.FKs" to prevent it from firing in the next 2 for-loops. + // Add this FK to currentFKs to prevent it from firing in the two loops below. currentFKs[renamed] = newName delete(currentFKs, old) } @@ -274,7 +276,7 @@ AddedLoop: // Add AddFK migrations for newly added FKs. for fk := range d.target.FKs { if _, ok := currentFKs[fk]; !ok { - d.changes.Add(&alt.AddForeignKey{ + d.changes.Add(&AddForeignKey{ FK: fk, ConstraintName: d.fkNameFunc(fk), }) @@ -284,7 +286,7 @@ AddedLoop: // Add DropFK migrations for removed FKs. 
for fk, fkName := range currentFKs { if _, ok := d.target.FKs[fk]; !ok { - d.changes.Add(&alt.DropConstraint{ + d.changes.Add(&DropConstraint{ FK: fk, ConstraintName: fkName, }) @@ -309,7 +311,7 @@ func (d *detector) detectRenamedColumns(current, added sqlschema.Table) { if aCol != cCol { continue } - d.changes.Add(&alt.RenameColumn{ + d.changes.Add(&RenameColumn{ Schema: added.Schema, Table: added.Name, OldName: cName, @@ -329,8 +331,8 @@ type tableSet struct { underlying map[string]sqlschema.Table } -func newTableSet(initial ...sqlschema.Table) tableSet { - set := tableSet{ +func newTableSet(initial ...sqlschema.Table) *tableSet { + set := &tableSet{ underlying: make(map[string]sqlschema.Table), } for _, t := range initial { @@ -339,22 +341,22 @@ func newTableSet(initial ...sqlschema.Table) tableSet { return set } -func (set tableSet) Add(t sqlschema.Table) { +func (set *tableSet) Add(t sqlschema.Table) { set.underlying[t.Name] = t } -func (set tableSet) Remove(s string) { +func (set *tableSet) Remove(s string) { delete(set.underlying, s) } -func (set tableSet) Values() (tables []sqlschema.Table) { +func (set *tableSet) Values() (tables []sqlschema.Table) { for _, t := range set.underlying { tables = append(tables, t) } return } -func (set tableSet) Sub(other tableSet) tableSet { +func (set *tableSet) Sub(other *tableSet) *tableSet { res := set.clone() for v := range other.underlying { if _, ok := set.underlying[v]; ok { @@ -364,7 +366,7 @@ func (set tableSet) Sub(other tableSet) tableSet { return res } -func (set tableSet) clone() tableSet { +func (set *tableSet) clone() *tableSet { res := newTableSet() for _, t := range set.underlying { res.Add(t) @@ -372,7 +374,8 @@ func (set tableSet) clone() tableSet { return res } -func (set tableSet) String() string { +// String is a debug helper to get a list of table names in the set. 
+func (set *tableSet) String() string { var s strings.Builder for k := range set.underlying { if s.Len() > 0 { diff --git a/migrate/alt/operations.go b/migrate/operations.go similarity index 80% rename from migrate/alt/operations.go rename to migrate/operations.go index f7f1a8873..14671f86a 100644 --- a/migrate/alt/operations.go +++ b/migrate/operations.go @@ -1,7 +1,6 @@ -package alt +package migrate import ( - "github.com/uptrace/bun" "github.com/uptrace/bun/migrate/sqlschema" "github.com/uptrace/bun/schema" ) @@ -51,6 +50,7 @@ func (op *DropTable) FQN() schema.FQN { func (op *DropTable) DependsOn(another Operation) bool { d, ok := another.(*DropConstraint) + // return ok && ((d.FK.From.Schema == op.Schema && d.FK.From.Table == op.Name) || (d.FK.To.Schema == op.Schema && d.FK.To.Table == op.Name)) } @@ -80,10 +80,6 @@ func (op *RenameTable) FQN() schema.FQN { } } -func (op *RenameTable) AppendQuery(fmter schema.Formatter, b []byte) ([]byte, error) { - return fmter.AppendQuery(b, "RENAME TO ?", bun.Ident(op.NewName)), nil -} - func (op *RenameTable) GetReverse() Operation { return &RenameTable{ Schema: op.Schema, @@ -110,10 +106,6 @@ func (op *RenameColumn) FQN() schema.FQN { } } -func (op *RenameColumn) AppendQuery(fmter schema.Formatter, b []byte) ([]byte, error) { - return fmter.AppendQuery(b, "RENAME COLUMN ? TO ?", bun.Ident(op.OldName), bun.Ident(op.NewName)), nil -} - func (op *RenameColumn) GetReverse() Operation { return &RenameColumn{ Schema: op.Schema, @@ -150,10 +142,6 @@ func (op *RenameConstraint) DependsOn(another Operation) bool { return ok && rt.Schema == op.FK.From.Schema && rt.NewName == op.FK.From.Table } -func (op *RenameConstraint) AppendQuery(fmter schema.Formatter, b []byte) ([]byte, error) { - return fmter.AppendQuery(b, "RENAME CONSTRAINT ? 
TO ?", bun.Ident(op.OldName), bun.Ident(op.NewName)), nil -} - func (op *RenameConstraint) GetReverse() Operation { return &RenameConstraint{ FK: op.FK, @@ -188,19 +176,6 @@ func (op *AddForeignKey) DependsOn(another Operation) bool { return false } -func (op *AddForeignKey) AppendQuery(fmter schema.Formatter, b []byte) ([]byte, error) { - fqn := schema.FQN{ - Schema: op.FK.To.Schema, - Table: op.FK.To.Table, - } - b = fmter.AppendQuery(b, - "ADD CONSTRAINT ? FOREIGN KEY (?) REFERENCES ", - bun.Ident(op.ConstraintName), bun.Safe(op.FK.From.Column), - ) - b, _ = fqn.AppendQuery(fmter, b) - return fmter.AppendQuery(b, " (?)", bun.Ident(op.FK.To.Column)), nil -} - func (op *AddForeignKey) GetReverse() Operation { return &DropConstraint{ FK: op.FK, @@ -224,10 +199,6 @@ func (op *DropConstraint) FQN() schema.FQN { } } -func (op *DropConstraint) AppendQuery(fmter schema.Formatter, b []byte) ([]byte, error) { - return fmter.AppendQuery(b, "DROP CONSTRAINT ?", bun.Ident(op.ConstraintName)), nil -} - func (op *DropConstraint) GetReverse() Operation { return &AddForeignKey{ FK: op.FK, diff --git a/migrate/sqlschema/migrator.go b/migrate/sqlschema/migrator.go index 3bdeb7e08..6087a8448 100644 --- a/migrate/sqlschema/migrator.go +++ b/migrate/sqlschema/migrator.go @@ -13,24 +13,11 @@ type MigratorDialect interface { Migrator(*bun.DB) Migrator } -type Operation interface { - schema.QueryAppender - FQN() schema.FQN -} - type Migrator interface { Apply(ctx context.Context, changes ...Operation) error - - RenameTable(ctx context.Context, oldName, newName string) error - CreateTable(ctx context.Context, model interface{}) error - DropTable(ctx context.Context, schema, table string) error - AddContraint(ctx context.Context, fk FK, name string) error - DropContraint(ctx context.Context, schema, table, name string) error - RenameConstraint(ctx context.Context, schema, table, oldName, newName string) error - RenameColumn(ctx context.Context, schema, table, oldName, newName string) 
error } -// Migrator is a dialect-agnostic wrapper for sqlschema.Dialect +// migrator is a dialect-agnostic wrapper for sqlschema.MigratorDialect. type migrator struct { Migrator } @@ -69,3 +56,9 @@ func (m *BaseMigrator) DropTable(ctx context.Context, schema, name string) error } return nil } + +// Operation is a helper interface each migrate.Operation must implement +// so an not to handle this in every dialect separately. +type Operation interface { + FQN() schema.FQN +} diff --git a/migrate/sqlschema/state.go b/migrate/sqlschema/state.go index 553634d90..40fda8320 100644 --- a/migrate/sqlschema/state.go +++ b/migrate/sqlschema/state.go @@ -1,6 +1,10 @@ package sqlschema -import "strings" +import ( + "strings" + + "github.com/uptrace/bun/schema" +) type State struct { Tables []Table @@ -14,6 +18,7 @@ type Table struct { Columns map[string]Column } +// T returns a fully-qualified name object for the table. func (t *Table) T() tFQN { return T(t.Schema, t.Name) } @@ -78,6 +83,7 @@ type tFQN struct { Table string } +// T creates a fully-qualified table name object. func T(schema, table string) tFQN { return tFQN{Schema: schema, Table: table} } // cFQN is a fully-qualified column name. @@ -86,6 +92,7 @@ type cFQN struct { Column composite } +// C creates a fully-qualified column name object. func C(schema, table string, columns ...string) cFQN { return cFQN{tFQN: T(schema, table), Column: newComposite(columns...)} } @@ -96,7 +103,7 @@ func (c cFQN) T() tFQN { } // composite is a hashable representation of []string used to define FKs that depend on multiple columns. -// Although having duplicated column references in a FK is illegal, composite neither validate nor enforce this constraint on the caller. +// Although having duplicated column references in a FK is illegal, composite neither validates nor enforces this constraint on the caller. type composite string // newComposite creates a composite column from a slice of column names. 
@@ -108,6 +115,10 @@ func (c composite) String() string { return string(c) } +func (c composite) Safe() schema.Safe { + return schema.Safe(c) +} + // Split returns a slice of column names that make up the composite. func (c composite) Split() []string { return strings.Split(c.String(), ",") @@ -151,14 +162,14 @@ func (c composite) Replace(oldColumn, newColumn string) composite { // // fk := FK{ // From: C("a", "b", "c_1", "c_2"), // supports multicolumn FKs -// To: C("w", "x", "y_1", "y_2") +// To: C("w", "x", "y_1", "y_2") // } type FK struct { From cFQN // From is the referencing column. To cFQN // To is the referenced column. } -// DependsT checks if either part of the FK's definition mentions T +// dependsT checks if either part of the FK's definition mentions T // and returns the columns that belong to T. Notice that *C allows modifying the column's FQN. // // Example: @@ -168,7 +179,7 @@ type FK struct { // To: C("x", "y", "z"), // } // depends on T("a", "b") and T("x", "y") -func (fk *FK) DependsT(t tFQN) (ok bool, cols []*cFQN) { +func (fk *FK) dependsT(t tFQN) (ok bool, cols []*cFQN) { if c := &fk.From; c.T() == t { ok = true cols = append(cols, c) @@ -183,7 +194,7 @@ func (fk *FK) DependsT(t tFQN) (ok bool, cols []*cFQN) { return } -// DependsC checks if the FK definition mentions C and returns a modifiable FQN of the matching column. +// dependsC checks if the FK definition mentions C and returns a modifiable FQN of the matching column. 
// // Example: // @@ -192,7 +203,7 @@ func (fk *FK) DependsT(t tFQN) (ok bool, cols []*cFQN) { // To: C("w", "x", "y_1", "y_2"), // } // depends on C("a", "b", "c_1"), C("a", "b", "c_2"), C("w", "x", "y_1"), and C("w", "x", "y_2") -func (fk *FK) DependsC(c cFQN) (bool, *cFQN) { +func (fk *FK) dependsC(c cFQN) (bool, *cFQN) { switch { case fk.From.Column.Contains(c.Column): return true, &fk.From @@ -208,8 +219,7 @@ func (fk *FK) DependsC(c cFQN) (bool, *cFQN) { // // Note: this is only important/necessary if we want to rename FKs instead of re-creating them. // Most of the time it wouldn't make a difference, but there may be cases in which re-creating FKs could be costly -// and renaming them would be preferred. For that we could provided an options like WithRenameFKs(true) and -// WithRenameFKFunc(func(sqlschema.FK) string) to allow customizing the FK naming convention. +// and renaming them would be preferred. type RefMap map[FK]*FK // deleted is a special value that RefMap uses to denote a deleted FK constraint. @@ -229,7 +239,7 @@ func NewRefMap(fks ...FK) RefMap { // Returns the number of updated entries. func (r RefMap) UpdateT(oldT, newT tFQN) (n int) { for _, fk := range r { - ok, cols := fk.DependsT(oldT) + ok, cols := fk.dependsT(oldT) if !ok { continue } @@ -246,9 +256,9 @@ func (r RefMap) UpdateT(oldT, newT tFQN) (n int) { // and so, only the column-name part of the FQN can be updated. Returns the number of updated entries. func (r RefMap) UpdateC(oldC cFQN, newColumn string) (n int) { for _, fk := range r { - if ok, col := fk.DependsC(oldC); ok { + if ok, col := fk.dependsC(oldC); ok { oldColumns := oldC.Column.Split() - // UpdateC can only update 1 column at a time. + // updateC will only update 1 column per invocation. col.Column = col.Column.Replace(oldColumns[0], newColumn) n++ } @@ -260,7 +270,7 @@ func (r RefMap) UpdateC(oldC cFQN, newColumn string) (n int) { // Returns the number of deleted entries. 
func (r RefMap) DeleteT(t tFQN) (n int) { for old, fk := range r { - if ok, _ := fk.DependsT(t); ok { + if ok, _ := fk.dependsT(t); ok { r[old] = &deleted n++ } @@ -272,7 +282,7 @@ func (r RefMap) DeleteT(t tFQN) (n int) { // Returns the number of deleted entries. func (r RefMap) DeleteC(c cFQN) (n int) { for old, fk := range r { - if ok, _ := fk.DependsC(c); ok { + if ok, _ := fk.dependsC(c); ok { r[old] = &deleted n++ } diff --git a/schema/sqlfmt.go b/schema/sqlfmt.go index 11eabb13b..5703c9694 100644 --- a/schema/sqlfmt.go +++ b/schema/sqlfmt.go @@ -39,7 +39,7 @@ func (s Name) AppendQuery(fmter Formatter, b []byte) ([]byte, error) { //------------------------------------------------------------------------------ -// FQN represents a fully qualified table name. +// FQN appends a fully-qualified table name. type FQN struct { Schema string Table string From 3cfd8c62125786aaf6f493acc5b39f4d3db3d628 Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Sat, 3 Aug 2024 20:44:58 +0200 Subject: [PATCH 17/55] feat: change column type Bufixes and improvements: - pgdialect.Inspector canonicalizes all default expressions (lowercase) to make sure they are always comparable with the model definition. - sqlschema.SchemaInspector canonicalizes all default expressions (lowercase) - pgdialect and sqlschema now support type-equivalence, which prevents unnecessary migrations like CHAR -> CHARACTER from being created. Changing PRIMARY KEY and UNIQUE-ness are outside of this commit's scope, because those constraints can span multiple columns. 
--- dialect/pgdialect/alter_table.go | 60 +++++- dialect/pgdialect/inspector.go | 46 +++-- dialect/pgdialect/sqltype.go | 74 +++++-- dialect/pgdialect/sqltype_test.go | 84 ++++++++ internal/dbtest/inspect_test.go | 219 +++++++++++++++++++-- internal/dbtest/migrate_test.go | 77 ++++---- migrate/auto.go | 7 +- migrate/diff.go | 312 +++++++++++++++++------------- migrate/operations.go | 8 + migrate/sqlschema/inspector.go | 38 +++- migrate/sqlschema/state.go | 57 +++++- 11 files changed, 747 insertions(+), 235 deletions(-) create mode 100644 dialect/pgdialect/sqltype_test.go diff --git a/dialect/pgdialect/alter_table.go b/dialect/pgdialect/alter_table.go index 5170ba316..237e0bb78 100644 --- a/dialect/pgdialect/alter_table.go +++ b/dialect/pgdialect/alter_table.go @@ -3,6 +3,7 @@ package pgdialect import ( "context" "fmt" + "log" "github.com/uptrace/bun" "github.com/uptrace/bun/internal" @@ -61,6 +62,8 @@ func (m *migrator) Apply(ctx context.Context, changes ...sqlschema.Operation) er b, err = m.addForeignKey(fmter, b, change) case *migrate.RenameConstraint: b, err = m.renameConstraint(fmter, b, change) + case *migrate.ChangeColumnType: + b, err = m.changeColumnType(fmter, b, change) default: return fmt.Errorf("apply changes: unknown operation %T", change) } @@ -69,7 +72,7 @@ func (m *migrator) Apply(ctx context.Context, changes ...sqlschema.Operation) er } query := internal.String(b) - // log.Println("exec query: " + query) + log.Println("exec query: " + query) if _, err = conn.ExecContext(ctx, query); err != nil { return fmt.Errorf("apply changes: %w", err) } @@ -174,3 +177,58 @@ func (m *migrator) addForeignKey(fmter schema.Formatter, b []byte, add *migrate. return b, nil } + +func (m *migrator) changeColumnType(fmter schema.Formatter, b []byte, colDef *migrate.ChangeColumnType) (_ []byte, err error) { + b = append(b, "ALTER TABLE "...) 
+ fqn := colDef.FQN() + if b, err = fqn.AppendQuery(fmter, b); err != nil { + return b, err + } + + var i int + appendAlterColumn := func() { + if i > 0 { + b = append(b, ", "...) + } + b = append(b, " ALTER COLUMN "...) + b, err = bun.Ident(colDef.Column).AppendQuery(fmter, b) + i++ + } + + got, want := colDef.From, colDef.To + + if want.SQLType != got.SQLType { + if appendAlterColumn(); err != nil { + return b, err + } + b = append(b, " SET DATA TYPE "...) + if b, err = want.AppendQuery(fmter, b); err != nil { + return b, err + } + } + + if want.IsNullable != got.IsNullable { + if appendAlterColumn(); err != nil { + return b, err + } + if !want.IsNullable { + b = append(b, " SET NOT NULL"...) + } else { + b = append(b, " DROP NOT NULL"...) + } + } + + if want.DefaultValue != got.DefaultValue { + if appendAlterColumn(); err != nil { + return b, err + } + if want.DefaultValue == "" { + b = append(b, " DROP DEFAULT"...) + } else { + b = append(b, " SET DEFAULT "...) + b = append(b, want.DefaultValue...) 
+ } + } + + return b, nil +} diff --git a/dialect/pgdialect/inspector.go b/dialect/pgdialect/inspector.go index 95d2581b2..c95e95cfb 100644 --- a/dialect/pgdialect/inspector.go +++ b/dialect/pgdialect/inspector.go @@ -2,11 +2,9 @@ package pgdialect import ( "context" - "fmt" "strings" "github.com/uptrace/bun" - "github.com/uptrace/bun/dialect/sqltype" "github.com/uptrace/bun/migrate/sqlschema" ) @@ -52,23 +50,21 @@ func (in *Inspector) Inspect(ctx context.Context) (sqlschema.State, error) { } colDefs := make(map[string]sqlschema.Column) for _, c := range columns { - dataType := fromDatabaseType(c.DataType) - if strings.EqualFold(dataType, sqltype.VarChar) && c.VarcharLen > 0 { - dataType = fmt.Sprintf("%s(%d)", dataType, c.VarcharLen) - } - def := c.Default if c.IsSerial || c.IsIdentity { def = "" + } else if !c.IsDefaultLiteral { + def = strings.ToLower(def) } colDefs[c.Name] = sqlschema.Column{ - SQLType: strings.ToLower(dataType), + SQLType: c.DataType, + VarcharLen: c.VarcharLen, + DefaultValue: def, IsPK: c.IsPK, IsNullable: c.IsNullable, IsAutoIncrement: c.IsSerial, IsIdentity: c.IsIdentity, - DefaultValue: def, } } @@ -96,21 +92,22 @@ type InformationSchemaTable struct { } type InformationSchemaColumn struct { - Schema string `bun:"table_schema"` - Table string `bun:"table_name"` - Name string `bun:"column_name"` - DataType string `bun:"data_type"` - VarcharLen int `bun:"varchar_len"` - IsArray bool `bun:"is_array"` - ArrayDims int `bun:"array_dims"` - Default string `bun:"default"` - IsPK bool `bun:"is_pk"` - IsIdentity bool `bun:"is_identity"` - IndentityType string `bun:"identity_type"` - IsSerial bool `bun:"is_serial"` - IsNullable bool `bun:"is_nullable"` - IsUnique bool `bun:"is_unique"` - UniqueGroup []string `bun:"unique_group,array"` + Schema string `bun:"table_schema"` + Table string `bun:"table_name"` + Name string `bun:"column_name"` + DataType string `bun:"data_type"` + VarcharLen int `bun:"varchar_len"` + IsArray bool `bun:"is_array"` + 
ArrayDims int `bun:"array_dims"` + Default string `bun:"default"` + IsDefaultLiteral bool `bun:"default_is_literal_expr"` + IsPK bool `bun:"is_pk"` + IsIdentity bool `bun:"is_identity"` + IndentityType string `bun:"identity_type"` + IsSerial bool `bun:"is_serial"` + IsNullable bool `bun:"is_nullable"` + IsUnique bool `bun:"is_unique"` + UniqueGroup []string `bun:"unique_group,array"` } type ForeignKey struct { @@ -153,6 +150,7 @@ SELECT WHEN "c".column_default ~ '^''.*''::.*$' THEN substring("c".column_default FROM '^''(.*)''::.*$') ELSE "c".column_default END AS "default", + "c".column_default ~ '^''.*''::.*$' OR "c".column_default ~ '^[0-9\.]+$' AS default_is_literal_expr, 'p' = ANY("c".constraint_type) AS is_pk, "c".is_identity = 'YES' AS is_identity, "c".column_default = format('nextval(''%s_%s_seq''::regclass)', "c".table_name, "c".column_name) AS is_serial, diff --git a/dialect/pgdialect/sqltype.go b/dialect/pgdialect/sqltype.go index 6b862b972..a595df5be 100644 --- a/dialect/pgdialect/sqltype.go +++ b/dialect/pgdialect/sqltype.go @@ -8,16 +8,19 @@ import ( "strings" "github.com/uptrace/bun/dialect/sqltype" + "github.com/uptrace/bun/migrate/sqlschema" "github.com/uptrace/bun/schema" ) const ( // Date / Time - pgTypeTimestampTz = "TIMESTAMPTZ" // Timestamp with a time zone - pgTypeDate = "DATE" // Date - pgTypeTime = "TIME" // Time without a time zone - pgTypeTimeTz = "TIME WITH TIME ZONE" // Time with a time zone - pgTypeInterval = "INTERVAL" // Time Interval + pgTypeTimestamp = "TIMESTAMP" // Timestamp + pgTypeTimestampWithTz = "TIMESTAMP WITH TIME ZONE" // Timestamp with a time zone + pgTypeTimestampTz = "TIMESTAMPTZ" // Timestamp with a time zone (alias) + pgTypeDate = "DATE" // Date + pgTypeTime = "TIME" // Time without a time zone + pgTypeTimeTz = "TIME WITH TIME ZONE" // Time with a time zone + pgTypeInterval = "INTERVAL" // Time interval // Network Addresses pgTypeInet = "INET" // IPv4 or IPv6 hosts and networks @@ -31,6 +34,7 @@ const ( // Character 
Types pgTypeChar = "CHAR" // fixed length string (blank padded) + pgTypeCharacter = "CHARACTER" // alias for CHAR pgTypeText = "TEXT" // variable length string without limit pgTypeVarchar = "VARCHAR" // variable length string with optional limit pgTypeCharacterVarying = "CHARACTER VARYING" // alias for VARCHAR @@ -115,11 +119,59 @@ func sqlType(typ reflect.Type) string { return sqlType } -// fromDatabaseType converts Postgres-specific type to a more generic `sqltype`. -func fromDatabaseType(dbType string) string { - switch strings.ToUpper(dbType) { - case pgTypeChar, pgTypeVarchar, pgTypeCharacterVarying, pgTypeText: - return sqltype.VarChar +var ( + char = newAliases(pgTypeChar, pgTypeCharacter) + varchar = newAliases(pgTypeVarchar, pgTypeCharacterVarying) + timestampTz = newAliases(sqltype.Timestamp, pgTypeTimestampTz, pgTypeTimestampWithTz) +) + +func (d *Dialect) EquivalentType(col1, col2 sqlschema.Column) bool { + if col1.SQLType == col2.SQLType { + return checkVarcharLen(col1, col2, d.DefaultVarcharLen()) + } + + typ1, typ2 := strings.ToUpper(col1.SQLType), strings.ToUpper(col2.SQLType) + + switch { + case char.IsAlias(typ1) && char.IsAlias(typ2): + return checkVarcharLen(col1, col2, d.DefaultVarcharLen()) + case varchar.IsAlias(typ1) && varchar.IsAlias(typ2): + return checkVarcharLen(col1, col2, d.DefaultVarcharLen()) + case timestampTz.IsAlias(typ1) && timestampTz.IsAlias(typ2): + return true + } + return false +} + +// checkVarcharLen returns true if columns have the same VarcharLen, or, +// if one specifies no VarcharLen and the other one has the default lenght for pgdialect. 
+// We assume that the types are otherwise equivalent and that any non-character column +// would have VarcharLen == 0; +func checkVarcharLen(col1, col2 sqlschema.Column, defaultLen int) bool { + if col1.VarcharLen == col2.VarcharLen { + return true + } + + if (col1.VarcharLen == 0 && col2.VarcharLen == defaultLen) || (col1.VarcharLen == defaultLen && col2.VarcharLen == 0) { + return true + } + return false +} + +// typeAlias defines aliases for common data types. It is a lightweight string set implementation. +type typeAlias map[string]struct{} + +// IsAlias checks if typ1 and typ2 are aliases of the same data type. +func (t typeAlias) IsAlias(typ string) bool { + _, ok := t[typ] + return ok +} + +// newAliases creates a set of aliases. +func newAliases(aliases ...string) typeAlias { + types := make(typeAlias) + for _, a := range aliases { + types[a] = struct{}{} } - return dbType + return types } diff --git a/dialect/pgdialect/sqltype_test.go b/dialect/pgdialect/sqltype_test.go new file mode 100644 index 000000000..5a51e1275 --- /dev/null +++ b/dialect/pgdialect/sqltype_test.go @@ -0,0 +1,84 @@ +package pgdialect + +import ( + "testing" + + "github.com/stretchr/testify/require" + "github.com/uptrace/bun/dialect/sqltype" + "github.com/uptrace/bun/migrate/sqlschema" +) + +func TestInspectorDialect_EquivalentType(t *testing.T) { + d := New() + + t.Run("common types", func(t *testing.T) { + for _, tt := range []struct { + typ1, typ2 string + want bool + }{ + {"text", "text", true}, // identical types + + {sqltype.VarChar, pgTypeVarchar, true}, + {sqltype.VarChar, pgTypeCharacterVarying, true}, + {sqltype.VarChar, pgTypeChar, false}, + {sqltype.VarChar, pgTypeCharacter, false}, + {pgTypeCharacterVarying, pgTypeVarchar, true}, + {pgTypeCharacter, pgTypeChar, true}, + {sqltype.VarChar, pgTypeText, false}, + {pgTypeChar, pgTypeText, false}, + {pgTypeVarchar, pgTypeText, false}, + + // SQL standards require that TIMESTAMP be default alias for "TIMESTAMP WITH TIME ZONE" + 
{sqltype.Timestamp, pgTypeTimestampTz, true}, + {sqltype.Timestamp, pgTypeTimestampWithTz, true}, + {sqltype.Timestamp, pgTypeTimestamp, true}, // Still, TIMESTAMP == TIMESTAMP + {sqltype.Timestamp, pgTypeTimeTz, false}, + {pgTypeTimestampTz, pgTypeTimestampWithTz, true}, + } { + eq := " ~ " + if !tt.want { + eq = " !~ " + } + t.Run(tt.typ1+eq+tt.typ2, func(t *testing.T) { + got := d.EquivalentType( + sqlschema.Column{SQLType: tt.typ1}, + sqlschema.Column{SQLType: tt.typ2}, + ) + require.Equal(t, tt.want, got) + }) + } + + }) + + t.Run("custom varchar length", func(t *testing.T) { + for _, tt := range []struct { + name string + col1, col2 sqlschema.Column + want bool + }{ + { + name: "varchars of different length are not equivalent", + col1: sqlschema.Column{SQLType: "varchar", VarcharLen: 10}, + col2: sqlschema.Column{SQLType: "varchar"}, + want: false, + }, + { + name: "varchar with no explicit length is equivalent to varchar of default length", + col1: sqlschema.Column{SQLType: "varchar", VarcharLen: d.DefaultVarcharLen()}, + col2: sqlschema.Column{SQLType: "varchar"}, + want: true, + }, + { + name: "characters with equal custom length", + col1: sqlschema.Column{SQLType: "character varying", VarcharLen: 200}, + col2: sqlschema.Column{SQLType: "varchar", VarcharLen: 200}, + want: true, + }, + } { + t.Run(tt.name, func(t *testing.T) { + got := d.EquivalentType(tt.col1, tt.col2) + require.Equal(t, tt.want, got) + }) + } + }) +} diff --git a/internal/dbtest/inspect_test.go b/internal/dbtest/inspect_test.go index 42e200e2c..a4c1bcf81 100644 --- a/internal/dbtest/inspect_test.go +++ b/internal/dbtest/inspect_test.go @@ -2,11 +2,15 @@ package dbtest_test import ( "context" + "fmt" "testing" + "time" "github.com/stretchr/testify/require" "github.com/uptrace/bun" + "github.com/uptrace/bun/dialect/sqltype" "github.com/uptrace/bun/migrate/sqlschema" + "github.com/uptrace/bun/schema" ) type Article struct { @@ -36,8 +40,9 @@ type Office struct { } type Publisher struct { - 
ID string `bun:"publisher_id,pk,default:gen_random_uuid(),unique:office_fk"` - Name string `bun:"publisher_name,unique,notnull,unique:office_fk"` + ID string `bun:"publisher_id,pk,default:gen_random_uuid(),unique:office_fk"` + Name string `bun:"publisher_name,unique,notnull,unique:office_fk"` + CreatedAt time.Time `bun:"created_at,default:current_timestamp"` // Writers write articles for this publisher. Writers []Journalist `bun:"m2m:publisher_to_journalists,join:Publisher=Author"` @@ -65,7 +70,6 @@ type Journalist struct { } func TestDatabaseInspector_Inspect(t *testing.T) { - testEachDB(t, func(t *testing.T, dbName string, db *bun.DB) { db.RegisterModel((*PublisherToJournalist)(nil)) @@ -93,15 +97,15 @@ func TestDatabaseInspector_Inspect(t *testing.T) { Name: "offices", Columns: map[string]sqlschema.Column{ "office_name": { - SQLType: "varchar", + SQLType: sqltype.VarChar, IsPK: true, }, "publisher_id": { - SQLType: "varchar", + SQLType: sqltype.VarChar, IsNullable: true, }, "publisher_name": { - SQLType: "varchar", + SQLType: sqltype.VarChar, IsNullable: true, }, }, @@ -119,7 +123,7 @@ func TestDatabaseInspector_Inspect(t *testing.T) { DefaultValue: "", }, "editor": { - SQLType: "varchar", + SQLType: sqltype.VarChar, IsPK: false, IsNullable: false, IsAutoIncrement: false, @@ -127,7 +131,7 @@ func TestDatabaseInspector_Inspect(t *testing.T) { DefaultValue: "john doe", }, "title": { - SQLType: "varchar", + SQLType: sqltype.VarChar, IsPK: false, IsNullable: false, IsAutoIncrement: false, @@ -135,7 +139,8 @@ func TestDatabaseInspector_Inspect(t *testing.T) { DefaultValue: "", }, "locale": { - SQLType: "varchar(5)", + SQLType: sqltype.VarChar, + VarcharLen: 5, IsPK: false, IsNullable: true, IsAutoIncrement: false, @@ -159,7 +164,7 @@ func TestDatabaseInspector_Inspect(t *testing.T) { DefaultValue: "", }, "publisher_id": { - SQLType: "varchar", + SQLType: sqltype.VarChar, }, "author_id": { SQLType: "bigint", @@ -176,10 +181,10 @@ func TestDatabaseInspector_Inspect(t 
*testing.T) { IsIdentity: true, }, "first_name": { - SQLType: "varchar", + SQLType: sqltype.VarChar, }, "last_name": { - SQLType: "varchar", + SQLType: sqltype.VarChar, IsNullable: true, }, }, @@ -189,7 +194,7 @@ func TestDatabaseInspector_Inspect(t *testing.T) { Name: "publisher_to_journalists", Columns: map[string]sqlschema.Column{ "publisher_id": { - SQLType: "varchar", + SQLType: sqltype.VarChar, IsPK: true, }, "author_id": { @@ -203,12 +208,17 @@ func TestDatabaseInspector_Inspect(t *testing.T) { Name: "publishers", Columns: map[string]sqlschema.Column{ "publisher_id": { - SQLType: "varchar", + SQLType: sqltype.VarChar, IsPK: true, DefaultValue: "gen_random_uuid()", }, "publisher_name": { - SQLType: "varchar", + SQLType: sqltype.VarChar, + }, + "created_at": { + SQLType: "timestamp", + DefaultValue: "current_timestamp", + IsNullable: true, }, }, }, @@ -242,7 +252,7 @@ func TestDatabaseInspector_Inspect(t *testing.T) { // State.FKs store their database names, which differ from dialect to dialect. // Because of that we compare FKs and Tables separately. - require.Equal(t, wantTables, got.Tables) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, got.Tables) var fks []sqlschema.FK for fk := range got.FKs { @@ -271,3 +281,180 @@ func mustCreateSchema(tb testing.TB, ctx context.Context, db *bun.DB, schema str db.NewRaw("DROP SCHEMA IF EXISTS ?", bun.Ident(schema)).Exec(ctx) }) } + +// cmpTables compares table schemas using dialect-specific equivalence checks for column types +// and reports the differences as t.Error(). +func cmpTables(tb testing.TB, d sqlschema.InspectorDialect, want, got []sqlschema.Table) { + tb.Helper() + + require.Equal(tb, tableNames(want), tableNames(got), "different set of tables") + + // Now we are guaranteed to have the same tables. 
+ for _, wt := range want { + tableName := wt.Name + // TODO(dyma): this will be simplified by map[string]Table + var gt sqlschema.Table + for i := range got { + if got[i].Name == tableName { + gt = got[i] + break + } + } + + var errs []string + for colName, wantCol := range wt.Columns { + errorf := func(format string, args ...interface{}) { + errs = append(errs, fmt.Sprintf("[%s.%s] "+format, append([]interface{}{tableName, colName}, args...)...)) + } + gotCol, ok := gt.Columns[colName] + if !ok { + errorf("column is missing") + continue + } + + if !d.EquivalentType(wantCol, gotCol) { + errorf("sql types are not equivalent:\n\t(+want)\t%s\n\t(-got)\t%s", formatType(wantCol), formatType(gotCol)) + } + + if wantCol.DefaultValue != gotCol.DefaultValue { + errorf("default values differ:\n\t(+want)\t%s\n\t(-got)\t%s", wantCol.DefaultValue, gotCol.DefaultValue) + } + + if wantCol.IsNullable != gotCol.IsNullable { + errorf("isNullable:\n\t(+want)\t%v\n\t(-got)\t%v", wantCol.IsNullable, gotCol.IsNullable) + } + + if wantCol.IsAutoIncrement != gotCol.IsAutoIncrement { + errorf("IsAutoIncrement:\n\t(+want)\t%s\n\t(-got)\t%s", wantCol.IsAutoIncrement, gotCol.IsAutoIncrement) + } + + if wantCol.IsIdentity != gotCol.IsIdentity { + errorf("IsIdentity:\n\t(+want)\t%s\n\t(-got)\t%s", wantCol.IsIdentity, gotCol.IsIdentity) + } + } + + for _, errMsg := range errs { + tb.Error(errMsg) + } + } +} + +func cmpColumns(tb testing.TB, d sqlschema.InspectorDialect, tableName string, want, got map[string]sqlschema.Column) { + var errs []string + for colName, wantCol := range want { + errorf := func(format string, args ...interface{}) { + errs = append(errs, fmt.Sprintf("[%s.%s] "+format, append([]interface{}{tableName, colName}, args...)...)) + } + gotCol, ok := got[colName] + if !ok { + errorf("column is missing") + continue + } + + if !d.EquivalentType(wantCol, gotCol) { + errorf("sql types are not equivalent:\n\t(+want)\t%s\n\t(-got)\t%s", formatType(wantCol), formatType(gotCol)) + } + + 
if wantCol.DefaultValue != gotCol.DefaultValue { + errorf("default values differ:\n\t(+want)\t%s\n\t(-got)\t%s", wantCol.DefaultValue, gotCol.DefaultValue) + } + + if wantCol.IsNullable != gotCol.IsNullable { + errorf("isNullable:\n\t(+want)\t%s\n\t(-got)\t%s", wantCol.IsNullable, gotCol.IsNullable) + } + + if wantCol.IsAutoIncrement != gotCol.IsAutoIncrement { + errorf("IsAutoIncrement:\n\t(+want)\t%s\n\t(-got)\t%s", wantCol.IsAutoIncrement, gotCol.IsAutoIncrement) + } + + if wantCol.IsIdentity != gotCol.IsIdentity { + errorf("IsIdentity:\n\t(+want)\t%s\n\t(-got)\t%s", wantCol.IsIdentity, gotCol.IsIdentity) + } + } + + for _, errMsg := range errs { + tb.Error(errMsg) + } +} + +func tableNames(tables []sqlschema.Table) (names []string) { + for i := range tables { + names = append(names, tables[i].Name) + } + return +} + +func formatType(c sqlschema.Column) string { + if c.VarcharLen == 0 { + return c.SQLType + } + return fmt.Sprintf("%s(%d)", c.SQLType, c.VarcharLen) +} + +func TestSchemaInspector_Inspect(t *testing.T) { + testEachDialect(t, func(t *testing.T, dialectName string, dialect schema.Dialect) { + if _, ok := dialect.(sqlschema.InspectorDialect); !ok { + t.Skip(dialectName + " is not sqlschema.InspectorDialect") + } + + t.Run("default expressions are canonicalized", func(t *testing.T) { + type Model struct { + ID string `bun:",notnull,default:RANDOM()"` + Name string `bun:",notnull,default:'John Doe'"` + } + + tables := schema.NewTables(dialect) + tables.Register((*Model)(nil)) + inspector := sqlschema.NewSchemaInspector(tables) + + want := map[string]sqlschema.Column{ + "id": { + SQLType: sqltype.VarChar, + DefaultValue: "random()", + }, + "name": { + SQLType: sqltype.VarChar, + DefaultValue: "'John Doe'", + }, + } + + got, err := inspector.Inspect(context.Background()) + require.NoError(t, err) + + require.Len(t, got.Tables, 1) + cmpColumns(t, dialect.(sqlschema.InspectorDialect), "model", want, got.Tables[0].Columns) + }) + + t.Run("parses custom 
varchar len", func(t *testing.T) { + type Model struct { + ID string `bun:",notnull,type:text"` + FirstName string `bun:",notnull,type:character varying(60)"` + LastName string `bun:",notnull,type:varchar(100)"` + } + + tables := schema.NewTables(dialect) + tables.Register((*Model)(nil)) + inspector := sqlschema.NewSchemaInspector(tables) + + want := map[string]sqlschema.Column{ + "id": { + SQLType: "text", + }, + "first_name": { + SQLType: "character varying", + VarcharLen: 60, + }, + "last_name": { + SQLType: "varchar", + VarcharLen: 100, + }, + } + + got, err := inspector.Inspect(context.Background()) + require.NoError(t, err) + + require.Len(t, got.Tables, 1) + cmpColumns(t, dialect.(sqlschema.InspectorDialect), "model", want, got.Tables[0].Columns) + }) + }) +} diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index e362db7d1..5b82609eb 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -9,7 +9,6 @@ import ( "github.com/stretchr/testify/require" "github.com/uptrace/bun" - "github.com/uptrace/bun/dialect/sqltype" "github.com/uptrace/bun/migrate" "github.com/uptrace/bun/migrate/sqlschema" ) @@ -206,7 +205,7 @@ func TestAutoMigrator_Run(t *testing.T) { {testCustomFKNameFunc}, {testForceRenameFK}, {testRenameColumnRenamesFK}, - // {testChangeColumnType}, + {testChangeColumnType_AutoCast}, } testEachDB(t, func(t *testing.T, dbName string, db *bun.DB) { @@ -577,31 +576,31 @@ func testRenameColumnRenamesFK(t *testing.T, db *bun.DB) { require.Equal(t, "tennants_my_neighbour_fkey", fkName) } -func testChangeColumnType(t *testing.T, db *bun.DB) { +// testChangeColumnType_AutoCast checks type changes which can be type-casted automatically, +// i.e. do not require supplying a USING clause (pgdialect). 
+func testChangeColumnType_AutoCast(t *testing.T, db *bun.DB) { type TableBefore struct { bun.BaseModel `bun:"table:table"` - // NewPK int64 `bun:"new_pk,notnull,unique"` - PK int32 `bun:"old_pk,pk,identity"` - DefaultExpr string `bun:"default_expr,default:gen_random_uuid()"` - Timestamp time.Time `bun:"ts"` - StillNullable string `bun:"not_null"` - TypeOverride string `bun:"type:char(100)"` - Logical bool `bun:"default:false"` + SmallInt int32 `bun:"bigger_int,pk,identity"` + Timestamp time.Time `bun:"ts"` + DefaultExpr string `bun:"default_expr,default:gen_random_uuid()"` + EmptyDefault string `bun:"empty_default"` + Nullable string `bun:"not_null"` + TypeOverride string `bun:"type:varchar(100)"` // ManyValues []string `bun:",array"` } type TableAfter struct { bun.BaseModel `bun:"table:table"` - // NewPK int64 `bun:",pk"` - PK int64 `bun:"old_pk,identity"` // ~~no longer PK (not identity)~~ (wip) - DefaultExpr string `bun:"default_expr,type:uuid,default:uuid_nil()"` // different default + type UUID - Timestamp time.Time `bun:"ts,default:current_timestamp"` // has default value now - NotNullable string `bun:"not_null,notnull"` // added NOT NULL - TypeOverride string `bun:"type:char(200)"` // new length - Logical uint8 `bun:"default:1"` // change type + different default - // ManyValues []string `bun:",array"` // did not change + BigInt int64 `bun:"bigger_int,pk,identity"` // int64 maps to bigint + Timestamp time.Time `bun:"ts,default:current_timestamp"` // has default value now + DefaultExpr string `bun:"default_expr,default:random()"` // different default + EmptyDefault string `bun:"empty_default,default:''"` // '' empty string default + NotNullable string `bun:"not_null,notnull"` // added NOT NULL + TypeOverride string `bun:"type:varchar(200)"` // new length + // ManyValues []string `bun:",array"` // did not change } wantTables := []sqlschema.Table{ @@ -613,31 +612,34 @@ func testChangeColumnType(t *testing.T, db *bun.DB) { // IsPK: true, // SQLType: "bigint", 
// }, - "old_pk": { - SQLType: "bigint", - IsPK: true, + "bigger_int": { + SQLType: "bigint", + IsPK: true, + IsIdentity: true, + }, + "ts": { + SQLType: "timestamp", // FIXME(dyma): convert "timestamp with time zone" to sqltype.Timestamp + DefaultValue: "current_timestamp", // FIXME(dyma): Convert driver-specific value to common "expressions" (e.g. CURRENT_TIMESTAMP == current_timestamp) OR lowercase all types. + IsNullable: true, }, "default_expr": { - SQLType: "uuid", + SQLType: "varchar", IsNullable: true, - DefaultValue: "uuid_nil()", + DefaultValue: "random()", }, - "ts": { - SQLType: sqltype.Timestamp, - DefaultValue: "current_timestamp", + "empty_default": { + SQLType: "varchar", IsNullable: true, + DefaultValue: "", // NOT "''" }, "not_null": { - SQLType: "varchar", + SQLType: "varchar", + IsNullable: false, }, "type_override": { - SQLType: "char(200)", + SQLType: "varchar", IsNullable: true, - }, - "logical": { - SQLType: "smallint", - DefaultValue: "1", - IsNullable: true, + VarcharLen: 200, }, // "many_values": { // SQLType: "array", @@ -657,7 +659,8 @@ func testChangeColumnType(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - require.Equal(t, wantTables, state.Tables) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) + // require.Equal(t, wantTables, state.Tables } // // TODO: rewrite these tests into AutoMigrator tests, Diff should be moved to migrate/internal package @@ -748,7 +751,7 @@ func testChangeColumnType(t *testing.T, db *bun.DB) { // }, // &migrate.DropTable{ // Schema: "billing", -// Name: "billing.subscriptions", // TODO: fix once schema is used correctly +// Name: "billing.subscriptions", // }, // &migrate.DropTable{ // Schema: defaultSchema, @@ -838,7 +841,7 @@ func testChangeColumnType(t *testing.T, db *bun.DB) { // }, // }, // { -// name: "create FKs for new tables", // TODO: update test case to detect an added column too +// name: "create FKs for new tables", // states: func(t testing.TB, 
ctx context.Context, d schema.Dialect) (stateDb sqlschema.State, stateModel sqlschema.State) { // return getState(t, ctx, d, // (*ThingNoOwner)(nil), @@ -861,7 +864,7 @@ func testChangeColumnType(t *testing.T, db *bun.DB) { // }, // }, // { -// name: "drop FKs for dropped tables", // TODO: update test case to detect dropped columns too +// name: "drop FKs for dropped tables", // states: func(t testing.TB, ctx context.Context, d schema.Dialect) (sqlschema.State, sqlschema.State) { // stateDb := getState(t, ctx, d, (*Owner)(nil), (*Thing)(nil)) // stateModel := getState(t, ctx, d, (*ThingNoOwner)(nil)) diff --git a/migrate/auto.go b/migrate/auto.go index edb8f9f77..b1cacf691 100644 --- a/migrate/auto.go +++ b/migrate/auto.go @@ -35,7 +35,7 @@ func WithExcludeTable(tables ...string) AutoMigratorOption { // which is the default strategy. Perhaps it would make sense to allow disabling this and switching to separate (CreateTable + AddFK) func WithFKNameFunc(f func(sqlschema.FK) string) AutoMigratorOption { return func(m *AutoMigrator) { - m.diffOpts = append(m.diffOpts, fKNameFunc(f)) + m.diffOpts = append(m.diffOpts, withFKNameFunc(f)) } } @@ -44,7 +44,7 @@ func WithFKNameFunc(f func(sqlschema.FK) string) AutoMigratorOption { // and in those cases simply renaming the FK makes a lot more sense. 
func WithRenameFK(enabled bool) AutoMigratorOption { return func(m *AutoMigrator) { - m.diffOpts = append(m.diffOpts, detectRenamedFKs(enabled)) + m.diffOpts = append(m.diffOpts, withDetectRenamedFKs(enabled)) } } @@ -117,6 +117,7 @@ func NewAutoMigrator(db *bun.DB, opts ...AutoMigratorOption) (*AutoMigrator, err return nil, err } am.dbInspector = dbInspector + am.diffOpts = append(am.diffOpts, withTypeEquivalenceFunc(db.Dialect().(sqlschema.InspectorDialect).EquivalentType)) dbMigrator, err := sqlschema.NewMigrator(db) if err != nil { @@ -192,4 +193,4 @@ func (am *AutoMigrator) Run(ctx context.Context) error { return fmt.Errorf("auto migrate: %w", err) } return nil -} \ No newline at end of file +} diff --git a/migrate/diff.go b/migrate/diff.go index 3406bb76c..75036a797 100644 --- a/migrate/diff.go +++ b/migrate/diff.go @@ -10,7 +10,128 @@ import ( "github.com/uptrace/bun/migrate/sqlschema" ) -// changeset is a set of changes to the database definition. +// Diff calculates the diff between the current database schema and the target state. +// The result changeset is not sorted, i.e. the caller should resolve dependencies +// before applying the changes. +func (d *detector) Diff() *changeset { + targetTables := newTableSet(d.target.Tables...) + currentTables := newTableSet(d.current.Tables...) // keeps state (which models still need to be checked) + + // These table-sets record changes to the targetTables set. 
+ created := newTableSet() + renamed := newTableSet() + + // Discover CREATE/RENAME/DROP TABLE + addedTables := targetTables.Sub(currentTables) +AddedLoop: + for _, added := range addedTables.Values() { + removedTables := currentTables.Sub(targetTables) + for _, removed := range removedTables.Values() { + if d.canRename(removed, added) { + d.changes.Add(&RenameTable{ + Schema: removed.Schema, + OldName: removed.Name, + NewName: added.Name, + }) + + // Here we do not check for created / dropped columns, as well as column type changes, + // because it is only possible to detect a renamed table if its signature (see state.go) did not change. + d.detectColumnChanges(removed, added, false) + + // Update referenced table in all related FKs. + if d.detectRenamedFKs { + d.refMap.UpdateT(removed.T(), added.T()) + } + + renamed.Add(added) + + // Do not check this model further, we know it was renamed. + currentTables.Remove(removed.Name) + continue AddedLoop + } + } + // If a new table did not appear because of the rename operation, then it must've been created. + d.changes.Add(&CreateTable{ + Schema: added.Schema, + Name: added.Name, + Model: added.Model, + }) + created.Add(added) + } + + // Tables that aren't present anymore and weren't renamed or left untouched were deleted. + dropped := currentTables.Sub(targetTables) + for _, t := range dropped.Values() { + d.changes.Add(&DropTable{ + Schema: t.Schema, + Name: t.Name, + }) + } + + // Detect changes in existing tables that weren't renamed. + // + // TODO: here having State.Tables be a map[string]Table would be much more convenient. + // Then we can alse retire tableSet, or at least simplify it to a certain extent. 
+ curEx := currentTables.Sub(dropped) + tarEx := targetTables.Sub(created).Sub(renamed) + for _, target := range tarEx.Values() { + // TODO(dyma): step is redundant if we have map[string]Table + var current sqlschema.Table + for _, cur := range curEx.Values() { + if cur.Name == target.Name { + current = cur + break + } + } + d.detectColumnChanges(current, target, true) + } + + // Compare and update FKs ---------------- + currentFKs := make(map[sqlschema.FK]string) + for k, v := range d.current.FKs { + currentFKs[k] = v + } + + if d.detectRenamedFKs { + // Add RenameFK migrations for updated FKs. + for old, renamed := range d.refMap.Updated() { + newName := d.fkNameFunc(renamed) + d.changes.Add(&RenameConstraint{ + FK: renamed, // TODO: make sure this is applied after the table/columns are renamed + OldName: d.current.FKs[old], + NewName: newName, + }) + + // Add this FK to currentFKs to prevent it from firing in the two loops below. + currentFKs[renamed] = newName + delete(currentFKs, old) + } + } + + // Add AddFK migrations for newly added FKs. + for fk := range d.target.FKs { + if _, ok := currentFKs[fk]; !ok { + d.changes.Add(&AddForeignKey{ + FK: fk, + ConstraintName: d.fkNameFunc(fk), + }) + } + } + + // Add DropFK migrations for removed FKs. + for fk, fkName := range currentFKs { + if _, ok := d.target.FKs[fk]; !ok { + d.changes.Add(&DropConstraint{ + FK: fk, + ConstraintName: fkName, + }) + } + } + + return &d.changes +} + +// changeset is a set of changes to the database schema definition. 
type changeset struct { operations []Operation } @@ -120,22 +241,29 @@ func (c *changeset) ResolveDependencies() error { type diffOption func(*detectorConfig) -func fKNameFunc(f func(sqlschema.FK) string) diffOption { +func withFKNameFunc(f func(sqlschema.FK) string) diffOption { return func(cfg *detectorConfig) { cfg.FKNameFunc = f } } -func detectRenamedFKs(enabled bool) diffOption { +func withDetectRenamedFKs(enabled bool) diffOption { return func(cfg *detectorConfig) { cfg.DetectRenamedFKs = enabled } } +func withTypeEquivalenceFunc(f sqlschema.TypeEquivalenceFunc) diffOption { + return func(cfg *detectorConfig) { + cfg.EqType = f + } +} + // detectorConfig controls how differences in the model states are resolved. type detectorConfig struct { FKNameFunc func(sqlschema.FK) string DetectRenamedFKs bool + EqType sqlschema.TypeEquivalenceFunc } type detector struct { @@ -151,7 +279,13 @@ type detector struct { // fkNameFunc builds the name for created/renamed FK contraints. fkNameFunc func(sqlschema.FK) string - // detectRenemedFKS controls how FKs are treated when their references (table/column) are renamed. + // eqType determines column type equivalence. + // Default is direct comparison with '==' operator, which is inaccurate + // due to the existence of dialect-specific type aliases. The caller + // should pass a concrete InspectorDialect.EquuivalentType for robust comparison. + eqType sqlschema.TypeEquivalenceFunc + + // detectRenemedFKs controls how FKs are treated when their references (table/column) are renamed. 
detectRenamedFKs bool } @@ -159,6 +293,9 @@ func newDetector(got, want sqlschema.State, opts ...diffOption) *detector { cfg := &detectorConfig{ FKNameFunc: defaultFKName, DetectRenamedFKs: false, + EqType: func(c1, c2 sqlschema.Column) bool { + return c1.SQLType == c2.SQLType && c1.VarcharLen == c2.VarcharLen + }, } for _, opt := range opts { opt(cfg) @@ -175,150 +312,63 @@ func newDetector(got, want sqlschema.State, opts ...diffOption) *detector { refMap: sqlschema.NewRefMap(existingFKs...), fkNameFunc: cfg.FKNameFunc, detectRenamedFKs: cfg.DetectRenamedFKs, + eqType: cfg.EqType, } } -func (d *detector) Diff() *changeset { - // Discover CREATE/RENAME/DROP TABLE - targetTables := newTableSet(d.target.Tables...) - currentTables := newTableSet(d.current.Tables...) // keeps state (which models still need to be checked) - - // These table-sets record changes to the targetTables set. - created := newTableSet() - renamed := newTableSet() - - addedTables := targetTables.Sub(currentTables) -AddedLoop: - for _, added := range addedTables.Values() { - removedTables := currentTables.Sub(targetTables) - for _, removed := range removedTables.Values() { - if d.canRename(removed, added) { - d.changes.Add(&RenameTable{ - Schema: removed.Schema, - OldName: removed.Name, - NewName: added.Name, - }) - - // Here we do not check for created / dropped columns,as well as column type changes, - // because it is only possible to detect a renamed table if its signature (see state.go) did not change. - d.detectRenamedColumns(removed, added) - - // Update referenced table in all related FKs. - if d.detectRenamedFKs { - d.refMap.UpdateT(removed.T(), added.T()) - } - - renamed.Add(added) - - // Do not check this model further, we know it was renamed. - currentTables.Remove(removed.Name) - continue AddedLoop - } - } - // If a new table did not appear because of the rename operation, then it must've been created. 
- d.changes.Add(&CreateTable{ - Schema: added.Schema, - Name: added.Name, - Model: added.Model, - }) - created.Add(added) - } - - // Tables that aren't present anymore and weren't renamed or left untouched were deleted. - dropped := currentTables.Sub(targetTables) - for _, t := range dropped.Values() { - d.changes.Add(&DropTable{ - Schema: t.Schema, - Name: t.Name, - }) - } - - // Detect changes in existing tables that weren't renamed. - // - // TODO: here having State.Tables be a map[string]Table would be much more convenient. - // Then we can alse retire tableSet, or at least simplify it to a certain extent. - curEx := currentTables.Sub(dropped) - tarEx := targetTables.Sub(created).Sub(renamed) - for _, target := range tarEx.Values() { - // TODO(dyma): step is redundant if we have map[string]Table - var current sqlschema.Table - for _, cur := range curEx.Values() { - if cur.Name == target.Name { - current = cur - break - } - } - d.detectRenamedColumns(current, target) - } - - // Compare and update FKs ---------------- - currentFKs := make(map[sqlschema.FK]string) - for k, v := range d.current.FKs { - currentFKs[k] = v - } - - if d.detectRenamedFKs { - // Add RenameFK migrations for updated FKs. - for old, renamed := range d.refMap.Updated() { - newName := d.fkNameFunc(renamed) - d.changes.Add(&RenameConstraint{ - FK: renamed, // TODO: make sure this is applied after the table/columns are renamed - OldName: d.current.FKs[old], - NewName: newName, - }) - - // Add this FK to currentFKs to prevent it from firing in the two loops below. - currentFKs[renamed] = newName - delete(currentFKs, old) - } - } - - // Add AddFK migrations for newly added FKs. - for fk := range d.target.FKs { - if _, ok := currentFKs[fk]; !ok { - d.changes.Add(&AddForeignKey{ - FK: fk, - ConstraintName: d.fkNameFunc(fk), - }) - } - } - - // Add DropFK migrations for removed FKs. 
- for fk, fkName := range currentFKs { - if _, ok := d.target.FKs[fk]; !ok { - d.changes.Add(&DropConstraint{ - FK: fk, - ConstraintName: fkName, - }) - } - } +// canRename checks if t1 can be renamed to t2. +func (d *detector) canRename(t1, t2 sqlschema.Table) bool { + return t1.Schema == t2.Schema && sqlschema.EqualSignatures(t1, t2, d.equalColumns) +} - return &d.changes +func (d *detector) equalColumns(col1, col2 sqlschema.Column) bool { + return d.eqType(col1, col2) && + col1.DefaultValue == col2.DefaultValue && + col1.IsNullable == col2.IsNullable && + col1.IsAutoIncrement == col2.IsAutoIncrement && + col1.IsIdentity == col2.IsIdentity } -// canRename checks if t1 can be renamed to t2. -func (d detector) canRename(t1, t2 sqlschema.Table) bool { - return t1.Schema == t2.Schema && sqlschema.EqualSignatures(t1, t2) +func (d *detector) makeTargetColDef(current, target sqlschema.Column) sqlschema.Column { + // Avoid unneccessary type-change migrations if the types are equivalent. + if d.eqType(current, target) { + target.SQLType = current.SQLType + target.VarcharLen = current.VarcharLen + } + return target } -func (d *detector) detectRenamedColumns(current, added sqlschema.Table) { - for aName, aCol := range added.Columns { - // This column exists in the database, so it wasn't renamed - if _, ok := current.Columns[aName]; ok { +// detechColumnChanges finds renamed columns and, if checkType == true, columns with changed type. +func (d *detector) detectColumnChanges(current, target sqlschema.Table, checkType bool) { + for tName, tCol := range target.Columns { + // This column exists in the database, so it hasn't been renamed. + if cCol, ok := current.Columns[tName]; ok { + if checkType && !d.equalColumns(cCol, tCol) { + d.changes.Add(&ChangeColumnType{ + Schema: target.Schema, + Table: target.Name, + Column: tName, + From: cCol, + To: d.makeTargetColDef(cCol, tCol), + }) + // TODO: Can I delete (current.Column, tName) then? 
Because if it's type has changed, it will never match in the line 343. + } continue } + + // Find the column with the same type and the for cName, cCol := range current.Columns { - if aCol != cCol { + if !d.equalColumns(tCol, cCol) { continue } d.changes.Add(&RenameColumn{ - Schema: added.Schema, - Table: added.Name, + Schema: target.Schema, + Table: target.Name, OldName: cName, - NewName: aName, + NewName: tName, }) delete(current.Columns, cName) // no need to check this column again - d.refMap.UpdateC(sqlschema.C(added.Schema, added.Name, cName), aName) + d.refMap.UpdateC(sqlschema.C(target.Schema, target.Name, cName), tName) break } } diff --git a/migrate/operations.go b/migrate/operations.go index 14671f86a..3d37c8c76 100644 --- a/migrate/operations.go +++ b/migrate/operations.go @@ -215,6 +215,7 @@ type ChangeColumnType struct { } var _ Operation = (*ChangeColumnType)(nil) +var _ sqlschema.Operation = (*ChangeColumnType)(nil) func (op *ChangeColumnType) GetReverse() Operation { return &ChangeColumnType{ @@ -226,6 +227,13 @@ func (op *ChangeColumnType) GetReverse() Operation { } } +func (op ChangeColumnType) FQN() schema.FQN { + return schema.FQN{ + Schema: op.Schema, + Table: op.Table, + } +} + // noop is a migration that doesn't change the schema. type noop struct{} diff --git a/migrate/sqlschema/inspector.go b/migrate/sqlschema/inspector.go index 53fc95a0f..4c62289a3 100644 --- a/migrate/sqlschema/inspector.go +++ b/migrate/sqlschema/inspector.go @@ -3,6 +3,7 @@ package sqlschema import ( "context" "fmt" + "strconv" "strings" "github.com/uptrace/bun" @@ -12,6 +13,11 @@ import ( type InspectorDialect interface { schema.Dialect Inspector(db *bun.DB, excludeTables ...string) Inspector + + // EquivalentType returns true if col1 and co2 SQL types are equivalent, + // i.e. they might use dialect-specifc type aliases (SERIAL ~ SMALLINT) + // or specify the same VARCHAR length differently (VARCHAR(255) ~ VARCHAR). 
+ EquivalentType(Column, Column) bool } type Inspector interface { @@ -53,9 +59,15 @@ func (si *SchemaInspector) Inspect(ctx context.Context) (State, error) { for _, t := range si.tables.All() { columns := make(map[string]Column) for _, f := range t.Fields { + + sqlType, length, err := parseLen(f.CreateTableSQLType) + if err != nil { + return state, fmt.Errorf("parse length in %q: %w", f.CreateTableSQLType, err) + } columns[f.Name] = Column{ - SQLType: strings.ToLower(f.CreateTableSQLType), - DefaultValue: f.SQLDefault, + SQLType: strings.ToLower(sqlType), // TODO(dyma): maybe this is not necessary after Column.Eq() + VarcharLen: length, + DefaultValue: exprToLower(f.SQLDefault), IsPK: f.IsPK, IsNullable: !f.NotNull, IsAutoIncrement: f.AutoIncrement, @@ -95,3 +107,25 @@ func (si *SchemaInspector) Inspect(ctx context.Context) (State, error) { } return state, nil } + +func parseLen(typ string) (string, int, error) { + paren := strings.Index(typ, "(") + if paren == -1 { + return typ, 0, nil + } + length, err := strconv.Atoi(typ[paren+1 : len(typ)-1]) + if err != nil { + return typ, 0, err + } + return typ[:paren], length, nil +} + +// exprToLower converts string to lowercase, if it does not contain a string literal 'lit'. +// Use it to ensure that user-defined default values in the models are always comparable +// to those returned by the database inspector, regardless of the case convention in individual drivers. +func exprToLower(s string) string { + if strings.HasPrefix(s, "'") && strings.HasSuffix(s, "'") { + return s + } + return strings.ToLower(s) +} diff --git a/migrate/sqlschema/state.go b/migrate/sqlschema/state.go index 40fda8320..789145196 100644 --- a/migrate/sqlschema/state.go +++ b/migrate/sqlschema/state.go @@ -1,6 +1,7 @@ package sqlschema import ( + "fmt" "strings" "github.com/uptrace/bun/schema" @@ -26,17 +27,32 @@ func (t *Table) T() tFQN { // Column stores attributes of a database column. 
type Column struct { SQLType string + VarcharLen int DefaultValue string IsPK bool IsNullable bool IsAutoIncrement bool IsIdentity bool + // TODO: add Precision and Cardinality for timestamps/bit-strings/floats and arrays respectively. } +func (c *Column) AppendQuery(fmter schema.Formatter, b []byte) (_ []byte, err error) { + b = append(b, c.SQLType...) + if c.VarcharLen == 0 { + return b, nil + } + b = append(b, "("...) + b = append(b, fmt.Sprint(c.VarcharLen)...) + b = append(b, ")"...) + return b, nil +} + +type TypeEquivalenceFunc func(Column, Column) bool + // EqualSignatures determines if two tables have the same "signature". -func EqualSignatures(t1, t2 Table) bool { - sig1 := newSignature(t1) - sig2 := newSignature(t2) +func EqualSignatures(t1, t2 Table, eq TypeEquivalenceFunc) bool { + sig1 := newSignature(t1, eq) + sig2 := newSignature(t2, eq) return sig1.Equals(sig2) } @@ -47,11 +63,14 @@ type signature struct { // underlying stores the number of occurences for each unique column type. // It helps to account for the fact that a table might have multiple columns that have the same type. underlying map[Column]int + + eq TypeEquivalenceFunc } -func newSignature(t Table) signature { +func newSignature(t Table, eq TypeEquivalenceFunc) signature { s := signature{ underlying: make(map[Column]int), + eq: eq, } s.scan(t) return s @@ -59,9 +78,27 @@ func newSignature(t Table) signature { // scan iterates over table's field and counts occurrences of each unique column definition. func (s *signature) scan(t Table) { - for _, c := range t.Columns { - s.underlying[c]++ + for _, scanCol := range t.Columns { + // This is slightly more expensive than if the columns could be compared directly + // and we always did s.underlying[col]++, but we get type-equivalence in return. 
+ col, count := s.getCount(scanCol) + if count == 0 { + s.underlying[scanCol] = 1 + } else { + s.underlying[col]++ + } + } +} + +// getCount uses TypeEquivalenceFunc to find a column with the same (equivalent) SQL type +// and returns its count. Count 0 means there are no columns with of this type. +func (s *signature) getCount(keyCol Column) (key Column, count int) { + for col, cnt := range s.underlying { + if s.eq(col, keyCol) { + return col, cnt + } } + return keyCol, 0 } // Equals returns true if 2 signatures share an identical set of columns. @@ -69,8 +106,8 @@ func (s *signature) Equals(other signature) bool { if len(s.underlying) != len(other.underlying) { return false } - for k, count := range s.underlying { - if countOther, ok := other.underlying[k]; !ok || countOther != count { + for col, count := range s.underlying { + if _, countOther := other.getCount(col); countOther != count { return false } } @@ -252,8 +289,8 @@ func (r RefMap) UpdateT(oldT, newT tFQN) (n int) { return } -// UpdateC updates the column FQN in all FKs that depend on it, e.g. if a column is renamed, -// and so, only the column-name part of the FQN can be updated. Returns the number of updated entries. +// UpdateC updates the column FQN in all FKs that depend on it. E.g. if a column was renamed, +// only the column-name part of the FQN needs to be updated. Returns the number of updated entries. 
func (r RefMap) UpdateC(oldC cFQN, newColumn string) (n int) { for _, fk := range r { if ok, col := fk.dependsC(oldC); ok { From c3253a5c59b078607db9e216ddc11afdef546e05 Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Sun, 20 Oct 2024 11:17:46 +0200 Subject: [PATCH 18/55] fix: ignore case for type equivalence --- dialect/pgdialect/sqltype.go | 6 +++--- dialect/pgdialect/sqltype_test.go | 3 ++- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/dialect/pgdialect/sqltype.go b/dialect/pgdialect/sqltype.go index a595df5be..ce7f9c8b5 100644 --- a/dialect/pgdialect/sqltype.go +++ b/dialect/pgdialect/sqltype.go @@ -126,12 +126,12 @@ var ( ) func (d *Dialect) EquivalentType(col1, col2 sqlschema.Column) bool { - if col1.SQLType == col2.SQLType { + typ1, typ2 := strings.ToUpper(col1.SQLType), strings.ToUpper(col2.SQLType) + + if typ1 == typ2 { return checkVarcharLen(col1, col2, d.DefaultVarcharLen()) } - typ1, typ2 := strings.ToUpper(col1.SQLType), strings.ToUpper(col2.SQLType) - switch { case char.IsAlias(typ1) && char.IsAlias(typ2): return checkVarcharLen(col1, col2, d.DefaultVarcharLen()) diff --git a/dialect/pgdialect/sqltype_test.go b/dialect/pgdialect/sqltype_test.go index 5a51e1275..77cf1e153 100644 --- a/dialect/pgdialect/sqltype_test.go +++ b/dialect/pgdialect/sqltype_test.go @@ -16,7 +16,8 @@ func TestInspectorDialect_EquivalentType(t *testing.T) { typ1, typ2 string want bool }{ - {"text", "text", true}, // identical types + {"text", "text", true}, // identical types + {"bigint", "BIGINT", true}, // case-insensitive {sqltype.VarChar, pgTypeVarchar, true}, {sqltype.VarChar, pgTypeCharacterVarying, true}, From a525038f06d99c07967695ac43ca7ea426e28a65 Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Sun, 20 Oct 2024 11:19:57 +0200 Subject: [PATCH 19/55] test: use cmpColumns helper in cmpTables --- internal/dbtest/inspect_test.go | 36 +-------------------------------- 1 file changed, 1 insertion(+), 35 deletions(-) diff --git 
a/internal/dbtest/inspect_test.go b/internal/dbtest/inspect_test.go index a4c1bcf81..346be401e 100644 --- a/internal/dbtest/inspect_test.go +++ b/internal/dbtest/inspect_test.go @@ -301,41 +301,7 @@ func cmpTables(tb testing.TB, d sqlschema.InspectorDialect, want, got []sqlschem } } - var errs []string - for colName, wantCol := range wt.Columns { - errorf := func(format string, args ...interface{}) { - errs = append(errs, fmt.Sprintf("[%s.%s] "+format, append([]interface{}{tableName, colName}, args...)...)) - } - gotCol, ok := gt.Columns[colName] - if !ok { - errorf("column is missing") - continue - } - - if !d.EquivalentType(wantCol, gotCol) { - errorf("sql types are not equivalent:\n\t(+want)\t%s\n\t(-got)\t%s", formatType(wantCol), formatType(gotCol)) - } - - if wantCol.DefaultValue != gotCol.DefaultValue { - errorf("default values differ:\n\t(+want)\t%s\n\t(-got)\t%s", wantCol.DefaultValue, gotCol.DefaultValue) - } - - if wantCol.IsNullable != gotCol.IsNullable { - errorf("isNullable:\n\t(+want)\t%v\n\t(-got)\t%v", wantCol.IsNullable, gotCol.IsNullable) - } - - if wantCol.IsAutoIncrement != gotCol.IsAutoIncrement { - errorf("IsAutoIncrement:\n\t(+want)\t%s\n\t(-got)\t%s", wantCol.IsAutoIncrement, gotCol.IsAutoIncrement) - } - - if wantCol.IsIdentity != gotCol.IsIdentity { - errorf("IsIdentity:\n\t(+want)\t%s\n\t(-got)\t%s", wantCol.IsIdentity, gotCol.IsIdentity) - } - } - - for _, errMsg := range errs { - tb.Error(errMsg) - } + cmpColumns(tb, d, wt.Name, wt.Columns, gt.Columns) } } From dd837795c31490fd8816eec0e9833e79fafdda32 Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Sun, 20 Oct 2024 11:42:30 +0200 Subject: [PATCH 20/55] feat: add and drop IDENTITY FYI, identity is independent of primary keys and vice versa. It is only a method of generating new values for a column. 
Can be created and dropped in any order, as long as the column to add it to is declared NOT NULL --- dialect/pgdialect/alter_table.go | 29 ++++++++++++-------- dialect/pgdialect/dialect.go | 5 ++++ internal/dbtest/migrate_test.go | 47 +++++++++++++++++++++++++++++++- migrate/diff.go | 3 +- migrate/operations.go | 1 + 5 files changed, 71 insertions(+), 14 deletions(-) diff --git a/dialect/pgdialect/alter_table.go b/dialect/pgdialect/alter_table.go index 237e0bb78..d88974bb0 100644 --- a/dialect/pgdialect/alter_table.go +++ b/dialect/pgdialect/alter_table.go @@ -185,32 +185,31 @@ func (m *migrator) changeColumnType(fmter schema.Formatter, b []byte, colDef *mi return b, err } + // alterColumn never re-assigns err, so there is no need to check for err != nil after calling it var i int appendAlterColumn := func() { if i > 0 { - b = append(b, ", "...) + b = append(b, ","...) } b = append(b, " ALTER COLUMN "...) - b, err = bun.Ident(colDef.Column).AppendQuery(fmter, b) + b, _ = bun.Ident(colDef.Column).AppendQuery(fmter, b) i++ } got, want := colDef.From, colDef.To if want.SQLType != got.SQLType { - if appendAlterColumn(); err != nil { - return b, err - } + appendAlterColumn() b = append(b, " SET DATA TYPE "...) if b, err = want.AppendQuery(fmter, b); err != nil { return b, err } } + // Column must be declared NOT NULL before identity can be added. + // Although PG can resolve the order of operations itself, we make this explicit in the query. if want.IsNullable != got.IsNullable { - if appendAlterColumn(); err != nil { - return b, err - } + appendAlterColumn() if !want.IsNullable { b = append(b, " SET NOT NULL"...) } else { @@ -218,10 +217,18 @@ func (m *migrator) changeColumnType(fmter schema.Formatter, b []byte, colDef *mi } } - if want.DefaultValue != got.DefaultValue { - if appendAlterColumn(); err != nil { - return b, err + if want.IsIdentity != got.IsIdentity { + appendAlterColumn() + if !want.IsIdentity { + b = append(b, " DROP IDENTITY"...) 
+ } else { + b = append(b, " ADD"...) + b = appendGeneratedAsIdentity(b) } + } + + if want.DefaultValue != got.DefaultValue { + appendAlterColumn() if want.DefaultValue == "" { b = append(b, " DROP DEFAULT"...) } else { diff --git a/dialect/pgdialect/dialect.go b/dialect/pgdialect/dialect.go index 73355b6c0..e95b581fd 100644 --- a/dialect/pgdialect/dialect.go +++ b/dialect/pgdialect/dialect.go @@ -123,5 +123,10 @@ func (d *Dialect) AppendUint64(b []byte, n uint64) []byte { } func (d *Dialect) AppendSequence(b []byte, _ *schema.Table, _ *schema.Field) []byte { + return appendGeneratedAsIdentity(b) +} + +// appendGeneratedAsIdentity appends GENERATED BY DEFAULT AS IDENTITY to the column definition. +func appendGeneratedAsIdentity(b []byte) []byte { return append(b, " GENERATED BY DEFAULT AS IDENTITY"...) } diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index 5b82609eb..553114e93 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -9,6 +9,7 @@ import ( "github.com/stretchr/testify/require" "github.com/uptrace/bun" + "github.com/uptrace/bun/dialect/sqltype" "github.com/uptrace/bun/migrate" "github.com/uptrace/bun/migrate/sqlschema" ) @@ -206,6 +207,7 @@ func TestAutoMigrator_Run(t *testing.T) { {testForceRenameFK}, {testRenameColumnRenamesFK}, {testChangeColumnType_AutoCast}, + {testIdentity}, } testEachDB(t, func(t *testing.T, dbName string, db *bun.DB) { @@ -660,7 +662,50 @@ func testChangeColumnType_AutoCast(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) - // require.Equal(t, wantTables, state.Tables +} + +func testIdentity(t *testing.T, db *bun.DB) { + type TableBefore struct { + bun.BaseModel `bun:"table:table"` + A int64 `bun:",notnull,identity"` + B int64 + } + + type TableAfter struct { + bun.BaseModel `bun:"table:table"` + A int64 `bun:",notnull"` + B int64 `bun:",notnull,identity"` + } + + wantTables 
:= []sqlschema.Table{ + { + Schema: db.Dialect().DefaultSchema(), + Name: "table", + Columns: map[string]sqlschema.Column{ + "a": { + SQLType: sqltype.BigInt, + IsIdentity: false, // <- drop IDENTITY + }, + "b": { + SQLType: sqltype.BigInt, + IsIdentity: true, // <- add IDENTITY + }, + }, + }, + } + + ctx := context.Background() + inspect := inspectDbOrSkip(t, db) + mustResetModel(t, ctx, db, (*TableBefore)(nil)) + m := newAutoMigrator(t, db, migrate.WithModel((*TableAfter)(nil))) + + // Act + err := m.Run(ctx) + require.NoError(t, err) + + // Assert + state := inspect(ctx) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) } // // TODO: rewrite these tests into AutoMigrator tests, Diff should be moved to migrate/internal package diff --git a/migrate/diff.go b/migrate/diff.go index 75036a797..bb13723b4 100644 --- a/migrate/diff.go +++ b/migrate/diff.go @@ -356,9 +356,8 @@ func (d *detector) detectColumnChanges(current, target sqlschema.Table, checkTyp continue } - // Find the column with the same type and the for cName, cCol := range current.Columns { - if !d.equalColumns(tCol, cCol) { + if _, keep := target.Columns[cName]; keep || !d.equalColumns(tCol, cCol) { continue } d.changes.Add(&RenameColumn{ diff --git a/migrate/operations.go b/migrate/operations.go index 3d37c8c76..950dd5e2e 100644 --- a/migrate/operations.go +++ b/migrate/operations.go @@ -206,6 +206,7 @@ func (op *DropConstraint) GetReverse() Operation { } } +// Change column type. 
type ChangeColumnType struct { Schema string Table string From 3fdd5b8f635f849a74e78c665274609f75245b19 Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Mon, 21 Oct 2024 14:28:10 +0200 Subject: [PATCH 21/55] feat: add and drop columns - New operations: AddColumn and DropColumn - Fixed cmpColumns to find 'extra' columns - Refactored alter query builder in pgdialect --- dialect/pgdialect/alter_table.go | 64 +++++++++++++++++--------- internal/dbtest/inspect_test.go | 21 ++++++++- internal/dbtest/migrate_test.go | 46 +++++++++++++++++++ migrate/diff.go | 34 ++++++++++++-- migrate/operations.go | 79 ++++++++++++++++++++++++++++++++ 5 files changed, 217 insertions(+), 27 deletions(-) diff --git a/dialect/pgdialect/alter_table.go b/dialect/pgdialect/alter_table.go index d88974bb0..63e4fa38a 100644 --- a/dialect/pgdialect/alter_table.go +++ b/dialect/pgdialect/alter_table.go @@ -41,12 +41,14 @@ func (m *migrator) Apply(ctx context.Context, changes ...sqlschema.Operation) er switch change := change.(type) { case *migrate.CreateTable: + log.Printf("create table %q", change.Name) err = m.CreateTable(ctx, change.Model) if err != nil { return fmt.Errorf("apply changes: create table %s: %w", change.FQN(), err) } continue case *migrate.DropTable: + log.Printf("drop table %q", change.Name) err = m.DropTable(ctx, change.Schema, change.Name) if err != nil { return fmt.Errorf("apply changes: drop table %s: %w", change.FQN(), err) @@ -56,6 +58,10 @@ func (m *migrator) Apply(ctx context.Context, changes ...sqlschema.Operation) er b, err = m.renameTable(fmter, b, change) case *migrate.RenameColumn: b, err = m.renameColumn(fmter, b, change) + case *migrate.AddColumn: + b, err = m.addColumn(fmter, b, change) + case *migrate.DropColumn: + b, err = m.dropColumn(fmter, b, change) case *migrate.DropConstraint: b, err = m.dropContraint(fmter, b, change) case *migrate.AddForeignKey: @@ -87,9 +93,7 @@ func (m *migrator) renameTable(fmter schema.Formatter, b []byte, rename *migrate return b, 
err } b = append(b, " RENAME TO "...) - if b, err = bun.Ident(rename.NewName).AppendQuery(fmter, b); err != nil { - return b, err - } + b = fmter.AppendIdent(b, rename.NewName) return b, nil } @@ -101,14 +105,36 @@ func (m *migrator) renameColumn(fmter schema.Formatter, b []byte, rename *migrat } b = append(b, " RENAME COLUMN "...) - if b, err = bun.Ident(rename.OldName).AppendQuery(fmter, b); err != nil { - return b, err - } + b = fmter.AppendIdent(b, rename.OldName) b = append(b, " TO "...) - if b, err = bun.Ident(rename.NewName).AppendQuery(fmter, b); err != nil { - return b, err - } + b = fmter.AppendIdent(b, rename.NewName) + + return b, nil +} + +func (m *migrator) addColumn(fmter schema.Formatter, b []byte, add *migrate.AddColumn) (_ []byte, err error) { + b = append(b, "ALTER TABLE "...) + fqn := add.FQN() + b, _ = fqn.AppendQuery(fmter, b) + + b = append(b, " ADD COLUMN "...) + b = fmter.AppendName(b, add.Column) + b = append(b, " "...) + + b, _ = add.ColDef.AppendQuery(fmter, b) + + return b, nil +} + +func (m *migrator) dropColumn(fmter schema.Formatter, b []byte, drop *migrate.DropColumn) (_ []byte, err error) { + b = append(b, "ALTER TABLE "...) + fqn := drop.FQN() + b, _ = fqn.AppendQuery(fmter, b) + + b = append(b, " DROP COLUMN "...) + b = fmter.AppendName(b, drop.Column) + return b, nil } @@ -120,14 +146,11 @@ func (m *migrator) renameConstraint(fmter schema.Formatter, b []byte, rename *mi } b = append(b, " RENAME CONSTRAINT "...) - if b, err = bun.Ident(rename.OldName).AppendQuery(fmter, b); err != nil { - return b, err - } + b = fmter.AppendIdent(b, rename.OldName) b = append(b, " TO "...) - if b, err = bun.Ident(rename.NewName).AppendQuery(fmter, b); err != nil { - return b, err - } + b = fmter.AppendIdent(b, rename.NewName) + return b, nil } @@ -139,9 +162,8 @@ func (m *migrator) dropContraint(fmter schema.Formatter, b []byte, drop *migrate } b = append(b, " DROP CONSTRAINT "...) 
- if b, err = bun.Ident(drop.ConstraintName).AppendQuery(fmter, b); err != nil { - return b, err - } + b = fmter.AppendIdent(b, drop.ConstraintName) + return b, nil } @@ -153,9 +175,7 @@ func (m *migrator) addForeignKey(fmter schema.Formatter, b []byte, add *migrate. } b = append(b, " ADD CONSTRAINT "...) - if b, err = bun.Ident(add.ConstraintName).AppendQuery(fmter, b); err != nil { - return b, err - } + b = fmter.AppendIdent(b, add.ConstraintName) b = append(b, " FOREIGN KEY ("...) if b, err = add.FK.From.Column.Safe().AppendQuery(fmter, b); err != nil { @@ -192,7 +212,7 @@ func (m *migrator) changeColumnType(fmter schema.Formatter, b []byte, colDef *mi b = append(b, ","...) } b = append(b, " ALTER COLUMN "...) - b, _ = bun.Ident(colDef.Column).AppendQuery(fmter, b) + b = fmter.AppendIdent(b, colDef.Column) i++ } diff --git a/internal/dbtest/inspect_test.go b/internal/dbtest/inspect_test.go index 346be401e..bd758d1f9 100644 --- a/internal/dbtest/inspect_test.go +++ b/internal/dbtest/inspect_test.go @@ -3,6 +3,7 @@ package dbtest_test import ( "context" "fmt" + "strings" "testing" "time" @@ -306,14 +307,17 @@ func cmpTables(tb testing.TB, d sqlschema.InspectorDialect, want, got []sqlschem } func cmpColumns(tb testing.TB, d sqlschema.InspectorDialect, tableName string, want, got map[string]sqlschema.Column) { + tb.Helper() var errs []string + + var missing []string for colName, wantCol := range want { errorf := func(format string, args ...interface{}) { errs = append(errs, fmt.Sprintf("[%s.%s] "+format, append([]interface{}{tableName, colName}, args...)...)) } gotCol, ok := got[colName] if !ok { - errorf("column is missing") + missing = append(missing, colName) continue } @@ -338,6 +342,21 @@ func cmpColumns(tb testing.TB, d sqlschema.InspectorDialect, tableName string, w } } + if len(missing) > 0 { + errs = append(errs, fmt.Sprintf("%q has missing columns: %q", tableName, strings.Join(missing, "\", \""))) + } + + var extra []string + for colName := range got { + 
if _, ok := want[colName]; !ok { + extra = append(extra, colName) + } + } + + if len(extra) > 0 { + errs = append(errs, fmt.Sprintf("%q has extra columns: %q", tableName, strings.Join(extra, "\", \""))) + } + for _, errMsg := range errs { tb.Error(errMsg) } diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index 553114e93..bb424b9f7 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -208,6 +208,7 @@ func TestAutoMigrator_Run(t *testing.T) { {testRenameColumnRenamesFK}, {testChangeColumnType_AutoCast}, {testIdentity}, + {testAddDropColumn}, } testEachDB(t, func(t *testing.T, dbName string, db *bun.DB) { @@ -708,6 +709,51 @@ func testIdentity(t *testing.T, db *bun.DB) { cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) } +func testAddDropColumn(t *testing.T, db *bun.DB) { + type TableBefore struct { + bun.BaseModel `bun:"table:table"` + DoNotTouch string `bun:"do_not_touch"` + DropMe string `bun:"dropme"` + } + + type TableAfter struct { + bun.BaseModel `bun:"table:table"` + DoNotTouch string `bun:"do_not_touch"` + AddMe bool `bun:"addme"` + } + + wantTables := []sqlschema.Table{ + { + Schema: db.Dialect().DefaultSchema(), + Name: "table", + Columns: map[string]sqlschema.Column{ + "do_not_touch": { + SQLType: sqltype.VarChar, + IsNullable: true, + }, + "addme": { + SQLType: sqltype.Boolean, + IsNullable: true, + }, + }, + }, + } + + ctx := context.Background() + inspect := inspectDbOrSkip(t, db) + mustResetModel(t, ctx, db, (*TableBefore)(nil)) + m := newAutoMigrator(t, db, migrate.WithModel((*TableAfter)(nil))) + + // Act + err := m.Run(ctx) + require.NoError(t, err) + + // Assert + state := inspect(ctx) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) +} + + // // TODO: rewrite these tests into AutoMigrator tests, Diff should be moved to migrate/internal package // func TestDiff(t *testing.T) { // type Journal struct { diff --git 
a/migrate/diff.go b/migrate/diff.go index bb13723b4..1afa4dd96 100644 --- a/migrate/diff.go +++ b/migrate/diff.go @@ -340,8 +340,12 @@ func (d *detector) makeTargetColDef(current, target sqlschema.Column) sqlschema. // detechColumnChanges finds renamed columns and, if checkType == true, columns with changed type. func (d *detector) detectColumnChanges(current, target sqlschema.Table, checkType bool) { +ChangedRenamed: for tName, tCol := range target.Columns { - // This column exists in the database, so it hasn't been renamed. + + // This column exists in the database, so it hasn't been renamed, dropped, or added. + // Still, we should not delete(columns, thisColumn), because later we will need to + // check that we do not try to rename a column to an already a name that already exists. if cCol, ok := current.Columns[tName]; ok { if checkType && !d.equalColumns(cCol, tCol) { d.changes.Add(&ChangeColumnType{ @@ -351,13 +355,15 @@ func (d *detector) detectColumnChanges(current, target sqlschema.Table, checkTyp From: cCol, To: d.makeTargetColDef(cCol, tCol), }) - // TODO: Can I delete (current.Column, tName) then? Because if it's type has changed, it will never match in the line 343. } continue } + // Column tName does not exist in the database -- it's been either renamed or added. + // Find renamed columns first. for cName, cCol := range current.Columns { - if _, keep := target.Columns[cName]; keep || !d.equalColumns(tCol, cCol) { + // Cannot rename if a column with this name already exists or the types differ. 
+ if _, exists := current.Columns[tName]; exists || !d.equalColumns(tCol, cCol) { continue } d.changes.Add(&RenameColumn{ @@ -368,7 +374,27 @@ func (d *detector) detectColumnChanges(current, target sqlschema.Table, checkTyp }) delete(current.Columns, cName) // no need to check this column again d.refMap.UpdateC(sqlschema.C(target.Schema, target.Name, cName), tName) - break + + continue ChangedRenamed + } + + d.changes.Add(&AddColumn{ + Schema: target.Schema, + Table: target.Name, + Column: tName, + ColDef: tCol, + }) + } + + // Drop columns which do not exist in the target schema and were not renamed. + for cName, cCol := range current.Columns { + if _, keep := target.Columns[cName]; !keep { + d.changes.Add(&DropColumn{ + Schema: target.Schema, + Table: target.Name, + Column: cName, + ColDef: cCol, + }) } } } diff --git a/migrate/operations.go b/migrate/operations.go index 950dd5e2e..016910edc 100644 --- a/migrate/operations.go +++ b/migrate/operations.go @@ -120,6 +120,84 @@ func (op *RenameColumn) DependsOn(another Operation) bool { return ok && rt.Schema == op.Schema && rt.NewName == op.Table } +type AddColumn struct { + Schema string + Table string + Column string + ColDef sqlschema.Column +} + +var _ Operation = (*AddColumn)(nil) +var _ sqlschema.Operation = (*AddColumn)(nil) + +func (op *AddColumn) FQN() schema.FQN { + return schema.FQN{ + Schema: op.Schema, + Table: op.Table, + } +} + +func (op *AddColumn) GetReverse() Operation { + return &DropColumn{ + Schema: op.Schema, + Table: op.Table, + Column: op.Column, + } +} + +type DropColumn struct { + Schema string + Table string + Column string + ColDef sqlschema.Column +} + +var _ Operation = (*DropColumn)(nil) +var _ sqlschema.Operation = (*DropColumn)(nil) + +func (op *DropColumn) FQN() schema.FQN { + return schema.FQN{ + Schema: op.Schema, + Table: op.Table, + } +} + +func (op *DropColumn) GetReverse() Operation { + return &AddColumn{ + Schema: op.Schema, + Table: op.Table, + Column: op.Column, + ColDef: 
op.ColDef, + } +} + +func (op *DropColumn) DependsOn(another Operation) bool { + // TODO: refactor + if dc, ok := another.(*DropConstraint); ok { + var fCol bool + fCols := dc.FK.From.Column.Split() + for _, c := range fCols { + if c == op.Column { + fCol = true + break + } + } + + var tCol bool + tCols := dc.FK.To.Column.Split() + for _, c := range tCols { + if c == op.Column { + tCol = true + break + } + } + + return (dc.FK.From.Schema == op.Schema && dc.FK.From.Table == op.Table && fCol) || + (dc.FK.To.Schema == op.Schema && dc.FK.To.Table == op.Table && tCol) + } + return false +} + // RenameConstraint. type RenameConstraint struct { FK sqlschema.FK @@ -168,6 +246,7 @@ func (op *AddForeignKey) FQN() schema.FQN { func (op *AddForeignKey) DependsOn(another Operation) bool { switch another := another.(type) { case *RenameTable: + // TODO: provide some sort of "DependsOn" method for FK return another.Schema == op.FK.From.Schema && another.NewName == op.FK.From.Table case *CreateTable: return (another.Schema == op.FK.To.Schema && another.Name == op.FK.To.Table) || // either it's the referencing one From ebe1d5d6e44d474ca37ffa78669bdb765ad82773 Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Mon, 21 Oct 2024 15:07:15 +0200 Subject: [PATCH 22/55] refactor: remove superficial sqlschema.Operation interface Each dialect has to type-switch the operation before building a query for it. Since the migrator knows the concrete type of each operation, they are free to provide FQN in any form. Using schema.FQN field from the start simplifies the data structure later. Empty inteface is better that a superficial one. 
--- dialect/pgdialect/alter_table.go | 50 +++++-------- internal/dbtest/migrate_test.go | 60 +++++++++++++++ migrate/diff.go | 28 +++---- migrate/operations.go | 121 +++++++------------------------ migrate/sqlschema/migrator.go | 12 +-- 5 files changed, 119 insertions(+), 152 deletions(-) diff --git a/dialect/pgdialect/alter_table.go b/dialect/pgdialect/alter_table.go index 63e4fa38a..89bae8042 100644 --- a/dialect/pgdialect/alter_table.go +++ b/dialect/pgdialect/alter_table.go @@ -24,7 +24,7 @@ type migrator struct { var _ sqlschema.Migrator = (*migrator)(nil) -func (m *migrator) Apply(ctx context.Context, changes ...sqlschema.Operation) error { +func (m *migrator) Apply(ctx context.Context, changes ...interface{}) error { if len(changes) == 0 { return nil } @@ -41,17 +41,17 @@ func (m *migrator) Apply(ctx context.Context, changes ...sqlschema.Operation) er switch change := change.(type) { case *migrate.CreateTable: - log.Printf("create table %q", change.Name) + log.Printf("create table %q", change.FQN.Table) err = m.CreateTable(ctx, change.Model) if err != nil { - return fmt.Errorf("apply changes: create table %s: %w", change.FQN(), err) + return fmt.Errorf("apply changes: create table %s: %w", change.FQN, err) } continue case *migrate.DropTable: - log.Printf("drop table %q", change.Name) - err = m.DropTable(ctx, change.Schema, change.Name) + log.Printf("drop table %q", change.FQN.Table) + err = m.DropTable(ctx, change.FQN) if err != nil { - return fmt.Errorf("apply changes: drop table %s: %w", change.FQN(), err) + return fmt.Errorf("apply changes: drop table %s: %w", change.FQN, err) } continue case *migrate.RenameTable: @@ -88,35 +88,29 @@ func (m *migrator) Apply(ctx context.Context, changes ...sqlschema.Operation) er func (m *migrator) renameTable(fmter schema.Formatter, b []byte, rename *migrate.RenameTable) (_ []byte, err error) { b = append(b, "ALTER TABLE "...) 
- fqn := rename.FQN() - if b, err = fqn.AppendQuery(fmter, b); err != nil { - return b, err - } + b, _ = rename.FQN.AppendQuery(fmter, b) + b = append(b, " RENAME TO "...) - b = fmter.AppendIdent(b, rename.NewName) + b = fmter.AppendName(b, rename.NewName) return b, nil } func (m *migrator) renameColumn(fmter schema.Formatter, b []byte, rename *migrate.RenameColumn) (_ []byte, err error) { b = append(b, "ALTER TABLE "...) - fqn := rename.FQN() - if b, err = fqn.AppendQuery(fmter, b); err != nil { - return b, err - } + b, _ = rename.FQN.AppendQuery(fmter, b) b = append(b, " RENAME COLUMN "...) - b = fmter.AppendIdent(b, rename.OldName) + b = fmter.AppendName(b, rename.OldName) b = append(b, " TO "...) - b = fmter.AppendIdent(b, rename.NewName) + b = fmter.AppendName(b, rename.NewName) return b, nil } func (m *migrator) addColumn(fmter schema.Formatter, b []byte, add *migrate.AddColumn) (_ []byte, err error) { b = append(b, "ALTER TABLE "...) - fqn := add.FQN() - b, _ = fqn.AppendQuery(fmter, b) + b, _ = add.FQN.AppendQuery(fmter, b) b = append(b, " ADD COLUMN "...) b = fmter.AppendName(b, add.Column) @@ -129,8 +123,7 @@ func (m *migrator) addColumn(fmter schema.Formatter, b []byte, add *migrate.AddC func (m *migrator) dropColumn(fmter schema.Formatter, b []byte, drop *migrate.DropColumn) (_ []byte, err error) { b = append(b, "ALTER TABLE "...) - fqn := drop.FQN() - b, _ = fqn.AppendQuery(fmter, b) + b, _ = drop.FQN.AppendQuery(fmter, b) b = append(b, " DROP COLUMN "...) b = fmter.AppendName(b, drop.Column) @@ -146,10 +139,10 @@ func (m *migrator) renameConstraint(fmter schema.Formatter, b []byte, rename *mi } b = append(b, " RENAME CONSTRAINT "...) - b = fmter.AppendIdent(b, rename.OldName) + b = fmter.AppendName(b, rename.OldName) b = append(b, " TO "...) 
- b = fmter.AppendIdent(b, rename.NewName) + b = fmter.AppendName(b, rename.NewName) return b, nil } @@ -162,7 +155,7 @@ func (m *migrator) dropContraint(fmter schema.Formatter, b []byte, drop *migrate } b = append(b, " DROP CONSTRAINT "...) - b = fmter.AppendIdent(b, drop.ConstraintName) + b = fmter.AppendName(b, drop.ConstraintName) return b, nil } @@ -175,7 +168,7 @@ func (m *migrator) addForeignKey(fmter schema.Formatter, b []byte, add *migrate. } b = append(b, " ADD CONSTRAINT "...) - b = fmter.AppendIdent(b, add.ConstraintName) + b = fmter.AppendName(b, add.ConstraintName) b = append(b, " FOREIGN KEY ("...) if b, err = add.FK.From.Column.Safe().AppendQuery(fmter, b); err != nil { @@ -200,10 +193,7 @@ func (m *migrator) addForeignKey(fmter schema.Formatter, b []byte, add *migrate. func (m *migrator) changeColumnType(fmter schema.Formatter, b []byte, colDef *migrate.ChangeColumnType) (_ []byte, err error) { b = append(b, "ALTER TABLE "...) - fqn := colDef.FQN() - if b, err = fqn.AppendQuery(fmter, b); err != nil { - return b, err - } + b, _ = colDef.FQN.AppendQuery(fmter, b) // alterColumn never re-assigns err, so there is no need to check for err != nil after calling it var i int @@ -212,7 +202,7 @@ func (m *migrator) changeColumnType(fmter schema.Formatter, b []byte, colDef *mi b = append(b, ","...) } b = append(b, " ALTER COLUMN "...) 
- b = fmter.AppendIdent(b, colDef.Column) + b = fmter.AppendName(b, colDef.Column) i++ } diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index bb424b9f7..0259a498c 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -209,6 +209,7 @@ func TestAutoMigrator_Run(t *testing.T) { {testChangeColumnType_AutoCast}, {testIdentity}, {testAddDropColumn}, + // {testUnique}, } testEachDB(t, func(t *testing.T, dbName string, db *bun.DB) { @@ -753,6 +754,65 @@ func testAddDropColumn(t *testing.T, db *bun.DB) { cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) } +func testUnique(t *testing.T, db *bun.DB) { + type TableBefore struct { + bun.BaseModel `bun:"table:table"` + FirstName string `bun:"first_name,unique:full_name"` + LastName string `bun:"last_name,unique:full_name"` + Birthday string `bun:"birthday,unique"` + } + + type TableAfter struct { + bun.BaseModel `bun:"table:table"` + FirstName string `bun:"first_name,unique:full_name"` + MiddleName string `bun:"middle_name,unique:full_name"` // extend "full_name" unique group + LastName string `bun:"last_name,unique:full_name"` + Birthday string `bun:"birthday"` // doesn't have to be unique any more + Email string `bun:"email,unique"` // new column, unique + } + + wantTables := []sqlschema.Table{ + { + Schema: db.Dialect().DefaultSchema(), + Name: "table", + Columns: map[string]sqlschema.Column{ + "first_name": { + SQLType: sqltype.VarChar, + IsNullable: true, + }, + "middle_name": { + SQLType: sqltype.VarChar, + IsNullable: true, + }, + "last_name": { + SQLType: sqltype.VarChar, + IsNullable: true, + }, + "birthday": { + SQLType: sqltype.VarChar, + IsNullable: true, + }, + "email": { + SQLType: sqltype.VarChar, + IsNullable: true, + }, + }, + }, + } + + ctx := context.Background() + inspect := inspectDbOrSkip(t, db) + mustResetModel(t, ctx, db, (*TableBefore)(nil)) + m := newAutoMigrator(t, db, migrate.WithModel((*TableAfter)(nil))) 
+ + // Act + err := m.Run(ctx) + require.NoError(t, err) + + // Assert + state := inspect(ctx) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) +} // // TODO: rewrite these tests into AutoMigrator tests, Diff should be moved to migrate/internal package // func TestDiff(t *testing.T) { diff --git a/migrate/diff.go b/migrate/diff.go index 1afa4dd96..30bd60b77 100644 --- a/migrate/diff.go +++ b/migrate/diff.go @@ -8,6 +8,7 @@ import ( "github.com/uptrace/bun" "github.com/uptrace/bun/migrate/sqlschema" + "github.com/uptrace/bun/schema" ) // Diff calculates the diff between the current database schema and the target state. @@ -29,8 +30,7 @@ AddedLoop: for _, removed := range removedTables.Values() { if d.canRename(removed, added) { d.changes.Add(&RenameTable{ - Schema: removed.Schema, - OldName: removed.Name, + FQN: schema.FQN{removed.Schema, removed.Name}, NewName: added.Name, }) @@ -52,9 +52,8 @@ AddedLoop: } // If a new table did not appear because of the rename operation, then it must've been created. d.changes.Add(&CreateTable{ - Schema: added.Schema, - Name: added.Name, - Model: added.Model, + FQN: schema.FQN{added.Schema, added.Name}, + Model: added.Model, }) created.Add(added) } @@ -63,8 +62,7 @@ AddedLoop: dropped := currentTables.Sub(targetTables) for _, t := range dropped.Values() { d.changes.Add(&DropTable{ - Schema: t.Schema, - Name: t.Name, + FQN: schema.FQN{t.Schema, t.Name}, }) } @@ -144,9 +142,9 @@ func (c *changeset) Add(op ...Operation) { // Func creates a MigrationFunc that applies all operations all the changeset. func (c *changeset) Func(m sqlschema.Migrator) MigrationFunc { return func(ctx context.Context, db *bun.DB) error { - var operations []sqlschema.Operation + var operations []interface{} for _, op := range c.operations { - operations = append(operations, op.(sqlschema.Operation)) + operations = append(operations, op.(interface{})) } return m.Apply(ctx, operations...) 
} @@ -349,8 +347,7 @@ ChangedRenamed: if cCol, ok := current.Columns[tName]; ok { if checkType && !d.equalColumns(cCol, tCol) { d.changes.Add(&ChangeColumnType{ - Schema: target.Schema, - Table: target.Name, + FQN: schema.FQN{target.Schema, target.Name}, Column: tName, From: cCol, To: d.makeTargetColDef(cCol, tCol), @@ -367,8 +364,7 @@ ChangedRenamed: continue } d.changes.Add(&RenameColumn{ - Schema: target.Schema, - Table: target.Name, + FQN: schema.FQN{target.Schema, target.Name}, OldName: cName, NewName: tName, }) @@ -379,8 +375,7 @@ ChangedRenamed: } d.changes.Add(&AddColumn{ - Schema: target.Schema, - Table: target.Name, + FQN: schema.FQN{target.Schema, target.Name}, Column: tName, ColDef: tCol, }) @@ -390,8 +385,7 @@ ChangedRenamed: for cName, cCol := range current.Columns { if _, keep := target.Columns[cName]; !keep { d.changes.Add(&DropColumn{ - Schema: target.Schema, - Table: target.Name, + FQN: schema.FQN{target.Schema, target.Name}, Column: cName, ColDef: cCol, }) diff --git a/migrate/operations.go b/migrate/operations.go index 016910edc..4b3958b5d 100644 --- a/migrate/operations.go +++ b/migrate/operations.go @@ -13,46 +13,27 @@ type Operation interface { // CreateTable type CreateTable struct { - Schema string - Name string - Model interface{} + FQN schema.FQN + Model interface{} } var _ Operation = (*CreateTable)(nil) -func (op *CreateTable) FQN() schema.FQN { - return schema.FQN{ - Schema: op.Schema, - Table: op.Name, - } -} - func (op *CreateTable) GetReverse() Operation { - return &DropTable{ - Schema: op.Schema, - Name: op.Name, - } + return &DropTable{FQN: op.FQN} } type DropTable struct { - Schema string - Name string + FQN schema.FQN } var _ Operation = (*DropTable)(nil) -func (op *DropTable) FQN() schema.FQN { - return schema.FQN{ - Schema: op.Schema, - Table: op.Name, - } -} - func (op *DropTable) DependsOn(another Operation) bool { d, ok := another.(*DropConstraint) // - return ok && ((d.FK.From.Schema == op.Schema && d.FK.From.Table == 
op.Name) || - (d.FK.To.Schema == op.Schema && d.FK.To.Table == op.Name)) + return ok && ((d.FK.From.Schema == op.FQN.Schema && d.FK.From.Table == op.FQN.Table) || + (d.FK.To.Schema == op.FQN.Schema && d.FK.To.Table == op.FQN.Table)) } // GetReverse for a DropTable returns a no-op migration. Logically, CreateTable is the reverse, @@ -65,51 +46,31 @@ func (op *DropTable) GetReverse() Operation { } type RenameTable struct { - Schema string - OldName string + FQN schema.FQN NewName string } var _ Operation = (*RenameTable)(nil) -var _ sqlschema.Operation = (*RenameTable)(nil) - -func (op *RenameTable) FQN() schema.FQN { - return schema.FQN{ - Schema: op.Schema, - Table: op.OldName, - } -} func (op *RenameTable) GetReverse() Operation { return &RenameTable{ - Schema: op.Schema, - OldName: op.NewName, - NewName: op.OldName, + FQN: schema.FQN{op.FQN.Schema, op.NewName}, + NewName: op.FQN.Table, } } // RenameColumn. type RenameColumn struct { - Schema string - Table string + FQN schema.FQN OldName string NewName string } var _ Operation = (*RenameColumn)(nil) -var _ sqlschema.Operation = (*RenameColumn)(nil) - -func (op *RenameColumn) FQN() schema.FQN { - return schema.FQN{ - Schema: op.Schema, - Table: op.Table, - } -} func (op *RenameColumn) GetReverse() Operation { return &RenameColumn{ - Schema: op.Schema, - Table: op.Table, + FQN: op.FQN, OldName: op.NewName, NewName: op.OldName, } @@ -117,55 +78,36 @@ func (op *RenameColumn) GetReverse() Operation { func (op *RenameColumn) DependsOn(another Operation) bool { rt, ok := another.(*RenameTable) - return ok && rt.Schema == op.Schema && rt.NewName == op.Table + return ok && rt.FQN.Schema == op.FQN.Schema && rt.NewName == op.FQN.Table } type AddColumn struct { - Schema string - Table string + FQN schema.FQN Column string ColDef sqlschema.Column } var _ Operation = (*AddColumn)(nil) -var _ sqlschema.Operation = (*AddColumn)(nil) - -func (op *AddColumn) FQN() schema.FQN { - return schema.FQN{ - Schema: op.Schema, - Table: 
op.Table, - } -} func (op *AddColumn) GetReverse() Operation { return &DropColumn{ - Schema: op.Schema, - Table: op.Table, + FQN: op.FQN, Column: op.Column, + ColDef: op.ColDef, } } type DropColumn struct { - Schema string - Table string + FQN schema.FQN Column string ColDef sqlschema.Column } var _ Operation = (*DropColumn)(nil) -var _ sqlschema.Operation = (*DropColumn)(nil) - -func (op *DropColumn) FQN() schema.FQN { - return schema.FQN{ - Schema: op.Schema, - Table: op.Table, - } -} func (op *DropColumn) GetReverse() Operation { return &AddColumn{ - Schema: op.Schema, - Table: op.Table, + FQN: op.FQN, Column: op.Column, ColDef: op.ColDef, } @@ -192,8 +134,8 @@ func (op *DropColumn) DependsOn(another Operation) bool { } } - return (dc.FK.From.Schema == op.Schema && dc.FK.From.Table == op.Table && fCol) || - (dc.FK.To.Schema == op.Schema && dc.FK.To.Table == op.Table && tCol) + return (dc.FK.From.Schema == op.FQN.Schema && dc.FK.From.Table == op.FQN.Table && fCol) || + (dc.FK.To.Schema == op.FQN.Schema && dc.FK.To.Table == op.FQN.Table && tCol) } return false } @@ -206,7 +148,6 @@ type RenameConstraint struct { } var _ Operation = (*RenameConstraint)(nil) -var _ sqlschema.Operation = (*RenameConstraint)(nil) func (op *RenameConstraint) FQN() schema.FQN { return schema.FQN{ @@ -217,7 +158,7 @@ func (op *RenameConstraint) FQN() schema.FQN { func (op *RenameConstraint) DependsOn(another Operation) bool { rt, ok := another.(*RenameTable) - return ok && rt.Schema == op.FK.From.Schema && rt.NewName == op.FK.From.Table + return ok && rt.FQN.Schema == op.FK.From.Schema && rt.NewName == op.FK.From.Table } func (op *RenameConstraint) GetReverse() Operation { @@ -234,7 +175,6 @@ type AddForeignKey struct { } var _ Operation = (*AddForeignKey)(nil) -var _ sqlschema.Operation = (*AddForeignKey)(nil) func (op *AddForeignKey) FQN() schema.FQN { return schema.FQN{ @@ -247,10 +187,10 @@ func (op *AddForeignKey) DependsOn(another Operation) bool { switch another := another.(type) 
{ case *RenameTable: // TODO: provide some sort of "DependsOn" method for FK - return another.Schema == op.FK.From.Schema && another.NewName == op.FK.From.Table + return another.FQN.Schema == op.FK.From.Schema && another.NewName == op.FK.From.Table case *CreateTable: - return (another.Schema == op.FK.To.Schema && another.Name == op.FK.To.Table) || // either it's the referencing one - (another.Schema == op.FK.From.Schema && another.Name == op.FK.From.Table) // or the one being referenced + return (another.FQN.Schema == op.FK.To.Schema && another.FQN.Table == op.FK.To.Table) || // either it's the referencing one + (another.FQN.Schema == op.FK.From.Schema && another.FQN.Table == op.FK.From.Table) // or the one being referenced } return false } @@ -269,7 +209,6 @@ type DropConstraint struct { } var _ Operation = (*DropConstraint)(nil) -var _ sqlschema.Operation = (*DropConstraint)(nil) func (op *DropConstraint) FQN() schema.FQN { return schema.FQN{ @@ -287,33 +226,23 @@ func (op *DropConstraint) GetReverse() Operation { // Change column type. type ChangeColumnType struct { - Schema string - Table string + FQN schema.FQN Column string From sqlschema.Column To sqlschema.Column } var _ Operation = (*ChangeColumnType)(nil) -var _ sqlschema.Operation = (*ChangeColumnType)(nil) func (op *ChangeColumnType) GetReverse() Operation { return &ChangeColumnType{ - Schema: op.Schema, - Table: op.Table, + FQN: op.FQN, Column: op.Column, From: op.To, To: op.From, } } -func (op ChangeColumnType) FQN() schema.FQN { - return schema.FQN{ - Schema: op.Schema, - Table: op.Table, - } -} - // noop is a migration that doesn't change the schema. 
type noop struct{} diff --git a/migrate/sqlschema/migrator.go b/migrate/sqlschema/migrator.go index 6087a8448..e4dc5a598 100644 --- a/migrate/sqlschema/migrator.go +++ b/migrate/sqlschema/migrator.go @@ -14,7 +14,7 @@ type MigratorDialect interface { } type Migrator interface { - Apply(ctx context.Context, changes ...Operation) error + Apply(ctx context.Context, changes ...interface{}) error } // migrator is a dialect-agnostic wrapper for sqlschema.MigratorDialect. @@ -49,16 +49,10 @@ func (m *BaseMigrator) CreateTable(ctx context.Context, model interface{}) error return nil } -func (m *BaseMigrator) DropTable(ctx context.Context, schema, name string) error { - _, err := m.db.NewDropTable().TableExpr("?.?", bun.Ident(schema), bun.Ident(name)).Exec(ctx) +func (m *BaseMigrator) DropTable(ctx context.Context, fqn schema.FQN) error { + _, err := m.db.NewDropTable().TableExpr(fqn.String()).Exec(ctx) if err != nil { return err } return nil } - -// Operation is a helper interface each migrate.Operation must implement -// so an not to handle this in every dialect separately. -type Operation interface { - FQN() schema.FQN -} From fed6012d177e55b8320b31ef37fc02a0cbf0b9f5 Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Tue, 22 Oct 2024 14:49:25 +0200 Subject: [PATCH 23/55] fix: rename column only if the name does not exist in 'target' --- migrate/diff.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/migrate/diff.go b/migrate/diff.go index 30bd60b77..341329737 100644 --- a/migrate/diff.go +++ b/migrate/diff.go @@ -360,7 +360,7 @@ ChangedRenamed: // Find renamed columns first. for cName, cCol := range current.Columns { // Cannot rename if a column with this name already exists or the types differ. 
- if _, exists := current.Columns[tName]; exists || !d.equalColumns(tCol, cCol) { + if _, exists := target.Columns[cName]; exists || !d.equalColumns(tCol, cCol) { continue } d.changes.Add(&RenameColumn{ From 3c4d5d2c47be4652fb9b5cf1c6bd7b6c0a437287 Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Tue, 22 Oct 2024 19:42:30 +0200 Subject: [PATCH 24/55] feat: support UNIQUE constraints Database inspector no longer mixes UNIQUE constraints with other constraint types, such as PK and FKs. While database drivers may require some FK and PK fields to be unique, these constraints are in practice distinct UNIQUE constraints and should not be synonymous to PK/FK constraints. Changes in UNIQUE constraints can be detected in any tables that haven't been dropped. --- dialect/pgdialect/alter_table.go | 33 ++++++++-- dialect/pgdialect/inspector.go | 102 +++++++----------------------- internal/dbtest/inspect_test.go | 67 ++++++++++++++++++-- internal/dbtest/migrate_test.go | 103 +++++++++++++++++++++++++++++-- migrate/diff.go | 43 +++++++++++-- migrate/operations.go | 58 +++++++++++++++++ migrate/sqlschema/inspector.go | 28 +++++++-- migrate/sqlschema/state.go | 50 +++++++++++---- 8 files changed, 369 insertions(+), 115 deletions(-) diff --git a/dialect/pgdialect/alter_table.go b/dialect/pgdialect/alter_table.go index 89bae8042..821191c53 100644 --- a/dialect/pgdialect/alter_table.go +++ b/dialect/pgdialect/alter_table.go @@ -62,10 +62,14 @@ func (m *migrator) Apply(ctx context.Context, changes ...interface{}) error { b, err = m.addColumn(fmter, b, change) case *migrate.DropColumn: b, err = m.dropColumn(fmter, b, change) - case *migrate.DropConstraint: - b, err = m.dropContraint(fmter, b, change) case *migrate.AddForeignKey: b, err = m.addForeignKey(fmter, b, change) + case *migrate.AddUniqueConstraint: + b, err = m.addUnique(fmter, b, change) + case *migrate.DropUniqueConstraint: + b, err = m.dropConstraint(fmter, b, change.FQN, change.Unique.Name) + case *migrate.DropConstraint: 
+ b, err = m.dropConstraint(fmter, b, change.FQN(), change.ConstraintName) case *migrate.RenameConstraint: b, err = m.renameConstraint(fmter, b, change) case *migrate.ChangeColumnType: @@ -147,15 +151,34 @@ func (m *migrator) renameConstraint(fmter schema.Formatter, b []byte, rename *mi return b, nil } -func (m *migrator) dropContraint(fmter schema.Formatter, b []byte, drop *migrate.DropConstraint) (_ []byte, err error) { +func (m *migrator) addUnique(fmter schema.Formatter, b []byte, change *migrate.AddUniqueConstraint) (_ []byte, err error) { + b = append(b, "ALTER TABLE "...) + if b, err = change.FQN.AppendQuery(fmter, b); err != nil { + return b, err + } + + b = append(b, " ADD CONSTRAINT "...) + if change.Unique.Name != "" { + b = fmter.AppendName(b, change.Unique.Name) + } else { + // Default naming scheme for unique constraints in Postgres is __key + b = fmter.AppendName(b, fmt.Sprintf("%s_%s_key", change.FQN.Table, change.Unique.Columns)) + } + b = append(b, " UNIQUE ("...) + b, _ = change.Unique.Columns.Safe().AppendQuery(fmter, b) + b = append(b, ")"...) + + return b, nil +} + +func (m *migrator) dropConstraint(fmter schema.Formatter, b []byte, fqn schema.FQN, name string) (_ []byte, err error) { b = append(b, "ALTER TABLE "...) - fqn := drop.FQN() if b, err = fqn.AppendQuery(fmter, b); err != nil { return b, err } b = append(b, " DROP CONSTRAINT "...) 
- b = fmter.AppendName(b, drop.ConstraintName) + b = fmter.AppendName(b, name) return b, nil } diff --git a/dialect/pgdialect/inspector.go b/dialect/pgdialect/inspector.go index c95e95cfb..dc4ea2707 100644 --- a/dialect/pgdialect/inspector.go +++ b/dialect/pgdialect/inspector.go @@ -48,7 +48,10 @@ func (in *Inspector) Inspect(ctx context.Context) (sqlschema.State, error) { if err := in.db.NewRaw(sqlInspectColumnsQuery, table.Schema, table.Name).Scan(ctx, &columns); err != nil { return state, err } + colDefs := make(map[string]sqlschema.Column) + uniqueGroups := make(map[string][]string) + for _, c := range columns { def := c.Default if c.IsSerial || c.IsIdentity { @@ -66,12 +69,25 @@ func (in *Inspector) Inspect(ctx context.Context) (sqlschema.State, error) { IsAutoIncrement: c.IsSerial, IsIdentity: c.IsIdentity, } + + for _, group := range c.UniqueGroups { + uniqueGroups[group] = append(uniqueGroups[group], c.Name) + } + } + + var unique []sqlschema.Unique + for name, columns := range uniqueGroups { + unique = append(unique, sqlschema.Unique{ + Name: name, + Columns: sqlschema.NewComposite(columns...), + }) } state.Tables = append(state.Tables, sqlschema.Table{ - Schema: table.Schema, - Name: table.Name, - Columns: colDefs, + Schema: table.Schema, + Name: table.Name, + Columns: colDefs, + UniqueContraints: unique, }) } @@ -106,8 +122,7 @@ type InformationSchemaColumn struct { IndentityType string `bun:"identity_type"` IsSerial bool `bun:"is_serial"` IsNullable bool `bun:"is_nullable"` - IsUnique bool `bun:"is_unique"` - UniqueGroup []string `bun:"unique_group,array"` + UniqueGroups []string `bun:"unique_groups,array"` } type ForeignKey struct { @@ -156,8 +171,7 @@ SELECT "c".column_default = format('nextval(''%s_%s_seq''::regclass)', "c".table_name, "c".column_name) AS is_serial, COALESCE("c".identity_type, '') AS identity_type, "c".is_nullable = 'YES' AS is_nullable, - 'u' = ANY("c".constraint_type) AS is_unique, - "c"."constraint_name" AS unique_group + 
"c"."unique_groups" AS unique_groups FROM ( SELECT "table_schema", @@ -170,7 +184,7 @@ FROM ( "c".is_nullable, att.array_dims, att.identity_type, - att."constraint_name", + att."unique_groups", att."constraint_type" FROM information_schema.columns "c" LEFT JOIN ( @@ -180,7 +194,7 @@ FROM ( "c".attname AS "column_name", "c".attndims AS array_dims, "c".attidentity AS identity_type, - ARRAY_AGG(con.conname) AS "constraint_name", + ARRAY_AGG(con.conname) FILTER (WHERE con.contype = 'u') AS "unique_groups", ARRAY_AGG(con.contype) AS "constraint_type" FROM ( SELECT @@ -200,76 +214,6 @@ FROM ( ) "c" WHERE "table_schema" = ? AND "table_name" = ? ORDER BY "table_schema", "table_name", "column_name" -` - - // sqlInspectSchema retrieves column type definitions for all user-defined tables. - // Other relations, such as views and indices, as well as Posgres's internal relations are excluded. - // - // TODO: implement scanning ORM relations for RawQuery too, so that one could scan this query directly to InformationSchemaTable. 
- sqlInspectSchema = ` -SELECT - "t"."table_schema", - "t".table_name, - "c".column_name, - "c".data_type, - "c".character_maximum_length::integer AS varchar_len, - "c".data_type = 'ARRAY' AS is_array, - COALESCE("c".array_dims, 0) AS array_dims, - CASE - WHEN "c".column_default ~ '^''.*''::.*$' THEN substring("c".column_default FROM '^''(.*)''::.*$') - ELSE "c".column_default - END AS "default", - "c".constraint_type = 'p' AS is_pk, - "c".is_identity = 'YES' AS is_identity, - "c".column_default = format('nextval(''%s_%s_seq''::regclass)', "t".table_name, "c".column_name) AS is_serial, - COALESCE("c".identity_type, '') AS identity_type, - "c".is_nullable = 'YES' AS is_nullable, - "c".constraint_type = 'u' AS is_unique, - "c"."constraint_name" AS unique_group -FROM information_schema.tables "t" - LEFT JOIN ( - SELECT - "table_schema", - "table_name", - "column_name", - "c".data_type, - "c".character_maximum_length, - "c".column_default, - "c".is_identity, - "c".is_nullable, - att.array_dims, - att.identity_type, - att."constraint_name", - att."constraint_type" - FROM information_schema.columns "c" - LEFT JOIN ( - SELECT - s.nspname AS table_schema, - "t".relname AS "table_name", - "c".attname AS "column_name", - "c".attndims AS array_dims, - "c".attidentity AS identity_type, - con.conname AS "constraint_name", - con.contype AS "constraint_type" - FROM ( - SELECT - conname, - contype, - connamespace, - conrelid, - conrelid AS attrelid, - UNNEST(conkey) AS attnum - FROM pg_constraint - ) con - LEFT JOIN pg_attribute "c" USING (attrelid, attnum) - LEFT JOIN pg_namespace s ON s.oid = con.connamespace - LEFT JOIN pg_class "t" ON "t".oid = con.conrelid - ) att USING (table_schema, "table_name", "column_name") - ) "c" USING (table_schema, "table_name") -WHERE table_type = 'BASE TABLE' - AND table_schema <> 'information_schema' - AND table_schema NOT LIKE 'pg_%' -ORDER BY table_schema, table_name ` // sqlInspectForeignKeys get FK definitions for user-defined tables. 
diff --git a/internal/dbtest/inspect_test.go b/internal/dbtest/inspect_test.go index bd758d1f9..6d3124261 100644 --- a/internal/dbtest/inspect_test.go +++ b/internal/dbtest/inspect_test.go @@ -42,7 +42,7 @@ type Office struct { type Publisher struct { ID string `bun:"publisher_id,pk,default:gen_random_uuid(),unique:office_fk"` - Name string `bun:"publisher_name,unique,notnull,unique:office_fk"` + Name string `bun:"publisher_name,notnull,unique:office_fk"` CreatedAt time.Time `bun:"created_at,default:current_timestamp"` // Writers write articles for this publisher. @@ -63,8 +63,9 @@ type PublisherToJournalist struct { type Journalist struct { bun.BaseModel `bun:"table:authors"` ID int `bun:"author_id,pk,identity"` - FirstName string `bun:",notnull"` - LastName string + FirstName string `bun:"first_name,notnull,unique:full_name"` + LastName string `bun:"last_name,notnull,unique:full_name"` + Email string `bun:"email,notnull,unique"` // Articles that this journalist has written. Articles []*Article `bun:"rel:has-many,join:author_id=author_id"` @@ -171,6 +172,9 @@ func TestDatabaseInspector_Inspect(t *testing.T) { SQLType: "bigint", }, }, + UniqueContraints: []sqlschema.Unique{ + {Columns: sqlschema.NewComposite("editor", "title")}, + }, }, { Schema: defaultSchema, @@ -185,10 +189,16 @@ func TestDatabaseInspector_Inspect(t *testing.T) { SQLType: sqltype.VarChar, }, "last_name": { - SQLType: sqltype.VarChar, - IsNullable: true, + SQLType: sqltype.VarChar, + }, + "email": { + SQLType: sqltype.VarChar, }, }, + UniqueContraints: []sqlschema.Unique{ + {Columns: sqlschema.NewComposite("first_name", "last_name")}, + {Columns: sqlschema.NewComposite("email")}, + }, }, { Schema: defaultSchema, @@ -222,6 +232,9 @@ func TestDatabaseInspector_Inspect(t *testing.T) { IsNullable: true, }, }, + UniqueContraints: []sqlschema.Unique{ + {Columns: sqlschema.NewComposite("publisher_id", "publisher_name")}, + }, }, } @@ -268,7 +281,7 @@ func mustCreateTableWithFKs(tb testing.TB, ctx 
context.Context, db *bun.DB, mode for _, model := range models { create := db.NewCreateTable().Model(model).WithForeignKeys() _, err := create.Exec(ctx) - require.NoError(tb, err, "must create table %q:", create.GetTableName()) + require.NoError(tb, err, "arrange: must create table %q:", create.GetTableName()) mustDropTableOnCleanup(tb, ctx, db, model) } } @@ -303,9 +316,11 @@ func cmpTables(tb testing.TB, d sqlschema.InspectorDialect, want, got []sqlschem } cmpColumns(tb, d, wt.Name, wt.Columns, gt.Columns) + cmpConstraints(tb, wt, gt) } } +// cmpColumns compares that column definitions on the tables are func cmpColumns(tb testing.TB, d sqlschema.InspectorDialect, tableName string, want, got map[string]sqlschema.Column) { tb.Helper() var errs []string @@ -362,6 +377,20 @@ func cmpColumns(tb testing.TB, d sqlschema.InspectorDialect, tableName string, w } } +// cmpConstraints compares constraints defined on the table with the expected ones. +func cmpConstraints(tb testing.TB, want, got sqlschema.Table) { + tb.Helper() + + // Only keep columns included in each unique constraint for comparison. 
+ stripNames := func(uniques []sqlschema.Unique) (res []string) { + for _, u := range uniques { + res = append(res, u.Columns.String()) + } + return + } + require.ElementsMatch(tb, stripNames(want.UniqueContraints), stripNames(got.UniqueContraints), "table %q does not have expected unique constraints (listA=want, listB=got)", want.Name) +} + func tableNames(tables []sqlschema.Table) (names []string) { for i := range tables { names = append(names, tables[i].Name) @@ -441,5 +470,31 @@ func TestSchemaInspector_Inspect(t *testing.T) { require.Len(t, got.Tables, 1) cmpColumns(t, dialect.(sqlschema.InspectorDialect), "model", want, got.Tables[0].Columns) }) + + t.Run("inspect unique constraints", func(t *testing.T) { + type Model struct { + ID string `bun:",unique"` + FirstName string `bun:"first_name,unique:full_name"` + LastName string `bun:"last_name,unique:full_name"` + } + + tables := schema.NewTables(dialect) + tables.Register((*Model)(nil)) + inspector := sqlschema.NewSchemaInspector(tables) + + want := sqlschema.Table{ + Name: "models", + UniqueContraints: []sqlschema.Unique{ + {Columns: sqlschema.NewComposite("id")}, + {Name: "full_name", Columns: sqlschema.NewComposite("first_name", "last_name")}, + }, + } + + got, err := inspector.Inspect(context.Background()) + require.NoError(t, err) + + require.Len(t, got.Tables, 1) + cmpConstraints(t, want, got.Tables[0]) + }) }) } diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index 0259a498c..47d28b0e8 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -209,7 +209,8 @@ func TestAutoMigrator_Run(t *testing.T) { {testChangeColumnType_AutoCast}, {testIdentity}, {testAddDropColumn}, - // {testUnique}, + {testUnique}, + {testUniqueRenamedTable}, } testEachDB(t, func(t *testing.T, dbName string, db *bun.DB) { @@ -542,16 +543,16 @@ func testRenamedColumns(t *testing.T, db *bun.DB) { func testRenameColumnRenamesFK(t *testing.T, db *bun.DB) { type TennantBefore 
struct { bun.BaseModel `bun:"table:tennants"` - ID int64 `bun:",pk,identity"` + ID int64 `bun:"id,pk,identity"` Apartment int8 - NeighbourID int64 + NeighbourID int64 `bun:"neighbour_id"` Neighbour *TennantBefore `bun:"rel:has-one,join:neighbour_id=id"` } type TennantAfter struct { bun.BaseModel `bun:"table:tennants"` - TennantID int64 `bun:",pk,identity"` + TennantID int64 `bun:"tennant_id,pk,identity"` Apartment int8 NeighbourID int64 `bun:"my_neighbour"` @@ -760,6 +761,8 @@ func testUnique(t *testing.T, db *bun.DB) { FirstName string `bun:"first_name,unique:full_name"` LastName string `bun:"last_name,unique:full_name"` Birthday string `bun:"birthday,unique"` + PetName string `bun:"pet_name,unique:pet"` + PetBreed string `bun:"pet_breed,unique:pet"` } type TableAfter struct { @@ -767,8 +770,12 @@ func testUnique(t *testing.T, db *bun.DB) { FirstName string `bun:"first_name,unique:full_name"` MiddleName string `bun:"middle_name,unique:full_name"` // extend "full_name" unique group LastName string `bun:"last_name,unique:full_name"` - Birthday string `bun:"birthday"` // doesn't have to be unique any more - Email string `bun:"email,unique"` // new column, unique + + Birthday string `bun:"birthday"` // doesn't have to be unique any more + Email string `bun:"email,unique"` // new column, unique + + PetName string `bun:"pet_name,unique"` + PetBreed string `bun:"pet_breed"` // shrink "pet" unique group } wantTables := []sqlschema.Table{ @@ -796,6 +803,90 @@ func testUnique(t *testing.T, db *bun.DB) { SQLType: sqltype.VarChar, IsNullable: true, }, + "pet_name": { + SQLType: sqltype.VarChar, + IsNullable: true, + }, + "pet_breed": { + SQLType: sqltype.VarChar, + IsNullable: true, + }, + }, + UniqueContraints: []sqlschema.Unique{ + {Columns: sqlschema.NewComposite("email")}, + {Columns: sqlschema.NewComposite("pet_name")}, + // We can only be sure of the user-defined index name + {Name: "full_name", Columns: sqlschema.NewComposite("first_name", "middle_name", "last_name")}, 
+ }, + }, + } + + ctx := context.Background() + inspect := inspectDbOrSkip(t, db) + mustResetModel(t, ctx, db, (*TableBefore)(nil)) + m := newAutoMigrator(t, db, migrate.WithModel((*TableAfter)(nil))) + + // Act + err := m.Run(ctx) + require.NoError(t, err) + + // Assert + state := inspect(ctx) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) +} + +func testUniqueRenamedTable(t *testing.T, db *bun.DB) { + type TableBefore struct { + bun.BaseModel `bun:"table:before"` + FirstName string `bun:"first_name,unique:full_name"` + LastName string `bun:"last_name,unique:full_name"` + Birthday string `bun:"birthday,unique"` + PetName string `bun:"pet_name,unique:pet"` + PetBreed string `bun:"pet_breed,unique:pet"` + } + + type TableAfter struct { + bun.BaseModel `bun:"table:after"` + // Expand full_name unique group and rename it. + FirstName string `bun:"first_name,unique:birth_certificate"` + LastName string `bun:"last_name,unique:birth_certificate"` + Birthday string `bun:"birthday,unique:birth_certificate"` + + // pet_name and pet_breed have their own unique indices now. 
+ PetName string `bun:"pet_name,unique"` + PetBreed string `bun:"pet_breed,unique"` + } + + wantTables := []sqlschema.Table{ + { + Schema: db.Dialect().DefaultSchema(), + Name: "after", + Columns: map[string]sqlschema.Column{ + "first_name": { + SQLType: sqltype.VarChar, + IsNullable: true, + }, + "last_name": { + SQLType: sqltype.VarChar, + IsNullable: true, + }, + "birthday": { + SQLType: sqltype.VarChar, + IsNullable: true, + }, + "pet_name": { + SQLType: sqltype.VarChar, + IsNullable: true, + }, + "pet_breed": { + SQLType: sqltype.VarChar, + IsNullable: true, + }, + }, + UniqueContraints: []sqlschema.Unique{ + {Columns: sqlschema.NewComposite("pet_name")}, + {Columns: sqlschema.NewComposite("pet_breed")}, + {Name: "full_name", Columns: sqlschema.NewComposite("first_name", "last_name", "birthday")}, }, }, } diff --git a/migrate/diff.go b/migrate/diff.go index 341329737..99b0fbf13 100644 --- a/migrate/diff.go +++ b/migrate/diff.go @@ -37,6 +37,7 @@ AddedLoop: // Here we do not check for created / dropped columns, as well as column type changes, // because it is only possible to detect a renamed table if its signature (see state.go) did not change. d.detectColumnChanges(removed, added, false) + d.detectConstraintChanges(removed, added) // Update referenced table in all related FKs. if d.detectRenamedFKs { @@ -82,6 +83,7 @@ AddedLoop: } } d.detectColumnChanges(current, target, true) + d.detectConstraintChanges(current, target) } // Compare and update FKs ---------------- @@ -338,6 +340,8 @@ func (d *detector) makeTargetColDef(current, target sqlschema.Column) sqlschema. // detechColumnChanges finds renamed columns and, if checkType == true, columns with changed type. 
func (d *detector) detectColumnChanges(current, target sqlschema.Table, checkType bool) { + fqn := schema.FQN{target.Schema, target.Name} + ChangedRenamed: for tName, tCol := range target.Columns { @@ -347,7 +351,7 @@ ChangedRenamed: if cCol, ok := current.Columns[tName]; ok { if checkType && !d.equalColumns(cCol, tCol) { d.changes.Add(&ChangeColumnType{ - FQN: schema.FQN{target.Schema, target.Name}, + FQN: fqn, Column: tName, From: cCol, To: d.makeTargetColDef(cCol, tCol), @@ -364,7 +368,7 @@ ChangedRenamed: continue } d.changes.Add(&RenameColumn{ - FQN: schema.FQN{target.Schema, target.Name}, + FQN: fqn, OldName: cName, NewName: tName, }) @@ -375,7 +379,7 @@ ChangedRenamed: } d.changes.Add(&AddColumn{ - FQN: schema.FQN{target.Schema, target.Name}, + FQN: fqn, Column: tName, ColDef: tCol, }) @@ -385,7 +389,7 @@ ChangedRenamed: for cName, cCol := range current.Columns { if _, keep := target.Columns[cName]; !keep { d.changes.Add(&DropColumn{ - FQN: schema.FQN{target.Schema, target.Name}, + FQN: fqn, Column: cName, ColDef: cCol, }) @@ -393,6 +397,37 @@ ChangedRenamed: } } +func (d *detector) detectConstraintChanges(current, target sqlschema.Table) { + fqn := schema.FQN{target.Schema, target.Name} + +Add: + for _, want := range target.UniqueContraints { + for _, got := range current.UniqueContraints { + if got.Equals(want) { + continue Add + } + } + d.changes.Add(&AddUniqueConstraint{ + FQN: fqn, + Unique: want, + }) + } + +Drop: + for _, got := range current.UniqueContraints { + for _, want := range target.UniqueContraints { + if got.Equals(want) { + continue Drop + } + } + + d.changes.Add(&DropUniqueConstraint{ + FQN: fqn, + Unique: got, + }) + } +} + // sqlschema utils ------------------------------------------------------------ // tableSet stores unique table definitions. 
diff --git a/migrate/operations.go b/migrate/operations.go index 4b3958b5d..c4bbc6b80 100644 --- a/migrate/operations.go +++ b/migrate/operations.go @@ -202,6 +202,7 @@ func (op *AddForeignKey) GetReverse() Operation { } } +// TODO: Rename to DropForeignKey // DropConstraint. type DropConstraint struct { FK sqlschema.FK @@ -224,6 +225,63 @@ func (op *DropConstraint) GetReverse() Operation { } } +type AddUniqueConstraint struct { + FQN schema.FQN + Unique sqlschema.Unique +} + +var _ Operation = (*AddUniqueConstraint)(nil) + +func (op *AddUniqueConstraint) GetReverse() Operation { + return &DropUniqueConstraint{ + FQN: op.FQN, + Unique: op.Unique, + } +} + +func (op *AddUniqueConstraint) DependsOn(another Operation) bool { + switch another := another.(type) { + case *AddColumn: + var sameColumn bool + for _, column := range op.Unique.Columns.Split() { + if column == another.Column { + sameColumn = true + break + } + } + return op.FQN == another.FQN && sameColumn + case *RenameTable: + return op.FQN.Schema == another.FQN.Schema && op.FQN.Table == another.NewName + case *DropUniqueConstraint: + // We want to drop the constraint with the same name before adding this one. + return op.FQN == another.FQN && op.Unique.Name == another.Unique.Name + default: + return false + } + +} + +type DropUniqueConstraint struct { + FQN schema.FQN + Unique sqlschema.Unique +} + +var _ Operation = (*DropUniqueConstraint)(nil) + +func (op *DropUniqueConstraint) DependsOn(another Operation) bool { + if rename, ok := another.(*RenameTable); ok { + return op.FQN.Schema == rename.FQN.Schema && op.FQN.Table == rename.NewName + } + return false +} + +func (op *DropUniqueConstraint) GetReverse() Operation { + return &AddUniqueConstraint{ + FQN: op.FQN, + Unique: op.Unique, + } +} + // Change column type. 
type ChangeColumnType struct { FQN schema.FQN diff --git a/migrate/sqlschema/inspector.go b/migrate/sqlschema/inspector.go index 4c62289a3..cf809a343 100644 --- a/migrate/sqlschema/inspector.go +++ b/migrate/sqlschema/inspector.go @@ -75,11 +75,31 @@ func (si *SchemaInspector) Inspect(ctx context.Context) (State, error) { } } + var unique []Unique + for name, group := range t.Unique { + // Create a separate unique index for single-column unique constraints + // let each dialect apply the default naming convention. + if name == "" { + for _, f := range group { + unique = append(unique, Unique{Columns: NewComposite(f.Name)}) + } + continue + } + + // Set the name if it is a "unique group", in which case the user has provided the name. + var columns []string + for _, f := range group { + columns = append(columns, f.Name) + } + unique = append(unique, Unique{Name: name, Columns: NewComposite(columns...)}) + } + state.Tables = append(state.Tables, Table{ - Schema: t.Schema, - Name: t.Name, - Model: t.ZeroIface, - Columns: columns, + Schema: t.Schema, + Name: t.Name, + Model: t.ZeroIface, + Columns: columns, + UniqueContraints: unique, }) for _, rel := range t.Relations { diff --git a/migrate/sqlschema/state.go b/migrate/sqlschema/state.go index 789145196..b6139d29d 100644 --- a/migrate/sqlschema/state.go +++ b/migrate/sqlschema/state.go @@ -2,6 +2,7 @@ package sqlschema import ( "fmt" + "slices" "strings" "github.com/uptrace/bun/schema" @@ -13,10 +14,20 @@ type State struct { } type Table struct { - Schema string - Name string - Model interface{} + // Schema containing the table. + Schema string + + // Table name. + Name string + + // Model stores a pointer to the bun's underlying Go struct for the table. + Model interface{} + + // Columns map each column name to the column type definition. Columns map[string]Column + + // UniqueConstraints defined on the table. + UniqueContraints []Unique } // T returns a fully-qualified name object for the table. 
@@ -131,7 +142,7 @@ type cFQN struct { // C creates a fully-qualified column name object. func C(schema, table string, columns ...string) cFQN { - return cFQN{tFQN: T(schema, table), Column: newComposite(columns...)} + return cFQN{tFQN: T(schema, table), Column: NewComposite(columns...)} } // T returns the FQN of the column's parent table. @@ -143,8 +154,9 @@ func (c cFQN) T() tFQN { // Although having duplicated column references in a FK is illegal, composite neither validates nor enforces this constraint on the caller. type composite string -// newComposite creates a composite column from a slice of column names. -func newComposite(columns ...string) composite { +// NewComposite creates a composite column from a slice of column names. +func NewComposite(columns ...string) composite { + slices.Sort(columns) return composite(strings.Join(columns, ",")) } @@ -162,9 +174,14 @@ func (c composite) Split() []string { } // Contains checks that a composite column contains every part of another composite. -func (c composite) Contains(other composite) bool { +func (c composite) contains(other composite) bool { + return c.Contains(string(other)) +} + +// Contains checks that a composite column contains the current column. +func (c composite) Contains(other string) bool { var count int - checkColumns := other.Split() + checkColumns := composite(other).Split() wantCount := len(checkColumns) for _, check := range checkColumns { @@ -187,7 +204,7 @@ func (c composite) Replace(oldColumn, newColumn string) composite { for i, column := range columns { if column == oldColumn { columns[i] = newColumn - return newComposite(columns...) + return NewComposite(columns...) 
} } return c @@ -242,9 +259,9 @@ func (fk *FK) dependsT(t tFQN) (ok bool, cols []*cFQN) { // depends on C("a", "b", "c_1"), C("a", "b", "c_2"), C("w", "x", "y_1"), and C("w", "x", "y_2") func (fk *FK) dependsC(c cFQN) (bool, *cFQN) { switch { - case fk.From.Column.Contains(c.Column): + case fk.From.Column.contains(c.Column): return true, &fk.From - case fk.To.Column.Contains(c.Column): + case fk.To.Column.contains(c.Column): return true, &fk.To } return false, nil @@ -347,3 +364,14 @@ func (r RefMap) Deleted() (fks []FK) { } return } + +// Unique represents a unique constraint defined on 1 or more columns. +type Unique struct { + Name string + Columns composite +} + +// Equals checks that two unique constraint are the same, assuming both are defined for the same table. +func (u Unique) Equals(other Unique) bool { + return u.Columns == other.Columns +} From 5f6d494e47692c9ff041923572e585c3d4e2fe52 Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Sun, 27 Oct 2024 12:16:30 +0100 Subject: [PATCH 25/55] chore: appease linter, initialize struct with named fields --- migrate/diff.go | 10 +++++----- migrate/operations.go | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/migrate/diff.go b/migrate/diff.go index 99b0fbf13..8dbe92766 100644 --- a/migrate/diff.go +++ b/migrate/diff.go @@ -30,7 +30,7 @@ AddedLoop: for _, removed := range removedTables.Values() { if d.canRename(removed, added) { d.changes.Add(&RenameTable{ - FQN: schema.FQN{removed.Schema, removed.Name}, + FQN: schema.FQN{Schema: removed.Schema, Table: removed.Name}, NewName: added.Name, }) @@ -53,7 +53,7 @@ AddedLoop: } // If a new table did not appear because of the rename operation, then it must've been created. 
d.changes.Add(&CreateTable{ - FQN: schema.FQN{added.Schema, added.Name}, + FQN: schema.FQN{Schema: added.Schema, Table: added.Name}, Model: added.Model, }) created.Add(added) @@ -63,7 +63,7 @@ AddedLoop: dropped := currentTables.Sub(targetTables) for _, t := range dropped.Values() { d.changes.Add(&DropTable{ - FQN: schema.FQN{t.Schema, t.Name}, + FQN: schema.FQN{Schema: t.Schema, Table: t.Name}, }) } @@ -340,7 +340,7 @@ func (d *detector) makeTargetColDef(current, target sqlschema.Column) sqlschema. // detechColumnChanges finds renamed columns and, if checkType == true, columns with changed type. func (d *detector) detectColumnChanges(current, target sqlschema.Table, checkType bool) { - fqn := schema.FQN{target.Schema, target.Name} + fqn := schema.FQN{Schema: target.Schema, Table: target.Name} ChangedRenamed: for tName, tCol := range target.Columns { @@ -398,7 +398,7 @@ ChangedRenamed: } func (d *detector) detectConstraintChanges(current, target sqlschema.Table) { - fqn := schema.FQN{target.Schema, target.Name} + fqn := schema.FQN{Schema: target.Schema, Table: target.Name} Add: for _, want := range target.UniqueContraints { diff --git a/migrate/operations.go b/migrate/operations.go index c4bbc6b80..9603948e6 100644 --- a/migrate/operations.go +++ b/migrate/operations.go @@ -54,7 +54,7 @@ var _ Operation = (*RenameTable)(nil) func (op *RenameTable) GetReverse() Operation { return &RenameTable{ - FQN: schema.FQN{op.FQN.Schema, op.NewName}, + FQN: schema.FQN{Schema: op.FQN.Schema, Table: op.NewName}, NewName: op.FQN.Table, } } From 9b810dee4b1a721efb82c913099f39f52c44eb57 Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Sun, 27 Oct 2024 12:30:30 +0100 Subject: [PATCH 26/55] fix: update schema.Field names BaseFields -> BasePKs and JoinFields -> JoinPKs were updated in https://github.com/uptrace/bun/commit/8648d6f56788d2d866e2bb51135c37118acd9b6f --- migrate/sqlschema/inspector.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/migrate/sqlschema/inspector.go b/migrate/sqlschema/inspector.go index cf809a343..d76d9dc7e 100644 --- a/migrate/sqlschema/inspector.go +++ b/migrate/sqlschema/inspector.go @@ -111,10 +111,10 @@ func (si *SchemaInspector) Inspect(ctx context.Context) (State, error) { } var fromCols, toCols []string - for _, f := range rel.BaseFields { + for _, f := range rel.BasePKs { fromCols = append(fromCols, f.Name) } - for _, f := range rel.JoinFields { + for _, f := range rel.JoinPKs { toCols = append(toCols, f.Name) } From d08fa40cc87d67296a83a77448ea511531fc8cdd Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Sun, 27 Oct 2024 15:34:47 +0100 Subject: [PATCH 27/55] fix: implement DefaultSchema for Oracle dialect --- dialect/oracledialect/dialect.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/dialect/oracledialect/dialect.go b/dialect/oracledialect/dialect.go index cc4806b3b..44360a0dd 100644 --- a/dialect/oracledialect/dialect.go +++ b/dialect/oracledialect/dialect.go @@ -89,6 +89,10 @@ func (d *Dialect) DefaultVarcharLen() int { return 255 } +func (d *Dialect) DefaultSchema() string { + return "app" +} + func (d *Dialect) AppendSequence(b []byte, table *schema.Table, field *schema.Field) []byte { return append(b, " GENERATED BY DEFAULT AS IDENTITY"...) 
} From b1ae32e9e9f45ff2a66e50bfd13bedcf6653d874 Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Sun, 27 Oct 2024 15:42:06 +0100 Subject: [PATCH 28/55] fix: cleanup after testUniqueRenamedTable --- internal/dbtest/migrate_test.go | 1 + 1 file changed, 1 insertion(+) diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index 47d28b0e8..f9b886f5b 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -894,6 +894,7 @@ func testUniqueRenamedTable(t *testing.T, db *bun.DB) { ctx := context.Background() inspect := inspectDbOrSkip(t, db) mustResetModel(t, ctx, db, (*TableBefore)(nil)) + mustDropTableOnCleanup(t, ctx, db, (*TableAfter)(nil)) m := newAutoMigrator(t, db, migrate.WithModel((*TableAfter)(nil))) // Act From 694f873d61ed8d2f09032ae0c0dbec4b71c3719e Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Sun, 27 Oct 2024 16:30:24 +0100 Subject: [PATCH 29/55] fix: append IDENTITY to ADD COLUMN statement if needed --- dialect/pgdialect/alter_table.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/dialect/pgdialect/alter_table.go b/dialect/pgdialect/alter_table.go index 821191c53..7f9ab4492 100644 --- a/dialect/pgdialect/alter_table.go +++ b/dialect/pgdialect/alter_table.go @@ -122,6 +122,10 @@ func (m *migrator) addColumn(fmter schema.Formatter, b []byte, add *migrate.AddC b, _ = add.ColDef.AppendQuery(fmter, b) + if add.ColDef.IsIdentity { + b = appendGeneratedAsIdentity(b) + } + return b, nil } From a734629fa285406038cbe4a50798626b5ac08539 Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Sun, 27 Oct 2024 17:50:37 +0100 Subject: [PATCH 30/55] feat: support modifying primary keys - Retrieve PK information on the table level rather than for each column individually. 
- Refine dependencies between migrations --- dialect/pgdialect/alter_table.go | 42 ++++++++++ dialect/pgdialect/inspector.go | 47 ++++++++--- internal/dbtest/inspect_test.go | 50 ++++++++---- internal/dbtest/migrate_test.go | 132 +++++++++++++++++++++++++++++-- migrate/diff.go | 26 ++++++ migrate/operations.go | 78 ++++++++++++++++-- migrate/sqlschema/inspector.go | 11 ++- migrate/sqlschema/state.go | 11 ++- 8 files changed, 358 insertions(+), 39 deletions(-) diff --git a/dialect/pgdialect/alter_table.go b/dialect/pgdialect/alter_table.go index 7f9ab4492..a1ac7f333 100644 --- a/dialect/pgdialect/alter_table.go +++ b/dialect/pgdialect/alter_table.go @@ -54,6 +54,31 @@ func (m *migrator) Apply(ctx context.Context, changes ...interface{}) error { return fmt.Errorf("apply changes: drop table %s: %w", change.FQN, err) } continue + case *migrate.ChangePrimaryKey: + // TODO: refactor! + b, err = m.dropConstraint(fmter, b, change.FQN, change.Old.Name) + if err != nil { + return fmt.Errorf("apply changes: %w", err) + } + + query := internal.String(b) + log.Println("exec query: " + query) + if _, err = conn.ExecContext(ctx, query); err != nil { + return fmt.Errorf("apply changes: %w", err) + } + + b = []byte{} + b, err = m.addPrimaryKey(fmter, b, change.FQN, change.New.Columns.Safe()) + if err != nil { + return fmt.Errorf("apply changes: %w", err) + } + + query = internal.String(b) + log.Println("exec query: " + query) + if _, err = conn.ExecContext(ctx, query); err != nil { + return fmt.Errorf("apply changes: %w", err) + } + continue case *migrate.RenameTable: b, err = m.renameTable(fmter, b, change) case *migrate.RenameColumn: @@ -62,6 +87,8 @@ func (m *migrator) Apply(ctx context.Context, changes ...interface{}) error { b, err = m.addColumn(fmter, b, change) case *migrate.DropColumn: b, err = m.dropColumn(fmter, b, change) + case *migrate.AddPrimaryKey: + b, err = m.addPrimaryKey(fmter, b, change.FQN, change.PK.Columns.Safe()) case *migrate.AddForeignKey: b, err = 
m.addForeignKey(fmter, b, change) case *migrate.AddUniqueConstraint: @@ -70,6 +97,8 @@ func (m *migrator) Apply(ctx context.Context, changes ...interface{}) error { b, err = m.dropConstraint(fmter, b, change.FQN, change.Unique.Name) case *migrate.DropConstraint: b, err = m.dropConstraint(fmter, b, change.FQN(), change.ConstraintName) + case *migrate.DropPrimaryKey: + b, err = m.dropConstraint(fmter, b, change.FQN, change.PK.Name) case *migrate.RenameConstraint: b, err = m.renameConstraint(fmter, b, change) case *migrate.ChangeColumnType: @@ -155,6 +184,19 @@ func (m *migrator) renameConstraint(fmter schema.Formatter, b []byte, rename *mi return b, nil } +func (m *migrator) addPrimaryKey(fmter schema.Formatter, b []byte, fqn schema.FQN, columns schema.Safe) (_ []byte, err error) { + b = append(b, "ALTER TABLE "...) + if b, err = fqn.AppendQuery(fmter, b); err != nil { + return b, err + } + + b = append(b, " ADD PRIMARY KEY ("...) + b, _ = columns.AppendQuery(fmter, b) + b = append(b, ")"...) + + return b, nil +} + func (m *migrator) addUnique(fmter schema.Formatter, b []byte, change *migrate.AddUniqueConstraint) (_ []byte, err error) { b = append(b, "ALTER TABLE "...) 
if b, err = change.FQN.AppendQuery(fmter, b); err != nil { diff --git a/dialect/pgdialect/inspector.go b/dialect/pgdialect/inspector.go index dc4ea2707..9bac008c1 100644 --- a/dialect/pgdialect/inspector.go +++ b/dialect/pgdialect/inspector.go @@ -64,7 +64,6 @@ func (in *Inspector) Inspect(ctx context.Context) (sqlschema.State, error) { SQLType: c.DataType, VarcharLen: c.VarcharLen, DefaultValue: def, - IsPK: c.IsPK, IsNullable: c.IsNullable, IsAutoIncrement: c.IsSerial, IsIdentity: c.IsIdentity, @@ -83,11 +82,20 @@ func (in *Inspector) Inspect(ctx context.Context) (sqlschema.State, error) { }) } + var pk *sqlschema.PK + if len(table.PrimaryKey.Columns) > 0 { + pk = &sqlschema.PK{ + Name: table.PrimaryKey.ConstraintName, + Columns: sqlschema.NewComposite(table.PrimaryKey.Columns...), + } + } + state.Tables = append(state.Tables, sqlschema.Table{ Schema: table.Schema, Name: table.Name, Columns: colDefs, UniqueContraints: unique, + PK: pk, }) } @@ -101,8 +109,9 @@ func (in *Inspector) Inspect(ctx context.Context) (sqlschema.State, error) { } type InformationSchemaTable struct { - Schema string `bun:"table_schema,pk"` - Name string `bun:"table_name,pk"` + Schema string `bun:"table_schema,pk"` + Name string `bun:"table_name,pk"` + PrimaryKey PrimaryKey `bun:"embed:primary_key_"` Columns []*InformationSchemaColumn `bun:"rel:has-many,join:table_schema=table_schema,join:table_name=table_name"` } @@ -117,7 +126,6 @@ type InformationSchemaColumn struct { ArrayDims int `bun:"array_dims"` Default string `bun:"default"` IsDefaultLiteral bool `bun:"default_is_literal_expr"` - IsPK bool `bun:"is_pk"` IsIdentity bool `bun:"is_identity"` IndentityType string `bun:"identity_type"` IsSerial bool `bun:"is_serial"` @@ -135,18 +143,38 @@ type ForeignKey struct { TargetColumns []string `bun:"target_columns,array"` } +type PrimaryKey struct { + ConstraintName string `bun:"name"` + Columns []string `bun:"columns,array"` +} + const ( // sqlInspectTables retrieves all user-defined tables 
across all schemas. // It excludes relations from Postgres's reserved "pg_" schemas and views from the "information_schema". // Pass bun.In([]string{...}) to exclude tables from this inspection or bun.In([]string{''}) to include all results. sqlInspectTables = ` -SELECT "table_schema", "table_name" -FROM information_schema.tables +SELECT + "t".table_schema, + "t".table_name, + pk.name AS primary_key_name, + pk.columns AS primary_key_columns +FROM information_schema.tables "t" + LEFT JOIN ( + SELECT i.indrelid, "idx".relname AS "name", ARRAY_AGG("a".attname) AS "columns" + FROM pg_index i + JOIN pg_attribute "a" + ON "a".attrelid = i.indrelid + AND "a".attnum = ANY("i".indkey) + AND i.indisprimary + JOIN pg_class "idx" ON i.indexrelid = "idx".oid + GROUP BY 1, 2 + ) pk + ON ("t".table_schema || '.' || "t".table_name)::regclass = pk.indrelid WHERE table_type = 'BASE TABLE' - AND "table_schema" <> 'information_schema' - AND "table_schema" NOT LIKE 'pg_%' + AND "t".table_schema <> 'information_schema' + AND "t".table_schema NOT LIKE 'pg_%' AND "table_name" NOT IN (?) -ORDER BY "table_schema", "table_name" +ORDER BY "t".table_schema, "t".table_name ` // sqlInspectColumnsQuery retrieves column definitions for the specified table. 
@@ -166,7 +194,6 @@ SELECT ELSE "c".column_default END AS "default", "c".column_default ~ '^''.*''::.*$' OR "c".column_default ~ '^[0-9\.]+$' AS default_is_literal_expr, - 'p' = ANY("c".constraint_type) AS is_pk, "c".is_identity = 'YES' AS is_identity, "c".column_default = format('nextval(''%s_%s_seq''::regclass)', "c".table_name, "c".column_name) AS is_serial, COALESCE("c".identity_type, '') AS identity_type, diff --git a/internal/dbtest/inspect_test.go b/internal/dbtest/inspect_test.go index 6d3124261..5b463cdef 100644 --- a/internal/dbtest/inspect_test.go +++ b/internal/dbtest/inspect_test.go @@ -100,7 +100,6 @@ func TestDatabaseInspector_Inspect(t *testing.T) { Columns: map[string]sqlschema.Column{ "office_name": { SQLType: sqltype.VarChar, - IsPK: true, }, "publisher_id": { SQLType: sqltype.VarChar, @@ -111,6 +110,7 @@ func TestDatabaseInspector_Inspect(t *testing.T) { IsNullable: true, }, }, + PK: &sqlschema.PK{Columns: sqlschema.NewComposite("office_name")}, }, { Schema: defaultSchema, @@ -118,7 +118,6 @@ func TestDatabaseInspector_Inspect(t *testing.T) { Columns: map[string]sqlschema.Column{ "isbn": { SQLType: "bigint", - IsPK: true, IsNullable: false, IsAutoIncrement: false, IsIdentity: true, @@ -126,7 +125,6 @@ func TestDatabaseInspector_Inspect(t *testing.T) { }, "editor": { SQLType: sqltype.VarChar, - IsPK: false, IsNullable: false, IsAutoIncrement: false, IsIdentity: false, @@ -134,7 +132,6 @@ func TestDatabaseInspector_Inspect(t *testing.T) { }, "title": { SQLType: sqltype.VarChar, - IsPK: false, IsNullable: false, IsAutoIncrement: false, IsIdentity: false, @@ -143,7 +140,6 @@ func TestDatabaseInspector_Inspect(t *testing.T) { "locale": { SQLType: sqltype.VarChar, VarcharLen: 5, - IsPK: false, IsNullable: true, IsAutoIncrement: false, IsIdentity: false, @@ -151,7 +147,6 @@ func TestDatabaseInspector_Inspect(t *testing.T) { }, "page_count": { SQLType: "smallint", - IsPK: false, IsNullable: false, IsAutoIncrement: false, IsIdentity: false, @@ -159,7 
+154,6 @@ func TestDatabaseInspector_Inspect(t *testing.T) { }, "book_count": { SQLType: "integer", - IsPK: false, IsNullable: false, IsAutoIncrement: true, IsIdentity: false, @@ -172,6 +166,7 @@ func TestDatabaseInspector_Inspect(t *testing.T) { SQLType: "bigint", }, }, + PK: &sqlschema.PK{Columns: sqlschema.NewComposite("isbn")}, UniqueContraints: []sqlschema.Unique{ {Columns: sqlschema.NewComposite("editor", "title")}, }, @@ -182,7 +177,6 @@ func TestDatabaseInspector_Inspect(t *testing.T) { Columns: map[string]sqlschema.Column{ "author_id": { SQLType: "bigint", - IsPK: true, IsIdentity: true, }, "first_name": { @@ -195,6 +189,7 @@ func TestDatabaseInspector_Inspect(t *testing.T) { SQLType: sqltype.VarChar, }, }, + PK: &sqlschema.PK{Columns: sqlschema.NewComposite("author_id")}, UniqueContraints: []sqlschema.Unique{ {Columns: sqlschema.NewComposite("first_name", "last_name")}, {Columns: sqlschema.NewComposite("email")}, @@ -206,13 +201,12 @@ func TestDatabaseInspector_Inspect(t *testing.T) { Columns: map[string]sqlschema.Column{ "publisher_id": { SQLType: sqltype.VarChar, - IsPK: true, }, "author_id": { SQLType: "bigint", - IsPK: true, }, }, + PK: &sqlschema.PK{Columns: sqlschema.NewComposite("publisher_id", "author_id")}, }, { Schema: defaultSchema, @@ -220,7 +214,6 @@ func TestDatabaseInspector_Inspect(t *testing.T) { Columns: map[string]sqlschema.Column{ "publisher_id": { SQLType: sqltype.VarChar, - IsPK: true, DefaultValue: "gen_random_uuid()", }, "publisher_name": { @@ -232,6 +225,7 @@ func TestDatabaseInspector_Inspect(t *testing.T) { IsNullable: true, }, }, + PK: &sqlschema.PK{Columns: sqlschema.NewComposite("publisher_id")}, UniqueContraints: []sqlschema.Unique{ {Columns: sqlschema.NewComposite("publisher_id", "publisher_name")}, }, @@ -301,7 +295,7 @@ func mustCreateSchema(tb testing.TB, ctx context.Context, db *bun.DB, schema str func cmpTables(tb testing.TB, d sqlschema.InspectorDialect, want, got []sqlschema.Table) { tb.Helper() - require.Equal(tb, 
tableNames(want), tableNames(got), "different set of tables") + require.ElementsMatch(tb, tableNames(want), tableNames(got), "different set of tables") // Now we are guaranteed to have the same tables. for _, wt := range want { @@ -345,15 +339,15 @@ func cmpColumns(tb testing.TB, d sqlschema.InspectorDialect, tableName string, w } if wantCol.IsNullable != gotCol.IsNullable { - errorf("isNullable:\n\t(+want)\t%s\n\t(-got)\t%s", wantCol.IsNullable, gotCol.IsNullable) + errorf("isNullable:\n\t(+want)\t%t\n\t(-got)\t%t", wantCol.IsNullable, gotCol.IsNullable) } if wantCol.IsAutoIncrement != gotCol.IsAutoIncrement { - errorf("IsAutoIncrement:\n\t(+want)\t%s\n\t(-got)\t%s", wantCol.IsAutoIncrement, gotCol.IsAutoIncrement) + errorf("IsAutoIncrement:\n\t(+want)\t%s\b\t(-got)\t%t", wantCol.IsAutoIncrement, gotCol.IsAutoIncrement) } if wantCol.IsIdentity != gotCol.IsIdentity { - errorf("IsIdentity:\n\t(+want)\t%s\n\t(-got)\t%s", wantCol.IsIdentity, gotCol.IsIdentity) + errorf("IsIdentity:\n\t(+want)\t%t\n\t(-got)\t%t", wantCol.IsIdentity, gotCol.IsIdentity) } } @@ -381,6 +375,13 @@ func cmpColumns(tb testing.TB, d sqlschema.InspectorDialect, tableName string, w func cmpConstraints(tb testing.TB, want, got sqlschema.Table) { tb.Helper() + if want.PK != nil { + require.NotNilf(tb, got.PK, "table %q missing primary key, want: (%s)", want.Name, want.PK.Columns) + require.Equalf(tb, want.PK.Columns, got.PK.Columns, "table %q has wrong primary key", want.Name) + } else { + require.Nilf(tb, got.PK, "table %q shouldn't have a primary key", want.Name) + } + // Only keep columns included in each unique constraint for comparison. 
stripNames := func(uniques []sqlschema.Unique) (res []string) { for _, u := range uniques { @@ -496,5 +497,24 @@ func TestSchemaInspector_Inspect(t *testing.T) { require.Len(t, got.Tables, 1) cmpConstraints(t, want, got.Tables[0]) }) + t.Run("collects primary keys", func(t *testing.T) { + type Model struct { + ID string `bun:",pk"` + Email string `bun:",pk"` + Birthday time.Time `bun:",notnull"` + } + + tables := schema.NewTables(dialect) + tables.Register((*Model)(nil)) + inspector := sqlschema.NewSchemaInspector(tables) + want := sqlschema.NewComposite("id", "email") + + got, err := inspector.Inspect(context.Background()) + require.NoError(t, err) + + require.Len(t, got.Tables, 1) + require.NotNilf(t, got.Tables[0].PK, "did not register primary key, want (%s)", want) + require.Equal(t, want, got.Tables[0].PK.Columns, "wrong primary key columns") + }) }) } diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index f9b886f5b..a14811f3c 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -211,6 +211,7 @@ func TestAutoMigrator_Run(t *testing.T) { {testAddDropColumn}, {testUnique}, {testUniqueRenamedTable}, + {testUpdatePrimaryKeys}, } testEachDB(t, func(t *testing.T, dbName string, db *bun.DB) { @@ -481,7 +482,7 @@ func testRenamedColumns(t *testing.T, db *bun.DB) { // Database state type Original struct { bun.BaseModel `bun:"original"` - ID int64 `bun:",pk"` + ID int64 `bun:"id,pk"` } type Model1 struct { @@ -494,7 +495,7 @@ func testRenamedColumns(t *testing.T, db *bun.DB) { // Model state type Renamed struct { bun.BaseModel `bun:"renamed"` - Count int64 `bun:",pk"` // renamed column in renamed model + Count int64 `bun:"count,pk"` // renamed column in renamed model } type Model2 struct { @@ -613,13 +614,8 @@ func testChangeColumnType_AutoCast(t *testing.T, db *bun.DB) { Schema: db.Dialect().DefaultSchema(), Name: "table", Columns: map[string]sqlschema.Column{ - // "new_pk": { - // IsPK: true, - // SQLType: 
"bigint", - // }, "bigger_int": { SQLType: "bigint", - IsPK: true, IsIdentity: true, }, "ts": { @@ -650,6 +646,7 @@ func testChangeColumnType_AutoCast(t *testing.T, db *bun.DB) { // SQLType: "array", // }, }, + PK: &sqlschema.PK{Columns: sqlschema.NewComposite("bigger_int")}, }, } @@ -906,6 +903,127 @@ func testUniqueRenamedTable(t *testing.T, db *bun.DB) { cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) } +func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { + // Has a composite primary key. + type DropPKBefore struct { + bun.BaseModel `bun:"table:drop_your_pks"` + FirstName string `bun:"first_name,pk"` + LastName string `bun:"last_name,pk"` + } + + // This table doesn't have any primary keys at all. + type AddNewPKBefore struct { + bun.BaseModel `bun:"table:add_new_pk"` + FirstName string `bun:"first_name"` + LastName string `bun:"last_name"` + } + + // Has an (identity) ID column as primary key. + type ChangePKBefore struct { + bun.BaseModel `bun:"table:change_pk"` + ID int64 `bun:"deprecated,pk,identity"` + FirstName string `bun:"first_name"` + LastName string `bun:"last_name"` + } + + // ------------------------ + + // Doesn't have any primary keys. + type DropPKAfter struct { + bun.BaseModel `bun:"table:drop_your_pks"` + FirstName string `bun:"first_name,notnull"` + LastName string `bun:"last_name,notnull"` + } + + // Has a new (identity) ID column as primary key. + type AddNewPKAfter struct { + bun.BaseModel `bun:"table:add_new_pk"` + ID int64 `bun:"new_id,pk,identity"` + FirstName string `bun:"first_name"` + LastName string `bun:"last_name"` + } + + // Has a composite primary key in place of the old ID. 
+ type ChangePKAfter struct { + bun.BaseModel `bun:"table:change_pk"` + FirstName string `bun:"first_name,pk"` + LastName string `bun:"last_name,pk"` + } + + wantTables := []sqlschema.Table{ + { + Schema: db.Dialect().DefaultSchema(), + Name: "drop_your_pks", + Columns: map[string]sqlschema.Column{ + "first_name": { + SQLType: sqltype.VarChar, + IsNullable: false, + }, + "last_name": { + SQLType: sqltype.VarChar, + IsNullable: false, + }, + }, + }, + { + Schema: db.Dialect().DefaultSchema(), + Name: "add_new_pk", + Columns: map[string]sqlschema.Column{ + "new_id": { + SQLType: sqltype.BigInt, + IsNullable: false, + IsIdentity: true, + }, + "first_name": { + SQLType: sqltype.VarChar, + IsNullable: true, + }, + "last_name": { + SQLType: sqltype.VarChar, + IsNullable: true, + }, + }, + PK: &sqlschema.PK{Columns: sqlschema.NewComposite("new_id")}, + }, + { + Schema: db.Dialect().DefaultSchema(), + Name: "change_pk", + Columns: map[string]sqlschema.Column{ + "first_name": { + SQLType: sqltype.VarChar, + IsNullable: false, + }, + "last_name": { + SQLType: sqltype.VarChar, + IsNullable: false, + }, + }, + PK: &sqlschema.PK{Columns: sqlschema.NewComposite("first_name", "last_name")}, + }, + } + + ctx := context.Background() + inspect := inspectDbOrSkip(t, db) + mustResetModel(t, ctx, db, + (*DropPKBefore)(nil), + (*AddNewPKBefore)(nil), + (*ChangePKBefore)(nil), + ) + m := newAutoMigrator(t, db, migrate.WithModel( + (*DropPKAfter)(nil), + (*AddNewPKAfter)(nil), + (*ChangePKAfter)(nil)), + ) + + // Act + err := m.Run(ctx) + require.NoError(t, err) + + // Assert + state := inspect(ctx) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) +} + // // TODO: rewrite these tests into AutoMigrator tests, Diff should be moved to migrate/internal package // func TestDiff(t *testing.T) { // type Journal struct { diff --git a/migrate/diff.go b/migrate/diff.go index 8dbe92766..79e401b91 100644 --- a/migrate/diff.go +++ b/migrate/diff.go @@ -375,6 +375,9 
@@ ChangedRenamed: delete(current.Columns, cName) // no need to check this column again d.refMap.UpdateC(sqlschema.C(target.Schema, target.Name, cName), tName) + // Update primary key definition to avoid superficially recreating the constraint. + current.PK.Columns = current.PK.Columns.Replace(cName, tName) + continue ChangedRenamed } @@ -426,6 +429,29 @@ Drop: Unique: got, }) } + + // Detect primary key changes + if target.PK == nil && current.PK == nil { + return + } + switch { + case target.PK == nil && current.PK != nil: + d.changes.Add(&DropPrimaryKey{ + FQN: fqn, + PK: current.PK, + }) + case current.PK == nil && target.PK != nil: + d.changes.Add(&AddPrimaryKey{ + FQN: fqn, + PK: target.PK, + }) + case target.PK.Columns != current.PK.Columns: + d.changes.Add(&ChangePrimaryKey{ + FQN: fqn, + Old: current.PK, + New: target.PK, + }) + } } // sqlschema utils ------------------------------------------------------------ diff --git a/migrate/operations.go b/migrate/operations.go index 9603948e6..5c0ac47b8 100644 --- a/migrate/operations.go +++ b/migrate/operations.go @@ -115,9 +115,10 @@ func (op *DropColumn) GetReverse() Operation { func (op *DropColumn) DependsOn(another Operation) bool { // TODO: refactor - if dc, ok := another.(*DropConstraint); ok { + switch drop := another.(type) { + case *DropConstraint: var fCol bool - fCols := dc.FK.From.Column.Split() + fCols := drop.FK.From.Column.Split() for _, c := range fCols { if c == op.Column { fCol = true @@ -126,7 +127,7 @@ func (op *DropColumn) DependsOn(another Operation) bool { } var tCol bool - tCols := dc.FK.To.Column.Split() + tCols := drop.FK.To.Column.Split() for _, c := range tCols { if c == op.Column { tCol = true @@ -134,8 +135,13 @@ func (op *DropColumn) DependsOn(another Operation) bool { } } - return (dc.FK.From.Schema == op.FQN.Schema && dc.FK.From.Table == op.FQN.Table && fCol) || - (dc.FK.To.Schema == op.FQN.Schema && dc.FK.To.Table == op.FQN.Table && tCol) + return (drop.FK.From.Schema == 
op.FQN.Schema && drop.FK.From.Table == op.FQN.Table && fCol) || + (drop.FK.To.Schema == op.FQN.Schema && drop.FK.To.Table == op.FQN.Table && tCol) + + case *DropPrimaryKey: + return op.FQN == drop.FQN && drop.PK.Columns.Contains(op.Column) + case *ChangePrimaryKey: + return op.FQN == drop.FQN && drop.Old.Columns.Contains(op.Column) } return false } @@ -301,6 +307,68 @@ func (op *ChangeColumnType) GetReverse() Operation { } } +type DropPrimaryKey struct { + FQN schema.FQN + PK *sqlschema.PK +} + +var _ Operation = (*DropPrimaryKey)(nil) + +func (op *DropPrimaryKey) GetReverse() Operation { + return &AddPrimaryKey{ + FQN: op.FQN, + PK: op.PK, + } +} + +type AddPrimaryKey struct { + FQN schema.FQN + PK *sqlschema.PK +} + +var _ Operation = (*AddPrimaryKey)(nil) + +func (op *AddPrimaryKey) GetReverse() Operation { + return &DropPrimaryKey{ + FQN: op.FQN, + PK: op.PK, + } +} + +func (op *AddPrimaryKey) DependsOn(another Operation) bool { + switch another := another.(type) { + case *AddColumn: + return op.FQN == another.FQN && op.PK.Columns.Contains(another.Column) + } + return false +} + +type ChangePrimaryKey struct { + FQN schema.FQN + Old *sqlschema.PK + New *sqlschema.PK +} + +var _ Operation = (*AddPrimaryKey)(nil) + +func (op *ChangePrimaryKey) GetReverse() Operation { + return &ChangePrimaryKey{ + FQN: op.FQN, + Old: op.New, + New: op.Old, + } +} + +// func (op *ChangePrimaryKey) DependsOn(another Operation) bool { +// switch another := another.(type) { +// case *AddColumn: +// return op.FQN == another.FQN && op.PK.Columns.Contains(another.Column) +// case *RenameColumn: +// return op.FQN == another.FQN && op.PK.Columns.Contains(another.NewName) +// } +// return false +// } + // noop is a migration that doesn't change the schema. 
type noop struct{} diff --git a/migrate/sqlschema/inspector.go b/migrate/sqlschema/inspector.go index d76d9dc7e..49537237d 100644 --- a/migrate/sqlschema/inspector.go +++ b/migrate/sqlschema/inspector.go @@ -68,7 +68,6 @@ func (si *SchemaInspector) Inspect(ctx context.Context) (State, error) { SQLType: strings.ToLower(sqlType), // TODO(dyma): maybe this is not necessary after Column.Eq() VarcharLen: length, DefaultValue: exprToLower(f.SQLDefault), - IsPK: f.IsPK, IsNullable: !f.NotNull, IsAutoIncrement: f.AutoIncrement, IsIdentity: f.Identity, @@ -94,12 +93,22 @@ func (si *SchemaInspector) Inspect(ctx context.Context) (State, error) { unique = append(unique, Unique{Name: name, Columns: NewComposite(columns...)}) } + var pk *PK + if len(t.PKs) > 0 { + var columns []string + for _, f := range t.PKs { + columns = append(columns, f.Name) + } + pk = &PK{Columns: NewComposite(columns...)} + } + state.Tables = append(state.Tables, Table{ Schema: t.Schema, Name: t.Name, Model: t.ZeroIface, Columns: columns, UniqueContraints: unique, + PK: pk, }) for _, rel := range t.Relations { diff --git a/migrate/sqlschema/state.go b/migrate/sqlschema/state.go index b6139d29d..efafcad2c 100644 --- a/migrate/sqlschema/state.go +++ b/migrate/sqlschema/state.go @@ -28,6 +28,9 @@ type Table struct { // UniqueConstraints defined on the table. UniqueContraints []Unique + + // PrimaryKey holds the primary key definition if any. + PK *PK } // T returns a fully-qualified name object for the table. @@ -40,13 +43,13 @@ type Column struct { SQLType string VarcharLen int DefaultValue string - IsPK bool IsNullable bool IsAutoIncrement bool IsIdentity bool // TODO: add Precision and Cardinality for timestamps/bit-strings/floats and arrays respectively. } +// AppendQuery appends full SQL data type. func (c *Column) AppendQuery(fmter schema.Formatter, b []byte) (_ []byte, err error) { b = append(b, c.SQLType...) 
if c.VarcharLen == 0 { @@ -375,3 +378,9 @@ type Unique struct { func (u Unique) Equals(other Unique) bool { return u.Columns == other.Columns } + +// PK represents a primary key constraint defined on 1 or more columns. +type PK struct { + Name string + Columns composite +} From 1ef10c5431a97077f1ac62e05e2f1fb70ed7f6aa Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Mon, 28 Oct 2024 12:05:37 +0100 Subject: [PATCH 31/55] test: run for all dialects --- internal/dbtest/db_test.go | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/internal/dbtest/db_test.go b/internal/dbtest/db_test.go index b355efaee..ddc9d70a5 100644 --- a/internal/dbtest/db_test.go +++ b/internal/dbtest/db_test.go @@ -45,13 +45,13 @@ const ( ) var allDBs = map[string]func(tb testing.TB) *bun.DB{ - pgName: pg, - // pgxName: pgx, - // mysql5Name: mysql5, - // mysql8Name: mysql8, - // mariadbName: mariadb, - // sqliteName: sqlite, - // mssql2019Name: mssql2019, + pgName: pg, + pgxName: pgx, + mysql5Name: mysql5, + mysql8Name: mysql8, + mariadbName: mariadb, + sqliteName: sqlite, + mssql2019Name: mssql2019, } var allDialects = []func() schema.Dialect{ From 4ce21a44c6e090226ce6eba31dfbc95564d7dc3b Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Sun, 3 Nov 2024 09:45:05 +0100 Subject: [PATCH 32/55] refactor: make MakeQueryBytes available to other library packages --- db.go | 3 +-- internal/util.go | 6 ++++++ 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/db.go b/db.go index 106dfe905..2f52a2248 100644 --- a/db.go +++ b/db.go @@ -703,6 +703,5 @@ func (tx Tx) NewDropColumn() *DropColumnQuery { //------------------------------------------------------------------------------ func (db *DB) makeQueryBytes() []byte { - // TODO: make this configurable? 
- return make([]byte, 0, 4096) + return internal.MakeQueryBytes() } diff --git a/internal/util.go b/internal/util.go index 66b92b3c5..3391351ab 100644 --- a/internal/util.go +++ b/internal/util.go @@ -79,3 +79,9 @@ func indirectNil(v reflect.Value) reflect.Value { } return v } + +// MakeQueryBytes returns zero-length byte slice with capacity of 4096. +func MakeQueryBytes() []byte { + // TODO: make this configurable? + return make([]byte, 0, 4096) +} \ No newline at end of file From 245e9686ae99390091b439adc8f64f01ebe72061 Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Sun, 3 Nov 2024 09:50:59 +0100 Subject: [PATCH 33/55] refactor: append SQL in dialects but delegate execution to migrator - Add -Op suffix to all operations - Replace Apply() method with AppendSQL() in Migrator interface - Add snapshot tests for SQL generation - Suspended support for renaming constraints Included bugfixes: - changeColumnType uses "type equivalence" to update varchar length if needed - addColumn supports DEFAULT clause --- dialect/pgdialect/alter_table.go | 238 ++++++--------- internal/dbtest/migrate_test.go | 281 +----------------- internal/dbtest/query_test.go | 191 ++++++++++++ ...lterTable-pg-add_column_with_default_value | 1 + ...TestAlterTable-pg-add_column_with_identity | 1 + .../snapshots/TestAlterTable-pg-add_default | 1 + .../TestAlterTable-pg-add_foreign_key | 1 + .../snapshots/TestAlterTable-pg-add_identity | 1 + .../snapshots/TestAlterTable-pg-add_notnull | 1 + .../TestAlterTable-pg-add_primary_key | 1 + .../TestAlterTable-pg-add_unique_constraint | 1 + ...rTable-pg-change_column_type_int_to_bigint | 1 + .../TestAlterTable-pg-change_primary_key | 1 + .../snapshots/TestAlterTable-pg-create_table | 1 + .../snapshots/TestAlterTable-pg-drop_column | 1 + .../snapshots/TestAlterTable-pg-drop_default | 1 + .../TestAlterTable-pg-drop_foreign_key | 1 + .../snapshots/TestAlterTable-pg-drop_identity | 1 + .../TestAlterTable-pg-drop_primary_key | 1 + 
.../snapshots/TestAlterTable-pg-drop_table | 1 + .../TestAlterTable-pg-drop_unique_constraint | 1 + .../TestAlterTable-pg-increase_varchar_length | 1 + .../snapshots/TestAlterTable-pg-make_nullable | 1 + .../snapshots/TestAlterTable-pg-rename_column | 1 + .../snapshots/TestAlterTable-pg-rename_table | 1 + ...terTable-pgx-add_column_with_default_value | 1 + ...estAlterTable-pgx-add_column_with_identity | 1 + .../snapshots/TestAlterTable-pgx-add_default | 1 + .../TestAlterTable-pgx-add_foreign_key | 1 + .../snapshots/TestAlterTable-pgx-add_identity | 1 + .../snapshots/TestAlterTable-pgx-add_notnull | 1 + .../TestAlterTable-pgx-add_primary_key | 1 + .../TestAlterTable-pgx-add_unique_constraint | 1 + ...Table-pgx-change_column_type_int_to_bigint | 1 + .../TestAlterTable-pgx-change_primary_key | 1 + .../snapshots/TestAlterTable-pgx-create_table | 1 + .../snapshots/TestAlterTable-pgx-drop_column | 1 + .../snapshots/TestAlterTable-pgx-drop_default | 1 + .../TestAlterTable-pgx-drop_foreign_key | 1 + .../TestAlterTable-pgx-drop_identity | 1 + .../TestAlterTable-pgx-drop_primary_key | 1 + .../snapshots/TestAlterTable-pgx-drop_table | 1 + .../TestAlterTable-pgx-drop_unique_constraint | 1 + ...TestAlterTable-pgx-increase_varchar_length | 1 + .../TestAlterTable-pgx-make_nullable | 1 + .../TestAlterTable-pgx-rename_column | 1 + .../snapshots/TestAlterTable-pgx-rename_table | 1 + migrate/diff.go | 58 ++-- migrate/operations.go | 184 ++++++------ migrate/sqlschema/migrator.go | 19 +- 50 files changed, 454 insertions(+), 561 deletions(-) create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_column_with_default_value create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_column_with_identity create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_default create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_foreign_key create mode 100644 
internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_identity create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_notnull create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_primary_key create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_unique_constraint create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pg-change_column_type_int_to_bigint create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pg-change_primary_key create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pg-create_table create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_column create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_default create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_foreign_key create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_identity create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_primary_key create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_table create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_unique_constraint create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pg-increase_varchar_length create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pg-make_nullable create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pg-rename_column create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pg-rename_table create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_column_with_default_value create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_column_with_identity create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_default create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_foreign_key create mode 100644 
internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_identity create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_notnull create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_primary_key create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_unique_constraint create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pgx-change_column_type_int_to_bigint create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pgx-change_primary_key create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pgx-create_table create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_column create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_default create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_foreign_key create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_identity create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_primary_key create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_table create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_unique_constraint create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pgx-increase_varchar_length create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pgx-make_nullable create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pgx-rename_column create mode 100644 internal/dbtest/testdata/snapshots/TestAlterTable-pgx-rename_table diff --git a/dialect/pgdialect/alter_table.go b/dialect/pgdialect/alter_table.go index a1ac7f333..bfb118ade 100644 --- a/dialect/pgdialect/alter_table.go +++ b/dialect/pgdialect/alter_table.go @@ -1,12 +1,9 @@ package pgdialect import ( - "context" "fmt" - "log" "github.com/uptrace/bun" - "github.com/uptrace/bun/internal" "github.com/uptrace/bun/migrate" "github.com/uptrace/bun/migrate/sqlschema" 
"github.com/uptrace/bun/schema" @@ -24,115 +21,64 @@ type migrator struct { var _ sqlschema.Migrator = (*migrator)(nil) -func (m *migrator) Apply(ctx context.Context, changes ...interface{}) error { - if len(changes) == 0 { - return nil - } - var conn bun.IConn - var err error - - if conn, err = m.db.Conn(ctx); err != nil { - return err - } - +func (m *migrator) AppendSQL(b []byte, operation interface{}) (_ []byte, err error) { fmter := m.db.Formatter() - for _, change := range changes { - var b []byte // TODO(dyma): call db.MakeQueryBytes - - switch change := change.(type) { - case *migrate.CreateTable: - log.Printf("create table %q", change.FQN.Table) - err = m.CreateTable(ctx, change.Model) - if err != nil { - return fmt.Errorf("apply changes: create table %s: %w", change.FQN, err) - } - continue - case *migrate.DropTable: - log.Printf("drop table %q", change.FQN.Table) - err = m.DropTable(ctx, change.FQN) - if err != nil { - return fmt.Errorf("apply changes: drop table %s: %w", change.FQN, err) - } - continue - case *migrate.ChangePrimaryKey: - // TODO: refactor! 
- b, err = m.dropConstraint(fmter, b, change.FQN, change.Old.Name) - if err != nil { - return fmt.Errorf("apply changes: %w", err) - } - - query := internal.String(b) - log.Println("exec query: " + query) - if _, err = conn.ExecContext(ctx, query); err != nil { - return fmt.Errorf("apply changes: %w", err) - } - - b = []byte{} - b, err = m.addPrimaryKey(fmter, b, change.FQN, change.New.Columns.Safe()) - if err != nil { - return fmt.Errorf("apply changes: %w", err) - } - - query = internal.String(b) - log.Println("exec query: " + query) - if _, err = conn.ExecContext(ctx, query); err != nil { - return fmt.Errorf("apply changes: %w", err) - } - continue - case *migrate.RenameTable: - b, err = m.renameTable(fmter, b, change) - case *migrate.RenameColumn: - b, err = m.renameColumn(fmter, b, change) - case *migrate.AddColumn: - b, err = m.addColumn(fmter, b, change) - case *migrate.DropColumn: - b, err = m.dropColumn(fmter, b, change) - case *migrate.AddPrimaryKey: - b, err = m.addPrimaryKey(fmter, b, change.FQN, change.PK.Columns.Safe()) - case *migrate.AddForeignKey: - b, err = m.addForeignKey(fmter, b, change) - case *migrate.AddUniqueConstraint: - b, err = m.addUnique(fmter, b, change) - case *migrate.DropUniqueConstraint: - b, err = m.dropConstraint(fmter, b, change.FQN, change.Unique.Name) - case *migrate.DropConstraint: - b, err = m.dropConstraint(fmter, b, change.FQN(), change.ConstraintName) - case *migrate.DropPrimaryKey: - b, err = m.dropConstraint(fmter, b, change.FQN, change.PK.Name) - case *migrate.RenameConstraint: - b, err = m.renameConstraint(fmter, b, change) - case *migrate.ChangeColumnType: - b, err = m.changeColumnType(fmter, b, change) - default: - return fmt.Errorf("apply changes: unknown operation %T", change) - } - if err != nil { - return fmt.Errorf("apply changes: %w", err) - } - query := internal.String(b) - log.Println("exec query: " + query) - if _, err = conn.ExecContext(ctx, query); err != nil { - return fmt.Errorf("apply changes: %w", 
err) - } + // Append ALTER TABLE statement to the enclosed query bytes []byte. + appendAlterTable := func(query []byte, fqn schema.FQN) []byte { + query = append(query, "ALTER TABLE "...) + query, _ = fqn.AppendQuery(fmter, query) + return append(query, " "...) + } + + switch change := operation.(type) { + case *migrate.CreateTableOp: + return m.AppendCreateTable(b, change.Model) + case *migrate.DropTableOp: + return m.AppendDropTable(b, change.FQN) + case *migrate.RenameTableOp: + b, err = m.renameTable(fmter, appendAlterTable(b, change.FQN), change) + case *migrate.RenameColumnOp: + b, err = m.renameColumn(fmter, appendAlterTable(b, change.FQN), change) + case *migrate.AddColumnOp: + b, err = m.addColumn(fmter, appendAlterTable(b, change.FQN), change) + case *migrate.DropColumnOp: + b, err = m.dropColumn(fmter, appendAlterTable(b, change.FQN), change) + case *migrate.AddPrimaryKeyOp: + b, err = m.addPrimaryKey(fmter, appendAlterTable(b, change.FQN), change.PK.Columns.Safe()) + case *migrate.ChangePrimaryKeyOp: + b, err = m.changePrimaryKey(fmter, appendAlterTable(b, change.FQN), change) + case *migrate.DropPrimaryKeyOp: + b, err = m.dropConstraint(fmter, appendAlterTable(b, change.FQN), change.PK.Name) + case *migrate.AddUniqueConstraintOp: + b, err = m.addUnique(fmter, appendAlterTable(b, change.FQN), change) + case *migrate.DropUniqueConstraintOp: + b, err = m.dropConstraint(fmter, appendAlterTable(b, change.FQN), change.Unique.Name) + case *migrate.ChangeColumnTypeOp: + b, err = m.changeColumnType(fmter, appendAlterTable(b, change.FQN), change) + case *migrate.AddForeignKeyOp: + b, err = m.addForeignKey(fmter, appendAlterTable(b, change.FQN()), change) + case *migrate.DropForeignKeyOp: + b, err = m.dropConstraint(fmter, appendAlterTable(b, change.FQN()), change.ConstraintName) + // case *migrate.RenameForeignKeyOp: + // b, err = m.renameConstraint(fmter, b, change) + default: + return nil, fmt.Errorf("append sql: unknown operation %T", change) + } + if err != 
nil { + return nil, fmt.Errorf("append sql: %w", err) } - return nil + return b, nil } -func (m *migrator) renameTable(fmter schema.Formatter, b []byte, rename *migrate.RenameTable) (_ []byte, err error) { - b = append(b, "ALTER TABLE "...) - b, _ = rename.FQN.AppendQuery(fmter, b) - - b = append(b, " RENAME TO "...) +func (m *migrator) renameTable(fmter schema.Formatter, b []byte, rename *migrate.RenameTableOp) (_ []byte, err error) { + b = append(b, "RENAME TO "...) b = fmter.AppendName(b, rename.NewName) return b, nil } -func (m *migrator) renameColumn(fmter schema.Formatter, b []byte, rename *migrate.RenameColumn) (_ []byte, err error) { - b = append(b, "ALTER TABLE "...) - b, _ = rename.FQN.AppendQuery(fmter, b) - - b = append(b, " RENAME COLUMN "...) +func (m *migrator) renameColumn(fmter schema.Formatter, b []byte, rename *migrate.RenameColumnOp) (_ []byte, err error) { + b = append(b, "RENAME COLUMN "...) b = fmter.AppendName(b, rename.OldName) b = append(b, " TO "...) @@ -141,16 +87,19 @@ func (m *migrator) renameColumn(fmter schema.Formatter, b []byte, rename *migrat return b, nil } -func (m *migrator) addColumn(fmter schema.Formatter, b []byte, add *migrate.AddColumn) (_ []byte, err error) { - b = append(b, "ALTER TABLE "...) - b, _ = add.FQN.AppendQuery(fmter, b) - - b = append(b, " ADD COLUMN "...) +func (m *migrator) addColumn(fmter schema.Formatter, b []byte, add *migrate.AddColumnOp) (_ []byte, err error) { + b = append(b, "ADD COLUMN "...) b = fmter.AppendName(b, add.Column) b = append(b, " "...) b, _ = add.ColDef.AppendQuery(fmter, b) + if add.ColDef.DefaultValue != "" { + b = append(b, " DEFAULT "...) + b = append(b, add.ColDef.DefaultValue...) + b = append(b, " "...) 
+ } + if add.ColDef.IsIdentity { b = appendGeneratedAsIdentity(b) } @@ -158,17 +107,22 @@ func (m *migrator) addColumn(fmter schema.Formatter, b []byte, add *migrate.AddC return b, nil } -func (m *migrator) dropColumn(fmter schema.Formatter, b []byte, drop *migrate.DropColumn) (_ []byte, err error) { - b = append(b, "ALTER TABLE "...) - b, _ = drop.FQN.AppendQuery(fmter, b) - - b = append(b, " DROP COLUMN "...) +func (m *migrator) dropColumn(fmter schema.Formatter, b []byte, drop *migrate.DropColumnOp) (_ []byte, err error) { + b = append(b, "DROP COLUMN "...) b = fmter.AppendName(b, drop.Column) return b, nil } -func (m *migrator) renameConstraint(fmter schema.Formatter, b []byte, rename *migrate.RenameConstraint) (_ []byte, err error) { +func (m *migrator) addPrimaryKey(fmter schema.Formatter, b []byte, columns schema.Safe) (_ []byte, err error) { + b = append(b, "ADD PRIMARY KEY ("...) + b, _ = columns.AppendQuery(fmter, b) + b = append(b, ")"...) + + return b, nil +} + +func (m *migrator) renameConstraint(fmter schema.Formatter, b []byte, rename *migrate.RenameForeignKeyOp) (_ []byte, err error) { b = append(b, "ALTER TABLE "...) fqn := rename.FQN() if b, err = fqn.AppendQuery(fmter, b); err != nil { @@ -184,26 +138,15 @@ func (m *migrator) renameConstraint(fmter schema.Formatter, b []byte, rename *mi return b, nil } -func (m *migrator) addPrimaryKey(fmter schema.Formatter, b []byte, fqn schema.FQN, columns schema.Safe) (_ []byte, err error) { - b = append(b, "ALTER TABLE "...) - if b, err = fqn.AppendQuery(fmter, b); err != nil { - return b, err - } - - b = append(b, " ADD PRIMARY KEY ("...) - b, _ = columns.AppendQuery(fmter, b) - b = append(b, ")"...) - +func (m *migrator) changePrimaryKey(fmter schema.Formatter, b []byte, change *migrate.ChangePrimaryKeyOp) (_ []byte, err error) { + b, _ = m.dropConstraint(fmter, b, change.Old.Name) + b = append(b, ", "...) 
+ b, _ = m.addPrimaryKey(fmter, b, change.New.Columns.Safe()) return b, nil } -func (m *migrator) addUnique(fmter schema.Formatter, b []byte, change *migrate.AddUniqueConstraint) (_ []byte, err error) { - b = append(b, "ALTER TABLE "...) - if b, err = change.FQN.AppendQuery(fmter, b); err != nil { - return b, err - } - - b = append(b, " ADD CONSTRAINT "...) +func (m *migrator) addUnique(fmter schema.Formatter, b []byte, change *migrate.AddUniqueConstraintOp) (_ []byte, err error) { + b = append(b, "ADD CONSTRAINT "...) if change.Unique.Name != "" { b = fmter.AppendName(b, change.Unique.Name) } else { @@ -217,33 +160,22 @@ func (m *migrator) addUnique(fmter schema.Formatter, b []byte, change *migrate.A return b, nil } -func (m *migrator) dropConstraint(fmter schema.Formatter, b []byte, fqn schema.FQN, name string) (_ []byte, err error) { - b = append(b, "ALTER TABLE "...) - if b, err = fqn.AppendQuery(fmter, b); err != nil { - return b, err - } - - b = append(b, " DROP CONSTRAINT "...) +func (m *migrator) dropConstraint(fmter schema.Formatter, b []byte, name string) (_ []byte, err error) { + b = append(b, "DROP CONSTRAINT "...) b = fmter.AppendName(b, name) return b, nil } -func (m *migrator) addForeignKey(fmter schema.Formatter, b []byte, add *migrate.AddForeignKey) (_ []byte, err error) { - b = append(b, "ALTER TABLE "...) - fqn := add.FQN() - if b, err = fqn.AppendQuery(fmter, b); err != nil { - return b, err - } - - b = append(b, " ADD CONSTRAINT "...) +func (m *migrator) addForeignKey(fmter schema.Formatter, b []byte, add *migrate.AddForeignKeyOp) (_ []byte, err error) { + b = append(b, "ADD CONSTRAINT "...) b = fmter.AppendName(b, add.ConstraintName) b = append(b, " FOREIGN KEY ("...) if b, err = add.FK.From.Column.Safe().AppendQuery(fmter, b); err != nil { return b, err } - b = append(b, ") "...) + b = append(b, ")"...) other := schema.FQN{Schema: add.FK.To.Schema, Table: add.FK.To.Table} b = append(b, " REFERENCES "...) 
@@ -260,24 +192,22 @@ func (m *migrator) addForeignKey(fmter schema.Formatter, b []byte, add *migrate. return b, nil } -func (m *migrator) changeColumnType(fmter schema.Formatter, b []byte, colDef *migrate.ChangeColumnType) (_ []byte, err error) { - b = append(b, "ALTER TABLE "...) - b, _ = colDef.FQN.AppendQuery(fmter, b) - +func (m *migrator) changeColumnType(fmter schema.Formatter, b []byte, colDef *migrate.ChangeColumnTypeOp) (_ []byte, err error) { // alterColumn never re-assigns err, so there is no need to check for err != nil after calling it var i int appendAlterColumn := func() { if i > 0 { - b = append(b, ","...) + b = append(b, ", "...) } - b = append(b, " ALTER COLUMN "...) + b = append(b, "ALTER COLUMN "...) b = fmter.AppendName(b, colDef.Column) i++ } got, want := colDef.From, colDef.To - if want.SQLType != got.SQLType { + inspector := m.db.Dialect().(sqlschema.InspectorDialect) + if !inspector.EquivalentType(want, got) { appendAlterColumn() b = append(b, " SET DATA TYPE "...) 
if b, err = want.AppendQuery(fmter, b); err != nil { diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index a14811f3c..28b45553c 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -203,15 +203,17 @@ func TestAutoMigrator_Run(t *testing.T) { {testRenamedColumns}, {testCreateDropTable}, {testAlterForeignKeys}, - {testCustomFKNameFunc}, - {testForceRenameFK}, - {testRenameColumnRenamesFK}, {testChangeColumnType_AutoCast}, {testIdentity}, {testAddDropColumn}, {testUnique}, {testUniqueRenamedTable}, {testUpdatePrimaryKeys}, + + // Suspended support for renaming foreign keys: + // {testCustomFKNameFunc}, + // {testForceRenameFK}, + // {testRenameColumnRenamesFK}, } testEachDB(t, func(t *testing.T, dbName string, db *bun.DB) { @@ -1022,275 +1024,4 @@ func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) -} - -// // TODO: rewrite these tests into AutoMigrator tests, Diff should be moved to migrate/internal package -// func TestDiff(t *testing.T) { -// type Journal struct { -// ISBN string `bun:"isbn,pk"` -// Title string `bun:"title,notnull"` -// Pages int `bun:"page_count,notnull,default:0"` -// } - -// type Reader struct { -// Username string `bun:",pk,default:gen_random_uuid()"` -// } - -// type ExternalUsers struct { -// bun.BaseModel `bun:"external.users"` -// Name string `bun:",pk"` -// } - -// // ------------------------------------------------------------------------ -// type ThingNoOwner struct { -// bun.BaseModel `bun:"things"` -// ID int64 `bun:"thing_id,pk"` -// OwnerID int64 `bun:",notnull"` -// } - -// type Owner struct { -// ID int64 `bun:",pk"` -// } - -// type Thing struct { -// bun.BaseModel `bun:"things"` -// ID int64 `bun:"thing_id,pk"` -// OwnerID int64 `bun:",notnull"` - -// Owner *Owner `bun:"rel:belongs-to,join:owner_id=id"` -// } - -// testEachDialect(t, func(t 
*testing.T, dialectName string, dialect schema.Dialect) { -// defaultSchema := dialect.DefaultSchema() - -// for _, tt := range []struct { -// name string -// states func(testing.TB, context.Context, schema.Dialect) (stateDb sqlschema.State, stateModel sqlschema.State) -// want []migrate.Operation -// }{ -// { -// name: "1 table renamed, 1 created, 2 dropped", -// states: func(tb testing.TB, ctx context.Context, d schema.Dialect) (stateDb sqlschema.State, stateModel sqlschema.State) { -// // Database state ------------- -// type Subscription struct { -// bun.BaseModel `bun:"table:billing.subscriptions"` -// } -// type Review struct{} - -// type Author struct { -// Name string `bun:"name"` -// } - -// // Model state ------------- -// type JournalRenamed struct { -// bun.BaseModel `bun:"table:journals_renamed"` - -// ISBN string `bun:"isbn,pk"` -// Title string `bun:"title,notnull"` -// Pages int `bun:"page_count,notnull,default:0"` -// } - -// return getState(tb, ctx, d, -// (*Author)(nil), -// (*Journal)(nil), -// (*Review)(nil), -// (*Subscription)(nil), -// ), getState(tb, ctx, d, -// (*Author)(nil), -// (*JournalRenamed)(nil), -// (*Reader)(nil), -// ) -// }, -// want: []migrate.Operation{ -// &migrate.RenameTable{ -// Schema: defaultSchema, -// From: "journals", -// To: "journals_renamed", -// }, -// &migrate.CreateTable{ -// Model: &Reader{}, // (*Reader)(nil) would be more idiomatic, but schema.Tables -// }, -// &migrate.DropTable{ -// Schema: "billing", -// Name: "billing.subscriptions", -// }, -// &migrate.DropTable{ -// Schema: defaultSchema, -// Name: "reviews", -// }, -// }, -// }, -// { -// name: "renaming does not work across schemas", -// states: func(tb testing.TB, ctx context.Context, d schema.Dialect) (stateDb sqlschema.State, stateModel sqlschema.State) { -// // Users have the same columns as the "added" ExternalUsers. -// // However, we should not recognize it as a RENAME, because only models in the same schema can be renamed. 
-// // Instead, this is a DROP + CREATE case. -// type Users struct { -// bun.BaseModel `bun:"external_users"` -// Name string `bun:",pk"` -// } - -// return getState(tb, ctx, d, -// (*Users)(nil), -// ), getState(t, ctx, d, -// (*ExternalUsers)(nil), -// ) -// }, -// want: []migrate.Operation{ -// &migrate.DropTable{ -// Schema: defaultSchema, -// Name: "external_users", -// }, -// &migrate.CreateTable{ -// Model: &ExternalUsers{}, -// }, -// }, -// }, -// { -// name: "detect new FKs on existing columns", -// states: func(t testing.TB, ctx context.Context, d schema.Dialect) (stateDb sqlschema.State, stateModel sqlschema.State) { -// // database state -// type LonelyUser struct { -// bun.BaseModel `bun:"table:users"` -// Username string `bun:",pk"` -// DreamPetKind string `bun:"pet_kind,notnull"` -// DreamPetName string `bun:"pet_name,notnull"` -// ImaginaryFriend string `bun:"friend"` -// } - -// type Pet struct { -// Nickname string `bun:",pk"` -// Kind string `bun:",pk"` -// } - -// // model state -// type HappyUser struct { -// bun.BaseModel `bun:"table:users"` -// Username string `bun:",pk"` -// PetKind string `bun:"pet_kind,notnull"` -// PetName string `bun:"pet_name,notnull"` -// Friend string `bun:"friend"` - -// Pet *Pet `bun:"rel:has-one,join:pet_kind=kind,join:pet_name=nickname"` -// BestFriend *HappyUser `bun:"rel:has-one,join:friend=username"` -// } - -// return getState(t, ctx, d, -// (*LonelyUser)(nil), -// (*Pet)(nil), -// ), getState(t, ctx, d, -// (*HappyUser)(nil), -// (*Pet)(nil), -// ) -// }, -// want: []migrate.Operation{ -// &migrate.AddFK{ -// FK: sqlschema.FK{ -// From: sqlschema.C(defaultSchema, "users", "pet_kind", "pet_name"), -// To: sqlschema.C(defaultSchema, "pets", "kind", "nickname"), -// }, -// ConstraintName: "users_pet_kind_pet_name_fkey", -// }, -// &migrate.AddFK{ -// FK: sqlschema.FK{ -// From: sqlschema.C(defaultSchema, "users", "friend"), -// To: sqlschema.C(defaultSchema, "users", "username"), -// }, -// ConstraintName: 
"users_friend_fkey", -// }, -// }, -// }, -// { -// name: "create FKs for new tables", -// states: func(t testing.TB, ctx context.Context, d schema.Dialect) (stateDb sqlschema.State, stateModel sqlschema.State) { -// return getState(t, ctx, d, -// (*ThingNoOwner)(nil), -// ), getState(t, ctx, d, -// (*Owner)(nil), -// (*Thing)(nil), -// ) -// }, -// want: []migrate.Operation{ -// &migrate.CreateTable{ -// Model: &Owner{}, -// }, -// &migrate.AddFK{ -// FK: sqlschema.FK{ -// From: sqlschema.C(defaultSchema, "things", "owner_id"), -// To: sqlschema.C(defaultSchema, "owners", "id"), -// }, -// ConstraintName: "things_owner_id_fkey", -// }, -// }, -// }, -// { -// name: "drop FKs for dropped tables", -// states: func(t testing.TB, ctx context.Context, d schema.Dialect) (sqlschema.State, sqlschema.State) { -// stateDb := getState(t, ctx, d, (*Owner)(nil), (*Thing)(nil)) -// stateModel := getState(t, ctx, d, (*ThingNoOwner)(nil)) - -// // Normally a database state will have the names of the constraints filled in, but we need to mimic that for the test. 
-// stateDb.FKs[sqlschema.FK{ -// From: sqlschema.C(d.DefaultSchema(), "things", "owner_id"), -// To: sqlschema.C(d.DefaultSchema(), "owners", "id"), -// }] = "test_fkey" -// return stateDb, stateModel -// }, -// want: []migrate.Operation{ -// &migrate.DropTable{ -// Schema: defaultSchema, -// Name: "owners", -// }, -// &migrate.DropFK{ -// FK: sqlschema.FK{ -// From: sqlschema.C(defaultSchema, "things", "owner_id"), -// To: sqlschema.C(defaultSchema, "owners", "id"), -// }, -// ConstraintName: "test_fkey", -// }, -// }, -// }, -// } { -// t.Run(tt.name, func(t *testing.T) { -// ctx := context.Background() -// stateDb, stateModel := tt.states(t, ctx, dialect) - -// got := migrate.Diff(stateDb, stateModel).Operations() -// checkEqualChangeset(t, got, tt.want) -// }) -// } -// }) -// } - -// func checkEqualChangeset(tb testing.TB, got, want []migrate.Operation) { -// tb.Helper() - -// // Sort alphabetically to ensure we don't fail because of the wrong order -// sort.Slice(got, func(i, j int) bool { -// return got[i].String() < got[j].String() -// }) -// sort.Slice(want, func(i, j int) bool { -// return want[i].String() < want[j].String() -// }) - -// var cgot, cwant migrate.Changeset -// cgot.Add(got...) -// cwant.Add(want...) - -// require.Equal(tb, cwant.String(), cgot.String()) -// } - -// func getState(tb testing.TB, ctx context.Context, dialect schema.Dialect, models ...interface{}) sqlschema.State { -// tb.Helper() - -// tables := schema.NewTables(dialect) -// tables.Register(models...) 
- -// inspector := sqlschema.NewSchemaInspector(tables) -// state, err := inspector.Inspect(ctx) -// if err != nil { -// tb.Skip("get state: %w", err) -// } -// return state -// } +} \ No newline at end of file diff --git a/internal/dbtest/query_test.go b/internal/dbtest/query_test.go index f232d77e2..85a4e4699 100644 --- a/internal/dbtest/query_test.go +++ b/internal/dbtest/query_test.go @@ -9,8 +9,13 @@ import ( "time" "github.com/bradleyjkemp/cupaloy" + "github.com/stretchr/testify/require" "github.com/uptrace/bun" + "github.com/uptrace/bun/dialect/sqltype" + "github.com/uptrace/bun/internal" + "github.com/uptrace/bun/migrate" + "github.com/uptrace/bun/migrate/sqlschema" "github.com/uptrace/bun/schema" ) @@ -1576,3 +1581,189 @@ func TestQuery(t *testing.T) { } }) } + +func TestAlterTable(t *testing.T) { + type Movie struct { + bun.BaseModel `bun:"table:hobbies.movies"` + ID string + Director string `bun:"director,notnull"` + Budget int32 + ReleaseDate time.Time + HasOscar bool + Genre string + } + + fqn := schema.FQN{Schema: "hobbies", Table: "movies"} + + tests := []struct { + name string + operation interface{} + }{ + {name: "create table", operation: &migrate.CreateTableOp{ + FQN: fqn, + Model: (*Movie)(nil), + }}, + {name: "drop table", operation: &migrate.DropTableOp{ + FQN: fqn, + }}, + {name: "rename table", operation: &migrate.RenameTableOp{ + FQN: fqn, + NewName: "films", + }}, + {name: "rename column", operation: &migrate.RenameColumnOp{ + FQN: fqn, + OldName: "has_oscar", + NewName: "has_awards", + }}, + {name: "add column with default value", operation: &migrate.AddColumnOp{ + FQN: fqn, + Column: "language", + ColDef: sqlschema.Column{ + SQLType: "varchar", + VarcharLen: 20, + IsNullable: false, + DefaultValue: "'en-GB'", + }, + }}, + {name: "add column with identity", operation: &migrate.AddColumnOp{ + FQN: fqn, + Column: "n", + ColDef: sqlschema.Column{ + SQLType: sqltype.BigInt, + IsNullable: false, + IsIdentity: true, + }, + }}, + {name: "drop 
column", operation: &migrate.DropColumnOp{ + FQN: fqn, + Column: "director", + ColDef: sqlschema.Column{ + SQLType: sqltype.VarChar, + IsNullable: false, + }, + }}, + {name: "add unique constraint", operation: &migrate.AddUniqueConstraintOp{ + FQN: fqn, + Unique: sqlschema.Unique{ + Name: "one_genre_per_director", + Columns: sqlschema.NewComposite("genre", "director"), + }, + }}, + {name: "drop unique constraint", operation: &migrate.DropUniqueConstraintOp{ + FQN: fqn, + Unique: sqlschema.Unique{ + Name: "one_genre_per_director", + Columns: sqlschema.NewComposite("genre", "director"), + }, + }}, + {name: "change column type int to bigint", operation: &migrate.ChangeColumnTypeOp{ + FQN: fqn, + Column: "budget", + From: sqlschema.Column{SQLType: sqltype.Integer}, + To: sqlschema.Column{SQLType: sqltype.BigInt}, + }}, + {name: "add default", operation: &migrate.ChangeColumnTypeOp{ + FQN: fqn, + Column: "budget", + From: sqlschema.Column{DefaultValue: ""}, + To: sqlschema.Column{DefaultValue: "100"}, + }}, + {name: "drop default", operation: &migrate.ChangeColumnTypeOp{ + FQN: fqn, + Column: "budget", + From: sqlschema.Column{DefaultValue: "100"}, + To: sqlschema.Column{DefaultValue: ""}, + }}, + {name: "make nullable", operation: &migrate.ChangeColumnTypeOp{ + FQN: fqn, + Column: "director", + From: sqlschema.Column{IsNullable: false}, + To: sqlschema.Column{IsNullable: true}, + }}, + {name: "add notnull", operation: &migrate.ChangeColumnTypeOp{ + FQN: fqn, + Column: "budget", + From: sqlschema.Column{IsNullable: true}, + To: sqlschema.Column{IsNullable: false}, + }}, + {name: "increase varchar length", operation: &migrate.ChangeColumnTypeOp{ + FQN: fqn, + Column: "language", + From: sqlschema.Column{SQLType: "varchar", VarcharLen: 20}, + To: sqlschema.Column{SQLType: "varchar", VarcharLen: 255}, + }}, + {name: "add identity", operation: &migrate.ChangeColumnTypeOp{ + FQN: fqn, + Column: "id", + From: sqlschema.Column{IsIdentity: false}, + To: 
sqlschema.Column{IsIdentity: true}, + }}, + {name: "drop identity", operation: &migrate.ChangeColumnTypeOp{ + FQN: fqn, + Column: "id", + From: sqlschema.Column{IsIdentity: true}, + To: sqlschema.Column{IsIdentity: false}, + }}, + {name: "add primary key", operation: &migrate.AddPrimaryKeyOp{ + FQN: fqn, + PK: &sqlschema.PK{ + Name: "new_pk", + Columns: sqlschema.NewComposite("id"), + }, + }}, + {name: "drop primary key", operation: &migrate.DropPrimaryKeyOp{ + FQN: fqn, + PK: &sqlschema.PK{ + Name: "new_pk", + Columns: sqlschema.NewComposite("id"), + }, + }}, + {name: "change primary key", operation: &migrate.ChangePrimaryKeyOp{ + FQN: fqn, + Old: &sqlschema.PK{ + Name: "old_pk", + Columns: sqlschema.NewComposite("id"), + }, + New: &sqlschema.PK{ + Name: "new_pk", + Columns: sqlschema.NewComposite("director", "genre"), + }, + }}, + {name: "add foreign key", operation: &migrate.AddForeignKeyOp{ + ConstraintName: "genre_description", + FK: sqlschema.FK{ + From: sqlschema.C("hobbies", "movies", "genre"), + To: sqlschema.C("wiki", "film_genres", "id"), + }, + }}, + {name: "drop foreign key", operation: &migrate.DropForeignKeyOp{ + ConstraintName: "genre_description", + FK: sqlschema.FK{ + From: sqlschema.C("hobbies", "movies", "genre"), + To: sqlschema.C("wiki", "film_genres", "id"), + }, + }}, + } + + testEachDB(t, func(t *testing.T, dbName string, db *bun.DB) { + migrator, err := sqlschema.NewMigrator(db) + if err != nil { + t.Skip(err) + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + b := internal.MakeQueryBytes() + + b, err := migrator.AppendSQL(b, tt.operation) + require.NoError(t, err, "append sql") + + if err == nil { + cupaloy.SnapshotT(t, string(b)) + } else { + cupaloy.SnapshotT(t, err.Error()) + } + }) + } + }) +} diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_column_with_default_value b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_column_with_default_value new file mode 100644 index 
000000000..a64d90cf2 --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_column_with_default_value @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" ADD COLUMN "language" varchar(20) DEFAULT 'en-GB' diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_column_with_identity b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_column_with_identity new file mode 100644 index 000000000..138fde908 --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_column_with_identity @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" ADD COLUMN "n" BIGINT GENERATED BY DEFAULT AS IDENTITY diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_default b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_default new file mode 100644 index 000000000..0e9a0ffac --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_default @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" ALTER COLUMN "budget" SET DEFAULT 100 diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_foreign_key b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_foreign_key new file mode 100644 index 000000000..fb4ab1c49 --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_foreign_key @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" ADD CONSTRAINT "genre_description" FOREIGN KEY (genre) REFERENCES "wiki"."film_genres" (id) diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_identity b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_identity new file mode 100644 index 000000000..26f37ff66 --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_identity @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" ALTER COLUMN "id" ADD GENERATED BY DEFAULT AS IDENTITY diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_notnull b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_notnull new file mode 100644 index 
000000000..303faec4b --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_notnull @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" ALTER COLUMN "budget" SET NOT NULL diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_primary_key b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_primary_key new file mode 100644 index 000000000..06f8c9f0c --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_primary_key @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" ADD PRIMARY KEY (id) diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_unique_constraint b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_unique_constraint new file mode 100644 index 000000000..d7510186e --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_unique_constraint @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" ADD CONSTRAINT "one_genre_per_director" UNIQUE (director,genre) diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pg-change_column_type_int_to_bigint b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-change_column_type_int_to_bigint new file mode 100644 index 000000000..9d275beaa --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-change_column_type_int_to_bigint @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" ALTER COLUMN "budget" SET DATA TYPE BIGINT diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pg-change_primary_key b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-change_primary_key new file mode 100644 index 000000000..0cd97781c --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-change_primary_key @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" DROP CONSTRAINT "old_pk", ADD PRIMARY KEY (director,genre) diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pg-create_table b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-create_table new file mode 100644 index 000000000..ae5b13ceb --- 
/dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-create_table @@ -0,0 +1 @@ +CREATE TABLE "hobbies"."movies" ("id" VARCHAR, "director" VARCHAR NOT NULL, "budget" INTEGER, "release_date" TIMESTAMPTZ, "has_oscar" BOOLEAN, "genre" VARCHAR) diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_column b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_column new file mode 100644 index 000000000..3d6912bf4 --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_column @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" DROP COLUMN "director" diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_default b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_default new file mode 100644 index 000000000..87256b25f --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_default @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" ALTER COLUMN "budget" DROP DEFAULT diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_foreign_key b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_foreign_key new file mode 100644 index 000000000..1779d1979 --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_foreign_key @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" DROP CONSTRAINT "genre_description" diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_identity b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_identity new file mode 100644 index 000000000..f53653126 --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_identity @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" ALTER COLUMN "id" DROP IDENTITY diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_primary_key b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_primary_key new file mode 100644 index 000000000..0fa22de35 --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_primary_key @@ 
-0,0 +1 @@ +ALTER TABLE "hobbies"."movies" DROP CONSTRAINT "new_pk" diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_table b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_table new file mode 100644 index 000000000..f74af4196 --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_table @@ -0,0 +1 @@ +DROP TABLE hobbies.movies diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_unique_constraint b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_unique_constraint new file mode 100644 index 000000000..a332ef3a2 --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_unique_constraint @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" DROP CONSTRAINT "one_genre_per_director" diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pg-increase_varchar_length b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-increase_varchar_length new file mode 100644 index 000000000..a244e5d68 --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-increase_varchar_length @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" ALTER COLUMN "language" SET DATA TYPE varchar(255) diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pg-make_nullable b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-make_nullable new file mode 100644 index 000000000..e2a6730a9 --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-make_nullable @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" ALTER COLUMN "director" DROP NOT NULL diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pg-rename_column b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-rename_column new file mode 100644 index 000000000..96778e78d --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-rename_column @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" RENAME COLUMN "has_oscar" TO "has_awards" diff --git 
a/internal/dbtest/testdata/snapshots/TestAlterTable-pg-rename_table b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-rename_table new file mode 100644 index 000000000..7711e43d9 --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-rename_table @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" RENAME TO "films" diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_column_with_default_value b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_column_with_default_value new file mode 100644 index 000000000..a64d90cf2 --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_column_with_default_value @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" ADD COLUMN "language" varchar(20) DEFAULT 'en-GB' diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_column_with_identity b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_column_with_identity new file mode 100644 index 000000000..138fde908 --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_column_with_identity @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" ADD COLUMN "n" BIGINT GENERATED BY DEFAULT AS IDENTITY diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_default b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_default new file mode 100644 index 000000000..0e9a0ffac --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_default @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" ALTER COLUMN "budget" SET DEFAULT 100 diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_foreign_key b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_foreign_key new file mode 100644 index 000000000..fb4ab1c49 --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_foreign_key @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" ADD CONSTRAINT "genre_description" FOREIGN KEY (genre) REFERENCES "wiki"."film_genres" (id) diff --git 
a/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_identity b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_identity new file mode 100644 index 000000000..26f37ff66 --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_identity @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" ALTER COLUMN "id" ADD GENERATED BY DEFAULT AS IDENTITY diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_notnull b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_notnull new file mode 100644 index 000000000..303faec4b --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_notnull @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" ALTER COLUMN "budget" SET NOT NULL diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_primary_key b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_primary_key new file mode 100644 index 000000000..06f8c9f0c --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_primary_key @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" ADD PRIMARY KEY (id) diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_unique_constraint b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_unique_constraint new file mode 100644 index 000000000..d7510186e --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_unique_constraint @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" ADD CONSTRAINT "one_genre_per_director" UNIQUE (director,genre) diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-change_column_type_int_to_bigint b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-change_column_type_int_to_bigint new file mode 100644 index 000000000..9d275beaa --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-change_column_type_int_to_bigint @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" ALTER COLUMN "budget" SET DATA TYPE BIGINT diff --git 
a/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-change_primary_key b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-change_primary_key new file mode 100644 index 000000000..0cd97781c --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-change_primary_key @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" DROP CONSTRAINT "old_pk", ADD PRIMARY KEY (director,genre) diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-create_table b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-create_table new file mode 100644 index 000000000..ae5b13ceb --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-create_table @@ -0,0 +1 @@ +CREATE TABLE "hobbies"."movies" ("id" VARCHAR, "director" VARCHAR NOT NULL, "budget" INTEGER, "release_date" TIMESTAMPTZ, "has_oscar" BOOLEAN, "genre" VARCHAR) diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_column b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_column new file mode 100644 index 000000000..3d6912bf4 --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_column @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" DROP COLUMN "director" diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_default b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_default new file mode 100644 index 000000000..87256b25f --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_default @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" ALTER COLUMN "budget" DROP DEFAULT diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_foreign_key b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_foreign_key new file mode 100644 index 000000000..1779d1979 --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_foreign_key @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" DROP CONSTRAINT "genre_description" diff --git 
a/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_identity b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_identity new file mode 100644 index 000000000..f53653126 --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_identity @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" ALTER COLUMN "id" DROP IDENTITY diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_primary_key b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_primary_key new file mode 100644 index 000000000..0fa22de35 --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_primary_key @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" DROP CONSTRAINT "new_pk" diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_table b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_table new file mode 100644 index 000000000..f74af4196 --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_table @@ -0,0 +1 @@ +DROP TABLE hobbies.movies diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_unique_constraint b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_unique_constraint new file mode 100644 index 000000000..a332ef3a2 --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_unique_constraint @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" DROP CONSTRAINT "one_genre_per_director" diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-increase_varchar_length b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-increase_varchar_length new file mode 100644 index 000000000..a244e5d68 --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-increase_varchar_length @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" ALTER COLUMN "language" SET DATA TYPE varchar(255) diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-make_nullable 
b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-make_nullable new file mode 100644 index 000000000..e2a6730a9 --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-make_nullable @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" ALTER COLUMN "director" DROP NOT NULL diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-rename_column b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-rename_column new file mode 100644 index 000000000..96778e78d --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-rename_column @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" RENAME COLUMN "has_oscar" TO "has_awards" diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-rename_table b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-rename_table new file mode 100644 index 000000000..7711e43d9 --- /dev/null +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-rename_table @@ -0,0 +1 @@ +ALTER TABLE "hobbies"."movies" RENAME TO "films" diff --git a/migrate/diff.go b/migrate/diff.go index 79e401b91..fa1743671 100644 --- a/migrate/diff.go +++ b/migrate/diff.go @@ -7,6 +7,7 @@ import ( "strings" "github.com/uptrace/bun" + "github.com/uptrace/bun/internal" "github.com/uptrace/bun/migrate/sqlschema" "github.com/uptrace/bun/schema" ) @@ -29,7 +30,7 @@ AddedLoop: removedTables := currentTables.Sub(targetTables) for _, removed := range removedTables.Values() { if d.canRename(removed, added) { - d.changes.Add(&RenameTable{ + d.changes.Add(&RenameTableOp{ FQN: schema.FQN{Schema: removed.Schema, Table: removed.Name}, NewName: added.Name, }) @@ -52,7 +53,7 @@ AddedLoop: } } // If a new table did not appear because of the rename operation, then it must've been created. 
- d.changes.Add(&CreateTable{ + d.changes.Add(&CreateTableOp{ FQN: schema.FQN{Schema: added.Schema, Table: added.Name}, Model: added.Model, }) @@ -62,7 +63,7 @@ AddedLoop: // Tables that aren't present anymore and weren't renamed or left untouched were deleted. dropped := currentTables.Sub(targetTables) for _, t := range dropped.Values() { - d.changes.Add(&DropTable{ + d.changes.Add(&DropTableOp{ FQN: schema.FQN{Schema: t.Schema, Table: t.Name}, }) } @@ -96,7 +97,7 @@ AddedLoop: // Add RenameFK migrations for updated FKs. for old, renamed := range d.refMap.Updated() { newName := d.fkNameFunc(renamed) - d.changes.Add(&RenameConstraint{ + d.changes.Add(&RenameForeignKeyOp{ FK: renamed, // TODO: make sure this is applied after the table/columns are renamed OldName: d.current.FKs[old], NewName: newName, @@ -111,7 +112,7 @@ AddedLoop: // Add AddFK migrations for newly added FKs. for fk := range d.target.FKs { if _, ok := currentFKs[fk]; !ok { - d.changes.Add(&AddForeignKey{ + d.changes.Add(&AddForeignKeyOp{ FK: fk, ConstraintName: d.fkNameFunc(fk), }) @@ -121,7 +122,7 @@ AddedLoop: // Add DropFK migrations for removed FKs. for fk, fkName := range currentFKs { if _, ok := d.target.FKs[fk]; !ok { - d.changes.Add(&DropConstraint{ + d.changes.Add(&DropForeignKeyOp{ FK: fk, ConstraintName: fkName, }) @@ -144,11 +145,7 @@ func (c *changeset) Add(op ...Operation) { // Func creates a MigrationFunc that applies all operations all the changeset. func (c *changeset) Func(m sqlschema.Migrator) MigrationFunc { return func(ctx context.Context, db *bun.DB) error { - var operations []interface{} - for _, op := range c.operations { - operations = append(operations, op.(interface{})) - } - return m.Apply(ctx, operations...) + return c.apply(ctx, db, m) } } @@ -166,6 +163,27 @@ func (c *changeset) Down(m sqlschema.Migrator) MigrationFunc { return reverse.Func(m) } +// apply generates SQL for each operation and executes it. 
+func (c *changeset) apply(ctx context.Context, db *bun.DB, m sqlschema.Migrator) error { + if len(c.operations) == 0 { + return nil + } + + for _, op := range c.operations { + b := internal.MakeQueryBytes() + b, err := m.AppendSQL(b, op) + if err != nil { + return fmt.Errorf("apply changes: %w", err) + } + + query := internal.String(b) + if _, err = db.ExecContext(ctx, query); err != nil { + return fmt.Errorf("apply changes: %w", err) + } + } + return nil +} + func (c *changeset) ResolveDependencies() error { if len(c.operations) <= 1 { return nil @@ -350,7 +368,7 @@ ChangedRenamed: // check that we do not try to rename a column to an already a name that already exists. if cCol, ok := current.Columns[tName]; ok { if checkType && !d.equalColumns(cCol, tCol) { - d.changes.Add(&ChangeColumnType{ + d.changes.Add(&ChangeColumnTypeOp{ FQN: fqn, Column: tName, From: cCol, @@ -367,7 +385,7 @@ ChangedRenamed: if _, exists := target.Columns[cName]; exists || !d.equalColumns(tCol, cCol) { continue } - d.changes.Add(&RenameColumn{ + d.changes.Add(&RenameColumnOp{ FQN: fqn, OldName: cName, NewName: tName, @@ -381,7 +399,7 @@ ChangedRenamed: continue ChangedRenamed } - d.changes.Add(&AddColumn{ + d.changes.Add(&AddColumnOp{ FQN: fqn, Column: tName, ColDef: tCol, @@ -391,7 +409,7 @@ ChangedRenamed: // Drop columns which do not exist in the target schema and were not renamed. 
for cName, cCol := range current.Columns { if _, keep := target.Columns[cName]; !keep { - d.changes.Add(&DropColumn{ + d.changes.Add(&DropColumnOp{ FQN: fqn, Column: cName, ColDef: cCol, @@ -410,7 +428,7 @@ Add: continue Add } } - d.changes.Add(&AddUniqueConstraint{ + d.changes.Add(&AddUniqueConstraintOp{ FQN: fqn, Unique: want, }) @@ -424,7 +442,7 @@ Drop: } } - d.changes.Add(&DropUniqueConstraint{ + d.changes.Add(&DropUniqueConstraintOp{ FQN: fqn, Unique: got, }) @@ -436,17 +454,17 @@ Drop: } switch { case target.PK == nil && current.PK != nil: - d.changes.Add(&DropPrimaryKey{ + d.changes.Add(&DropPrimaryKeyOp{ FQN: fqn, PK: current.PK, }) case current.PK == nil && target.PK != nil: - d.changes.Add(&AddPrimaryKey{ + d.changes.Add(&AddPrimaryKeyOp{ FQN: fqn, PK: target.PK, }) case target.PK.Columns != current.PK.Columns: - d.changes.Add(&ChangePrimaryKey{ + d.changes.Add(&ChangePrimaryKeyOp{ FQN: fqn, Old: current.PK, New: target.PK, diff --git a/migrate/operations.go b/migrate/operations.go index 5c0ac47b8..fffa9489b 100644 --- a/migrate/operations.go +++ b/migrate/operations.go @@ -11,26 +11,26 @@ type Operation interface { GetReverse() Operation } -// CreateTable -type CreateTable struct { +// CreateTableOp +type CreateTableOp struct { FQN schema.FQN Model interface{} } -var _ Operation = (*CreateTable)(nil) +var _ Operation = (*CreateTableOp)(nil) -func (op *CreateTable) GetReverse() Operation { - return &DropTable{FQN: op.FQN} +func (op *CreateTableOp) GetReverse() Operation { + return &DropTableOp{FQN: op.FQN} } -type DropTable struct { +type DropTableOp struct { FQN schema.FQN } -var _ Operation = (*DropTable)(nil) +var _ Operation = (*DropTableOp)(nil) -func (op *DropTable) DependsOn(another Operation) bool { - d, ok := another.(*DropConstraint) +func (op *DropTableOp) DependsOn(another Operation) bool { + d, ok := another.(*DropForeignKeyOp) // return ok && ((d.FK.From.Schema == op.FQN.Schema && d.FK.From.Table == op.FQN.Table) || (d.FK.To.Schema == 
op.FQN.Schema && d.FK.To.Table == op.FQN.Table)) @@ -41,82 +41,82 @@ func (op *DropTable) DependsOn(another Operation) bool { // // TODO: we can fetch table definitions for deleted tables // from the database engine and execute them as a raw query. -func (op *DropTable) GetReverse() Operation { +func (op *DropTableOp) GetReverse() Operation { return &noop{} } -type RenameTable struct { +type RenameTableOp struct { FQN schema.FQN NewName string } -var _ Operation = (*RenameTable)(nil) +var _ Operation = (*RenameTableOp)(nil) -func (op *RenameTable) GetReverse() Operation { - return &RenameTable{ +func (op *RenameTableOp) GetReverse() Operation { + return &RenameTableOp{ FQN: schema.FQN{Schema: op.FQN.Schema, Table: op.NewName}, NewName: op.FQN.Table, } } -// RenameColumn. -type RenameColumn struct { +// RenameColumnOp. +type RenameColumnOp struct { FQN schema.FQN OldName string NewName string } -var _ Operation = (*RenameColumn)(nil) +var _ Operation = (*RenameColumnOp)(nil) -func (op *RenameColumn) GetReverse() Operation { - return &RenameColumn{ +func (op *RenameColumnOp) GetReverse() Operation { + return &RenameColumnOp{ FQN: op.FQN, OldName: op.NewName, NewName: op.OldName, } } -func (op *RenameColumn) DependsOn(another Operation) bool { - rt, ok := another.(*RenameTable) +func (op *RenameColumnOp) DependsOn(another Operation) bool { + rt, ok := another.(*RenameTableOp) return ok && rt.FQN.Schema == op.FQN.Schema && rt.NewName == op.FQN.Table } -type AddColumn struct { +type AddColumnOp struct { FQN schema.FQN Column string ColDef sqlschema.Column } -var _ Operation = (*AddColumn)(nil) +var _ Operation = (*AddColumnOp)(nil) -func (op *AddColumn) GetReverse() Operation { - return &DropColumn{ +func (op *AddColumnOp) GetReverse() Operation { + return &DropColumnOp{ FQN: op.FQN, Column: op.Column, ColDef: op.ColDef, } } -type DropColumn struct { +type DropColumnOp struct { FQN schema.FQN Column string ColDef sqlschema.Column } -var _ Operation = (*DropColumn)(nil) 
+var _ Operation = (*DropColumnOp)(nil) -func (op *DropColumn) GetReverse() Operation { - return &AddColumn{ +func (op *DropColumnOp) GetReverse() Operation { + return &AddColumnOp{ FQN: op.FQN, Column: op.Column, ColDef: op.ColDef, } } -func (op *DropColumn) DependsOn(another Operation) bool { +func (op *DropColumnOp) DependsOn(another Operation) bool { // TODO: refactor switch drop := another.(type) { - case *DropConstraint: + case *DropForeignKeyOp: var fCol bool fCols := drop.FK.From.Column.Split() for _, c := range fCols { @@ -138,116 +138,114 @@ func (op *DropColumn) DependsOn(another Operation) bool { return (drop.FK.From.Schema == op.FQN.Schema && drop.FK.From.Table == op.FQN.Table && fCol) || (drop.FK.To.Schema == op.FQN.Schema && drop.FK.To.Table == op.FQN.Table && tCol) - case *DropPrimaryKey: + case *DropPrimaryKeyOp: return op.FQN == drop.FQN && drop.PK.Columns.Contains(op.Column) - case *ChangePrimaryKey: + case *ChangePrimaryKeyOp: return op.FQN == drop.FQN && drop.Old.Columns.Contains(op.Column) } return false } -// RenameConstraint. -type RenameConstraint struct { +// RenameForeignKeyOp. 
+type RenameForeignKeyOp struct { FK sqlschema.FK OldName string NewName string } -var _ Operation = (*RenameConstraint)(nil) +var _ Operation = (*RenameForeignKeyOp)(nil) -func (op *RenameConstraint) FQN() schema.FQN { +func (op *RenameForeignKeyOp) FQN() schema.FQN { return schema.FQN{ Schema: op.FK.From.Schema, Table: op.FK.From.Table, } } -func (op *RenameConstraint) DependsOn(another Operation) bool { - rt, ok := another.(*RenameTable) +func (op *RenameForeignKeyOp) DependsOn(another Operation) bool { + rt, ok := another.(*RenameTableOp) return ok && rt.FQN.Schema == op.FK.From.Schema && rt.NewName == op.FK.From.Table } -func (op *RenameConstraint) GetReverse() Operation { - return &RenameConstraint{ +func (op *RenameForeignKeyOp) GetReverse() Operation { + return &RenameForeignKeyOp{ FK: op.FK, OldName: op.OldName, NewName: op.NewName, } } -type AddForeignKey struct { +type AddForeignKeyOp struct { FK sqlschema.FK ConstraintName string } -var _ Operation = (*AddForeignKey)(nil) +var _ Operation = (*AddForeignKeyOp)(nil) -func (op *AddForeignKey) FQN() schema.FQN { +func (op *AddForeignKeyOp) FQN() schema.FQN { return schema.FQN{ Schema: op.FK.From.Schema, Table: op.FK.From.Table, } } -func (op *AddForeignKey) DependsOn(another Operation) bool { +func (op *AddForeignKeyOp) DependsOn(another Operation) bool { switch another := another.(type) { - case *RenameTable: + case *RenameTableOp: // TODO: provide some sort of "DependsOn" method for FK return another.FQN.Schema == op.FK.From.Schema && another.NewName == op.FK.From.Table - case *CreateTable: + case *CreateTableOp: return (another.FQN.Schema == op.FK.To.Schema && another.FQN.Table == op.FK.To.Table) || // either it's the referencing one (another.FQN.Schema == op.FK.From.Schema && another.FQN.Table == op.FK.From.Table) // or the one being referenced } return false } -func (op *AddForeignKey) GetReverse() Operation { - return &DropConstraint{ +func (op *AddForeignKeyOp) GetReverse() Operation { + return 
&DropForeignKeyOp{ FK: op.FK, ConstraintName: op.ConstraintName, } } -// TODO: Rename to DropForeignKey -// DropConstraint. -type DropConstraint struct { +type DropForeignKeyOp struct { FK sqlschema.FK ConstraintName string } -var _ Operation = (*DropConstraint)(nil) +var _ Operation = (*DropForeignKeyOp)(nil) -func (op *DropConstraint) FQN() schema.FQN { +func (op *DropForeignKeyOp) FQN() schema.FQN { return schema.FQN{ Schema: op.FK.From.Schema, Table: op.FK.From.Table, } } -func (op *DropConstraint) GetReverse() Operation { - return &AddForeignKey{ +func (op *DropForeignKeyOp) GetReverse() Operation { + return &AddForeignKeyOp{ FK: op.FK, ConstraintName: op.ConstraintName, } } -type AddUniqueConstraint struct { +type AddUniqueConstraintOp struct { FQN schema.FQN Unique sqlschema.Unique } -var _ Operation = (*AddUniqueConstraint)(nil) +var _ Operation = (*AddUniqueConstraintOp)(nil) -func (op *AddUniqueConstraint) GetReverse() Operation { - return &DropUniqueConstraint{ +func (op *AddUniqueConstraintOp) GetReverse() Operation { + return &DropUniqueConstraintOp{ FQN: op.FQN, Unique: op.Unique, } } -func (op *AddUniqueConstraint) DependsOn(another Operation) bool { +func (op *AddUniqueConstraintOp) DependsOn(another Operation) bool { switch another := another.(type) { - case *AddColumn: + case *AddColumnOp: var sameColumn bool for _, column := range op.Unique.Columns.Split() { if column == another.Column { @@ -256,9 +254,9 @@ func (op *AddUniqueConstraint) DependsOn(another Operation) bool { } } return op.FQN == another.FQN && sameColumn - case *RenameTable: + case *RenameTableOp: return op.FQN.Schema == another.FQN.Schema && op.FQN.Table == another.NewName - case *DropUniqueConstraint: + case *DropUniqueConstraintOp: // We want to drop the constraint with the same name before adding this one. 
return op.FQN == another.FQN && op.Unique.Name == another.Unique.Name default: @@ -267,39 +265,39 @@ func (op *AddUniqueConstraint) DependsOn(another Operation) bool { } -type DropUniqueConstraint struct { +type DropUniqueConstraintOp struct { FQN schema.FQN Unique sqlschema.Unique } -var _ Operation = (*DropUniqueConstraint)(nil) +var _ Operation = (*DropUniqueConstraintOp)(nil) -func (op *DropUniqueConstraint) DependsOn(another Operation) bool { - if rename, ok := another.(*RenameTable); ok { +func (op *DropUniqueConstraintOp) DependsOn(another Operation) bool { + if rename, ok := another.(*RenameTableOp); ok { return op.FQN.Schema == rename.FQN.Schema && op.FQN.Table == rename.NewName } return false } -func (op *DropUniqueConstraint) GetReverse() Operation { - return &AddUniqueConstraint{ +func (op *DropUniqueConstraintOp) GetReverse() Operation { + return &AddUniqueConstraintOp{ FQN: op.FQN, Unique: op.Unique, } } // Change column type. -type ChangeColumnType struct { +type ChangeColumnTypeOp struct { FQN schema.FQN Column string From sqlschema.Column To sqlschema.Column } -var _ Operation = (*ChangeColumnType)(nil) +var _ Operation = (*ChangeColumnTypeOp)(nil) -func (op *ChangeColumnType) GetReverse() Operation { - return &ChangeColumnType{ +func (op *ChangeColumnTypeOp) GetReverse() Operation { + return &ChangeColumnTypeOp{ FQN: op.FQN, Column: op.Column, From: op.To, @@ -307,68 +305,58 @@ func (op *ChangeColumnType) GetReverse() Operation { } } -type DropPrimaryKey struct { +type DropPrimaryKeyOp struct { FQN schema.FQN PK *sqlschema.PK } -var _ Operation = (*DropPrimaryKey)(nil) +var _ Operation = (*DropPrimaryKeyOp)(nil) -func (op *DropPrimaryKey) GetReverse() Operation { - return &AddPrimaryKey{ +func (op *DropPrimaryKeyOp) GetReverse() Operation { + return &AddPrimaryKeyOp{ FQN: op.FQN, PK: op.PK, } } -type AddPrimaryKey struct { +type AddPrimaryKeyOp struct { FQN schema.FQN PK *sqlschema.PK } -var _ Operation = (*AddPrimaryKey)(nil) +var _ Operation = 
(*AddPrimaryKeyOp)(nil) -func (op *AddPrimaryKey) GetReverse() Operation { - return &DropPrimaryKey{ +func (op *AddPrimaryKeyOp) GetReverse() Operation { + return &DropPrimaryKeyOp{ FQN: op.FQN, PK: op.PK, } } -func (op *AddPrimaryKey) DependsOn(another Operation) bool { +func (op *AddPrimaryKeyOp) DependsOn(another Operation) bool { switch another := another.(type) { - case *AddColumn: + case *AddColumnOp: return op.FQN == another.FQN && op.PK.Columns.Contains(another.Column) } return false } -type ChangePrimaryKey struct { +type ChangePrimaryKeyOp struct { FQN schema.FQN Old *sqlschema.PK New *sqlschema.PK } -var _ Operation = (*AddPrimaryKey)(nil) +var _ Operation = (*AddPrimaryKeyOp)(nil) -func (op *ChangePrimaryKey) GetReverse() Operation { - return &ChangePrimaryKey{ +func (op *ChangePrimaryKeyOp) GetReverse() Operation { + return &ChangePrimaryKeyOp{ FQN: op.FQN, Old: op.New, New: op.Old, } } -// func (op *ChangePrimaryKey) DependsOn(another Operation) bool { -// switch another := another.(type) { -// case *AddColumn: -// return op.FQN == another.FQN && op.PK.Columns.Contains(another.Column) -// case *RenameColumn: -// return op.FQN == another.FQN && op.PK.Columns.Contains(another.NewName) -// } -// return false -// } - // noop is a migration that doesn't change the schema. type noop struct{} diff --git a/migrate/sqlschema/migrator.go b/migrate/sqlschema/migrator.go index e4dc5a598..3532da0d4 100644 --- a/migrate/sqlschema/migrator.go +++ b/migrate/sqlschema/migrator.go @@ -1,7 +1,6 @@ package sqlschema import ( - "context" "fmt" "github.com/uptrace/bun" @@ -14,7 +13,7 @@ type MigratorDialect interface { } type Migrator interface { - Apply(ctx context.Context, changes ...interface{}) error + AppendSQL(b []byte, operation interface{}) ([]byte, error) } // migrator is a dialect-agnostic wrapper for sqlschema.MigratorDialect. 
@@ -41,18 +40,10 @@ func NewBaseMigrator(db *bun.DB) *BaseMigrator { return &BaseMigrator{db: db} } -func (m *BaseMigrator) CreateTable(ctx context.Context, model interface{}) error { - _, err := m.db.NewCreateTable().Model(model).Exec(ctx) - if err != nil { - return err - } - return nil +func (m *BaseMigrator) AppendCreateTable(b []byte, model interface{}) ([]byte, error) { + return m.db.NewCreateTable().Model(model).AppendQuery(m.db.Formatter(), b) } -func (m *BaseMigrator) DropTable(ctx context.Context, fqn schema.FQN) error { - _, err := m.db.NewDropTable().TableExpr(fqn.String()).Exec(ctx) - if err != nil { - return err - } - return nil +func (m *BaseMigrator) AppendDropTable(b []byte, fqn schema.FQN) ([]byte, error) { + return m.db.NewDropTable().TableExpr(fqn.String()).AppendQuery(m.db.Formatter(), b) } From 1bf7cfd067e0e26ae212b0f7421e5abc6f67fb4f Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Mon, 4 Nov 2024 00:49:58 +0100 Subject: [PATCH 34/55] feat: create sql migrations and apply them - CreateSQLMigrations() writes up- and down- SQL to migration files in the migrations directory - Migrate() generates migration files and applies them, creating a corresponding entry in the database - Run() is deprecated. 
It was decided that AutoMigrator will not support in-place migrations for now - no-op operations produce a comment in the down files if an operation is not reversible --- internal/dbtest/inspect_test.go | 2 +- internal/dbtest/migrate_test.go | 208 +++++++++++++++++++++++--------- migrate/auto.go | 79 +++++++++--- migrate/diff.go | 39 +++++- migrate/migrator.go | 2 +- 5 files changed, 247 insertions(+), 83 deletions(-) diff --git a/internal/dbtest/inspect_test.go b/internal/dbtest/inspect_test.go index 5b463cdef..d228d210d 100644 --- a/internal/dbtest/inspect_test.go +++ b/internal/dbtest/inspect_test.go @@ -75,7 +75,7 @@ func TestDatabaseInspector_Inspect(t *testing.T) { testEachDB(t, func(t *testing.T, dbName string, db *bun.DB) { db.RegisterModel((*PublisherToJournalist)(nil)) - dbInspector, err := sqlschema.NewInspector(db) + dbInspector, err := sqlschema.NewInspector(db, migrationsTable, migrationLocksTable) if err != nil { t.Skip(err) } diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index 28b45553c..518909592 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -3,6 +3,8 @@ package dbtest_test import ( "context" "errors" + "os" + "path/filepath" "strings" "testing" "time" @@ -19,14 +21,28 @@ const ( migrationLocksTable = "test_migration_locks" ) +var migrationsDir = filepath.Join(os.TempDir(), "dbtest") + +// cleanupMigrations adds a cleanup function to reset migration tables. +// The reset does not run for skipped tests to avoid unnecessary work.
+// +// Usage: +// +// testEachDB(t, func(t *testing.T, dbName string, db *bun.DB) { +// cleanupMigrations(t, ctx, db) +// // some test that may generate migration entries in the db +// }) func cleanupMigrations(tb testing.TB, ctx context.Context, db *bun.DB) { tb.Cleanup(func() { - var err error - _, err = db.NewDropTable().ModelTableExpr(migrationsTable).Exec(ctx) - require.NoError(tb, err, "drop %q table", migrationsTable) + if tb.Skipped() { + return + } - _, err = db.NewDropTable().ModelTableExpr(migrationLocksTable).Exec(ctx) - require.NoError(tb, err, "drop %q table", migrationLocksTable) + m := migrate.NewMigrator(db, migrate.NewMigrations(), + migrate.WithTableName(migrationsTable), + migrate.WithLocksTableName(migrationLocksTable), + ) + require.NoError(tb, m.Reset(ctx)) }) } @@ -163,27 +179,45 @@ func testMigrateUpError(t *testing.T, db *bun.DB) { require.Equal(t, []string{"down2", "down1"}, history) } -// newAutoMigrator creates an AutoMigrator configured to use test migratins/locks tables. -// If the dialect doesn't support schema inspections or migrations, the test will fail with the corresponding error. -func newAutoMigrator(tb testing.TB, db *bun.DB, opts ...migrate.AutoMigratorOption) *migrate.AutoMigrator { +// newAutoMigratorOrSkip creates an AutoMigrator configured to use test migrations/locks +// tables and dedicated migrations directory. If an AutoMigrator cannot be created because +// the dialect doesn't support either schema inspections or migrations, the test will be *skipped* +// with the corresponding error. +// Additionally, it will create the migrations directory if +// one does not exist and add a function to tear it down on cleanup.
+func newAutoMigratorOrSkip(tb testing.TB, db *bun.DB, opts ...migrate.AutoMigratorOption) *migrate.AutoMigrator { tb.Helper() opts = append(opts, migrate.WithTableNameAuto(migrationsTable), migrate.WithLocksTableNameAuto(migrationLocksTable), + migrate.WithMigrationsDirectoryAuto(migrationsDir), ) m, err := migrate.NewAutoMigrator(db, opts...) - require.NoError(tb, err) + if err != nil { + tb.Skip(err) + } + + err = os.MkdirAll(migrationsDir, os.ModePerm) + require.NoError(tb, err, "cannot continue test without migrations directory") + + tb.Cleanup(func() { + if err := os.RemoveAll(migrationsDir); err != nil { + tb.Logf("cleanup: remove migrations dir: %v", err) + } + }) + return m } // inspectDbOrSkip returns a function to inspect the current state of the database. -// It calls tb.Skip() if the current dialect doesn't support database inpection and -// fails the test if the inspector cannot successfully retrieve database state. +// The test will be *skipped* if the current dialect doesn't support database inspection +// and fail if the inspector cannot successfully retrieve database state. func inspectDbOrSkip(tb testing.TB, db *bun.DB) func(context.Context) sqlschema.State { tb.Helper() - inspector, err := sqlschema.NewInspector(db) + // AutoMigrator excludes these tables by default, but here we need to do this explicitly. + inspector, err := sqlschema.NewInspector(db, migrationsTable, migrationLocksTable) if err != nil { tb.Skip(err) } @@ -194,7 +228,78 @@ func inspectDbOrSkip(tb testing.TB, db *bun.DB) func(context.Context) sqlschema.
} } -func TestAutoMigrator_Run(t *testing.T) { +func TestAutoMigrator_CreateSQLMigrations(t *testing.T) { + type NewTable struct { + bun.BaseModel `bun:"table:new_table"` + Bar string + Baz time.Time + } + + testEachDB(t, func(t *testing.T, dbName string, db *bun.DB) { + ctx := context.Background() + m := newAutoMigratorOrSkip(t, db, migrate.WithModel((*NewTable)(nil))) + + migrations, err := m.CreateSQLMigrations(ctx) + require.NoError(t, err, "should create migrations successfully") + + require.Len(t, migrations, 2, "expected up/down migration pair") + require.DirExists(t, migrationsDir) + checkMigrationFileContains(t, ".up.sql", "CREATE TABLE") + checkMigrationFileContains(t, ".down.sql", "DROP TABLE") + }) +} + +// checkMigrationFileContains expected SQL snippet. +func checkMigrationFileContains(t *testing.T, fileSuffix string, content string) { + t.Helper() + + files, err := os.ReadDir(migrationsDir) + require.NoErrorf(t, err, "list files in %s", migrationsDir) + + for _, f := range files { + if strings.HasSuffix(f.Name(), fileSuffix) { + b, err := os.ReadFile(filepath.Join(migrationsDir, f.Name())) + require.NoError(t, err) + require.Containsf(t, string(b), content, "expected %s file to contain string", f.Name()) + return + } + } + t.Errorf("no *%s file in migrations directory (%s)", fileSuffix, migrationsDir) +} + +// checkMigrationFilesExist makes sure both up- and down- SQL migration files were created. 
+func checkMigrationFilesExist(t *testing.T) { + t.Helper() + + files, err := os.ReadDir(migrationsDir) + require.NoErrorf(t, err, "list files in %s", migrationsDir) + + var up, down bool + for _, f := range files { + if !up && strings.HasSuffix(f.Name(), ".up.sql") { + up = true + } else if !down && strings.HasSuffix(f.Name(), ".down.sql") { + down = true + } + } + + if !up { + t.Errorf("no .up.sql file created in migrations directory (%s)", migrationsDir) + } + if !down { + t.Errorf("no .down.sql file created in migrations directory (%s)", migrationsDir) + } +} + +func runMigrations(t *testing.T, m *migrate.AutoMigrator) { + t.Helper() + + _, err := m.Migrate(ctx) + require.NoError(t, err, "auto migration failed") + checkMigrationFilesExist(t) +} + +func TestAutoMigrator_Migrate(t *testing.T) { tests := []struct { fn func(t *testing.T, db *bun.DB) @@ -219,6 +324,11 @@ func TestAutoMigrator_Run(t *testing.T) { testEachDB(t, func(t *testing.T, dbName string, db *bun.DB) { for _, tt := range tests { t.Run(funcName(tt.fn), func(t *testing.T) { + // Because they are executed so fast, tests may generate migrations + // with the same timestamp, so that only the first of them will apply. + // To eliminate these side-effects we cleanup migration tables + // after every test case.
+ cleanupMigrations(t, ctx, db) tt.fn(t, db) }) } @@ -241,16 +351,14 @@ func testRenameTable(t *testing.T, db *bun.DB) { inspect := inspectDbOrSkip(t, db) mustResetModel(t, ctx, db, (*initial)(nil)) mustDropTableOnCleanup(t, ctx, db, (*changed)(nil)) - m := newAutoMigrator(t, db, migrate.WithModel((*changed)(nil))) + m := newAutoMigratorOrSkip(t, db, migrate.WithModel((*changed)(nil))) // Act - err := m.Run(ctx) - require.NoError(t, err) + runMigrations(t, m) // Assert state := inspect(ctx) tables := state.Tables - require.Len(t, tables, 1) require.Equal(t, "changed", tables[0].Name) } @@ -272,16 +380,14 @@ func testCreateDropTable(t *testing.T, db *bun.DB) { inspect := inspectDbOrSkip(t, db) mustResetModel(t, ctx, db, (*DropMe)(nil)) mustDropTableOnCleanup(t, ctx, db, (*CreateMe)(nil)) - m := newAutoMigrator(t, db, migrate.WithModel((*CreateMe)(nil))) + m := newAutoMigratorOrSkip(t, db, migrate.WithModel((*CreateMe)(nil))) // Act - err := m.Run(ctx) - require.NoError(t, err) + runMigrations(t, m) // Assert state := inspect(ctx) tables := state.Tables - require.Len(t, tables, 1) require.Equal(t, "createme", tables[0].Name) } @@ -332,15 +438,14 @@ func testAlterForeignKeys(t *testing.T, db *bun.DB) { ) mustDropTableOnCleanup(t, ctx, db, (*ThingsToOwner)(nil)) - m := newAutoMigrator(t, db, migrate.WithModel( + m := newAutoMigratorOrSkip(t, db, migrate.WithModel( (*ThingCommon)(nil), (*OwnerCommon)(nil), (*ThingsToOwner)(nil), )) // Act - err := m.Run(ctx) - require.NoError(t, err) + runMigrations(t, m) // Assert state := inspect(ctx) @@ -399,7 +504,7 @@ func testForceRenameFK(t *testing.T, db *bun.DB) { ) mustDropTableOnCleanup(t, ctx, db, (*Person)(nil)) - m := newAutoMigrator(t, db, + m := newAutoMigratorOrSkip(t, db, migrate.WithModel( (*Person)(nil), (*PersonalThing)(nil), @@ -413,13 +518,11 @@ func testForceRenameFK(t *testing.T, db *bun.DB) { ) // Act - err := m.Run(ctx) - require.NoError(t, err) + runMigrations(t, m) // Assert state := inspect(ctx) schema := 
db.Dialect().DefaultSchema() - wantName, ok := state.FKs[sqlschema.FK{ From: sqlschema.C(schema, "things", "owner_id"), To: sqlschema.C(schema, "people", "id"), @@ -459,7 +562,7 @@ func testCustomFKNameFunc(t *testing.T, db *bun.DB) { (*Column)(nil), ) - m := newAutoMigrator(t, db, + m := newAutoMigratorOrSkip(t, db, migrate.WithFKNameFunc(func(sqlschema.FK) string { return "test_fkey" }), migrate.WithModel( (*TableM)(nil), @@ -468,8 +571,7 @@ func testCustomFKNameFunc(t *testing.T, db *bun.DB) { ) // Act - err := m.Run(ctx) - require.NoError(t, err) + runMigrations(t, m) // Assert state := inspect(ctx) @@ -514,18 +616,16 @@ func testRenamedColumns(t *testing.T, db *bun.DB) { (*Model1)(nil), ) mustDropTableOnCleanup(t, ctx, db, (*Renamed)(nil)) - m := newAutoMigrator(t, db, migrate.WithModel( + m := newAutoMigratorOrSkip(t, db, migrate.WithModel( (*Model2)(nil), (*Renamed)(nil), )) // Act - err := m.Run(ctx) - require.NoError(t, err) + runMigrations(t, m) // Assert state := inspect(ctx) - require.Len(t, state.Tables, 2) var renamed, model2 sqlschema.Table @@ -565,18 +665,16 @@ func testRenameColumnRenamesFK(t *testing.T, db *bun.DB) { ctx := context.Background() inspect := inspectDbOrSkip(t, db) mustCreateTableWithFKs(t, ctx, db, (*TennantBefore)(nil)) - m := newAutoMigrator(t, db, + m := newAutoMigratorOrSkip(t, db, migrate.WithRenameFK(true), migrate.WithModel((*TennantAfter)(nil)), ) // Act - err := m.Run(ctx) - require.NoError(t, err) + runMigrations(t, m) // Assert state := inspect(ctx) - fkName := state.FKs[sqlschema.FK{ From: sqlschema.C(db.Dialect().DefaultSchema(), "tennants", "my_neighbour"), To: sqlschema.C(db.Dialect().DefaultSchema(), "tennants", "tennant_id"), @@ -655,11 +753,10 @@ func testChangeColumnType_AutoCast(t *testing.T, db *bun.DB) { ctx := context.Background() inspect := inspectDbOrSkip(t, db) mustResetModel(t, ctx, db, (*TableBefore)(nil)) - m := newAutoMigrator(t, db, migrate.WithModel((*TableAfter)(nil))) + m := newAutoMigratorOrSkip(t, 
db, migrate.WithModel((*TableAfter)(nil))) // Act - err := m.Run(ctx) - require.NoError(t, err) + runMigrations(t, m) // Assert state := inspect(ctx) @@ -699,11 +796,10 @@ func testIdentity(t *testing.T, db *bun.DB) { ctx := context.Background() inspect := inspectDbOrSkip(t, db) mustResetModel(t, ctx, db, (*TableBefore)(nil)) - m := newAutoMigrator(t, db, migrate.WithModel((*TableAfter)(nil))) + m := newAutoMigratorOrSkip(t, db, migrate.WithModel((*TableAfter)(nil))) // Act - err := m.Run(ctx) - require.NoError(t, err) + runMigrations(t, m) // Assert state := inspect(ctx) @@ -743,11 +839,10 @@ func testAddDropColumn(t *testing.T, db *bun.DB) { ctx := context.Background() inspect := inspectDbOrSkip(t, db) mustResetModel(t, ctx, db, (*TableBefore)(nil)) - m := newAutoMigrator(t, db, migrate.WithModel((*TableAfter)(nil))) + m := newAutoMigratorOrSkip(t, db, migrate.WithModel((*TableAfter)(nil))) // Act - err := m.Run(ctx) - require.NoError(t, err) + runMigrations(t, m) // Assert state := inspect(ctx) @@ -823,11 +918,10 @@ func testUnique(t *testing.T, db *bun.DB) { ctx := context.Background() inspect := inspectDbOrSkip(t, db) mustResetModel(t, ctx, db, (*TableBefore)(nil)) - m := newAutoMigrator(t, db, migrate.WithModel((*TableAfter)(nil))) + m := newAutoMigratorOrSkip(t, db, migrate.WithModel((*TableAfter)(nil))) // Act - err := m.Run(ctx) - require.NoError(t, err) + runMigrations(t, m) // Assert state := inspect(ctx) @@ -894,11 +988,10 @@ func testUniqueRenamedTable(t *testing.T, db *bun.DB) { inspect := inspectDbOrSkip(t, db) mustResetModel(t, ctx, db, (*TableBefore)(nil)) mustDropTableOnCleanup(t, ctx, db, (*TableAfter)(nil)) - m := newAutoMigrator(t, db, migrate.WithModel((*TableAfter)(nil))) + m := newAutoMigratorOrSkip(t, db, migrate.WithModel((*TableAfter)(nil))) // Act - err := m.Run(ctx) - require.NoError(t, err) + runMigrations(t, m) // Assert state := inspect(ctx) @@ -1011,17 +1104,16 @@ func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { 
(*AddNewPKBefore)(nil), (*ChangePKBefore)(nil), ) - m := newAutoMigrator(t, db, migrate.WithModel( + m := newAutoMigratorOrSkip(t, db, migrate.WithModel( (*DropPKAfter)(nil), (*AddNewPKAfter)(nil), (*ChangePKAfter)(nil)), ) // Act - err := m.Run(ctx) - require.NoError(t, err) + runMigrations(t, m) // Assert state := inspect(ctx) cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) -} \ No newline at end of file +} diff --git a/migrate/auto.go b/migrate/auto.go index b1cacf691..70236e8e5 100644 --- a/migrate/auto.go +++ b/migrate/auto.go @@ -1,8 +1,11 @@ package migrate import ( + "bytes" "context" "fmt" + "os" + "path/filepath" "github.com/uptrace/bun" "github.com/uptrace/bun/migrate/sqlschema" @@ -72,6 +75,12 @@ func WithMarkAppliedOnSuccessAuto(enabled bool) AutoMigratorOption { } } +func WithMigrationsDirectoryAuto(directory string) AutoMigratorOption { + return func(m *AutoMigrator) { + m.migrationsOpts = append(m.migrationsOpts, WithMigrationsDirectory(directory)) + } +} + type AutoMigrator struct { db *bun.DB @@ -98,6 +107,9 @@ type AutoMigrator struct { // migratorOpts are passed to Migrator constructor. migratorOpts []MigratorOption + + // migrationsOpts are passed to Migrations constructor. + migrationsOpts []MigrationsOption } func NewAutoMigrator(db *bun.DB, opts ...AutoMigratorOption) (*AutoMigrator, error) { @@ -156,14 +168,37 @@ func (am *AutoMigrator) plan(ctx context.Context) (*changeset, error) { // Migrate writes required changes to a new migration file and runs the migration. // This will create and entry in the migrations table, making it possible to revert // the changes with Migrator.Rollback(). 
-func (am *AutoMigrator) Migrate(ctx context.Context, opts ...MigrationOption) error { +func (am *AutoMigrator) Migrate(ctx context.Context, opts ...MigrationOption) (*MigrationGroup, error) { + migrations, _, err := am.createSQLMigrations(ctx) + if err != nil { + return nil, fmt.Errorf("auto migrate: %w", err) + } + + migrator := NewMigrator(am.db, migrations, am.migratorOpts...) + if err := migrator.Init(ctx); err != nil { + return nil, fmt.Errorf("auto migrate: %w", err) + } + + group, err := migrator.Migrate(ctx, opts...) + if err != nil { + return nil, fmt.Errorf("auto migrate: %w", err) + } + return group, nil +} + +func (am *AutoMigrator) CreateSQLMigrations(ctx context.Context) ([]*MigrationFile, error) { + _, files, err := am.createSQLMigrations(ctx) + return files, err +} + +func (am *AutoMigrator) createSQLMigrations(ctx context.Context) (*Migrations, []*MigrationFile, error) { changes, err := am.plan(ctx) if err != nil { - return fmt.Errorf("auto migrate: %w", err) + return nil, nil, fmt.Errorf("create sql migrations: %w", err) } - migrations := NewMigrations() name, _ := genMigrationName("auto") + migrations := NewMigrations(am.migrationsOpts...) migrations.Add(Migration{ Name: name, Up: changes.Up(am.dbMigrator), @@ -171,26 +206,34 @@ func (am *AutoMigrator) Migrate(ctx context.Context, opts ...MigrationOption) er Comment: "Changes detected by bun.migrate.AutoMigrator", }) - migrator := NewMigrator(am.db, migrations, am.migratorOpts...) 
- if err := migrator.Init(ctx); err != nil { - return fmt.Errorf("auto migrate: %w", err) + up, err := am.createSQL(ctx, migrations, name+".up.sql", changes) + if err != nil { + return nil, nil, fmt.Errorf("create sql migration up: %w", err) } - if _, err := migrator.Migrate(ctx, opts...); err != nil { - return fmt.Errorf("auto migrate: %w", err) + down, err := am.createSQL(ctx, migrations, name+".down.sql", changes.GetReverse()) + if err != nil { + return nil, nil, fmt.Errorf("create sql migration down: %w", err) } - return nil + return migrations, []*MigrationFile{up, down}, nil } -// Run runs required migrations in-place and without creating a database entry. -func (am *AutoMigrator) Run(ctx context.Context) error { - changes, err := am.plan(ctx) - if err != nil { - return fmt.Errorf("auto migrate: %w", err) +func (am *AutoMigrator) createSQL(_ context.Context, migrations *Migrations, fname string, changes *changeset) (*MigrationFile, error) { + var buf bytes.Buffer + if err := changes.WriteTo(&buf, am.dbMigrator); err != nil { + return nil, err } - up := changes.Up(am.dbMigrator) - if err := up(ctx, am.db); err != nil { - return fmt.Errorf("auto migrate: %w", err) + content := buf.Bytes() + + fpath := filepath.Join(migrations.getDirectory(), fname) + if err := os.WriteFile(fpath, content, 0o644); err != nil { + return nil, err + } + + mf := &MigrationFile{ + Name: fname, + Path: fpath, + Content: string(content), } - return nil + return mf, nil } diff --git a/migrate/diff.go b/migrate/diff.go index fa1743671..e1ea59832 100644 --- a/migrate/diff.go +++ b/migrate/diff.go @@ -4,6 +4,7 @@ import ( "context" "errors" "fmt" + "io" "strings" "github.com/uptrace/bun" @@ -149,6 +150,15 @@ func (c *changeset) Func(m sqlschema.Migrator) MigrationFunc { } } +// GetReverse returns a new changeset with each operation in it "reversed" and in reverse order. 
+func (c *changeset) GetReverse() *changeset { + var reverse changeset + for i := len(c.operations) - 1; i >= 0; i-- { + reverse.Add(c.operations[i].GetReverse()) + } + return &reverse +} + // Up is syntactic sugar. func (c *changeset) Up(m sqlschema.Migrator) MigrationFunc { return c.Func(m) @@ -156,11 +166,7 @@ func (c *changeset) Up(m sqlschema.Migrator) MigrationFunc { // Down is syntactic sugar. func (c *changeset) Down(m sqlschema.Migrator) MigrationFunc { - var reverse changeset - for i := len(c.operations) - 1; i >= 0; i-- { - reverse.Add(c.operations[i].GetReverse()) - } - return reverse.Func(m) + return c.GetReverse().Func(m) } // apply generates SQL for each operation and executes it. @@ -184,6 +190,29 @@ func (c *changeset) apply(ctx context.Context, db *bun.DB, m sqlschema.Migrator) return nil } +func (c *changeset) WriteTo(w io.Writer, m sqlschema.Migrator) error { + var err error + + b := internal.MakeQueryBytes() + for _, op := range c.operations { + if _, isNoop := op.(*noop); isNoop { + // TODO: write migration-specific commend instead + b = append(b, "-- Down-migrations are not supported for some changes.\n"...) + continue + } + + b, err = m.AppendSQL(b, op) + if err != nil { + return fmt.Errorf("write changeset: %w", err) + } + b = append(b, ";\n"...) 
+ } + if _, err := w.Write(b); err != nil { + return fmt.Errorf("write changeset: %w", err) + } + return nil +} + func (c *changeset) ResolveDependencies() error { if len(c.operations) <= 1 { return nil diff --git a/migrate/migrator.go b/migrate/migrator.go index 9f1b5222c..d5a72aec0 100644 --- a/migrate/migrator.go +++ b/migrate/migrator.go @@ -314,7 +314,7 @@ func (m *Migrator) CreateSQLMigrations(ctx context.Context, name string) ([]*Mig return []*MigrationFile{up, down}, nil } -func (m *Migrator) createSQL(ctx context.Context, fname string, transactional bool) (*MigrationFile, error) { +func (m *Migrator) createSQL(_ context.Context, fname string, transactional bool) (*MigrationFile, error) { fpath := filepath.Join(m.migrations.getDirectory(), fname) template := sqlTemplate From 7289f234603bee1543412bfd4446b239a114df7d Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Mon, 4 Nov 2024 01:42:17 +0100 Subject: [PATCH 35/55] chore: add missing newline --- internal/util.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/util.go b/internal/util.go index 3391351ab..ba1341e61 100644 --- a/internal/util.go +++ b/internal/util.go @@ -84,4 +84,4 @@ func indirectNil(v reflect.Value) reflect.Value { func MakeQueryBytes() []byte { // TODO: make this configurable? 
return make([]byte, 0, 4096) -} \ No newline at end of file +} From 538bda1b00d4a5eca309581373594572c045c706 Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Mon, 4 Nov 2024 20:20:57 +0100 Subject: [PATCH 36/55] refactor: replace noop Operation with a more fitting placeholder --- internal/dbtest/migrate_test.go | 18 +++++++++--------- migrate/diff.go | 11 ++++++++--- migrate/operations.go | 22 ++++++++++++++-------- 3 files changed, 31 insertions(+), 20 deletions(-) diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index 518909592..6119f0025 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -304,16 +304,16 @@ func TestAutoMigrator_Migrate(t *testing.T) { tests := []struct { fn func(t *testing.T, db *bun.DB) }{ - {testRenameTable}, - {testRenamedColumns}, + // {testRenameTable}, + // {testRenamedColumns}, {testCreateDropTable}, - {testAlterForeignKeys}, - {testChangeColumnType_AutoCast}, - {testIdentity}, - {testAddDropColumn}, - {testUnique}, - {testUniqueRenamedTable}, - {testUpdatePrimaryKeys}, + // {testAlterForeignKeys}, + // {testChangeColumnType_AutoCast}, + // {testIdentity}, + // {testAddDropColumn}, + // {testUnique}, + // {testUniqueRenamedTable}, + // {testUpdatePrimaryKeys}, // Suspended support for renaming foreign keys: // {testCustomFKNameFunc}, diff --git a/migrate/diff.go b/migrate/diff.go index e1ea59832..54e4b9a8f 100644 --- a/migrate/diff.go +++ b/migrate/diff.go @@ -176,6 +176,10 @@ func (c *changeset) apply(ctx context.Context, db *bun.DB, m sqlschema.Migrator) } for _, op := range c.operations { + if _, isComment := op.(*comment); isComment { + continue + } + b := internal.MakeQueryBytes() b, err := m.AppendSQL(b, op) if err != nil { @@ -195,9 +199,10 @@ func (c *changeset) WriteTo(w io.Writer, m sqlschema.Migrator) error { b := internal.MakeQueryBytes() for _, op := range c.operations { - if _, isNoop := op.(*noop); isNoop { - // TODO: write migration-specific commend instead 
- b = append(b, "-- Down-migrations are not supported for some changes.\n"...) + if c, isComment := op.(*comment); isComment { + b = append(b, "/*\n"...) + b = append(b, *c...) + b = append(b, "\n*/"...) continue } diff --git a/migrate/operations.go b/migrate/operations.go index fffa9489b..e4647656e 100644 --- a/migrate/operations.go +++ b/migrate/operations.go @@ -1,6 +1,8 @@ package migrate import ( + "fmt" + "github.com/uptrace/bun/migrate/sqlschema" "github.com/uptrace/bun/schema" ) @@ -38,11 +40,9 @@ func (op *DropTableOp) DependsOn(another Operation) bool { // GetReverse for a DropTable returns a no-op migration. Logically, CreateTable is the reverse, // but DropTable does not have the table's definition to create one. -// -// TODO: we can fetch table definitions for deleted tables -// from the database engine and execute them as a raw query. func (op *DropTableOp) GetReverse() Operation { - return &noop{} + c := comment(fmt.Sprintf("WARNING: \"DROP TABLE %s\" cannot be reversed automatically because table definition is not available", op.FQN.String())) + return &c } type RenameTableOp struct { @@ -357,9 +357,15 @@ func (op *ChangePrimaryKeyOp) GetReverse() Operation { } } -// noop is a migration that doesn't change the schema. -type noop struct{} +// comment denotes an Operation that cannot be executed. +// +// Operations, which cannot be reversed due to current technical limitations, +// may return &comment with a helpful message from their GetReverse() method. +// +// Changelog should skip it when applying operations or output as log message, +// and write it as an SQL comment when creating migration files.
+type comment string -var _ Operation = (*noop)(nil) +var _ Operation = (*comment)(nil) -func (*noop) GetReverse() Operation { return &noop{} } +func (c *comment) GetReverse() Operation { return c } From 47a686548cce54551a7dfb6b5c7b7d2e10d48fc8 Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Mon, 4 Nov 2024 22:47:48 +0100 Subject: [PATCH 37/55] refactor: simplify sqlschema package semantics This commit is aimed at making sqlschema models more extensible and the migration-planning logic more readable. Key changes are: - TableDefinition and Column definition both have Additional interface{} field to allow adding inspector-specific data which remains opaque to the general bulk of the code. For example, SchemaInspector passes (*Model)(nil) zero interface which is used to create a table. - TableDefinitions is structured as a map, similarly to ColumnDefinitions. Map lookups are convenient and are more readable than the custom tableSet struct we used before. The latter is now retired. - sqlschema exposes data structures for describing the databse schema and its interface should not be cluttered with utilities only used by Detector. Which is why both Signature and RefMap are moved to migrate package. - Simplified refMap only has the essential methods. It allows detector to avoid re-creating FKs for tables/columns that were renamed. - Refactored a whole bunch of DependsOn() methods for various operations to leverage the new unitility methods on sqlschema.ForeignKey and Columns. - Deleted tests for refMap, as they are now an implementation detail and not part of the API. 
--- dialect/pgdialect/alter_table.go | 19 +- dialect/pgdialect/inspector.go | 49 ++-- dialect/pgdialect/sqltype.go | 4 +- dialect/pgdialect/sqltype_test.go | 18 +- internal/dbtest/inspect_test.go | 150 ++++++------ internal/dbtest/migrate_test.go | 193 +++++++-------- internal/dbtest/query_test.go | 70 +++--- internal/dbtest/sqlschema_test.go | 222 ----------------- migrate/auto.go | 2 +- migrate/diff.go | 346 +++++++++++++------------- migrate/operations.go | 118 +++------ migrate/sqlschema/inspector.go | 53 ++-- migrate/sqlschema/schema.go | 163 +++++++++++++ migrate/sqlschema/state.go | 386 ------------------------------ 14 files changed, 663 insertions(+), 1130 deletions(-) delete mode 100644 internal/dbtest/sqlschema_test.go create mode 100644 migrate/sqlschema/schema.go delete mode 100644 migrate/sqlschema/state.go diff --git a/dialect/pgdialect/alter_table.go b/dialect/pgdialect/alter_table.go index bfb118ade..8ce588e06 100644 --- a/dialect/pgdialect/alter_table.go +++ b/dialect/pgdialect/alter_table.go @@ -45,11 +45,11 @@ func (m *migrator) AppendSQL(b []byte, operation interface{}) (_ []byte, err err case *migrate.DropColumnOp: b, err = m.dropColumn(fmter, appendAlterTable(b, change.FQN), change) case *migrate.AddPrimaryKeyOp: - b, err = m.addPrimaryKey(fmter, appendAlterTable(b, change.FQN), change.PK.Columns.Safe()) + b, err = m.addPrimaryKey(fmter, appendAlterTable(b, change.FQN), change.PrimaryKey) case *migrate.ChangePrimaryKeyOp: b, err = m.changePrimaryKey(fmter, appendAlterTable(b, change.FQN), change) case *migrate.DropPrimaryKeyOp: - b, err = m.dropConstraint(fmter, appendAlterTable(b, change.FQN), change.PK.Name) + b, err = m.dropConstraint(fmter, appendAlterTable(b, change.FQN), change.PrimaryKey.Name) case *migrate.AddUniqueConstraintOp: b, err = m.addUnique(fmter, appendAlterTable(b, change.FQN), change) case *migrate.DropUniqueConstraintOp: @@ -114,9 +114,9 @@ func (m *migrator) dropColumn(fmter schema.Formatter, b []byte, drop *migrate.Dr 
return b, nil } -func (m *migrator) addPrimaryKey(fmter schema.Formatter, b []byte, columns schema.Safe) (_ []byte, err error) { +func (m *migrator) addPrimaryKey(fmter schema.Formatter, b []byte, pk sqlschema.PrimaryKey) (_ []byte, err error) { b = append(b, "ADD PRIMARY KEY ("...) - b, _ = columns.AppendQuery(fmter, b) + b, _ = pk.Columns.AppendQuery(fmter, b) b = append(b, ")"...) return b, nil @@ -141,7 +141,7 @@ func (m *migrator) renameConstraint(fmter schema.Formatter, b []byte, rename *mi func (m *migrator) changePrimaryKey(fmter schema.Formatter, b []byte, change *migrate.ChangePrimaryKeyOp) (_ []byte, err error) { b, _ = m.dropConstraint(fmter, b, change.Old.Name) b = append(b, ", "...) - b, _ = m.addPrimaryKey(fmter, b, change.New.Columns.Safe()) + b, _ = m.addPrimaryKey(fmter, b, change.New) return b, nil } @@ -154,7 +154,7 @@ func (m *migrator) addUnique(fmter schema.Formatter, b []byte, change *migrate.A b = fmter.AppendName(b, fmt.Sprintf("%s_%s_key", change.FQN.Table, change.Unique.Columns)) } b = append(b, " UNIQUE ("...) - b, _ = change.Unique.Columns.Safe().AppendQuery(fmter, b) + b, _ = change.Unique.Columns.AppendQuery(fmter, b) b = append(b, ")"...) return b, nil @@ -172,19 +172,18 @@ func (m *migrator) addForeignKey(fmter schema.Formatter, b []byte, add *migrate. b = fmter.AppendName(b, add.ConstraintName) b = append(b, " FOREIGN KEY ("...) - if b, err = add.FK.From.Column.Safe().AppendQuery(fmter, b); err != nil { + if b, err = add.ForeignKey.From.Column.AppendQuery(fmter, b); err != nil { return b, err } b = append(b, ")"...) - other := schema.FQN{Schema: add.FK.To.Schema, Table: add.FK.To.Table} b = append(b, " REFERENCES "...) - if b, err = other.AppendQuery(fmter, b); err != nil { + if b, err = add.ForeignKey.To.FQN.AppendQuery(fmter, b); err != nil { return b, err } b = append(b, " ("...) 
- if b, err = add.FK.To.Column.Safe().AppendQuery(fmter, b); err != nil { + if b, err = add.ForeignKey.To.Column.AppendQuery(fmter, b); err != nil { return b, err } b = append(b, ")"...) diff --git a/dialect/pgdialect/inspector.go b/dialect/pgdialect/inspector.go index 9bac008c1..00b32cccf 100644 --- a/dialect/pgdialect/inspector.go +++ b/dialect/pgdialect/inspector.go @@ -23,8 +23,11 @@ func newInspector(db *bun.DB, excludeTables ...string) *Inspector { return &Inspector{db: db, excludeTables: excludeTables} } -func (in *Inspector) Inspect(ctx context.Context) (sqlschema.State, error) { - var state sqlschema.State +func (in *Inspector) Inspect(ctx context.Context) (sqlschema.DatabaseSchema, error) { + schema := sqlschema.DatabaseSchema{ + TableDefinitions: make(map[string]sqlschema.TableDefinition), + ForeignKeys: make(map[sqlschema.ForeignKey]string), + } exclude := in.excludeTables if len(exclude) == 0 { @@ -34,22 +37,22 @@ func (in *Inspector) Inspect(ctx context.Context) (sqlschema.State, error) { var tables []*InformationSchemaTable if err := in.db.NewRaw(sqlInspectTables, bun.In(exclude)).Scan(ctx, &tables); err != nil { - return state, err + return schema, err } var fks []*ForeignKey if err := in.db.NewRaw(sqlInspectForeignKeys, bun.In(exclude), bun.In(exclude)).Scan(ctx, &fks); err != nil { - return state, err + return schema, err } - state.FKs = make(map[sqlschema.FK]string, len(fks)) + schema.ForeignKeys = make(map[sqlschema.ForeignKey]string, len(fks)) for _, table := range tables { var columns []*InformationSchemaColumn if err := in.db.NewRaw(sqlInspectColumnsQuery, table.Schema, table.Name).Scan(ctx, &columns); err != nil { - return state, err + return schema, err } - colDefs := make(map[string]sqlschema.Column) + colDefs := make(map[string]sqlschema.ColumnDefinition) uniqueGroups := make(map[string][]string) for _, c := range columns { @@ -60,7 +63,7 @@ func (in *Inspector) Inspect(ctx context.Context) (sqlschema.State, error) { def = 
strings.ToLower(def) } - colDefs[c.Name] = sqlschema.Column{ + colDefs[c.Name] = sqlschema.ColumnDefinition{ SQLType: c.DataType, VarcharLen: c.VarcharLen, DefaultValue: def, @@ -78,34 +81,34 @@ func (in *Inspector) Inspect(ctx context.Context) (sqlschema.State, error) { for name, columns := range uniqueGroups { unique = append(unique, sqlschema.Unique{ Name: name, - Columns: sqlschema.NewComposite(columns...), + Columns: sqlschema.NewColumns(columns...), }) } - var pk *sqlschema.PK + var pk *sqlschema.PrimaryKey if len(table.PrimaryKey.Columns) > 0 { - pk = &sqlschema.PK{ + pk = &sqlschema.PrimaryKey{ Name: table.PrimaryKey.ConstraintName, - Columns: sqlschema.NewComposite(table.PrimaryKey.Columns...), + Columns: sqlschema.NewColumns(table.PrimaryKey.Columns...), } } - state.Tables = append(state.Tables, sqlschema.Table{ - Schema: table.Schema, - Name: table.Name, - Columns: colDefs, - UniqueContraints: unique, - PK: pk, - }) + schema.TableDefinitions[table.Name] = sqlschema.TableDefinition{ + Schema: table.Schema, + Name: table.Name, + ColumnDefimitions: colDefs, + PrimaryKey: pk, + UniqueContraints: unique, + } } for _, fk := range fks { - state.FKs[sqlschema.FK{ - From: sqlschema.C(fk.SourceSchema, fk.SourceTable, fk.SourceColumns...), - To: sqlschema.C(fk.TargetSchema, fk.TargetTable, fk.TargetColumns...), + schema.ForeignKeys[sqlschema.ForeignKey{ + From: sqlschema.NewColumnReference(fk.SourceSchema, fk.SourceTable, fk.SourceColumns...), + To: sqlschema.NewColumnReference(fk.TargetSchema, fk.TargetTable, fk.TargetColumns...), }] = fk.ConstraintName } - return state, nil + return schema, nil } type InformationSchemaTable struct { diff --git a/dialect/pgdialect/sqltype.go b/dialect/pgdialect/sqltype.go index ce7f9c8b5..10741fc0e 100644 --- a/dialect/pgdialect/sqltype.go +++ b/dialect/pgdialect/sqltype.go @@ -125,7 +125,7 @@ var ( timestampTz = newAliases(sqltype.Timestamp, pgTypeTimestampTz, pgTypeTimestampWithTz) ) -func (d *Dialect) EquivalentType(col1, col2 
sqlschema.Column) bool { +func (d *Dialect) EquivalentType(col1, col2 sqlschema.ColumnDefinition) bool { typ1, typ2 := strings.ToUpper(col1.SQLType), strings.ToUpper(col2.SQLType) if typ1 == typ2 { @@ -147,7 +147,7 @@ func (d *Dialect) EquivalentType(col1, col2 sqlschema.Column) bool { // if one specifies no VarcharLen and the other one has the default lenght for pgdialect. // We assume that the types are otherwise equivalent and that any non-character column // would have VarcharLen == 0; -func checkVarcharLen(col1, col2 sqlschema.Column, defaultLen int) bool { +func checkVarcharLen(col1, col2 sqlschema.ColumnDefinition, defaultLen int) bool { if col1.VarcharLen == col2.VarcharLen { return true } diff --git a/dialect/pgdialect/sqltype_test.go b/dialect/pgdialect/sqltype_test.go index 77cf1e153..6d634a19e 100644 --- a/dialect/pgdialect/sqltype_test.go +++ b/dialect/pgdialect/sqltype_test.go @@ -42,8 +42,8 @@ func TestInspectorDialect_EquivalentType(t *testing.T) { } t.Run(tt.typ1+eq+tt.typ2, func(t *testing.T) { got := d.EquivalentType( - sqlschema.Column{SQLType: tt.typ1}, - sqlschema.Column{SQLType: tt.typ2}, + sqlschema.ColumnDefinition{SQLType: tt.typ1}, + sqlschema.ColumnDefinition{SQLType: tt.typ2}, ) require.Equal(t, tt.want, got) }) @@ -54,25 +54,25 @@ func TestInspectorDialect_EquivalentType(t *testing.T) { t.Run("custom varchar length", func(t *testing.T) { for _, tt := range []struct { name string - col1, col2 sqlschema.Column + col1, col2 sqlschema.ColumnDefinition want bool }{ { name: "varchars of different length are not equivalent", - col1: sqlschema.Column{SQLType: "varchar", VarcharLen: 10}, - col2: sqlschema.Column{SQLType: "varchar"}, + col1: sqlschema.ColumnDefinition{SQLType: "varchar", VarcharLen: 10}, + col2: sqlschema.ColumnDefinition{SQLType: "varchar"}, want: false, }, { name: "varchar with no explicit length is equivalent to varchar of default length", - col1: sqlschema.Column{SQLType: "varchar", VarcharLen: d.DefaultVarcharLen()}, - 
col2: sqlschema.Column{SQLType: "varchar"}, + col1: sqlschema.ColumnDefinition{SQLType: "varchar", VarcharLen: d.DefaultVarcharLen()}, + col2: sqlschema.ColumnDefinition{SQLType: "varchar"}, want: true, }, { name: "characters with equal custom length", - col1: sqlschema.Column{SQLType: "character varying", VarcharLen: 200}, - col2: sqlschema.Column{SQLType: "varchar", VarcharLen: 200}, + col1: sqlschema.ColumnDefinition{SQLType: "character varying", VarcharLen: 200}, + col2: sqlschema.ColumnDefinition{SQLType: "varchar", VarcharLen: 200}, want: true, }, } { diff --git a/internal/dbtest/inspect_test.go b/internal/dbtest/inspect_test.go index d228d210d..7528afa0c 100644 --- a/internal/dbtest/inspect_test.go +++ b/internal/dbtest/inspect_test.go @@ -93,11 +93,11 @@ func TestDatabaseInspector_Inspect(t *testing.T) { defaultSchema := db.Dialect().DefaultSchema() // Tables come sorted alphabetically by schema and table. - wantTables := []sqlschema.Table{ - { + wantTables := map[string]sqlschema.TableDefinition{ + "offices": { Schema: "admin", Name: "offices", - Columns: map[string]sqlschema.Column{ + ColumnDefimitions: map[string]sqlschema.ColumnDefinition{ "office_name": { SQLType: sqltype.VarChar, }, @@ -110,12 +110,12 @@ func TestDatabaseInspector_Inspect(t *testing.T) { IsNullable: true, }, }, - PK: &sqlschema.PK{Columns: sqlschema.NewComposite("office_name")}, + PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("office_name")}, }, - { + "articles": { Schema: defaultSchema, Name: "articles", - Columns: map[string]sqlschema.Column{ + ColumnDefimitions: map[string]sqlschema.ColumnDefinition{ "isbn": { SQLType: "bigint", IsNullable: false, @@ -166,15 +166,15 @@ func TestDatabaseInspector_Inspect(t *testing.T) { SQLType: "bigint", }, }, - PK: &sqlschema.PK{Columns: sqlschema.NewComposite("isbn")}, + PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("isbn")}, UniqueContraints: []sqlschema.Unique{ - {Columns: sqlschema.NewComposite("editor", 
"title")}, + {Columns: sqlschema.NewColumns("editor", "title")}, }, }, - { + "authors": { Schema: defaultSchema, Name: "authors", - Columns: map[string]sqlschema.Column{ + ColumnDefimitions: map[string]sqlschema.ColumnDefinition{ "author_id": { SQLType: "bigint", IsIdentity: true, @@ -189,16 +189,16 @@ func TestDatabaseInspector_Inspect(t *testing.T) { SQLType: sqltype.VarChar, }, }, - PK: &sqlschema.PK{Columns: sqlschema.NewComposite("author_id")}, + PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("author_id")}, UniqueContraints: []sqlschema.Unique{ - {Columns: sqlschema.NewComposite("first_name", "last_name")}, - {Columns: sqlschema.NewComposite("email")}, + {Columns: sqlschema.NewColumns("first_name", "last_name")}, + {Columns: sqlschema.NewColumns("email")}, }, }, - { + "publisher_to_journalists": { Schema: defaultSchema, Name: "publisher_to_journalists", - Columns: map[string]sqlschema.Column{ + ColumnDefimitions: map[string]sqlschema.ColumnDefinition{ "publisher_id": { SQLType: sqltype.VarChar, }, @@ -206,12 +206,12 @@ func TestDatabaseInspector_Inspect(t *testing.T) { SQLType: "bigint", }, }, - PK: &sqlschema.PK{Columns: sqlschema.NewComposite("publisher_id", "author_id")}, + PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("publisher_id", "author_id")}, }, - { + "publishers": { Schema: defaultSchema, Name: "publishers", - Columns: map[string]sqlschema.Column{ + ColumnDefimitions: map[string]sqlschema.ColumnDefinition{ "publisher_id": { SQLType: sqltype.VarChar, DefaultValue: "gen_random_uuid()", @@ -225,33 +225,33 @@ func TestDatabaseInspector_Inspect(t *testing.T) { IsNullable: true, }, }, - PK: &sqlschema.PK{Columns: sqlschema.NewComposite("publisher_id")}, + PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("publisher_id")}, UniqueContraints: []sqlschema.Unique{ - {Columns: sqlschema.NewComposite("publisher_id", "publisher_name")}, + {Columns: sqlschema.NewColumns("publisher_id", "publisher_name")}, }, }, } - 
wantFKs := []sqlschema.FK{ - { // - From: sqlschema.C(defaultSchema, "articles", "publisher_id"), - To: sqlschema.C(defaultSchema, "publishers", "publisher_id"), + wantFKs := []sqlschema.ForeignKey{ + { + From: sqlschema.NewColumnReference(defaultSchema, "articles", "publisher_id"), + To: sqlschema.NewColumnReference(defaultSchema, "publishers", "publisher_id"), }, { - From: sqlschema.C(defaultSchema, "articles", "author_id"), - To: sqlschema.C(defaultSchema, "authors", "author_id"), + From: sqlschema.NewColumnReference(defaultSchema, "articles", "author_id"), + To: sqlschema.NewColumnReference(defaultSchema, "authors", "author_id"), }, - { // - From: sqlschema.C("admin", "offices", "publisher_name", "publisher_id"), - To: sqlschema.C(defaultSchema, "publishers", "publisher_name", "publisher_id"), + { + From: sqlschema.NewColumnReference("admin", "offices", "publisher_name", "publisher_id"), + To: sqlschema.NewColumnReference(defaultSchema, "publishers", "publisher_name", "publisher_id"), }, - { // - From: sqlschema.C(defaultSchema, "publisher_to_journalists", "publisher_id"), - To: sqlschema.C(defaultSchema, "publishers", "publisher_id"), + { + From: sqlschema.NewColumnReference(defaultSchema, "publisher_to_journalists", "publisher_id"), + To: sqlschema.NewColumnReference(defaultSchema, "publishers", "publisher_id"), }, - { // - From: sqlschema.C(defaultSchema, "publisher_to_journalists", "author_id"), - To: sqlschema.C(defaultSchema, "authors", "author_id"), + { + From: sqlschema.NewColumnReference(defaultSchema, "publisher_to_journalists", "author_id"), + To: sqlschema.NewColumnReference(defaultSchema, "authors", "author_id"), }, } @@ -260,10 +260,10 @@ func TestDatabaseInspector_Inspect(t *testing.T) { // State.FKs store their database names, which differ from dialect to dialect. // Because of that we compare FKs and Tables separately. 
- cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, got.Tables) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, got.TableDefinitions) - var fks []sqlschema.FK - for fk := range got.FKs { + var fks []sqlschema.ForeignKey + for fk := range got.ForeignKeys { fks = append(fks, fk) } require.ElementsMatch(t, wantFKs, fks) @@ -292,30 +292,29 @@ func mustCreateSchema(tb testing.TB, ctx context.Context, db *bun.DB, schema str // cmpTables compares table schemas using dialect-specific equivalence checks for column types // and reports the differences as t.Error(). -func cmpTables(tb testing.TB, d sqlschema.InspectorDialect, want, got []sqlschema.Table) { +func cmpTables(tb testing.TB, d sqlschema.InspectorDialect, want, got map[string]sqlschema.TableDefinition) { tb.Helper() require.ElementsMatch(tb, tableNames(want), tableNames(got), "different set of tables") // Now we are guaranteed to have the same tables. - for _, wt := range want { - tableName := wt.Name + for wantName, wantTable := range want { // TODO(dyma): this will be simplified by map[string]Table - var gt sqlschema.Table + var gt sqlschema.TableDefinition for i := range got { - if got[i].Name == tableName { + if got[i].Name == wantName { gt = got[i] break } } - cmpColumns(tb, d, wt.Name, wt.Columns, gt.Columns) - cmpConstraints(tb, wt, gt) + cmpColumns(tb, d, wantName, wantTable.ColumnDefimitions, gt.ColumnDefimitions) + cmpConstraints(tb, wantTable, gt) } } // cmpColumns compares that column definitions on the tables are -func cmpColumns(tb testing.TB, d sqlschema.InspectorDialect, tableName string, want, got map[string]sqlschema.Column) { +func cmpColumns(tb testing.TB, d sqlschema.InspectorDialect, tableName string, want, got map[string]sqlschema.ColumnDefinition) { tb.Helper() var errs []string @@ -372,14 +371,14 @@ func cmpColumns(tb testing.TB, d sqlschema.InspectorDialect, tableName string, w } // cmpConstraints compares constraints defined on the table with the 
expected ones. -func cmpConstraints(tb testing.TB, want, got sqlschema.Table) { +func cmpConstraints(tb testing.TB, want, got sqlschema.TableDefinition) { tb.Helper() - if want.PK != nil { - require.NotNilf(tb, got.PK, "table %q missing primary key, want: (%s)", want.Name, want.PK.Columns) - require.Equalf(tb, want.PK.Columns, got.PK.Columns, "table %q has wrong primary key", want.Name) + if want.PrimaryKey != nil { + require.NotNilf(tb, got.PrimaryKey, "table %q missing primary key, want: (%s)", want.Name, want.PrimaryKey.Columns) + require.Equalf(tb, want.PrimaryKey.Columns, got.PrimaryKey.Columns, "table %q has wrong primary key", want.Name) } else { - require.Nilf(tb, got.PK, "table %q shouldn't have a primary key", want.Name) + require.Nilf(tb, got.PrimaryKey, "table %q shouldn't have a primary key", want.Name) } // Only keep columns included in each unique constraint for comparison. @@ -392,14 +391,14 @@ func cmpConstraints(tb testing.TB, want, got sqlschema.Table) { require.ElementsMatch(tb, stripNames(want.UniqueContraints), stripNames(got.UniqueContraints), "table %q does not have expected unique constraints (listA=want, listB=got)", want.Name) } -func tableNames(tables []sqlschema.Table) (names []string) { - for i := range tables { - names = append(names, tables[i].Name) +func tableNames(tables map[string]sqlschema.TableDefinition) (names []string) { + for name := range tables { + names = append(names, name) } return } -func formatType(c sqlschema.Column) string { +func formatType(c sqlschema.ColumnDefinition) string { if c.VarcharLen == 0 { return c.SQLType } @@ -422,7 +421,7 @@ func TestSchemaInspector_Inspect(t *testing.T) { tables.Register((*Model)(nil)) inspector := sqlschema.NewSchemaInspector(tables) - want := map[string]sqlschema.Column{ + want := map[string]sqlschema.ColumnDefinition{ "id": { SQLType: sqltype.VarChar, DefaultValue: "random()", @@ -436,8 +435,11 @@ func TestSchemaInspector_Inspect(t *testing.T) { got, err := 
inspector.Inspect(context.Background()) require.NoError(t, err) - require.Len(t, got.Tables, 1) - cmpColumns(t, dialect.(sqlschema.InspectorDialect), "model", want, got.Tables[0].Columns) + require.Len(t, got.TableDefinitions, 1) + for _, table := range got.TableDefinitions { + cmpColumns(t, dialect.(sqlschema.InspectorDialect), "model", want, table.ColumnDefimitions) + return + } }) t.Run("parses custom varchar len", func(t *testing.T) { @@ -451,7 +453,7 @@ func TestSchemaInspector_Inspect(t *testing.T) { tables.Register((*Model)(nil)) inspector := sqlschema.NewSchemaInspector(tables) - want := map[string]sqlschema.Column{ + want := map[string]sqlschema.ColumnDefinition{ "id": { SQLType: "text", }, @@ -468,8 +470,10 @@ func TestSchemaInspector_Inspect(t *testing.T) { got, err := inspector.Inspect(context.Background()) require.NoError(t, err) - require.Len(t, got.Tables, 1) - cmpColumns(t, dialect.(sqlschema.InspectorDialect), "model", want, got.Tables[0].Columns) + require.Len(t, got.TableDefinitions, 1) + for _, table := range got.TableDefinitions { + cmpColumns(t, dialect.(sqlschema.InspectorDialect), "model", want, table.ColumnDefimitions) + } }) t.Run("inspect unique constraints", func(t *testing.T) { @@ -483,19 +487,22 @@ func TestSchemaInspector_Inspect(t *testing.T) { tables.Register((*Model)(nil)) inspector := sqlschema.NewSchemaInspector(tables) - want := sqlschema.Table{ + want := sqlschema.TableDefinition{ Name: "models", UniqueContraints: []sqlschema.Unique{ - {Columns: sqlschema.NewComposite("id")}, - {Name: "full_name", Columns: sqlschema.NewComposite("first_name", "last_name")}, + {Columns: sqlschema.NewColumns("id")}, + {Name: "full_name", Columns: sqlschema.NewColumns("first_name", "last_name")}, }, } got, err := inspector.Inspect(context.Background()) require.NoError(t, err) - require.Len(t, got.Tables, 1) - cmpConstraints(t, want, got.Tables[0]) + require.Len(t, got.TableDefinitions, 1) + for _, table := range got.TableDefinitions { + 
cmpConstraints(t, want, table) + return + } }) t.Run("collects primary keys", func(t *testing.T) { type Model struct { @@ -507,14 +514,17 @@ func TestSchemaInspector_Inspect(t *testing.T) { tables := schema.NewTables(dialect) tables.Register((*Model)(nil)) inspector := sqlschema.NewSchemaInspector(tables) - want := sqlschema.NewComposite("id", "email") + want := sqlschema.NewColumns("id", "email") got, err := inspector.Inspect(context.Background()) require.NoError(t, err) - require.Len(t, got.Tables, 1) - require.NotNilf(t, got.Tables[0].PK, "did not register primary key, want (%s)", want) - require.Equal(t, want, got.Tables[0].PK.Columns, "wrong primary key columns") + require.Len(t, got.TableDefinitions, 1) + for _, table := range got.TableDefinitions { + require.NotNilf(t, table.PrimaryKey, "did not register primary key, want (%s)", want) + require.Equal(t, want, table.PrimaryKey.Columns, "wrong primary key columns") + return + } }) }) } diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index 6119f0025..b63667483 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -214,14 +214,14 @@ func newAutoMigratorOrSkip(tb testing.TB, db *bun.DB, opts ...migrate.AutoMigrat // inspectDbOrSkip returns a function to inspect the current state of the database. // The test will be *skipped* if the current dialect doesn't support database inpection // and fail if the inspector cannot successfully retrieve database state. -func inspectDbOrSkip(tb testing.TB, db *bun.DB) func(context.Context) sqlschema.State { +func inspectDbOrSkip(tb testing.TB, db *bun.DB) func(context.Context) sqlschema.DatabaseSchema { tb.Helper() // AutoMigrator excludes these tables by default, but here we need to do this explicitly. 
inspector, err := sqlschema.NewInspector(db, migrationsTable, migrationLocksTable) if err != nil { tb.Skip(err) } - return func(ctx context.Context) sqlschema.State { + return func(ctx context.Context) sqlschema.DatabaseSchema { state, err := inspector.Inspect(ctx) require.NoError(tb, err) return state @@ -304,16 +304,16 @@ func TestAutoMigrator_Migrate(t *testing.T) { tests := []struct { fn func(t *testing.T, db *bun.DB) }{ - // {testRenameTable}, - // {testRenamedColumns}, + {testRenameTable}, + {testRenamedColumns}, {testCreateDropTable}, - // {testAlterForeignKeys}, - // {testChangeColumnType_AutoCast}, - // {testIdentity}, - // {testAddDropColumn}, - // {testUnique}, - // {testUniqueRenamedTable}, - // {testUpdatePrimaryKeys}, + {testAlterForeignKeys}, + {testChangeColumnType_AutoCast}, + {testIdentity}, + {testAddDropColumn}, + {testUnique}, + {testUniqueRenamedTable}, + {testUpdatePrimaryKeys}, // Suspended support for renaming foreign keys: // {testCustomFKNameFunc}, @@ -358,9 +358,9 @@ func testRenameTable(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - tables := state.Tables + tables := state.TableDefinitions require.Len(t, tables, 1) - require.Equal(t, "changed", tables[0].Name) + require.Contains(t, tables, "changed") } func testCreateDropTable(t *testing.T, db *bun.DB) { @@ -387,9 +387,9 @@ func testCreateDropTable(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - tables := state.Tables + tables := state.TableDefinitions require.Len(t, tables, 1) - require.Equal(t, "createme", tables[0].Name) + require.Contains(t, tables, "createme") } func testAlterForeignKeys(t *testing.T, db *bun.DB) { @@ -421,10 +421,11 @@ func testAlterForeignKeys(t *testing.T, db *bun.DB) { } type ThingsToOwner struct { - OwnerID int64 `bun:",notnull"` - Owner *OwnerCommon `bun:"rel:belongs-to,join:owner_id=id"` - ThingID int64 `bun:",notnull"` - Thing *ThingCommon `bun:"rel:belongs-to,join:thing_id=id"` + bun.BaseModel `bun:"things_to_owners"` + OwnerID 
int64 `bun:",notnull"` + Owner *OwnerCommon `bun:"rel:belongs-to,join:owner_id=id"` + ThingID int64 `bun:",notnull"` + Thing *ThingCommon `bun:"rel:belongs-to,join:thing_id=id"` } // Arrange @@ -452,19 +453,19 @@ func testAlterForeignKeys(t *testing.T, db *bun.DB) { defaultSchema := db.Dialect().DefaultSchema() // Crated 2 new constraints - require.Contains(t, state.FKs, sqlschema.FK{ - From: sqlschema.C(defaultSchema, "things_to_owners", "owner_id"), - To: sqlschema.C(defaultSchema, "owners", "id"), + require.Contains(t, state.ForeignKeys, sqlschema.ForeignKey{ + From: sqlschema.NewColumnReference(defaultSchema, "things_to_owners", "owner_id"), + To: sqlschema.NewColumnReference(defaultSchema, "owners", "id"), }) - require.Contains(t, state.FKs, sqlschema.FK{ - From: sqlschema.C(defaultSchema, "things_to_owners", "thing_id"), - To: sqlschema.C(defaultSchema, "things", "id"), + require.Contains(t, state.ForeignKeys, sqlschema.ForeignKey{ + From: sqlschema.NewColumnReference(defaultSchema, "things_to_owners", "thing_id"), + To: sqlschema.NewColumnReference(defaultSchema, "things", "id"), }) // Dropped the initial one - require.NotContains(t, state.FKs, sqlschema.FK{ - From: sqlschema.C(defaultSchema, "things", "owner_id"), - To: sqlschema.C(defaultSchema, "owners", "id"), + require.NotContains(t, state.ForeignKeys, sqlschema.ForeignKey{ + From: sqlschema.NewColumnReference(defaultSchema, "things", "owner_id"), + To: sqlschema.NewColumnReference(defaultSchema, "owners", "id"), }) } @@ -510,9 +511,9 @@ func testForceRenameFK(t *testing.T, db *bun.DB) { (*PersonalThing)(nil), ), migrate.WithRenameFK(true), - migrate.WithFKNameFunc(func(fk sqlschema.FK) string { + migrate.WithFKNameFunc(func(fk sqlschema.ForeignKey) string { return strings.Join([]string{ - fk.From.Table, fk.To.Table, "fkey", + fk.From.FQN.Table, fk.To.FQN.Table, "fkey", }, "_") }), ) @@ -523,11 +524,11 @@ func testForceRenameFK(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) schema := 
db.Dialect().DefaultSchema() - wantName, ok := state.FKs[sqlschema.FK{ - From: sqlschema.C(schema, "things", "owner_id"), - To: sqlschema.C(schema, "people", "id"), + wantName, ok := state.ForeignKeys[sqlschema.ForeignKey{ + From: sqlschema.NewColumnReference(schema, "things", "owner_id"), + To: sqlschema.NewColumnReference(schema, "people", "id"), }] - require.True(t, ok, "expect state.FKs to contain things_people_fkey") + require.True(t, ok, "expect state.ForeignKeys to contain things_people_fkey") require.Equal(t, wantName, "things_people_fkey") } @@ -563,7 +564,7 @@ func testCustomFKNameFunc(t *testing.T, db *bun.DB) { ) m := newAutoMigratorOrSkip(t, db, - migrate.WithFKNameFunc(func(sqlschema.FK) string { return "test_fkey" }), + migrate.WithFKNameFunc(func(sqlschema.ForeignKey) string { return "test_fkey" }), migrate.WithModel( (*TableM)(nil), (*ColumnM)(nil), @@ -575,9 +576,9 @@ func testCustomFKNameFunc(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - fkName := state.FKs[sqlschema.FK{ - From: sqlschema.C(db.Dialect().DefaultSchema(), "columns", "attrelid"), - To: sqlschema.C(db.Dialect().DefaultSchema(), "tables", "oid"), + fkName := state.ForeignKeys[sqlschema.ForeignKey{ + From: sqlschema.NewColumnReference(db.Dialect().DefaultSchema(), "columns", "attrelid"), + To: sqlschema.NewColumnReference(db.Dialect().DefaultSchema(), "tables", "oid"), }] require.Equal(t, "test_fkey", fkName) } @@ -626,10 +627,10 @@ func testRenamedColumns(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - require.Len(t, state.Tables, 2) + require.Len(t, state.TableDefinitions, 2) - var renamed, model2 sqlschema.Table - for _, tbl := range state.Tables { + var renamed, model2 sqlschema.TableDefinition + for _, tbl := range state.TableDefinitions { switch tbl.Name { case "renamed": renamed = tbl @@ -638,9 +639,9 @@ func testRenamedColumns(t *testing.T, db *bun.DB) { } } - require.Contains(t, renamed.Columns, "count") - require.Contains(t, model2.Columns, 
"second_column") - require.Contains(t, model2.Columns, "do_not_rename") + require.Contains(t, renamed.ColumnDefimitions, "count") + require.Contains(t, model2.ColumnDefimitions, "second_column") + require.Contains(t, model2.ColumnDefimitions, "do_not_rename") } func testRenameColumnRenamesFK(t *testing.T, db *bun.DB) { @@ -675,9 +676,9 @@ func testRenameColumnRenamesFK(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - fkName := state.FKs[sqlschema.FK{ - From: sqlschema.C(db.Dialect().DefaultSchema(), "tennants", "my_neighbour"), - To: sqlschema.C(db.Dialect().DefaultSchema(), "tennants", "tennant_id"), + fkName := state.ForeignKeys[sqlschema.ForeignKey{ + From: sqlschema.NewColumnReference(db.Dialect().DefaultSchema(), "tennants", "my_neighbour"), + To: sqlschema.NewColumnReference(db.Dialect().DefaultSchema(), "tennants", "tennant_id"), }] require.Equal(t, "tennants_my_neighbour_fkey", fkName) } @@ -686,7 +687,7 @@ func testRenameColumnRenamesFK(t *testing.T, db *bun.DB) { // i.e. do not require supplying a USING clause (pgdialect). 
func testChangeColumnType_AutoCast(t *testing.T, db *bun.DB) { type TableBefore struct { - bun.BaseModel `bun:"table:table"` + bun.BaseModel `bun:"table:change_me_own_type"` SmallInt int32 `bun:"bigger_int,pk,identity"` Timestamp time.Time `bun:"ts"` @@ -698,7 +699,7 @@ func testChangeColumnType_AutoCast(t *testing.T, db *bun.DB) { } type TableAfter struct { - bun.BaseModel `bun:"table:table"` + bun.BaseModel `bun:"table:change_me_own_type"` BigInt int64 `bun:"bigger_int,pk,identity"` // int64 maps to bigint Timestamp time.Time `bun:"ts,default:current_timestamp"` // has default value now @@ -709,11 +710,11 @@ func testChangeColumnType_AutoCast(t *testing.T, db *bun.DB) { // ManyValues []string `bun:",array"` // did not change } - wantTables := []sqlschema.Table{ - { + wantTables := map[string]sqlschema.TableDefinition{ + "change_me_own_type": { Schema: db.Dialect().DefaultSchema(), - Name: "table", - Columns: map[string]sqlschema.Column{ + Name: "change_me_own_type", + ColumnDefimitions: map[string]sqlschema.ColumnDefinition{ "bigger_int": { SQLType: "bigint", IsIdentity: true, @@ -746,7 +747,7 @@ func testChangeColumnType_AutoCast(t *testing.T, db *bun.DB) { // SQLType: "array", // }, }, - PK: &sqlschema.PK{Columns: sqlschema.NewComposite("bigger_int")}, + PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("bigger_int")}, }, } @@ -760,27 +761,27 @@ func testChangeColumnType_AutoCast(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.TableDefinitions) } func testIdentity(t *testing.T, db *bun.DB) { type TableBefore struct { - bun.BaseModel `bun:"table:table"` + bun.BaseModel `bun:"table:bourne_identity"` A int64 `bun:",notnull,identity"` B int64 } type TableAfter struct { - bun.BaseModel `bun:"table:table"` + bun.BaseModel `bun:"table:bourne_identity"` A int64 `bun:",notnull"` B int64 
`bun:",notnull,identity"` } - wantTables := []sqlschema.Table{ - { + wantTables := map[string]sqlschema.TableDefinition{ + "bourne_identity": { Schema: db.Dialect().DefaultSchema(), - Name: "table", - Columns: map[string]sqlschema.Column{ + Name: "bourne_identity", + ColumnDefimitions: map[string]sqlschema.ColumnDefinition{ "a": { SQLType: sqltype.BigInt, IsIdentity: false, // <- drop IDENTITY @@ -803,27 +804,27 @@ func testIdentity(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.TableDefinitions) } func testAddDropColumn(t *testing.T, db *bun.DB) { type TableBefore struct { - bun.BaseModel `bun:"table:table"` + bun.BaseModel `bun:"table:column_madness"` DoNotTouch string `bun:"do_not_touch"` DropMe string `bun:"dropme"` } type TableAfter struct { - bun.BaseModel `bun:"table:table"` + bun.BaseModel `bun:"table:column_madness"` DoNotTouch string `bun:"do_not_touch"` AddMe bool `bun:"addme"` } - wantTables := []sqlschema.Table{ - { + wantTables := map[string]sqlschema.TableDefinition{ + "column_madness": { Schema: db.Dialect().DefaultSchema(), - Name: "table", - Columns: map[string]sqlschema.Column{ + Name: "column_madness", + ColumnDefimitions: map[string]sqlschema.ColumnDefinition{ "do_not_touch": { SQLType: sqltype.VarChar, IsNullable: true, @@ -846,12 +847,12 @@ func testAddDropColumn(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.TableDefinitions) } func testUnique(t *testing.T, db *bun.DB) { type TableBefore struct { - bun.BaseModel `bun:"table:table"` + bun.BaseModel `bun:"table:uniqlo_stores"` FirstName string `bun:"first_name,unique:full_name"` LastName string `bun:"last_name,unique:full_name"` Birthday string 
`bun:"birthday,unique"` @@ -860,7 +861,7 @@ func testUnique(t *testing.T, db *bun.DB) { } type TableAfter struct { - bun.BaseModel `bun:"table:table"` + bun.BaseModel `bun:"table:uniqlo_stores"` FirstName string `bun:"first_name,unique:full_name"` MiddleName string `bun:"middle_name,unique:full_name"` // extend "full_name" unique group LastName string `bun:"last_name,unique:full_name"` @@ -872,11 +873,11 @@ func testUnique(t *testing.T, db *bun.DB) { PetBreed string `bun:"pet_breed"` // shrink "pet" unique group } - wantTables := []sqlschema.Table{ - { + wantTables := map[string]sqlschema.TableDefinition{ + "uniqlo_stores": { Schema: db.Dialect().DefaultSchema(), - Name: "table", - Columns: map[string]sqlschema.Column{ + Name: "uniqlo_stores", + ColumnDefimitions: map[string]sqlschema.ColumnDefinition{ "first_name": { SQLType: sqltype.VarChar, IsNullable: true, @@ -907,10 +908,10 @@ func testUnique(t *testing.T, db *bun.DB) { }, }, UniqueContraints: []sqlschema.Unique{ - {Columns: sqlschema.NewComposite("email")}, - {Columns: sqlschema.NewComposite("pet_name")}, + {Columns: sqlschema.NewColumns("email")}, + {Columns: sqlschema.NewColumns("pet_name")}, // We can only be sure of the user-defined index name - {Name: "full_name", Columns: sqlschema.NewComposite("first_name", "middle_name", "last_name")}, + {Name: "full_name", Columns: sqlschema.NewColumns("first_name", "middle_name", "last_name")}, }, }, } @@ -925,7 +926,7 @@ func testUnique(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.TableDefinitions) } func testUniqueRenamedTable(t *testing.T, db *bun.DB) { @@ -950,11 +951,11 @@ func testUniqueRenamedTable(t *testing.T, db *bun.DB) { PetBreed string `bun:"pet_breed,unique"` } - wantTables := []sqlschema.Table{ - { + wantTables := map[string]sqlschema.TableDefinition{ + "after": { Schema: 
db.Dialect().DefaultSchema(), Name: "after", - Columns: map[string]sqlschema.Column{ + ColumnDefimitions: map[string]sqlschema.ColumnDefinition{ "first_name": { SQLType: sqltype.VarChar, IsNullable: true, @@ -977,9 +978,9 @@ func testUniqueRenamedTable(t *testing.T, db *bun.DB) { }, }, UniqueContraints: []sqlschema.Unique{ - {Columns: sqlschema.NewComposite("pet_name")}, - {Columns: sqlschema.NewComposite("pet_breed")}, - {Name: "full_name", Columns: sqlschema.NewComposite("first_name", "last_name", "birthday")}, + {Columns: sqlschema.NewColumns("pet_name")}, + {Columns: sqlschema.NewColumns("pet_breed")}, + {Name: "full_name", Columns: sqlschema.NewColumns("first_name", "last_name", "birthday")}, }, }, } @@ -995,7 +996,7 @@ func testUniqueRenamedTable(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.TableDefinitions) } func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { @@ -1045,11 +1046,11 @@ func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { LastName string `bun:"last_name,pk"` } - wantTables := []sqlschema.Table{ - { + wantTables := map[string]sqlschema.TableDefinition{ + "drop_your_pks": { Schema: db.Dialect().DefaultSchema(), Name: "drop_your_pks", - Columns: map[string]sqlschema.Column{ + ColumnDefimitions: map[string]sqlschema.ColumnDefinition{ "first_name": { SQLType: sqltype.VarChar, IsNullable: false, @@ -1060,10 +1061,10 @@ func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { }, }, }, - { + "add_new_pk": { Schema: db.Dialect().DefaultSchema(), Name: "add_new_pk", - Columns: map[string]sqlschema.Column{ + ColumnDefimitions: map[string]sqlschema.ColumnDefinition{ "new_id": { SQLType: sqltype.BigInt, IsNullable: false, @@ -1078,12 +1079,12 @@ func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { IsNullable: true, }, }, - PK: &sqlschema.PK{Columns: 
sqlschema.NewComposite("new_id")}, + PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("new_id")}, }, - { + "change_pk": { Schema: db.Dialect().DefaultSchema(), Name: "change_pk", - Columns: map[string]sqlschema.Column{ + ColumnDefimitions: map[string]sqlschema.ColumnDefinition{ "first_name": { SQLType: sqltype.VarChar, IsNullable: false, @@ -1093,7 +1094,7 @@ func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { IsNullable: false, }, }, - PK: &sqlschema.PK{Columns: sqlschema.NewComposite("first_name", "last_name")}, + PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("first_name", "last_name")}, }, } @@ -1115,5 +1116,5 @@ func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.TableDefinitions) } diff --git a/internal/dbtest/query_test.go b/internal/dbtest/query_test.go index 85a4e4699..541c0d7c4 100644 --- a/internal/dbtest/query_test.go +++ b/internal/dbtest/query_test.go @@ -1618,7 +1618,7 @@ func TestAlterTable(t *testing.T) { {name: "add column with default value", operation: &migrate.AddColumnOp{ FQN: fqn, Column: "language", - ColDef: sqlschema.Column{ + ColDef: sqlschema.ColumnDefinition{ SQLType: "varchar", VarcharLen: 20, IsNullable: false, @@ -1628,7 +1628,7 @@ func TestAlterTable(t *testing.T) { {name: "add column with identity", operation: &migrate.AddColumnOp{ FQN: fqn, Column: "n", - ColDef: sqlschema.Column{ + ColDef: sqlschema.ColumnDefinition{ SQLType: sqltype.BigInt, IsNullable: false, IsIdentity: true, @@ -1637,7 +1637,7 @@ func TestAlterTable(t *testing.T) { {name: "drop column", operation: &migrate.DropColumnOp{ FQN: fqn, Column: "director", - ColDef: sqlschema.Column{ + ColDef: sqlschema.ColumnDefinition{ SQLType: sqltype.VarChar, IsNullable: false, }, @@ -1646,101 +1646,101 @@ func TestAlterTable(t *testing.T) { FQN: fqn, 
Unique: sqlschema.Unique{ Name: "one_genre_per_director", - Columns: sqlschema.NewComposite("genre", "director"), + Columns: sqlschema.NewColumns("genre", "director"), }, }}, {name: "drop unique constraint", operation: &migrate.DropUniqueConstraintOp{ FQN: fqn, Unique: sqlschema.Unique{ Name: "one_genre_per_director", - Columns: sqlschema.NewComposite("genre", "director"), + Columns: sqlschema.NewColumns("genre", "director"), }, }}, {name: "change column type int to bigint", operation: &migrate.ChangeColumnTypeOp{ FQN: fqn, Column: "budget", - From: sqlschema.Column{SQLType: sqltype.Integer}, - To: sqlschema.Column{SQLType: sqltype.BigInt}, + From: sqlschema.ColumnDefinition{SQLType: sqltype.Integer}, + To: sqlschema.ColumnDefinition{SQLType: sqltype.BigInt}, }}, {name: "add default", operation: &migrate.ChangeColumnTypeOp{ FQN: fqn, Column: "budget", - From: sqlschema.Column{DefaultValue: ""}, - To: sqlschema.Column{DefaultValue: "100"}, + From: sqlschema.ColumnDefinition{DefaultValue: ""}, + To: sqlschema.ColumnDefinition{DefaultValue: "100"}, }}, {name: "drop default", operation: &migrate.ChangeColumnTypeOp{ FQN: fqn, Column: "budget", - From: sqlschema.Column{DefaultValue: "100"}, - To: sqlschema.Column{DefaultValue: ""}, + From: sqlschema.ColumnDefinition{DefaultValue: "100"}, + To: sqlschema.ColumnDefinition{DefaultValue: ""}, }}, {name: "make nullable", operation: &migrate.ChangeColumnTypeOp{ FQN: fqn, Column: "director", - From: sqlschema.Column{IsNullable: false}, - To: sqlschema.Column{IsNullable: true}, + From: sqlschema.ColumnDefinition{IsNullable: false}, + To: sqlschema.ColumnDefinition{IsNullable: true}, }}, {name: "add notnull", operation: &migrate.ChangeColumnTypeOp{ FQN: fqn, Column: "budget", - From: sqlschema.Column{IsNullable: true}, - To: sqlschema.Column{IsNullable: false}, + From: sqlschema.ColumnDefinition{IsNullable: true}, + To: sqlschema.ColumnDefinition{IsNullable: false}, }}, {name: "increase varchar length", operation: 
&migrate.ChangeColumnTypeOp{ FQN: fqn, Column: "language", - From: sqlschema.Column{SQLType: "varchar", VarcharLen: 20}, - To: sqlschema.Column{SQLType: "varchar", VarcharLen: 255}, + From: sqlschema.ColumnDefinition{SQLType: "varchar", VarcharLen: 20}, + To: sqlschema.ColumnDefinition{SQLType: "varchar", VarcharLen: 255}, }}, {name: "add identity", operation: &migrate.ChangeColumnTypeOp{ FQN: fqn, Column: "id", - From: sqlschema.Column{IsIdentity: false}, - To: sqlschema.Column{IsIdentity: true}, + From: sqlschema.ColumnDefinition{IsIdentity: false}, + To: sqlschema.ColumnDefinition{IsIdentity: true}, }}, {name: "drop identity", operation: &migrate.ChangeColumnTypeOp{ FQN: fqn, Column: "id", - From: sqlschema.Column{IsIdentity: true}, - To: sqlschema.Column{IsIdentity: false}, + From: sqlschema.ColumnDefinition{IsIdentity: true}, + To: sqlschema.ColumnDefinition{IsIdentity: false}, }}, {name: "add primary key", operation: &migrate.AddPrimaryKeyOp{ FQN: fqn, - PK: &sqlschema.PK{ + PrimaryKey: sqlschema.PrimaryKey{ Name: "new_pk", - Columns: sqlschema.NewComposite("id"), + Columns: sqlschema.NewColumns("id"), }, }}, {name: "drop primary key", operation: &migrate.DropPrimaryKeyOp{ FQN: fqn, - PK: &sqlschema.PK{ + PrimaryKey: sqlschema.PrimaryKey{ Name: "new_pk", - Columns: sqlschema.NewComposite("id"), + Columns: sqlschema.NewColumns("id"), }, }}, {name: "change primary key", operation: &migrate.ChangePrimaryKeyOp{ FQN: fqn, - Old: &sqlschema.PK{ + Old: sqlschema.PrimaryKey{ Name: "old_pk", - Columns: sqlschema.NewComposite("id"), + Columns: sqlschema.NewColumns("id"), }, - New: &sqlschema.PK{ + New: sqlschema.PrimaryKey{ Name: "new_pk", - Columns: sqlschema.NewComposite("director", "genre"), + Columns: sqlschema.NewColumns("director", "genre"), }, }}, {name: "add foreign key", operation: &migrate.AddForeignKeyOp{ ConstraintName: "genre_description", - FK: sqlschema.FK{ - From: sqlschema.C("hobbies", "movies", "genre"), - To: sqlschema.C("wiki", "film_genres", "id"), 
+ ForeignKey: sqlschema.ForeignKey{ + From: sqlschema.NewColumnReference("hobbies", "movies", "genre"), + To: sqlschema.NewColumnReference("wiki", "film_genres", "id"), }, }}, {name: "drop foreign key", operation: &migrate.DropForeignKeyOp{ ConstraintName: "genre_description", - FK: sqlschema.FK{ - From: sqlschema.C("hobbies", "movies", "genre"), - To: sqlschema.C("wiki", "film_genres", "id"), + ForeignKey: sqlschema.ForeignKey{ + From: sqlschema.NewColumnReference("hobbies", "movies", "genre"), + To: sqlschema.NewColumnReference("wiki", "film_genres", "id"), }, }}, } diff --git a/internal/dbtest/sqlschema_test.go b/internal/dbtest/sqlschema_test.go deleted file mode 100644 index 29f709e14..000000000 --- a/internal/dbtest/sqlschema_test.go +++ /dev/null @@ -1,222 +0,0 @@ -package dbtest_test - -import ( - "testing" - - "github.com/stretchr/testify/require" - "github.com/uptrace/bun/migrate/sqlschema" -) - -func TestRefMap_Update(t *testing.T) { - for _, tt := range []struct { - name string - fks []sqlschema.FK - update func(rm sqlschema.RefMap) int - wantUpdated int - wantFKs []sqlschema.FK - }{ - { - name: "update table reference in all FKs that reference its columns", - fks: []sqlschema.FK{ - { - From: sqlschema.C("x", "y", "z"), - To: sqlschema.C("a", "b", "c"), - }, - { - From: sqlschema.C("m", "n", "o"), - To: sqlschema.C("a", "b", "d"), - }, - }, - update: func(rm sqlschema.RefMap) int { - return rm.UpdateT(sqlschema.T("a", "b"), sqlschema.T("a", "new_b")) - }, - wantUpdated: 2, - wantFKs: []sqlschema.FK{ // checking 1 of the 2 updated ones should be enough - { - From: sqlschema.C("x", "y", "z"), - To: sqlschema.C("a", "new_b", "c"), - }, - }, - }, - { - name: "update table reference in FK which points to the same table", - fks: []sqlschema.FK{ - { - From: sqlschema.C("a", "b", "child"), - To: sqlschema.C("a", "b", "parent"), - }, - }, - update: func(rm sqlschema.RefMap) int { - return rm.UpdateT(sqlschema.T("a", "b"), sqlschema.T("a", "new_b")) - }, - 
wantUpdated: 1, - wantFKs: []sqlschema.FK{ - { - From: sqlschema.C("a", "new_b", "child"), - To: sqlschema.C("a", "new_b", "parent"), - }, - }, - }, - { - name: "update column reference in all FKs which depend on it", - fks: []sqlschema.FK{ - { - From: sqlschema.C("x", "y", "z"), - To: sqlschema.C("a", "b", "c"), - }, - { - From: sqlschema.C("a", "b", "c"), - To: sqlschema.C("m", "n", "o"), - }, - }, - update: func(rm sqlschema.RefMap) int { - return rm.UpdateC(sqlschema.C("a", "b", "c"), "c_new") - }, - wantUpdated: 2, - wantFKs: []sqlschema.FK{ - { - From: sqlschema.C("x", "y", "z"), - To: sqlschema.C("a", "b", "c_new"), - }, - }, - }, - { - name: "foreign keys defined on multiple columns", - fks: []sqlschema.FK{ - { - From: sqlschema.C("a", "b", "c1", "c2"), - To: sqlschema.C("q", "r", "s1", "s2"), - }, - { - From: sqlschema.C("m", "n", "o", "p"), - To: sqlschema.C("a", "b", "c2"), - }, - }, - update: func(rm sqlschema.RefMap) int { - return rm.UpdateC(sqlschema.C("a", "b", "c2"), "x2") - }, - wantUpdated: 2, - wantFKs: []sqlschema.FK{ - { - From: sqlschema.C("a", "b", "c1", "x2"), - To: sqlschema.C("q", "r", "s1", "s2"), - }, - }, - }, - } { - t.Run(tt.name, func(t *testing.T) { - rm := sqlschema.NewRefMap(tt.fks...) - - n := tt.update(rm) - - require.Equal(t, tt.wantUpdated, n) - require.Equal(t, tt.wantUpdated, len(rm.Updated())) - checkHasFK(t, rm, tt.wantFKs...) 
- }) - } -} - -func checkHasFK(tb testing.TB, rm sqlschema.RefMap, fks ...sqlschema.FK) { -outer: - for _, want := range fks { - for _, gotptr := range rm { - if got := *gotptr; got == want { - continue outer - } - } - tb.Fatalf("did not find FK%+v", want) - } -} - -func TestRefMap_Delete(t *testing.T) { - for _, tt := range []struct { - name string - fks []sqlschema.FK - del func(rm sqlschema.RefMap) int - wantDeleted []sqlschema.FK - }{ - { - name: "delete FKs that depend on the table", - fks: []sqlschema.FK{ - { - From: sqlschema.C("a", "b", "c"), - To: sqlschema.C("x", "y", "z"), - }, - { - From: sqlschema.C("m", "n", "o"), - To: sqlschema.C("a", "b", "d"), - }, - { - From: sqlschema.C("q", "r", "s"), - To: sqlschema.C("w", "w", "w"), - }, - }, - del: func(rm sqlschema.RefMap) int { - return rm.DeleteT(sqlschema.T("a", "b")) - }, - wantDeleted: []sqlschema.FK{ - { - From: sqlschema.C("a", "b", "c"), - To: sqlschema.C("x", "y", "z"), - }, - { - From: sqlschema.C("m", "n", "o"), - To: sqlschema.C("a", "b", "d"), - }, - }, - }, - { - name: "delete FKs that depend on the column", - fks: []sqlschema.FK{ - { - From: sqlschema.C("a", "b", "c"), - To: sqlschema.C("x", "y", "z"), - }, - { - From: sqlschema.C("q", "r", "s"), - To: sqlschema.C("w", "w", "w"), - }, - }, - del: func(rm sqlschema.RefMap) int { - return rm.DeleteC(sqlschema.C("a", "b", "c")) - }, - wantDeleted: []sqlschema.FK{ - { - From: sqlschema.C("a", "b", "c"), - To: sqlschema.C("x", "y", "z"), - }, - }, - }, - { - name: "foreign keys defined on multiple columns", - fks: []sqlschema.FK{ - { - From: sqlschema.C("a", "b", "c1", "c2"), - To: sqlschema.C("q", "r", "s1", "s2"), - }, - { - From: sqlschema.C("m", "n", "o", "p"), - To: sqlschema.C("a", "b", "c2"), - }, - }, - del: func(rm sqlschema.RefMap) int { - return rm.DeleteC(sqlschema.C("a", "b", "c1")) - }, - wantDeleted: []sqlschema.FK{ - { - From: sqlschema.C("a", "b", "c1", "c2"), - To: sqlschema.C("q", "r", "s1", "s2"), - }, - }, - }, - } { - 
t.Run(tt.name, func(t *testing.T) { - rm := sqlschema.NewRefMap(tt.fks...) - - n := tt.del(rm) - - require.Equal(t, len(tt.wantDeleted), n) - require.ElementsMatch(t, rm.Deleted(), tt.wantDeleted) - }) - } -} diff --git a/migrate/auto.go b/migrate/auto.go index 70236e8e5..6376c5137 100644 --- a/migrate/auto.go +++ b/migrate/auto.go @@ -36,7 +36,7 @@ func WithExcludeTable(tables ...string) AutoMigratorOption { // // More generally, this option will have no effect whenever FKs are included in the CREATE TABLE definition, // which is the default strategy. Perhaps it would make sense to allow disabling this and switching to separate (CreateTable + AddFK) -func WithFKNameFunc(f func(sqlschema.FK) string) AutoMigratorOption { +func WithFKNameFunc(f func(sqlschema.ForeignKey) string) AutoMigratorOption { return func(m *AutoMigrator) { m.diffOpts = append(m.diffOpts, withFKNameFunc(f)) } diff --git a/migrate/diff.go b/migrate/diff.go index 54e4b9a8f..8dd1cf038 100644 --- a/migrate/diff.go +++ b/migrate/diff.go @@ -17,115 +17,70 @@ import ( // The result changeset is not sorted, i.e. the caller should resolve dependencies // before applying the changes. func (d *detector) Diff() *changeset { - targetTables := newTableSet(d.target.Tables...) - currentTables := newTableSet(d.current.Tables...) // keeps state (which models still need to be checked) - - // These table-sets record changes to the targetTables set. - created := newTableSet() - renamed := newTableSet() - - // Discover CREATE/RENAME/DROP TABLE - addedTables := targetTables.Sub(currentTables) -AddedLoop: - for _, added := range addedTables.Values() { - removedTables := currentTables.Sub(targetTables) - for _, removed := range removedTables.Values() { - if d.canRename(removed, added) { +RenameCreate: + for wantName, wantTable := range d.target.TableDefinitions { + + // A table with this name exists in the database. 
We assume that schema objects won't + // be renamed to an already existing name, nor do we support such cases. + // Simply check if the table definition has changed. + if haveTable, ok := d.current.TableDefinitions[wantName]; ok { + d.detectColumnChanges(haveTable, wantTable, true) + d.detectConstraintChanges(haveTable, wantTable) + continue + } + + // Find all renamed tables. We assume that renamed tables have the same signature. + for haveName, haveTable := range d.current.TableDefinitions { + if _, exists := d.target.TableDefinitions[haveName]; !exists && d.canRename(haveTable, wantTable) { d.changes.Add(&RenameTableOp{ - FQN: schema.FQN{Schema: removed.Schema, Table: removed.Name}, - NewName: added.Name, + FQN: haveTable.FQN(), + NewName: wantName, }) - - // Here we do not check for created / dropped columns, as well as column type changes, - // because it is only possible to detect a renamed table if its signature (see state.go) did not change. - d.detectColumnChanges(removed, added, false) - d.detectConstraintChanges(removed, added) - - // Update referenced table in all related FKs. - if d.detectRenamedFKs { - d.refMap.UpdateT(removed.T(), added.T()) - } - - renamed.Add(added) - - // Do not check this model further, we know it was renamed. - currentTables.Remove(removed.Name) - continue AddedLoop + d.refMap.RenameTable(haveTable.FQN(), wantName) + + // Find renamed columns, if any, and check if constraints (PK, UNIQUE) have been updated. + // We need not check wantTable any further. + d.detectColumnChanges(haveTable, wantTable, false) + d.detectConstraintChanges(haveTable, wantTable) + delete(d.current.TableDefinitions, haveName) + continue RenameCreate } } - // If a new table did not appear because of the rename operation, then it must've been created. 
- d.changes.Add(&CreateTableOp{ - FQN: schema.FQN{Schema: added.Schema, Table: added.Name}, - Model: added.Model, - }) - created.Add(added) - } - // Tables that aren't present anymore and weren't renamed or left untouched were deleted. - dropped := currentTables.Sub(targetTables) - for _, t := range dropped.Values() { - d.changes.Add(&DropTableOp{ - FQN: schema.FQN{Schema: t.Schema, Table: t.Name}, + // If wantTable does not exist in the database and was not renamed + // then we need to create this table in the database. + additional := wantTable.Additional.(sqlschema.SchemaTable) + d.changes.Add(&CreateTableOp{ + FQN: wantTable.FQN(), + Model: additional.Model, }) } - // Detect changes in existing tables that weren't renamed. - // - // TODO: here having State.Tables be a map[string]Table would be much more convenient. - // Then we can alse retire tableSet, or at least simplify it to a certain extent. - curEx := currentTables.Sub(dropped) - tarEx := targetTables.Sub(created).Sub(renamed) - for _, target := range tarEx.Values() { - // TODO(dyma): step is redundant if we have map[string]Table - var current sqlschema.Table - for _, cur := range curEx.Values() { - if cur.Name == target.Name { - current = cur - break - } - } - d.detectColumnChanges(current, target, true) - d.detectConstraintChanges(current, target) - } - - // Compare and update FKs ---------------- - currentFKs := make(map[sqlschema.FK]string) - for k, v := range d.current.FKs { - currentFKs[k] = v - } - - if d.detectRenamedFKs { - // Add RenameFK migrations for updated FKs. - for old, renamed := range d.refMap.Updated() { - newName := d.fkNameFunc(renamed) - d.changes.Add(&RenameForeignKeyOp{ - FK: renamed, // TODO: make sure this is applied after the table/columns are renamed - OldName: d.current.FKs[old], - NewName: newName, + // Drop any remaining "current" tables which do not have a model. 
+ for name, table := range d.current.TableDefinitions { + if _, keep := d.target.TableDefinitions[name]; !keep { + d.changes.Add(&DropTableOp{ + FQN: table.FQN(), }) - - // Add this FK to currentFKs to prevent it from firing in the two loops below. - currentFKs[renamed] = newName - delete(currentFKs, old) } } - // Add AddFK migrations for newly added FKs. - for fk := range d.target.FKs { + currentFKs := d.refMap.Deref() + + for fk := range d.target.ForeignKeys { if _, ok := currentFKs[fk]; !ok { d.changes.Add(&AddForeignKeyOp{ - FK: fk, + ForeignKey: fk, ConstraintName: d.fkNameFunc(fk), }) } } - // Add DropFK migrations for removed FKs. - for fk, fkName := range currentFKs { - if _, ok := d.target.FKs[fk]; !ok { + for fk, name := range currentFKs { + if _, ok := d.target.ForeignKeys[fk]; !ok { d.changes.Add(&DropForeignKeyOp{ - FK: fk, - ConstraintName: fkName, + ConstraintName: name, + ForeignKey: fk, }) } } @@ -293,9 +248,9 @@ func (c *changeset) ResolveDependencies() error { type diffOption func(*detectorConfig) -func withFKNameFunc(f func(sqlschema.FK) string) diffOption { +func withFKNameFunc(f func(sqlschema.ForeignKey) string) diffOption { return func(cfg *detectorConfig) { - cfg.FKNameFunc = f + // cfg.FKNameFunc = f } } @@ -305,7 +260,7 @@ func withDetectRenamedFKs(enabled bool) diffOption { } } -func withTypeEquivalenceFunc(f sqlschema.TypeEquivalenceFunc) diffOption { +func withTypeEquivalenceFunc(f TypeEquivalenceFunc) diffOption { return func(cfg *detectorConfig) { cfg.EqType = f } @@ -313,39 +268,40 @@ func withTypeEquivalenceFunc(f sqlschema.TypeEquivalenceFunc) diffOption { // detectorConfig controls how differences in the model states are resolved. type detectorConfig struct { - FKNameFunc func(sqlschema.FK) string + FKNameFunc func(sqlschema.ForeignKey) string DetectRenamedFKs bool - EqType sqlschema.TypeEquivalenceFunc + EqType TypeEquivalenceFunc } +// detector may modify the passed database schemas, so it isn't safe to re-use them. 
type detector struct { // current state represents the existing database schema. - current sqlschema.State + current sqlschema.DatabaseSchema // target state represents the database schema defined in bun models. - target sqlschema.State + target sqlschema.DatabaseSchema changes changeset - refMap sqlschema.RefMap + refMap refMap // fkNameFunc builds the name for created/renamed FK contraints. - fkNameFunc func(sqlschema.FK) string + fkNameFunc func(sqlschema.ForeignKey) string // eqType determines column type equivalence. // Default is direct comparison with '==' operator, which is inaccurate // due to the existence of dialect-specific type aliases. The caller // should pass a concrete InspectorDialect.EquuivalentType for robust comparison. - eqType sqlschema.TypeEquivalenceFunc + eqType TypeEquivalenceFunc // detectRenemedFKs controls how FKs are treated when their references (table/column) are renamed. detectRenamedFKs bool } -func newDetector(got, want sqlschema.State, opts ...diffOption) *detector { +func newDetector(got, want sqlschema.DatabaseSchema, opts ...diffOption) *detector { cfg := &detectorConfig{ FKNameFunc: defaultFKName, DetectRenamedFKs: false, - EqType: func(c1, c2 sqlschema.Column) bool { + EqType: func(c1, c2 sqlschema.ColumnDefinition) bool { return c1.SQLType == c2.SQLType && c1.VarcharLen == c2.VarcharLen }, } @@ -353,15 +309,10 @@ func newDetector(got, want sqlschema.State, opts ...diffOption) *detector { opt(cfg) } - var existingFKs []sqlschema.FK - for fk := range got.FKs { - existingFKs = append(existingFKs, fk) - } - return &detector{ current: got, target: want, - refMap: sqlschema.NewRefMap(existingFKs...), + refMap: newRefMap(got.ForeignKeys), fkNameFunc: cfg.FKNameFunc, detectRenamedFKs: cfg.DetectRenamedFKs, eqType: cfg.EqType, @@ -369,11 +320,11 @@ func newDetector(got, want sqlschema.State, opts ...diffOption) *detector { } // canRename checks if t1 can be renamed to t2. 
-func (d *detector) canRename(t1, t2 sqlschema.Table) bool { - return t1.Schema == t2.Schema && sqlschema.EqualSignatures(t1, t2, d.equalColumns) +func (d *detector) canRename(t1, t2 sqlschema.TableDefinition) bool { + return t1.Schema == t2.Schema && equalSignatures(t1, t2, d.equalColumns) } -func (d *detector) equalColumns(col1, col2 sqlschema.Column) bool { +func (d *detector) equalColumns(col1, col2 sqlschema.ColumnDefinition) bool { return d.eqType(col1, col2) && col1.DefaultValue == col2.DefaultValue && col1.IsNullable == col2.IsNullable && @@ -381,7 +332,7 @@ func (d *detector) equalColumns(col1, col2 sqlschema.Column) bool { col1.IsIdentity == col2.IsIdentity } -func (d *detector) makeTargetColDef(current, target sqlschema.Column) sqlschema.Column { +func (d *detector) makeTargetColDef(current, target sqlschema.ColumnDefinition) sqlschema.ColumnDefinition { // Avoid unneccessary type-change migrations if the types are equivalent. if d.eqType(current, target) { target.SQLType = current.SQLType @@ -391,16 +342,16 @@ func (d *detector) makeTargetColDef(current, target sqlschema.Column) sqlschema. } // detechColumnChanges finds renamed columns and, if checkType == true, columns with changed type. -func (d *detector) detectColumnChanges(current, target sqlschema.Table, checkType bool) { +func (d *detector) detectColumnChanges(current, target sqlschema.TableDefinition, checkType bool) { fqn := schema.FQN{Schema: target.Schema, Table: target.Name} ChangedRenamed: - for tName, tCol := range target.Columns { + for tName, tCol := range target.ColumnDefimitions { // This column exists in the database, so it hasn't been renamed, dropped, or added. // Still, we should not delete(columns, thisColumn), because later we will need to // check that we do not try to rename a column to an already a name that already exists. 
- if cCol, ok := current.Columns[tName]; ok { + if cCol, ok := current.ColumnDefimitions[tName]; ok { if checkType && !d.equalColumns(cCol, tCol) { d.changes.Add(&ChangeColumnTypeOp{ FQN: fqn, @@ -414,9 +365,9 @@ ChangedRenamed: // Column tName does not exist in the database -- it's been either renamed or added. // Find renamed columns first. - for cName, cCol := range current.Columns { + for cName, cCol := range current.ColumnDefimitions { // Cannot rename if a column with this name already exists or the types differ. - if _, exists := target.Columns[cName]; exists || !d.equalColumns(tCol, cCol) { + if _, exists := target.ColumnDefimitions[cName]; exists || !d.equalColumns(tCol, cCol) { continue } d.changes.Add(&RenameColumnOp{ @@ -424,11 +375,11 @@ ChangedRenamed: OldName: cName, NewName: tName, }) - delete(current.Columns, cName) // no need to check this column again - d.refMap.UpdateC(sqlschema.C(target.Schema, target.Name, cName), tName) + d.refMap.RenameColumn(fqn, cName, tName) + delete(current.ColumnDefimitions, cName) // no need to check this column again // Update primary key definition to avoid superficially recreating the constraint. - current.PK.Columns = current.PK.Columns.Replace(cName, tName) + current.PrimaryKey.Columns.Replace(cName, tName) continue ChangedRenamed } @@ -441,8 +392,8 @@ ChangedRenamed: } // Drop columns which do not exist in the target schema and were not renamed. 
- for cName, cCol := range current.Columns { - if _, keep := target.Columns[cName]; !keep { + for cName, cCol := range current.ColumnDefimitions { + if _, keep := target.ColumnDefimitions[cName]; !keep { d.changes.Add(&DropColumnOp{ FQN: fqn, Column: cName, @@ -452,7 +403,7 @@ ChangedRenamed: } } -func (d *detector) detectConstraintChanges(current, target sqlschema.Table) { +func (d *detector) detectConstraintChanges(current, target sqlschema.TableDefinition) { fqn := schema.FQN{Schema: target.Schema, Table: target.Name} Add: @@ -483,93 +434,138 @@ Drop: } // Detect primary key changes - if target.PK == nil && current.PK == nil { + if target.PrimaryKey == nil && current.PrimaryKey == nil { return } switch { - case target.PK == nil && current.PK != nil: + case target.PrimaryKey == nil && current.PrimaryKey != nil: d.changes.Add(&DropPrimaryKeyOp{ - FQN: fqn, - PK: current.PK, + FQN: fqn, + PrimaryKey: *current.PrimaryKey, }) - case current.PK == nil && target.PK != nil: + case current.PrimaryKey == nil && target.PrimaryKey != nil: d.changes.Add(&AddPrimaryKeyOp{ - FQN: fqn, - PK: target.PK, + FQN: fqn, + PrimaryKey: *target.PrimaryKey, }) - case target.PK.Columns != current.PK.Columns: + case target.PrimaryKey.Columns != current.PrimaryKey.Columns: d.changes.Add(&ChangePrimaryKeyOp{ FQN: fqn, - Old: current.PK, - New: target.PK, + Old: *current.PrimaryKey, + New: *target.PrimaryKey, }) } } -// sqlschema utils ------------------------------------------------------------ +// defaultFKName returns a name for the FK constraint in the format {tablename}_{columnname(s)}_fkey, following the Postgres convention. +func defaultFKName(fk sqlschema.ForeignKey) string { + columnnames := strings.Join(fk.From.Column.Split(), "_") + return fmt.Sprintf("%s_%s_fkey", fk.From.FQN.Table, columnnames) +} -// tableSet stores unique table definitions. 
-type tableSet struct { - underlying map[string]sqlschema.Table +type TypeEquivalenceFunc func(sqlschema.ColumnDefinition, sqlschema.ColumnDefinition) bool + +// equalSignatures determines if two tables have the same "signature". +func equalSignatures(t1, t2 sqlschema.TableDefinition, eq TypeEquivalenceFunc) bool { + sig1 := newSignature(t1, eq) + sig2 := newSignature(t2, eq) + return sig1.Equals(sig2) } -func newTableSet(initial ...sqlschema.Table) *tableSet { - set := &tableSet{ - underlying: make(map[string]sqlschema.Table), - } - for _, t := range initial { - set.Add(t) - } - return set +// signature is a set of column definitions, which allows "relation/name-agnostic" comparison between them; +// meaning that two columns are considered equal if their types are the same. +type signature struct { + + // underlying stores the number of occurences for each unique column type. + // It helps to account for the fact that a table might have multiple columns that have the same type. + underlying map[sqlschema.ColumnDefinition]int + + eq TypeEquivalenceFunc } -func (set *tableSet) Add(t sqlschema.Table) { - set.underlying[t.Name] = t +func newSignature(t sqlschema.TableDefinition, eq TypeEquivalenceFunc) signature { + s := signature{ + underlying: make(map[sqlschema.ColumnDefinition]int), + eq: eq, + } + s.scan(t) + return s } -func (set *tableSet) Remove(s string) { - delete(set.underlying, s) +// scan iterates over table's field and counts occurrences of each unique column definition. +func (s *signature) scan(t sqlschema.TableDefinition) { + for _, scanCol := range t.ColumnDefimitions { + // This is slightly more expensive than if the columns could be compared directly + // and we always did s.underlying[col]++, but we get type-equivalence in return. 
+ col, count := s.getCount(scanCol) + if count == 0 { + s.underlying[scanCol] = 1 + } else { + s.underlying[col]++ + } + } } -func (set *tableSet) Values() (tables []sqlschema.Table) { - for _, t := range set.underlying { - tables = append(tables, t) +// getCount uses TypeEquivalenceFunc to find a column with the same (equivalent) SQL type +// and returns its count. Count 0 means there are no columns with of this type. +func (s *signature) getCount(keyCol sqlschema.ColumnDefinition) (key sqlschema.ColumnDefinition, count int) { + for col, cnt := range s.underlying { + if s.eq(col, keyCol) { + return col, cnt + } } - return + return keyCol, 0 } -func (set *tableSet) Sub(other *tableSet) *tableSet { - res := set.clone() - for v := range other.underlying { - if _, ok := set.underlying[v]; ok { - res.Remove(v) +// Equals returns true if 2 signatures share an identical set of columns. +func (s *signature) Equals(other signature) bool { + if len(s.underlying) != len(other.underlying) { + return false + } + for col, count := range s.underlying { + if _, countOther := other.getCount(col); countOther != count { + return false } } - return res + return true } -func (set *tableSet) clone() *tableSet { - res := newTableSet() - for _, t := range set.underlying { - res.Add(t) +type refMap map[*sqlschema.ForeignKey]string + +func newRefMap(fks map[sqlschema.ForeignKey]string) refMap { + rm := make(map[*sqlschema.ForeignKey]string) + for fk, name := range fks { + rm[&fk] = name } - return res + return rm } -// String is a debug helper to get a list of table names in the set. 
-func (set *tableSet) String() string { - var s strings.Builder - for k := range set.underlying { - if s.Len() > 0 { - s.WriteString(", ") +func (rm refMap) RenameTable(table schema.FQN, newName string) { + for fk := range rm { + switch table { + case fk.From.FQN: + fk.From.FQN.Table = newName + case fk.To.FQN: + fk.To.FQN.Table = newName } - s.WriteString(k) } - return s.String() } -// defaultFKName returns a name for the FK constraint in the format {tablename}_{columnname(s)}_fkey, following the Postgres convention. -func defaultFKName(fk sqlschema.FK) string { - columnnames := strings.Join(fk.From.Column.Split(), "_") - return fmt.Sprintf("%s_%s_fkey", fk.From.Table, columnnames) +func (rm refMap) RenameColumn(table schema.FQN, column, newName string) { + for fk := range rm { + if table == fk.From.FQN { + fk.From.Column.Replace(column, newName) + } + if table == fk.To.FQN { + fk.To.Column.Replace(column, newName) + } + } +} + +func (rm refMap) Deref() map[sqlschema.ForeignKey]string { + out := make(map[sqlschema.ForeignKey]string) + for fk, name := range rm { + out[*fk] = name + } + return out } diff --git a/migrate/operations.go b/migrate/operations.go index e4647656e..cfb4cb455 100644 --- a/migrate/operations.go +++ b/migrate/operations.go @@ -32,10 +32,8 @@ type DropTableOp struct { var _ Operation = (*DropTableOp)(nil) func (op *DropTableOp) DependsOn(another Operation) bool { - d, ok := another.(*DropForeignKeyOp) - // - return ok && ((d.FK.From.Schema == op.FQN.Schema && d.FK.From.Table == op.FQN.Table) || - (d.FK.To.Schema == op.FQN.Schema && d.FK.To.Table == op.FQN.Table)) + drop, ok := another.(*DropForeignKeyOp) + return ok && drop.ForeignKey.DependsOnTable(op.FQN) } // GetReverse for a DropTable returns a no-op migration. 
Logically, CreateTable is the reverse, @@ -77,14 +75,14 @@ func (op *RenameColumnOp) GetReverse() Operation { } func (op *RenameColumnOp) DependsOn(another Operation) bool { - rt, ok := another.(*RenameTableOp) - return ok && rt.FQN.Schema == op.FQN.Schema && rt.NewName == op.FQN.Table + rename, ok := another.(*RenameTableOp) + return ok && op.FQN.Schema == rename.FQN.Schema && op.FQN.Table == rename.NewName } type AddColumnOp struct { FQN schema.FQN Column string - ColDef sqlschema.Column + ColDef sqlschema.ColumnDefinition } var _ Operation = (*AddColumnOp)(nil) @@ -100,7 +98,7 @@ func (op *AddColumnOp) GetReverse() Operation { type DropColumnOp struct { FQN schema.FQN Column string - ColDef sqlschema.Column + ColDef sqlschema.ColumnDefinition } var _ Operation = (*DropColumnOp)(nil) @@ -114,32 +112,11 @@ func (op *DropColumnOp) GetReverse() Operation { } func (op *DropColumnOp) DependsOn(another Operation) bool { - // TODO: refactor switch drop := another.(type) { case *DropForeignKeyOp: - var fCol bool - fCols := drop.FK.From.Column.Split() - for _, c := range fCols { - if c == op.Column { - fCol = true - break - } - } - - var tCol bool - tCols := drop.FK.To.Column.Split() - for _, c := range tCols { - if c == op.Column { - tCol = true - break - } - } - - return (drop.FK.From.Schema == op.FQN.Schema && drop.FK.From.Table == op.FQN.Table && fCol) || - (drop.FK.To.Schema == op.FQN.Schema && drop.FK.To.Table == op.FQN.Table && tCol) - + return drop.ForeignKey.DependsOnColumn(op.FQN, op.Column) case *DropPrimaryKeyOp: - return op.FQN == drop.FQN && drop.PK.Columns.Contains(op.Column) + return op.FQN == drop.FQN && drop.PrimaryKey.Columns.Contains(op.Column) case *ChangePrimaryKeyOp: return op.FQN == drop.FQN && drop.Old.Columns.Contains(op.Column) } @@ -148,7 +125,7 @@ func (op *DropColumnOp) DependsOn(another Operation) bool { // RenameForeignKeyOp. 
type RenameForeignKeyOp struct { - FK sqlschema.FK + FK sqlschema.ForeignKey OldName string NewName string } @@ -156,16 +133,13 @@ type RenameForeignKeyOp struct { var _ Operation = (*RenameForeignKeyOp)(nil) func (op *RenameForeignKeyOp) FQN() schema.FQN { - return schema.FQN{ - Schema: op.FK.From.Schema, - Table: op.FK.From.Table, - } + return op.FK.From.FQN } -func (op *RenameForeignKeyOp) DependsOn(another Operation) bool { - rt, ok := another.(*RenameTableOp) - return ok && rt.FQN.Schema == op.FK.From.Schema && rt.NewName == op.FK.From.Table -} +// func (op *RenameForeignKeyOp) DependsOn(another Operation) bool { +// rt, ok := another.(*RenameTableOp) +// return ok && rt.FQN.Schema == op.FK.From.Schema && rt.NewName == op.FK.From.Table +// } func (op *RenameForeignKeyOp) GetReverse() Operation { return &RenameForeignKeyOp{ @@ -176,55 +150,48 @@ func (op *RenameForeignKeyOp) GetReverse() Operation { } type AddForeignKeyOp struct { - FK sqlschema.FK + ForeignKey sqlschema.ForeignKey ConstraintName string } var _ Operation = (*AddForeignKeyOp)(nil) func (op *AddForeignKeyOp) FQN() schema.FQN { - return schema.FQN{ - Schema: op.FK.From.Schema, - Table: op.FK.From.Table, - } + return op.ForeignKey.From.FQN } func (op *AddForeignKeyOp) DependsOn(another Operation) bool { switch another := another.(type) { case *RenameTableOp: - // TODO: provide some sort of "DependsOn" method for FK - return another.FQN.Schema == op.FK.From.Schema && another.NewName == op.FK.From.Table + return op.ForeignKey.DependsOnTable(another.FQN) || + op.ForeignKey.DependsOnTable(schema.FQN{Schema: another.FQN.Schema, Table: another.NewName}) case *CreateTableOp: - return (another.FQN.Schema == op.FK.To.Schema && another.FQN.Table == op.FK.To.Table) || // either it's the referencing one - (another.FQN.Schema == op.FK.From.Schema && another.FQN.Table == op.FK.From.Table) // or the one being referenced + return op.ForeignKey.DependsOnTable(another.FQN) } return false } func (op *AddForeignKeyOp) 
GetReverse() Operation { return &DropForeignKeyOp{ - FK: op.FK, + ForeignKey: op.ForeignKey, ConstraintName: op.ConstraintName, } } type DropForeignKeyOp struct { - FK sqlschema.FK + ForeignKey sqlschema.ForeignKey ConstraintName string } var _ Operation = (*DropForeignKeyOp)(nil) func (op *DropForeignKeyOp) FQN() schema.FQN { - return schema.FQN{ - Schema: op.FK.From.Schema, - Table: op.FK.From.Table, - } + return op.ForeignKey.From.FQN } func (op *DropForeignKeyOp) GetReverse() Operation { return &AddForeignKeyOp{ - FK: op.FK, + ForeignKey: op.ForeignKey, ConstraintName: op.ConstraintName, } } @@ -246,14 +213,7 @@ func (op *AddUniqueConstraintOp) GetReverse() Operation { func (op *AddUniqueConstraintOp) DependsOn(another Operation) bool { switch another := another.(type) { case *AddColumnOp: - var sameColumn bool - for _, column := range op.Unique.Columns.Split() { - if column == another.Column { - sameColumn = true - break - } - } - return op.FQN == another.FQN && sameColumn + return op.FQN == another.FQN && op.Unique.Columns.Contains(another.Column) case *RenameTableOp: return op.FQN.Schema == another.FQN.Schema && op.FQN.Table == another.NewName case *DropUniqueConstraintOp: @@ -290,8 +250,8 @@ func (op *DropUniqueConstraintOp) GetReverse() Operation { type ChangeColumnTypeOp struct { FQN schema.FQN Column string - From sqlschema.Column - To sqlschema.Column + From sqlschema.ColumnDefinition + To sqlschema.ColumnDefinition } var _ Operation = (*ChangeColumnTypeOp)(nil) @@ -306,45 +266,45 @@ func (op *ChangeColumnTypeOp) GetReverse() Operation { } type DropPrimaryKeyOp struct { - FQN schema.FQN - PK *sqlschema.PK + FQN schema.FQN + PrimaryKey sqlschema.PrimaryKey } var _ Operation = (*DropPrimaryKeyOp)(nil) func (op *DropPrimaryKeyOp) GetReverse() Operation { return &AddPrimaryKeyOp{ - FQN: op.FQN, - PK: op.PK, + FQN: op.FQN, + PrimaryKey: op.PrimaryKey, } } type AddPrimaryKeyOp struct { - FQN schema.FQN - PK *sqlschema.PK + FQN schema.FQN + PrimaryKey 
sqlschema.PrimaryKey } var _ Operation = (*AddPrimaryKeyOp)(nil) func (op *AddPrimaryKeyOp) GetReverse() Operation { return &DropPrimaryKeyOp{ - FQN: op.FQN, - PK: op.PK, + FQN: op.FQN, + PrimaryKey: op.PrimaryKey, } } func (op *AddPrimaryKeyOp) DependsOn(another Operation) bool { switch another := another.(type) { case *AddColumnOp: - return op.FQN == another.FQN && op.PK.Columns.Contains(another.Column) + return op.FQN == another.FQN && op.PrimaryKey.Columns.Contains(another.Column) } return false } type ChangePrimaryKeyOp struct { FQN schema.FQN - Old *sqlschema.PK - New *sqlschema.PK + Old sqlschema.PrimaryKey + New sqlschema.PrimaryKey } var _ Operation = (*AddPrimaryKeyOp)(nil) @@ -358,10 +318,10 @@ func (op *ChangePrimaryKeyOp) GetReverse() Operation { } // comment denotes an Operation that cannot be executed. -// +// // Operations, which cannot be reversed due to current technical limitations, // may return &comment with a helpful message from their GetReverse() method. -// +// // Chnagelog should skip it when applying operations or output as log message, // and write it as an SQL comment when creating migration files. type comment string diff --git a/migrate/sqlschema/inspector.go b/migrate/sqlschema/inspector.go index 49537237d..4f24051e8 100644 --- a/migrate/sqlschema/inspector.go +++ b/migrate/sqlschema/inspector.go @@ -17,11 +17,11 @@ type InspectorDialect interface { // EquivalentType returns true if col1 and co2 SQL types are equivalent, // i.e. they might use dialect-specifc type aliases (SERIAL ~ SMALLINT) // or specify the same VARCHAR length differently (VARCHAR(255) ~ VARCHAR). 
- EquivalentType(Column, Column) bool + EquivalentType(ColumnDefinition, ColumnDefinition) bool } type Inspector interface { - Inspect(ctx context.Context) (State, error) + Inspect(ctx context.Context) (DatabaseSchema, error) } type inspector struct { @@ -38,6 +38,12 @@ func NewInspector(db *bun.DB, excludeTables ...string) (Inspector, error) { }, nil } +// SchemaTable provides additional table metadata that is only accessible from scanning Go models. +type SchemaTable struct { + // Model stores the zero interface to the underlying Go struct. + Model interface{} +} + // SchemaInspector creates the current project state from the passed bun.Models. // Do not recycle SchemaInspector for different sets of models, as older models will not be de-registerred before the next run. type SchemaInspector struct { @@ -52,19 +58,20 @@ func NewSchemaInspector(tables *schema.Tables) *SchemaInspector { } } -func (si *SchemaInspector) Inspect(ctx context.Context) (State, error) { - state := State{ - FKs: make(map[FK]string), +func (si *SchemaInspector) Inspect(ctx context.Context) (DatabaseSchema, error) { + state := DatabaseSchema{ + TableDefinitions: make(map[string]TableDefinition), + ForeignKeys: make(map[ForeignKey]string), } for _, t := range si.tables.All() { - columns := make(map[string]Column) + columns := make(map[string]ColumnDefinition) for _, f := range t.Fields { sqlType, length, err := parseLen(f.CreateTableSQLType) if err != nil { return state, fmt.Errorf("parse length in %q: %w", f.CreateTableSQLType, err) } - columns[f.Name] = Column{ + columns[f.Name] = ColumnDefinition{ SQLType: strings.ToLower(sqlType), // TODO(dyma): maybe this is not necessary after Column.Eq() VarcharLen: length, DefaultValue: exprToLower(f.SQLDefault), @@ -80,7 +87,7 @@ func (si *SchemaInspector) Inspect(ctx context.Context) (State, error) { // let each dialect apply the default naming convention. 
if name == "" { for _, f := range group { - unique = append(unique, Unique{Columns: NewComposite(f.Name)}) + unique = append(unique, Unique{Columns: NewColumns(f.Name)}) } continue } @@ -90,26 +97,28 @@ func (si *SchemaInspector) Inspect(ctx context.Context) (State, error) { for _, f := range group { columns = append(columns, f.Name) } - unique = append(unique, Unique{Name: name, Columns: NewComposite(columns...)}) + unique = append(unique, Unique{Name: name, Columns: NewColumns(columns...)}) } - var pk *PK + var pk *PrimaryKey if len(t.PKs) > 0 { var columns []string for _, f := range t.PKs { columns = append(columns, f.Name) } - pk = &PK{Columns: NewComposite(columns...)} + pk = &PrimaryKey{Columns: NewColumns(columns...)} } - state.Tables = append(state.Tables, Table{ - Schema: t.Schema, - Name: t.Name, - Model: t.ZeroIface, - Columns: columns, - UniqueContraints: unique, - PK: pk, - }) + state.TableDefinitions[t.Name] = TableDefinition{ + Schema: t.Schema, + Name: t.Name, + ColumnDefimitions: columns, + UniqueContraints: unique, + PrimaryKey: pk, + Additional: SchemaTable{ + Model: t.ZeroIface, + }, + } for _, rel := range t.Relations { // These relations are nominal and do not need a foreign key to be declared in the current table. 
@@ -128,9 +137,9 @@ func (si *SchemaInspector) Inspect(ctx context.Context) (State, error) { } target := rel.JoinTable - state.FKs[FK{ - From: C(t.Schema, t.Name, fromCols...), - To: C(target.Schema, target.Name, toCols...), + state.ForeignKeys[ForeignKey{ + From: NewColumnReference(t.Schema, t.Name, fromCols...), + To: NewColumnReference(target.Schema, target.Name, toCols...), }] = "" } } diff --git a/migrate/sqlschema/schema.go b/migrate/sqlschema/schema.go new file mode 100644 index 000000000..6df87d1e4 --- /dev/null +++ b/migrate/sqlschema/schema.go @@ -0,0 +1,163 @@ +package sqlschema + +import ( + "fmt" + "slices" + "strings" + + "github.com/uptrace/bun/schema" +) + +type DatabaseSchema struct { + TableDefinitions map[string]TableDefinition + ForeignKeys map[ForeignKey]string +} + +type TableDefinition struct { + Schema string + Name string + + // ColumnDefimitions map each column name to the column definition. + ColumnDefimitions map[string]ColumnDefinition + + // PrimaryKey holds the primary key definition. + // A nil value means that no primary key is defined for the table. + PrimaryKey *PrimaryKey + + // UniqueConstraints defined on the table. + UniqueContraints []Unique + + // Additional metadata that Inspector implementations might provide about the table. + Additional interface{} +} + +func (t TableDefinition) FQN() schema.FQN { + return schema.FQN{Schema: t.Schema, Table: t.Name} +} + +// ColumnDefinition stores attributes of a database column. +type ColumnDefinition struct { + SQLType string + VarcharLen int + DefaultValue string + IsNullable bool + IsAutoIncrement bool + IsIdentity bool + + // Additional metadata that Inspector implementations might provide about the column. + Additional interface{} + + // TODO: add Precision and Cardinality for timestamps/bit-strings/floats and arrays respectively. +} + +// AppendQuery appends full SQL data type. 
+func (c *ColumnDefinition) AppendQuery(fmter schema.Formatter, b []byte) (_ []byte, err error) { + b = append(b, c.SQLType...) + if c.VarcharLen == 0 { + return b, nil + } + b = append(b, "("...) + b = append(b, fmt.Sprint(c.VarcharLen)...) + b = append(b, ")"...) + return b, nil +} + +type ForeignKey struct { + From ColumnReference + To ColumnReference +} + +func NewColumnReference(schemaName, tableName string, columns ...string) ColumnReference { + return ColumnReference{ + FQN: schema.FQN{Schema: schemaName, Table: tableName}, + Column: NewColumns(columns...), + } +} + +func (fk ForeignKey) DependsOnTable(fqn schema.FQN) bool { + return fk.From.FQN == fqn || fk.To.FQN == fqn +} + +func (fk ForeignKey) DependsOnColumn(fqn schema.FQN, column string) bool { + return fk.DependsOnTable(fqn) && + (fk.From.Column.Contains(column) || fk.To.Column.Contains(column)) +} + +// Columns is a hashable representation of []string used to define schema constraints that depend on multiple columns. +// Although having duplicated column references in these constraints is illegal, Columns neither validates nor enforces this constraint on the caller. +type Columns string + +// NewColumns creates a composite column from a slice of column names. +func NewColumns(columns ...string) Columns { + slices.Sort(columns) + return Columns(strings.Join(columns, ",")) +} + +func (c *Columns) String() string { + return string(*c) +} + +func (c *Columns) AppendQuery(fmter schema.Formatter, b []byte) ([]byte, error) { + return schema.Safe(*c).AppendQuery(fmter, b) +} + +// Split returns a slice of column names that make up the composite. +func (c *Columns) Split() []string { + return strings.Split(c.String(), ",") +} + +// ContainsColumns checks that columns in "other" are a subset of current colums. 
+func (c *Columns) ContainsColumns(other Columns) bool { + columns := c.Split() +Outer: + for _, check := range other.Split() { + for _, column := range columns { + if check == column { + continue Outer + } + } + return false + } + return true +} + +// Contains checks that a composite column contains the current column. +func (c *Columns) Contains(other string) bool { + return c.ContainsColumns(Columns(other)) +} + +// Replace renames a column if it is part of the composite. +// If a composite consists of multiple columns, only one column will be renamed. +func (c *Columns) Replace(oldColumn, newColumn string) bool { + columns := c.Split() + for i, column := range columns { + if column == oldColumn { + columns[i] = newColumn + *c = NewColumns(columns...) + return true + } + } + return false +} + +// Unique represents a unique constraint defined on 1 or more columns. +type Unique struct { + Name string + Columns Columns +} + +// Equals checks that two unique constraint are the same, assuming both are defined for the same table. +func (u Unique) Equals(other Unique) bool { + return u.Columns == other.Columns +} + +// PrimaryKey represents a primary key constraint defined on 1 or more columns. +type PrimaryKey struct { + Name string + Columns Columns +} + +type ColumnReference struct { + FQN schema.FQN + Column Columns +} diff --git a/migrate/sqlschema/state.go b/migrate/sqlschema/state.go deleted file mode 100644 index efafcad2c..000000000 --- a/migrate/sqlschema/state.go +++ /dev/null @@ -1,386 +0,0 @@ -package sqlschema - -import ( - "fmt" - "slices" - "strings" - - "github.com/uptrace/bun/schema" -) - -type State struct { - Tables []Table - FKs map[FK]string -} - -type Table struct { - // Schema containing the table. - Schema string - - // Table name. - Name string - - // Model stores a pointer to the bun's underlying Go struct for the table. - Model interface{} - - // Columns map each column name to the column type definition. 
- Columns map[string]Column - - // UniqueConstraints defined on the table. - UniqueContraints []Unique - - // PrimaryKey holds the primary key definition if any. - PK *PK -} - -// T returns a fully-qualified name object for the table. -func (t *Table) T() tFQN { - return T(t.Schema, t.Name) -} - -// Column stores attributes of a database column. -type Column struct { - SQLType string - VarcharLen int - DefaultValue string - IsNullable bool - IsAutoIncrement bool - IsIdentity bool - // TODO: add Precision and Cardinality for timestamps/bit-strings/floats and arrays respectively. -} - -// AppendQuery appends full SQL data type. -func (c *Column) AppendQuery(fmter schema.Formatter, b []byte) (_ []byte, err error) { - b = append(b, c.SQLType...) - if c.VarcharLen == 0 { - return b, nil - } - b = append(b, "("...) - b = append(b, fmt.Sprint(c.VarcharLen)...) - b = append(b, ")"...) - return b, nil -} - -type TypeEquivalenceFunc func(Column, Column) bool - -// EqualSignatures determines if two tables have the same "signature". -func EqualSignatures(t1, t2 Table, eq TypeEquivalenceFunc) bool { - sig1 := newSignature(t1, eq) - sig2 := newSignature(t2, eq) - return sig1.Equals(sig2) -} - -// signature is a set of column definitions, which allows "relation/name-agnostic" comparison between them; -// meaning that two columns are considered equal if their types are the same. -type signature struct { - - // underlying stores the number of occurences for each unique column type. - // It helps to account for the fact that a table might have multiple columns that have the same type. - underlying map[Column]int - - eq TypeEquivalenceFunc -} - -func newSignature(t Table, eq TypeEquivalenceFunc) signature { - s := signature{ - underlying: make(map[Column]int), - eq: eq, - } - s.scan(t) - return s -} - -// scan iterates over table's field and counts occurrences of each unique column definition. 
-func (s *signature) scan(t Table) { - for _, scanCol := range t.Columns { - // This is slightly more expensive than if the columns could be compared directly - // and we always did s.underlying[col]++, but we get type-equivalence in return. - col, count := s.getCount(scanCol) - if count == 0 { - s.underlying[scanCol] = 1 - } else { - s.underlying[col]++ - } - } -} - -// getCount uses TypeEquivalenceFunc to find a column with the same (equivalent) SQL type -// and returns its count. Count 0 means there are no columns with of this type. -func (s *signature) getCount(keyCol Column) (key Column, count int) { - for col, cnt := range s.underlying { - if s.eq(col, keyCol) { - return col, cnt - } - } - return keyCol, 0 -} - -// Equals returns true if 2 signatures share an identical set of columns. -func (s *signature) Equals(other signature) bool { - if len(s.underlying) != len(other.underlying) { - return false - } - for col, count := range s.underlying { - if _, countOther := other.getCount(col); countOther != count { - return false - } - } - return true -} - -// tFQN is a fully-qualified table name. -type tFQN struct { - Schema string - Table string -} - -// T creates a fully-qualified table name object. -func T(schema, table string) tFQN { return tFQN{Schema: schema, Table: table} } - -// cFQN is a fully-qualified column name. -type cFQN struct { - tFQN - Column composite -} - -// C creates a fully-qualified column name object. -func C(schema, table string, columns ...string) cFQN { - return cFQN{tFQN: T(schema, table), Column: NewComposite(columns...)} -} - -// T returns the FQN of the column's parent table. -func (c cFQN) T() tFQN { - return c.tFQN -} - -// composite is a hashable representation of []string used to define FKs that depend on multiple columns. -// Although having duplicated column references in a FK is illegal, composite neither validates nor enforces this constraint on the caller. 
-type composite string - -// NewComposite creates a composite column from a slice of column names. -func NewComposite(columns ...string) composite { - slices.Sort(columns) - return composite(strings.Join(columns, ",")) -} - -func (c composite) String() string { - return string(c) -} - -func (c composite) Safe() schema.Safe { - return schema.Safe(c) -} - -// Split returns a slice of column names that make up the composite. -func (c composite) Split() []string { - return strings.Split(c.String(), ",") -} - -// Contains checks that a composite column contains every part of another composite. -func (c composite) contains(other composite) bool { - return c.Contains(string(other)) -} - -// Contains checks that a composite column contains the current column. -func (c composite) Contains(other string) bool { - var count int - checkColumns := composite(other).Split() - wantCount := len(checkColumns) - - for _, check := range checkColumns { - for _, column := range c.Split() { - if check == column { - count++ - } - if count == wantCount { - return true - } - } - } - return count == wantCount -} - -// Replace renames a column if it is part of the composite. -// If a composite consists of multiple columns, only one column will be renamed. -func (c composite) Replace(oldColumn, newColumn string) composite { - columns := c.Split() - for i, column := range columns { - if column == oldColumn { - columns[i] = newColumn - return NewComposite(columns...) - } - } - return c -} - -// FK defines a foreign key constraint. -// -// Example: -// -// fk := FK{ -// From: C("a", "b", "c_1", "c_2"), // supports multicolumn FKs -// To: C("w", "x", "y_1", "y_2") -// } -type FK struct { - From cFQN // From is the referencing column. - To cFQN // To is the referenced column. -} - -// dependsT checks if either part of the FK's definition mentions T -// and returns the columns that belong to T. Notice that *C allows modifying the column's FQN. 
-// -// Example: -// -// FK{ -// From: C("a", "b", "c"), -// To: C("x", "y", "z"), -// } -// depends on T("a", "b") and T("x", "y") -func (fk *FK) dependsT(t tFQN) (ok bool, cols []*cFQN) { - if c := &fk.From; c.T() == t { - ok = true - cols = append(cols, c) - } - if c := &fk.To; c.T() == t { - ok = true - cols = append(cols, c) - } - if !ok { - return false, nil - } - return -} - -// dependsC checks if the FK definition mentions C and returns a modifiable FQN of the matching column. -// -// Example: -// -// FK{ -// From: C("a", "b", "c_1", "c_2"), -// To: C("w", "x", "y_1", "y_2"), -// } -// depends on C("a", "b", "c_1"), C("a", "b", "c_2"), C("w", "x", "y_1"), and C("w", "x", "y_2") -func (fk *FK) dependsC(c cFQN) (bool, *cFQN) { - switch { - case fk.From.Column.contains(c.Column): - return true, &fk.From - case fk.To.Column.contains(c.Column): - return true, &fk.To - } - return false, nil -} - -// RefMap helps detecting modified FK relations. -// It starts with an initial state and provides methods to update and delete -// foreign key relations based on the column or table they depend on. -// -// Note: this is only important/necessary if we want to rename FKs instead of re-creating them. -// Most of the time it wouldn't make a difference, but there may be cases in which re-creating FKs could be costly -// and renaming them would be preferred. -type RefMap map[FK]*FK - -// deleted is a special value that RefMap uses to denote a deleted FK constraint. -var deleted FK - -// NewRefMap records the FK's initial state to a RefMap. -func NewRefMap(fks ...FK) RefMap { - ref := make(RefMap) - for _, fk := range fks { - copyfk := fk - ref[fk] = ©fk - } - return ref -} - -// UpdateT updates the table FQN in all FKs that depend on it, e.g. if a table is renamed or moved to a different schema. -// Returns the number of updated entries. 
-func (r RefMap) UpdateT(oldT, newT tFQN) (n int) { - for _, fk := range r { - ok, cols := fk.dependsT(oldT) - if !ok { - continue - } - for _, c := range cols { - c.Schema = newT.Schema - c.Table = newT.Table - } - n++ - } - return -} - -// UpdateC updates the column FQN in all FKs that depend on it. E.g. if a column was renamed, -// only the column-name part of the FQN needs to be updated. Returns the number of updated entries. -func (r RefMap) UpdateC(oldC cFQN, newColumn string) (n int) { - for _, fk := range r { - if ok, col := fk.dependsC(oldC); ok { - oldColumns := oldC.Column.Split() - // updateC will only update 1 column per invocation. - col.Column = col.Column.Replace(oldColumns[0], newColumn) - n++ - } - } - return -} - -// DeleteT marks all FKs that depend on the table as deleted. -// Returns the number of deleted entries. -func (r RefMap) DeleteT(t tFQN) (n int) { - for old, fk := range r { - if ok, _ := fk.dependsT(t); ok { - r[old] = &deleted - n++ - } - } - return -} - -// DeleteC marks all FKs that depend on the column as deleted. -// Returns the number of deleted entries. -func (r RefMap) DeleteC(c cFQN) (n int) { - for old, fk := range r { - if ok, _ := fk.dependsC(c); ok { - r[old] = &deleted - n++ - } - } - return -} - -// Updated returns FKs that were updated, both their old and new defitions. -func (r RefMap) Updated() map[FK]FK { - fks := make(map[FK]FK) - for old, fk := range r { - if old != *fk { - fks[old] = *fk - } - } - return fks -} - -// Deleted gets all FKs that were marked as deleted. -func (r RefMap) Deleted() (fks []FK) { - for old, fk := range r { - if fk == &deleted { - fks = append(fks, old) - } - } - return -} - -// Unique represents a unique constraint defined on 1 or more columns. -type Unique struct { - Name string - Columns composite -} - -// Equals checks that two unique constraint are the same, assuming both are defined for the same table. 
-func (u Unique) Equals(other Unique) bool { - return u.Columns == other.Columns -} - -// PK represents a primary key constraint defined on 1 or more columns. -type PK struct { - Name string - Columns composite -} From 8fe0261d8bd7805e0a7a9733f6218852168519ce Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Fri, 8 Nov 2024 00:54:57 +0100 Subject: [PATCH 38/55] refactor(sqlschema): introduce Schema, Table, and Column interfaces - TableDefinition (base implementation) now uses FQN as a key for every table to be able to store tables from different schemas with the same name --- dialect/pgdialect/alter_table.go | 33 ++- dialect/pgdialect/inspector.go | 37 ++- dialect/pgdialect/sqltype.go | 12 +- internal/dbtest/inspect_test.go | 77 ++--- internal/dbtest/migrate_test.go | 61 ++-- migrate/auto.go | 152 +++++++++- migrate/diff.go | 488 ++++++++++++------------------- migrate/operations.go | 8 +- migrate/sqlschema/inspector.go | 84 ++++-- migrate/sqlschema/schema.go | 86 +++++- 10 files changed, 599 insertions(+), 439 deletions(-) diff --git a/dialect/pgdialect/alter_table.go b/dialect/pgdialect/alter_table.go index 8ce588e06..971e9626b 100644 --- a/dialect/pgdialect/alter_table.go +++ b/dialect/pgdialect/alter_table.go @@ -92,15 +92,19 @@ func (m *migrator) addColumn(fmter schema.Formatter, b []byte, add *migrate.AddC b = fmter.AppendName(b, add.Column) b = append(b, " "...) - b, _ = add.ColDef.AppendQuery(fmter, b) + colDef, ok := add.ColDef.(sqlschema.ColumnDefinition) + if !ok { + return nil, fmt.Errorf("column %q does not implement sqlschema.ColumnDefinition, got %T", add.ColDef.GetName(), add.ColDef) + } + b, _ = colDef.AppendQuery(fmter, b) - if add.ColDef.DefaultValue != "" { + if add.ColDef.GetDefaultValue() != "" { b = append(b, " DEFAULT "...) - b = append(b, add.ColDef.DefaultValue...) + b = append(b, add.ColDef.GetDefaultValue()...) b = append(b, " "...) 
} - if add.ColDef.IsIdentity { + if add.ColDef.GetIsIdentity() { b = appendGeneratedAsIdentity(b) } @@ -207,27 +211,32 @@ func (m *migrator) changeColumnType(fmter schema.Formatter, b []byte, colDef *mi inspector := m.db.Dialect().(sqlschema.InspectorDialect) if !inspector.EquivalentType(want, got) { + colDef, ok := want.(sqlschema.ColumnDefinition) + if !ok { + return nil, fmt.Errorf("column %q does not implement sqlschema.ColumnDefinition, got %T", want.GetName(), want) + } + appendAlterColumn() b = append(b, " SET DATA TYPE "...) - if b, err = want.AppendQuery(fmter, b); err != nil { + if b, err = colDef.AppendQuery(fmter, b); err != nil { return b, err } } // Column must be declared NOT NULL before identity can be added. // Although PG can resolve the order of operations itself, we make this explicit in the query. - if want.IsNullable != got.IsNullable { + if want.GetIsNullable() != got.GetIsNullable() { appendAlterColumn() - if !want.IsNullable { + if !want.GetIsNullable() { b = append(b, " SET NOT NULL"...) } else { b = append(b, " DROP NOT NULL"...) } } - if want.IsIdentity != got.IsIdentity { + if want.GetIsIdentity() != got.GetIsIdentity() { appendAlterColumn() - if !want.IsIdentity { + if !want.GetIsIdentity() { b = append(b, " DROP IDENTITY"...) } else { b = append(b, " ADD"...) @@ -235,13 +244,13 @@ func (m *migrator) changeColumnType(fmter schema.Formatter, b []byte, colDef *mi } } - if want.DefaultValue != got.DefaultValue { + if want.GetDefaultValue() != got.GetDefaultValue() { appendAlterColumn() - if want.DefaultValue == "" { + if want.GetDefaultValue() == "" { b = append(b, " DROP DEFAULT"...) } else { b = append(b, " SET DEFAULT "...) - b = append(b, want.DefaultValue...) + b = append(b, want.GetDefaultValue()...) 
} } diff --git a/dialect/pgdialect/inspector.go b/dialect/pgdialect/inspector.go index 00b32cccf..07665df21 100644 --- a/dialect/pgdialect/inspector.go +++ b/dialect/pgdialect/inspector.go @@ -6,6 +6,13 @@ import ( "github.com/uptrace/bun" "github.com/uptrace/bun/migrate/sqlschema" + "github.com/uptrace/bun/schema" +) + +type ( + Schema = sqlschema.DatabaseSchema + Table = sqlschema.TableDefinition + Column = sqlschema.ColumnDefinition ) func (d *Dialect) Inspector(db *bun.DB, excludeTables ...string) sqlschema.Inspector { @@ -23,9 +30,9 @@ func newInspector(db *bun.DB, excludeTables ...string) *Inspector { return &Inspector{db: db, excludeTables: excludeTables} } -func (in *Inspector) Inspect(ctx context.Context) (sqlschema.DatabaseSchema, error) { - schema := sqlschema.DatabaseSchema{ - TableDefinitions: make(map[string]sqlschema.TableDefinition), +func (in *Inspector) Inspect(ctx context.Context) (sqlschema.Schema, error) { + dbSchema := Schema{ + TableDefinitions: make(map[schema.FQN]Table), ForeignKeys: make(map[sqlschema.ForeignKey]string), } @@ -37,22 +44,22 @@ func (in *Inspector) Inspect(ctx context.Context) (sqlschema.DatabaseSchema, err var tables []*InformationSchemaTable if err := in.db.NewRaw(sqlInspectTables, bun.In(exclude)).Scan(ctx, &tables); err != nil { - return schema, err + return dbSchema, err } var fks []*ForeignKey if err := in.db.NewRaw(sqlInspectForeignKeys, bun.In(exclude), bun.In(exclude)).Scan(ctx, &fks); err != nil { - return schema, err + return dbSchema, err } - schema.ForeignKeys = make(map[sqlschema.ForeignKey]string, len(fks)) + dbSchema.ForeignKeys = make(map[sqlschema.ForeignKey]string, len(fks)) for _, table := range tables { var columns []*InformationSchemaColumn if err := in.db.NewRaw(sqlInspectColumnsQuery, table.Schema, table.Name).Scan(ctx, &columns); err != nil { - return schema, err + return dbSchema, err } - colDefs := make(map[string]sqlschema.ColumnDefinition) + colDefs := make(map[string]Column) uniqueGroups := 
make(map[string][]string) for _, c := range columns { @@ -63,7 +70,8 @@ func (in *Inspector) Inspect(ctx context.Context) (sqlschema.DatabaseSchema, err def = strings.ToLower(def) } - colDefs[c.Name] = sqlschema.ColumnDefinition{ + colDefs[c.Name] = Column{ + Name: c.Name, SQLType: c.DataType, VarcharLen: c.VarcharLen, DefaultValue: def, @@ -93,22 +101,23 @@ func (in *Inspector) Inspect(ctx context.Context) (sqlschema.DatabaseSchema, err } } - schema.TableDefinitions[table.Name] = sqlschema.TableDefinition{ + fqn := schema.FQN{Schema: table.Schema, Table: table.Name} + dbSchema.TableDefinitions[fqn] = Table{ Schema: table.Schema, Name: table.Name, - ColumnDefimitions: colDefs, + ColumnDefinitions: colDefs, PrimaryKey: pk, - UniqueContraints: unique, + UniqueConstraints: unique, } } for _, fk := range fks { - schema.ForeignKeys[sqlschema.ForeignKey{ + dbSchema.ForeignKeys[sqlschema.ForeignKey{ From: sqlschema.NewColumnReference(fk.SourceSchema, fk.SourceTable, fk.SourceColumns...), To: sqlschema.NewColumnReference(fk.TargetSchema, fk.TargetTable, fk.TargetColumns...), }] = fk.ConstraintName } - return schema, nil + return dbSchema, nil } type InformationSchemaTable struct { diff --git a/dialect/pgdialect/sqltype.go b/dialect/pgdialect/sqltype.go index 10741fc0e..fcb9f8ebb 100644 --- a/dialect/pgdialect/sqltype.go +++ b/dialect/pgdialect/sqltype.go @@ -125,8 +125,8 @@ var ( timestampTz = newAliases(sqltype.Timestamp, pgTypeTimestampTz, pgTypeTimestampWithTz) ) -func (d *Dialect) EquivalentType(col1, col2 sqlschema.ColumnDefinition) bool { - typ1, typ2 := strings.ToUpper(col1.SQLType), strings.ToUpper(col2.SQLType) +func (d *Dialect) EquivalentType(col1, col2 sqlschema.Column) bool { + typ1, typ2 := strings.ToUpper(col1.GetSQLType()), strings.ToUpper(col2.GetSQLType()) if typ1 == typ2 { return checkVarcharLen(col1, col2, d.DefaultVarcharLen()) @@ -147,12 +147,14 @@ func (d *Dialect) EquivalentType(col1, col2 sqlschema.ColumnDefinition) bool { // if one specifies no 
VarcharLen and the other one has the default lenght for pgdialect. // We assume that the types are otherwise equivalent and that any non-character column // would have VarcharLen == 0; -func checkVarcharLen(col1, col2 sqlschema.ColumnDefinition, defaultLen int) bool { - if col1.VarcharLen == col2.VarcharLen { +func checkVarcharLen(col1, col2 sqlschema.Column, defaultLen int) bool { + vl1, vl2 := col1.GetVarcharLen(), col2.GetVarcharLen() + + if vl1 == vl2 { return true } - if (col1.VarcharLen == 0 && col2.VarcharLen == defaultLen) || (col1.VarcharLen == defaultLen && col2.VarcharLen == 0) { + if (vl1 == 0 && vl2 == defaultLen) || (vl1 == defaultLen && vl2 == 0) { return true } return false diff --git a/internal/dbtest/inspect_test.go b/internal/dbtest/inspect_test.go index 7528afa0c..f8f5fbdd7 100644 --- a/internal/dbtest/inspect_test.go +++ b/internal/dbtest/inspect_test.go @@ -93,11 +93,11 @@ func TestDatabaseInspector_Inspect(t *testing.T) { defaultSchema := db.Dialect().DefaultSchema() // Tables come sorted alphabetically by schema and table. 
- wantTables := map[string]sqlschema.TableDefinition{ - "offices": { + wantTables := map[schema.FQN]sqlschema.TableDefinition{ + {Schema: "admin", Table: "offices"}: { Schema: "admin", Name: "offices", - ColumnDefimitions: map[string]sqlschema.ColumnDefinition{ + ColumnDefinitions: map[string]sqlschema.ColumnDefinition{ "office_name": { SQLType: sqltype.VarChar, }, @@ -112,10 +112,10 @@ func TestDatabaseInspector_Inspect(t *testing.T) { }, PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("office_name")}, }, - "articles": { + {Schema: defaultSchema, Table: "articles"}: { Schema: defaultSchema, Name: "articles", - ColumnDefimitions: map[string]sqlschema.ColumnDefinition{ + ColumnDefinitions: map[string]sqlschema.ColumnDefinition{ "isbn": { SQLType: "bigint", IsNullable: false, @@ -167,14 +167,14 @@ func TestDatabaseInspector_Inspect(t *testing.T) { }, }, PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("isbn")}, - UniqueContraints: []sqlschema.Unique{ + UniqueConstraints: []sqlschema.Unique{ {Columns: sqlschema.NewColumns("editor", "title")}, }, }, - "authors": { + {Schema: defaultSchema, Table: "authors"}: { Schema: defaultSchema, Name: "authors", - ColumnDefimitions: map[string]sqlschema.ColumnDefinition{ + ColumnDefinitions: map[string]sqlschema.ColumnDefinition{ "author_id": { SQLType: "bigint", IsIdentity: true, @@ -190,15 +190,15 @@ func TestDatabaseInspector_Inspect(t *testing.T) { }, }, PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("author_id")}, - UniqueContraints: []sqlschema.Unique{ + UniqueConstraints: []sqlschema.Unique{ {Columns: sqlschema.NewColumns("first_name", "last_name")}, {Columns: sqlschema.NewColumns("email")}, }, }, - "publisher_to_journalists": { + {Schema: defaultSchema, Table: "publisher_to_journalists"}: { Schema: defaultSchema, Name: "publisher_to_journalists", - ColumnDefimitions: map[string]sqlschema.ColumnDefinition{ + ColumnDefinitions: map[string]sqlschema.ColumnDefinition{ 
"publisher_id": { SQLType: sqltype.VarChar, }, @@ -208,10 +208,10 @@ func TestDatabaseInspector_Inspect(t *testing.T) { }, PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("publisher_id", "author_id")}, }, - "publishers": { + {Schema: defaultSchema, Table: "publishers"}: { Schema: defaultSchema, Name: "publishers", - ColumnDefimitions: map[string]sqlschema.ColumnDefinition{ + ColumnDefinitions: map[string]sqlschema.ColumnDefinition{ "publisher_id": { SQLType: sqltype.VarChar, DefaultValue: "gen_random_uuid()", @@ -226,7 +226,7 @@ func TestDatabaseInspector_Inspect(t *testing.T) { }, }, PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("publisher_id")}, - UniqueContraints: []sqlschema.Unique{ + UniqueConstraints: []sqlschema.Unique{ {Columns: sqlschema.NewColumns("publisher_id", "publisher_name")}, }, }, @@ -260,10 +260,11 @@ func TestDatabaseInspector_Inspect(t *testing.T) { // State.FKs store their database names, which differ from dialect to dialect. // Because of that we compare FKs and Tables separately. - cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, got.TableDefinitions) + gotTables := got.(sqlschema.DatabaseSchema).TableDefinitions + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, gotTables) var fks []sqlschema.ForeignKey - for fk := range got.ForeignKeys { + for fk := range got.GetForeignKeys() { fks = append(fks, fk) } require.ElementsMatch(t, wantFKs, fks) @@ -292,23 +293,23 @@ func mustCreateSchema(tb testing.TB, ctx context.Context, db *bun.DB, schema str // cmpTables compares table schemas using dialect-specific equivalence checks for column types // and reports the differences as t.Error(). 
-func cmpTables(tb testing.TB, d sqlschema.InspectorDialect, want, got map[string]sqlschema.TableDefinition) { +func cmpTables(tb testing.TB, d sqlschema.InspectorDialect, want, got map[schema.FQN]sqlschema.TableDefinition) { tb.Helper() require.ElementsMatch(tb, tableNames(want), tableNames(got), "different set of tables") // Now we are guaranteed to have the same tables. - for wantName, wantTable := range want { + for _, wantTable := range want { // TODO(dyma): this will be simplified by map[string]Table var gt sqlschema.TableDefinition for i := range got { - if got[i].Name == wantName { + if got[i].Name == wantTable.Name { gt = got[i] break } } - cmpColumns(tb, d, wantName, wantTable.ColumnDefimitions, gt.ColumnDefimitions) + cmpColumns(tb, d, wantTable.Name, wantTable.ColumnDefinitions, gt.ColumnDefinitions) cmpConstraints(tb, wantTable, gt) } } @@ -388,12 +389,12 @@ func cmpConstraints(tb testing.TB, want, got sqlschema.TableDefinition) { } return } - require.ElementsMatch(tb, stripNames(want.UniqueContraints), stripNames(got.UniqueContraints), "table %q does not have expected unique constraints (listA=want, listB=got)", want.Name) + require.ElementsMatch(tb, stripNames(want.UniqueConstraints), stripNames(got.UniqueConstraints), "table %q does not have expected unique constraints (listA=want, listB=got)", want.Name) } -func tableNames(tables map[string]sqlschema.TableDefinition) (names []string) { - for name := range tables { - names = append(names, name) +func tableNames(tables map[schema.FQN]sqlschema.TableDefinition) (names []string) { + for fqn := range tables { + names = append(names, fqn.Table) } return } @@ -435,9 +436,10 @@ func TestSchemaInspector_Inspect(t *testing.T) { got, err := inspector.Inspect(context.Background()) require.NoError(t, err) - require.Len(t, got.TableDefinitions, 1) - for _, table := range got.TableDefinitions { - cmpColumns(t, dialect.(sqlschema.InspectorDialect), "model", want, table.ColumnDefimitions) + gotTables := 
got.(sqlschema.BunModelSchema).ModelTables + require.Len(t, gotTables, 1) + for _, table := range gotTables { + cmpColumns(t, dialect.(sqlschema.InspectorDialect), "model", want, table.ColumnDefinitions) return } }) @@ -470,9 +472,10 @@ func TestSchemaInspector_Inspect(t *testing.T) { got, err := inspector.Inspect(context.Background()) require.NoError(t, err) - require.Len(t, got.TableDefinitions, 1) - for _, table := range got.TableDefinitions { - cmpColumns(t, dialect.(sqlschema.InspectorDialect), "model", want, table.ColumnDefimitions) + gotTables := got.(sqlschema.BunModelSchema).ModelTables + require.Len(t, gotTables, 1) + for _, table := range gotTables { + cmpColumns(t, dialect.(sqlschema.InspectorDialect), "model", want, table.ColumnDefinitions) } }) @@ -489,7 +492,7 @@ func TestSchemaInspector_Inspect(t *testing.T) { want := sqlschema.TableDefinition{ Name: "models", - UniqueContraints: []sqlschema.Unique{ + UniqueConstraints: []sqlschema.Unique{ {Columns: sqlschema.NewColumns("id")}, {Name: "full_name", Columns: sqlschema.NewColumns("first_name", "last_name")}, }, @@ -498,9 +501,10 @@ func TestSchemaInspector_Inspect(t *testing.T) { got, err := inspector.Inspect(context.Background()) require.NoError(t, err) - require.Len(t, got.TableDefinitions, 1) - for _, table := range got.TableDefinitions { - cmpConstraints(t, want, table) + gotTables := got.(sqlschema.BunModelSchema).ModelTables + require.Len(t, gotTables, 1) + for _, table := range gotTables { + cmpConstraints(t, want, table.TableDefinition) return } }) @@ -519,8 +523,9 @@ func TestSchemaInspector_Inspect(t *testing.T) { got, err := inspector.Inspect(context.Background()) require.NoError(t, err) - require.Len(t, got.TableDefinitions, 1) - for _, table := range got.TableDefinitions { + gotTables := got.(sqlschema.BunModelSchema).ModelTables + require.Len(t, gotTables, 1) + for _, table := range gotTables { require.NotNilf(t, table.PrimaryKey, "did not register primary key, want (%s)", want) 
require.Equal(t, want, table.PrimaryKey.Columns, "wrong primary key columns") return diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index b63667483..bbdcb6061 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -14,6 +14,7 @@ import ( "github.com/uptrace/bun/dialect/sqltype" "github.com/uptrace/bun/migrate" "github.com/uptrace/bun/migrate/sqlschema" + "github.com/uptrace/bun/schema" ) const ( @@ -224,7 +225,7 @@ func inspectDbOrSkip(tb testing.TB, db *bun.DB) func(context.Context) sqlschema. return func(ctx context.Context) sqlschema.DatabaseSchema { state, err := inspector.Inspect(ctx) require.NoError(tb, err) - return state + return state.(sqlschema.DatabaseSchema) } } @@ -360,7 +361,7 @@ func testRenameTable(t *testing.T, db *bun.DB) { state := inspect(ctx) tables := state.TableDefinitions require.Len(t, tables, 1) - require.Contains(t, tables, "changed") + require.Contains(t, tables, schema.FQN{Schema: db.Dialect().DefaultSchema(), Table: "changed"}) } func testCreateDropTable(t *testing.T, db *bun.DB) { @@ -389,7 +390,7 @@ func testCreateDropTable(t *testing.T, db *bun.DB) { state := inspect(ctx) tables := state.TableDefinitions require.Len(t, tables, 1) - require.Contains(t, tables, "createme") + require.Contains(t, tables, schema.FQN{Schema: db.Dialect().DefaultSchema(), Table: "createme"}) } func testAlterForeignKeys(t *testing.T, db *bun.DB) { @@ -639,9 +640,9 @@ func testRenamedColumns(t *testing.T, db *bun.DB) { } } - require.Contains(t, renamed.ColumnDefimitions, "count") - require.Contains(t, model2.ColumnDefimitions, "second_column") - require.Contains(t, model2.ColumnDefimitions, "do_not_rename") + require.Contains(t, renamed.ColumnDefinitions, "count") + require.Contains(t, model2.ColumnDefinitions, "second_column") + require.Contains(t, model2.ColumnDefinitions, "do_not_rename") } func testRenameColumnRenamesFK(t *testing.T, db *bun.DB) { @@ -710,11 +711,11 @@ func 
testChangeColumnType_AutoCast(t *testing.T, db *bun.DB) { // ManyValues []string `bun:",array"` // did not change } - wantTables := map[string]sqlschema.TableDefinition{ - "change_me_own_type": { + wantTables := map[schema.FQN]sqlschema.TableDefinition{ + {Schema: db.Dialect().DefaultSchema(), Table: "change_me_own_type"}: { Schema: db.Dialect().DefaultSchema(), Name: "change_me_own_type", - ColumnDefimitions: map[string]sqlschema.ColumnDefinition{ + ColumnDefinitions: map[string]sqlschema.ColumnDefinition{ "bigger_int": { SQLType: "bigint", IsIdentity: true, @@ -777,11 +778,11 @@ func testIdentity(t *testing.T, db *bun.DB) { B int64 `bun:",notnull,identity"` } - wantTables := map[string]sqlschema.TableDefinition{ - "bourne_identity": { + wantTables := map[schema.FQN]sqlschema.TableDefinition{ + {Schema: db.Dialect().DefaultSchema(), Table: "bourne_identity"}: { Schema: db.Dialect().DefaultSchema(), Name: "bourne_identity", - ColumnDefimitions: map[string]sqlschema.ColumnDefinition{ + ColumnDefinitions: map[string]sqlschema.ColumnDefinition{ "a": { SQLType: sqltype.BigInt, IsIdentity: false, // <- drop IDENTITY @@ -820,11 +821,11 @@ func testAddDropColumn(t *testing.T, db *bun.DB) { AddMe bool `bun:"addme"` } - wantTables := map[string]sqlschema.TableDefinition{ - "column_madness": { + wantTables := map[schema.FQN]sqlschema.TableDefinition{ + {Schema: db.Dialect().DefaultSchema(), Table: "column_madness"}: { Schema: db.Dialect().DefaultSchema(), Name: "column_madness", - ColumnDefimitions: map[string]sqlschema.ColumnDefinition{ + ColumnDefinitions: map[string]sqlschema.ColumnDefinition{ "do_not_touch": { SQLType: sqltype.VarChar, IsNullable: true, @@ -873,11 +874,11 @@ func testUnique(t *testing.T, db *bun.DB) { PetBreed string `bun:"pet_breed"` // shrink "pet" unique group } - wantTables := map[string]sqlschema.TableDefinition{ - "uniqlo_stores": { + wantTables := map[schema.FQN]sqlschema.TableDefinition{ + {Schema: db.Dialect().DefaultSchema(), Table: 
"uniqlo_stores"}: { Schema: db.Dialect().DefaultSchema(), Name: "uniqlo_stores", - ColumnDefimitions: map[string]sqlschema.ColumnDefinition{ + ColumnDefinitions: map[string]sqlschema.ColumnDefinition{ "first_name": { SQLType: sqltype.VarChar, IsNullable: true, @@ -907,7 +908,7 @@ func testUnique(t *testing.T, db *bun.DB) { IsNullable: true, }, }, - UniqueContraints: []sqlschema.Unique{ + UniqueConstraints: []sqlschema.Unique{ {Columns: sqlschema.NewColumns("email")}, {Columns: sqlschema.NewColumns("pet_name")}, // We can only be sure of the user-defined index name @@ -951,11 +952,11 @@ func testUniqueRenamedTable(t *testing.T, db *bun.DB) { PetBreed string `bun:"pet_breed,unique"` } - wantTables := map[string]sqlschema.TableDefinition{ - "after": { + wantTables := map[schema.FQN]sqlschema.TableDefinition{ + {Schema: db.Dialect().DefaultSchema(), Table: "after"}: { Schema: db.Dialect().DefaultSchema(), Name: "after", - ColumnDefimitions: map[string]sqlschema.ColumnDefinition{ + ColumnDefinitions: map[string]sqlschema.ColumnDefinition{ "first_name": { SQLType: sqltype.VarChar, IsNullable: true, @@ -977,7 +978,7 @@ func testUniqueRenamedTable(t *testing.T, db *bun.DB) { IsNullable: true, }, }, - UniqueContraints: []sqlschema.Unique{ + UniqueConstraints: []sqlschema.Unique{ {Columns: sqlschema.NewColumns("pet_name")}, {Columns: sqlschema.NewColumns("pet_breed")}, {Name: "full_name", Columns: sqlschema.NewColumns("first_name", "last_name", "birthday")}, @@ -1046,11 +1047,11 @@ func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { LastName string `bun:"last_name,pk"` } - wantTables := map[string]sqlschema.TableDefinition{ - "drop_your_pks": { + wantTables := map[schema.FQN]sqlschema.TableDefinition{ + {Schema: db.Dialect().DefaultSchema(), Table: "drop_your_pks"}: { Schema: db.Dialect().DefaultSchema(), Name: "drop_your_pks", - ColumnDefimitions: map[string]sqlschema.ColumnDefinition{ + ColumnDefinitions: map[string]sqlschema.ColumnDefinition{ "first_name": { SQLType: 
sqltype.VarChar, IsNullable: false, @@ -1061,10 +1062,10 @@ func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { }, }, }, - "add_new_pk": { + {Schema: db.Dialect().DefaultSchema(), Table: "add_new_pk"}: { Schema: db.Dialect().DefaultSchema(), Name: "add_new_pk", - ColumnDefimitions: map[string]sqlschema.ColumnDefinition{ + ColumnDefinitions: map[string]sqlschema.ColumnDefinition{ "new_id": { SQLType: sqltype.BigInt, IsNullable: false, @@ -1081,10 +1082,10 @@ func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { }, PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("new_id")}, }, - "change_pk": { + {Schema: db.Dialect().DefaultSchema(), Table: "change_pk"}: { Schema: db.Dialect().DefaultSchema(), Name: "change_pk", - ColumnDefimitions: map[string]sqlschema.ColumnDefinition{ + ColumnDefinitions: map[string]sqlschema.ColumnDefinition{ "first_name": { SQLType: sqltype.VarChar, IsNullable: false, diff --git a/migrate/auto.go b/migrate/auto.go index 6376c5137..34abe333a 100644 --- a/migrate/auto.go +++ b/migrate/auto.go @@ -3,11 +3,14 @@ package migrate import ( "bytes" "context" + "errors" "fmt" + "io" "os" "path/filepath" "github.com/uptrace/bun" + "github.com/uptrace/bun/internal" "github.com/uptrace/bun/migrate/sqlschema" "github.com/uptrace/bun/schema" ) @@ -157,8 +160,7 @@ func (am *AutoMigrator) plan(ctx context.Context) (*changeset, error) { return nil, err } - detector := newDetector(got, want, am.diffOpts...) - changes := detector.Diff() + changes := diff(got, want, am.diffOpts...) if err := changes.ResolveDependencies(); err != nil { return nil, fmt.Errorf("plan migrations: %w", err) } @@ -237,3 +239,149 @@ func (am *AutoMigrator) createSQL(_ context.Context, migrations *Migrations, fna } return mf, nil } + +// Func creates a MigrationFunc that applies all operations all the changeset. 
+func (c *changeset) Func(m sqlschema.Migrator) MigrationFunc { + return func(ctx context.Context, db *bun.DB) error { + return c.apply(ctx, db, m) + } +} + +// GetReverse returns a new changeset with each operation in it "reversed" and in reverse order. +func (c *changeset) GetReverse() *changeset { + var reverse changeset + for i := len(c.operations) - 1; i >= 0; i-- { + reverse.Add(c.operations[i].GetReverse()) + } + return &reverse +} + +// Up is syntactic sugar. +func (c *changeset) Up(m sqlschema.Migrator) MigrationFunc { + return c.Func(m) +} + +// Down is syntactic sugar. +func (c *changeset) Down(m sqlschema.Migrator) MigrationFunc { + return c.GetReverse().Func(m) +} + +// apply generates SQL for each operation and executes it. +func (c *changeset) apply(ctx context.Context, db *bun.DB, m sqlschema.Migrator) error { + if len(c.operations) == 0 { + return nil + } + + for _, op := range c.operations { + if _, isComment := op.(*comment); isComment { + continue + } + + b := internal.MakeQueryBytes() + b, err := m.AppendSQL(b, op) + if err != nil { + return fmt.Errorf("apply changes: %w", err) + } + + query := internal.String(b) + if _, err = db.ExecContext(ctx, query); err != nil { + return fmt.Errorf("apply changes: %w", err) + } + } + return nil +} + +func (c *changeset) WriteTo(w io.Writer, m sqlschema.Migrator) error { + var err error + + b := internal.MakeQueryBytes() + for _, op := range c.operations { + if c, isComment := op.(*comment); isComment { + b = append(b, "/*\n"...) + b = append(b, *c...) + b = append(b, "\n*/"...) + continue + } + + b, err = m.AppendSQL(b, op) + if err != nil { + return fmt.Errorf("write changeset: %w", err) + } + b = append(b, ";\n"...) 
+ } + if _, err := w.Write(b); err != nil { + return fmt.Errorf("write changeset: %w", err) + } + return nil +} + +func (c *changeset) ResolveDependencies() error { + if len(c.operations) <= 1 { + return nil + } + + const ( + unvisited = iota + current + visited + ) + + status := make(map[Operation]int, len(c.operations)) + for _, op := range c.operations { + status[op] = unvisited + } + + var resolved []Operation + var nextOp Operation + var visit func(op Operation) error + + next := func() bool { + for op, s := range status { + if s == unvisited { + nextOp = op + return true + } + } + return false + } + + // visit iterates over c.operations until it finds all operations that depend on the current one + // or runs into cirtular dependency, in which case it will return an error. + visit = func(op Operation) error { + switch status[op] { + case visited: + return nil + case current: + // TODO: add details (circle) to the error message + return errors.New("detected circular dependency") + } + + status[op] = current + + for _, another := range c.operations { + if dop, hasDeps := another.(interface { + DependsOn(Operation) bool + }); another == op || !hasDeps || !dop.DependsOn(op) { + continue + } + if err := visit(another); err != nil { + return err + } + } + + status[op] = visited + + // Any dependent nodes would've already been added to the list by now, so we prepend. + resolved = append([]Operation{op}, resolved...) 
+ return nil + } + + for next() { + if err := visit(nextOp); err != nil { + return err + } + } + + c.operations = resolved + return nil +} diff --git a/migrate/diff.go b/migrate/diff.go index 8dd1cf038..0464f386f 100644 --- a/migrate/diff.go +++ b/migrate/diff.go @@ -1,73 +1,86 @@ package migrate import ( - "context" - "errors" "fmt" - "io" "strings" - "github.com/uptrace/bun" - "github.com/uptrace/bun/internal" "github.com/uptrace/bun/migrate/sqlschema" "github.com/uptrace/bun/schema" ) -// Diff calculates the diff between the current database schema and the target state. -// The result changeset is not sorted, i.e. the caller should resolve dependencies -// before applying the changes. -func (d *detector) Diff() *changeset { +// changeset is a set of changes to the database schema definition. +type changeset struct { + operations []Operation +} + +// Add new operations to the changeset. +func (c *changeset) Add(op ...Operation) { + c.operations = append(c.operations, op...) +} + +// diff calculates the diff between the current database schema and the target state. +// The changeset is not sorted -- the caller should resolve dependencies before applying the changes. +func diff(got, want sqlschema.Schema, opts ...diffOption) *changeset { + d := newDetector(got, want, opts...) + return d.detectChanges() +} + +func (d *detector) detectChanges() *changeset { + currentTables := d.mapNameToTable(d.current) + targetTables := d.mapNameToTable(d.target) + RenameCreate: - for wantName, wantTable := range d.target.TableDefinitions { + for wantName, wantTable := range targetTables { // A table with this name exists in the database. We assume that schema objects won't // be renamed to an already existing name, nor do we support such cases. // Simply check if the table definition has changed. 
- if haveTable, ok := d.current.TableDefinitions[wantName]; ok { + if haveTable, ok := currentTables[wantName]; ok { d.detectColumnChanges(haveTable, wantTable, true) d.detectConstraintChanges(haveTable, wantTable) continue } // Find all renamed tables. We assume that renamed tables have the same signature. - for haveName, haveTable := range d.current.TableDefinitions { - if _, exists := d.target.TableDefinitions[haveName]; !exists && d.canRename(haveTable, wantTable) { + for haveName, haveTable := range currentTables { + if _, exists := targetTables[haveName]; !exists && d.canRename(haveTable, wantTable) { d.changes.Add(&RenameTableOp{ - FQN: haveTable.FQN(), + FQN: haveTable.GetFQN(), NewName: wantName, }) - d.refMap.RenameTable(haveTable.FQN(), wantName) + d.refMap.RenameTable(haveTable.GetFQN(), wantName) // Find renamed columns, if any, and check if constraints (PK, UNIQUE) have been updated. // We need not check wantTable any further. d.detectColumnChanges(haveTable, wantTable, false) d.detectConstraintChanges(haveTable, wantTable) - delete(d.current.TableDefinitions, haveName) + delete(currentTables, haveName) continue RenameCreate } } // If wantTable does not exist in the database and was not renamed // then we need to create this table in the database. - additional := wantTable.Additional.(sqlschema.SchemaTable) + additional := wantTable.(sqlschema.ModelTable) d.changes.Add(&CreateTableOp{ - FQN: wantTable.FQN(), + FQN: wantTable.GetFQN(), Model: additional.Model, }) } // Drop any remaining "current" tables which do not have a model. 
- for name, table := range d.current.TableDefinitions { - if _, keep := d.target.TableDefinitions[name]; !keep { + for name, table := range currentTables { + if _, keep := targetTables[name]; !keep { d.changes.Add(&DropTableOp{ - FQN: table.FQN(), + FQN: table.GetFQN(), }) } } + targetFKs := d.target.GetForeignKeys() currentFKs := d.refMap.Deref() - for fk := range d.target.ForeignKeys { + for fk := range targetFKs { if _, ok := currentFKs[fk]; !ok { d.changes.Add(&AddForeignKeyOp{ ForeignKey: fk, @@ -77,7 +90,7 @@ RenameCreate: } for fk, name := range currentFKs { - if _, ok := d.target.ForeignKeys[fk]; !ok { + if _, ok := targetFKs[fk]; !ok { d.changes.Add(&DropForeignKeyOp{ ConstraintName: name, ForeignKey: fk, @@ -88,162 +101,144 @@ RenameCreate: return &d.changes } -// changeset is a set of changes to the database schema definition. -type changeset struct { - operations []Operation -} - -// Add new operations to the changeset. -func (c *changeset) Add(op ...Operation) { - c.operations = append(c.operations, op...) -} - -// Func creates a MigrationFunc that applies all operations all the changeset. -func (c *changeset) Func(m sqlschema.Migrator) MigrationFunc { - return func(ctx context.Context, db *bun.DB) error { - return c.apply(ctx, db, m) - } -} - -// GetReverse returns a new changeset with each operation in it "reversed" and in reverse order. -func (c *changeset) GetReverse() *changeset { - var reverse changeset - for i := len(c.operations) - 1; i >= 0; i-- { - reverse.Add(c.operations[i].GetReverse()) - } - return &reverse -} - -// Up is syntactic sugar. -func (c *changeset) Up(m sqlschema.Migrator) MigrationFunc { - return c.Func(m) -} - -// Down is syntactic sugar. -func (c *changeset) Down(m sqlschema.Migrator) MigrationFunc { - return c.GetReverse().Func(m) -} +// detechColumnChanges finds renamed columns and, if checkType == true, columns with changed type. 
+func (d *detector) detectColumnChanges(current, target sqlschema.Table, checkType bool) { + currentColumns := d.mapNameToColumn(current) + targetColumns := d.mapNameToColumn(target) -// apply generates SQL for each operation and executes it. -func (c *changeset) apply(ctx context.Context, db *bun.DB, m sqlschema.Migrator) error { - if len(c.operations) == 0 { - return nil - } +ChangeRename: + for tName, tCol := range targetColumns { - for _, op := range c.operations { - if _, isComment := op.(*comment); isComment { + // This column exists in the database, so it hasn't been renamed, dropped, or added. + // Still, we should not delete(columns, thisColumn), because later we will need to + // check that we do not try to rename a column to an already a name that already exists. + if cCol, ok := currentColumns[tName]; ok { + if checkType && !d.equalColumns(cCol, tCol) { + d.changes.Add(&ChangeColumnTypeOp{ + FQN: target.GetFQN(), + Column: tName, + From: cCol, + To: d.makeTargetColDef(cCol, tCol), + }) + } continue } - b := internal.MakeQueryBytes() - b, err := m.AppendSQL(b, op) - if err != nil { - return fmt.Errorf("apply changes: %w", err) - } - - query := internal.String(b) - if _, err = db.ExecContext(ctx, query); err != nil { - return fmt.Errorf("apply changes: %w", err) - } - } - return nil -} - -func (c *changeset) WriteTo(w io.Writer, m sqlschema.Migrator) error { - var err error + // Column tName does not exist in the database -- it's been either renamed or added. + // Find renamed columns first. + for cName, cCol := range currentColumns { + // Cannot rename if a column with this name already exists or the types differ. 
+ if _, exists := targetColumns[cName]; exists || !d.equalColumns(tCol, cCol) { + continue + } + d.changes.Add(&RenameColumnOp{ + FQN: target.GetFQN(), + OldName: cName, + NewName: tName, + }) + d.refMap.RenameColumn(target.GetFQN(), cName, tName) + delete(currentColumns, cName) // no need to check this column again - b := internal.MakeQueryBytes() - for _, op := range c.operations { - if c, isComment := op.(*comment); isComment { - b = append(b, "/*\n"...) - b = append(b, *c...) - b = append(b, "\n*/"...) - continue - } + // Update primary key definition to avoid superficially recreating the constraint. + current.GetPrimaryKey().Columns.Replace(cName, tName) - b, err = m.AppendSQL(b, op) - if err != nil { - return fmt.Errorf("write changeset: %w", err) + continue ChangeRename } - b = append(b, ";\n"...) - } - if _, err := w.Write(b); err != nil { - return fmt.Errorf("write changeset: %w", err) - } - return nil -} -func (c *changeset) ResolveDependencies() error { - if len(c.operations) <= 1 { - return nil + d.changes.Add(&AddColumnOp{ + FQN: target.GetFQN(), + Column: tName, + ColDef: tCol, + }) } - const ( - unvisited = iota - current - visited - ) - - var resolved []Operation - var visit func(op Operation) error - - var nextOp Operation - var next func() bool - - status := make(map[Operation]int, len(c.operations)) - for _, op := range c.operations { - status[op] = unvisited + // Drop columns which do not exist in the target schema and were not renamed. 
+ for cName, cCol := range currentColumns { + if _, keep := targetColumns[cName]; !keep { + d.changes.Add(&DropColumnOp{ + FQN: target.GetFQN(), + Column: cName, + ColDef: cCol, + }) + } } +} - next = func() bool { - for op, s := range status { - if s == unvisited { - nextOp = op - return true +func (d *detector) detectConstraintChanges(current, target sqlschema.Table) { +Add: + for _, want := range target.GetUniqueConstraints() { + for _, got := range current.GetUniqueConstraints() { + if got.Equals(want) { + continue Add } } - return false + d.changes.Add(&AddUniqueConstraintOp{ + FQN: target.GetFQN(), + Unique: want, + }) } - // visit iterates over c.operations until it finds all operations that depend on the current one - // or runs into cirtular dependency, in which case it will return an error. - visit = func(op Operation) error { - switch status[op] { - case visited: - return nil - case current: - // TODO: add details (circle) to the error message - return errors.New("detected circular dependency") - } - - status[op] = current - - for _, another := range c.operations { - if dop, hasDeps := another.(interface { - DependsOn(Operation) bool - }); another == op || !hasDeps || !dop.DependsOn(op) { - continue - } - if err := visit(another); err != nil { - return err +Drop: + for _, got := range current.GetUniqueConstraints() { + for _, want := range target.GetUniqueConstraints() { + if got.Equals(want) { + continue Drop } } - status[op] = visited + d.changes.Add(&DropUniqueConstraintOp{ + FQN: target.GetFQN(), + Unique: got, + }) + } - // Any dependent nodes would've already been added to the list by now, so we prepend. - resolved = append([]Operation{op}, resolved...) 
- return nil + targetPK := target.GetPrimaryKey() + currentPK := current.GetPrimaryKey() + + // Detect primary key changes + if targetPK == nil && currentPK == nil { + return + } + switch { + case targetPK == nil && currentPK != nil: + d.changes.Add(&DropPrimaryKeyOp{ + FQN: target.GetFQN(), + PrimaryKey: *currentPK, + }) + case currentPK == nil && targetPK != nil: + d.changes.Add(&AddPrimaryKeyOp{ + FQN: target.GetFQN(), + PrimaryKey: *targetPK, + }) + case targetPK.Columns != currentPK.Columns: + d.changes.Add(&ChangePrimaryKeyOp{ + FQN: target.GetFQN(), + Old: *currentPK, + New: *targetPK, + }) } +} - for next() { - if err := visit(nextOp); err != nil { - return err - } +func newDetector(got, want sqlschema.Schema, opts ...diffOption) *detector { + cfg := &detectorConfig{ + FKNameFunc: defaultFKName, + DetectRenamedFKs: false, + EqType: func(c1, c2 sqlschema.Column) bool { + return c1.GetSQLType() == c2.GetSQLType() && c1.GetVarcharLen() == c2.GetVarcharLen() + }, + } + for _, opt := range opts { + opt(cfg) } - c.operations = resolved - return nil + return &detector{ + current: got, + target: want, + refMap: newRefMap(got.GetForeignKeys()), + fkNameFunc: cfg.FKNameFunc, + detectRenamedFKs: cfg.DetectRenamedFKs, + eqType: cfg.EqType, + } } type diffOption func(*detectorConfig) @@ -276,10 +271,10 @@ type detectorConfig struct { // detector may modify the passed database schemas, so it isn't safe to re-use them. type detector struct { // current state represents the existing database schema. - current sqlschema.DatabaseSchema + current sqlschema.Schema // target state represents the database schema defined in bun models. 
- target sqlschema.DatabaseSchema + target sqlschema.Schema changes changeset refMap refMap @@ -297,164 +292,50 @@ type detector struct { detectRenamedFKs bool } -func newDetector(got, want sqlschema.DatabaseSchema, opts ...diffOption) *detector { - cfg := &detectorConfig{ - FKNameFunc: defaultFKName, - DetectRenamedFKs: false, - EqType: func(c1, c2 sqlschema.ColumnDefinition) bool { - return c1.SQLType == c2.SQLType && c1.VarcharLen == c2.VarcharLen - }, - } - for _, opt := range opts { - opt(cfg) - } - - return &detector{ - current: got, - target: want, - refMap: newRefMap(got.ForeignKeys), - fkNameFunc: cfg.FKNameFunc, - detectRenamedFKs: cfg.DetectRenamedFKs, - eqType: cfg.EqType, - } -} - // canRename checks if t1 can be renamed to t2. -func (d *detector) canRename(t1, t2 sqlschema.TableDefinition) bool { - return t1.Schema == t2.Schema && equalSignatures(t1, t2, d.equalColumns) +func (d detector) canRename(t1, t2 sqlschema.Table) bool { + return t1.GetSchema() == t2.GetSchema() && equalSignatures(t1, t2, d.equalColumns) } -func (d *detector) equalColumns(col1, col2 sqlschema.ColumnDefinition) bool { +func (d detector) equalColumns(col1, col2 sqlschema.Column) bool { return d.eqType(col1, col2) && - col1.DefaultValue == col2.DefaultValue && - col1.IsNullable == col2.IsNullable && - col1.IsAutoIncrement == col2.IsAutoIncrement && - col1.IsIdentity == col2.IsIdentity + col1.GetDefaultValue() == col2.GetDefaultValue() && + col1.GetIsNullable() == col2.GetIsNullable() && + col1.GetIsAutoIncrement() == col2.GetIsAutoIncrement() && + col1.GetIsIdentity() == col2.GetIsIdentity() } -func (d *detector) makeTargetColDef(current, target sqlschema.ColumnDefinition) sqlschema.ColumnDefinition { +func (d detector) makeTargetColDef(current, target sqlschema.Column) sqlschema.Column { // Avoid unneccessary type-change migrations if the types are equivalent. 
if d.eqType(current, target) { - target.SQLType = current.SQLType - target.VarcharLen = current.VarcharLen + target = sqlschema.ColumnDefinition{ + Name: target.GetName(), + DefaultValue: target.GetDefaultValue(), + IsNullable: target.GetIsNullable(), + IsAutoIncrement: target.GetIsAutoIncrement(), + IsIdentity: target.GetIsIdentity(), + + SQLType: current.GetSQLType(), + VarcharLen: current.GetVarcharLen(), + } } return target } -// detechColumnChanges finds renamed columns and, if checkType == true, columns with changed type. -func (d *detector) detectColumnChanges(current, target sqlschema.TableDefinition, checkType bool) { - fqn := schema.FQN{Schema: target.Schema, Table: target.Name} - -ChangedRenamed: - for tName, tCol := range target.ColumnDefimitions { - - // This column exists in the database, so it hasn't been renamed, dropped, or added. - // Still, we should not delete(columns, thisColumn), because later we will need to - // check that we do not try to rename a column to an already a name that already exists. - if cCol, ok := current.ColumnDefimitions[tName]; ok { - if checkType && !d.equalColumns(cCol, tCol) { - d.changes.Add(&ChangeColumnTypeOp{ - FQN: fqn, - Column: tName, - From: cCol, - To: d.makeTargetColDef(cCol, tCol), - }) - } - continue - } - - // Column tName does not exist in the database -- it's been either renamed or added. - // Find renamed columns first. - for cName, cCol := range current.ColumnDefimitions { - // Cannot rename if a column with this name already exists or the types differ. - if _, exists := target.ColumnDefimitions[cName]; exists || !d.equalColumns(tCol, cCol) { - continue - } - d.changes.Add(&RenameColumnOp{ - FQN: fqn, - OldName: cName, - NewName: tName, - }) - d.refMap.RenameColumn(fqn, cName, tName) - delete(current.ColumnDefimitions, cName) // no need to check this column again - - // Update primary key definition to avoid superficially recreating the constraint. 
- current.PrimaryKey.Columns.Replace(cName, tName) - - continue ChangedRenamed - } - - d.changes.Add(&AddColumnOp{ - FQN: fqn, - Column: tName, - ColDef: tCol, - }) - } - - // Drop columns which do not exist in the target schema and were not renamed. - for cName, cCol := range current.ColumnDefimitions { - if _, keep := target.ColumnDefimitions[cName]; !keep { - d.changes.Add(&DropColumnOp{ - FQN: fqn, - Column: cName, - ColDef: cCol, - }) - } +func (d *detector) mapNameToTable(s sqlschema.Schema) map[string]sqlschema.Table { + m := make(map[string]sqlschema.Table) + for _, t := range s.GetTables() { + m[t.GetName()] = t } + return m } -func (d *detector) detectConstraintChanges(current, target sqlschema.TableDefinition) { - fqn := schema.FQN{Schema: target.Schema, Table: target.Name} - -Add: - for _, want := range target.UniqueContraints { - for _, got := range current.UniqueContraints { - if got.Equals(want) { - continue Add - } - } - d.changes.Add(&AddUniqueConstraintOp{ - FQN: fqn, - Unique: want, - }) - } - -Drop: - for _, got := range current.UniqueContraints { - for _, want := range target.UniqueContraints { - if got.Equals(want) { - continue Drop - } - } - - d.changes.Add(&DropUniqueConstraintOp{ - FQN: fqn, - Unique: got, - }) - } - - // Detect primary key changes - if target.PrimaryKey == nil && current.PrimaryKey == nil { - return - } - switch { - case target.PrimaryKey == nil && current.PrimaryKey != nil: - d.changes.Add(&DropPrimaryKeyOp{ - FQN: fqn, - PrimaryKey: *current.PrimaryKey, - }) - case current.PrimaryKey == nil && target.PrimaryKey != nil: - d.changes.Add(&AddPrimaryKeyOp{ - FQN: fqn, - PrimaryKey: *target.PrimaryKey, - }) - case target.PrimaryKey.Columns != current.PrimaryKey.Columns: - d.changes.Add(&ChangePrimaryKeyOp{ - FQN: fqn, - Old: *current.PrimaryKey, - New: *target.PrimaryKey, - }) +func (d *detector) mapNameToColumn(t sqlschema.Table) map[string]sqlschema.Column { + m := make(map[string]sqlschema.Column) + for _, c := range 
t.GetColumns() { + m[c.GetName()] = c } + return m } // defaultFKName returns a name for the FK constraint in the format {tablename}_{columnname(s)}_fkey, following the Postgres convention. @@ -463,10 +344,10 @@ func defaultFKName(fk sqlschema.ForeignKey) string { return fmt.Sprintf("%s_%s_fkey", fk.From.FQN.Table, columnnames) } -type TypeEquivalenceFunc func(sqlschema.ColumnDefinition, sqlschema.ColumnDefinition) bool +type TypeEquivalenceFunc func(sqlschema.Column, sqlschema.Column) bool // equalSignatures determines if two tables have the same "signature". -func equalSignatures(t1, t2 sqlschema.TableDefinition, eq TypeEquivalenceFunc) bool { +func equalSignatures(t1, t2 sqlschema.Table, eq TypeEquivalenceFunc) bool { sig1 := newSignature(t1, eq) sig2 := newSignature(t2, eq) return sig1.Equals(sig2) @@ -483,7 +364,7 @@ type signature struct { eq TypeEquivalenceFunc } -func newSignature(t sqlschema.TableDefinition, eq TypeEquivalenceFunc) signature { +func newSignature(t sqlschema.Table, eq TypeEquivalenceFunc) signature { s := signature{ underlying: make(map[sqlschema.ColumnDefinition]int), eq: eq, @@ -493,8 +374,9 @@ func newSignature(t sqlschema.TableDefinition, eq TypeEquivalenceFunc) signature } // scan iterates over table's field and counts occurrences of each unique column definition. -func (s *signature) scan(t sqlschema.TableDefinition) { - for _, scanCol := range t.ColumnDefimitions { +func (s *signature) scan(t sqlschema.Table) { + for _, icol := range t.GetColumns() { + scanCol := icol.(sqlschema.ColumnDefinition) // This is slightly more expensive than if the columns could be compared directly // and we always did s.underlying[col]++, but we get type-equivalence in return. 
col, count := s.getCount(scanCol) diff --git a/migrate/operations.go b/migrate/operations.go index cfb4cb455..856a2ee20 100644 --- a/migrate/operations.go +++ b/migrate/operations.go @@ -82,7 +82,7 @@ func (op *RenameColumnOp) DependsOn(another Operation) bool { type AddColumnOp struct { FQN schema.FQN Column string - ColDef sqlschema.ColumnDefinition + ColDef sqlschema.Column } var _ Operation = (*AddColumnOp)(nil) @@ -98,7 +98,7 @@ func (op *AddColumnOp) GetReverse() Operation { type DropColumnOp struct { FQN schema.FQN Column string - ColDef sqlschema.ColumnDefinition + ColDef sqlschema.Column } var _ Operation = (*DropColumnOp)(nil) @@ -250,8 +250,8 @@ func (op *DropUniqueConstraintOp) GetReverse() Operation { type ChangeColumnTypeOp struct { FQN schema.FQN Column string - From sqlschema.ColumnDefinition - To sqlschema.ColumnDefinition + From sqlschema.Column + To sqlschema.Column } var _ Operation = (*ChangeColumnTypeOp)(nil) diff --git a/migrate/sqlschema/inspector.go b/migrate/sqlschema/inspector.go index 4f24051e8..616aea922 100644 --- a/migrate/sqlschema/inspector.go +++ b/migrate/sqlschema/inspector.go @@ -17,11 +17,35 @@ type InspectorDialect interface { // EquivalentType returns true if col1 and co2 SQL types are equivalent, // i.e. they might use dialect-specifc type aliases (SERIAL ~ SMALLINT) // or specify the same VARCHAR length differently (VARCHAR(255) ~ VARCHAR). 
- EquivalentType(ColumnDefinition, ColumnDefinition) bool + EquivalentType(Column, Column) bool } type Inspector interface { - Inspect(ctx context.Context) (DatabaseSchema, error) + Inspect(ctx context.Context) (Schema, error) +} + +type Schema interface { + GetTables() []Table + GetForeignKeys() map[ForeignKey]string +} + +type Table interface { + GetSchema() string + GetName() string + GetColumns() []Column + GetPrimaryKey() *PrimaryKey + GetUniqueConstraints() []Unique + GetFQN() schema.FQN +} + +type Column interface { + GetName() string + GetSQLType() string + GetVarcharLen() int + GetDefaultValue() string + GetIsNullable() bool + GetIsAutoIncrement() bool + GetIsIdentity() bool } type inspector struct { @@ -38,12 +62,6 @@ func NewInspector(db *bun.DB, excludeTables ...string) (Inspector, error) { }, nil } -// SchemaTable provides additional table metadata that is only accessible from scanning Go models. -type SchemaTable struct { - // Model stores the zero interface to the underlying Go struct. - Model interface{} -} - // SchemaInspector creates the current project state from the passed bun.Models. // Do not recycle SchemaInspector for different sets of models, as older models will not be de-registerred before the next run. type SchemaInspector struct { @@ -58,10 +76,34 @@ func NewSchemaInspector(tables *schema.Tables) *SchemaInspector { } } -func (si *SchemaInspector) Inspect(ctx context.Context) (DatabaseSchema, error) { - state := DatabaseSchema{ - TableDefinitions: make(map[string]TableDefinition), - ForeignKeys: make(map[ForeignKey]string), +type BunModelSchema struct { + DatabaseSchema + + ModelTables map[schema.FQN]ModelTable +} + +func (ms BunModelSchema) GetTables() []Table { + var tables []Table + for _, t := range ms.ModelTables { + tables = append(tables, t) + } + return tables +} + +// ModelTable provides additional table metadata that is only accessible from scanning Go models. 
+type ModelTable struct { + TableDefinition + + // Model stores the zero interface to the underlying Go struct. + Model interface{} +} + +func (si *SchemaInspector) Inspect(ctx context.Context) (Schema, error) { + state := BunModelSchema{ + DatabaseSchema: DatabaseSchema{ + ForeignKeys: make(map[ForeignKey]string), + }, + ModelTables: make(map[schema.FQN]ModelTable), } for _, t := range si.tables.All() { columns := make(map[string]ColumnDefinition) @@ -72,6 +114,7 @@ func (si *SchemaInspector) Inspect(ctx context.Context) (DatabaseSchema, error) return state, fmt.Errorf("parse length in %q: %w", f.CreateTableSQLType, err) } columns[f.Name] = ColumnDefinition{ + Name: f.Name, SQLType: strings.ToLower(sqlType), // TODO(dyma): maybe this is not necessary after Column.Eq() VarcharLen: length, DefaultValue: exprToLower(f.SQLDefault), @@ -109,15 +152,16 @@ func (si *SchemaInspector) Inspect(ctx context.Context) (DatabaseSchema, error) pk = &PrimaryKey{Columns: NewColumns(columns...)} } - state.TableDefinitions[t.Name] = TableDefinition{ - Schema: t.Schema, - Name: t.Name, - ColumnDefimitions: columns, - UniqueContraints: unique, - PrimaryKey: pk, - Additional: SchemaTable{ - Model: t.ZeroIface, + fqn := schema.FQN{Schema: t.Schema, Table: t.Name} + state.ModelTables[fqn] = ModelTable{ + TableDefinition: TableDefinition{ + Schema: t.Schema, + Name: t.Name, + ColumnDefinitions: columns, + UniqueConstraints: unique, + PrimaryKey: pk, }, + Model: t.ZeroIface, } for _, rel := range t.Relations { diff --git a/migrate/sqlschema/schema.go b/migrate/sqlschema/schema.go index 6df87d1e4..c833568ea 100644 --- a/migrate/sqlschema/schema.go +++ b/migrate/sqlschema/schema.go @@ -9,45 +9,69 @@ import ( ) type DatabaseSchema struct { - TableDefinitions map[string]TableDefinition + TableDefinitions map[schema.FQN]TableDefinition ForeignKeys map[ForeignKey]string } +var _ Schema = (*DatabaseSchema)(nil) + type TableDefinition struct { Schema string Name string - // ColumnDefimitions map 
each column name to the column definition. - ColumnDefimitions map[string]ColumnDefinition + // ColumnDefinitions map each column name to the column definition. + ColumnDefinitions map[string]ColumnDefinition // PrimaryKey holds the primary key definition. // A nil value means that no primary key is defined for the table. PrimaryKey *PrimaryKey // UniqueConstraints defined on the table. - UniqueContraints []Unique - - // Additional metadata that Inspector implementations might provide about the table. - Additional interface{} + UniqueConstraints []Unique } -func (t TableDefinition) FQN() schema.FQN { - return schema.FQN{Schema: t.Schema, Table: t.Name} -} +var _ Table = (*TableDefinition)(nil) // ColumnDefinition stores attributes of a database column. type ColumnDefinition struct { + Name string SQLType string VarcharLen int DefaultValue string IsNullable bool IsAutoIncrement bool IsIdentity bool + // TODO: add Precision and Cardinality for timestamps/bit-strings/floats and arrays respectively. +} - // Additional metadata that Inspector implementations might provide about the column. - Additional interface{} +var _ Column = (*ColumnDefinition)(nil) - // TODO: add Precision and Cardinality for timestamps/bit-strings/floats and arrays respectively. +func (cd ColumnDefinition) GetName() string { + return cd.Name +} + +func (cd ColumnDefinition) GetSQLType() string { + return cd.SQLType +} + +func (cd ColumnDefinition) GetVarcharLen() int { + return cd.VarcharLen +} + +func (cd ColumnDefinition) GetDefaultValue() string { + return cd.DefaultValue +} + +func (cd ColumnDefinition) GetIsNullable() bool { + return cd.IsNullable +} + +func (cd ColumnDefinition) GetIsAutoIncrement() bool { + return cd.IsAutoIncrement +} + +func (cd ColumnDefinition) GetIsIdentity() bool { + return cd.IsIdentity } // AppendQuery appends full SQL data type. 
@@ -161,3 +185,39 @@ type ColumnReference struct { FQN schema.FQN Column Columns } + +func (ds DatabaseSchema) GetTables() []Table { + var tables []Table + for i := range ds.TableDefinitions { + tables = append(tables, ds.TableDefinitions[i]) + } + return tables +} + +func (ds DatabaseSchema) GetForeignKeys() map[ForeignKey]string { + return ds.ForeignKeys +} + +func (td TableDefinition) GetSchema() string { + return td.Schema +} +func (td TableDefinition) GetName() string { + return td.Name +} +func (td TableDefinition) GetColumns() []Column { + var columns []Column + for i := range td.ColumnDefinitions { + columns = append(columns, td.ColumnDefinitions[i]) + } + return columns +} +func (td TableDefinition) GetPrimaryKey() *PrimaryKey { + return td.PrimaryKey +} +func (td TableDefinition) GetUniqueConstraints() []Unique { + return td.UniqueConstraints +} + +func (t TableDefinition) GetFQN() schema.FQN { + return schema.FQN{Schema: t.Schema, Table: t.Name} +} From 42fdced125f37ffc103d123ee0267611a264c561 Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Fri, 8 Nov 2024 01:03:07 +0100 Subject: [PATCH 39/55] chore: drop foreign key renames This feature will be added in the follow-up pull requests. 
--- dialect/pgdialect/alter_table.go | 28 ++---- internal/dbtest/migrate_test.go | 158 ------------------------------- migrate/auto.go | 25 +---- migrate/diff.go | 26 +---- migrate/operations.go | 26 ----- 5 files changed, 11 insertions(+), 252 deletions(-) diff --git a/dialect/pgdialect/alter_table.go b/dialect/pgdialect/alter_table.go index 971e9626b..a73be5c13 100644 --- a/dialect/pgdialect/alter_table.go +++ b/dialect/pgdialect/alter_table.go @@ -2,6 +2,7 @@ package pgdialect import ( "fmt" + "strings" "github.com/uptrace/bun" "github.com/uptrace/bun/migrate" @@ -60,8 +61,6 @@ func (m *migrator) AppendSQL(b []byte, operation interface{}) (_ []byte, err err b, err = m.addForeignKey(fmter, appendAlterTable(b, change.FQN()), change) case *migrate.DropForeignKeyOp: b, err = m.dropConstraint(fmter, appendAlterTable(b, change.FQN()), change.ConstraintName) - // case *migrate.RenameForeignKeyOp: - // b, err = m.renameConstraint(fmter, b, change) default: return nil, fmt.Errorf("append sql: unknown operation %T", change) } @@ -126,22 +125,6 @@ func (m *migrator) addPrimaryKey(fmter schema.Formatter, b []byte, pk sqlschema. return b, nil } -func (m *migrator) renameConstraint(fmter schema.Formatter, b []byte, rename *migrate.RenameForeignKeyOp) (_ []byte, err error) { - b = append(b, "ALTER TABLE "...) - fqn := rename.FQN() - if b, err = fqn.AppendQuery(fmter, b); err != nil { - return b, err - } - - b = append(b, " RENAME CONSTRAINT "...) - b = fmter.AppendName(b, rename.OldName) - - b = append(b, " TO "...) - b = fmter.AppendName(b, rename.NewName) - - return b, nil -} - func (m *migrator) changePrimaryKey(fmter schema.Formatter, b []byte, change *migrate.ChangePrimaryKeyOp) (_ []byte, err error) { b, _ = m.dropConstraint(fmter, b, change.Old.Name) b = append(b, ", "...) 
@@ -173,7 +156,14 @@ func (m *migrator) dropConstraint(fmter schema.Formatter, b []byte, name string) func (m *migrator) addForeignKey(fmter schema.Formatter, b []byte, add *migrate.AddForeignKeyOp) (_ []byte, err error) { b = append(b, "ADD CONSTRAINT "...) - b = fmter.AppendName(b, add.ConstraintName) + + name := add.ConstraintName + if name == "" { + colRef := add.ForeignKey.From + columns := strings.Join(colRef.Column.Split(), "_") + name = fmt.Sprintf("%s_%s_fkey", colRef.FQN.Table, columns) + } + b = fmter.AppendName(b, name) b = append(b, " FOREIGN KEY ("...) if b, err = add.ForeignKey.From.Column.AppendQuery(fmter, b); err != nil { diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index bbdcb6061..bcfa6d4c9 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -315,11 +315,6 @@ func TestAutoMigrator_Migrate(t *testing.T) { {testUnique}, {testUniqueRenamedTable}, {testUpdatePrimaryKeys}, - - // Suspended support for renaming foreign keys: - // {testCustomFKNameFunc}, - // {testForceRenameFK}, - // {testRenameColumnRenamesFK}, } testEachDB(t, func(t *testing.T, dbName string, db *bun.DB) { @@ -470,120 +465,6 @@ func testAlterForeignKeys(t *testing.T, db *bun.DB) { }) } -func testForceRenameFK(t *testing.T, db *bun.DB) { - // Database state - type Owner struct { - ID int64 `bun:",pk"` - } - - type OwnedThing struct { - bun.BaseModel `bun:"table:things"` - ID int64 `bun:",pk"` - OwnerID int64 `bun:"owner_id,notnull"` - - Owner *Owner `bun:"rel:belongs-to,join:owner_id=id"` - } - - // Model state - type Person struct { - ID int64 `bun:",pk"` - } - - type PersonalThing struct { - bun.BaseModel `bun:"table:things"` - ID int64 `bun:",pk"` - PersonID int64 `bun:"owner_id,notnull"` - - Owner *Person `bun:"rel:belongs-to,join:owner_id=id"` - } - - ctx := context.Background() - inspect := inspectDbOrSkip(t, db) - - mustCreateTableWithFKs(t, ctx, db, - (*Owner)(nil), - (*OwnedThing)(nil), - ) - 
mustDropTableOnCleanup(t, ctx, db, (*Person)(nil)) - - m := newAutoMigratorOrSkip(t, db, - migrate.WithModel( - (*Person)(nil), - (*PersonalThing)(nil), - ), - migrate.WithRenameFK(true), - migrate.WithFKNameFunc(func(fk sqlschema.ForeignKey) string { - return strings.Join([]string{ - fk.From.FQN.Table, fk.To.FQN.Table, "fkey", - }, "_") - }), - ) - - // Act - runMigrations(t, m) - - // Assert - state := inspect(ctx) - schema := db.Dialect().DefaultSchema() - wantName, ok := state.ForeignKeys[sqlschema.ForeignKey{ - From: sqlschema.NewColumnReference(schema, "things", "owner_id"), - To: sqlschema.NewColumnReference(schema, "people", "id"), - }] - require.True(t, ok, "expect state.ForeignKeys to contain things_people_fkey") - require.Equal(t, wantName, "things_people_fkey") -} - -func testCustomFKNameFunc(t *testing.T, db *bun.DB) { - // Database state - type Column struct { - OID int64 `bun:",pk"` - RelID int64 `bun:"attrelid,notnull"` - } - type Table struct { - OID int64 `bun:",pk"` - } - - // Model state - type ColumnM struct { - bun.BaseModel `bun:"table:columns"` - OID int64 `bun:",pk"` - RelID int64 `bun:"attrelid,notnull"` - - Table *Table `bun:"rel:belongs-to,join:attrelid=oid"` - } - type TableM struct { - bun.BaseModel `bun:"table:tables"` - OID int64 `bun:",pk"` - } - - ctx := context.Background() - inspect := inspectDbOrSkip(t, db) - - mustCreateTableWithFKs(t, ctx, db, - (*Table)(nil), - (*Column)(nil), - ) - - m := newAutoMigratorOrSkip(t, db, - migrate.WithFKNameFunc(func(sqlschema.ForeignKey) string { return "test_fkey" }), - migrate.WithModel( - (*TableM)(nil), - (*ColumnM)(nil), - ), - ) - - // Act - runMigrations(t, m) - - // Assert - state := inspect(ctx) - fkName := state.ForeignKeys[sqlschema.ForeignKey{ - From: sqlschema.NewColumnReference(db.Dialect().DefaultSchema(), "columns", "attrelid"), - To: sqlschema.NewColumnReference(db.Dialect().DefaultSchema(), "tables", "oid"), - }] - require.Equal(t, "test_fkey", fkName) -} - func 
testRenamedColumns(t *testing.T, db *bun.DB) { // Database state type Original struct { @@ -645,45 +526,6 @@ func testRenamedColumns(t *testing.T, db *bun.DB) { require.Contains(t, model2.ColumnDefinitions, "do_not_rename") } -func testRenameColumnRenamesFK(t *testing.T, db *bun.DB) { - type TennantBefore struct { - bun.BaseModel `bun:"table:tennants"` - ID int64 `bun:"id,pk,identity"` - Apartment int8 - NeighbourID int64 `bun:"neighbour_id"` - - Neighbour *TennantBefore `bun:"rel:has-one,join:neighbour_id=id"` - } - - type TennantAfter struct { - bun.BaseModel `bun:"table:tennants"` - TennantID int64 `bun:"tennant_id,pk,identity"` - Apartment int8 - NeighbourID int64 `bun:"my_neighbour"` - - Neighbour *TennantAfter `bun:"rel:has-one,join:my_neighbour=tennant_id"` - } - - ctx := context.Background() - inspect := inspectDbOrSkip(t, db) - mustCreateTableWithFKs(t, ctx, db, (*TennantBefore)(nil)) - m := newAutoMigratorOrSkip(t, db, - migrate.WithRenameFK(true), - migrate.WithModel((*TennantAfter)(nil)), - ) - - // Act - runMigrations(t, m) - - // Assert - state := inspect(ctx) - fkName := state.ForeignKeys[sqlschema.ForeignKey{ - From: sqlschema.NewColumnReference(db.Dialect().DefaultSchema(), "tennants", "my_neighbour"), - To: sqlschema.NewColumnReference(db.Dialect().DefaultSchema(), "tennants", "tennant_id"), - }] - require.Equal(t, "tennants_my_neighbour_fkey", fkName) -} - // testChangeColumnType_AutoCast checks type changes which can be type-casted automatically, // i.e. do not require supplying a USING clause (pgdialect). func testChangeColumnType_AutoCast(t *testing.T, db *bun.DB) { diff --git a/migrate/auto.go b/migrate/auto.go index 34abe333a..44f595ba5 100644 --- a/migrate/auto.go +++ b/migrate/auto.go @@ -31,29 +31,6 @@ func WithExcludeTable(tables ...string) AutoMigratorOption { } } -// WithFKNameFunc sets the function to build a new name for created or renamed FK constraints. 
-// -// Notice: this option is not supported in SQLite dialect and will have no effect. -// SQLite does not implement ADD CONSTRAINT, so adding or renaming a constraint will require re-creating the table. -// We need to support custom FKNameFunc in CreateTable to control how FKs are named. -// -// More generally, this option will have no effect whenever FKs are included in the CREATE TABLE definition, -// which is the default strategy. Perhaps it would make sense to allow disabling this and switching to separate (CreateTable + AddFK) -func WithFKNameFunc(f func(sqlschema.ForeignKey) string) AutoMigratorOption { - return func(m *AutoMigrator) { - m.diffOpts = append(m.diffOpts, withFKNameFunc(f)) - } -} - -// WithRenameFK prevents AutoMigrator from recreating foreign keys when their dependent relations are renamed, -// and forces it to run a RENAME CONSTRAINT query instead. Creating an index on a large table can take a very long time, -// and in those cases simply renaming the FK makes a lot more sense. -func WithRenameFK(enabled bool) AutoMigratorOption { - return func(m *AutoMigrator) { - m.diffOpts = append(m.diffOpts, withDetectRenamedFKs(enabled)) - } -} - // WithTableNameAuto overrides default migrations table name. 
func WithTableNameAuto(table string) AutoMigratorOption { return func(m *AutoMigrator) { @@ -325,7 +302,7 @@ func (c *changeset) ResolveDependencies() error { current visited ) - + status := make(map[Operation]int, len(c.operations)) for _, op := range c.operations { status[op] = unvisited diff --git a/migrate/diff.go b/migrate/diff.go index 0464f386f..bc945eaeb 100644 --- a/migrate/diff.go +++ b/migrate/diff.go @@ -84,7 +84,7 @@ RenameCreate: if _, ok := currentFKs[fk]; !ok { d.changes.Add(&AddForeignKeyOp{ ForeignKey: fk, - ConstraintName: d.fkNameFunc(fk), + ConstraintName: "", // leave empty to let each dialect apply their convention }) } } @@ -221,8 +221,6 @@ Drop: func newDetector(got, want sqlschema.Schema, opts ...diffOption) *detector { cfg := &detectorConfig{ - FKNameFunc: defaultFKName, - DetectRenamedFKs: false, EqType: func(c1, c2 sqlschema.Column) bool { return c1.GetSQLType() == c2.GetSQLType() && c1.GetVarcharLen() == c2.GetVarcharLen() }, @@ -235,26 +233,12 @@ func newDetector(got, want sqlschema.Schema, opts ...diffOption) *detector { current: got, target: want, refMap: newRefMap(got.GetForeignKeys()), - fkNameFunc: cfg.FKNameFunc, - detectRenamedFKs: cfg.DetectRenamedFKs, eqType: cfg.EqType, } } type diffOption func(*detectorConfig) -func withFKNameFunc(f func(sqlschema.ForeignKey) string) diffOption { - return func(cfg *detectorConfig) { - // cfg.FKNameFunc = f - } -} - -func withDetectRenamedFKs(enabled bool) diffOption { - return func(cfg *detectorConfig) { - cfg.DetectRenamedFKs = enabled - } -} - func withTypeEquivalenceFunc(f TypeEquivalenceFunc) diffOption { return func(cfg *detectorConfig) { cfg.EqType = f @@ -263,8 +247,6 @@ func withTypeEquivalenceFunc(f TypeEquivalenceFunc) diffOption { // detectorConfig controls how differences in the model states are resolved. 
type detectorConfig struct { - FKNameFunc func(sqlschema.ForeignKey) string - DetectRenamedFKs bool EqType TypeEquivalenceFunc } @@ -279,17 +261,11 @@ type detector struct { changes changeset refMap refMap - // fkNameFunc builds the name for created/renamed FK contraints. - fkNameFunc func(sqlschema.ForeignKey) string - // eqType determines column type equivalence. // Default is direct comparison with '==' operator, which is inaccurate // due to the existence of dialect-specific type aliases. The caller // should pass a concrete InspectorDialect.EquuivalentType for robust comparison. eqType TypeEquivalenceFunc - - // detectRenemedFKs controls how FKs are treated when their references (table/column) are renamed. - detectRenamedFKs bool } // canRename checks if t1 can be renamed to t2. diff --git a/migrate/operations.go b/migrate/operations.go index 856a2ee20..e9bf6383b 100644 --- a/migrate/operations.go +++ b/migrate/operations.go @@ -123,32 +123,6 @@ func (op *DropColumnOp) DependsOn(another Operation) bool { return false } -// RenameForeignKeyOp. 
-type RenameForeignKeyOp struct { - FK sqlschema.ForeignKey - OldName string - NewName string -} - -var _ Operation = (*RenameForeignKeyOp)(nil) - -func (op *RenameForeignKeyOp) FQN() schema.FQN { - return op.FK.From.FQN -} - -// func (op *RenameForeignKeyOp) DependsOn(another Operation) bool { -// rt, ok := another.(*RenameTableOp) -// return ok && rt.FQN.Schema == op.FK.From.Schema && rt.NewName == op.FK.From.Table -// } - -func (op *RenameForeignKeyOp) GetReverse() Operation { - return &RenameForeignKeyOp{ - FK: op.FK, - OldName: op.OldName, - NewName: op.NewName, - } -} - type AddForeignKeyOp struct { ForeignKey sqlschema.ForeignKey ConstraintName string From da0d8e304001c4b90fe44e809a01374daba9b760 Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Fri, 8 Nov 2024 01:05:07 +0100 Subject: [PATCH 40/55] ci: delete unused function, appease lintr --- migrate/diff.go | 19 +++++-------------- 1 file changed, 5 insertions(+), 14 deletions(-) diff --git a/migrate/diff.go b/migrate/diff.go index bc945eaeb..0fd57a234 100644 --- a/migrate/diff.go +++ b/migrate/diff.go @@ -1,9 +1,6 @@ package migrate import ( - "fmt" - "strings" - "github.com/uptrace/bun/migrate/sqlschema" "github.com/uptrace/bun/schema" ) @@ -230,10 +227,10 @@ func newDetector(got, want sqlschema.Schema, opts ...diffOption) *detector { } return &detector{ - current: got, - target: want, - refMap: newRefMap(got.GetForeignKeys()), - eqType: cfg.EqType, + current: got, + target: want, + refMap: newRefMap(got.GetForeignKeys()), + eqType: cfg.EqType, } } @@ -247,7 +244,7 @@ func withTypeEquivalenceFunc(f TypeEquivalenceFunc) diffOption { // detectorConfig controls how differences in the model states are resolved. type detectorConfig struct { - EqType TypeEquivalenceFunc + EqType TypeEquivalenceFunc } // detector may modify the passed database schemas, so it isn't safe to re-use them. 
@@ -314,12 +311,6 @@ func (d *detector) mapNameToColumn(t sqlschema.Table) map[string]sqlschema.Colum return m } -// defaultFKName returns a name for the FK constraint in the format {tablename}_{columnname(s)}_fkey, following the Postgres convention. -func defaultFKName(fk sqlschema.ForeignKey) string { - columnnames := strings.Join(fk.From.Column.Split(), "_") - return fmt.Sprintf("%s_%s_fkey", fk.From.FQN.Table, columnnames) -} - type TypeEquivalenceFunc func(sqlschema.Column, sqlschema.Column) bool // equalSignatures determines if two tables have the same "signature". From c3320f624830dc2fe99af2c7cbe492b2a83f9e4a Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Fri, 8 Nov 2024 15:23:31 +0100 Subject: [PATCH 41/55] feat: create transactional migration files --- internal/dbtest/migrate_test.go | 31 ++++++++++++++++++++-------- migrate/auto.go | 36 ++++++++++++++++++++++++++------- 2 files changed, 52 insertions(+), 15 deletions(-) diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index bcfa6d4c9..52dd2c276 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -240,18 +240,31 @@ func TestAutoMigrator_CreateSQLMigrations(t *testing.T) { ctx := context.Background() m := newAutoMigratorOrSkip(t, db, migrate.WithModel((*NewTable)(nil))) - migrations, err := m.CreateSQLMigrations(ctx) - require.NoError(t, err, "should create migrations successfully") + t.Run("basic", func(t *testing.T) { + migrations, err := m.CreateSQLMigrations(ctx) + require.NoError(t, err, "should create migrations successfully") + + require.Len(t, migrations, 2, "expected up/down migration pair") + require.DirExists(t, migrationsDir) + checkMigrationFileContains(t, ".up.sql", "CREATE TABLE") + checkMigrationFileContains(t, ".down.sql", "DROP TABLE") + }) + + t.Run("transactional", func(t *testing.T) { + migrations, err := m.CreateTxSQLMigrations(ctx) + require.NoError(t, err, "should create migrations successfully") + + require.Len(t, 
migrations, 2, "expected up/down migration pair") + require.DirExists(t, migrationsDir) + checkMigrationFileContains(t, "tx.up.sql", "CREATE TABLE", "SET statement_timeout = 0") + checkMigrationFileContains(t, "tx.down.sql", "DROP TABLE", "SET statement_timeout = 0") + }) - require.Len(t, migrations, 2, "expected up/down migration pair") - require.DirExists(t, migrationsDir) - checkMigrationFileContains(t, ".up.sql", "CREATE TABLE") - checkMigrationFileContains(t, ".down.sql", "DROP TABLE") }) } // checkMigrationFileContains expected SQL snippet. -func checkMigrationFileContains(t *testing.T, fileSuffix string, content string) { +func checkMigrationFileContains(t *testing.T, fileSuffix string, snippets ...string) { t.Helper() files, err := os.ReadDir(migrationsDir) @@ -261,7 +274,9 @@ func checkMigrationFileContains(t *testing.T, fileSuffix string, content string) if strings.HasSuffix(f.Name(), fileSuffix) { b, err := os.ReadFile(filepath.Join(migrationsDir, f.Name())) require.NoError(t, err) - require.Containsf(t, string(b), content, "expected %s file to contain string", f.Name()) + for _, content := range snippets { + require.Containsf(t, string(b), content, "expected %s file to contain string", f.Name()) + } return } } diff --git a/migrate/auto.go b/migrate/auto.go index 44f595ba5..10ab3f2a7 100644 --- a/migrate/auto.go +++ b/migrate/auto.go @@ -25,6 +25,8 @@ func WithModel(models ...interface{}) AutoMigratorOption { } // WithExcludeTable tells the AutoMigrator to ignore a table in the database. +// This prevents AutoMigrator from dropping tables which may exist in the schema +// but which are not used by the application. func WithExcludeTable(tables ...string) AutoMigratorOption { return func(m *AutoMigrator) { m.excludeTables = append(m.excludeTables, tables...) @@ -55,6 +57,7 @@ func WithMarkAppliedOnSuccessAuto(enabled bool) AutoMigratorOption { } } +// WithMigrationsDirectoryAuto overrides the default directory for migration files. 
func WithMigrationsDirectoryAuto(directory string) AutoMigratorOption { return func(m *AutoMigrator) { m.migrationsOpts = append(m.migrationsOpts, WithMigrationsDirectory(directory)) @@ -146,9 +149,9 @@ func (am *AutoMigrator) plan(ctx context.Context) (*changeset, error) { // Migrate writes required changes to a new migration file and runs the migration. // This will create and entry in the migrations table, making it possible to revert -// the changes with Migrator.Rollback(). +// the changes with Migrator.Rollback(). MigrationOptions are passed on to Migrator.Migrate(). func (am *AutoMigrator) Migrate(ctx context.Context, opts ...MigrationOption) (*MigrationGroup, error) { - migrations, _, err := am.createSQLMigrations(ctx) + migrations, _, err := am.createSQLMigrations(ctx, false) if err != nil { return nil, fmt.Errorf("auto migrate: %w", err) } @@ -165,12 +168,21 @@ func (am *AutoMigrator) Migrate(ctx context.Context, opts ...MigrationOption) (* return group, nil } +// CreateSQLMigrations writes required changes to a new migration file. +// Use migrate.Migrator to apply the generated migrations. func (am *AutoMigrator) CreateSQLMigrations(ctx context.Context) ([]*MigrationFile, error) { - _, files, err := am.createSQLMigrations(ctx) + _, files, err := am.createSQLMigrations(ctx, false) return files, err } -func (am *AutoMigrator) createSQLMigrations(ctx context.Context) (*Migrations, []*MigrationFile, error) { +// CreateTxSQLMigrations writes required changes to a new migration file making sure they will be executed +// in a transaction when applied. Use migrate.Migrator to apply the generated migrations.
+func (am *AutoMigrator) CreateTxSQLMigrations(ctx context.Context) ([]*MigrationFile, error) { + _, files, err := am.createSQLMigrations(ctx, true) + return files, err +} + +func (am *AutoMigrator) createSQLMigrations(ctx context.Context, transactional bool) (*Migrations, []*MigrationFile, error) { changes, err := am.plan(ctx) if err != nil { return nil, nil, fmt.Errorf("create sql migrations: %w", err) } @@ -185,20 +197,30 @@ func (am *AutoMigrator) createSQLMigrations(ctx context.Context) (*Migrations, [ Comment: "Changes detected by bun.migrate.AutoMigrator", }) - up, err := am.createSQL(ctx, migrations, name+".up.sql", changes) + // Append .tx.up.sql or .up.sql to migration name, depending on whether it should be transactional. + fname := func(direction string) string { + return name + map[bool]string{true: ".tx.", false: "."}[transactional] + direction + ".sql" + } + + up, err := am.createSQL(ctx, migrations, fname("up"), changes, transactional) if err != nil { return nil, nil, fmt.Errorf("create sql migration up: %w", err) } - down, err := am.createSQL(ctx, migrations, name+".down.sql", changes.GetReverse()) + down, err := am.createSQL(ctx, migrations, fname("down"), changes.GetReverse(), transactional) if err != nil { return nil, nil, fmt.Errorf("create sql migration down: %w", err) } return migrations, []*MigrationFile{up, down}, nil } -func (am *AutoMigrator) createSQL(_ context.Context, migrations *Migrations, fname string, changes *changeset) (*MigrationFile, error) { +func (am *AutoMigrator) createSQL(_ context.Context, migrations *Migrations, fname string, changes *changeset, transactional bool) (*MigrationFile, error) { var buf bytes.Buffer + + if transactional { + buf.WriteString("SET statement_timeout = 0;") + } + if err := changes.WriteTo(&buf, am.dbMigrator); err != nil { return nil, err } From b3a9697d069e694d82fc523fc9b199c1d5c987b8 Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Fri, 8 Nov 2024 16:46:16 +0100 Subject: [PATCH 42/55] chore: add in-code
documentation --- internal/dbtest/inspect_test.go | 10 ++++---- migrate/auto.go | 32 +++++++++++++++++++++++- migrate/diff.go | 10 +++++++- migrate/operations.go | 43 ++++++++++++++++++++++++++++++--- migrate/sqlschema/inspector.go | 31 ++++++++++++++---------- migrate/sqlschema/schema.go | 3 +++ 6 files changed, 106 insertions(+), 23 deletions(-) diff --git a/internal/dbtest/inspect_test.go b/internal/dbtest/inspect_test.go index f8f5fbdd7..f63f0b037 100644 --- a/internal/dbtest/inspect_test.go +++ b/internal/dbtest/inspect_test.go @@ -406,7 +406,7 @@ func formatType(c sqlschema.ColumnDefinition) string { return fmt.Sprintf("%s(%d)", c.SQLType, c.VarcharLen) } -func TestSchemaInspector_Inspect(t *testing.T) { +func TestBunModelInspector_Inspect(t *testing.T) { testEachDialect(t, func(t *testing.T, dialectName string, dialect schema.Dialect) { if _, ok := dialect.(sqlschema.InspectorDialect); !ok { t.Skip(dialectName + " is not sqlschema.InspectorDialect") @@ -420,7 +420,7 @@ func TestSchemaInspector_Inspect(t *testing.T) { tables := schema.NewTables(dialect) tables.Register((*Model)(nil)) - inspector := sqlschema.NewSchemaInspector(tables) + inspector := sqlschema.NewBunModelInspector(tables) want := map[string]sqlschema.ColumnDefinition{ "id": { @@ -453,7 +453,7 @@ func TestSchemaInspector_Inspect(t *testing.T) { tables := schema.NewTables(dialect) tables.Register((*Model)(nil)) - inspector := sqlschema.NewSchemaInspector(tables) + inspector := sqlschema.NewBunModelInspector(tables) want := map[string]sqlschema.ColumnDefinition{ "id": { @@ -488,7 +488,7 @@ func TestSchemaInspector_Inspect(t *testing.T) { tables := schema.NewTables(dialect) tables.Register((*Model)(nil)) - inspector := sqlschema.NewSchemaInspector(tables) + inspector := sqlschema.NewBunModelInspector(tables) want := sqlschema.TableDefinition{ Name: "models", @@ -517,7 +517,7 @@ func TestSchemaInspector_Inspect(t *testing.T) { tables := schema.NewTables(dialect) tables.Register((*Model)(nil)) - 
inspector := sqlschema.NewSchemaInspector(tables) + inspector := sqlschema.NewBunModelInspector(tables) want := sqlschema.NewColumns("id", "email") got, err := inspector.Inspect(context.Background()) diff --git a/migrate/auto.go b/migrate/auto.go index 10ab3f2a7..95b0da2fa 100644 --- a/migrate/auto.go +++ b/migrate/auto.go @@ -64,6 +64,36 @@ func WithMigrationsDirectoryAuto(directory string) AutoMigratorOption { } } +// AutoMigrator performs automated schema migrations. +// +// It is designed to be a drop-in replacement for some Migrator functionality and supports all existing +// configuration options. +// Similarly to Migrator, it has methods to create SQL migrations, write them to a file, and apply them. +// Unlike Migrator, it detects the differences between the state defined by bun models and the current +// database schema automatically. +// +// Usage: +// 1. Generate migrations and apply them at once with AutoMigrator.Migrate(). +// 2. Create up- and down-SQL migration files and apply migrations using Migrator.Migrate(). +// +// While both methods produce complete, reversible migrations (with entries in the database +// and SQL migration files), prefer creating migrations and applying them separately for +// any non-trivial cases to ensure AutoMigrator detects expected changes correctly. +// +// Limitations: +// - AutoMigrator only supports a subset of the possible ALTER TABLE modifications. +// - Some changes are not automatically reversible. For example, you would need to manually +// add a CREATE TABLE query to the .down migration file to revert a DROP TABLE migration. +// - Does not validate most dialect-specific constraints. For example, when changing column +// data type, make sure the data can be auto-cast to the new type. +// - Due to how the schema-state diff is calculated, it is not possible to rename a table and +// modify any of its columns' _data type_ in a single run.
This will cause the AutoMigrator +// to drop and re-create the table under a different name; it is better to apply this change in 2 steps. +// Renaming a table and renaming its columns at the same time is possible. +// - Renaming table/column to an existing name, i.e. like this [A->B] [B->C], is not possible due to how +// AutoMigrator distinguishes "rename" and "unchanged" columns. +// +// Dialect must implement both sqlschema.Inspector and sqlschema.Migrator to be used with AutoMigrator. type AutoMigrator struct { db *bun.DB @@ -122,7 +152,7 @@ func NewAutoMigrator(db *bun.DB, opts ...AutoMigratorOption) (*AutoMigrator, err tables := schema.NewTables(db.Dialect()) tables.Register(am.includeModels...) - am.modelInspector = sqlschema.NewSchemaInspector(tables) + am.modelInspector = sqlschema.NewBunModelInspector(tables) return am, nil } diff --git a/migrate/diff.go b/migrate/diff.go index 0fd57a234..237a05000 100644 --- a/migrate/diff.go +++ b/migrate/diff.go @@ -58,7 +58,7 @@ RenameCreate: // If wantTable does not exist in the database and was not renamed // then we need to create this table in the database. - additional := wantTable.(sqlschema.ModelTable) + additional := wantTable.(sqlschema.BunTable) d.changes.Add(&CreateTableOp{ FQN: wantTable.GetFQN(), Model: additional.Model, @@ -379,6 +379,11 @@ func (s *signature) Equals(other signature) bool { return true } +// refMap is a utility for tracking superficial changes in foreign keys, +// which do not require any modification in the database. +// Modern SQL dialects automatically update foreign key constraints whenever +// a column or a table is renamed. Detector can use refMap to ignore any +// differences in foreign keys which were caused by renamed column/table.
type refMap map[*sqlschema.ForeignKey]string func newRefMap(fks map[sqlschema.ForeignKey]string) refMap { @@ -389,6 +394,7 @@ func newRefMap(fks map[sqlschema.ForeignKey]string) refMap { return rm } +// RenameTable updates table name in all foreign key definitions which depend on it. func (rm refMap) RenameTable(table schema.FQN, newName string) { for fk := range rm { switch table { @@ -400,6 +406,7 @@ func (rm refMap) RenameTable(table schema.FQN, newName string) { } } +// RenameColumn updates column name in all foreign key definitions which depend on it. func (rm refMap) RenameColumn(table schema.FQN, column, newName string) { for fk := range rm { if table == fk.From.FQN { @@ -411,6 +418,7 @@ func (rm refMap) RenameColumn(table schema.FQN, column, newName string) { } } +// Deref returns copies of ForeignKey values to a map. func (rm refMap) Deref() map[sqlschema.ForeignKey]string { out := make(map[sqlschema.ForeignKey]string) for fk, name := range rm { diff --git a/migrate/operations.go b/migrate/operations.go index e9bf6383b..41f5bd6ef 100644 --- a/migrate/operations.go +++ b/migrate/operations.go @@ -9,11 +9,28 @@ import ( // Operation encapsulates the request to change a database definition // and knowns which operation can revert it. +// +// It is useful to define "monolith" Operations whenever possible, +// even though a dialect may require several distinct steps to apply them. +// For example, changing a primary key involves first dropping the old constraint +// before generating the new one. Yet, this is only an implementation detail and +// passing a higher-level ChangePrimaryKeyOp will give the dialect more information +// about the applied change. +// +// Some operations might be irreversible due to technical limitations. Returning +// a *comment from GetReverse() will add an explanatory note to the generated migration file.
+// +// To declare dependency on another Operation, operations should implement +// { DependsOn(Operation) bool } interface, which Changeset will use to resolve dependencies. type Operation interface { GetReverse() Operation } -// CreateTableOp +// CreateTableOp creates a new table in the schema. +// +// It does not report dependency on any other migration and may be executed first. +// Make sure the dialect does not include FOREIGN KEY constraints in the CREATE TABLE +// statement, as those may potentially reference not-yet-existing columns/tables. type CreateTableOp struct { FQN schema.FQN Model interface{} @@ -25,6 +42,7 @@ func (op *CreateTableOp) GetReverse() Operation { return &DropTableOp{FQN: op.FQN} } +// DropTableOp drops a database table. This operation is not reversible. type DropTableOp struct { FQN schema.FQN } @@ -43,6 +61,7 @@ func (op *DropTableOp) GetReverse() Operation { return &c } +// RenameTableOp renames the table. Note that changing the "schema" part of the table's FQN is not allowed. type RenameTableOp struct { FQN schema.FQN NewName string @@ -57,7 +76,8 @@ func (op *RenameTableOp) GetReverse() Operation { } } -// RenameColumnOp. +// RenameColumnOp renames a column in the table. If the changeset includes a rename operation +// for the column's table, it should be executed first. type RenameColumnOp struct { FQN schema.FQN OldName string @@ -79,6 +99,7 @@ func (op *RenameColumnOp) DependsOn(another Operation) bool { return ok && op.FQN.Schema == rename.FQN.Schema && op.FQN.Table == rename.NewName } +// AddColumnOp adds a new column to the table. type AddColumnOp struct { FQN schema.FQN Column string @@ -95,6 +116,12 @@ func (op *AddColumnOp) GetReverse() Operation { } } +// DropColumnOp drops a column from the table. +// +// While some dialects allow DROP CASCADE to drop dependent constraints, +// explicit handling on constraints is preferred for transparency and debugging.
+// DropColumnOp depends on DropForeignKeyOp, DropPrimaryKeyOp, and ChangePrimaryKeyOp +// if any of the constraints is defined on this table. type DropColumnOp struct { FQN schema.FQN Column string @@ -123,6 +150,7 @@ func (op *DropColumnOp) DependsOn(another Operation) bool { return false } +// AddForeignKeyOp adds a new FOREIGN KEY constraint. type AddForeignKeyOp struct { ForeignKey sqlschema.ForeignKey ConstraintName string @@ -152,6 +180,7 @@ func (op *AddForeignKeyOp) GetReverse() Operation { } } +// DropForeignKeyOp drops a FOREIGN KEY constraint. type DropForeignKeyOp struct { ForeignKey sqlschema.ForeignKey ConstraintName string @@ -170,6 +199,7 @@ func (op *DropForeignKeyOp) GetReverse() Operation { } } +// AddUniqueConstraintOp adds a new UNIQUE constraint to the table. type AddUniqueConstraintOp struct { FQN schema.FQN Unique sqlschema.Unique @@ -199,6 +229,7 @@ func (op *AddUniqueConstraintOp) DependsOn(another Operation) bool { } +// DropUniqueConstraintOp drops a UNIQUE constraint. type DropUniqueConstraintOp struct { FQN schema.FQN Unique sqlschema.Unique @@ -220,7 +251,10 @@ func (op *DropUniqueConstraintOp) GetReverse() Operation { } } -// Change column type. +// ChangeColumnTypeOp sets a new data type for the column. +// The two types should be such that the data can be auto-cast from one to another. +// E.g. reducing VARCHAR length is not possible in most dialects. +// AutoMigrator does not enforce or validate these rules. type ChangeColumnTypeOp struct { FQN schema.FQN Column string @@ -239,6 +273,7 @@ func (op *ChangeColumnTypeOp) GetReverse() Operation { } } +// DropPrimaryKeyOp drops the table's PRIMARY KEY. type DropPrimaryKeyOp struct { FQN schema.FQN PrimaryKey sqlschema.PrimaryKey @@ -253,6 +288,7 @@ func (op *DropPrimaryKeyOp) GetReverse() Operation { } } +// AddPrimaryKeyOp adds a new PRIMARY KEY to the table.
type AddPrimaryKeyOp struct { FQN schema.FQN PrimaryKey sqlschema.PrimaryKey @@ -275,6 +311,7 @@ func (op *AddPrimaryKeyOp) DependsOn(another Operation) bool { return false } +// ChangePrimaryKeyOp changes the PRIMARY KEY of the table. type ChangePrimaryKeyOp struct { FQN schema.FQN Old sqlschema.PrimaryKey diff --git a/migrate/sqlschema/inspector.go b/migrate/sqlschema/inspector.go index 616aea922..bb05c5b75 100644 --- a/migrate/sqlschema/inspector.go +++ b/migrate/sqlschema/inspector.go @@ -20,10 +20,12 @@ type InspectorDialect interface { EquivalentType(Column, Column) bool } +// Inspector reads schema state. type Inspector interface { Inspect(ctx context.Context) (Schema, error) } +// Schema is an abstract collection of database objects. type Schema interface { GetTables() []Table GetForeignKeys() map[ForeignKey]string @@ -48,10 +50,12 @@ type Column interface { GetIsIdentity() bool } +// inspector is an opaque pointer to a database inspector. type inspector struct { Inspector } +// NewInspector creates a new database inspector, if the dialect supports it. func NewInspector(db *bun.DB, excludeTables ...string) (Inspector, error) { dialect, ok := (db.Dialect()).(InspectorDialect) if !ok { @@ -62,24 +66,25 @@ func NewInspector(db *bun.DB, excludeTables ...string) (Inspector, error) { }, nil } -// SchemaInspector creates the current project state from the passed bun.Models. -// Do not recycle SchemaInspector for different sets of models, as older models will not be de-registerred before the next run. -type SchemaInspector struct { +// BunModelInspector creates the current project state from the passed bun.Models. +// Do not recycle BunModelInspector for different sets of models, as older models will not be de-registered before the next run.
+type BunModelInspector struct { tables *schema.Tables } -var _ Inspector = (*SchemaInspector)(nil) +var _ Inspector = (*BunModelInspector)(nil) -func NewSchemaInspector(tables *schema.Tables) *SchemaInspector { - return &SchemaInspector{ +func NewBunModelInspector(tables *schema.Tables) *BunModelInspector { + return &BunModelInspector{ tables: tables, } } +// BunModelSchema is the schema state derived from bun table models. type BunModelSchema struct { DatabaseSchema - ModelTables map[schema.FQN]ModelTable + ModelTables map[schema.FQN]BunTable } func (ms BunModelSchema) GetTables() []Table { @@ -90,22 +95,22 @@ func (ms BunModelSchema) GetTables() []Table { return tables } -// ModelTable provides additional table metadata that is only accessible from scanning Go models. -type ModelTable struct { +// BunTable provides additional table metadata that is only accessible from scanning bun models. +type BunTable struct { TableDefinition // Model stores the zero interface to the underlying Go struct. 
Model interface{} } -func (si *SchemaInspector) Inspect(ctx context.Context) (Schema, error) { +func (bmi *BunModelInspector) Inspect(ctx context.Context) (Schema, error) { state := BunModelSchema{ DatabaseSchema: DatabaseSchema{ ForeignKeys: make(map[ForeignKey]string), }, - ModelTables: make(map[schema.FQN]ModelTable), + ModelTables: make(map[schema.FQN]BunTable), } - for _, t := range si.tables.All() { + for _, t := range bmi.tables.All() { columns := make(map[string]ColumnDefinition) for _, f := range t.Fields { @@ -153,7 +158,7 @@ func (si *SchemaInspector) Inspect(ctx context.Context) (Schema, error) { } fqn := schema.FQN{Schema: t.Schema, Table: t.Name} - state.ModelTables[fqn] = ModelTable{ + state.ModelTables[fqn] = BunTable{ TableDefinition: TableDefinition{ Schema: t.Schema, Name: t.Name, diff --git a/migrate/sqlschema/schema.go b/migrate/sqlschema/schema.go index c833568ea..f9c22c5d0 100644 --- a/migrate/sqlschema/schema.go +++ b/migrate/sqlschema/schema.go @@ -8,6 +8,9 @@ import ( "github.com/uptrace/bun/schema" ) +// DatabaseSchema provides a default implementation of the Schema interface. +// Dialects which support schema inspection may return it directly from Inspect() +// or embed it in their custom schema structs. 
type DatabaseSchema struct { TableDefinitions map[schema.FQN]TableDefinition ForeignKeys map[ForeignKey]string From 2e5b2fa3c203c09ae6af6ef7f5ebf6f4a6207b15 Mon Sep 17 00:00:00 2001 From: Vladimir Mihailenco Date: Sat, 9 Nov 2024 12:34:19 +0200 Subject: [PATCH 43/55] rename ColumnDefinition to BaseColumn and tweak interface --- dialect/pgdialect/alter_table.go | 14 ++++-------- dialect/pgdialect/inspector.go | 12 +++++----- dialect/pgdialect/sqltype_test.go | 20 ++++++++-------- internal/dbtest/inspect_test.go | 18 +++++++-------- internal/dbtest/migrate_test.go | 16 ++++++------- internal/dbtest/query_test.go | 38 +++++++++++++++---------------- migrate/diff.go | 18 +++++++-------- migrate/sqlschema/inspector.go | 6 +++-- migrate/sqlschema/schema.go | 29 +++++++++++------------ 9 files changed, 84 insertions(+), 87 deletions(-) diff --git a/dialect/pgdialect/alter_table.go b/dialect/pgdialect/alter_table.go index a73be5c13..0d9f45f28 100644 --- a/dialect/pgdialect/alter_table.go +++ b/dialect/pgdialect/alter_table.go @@ -91,11 +91,10 @@ func (m *migrator) addColumn(fmter schema.Formatter, b []byte, add *migrate.AddC b = fmter.AppendName(b, add.Column) b = append(b, " "...) - colDef, ok := add.ColDef.(sqlschema.ColumnDefinition) - if !ok { - return nil, fmt.Errorf("column %q does not implement sqlschema.ColumnDefinition, got %T", add.ColDef.GetName(), add.ColDef) + b, err = add.ColDef.AppendQuery(fmter, b) + if err != nil { + return nil, err } - b, _ = colDef.AppendQuery(fmter, b) if add.ColDef.GetDefaultValue() != "" { b = append(b, " DEFAULT "...) 
@@ -201,14 +200,9 @@ func (m *migrator) changeColumnType(fmter schema.Formatter, b []byte, colDef *mi inspector := m.db.Dialect().(sqlschema.InspectorDialect) if !inspector.EquivalentType(want, got) { - colDef, ok := want.(sqlschema.ColumnDefinition) - if !ok { - return nil, fmt.Errorf("column %q does not implement sqlschema.ColumnDefinition, got %T", want.GetName(), want) - } - appendAlterColumn() b = append(b, " SET DATA TYPE "...) - if b, err = colDef.AppendQuery(fmter, b); err != nil { + if b, err = want.AppendQuery(fmter, b); err != nil { return b, err } } diff --git a/dialect/pgdialect/inspector.go b/dialect/pgdialect/inspector.go index 07665df21..5fe847ee9 100644 --- a/dialect/pgdialect/inspector.go +++ b/dialect/pgdialect/inspector.go @@ -12,7 +12,7 @@ import ( type ( Schema = sqlschema.DatabaseSchema Table = sqlschema.TableDefinition - Column = sqlschema.ColumnDefinition + Column = sqlschema.BaseColumn ) func (d *Dialect) Inspector(db *bun.DB, excludeTables ...string) sqlschema.Inspector { @@ -59,7 +59,7 @@ func (in *Inspector) Inspect(ctx context.Context) (sqlschema.Schema, error) { return dbSchema, err } - colDefs := make(map[string]Column) + colDefs := make(map[string]*Column) uniqueGroups := make(map[string][]string) for _, c := range columns { @@ -70,7 +70,7 @@ func (in *Inspector) Inspect(ctx context.Context) (sqlschema.Schema, error) { def = strings.ToLower(def) } - colDefs[c.Name] = Column{ + colDefs[c.Name] = &Column{ Name: c.Name, SQLType: c.DataType, VarcharLen: c.VarcharLen, @@ -173,7 +173,7 @@ SELECT FROM information_schema.tables "t" LEFT JOIN ( SELECT i.indrelid, "idx".relname AS "name", ARRAY_AGG("a".attname) AS "columns" - FROM pg_index i + FROM pg_index i JOIN pg_attribute "a" ON "a".attrelid = i.indrelid AND "a".attnum = ANY("i".indkey) @@ -235,8 +235,8 @@ FROM ( "c".attidentity AS identity_type, ARRAY_AGG(con.conname) FILTER (WHERE con.contype = 'u') AS "unique_groups", ARRAY_AGG(con.contype) AS "constraint_type" - FROM ( - SELECT + 
FROM ( + SELECT conname, contype, connamespace, diff --git a/dialect/pgdialect/sqltype_test.go b/dialect/pgdialect/sqltype_test.go index 6d634a19e..4f707a7e9 100644 --- a/dialect/pgdialect/sqltype_test.go +++ b/dialect/pgdialect/sqltype_test.go @@ -42,8 +42,8 @@ func TestInspectorDialect_EquivalentType(t *testing.T) { } t.Run(tt.typ1+eq+tt.typ2, func(t *testing.T) { got := d.EquivalentType( - sqlschema.ColumnDefinition{SQLType: tt.typ1}, - sqlschema.ColumnDefinition{SQLType: tt.typ2}, + &sqlschema.BaseColumn{SQLType: tt.typ1}, + &sqlschema.BaseColumn{SQLType: tt.typ2}, ) require.Equal(t, tt.want, got) }) @@ -54,30 +54,30 @@ func TestInspectorDialect_EquivalentType(t *testing.T) { t.Run("custom varchar length", func(t *testing.T) { for _, tt := range []struct { name string - col1, col2 sqlschema.ColumnDefinition + col1, col2 sqlschema.BaseColumn want bool }{ { name: "varchars of different length are not equivalent", - col1: sqlschema.ColumnDefinition{SQLType: "varchar", VarcharLen: 10}, - col2: sqlschema.ColumnDefinition{SQLType: "varchar"}, + col1: sqlschema.BaseColumn{SQLType: "varchar", VarcharLen: 10}, + col2: sqlschema.BaseColumn{SQLType: "varchar"}, want: false, }, { name: "varchar with no explicit length is equivalent to varchar of default length", - col1: sqlschema.ColumnDefinition{SQLType: "varchar", VarcharLen: d.DefaultVarcharLen()}, - col2: sqlschema.ColumnDefinition{SQLType: "varchar"}, + col1: sqlschema.BaseColumn{SQLType: "varchar", VarcharLen: d.DefaultVarcharLen()}, + col2: sqlschema.BaseColumn{SQLType: "varchar"}, want: true, }, { name: "characters with equal custom length", - col1: sqlschema.ColumnDefinition{SQLType: "character varying", VarcharLen: 200}, - col2: sqlschema.ColumnDefinition{SQLType: "varchar", VarcharLen: 200}, + col1: sqlschema.BaseColumn{SQLType: "character varying", VarcharLen: 200}, + col2: sqlschema.BaseColumn{SQLType: "varchar", VarcharLen: 200}, want: true, }, } { t.Run(tt.name, func(t *testing.T) { - got := 
d.EquivalentType(tt.col1, tt.col2) + got := d.EquivalentType(&tt.col1, &tt.col2) require.Equal(t, tt.want, got) }) } diff --git a/internal/dbtest/inspect_test.go b/internal/dbtest/inspect_test.go index f63f0b037..d74e1b012 100644 --- a/internal/dbtest/inspect_test.go +++ b/internal/dbtest/inspect_test.go @@ -97,7 +97,7 @@ func TestDatabaseInspector_Inspect(t *testing.T) { {Schema: "admin", Table: "offices"}: { Schema: "admin", Name: "offices", - ColumnDefinitions: map[string]sqlschema.ColumnDefinition{ + ColumnDefinitions: map[string]sqlschema.BaseColumn{ "office_name": { SQLType: sqltype.VarChar, }, @@ -115,7 +115,7 @@ func TestDatabaseInspector_Inspect(t *testing.T) { {Schema: defaultSchema, Table: "articles"}: { Schema: defaultSchema, Name: "articles", - ColumnDefinitions: map[string]sqlschema.ColumnDefinition{ + ColumnDefinitions: map[string]sqlschema.BaseColumn{ "isbn": { SQLType: "bigint", IsNullable: false, @@ -174,7 +174,7 @@ func TestDatabaseInspector_Inspect(t *testing.T) { {Schema: defaultSchema, Table: "authors"}: { Schema: defaultSchema, Name: "authors", - ColumnDefinitions: map[string]sqlschema.ColumnDefinition{ + ColumnDefinitions: map[string]sqlschema.BaseColumn{ "author_id": { SQLType: "bigint", IsIdentity: true, @@ -198,7 +198,7 @@ func TestDatabaseInspector_Inspect(t *testing.T) { {Schema: defaultSchema, Table: "publisher_to_journalists"}: { Schema: defaultSchema, Name: "publisher_to_journalists", - ColumnDefinitions: map[string]sqlschema.ColumnDefinition{ + ColumnDefinitions: map[string]sqlschema.BaseColumn{ "publisher_id": { SQLType: sqltype.VarChar, }, @@ -211,7 +211,7 @@ func TestDatabaseInspector_Inspect(t *testing.T) { {Schema: defaultSchema, Table: "publishers"}: { Schema: defaultSchema, Name: "publishers", - ColumnDefinitions: map[string]sqlschema.ColumnDefinition{ + ColumnDefinitions: map[string]sqlschema.BaseColumn{ "publisher_id": { SQLType: sqltype.VarChar, DefaultValue: "gen_random_uuid()", @@ -315,7 +315,7 @@ func cmpTables(tb 
testing.TB, d sqlschema.InspectorDialect, want, got map[schema } // cmpColumns compares that column definitions on the tables are -func cmpColumns(tb testing.TB, d sqlschema.InspectorDialect, tableName string, want, got map[string]sqlschema.ColumnDefinition) { +func cmpColumns(tb testing.TB, d sqlschema.InspectorDialect, tableName string, want, got map[string]sqlschema.BaseColumn) { tb.Helper() var errs []string @@ -399,7 +399,7 @@ func tableNames(tables map[schema.FQN]sqlschema.TableDefinition) (names []string return } -func formatType(c sqlschema.ColumnDefinition) string { +func formatType(c sqlschema.BaseColumn) string { if c.VarcharLen == 0 { return c.SQLType } @@ -422,7 +422,7 @@ func TestBunModelInspector_Inspect(t *testing.T) { tables.Register((*Model)(nil)) inspector := sqlschema.NewBunModelInspector(tables) - want := map[string]sqlschema.ColumnDefinition{ + want := map[string]sqlschema.BaseColumn{ "id": { SQLType: sqltype.VarChar, DefaultValue: "random()", @@ -455,7 +455,7 @@ func TestBunModelInspector_Inspect(t *testing.T) { tables.Register((*Model)(nil)) inspector := sqlschema.NewBunModelInspector(tables) - want := map[string]sqlschema.ColumnDefinition{ + want := map[string]sqlschema.BaseColumn{ "id": { SQLType: "text", }, diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index 52dd2c276..9cf87a72b 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -572,7 +572,7 @@ func testChangeColumnType_AutoCast(t *testing.T, db *bun.DB) { {Schema: db.Dialect().DefaultSchema(), Table: "change_me_own_type"}: { Schema: db.Dialect().DefaultSchema(), Name: "change_me_own_type", - ColumnDefinitions: map[string]sqlschema.ColumnDefinition{ + ColumnDefinitions: map[string]sqlschema.BaseColumn{ "bigger_int": { SQLType: "bigint", IsIdentity: true, @@ -639,7 +639,7 @@ func testIdentity(t *testing.T, db *bun.DB) { {Schema: db.Dialect().DefaultSchema(), Table: "bourne_identity"}: { Schema: 
db.Dialect().DefaultSchema(), Name: "bourne_identity", - ColumnDefinitions: map[string]sqlschema.ColumnDefinition{ + ColumnDefinitions: map[string]sqlschema.BaseColumn{ "a": { SQLType: sqltype.BigInt, IsIdentity: false, // <- drop IDENTITY @@ -682,7 +682,7 @@ func testAddDropColumn(t *testing.T, db *bun.DB) { {Schema: db.Dialect().DefaultSchema(), Table: "column_madness"}: { Schema: db.Dialect().DefaultSchema(), Name: "column_madness", - ColumnDefinitions: map[string]sqlschema.ColumnDefinition{ + ColumnDefinitions: map[string]sqlschema.BaseColumn{ "do_not_touch": { SQLType: sqltype.VarChar, IsNullable: true, @@ -735,7 +735,7 @@ func testUnique(t *testing.T, db *bun.DB) { {Schema: db.Dialect().DefaultSchema(), Table: "uniqlo_stores"}: { Schema: db.Dialect().DefaultSchema(), Name: "uniqlo_stores", - ColumnDefinitions: map[string]sqlschema.ColumnDefinition{ + ColumnDefinitions: map[string]sqlschema.BaseColumn{ "first_name": { SQLType: sqltype.VarChar, IsNullable: true, @@ -813,7 +813,7 @@ func testUniqueRenamedTable(t *testing.T, db *bun.DB) { {Schema: db.Dialect().DefaultSchema(), Table: "after"}: { Schema: db.Dialect().DefaultSchema(), Name: "after", - ColumnDefinitions: map[string]sqlschema.ColumnDefinition{ + ColumnDefinitions: map[string]sqlschema.BaseColumn{ "first_name": { SQLType: sqltype.VarChar, IsNullable: true, @@ -908,7 +908,7 @@ func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { {Schema: db.Dialect().DefaultSchema(), Table: "drop_your_pks"}: { Schema: db.Dialect().DefaultSchema(), Name: "drop_your_pks", - ColumnDefinitions: map[string]sqlschema.ColumnDefinition{ + ColumnDefinitions: map[string]sqlschema.BaseColumn{ "first_name": { SQLType: sqltype.VarChar, IsNullable: false, @@ -922,7 +922,7 @@ func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { {Schema: db.Dialect().DefaultSchema(), Table: "add_new_pk"}: { Schema: db.Dialect().DefaultSchema(), Name: "add_new_pk", - ColumnDefinitions: map[string]sqlschema.ColumnDefinition{ + ColumnDefinitions: 
map[string]sqlschema.BaseColumn{ "new_id": { SQLType: sqltype.BigInt, IsNullable: false, @@ -942,7 +942,7 @@ func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { {Schema: db.Dialect().DefaultSchema(), Table: "change_pk"}: { Schema: db.Dialect().DefaultSchema(), Name: "change_pk", - ColumnDefinitions: map[string]sqlschema.ColumnDefinition{ + ColumnDefinitions: map[string]sqlschema.BaseColumn{ "first_name": { SQLType: sqltype.VarChar, IsNullable: false, diff --git a/internal/dbtest/query_test.go b/internal/dbtest/query_test.go index 541c0d7c4..1d074f7ef 100644 --- a/internal/dbtest/query_test.go +++ b/internal/dbtest/query_test.go @@ -1618,7 +1618,7 @@ func TestAlterTable(t *testing.T) { {name: "add column with default value", operation: &migrate.AddColumnOp{ FQN: fqn, Column: "language", - ColDef: sqlschema.ColumnDefinition{ + ColDef: sqlschema.BaseColumn{ SQLType: "varchar", VarcharLen: 20, IsNullable: false, @@ -1628,7 +1628,7 @@ func TestAlterTable(t *testing.T) { {name: "add column with identity", operation: &migrate.AddColumnOp{ FQN: fqn, Column: "n", - ColDef: sqlschema.ColumnDefinition{ + ColDef: sqlschema.BaseColumn{ SQLType: sqltype.BigInt, IsNullable: false, IsIdentity: true, @@ -1637,7 +1637,7 @@ func TestAlterTable(t *testing.T) { {name: "drop column", operation: &migrate.DropColumnOp{ FQN: fqn, Column: "director", - ColDef: sqlschema.ColumnDefinition{ + ColDef: sqlschema.BaseColumn{ SQLType: sqltype.VarChar, IsNullable: false, }, @@ -1659,50 +1659,50 @@ func TestAlterTable(t *testing.T) { {name: "change column type int to bigint", operation: &migrate.ChangeColumnTypeOp{ FQN: fqn, Column: "budget", - From: sqlschema.ColumnDefinition{SQLType: sqltype.Integer}, - To: sqlschema.ColumnDefinition{SQLType: sqltype.BigInt}, + From: sqlschema.BaseColumn{SQLType: sqltype.Integer}, + To: sqlschema.BaseColumn{SQLType: sqltype.BigInt}, }}, {name: "add default", operation: &migrate.ChangeColumnTypeOp{ FQN: fqn, Column: "budget", - From: 
sqlschema.ColumnDefinition{DefaultValue: ""}, - To: sqlschema.ColumnDefinition{DefaultValue: "100"}, + From: sqlschema.BaseColumn{DefaultValue: ""}, + To: sqlschema.BaseColumn{DefaultValue: "100"}, }}, {name: "drop default", operation: &migrate.ChangeColumnTypeOp{ FQN: fqn, Column: "budget", - From: sqlschema.ColumnDefinition{DefaultValue: "100"}, - To: sqlschema.ColumnDefinition{DefaultValue: ""}, + From: sqlschema.BaseColumn{DefaultValue: "100"}, + To: sqlschema.BaseColumn{DefaultValue: ""}, }}, {name: "make nullable", operation: &migrate.ChangeColumnTypeOp{ FQN: fqn, Column: "director", - From: sqlschema.ColumnDefinition{IsNullable: false}, - To: sqlschema.ColumnDefinition{IsNullable: true}, + From: sqlschema.BaseColumn{IsNullable: false}, + To: sqlschema.BaseColumn{IsNullable: true}, }}, {name: "add notnull", operation: &migrate.ChangeColumnTypeOp{ FQN: fqn, Column: "budget", - From: sqlschema.ColumnDefinition{IsNullable: true}, - To: sqlschema.ColumnDefinition{IsNullable: false}, + From: sqlschema.BaseColumn{IsNullable: true}, + To: sqlschema.BaseColumn{IsNullable: false}, }}, {name: "increase varchar length", operation: &migrate.ChangeColumnTypeOp{ FQN: fqn, Column: "language", - From: sqlschema.ColumnDefinition{SQLType: "varchar", VarcharLen: 20}, - To: sqlschema.ColumnDefinition{SQLType: "varchar", VarcharLen: 255}, + From: sqlschema.BaseColumn{SQLType: "varchar", VarcharLen: 20}, + To: sqlschema.BaseColumn{SQLType: "varchar", VarcharLen: 255}, }}, {name: "add identity", operation: &migrate.ChangeColumnTypeOp{ FQN: fqn, Column: "id", - From: sqlschema.ColumnDefinition{IsIdentity: false}, - To: sqlschema.ColumnDefinition{IsIdentity: true}, + From: sqlschema.BaseColumn{IsIdentity: false}, + To: sqlschema.BaseColumn{IsIdentity: true}, }}, {name: "drop identity", operation: &migrate.ChangeColumnTypeOp{ FQN: fqn, Column: "id", - From: sqlschema.ColumnDefinition{IsIdentity: true}, - To: sqlschema.ColumnDefinition{IsIdentity: false}, + From: 
sqlschema.BaseColumn{IsIdentity: true}, + To: sqlschema.BaseColumn{IsIdentity: false}, }}, {name: "add primary key", operation: &migrate.AddPrimaryKeyOp{ FQN: fqn, diff --git a/migrate/diff.go b/migrate/diff.go index 237a05000..4d6a177c6 100644 --- a/migrate/diff.go +++ b/migrate/diff.go @@ -281,7 +281,7 @@ func (d detector) equalColumns(col1, col2 sqlschema.Column) bool { func (d detector) makeTargetColDef(current, target sqlschema.Column) sqlschema.Column { // Avoid unneccessary type-change migrations if the types are equivalent. if d.eqType(current, target) { - target = sqlschema.ColumnDefinition{ + target = &sqlschema.BaseColumn{ Name: target.GetName(), DefaultValue: target.GetDefaultValue(), IsNullable: target.GetIsNullable(), @@ -326,14 +326,14 @@ type signature struct { // underlying stores the number of occurences for each unique column type. // It helps to account for the fact that a table might have multiple columns that have the same type. - underlying map[sqlschema.ColumnDefinition]int + underlying map[sqlschema.BaseColumn]int eq TypeEquivalenceFunc } func newSignature(t sqlschema.Table, eq TypeEquivalenceFunc) signature { s := signature{ - underlying: make(map[sqlschema.ColumnDefinition]int), + underlying: make(map[sqlschema.BaseColumn]int), eq: eq, } s.scan(t) @@ -343,12 +343,12 @@ func newSignature(t sqlschema.Table, eq TypeEquivalenceFunc) signature { // scan iterates over table's field and counts occurrences of each unique column definition. func (s *signature) scan(t sqlschema.Table) { for _, icol := range t.GetColumns() { - scanCol := icol.(sqlschema.ColumnDefinition) + scanCol := icol.(*sqlschema.BaseColumn) // This is slightly more expensive than if the columns could be compared directly // and we always did s.underlying[col]++, but we get type-equivalence in return. 
- col, count := s.getCount(scanCol) + col, count := s.getCount(*scanCol) if count == 0 { - s.underlying[scanCol] = 1 + s.underlying[*scanCol] = 1 } else { s.underlying[col]++ } @@ -357,9 +357,9 @@ func (s *signature) scan(t sqlschema.Table) { // getCount uses TypeEquivalenceFunc to find a column with the same (equivalent) SQL type // and returns its count. Count 0 means there are no columns with of this type. -func (s *signature) getCount(keyCol sqlschema.ColumnDefinition) (key sqlschema.ColumnDefinition, count int) { +func (s *signature) getCount(keyCol sqlschema.BaseColumn) (key sqlschema.BaseColumn, count int) { for col, cnt := range s.underlying { - if s.eq(col, keyCol) { + if s.eq(&col, &keyCol) { return col, cnt } } @@ -418,7 +418,7 @@ func (rm refMap) RenameColumn(table schema.FQN, column, newName string) { } } -// Deref returns copies of ForeignKey values to a map. +// Deref returns copies of ForeignKey values to a map. func (rm refMap) Deref() map[sqlschema.ForeignKey]string { out := make(map[sqlschema.ForeignKey]string) for fk, name := range rm { diff --git a/migrate/sqlschema/inspector.go b/migrate/sqlschema/inspector.go index bb05c5b75..6cb9d3215 100644 --- a/migrate/sqlschema/inspector.go +++ b/migrate/sqlschema/inspector.go @@ -28,6 +28,7 @@ type Inspector interface { // Schema is an abstract collection of database objects. type Schema interface { GetTables() []Table + // TODO: this probably should be a list so we have keys order and stable query generation GetForeignKeys() map[ForeignKey]string } @@ -48,6 +49,7 @@ type Column interface { GetIsNullable() bool GetIsAutoIncrement() bool GetIsIdentity() bool + AppendQuery(schema.Formatter, []byte) ([]byte, error) } // inspector is opaque pointer to a databse inspector. 
@@ -111,14 +113,14 @@ func (bmi *BunModelInspector) Inspect(ctx context.Context) (Schema, error) { ModelTables: make(map[schema.FQN]BunTable), } for _, t := range bmi.tables.All() { - columns := make(map[string]ColumnDefinition) + columns := make(map[string]*BaseColumn) for _, f := range t.Fields { sqlType, length, err := parseLen(f.CreateTableSQLType) if err != nil { return state, fmt.Errorf("parse length in %q: %w", f.CreateTableSQLType, err) } - columns[f.Name] = ColumnDefinition{ + columns[f.Name] = &BaseColumn{ Name: f.Name, SQLType: strings.ToLower(sqlType), // TODO(dyma): maybe this is not necessary after Column.Eq() VarcharLen: length, diff --git a/migrate/sqlschema/schema.go b/migrate/sqlschema/schema.go index f9c22c5d0..166c0ecf8 100644 --- a/migrate/sqlschema/schema.go +++ b/migrate/sqlschema/schema.go @@ -23,7 +23,7 @@ type TableDefinition struct { Name string // ColumnDefinitions map each column name to the column definition. - ColumnDefinitions map[string]ColumnDefinition + ColumnDefinitions map[string]*BaseColumn // PrimaryKey holds the primary key definition. // A nil value means that no primary key is defined for the table. @@ -35,8 +35,8 @@ type TableDefinition struct { var _ Table = (*TableDefinition)(nil) -// ColumnDefinition stores attributes of a database column. -type ColumnDefinition struct { +// BaseColumn stores attributes of a database column. +type BaseColumn struct { Name string SQLType string VarcharLen int @@ -47,38 +47,38 @@ type ColumnDefinition struct { // TODO: add Precision and Cardinality for timestamps/bit-strings/floats and arrays respectively. 
} -var _ Column = (*ColumnDefinition)(nil) +var _ Column = (*BaseColumn)(nil) -func (cd ColumnDefinition) GetName() string { +func (cd BaseColumn) GetName() string { return cd.Name } -func (cd ColumnDefinition) GetSQLType() string { +func (cd BaseColumn) GetSQLType() string { return cd.SQLType } -func (cd ColumnDefinition) GetVarcharLen() int { +func (cd BaseColumn) GetVarcharLen() int { return cd.VarcharLen } -func (cd ColumnDefinition) GetDefaultValue() string { +func (cd BaseColumn) GetDefaultValue() string { return cd.DefaultValue } -func (cd ColumnDefinition) GetIsNullable() bool { +func (cd BaseColumn) GetIsNullable() bool { return cd.IsNullable } -func (cd ColumnDefinition) GetIsAutoIncrement() bool { +func (cd BaseColumn) GetIsAutoIncrement() bool { return cd.IsAutoIncrement } -func (cd ColumnDefinition) GetIsIdentity() bool { +func (cd BaseColumn) GetIsIdentity() bool { return cd.IsIdentity } // AppendQuery appends full SQL data type. -func (c *ColumnDefinition) AppendQuery(fmter schema.Formatter, b []byte) (_ []byte, err error) { +func (c *BaseColumn) AppendQuery(fmter schema.Formatter, b []byte) (_ []byte, err error) { b = append(b, c.SQLType...) 
if c.VarcharLen == 0 { return b, nil @@ -209,8 +209,9 @@ func (td TableDefinition) GetName() string { } func (td TableDefinition) GetColumns() []Column { var columns []Column - for i := range td.ColumnDefinitions { - columns = append(columns, td.ColumnDefinitions[i]) + // FIXME: columns will be returned in a random order + for colName := range td.ColumnDefinitions { + columns = append(columns, td.ColumnDefinitions[colName]) } return columns } From acd256068b2ae1d02605469016765eae4f14b00b Mon Sep 17 00:00:00 2001 From: Vladimir Mihailenco Date: Sat, 9 Nov 2024 12:39:45 +0200 Subject: [PATCH 44/55] rename TableDefinition to BaseTable --- dialect/pgdialect/inspector.go | 8 +-- internal/dbtest/inspect_test.go | 16 +++--- internal/dbtest/migrate_test.go | 34 ++++++------ migrate/sqlschema/column.go | 72 ++++++++++++++++++++++++ migrate/sqlschema/inspector.go | 24 +------- migrate/sqlschema/schema.go | 98 ++++----------------------------- migrate/sqlschema/table.go | 35 ++++++++++++ 7 files changed, 148 insertions(+), 139 deletions(-) create mode 100644 migrate/sqlschema/column.go create mode 100644 migrate/sqlschema/table.go diff --git a/dialect/pgdialect/inspector.go b/dialect/pgdialect/inspector.go index 5fe847ee9..b955d1b67 100644 --- a/dialect/pgdialect/inspector.go +++ b/dialect/pgdialect/inspector.go @@ -11,7 +11,7 @@ import ( type ( Schema = sqlschema.DatabaseSchema - Table = sqlschema.TableDefinition + Table = sqlschema.BaseTable Column = sqlschema.BaseColumn ) @@ -32,8 +32,8 @@ func newInspector(db *bun.DB, excludeTables ...string) *Inspector { func (in *Inspector) Inspect(ctx context.Context) (sqlschema.Schema, error) { dbSchema := Schema{ - TableDefinitions: make(map[schema.FQN]Table), - ForeignKeys: make(map[sqlschema.ForeignKey]string), + BaseTables: make(map[schema.FQN]Table), + ForeignKeys: make(map[sqlschema.ForeignKey]string), } exclude := in.excludeTables @@ -102,7 +102,7 @@ func (in *Inspector) Inspect(ctx context.Context) (sqlschema.Schema, error) { 
} fqn := schema.FQN{Schema: table.Schema, Table: table.Name} - dbSchema.TableDefinitions[fqn] = Table{ + dbSchema.BaseTables[fqn] = Table{ Schema: table.Schema, Name: table.Name, ColumnDefinitions: colDefs, diff --git a/internal/dbtest/inspect_test.go b/internal/dbtest/inspect_test.go index d74e1b012..f9be48d82 100644 --- a/internal/dbtest/inspect_test.go +++ b/internal/dbtest/inspect_test.go @@ -93,7 +93,7 @@ func TestDatabaseInspector_Inspect(t *testing.T) { defaultSchema := db.Dialect().DefaultSchema() // Tables come sorted alphabetically by schema and table. - wantTables := map[schema.FQN]sqlschema.TableDefinition{ + wantTables := map[schema.FQN]sqlschema.BaseTable{ {Schema: "admin", Table: "offices"}: { Schema: "admin", Name: "offices", @@ -260,7 +260,7 @@ func TestDatabaseInspector_Inspect(t *testing.T) { // State.FKs store their database names, which differ from dialect to dialect. // Because of that we compare FKs and Tables separately. - gotTables := got.(sqlschema.DatabaseSchema).TableDefinitions + gotTables := got.(sqlschema.DatabaseSchema).BaseTables cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, gotTables) var fks []sqlschema.ForeignKey @@ -293,7 +293,7 @@ func mustCreateSchema(tb testing.TB, ctx context.Context, db *bun.DB, schema str // cmpTables compares table schemas using dialect-specific equivalence checks for column types // and reports the differences as t.Error(). -func cmpTables(tb testing.TB, d sqlschema.InspectorDialect, want, got map[schema.FQN]sqlschema.TableDefinition) { +func cmpTables(tb testing.TB, d sqlschema.InspectorDialect, want, got map[schema.FQN]sqlschema.BaseTable) { tb.Helper() require.ElementsMatch(tb, tableNames(want), tableNames(got), "different set of tables") @@ -301,7 +301,7 @@ func cmpTables(tb testing.TB, d sqlschema.InspectorDialect, want, got map[schema // Now we are guaranteed to have the same tables. 
for _, wantTable := range want { // TODO(dyma): this will be simplified by map[string]Table - var gt sqlschema.TableDefinition + var gt sqlschema.BaseTable for i := range got { if got[i].Name == wantTable.Name { gt = got[i] @@ -372,7 +372,7 @@ func cmpColumns(tb testing.TB, d sqlschema.InspectorDialect, tableName string, w } // cmpConstraints compares constraints defined on the table with the expected ones. -func cmpConstraints(tb testing.TB, want, got sqlschema.TableDefinition) { +func cmpConstraints(tb testing.TB, want, got sqlschema.BaseTable) { tb.Helper() if want.PrimaryKey != nil { @@ -392,7 +392,7 @@ func cmpConstraints(tb testing.TB, want, got sqlschema.TableDefinition) { require.ElementsMatch(tb, stripNames(want.UniqueConstraints), stripNames(got.UniqueConstraints), "table %q does not have expected unique constraints (listA=want, listB=got)", want.Name) } -func tableNames(tables map[schema.FQN]sqlschema.TableDefinition) (names []string) { +func tableNames(tables map[schema.FQN]sqlschema.BaseTable) (names []string) { for fqn := range tables { names = append(names, fqn.Table) } @@ -490,7 +490,7 @@ func TestBunModelInspector_Inspect(t *testing.T) { tables.Register((*Model)(nil)) inspector := sqlschema.NewBunModelInspector(tables) - want := sqlschema.TableDefinition{ + want := sqlschema.BaseTable{ Name: "models", UniqueConstraints: []sqlschema.Unique{ {Columns: sqlschema.NewColumns("id")}, @@ -504,7 +504,7 @@ func TestBunModelInspector_Inspect(t *testing.T) { gotTables := got.(sqlschema.BunModelSchema).ModelTables require.Len(t, gotTables, 1) for _, table := range gotTables { - cmpConstraints(t, want, table.TableDefinition) + cmpConstraints(t, want, table.BaseTable) return } }) diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index 9cf87a72b..592178d5e 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -369,7 +369,7 @@ func testRenameTable(t *testing.T, db *bun.DB) { // Assert state := 
inspect(ctx) - tables := state.TableDefinitions + tables := state.BaseTables require.Len(t, tables, 1) require.Contains(t, tables, schema.FQN{Schema: db.Dialect().DefaultSchema(), Table: "changed"}) } @@ -398,7 +398,7 @@ func testCreateDropTable(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - tables := state.TableDefinitions + tables := state.BaseTables require.Len(t, tables, 1) require.Contains(t, tables, schema.FQN{Schema: db.Dialect().DefaultSchema(), Table: "createme"}) } @@ -524,10 +524,10 @@ func testRenamedColumns(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - require.Len(t, state.TableDefinitions, 2) + require.Len(t, state.BaseTables, 2) - var renamed, model2 sqlschema.TableDefinition - for _, tbl := range state.TableDefinitions { + var renamed, model2 sqlschema.BaseTable + for _, tbl := range state.BaseTables { switch tbl.Name { case "renamed": renamed = tbl @@ -568,7 +568,7 @@ func testChangeColumnType_AutoCast(t *testing.T, db *bun.DB) { // ManyValues []string `bun:",array"` // did not change } - wantTables := map[schema.FQN]sqlschema.TableDefinition{ + wantTables := map[schema.FQN]sqlschema.BaseTable{ {Schema: db.Dialect().DefaultSchema(), Table: "change_me_own_type"}: { Schema: db.Dialect().DefaultSchema(), Name: "change_me_own_type", @@ -619,7 +619,7 @@ func testChangeColumnType_AutoCast(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.TableDefinitions) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.BaseTables) } func testIdentity(t *testing.T, db *bun.DB) { @@ -635,7 +635,7 @@ func testIdentity(t *testing.T, db *bun.DB) { B int64 `bun:",notnull,identity"` } - wantTables := map[schema.FQN]sqlschema.TableDefinition{ + wantTables := map[schema.FQN]sqlschema.BaseTable{ {Schema: db.Dialect().DefaultSchema(), Table: "bourne_identity"}: { Schema: db.Dialect().DefaultSchema(), Name: "bourne_identity", @@ -662,7 
+662,7 @@ func testIdentity(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.TableDefinitions) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.BaseTables) } func testAddDropColumn(t *testing.T, db *bun.DB) { @@ -678,7 +678,7 @@ func testAddDropColumn(t *testing.T, db *bun.DB) { AddMe bool `bun:"addme"` } - wantTables := map[schema.FQN]sqlschema.TableDefinition{ + wantTables := map[schema.FQN]sqlschema.BaseTable{ {Schema: db.Dialect().DefaultSchema(), Table: "column_madness"}: { Schema: db.Dialect().DefaultSchema(), Name: "column_madness", @@ -705,7 +705,7 @@ func testAddDropColumn(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.TableDefinitions) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.BaseTables) } func testUnique(t *testing.T, db *bun.DB) { @@ -731,7 +731,7 @@ func testUnique(t *testing.T, db *bun.DB) { PetBreed string `bun:"pet_breed"` // shrink "pet" unique group } - wantTables := map[schema.FQN]sqlschema.TableDefinition{ + wantTables := map[schema.FQN]sqlschema.BaseTable{ {Schema: db.Dialect().DefaultSchema(), Table: "uniqlo_stores"}: { Schema: db.Dialect().DefaultSchema(), Name: "uniqlo_stores", @@ -784,7 +784,7 @@ func testUnique(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.TableDefinitions) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.BaseTables) } func testUniqueRenamedTable(t *testing.T, db *bun.DB) { @@ -809,7 +809,7 @@ func testUniqueRenamedTable(t *testing.T, db *bun.DB) { PetBreed string `bun:"pet_breed,unique"` } - wantTables := map[schema.FQN]sqlschema.TableDefinition{ + wantTables := map[schema.FQN]sqlschema.BaseTable{ {Schema: db.Dialect().DefaultSchema(), Table: "after"}: { Schema: 
db.Dialect().DefaultSchema(), Name: "after", @@ -854,7 +854,7 @@ func testUniqueRenamedTable(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.TableDefinitions) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.BaseTables) } func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { @@ -904,7 +904,7 @@ func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { LastName string `bun:"last_name,pk"` } - wantTables := map[schema.FQN]sqlschema.TableDefinition{ + wantTables := map[schema.FQN]sqlschema.BaseTable{ {Schema: db.Dialect().DefaultSchema(), Table: "drop_your_pks"}: { Schema: db.Dialect().DefaultSchema(), Name: "drop_your_pks", @@ -974,5 +974,5 @@ func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.TableDefinitions) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.BaseTables) } diff --git a/migrate/sqlschema/column.go b/migrate/sqlschema/column.go new file mode 100644 index 000000000..5a8b70483 --- /dev/null +++ b/migrate/sqlschema/column.go @@ -0,0 +1,72 @@ +package sqlschema + +import ( + "fmt" + + "github.com/uptrace/bun/schema" +) + +type Column interface { + GetName() string + GetSQLType() string + GetVarcharLen() int + GetDefaultValue() string + GetIsNullable() bool + GetIsAutoIncrement() bool + GetIsIdentity() bool + AppendQuery(schema.Formatter, []byte) ([]byte, error) +} + +var _ Column = (*BaseColumn)(nil) + +// BaseColumn stores attributes of a database column. +type BaseColumn struct { + Name string + SQLType string + VarcharLen int + DefaultValue string + IsNullable bool + IsAutoIncrement bool + IsIdentity bool + // TODO: add Precision and Cardinality for timestamps/bit-strings/floats and arrays respectively. 
+} + +func (cd BaseColumn) GetName() string { + return cd.Name +} + +func (cd BaseColumn) GetSQLType() string { + return cd.SQLType +} + +func (cd BaseColumn) GetVarcharLen() int { + return cd.VarcharLen +} + +func (cd BaseColumn) GetDefaultValue() string { + return cd.DefaultValue +} + +func (cd BaseColumn) GetIsNullable() bool { + return cd.IsNullable +} + +func (cd BaseColumn) GetIsAutoIncrement() bool { + return cd.IsAutoIncrement +} + +func (cd BaseColumn) GetIsIdentity() bool { + return cd.IsIdentity +} + +// AppendQuery appends full SQL data type. +func (c *BaseColumn) AppendQuery(fmter schema.Formatter, b []byte) (_ []byte, err error) { + b = append(b, c.SQLType...) + if c.VarcharLen == 0 { + return b, nil + } + b = append(b, "("...) + b = append(b, fmt.Sprint(c.VarcharLen)...) + b = append(b, ")"...) + return b, nil +} diff --git a/migrate/sqlschema/inspector.go b/migrate/sqlschema/inspector.go index 6cb9d3215..d8b882182 100644 --- a/migrate/sqlschema/inspector.go +++ b/migrate/sqlschema/inspector.go @@ -32,26 +32,6 @@ type Schema interface { GetForeignKeys() map[ForeignKey]string } -type Table interface { - GetSchema() string - GetName() string - GetColumns() []Column - GetPrimaryKey() *PrimaryKey - GetUniqueConstraints() []Unique - GetFQN() schema.FQN -} - -type Column interface { - GetName() string - GetSQLType() string - GetVarcharLen() int - GetDefaultValue() string - GetIsNullable() bool - GetIsAutoIncrement() bool - GetIsIdentity() bool - AppendQuery(schema.Formatter, []byte) ([]byte, error) -} - // inspector is opaque pointer to a databse inspector. type inspector struct { Inspector @@ -99,7 +79,7 @@ func (ms BunModelSchema) GetTables() []Table { // BunTable provides additional table metadata that is only accessible from scanning bun models. type BunTable struct { - TableDefinition + BaseTable // Model stores the zero interface to the underlying Go struct. 
Model interface{} @@ -161,7 +141,7 @@ func (bmi *BunModelInspector) Inspect(ctx context.Context) (Schema, error) { fqn := schema.FQN{Schema: t.Schema, Table: t.Name} state.ModelTables[fqn] = BunTable{ - TableDefinition: TableDefinition{ + BaseTable: BaseTable{ Schema: t.Schema, Name: t.Name, ColumnDefinitions: columns, diff --git a/migrate/sqlschema/schema.go b/migrate/sqlschema/schema.go index 166c0ecf8..f823d4cd8 100644 --- a/migrate/sqlschema/schema.go +++ b/migrate/sqlschema/schema.go @@ -1,7 +1,6 @@ package sqlschema import ( - "fmt" "slices" "strings" @@ -12,83 +11,12 @@ import ( // Dialects which support schema inspection may return it directly from Inspect() // or embed it in their custom schema structs. type DatabaseSchema struct { - TableDefinitions map[schema.FQN]TableDefinition - ForeignKeys map[ForeignKey]string + BaseTables map[schema.FQN]BaseTable + ForeignKeys map[ForeignKey]string } var _ Schema = (*DatabaseSchema)(nil) -type TableDefinition struct { - Schema string - Name string - - // ColumnDefinitions map each column name to the column definition. - ColumnDefinitions map[string]*BaseColumn - - // PrimaryKey holds the primary key definition. - // A nil value means that no primary key is defined for the table. - PrimaryKey *PrimaryKey - - // UniqueConstraints defined on the table. - UniqueConstraints []Unique -} - -var _ Table = (*TableDefinition)(nil) - -// BaseColumn stores attributes of a database column. -type BaseColumn struct { - Name string - SQLType string - VarcharLen int - DefaultValue string - IsNullable bool - IsAutoIncrement bool - IsIdentity bool - // TODO: add Precision and Cardinality for timestamps/bit-strings/floats and arrays respectively. 
-} - -var _ Column = (*BaseColumn)(nil) - -func (cd BaseColumn) GetName() string { - return cd.Name -} - -func (cd BaseColumn) GetSQLType() string { - return cd.SQLType -} - -func (cd BaseColumn) GetVarcharLen() int { - return cd.VarcharLen -} - -func (cd BaseColumn) GetDefaultValue() string { - return cd.DefaultValue -} - -func (cd BaseColumn) GetIsNullable() bool { - return cd.IsNullable -} - -func (cd BaseColumn) GetIsAutoIncrement() bool { - return cd.IsAutoIncrement -} - -func (cd BaseColumn) GetIsIdentity() bool { - return cd.IsIdentity -} - -// AppendQuery appends full SQL data type. -func (c *BaseColumn) AppendQuery(fmter schema.Formatter, b []byte) (_ []byte, err error) { - b = append(b, c.SQLType...) - if c.VarcharLen == 0 { - return b, nil - } - b = append(b, "("...) - b = append(b, fmt.Sprint(c.VarcharLen)...) - b = append(b, ")"...) - return b, nil -} - type ForeignKey struct { From ColumnReference To ColumnReference @@ -178,12 +106,6 @@ func (u Unique) Equals(other Unique) bool { return u.Columns == other.Columns } -// PrimaryKey represents a primary key constraint defined on 1 or more columns. 
-type PrimaryKey struct { - Name string - Columns Columns -} - type ColumnReference struct { FQN schema.FQN Column Columns @@ -191,8 +113,8 @@ type ColumnReference struct { func (ds DatabaseSchema) GetTables() []Table { var tables []Table - for i := range ds.TableDefinitions { - tables = append(tables, ds.TableDefinitions[i]) + for i := range ds.BaseTables { + tables = append(tables, ds.BaseTables[i]) } return tables } @@ -201,13 +123,13 @@ func (ds DatabaseSchema) GetForeignKeys() map[ForeignKey]string { return ds.ForeignKeys } -func (td TableDefinition) GetSchema() string { +func (td BaseTable) GetSchema() string { return td.Schema } -func (td TableDefinition) GetName() string { +func (td BaseTable) GetName() string { return td.Name } -func (td TableDefinition) GetColumns() []Column { +func (td BaseTable) GetColumns() []Column { var columns []Column // FIXME: columns will be returned in a random order for colName := range td.ColumnDefinitions { @@ -215,13 +137,13 @@ func (td TableDefinition) GetColumns() []Column { } return columns } -func (td TableDefinition) GetPrimaryKey() *PrimaryKey { +func (td BaseTable) GetPrimaryKey() *PrimaryKey { return td.PrimaryKey } -func (td TableDefinition) GetUniqueConstraints() []Unique { +func (td BaseTable) GetUniqueConstraints() []Unique { return td.UniqueConstraints } -func (t TableDefinition) GetFQN() schema.FQN { +func (t BaseTable) GetFQN() schema.FQN { return schema.FQN{Schema: t.Schema, Table: t.Name} } diff --git a/migrate/sqlschema/table.go b/migrate/sqlschema/table.go new file mode 100644 index 000000000..50e479efb --- /dev/null +++ b/migrate/sqlschema/table.go @@ -0,0 +1,35 @@ +package sqlschema + +import "github.com/uptrace/bun/schema" + +type Table interface { + GetSchema() string + GetName() string + GetColumns() []Column + GetPrimaryKey() *PrimaryKey + GetUniqueConstraints() []Unique + GetFQN() schema.FQN +} + +var _ Table = (*BaseTable)(nil) + +type BaseTable struct { + Schema string + Name string + + // 
ColumnDefinitions map each column name to the column definition. + ColumnDefinitions map[string]*BaseColumn + + // PrimaryKey holds the primary key definition. + // A nil value means that no primary key is defined for the table. + PrimaryKey *PrimaryKey + + // UniqueConstraints defined on the table. + UniqueConstraints []Unique +} + +// PrimaryKey represents a primary key constraint defined on 1 or more columns. +type PrimaryKey struct { + Name string + Columns Columns +} From 8af7563a3b348603f3809eae376f552799e05da6 Mon Sep 17 00:00:00 2001 From: Vladimir Mihailenco Date: Sat, 9 Nov 2024 13:23:43 +0200 Subject: [PATCH 45/55] fix most tests --- dialect/pgdialect/inspector.go | 8 +- internal/dbtest/inspect_test.go | 120 +++++++++++++++-------------- internal/dbtest/migrate_test.go | 132 ++++++++++++++++---------------- internal/dbtest/query_test.go | 38 ++++----- migrate/diff.go | 2 +- migrate/sqlschema/column.go | 3 +- migrate/sqlschema/inspector.go | 10 +-- migrate/sqlschema/schema.go | 31 +------- migrate/sqlschema/table.go | 34 +++++++- 9 files changed, 197 insertions(+), 181 deletions(-) diff --git a/dialect/pgdialect/inspector.go b/dialect/pgdialect/inspector.go index b955d1b67..6321f9ad2 100644 --- a/dialect/pgdialect/inspector.go +++ b/dialect/pgdialect/inspector.go @@ -32,7 +32,7 @@ func newInspector(db *bun.DB, excludeTables ...string) *Inspector { func (in *Inspector) Inspect(ctx context.Context) (sqlschema.Schema, error) { dbSchema := Schema{ - BaseTables: make(map[schema.FQN]Table), + Tables: make(map[schema.FQN]sqlschema.Table), ForeignKeys: make(map[sqlschema.ForeignKey]string), } @@ -59,7 +59,7 @@ func (in *Inspector) Inspect(ctx context.Context) (sqlschema.Schema, error) { return dbSchema, err } - colDefs := make(map[string]*Column) + colDefs := make(map[string]sqlschema.Column) uniqueGroups := make(map[string][]string) for _, c := range columns { @@ -102,10 +102,10 @@ func (in *Inspector) Inspect(ctx context.Context) (sqlschema.Schema, error) { } 
fqn := schema.FQN{Schema: table.Schema, Table: table.Name} - dbSchema.BaseTables[fqn] = Table{ + dbSchema.Tables[fqn] = &Table{ Schema: table.Schema, Name: table.Name, - ColumnDefinitions: colDefs, + Columns: colDefs, PrimaryKey: pk, UniqueConstraints: unique, } diff --git a/internal/dbtest/inspect_test.go b/internal/dbtest/inspect_test.go index f9be48d82..9eb77ab21 100644 --- a/internal/dbtest/inspect_test.go +++ b/internal/dbtest/inspect_test.go @@ -93,51 +93,51 @@ func TestDatabaseInspector_Inspect(t *testing.T) { defaultSchema := db.Dialect().DefaultSchema() // Tables come sorted alphabetically by schema and table. - wantTables := map[schema.FQN]sqlschema.BaseTable{ - {Schema: "admin", Table: "offices"}: { + wantTables := map[schema.FQN]sqlschema.Table{ + {Schema: "admin", Table: "offices"}: &sqlschema.BaseTable{ Schema: "admin", Name: "offices", - ColumnDefinitions: map[string]sqlschema.BaseColumn{ - "office_name": { + Columns: map[string]sqlschema.Column{ + "office_name": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, }, - "publisher_id": { + "publisher_id": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, IsNullable: true, }, - "publisher_name": { + "publisher_name": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, IsNullable: true, }, }, PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("office_name")}, }, - {Schema: defaultSchema, Table: "articles"}: { + {Schema: defaultSchema, Table: "articles"}: &sqlschema.BaseTable{ Schema: defaultSchema, Name: "articles", - ColumnDefinitions: map[string]sqlschema.BaseColumn{ - "isbn": { + Columns: map[string]sqlschema.Column{ + "isbn": &sqlschema.BaseColumn{ SQLType: "bigint", IsNullable: false, IsAutoIncrement: false, IsIdentity: true, DefaultValue: "", }, - "editor": { + "editor": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, IsNullable: false, IsAutoIncrement: false, IsIdentity: false, DefaultValue: "john doe", }, - "title": { + "title": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, IsNullable: 
false, IsAutoIncrement: false, IsIdentity: false, DefaultValue: "", }, - "locale": { + "locale": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, VarcharLen: 5, IsNullable: true, @@ -145,24 +145,24 @@ func TestDatabaseInspector_Inspect(t *testing.T) { IsIdentity: false, DefaultValue: "en-GB", }, - "page_count": { + "page_count": &sqlschema.BaseColumn{ SQLType: "smallint", IsNullable: false, IsAutoIncrement: false, IsIdentity: false, DefaultValue: "1", }, - "book_count": { + "book_count": &sqlschema.BaseColumn{ SQLType: "integer", IsNullable: false, IsAutoIncrement: true, IsIdentity: false, DefaultValue: "", }, - "publisher_id": { + "publisher_id": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, }, - "author_id": { + "author_id": &sqlschema.BaseColumn{ SQLType: "bigint", }, }, @@ -171,21 +171,21 @@ func TestDatabaseInspector_Inspect(t *testing.T) { {Columns: sqlschema.NewColumns("editor", "title")}, }, }, - {Schema: defaultSchema, Table: "authors"}: { + {Schema: defaultSchema, Table: "authors"}: &sqlschema.BaseTable{ Schema: defaultSchema, Name: "authors", - ColumnDefinitions: map[string]sqlschema.BaseColumn{ - "author_id": { + Columns: map[string]sqlschema.Column{ + "author_id": &sqlschema.BaseColumn{ SQLType: "bigint", IsIdentity: true, }, - "first_name": { + "first_name": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, }, - "last_name": { + "last_name": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, }, - "email": { + "email": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, }, }, @@ -195,31 +195,31 @@ func TestDatabaseInspector_Inspect(t *testing.T) { {Columns: sqlschema.NewColumns("email")}, }, }, - {Schema: defaultSchema, Table: "publisher_to_journalists"}: { + {Schema: defaultSchema, Table: "publisher_to_journalists"}: &sqlschema.BaseTable{ Schema: defaultSchema, Name: "publisher_to_journalists", - ColumnDefinitions: map[string]sqlschema.BaseColumn{ - "publisher_id": { + Columns: map[string]sqlschema.Column{ + "publisher_id": &sqlschema.BaseColumn{ 
SQLType: sqltype.VarChar, }, - "author_id": { + "author_id": &sqlschema.BaseColumn{ SQLType: "bigint", }, }, PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("publisher_id", "author_id")}, }, - {Schema: defaultSchema, Table: "publishers"}: { + {Schema: defaultSchema, Table: "publishers"}: &sqlschema.BaseTable{ Schema: defaultSchema, Name: "publishers", - ColumnDefinitions: map[string]sqlschema.BaseColumn{ - "publisher_id": { + Columns: map[string]sqlschema.Column{ + "publisher_id": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, DefaultValue: "gen_random_uuid()", }, - "publisher_name": { + "publisher_name": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, }, - "created_at": { + "created_at": &sqlschema.BaseColumn{ SQLType: "timestamp", DefaultValue: "current_timestamp", IsNullable: true, @@ -260,7 +260,7 @@ func TestDatabaseInspector_Inspect(t *testing.T) { // State.FKs store their database names, which differ from dialect to dialect. // Because of that we compare FKs and Tables separately. - gotTables := got.(sqlschema.DatabaseSchema).BaseTables + gotTables := got.(sqlschema.DatabaseSchema).Tables cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, gotTables) var fks []sqlschema.ForeignKey @@ -293,7 +293,9 @@ func mustCreateSchema(tb testing.TB, ctx context.Context, db *bun.DB, schema str // cmpTables compares table schemas using dialect-specific equivalence checks for column types // and reports the differences as t.Error(). -func cmpTables(tb testing.TB, d sqlschema.InspectorDialect, want, got map[schema.FQN]sqlschema.BaseTable) { +func cmpTables( + tb testing.TB, d sqlschema.InspectorDialect, want, got map[schema.FQN]sqlschema.Table, +) { tb.Helper() require.ElementsMatch(tb, tableNames(want), tableNames(got), "different set of tables") @@ -301,21 +303,26 @@ func cmpTables(tb testing.TB, d sqlschema.InspectorDialect, want, got map[schema // Now we are guaranteed to have the same tables. 
for _, wantTable := range want { // TODO(dyma): this will be simplified by map[string]Table - var gt sqlschema.BaseTable + var gt sqlschema.Table for i := range got { - if got[i].Name == wantTable.Name { + if got[i].GetName() == wantTable.GetName() { gt = got[i] break } } - cmpColumns(tb, d, wantTable.Name, wantTable.ColumnDefinitions, gt.ColumnDefinitions) - cmpConstraints(tb, wantTable, gt) + cmpColumns(tb, d, wantTable.GetName(), wantTable.(*sqlschema.BaseTable).Columns, gt.(*sqlschema.BaseTable).Columns) + cmpConstraints(tb, wantTable.(*sqlschema.BaseTable), gt.(*sqlschema.BaseTable)) } } // cmpColumns compares that column definitions on the tables are -func cmpColumns(tb testing.TB, d sqlschema.InspectorDialect, tableName string, want, got map[string]sqlschema.BaseColumn) { +func cmpColumns( + tb testing.TB, + d sqlschema.InspectorDialect, + tableName string, + want, got map[string]sqlschema.Column, +) { tb.Helper() var errs []string @@ -324,7 +331,8 @@ func cmpColumns(tb testing.TB, d sqlschema.InspectorDialect, tableName string, w errorf := func(format string, args ...interface{}) { errs = append(errs, fmt.Sprintf("[%s.%s] "+format, append([]interface{}{tableName, colName}, args...)...)) } - gotCol, ok := got[colName] + wantCol := wantCol.(*sqlschema.BaseColumn) + gotCol, ok := got[colName].(*sqlschema.BaseColumn) if !ok { missing = append(missing, colName) continue @@ -372,7 +380,7 @@ func cmpColumns(tb testing.TB, d sqlschema.InspectorDialect, tableName string, w } // cmpConstraints compares constraints defined on the table with the expected ones. 
-func cmpConstraints(tb testing.TB, want, got sqlschema.BaseTable) { +func cmpConstraints(tb testing.TB, want, got *sqlschema.BaseTable) { tb.Helper() if want.PrimaryKey != nil { @@ -392,18 +400,18 @@ func cmpConstraints(tb testing.TB, want, got sqlschema.BaseTable) { require.ElementsMatch(tb, stripNames(want.UniqueConstraints), stripNames(got.UniqueConstraints), "table %q does not have expected unique constraints (listA=want, listB=got)", want.Name) } -func tableNames(tables map[schema.FQN]sqlschema.BaseTable) (names []string) { +func tableNames(tables map[schema.FQN]sqlschema.Table) (names []string) { for fqn := range tables { names = append(names, fqn.Table) } return } -func formatType(c sqlschema.BaseColumn) string { - if c.VarcharLen == 0 { - return c.SQLType +func formatType(c sqlschema.Column) string { + if c.GetVarcharLen() == 0 { + return c.GetSQLType() } - return fmt.Sprintf("%s(%d)", c.SQLType, c.VarcharLen) + return fmt.Sprintf("%s(%d)", c.GetSQLType(), c.GetVarcharLen()) } func TestBunModelInspector_Inspect(t *testing.T) { @@ -422,12 +430,12 @@ func TestBunModelInspector_Inspect(t *testing.T) { tables.Register((*Model)(nil)) inspector := sqlschema.NewBunModelInspector(tables) - want := map[string]sqlschema.BaseColumn{ - "id": { + want := map[string]sqlschema.Column{ + "id": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, DefaultValue: "random()", }, - "name": { + "name": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, DefaultValue: "'John Doe'", }, @@ -439,7 +447,7 @@ func TestBunModelInspector_Inspect(t *testing.T) { gotTables := got.(sqlschema.BunModelSchema).ModelTables require.Len(t, gotTables, 1) for _, table := range gotTables { - cmpColumns(t, dialect.(sqlschema.InspectorDialect), "model", want, table.ColumnDefinitions) + cmpColumns(t, dialect.(sqlschema.InspectorDialect), "model", want, table.Columns) return } }) @@ -455,15 +463,15 @@ func TestBunModelInspector_Inspect(t *testing.T) { tables.Register((*Model)(nil)) inspector := 
sqlschema.NewBunModelInspector(tables) - want := map[string]sqlschema.BaseColumn{ - "id": { + want := map[string]sqlschema.Column{ + "id": &sqlschema.BaseColumn{ SQLType: "text", }, - "first_name": { + "first_name": &sqlschema.BaseColumn{ SQLType: "character varying", VarcharLen: 60, }, - "last_name": { + "last_name": &sqlschema.BaseColumn{ SQLType: "varchar", VarcharLen: 100, }, @@ -475,7 +483,7 @@ func TestBunModelInspector_Inspect(t *testing.T) { gotTables := got.(sqlschema.BunModelSchema).ModelTables require.Len(t, gotTables, 1) for _, table := range gotTables { - cmpColumns(t, dialect.(sqlschema.InspectorDialect), "model", want, table.ColumnDefinitions) + cmpColumns(t, dialect.(sqlschema.InspectorDialect), "model", want, table.Columns) } }) @@ -490,7 +498,7 @@ func TestBunModelInspector_Inspect(t *testing.T) { tables.Register((*Model)(nil)) inspector := sqlschema.NewBunModelInspector(tables) - want := sqlschema.BaseTable{ + want := &sqlschema.BaseTable{ Name: "models", UniqueConstraints: []sqlschema.Unique{ {Columns: sqlschema.NewColumns("id")}, @@ -504,7 +512,7 @@ func TestBunModelInspector_Inspect(t *testing.T) { gotTables := got.(sqlschema.BunModelSchema).ModelTables require.Len(t, gotTables, 1) for _, table := range gotTables { - cmpConstraints(t, want, table.BaseTable) + cmpConstraints(t, want, &table.BaseTable) return } }) diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index 592178d5e..3e7d689f6 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -369,7 +369,7 @@ func testRenameTable(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - tables := state.BaseTables + tables := state.Tables require.Len(t, tables, 1) require.Contains(t, tables, schema.FQN{Schema: db.Dialect().DefaultSchema(), Table: "changed"}) } @@ -398,7 +398,7 @@ func testCreateDropTable(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - tables := state.BaseTables + tables := state.Tables require.Len(t, 
tables, 1) require.Contains(t, tables, schema.FQN{Schema: db.Dialect().DefaultSchema(), Table: "createme"}) } @@ -524,11 +524,11 @@ func testRenamedColumns(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - require.Len(t, state.BaseTables, 2) + require.Len(t, state.Tables, 2) - var renamed, model2 sqlschema.BaseTable - for _, tbl := range state.BaseTables { - switch tbl.Name { + var renamed, model2 sqlschema.Table + for _, tbl := range state.Tables { + switch tbl.GetName() { case "renamed": renamed = tbl case "models": @@ -536,9 +536,9 @@ func testRenamedColumns(t *testing.T, db *bun.DB) { } } - require.Contains(t, renamed.ColumnDefinitions, "count") - require.Contains(t, model2.ColumnDefinitions, "second_column") - require.Contains(t, model2.ColumnDefinitions, "do_not_rename") + require.Contains(t, renamed.GetColumns(), "count") + require.Contains(t, model2.GetColumns(), "second_column") + require.Contains(t, model2.GetColumns(), "do_not_rename") } // testChangeColumnType_AutoCast checks type changes which can be type-casted automatically, @@ -568,35 +568,35 @@ func testChangeColumnType_AutoCast(t *testing.T, db *bun.DB) { // ManyValues []string `bun:",array"` // did not change } - wantTables := map[schema.FQN]sqlschema.BaseTable{ - {Schema: db.Dialect().DefaultSchema(), Table: "change_me_own_type"}: { + wantTables := map[schema.FQN]sqlschema.Table{ + {Schema: db.Dialect().DefaultSchema(), Table: "change_me_own_type"}: &sqlschema.BaseTable{ Schema: db.Dialect().DefaultSchema(), Name: "change_me_own_type", - ColumnDefinitions: map[string]sqlschema.BaseColumn{ - "bigger_int": { + Columns: map[string]sqlschema.Column{ + "bigger_int": &sqlschema.BaseColumn{ SQLType: "bigint", IsIdentity: true, }, - "ts": { + "ts": &sqlschema.BaseColumn{ SQLType: "timestamp", // FIXME(dyma): convert "timestamp with time zone" to sqltype.Timestamp DefaultValue: "current_timestamp", // FIXME(dyma): Convert driver-specific value to common "expressions" (e.g. 
CURRENT_TIMESTAMP == current_timestamp) OR lowercase all types. IsNullable: true, }, - "default_expr": { + "default_expr": &sqlschema.BaseColumn{ SQLType: "varchar", IsNullable: true, DefaultValue: "random()", }, - "empty_default": { + "empty_default": &sqlschema.BaseColumn{ SQLType: "varchar", IsNullable: true, DefaultValue: "", // NOT "''" }, - "not_null": { + "not_null": &sqlschema.BaseColumn{ SQLType: "varchar", IsNullable: false, }, - "type_override": { + "type_override": &sqlschema.BaseColumn{ SQLType: "varchar", IsNullable: true, VarcharLen: 200, @@ -619,7 +619,7 @@ func testChangeColumnType_AutoCast(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.BaseTables) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) } func testIdentity(t *testing.T, db *bun.DB) { @@ -635,16 +635,16 @@ func testIdentity(t *testing.T, db *bun.DB) { B int64 `bun:",notnull,identity"` } - wantTables := map[schema.FQN]sqlschema.BaseTable{ - {Schema: db.Dialect().DefaultSchema(), Table: "bourne_identity"}: { + wantTables := map[schema.FQN]sqlschema.Table{ + {Schema: db.Dialect().DefaultSchema(), Table: "bourne_identity"}: &sqlschema.BaseTable{ Schema: db.Dialect().DefaultSchema(), Name: "bourne_identity", - ColumnDefinitions: map[string]sqlschema.BaseColumn{ - "a": { + Columns: map[string]sqlschema.Column{ + "a": &sqlschema.BaseColumn{ SQLType: sqltype.BigInt, IsIdentity: false, // <- drop IDENTITY }, - "b": { + "b": &sqlschema.BaseColumn{ SQLType: sqltype.BigInt, IsIdentity: true, // <- add IDENTITY }, @@ -662,7 +662,7 @@ func testIdentity(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.BaseTables) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) } func testAddDropColumn(t *testing.T, db *bun.DB) { @@ -678,16 +678,16 @@ func testAddDropColumn(t 
*testing.T, db *bun.DB) { AddMe bool `bun:"addme"` } - wantTables := map[schema.FQN]sqlschema.BaseTable{ - {Schema: db.Dialect().DefaultSchema(), Table: "column_madness"}: { + wantTables := map[schema.FQN]sqlschema.Table{ + {Schema: db.Dialect().DefaultSchema(), Table: "column_madness"}: &sqlschema.BaseTable{ Schema: db.Dialect().DefaultSchema(), Name: "column_madness", - ColumnDefinitions: map[string]sqlschema.BaseColumn{ - "do_not_touch": { + Columns: map[string]sqlschema.Column{ + "do_not_touch": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, IsNullable: true, }, - "addme": { + "addme": &sqlschema.BaseColumn{ SQLType: sqltype.Boolean, IsNullable: true, }, @@ -705,7 +705,7 @@ func testAddDropColumn(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.BaseTables) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) } func testUnique(t *testing.T, db *bun.DB) { @@ -731,36 +731,36 @@ func testUnique(t *testing.T, db *bun.DB) { PetBreed string `bun:"pet_breed"` // shrink "pet" unique group } - wantTables := map[schema.FQN]sqlschema.BaseTable{ - {Schema: db.Dialect().DefaultSchema(), Table: "uniqlo_stores"}: { + wantTables := map[schema.FQN]sqlschema.Table{ + {Schema: db.Dialect().DefaultSchema(), Table: "uniqlo_stores"}: &sqlschema.BaseTable{ Schema: db.Dialect().DefaultSchema(), Name: "uniqlo_stores", - ColumnDefinitions: map[string]sqlschema.BaseColumn{ - "first_name": { + Columns: map[string]sqlschema.Column{ + "first_name": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, IsNullable: true, }, - "middle_name": { + "middle_name": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, IsNullable: true, }, - "last_name": { + "last_name": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, IsNullable: true, }, - "birthday": { + "birthday": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, IsNullable: true, }, - "email": { + "email": &sqlschema.BaseColumn{ SQLType: 
sqltype.VarChar, IsNullable: true, }, - "pet_name": { + "pet_name": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, IsNullable: true, }, - "pet_breed": { + "pet_breed": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, IsNullable: true, }, @@ -784,7 +784,7 @@ func testUnique(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.BaseTables) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) } func testUniqueRenamedTable(t *testing.T, db *bun.DB) { @@ -809,28 +809,28 @@ func testUniqueRenamedTable(t *testing.T, db *bun.DB) { PetBreed string `bun:"pet_breed,unique"` } - wantTables := map[schema.FQN]sqlschema.BaseTable{ - {Schema: db.Dialect().DefaultSchema(), Table: "after"}: { + wantTables := map[schema.FQN]sqlschema.Table{ + {Schema: db.Dialect().DefaultSchema(), Table: "after"}: &sqlschema.BaseTable{ Schema: db.Dialect().DefaultSchema(), Name: "after", - ColumnDefinitions: map[string]sqlschema.BaseColumn{ - "first_name": { + Columns: map[string]sqlschema.Column{ + "first_name": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, IsNullable: true, }, - "last_name": { + "last_name": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, IsNullable: true, }, - "birthday": { + "birthday": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, IsNullable: true, }, - "pet_name": { + "pet_name": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, IsNullable: true, }, - "pet_breed": { + "pet_breed": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, IsNullable: true, }, @@ -854,7 +854,7 @@ func testUniqueRenamedTable(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.BaseTables) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) } func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { @@ -904,50 +904,50 @@ func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { 
LastName string `bun:"last_name,pk"` } - wantTables := map[schema.FQN]sqlschema.BaseTable{ - {Schema: db.Dialect().DefaultSchema(), Table: "drop_your_pks"}: { + wantTables := map[schema.FQN]sqlschema.Table{ + {Schema: db.Dialect().DefaultSchema(), Table: "drop_your_pks"}: &sqlschema.BaseTable{ Schema: db.Dialect().DefaultSchema(), Name: "drop_your_pks", - ColumnDefinitions: map[string]sqlschema.BaseColumn{ - "first_name": { + Columns: map[string]sqlschema.Column{ + "first_name": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, IsNullable: false, }, - "last_name": { + "last_name": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, IsNullable: false, }, }, }, - {Schema: db.Dialect().DefaultSchema(), Table: "add_new_pk"}: { + {Schema: db.Dialect().DefaultSchema(), Table: "add_new_pk"}: &sqlschema.BaseTable{ Schema: db.Dialect().DefaultSchema(), Name: "add_new_pk", - ColumnDefinitions: map[string]sqlschema.BaseColumn{ - "new_id": { + Columns: map[string]sqlschema.Column{ + "new_id": &sqlschema.BaseColumn{ SQLType: sqltype.BigInt, IsNullable: false, IsIdentity: true, }, - "first_name": { + "first_name": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, IsNullable: true, }, - "last_name": { + "last_name": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, IsNullable: true, }, }, PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("new_id")}, }, - {Schema: db.Dialect().DefaultSchema(), Table: "change_pk"}: { + {Schema: db.Dialect().DefaultSchema(), Table: "change_pk"}: &sqlschema.BaseTable{ Schema: db.Dialect().DefaultSchema(), Name: "change_pk", - ColumnDefinitions: map[string]sqlschema.BaseColumn{ - "first_name": { + Columns: map[string]sqlschema.Column{ + "first_name": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, IsNullable: false, }, - "last_name": { + "last_name": &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, IsNullable: false, }, @@ -974,5 +974,5 @@ func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - cmpTables(t, 
db.Dialect().(sqlschema.InspectorDialect), wantTables, state.BaseTables) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) } diff --git a/internal/dbtest/query_test.go b/internal/dbtest/query_test.go index 1d074f7ef..093c940c2 100644 --- a/internal/dbtest/query_test.go +++ b/internal/dbtest/query_test.go @@ -1618,7 +1618,7 @@ func TestAlterTable(t *testing.T) { {name: "add column with default value", operation: &migrate.AddColumnOp{ FQN: fqn, Column: "language", - ColDef: sqlschema.BaseColumn{ + ColDef: &sqlschema.BaseColumn{ SQLType: "varchar", VarcharLen: 20, IsNullable: false, @@ -1628,7 +1628,7 @@ func TestAlterTable(t *testing.T) { {name: "add column with identity", operation: &migrate.AddColumnOp{ FQN: fqn, Column: "n", - ColDef: sqlschema.BaseColumn{ + ColDef: &sqlschema.BaseColumn{ SQLType: sqltype.BigInt, IsNullable: false, IsIdentity: true, @@ -1637,7 +1637,7 @@ func TestAlterTable(t *testing.T) { {name: "drop column", operation: &migrate.DropColumnOp{ FQN: fqn, Column: "director", - ColDef: sqlschema.BaseColumn{ + ColDef: &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, IsNullable: false, }, @@ -1659,50 +1659,50 @@ func TestAlterTable(t *testing.T) { {name: "change column type int to bigint", operation: &migrate.ChangeColumnTypeOp{ FQN: fqn, Column: "budget", - From: sqlschema.BaseColumn{SQLType: sqltype.Integer}, - To: sqlschema.BaseColumn{SQLType: sqltype.BigInt}, + From: &sqlschema.BaseColumn{SQLType: sqltype.Integer}, + To: &sqlschema.BaseColumn{SQLType: sqltype.BigInt}, }}, {name: "add default", operation: &migrate.ChangeColumnTypeOp{ FQN: fqn, Column: "budget", - From: sqlschema.BaseColumn{DefaultValue: ""}, - To: sqlschema.BaseColumn{DefaultValue: "100"}, + From: &sqlschema.BaseColumn{DefaultValue: ""}, + To: &sqlschema.BaseColumn{DefaultValue: "100"}, }}, {name: "drop default", operation: &migrate.ChangeColumnTypeOp{ FQN: fqn, Column: "budget", - From: sqlschema.BaseColumn{DefaultValue: "100"}, - To: 
sqlschema.BaseColumn{DefaultValue: ""}, + From: &sqlschema.BaseColumn{DefaultValue: "100"}, + To: &sqlschema.BaseColumn{DefaultValue: ""}, }}, {name: "make nullable", operation: &migrate.ChangeColumnTypeOp{ FQN: fqn, Column: "director", - From: sqlschema.BaseColumn{IsNullable: false}, - To: sqlschema.BaseColumn{IsNullable: true}, + From: &sqlschema.BaseColumn{IsNullable: false}, + To: &sqlschema.BaseColumn{IsNullable: true}, }}, {name: "add notnull", operation: &migrate.ChangeColumnTypeOp{ FQN: fqn, Column: "budget", - From: sqlschema.BaseColumn{IsNullable: true}, - To: sqlschema.BaseColumn{IsNullable: false}, + From: &sqlschema.BaseColumn{IsNullable: true}, + To: &sqlschema.BaseColumn{IsNullable: false}, }}, {name: "increase varchar length", operation: &migrate.ChangeColumnTypeOp{ FQN: fqn, Column: "language", - From: sqlschema.BaseColumn{SQLType: "varchar", VarcharLen: 20}, - To: sqlschema.BaseColumn{SQLType: "varchar", VarcharLen: 255}, + From: &sqlschema.BaseColumn{SQLType: "varchar", VarcharLen: 20}, + To: &sqlschema.BaseColumn{SQLType: "varchar", VarcharLen: 255}, }}, {name: "add identity", operation: &migrate.ChangeColumnTypeOp{ FQN: fqn, Column: "id", - From: sqlschema.BaseColumn{IsIdentity: false}, - To: sqlschema.BaseColumn{IsIdentity: true}, + From: &sqlschema.BaseColumn{IsIdentity: false}, + To: &sqlschema.BaseColumn{IsIdentity: true}, }}, {name: "drop identity", operation: &migrate.ChangeColumnTypeOp{ FQN: fqn, Column: "id", - From: sqlschema.BaseColumn{IsIdentity: true}, - To: sqlschema.BaseColumn{IsIdentity: false}, + From: &sqlschema.BaseColumn{IsIdentity: true}, + To: &sqlschema.BaseColumn{IsIdentity: false}, }}, {name: "add primary key", operation: &migrate.AddPrimaryKeyOp{ FQN: fqn, diff --git a/migrate/diff.go b/migrate/diff.go index 4d6a177c6..e1fdd160e 100644 --- a/migrate/diff.go +++ b/migrate/diff.go @@ -58,7 +58,7 @@ RenameCreate: // If wantTable does not exist in the database and was not renamed // then we need to create this table in the 
database. - additional := wantTable.(sqlschema.BunTable) + additional := wantTable.(*sqlschema.BunTable) d.changes.Add(&CreateTableOp{ FQN: wantTable.GetFQN(), Model: additional.Model, diff --git a/migrate/sqlschema/column.go b/migrate/sqlschema/column.go index 5a8b70483..95d9a3efc 100644 --- a/migrate/sqlschema/column.go +++ b/migrate/sqlschema/column.go @@ -19,7 +19,8 @@ type Column interface { var _ Column = (*BaseColumn)(nil) -// BaseColumn stores attributes of a database column. +// BaseColumn is a base column definition that stores various attributes of a column. +// It MUST only be used by dialects to implement the Column interface. type BaseColumn struct { Name string SQLType string diff --git a/migrate/sqlschema/inspector.go b/migrate/sqlschema/inspector.go index d8b882182..24c52fcb2 100644 --- a/migrate/sqlschema/inspector.go +++ b/migrate/sqlschema/inspector.go @@ -66,7 +66,7 @@ func NewBunModelInspector(tables *schema.Tables) *BunModelInspector { type BunModelSchema struct { DatabaseSchema - ModelTables map[schema.FQN]BunTable + ModelTables map[schema.FQN]*BunTable } func (ms BunModelSchema) GetTables() []Table { @@ -90,10 +90,10 @@ func (bmi *BunModelInspector) Inspect(ctx context.Context) (Schema, error) { DatabaseSchema: DatabaseSchema{ ForeignKeys: make(map[ForeignKey]string), }, - ModelTables: make(map[schema.FQN]BunTable), + ModelTables: make(map[schema.FQN]*BunTable), } for _, t := range bmi.tables.All() { - columns := make(map[string]*BaseColumn) + columns := make(map[string]Column) for _, f := range t.Fields { sqlType, length, err := parseLen(f.CreateTableSQLType) @@ -140,11 +140,11 @@ func (bmi *BunModelInspector) Inspect(ctx context.Context) (Schema, error) { } fqn := schema.FQN{Schema: t.Schema, Table: t.Name} - state.ModelTables[fqn] = BunTable{ + state.ModelTables[fqn] = &BunTable{ BaseTable: BaseTable{ Schema: t.Schema, Name: t.Name, - ColumnDefinitions: columns, + Columns: columns, UniqueConstraints: unique, PrimaryKey: pk, }, diff --git 
a/migrate/sqlschema/schema.go b/migrate/sqlschema/schema.go index f823d4cd8..3d72a0a37 100644 --- a/migrate/sqlschema/schema.go +++ b/migrate/sqlschema/schema.go @@ -11,7 +11,7 @@ import ( // Dialects which support schema inspection may return it directly from Inspect() // or embed it in their custom schema structs. type DatabaseSchema struct { - BaseTables map[schema.FQN]BaseTable + Tables map[schema.FQN]Table ForeignKeys map[ForeignKey]string } @@ -113,8 +113,8 @@ type ColumnReference struct { func (ds DatabaseSchema) GetTables() []Table { var tables []Table - for i := range ds.BaseTables { - tables = append(tables, ds.BaseTables[i]) + for i := range ds.Tables { + tables = append(tables, ds.Tables[i]) } return tables } @@ -122,28 +122,3 @@ func (ds DatabaseSchema) GetTables() []Table { func (ds DatabaseSchema) GetForeignKeys() map[ForeignKey]string { return ds.ForeignKeys } - -func (td BaseTable) GetSchema() string { - return td.Schema -} -func (td BaseTable) GetName() string { - return td.Name -} -func (td BaseTable) GetColumns() []Column { - var columns []Column - // FIXME: columns will be returned in a random order - for colName := range td.ColumnDefinitions { - columns = append(columns, td.ColumnDefinitions[colName]) - } - return columns -} -func (td BaseTable) GetPrimaryKey() *PrimaryKey { - return td.PrimaryKey -} -func (td BaseTable) GetUniqueConstraints() []Unique { - return td.UniqueConstraints -} - -func (t BaseTable) GetFQN() schema.FQN { - return schema.FQN{Schema: t.Schema, Table: t.Name} -} diff --git a/migrate/sqlschema/table.go b/migrate/sqlschema/table.go index 50e479efb..54877f60b 100644 --- a/migrate/sqlschema/table.go +++ b/migrate/sqlschema/table.go @@ -13,12 +13,15 @@ type Table interface { var _ Table = (*BaseTable)(nil) +// BaseTable is a base table definition. +// It MUST only be used by dialects to implement the Table interface. 
type BaseTable struct { Schema string Name string // ColumnDefinitions map each column name to the column definition. - ColumnDefinitions map[string]*BaseColumn + // TODO: this must be an ordered map so the order of columns is preserved + Columns map[string]Column // PrimaryKey holds the primary key definition. // A nil value means that no primary key is defined for the table. @@ -33,3 +36,32 @@ type PrimaryKey struct { Name string Columns Columns } + +func (td *BaseTable) GetSchema() string { + return td.Schema +} + +func (td *BaseTable) GetName() string { + return td.Name +} + +func (td *BaseTable) GetColumns() []Column { + var columns []Column + // FIXME: columns will be returned in a random order + for colName := range td.Columns { + columns = append(columns, td.Columns[colName]) + } + return columns +} + +func (td *BaseTable) GetPrimaryKey() *PrimaryKey { + return td.PrimaryKey +} + +func (td *BaseTable) GetUniqueConstraints() []Unique { + return td.UniqueConstraints +} + +func (t *BaseTable) GetFQN() schema.FQN { + return schema.FQN{Schema: t.Schema, Table: t.Name} +} From 53eabcf556d7cbe209362bdc4c4fce5f4f079056 Mon Sep 17 00:00:00 2001 From: Vladimir Mihailenco Date: Sat, 9 Nov 2024 13:37:47 +0200 Subject: [PATCH 46/55] rename Schema to Database --- dialect/pgdialect/inspector.go | 4 +- internal/dbtest/inspect_test.go | 2 +- internal/dbtest/migrate_test.go | 6 +-- migrate/diff.go | 10 ++--- migrate/sqlschema/column.go | 4 +- migrate/sqlschema/{schema.go => database.go} | 40 +++++++++++--------- migrate/sqlschema/inspector.go | 15 ++------ migrate/sqlschema/table.go | 4 +- 8 files changed, 44 insertions(+), 41 deletions(-) rename migrate/sqlschema/{schema.go => database.go} (86%) diff --git a/dialect/pgdialect/inspector.go b/dialect/pgdialect/inspector.go index 6321f9ad2..67fbe96d8 100644 --- a/dialect/pgdialect/inspector.go +++ b/dialect/pgdialect/inspector.go @@ -10,7 +10,7 @@ import ( ) type ( - Schema = sqlschema.DatabaseSchema + Schema = 
sqlschema.BaseDatabase Table = sqlschema.BaseTable Column = sqlschema.BaseColumn ) @@ -30,7 +30,7 @@ func newInspector(db *bun.DB, excludeTables ...string) *Inspector { return &Inspector{db: db, excludeTables: excludeTables} } -func (in *Inspector) Inspect(ctx context.Context) (sqlschema.Schema, error) { +func (in *Inspector) Inspect(ctx context.Context) (sqlschema.Database, error) { dbSchema := Schema{ Tables: make(map[schema.FQN]sqlschema.Table), ForeignKeys: make(map[sqlschema.ForeignKey]string), diff --git a/internal/dbtest/inspect_test.go b/internal/dbtest/inspect_test.go index 9eb77ab21..9f60af30e 100644 --- a/internal/dbtest/inspect_test.go +++ b/internal/dbtest/inspect_test.go @@ -260,7 +260,7 @@ func TestDatabaseInspector_Inspect(t *testing.T) { // State.FKs store their database names, which differ from dialect to dialect. // Because of that we compare FKs and Tables separately. - gotTables := got.(sqlschema.DatabaseSchema).Tables + gotTables := got.(sqlschema.BaseDatabase).Tables cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, gotTables) var fks []sqlschema.ForeignKey diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index 3e7d689f6..2b45a0268 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -215,17 +215,17 @@ func newAutoMigratorOrSkip(tb testing.TB, db *bun.DB, opts ...migrate.AutoMigrat // inspectDbOrSkip returns a function to inspect the current state of the database. // The test will be *skipped* if the current dialect doesn't support database inpection // and fail if the inspector cannot successfully retrieve database state. -func inspectDbOrSkip(tb testing.TB, db *bun.DB) func(context.Context) sqlschema.DatabaseSchema { +func inspectDbOrSkip(tb testing.TB, db *bun.DB) func(context.Context) sqlschema.BaseDatabase { tb.Helper() // AutoMigrator excludes these tables by default, but here we need to do this explicitly. 
inspector, err := sqlschema.NewInspector(db, migrationsTable, migrationLocksTable) if err != nil { tb.Skip(err) } - return func(ctx context.Context) sqlschema.DatabaseSchema { + return func(ctx context.Context) sqlschema.BaseDatabase { state, err := inspector.Inspect(ctx) require.NoError(tb, err) - return state.(sqlschema.DatabaseSchema) + return state.(sqlschema.BaseDatabase) } } diff --git a/migrate/diff.go b/migrate/diff.go index e1fdd160e..b9149b303 100644 --- a/migrate/diff.go +++ b/migrate/diff.go @@ -17,7 +17,7 @@ func (c *changeset) Add(op ...Operation) { // diff calculates the diff between the current database schema and the target state. // The changeset is not sorted -- the caller should resolve dependencies before applying the changes. -func diff(got, want sqlschema.Schema, opts ...diffOption) *changeset { +func diff(got, want sqlschema.Database, opts ...diffOption) *changeset { d := newDetector(got, want, opts...) return d.detectChanges() } @@ -216,7 +216,7 @@ Drop: } } -func newDetector(got, want sqlschema.Schema, opts ...diffOption) *detector { +func newDetector(got, want sqlschema.Database, opts ...diffOption) *detector { cfg := &detectorConfig{ EqType: func(c1, c2 sqlschema.Column) bool { return c1.GetSQLType() == c2.GetSQLType() && c1.GetVarcharLen() == c2.GetVarcharLen() @@ -250,10 +250,10 @@ type detectorConfig struct { // detector may modify the passed database schemas, so it isn't safe to re-use them. type detector struct { // current state represents the existing database schema. - current sqlschema.Schema + current sqlschema.Database // target state represents the database schema defined in bun models. 
- target sqlschema.Schema + target sqlschema.Database changes changeset refMap refMap @@ -295,7 +295,7 @@ func (d detector) makeTargetColDef(current, target sqlschema.Column) sqlschema.C return target } -func (d *detector) mapNameToTable(s sqlschema.Schema) map[string]sqlschema.Table { +func (d *detector) mapNameToTable(s sqlschema.Database) map[string]sqlschema.Table { m := make(map[string]sqlschema.Table) for _, t := range s.GetTables() { m[t.GetName()] = t diff --git a/migrate/sqlschema/column.go b/migrate/sqlschema/column.go index 95d9a3efc..60f7ea8a6 100644 --- a/migrate/sqlschema/column.go +++ b/migrate/sqlschema/column.go @@ -20,7 +20,9 @@ type Column interface { var _ Column = (*BaseColumn)(nil) // BaseColumn is a base column definition that stores various attributes of a column. -// It MUST only be used by dialects to implement the Column interface. +// +// Dialects and only dialects can use it to implement the Column interface. +// Other packages must use the Column interface. type BaseColumn struct { Name string SQLType string diff --git a/migrate/sqlschema/schema.go b/migrate/sqlschema/database.go similarity index 86% rename from migrate/sqlschema/schema.go rename to migrate/sqlschema/database.go index 3d72a0a37..66bdff19e 100644 --- a/migrate/sqlschema/schema.go +++ b/migrate/sqlschema/database.go @@ -7,15 +7,33 @@ import ( "github.com/uptrace/bun/schema" ) -// DatabaseSchema provides a default implementation of the Schema interface. -// Dialects which support schema inspection may return it directly from Inspect() -// or embed it in their custom schema structs. -type DatabaseSchema struct { +type Database interface { + GetTables() []Table + GetForeignKeys() map[ForeignKey]string +} + +var _ Database = (*BaseDatabase)(nil) + +// BaseDatabase is a base database definition. +// +// Dialects and only dialects can use it to implement the Database interface. +// Other packages must use the Database interface. 
+type BaseDatabase struct { Tables map[schema.FQN]Table ForeignKeys map[ForeignKey]string } -var _ Schema = (*DatabaseSchema)(nil) +func (ds BaseDatabase) GetTables() []Table { + var tables []Table + for i := range ds.Tables { + tables = append(tables, ds.Tables[i]) + } + return tables +} + +func (ds BaseDatabase) GetForeignKeys() map[ForeignKey]string { + return ds.ForeignKeys +} type ForeignKey struct { From ColumnReference @@ -110,15 +128,3 @@ type ColumnReference struct { FQN schema.FQN Column Columns } - -func (ds DatabaseSchema) GetTables() []Table { - var tables []Table - for i := range ds.Tables { - tables = append(tables, ds.Tables[i]) - } - return tables -} - -func (ds DatabaseSchema) GetForeignKeys() map[ForeignKey]string { - return ds.ForeignKeys -} diff --git a/migrate/sqlschema/inspector.go b/migrate/sqlschema/inspector.go index 24c52fcb2..74d791330 100644 --- a/migrate/sqlschema/inspector.go +++ b/migrate/sqlschema/inspector.go @@ -22,14 +22,7 @@ type InspectorDialect interface { // Inspector reads schema state. type Inspector interface { - Inspect(ctx context.Context) (Schema, error) -} - -// Schema is an abstract collection of database objects. -type Schema interface { - GetTables() []Table - // TODO: this probably should be a list so we have keys order and stable query generation - GetForeignKeys() map[ForeignKey]string + Inspect(ctx context.Context) (Database, error) } // inspector is opaque pointer to a databse inspector. @@ -64,7 +57,7 @@ func NewBunModelInspector(tables *schema.Tables) *BunModelInspector { // BunModelSchema is the schema state derived from bun table models. 
type BunModelSchema struct { - DatabaseSchema + BaseDatabase ModelTables map[schema.FQN]*BunTable } @@ -85,9 +78,9 @@ type BunTable struct { Model interface{} } -func (bmi *BunModelInspector) Inspect(ctx context.Context) (Schema, error) { +func (bmi *BunModelInspector) Inspect(ctx context.Context) (Database, error) { state := BunModelSchema{ - DatabaseSchema: DatabaseSchema{ + BaseDatabase: BaseDatabase{ ForeignKeys: make(map[ForeignKey]string), }, ModelTables: make(map[schema.FQN]*BunTable), diff --git a/migrate/sqlschema/table.go b/migrate/sqlschema/table.go index 54877f60b..44667e799 100644 --- a/migrate/sqlschema/table.go +++ b/migrate/sqlschema/table.go @@ -14,7 +14,9 @@ type Table interface { var _ Table = (*BaseTable)(nil) // BaseTable is a base table definition. -// It MUST only be used by dialects to implement the Table interface. +// +// Dialects and only dialects can use it to implement the Table interface. +// Other packages must use the Table interface. type BaseTable struct { Schema string Name string From a08c009c58bf3283ce0d39d4aff04beaeff58417 Mon Sep 17 00:00:00 2001 From: Vladimir Mihailenco Date: Sun, 10 Nov 2024 14:29:23 +0200 Subject: [PATCH 47/55] add orderedmap --- dialect/pgdialect/inspector.go | 15 +- go.mod | 8 +- go.sum | 9 + internal/dbtest/go.mod | 8 +- internal/dbtest/go.sum | 9 + internal/dbtest/inspect_test.go | 449 ++++++++++++++++------------ internal/dbtest/migrate_test.go | 504 ++++++++++++++++++++------------ migrate/diff.go | 54 ++-- migrate/sqlschema/database.go | 13 +- migrate/sqlschema/inspector.go | 26 +- migrate/sqlschema/table.go | 19 +- 11 files changed, 659 insertions(+), 455 deletions(-) diff --git a/dialect/pgdialect/inspector.go b/dialect/pgdialect/inspector.go index 67fbe96d8..d4061a487 100644 --- a/dialect/pgdialect/inspector.go +++ b/dialect/pgdialect/inspector.go @@ -6,7 +6,7 @@ import ( "github.com/uptrace/bun" "github.com/uptrace/bun/migrate/sqlschema" - "github.com/uptrace/bun/schema" + orderedmap 
"github.com/wk8/go-ordered-map/v2" ) type ( @@ -32,7 +32,7 @@ func newInspector(db *bun.DB, excludeTables ...string) *Inspector { func (in *Inspector) Inspect(ctx context.Context) (sqlschema.Database, error) { dbSchema := Schema{ - Tables: make(map[schema.FQN]sqlschema.Table), + Tables: orderedmap.New[string, sqlschema.Table](), ForeignKeys: make(map[sqlschema.ForeignKey]string), } @@ -59,7 +59,7 @@ func (in *Inspector) Inspect(ctx context.Context) (sqlschema.Database, error) { return dbSchema, err } - colDefs := make(map[string]sqlschema.Column) + colDefs := orderedmap.New[string, sqlschema.Column]() uniqueGroups := make(map[string][]string) for _, c := range columns { @@ -70,7 +70,7 @@ func (in *Inspector) Inspect(ctx context.Context) (sqlschema.Database, error) { def = strings.ToLower(def) } - colDefs[c.Name] = &Column{ + colDefs.Set(c.Name, &Column{ Name: c.Name, SQLType: c.DataType, VarcharLen: c.VarcharLen, @@ -78,7 +78,7 @@ func (in *Inspector) Inspect(ctx context.Context) (sqlschema.Database, error) { IsNullable: c.IsNullable, IsAutoIncrement: c.IsSerial, IsIdentity: c.IsIdentity, - } + }) for _, group := range c.UniqueGroups { uniqueGroups[group] = append(uniqueGroups[group], c.Name) @@ -101,14 +101,13 @@ func (in *Inspector) Inspect(ctx context.Context) (sqlschema.Database, error) { } } - fqn := schema.FQN{Schema: table.Schema, Table: table.Name} - dbSchema.Tables[fqn] = &Table{ + dbSchema.Tables.Set(table.Name, &Table{ Schema: table.Schema, Name: table.Name, Columns: colDefs, PrimaryKey: pk, UniqueConstraints: unique, - } + }) } for _, fk := range fks { diff --git a/go.mod b/go.mod index 2359d474e..bfdaff905 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,8 @@ module github.com/uptrace/bun -go 1.22 +go 1.23 + +toolchain go1.23.2 require ( github.com/jinzhu/inflection v1.0.0 @@ -9,10 +11,14 @@ require ( github.com/stretchr/testify v1.8.1 github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc github.com/vmihailenco/msgpack/v5 v5.4.1 + 
github.com/wk8/go-ordered-map/v2 v2.1.9-0.20240816141633-0a40785b4f41 ) require ( + github.com/bahlo/generic-list-go v0.2.0 // indirect + github.com/buger/jsonparser v1.1.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect + github.com/mailru/easyjson v0.7.7 // indirect github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect diff --git a/go.sum b/go.sum index b35837f9f..c08e09e39 100644 --- a/go.sum +++ b/go.sum @@ -1,3 +1,7 @@ +github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk= +github.com/bahlo/generic-list-go v0.2.0/go.mod h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg= +github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= +github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= @@ -5,9 +9,12 @@ github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod 
h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= @@ -37,6 +44,8 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= +github.com/wk8/go-ordered-map/v2 v2.1.9-0.20240816141633-0a40785b4f41 h1:rnB8ZLMeAr3VcqjfRkAm27qb8y6zFKNfuHvy1Gfe7KI= +github.com/wk8/go-ordered-map/v2 v2.1.9-0.20240816141633-0a40785b4f41/go.mod h1:DbzwytT4g/odXquuOCqroKvtxxldI4nb3nuesHF/Exo= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= diff --git a/internal/dbtest/go.mod b/internal/dbtest/go.mod index f205f3fd2..ebe262e80 100644 --- a/internal/dbtest/go.mod +++ b/internal/dbtest/go.mod @@ -1,8 +1,8 @@ module github.com/uptrace/bun/internal/dbtest -go 1.22.0 +go 1.23 -toolchain go1.22.6 +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. 
@@ -42,6 +42,8 @@ require ( ) require ( + github.com/bahlo/generic-list-go v0.2.0 // indirect + github.com/buger/jsonparser v1.1.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/dustin/go-humanize v1.0.1 // indirect github.com/fatih/color v1.18.0 // indirect @@ -56,6 +58,7 @@ require ( github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b // indirect github.com/jackc/pgtype v1.7.0 // indirect github.com/jinzhu/inflection v1.0.0 // indirect + github.com/mailru/easyjson v0.7.7 // indirect github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/mattn/go-sqlite3 v1.14.24 // indirect @@ -67,6 +70,7 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect + github.com/wk8/go-ordered-map/v2 v2.1.9-0.20240816141633-0a40785b4f41 // indirect golang.org/x/crypto v0.28.0 // indirect golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect golang.org/x/mod v0.21.0 // indirect diff --git a/internal/dbtest/go.sum b/internal/dbtest/go.sum index 5dfed193e..7983053d0 100644 --- a/internal/dbtest/go.sum +++ b/internal/dbtest/go.sum @@ -24,6 +24,8 @@ github.com/aryann/difflib v0.0.0-20170710044230-e206f873d14a/go.mod h1:DAHtR1m6l github.com/aws/aws-lambda-go v1.13.3/go.mod h1:4UKl9IzQMoD+QF79YdCuzCwp8VbmG4VAQwij/eHl5CU= github.com/aws/aws-sdk-go v1.27.0/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= github.com/aws/aws-sdk-go-v2 v0.18.0/go.mod h1:JWVYvqSMppoMJC0x5wdwiImzgXTI9FuZwxzkQq9wy+g= +github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk= +github.com/bahlo/generic-list-go v0.2.0/go.mod h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v1.0.0/go.mod 
h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= @@ -32,6 +34,8 @@ github.com/bradleyjkemp/cupaloy v2.3.0+incompatible h1:UafIjBvWQmS9i/xRg+CamMrnL github.com/bradleyjkemp/cupaloy v2.3.0+incompatible/go.mod h1:Au1Xw1sgaJ5iSFktEhYsS0dbQiS1B0/XMXl+42y9Ilk= github.com/brianvoe/gofakeit/v6 v6.4.1 h1:u4lPnxVNr648hEyoIz31A8zrQl5woUQbCgqjAj/n/Y4= github.com/brianvoe/gofakeit/v6 v6.4.1/go.mod h1:palrJUk4Fyw38zIFB/uBZqsgzW5VsNllhHKKwAebzew= +github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= +github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= github.com/casbin/casbin/v2 v2.1.2/go.mod h1:YcPU1XXisHhLzuxH9coDNf2FbKpjGlbCg3n9yuLkIJQ= github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= @@ -202,6 +206,7 @@ github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= @@ -226,6 +231,8 @@ github.com/lib/pq v1.3.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lightstep/lightstep-tracer-common/golang/gogo v0.0.0-20190605223551-bc2310a04743/go.mod 
h1:qklhhLq1aX+mtWk9cPHPzaBjWImj5ULL6C7HFJtXQMM= github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0UBX0ZE6WURAspgAczcDHrL4= github.com/lyft/protoc-gen-validate v0.0.13/go.mod h1:XbGvPuh87YZc5TdIa2/I4pLk0QoUACkjt2znoq26NVQ= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= @@ -371,6 +378,8 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= +github.com/wk8/go-ordered-map/v2 v2.1.9-0.20240816141633-0a40785b4f41 h1:rnB8ZLMeAr3VcqjfRkAm27qb8y6zFKNfuHvy1Gfe7KI= +github.com/wk8/go-ordered-map/v2 v2.1.9-0.20240816141633-0a40785b4f41/go.mod h1:DbzwytT4g/odXquuOCqroKvtxxldI4nb3nuesHF/Exo= github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q= go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= diff --git a/internal/dbtest/inspect_test.go b/internal/dbtest/inspect_test.go index 9f60af30e..7cb0ea8ec 100644 --- a/internal/dbtest/inspect_test.go +++ b/internal/dbtest/inspect_test.go @@ -12,6 +12,7 @@ import ( "github.com/uptrace/bun/dialect/sqltype" "github.com/uptrace/bun/migrate/sqlschema" "github.com/uptrace/bun/schema" + orderedmap "github.com/wk8/go-ordered-map/v2" ) type 
Article struct { @@ -93,144 +94,219 @@ func TestDatabaseInspector_Inspect(t *testing.T) { defaultSchema := db.Dialect().DefaultSchema() // Tables come sorted alphabetically by schema and table. - wantTables := map[schema.FQN]sqlschema.Table{ - {Schema: "admin", Table: "offices"}: &sqlschema.BaseTable{ - Schema: "admin", - Name: "offices", - Columns: map[string]sqlschema.Column{ - "office_name": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - }, - "publisher_id": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - IsNullable: true, - }, - "publisher_name": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - IsNullable: true, - }, + wantTables := orderedmap.New[string, sqlschema.Table](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Table]{ + Key: "offices", + Value: &sqlschema.BaseTable{ + Schema: "admin", + Name: "offices", + Columns: orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Column]{ + Key: "office_name", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "publisher_id", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + IsNullable: true, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "publisher_name", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + IsNullable: true, + }, + }, + )), + PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("office_name")}, }, - PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("office_name")}, }, - {Schema: defaultSchema, Table: "articles"}: &sqlschema.BaseTable{ - Schema: defaultSchema, - Name: "articles", - Columns: map[string]sqlschema.Column{ - "isbn": &sqlschema.BaseColumn{ - SQLType: "bigint", - IsNullable: false, - IsAutoIncrement: false, - IsIdentity: true, - DefaultValue: "", - }, - "editor": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - IsNullable: false, - IsAutoIncrement: false, - IsIdentity: 
false, - DefaultValue: "john doe", - }, - "title": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - IsNullable: false, - IsAutoIncrement: false, - IsIdentity: false, - DefaultValue: "", - }, - "locale": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - VarcharLen: 5, - IsNullable: true, - IsAutoIncrement: false, - IsIdentity: false, - DefaultValue: "en-GB", - }, - "page_count": &sqlschema.BaseColumn{ - SQLType: "smallint", - IsNullable: false, - IsAutoIncrement: false, - IsIdentity: false, - DefaultValue: "1", - }, - "book_count": &sqlschema.BaseColumn{ - SQLType: "integer", - IsNullable: false, - IsAutoIncrement: true, - IsIdentity: false, - DefaultValue: "", - }, - "publisher_id": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - }, - "author_id": &sqlschema.BaseColumn{ - SQLType: "bigint", + orderedmap.Pair[string, sqlschema.Table]{ + Key: "articles", + Value: &sqlschema.BaseTable{ + Schema: defaultSchema, + Name: "articles", + Columns: orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Column]{ + Key: "isbn", + Value: &sqlschema.BaseColumn{ + SQLType: "bigint", + IsNullable: false, + IsAutoIncrement: false, + IsIdentity: true, + DefaultValue: "", + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "editor", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + IsNullable: false, + IsAutoIncrement: false, + IsIdentity: false, + DefaultValue: "john doe", + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "title", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + IsNullable: false, + IsAutoIncrement: false, + IsIdentity: false, + DefaultValue: "", + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "locale", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + VarcharLen: 5, + IsNullable: true, + IsAutoIncrement: false, + IsIdentity: false, + DefaultValue: "en-GB", + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "page_count", + 
Value: &sqlschema.BaseColumn{ + SQLType: "smallint", + IsNullable: false, + IsAutoIncrement: false, + IsIdentity: false, + DefaultValue: "1", + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "book_count", + Value: &sqlschema.BaseColumn{ + SQLType: "integer", + IsNullable: false, + IsAutoIncrement: true, + IsIdentity: false, + DefaultValue: "", + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "publisher_id", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "author_id", + Value: &sqlschema.BaseColumn{ + SQLType: "bigint", + }, + }, + )), + PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("isbn")}, + UniqueConstraints: []sqlschema.Unique{ + {Columns: sqlschema.NewColumns("editor", "title")}, }, }, - PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("isbn")}, - UniqueConstraints: []sqlschema.Unique{ - {Columns: sqlschema.NewColumns("editor", "title")}, - }, }, - {Schema: defaultSchema, Table: "authors"}: &sqlschema.BaseTable{ - Schema: defaultSchema, - Name: "authors", - Columns: map[string]sqlschema.Column{ - "author_id": &sqlschema.BaseColumn{ - SQLType: "bigint", - IsIdentity: true, - }, - "first_name": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - }, - "last_name": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - }, - "email": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, + orderedmap.Pair[string, sqlschema.Table]{ + Key: "authors", + Value: &sqlschema.BaseTable{ + Schema: defaultSchema, + Name: "authors", + Columns: orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Column]{ + Key: "author_id", + Value: &sqlschema.BaseColumn{ + SQLType: "bigint", + IsIdentity: true, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "first_name", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: 
"last_name", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "email", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + }, + }, + )), + PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("author_id")}, + UniqueConstraints: []sqlschema.Unique{ + {Columns: sqlschema.NewColumns("first_name", "last_name")}, + {Columns: sqlschema.NewColumns("email")}, }, }, - PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("author_id")}, - UniqueConstraints: []sqlschema.Unique{ - {Columns: sqlschema.NewColumns("first_name", "last_name")}, - {Columns: sqlschema.NewColumns("email")}, - }, }, - {Schema: defaultSchema, Table: "publisher_to_journalists"}: &sqlschema.BaseTable{ - Schema: defaultSchema, - Name: "publisher_to_journalists", - Columns: map[string]sqlschema.Column{ - "publisher_id": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - }, - "author_id": &sqlschema.BaseColumn{ - SQLType: "bigint", - }, + orderedmap.Pair[string, sqlschema.Table]{ + Key: "publisher_to_journalists", + Value: &sqlschema.BaseTable{ + Schema: defaultSchema, + Name: "publisher_to_journalists", + Columns: orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Column]{ + Key: "publisher_id", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "author_id", + Value: &sqlschema.BaseColumn{ + SQLType: "bigint", + }, + }, + )), + PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("publisher_id", "author_id")}, }, - PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("publisher_id", "author_id")}, }, - {Schema: defaultSchema, Table: "publishers"}: &sqlschema.BaseTable{ - Schema: defaultSchema, - Name: "publishers", - Columns: map[string]sqlschema.Column{ - "publisher_id": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - DefaultValue: "gen_random_uuid()", 
- }, - "publisher_name": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - }, - "created_at": &sqlschema.BaseColumn{ - SQLType: "timestamp", - DefaultValue: "current_timestamp", - IsNullable: true, + orderedmap.Pair[string, sqlschema.Table]{ + Key: "publishers", + Value: &sqlschema.BaseTable{ + Schema: defaultSchema, + Name: "publishers", + Columns: orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Column]{ + Key: "publisher_id", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + DefaultValue: "gen_random_uuid()", + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "publisher_name", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "created_at", + Value: &sqlschema.BaseColumn{ + SQLType: "timestamp", + DefaultValue: "current_timestamp", + IsNullable: true, + }, + }, + )), + PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("publisher_id")}, + UniqueConstraints: []sqlschema.Unique{ + {Columns: sqlschema.NewColumns("publisher_id", "publisher_name")}, }, }, - PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("publisher_id")}, - UniqueConstraints: []sqlschema.Unique{ - {Columns: sqlschema.NewColumns("publisher_id", "publisher_name")}, - }, }, - } + )) wantFKs := []sqlschema.ForeignKey{ { @@ -294,25 +370,20 @@ func mustCreateSchema(tb testing.TB, ctx context.Context, db *bun.DB, schema str // cmpTables compares table schemas using dialect-specific equivalence checks for column types // and reports the differences as t.Error(). func cmpTables( - tb testing.TB, d sqlschema.InspectorDialect, want, got map[schema.FQN]sqlschema.Table, + tb testing.TB, + d sqlschema.InspectorDialect, + want, got *orderedmap.OrderedMap[string, sqlschema.Table], ) { tb.Helper() require.ElementsMatch(tb, tableNames(want), tableNames(got), "different set of tables") // Now we are guaranteed to have the same tables. 
- for _, wantTable := range want { - // TODO(dyma): this will be simplified by map[string]Table - var gt sqlschema.Table - for i := range got { - if got[i].GetName() == wantTable.GetName() { - gt = got[i] - break - } - } - - cmpColumns(tb, d, wantTable.GetName(), wantTable.(*sqlschema.BaseTable).Columns, gt.(*sqlschema.BaseTable).Columns) - cmpConstraints(tb, wantTable.(*sqlschema.BaseTable), gt.(*sqlschema.BaseTable)) + for tableName, wantTable := range want.FromOldest() { + gotTable, ok := got.Get(tableName) + require.True(tb, ok) + cmpColumns(tb, d, wantTable.GetName(), wantTable.GetColumns(), gotTable.GetColumns()) + cmpConstraints(tb, wantTable.(*sqlschema.BaseTable), gotTable.(*sqlschema.BaseTable)) } } @@ -321,18 +392,18 @@ func cmpColumns( tb testing.TB, d sqlschema.InspectorDialect, tableName string, - want, got map[string]sqlschema.Column, + want, got *orderedmap.OrderedMap[string, sqlschema.Column], ) { tb.Helper() var errs []string var missing []string - for colName, wantCol := range want { + for colName, wantCol := range want.FromOldest() { errorf := func(format string, args ...interface{}) { errs = append(errs, fmt.Sprintf("[%s.%s] "+format, append([]interface{}{tableName, colName}, args...)...)) } wantCol := wantCol.(*sqlschema.BaseColumn) - gotCol, ok := got[colName].(*sqlschema.BaseColumn) + gotCol, ok := got.Value(colName).(*sqlschema.BaseColumn) if !ok { missing = append(missing, colName) continue @@ -364,8 +435,8 @@ func cmpColumns( } var extra []string - for colName := range got { - if _, ok := want[colName]; !ok { + for colName := range got.FromOldest() { + if _, ok := want.Get(colName); !ok { extra = append(extra, colName) } } @@ -400,9 +471,9 @@ func cmpConstraints(tb testing.TB, want, got *sqlschema.BaseTable) { require.ElementsMatch(tb, stripNames(want.UniqueConstraints), stripNames(got.UniqueConstraints), "table %q does not have expected unique constraints (listA=want, listB=got)", want.Name) } -func tableNames(tables 
map[schema.FQN]sqlschema.Table) (names []string) { - for fqn := range tables { - names = append(names, fqn.Table) +func tableNames(tables *orderedmap.OrderedMap[string, sqlschema.Table]) (names []string) { + for name := range tables.FromOldest() { + names = append(names, name) } return } @@ -430,24 +501,30 @@ func TestBunModelInspector_Inspect(t *testing.T) { tables.Register((*Model)(nil)) inspector := sqlschema.NewBunModelInspector(tables) - want := map[string]sqlschema.Column{ - "id": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - DefaultValue: "random()", + want := orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Column]{ + Key: "id", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + DefaultValue: "random()", + }, }, - "name": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - DefaultValue: "'John Doe'", + orderedmap.Pair[string, sqlschema.Column]{ + Key: "name", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + DefaultValue: "'John Doe'", + }, }, - } + )) got, err := inspector.Inspect(context.Background()) require.NoError(t, err) - gotTables := got.(sqlschema.BunModelSchema).ModelTables - require.Len(t, gotTables, 1) - for _, table := range gotTables { - cmpColumns(t, dialect.(sqlschema.InspectorDialect), "model", want, table.Columns) + gotTables := got.GetTables() + require.Equal(t, 1, gotTables.Len()) + for _, table := range gotTables.FromOldest() { + cmpColumns(t, dialect.(sqlschema.InspectorDialect), "model", want, table.GetColumns()) return } }) @@ -463,27 +540,36 @@ func TestBunModelInspector_Inspect(t *testing.T) { tables.Register((*Model)(nil)) inspector := sqlschema.NewBunModelInspector(tables) - want := map[string]sqlschema.Column{ - "id": &sqlschema.BaseColumn{ - SQLType: "text", + want := orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Column]{ + Key: "id", + Value: &sqlschema.BaseColumn{ + SQLType: "text", 
+ }, }, - "first_name": &sqlschema.BaseColumn{ - SQLType: "character varying", - VarcharLen: 60, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "first_name", + Value: &sqlschema.BaseColumn{ + SQLType: "character varying", + VarcharLen: 60, + }, }, - "last_name": &sqlschema.BaseColumn{ - SQLType: "varchar", - VarcharLen: 100, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "last_name", + Value: &sqlschema.BaseColumn{ + SQLType: "varchar", + VarcharLen: 100, + }, }, - } + )) got, err := inspector.Inspect(context.Background()) require.NoError(t, err) - gotTables := got.(sqlschema.BunModelSchema).ModelTables - require.Len(t, gotTables, 1) - for _, table := range gotTables { - cmpColumns(t, dialect.(sqlschema.InspectorDialect), "model", want, table.Columns) + gotTables := got.GetTables() + require.Equal(t, 1, gotTables.Len()) + for _, table := range gotTables.FromOldest() { + cmpColumns(t, dialect.(sqlschema.InspectorDialect), "model", want, table.GetColumns()) } }) @@ -509,10 +595,10 @@ func TestBunModelInspector_Inspect(t *testing.T) { got, err := inspector.Inspect(context.Background()) require.NoError(t, err) - gotTables := got.(sqlschema.BunModelSchema).ModelTables - require.Len(t, gotTables, 1) - for _, table := range gotTables { - cmpConstraints(t, want, &table.BaseTable) + gotTables := got.GetTables() + require.Equal(t, 1, gotTables.Len()) + for _, table := range gotTables.FromOldest() { + cmpConstraints(t, want, &table.(*sqlschema.BunTable).BaseTable) return } }) @@ -531,11 +617,12 @@ func TestBunModelInspector_Inspect(t *testing.T) { got, err := inspector.Inspect(context.Background()) require.NoError(t, err) - gotTables := got.(sqlschema.BunModelSchema).ModelTables - require.Len(t, gotTables, 1) - for _, table := range gotTables { - require.NotNilf(t, table.PrimaryKey, "did not register primary key, want (%s)", want) - require.Equal(t, want, table.PrimaryKey.Columns, "wrong primary key columns") + gotTables := got.GetTables() + require.Equal(t, 1, 
gotTables.Len()) + for _, table := range gotTables.FromOldest() { + pk := table.GetPrimaryKey() + require.NotNilf(t, pk, "did not register primary key, want (%s)", want) + require.Equal(t, want, pk.Columns, "wrong primary key columns") return } }) diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index 2b45a0268..9ceca55a6 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -14,7 +14,7 @@ import ( "github.com/uptrace/bun/dialect/sqltype" "github.com/uptrace/bun/migrate" "github.com/uptrace/bun/migrate/sqlschema" - "github.com/uptrace/bun/schema" + orderedmap "github.com/wk8/go-ordered-map/v2" ) const ( @@ -370,8 +370,9 @@ func testRenameTable(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) tables := state.Tables - require.Len(t, tables, 1) - require.Contains(t, tables, schema.FQN{Schema: db.Dialect().DefaultSchema(), Table: "changed"}) + require.Equal(t, 1, tables.Len()) + _, found := tables.Get("changed") + require.True(t, found) } func testCreateDropTable(t *testing.T, db *bun.DB) { @@ -399,8 +400,9 @@ func testCreateDropTable(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) tables := state.Tables - require.Len(t, tables, 1) - require.Contains(t, tables, schema.FQN{Schema: db.Dialect().DefaultSchema(), Table: "createme"}) + require.Equal(t, 1, tables.Len()) + _, found := tables.Get("createme") + require.True(t, found) } func testAlterForeignKeys(t *testing.T, db *bun.DB) { @@ -524,10 +526,10 @@ func testRenamedColumns(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - require.Len(t, state.Tables, 2) + require.Equal(t, 2, state.Tables.Len()) var renamed, model2 sqlschema.Table - for _, tbl := range state.Tables { + for _, tbl := range state.Tables.FromOldest() { switch tbl.GetName() { case "renamed": renamed = tbl @@ -536,9 +538,9 @@ func testRenamedColumns(t *testing.T, db *bun.DB) { } } - require.Contains(t, renamed.GetColumns(), "count") - require.Contains(t, 
model2.GetColumns(), "second_column") - require.Contains(t, model2.GetColumns(), "do_not_rename") + require.NotNil(t, renamed.GetColumns().Value("count")) + require.NotNil(t, model2.GetColumns().Value("second_column")) + require.NotNil(t, model2.GetColumns().Value("do_not_rename")) } // testChangeColumnType_AutoCast checks type changes which can be type-casted automatically, @@ -568,46 +570,70 @@ func testChangeColumnType_AutoCast(t *testing.T, db *bun.DB) { // ManyValues []string `bun:",array"` // did not change } - wantTables := map[schema.FQN]sqlschema.Table{ - {Schema: db.Dialect().DefaultSchema(), Table: "change_me_own_type"}: &sqlschema.BaseTable{ - Schema: db.Dialect().DefaultSchema(), - Name: "change_me_own_type", - Columns: map[string]sqlschema.Column{ - "bigger_int": &sqlschema.BaseColumn{ - SQLType: "bigint", - IsIdentity: true, - }, - "ts": &sqlschema.BaseColumn{ - SQLType: "timestamp", // FIXME(dyma): convert "timestamp with time zone" to sqltype.Timestamp - DefaultValue: "current_timestamp", // FIXME(dyma): Convert driver-specific value to common "expressions" (e.g. CURRENT_TIMESTAMP == current_timestamp) OR lowercase all types. 
- IsNullable: true, - }, - "default_expr": &sqlschema.BaseColumn{ - SQLType: "varchar", - IsNullable: true, - DefaultValue: "random()", - }, - "empty_default": &sqlschema.BaseColumn{ - SQLType: "varchar", - IsNullable: true, - DefaultValue: "", // NOT "''" - }, - "not_null": &sqlschema.BaseColumn{ - SQLType: "varchar", - IsNullable: false, - }, - "type_override": &sqlschema.BaseColumn{ - SQLType: "varchar", - IsNullable: true, - VarcharLen: 200, - }, - // "many_values": { - // SQLType: "array", - // }, + wantTables := orderedmap.New[string, sqlschema.Table](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Table]{ + Key: "change_me_own_type", + Value: &sqlschema.BaseTable{ + Schema: db.Dialect().DefaultSchema(), + Name: "change_me_own_type", + Columns: orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Column]{ + Key: "bigger_int", + Value: &sqlschema.BaseColumn{ + SQLType: "bigint", + IsIdentity: true, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "ts", + Value: &sqlschema.BaseColumn{ + SQLType: "timestamp", // FIXME(dyma): convert "timestamp with time zone" to sqltype.Timestamp + DefaultValue: "current_timestamp", // FIXME(dyma): Convert driver-specific value to common "expressions" (e.g. CURRENT_TIMESTAMP == current_timestamp) OR lowercase all types. 
+ IsNullable: true, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "default_expr", + Value: &sqlschema.BaseColumn{ + SQLType: "varchar", + IsNullable: true, + DefaultValue: "random()", + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "empty_default", + Value: &sqlschema.BaseColumn{ + SQLType: "varchar", + IsNullable: true, + DefaultValue: "", // NOT "''" + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "not_null", + Value: &sqlschema.BaseColumn{ + SQLType: "varchar", + IsNullable: false, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "type_override", + Value: &sqlschema.BaseColumn{ + SQLType: "varchar", + IsNullable: true, + VarcharLen: 200, + }, + }, + // orderedmap.Pair[string, sqlschema.Column]{ + // Key: "many_values", + // Value: &sqlschema.BaseColumn{ + // SQLType: "array", + // }, + // }, + )), + PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("bigger_int")}, }, - PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("bigger_int")}, }, - } + )) ctx := context.Background() inspect := inspectDbOrSkip(t, db) @@ -619,7 +645,7 @@ func testChangeColumnType_AutoCast(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.GetTables()) } func testIdentity(t *testing.T, db *bun.DB) { @@ -635,22 +661,31 @@ func testIdentity(t *testing.T, db *bun.DB) { B int64 `bun:",notnull,identity"` } - wantTables := map[schema.FQN]sqlschema.Table{ - {Schema: db.Dialect().DefaultSchema(), Table: "bourne_identity"}: &sqlschema.BaseTable{ - Schema: db.Dialect().DefaultSchema(), - Name: "bourne_identity", - Columns: map[string]sqlschema.Column{ - "a": &sqlschema.BaseColumn{ - SQLType: sqltype.BigInt, - IsIdentity: false, // <- drop IDENTITY - }, - "b": &sqlschema.BaseColumn{ - SQLType: sqltype.BigInt, - IsIdentity: true, // <- add IDENTITY - }, 
+ wantTables := orderedmap.New[string, sqlschema.Table](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Table]{ + Key: "bourne_identity", + Value: &sqlschema.BaseTable{ + Schema: db.Dialect().DefaultSchema(), + Name: "bourne_identity", + Columns: orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Column]{ + Key: "a", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.BigInt, + IsIdentity: false, // <- drop IDENTITY + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "b", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.BigInt, + IsIdentity: true, // <- add IDENTITY + }, + }, + )), }, }, - } + )) ctx := context.Background() inspect := inspectDbOrSkip(t, db) @@ -662,7 +697,7 @@ func testIdentity(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.GetTables()) } func testAddDropColumn(t *testing.T, db *bun.DB) { @@ -678,22 +713,31 @@ func testAddDropColumn(t *testing.T, db *bun.DB) { AddMe bool `bun:"addme"` } - wantTables := map[schema.FQN]sqlschema.Table{ - {Schema: db.Dialect().DefaultSchema(), Table: "column_madness"}: &sqlschema.BaseTable{ - Schema: db.Dialect().DefaultSchema(), - Name: "column_madness", - Columns: map[string]sqlschema.Column{ - "do_not_touch": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - IsNullable: true, - }, - "addme": &sqlschema.BaseColumn{ - SQLType: sqltype.Boolean, - IsNullable: true, - }, + wantTables := orderedmap.New[string, sqlschema.Table](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Table]{ + Key: "column_madness", + Value: &sqlschema.BaseTable{ + Schema: db.Dialect().DefaultSchema(), + Name: "column_madness", + Columns: orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Column]{ + Key: "do_not_touch", + 
Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + IsNullable: true, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "addme", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.Boolean, + IsNullable: true, + }, + }, + )), }, }, - } + )) ctx := context.Background() inspect := inspectDbOrSkip(t, db) @@ -705,7 +749,7 @@ func testAddDropColumn(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.GetTables()) } func testUnique(t *testing.T, db *bun.DB) { @@ -731,48 +775,72 @@ func testUnique(t *testing.T, db *bun.DB) { PetBreed string `bun:"pet_breed"` // shrink "pet" unique group } - wantTables := map[schema.FQN]sqlschema.Table{ - {Schema: db.Dialect().DefaultSchema(), Table: "uniqlo_stores"}: &sqlschema.BaseTable{ - Schema: db.Dialect().DefaultSchema(), - Name: "uniqlo_stores", - Columns: map[string]sqlschema.Column{ - "first_name": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - IsNullable: true, - }, - "middle_name": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - IsNullable: true, - }, - "last_name": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - IsNullable: true, - }, - "birthday": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - IsNullable: true, - }, - "email": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - IsNullable: true, + wantTables := orderedmap.New[string, sqlschema.Table](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Table]{ + Key: "uniqlo_stores", + Value: &sqlschema.BaseTable{ + Schema: db.Dialect().DefaultSchema(), + Name: "uniqlo_stores", + Columns: orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Column]{ + Key: "first_name", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + IsNullable: true, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: 
"middle_name", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + IsNullable: true, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "last_name", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + IsNullable: true, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "birthday", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + IsNullable: true, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "email", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + IsNullable: true, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "pet_name", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + IsNullable: true, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "pet_breed", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + IsNullable: true, + }, + }, + )), + UniqueConstraints: []sqlschema.Unique{ + {Columns: sqlschema.NewColumns("email")}, + {Columns: sqlschema.NewColumns("pet_name")}, + // We can only be sure of the user-defined index name + {Name: "full_name", Columns: sqlschema.NewColumns("first_name", "middle_name", "last_name")}, }, - "pet_name": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - IsNullable: true, - }, - "pet_breed": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - IsNullable: true, - }, - }, - UniqueConstraints: []sqlschema.Unique{ - {Columns: sqlschema.NewColumns("email")}, - {Columns: sqlschema.NewColumns("pet_name")}, - // We can only be sure of the user-defined index name - {Name: "full_name", Columns: sqlschema.NewColumns("first_name", "middle_name", "last_name")}, }, }, - } + )) ctx := context.Background() inspect := inspectDbOrSkip(t, db) @@ -784,7 +852,7 @@ func testUnique(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.Tables) + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, state.GetTables()) } func 
testUniqueRenamedTable(t *testing.T, db *bun.DB) { @@ -809,39 +877,57 @@ func testUniqueRenamedTable(t *testing.T, db *bun.DB) { PetBreed string `bun:"pet_breed,unique"` } - wantTables := map[schema.FQN]sqlschema.Table{ - {Schema: db.Dialect().DefaultSchema(), Table: "after"}: &sqlschema.BaseTable{ - Schema: db.Dialect().DefaultSchema(), - Name: "after", - Columns: map[string]sqlschema.Column{ - "first_name": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - IsNullable: true, + wantTables := orderedmap.New[string, sqlschema.Table](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Table]{ + Key: "after", + Value: &sqlschema.BaseTable{ + Schema: db.Dialect().DefaultSchema(), + Name: "after", + Columns: orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Column]{ + Key: "first_name", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + IsNullable: true, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "last_name", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + IsNullable: true, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "birthday", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + IsNullable: true, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "pet_name", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + IsNullable: true, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "pet_breed", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + IsNullable: true, + }, + }, + )), + UniqueConstraints: []sqlschema.Unique{ + {Columns: sqlschema.NewColumns("pet_name")}, + {Columns: sqlschema.NewColumns("pet_breed")}, + {Name: "full_name", Columns: sqlschema.NewColumns("first_name", "last_name", "birthday")}, }, - "last_name": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - IsNullable: true, - }, - "birthday": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - IsNullable: true, - }, - 
"pet_name": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - IsNullable: true, - }, - "pet_breed": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - IsNullable: true, - }, - }, - UniqueConstraints: []sqlschema.Unique{ - {Columns: sqlschema.NewColumns("pet_name")}, - {Columns: sqlschema.NewColumns("pet_breed")}, - {Name: "full_name", Columns: sqlschema.NewColumns("first_name", "last_name", "birthday")}, }, }, - } + )) ctx := context.Background() inspect := inspectDbOrSkip(t, db) @@ -904,57 +990,87 @@ func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { LastName string `bun:"last_name,pk"` } - wantTables := map[schema.FQN]sqlschema.Table{ - {Schema: db.Dialect().DefaultSchema(), Table: "drop_your_pks"}: &sqlschema.BaseTable{ - Schema: db.Dialect().DefaultSchema(), - Name: "drop_your_pks", - Columns: map[string]sqlschema.Column{ - "first_name": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - IsNullable: false, - }, - "last_name": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - IsNullable: false, - }, + wantTables := orderedmap.New[string, sqlschema.Table](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Table]{ + Key: "drop_your_pks", + Value: &sqlschema.BaseTable{ + Schema: db.Dialect().DefaultSchema(), + Name: "drop_your_pks", + Columns: orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Column]{ + Key: "first_name", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + IsNullable: false, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "last_name", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + IsNullable: false, + }, + }, + )), }, }, - {Schema: db.Dialect().DefaultSchema(), Table: "add_new_pk"}: &sqlschema.BaseTable{ - Schema: db.Dialect().DefaultSchema(), - Name: "add_new_pk", - Columns: map[string]sqlschema.Column{ - "new_id": &sqlschema.BaseColumn{ - SQLType: sqltype.BigInt, - IsNullable: false, - IsIdentity: true, - }, - "first_name": 
&sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - IsNullable: true, - }, - "last_name": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - IsNullable: true, - }, + orderedmap.Pair[string, sqlschema.Table]{ + Key: "add_new_pk", + Value: &sqlschema.BaseTable{ + Schema: db.Dialect().DefaultSchema(), + Name: "add_new_pk", + Columns: orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Column]{ + Key: "new_id", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.BigInt, + IsNullable: false, + IsIdentity: true, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "first_name", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + IsNullable: true, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "last_name", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + IsNullable: true, + }, + }, + )), + PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("new_id")}, }, - PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("new_id")}, }, - {Schema: db.Dialect().DefaultSchema(), Table: "change_pk"}: &sqlschema.BaseTable{ - Schema: db.Dialect().DefaultSchema(), - Name: "change_pk", - Columns: map[string]sqlschema.Column{ - "first_name": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - IsNullable: false, - }, - "last_name": &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - IsNullable: false, - }, + orderedmap.Pair[string, sqlschema.Table]{ + Key: "change_pk", + Value: &sqlschema.BaseTable{ + Schema: db.Dialect().DefaultSchema(), + Name: "change_pk", + Columns: orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Column]{ + Key: "first_name", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + IsNullable: false, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "last_name", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + IsNullable: false, + }, + }, + )), + PrimaryKey: 
&sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("first_name", "last_name")}, }, - PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("first_name", "last_name")}, }, - } + )) ctx := context.Background() inspect := inspectDbOrSkip(t, db) diff --git a/migrate/diff.go b/migrate/diff.go index b9149b303..5c93a33e0 100644 --- a/migrate/diff.go +++ b/migrate/diff.go @@ -23,24 +23,24 @@ func diff(got, want sqlschema.Database, opts ...diffOption) *changeset { } func (d *detector) detectChanges() *changeset { - currentTables := d.mapNameToTable(d.current) - targetTables := d.mapNameToTable(d.target) + currentTables := d.current.GetTables() + targetTables := d.target.GetTables() RenameCreate: - for wantName, wantTable := range targetTables { + for wantName, wantTable := range targetTables.FromOldest() { // A table with this name exists in the database. We assume that schema objects won't // be renamed to an already existing name, nor do we support such cases. // Simply check if the table definition has changed. - if haveTable, ok := currentTables[wantName]; ok { + if haveTable, ok := currentTables.Get(wantName); ok { d.detectColumnChanges(haveTable, wantTable, true) d.detectConstraintChanges(haveTable, wantTable) continue } // Find all renamed tables. We assume that renamed tables have the same signature. - for haveName, haveTable := range currentTables { - if _, exists := targetTables[haveName]; !exists && d.canRename(haveTable, wantTable) { + for haveName, haveTable := range currentTables.FromOldest() { + if _, exists := targetTables.Get(haveName); !exists && d.canRename(haveTable, wantTable) { d.changes.Add(&RenameTableOp{ FQN: haveTable.GetFQN(), NewName: wantName, @@ -51,7 +51,7 @@ RenameCreate: // We need not check wantTable any further. 
d.detectColumnChanges(haveTable, wantTable, false) d.detectConstraintChanges(haveTable, wantTable) - delete(currentTables, haveName) + currentTables.Delete(haveName) continue RenameCreate } } @@ -66,8 +66,8 @@ RenameCreate: } // Drop any remaining "current" tables which do not have a model. - for name, table := range currentTables { - if _, keep := targetTables[name]; !keep { + for name, table := range currentTables.FromOldest() { + if _, keep := targetTables.Get(name); !keep { d.changes.Add(&DropTableOp{ FQN: table.GetFQN(), }) @@ -100,16 +100,16 @@ RenameCreate: // detechColumnChanges finds renamed columns and, if checkType == true, columns with changed type. func (d *detector) detectColumnChanges(current, target sqlschema.Table, checkType bool) { - currentColumns := d.mapNameToColumn(current) - targetColumns := d.mapNameToColumn(target) + currentColumns := current.GetColumns() + targetColumns := target.GetColumns() ChangeRename: - for tName, tCol := range targetColumns { + for tName, tCol := range targetColumns.FromOldest() { // This column exists in the database, so it hasn't been renamed, dropped, or added. // Still, we should not delete(columns, thisColumn), because later we will need to // check that we do not try to rename a column to an already a name that already exists. - if cCol, ok := currentColumns[tName]; ok { + if cCol, ok := currentColumns.Get(tName); ok { if checkType && !d.equalColumns(cCol, tCol) { d.changes.Add(&ChangeColumnTypeOp{ FQN: target.GetFQN(), @@ -123,9 +123,9 @@ ChangeRename: // Column tName does not exist in the database -- it's been either renamed or added. // Find renamed columns first. - for cName, cCol := range currentColumns { + for cName, cCol := range currentColumns.FromOldest() { // Cannot rename if a column with this name already exists or the types differ. 
- if _, exists := targetColumns[cName]; exists || !d.equalColumns(tCol, cCol) { + if _, exists := targetColumns.Get(cName); exists || !d.equalColumns(tCol, cCol) { continue } d.changes.Add(&RenameColumnOp{ @@ -134,7 +134,7 @@ ChangeRename: NewName: tName, }) d.refMap.RenameColumn(target.GetFQN(), cName, tName) - delete(currentColumns, cName) // no need to check this column again + currentColumns.Delete(cName) // no need to check this column again // Update primary key definition to avoid superficially recreating the constraint. current.GetPrimaryKey().Columns.Replace(cName, tName) @@ -150,8 +150,8 @@ ChangeRename: } // Drop columns which do not exist in the target schema and were not renamed. - for cName, cCol := range currentColumns { - if _, keep := targetColumns[cName]; !keep { + for cName, cCol := range currentColumns.FromOldest() { + if _, keep := targetColumns.Get(cName); !keep { d.changes.Add(&DropColumnOp{ FQN: target.GetFQN(), Column: cName, @@ -295,22 +295,6 @@ func (d detector) makeTargetColDef(current, target sqlschema.Column) sqlschema.C return target } -func (d *detector) mapNameToTable(s sqlschema.Database) map[string]sqlschema.Table { - m := make(map[string]sqlschema.Table) - for _, t := range s.GetTables() { - m[t.GetName()] = t - } - return m -} - -func (d *detector) mapNameToColumn(t sqlschema.Table) map[string]sqlschema.Column { - m := make(map[string]sqlschema.Column) - for _, c := range t.GetColumns() { - m[c.GetName()] = c - } - return m -} - type TypeEquivalenceFunc func(sqlschema.Column, sqlschema.Column) bool // equalSignatures determines if two tables have the same "signature". @@ -342,7 +326,7 @@ func newSignature(t sqlschema.Table, eq TypeEquivalenceFunc) signature { // scan iterates over table's field and counts occurrences of each unique column definition. 
func (s *signature) scan(t sqlschema.Table) { - for _, icol := range t.GetColumns() { + for _, icol := range t.GetColumns().FromOldest() { scanCol := icol.(*sqlschema.BaseColumn) // This is slightly more expensive than if the columns could be compared directly // and we always did s.underlying[col]++, but we get type-equivalence in return. diff --git a/migrate/sqlschema/database.go b/migrate/sqlschema/database.go index 66bdff19e..9800306db 100644 --- a/migrate/sqlschema/database.go +++ b/migrate/sqlschema/database.go @@ -5,10 +5,11 @@ import ( "strings" "github.com/uptrace/bun/schema" + orderedmap "github.com/wk8/go-ordered-map/v2" ) type Database interface { - GetTables() []Table + GetTables() *orderedmap.OrderedMap[string, Table] GetForeignKeys() map[ForeignKey]string } @@ -19,16 +20,12 @@ var _ Database = (*BaseDatabase)(nil) // Dialects and only dialects can use it to implement the Database interface. // Other packages must use the Database interface. type BaseDatabase struct { - Tables map[schema.FQN]Table + Tables *orderedmap.OrderedMap[string, Table] ForeignKeys map[ForeignKey]string } -func (ds BaseDatabase) GetTables() []Table { - var tables []Table - for i := range ds.Tables { - tables = append(tables, ds.Tables[i]) - } - return tables +func (ds BaseDatabase) GetTables() *orderedmap.OrderedMap[string, Table] { + return ds.Tables } func (ds BaseDatabase) GetForeignKeys() map[ForeignKey]string { diff --git a/migrate/sqlschema/inspector.go b/migrate/sqlschema/inspector.go index 74d791330..087a7f9f9 100644 --- a/migrate/sqlschema/inspector.go +++ b/migrate/sqlschema/inspector.go @@ -8,6 +8,7 @@ import ( "github.com/uptrace/bun" "github.com/uptrace/bun/schema" + orderedmap "github.com/wk8/go-ordered-map/v2" ) type InspectorDialect interface { @@ -59,15 +60,11 @@ func NewBunModelInspector(tables *schema.Tables) *BunModelInspector { type BunModelSchema struct { BaseDatabase - ModelTables map[schema.FQN]*BunTable + Tables *orderedmap.OrderedMap[string, Table] } 
-func (ms BunModelSchema) GetTables() []Table { - var tables []Table - for _, t := range ms.ModelTables { - tables = append(tables, t) - } - return tables +func (ms BunModelSchema) GetTables() *orderedmap.OrderedMap[string, Table] { + return ms.Tables } // BunTable provides additional table metadata that is only accessible from scanning bun models. @@ -83,17 +80,17 @@ func (bmi *BunModelInspector) Inspect(ctx context.Context) (Database, error) { BaseDatabase: BaseDatabase{ ForeignKeys: make(map[ForeignKey]string), }, - ModelTables: make(map[schema.FQN]*BunTable), + Tables: orderedmap.New[string, Table](), } for _, t := range bmi.tables.All() { - columns := make(map[string]Column) + columns := orderedmap.New[string, Column]() for _, f := range t.Fields { sqlType, length, err := parseLen(f.CreateTableSQLType) if err != nil { - return state, fmt.Errorf("parse length in %q: %w", f.CreateTableSQLType, err) + return nil, fmt.Errorf("parse length in %q: %w", f.CreateTableSQLType, err) } - columns[f.Name] = &BaseColumn{ + columns.Set(f.Name, &BaseColumn{ Name: f.Name, SQLType: strings.ToLower(sqlType), // TODO(dyma): maybe this is not necessary after Column.Eq() VarcharLen: length, @@ -101,7 +98,7 @@ func (bmi *BunModelInspector) Inspect(ctx context.Context) (Database, error) { IsNullable: !f.NotNull, IsAutoIncrement: f.AutoIncrement, IsIdentity: f.Identity, - } + }) } var unique []Unique @@ -132,8 +129,7 @@ func (bmi *BunModelInspector) Inspect(ctx context.Context) (Database, error) { pk = &PrimaryKey{Columns: NewColumns(columns...)} } - fqn := schema.FQN{Schema: t.Schema, Table: t.Name} - state.ModelTables[fqn] = &BunTable{ + state.Tables.Set(t.Name, &BunTable{ BaseTable: BaseTable{ Schema: t.Schema, Name: t.Name, @@ -142,7 +138,7 @@ func (bmi *BunModelInspector) Inspect(ctx context.Context) (Database, error) { PrimaryKey: pk, }, Model: t.ZeroIface, - } + }) for _, rel := range t.Relations { // These relations are nominal and do not need a foreign key to be declared in 
the current table. diff --git a/migrate/sqlschema/table.go b/migrate/sqlschema/table.go index 44667e799..e5e71479c 100644 --- a/migrate/sqlschema/table.go +++ b/migrate/sqlschema/table.go @@ -1,11 +1,14 @@ package sqlschema -import "github.com/uptrace/bun/schema" +import ( + "github.com/uptrace/bun/schema" + orderedmap "github.com/wk8/go-ordered-map/v2" +) type Table interface { GetSchema() string GetName() string - GetColumns() []Column + GetColumns() *orderedmap.OrderedMap[string, Column] GetPrimaryKey() *PrimaryKey GetUniqueConstraints() []Unique GetFQN() schema.FQN @@ -22,8 +25,7 @@ type BaseTable struct { Name string // ColumnDefinitions map each column name to the column definition. - // TODO: this must be an ordered map so the order of columns is preserved - Columns map[string]Column + Columns *orderedmap.OrderedMap[string, Column] // PrimaryKey holds the primary key definition. // A nil value means that no primary key is defined for the table. @@ -47,13 +49,8 @@ func (td *BaseTable) GetName() string { return td.Name } -func (td *BaseTable) GetColumns() []Column { - var columns []Column - // FIXME: columns will be returned in a random order - for colName := range td.Columns { - columns = append(columns, td.Columns[colName]) - } - return columns +func (td *BaseTable) GetColumns() *orderedmap.OrderedMap[string, Column] { + return td.Columns } func (td *BaseTable) GetPrimaryKey() *PrimaryKey { From 9abdfca2ce33c66e1ae3ddb69808a989dd3e41e0 Mon Sep 17 00:00:00 2001 From: Vladimir Mihailenco Date: Sun, 10 Nov 2024 14:33:27 +0200 Subject: [PATCH 48/55] remove go 1.22 --- .github/workflows/build.yml | 2 +- dbfixture/go.mod | 6 +- dbfixture/go.sum | 4 +- dialect/mssqldialect/go.mod | 8 +- dialect/mssqldialect/go.sum | 8 +- dialect/mysqldialect/go.mod | 8 +- dialect/mysqldialect/go.sum | 8 +- dialect/pgdialect/go.mod | 10 ++- dialect/pgdialect/go.sum | 13 ++- dialect/sqlitedialect/go.mod | 6 +- dialect/sqlitedialect/go.sum | 4 +- driver/pgdriver/go.mod | 8 +- 
driver/pgdriver/go.sum | 8 +- driver/sqliteshim/go.mod | 4 +- driver/sqliteshim/go.sum | 20 ++--- example/basic/go.mod | 8 +- example/basic/go.sum | 20 ++--- example/create-table-index/go.mod | 8 +- example/create-table-index/go.sum | 20 ++--- example/cursor-pagination/go.mod | 8 +- example/cursor-pagination/go.sum | 20 ++--- example/custom-type/go.mod | 8 +- example/custom-type/go.sum | 20 ++--- example/fixture/go.mod | 8 +- example/fixture/go.sum | 20 ++--- example/get-where-fields/go.mod | 8 +- example/get-where-fields/go.sum | 20 ++--- example/migrate/go.mod | 14 +++- example/migrate/go.sum | 37 ++++++--- example/model-hooks/go.mod | 8 +- example/model-hooks/go.sum | 20 ++--- example/multi-tenant/go.mod | 8 +- example/multi-tenant/go.sum | 20 ++--- example/opentelemetry/go.mod | 51 +++++++----- example/opentelemetry/go.sum | 104 ++++++++++++++---------- example/pg-faceted-search/go.mod | 12 ++- example/pg-faceted-search/go.sum | 17 +++- example/pg-listen/go.mod | 13 ++- example/pg-listen/go.sum | 24 +++++- example/placeholders/go.mod | 8 +- example/placeholders/go.sum | 20 ++--- example/rel-belongs-to/go.mod | 8 +- example/rel-belongs-to/go.sum | 20 ++--- example/rel-has-many-polymorphic/go.mod | 8 +- example/rel-has-many-polymorphic/go.sum | 20 ++--- example/rel-has-many/go.mod | 8 +- example/rel-has-many/go.sum | 20 ++--- example/rel-has-one/go.mod | 8 +- example/rel-has-one/go.sum | 20 ++--- example/rel-join-condition/go.mod | 8 +- example/rel-join-condition/go.sum | 20 ++--- example/rel-many-to-many-self/go.mod | 8 +- example/rel-many-to-many-self/go.sum | 20 ++--- example/rel-many-to-many/go.mod | 8 +- example/rel-many-to-many/go.sum | 20 ++--- example/string-representation/go.mod | 8 +- example/string-representation/go.sum | 20 ++--- example/trivial/go.mod | 17 ++-- example/trivial/go.sum | 39 ++++++--- example/tx-composition/go.mod | 8 +- example/tx-composition/go.sum | 20 ++--- extra/bundebug/go.mod | 6 +- extra/bundebug/go.sum | 4 +- 
extra/bunotel/go.mod | 12 +-- extra/bunotel/go.sum | 16 ++-- extra/bunrelic/go.mod | 14 ++-- extra/bunrelic/go.sum | 22 ++--- extra/bunslog/go.mod | 6 +- extra/bunslog/go.sum | 4 +- go.mod | 2 +- go.sum | 4 +- internal/dbtest/go.mod | 12 +-- internal/dbtest/go.sum | 28 +++---- 73 files changed, 609 insertions(+), 470 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 17dd1a059..ce4ba40bc 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -14,7 +14,7 @@ jobs: strategy: fail-fast: false matrix: - go-version: [1.22.x, 1.23.x] + go-version: [1.23.x] services: postgres: diff --git a/dbfixture/go.mod b/dbfixture/go.mod index df1dd1727..ea60be6f6 100644 --- a/dbfixture/go.mod +++ b/dbfixture/go.mod @@ -1,6 +1,8 @@ module github.com/uptrace/bun/dbfixture -go 1.22 +go 1.23 + +toolchain go1.23.2 replace github.com/uptrace/bun => ../ @@ -16,5 +18,5 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/sys v0.27.0 // indirect ) diff --git a/dbfixture/go.sum b/dbfixture/go.sum index 5080914c4..2bc24601d 100644 --- a/dbfixture/go.sum +++ b/dbfixture/go.sum @@ -19,8 +19,8 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod 
h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/dialect/mssqldialect/go.mod b/dialect/mssqldialect/go.mod index 99840a1e4..6913d91cf 100755 --- a/dialect/mssqldialect/go.mod +++ b/dialect/mssqldialect/go.mod @@ -1,14 +1,14 @@ module github.com/uptrace/bun/dialect/mssqldialect -go 1.22.0 +go 1.23 -toolchain go1.22.6 +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. require ( github.com/uptrace/bun v1.2.5 - golang.org/x/mod v0.21.0 + golang.org/x/mod v0.22.0 ) require ( @@ -17,5 +17,5 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/sys v0.27.0 // indirect ) diff --git a/dialect/mssqldialect/go.sum b/dialect/mssqldialect/go.sum index a84cfddd3..35b673cde 100644 --- a/dialect/mssqldialect/go.sum +++ b/dialect/mssqldialect/go.sum @@ -14,9 +14,9 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= -golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/mod v0.22.0 
h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/dialect/mysqldialect/go.mod b/dialect/mysqldialect/go.mod index b82b43208..d7ab6f81a 100644 --- a/dialect/mysqldialect/go.mod +++ b/dialect/mysqldialect/go.mod @@ -1,14 +1,14 @@ module github.com/uptrace/bun/dialect/mysqldialect -go 1.22.0 +go 1.23 -toolchain go1.22.6 +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. require ( github.com/uptrace/bun v1.2.5 - golang.org/x/mod v0.21.0 + golang.org/x/mod v0.22.0 ) require ( @@ -17,5 +17,5 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/sys v0.27.0 // indirect ) diff --git a/dialect/mysqldialect/go.sum b/dialect/mysqldialect/go.sum index a84cfddd3..35b673cde 100644 --- a/dialect/mysqldialect/go.sum +++ b/dialect/mysqldialect/go.sum @@ -14,9 +14,9 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= -golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod 
h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/dialect/pgdialect/go.mod b/dialect/pgdialect/go.mod index 29e6e67d7..e79f9ec4d 100644 --- a/dialect/pgdialect/go.mod +++ b/dialect/pgdialect/go.mod @@ -1,23 +1,29 @@ module github.com/uptrace/bun/dialect/pgdialect -go 1.22 +go 1.23 + +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. require ( github.com/stretchr/testify v1.8.1 github.com/uptrace/bun v1.2.5 + github.com/wk8/go-ordered-map/v2 v2.1.9-0.20240816141633-0a40785b4f41 ) require ( + github.com/bahlo/generic-list-go v0.2.0 // indirect + github.com/buger/jsonparser v1.1.1 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/jinzhu/inflection v1.0.0 // indirect github.com/kr/text v0.2.0 // indirect + github.com/mailru/easyjson v0.7.7 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/puzpuzpuz/xsync/v3 v3.4.0 // indirect github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/sys v0.27.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/dialect/pgdialect/go.sum b/dialect/pgdialect/go.sum index c855eb08d..a373b962d 100644 --- a/dialect/pgdialect/go.sum +++ b/dialect/pgdialect/go.sum @@ -1,11 +1,18 @@ +github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk= +github.com/bahlo/generic-list-go v0.2.0/go.mod 
h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg= +github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= +github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= @@ -25,8 +32,10 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod 
h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +github.com/wk8/go-ordered-map/v2 v2.1.9-0.20240816141633-0a40785b4f41 h1:rnB8ZLMeAr3VcqjfRkAm27qb8y6zFKNfuHvy1Gfe7KI= +github.com/wk8/go-ordered-map/v2 v2.1.9-0.20240816141633-0a40785b4f41/go.mod h1:DbzwytT4g/odXquuOCqroKvtxxldI4nb3nuesHF/Exo= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/dialect/sqlitedialect/go.mod b/dialect/sqlitedialect/go.mod index 7787aedac..a2e8581ad 100644 --- a/dialect/sqlitedialect/go.mod +++ b/dialect/sqlitedialect/go.mod @@ -1,6 +1,8 @@ module github.com/uptrace/bun/dialect/sqlitedialect -go 1.22 +go 1.23 + +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. 
@@ -12,5 +14,5 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/sys v0.27.0 // indirect ) diff --git a/dialect/sqlitedialect/go.sum b/dialect/sqlitedialect/go.sum index 1e3c492a4..045ab2f33 100644 --- a/dialect/sqlitedialect/go.sum +++ b/dialect/sqlitedialect/go.sum @@ -14,7 +14,7 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/driver/pgdriver/go.mod b/driver/pgdriver/go.mod index 27c2eb57b..74c57a33a 100644 --- a/driver/pgdriver/go.mod +++ b/driver/pgdriver/go.mod @@ -1,6 +1,8 @@ module github.com/uptrace/bun/driver/pgdriver -go 1.22 +go 1.23 + +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. 
@@ -19,7 +21,7 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/crypto v0.28.0 // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/crypto v0.29.0 // indirect + golang.org/x/sys v0.27.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/driver/pgdriver/go.sum b/driver/pgdriver/go.sum index 70147de37..e545dc75d 100644 --- a/driver/pgdriver/go.sum +++ b/driver/pgdriver/go.sum @@ -25,10 +25,10 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw= -golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ= +golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/driver/sqliteshim/go.mod b/driver/sqliteshim/go.mod index 35322d3e0..241535d1c 100644 --- 
a/driver/sqliteshim/go.mod +++ b/driver/sqliteshim/go.mod @@ -20,8 +20,8 @@ require ( github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect - golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f // indirect + golang.org/x/sys v0.27.0 // indirect gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect gopkg.in/yaml.v3 v3.0.1 // indirect modernc.org/gc/v3 v3.0.0-20241004144649-1aea3fae8852 // indirect diff --git a/driver/sqliteshim/go.sum b/driver/sqliteshim/go.sum index f27a4323f..6a8063e2f 100644 --- a/driver/sqliteshim/go.sum +++ b/driver/sqliteshim/go.sum @@ -31,17 +31,17 @@ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8= -golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= -golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= 
+golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/tools v0.26.0 h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ= -golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= +golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/example/basic/go.mod b/example/basic/go.mod index 13aa001d9..66fe269c4 100644 --- a/example/basic/go.mod +++ b/example/basic/go.mod @@ -1,8 +1,8 @@ module github.com/uptrace/bun/example/basic -go 1.22.0 +go 1.23 -toolchain go1.22.6 +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. 
@@ -36,8 +36,8 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f // indirect + golang.org/x/sys v0.27.0 // indirect modernc.org/gc/v3 v3.0.0-20241004144649-1aea3fae8852 // indirect modernc.org/libc v1.61.0 // indirect modernc.org/mathutil v1.6.0 // indirect diff --git a/example/basic/go.sum b/example/basic/go.sum index c4f8fdaeb..0f91592d5 100644 --- a/example/basic/go.sum +++ b/example/basic/go.sum @@ -35,18 +35,18 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8= -golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= -golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/sync 
v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/tools v0.26.0 h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ= -golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= +golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= modernc.org/cc/v4 v4.21.4 h1:3Be/Rdo1fpr8GrQ7IVw9OHtplU4gWbb+wNgeoBMmGLQ= diff --git a/example/create-table-index/go.mod b/example/create-table-index/go.mod index 242eee241..b945d4850 100644 --- a/example/create-table-index/go.mod +++ b/example/create-table-index/go.mod @@ -1,8 +1,8 @@ module github.com/uptrace/bun/example/create-table-index -go 1.22.0 +go 1.23 -toolchain go1.22.6 +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. 
@@ -34,8 +34,8 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f // indirect + golang.org/x/sys v0.27.0 // indirect modernc.org/gc/v3 v3.0.0-20241004144649-1aea3fae8852 // indirect modernc.org/libc v1.61.0 // indirect modernc.org/mathutil v1.6.0 // indirect diff --git a/example/create-table-index/go.sum b/example/create-table-index/go.sum index c4f8fdaeb..0f91592d5 100644 --- a/example/create-table-index/go.sum +++ b/example/create-table-index/go.sum @@ -35,18 +35,18 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8= -golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= -golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod 
h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/tools v0.26.0 h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ= -golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= +golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= modernc.org/cc/v4 v4.21.4 h1:3Be/Rdo1fpr8GrQ7IVw9OHtplU4gWbb+wNgeoBMmGLQ= diff --git a/example/cursor-pagination/go.mod b/example/cursor-pagination/go.mod index a4d5d32d6..050386ff2 100644 --- a/example/cursor-pagination/go.mod +++ b/example/cursor-pagination/go.mod @@ -1,8 +1,8 @@ module github.com/uptrace/bun/example/cursor-pagination -go 1.22.0 +go 1.23 -toolchain go1.22.6 +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. 
@@ -34,8 +34,8 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f // indirect + golang.org/x/sys v0.27.0 // indirect modernc.org/gc/v3 v3.0.0-20241004144649-1aea3fae8852 // indirect modernc.org/libc v1.61.0 // indirect modernc.org/mathutil v1.6.0 // indirect diff --git a/example/cursor-pagination/go.sum b/example/cursor-pagination/go.sum index c4f8fdaeb..0f91592d5 100644 --- a/example/cursor-pagination/go.sum +++ b/example/cursor-pagination/go.sum @@ -35,18 +35,18 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8= -golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= -golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod 
h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/tools v0.26.0 h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ= -golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= +golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= modernc.org/cc/v4 v4.21.4 h1:3Be/Rdo1fpr8GrQ7IVw9OHtplU4gWbb+wNgeoBMmGLQ= diff --git a/example/custom-type/go.mod b/example/custom-type/go.mod index c642cb5b1..fca715266 100644 --- a/example/custom-type/go.mod +++ b/example/custom-type/go.mod @@ -1,8 +1,8 @@ module github.com/uptrace/bun/example/custom-type -go 1.22.0 +go 1.23 -toolchain go1.22.6 +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. 
@@ -34,8 +34,8 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f // indirect + golang.org/x/sys v0.27.0 // indirect modernc.org/gc/v3 v3.0.0-20241004144649-1aea3fae8852 // indirect modernc.org/libc v1.61.0 // indirect modernc.org/mathutil v1.6.0 // indirect diff --git a/example/custom-type/go.sum b/example/custom-type/go.sum index c4f8fdaeb..0f91592d5 100644 --- a/example/custom-type/go.sum +++ b/example/custom-type/go.sum @@ -35,18 +35,18 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8= -golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= -golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= 
+golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/tools v0.26.0 h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ= -golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= +golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= modernc.org/cc/v4 v4.21.4 h1:3Be/Rdo1fpr8GrQ7IVw9OHtplU4gWbb+wNgeoBMmGLQ= diff --git a/example/fixture/go.mod b/example/fixture/go.mod index 9c201208c..2dc6b1d98 100644 --- a/example/fixture/go.mod +++ b/example/fixture/go.mod @@ -1,8 +1,8 @@ module github.com/uptrace/bun/example/fixture -go 1.22.0 +go 1.23 -toolchain go1.22.6 +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. 
@@ -37,8 +37,8 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f // indirect + golang.org/x/sys v0.27.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect modernc.org/gc/v3 v3.0.0-20241004144649-1aea3fae8852 // indirect modernc.org/libc v1.61.0 // indirect diff --git a/example/fixture/go.sum b/example/fixture/go.sum index 5c2970472..8b9506f63 100644 --- a/example/fixture/go.sum +++ b/example/fixture/go.sum @@ -39,18 +39,18 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8= -golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= -golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/sync 
v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/tools v0.26.0 h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ= -golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= +golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/example/get-where-fields/go.mod b/example/get-where-fields/go.mod index 5cae0913e..dadb37799 100644 --- a/example/get-where-fields/go.mod +++ b/example/get-where-fields/go.mod @@ -1,8 +1,8 @@ module github.com/uptrace/bun/example/get-where-fields -go 1.22.0 +go 1.23 -toolchain go1.22.6 +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. 
@@ -33,8 +33,8 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f // indirect + golang.org/x/sys v0.27.0 // indirect modernc.org/gc/v3 v3.0.0-20241004144649-1aea3fae8852 // indirect modernc.org/libc v1.61.0 // indirect modernc.org/mathutil v1.6.0 // indirect diff --git a/example/get-where-fields/go.sum b/example/get-where-fields/go.sum index 875301e35..09a4ac14c 100644 --- a/example/get-where-fields/go.sum +++ b/example/get-where-fields/go.sum @@ -30,17 +30,17 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8= -golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= -golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod 
h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/tools v0.26.0 h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ= -golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= +golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= modernc.org/cc/v4 v4.21.4 h1:3Be/Rdo1fpr8GrQ7IVw9OHtplU4gWbb+wNgeoBMmGLQ= diff --git a/example/migrate/go.mod b/example/migrate/go.mod index 1c51ac929..f9d03d237 100644 --- a/example/migrate/go.mod +++ b/example/migrate/go.mod @@ -1,8 +1,8 @@ module github.com/uptrace/bun/example/migrate -go 1.22.0 +go 1.23 -toolchain go1.22.6 +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. 
@@ -21,12 +21,16 @@ require ( ) require ( + github.com/bahlo/generic-list-go v0.2.0 // indirect + github.com/buger/jsonparser v1.1.1 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.5 // indirect github.com/dustin/go-humanize v1.0.1 // indirect github.com/fatih/color v1.18.0 // indirect github.com/google/uuid v1.6.0 // indirect github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect github.com/jinzhu/inflection v1.0.0 // indirect + github.com/kr/text v0.2.0 // indirect + github.com/mailru/easyjson v0.7.7 // indirect github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/mattn/go-sqlite3 v1.14.24 // indirect @@ -37,9 +41,11 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect + github.com/wk8/go-ordered-map/v2 v2.1.9-0.20240816141633-0a40785b4f41 // indirect github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 // indirect - golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f // indirect + golang.org/x/sys v0.27.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect modernc.org/gc/v3 v3.0.0-20241004144649-1aea3fae8852 // indirect modernc.org/libc v1.61.0 // indirect modernc.org/mathutil v1.6.0 // indirect diff --git a/example/migrate/go.sum b/example/migrate/go.sum index 45c5adffa..c2e224451 100644 --- a/example/migrate/go.sum +++ b/example/migrate/go.sum @@ -1,5 +1,10 @@ +github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk= +github.com/bahlo/generic-list-go v0.2.0/go.mod h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg= +github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= +github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= github.com/cpuguy83/go-md2man/v2 v2.0.5 
h1:ZtcqGrnekaHpVLArFSe4HK5DoKx1T0rq2DwVB0alcyc= github.com/cpuguy83/go-md2man/v2 v2.0.5/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= @@ -14,6 +19,11 @@ github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= @@ -23,6 +33,8 @@ github.com/mattn/go-sqlite3 v1.14.24 h1:tpSp2G2KyMnnQu99ngJ47EIkWVmliIizyZBfPrBW github.com/mattn/go-sqlite3 v1.14.24/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4= github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e 
h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/puzpuzpuz/xsync/v3 v3.4.0 h1:DuVBAdXuGFHv8adVXjWWZ63pJq+NRXOWVXlKDBZ+mJ4= @@ -41,20 +53,25 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= +github.com/wk8/go-ordered-map/v2 v2.1.9-0.20240816141633-0a40785b4f41 h1:rnB8ZLMeAr3VcqjfRkAm27qb8y6zFKNfuHvy1Gfe7KI= +github.com/wk8/go-ordered-map/v2 v2.1.9-0.20240816141633-0a40785b4f41/go.mod h1:DbzwytT4g/odXquuOCqroKvtxxldI4nb3nuesHF/Exo= github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 h1:gEOO8jv9F4OT7lGCjxCBTO/36wtF6j2nSip77qHd4x4= github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1/go.mod h1:Ohn+xnUBiLI6FVj/9LpzZWtj1/D6lUovWYBkxHVV3aM= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8= -golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= -golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod 
h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/tools v0.26.0 h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ= -golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= +golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= modernc.org/cc/v4 v4.21.4 h1:3Be/Rdo1fpr8GrQ7IVw9OHtplU4gWbb+wNgeoBMmGLQ= diff --git a/example/model-hooks/go.mod b/example/model-hooks/go.mod index c9996a7c1..b972ee845 100644 --- a/example/model-hooks/go.mod +++ b/example/model-hooks/go.mod @@ -1,8 +1,8 @@ module github.com/uptrace/bun/example/model-hooks -go 1.22.0 +go 1.23 -toolchain go1.22.6 
+toolchain go1.23.2 replace github.com/uptrace/bun => ../.. @@ -37,8 +37,8 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f // indirect + golang.org/x/sys v0.27.0 // indirect modernc.org/gc/v3 v3.0.0-20241004144649-1aea3fae8852 // indirect modernc.org/libc v1.61.0 // indirect modernc.org/mathutil v1.6.0 // indirect diff --git a/example/model-hooks/go.sum b/example/model-hooks/go.sum index c4f8fdaeb..0f91592d5 100644 --- a/example/model-hooks/go.sum +++ b/example/model-hooks/go.sum @@ -35,18 +35,18 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8= -golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= -golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod 
v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/tools v0.26.0 h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ= -golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= +golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= modernc.org/cc/v4 v4.21.4 h1:3Be/Rdo1fpr8GrQ7IVw9OHtplU4gWbb+wNgeoBMmGLQ= diff --git a/example/multi-tenant/go.mod b/example/multi-tenant/go.mod index 5fd893454..4bceaf449 100644 --- a/example/multi-tenant/go.mod +++ b/example/multi-tenant/go.mod @@ -1,8 +1,8 @@ module github.com/uptrace/bun/example/multi-tenant -go 1.22.0 +go 1.23 -toolchain go1.22.6 +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. 
@@ -38,8 +38,8 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f // indirect + golang.org/x/sys v0.27.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect modernc.org/gc/v3 v3.0.0-20241004144649-1aea3fae8852 // indirect modernc.org/libc v1.61.0 // indirect diff --git a/example/multi-tenant/go.sum b/example/multi-tenant/go.sum index 5c2970472..8b9506f63 100644 --- a/example/multi-tenant/go.sum +++ b/example/multi-tenant/go.sum @@ -39,18 +39,18 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8= -golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= -golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= 
+golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/tools v0.26.0 h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ= -golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= +golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/example/opentelemetry/go.mod b/example/opentelemetry/go.mod index 33e52782a..6ca46586a 100644 --- a/example/opentelemetry/go.mod +++ b/example/opentelemetry/go.mod @@ -1,6 +1,8 @@ module github.com/uptrace/bun/example/opentelemetry -go 1.22 +go 1.23 + +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. 
@@ -21,41 +23,46 @@ require ( github.com/uptrace/bun/driver/pgdriver v1.2.5 github.com/uptrace/bun/extra/bunotel v1.2.5 github.com/uptrace/uptrace-go v1.31.0 - go.opentelemetry.io/otel v1.31.0 + go.opentelemetry.io/otel v1.32.0 ) require ( + github.com/bahlo/generic-list-go v0.2.0 // indirect + github.com/buger/jsonparser v1.1.1 // indirect github.com/cenkalti/backoff/v4 v4.3.0 // indirect github.com/go-logr/logr v1.4.2 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/google/uuid v1.6.0 // indirect - github.com/grpc-ecosystem/grpc-gateway/v2 v2.22.0 // indirect + github.com/grpc-ecosystem/grpc-gateway/v2 v2.23.0 // indirect github.com/jinzhu/inflection v1.0.0 // indirect + github.com/mailru/easyjson v0.7.7 // indirect github.com/puzpuzpuz/xsync/v3 v3.4.0 // indirect github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/uptrace/opentelemetry-go-extra/otelsql v0.3.2 // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - go.opentelemetry.io/contrib/instrumentation/runtime v0.56.0 // indirect - go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.7.0 // indirect - go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.31.0 // indirect - go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.31.0 // indirect - go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.31.0 // indirect - go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.31.0 // indirect - go.opentelemetry.io/otel/log v0.7.0 // indirect - go.opentelemetry.io/otel/metric v1.31.0 // indirect - go.opentelemetry.io/otel/sdk v1.31.0 // indirect - go.opentelemetry.io/otel/sdk/log v0.7.0 // indirect - go.opentelemetry.io/otel/sdk/metric v1.31.0 // indirect - go.opentelemetry.io/otel/trace v1.31.0 // indirect + github.com/wk8/go-ordered-map/v2 v2.1.9-0.20240816141633-0a40785b4f41 // indirect + go.opentelemetry.io/contrib/instrumentation/runtime v0.57.0 // indirect 
+ go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.8.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.32.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.32.0 // indirect + go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.32.0 // indirect + go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.32.0 // indirect + go.opentelemetry.io/otel/log v0.8.0 // indirect + go.opentelemetry.io/otel/metric v1.32.0 // indirect + go.opentelemetry.io/otel/sdk v1.32.0 // indirect + go.opentelemetry.io/otel/sdk/log v0.8.0 // indirect + go.opentelemetry.io/otel/sdk/metric v1.32.0 // indirect + go.opentelemetry.io/otel/trace v1.32.0 // indirect go.opentelemetry.io/proto/otlp v1.3.1 // indirect - golang.org/x/crypto v0.28.0 // indirect - golang.org/x/net v0.30.0 // indirect - golang.org/x/sys v0.26.0 // indirect - golang.org/x/text v0.19.0 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20241021214115-324edc3d5d38 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20241021214115-324edc3d5d38 // indirect - google.golang.org/grpc v1.67.1 // indirect + golang.org/x/crypto v0.29.0 // indirect + golang.org/x/net v0.31.0 // indirect + golang.org/x/sys v0.27.0 // indirect + golang.org/x/text v0.20.0 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20241104194629-dd2ea8efbc28 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20241104194629-dd2ea8efbc28 // indirect + google.golang.org/grpc v1.68.0 // indirect google.golang.org/protobuf v1.35.1 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect mellium.im/sasl v0.3.2 // indirect ) diff --git a/example/opentelemetry/go.sum b/example/opentelemetry/go.sum index a82782939..4def7937e 100644 --- a/example/opentelemetry/go.sum +++ b/example/opentelemetry/go.sum @@ -1,5 +1,9 @@ +github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk= +github.com/bahlo/generic-list-go v0.2.0/go.mod 
h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg= github.com/brianvoe/gofakeit/v5 v5.11.2 h1:Ny5Nsf4z2023ZvYP8ujW8p5B1t5sxhdFaQ/0IYXbeSA= github.com/brianvoe/gofakeit/v5 v5.11.2/go.mod h1:/ZENnKqX+XrN8SORLe/fu5lZDIo1tuPncWuRD+eyhSI= +github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= +github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= @@ -9,18 +13,29 @@ github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.22.0 h1:asbCHRVmodnJTuQ3qamDwqVOIjwqUPTYmYuemVOx+Ys= -github.com/grpc-ecosystem/grpc-gateway/v2 v2.22.0/go.mod h1:ggCgvZ2r7uOoQjOyu2Y1NhHmEPPzzuhWgcza5M1Ji1I= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.23.0 h1:ad0vkEBuk23VJzZR9nkLVG0YAoN9coASF1GusYX6AlU= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.23.0/go.mod h1:igFoXX2ELCW06bol23DWPB5BEWfZISOzSP5K2sbLea0= github.com/jinzhu/inflection v1.0.0 
h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/puzpuzpuz/xsync/v3 v3.4.0 h1:DuVBAdXuGFHv8adVXjWWZ63pJq+NRXOWVXlKDBZ+mJ4= github.com/puzpuzpuz/xsync/v3 v3.4.0/go.mod h1:VjzYrABPabuM4KyBh1Ftq6u8nhwY5tBPKP9jpmh0nnA= +github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= +github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc h1:9lRDQMhESg+zvGYmW5DyG0UqvY96Bu5QYsTLvCHdrgo= @@ -33,50 +48,55 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -go.opentelemetry.io/contrib/instrumentation/runtime v0.56.0 
h1:s7wHG+t8bEoH7ibWk1nk682h7EoWLJ5/8j+TSO3bX/o= -go.opentelemetry.io/contrib/instrumentation/runtime v0.56.0/go.mod h1:Q8Hsv3d9DwryfIl+ebj4mY81IYVRSPy4QfxroVZwqLo= -go.opentelemetry.io/otel v1.31.0 h1:NsJcKPIW0D0H3NgzPDHmo0WW6SptzPdqg/L1zsIm2hY= -go.opentelemetry.io/otel v1.31.0/go.mod h1:O0C14Yl9FgkjqcCZAsE053C13OaddMYr/hz6clDkEJE= -go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.7.0 h1:mMOmtYie9Fx6TSVzw4W+NTpvoaS1JWWga37oI1a/4qQ= -go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.7.0/go.mod h1:yy7nDsMMBUkD+jeekJ36ur5f3jJIrmCwUrY67VFhNpA= -go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.31.0 h1:ZsXq73BERAiNuuFXYqP4MR5hBrjXfMGSO+Cx7qoOZiM= -go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.31.0/go.mod h1:hg1zaDMpyZJuUzjFxFsRYBoccE86tM9Uf4IqNMUxvrY= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.31.0 h1:K0XaT3DwHAcV4nKLzcQvwAgSyisUghWoY20I7huthMk= -go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.31.0/go.mod h1:B5Ki776z/MBnVha1Nzwp5arlzBbE3+1jk+pGmaP5HME= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.31.0 h1:lUsI2TYsQw2r1IASwoROaCnjdj2cvC2+Jbxvk6nHnWU= -go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.31.0/go.mod h1:2HpZxxQurfGxJlJDblybejHB6RX6pmExPNe517hREw4= -go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.31.0 h1:UGZ1QwZWY67Z6BmckTU+9Rxn04m2bD3gD6Mk0OIOCPk= -go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.31.0/go.mod h1:fcwWuDuaObkkChiDlhEpSq9+X1C0omv+s5mBtToAQ64= -go.opentelemetry.io/otel/log v0.7.0 h1:d1abJc0b1QQZADKvfe9JqqrfmPYQCz2tUSO+0XZmuV4= -go.opentelemetry.io/otel/log v0.7.0/go.mod h1:2jf2z7uVfnzDNknKTO9G+ahcOAyWcp1fJmk/wJjULRo= -go.opentelemetry.io/otel/metric v1.31.0 h1:FSErL0ATQAmYHUIzSezZibnyVlft1ybhy4ozRPcF2fE= -go.opentelemetry.io/otel/metric v1.31.0/go.mod h1:C3dEloVbLuYoX41KpmAhOqNriGbA+qqH6PQ5E5mUfnY= -go.opentelemetry.io/otel/sdk v1.31.0 h1:xLY3abVHYZ5HSfOg3l2E5LUj2Cwva5Y7yGxnSW9H5Gk= 
-go.opentelemetry.io/otel/sdk v1.31.0/go.mod h1:TfRbMdhvxIIr/B2N2LQW2S5v9m3gOQ/08KsbbO5BPT0= -go.opentelemetry.io/otel/sdk/log v0.7.0 h1:dXkeI2S0MLc5g0/AwxTZv6EUEjctiH8aG14Am56NTmQ= -go.opentelemetry.io/otel/sdk/log v0.7.0/go.mod h1:oIRXpW+WD6M8BuGj5rtS0aRu/86cbDV/dAfNaZBIjYM= -go.opentelemetry.io/otel/sdk/metric v1.31.0 h1:i9hxxLJF/9kkvfHppyLL55aW7iIJz4JjxTeYusH7zMc= -go.opentelemetry.io/otel/sdk/metric v1.31.0/go.mod h1:CRInTMVvNhUKgSAMbKyTMxqOBC0zgyxzW55lZzX43Y8= -go.opentelemetry.io/otel/trace v1.31.0 h1:ffjsj1aRouKewfr85U2aGagJ46+MvodynlQ1HYdmJys= -go.opentelemetry.io/otel/trace v1.31.0/go.mod h1:TXZkRk7SM2ZQLtR6eoAWQFIHPvzQ06FJAsO1tJg480A= +github.com/wk8/go-ordered-map/v2 v2.1.9-0.20240816141633-0a40785b4f41 h1:rnB8ZLMeAr3VcqjfRkAm27qb8y6zFKNfuHvy1Gfe7KI= +github.com/wk8/go-ordered-map/v2 v2.1.9-0.20240816141633-0a40785b4f41/go.mod h1:DbzwytT4g/odXquuOCqroKvtxxldI4nb3nuesHF/Exo= +go.opentelemetry.io/contrib/instrumentation/runtime v0.57.0 h1:kJB5wMVorwre8QzEodzTAbzm9FOOah0zvG+V4abNlEE= +go.opentelemetry.io/contrib/instrumentation/runtime v0.57.0/go.mod h1:Nup4TgnOyEJWmVq9sf/ASH3ZJiAXwWHd5xZCHG7Sg9M= +go.opentelemetry.io/otel v1.32.0 h1:WnBN+Xjcteh0zdk01SVqV55d/m62NJLJdIyb4y/WO5U= +go.opentelemetry.io/otel v1.32.0/go.mod h1:00DCVSB0RQcnzlwyTfqtxSm+DRr9hpYrHjNGiBHVQIg= +go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.8.0 h1:S+LdBGiQXtJdowoJoQPEtI52syEP/JYBUpjO49EQhV8= +go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.8.0/go.mod h1:5KXybFvPGds3QinJWQT7pmXf+TN5YIa7CNYObWRkj50= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.32.0 h1:t/Qur3vKSkUCcDVaSumWF2PKHt85pc7fRvFuoVT8qFU= +go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.32.0/go.mod h1:Rl61tySSdcOJWoEgYZVtmnKdA0GeKrSqkHC1t+91CH8= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.32.0 h1:IJFEoHiytixx8cMiVAO+GmHR6Frwu+u5Ur8njpFO6Ac= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.32.0/go.mod 
h1:3rHrKNtLIoS0oZwkY2vxi+oJcwFRWdtUyRII+so45p8= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.32.0 h1:cMyu9O88joYEaI47CnQkxO1XZdpoTF9fEnW2duIddhw= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.32.0/go.mod h1:6Am3rn7P9TVVeXYG+wtcGE7IE1tsQ+bP3AuWcKt/gOI= +go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.32.0 h1:cC2yDI3IQd0Udsux7Qmq8ToKAx1XCilTQECZ0KDZyTw= +go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.32.0/go.mod h1:2PD5Ex6z8CFzDbTdOlwyNIUywRr1DN0ospafJM1wJ+s= +go.opentelemetry.io/otel/log v0.8.0 h1:egZ8vV5atrUWUbnSsHn6vB8R21G2wrKqNiDt3iWertk= +go.opentelemetry.io/otel/log v0.8.0/go.mod h1:M9qvDdUTRCopJcGRKg57+JSQ9LgLBrwwfC32epk5NX8= +go.opentelemetry.io/otel/metric v1.32.0 h1:xV2umtmNcThh2/a/aCP+h64Xx5wsj8qqnkYZktzNa0M= +go.opentelemetry.io/otel/metric v1.32.0/go.mod h1:jH7CIbbK6SH2V2wE16W05BHCtIDzauciCRLoc/SyMv8= +go.opentelemetry.io/otel/sdk v1.32.0 h1:RNxepc9vK59A8XsgZQouW8ue8Gkb4jpWtJm9ge5lEG4= +go.opentelemetry.io/otel/sdk v1.32.0/go.mod h1:LqgegDBjKMmb2GC6/PrTnteJG39I8/vJCAP9LlJXEjU= +go.opentelemetry.io/otel/sdk/log v0.8.0 h1:zg7GUYXqxk1jnGF/dTdLPrK06xJdrXgqgFLnI4Crxvs= +go.opentelemetry.io/otel/sdk/log v0.8.0/go.mod h1:50iXr0UVwQrYS45KbruFrEt4LvAdCaWWgIrsN3ZQggo= +go.opentelemetry.io/otel/sdk/metric v1.32.0 h1:rZvFnvmvawYb0alrYkjraqJq0Z4ZUJAiyYCU9snn1CU= +go.opentelemetry.io/otel/sdk/metric v1.32.0/go.mod h1:PWeZlq0zt9YkYAp3gjKZ0eicRYvOh1Gd+X99x6GHpCQ= +go.opentelemetry.io/otel/trace v1.32.0 h1:WIC9mYrXf8TmY/EXuULKc8hR17vE+Hjv2cssQDe03fM= +go.opentelemetry.io/otel/trace v1.32.0/go.mod h1:+i4rkvCraA+tG6AzwloGaCtkx53Fa+L+V8e9a7YvhT8= go.opentelemetry.io/proto/otlp v1.3.1 h1:TrMUixzpM0yuc/znrFTP9MMRh8trP93mkCiDVeXrui0= go.opentelemetry.io/proto/otlp v1.3.1/go.mod h1:0X1WI4de4ZsLrrJNLAQbFeLCm3T7yBkR0XqQ7niQU+8= -golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw= -golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U= -golang.org/x/net v0.30.0 
h1:AcW1SDZMkb8IpzCdQUaIq2sP4sZ4zw+55h6ynffypl4= -golang.org/x/net v0.30.0/go.mod h1:2wGyMJ5iFasEhkwi13ChkO/t1ECNC4X4eBKkVFyYFlU= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM= -golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= -google.golang.org/genproto/googleapis/api v0.0.0-20241021214115-324edc3d5d38 h1:2oV8dfuIkM1Ti7DwXc0BJfnwr9csz4TDXI9EmiI+Rbw= -google.golang.org/genproto/googleapis/api v0.0.0-20241021214115-324edc3d5d38/go.mod h1:vuAjtvlwkDKF6L1GQ0SokiRLCGFfeBUXWr/aFFkHACc= -google.golang.org/genproto/googleapis/rpc v0.0.0-20241021214115-324edc3d5d38 h1:zciRKQ4kBpFgpfC5QQCVtnnNAcLIqweL7plyZRQHVpI= -google.golang.org/genproto/googleapis/rpc v0.0.0-20241021214115-324edc3d5d38/go.mod h1:GX3210XPVPUjJbTUbvwI8f2IpZDMZuPJWDzDuebbviI= -google.golang.org/grpc v1.67.1 h1:zWnc1Vrcno+lHZCOofnIMvycFcc0QRGIzm9dhnDX68E= -google.golang.org/grpc v1.67.1/go.mod h1:1gLDyUQU7CTLJI90u3nXZ9ekeghjeM7pTDZlqFNg2AA= +golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ= +golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg= +golang.org/x/net v0.31.0 h1:68CPQngjLL0r2AlUKiSxtQFKvzRVbnzLwMUn5SzcLHo= +golang.org/x/net v0.31.0/go.mod h1:P4fl1q7dY2hnZFxEk4pPSkDHF+QqjitcnDjUQyMM+pM= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug= +golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4= +google.golang.org/genproto/googleapis/api v0.0.0-20241104194629-dd2ea8efbc28 h1:M0KvPgPmDZHPlbRbaNU1APr28TvwvvdUPlSv7PUvy8g= +google.golang.org/genproto/googleapis/api v0.0.0-20241104194629-dd2ea8efbc28/go.mod 
h1:dguCy7UOdZhTvLzDyt15+rOrawrpM4q7DD9dQ1P11P4= +google.golang.org/genproto/googleapis/rpc v0.0.0-20241104194629-dd2ea8efbc28 h1:XVhgTWWV3kGQlwJHR3upFWZeTsei6Oks1apkZSeonIE= +google.golang.org/genproto/googleapis/rpc v0.0.0-20241104194629-dd2ea8efbc28/go.mod h1:GX3210XPVPUjJbTUbvwI8f2IpZDMZuPJWDzDuebbviI= +google.golang.org/grpc v1.68.0 h1:aHQeeJbo8zAkAa3pRzrVjZlbz6uSfeOXlJNQM0RAbz0= +google.golang.org/grpc v1.68.0/go.mod h1:fmSPC5AsjSBCK54MyHRx48kpOti1/jRfOlwEWywNjWA= google.golang.org/protobuf v1.35.1 h1:m3LfL6/Ca+fqnjnlqQXNpFPABW1UD7mjh8KO2mKFytA= google.golang.org/protobuf v1.35.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= mellium.im/sasl v0.3.2 h1:PT6Xp7ccn9XaXAnJ03FcEjmAn7kK1x7aoXV6F+Vmrl0= diff --git a/example/pg-faceted-search/go.mod b/example/pg-faceted-search/go.mod index 09239f3d2..0f022a738 100644 --- a/example/pg-faceted-search/go.mod +++ b/example/pg-faceted-search/go.mod @@ -1,6 +1,8 @@ module github.com/uptrace/bun/example/pg-faceted-search -go 1.22 +go 1.23 + +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. 
@@ -22,16 +24,20 @@ require ( ) require ( + github.com/bahlo/generic-list-go v0.2.0 // indirect + github.com/buger/jsonparser v1.1.1 // indirect github.com/fatih/color v1.18.0 // indirect github.com/jinzhu/inflection v1.0.0 // indirect + github.com/mailru/easyjson v0.7.7 // indirect github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/puzpuzpuz/xsync/v3 v3.4.0 // indirect github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/crypto v0.28.0 // indirect - golang.org/x/sys v0.26.0 // indirect + github.com/wk8/go-ordered-map/v2 v2.1.9-0.20240816141633-0a40785b4f41 // indirect + golang.org/x/crypto v0.29.0 // indirect + golang.org/x/sys v0.27.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect mellium.im/sasl v0.3.2 // indirect ) diff --git a/example/pg-faceted-search/go.sum b/example/pg-faceted-search/go.sum index 735d3004d..bee65b5fa 100644 --- a/example/pg-faceted-search/go.sum +++ b/example/pg-faceted-search/go.sum @@ -1,11 +1,18 @@ +github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk= +github.com/bahlo/generic-list-go v0.2.0/go.mod h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg= +github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= +github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= github.com/jinzhu/inflection v1.0.0/go.mod 
h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= @@ -25,12 +32,14 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw= -golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U= +github.com/wk8/go-ordered-map/v2 v2.1.9-0.20240816141633-0a40785b4f41 h1:rnB8ZLMeAr3VcqjfRkAm27qb8y6zFKNfuHvy1Gfe7KI= +github.com/wk8/go-ordered-map/v2 v2.1.9-0.20240816141633-0a40785b4f41/go.mod h1:DbzwytT4g/odXquuOCqroKvtxxldI4nb3nuesHF/Exo= +golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ= +golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod 
h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/example/pg-listen/go.mod b/example/pg-listen/go.mod index 26579666e..1572b7350 100644 --- a/example/pg-listen/go.mod +++ b/example/pg-listen/go.mod @@ -1,6 +1,8 @@ module github.com/uptrace/bun/example/pg-listen -go 1.22 +go 1.23 + +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. @@ -18,15 +20,20 @@ require ( ) require ( + github.com/bahlo/generic-list-go v0.2.0 // indirect + github.com/buger/jsonparser v1.1.1 // indirect github.com/fatih/color v1.18.0 // indirect github.com/jinzhu/inflection v1.0.0 // indirect + github.com/mailru/easyjson v0.7.7 // indirect github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/puzpuzpuz/xsync/v3 v3.4.0 // indirect github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/crypto v0.28.0 // indirect - golang.org/x/sys v0.26.0 // indirect + github.com/wk8/go-ordered-map/v2 v2.1.9-0.20240816141633-0a40785b4f41 // indirect + golang.org/x/crypto v0.29.0 // indirect + golang.org/x/sys v0.27.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect mellium.im/sasl v0.3.2 // indirect ) diff --git a/example/pg-listen/go.sum b/example/pg-listen/go.sum index 18a60053a..bee65b5fa 100644 --- a/example/pg-listen/go.sum +++ b/example/pg-listen/go.sum @@ -1,14 +1,25 @@ +github.com/bahlo/generic-list-go v0.2.0 
h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk= +github.com/bahlo/generic-list-go v0.2.0/go.mod h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg= +github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= +github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/pmezard/go-difflib v1.0.0 
h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/puzpuzpuz/xsync/v3 v3.4.0 h1:DuVBAdXuGFHv8adVXjWWZ63pJq+NRXOWVXlKDBZ+mJ4= @@ -21,12 +32,17 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw= -golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U= +github.com/wk8/go-ordered-map/v2 v2.1.9-0.20240816141633-0a40785b4f41 h1:rnB8ZLMeAr3VcqjfRkAm27qb8y6zFKNfuHvy1Gfe7KI= +github.com/wk8/go-ordered-map/v2 v2.1.9-0.20240816141633-0a40785b4f41/go.mod h1:DbzwytT4g/odXquuOCqroKvtxxldI4nb3nuesHF/Exo= +golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ= +golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= 
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= mellium.im/sasl v0.3.2 h1:PT6Xp7ccn9XaXAnJ03FcEjmAn7kK1x7aoXV6F+Vmrl0= diff --git a/example/placeholders/go.mod b/example/placeholders/go.mod index d27c020ae..5d2c99d66 100644 --- a/example/placeholders/go.mod +++ b/example/placeholders/go.mod @@ -1,8 +1,8 @@ module github.com/uptrace/bun/example/placeholders -go 1.22.0 +go 1.23 -toolchain go1.22.6 +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. @@ -34,8 +34,8 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f // indirect + golang.org/x/sys v0.27.0 // indirect modernc.org/gc/v3 v3.0.0-20241004144649-1aea3fae8852 // indirect modernc.org/libc v1.61.0 // indirect modernc.org/mathutil v1.6.0 // indirect diff --git a/example/placeholders/go.sum b/example/placeholders/go.sum index c4f8fdaeb..0f91592d5 100644 --- a/example/placeholders/go.sum +++ b/example/placeholders/go.sum @@ -35,18 +35,18 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8= -golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= -golang.org/x/mod v0.21.0/go.mod 
h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/tools v0.26.0 h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ= -golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= +golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= modernc.org/cc/v4 v4.21.4 h1:3Be/Rdo1fpr8GrQ7IVw9OHtplU4gWbb+wNgeoBMmGLQ= diff --git a/example/rel-belongs-to/go.mod b/example/rel-belongs-to/go.mod index e86ce3447..6d1bcec0c 100644 --- a/example/rel-belongs-to/go.mod +++ b/example/rel-belongs-to/go.mod @@ -1,8 +1,8 @@ module 
github.com/uptrace/bun/example/rel-belongs-to -go 1.22.0 +go 1.23 -toolchain go1.22.6 +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. @@ -34,8 +34,8 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f // indirect + golang.org/x/sys v0.27.0 // indirect modernc.org/gc/v3 v3.0.0-20241004144649-1aea3fae8852 // indirect modernc.org/libc v1.61.0 // indirect modernc.org/mathutil v1.6.0 // indirect diff --git a/example/rel-belongs-to/go.sum b/example/rel-belongs-to/go.sum index c4f8fdaeb..0f91592d5 100644 --- a/example/rel-belongs-to/go.sum +++ b/example/rel-belongs-to/go.sum @@ -35,18 +35,18 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8= -golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= -golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod 
h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/tools v0.26.0 h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ= -golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= +golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= modernc.org/cc/v4 v4.21.4 h1:3Be/Rdo1fpr8GrQ7IVw9OHtplU4gWbb+wNgeoBMmGLQ= diff --git a/example/rel-has-many-polymorphic/go.mod b/example/rel-has-many-polymorphic/go.mod index 9cbb09aae..140925a09 100644 --- a/example/rel-has-many-polymorphic/go.mod +++ b/example/rel-has-many-polymorphic/go.mod @@ -1,8 +1,8 @@ module github.com/uptrace/bun/example/rel-has-many-polymorphic -go 1.22.0 +go 1.23 -toolchain go1.22.6 +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. 
@@ -38,8 +38,8 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f // indirect + golang.org/x/sys v0.27.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect modernc.org/gc/v3 v3.0.0-20241004144649-1aea3fae8852 // indirect modernc.org/libc v1.61.0 // indirect diff --git a/example/rel-has-many-polymorphic/go.sum b/example/rel-has-many-polymorphic/go.sum index 5c2970472..8b9506f63 100644 --- a/example/rel-has-many-polymorphic/go.sum +++ b/example/rel-has-many-polymorphic/go.sum @@ -39,18 +39,18 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8= -golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= -golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod 
h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/tools v0.26.0 h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ= -golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= +golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/example/rel-has-many/go.mod b/example/rel-has-many/go.mod index 3faef2988..8dd2fcc2e 100644 --- a/example/rel-has-many/go.mod +++ b/example/rel-has-many/go.mod @@ -1,8 +1,8 @@ module github.com/uptrace/bun/example/rel-has-many -go 1.22.0 +go 1.23 -toolchain go1.22.6 +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. 
@@ -34,8 +34,8 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f // indirect + golang.org/x/sys v0.27.0 // indirect modernc.org/gc/v3 v3.0.0-20241004144649-1aea3fae8852 // indirect modernc.org/libc v1.61.0 // indirect modernc.org/mathutil v1.6.0 // indirect diff --git a/example/rel-has-many/go.sum b/example/rel-has-many/go.sum index c4f8fdaeb..0f91592d5 100644 --- a/example/rel-has-many/go.sum +++ b/example/rel-has-many/go.sum @@ -35,18 +35,18 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8= -golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= -golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod 
h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/tools v0.26.0 h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ= -golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= +golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= modernc.org/cc/v4 v4.21.4 h1:3Be/Rdo1fpr8GrQ7IVw9OHtplU4gWbb+wNgeoBMmGLQ= diff --git a/example/rel-has-one/go.mod b/example/rel-has-one/go.mod index 388fb2fed..8455f09f6 100644 --- a/example/rel-has-one/go.mod +++ b/example/rel-has-one/go.mod @@ -1,8 +1,8 @@ module github.com/uptrace/bun/example/rel-has-one -go 1.22.0 +go 1.23 -toolchain go1.22.6 +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. 
@@ -34,8 +34,8 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f // indirect + golang.org/x/sys v0.27.0 // indirect modernc.org/gc/v3 v3.0.0-20241004144649-1aea3fae8852 // indirect modernc.org/libc v1.61.0 // indirect modernc.org/mathutil v1.6.0 // indirect diff --git a/example/rel-has-one/go.sum b/example/rel-has-one/go.sum index c4f8fdaeb..0f91592d5 100644 --- a/example/rel-has-one/go.sum +++ b/example/rel-has-one/go.sum @@ -35,18 +35,18 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8= -golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= -golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= 
+golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/tools v0.26.0 h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ= -golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= +golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= modernc.org/cc/v4 v4.21.4 h1:3Be/Rdo1fpr8GrQ7IVw9OHtplU4gWbb+wNgeoBMmGLQ= diff --git a/example/rel-join-condition/go.mod b/example/rel-join-condition/go.mod index c44c670d4..31419f2cd 100644 --- a/example/rel-join-condition/go.mod +++ b/example/rel-join-condition/go.mod @@ -1,8 +1,8 @@ module github.com/uptrace/bun/example/rel-join-condition -go 1.22.0 +go 1.23 -toolchain go1.22.6 +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. 
@@ -34,8 +34,8 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f // indirect + golang.org/x/sys v0.27.0 // indirect modernc.org/gc/v3 v3.0.0-20241004144649-1aea3fae8852 // indirect modernc.org/libc v1.61.0 // indirect modernc.org/mathutil v1.6.0 // indirect diff --git a/example/rel-join-condition/go.sum b/example/rel-join-condition/go.sum index c4f8fdaeb..0f91592d5 100644 --- a/example/rel-join-condition/go.sum +++ b/example/rel-join-condition/go.sum @@ -35,18 +35,18 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8= -golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= -golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod 
h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/tools v0.26.0 h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ= -golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= +golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= modernc.org/cc/v4 v4.21.4 h1:3Be/Rdo1fpr8GrQ7IVw9OHtplU4gWbb+wNgeoBMmGLQ= diff --git a/example/rel-many-to-many-self/go.mod b/example/rel-many-to-many-self/go.mod index e4b76a95f..3ba2dda2e 100644 --- a/example/rel-many-to-many-self/go.mod +++ b/example/rel-many-to-many-self/go.mod @@ -1,8 +1,8 @@ module github.com/uptrace/bun/example/rel-many-to-many-self -go 1.22.0 +go 1.23 -toolchain go1.22.6 +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. 
@@ -34,8 +34,8 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f // indirect + golang.org/x/sys v0.27.0 // indirect modernc.org/gc/v3 v3.0.0-20241004144649-1aea3fae8852 // indirect modernc.org/libc v1.61.0 // indirect modernc.org/mathutil v1.6.0 // indirect diff --git a/example/rel-many-to-many-self/go.sum b/example/rel-many-to-many-self/go.sum index c4f8fdaeb..0f91592d5 100644 --- a/example/rel-many-to-many-self/go.sum +++ b/example/rel-many-to-many-self/go.sum @@ -35,18 +35,18 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8= -golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= -golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod 
h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/tools v0.26.0 h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ= -golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= +golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= modernc.org/cc/v4 v4.21.4 h1:3Be/Rdo1fpr8GrQ7IVw9OHtplU4gWbb+wNgeoBMmGLQ= diff --git a/example/rel-many-to-many/go.mod b/example/rel-many-to-many/go.mod index c3519ef11..3894219f6 100644 --- a/example/rel-many-to-many/go.mod +++ b/example/rel-many-to-many/go.mod @@ -1,8 +1,8 @@ module github.com/uptrace/bun/example/rel-many-to-many -go 1.22.0 +go 1.23 -toolchain go1.22.6 +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. 
@@ -34,8 +34,8 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f // indirect + golang.org/x/sys v0.27.0 // indirect modernc.org/gc/v3 v3.0.0-20241004144649-1aea3fae8852 // indirect modernc.org/libc v1.61.0 // indirect modernc.org/mathutil v1.6.0 // indirect diff --git a/example/rel-many-to-many/go.sum b/example/rel-many-to-many/go.sum index c4f8fdaeb..0f91592d5 100644 --- a/example/rel-many-to-many/go.sum +++ b/example/rel-many-to-many/go.sum @@ -35,18 +35,18 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8= -golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= -golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod 
h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/tools v0.26.0 h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ= -golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= +golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= modernc.org/cc/v4 v4.21.4 h1:3Be/Rdo1fpr8GrQ7IVw9OHtplU4gWbb+wNgeoBMmGLQ= diff --git a/example/string-representation/go.mod b/example/string-representation/go.mod index 73ca62592..4faa5fd4a 100644 --- a/example/string-representation/go.mod +++ b/example/string-representation/go.mod @@ -1,8 +1,8 @@ module github.com/uptrace/bun/example/string-representation -go 1.22.0 +go 1.23 -toolchain go1.22.6 +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. 
@@ -31,8 +31,8 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f // indirect + golang.org/x/sys v0.27.0 // indirect modernc.org/gc/v3 v3.0.0-20241004144649-1aea3fae8852 // indirect modernc.org/libc v1.61.0 // indirect modernc.org/mathutil v1.6.0 // indirect diff --git a/example/string-representation/go.sum b/example/string-representation/go.sum index 875301e35..09a4ac14c 100644 --- a/example/string-representation/go.sum +++ b/example/string-representation/go.sum @@ -30,17 +30,17 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8= -golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= -golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod 
h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/tools v0.26.0 h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ= -golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= +golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= modernc.org/cc/v4 v4.21.4 h1:3Be/Rdo1fpr8GrQ7IVw9OHtplU4gWbb+wNgeoBMmGLQ= diff --git a/example/trivial/go.mod b/example/trivial/go.mod index 408d52e54..0428c96de 100644 --- a/example/trivial/go.mod +++ b/example/trivial/go.mod @@ -1,8 +1,8 @@ module github.com/uptrace/bun/example/trivial -go 1.22.0 +go 1.23 -toolchain go1.22.6 +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. 
@@ -35,6 +35,8 @@ require ( require ( filippo.io/edwards25519 v1.1.0 // indirect + github.com/bahlo/generic-list-go v0.2.0 // indirect + github.com/buger/jsonparser v1.1.1 // indirect github.com/dustin/go-humanize v1.0.1 // indirect github.com/fatih/color v1.18.0 // indirect github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 // indirect @@ -42,6 +44,7 @@ require ( github.com/google/uuid v1.6.0 // indirect github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect github.com/jinzhu/inflection v1.0.0 // indirect + github.com/mailru/easyjson v0.7.7 // indirect github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-isatty v0.0.20 // indirect github.com/mattn/go-sqlite3 v1.14.24 // indirect @@ -51,10 +54,12 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/crypto v0.28.0 // indirect - golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect - golang.org/x/mod v0.21.0 // indirect - golang.org/x/sys v0.26.0 // indirect + github.com/wk8/go-ordered-map/v2 v2.1.9-0.20240816141633-0a40785b4f41 // indirect + golang.org/x/crypto v0.29.0 // indirect + golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f // indirect + golang.org/x/mod v0.22.0 // indirect + golang.org/x/sys v0.27.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect mellium.im/sasl v0.3.2 // indirect modernc.org/gc/v3 v3.0.0-20241004144649-1aea3fae8852 // indirect modernc.org/libc v1.61.0 // indirect diff --git a/example/trivial/go.sum b/example/trivial/go.sum index ff7e19386..3b45e499e 100644 --- a/example/trivial/go.sum +++ b/example/trivial/go.sum @@ -3,6 +3,10 @@ filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4 github.com/Azure/azure-sdk-for-go/sdk/azcore v0.19.0/go.mod h1:h6H6c8enJmmocHUbLiiGY6sx7f9i+X3m1CHdd5c6Rdw= github.com/Azure/azure-sdk-for-go/sdk/azidentity v0.11.0/go.mod 
h1:HcM1YX14R7CJcghJGOYCgdezslRSVzqwLf/q+4Y2r/0= github.com/Azure/azure-sdk-for-go/sdk/internal v0.7.0/go.mod h1:yqy467j36fJxcRV2TzfVZ1pCb5vxm4BtZPUdYWe/Xo8= +github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk= +github.com/bahlo/generic-list-go v0.2.0/go.mod h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg= +github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= +github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -28,6 +32,11 @@ github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= @@ -38,6 +47,8 @@ github.com/mattn/go-sqlite3 v1.14.24/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxU github.com/modocache/gover 
v0.0.0-20171022184752-b58185e213c5/go.mod h1:caMODM3PzxT8aQXRPkAt8xlV/e7d7w8GM5g0fa5F0D8= github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4= github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/pkg/browser v0.0.0-20180916011732-0a3d74bf9ce4/go.mod h1:4OwLy04Bl9Ef3GJJCoec+30X3LQs/0/m4HFRt/2LUSA= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= @@ -55,20 +66,22 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= +github.com/wk8/go-ordered-map/v2 v2.1.9-0.20240816141633-0a40785b4f41 h1:rnB8ZLMeAr3VcqjfRkAm27qb8y6zFKNfuHvy1Gfe7KI= +github.com/wk8/go-ordered-map/v2 v2.1.9-0.20240816141633-0a40785b4f41/go.mod h1:DbzwytT4g/odXquuOCqroKvtxxldI4nb3nuesHF/Exo= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20201016220609-9e8e0b390897/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw= -golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c 
h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8= -golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= -golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ= +golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20210610132358-84b48f89b13b/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -76,15 +89,17 @@ golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.26.0 h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ= -golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0= +golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= +golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/example/tx-composition/go.mod b/example/tx-composition/go.mod index 91ad19129..6b44b3d5a 100644 --- a/example/tx-composition/go.mod +++ b/example/tx-composition/go.mod @@ -1,8 +1,8 @@ module 
github.com/uptrace/bun/example/tx-composition -go 1.22.0 +go 1.23 -toolchain go1.22.6 +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. @@ -36,8 +36,8 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f // indirect + golang.org/x/sys v0.27.0 // indirect modernc.org/gc/v3 v3.0.0-20241004144649-1aea3fae8852 // indirect modernc.org/libc v1.61.0 // indirect modernc.org/mathutil v1.6.0 // indirect diff --git a/example/tx-composition/go.sum b/example/tx-composition/go.sum index c4f8fdaeb..0f91592d5 100644 --- a/example/tx-composition/go.sum +++ b/example/tx-composition/go.sum @@ -35,18 +35,18 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8= -golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= -golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod 
h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/tools v0.26.0 h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ= -golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= +golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= modernc.org/cc/v4 v4.21.4 h1:3Be/Rdo1fpr8GrQ7IVw9OHtplU4gWbb+wNgeoBMmGLQ= diff --git a/extra/bundebug/go.mod b/extra/bundebug/go.mod index e8ae70399..d4973a070 100644 --- a/extra/bundebug/go.mod +++ b/extra/bundebug/go.mod @@ -1,6 +1,8 @@ module github.com/uptrace/bun/extra/bundebug -go 1.22 +go 1.23 + +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. 
@@ -17,5 +19,5 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/sys v0.27.0 // indirect ) diff --git a/extra/bundebug/go.sum b/extra/bundebug/go.sum index 1a8a7149e..85c7456f8 100644 --- a/extra/bundebug/go.sum +++ b/extra/bundebug/go.sum @@ -23,7 +23,7 @@ github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAh github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/extra/bunotel/go.mod b/extra/bunotel/go.mod index a902d51ca..7850152f4 100644 --- a/extra/bunotel/go.mod +++ b/extra/bunotel/go.mod @@ -1,15 +1,17 @@ module github.com/uptrace/bun/extra/bunotel -go 1.22 +go 1.23 + +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. 
require ( github.com/uptrace/bun v1.2.5 github.com/uptrace/opentelemetry-go-extra/otelsql v0.3.2 - go.opentelemetry.io/otel v1.31.0 - go.opentelemetry.io/otel/metric v1.31.0 - go.opentelemetry.io/otel/trace v1.31.0 + go.opentelemetry.io/otel v1.32.0 + go.opentelemetry.io/otel/metric v1.32.0 + go.opentelemetry.io/otel/trace v1.32.0 ) require ( @@ -20,5 +22,5 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/sys v0.27.0 // indirect ) diff --git a/extra/bunotel/go.sum b/extra/bunotel/go.sum index e03782753..2464138a6 100644 --- a/extra/bunotel/go.sum +++ b/extra/bunotel/go.sum @@ -23,13 +23,13 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -go.opentelemetry.io/otel v1.31.0 h1:NsJcKPIW0D0H3NgzPDHmo0WW6SptzPdqg/L1zsIm2hY= -go.opentelemetry.io/otel v1.31.0/go.mod h1:O0C14Yl9FgkjqcCZAsE053C13OaddMYr/hz6clDkEJE= -go.opentelemetry.io/otel/metric v1.31.0 h1:FSErL0ATQAmYHUIzSezZibnyVlft1ybhy4ozRPcF2fE= -go.opentelemetry.io/otel/metric v1.31.0/go.mod h1:C3dEloVbLuYoX41KpmAhOqNriGbA+qqH6PQ5E5mUfnY= -go.opentelemetry.io/otel/trace v1.31.0 h1:ffjsj1aRouKewfr85U2aGagJ46+MvodynlQ1HYdmJys= -go.opentelemetry.io/otel/trace v1.31.0/go.mod h1:TXZkRk7SM2ZQLtR6eoAWQFIHPvzQ06FJAsO1tJg480A= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +go.opentelemetry.io/otel v1.32.0 h1:WnBN+Xjcteh0zdk01SVqV55d/m62NJLJdIyb4y/WO5U= +go.opentelemetry.io/otel v1.32.0/go.mod 
h1:00DCVSB0RQcnzlwyTfqtxSm+DRr9hpYrHjNGiBHVQIg= +go.opentelemetry.io/otel/metric v1.32.0 h1:xV2umtmNcThh2/a/aCP+h64Xx5wsj8qqnkYZktzNa0M= +go.opentelemetry.io/otel/metric v1.32.0/go.mod h1:jH7CIbbK6SH2V2wE16W05BHCtIDzauciCRLoc/SyMv8= +go.opentelemetry.io/otel/trace v1.32.0 h1:WIC9mYrXf8TmY/EXuULKc8hR17vE+Hjv2cssQDe03fM= +go.opentelemetry.io/otel/trace v1.32.0/go.mod h1:+i4rkvCraA+tG6AzwloGaCtkx53Fa+L+V8e9a7YvhT8= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/extra/bunrelic/go.mod b/extra/bunrelic/go.mod index 70a59ae9b..6f5cc1e4e 100644 --- a/extra/bunrelic/go.mod +++ b/extra/bunrelic/go.mod @@ -1,6 +1,8 @@ module github.com/uptrace/bun/extra/bunrelic -go 1.22 +go 1.23 + +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. 
@@ -15,10 +17,10 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/net v0.30.0 // indirect - golang.org/x/sys v0.26.0 // indirect - golang.org/x/text v0.19.0 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20241021214115-324edc3d5d38 // indirect - google.golang.org/grpc v1.67.1 // indirect + golang.org/x/net v0.31.0 // indirect + golang.org/x/sys v0.27.0 // indirect + golang.org/x/text v0.20.0 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20241104194629-dd2ea8efbc28 // indirect + google.golang.org/grpc v1.68.0 // indirect google.golang.org/protobuf v1.35.1 // indirect ) diff --git a/extra/bunrelic/go.sum b/extra/bunrelic/go.sum index 7e373237c..2a507ada9 100644 --- a/extra/bunrelic/go.sum +++ b/extra/bunrelic/go.sum @@ -1,5 +1,7 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= @@ -18,16 +20,16 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/net v0.30.0 h1:AcW1SDZMkb8IpzCdQUaIq2sP4sZ4zw+55h6ynffypl4= 
-golang.org/x/net v0.30.0/go.mod h1:2wGyMJ5iFasEhkwi13ChkO/t1ECNC4X4eBKkVFyYFlU= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM= -golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= -google.golang.org/genproto/googleapis/rpc v0.0.0-20241021214115-324edc3d5d38 h1:zciRKQ4kBpFgpfC5QQCVtnnNAcLIqweL7plyZRQHVpI= -google.golang.org/genproto/googleapis/rpc v0.0.0-20241021214115-324edc3d5d38/go.mod h1:GX3210XPVPUjJbTUbvwI8f2IpZDMZuPJWDzDuebbviI= -google.golang.org/grpc v1.67.1 h1:zWnc1Vrcno+lHZCOofnIMvycFcc0QRGIzm9dhnDX68E= -google.golang.org/grpc v1.67.1/go.mod h1:1gLDyUQU7CTLJI90u3nXZ9ekeghjeM7pTDZlqFNg2AA= +golang.org/x/net v0.31.0 h1:68CPQngjLL0r2AlUKiSxtQFKvzRVbnzLwMUn5SzcLHo= +golang.org/x/net v0.31.0/go.mod h1:P4fl1q7dY2hnZFxEk4pPSkDHF+QqjitcnDjUQyMM+pM= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug= +golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4= +google.golang.org/genproto/googleapis/rpc v0.0.0-20241104194629-dd2ea8efbc28 h1:XVhgTWWV3kGQlwJHR3upFWZeTsei6Oks1apkZSeonIE= +google.golang.org/genproto/googleapis/rpc v0.0.0-20241104194629-dd2ea8efbc28/go.mod h1:GX3210XPVPUjJbTUbvwI8f2IpZDMZuPJWDzDuebbviI= +google.golang.org/grpc v1.68.0 h1:aHQeeJbo8zAkAa3pRzrVjZlbz6uSfeOXlJNQM0RAbz0= +google.golang.org/grpc v1.68.0/go.mod h1:fmSPC5AsjSBCK54MyHRx48kpOti1/jRfOlwEWywNjWA= google.golang.org/protobuf v1.35.1 h1:m3LfL6/Ca+fqnjnlqQXNpFPABW1UD7mjh8KO2mKFytA= google.golang.org/protobuf v1.35.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= diff --git a/extra/bunslog/go.mod 
b/extra/bunslog/go.mod index 685da67a1..338b2a885 100644 --- a/extra/bunslog/go.mod +++ b/extra/bunslog/go.mod @@ -1,6 +1,8 @@ module github.com/uptrace/bun/extra/bunslog -go 1.22 +go 1.23 + +toolchain go1.23.2 replace github.com/uptrace/bun => ../.. @@ -12,5 +14,5 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/sys v0.27.0 // indirect ) diff --git a/extra/bunslog/go.sum b/extra/bunslog/go.sum index 1e3c492a4..045ab2f33 100644 --- a/extra/bunslog/go.sum +++ b/extra/bunslog/go.sum @@ -14,7 +14,7 @@ github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IU github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/go.mod b/go.mod index bfdaff905..9c55d7dd4 100644 --- a/go.mod +++ b/go.mod @@ -24,7 +24,7 @@ require ( github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - golang.org/x/sys v0.26.0 // indirect + golang.org/x/sys v0.27.0 // indirect gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect gopkg.in/yaml.v3 v3.0.1 // 
indirect ) diff --git a/go.sum b/go.sum index c08e09e39..dd8ae7543 100644 --- a/go.sum +++ b/go.sum @@ -49,8 +49,8 @@ github.com/wk8/go-ordered-map/v2 v2.1.9-0.20240816141633-0a40785b4f41/go.mod h1: golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/internal/dbtest/go.mod b/internal/dbtest/go.mod index ebe262e80..c5912c9c2 100644 --- a/internal/dbtest/go.mod +++ b/internal/dbtest/go.mod @@ -39,6 +39,7 @@ require ( github.com/uptrace/bun/driver/pgdriver v1.2.5 github.com/uptrace/bun/driver/sqliteshim v1.2.5 github.com/uptrace/bun/extra/bundebug v1.2.5 + github.com/wk8/go-ordered-map/v2 v2.1.9-0.20240816141633-0a40785b4f41 ) require ( @@ -70,12 +71,11 @@ require ( github.com/tmthrgd/go-hex v0.0.0-20190904060850-447a3041c3bc // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect - github.com/wk8/go-ordered-map/v2 v2.1.9-0.20240816141633-0a40785b4f41 // indirect - golang.org/x/crypto v0.28.0 // indirect - golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c // indirect - golang.org/x/mod v0.21.0 // indirect - golang.org/x/sys v0.26.0 // indirect - golang.org/x/text 
v0.19.0 // indirect + golang.org/x/crypto v0.29.0 // indirect + golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f // indirect + golang.org/x/mod v0.22.0 // indirect + golang.org/x/sys v0.27.0 // indirect + golang.org/x/text v0.20.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect mellium.im/sasl v0.3.2 // indirect modernc.org/gc/v3 v3.0.0-20241004144649-1aea3fae8852 // indirect diff --git a/internal/dbtest/go.sum b/internal/dbtest/go.sum index 7983053d0..4dad0b482 100644 --- a/internal/dbtest/go.sum +++ b/internal/dbtest/go.sum @@ -410,11 +410,11 @@ golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20201016220609-9e8e0b390897/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= -golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw= -golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U= +golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ= +golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY= -golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo= +golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod 
h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -422,8 +422,8 @@ golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHl golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= -golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -450,8 +450,8 @@ golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys 
v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -476,15 +476,15 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM= -golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= +golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug= +golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4= golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools 
v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -502,8 +502,8 @@ golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.26.0 h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ= -golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0= +golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= +golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= From 30dc4db6bef7992f08002990678c7067f5a57196 Mon Sep 17 00:00:00 2001 From: Vladimir Mihailenco Date: Sun, 10 Nov 2024 14:43:05 +0200 Subject: [PATCH 49/55] add tz --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 50a1903e7..255d0f7ee 100644 --- a/Makefile +++ b/Makefile @@ -6,7 +6,7 @@ test: echo "go test in $${dir}"; \ (cd "$${dir}" && \ go test && \ - env GOOS=linux GOARCH=386 go test && \ + env GOOS=linux GOARCH=386 TZ= go test && \ go vet); \ done From 6b0ddc1df9d326ff4330385dd5eeb9525a6485ba Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Sun, 10 Nov 2024 19:43:01 +0100 Subject: [PATCH 50/55] refactor: move FQN to sqlschema package --- dialect/pgdialect/alter_table.go | 2 +- internal/dbtest/query_test.go | 2 +- migrate/diff.go | 5 ++--- 
migrate/operations.go | 33 ++++++++++++++++---------------- migrate/sqlschema/database.go | 8 ++++---- migrate/sqlschema/migrator.go | 2 +- migrate/sqlschema/table.go | 25 +++++++++++++++++++++--- schema/sqlfmt.go | 19 ------------------ 8 files changed, 47 insertions(+), 49 deletions(-) diff --git a/dialect/pgdialect/alter_table.go b/dialect/pgdialect/alter_table.go index 0d9f45f28..64c84594e 100644 --- a/dialect/pgdialect/alter_table.go +++ b/dialect/pgdialect/alter_table.go @@ -26,7 +26,7 @@ func (m *migrator) AppendSQL(b []byte, operation interface{}) (_ []byte, err err fmter := m.db.Formatter() // Append ALTER TABLE statement to the enclosed query bytes []byte. - appendAlterTable := func(query []byte, fqn schema.FQN) []byte { + appendAlterTable := func(query []byte, fqn sqlschema.FQN) []byte { query = append(query, "ALTER TABLE "...) query, _ = fqn.AppendQuery(fmter, query) return append(query, " "...) diff --git a/internal/dbtest/query_test.go b/internal/dbtest/query_test.go index 093c940c2..b906cb20e 100644 --- a/internal/dbtest/query_test.go +++ b/internal/dbtest/query_test.go @@ -1593,7 +1593,7 @@ func TestAlterTable(t *testing.T) { Genre string } - fqn := schema.FQN{Schema: "hobbies", Table: "movies"} + fqn := sqlschema.FQN{Schema: "hobbies", Table: "movies"} tests := []struct { name string diff --git a/migrate/diff.go b/migrate/diff.go index 5c93a33e0..cb0e60a2d 100644 --- a/migrate/diff.go +++ b/migrate/diff.go @@ -2,7 +2,6 @@ package migrate import ( "github.com/uptrace/bun/migrate/sqlschema" - "github.com/uptrace/bun/schema" ) // changeset is a set of changes to the database schema definition. @@ -379,7 +378,7 @@ func newRefMap(fks map[sqlschema.ForeignKey]string) refMap { } // RenameT updates table name in all foreign key definions which depend on it. 
-func (rm refMap) RenameTable(table schema.FQN, newName string) { +func (rm refMap) RenameTable(table sqlschema.FQN, newName string) { for fk := range rm { switch table { case fk.From.FQN: @@ -391,7 +390,7 @@ func (rm refMap) RenameTable(table schema.FQN, newName string) { } // RenameColumn updates column name in all foreign key definions which depend on it. -func (rm refMap) RenameColumn(table schema.FQN, column, newName string) { +func (rm refMap) RenameColumn(table sqlschema.FQN, column, newName string) { for fk := range rm { if table == fk.From.FQN { fk.From.Column.Replace(column, newName) diff --git a/migrate/operations.go b/migrate/operations.go index 41f5bd6ef..68baadee5 100644 --- a/migrate/operations.go +++ b/migrate/operations.go @@ -4,7 +4,6 @@ import ( "fmt" "github.com/uptrace/bun/migrate/sqlschema" - "github.com/uptrace/bun/schema" ) // Operation encapsulates the request to change a database definition @@ -32,7 +31,7 @@ type Operation interface { // Make sure the dialect does not include FOREIGN KEY constraints in the CREATE TABLE // statement, as those may potentially reference not-yet-existing columns/tables. type CreateTableOp struct { - FQN schema.FQN + FQN sqlschema.FQN Model interface{} } @@ -44,7 +43,7 @@ func (op *CreateTableOp) GetReverse() Operation { // DropTableOp drops a database table. This operation is not reversible. type DropTableOp struct { - FQN schema.FQN + FQN sqlschema.FQN } var _ Operation = (*DropTableOp)(nil) @@ -63,7 +62,7 @@ func (op *DropTableOp) GetReverse() Operation { // RenameTableOp renames the table. Note, that changing the "schema" part of the table's FQN is not allowed. 
type RenameTableOp struct { - FQN schema.FQN + FQN sqlschema.FQN NewName string } @@ -71,7 +70,7 @@ var _ Operation = (*RenameTableOp)(nil) func (op *RenameTableOp) GetReverse() Operation { return &RenameTableOp{ - FQN: schema.FQN{Schema: op.FQN.Schema, Table: op.NewName}, + FQN: sqlschema.FQN{Schema: op.FQN.Schema, Table: op.NewName}, NewName: op.FQN.Table, } } @@ -79,7 +78,7 @@ func (op *RenameTableOp) GetReverse() Operation { // RenameColumnOp renames a column in the table. If the changeset includes a rename operation // for the column's table, it should be executed first. type RenameColumnOp struct { - FQN schema.FQN + FQN sqlschema.FQN OldName string NewName string } @@ -101,7 +100,7 @@ func (op *RenameColumnOp) DependsOn(another Operation) bool { // AddColumnOp adds a new column to the table. type AddColumnOp struct { - FQN schema.FQN + FQN sqlschema.FQN Column string ColDef sqlschema.Column } @@ -123,7 +122,7 @@ func (op *AddColumnOp) GetReverse() Operation { // DropColumnOp depends on DropForeignKeyOp, DropPrimaryKeyOp, and ChangePrimaryKeyOp // if any of the constraints is defined on this table. 
type DropColumnOp struct { - FQN schema.FQN + FQN sqlschema.FQN Column string ColDef sqlschema.Column } @@ -158,7 +157,7 @@ type AddForeignKeyOp struct { var _ Operation = (*AddForeignKeyOp)(nil) -func (op *AddForeignKeyOp) FQN() schema.FQN { +func (op *AddForeignKeyOp) FQN() sqlschema.FQN { return op.ForeignKey.From.FQN } @@ -166,7 +165,7 @@ func (op *AddForeignKeyOp) DependsOn(another Operation) bool { switch another := another.(type) { case *RenameTableOp: return op.ForeignKey.DependsOnTable(another.FQN) || - op.ForeignKey.DependsOnTable(schema.FQN{Schema: another.FQN.Schema, Table: another.NewName}) + op.ForeignKey.DependsOnTable(sqlschema.FQN{Schema: another.FQN.Schema, Table: another.NewName}) case *CreateTableOp: return op.ForeignKey.DependsOnTable(another.FQN) } @@ -188,7 +187,7 @@ type DropForeignKeyOp struct { var _ Operation = (*DropForeignKeyOp)(nil) -func (op *DropForeignKeyOp) FQN() schema.FQN { +func (op *DropForeignKeyOp) FQN() sqlschema.FQN { return op.ForeignKey.From.FQN } @@ -201,7 +200,7 @@ func (op *DropForeignKeyOp) GetReverse() Operation { // AddUniqueConstraintOp adds new UNIQUE constraint to the table. type AddUniqueConstraintOp struct { - FQN schema.FQN + FQN sqlschema.FQN Unique sqlschema.Unique } @@ -231,7 +230,7 @@ func (op *AddUniqueConstraintOp) DependsOn(another Operation) bool { // DropUniqueConstraintOp drops a UNIQUE constraint. type DropUniqueConstraintOp struct { - FQN schema.FQN + FQN sqlschema.FQN Unique sqlschema.Unique } @@ -256,7 +255,7 @@ func (op *DropUniqueConstraintOp) GetReverse() Operation { // E.g. reducing VARCHAR lenght is not possible in most dialects. // AutoMigrator does not enforce or validate these rules. type ChangeColumnTypeOp struct { - FQN schema.FQN + FQN sqlschema.FQN Column string From sqlschema.Column To sqlschema.Column @@ -275,7 +274,7 @@ func (op *ChangeColumnTypeOp) GetReverse() Operation { // DropPrimaryKeyOp drops the table's PRIMARY KEY. 
type DropPrimaryKeyOp struct { - FQN schema.FQN + FQN sqlschema.FQN PrimaryKey sqlschema.PrimaryKey } @@ -290,7 +289,7 @@ func (op *DropPrimaryKeyOp) GetReverse() Operation { // AddPrimaryKeyOp adds a new PRIMARY KEY to the table. type AddPrimaryKeyOp struct { - FQN schema.FQN + FQN sqlschema.FQN PrimaryKey sqlschema.PrimaryKey } @@ -313,7 +312,7 @@ func (op *AddPrimaryKeyOp) DependsOn(another Operation) bool { // ChangePrimaryKeyOp changes the PRIMARY KEY of the table. type ChangePrimaryKeyOp struct { - FQN schema.FQN + FQN sqlschema.FQN Old sqlschema.PrimaryKey New sqlschema.PrimaryKey } diff --git a/migrate/sqlschema/database.go b/migrate/sqlschema/database.go index 9800306db..fa96a9236 100644 --- a/migrate/sqlschema/database.go +++ b/migrate/sqlschema/database.go @@ -39,16 +39,16 @@ type ForeignKey struct { func NewColumnReference(schemaName, tableName string, columns ...string) ColumnReference { return ColumnReference{ - FQN: schema.FQN{Schema: schemaName, Table: tableName}, + FQN: FQN{Schema: schemaName, Table: tableName}, Column: NewColumns(columns...), } } -func (fk ForeignKey) DependsOnTable(fqn schema.FQN) bool { +func (fk ForeignKey) DependsOnTable(fqn FQN) bool { return fk.From.FQN == fqn || fk.To.FQN == fqn } -func (fk ForeignKey) DependsOnColumn(fqn schema.FQN, column string) bool { +func (fk ForeignKey) DependsOnColumn(fqn FQN, column string) bool { return fk.DependsOnTable(fqn) && (fk.From.Column.Contains(column) || fk.To.Column.Contains(column)) } @@ -122,6 +122,6 @@ func (u Unique) Equals(other Unique) bool { } type ColumnReference struct { - FQN schema.FQN + FQN FQN Column Columns } diff --git a/migrate/sqlschema/migrator.go b/migrate/sqlschema/migrator.go index 3532da0d4..bb8f1b82f 100644 --- a/migrate/sqlschema/migrator.go +++ b/migrate/sqlschema/migrator.go @@ -44,6 +44,6 @@ func (m *BaseMigrator) AppendCreateTable(b []byte, model interface{}) ([]byte, e return m.db.NewCreateTable().Model(model).AppendQuery(m.db.Formatter(), b) } -func (m 
*BaseMigrator) AppendDropTable(b []byte, fqn schema.FQN) ([]byte, error) { +func (m *BaseMigrator) AppendDropTable(b []byte, fqn FQN) ([]byte, error) { return m.db.NewDropTable().TableExpr(fqn.String()).AppendQuery(m.db.Formatter(), b) } diff --git a/migrate/sqlschema/table.go b/migrate/sqlschema/table.go index e5e71479c..acbc9bb65 100644 --- a/migrate/sqlschema/table.go +++ b/migrate/sqlschema/table.go @@ -1,6 +1,8 @@ package sqlschema import ( + "fmt" + "github.com/uptrace/bun/schema" orderedmap "github.com/wk8/go-ordered-map/v2" ) @@ -11,7 +13,7 @@ type Table interface { GetColumns() *orderedmap.OrderedMap[string, Column] GetPrimaryKey() *PrimaryKey GetUniqueConstraints() []Unique - GetFQN() schema.FQN + GetFQN() FQN } var _ Table = (*BaseTable)(nil) @@ -61,6 +63,23 @@ func (td *BaseTable) GetUniqueConstraints() []Unique { return td.UniqueConstraints } -func (t *BaseTable) GetFQN() schema.FQN { - return schema.FQN{Schema: t.Schema, Table: t.Name} +func (t *BaseTable) GetFQN() FQN { + return FQN{Schema: t.Schema, Table: t.Name} +} + +// FQN uniquely identifies a table in a multi-schema setup. +type FQN struct { + Schema string + Table string +} + +var _ schema.QueryAppender = (*FQN)(nil) + +// AppendQuery appends a fully-qualified table name. +func (fqn *FQN) AppendQuery(fmter schema.Formatter, b []byte) ([]byte, error) { + return fmter.AppendQuery(b, "?.?", schema.Ident(fqn.Schema), schema.Ident(fqn.Table)), nil +} + +func (fqn *FQN) String() string { + return fmt.Sprintf("%s.%s", fqn.Schema, fqn.Table) } diff --git a/schema/sqlfmt.go b/schema/sqlfmt.go index 5703c9694..7b4a9493f 100644 --- a/schema/sqlfmt.go +++ b/schema/sqlfmt.go @@ -1,7 +1,6 @@ package schema import ( - "fmt" "strings" "github.com/uptrace/bun/internal" @@ -39,24 +38,6 @@ func (s Name) AppendQuery(fmter Formatter, b []byte) ([]byte, error) { //------------------------------------------------------------------------------ -// FQN appends a fully-qualified table name. 
-type FQN struct { - Schema string - Table string -} - -var _ QueryAppender = (*FQN)(nil) - -func (fqn *FQN) AppendQuery(fmter Formatter, b []byte) ([]byte, error) { - return fmter.AppendQuery(b, "?.?", Ident(fqn.Schema), Ident(fqn.Table)), nil -} - -func (fqn *FQN) String() string { - return fmt.Sprintf("%s.%s", fqn.Schema, fqn.Table) -} - -//------------------------------------------------------------------------------ - // Ident represents a SQL identifier, for example, // a fully qualified column name such as `table_name.col_name`. type Ident string From 31ed58254ad08143d88684672acd33ce044ea5a9 Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Sun, 10 Nov 2024 20:41:13 +0100 Subject: [PATCH 51/55] fix: remove schema name from t.Name during bun-schema inspection --- internal/dbtest/inspect_test.go | 21 +++++++++++++++++++++ internal/dbtest/migrate_test.go | 14 ++++++++------ migrate/sqlschema/inspector.go | 11 +++++++++-- 3 files changed, 38 insertions(+), 8 deletions(-) diff --git a/internal/dbtest/inspect_test.go b/internal/dbtest/inspect_test.go index 7cb0ea8ec..a566acd08 100644 --- a/internal/dbtest/inspect_test.go +++ b/internal/dbtest/inspect_test.go @@ -626,5 +626,26 @@ func TestBunModelInspector_Inspect(t *testing.T) { return } }) + + t.Run("separates schema and table name", func(t *testing.T) { + type Model struct { + bun.BaseModel `bun:"table:custom_schema.model"` + } + + tables := schema.NewTables(dialect) + tables.Register((*Model)(nil)) + inspector := sqlschema.NewBunModelInspector(tables) + + got, err := inspector.Inspect(context.Background()) + require.NoError(t, err) + + gotTables := got.GetTables() + require.Equal(t, 1, gotTables.Len()) + for _, table := range gotTables.FromOldest() { + require.Equal(t, "custom_schema", table.GetSchema(), "wrong schema name") + require.Equal(t, "model", table.GetName(), "wrong table name") + return + } + }) }) } diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index 
9ceca55a6..08a22e12b 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -857,7 +857,7 @@ func testUnique(t *testing.T, db *bun.DB) { func testUniqueRenamedTable(t *testing.T, db *bun.DB) { type TableBefore struct { - bun.BaseModel `bun:"table:before"` + bun.BaseModel `bun:"table:automigrate.before"` FirstName string `bun:"first_name,unique:full_name"` LastName string `bun:"last_name,unique:full_name"` Birthday string `bun:"birthday,unique"` @@ -866,7 +866,7 @@ func testUniqueRenamedTable(t *testing.T, db *bun.DB) { } type TableAfter struct { - bun.BaseModel `bun:"table:after"` + bun.BaseModel `bun:"table:automigrate.after"` // Expand full_name unique group and rename it. FirstName string `bun:"first_name,unique:birth_certificate"` LastName string `bun:"last_name,unique:birth_certificate"` @@ -881,7 +881,7 @@ func testUniqueRenamedTable(t *testing.T, db *bun.DB) { orderedmap.Pair[string, sqlschema.Table]{ Key: "after", Value: &sqlschema.BaseTable{ - Schema: db.Dialect().DefaultSchema(), + Schema: "automigrate", Name: "after", Columns: orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( orderedmap.Pair[string, sqlschema.Column]{ @@ -931,6 +931,7 @@ func testUniqueRenamedTable(t *testing.T, db *bun.DB) { ctx := context.Background() inspect := inspectDbOrSkip(t, db) + mustCreateSchema(t, ctx, db, "automigrate") mustResetModel(t, ctx, db, (*TableBefore)(nil)) mustDropTableOnCleanup(t, ctx, db, (*TableAfter)(nil)) m := newAutoMigratorOrSkip(t, db, migrate.WithModel((*TableAfter)(nil))) @@ -946,7 +947,7 @@ func testUniqueRenamedTable(t *testing.T, db *bun.DB) { func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { // Has a composite primary key. 
type DropPKBefore struct { - bun.BaseModel `bun:"table:drop_your_pks"` + bun.BaseModel `bun:"table:please.drop_your_pks"` FirstName string `bun:"first_name,pk"` LastName string `bun:"last_name,pk"` } @@ -970,7 +971,7 @@ func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { // Doesn't have any primary keys. type DropPKAfter struct { - bun.BaseModel `bun:"table:drop_your_pks"` + bun.BaseModel `bun:"table:please.drop_your_pks"` FirstName string `bun:"first_name,notnull"` LastName string `bun:"last_name,notnull"` } @@ -994,7 +995,7 @@ func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { orderedmap.Pair[string, sqlschema.Table]{ Key: "drop_your_pks", Value: &sqlschema.BaseTable{ - Schema: db.Dialect().DefaultSchema(), + Schema: "please", Name: "drop_your_pks", Columns: orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( orderedmap.Pair[string, sqlschema.Column]{ @@ -1074,6 +1075,7 @@ func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { ctx := context.Background() inspect := inspectDbOrSkip(t, db) + mustCreateSchema(t, ctx, db, "please") mustResetModel(t, ctx, db, (*DropPKBefore)(nil), (*AddNewPKBefore)(nil), diff --git a/migrate/sqlschema/inspector.go b/migrate/sqlschema/inspector.go index 087a7f9f9..04fc9a656 100644 --- a/migrate/sqlschema/inspector.go +++ b/migrate/sqlschema/inspector.go @@ -129,10 +129,17 @@ func (bmi *BunModelInspector) Inspect(ctx context.Context) (Database, error) { pk = &PrimaryKey{Columns: NewColumns(columns...)} } - state.Tables.Set(t.Name, &BunTable{ + // In cases where a table is defined in a non-default schema in the `bun:table` tag, + // schema.Table only extracts the name of the schema, but passes the entire tag value to t.Name + // for backwads-compatibility. 
For example, a bun model like this: + // type Model struct { bun.BaseModel `bun:"table:favourite.books` } + // produces + // schema.Table{ Schema: "favourite", Name: "favourite.books" } + tableName := strings.TrimPrefix(t.Name, t.Schema+".") + state.Tables.Set(tableName, &BunTable{ BaseTable: BaseTable{ Schema: t.Schema, - Name: t.Name, + Name: tableName, Columns: columns, UniqueConstraints: unique, PrimaryKey: pk, From 4e9665957d64a64fed77e374c5e4eb3308999e5f Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Mon, 11 Nov 2024 21:11:53 +0100 Subject: [PATCH 52/55] refactor: drop multi-schema support AutoMigrator only supports 1 schema at a time. Use WithSchemaName() option to configure it. Defaults to the default schema in the dialect. --- dialect/pgdialect/inspector.go | 12 +- internal/dbtest/inspect_test.go | 563 +++++++++++++++++--------------- internal/dbtest/migrate_test.go | 24 +- migrate/auto.go | 15 +- migrate/sqlschema/inspector.go | 8 +- 5 files changed, 347 insertions(+), 275 deletions(-) diff --git a/dialect/pgdialect/inspector.go b/dialect/pgdialect/inspector.go index d4061a487..8f8188ff6 100644 --- a/dialect/pgdialect/inspector.go +++ b/dialect/pgdialect/inspector.go @@ -30,7 +30,7 @@ func newInspector(db *bun.DB, excludeTables ...string) *Inspector { return &Inspector{db: db, excludeTables: excludeTables} } -func (in *Inspector) Inspect(ctx context.Context) (sqlschema.Database, error) { +func (in *Inspector) Inspect(ctx context.Context, schemaName string) (sqlschema.Database, error) { dbSchema := Schema{ Tables: orderedmap.New[string, sqlschema.Table](), ForeignKeys: make(map[sqlschema.ForeignKey]string), @@ -43,12 +43,12 @@ func (in *Inspector) Inspect(ctx context.Context) (sqlschema.Database, error) { } var tables []*InformationSchemaTable - if err := in.db.NewRaw(sqlInspectTables, bun.In(exclude)).Scan(ctx, &tables); err != nil { + if err := in.db.NewRaw(sqlInspectTables, schemaName, bun.In(exclude)).Scan(ctx, &tables); err != nil { return 
dbSchema, err } var fks []*ForeignKey - if err := in.db.NewRaw(sqlInspectForeignKeys, bun.In(exclude), bun.In(exclude)).Scan(ctx, &fks); err != nil { + if err := in.db.NewRaw(sqlInspectForeignKeys, schemaName, bun.In(exclude), bun.In(exclude)).Scan(ctx, &fks); err != nil { return dbSchema, err } dbSchema.ForeignKeys = make(map[sqlschema.ForeignKey]string, len(fks)) @@ -160,8 +160,7 @@ type PrimaryKey struct { } const ( - // sqlInspectTables retrieves all user-defined tables across all schemas. - // It excludes relations from Postgres's reserved "pg_" schemas and views from the "information_schema". + // sqlInspectTables retrieves all user-defined tables in the selected schema. // Pass bun.In([]string{...}) to exclude tables from this inspection or bun.In([]string{''}) to include all results. sqlInspectTables = ` SELECT @@ -182,7 +181,7 @@ FROM information_schema.tables "t" ) pk ON ("t".table_schema || '.' || "t".table_name)::regclass = pk.indrelid WHERE table_type = 'BASE TABLE' - AND "t".table_schema <> 'information_schema' + AND "t".table_schema = ? AND "t".table_schema NOT LIKE 'pg_%' AND "table_name" NOT IN (?) ORDER BY "t".table_schema, "t".table_name @@ -289,6 +288,7 @@ FROM pg_constraint co WHERE co.contype = 'f' AND co.conrelid IN (SELECT oid FROM pg_class WHERE relkind = 'r') AND ARRAY_POSITION(co.conkey, sc.attnum) = ARRAY_POSITION(co.confkey, tc.attnum) + AND ss.nspname = ? AND s.relname NOT IN (?) AND "t".relname NOT IN (?) 
GROUP BY "constraint_name", "schema_name", "table_name", target_schema, target_table ` diff --git a/internal/dbtest/inspect_test.go b/internal/dbtest/inspect_test.go index a566acd08..7ef211137 100644 --- a/internal/dbtest/inspect_test.go +++ b/internal/dbtest/inspect_test.go @@ -74,276 +74,300 @@ type Journalist struct { func TestDatabaseInspector_Inspect(t *testing.T) { testEachDB(t, func(t *testing.T, dbName string, db *bun.DB) { - db.RegisterModel((*PublisherToJournalist)(nil)) - - dbInspector, err := sqlschema.NewInspector(db, migrationsTable, migrationLocksTable) - if err != nil { - t.Skip(err) - } - - ctx := context.Background() - mustCreateSchema(t, ctx, db, "admin") - mustCreateTableWithFKs(t, ctx, db, - // Order of creation matters: - (*Journalist)(nil), // does not reference other tables - (*Publisher)(nil), // does not reference other tables - (*Office)(nil), // references Publisher - (*PublisherToJournalist)(nil), // references Journalist and Publisher - (*Article)(nil), // references Journalist and Publisher - ) defaultSchema := db.Dialect().DefaultSchema() - // Tables come sorted alphabetically by schema and table. 
- wantTables := orderedmap.New[string, sqlschema.Table](orderedmap.WithInitialData( - orderedmap.Pair[string, sqlschema.Table]{ - Key: "offices", - Value: &sqlschema.BaseTable{ - Schema: "admin", - Name: "offices", - Columns: orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( - orderedmap.Pair[string, sqlschema.Column]{ - Key: "office_name", - Value: &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - }, - }, - orderedmap.Pair[string, sqlschema.Column]{ - Key: "publisher_id", - Value: &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - IsNullable: true, - }, - }, - orderedmap.Pair[string, sqlschema.Column]{ - Key: "publisher_name", - Value: &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - IsNullable: true, - }, - }, - )), - PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("office_name")}, - }, - }, - orderedmap.Pair[string, sqlschema.Table]{ - Key: "articles", - Value: &sqlschema.BaseTable{ - Schema: defaultSchema, - Name: "articles", - Columns: orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( - orderedmap.Pair[string, sqlschema.Column]{ - Key: "isbn", - Value: &sqlschema.BaseColumn{ - SQLType: "bigint", - IsNullable: false, - IsAutoIncrement: false, - IsIdentity: true, - DefaultValue: "", - }, - }, - orderedmap.Pair[string, sqlschema.Column]{ - Key: "editor", - Value: &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - IsNullable: false, - IsAutoIncrement: false, - IsIdentity: false, - DefaultValue: "john doe", - }, - }, - orderedmap.Pair[string, sqlschema.Column]{ - Key: "title", - Value: &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - IsNullable: false, - IsAutoIncrement: false, - IsIdentity: false, - DefaultValue: "", - }, - }, - orderedmap.Pair[string, sqlschema.Column]{ - Key: "locale", - Value: &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - VarcharLen: 5, - IsNullable: true, - IsAutoIncrement: false, - IsIdentity: false, - DefaultValue: "en-GB", - }, - }, - orderedmap.Pair[string, 
sqlschema.Column]{ - Key: "page_count", - Value: &sqlschema.BaseColumn{ - SQLType: "smallint", - IsNullable: false, - IsAutoIncrement: false, - IsIdentity: false, - DefaultValue: "1", - }, - }, - orderedmap.Pair[string, sqlschema.Column]{ - Key: "book_count", - Value: &sqlschema.BaseColumn{ - SQLType: "integer", - IsNullable: false, - IsAutoIncrement: true, - IsIdentity: false, - DefaultValue: "", - }, - }, - orderedmap.Pair[string, sqlschema.Column]{ - Key: "publisher_id", - Value: &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - }, - }, - orderedmap.Pair[string, sqlschema.Column]{ - Key: "author_id", - Value: &sqlschema.BaseColumn{ - SQLType: "bigint", + for _, tt := range []struct { + name string + schemaName string + wantTables *orderedmap.OrderedMap[string, sqlschema.Table] + wantFKs []sqlschema.ForeignKey + }{ + { + name: "inspect default schema", + schemaName: defaultSchema, + // Tables come sorted alphabetically by schema and table. + wantTables: orderedmap.New[string, sqlschema.Table](orderedmap.WithInitialData( + // admin.offices should not be fetched, because it doesn't belong to the default schema. 
+ orderedmap.Pair[string, sqlschema.Table]{ + Key: "articles", + Value: &sqlschema.BaseTable{ + Schema: defaultSchema, + Name: "articles", + Columns: orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Column]{ + Key: "isbn", + Value: &sqlschema.BaseColumn{ + SQLType: "bigint", + IsNullable: false, + IsAutoIncrement: false, + IsIdentity: true, + DefaultValue: "", + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "editor", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + IsNullable: false, + IsAutoIncrement: false, + IsIdentity: false, + DefaultValue: "john doe", + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "title", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + IsNullable: false, + IsAutoIncrement: false, + IsIdentity: false, + DefaultValue: "", + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "locale", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + VarcharLen: 5, + IsNullable: true, + IsAutoIncrement: false, + IsIdentity: false, + DefaultValue: "en-GB", + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "page_count", + Value: &sqlschema.BaseColumn{ + SQLType: "smallint", + IsNullable: false, + IsAutoIncrement: false, + IsIdentity: false, + DefaultValue: "1", + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "book_count", + Value: &sqlschema.BaseColumn{ + SQLType: "integer", + IsNullable: false, + IsAutoIncrement: true, + IsIdentity: false, + DefaultValue: "", + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "publisher_id", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "author_id", + Value: &sqlschema.BaseColumn{ + SQLType: "bigint", + }, + }, + )), + PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("isbn")}, + UniqueConstraints: []sqlschema.Unique{ + {Columns: sqlschema.NewColumns("editor", "title")}, }, 
}, - )), - PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("isbn")}, - UniqueConstraints: []sqlschema.Unique{ - {Columns: sqlschema.NewColumns("editor", "title")}, }, - }, - }, - orderedmap.Pair[string, sqlschema.Table]{ - Key: "authors", - Value: &sqlschema.BaseTable{ - Schema: defaultSchema, - Name: "authors", - Columns: orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( - orderedmap.Pair[string, sqlschema.Column]{ - Key: "author_id", - Value: &sqlschema.BaseColumn{ - SQLType: "bigint", - IsIdentity: true, - }, - }, - orderedmap.Pair[string, sqlschema.Column]{ - Key: "first_name", - Value: &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - }, - }, - orderedmap.Pair[string, sqlschema.Column]{ - Key: "last_name", - Value: &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - }, - }, - orderedmap.Pair[string, sqlschema.Column]{ - Key: "email", - Value: &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, + orderedmap.Pair[string, sqlschema.Table]{ + Key: "authors", + Value: &sqlschema.BaseTable{ + Schema: defaultSchema, + Name: "authors", + Columns: orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Column]{ + Key: "author_id", + Value: &sqlschema.BaseColumn{ + SQLType: "bigint", + IsIdentity: true, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "first_name", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "last_name", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "email", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + }, + }, + )), + PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("author_id")}, + UniqueConstraints: []sqlschema.Unique{ + {Columns: sqlschema.NewColumns("first_name", "last_name")}, + {Columns: sqlschema.NewColumns("email")}, }, }, - )), - PrimaryKey: 
&sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("author_id")}, - UniqueConstraints: []sqlschema.Unique{ - {Columns: sqlschema.NewColumns("first_name", "last_name")}, - {Columns: sqlschema.NewColumns("email")}, }, - }, - }, - orderedmap.Pair[string, sqlschema.Table]{ - Key: "publisher_to_journalists", - Value: &sqlschema.BaseTable{ - Schema: defaultSchema, - Name: "publisher_to_journalists", - Columns: orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( - orderedmap.Pair[string, sqlschema.Column]{ - Key: "publisher_id", - Value: &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - }, + orderedmap.Pair[string, sqlschema.Table]{ + Key: "publisher_to_journalists", + Value: &sqlschema.BaseTable{ + Schema: defaultSchema, + Name: "publisher_to_journalists", + Columns: orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Column]{ + Key: "publisher_id", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "author_id", + Value: &sqlschema.BaseColumn{ + SQLType: "bigint", + }, + }, + )), + PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("publisher_id", "author_id")}, }, - orderedmap.Pair[string, sqlschema.Column]{ - Key: "author_id", - Value: &sqlschema.BaseColumn{ - SQLType: "bigint", + }, + orderedmap.Pair[string, sqlschema.Table]{ + Key: "publishers", + Value: &sqlschema.BaseTable{ + Schema: defaultSchema, + Name: "publishers", + Columns: orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Column]{ + Key: "publisher_id", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + DefaultValue: "gen_random_uuid()", + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "publisher_name", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "created_at", + Value: &sqlschema.BaseColumn{ + 
SQLType: "timestamp", + DefaultValue: "current_timestamp", + IsNullable: true, + }, + }, + )), + PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("publisher_id")}, + UniqueConstraints: []sqlschema.Unique{ + {Columns: sqlschema.NewColumns("publisher_id", "publisher_name")}, }, }, - )), - PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("publisher_id", "author_id")}, + }, + )), + wantFKs: []sqlschema.ForeignKey{ + { + From: sqlschema.NewColumnReference(defaultSchema, "articles", "publisher_id"), + To: sqlschema.NewColumnReference(defaultSchema, "publishers", "publisher_id"), + }, + { + From: sqlschema.NewColumnReference(defaultSchema, "articles", "author_id"), + To: sqlschema.NewColumnReference(defaultSchema, "authors", "author_id"), + }, + { + From: sqlschema.NewColumnReference(defaultSchema, "publisher_to_journalists", "publisher_id"), + To: sqlschema.NewColumnReference(defaultSchema, "publishers", "publisher_id"), + }, + { + From: sqlschema.NewColumnReference(defaultSchema, "publisher_to_journalists", "author_id"), + To: sqlschema.NewColumnReference(defaultSchema, "authors", "author_id"), + }, }, }, - orderedmap.Pair[string, sqlschema.Table]{ - Key: "publishers", - Value: &sqlschema.BaseTable{ - Schema: defaultSchema, - Name: "publishers", - Columns: orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( - orderedmap.Pair[string, sqlschema.Column]{ - Key: "publisher_id", - Value: &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - DefaultValue: "gen_random_uuid()", - }, - }, - orderedmap.Pair[string, sqlschema.Column]{ - Key: "publisher_name", - Value: &sqlschema.BaseColumn{ - SQLType: sqltype.VarChar, - }, - }, - orderedmap.Pair[string, sqlschema.Column]{ - Key: "created_at", - Value: &sqlschema.BaseColumn{ - SQLType: "timestamp", - DefaultValue: "current_timestamp", - IsNullable: true, - }, + { + name: "inspect admin schema", + schemaName: "admin", + wantTables: orderedmap.New[string, 
sqlschema.Table](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Table]{ + Key: "offices", + Value: &sqlschema.BaseTable{ + Schema: "admin", + Name: "offices", + Columns: orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( + orderedmap.Pair[string, sqlschema.Column]{ + Key: "office_name", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "publisher_id", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + IsNullable: true, + }, + }, + orderedmap.Pair[string, sqlschema.Column]{ + Key: "publisher_name", + Value: &sqlschema.BaseColumn{ + SQLType: sqltype.VarChar, + IsNullable: true, + }, + }, + )), + PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("office_name")}, }, - )), - PrimaryKey: &sqlschema.PrimaryKey{Columns: sqlschema.NewColumns("publisher_id")}, - UniqueConstraints: []sqlschema.Unique{ - {Columns: sqlschema.NewColumns("publisher_id", "publisher_name")}, + }, + )), + wantFKs: []sqlschema.ForeignKey{ + { + From: sqlschema.NewColumnReference("admin", "offices", "publisher_name", "publisher_id"), + To: sqlschema.NewColumnReference(defaultSchema, "publishers", "publisher_name", "publisher_id"), }, }, }, - )) - - wantFKs := []sqlschema.ForeignKey{ - { - From: sqlschema.NewColumnReference(defaultSchema, "articles", "publisher_id"), - To: sqlschema.NewColumnReference(defaultSchema, "publishers", "publisher_id"), - }, - { - From: sqlschema.NewColumnReference(defaultSchema, "articles", "author_id"), - To: sqlschema.NewColumnReference(defaultSchema, "authors", "author_id"), - }, - { - From: sqlschema.NewColumnReference("admin", "offices", "publisher_name", "publisher_id"), - To: sqlschema.NewColumnReference(defaultSchema, "publishers", "publisher_name", "publisher_id"), - }, - { - From: sqlschema.NewColumnReference(defaultSchema, "publisher_to_journalists", "publisher_id"), - To: sqlschema.NewColumnReference(defaultSchema, "publishers", 
"publisher_id"), - }, - { - From: sqlschema.NewColumnReference(defaultSchema, "publisher_to_journalists", "author_id"), - To: sqlschema.NewColumnReference(defaultSchema, "authors", "author_id"), - }, + } { + t.Run(tt.name, func(t *testing.T) { + db.RegisterModel((*PublisherToJournalist)(nil)) + + dbInspector, err := sqlschema.NewInspector(db, migrationsTable, migrationLocksTable) + if err != nil { + t.Skip(err) + } + + ctx := context.Background() + + // Always create admin schema to test filtration is done correctly. + mustCreateSchema(t, ctx, db, "admin") + mustCreateTableWithFKs(t, ctx, db, + // Order of creation matters: + (*Journalist)(nil), // does not reference other tables + (*Publisher)(nil), // does not reference other tables + (*Office)(nil), // references Publisher + (*PublisherToJournalist)(nil), // references Journalist and Publisher + (*Article)(nil), // references Journalist and Publisher + ) + + got, err := dbInspector.Inspect(ctx, tt.schemaName) + require.NoError(t, err) + + // State.FKs store their database names, which differ from dialect to dialect. + // Because of that we compare FKs and Tables separately. + gotTables := got.(sqlschema.BaseDatabase).Tables + cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), tt.wantTables, gotTables) + + var fks []sqlschema.ForeignKey + for fk := range got.GetForeignKeys() { + fks = append(fks, fk) + } + require.ElementsMatch(t, tt.wantFKs, fks, "foreign keys differ: expected=listA, got=listB") + }) } - - got, err := dbInspector.Inspect(ctx) - require.NoError(t, err) - - // State.FKs store their database names, which differ from dialect to dialect. - // Because of that we compare FKs and Tables separately. 
- gotTables := got.(sqlschema.BaseDatabase).Tables - cmpTables(t, db.Dialect().(sqlschema.InspectorDialect), wantTables, gotTables) - - var fks []sqlschema.ForeignKey - for fk := range got.GetForeignKeys() { - fks = append(fks, fk) - } - require.ElementsMatch(t, wantFKs, fks) }) } @@ -518,7 +542,7 @@ func TestBunModelInspector_Inspect(t *testing.T) { }, )) - got, err := inspector.Inspect(context.Background()) + got, err := inspector.Inspect(context.Background(), dialect.DefaultSchema()) require.NoError(t, err) gotTables := got.GetTables() @@ -563,7 +587,7 @@ func TestBunModelInspector_Inspect(t *testing.T) { }, )) - got, err := inspector.Inspect(context.Background()) + got, err := inspector.Inspect(context.Background(), dialect.DefaultSchema()) require.NoError(t, err) gotTables := got.GetTables() @@ -592,7 +616,7 @@ func TestBunModelInspector_Inspect(t *testing.T) { }, } - got, err := inspector.Inspect(context.Background()) + got, err := inspector.Inspect(context.Background(), dialect.DefaultSchema()) require.NoError(t, err) gotTables := got.GetTables() @@ -614,7 +638,7 @@ func TestBunModelInspector_Inspect(t *testing.T) { inspector := sqlschema.NewBunModelInspector(tables) want := sqlschema.NewColumns("id", "email") - got, err := inspector.Inspect(context.Background()) + got, err := inspector.Inspect(context.Background(), dialect.DefaultSchema()) require.NoError(t, err) gotTables := got.GetTables() @@ -636,7 +660,7 @@ func TestBunModelInspector_Inspect(t *testing.T) { tables.Register((*Model)(nil)) inspector := sqlschema.NewBunModelInspector(tables) - got, err := inspector.Inspect(context.Background()) + got, err := inspector.Inspect(context.Background(), "custom_schema") require.NoError(t, err) gotTables := got.GetTables() @@ -647,5 +671,30 @@ func TestBunModelInspector_Inspect(t *testing.T) { return } }) + + t.Run("filters tables by schema", func(t *testing.T) { + type KeepMe struct { + bun.BaseModel `bun:"table:want.keep_me"` + } + + type LoseMe struct { + 
bun.BaseModel `bun:"table:lose_me"` + } + + tables := schema.NewTables(dialect) + tables.Register((*KeepMe)(nil), (*LoseMe)(nil)) + inspector := sqlschema.NewBunModelInspector(tables) + + got, err := inspector.Inspect(context.Background(), "want") + require.NoError(t, err) + + gotTables := got.GetTables() + require.Equal(t, 1, gotTables.Len()) + for _, table := range gotTables.FromOldest() { + require.Equal(t, "want", table.GetSchema(), "wrong schema name") + require.Equal(t, "keep_me", table.GetName(), "wrong table name") + return + } + }) }) } diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index 08a22e12b..1dcd95a85 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -215,15 +215,21 @@ func newAutoMigratorOrSkip(tb testing.TB, db *bun.DB, opts ...migrate.AutoMigrat // inspectDbOrSkip returns a function to inspect the current state of the database. // The test will be *skipped* if the current dialect doesn't support database inpection // and fail if the inspector cannot successfully retrieve database state. -func inspectDbOrSkip(tb testing.TB, db *bun.DB) func(context.Context) sqlschema.BaseDatabase { +func inspectDbOrSkip(tb testing.TB, db *bun.DB, schemaName ...string) func(context.Context) sqlschema.BaseDatabase { tb.Helper() // AutoMigrator excludes these tables by default, but here we need to do this explicitly. inspector, err := sqlschema.NewInspector(db, migrationsTable, migrationLocksTable) if err != nil { tb.Skip(err) } + + // For convenience, schemaName is an optional parameter in this function. 
+ inspectSchema := db.Dialect().DefaultSchema() + if len(schemaName) > 0 { + inspectSchema = schemaName[0] + } return func(ctx context.Context) sqlschema.BaseDatabase { - state, err := inspector.Inspect(ctx) + state, err := inspector.Inspect(ctx, inspectSchema) require.NoError(tb, err) return state.(sqlschema.BaseDatabase) } @@ -930,11 +936,14 @@ func testUniqueRenamedTable(t *testing.T, db *bun.DB) { )) ctx := context.Background() - inspect := inspectDbOrSkip(t, db) + inspect := inspectDbOrSkip(t, db, "automigrate") mustCreateSchema(t, ctx, db, "automigrate") mustResetModel(t, ctx, db, (*TableBefore)(nil)) mustDropTableOnCleanup(t, ctx, db, (*TableAfter)(nil)) - m := newAutoMigratorOrSkip(t, db, migrate.WithModel((*TableAfter)(nil))) + m := newAutoMigratorOrSkip(t, db, + migrate.WithModel((*TableAfter)(nil)), + migrate.WithSchemaName("automigrate"), + ) // Act runMigrations(t, m) @@ -947,7 +956,7 @@ func testUniqueRenamedTable(t *testing.T, db *bun.DB) { func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { // Has a composite primary key. type DropPKBefore struct { - bun.BaseModel `bun:"table:please.drop_your_pks"` + bun.BaseModel `bun:"table:drop_your_pks"` FirstName string `bun:"first_name,pk"` LastName string `bun:"last_name,pk"` } @@ -971,7 +980,7 @@ func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { // Doesn't have any primary keys. 
type DropPKAfter struct { - bun.BaseModel `bun:"table:please.drop_your_pks"` + bun.BaseModel `bun:"table:drop_your_pks"` FirstName string `bun:"first_name,notnull"` LastName string `bun:"last_name,notnull"` } @@ -995,7 +1004,7 @@ func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { orderedmap.Pair[string, sqlschema.Table]{ Key: "drop_your_pks", Value: &sqlschema.BaseTable{ - Schema: "please", + Schema: db.Dialect().DefaultSchema(), Name: "drop_your_pks", Columns: orderedmap.New[string, sqlschema.Column](orderedmap.WithInitialData( orderedmap.Pair[string, sqlschema.Column]{ @@ -1075,7 +1084,6 @@ func testUpdatePrimaryKeys(t *testing.T, db *bun.DB) { ctx := context.Background() inspect := inspectDbOrSkip(t, db) - mustCreateSchema(t, ctx, db, "please") mustResetModel(t, ctx, db, (*DropPKBefore)(nil), (*AddNewPKBefore)(nil), diff --git a/migrate/auto.go b/migrate/auto.go index 95b0da2fa..11e9cb57c 100644 --- a/migrate/auto.go +++ b/migrate/auto.go @@ -33,6 +33,13 @@ func WithExcludeTable(tables ...string) AutoMigratorOption { } } +// WithSchemaName changes the default database schema to migrate objects in. +func WithSchemaName(schemaName string) AutoMigratorOption { + return func(m *AutoMigrator) { + m.schemaName = schemaName + } +} + // WithTableNameAuto overrides default migrations table name. func WithTableNameAuto(table string) AutoMigratorOption { return func(m *AutoMigrator) { @@ -109,6 +116,9 @@ type AutoMigrator struct { table string // Migrations table (excluded from database inspection) locksTable string // Migration locks table (excluded from database inspection) + // schemaName is the database schema considered for migration. + schemaName string + // includeModels define the migration scope. 
includeModels []interface{} @@ -130,6 +140,7 @@ func NewAutoMigrator(db *bun.DB, opts ...AutoMigratorOption) (*AutoMigrator, err db: db, table: defaultTable, locksTable: defaultLocksTable, + schemaName: db.Dialect().DefaultSchema(), } for _, opt := range opts { @@ -160,12 +171,12 @@ func NewAutoMigrator(db *bun.DB, opts ...AutoMigratorOption) (*AutoMigrator, err func (am *AutoMigrator) plan(ctx context.Context) (*changeset, error) { var err error - got, err := am.dbInspector.Inspect(ctx) + got, err := am.dbInspector.Inspect(ctx, am.schemaName) if err != nil { return nil, err } - want, err := am.modelInspector.Inspect(ctx) + want, err := am.modelInspector.Inspect(ctx, am.schemaName) if err != nil { return nil, err } diff --git a/migrate/sqlschema/inspector.go b/migrate/sqlschema/inspector.go index 04fc9a656..b0073fe9b 100644 --- a/migrate/sqlschema/inspector.go +++ b/migrate/sqlschema/inspector.go @@ -23,7 +23,7 @@ type InspectorDialect interface { // Inspector reads schema state. type Inspector interface { - Inspect(ctx context.Context) (Database, error) + Inspect(ctx context.Context, schemaName string) (Database, error) } // inspector is opaque pointer to a databse inspector. 
@@ -75,7 +75,7 @@ type BunTable struct { Model interface{} } -func (bmi *BunModelInspector) Inspect(ctx context.Context) (Database, error) { +func (bmi *BunModelInspector) Inspect(ctx context.Context, schemaName string) (Database, error) { state := BunModelSchema{ BaseDatabase: BaseDatabase{ ForeignKeys: make(map[ForeignKey]string), @@ -83,6 +83,10 @@ func (bmi *BunModelInspector) Inspect(ctx context.Context) (Database, error) { Tables: orderedmap.New[string, Table](), } for _, t := range bmi.tables.All() { + if t.Schema != schemaName { + continue + } + columns := orderedmap.New[string, Column]() for _, f := range t.Fields { From 48811e092c913eba4fee9d3900b0a350e87aab94 Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Mon, 11 Nov 2024 22:08:05 +0100 Subject: [PATCH 53/55] refactor: retire sqlschema.FQN Now that AutoMigrator only works with one schema at a time, there's no need to keep the code which was used to differentiate tables between schemas --- dialect/pgdialect/alter_table.go | 61 ++++---- dialect/pgdialect/inspector.go | 4 +- internal/dbtest/inspect_test.go | 20 +-- internal/dbtest/migrate_test.go | 37 +++-- internal/dbtest/query_test.go | 121 +++++++-------- .../TestAlterTable-pg-add_foreign_key | 2 +- .../snapshots/TestAlterTable-pg-drop_table | 2 +- .../TestAlterTable-pgx-add_foreign_key | 2 +- .../snapshots/TestAlterTable-pgx-drop_table | 2 +- migrate/auto.go | 2 +- migrate/diff.go | 76 +++++----- migrate/operations.go | 141 +++++++++--------- migrate/sqlschema/database.go | 18 +-- migrate/sqlschema/inspector.go | 4 +- migrate/sqlschema/migrator.go | 10 +- migrate/sqlschema/table.go | 25 ---- 16 files changed, 252 insertions(+), 275 deletions(-) diff --git a/dialect/pgdialect/alter_table.go b/dialect/pgdialect/alter_table.go index 64c84594e..dac827a20 100644 --- a/dialect/pgdialect/alter_table.go +++ b/dialect/pgdialect/alter_table.go @@ -10,14 +10,15 @@ import ( "github.com/uptrace/bun/schema" ) -func (d *Dialect) Migrator(db *bun.DB) 
sqlschema.Migrator { - return &migrator{db: db, BaseMigrator: sqlschema.NewBaseMigrator(db)} +func (d *Dialect) Migrator(db *bun.DB, schemaName string) sqlschema.Migrator { + return &migrator{db: db, schemaName: schemaName, BaseMigrator: sqlschema.NewBaseMigrator(db)} } type migrator struct { *sqlschema.BaseMigrator - db *bun.DB + db *bun.DB + schemaName string } var _ sqlschema.Migrator = (*migrator)(nil) @@ -26,9 +27,9 @@ func (m *migrator) AppendSQL(b []byte, operation interface{}) (_ []byte, err err fmter := m.db.Formatter() // Append ALTER TABLE statement to the enclosed query bytes []byte. - appendAlterTable := func(query []byte, fqn sqlschema.FQN) []byte { + appendAlterTable := func(query []byte, tableName string) []byte { query = append(query, "ALTER TABLE "...) - query, _ = fqn.AppendQuery(fmter, query) + query = m.appendFQN(fmter, query, tableName) return append(query, " "...) } @@ -36,31 +37,31 @@ func (m *migrator) AppendSQL(b []byte, operation interface{}) (_ []byte, err err case *migrate.CreateTableOp: return m.AppendCreateTable(b, change.Model) case *migrate.DropTableOp: - return m.AppendDropTable(b, change.FQN) + return m.AppendDropTable(b, m.schemaName, change.TableName) case *migrate.RenameTableOp: - b, err = m.renameTable(fmter, appendAlterTable(b, change.FQN), change) + b, err = m.renameTable(fmter, appendAlterTable(b, change.TableName), change) case *migrate.RenameColumnOp: - b, err = m.renameColumn(fmter, appendAlterTable(b, change.FQN), change) + b, err = m.renameColumn(fmter, appendAlterTable(b, change.TableName), change) case *migrate.AddColumnOp: - b, err = m.addColumn(fmter, appendAlterTable(b, change.FQN), change) + b, err = m.addColumn(fmter, appendAlterTable(b, change.TableName), change) case *migrate.DropColumnOp: - b, err = m.dropColumn(fmter, appendAlterTable(b, change.FQN), change) + b, err = m.dropColumn(fmter, appendAlterTable(b, change.TableName), change) case *migrate.AddPrimaryKeyOp: - b, err = m.addPrimaryKey(fmter, 
appendAlterTable(b, change.FQN), change.PrimaryKey) + b, err = m.addPrimaryKey(fmter, appendAlterTable(b, change.TableName), change.PrimaryKey) case *migrate.ChangePrimaryKeyOp: - b, err = m.changePrimaryKey(fmter, appendAlterTable(b, change.FQN), change) + b, err = m.changePrimaryKey(fmter, appendAlterTable(b, change.TableName), change) case *migrate.DropPrimaryKeyOp: - b, err = m.dropConstraint(fmter, appendAlterTable(b, change.FQN), change.PrimaryKey.Name) + b, err = m.dropConstraint(fmter, appendAlterTable(b, change.TableName), change.PrimaryKey.Name) case *migrate.AddUniqueConstraintOp: - b, err = m.addUnique(fmter, appendAlterTable(b, change.FQN), change) + b, err = m.addUnique(fmter, appendAlterTable(b, change.TableName), change) case *migrate.DropUniqueConstraintOp: - b, err = m.dropConstraint(fmter, appendAlterTable(b, change.FQN), change.Unique.Name) + b, err = m.dropConstraint(fmter, appendAlterTable(b, change.TableName), change.Unique.Name) case *migrate.ChangeColumnTypeOp: - b, err = m.changeColumnType(fmter, appendAlterTable(b, change.FQN), change) + b, err = m.changeColumnType(fmter, appendAlterTable(b, change.TableName), change) case *migrate.AddForeignKeyOp: - b, err = m.addForeignKey(fmter, appendAlterTable(b, change.FQN()), change) + b, err = m.addForeignKey(fmter, appendAlterTable(b, change.TableName()), change) case *migrate.DropForeignKeyOp: - b, err = m.dropConstraint(fmter, appendAlterTable(b, change.FQN()), change.ConstraintName) + b, err = m.dropConstraint(fmter, appendAlterTable(b, change.TableName()), change.ConstraintName) default: return nil, fmt.Errorf("append sql: unknown operation %T", change) } @@ -70,6 +71,10 @@ func (m *migrator) AppendSQL(b []byte, operation interface{}) (_ []byte, err err return b, nil } +func (m *migrator) appendFQN(fmter schema.Formatter, b []byte, tableName string) []byte { + return fmter.AppendQuery(b, "?.?", bun.Ident(m.schemaName), bun.Ident(tableName)) +} + func (m *migrator) renameTable(fmter 
schema.Formatter, b []byte, rename *migrate.RenameTableOp) (_ []byte, err error) { b = append(b, "RENAME TO "...) b = fmter.AppendName(b, rename.NewName) @@ -88,21 +93,21 @@ func (m *migrator) renameColumn(fmter schema.Formatter, b []byte, rename *migrat func (m *migrator) addColumn(fmter schema.Formatter, b []byte, add *migrate.AddColumnOp) (_ []byte, err error) { b = append(b, "ADD COLUMN "...) - b = fmter.AppendName(b, add.Column) + b = fmter.AppendName(b, add.ColumnName) b = append(b, " "...) - b, err = add.ColDef.AppendQuery(fmter, b) + b, err = add.Column.AppendQuery(fmter, b) if err != nil { return nil, err } - if add.ColDef.GetDefaultValue() != "" { + if add.Column.GetDefaultValue() != "" { b = append(b, " DEFAULT "...) - b = append(b, add.ColDef.GetDefaultValue()...) + b = append(b, add.Column.GetDefaultValue()...) b = append(b, " "...) } - if add.ColDef.GetIsIdentity() { + if add.Column.GetIsIdentity() { b = appendGeneratedAsIdentity(b) } @@ -111,7 +116,7 @@ func (m *migrator) addColumn(fmter schema.Formatter, b []byte, add *migrate.AddC func (m *migrator) dropColumn(fmter schema.Formatter, b []byte, drop *migrate.DropColumnOp) (_ []byte, err error) { b = append(b, "DROP COLUMN "...) - b = fmter.AppendName(b, drop.Column) + b = fmter.AppendName(b, drop.ColumnName) return b, nil } @@ -137,7 +142,7 @@ func (m *migrator) addUnique(fmter schema.Formatter, b []byte, change *migrate.A b = fmter.AppendName(b, change.Unique.Name) } else { // Default naming scheme for unique constraints in Postgres is
__key - b = fmter.AppendName(b, fmt.Sprintf("%s_%s_key", change.FQN.Table, change.Unique.Columns)) + b = fmter.AppendName(b, fmt.Sprintf("%s_%s_key", change.TableName, change.Unique.Columns)) } b = append(b, " UNIQUE ("...) b, _ = change.Unique.Columns.AppendQuery(fmter, b) @@ -160,7 +165,7 @@ func (m *migrator) addForeignKey(fmter schema.Formatter, b []byte, add *migrate. if name == "" { colRef := add.ForeignKey.From columns := strings.Join(colRef.Column.Split(), "_") - name = fmt.Sprintf("%s_%s_fkey", colRef.FQN.Table, columns) + name = fmt.Sprintf("%s_%s_fkey", colRef.TableName, columns) } b = fmter.AppendName(b, name) @@ -171,9 +176,7 @@ func (m *migrator) addForeignKey(fmter schema.Formatter, b []byte, add *migrate. b = append(b, ")"...) b = append(b, " REFERENCES "...) - if b, err = add.ForeignKey.To.FQN.AppendQuery(fmter, b); err != nil { - return b, err - } + b = m.appendFQN(fmter, b, add.ForeignKey.To.TableName) b = append(b, " ("...) if b, err = add.ForeignKey.To.Column.AppendQuery(fmter, b); err != nil { diff --git a/dialect/pgdialect/inspector.go b/dialect/pgdialect/inspector.go index 8f8188ff6..ae2b7cc7e 100644 --- a/dialect/pgdialect/inspector.go +++ b/dialect/pgdialect/inspector.go @@ -112,8 +112,8 @@ func (in *Inspector) Inspect(ctx context.Context, schemaName string) (sqlschema. 
for _, fk := range fks { dbSchema.ForeignKeys[sqlschema.ForeignKey{ - From: sqlschema.NewColumnReference(fk.SourceSchema, fk.SourceTable, fk.SourceColumns...), - To: sqlschema.NewColumnReference(fk.TargetSchema, fk.TargetTable, fk.TargetColumns...), + From: sqlschema.NewColumnReference(fk.SourceTable, fk.SourceColumns...), + To: sqlschema.NewColumnReference(fk.TargetTable, fk.TargetColumns...), }] = fk.ConstraintName } return dbSchema, nil diff --git a/internal/dbtest/inspect_test.go b/internal/dbtest/inspect_test.go index 7ef211137..dd37e2f13 100644 --- a/internal/dbtest/inspect_test.go +++ b/internal/dbtest/inspect_test.go @@ -272,20 +272,20 @@ func TestDatabaseInspector_Inspect(t *testing.T) { )), wantFKs: []sqlschema.ForeignKey{ { - From: sqlschema.NewColumnReference(defaultSchema, "articles", "publisher_id"), - To: sqlschema.NewColumnReference(defaultSchema, "publishers", "publisher_id"), + From: sqlschema.NewColumnReference("articles", "publisher_id"), + To: sqlschema.NewColumnReference("publishers", "publisher_id"), }, { - From: sqlschema.NewColumnReference(defaultSchema, "articles", "author_id"), - To: sqlschema.NewColumnReference(defaultSchema, "authors", "author_id"), + From: sqlschema.NewColumnReference("articles", "author_id"), + To: sqlschema.NewColumnReference("authors", "author_id"), }, { - From: sqlschema.NewColumnReference(defaultSchema, "publisher_to_journalists", "publisher_id"), - To: sqlschema.NewColumnReference(defaultSchema, "publishers", "publisher_id"), + From: sqlschema.NewColumnReference("publisher_to_journalists", "publisher_id"), + To: sqlschema.NewColumnReference("publishers", "publisher_id"), }, { - From: sqlschema.NewColumnReference(defaultSchema, "publisher_to_journalists", "author_id"), - To: sqlschema.NewColumnReference(defaultSchema, "authors", "author_id"), + From: sqlschema.NewColumnReference("publisher_to_journalists", "author_id"), + To: sqlschema.NewColumnReference("authors", "author_id"), }, }, }, @@ -326,8 +326,8 @@ func 
TestDatabaseInspector_Inspect(t *testing.T) { )), wantFKs: []sqlschema.ForeignKey{ { - From: sqlschema.NewColumnReference("admin", "offices", "publisher_name", "publisher_id"), - To: sqlschema.NewColumnReference(defaultSchema, "publishers", "publisher_name", "publisher_id"), + From: sqlschema.NewColumnReference("offices", "publisher_name", "publisher_id"), + To: sqlschema.NewColumnReference("publishers", "publisher_name", "publisher_id"), }, }, }, diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index 1dcd95a85..9e4e85df2 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -326,16 +326,16 @@ func TestAutoMigrator_Migrate(t *testing.T) { tests := []struct { fn func(t *testing.T, db *bun.DB) }{ - {testRenameTable}, - {testRenamedColumns}, - {testCreateDropTable}, + // {testRenameTable}, + // {testRenamedColumns}, + // {testCreateDropTable}, {testAlterForeignKeys}, - {testChangeColumnType_AutoCast}, - {testIdentity}, - {testAddDropColumn}, - {testUnique}, - {testUniqueRenamedTable}, - {testUpdatePrimaryKeys}, + // {testChangeColumnType_AutoCast}, + // {testIdentity}, + // {testAddDropColumn}, + // {testUnique}, + // {testUniqueRenamedTable}, + // {testUpdatePrimaryKeys}, } testEachDB(t, func(t *testing.T, dbName string, db *bun.DB) { @@ -469,23 +469,22 @@ func testAlterForeignKeys(t *testing.T, db *bun.DB) { // Assert state := inspect(ctx) - defaultSchema := db.Dialect().DefaultSchema() // Crated 2 new constraints require.Contains(t, state.ForeignKeys, sqlschema.ForeignKey{ - From: sqlschema.NewColumnReference(defaultSchema, "things_to_owners", "owner_id"), - To: sqlschema.NewColumnReference(defaultSchema, "owners", "id"), - }) + From: sqlschema.NewColumnReference("things_to_owners", "owner_id"), + To: sqlschema.NewColumnReference("owners", "id"), + }, "expected new FK constraint things_to_owners.owner_id -> owners.id") require.Contains(t, state.ForeignKeys, sqlschema.ForeignKey{ - From: 
sqlschema.NewColumnReference(defaultSchema, "things_to_owners", "thing_id"), - To: sqlschema.NewColumnReference(defaultSchema, "things", "id"), - }) + From: sqlschema.NewColumnReference("things_to_owners", "thing_id"), + To: sqlschema.NewColumnReference("things", "id"), + }, "expected new FK constraint things_to_owners.thing_id -> things.id") // Dropped the initial one require.NotContains(t, state.ForeignKeys, sqlschema.ForeignKey{ - From: sqlschema.NewColumnReference(defaultSchema, "things", "owner_id"), - To: sqlschema.NewColumnReference(defaultSchema, "owners", "id"), - }) + From: sqlschema.NewColumnReference("things", "owner_id"), + To: sqlschema.NewColumnReference("owners", "id"), + }, "expected FK constraint things.owner_id -> owners.id to be dropped") } func testRenamedColumns(t *testing.T, db *bun.DB) { diff --git a/internal/dbtest/query_test.go b/internal/dbtest/query_test.go index b906cb20e..04d1959f9 100644 --- a/internal/dbtest/query_test.go +++ b/internal/dbtest/query_test.go @@ -1593,32 +1593,33 @@ func TestAlterTable(t *testing.T) { Genre string } - fqn := sqlschema.FQN{Schema: "hobbies", Table: "movies"} + schemaName := "hobbies" + tableName := "movies" tests := []struct { name string operation interface{} }{ {name: "create table", operation: &migrate.CreateTableOp{ - FQN: fqn, - Model: (*Movie)(nil), + TableName: tableName, + Model: (*Movie)(nil), }}, {name: "drop table", operation: &migrate.DropTableOp{ - FQN: fqn, + TableName: tableName, }}, {name: "rename table", operation: &migrate.RenameTableOp{ - FQN: fqn, - NewName: "films", + TableName: tableName, + NewName: "films", }}, {name: "rename column", operation: &migrate.RenameColumnOp{ - FQN: fqn, - OldName: "has_oscar", - NewName: "has_awards", + TableName: tableName, + OldName: "has_oscar", + NewName: "has_awards", }}, {name: "add column with default value", operation: &migrate.AddColumnOp{ - FQN: fqn, - Column: "language", - ColDef: &sqlschema.BaseColumn{ + TableName: tableName, + ColumnName: 
"language", + Column: &sqlschema.BaseColumn{ SQLType: "varchar", VarcharLen: 20, IsNullable: false, @@ -1626,100 +1627,100 @@ func TestAlterTable(t *testing.T) { }, }}, {name: "add column with identity", operation: &migrate.AddColumnOp{ - FQN: fqn, - Column: "n", - ColDef: &sqlschema.BaseColumn{ + TableName: tableName, + ColumnName: "n", + Column: &sqlschema.BaseColumn{ SQLType: sqltype.BigInt, IsNullable: false, IsIdentity: true, }, }}, {name: "drop column", operation: &migrate.DropColumnOp{ - FQN: fqn, - Column: "director", - ColDef: &sqlschema.BaseColumn{ + TableName: tableName, + ColumnName: "director", + Column: &sqlschema.BaseColumn{ SQLType: sqltype.VarChar, IsNullable: false, }, }}, {name: "add unique constraint", operation: &migrate.AddUniqueConstraintOp{ - FQN: fqn, + TableName: tableName, Unique: sqlschema.Unique{ Name: "one_genre_per_director", Columns: sqlschema.NewColumns("genre", "director"), }, }}, {name: "drop unique constraint", operation: &migrate.DropUniqueConstraintOp{ - FQN: fqn, + TableName: tableName, Unique: sqlschema.Unique{ Name: "one_genre_per_director", Columns: sqlschema.NewColumns("genre", "director"), }, }}, {name: "change column type int to bigint", operation: &migrate.ChangeColumnTypeOp{ - FQN: fqn, - Column: "budget", - From: &sqlschema.BaseColumn{SQLType: sqltype.Integer}, - To: &sqlschema.BaseColumn{SQLType: sqltype.BigInt}, + TableName: tableName, + Column: "budget", + From: &sqlschema.BaseColumn{SQLType: sqltype.Integer}, + To: &sqlschema.BaseColumn{SQLType: sqltype.BigInt}, }}, {name: "add default", operation: &migrate.ChangeColumnTypeOp{ - FQN: fqn, - Column: "budget", - From: &sqlschema.BaseColumn{DefaultValue: ""}, - To: &sqlschema.BaseColumn{DefaultValue: "100"}, + TableName: tableName, + Column: "budget", + From: &sqlschema.BaseColumn{DefaultValue: ""}, + To: &sqlschema.BaseColumn{DefaultValue: "100"}, }}, {name: "drop default", operation: &migrate.ChangeColumnTypeOp{ - FQN: fqn, - Column: "budget", - From: 
&sqlschema.BaseColumn{DefaultValue: "100"}, - To: &sqlschema.BaseColumn{DefaultValue: ""}, + TableName: tableName, + Column: "budget", + From: &sqlschema.BaseColumn{DefaultValue: "100"}, + To: &sqlschema.BaseColumn{DefaultValue: ""}, }}, {name: "make nullable", operation: &migrate.ChangeColumnTypeOp{ - FQN: fqn, - Column: "director", - From: &sqlschema.BaseColumn{IsNullable: false}, - To: &sqlschema.BaseColumn{IsNullable: true}, + TableName: tableName, + Column: "director", + From: &sqlschema.BaseColumn{IsNullable: false}, + To: &sqlschema.BaseColumn{IsNullable: true}, }}, {name: "add notnull", operation: &migrate.ChangeColumnTypeOp{ - FQN: fqn, - Column: "budget", - From: &sqlschema.BaseColumn{IsNullable: true}, - To: &sqlschema.BaseColumn{IsNullable: false}, + TableName: tableName, + Column: "budget", + From: &sqlschema.BaseColumn{IsNullable: true}, + To: &sqlschema.BaseColumn{IsNullable: false}, }}, {name: "increase varchar length", operation: &migrate.ChangeColumnTypeOp{ - FQN: fqn, - Column: "language", - From: &sqlschema.BaseColumn{SQLType: "varchar", VarcharLen: 20}, - To: &sqlschema.BaseColumn{SQLType: "varchar", VarcharLen: 255}, + TableName: tableName, + Column: "language", + From: &sqlschema.BaseColumn{SQLType: "varchar", VarcharLen: 20}, + To: &sqlschema.BaseColumn{SQLType: "varchar", VarcharLen: 255}, }}, {name: "add identity", operation: &migrate.ChangeColumnTypeOp{ - FQN: fqn, - Column: "id", - From: &sqlschema.BaseColumn{IsIdentity: false}, - To: &sqlschema.BaseColumn{IsIdentity: true}, + TableName: tableName, + Column: "id", + From: &sqlschema.BaseColumn{IsIdentity: false}, + To: &sqlschema.BaseColumn{IsIdentity: true}, }}, {name: "drop identity", operation: &migrate.ChangeColumnTypeOp{ - FQN: fqn, - Column: "id", - From: &sqlschema.BaseColumn{IsIdentity: true}, - To: &sqlschema.BaseColumn{IsIdentity: false}, + TableName: tableName, + Column: "id", + From: &sqlschema.BaseColumn{IsIdentity: true}, + To: &sqlschema.BaseColumn{IsIdentity: false}, }}, 
{name: "add primary key", operation: &migrate.AddPrimaryKeyOp{ - FQN: fqn, + TableName: tableName, PrimaryKey: sqlschema.PrimaryKey{ Name: "new_pk", Columns: sqlschema.NewColumns("id"), }, }}, {name: "drop primary key", operation: &migrate.DropPrimaryKeyOp{ - FQN: fqn, + TableName: tableName, PrimaryKey: sqlschema.PrimaryKey{ Name: "new_pk", Columns: sqlschema.NewColumns("id"), }, }}, {name: "change primary key", operation: &migrate.ChangePrimaryKeyOp{ - FQN: fqn, + TableName: tableName, Old: sqlschema.PrimaryKey{ Name: "old_pk", Columns: sqlschema.NewColumns("id"), @@ -1732,21 +1733,21 @@ func TestAlterTable(t *testing.T) { {name: "add foreign key", operation: &migrate.AddForeignKeyOp{ ConstraintName: "genre_description", ForeignKey: sqlschema.ForeignKey{ - From: sqlschema.NewColumnReference("hobbies", "movies", "genre"), - To: sqlschema.NewColumnReference("wiki", "film_genres", "id"), + From: sqlschema.NewColumnReference("movies", "genre"), + To: sqlschema.NewColumnReference("film_genres", "id"), }, }}, {name: "drop foreign key", operation: &migrate.DropForeignKeyOp{ ConstraintName: "genre_description", ForeignKey: sqlschema.ForeignKey{ - From: sqlschema.NewColumnReference("hobbies", "movies", "genre"), - To: sqlschema.NewColumnReference("wiki", "film_genres", "id"), + From: sqlschema.NewColumnReference("movies", "genre"), + To: sqlschema.NewColumnReference("film_genres", "id"), }, }}, } testEachDB(t, func(t *testing.T, dbName string, db *bun.DB) { - migrator, err := sqlschema.NewMigrator(db) + migrator, err := sqlschema.NewMigrator(db, schemaName) if err != nil { t.Skip(err) } diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_foreign_key b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_foreign_key index fb4ab1c49..ee54ad507 100644 --- a/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_foreign_key +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-add_foreign_key @@ -1 +1 @@ -ALTER TABLE "hobbies"."movies" ADD 
CONSTRAINT "genre_description" FOREIGN KEY (genre) REFERENCES "wiki"."film_genres" (id) +ALTER TABLE "hobbies"."movies" ADD CONSTRAINT "genre_description" FOREIGN KEY (genre) REFERENCES "hobbies"."film_genres" (id) diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_table b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_table index f74af4196..882688a66 100644 --- a/internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_table +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pg-drop_table @@ -1 +1 @@ -DROP TABLE hobbies.movies +DROP TABLE "hobbies"."movies" diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_foreign_key b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_foreign_key index fb4ab1c49..ee54ad507 100644 --- a/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_foreign_key +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-add_foreign_key @@ -1 +1 @@ -ALTER TABLE "hobbies"."movies" ADD CONSTRAINT "genre_description" FOREIGN KEY (genre) REFERENCES "wiki"."film_genres" (id) +ALTER TABLE "hobbies"."movies" ADD CONSTRAINT "genre_description" FOREIGN KEY (genre) REFERENCES "hobbies"."film_genres" (id) diff --git a/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_table b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_table index f74af4196..882688a66 100644 --- a/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_table +++ b/internal/dbtest/testdata/snapshots/TestAlterTable-pgx-drop_table @@ -1 +1 @@ -DROP TABLE hobbies.movies +DROP TABLE "hobbies"."movies" diff --git a/migrate/auto.go b/migrate/auto.go index 11e9cb57c..753636720 100644 --- a/migrate/auto.go +++ b/migrate/auto.go @@ -155,7 +155,7 @@ func NewAutoMigrator(db *bun.DB, opts ...AutoMigratorOption) (*AutoMigrator, err am.dbInspector = dbInspector am.diffOpts = append(am.diffOpts, withTypeEquivalenceFunc(db.Dialect().(sqlschema.InspectorDialect).EquivalentType)) - dbMigrator, err := 
sqlschema.NewMigrator(db) + dbMigrator, err := sqlschema.NewMigrator(db, am.schemaName) if err != nil { return nil, err } diff --git a/migrate/diff.go b/migrate/diff.go index cb0e60a2d..facd47c74 100644 --- a/migrate/diff.go +++ b/migrate/diff.go @@ -41,10 +41,10 @@ RenameCreate: for haveName, haveTable := range currentTables.FromOldest() { if _, exists := targetTables.Get(haveName); !exists && d.canRename(haveTable, wantTable) { d.changes.Add(&RenameTableOp{ - FQN: haveTable.GetFQN(), - NewName: wantName, + TableName: haveTable.GetName(), + NewName: wantName, }) - d.refMap.RenameTable(haveTable.GetFQN(), wantName) + d.refMap.RenameTable(haveTable.GetName(), wantName) // Find renamed columns, if any, and check if constraints (PK, UNIQUE) have been updated. // We need not check wantTable any further. @@ -59,8 +59,8 @@ RenameCreate: // then we need to create this table in the database. additional := wantTable.(*sqlschema.BunTable) d.changes.Add(&CreateTableOp{ - FQN: wantTable.GetFQN(), - Model: additional.Model, + TableName: wantTable.GetName(), + Model: additional.Model, }) } @@ -68,7 +68,7 @@ RenameCreate: for name, table := range currentTables.FromOldest() { if _, keep := targetTables.Get(name); !keep { d.changes.Add(&DropTableOp{ - FQN: table.GetFQN(), + TableName: table.GetName(), }) } } @@ -111,10 +111,10 @@ ChangeRename: if cCol, ok := currentColumns.Get(tName); ok { if checkType && !d.equalColumns(cCol, tCol) { d.changes.Add(&ChangeColumnTypeOp{ - FQN: target.GetFQN(), - Column: tName, - From: cCol, - To: d.makeTargetColDef(cCol, tCol), + TableName: target.GetName(), + Column: tName, + From: cCol, + To: d.makeTargetColDef(cCol, tCol), }) } continue @@ -128,11 +128,11 @@ ChangeRename: continue } d.changes.Add(&RenameColumnOp{ - FQN: target.GetFQN(), - OldName: cName, - NewName: tName, + TableName: target.GetName(), + OldName: cName, + NewName: tName, }) - d.refMap.RenameColumn(target.GetFQN(), cName, tName) + d.refMap.RenameColumn(target.GetName(), cName, 
tName) currentColumns.Delete(cName) // no need to check this column again // Update primary key definition to avoid superficially recreating the constraint. @@ -142,9 +142,9 @@ ChangeRename: } d.changes.Add(&AddColumnOp{ - FQN: target.GetFQN(), - Column: tName, - ColDef: tCol, + TableName: target.GetName(), + ColumnName: tName, + Column: tCol, }) } @@ -152,9 +152,9 @@ ChangeRename: for cName, cCol := range currentColumns.FromOldest() { if _, keep := targetColumns.Get(cName); !keep { d.changes.Add(&DropColumnOp{ - FQN: target.GetFQN(), - Column: cName, - ColDef: cCol, + TableName: target.GetName(), + ColumnName: cName, + Column: cCol, }) } } @@ -169,8 +169,8 @@ Add: } } d.changes.Add(&AddUniqueConstraintOp{ - FQN: target.GetFQN(), - Unique: want, + TableName: target.GetName(), + Unique: want, }) } @@ -183,8 +183,8 @@ Drop: } d.changes.Add(&DropUniqueConstraintOp{ - FQN: target.GetFQN(), - Unique: got, + TableName: target.GetName(), + Unique: got, }) } @@ -198,19 +198,19 @@ Drop: switch { case targetPK == nil && currentPK != nil: d.changes.Add(&DropPrimaryKeyOp{ - FQN: target.GetFQN(), + TableName: target.GetName(), PrimaryKey: *currentPK, }) case currentPK == nil && targetPK != nil: d.changes.Add(&AddPrimaryKeyOp{ - FQN: target.GetFQN(), + TableName: target.GetName(), PrimaryKey: *targetPK, }) case targetPK.Columns != currentPK.Columns: d.changes.Add(&ChangePrimaryKeyOp{ - FQN: target.GetFQN(), - Old: *currentPK, - New: *targetPK, + TableName: target.GetName(), + Old: *currentPK, + New: *targetPK, }) } } @@ -378,24 +378,24 @@ func newRefMap(fks map[sqlschema.ForeignKey]string) refMap { } // RenameT updates table name in all foreign key definions which depend on it. 
-func (rm refMap) RenameTable(table sqlschema.FQN, newName string) { +func (rm refMap) RenameTable(tableName string, newName string) { for fk := range rm { - switch table { - case fk.From.FQN: - fk.From.FQN.Table = newName - case fk.To.FQN: - fk.To.FQN.Table = newName + switch tableName { + case fk.From.TableName: + fk.From.TableName = newName + case fk.To.TableName: + fk.To.TableName = newName } } } // RenameColumn updates column name in all foreign key definions which depend on it. -func (rm refMap) RenameColumn(table sqlschema.FQN, column, newName string) { +func (rm refMap) RenameColumn(tableName string, column, newName string) { for fk := range rm { - if table == fk.From.FQN { + if tableName == fk.From.TableName { fk.From.Column.Replace(column, newName) } - if table == fk.To.FQN { + if tableName == fk.To.TableName { fk.To.Column.Replace(column, newName) } } diff --git a/migrate/operations.go b/migrate/operations.go index 68baadee5..7b749c5a0 100644 --- a/migrate/operations.go +++ b/migrate/operations.go @@ -31,87 +31,87 @@ type Operation interface { // Make sure the dialect does not include FOREIGN KEY constraints in the CREATE TABLE // statement, as those may potentially reference not-yet-existing columns/tables. type CreateTableOp struct { - FQN sqlschema.FQN - Model interface{} + TableName string + Model interface{} } var _ Operation = (*CreateTableOp)(nil) func (op *CreateTableOp) GetReverse() Operation { - return &DropTableOp{FQN: op.FQN} + return &DropTableOp{TableName: op.TableName} } // DropTableOp drops a database table. This operation is not reversible. type DropTableOp struct { - FQN sqlschema.FQN + TableName string } var _ Operation = (*DropTableOp)(nil) func (op *DropTableOp) DependsOn(another Operation) bool { drop, ok := another.(*DropForeignKeyOp) - return ok && drop.ForeignKey.DependsOnTable(op.FQN) + return ok && drop.ForeignKey.DependsOnTable(op.TableName) } // GetReverse for a DropTable returns a no-op migration. 
Logically, CreateTable is the reverse, // but DropTable does not have the table's definition to create one. func (op *DropTableOp) GetReverse() Operation { - c := comment(fmt.Sprintf("WARNING: \"DROP TABLE %s\" cannot be reversed automatically because table definition is not available", op.FQN.String())) + c := comment(fmt.Sprintf("WARNING: \"DROP TABLE %s\" cannot be reversed automatically because table definition is not available", op.TableName)) return &c } -// RenameTableOp renames the table. Note, that changing the "schema" part of the table's FQN is not allowed. +// RenameTableOp renames the table. Changing the "schema" part of the table's FQN (moving tables between schemas) is not allowed. type RenameTableOp struct { - FQN sqlschema.FQN - NewName string + TableName string + NewName string } var _ Operation = (*RenameTableOp)(nil) func (op *RenameTableOp) GetReverse() Operation { return &RenameTableOp{ - FQN: sqlschema.FQN{Schema: op.FQN.Schema, Table: op.NewName}, - NewName: op.FQN.Table, + TableName: op.NewName, + NewName: op.TableName, } } // RenameColumnOp renames a column in the table. If the changeset includes a rename operation // for the column's table, it should be executed first. type RenameColumnOp struct { - FQN sqlschema.FQN - OldName string - NewName string + TableName string + OldName string + NewName string } var _ Operation = (*RenameColumnOp)(nil) func (op *RenameColumnOp) GetReverse() Operation { return &RenameColumnOp{ - FQN: op.FQN, - OldName: op.NewName, - NewName: op.OldName, + TableName: op.TableName, + OldName: op.NewName, + NewName: op.OldName, } } func (op *RenameColumnOp) DependsOn(another Operation) bool { rename, ok := another.(*RenameTableOp) - return ok && op.FQN.Schema == rename.FQN.Schema && op.FQN.Table == rename.NewName + return ok && op.TableName == rename.NewName } // AddColumnOp adds a new column to the table. 
type AddColumnOp struct { - FQN sqlschema.FQN - Column string - ColDef sqlschema.Column + TableName string + ColumnName string + Column sqlschema.Column } var _ Operation = (*AddColumnOp)(nil) func (op *AddColumnOp) GetReverse() Operation { return &DropColumnOp{ - FQN: op.FQN, - Column: op.Column, - ColDef: op.ColDef, + TableName: op.TableName, + ColumnName: op.ColumnName, + Column: op.Column, } } @@ -122,29 +122,29 @@ func (op *AddColumnOp) GetReverse() Operation { // DropColumnOp depends on DropForeignKeyOp, DropPrimaryKeyOp, and ChangePrimaryKeyOp // if any of the constraints is defined on this table. type DropColumnOp struct { - FQN sqlschema.FQN - Column string - ColDef sqlschema.Column + TableName string + ColumnName string + Column sqlschema.Column } var _ Operation = (*DropColumnOp)(nil) func (op *DropColumnOp) GetReverse() Operation { return &AddColumnOp{ - FQN: op.FQN, - Column: op.Column, - ColDef: op.ColDef, + TableName: op.TableName, + ColumnName: op.ColumnName, + Column: op.Column, } } func (op *DropColumnOp) DependsOn(another Operation) bool { switch drop := another.(type) { case *DropForeignKeyOp: - return drop.ForeignKey.DependsOnColumn(op.FQN, op.Column) + return drop.ForeignKey.DependsOnColumn(op.TableName, op.ColumnName) case *DropPrimaryKeyOp: - return op.FQN == drop.FQN && drop.PrimaryKey.Columns.Contains(op.Column) + return op.TableName == drop.TableName && drop.PrimaryKey.Columns.Contains(op.ColumnName) case *ChangePrimaryKeyOp: - return op.FQN == drop.FQN && drop.Old.Columns.Contains(op.Column) + return op.TableName == drop.TableName && drop.Old.Columns.Contains(op.ColumnName) } return false } @@ -157,17 +157,16 @@ type AddForeignKeyOp struct { var _ Operation = (*AddForeignKeyOp)(nil) -func (op *AddForeignKeyOp) FQN() sqlschema.FQN { - return op.ForeignKey.From.FQN +func (op *AddForeignKeyOp) TableName() string { + return op.ForeignKey.From.TableName } func (op *AddForeignKeyOp) DependsOn(another Operation) bool { switch another := 
another.(type) { case *RenameTableOp: - return op.ForeignKey.DependsOnTable(another.FQN) || - op.ForeignKey.DependsOnTable(sqlschema.FQN{Schema: another.FQN.Schema, Table: another.NewName}) + return op.ForeignKey.DependsOnTable(another.TableName) || op.ForeignKey.DependsOnTable(another.NewName) case *CreateTableOp: - return op.ForeignKey.DependsOnTable(another.FQN) + return op.ForeignKey.DependsOnTable(another.TableName) } return false } @@ -187,8 +186,8 @@ type DropForeignKeyOp struct { var _ Operation = (*DropForeignKeyOp)(nil) -func (op *DropForeignKeyOp) FQN() sqlschema.FQN { - return op.ForeignKey.From.FQN +func (op *DropForeignKeyOp) TableName() string { + return op.ForeignKey.From.TableName } func (op *DropForeignKeyOp) GetReverse() Operation { @@ -200,28 +199,28 @@ func (op *DropForeignKeyOp) GetReverse() Operation { // AddUniqueConstraintOp adds new UNIQUE constraint to the table. type AddUniqueConstraintOp struct { - FQN sqlschema.FQN - Unique sqlschema.Unique + TableName string + Unique sqlschema.Unique } var _ Operation = (*AddUniqueConstraintOp)(nil) func (op *AddUniqueConstraintOp) GetReverse() Operation { return &DropUniqueConstraintOp{ - FQN: op.FQN, - Unique: op.Unique, + TableName: op.TableName, + Unique: op.Unique, } } func (op *AddUniqueConstraintOp) DependsOn(another Operation) bool { switch another := another.(type) { case *AddColumnOp: - return op.FQN == another.FQN && op.Unique.Columns.Contains(another.Column) + return op.TableName == another.TableName && op.Unique.Columns.Contains(another.ColumnName) case *RenameTableOp: - return op.FQN.Schema == another.FQN.Schema && op.FQN.Table == another.NewName + return op.TableName == another.NewName case *DropUniqueConstraintOp: // We want to drop the constraint with the same name before adding this one. 
- return op.FQN == another.FQN && op.Unique.Name == another.Unique.Name + return op.TableName == another.TableName && op.Unique.Name == another.Unique.Name default: return false } @@ -230,23 +229,23 @@ func (op *AddUniqueConstraintOp) DependsOn(another Operation) bool { // DropUniqueConstraintOp drops a UNIQUE constraint. type DropUniqueConstraintOp struct { - FQN sqlschema.FQN - Unique sqlschema.Unique + TableName string + Unique sqlschema.Unique } var _ Operation = (*DropUniqueConstraintOp)(nil) func (op *DropUniqueConstraintOp) DependsOn(another Operation) bool { if rename, ok := another.(*RenameTableOp); ok { - return op.FQN.Schema == rename.FQN.Schema && op.FQN.Table == rename.NewName + return op.TableName == rename.NewName } return false } func (op *DropUniqueConstraintOp) GetReverse() Operation { return &AddUniqueConstraintOp{ - FQN: op.FQN, - Unique: op.Unique, + TableName: op.TableName, + Unique: op.Unique, } } @@ -255,26 +254,26 @@ func (op *DropUniqueConstraintOp) GetReverse() Operation { // E.g. reducing VARCHAR length is not possible in most dialects. // AutoMigrator does not enforce or validate these rules. type ChangeColumnTypeOp struct { - FQN sqlschema.FQN - Column string - From sqlschema.Column - To sqlschema.Column + TableName string + Column string + From sqlschema.Column + To sqlschema.Column } var _ Operation = (*ChangeColumnTypeOp)(nil) func (op *ChangeColumnTypeOp) GetReverse() Operation { return &ChangeColumnTypeOp{ - FQN: op.FQN, - Column: op.Column, - From: op.To, - To: op.From, + TableName: op.TableName, + Column: op.Column, + From: op.To, + To: op.From, } } // DropPrimaryKeyOp drops the table's PRIMARY KEY.
type DropPrimaryKeyOp struct { - FQN sqlschema.FQN + TableName string PrimaryKey sqlschema.PrimaryKey } @@ -282,14 +281,14 @@ var _ Operation = (*DropPrimaryKeyOp)(nil) func (op *DropPrimaryKeyOp) GetReverse() Operation { return &AddPrimaryKeyOp{ - FQN: op.FQN, + TableName: op.TableName, PrimaryKey: op.PrimaryKey, } } // AddPrimaryKeyOp adds a new PRIMARY KEY to the table. type AddPrimaryKeyOp struct { - FQN sqlschema.FQN + TableName string PrimaryKey sqlschema.PrimaryKey } @@ -297,7 +296,7 @@ var _ Operation = (*AddPrimaryKeyOp)(nil) func (op *AddPrimaryKeyOp) GetReverse() Operation { return &DropPrimaryKeyOp{ - FQN: op.FQN, + TableName: op.TableName, PrimaryKey: op.PrimaryKey, } } @@ -305,25 +304,25 @@ func (op *AddPrimaryKeyOp) GetReverse() Operation { func (op *AddPrimaryKeyOp) DependsOn(another Operation) bool { switch another := another.(type) { case *AddColumnOp: - return op.FQN == another.FQN && op.PrimaryKey.Columns.Contains(another.Column) + return op.TableName == another.TableName && op.PrimaryKey.Columns.Contains(another.ColumnName) } return false } // ChangePrimaryKeyOp changes the PRIMARY KEY of the table. 
type ChangePrimaryKeyOp struct { - FQN sqlschema.FQN - Old sqlschema.PrimaryKey - New sqlschema.PrimaryKey + TableName string + Old sqlschema.PrimaryKey + New sqlschema.PrimaryKey } var _ Operation = (*AddPrimaryKeyOp)(nil) func (op *ChangePrimaryKeyOp) GetReverse() Operation { return &ChangePrimaryKeyOp{ - FQN: op.FQN, - Old: op.New, - New: op.Old, + TableName: op.TableName, + Old: op.New, + New: op.Old, } } diff --git a/migrate/sqlschema/database.go b/migrate/sqlschema/database.go index fa96a9236..cdc5b2d50 100644 --- a/migrate/sqlschema/database.go +++ b/migrate/sqlschema/database.go @@ -37,19 +37,19 @@ type ForeignKey struct { To ColumnReference } -func NewColumnReference(schemaName, tableName string, columns ...string) ColumnReference { +func NewColumnReference(tableName string, columns ...string) ColumnReference { return ColumnReference{ - FQN: FQN{Schema: schemaName, Table: tableName}, - Column: NewColumns(columns...), + TableName: tableName, + Column: NewColumns(columns...), } } -func (fk ForeignKey) DependsOnTable(fqn FQN) bool { - return fk.From.FQN == fqn || fk.To.FQN == fqn +func (fk ForeignKey) DependsOnTable(tableName string) bool { + return fk.From.TableName == tableName || fk.To.TableName == tableName } -func (fk ForeignKey) DependsOnColumn(fqn FQN, column string) bool { - return fk.DependsOnTable(fqn) && +func (fk ForeignKey) DependsOnColumn(tableName string, column string) bool { + return fk.DependsOnTable(tableName) && (fk.From.Column.Contains(column) || fk.To.Column.Contains(column)) } @@ -122,6 +122,6 @@ func (u Unique) Equals(other Unique) bool { } type ColumnReference struct { - FQN FQN - Column Columns + TableName string + Column Columns } diff --git a/migrate/sqlschema/inspector.go b/migrate/sqlschema/inspector.go index b0073fe9b..ed474ed95 100644 --- a/migrate/sqlschema/inspector.go +++ b/migrate/sqlschema/inspector.go @@ -169,8 +169,8 @@ func (bmi *BunModelInspector) Inspect(ctx context.Context, schemaName string) (D target := 
rel.JoinTable state.ForeignKeys[ForeignKey{ - From: NewColumnReference(t.Schema, t.Name, fromCols...), - To: NewColumnReference(target.Schema, target.Name, toCols...), + From: NewColumnReference(t.Name, fromCols...), + To: NewColumnReference(target.Name, toCols...), }] = "" } } diff --git a/migrate/sqlschema/migrator.go b/migrate/sqlschema/migrator.go index bb8f1b82f..c9f9d2592 100644 --- a/migrate/sqlschema/migrator.go +++ b/migrate/sqlschema/migrator.go @@ -9,7 +9,7 @@ import ( type MigratorDialect interface { schema.Dialect - Migrator(*bun.DB) Migrator + Migrator(db *bun.DB, schemaName string) Migrator } type Migrator interface { @@ -21,13 +21,13 @@ type migrator struct { Migrator } -func NewMigrator(db *bun.DB) (Migrator, error) { +func NewMigrator(db *bun.DB, schemaName string) (Migrator, error) { md, ok := db.Dialect().(MigratorDialect) if !ok { return nil, fmt.Errorf("%q dialect does not implement sqlschema.Migrator", db.Dialect().Name()) } return &migrator{ - Migrator: md.Migrator(db), + Migrator: md.Migrator(db, schemaName), }, nil } @@ -44,6 +44,6 @@ func (m *BaseMigrator) AppendCreateTable(b []byte, model interface{}) ([]byte, e return m.db.NewCreateTable().Model(model).AppendQuery(m.db.Formatter(), b) } -func (m *BaseMigrator) AppendDropTable(b []byte, fqn FQN) ([]byte, error) { - return m.db.NewDropTable().TableExpr(fqn.String()).AppendQuery(m.db.Formatter(), b) +func (m *BaseMigrator) AppendDropTable(b []byte, schemaName, tableName string) ([]byte, error) { + return m.db.NewDropTable().TableExpr("?.?", bun.Ident(schemaName), bun.Ident(tableName)).AppendQuery(m.db.Formatter(), b) } diff --git a/migrate/sqlschema/table.go b/migrate/sqlschema/table.go index acbc9bb65..a805ba780 100644 --- a/migrate/sqlschema/table.go +++ b/migrate/sqlschema/table.go @@ -1,9 +1,6 @@ package sqlschema import ( - "fmt" - - "github.com/uptrace/bun/schema" orderedmap "github.com/wk8/go-ordered-map/v2" ) @@ -13,7 +10,6 @@ type Table interface { GetColumns() 
*orderedmap.OrderedMap[string, Column] GetPrimaryKey() *PrimaryKey GetUniqueConstraints() []Unique - GetFQN() FQN } var _ Table = (*BaseTable)(nil) @@ -62,24 +58,3 @@ func (td *BaseTable) GetPrimaryKey() *PrimaryKey { func (td *BaseTable) GetUniqueConstraints() []Unique { return td.UniqueConstraints } - -func (t *BaseTable) GetFQN() FQN { - return FQN{Schema: t.Schema, Table: t.Name} -} - -// FQN uniquely identifies a table in a multi-schema setup. -type FQN struct { - Schema string - Table string -} - -var _ schema.QueryAppender = (*FQN)(nil) - -// AppendQuery appends a fully-qualified table name. -func (fqn *FQN) AppendQuery(fmter schema.Formatter, b []byte) ([]byte, error) { - return fmter.AppendQuery(b, "?.?", schema.Ident(fqn.Schema), schema.Ident(fqn.Table)), nil -} - -func (fqn *FQN) String() string { - return fmt.Sprintf("%s.%s", fqn.Schema, fqn.Table) -} From 1799a0f2ce4e973d859cf0700b13e46041ab834a Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Tue, 12 Nov 2024 09:49:20 +0100 Subject: [PATCH 54/55] test: activate commented-out tests --- internal/dbtest/migrate_test.go | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/internal/dbtest/migrate_test.go b/internal/dbtest/migrate_test.go index 9e4e85df2..06bc531af 100644 --- a/internal/dbtest/migrate_test.go +++ b/internal/dbtest/migrate_test.go @@ -326,16 +326,16 @@ func TestAutoMigrator_Migrate(t *testing.T) { tests := []struct { fn func(t *testing.T, db *bun.DB) }{ - // {testRenameTable}, - // {testRenamedColumns}, - // {testCreateDropTable}, + {testRenameTable}, + {testRenamedColumns}, + {testCreateDropTable}, {testAlterForeignKeys}, - // {testChangeColumnType_AutoCast}, - // {testIdentity}, - // {testAddDropColumn}, - // {testUnique}, - // {testUniqueRenamedTable}, - // {testUpdatePrimaryKeys}, + {testChangeColumnType_AutoCast}, + {testIdentity}, + {testAddDropColumn}, + {testUnique}, + {testUniqueRenamedTable}, + {testUpdatePrimaryKeys}, } testEachDB(t, func(t 
*testing.T, dbName string, db *bun.DB) { From ac8d221e6443b469e794314c5fc189250fa542d5 Mon Sep 17 00:00:00 2001 From: dyma solovei Date: Tue, 12 Nov 2024 09:53:35 +0100 Subject: [PATCH 55/55] feat: include target schema name in migration name --- migrate/auto.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/migrate/auto.go b/migrate/auto.go index 753636720..32582eba3 100644 --- a/migrate/auto.go +++ b/migrate/auto.go @@ -229,13 +229,13 @@ func (am *AutoMigrator) createSQLMigrations(ctx context.Context, transactional b return nil, nil, fmt.Errorf("create sql migrations: %w", err) } - name, _ := genMigrationName("auto") + name, _ := genMigrationName(am.schemaName + "_auto") migrations := NewMigrations(am.migrationsOpts...) migrations.Add(Migration{ Name: name, Up: changes.Up(am.dbMigrator), Down: changes.Down(am.dbMigrator), - Comment: "Changes detected by bun.migrate.AutoMigrator", + Comment: "Changes detected by bun.AutoMigrator", }) // Append .tx.up.sql or .up.sql to migration name, depending on whether it should be transactional.