diff --git a/Makefile b/Makefile index 6a65846c..1c04768f 100644 --- a/Makefile +++ b/Makefile @@ -6,3 +6,11 @@ run_migrations: migrate -path db/migrations/ -database "postgresql://root:root@localhost:5433/postgres?sslmode=disable" -verbose down --all migrate -path db/migrations/ -database "postgresql://root:root@localhost:5433/postgres?sslmode=disable" -verbose up sqlboiler -c ./db/sqlboiler.toml psql + +migrate: + $(MAKE) run_db + sleep 1 + $(MAKE) run_migrations + +protos: + buf generate diff --git a/buf.gen.yaml b/buf.gen.yaml new file mode 100644 index 00000000..8a779c4d --- /dev/null +++ b/buf.gen.yaml @@ -0,0 +1,17 @@ +version: v2 +managed: + enabled: true + override: + - file_option: go_package_prefix + value: github.com/crlssn/getstronger/go/pkg/pb + disable: + - module: buf.build/bufbuild/protovalidate +plugins: + - remote: buf.build/protocolbuffers/go + out: go/pkg/pb + opt: paths=source_relative + - remote: buf.build/connectrpc/go + out: go/pkg/pb + opt: paths=source_relative +inputs: + - directory: proto diff --git a/buf.lock b/buf.lock new file mode 100644 index 00000000..186cc364 --- /dev/null +++ b/buf.lock @@ -0,0 +1,6 @@ +# Generated by buf. DO NOT EDIT. 
+version: v2 +deps: + - name: buf.build/bufbuild/protovalidate + commit: a6c49f84cc0f4e038680d390392e2ab0 + digest: b5:e968392e88ff7915adcbd1635d670b45bff8836ec2415d81fc559ca5470a695dbdc30030bad8bc5764647c731079e9e7bba0023ea25c4e4a1672a7d2561d4a19 diff --git a/buf.yaml b/buf.yaml new file mode 100644 index 00000000..57f16c20 --- /dev/null +++ b/buf.yaml @@ -0,0 +1,12 @@ +# For details on buf.yaml configuration, visit https://buf.build/docs/configuration/v2/buf-yaml +version: v2 +modules: + - path: proto +lint: + use: + - DEFAULT +breaking: + use: + - FILE +deps: + - buf.build/bufbuild/protovalidate diff --git a/db/migrations/001_schema.up.sql b/db/migrations/001_schema.up.sql index 37b46b3d..0c6c49e7 100644 --- a/db/migrations/001_schema.up.sql +++ b/db/migrations/001_schema.up.sql @@ -1 +1,2 @@ CREATE SCHEMA IF NOT EXISTS getstronger; +CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; diff --git a/db/migrations/002_base.up.sql b/db/migrations/002_base.up.sql new file mode 100644 index 00000000..d80a2111 --- /dev/null +++ b/db/migrations/002_base.up.sql @@ -0,0 +1,57 @@ +CREATE TABLE getstronger.auth +( + id UUID PRIMARY KEY NOT NULL DEFAULT uuid_generate_v4(), + email VARCHAR(255) NOT NULL UNIQUE, + password BYTEA NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT (NOW() AT TIME ZONE 'UTC') +); + +CREATE TABLE getstronger.users +( + id UUID PRIMARY KEY NOT NULL DEFAULT uuid_generate_v4(), + auth_id UUID NOT NULL REFERENCES getstronger.auth (id), + name VARCHAR NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT (NOW() AT TIME ZONE 'UTC') +); + +CREATE TABLE getstronger.routines +( + id UUID PRIMARY KEY NOT NULL DEFAULT uuid_generate_v4(), + user_id UUID NOT NULL REFERENCES getstronger.users (id), + title VARCHAR NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT (NOW() AT TIME ZONE 'UTC') +); + +CREATE TABLE getstronger.exercises +( + id UUID PRIMARY KEY NOT NULL DEFAULT uuid_generate_v4(), + user_id UUID NOT NULL REFERENCES getstronger.users (id), + title VARCHAR NOT NULL, 
+ sub_title VARCHAR, + created_at TIMESTAMP NOT NULL DEFAULT (NOW() AT TIME ZONE 'UTC') +); + +CREATE TABLE getstronger.routine_exercises +( + routine_id UUID NOT NULL REFERENCES getstronger.routines (id), + exercise_id UUID NOT NULL REFERENCES getstronger.exercises (id), + PRIMARY KEY (routine_id, exercise_id) +); + +CREATE TABLE getstronger.workouts +( + id UUID PRIMARY KEY NOT NULL DEFAULT uuid_generate_v4(), + user_id UUID NOT NULL REFERENCES getstronger.users (id), + date DATE NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT (NOW() AT TIME ZONE 'UTC') +); + +CREATE TABLE getstronger.sets +( + id UUID PRIMARY KEY NOT NULL DEFAULT uuid_generate_v4(), + workout_id UUID NOT NULL REFERENCES getstronger.workouts (id), + exercise_id UUID NOT NULL REFERENCES getstronger.exercises (id), + weight DECIMAL(8, 2) NOT NULL, + reps INT NOT NULL, + created_at TIMESTAMP NOT NULL DEFAULT (NOW() AT TIME ZONE 'UTC') +); diff --git a/db/migrations/002_users.up.sql b/db/migrations/002_users.up.sql deleted file mode 100644 index de0ce5d4..00000000 --- a/db/migrations/002_users.up.sql +++ /dev/null @@ -1,7 +0,0 @@ -CREATE TABLE getstronger.users -( - id UUID PRIMARY KEY NOT NULL, - email VARCHAR(255) NOT NULL, - password VARCHAR(255) NOT NULL, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP -); diff --git a/db/sqlboiler.toml b/db/sqlboiler.toml index 295aa28b..d54c0873 100755 --- a/db/sqlboiler.toml +++ b/db/sqlboiler.toml @@ -1,4 +1,4 @@ -output = "pkg/orm" +output = "go/pkg/orm" pkgname = "orm" wipe = true no-tests = true diff --git a/go.mod b/go.mod index ea981575..98c526bd 100644 --- a/go.mod +++ b/go.mod @@ -1,18 +1,50 @@ -module getstronger +module github.com/crlssn/getstronger go 1.23.0 require ( + buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.34.2-20240920164238-5a7b106cbb87.2 + connectrpc.com/connect v1.16.2 + github.com/bufbuild/protovalidate-go v0.7.2 github.com/friendsofgo/errors v0.9.2 + github.com/golang-jwt/jwt/v5 v5.2.1 + github.com/google/uuid 
v1.6.0 + github.com/jackc/pgx/v5 v5.6.0 + github.com/stretchr/testify v1.9.0 github.com/volatiletech/null/v8 v8.1.2 github.com/volatiletech/sqlboiler/v4 v4.16.2 github.com/volatiletech/strmangle v0.0.6 + go.uber.org/fx v1.22.2 + go.uber.org/zap v1.26.0 + golang.org/x/crypto v0.27.0 + google.golang.org/grpc v1.67.1 + google.golang.org/protobuf v1.35.1 ) require ( + github.com/antlr4-go/antlr/v4 v4.13.0 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/ericlagergren/decimal v0.0.0-20190420051523-6335edbaa640 // indirect github.com/gofrs/uuid v4.2.0+incompatible // indirect + github.com/google/cel-go v0.21.0 // indirect + github.com/jackc/pgpassfile v1.0.0 // indirect + github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect + github.com/jackc/puddle/v2 v2.2.1 // indirect + github.com/lib/pq v1.10.6 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect github.com/spf13/cast v1.5.0 // indirect + github.com/stoewer/go-strcase v1.3.0 // indirect github.com/volatiletech/inflect v0.0.1 // indirect github.com/volatiletech/randomize v0.0.1 // indirect + go.uber.org/dig v1.18.0 // indirect + go.uber.org/multierr v1.10.0 // indirect + golang.org/x/exp v0.0.0-20240325151524-a685a6edb6d8 // indirect + golang.org/x/net v0.29.0 // indirect + golang.org/x/sync v0.8.0 // indirect + golang.org/x/sys v0.25.0 // indirect + golang.org/x/text v0.18.0 // indirect golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20241007155032-5fefd90f89a9 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20241007155032-5fefd90f89a9 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index d0fd9882..b8c94a72 100644 --- a/go.sum +++ b/go.sum @@ -1,3 +1,5 @@ +buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.34.2-20240920164238-5a7b106cbb87.2 h1:hl0FrmGlNpQZIGvU1/jDz0lsPDd0BhCE0QDRwPfLZcA= 
+buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.34.2-20240920164238-5a7b106cbb87.2/go.mod h1:ylS4c28ACSI59oJrOdW4pHS4n0Hw4TgSPHn8rpHl4Yw= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= @@ -53,6 +55,8 @@ cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohl cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= +connectrpc.com/connect v1.16.2 h1:ybd6y+ls7GOlb7Bh5C8+ghA6SvCBajHwxssO2CGFjqE= +connectrpc.com/connect v1.16.2/go.mod h1:n2kgwskMHXC+lVqb18wngEpF95ldBHXjZYJussz5FRc= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.0.0/go.mod h1:uGG2W01BaETf0Ozp+QxxKJdMBNRWPdstHG0Fmdwn1/U= github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.0.0/go.mod h1:+6sju8gk8FRmSajX3Oz4G5Gm7P+mbqE9FVaXXFYTkCM= @@ -73,6 +77,8 @@ github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRF github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/antlr4-go/antlr/v4 v4.13.0 h1:lxCg3LAv+EUK6t1i0y1V6/SLeUi0eKEKdhQAlS8TVTI= +github.com/antlr4-go/antlr/v4 v4.13.0/go.mod h1:pfChB/xh/Unjila75QW7+VU4TSnWnnk9UTnmpPaOR2g= github.com/apmckinlay/gsuneido v0.0.0-20190404155041-0b6cd442a18f/go.mod 
h1:JU2DOj5Fc6rol0yaT79Csr47QR0vONGwJtBNGRD7jmc= github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= @@ -83,6 +89,8 @@ github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24 github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/bufbuild/protovalidate-go v0.7.2 h1:UuvKyZHl5p7u3ztEjtRtqtDxOjRKX5VUOgKFq6p6ETk= +github.com/bufbuild/protovalidate-go v0.7.2/go.mod h1:PHV5pFuWlRzdDW02/cmVyNzdiQ+RNNwo7idGxdzS7o4= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= @@ -122,6 +130,9 @@ github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.m github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/envoyproxy/protoc-gen-validate v1.1.0 h1:tntQDh69XqOCOZsDz0lVJQez/2L6Uu2PdjCQwWCJ3bM= +github.com/envoyproxy/protoc-gen-validate v1.1.0/go.mod h1:sXRDRVmzEbkM7CVcM06s9shE/m23dg3wzjl0UWqJ2q4= +github.com/ericlagergren/decimal v0.0.0-20190420051523-6335edbaa640 h1:VMAacqPM03GapxpfNORtKNl9o6Uws1BQYL54WjmolN0= github.com/ericlagergren/decimal v0.0.0-20190420051523-6335edbaa640/go.mod 
h1:mdYyfAkzn9kyJ/kMk/7WE9ufl9lflh+2NvecQ5mAghs= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= @@ -152,6 +163,8 @@ github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69 github.com/golang-jwt/jwt v3.2.1+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= github.com/golang-jwt/jwt/v4 v4.2.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg= +github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk= +github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0= github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EOqtpKwwwHI= @@ -189,6 +202,8 @@ github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiu github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/cel-go v0.21.0 h1:cl6uW/gxN+Hy50tNYvI691+sXxioCnstFzLp2WO4GCI= +github.com/google/cel-go v0.21.0/go.mod h1:rHUlWCcBKgyEk+eV03RPdZUekPp6YcJwV0FxuUksYxc= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= @@ -202,8 +217,9 @@ github.com/google/go-cmp v0.5.4/go.mod 
h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= -github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg= github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= @@ -228,6 +244,8 @@ github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm4 github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= @@ -274,6 +292,14 @@ github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH github.com/imdario/mergo v0.3.13/go.mod h1:4lJ1jqUDcsbIECGy0RUJAXNIhg+6ocWgb1ALK2O4oXg= github.com/inconshreveable/mousetrap v1.0.0/go.mod 
h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk= +github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgx/v5 v5.6.0 h1:SWJzexBzPL5jb0GEsrPMLIsi/3jOo7RHlzTjcAeDrPY= +github.com/jackc/pgx/v5 v5.6.0/go.mod h1:DNZ/vlrUnhWCoFGxHAG8U2ljioxukquj7utPDgtQdTw= +github.com/jackc/puddle/v2 v2.2.1 h1:RhxXJtFG022u4ibrCSMSiu5aOq1i77R3OHKNJj77OAk= +github.com/jackc/puddle/v2 v2.2.1/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= @@ -302,6 +328,7 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.10.6 h1:jbk+ZieJ0D7EVGJYpL9QTz7/YW6UHbmdnZWYyK5cdBs= github.com/lib/pq v1.10.6/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= @@ -352,6 +379,7 @@ github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE github.com/pkg/errors v0.8.1/go.mod 
h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= @@ -398,9 +426,12 @@ github.com/spf13/cobra v1.5.0/go.mod h1:dWXEIy2H428czQCjInthrTRUg7yKbok+2Qi/yBIJ github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.12.0/go.mod h1:b6COn30jlNxbm/V2IqWiNWkJ+vZNiMNksliPCiuKtSI= +github.com/stoewer/go-strcase v1.3.0 h1:g0eASXYtp+yvN9fK8sH94oCIk0fau9uV1/ZdJ0AVEzs= +github.com/stoewer/go-strcase v1.3.0/go.mod h1:fAH5hQ5pehh+j3nZfvwdk2RgEgQjAoM8wodgtPmh1xo= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= @@ -410,6 +441,9 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.5/go.mod 
h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/subosito/gotenv v1.3.0/go.mod h1:YzJjq/33h7nrwdY+iHMhEOEEbW0ovIz0tB6t6PwAXzs= github.com/subosito/gotenv v1.4.1/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0= github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= @@ -443,8 +477,18 @@ go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/dig v1.18.0 h1:imUL1UiY0Mg4bqbFfsRQO5G4CGRBec/ZujWTvSVp3pw= +go.uber.org/dig v1.18.0/go.mod h1:Us0rSJiThwCv2GteUN0Q7OKvU7n5J4dxZ9JKUXozFdE= +go.uber.org/fx v1.22.2 h1:iPW+OPxv0G8w75OemJ1RAnTUrF55zOJlXlo1TbJ0Buw= +go.uber.org/fx v1.22.2/go.mod h1:o/D9n+2mLP6v1EG+qsdT1O8wKopYAsqZasju97SDFCU= +go.uber.org/goleak v1.2.0 h1:xqgm/S+aQvhWFTtR0XK3Jvg7z8kGV8P4X14IzwN3Eqk= +go.uber.org/goleak v1.2.0/go.mod h1:XJYK+MuIchqpmGmUSAzotztawfKvYLUIgg7guXrwVUo= go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= +go.uber.org/multierr v1.10.0 h1:S0h4aNzvfcFsC3dRF1jLoaov7oRaKqRGC/pUEJ2yvPQ= +go.uber.org/multierr v1.10.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= +go.uber.org/zap v1.26.0 h1:sI7k6L95XOKS281NhVKOFCUNIvv9e0w4BF8N3u+tCRo= +go.uber.org/zap v1.26.0/go.mod h1:dtElttAiwGvoJ/vj4IwHBS/gXsEu/pZ50mUIRWuG0so= 
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= @@ -459,6 +503,8 @@ golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5y golang.org/x/crypto v0.0.0-20220411220226-7b82a4e95df4/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220511200225-c6db032c6c88/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220826181053-bd7e27e6170d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.27.0 h1:GXm2NjJrPaiv/h1tb2UH8QfgC/hOf/+z0p6PT8o1w7A= +golang.org/x/crypto v0.27.0/go.mod h1:1Xngt8kV6Dvbssa53Ziq6Eqn0HqbZi5Z6R0ZpwQzt70= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -469,6 +515,8 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/exp v0.0.0-20240325151524-a685a6edb6d8 h1:aAcj0Da7eBAtrTp03QXWvm88pSyOt+UgdZw2BFZ+lEw= +golang.org/x/exp v0.0.0-20240325151524-a685a6edb6d8/go.mod h1:CQ1k9gNrJ50XIzaKCRR2hssIjF07kZFEiieALBM/ARQ= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= 
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -544,6 +592,8 @@ golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220520000938-2e3eb7b945c2/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.29.0 h1:5ORfpBpCs4HzDYoodCDBbwHzdR5UrLBZ3sOnUJmFoHo= +golang.org/x/net v0.29.0/go.mod h1:gLkgy8jTGERgjzMic6DS9+SP0ajcu6Xu3Orq/SpETg0= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -577,6 +627,8 @@ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220513210516-0976fa681c29/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= +golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod 
h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -660,6 +712,8 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220825204002-c680a09ffe64/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34= +golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -671,6 +725,8 @@ golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224= +golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -863,6 +919,10 @@ google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3/go.mod h1:8w6bsBMX google.golang.org/genproto v0.0.0-20220429170224-98d788798c3e/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo= 
google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= google.golang.org/genproto v0.0.0-20220519153652-3a47de7e79bd/go.mod h1:RAyBrSAP7Fh3Nc84ghnVLDPuV51xc9agzmm4Ph6i0Q4= +google.golang.org/genproto/googleapis/api v0.0.0-20241007155032-5fefd90f89a9 h1:T6rh4haD3GVYsgEfWExoCZA2o2FmbNyKpTuAxbEFPTg= +google.golang.org/genproto/googleapis/api v0.0.0-20241007155032-5fefd90f89a9/go.mod h1:wp2WsuBYj6j8wUdo3ToZsdxxixbvQNAHqVJrTgi5E5M= +google.golang.org/genproto/googleapis/rpc v0.0.0-20241007155032-5fefd90f89a9 h1:QCqS/PdaHTSWGvupk2F/ehwHtGc0/GYkT+3GAcR1CCc= +google.golang.org/genproto/googleapis/rpc v0.0.0-20241007155032-5fefd90f89a9/go.mod h1:GX3210XPVPUjJbTUbvwI8f2IpZDMZuPJWDzDuebbviI= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= @@ -893,6 +953,8 @@ google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ5 google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= google.golang.org/grpc v1.46.2/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= +google.golang.org/grpc v1.67.1 h1:zWnc1Vrcno+lHZCOofnIMvycFcc0QRGIzm9dhnDX68E= +google.golang.org/grpc v1.67.1/go.mod h1:1gLDyUQU7CTLJI90u3nXZ9ekeghjeM7pTDZlqFNg2AA= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= @@ -908,10 +970,14 @@ google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp0 
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.35.1 h1:m3LfL6/Ca+fqnjnlqQXNpFPABW1UD7mjh8KO2mKFytA= +google.golang.org/protobuf v1.35.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= gopkg.in/ini.v1 v1.66.4/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= @@ -927,6 +993,7 @@ gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools 
v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= diff --git a/go/cmd/main.go b/go/cmd/main.go new file mode 100644 index 00000000..6799a696 --- /dev/null +++ b/go/cmd/main.go @@ -0,0 +1,40 @@ +package main + +import ( + "github.com/bufbuild/protovalidate-go" + "go.uber.org/fx" + "go.uber.org/zap" + "google.golang.org/grpc" + + "github.com/crlssn/getstronger/go/pkg/db" + "github.com/crlssn/getstronger/go/pkg/jwt" + "github.com/crlssn/getstronger/go/pkg/repos" + "github.com/crlssn/getstronger/go/rpc" +) + +func main() { + fx.New(options()...).Run() +} + +func options() []fx.Option { + return []fx.Option{ + jwt.Module(), + rpc.NewModule(), + fx.Provide( + func() db.Options { + return db.Options{ + Host: "localhost", + Port: 5433, + User: "root", + Password: "root", + Database: "postgres", + } + }, + db.New, + zap.NewDevelopment, + repos.NewAuth, + grpc.NewServer, + protovalidate.New, + ), + } +} diff --git a/go/cmd/main_test.go b/go/cmd/main_test.go new file mode 100644 index 00000000..3bb6d2a3 --- /dev/null +++ b/go/cmd/main_test.go @@ -0,0 +1,12 @@ +package main + +import ( + "testing" + + "github.com/stretchr/testify/require" + "go.uber.org/fx" +) + +func TestWireup(t *testing.T) { + require.NoError(t, fx.ValidateApp(options()...)) +} diff --git a/go/pkg/db/db.go b/go/pkg/db/db.go new file mode 100644 index 00000000..4df31733 --- /dev/null +++ b/go/pkg/db/db.go @@ -0,0 +1,19 @@ +package db + +import ( + "database/sql" + "fmt" + _ "github.com/jackc/pgx/v5/stdlib" +) + +type Options struct { + Host string + Port int + User string + Password string + Database string +} + +func New(opts Options) (*sql.DB, error) { + return sql.Open("pgx", fmt.Sprintf("postgresql://%s:%s@%s:%d/%s", opts.User, opts.Password, opts.Host, opts.Port, opts.Database)) +} diff --git a/go/pkg/jwt/jwt.go b/go/pkg/jwt/jwt.go new file mode 100644 index 00000000..dbbf157a --- /dev/null +++ b/go/pkg/jwt/jwt.go @@ -0,0 +1,150 @@ +package jwt + +import ( + "fmt" 
+ "time" + + "github.com/golang-jwt/jwt/v5" + "go.uber.org/zap" +) + +type Claims struct { + UserID string `json:"user_id"` + jwt.RegisteredClaims +} + +type Secrets struct { + AccessKey []byte + RefreshKey []byte +} + +func (s Secrets) ResolveKey(tokenType TokenType) []byte { + switch tokenType { + case TokenTypeAccess: + return s.AccessKey + case TokenTypeRefresh: + return s.RefreshKey + default: + return nil + } +} + +type Manager struct { + Log *zap.Logger + Secrets Secrets + Validator *jwt.Validator +} + +func NewManager(accessKey, refreshKey []byte) *Manager { + return &Manager{ + Secrets: Secrets{ + AccessKey: accessKey, + RefreshKey: refreshKey, + }, + Validator: jwt.NewValidator( + jwt.WithLeeway(5 * time.Second), + ), + } +} + +type TokenType string + +func (tt TokenType) Validate() bool { + switch tt { + case TokenTypeAccess, TokenTypeRefresh: + return true + default: + return false + } +} + +func (tt TokenType) String() string { + return string(tt) +} + +const ( + TokenTypeAccess TokenType = "access_token" + TokenTypeRefresh TokenType = "refresh_token" +) + +const ( + expiryTimeAccess = 15 * time.Minute + expiryTimeRefresh = 30 * 24 * time.Hour +) + +func (m *Manager) CreateToken(userID string, tokenType TokenType) (string, error) { + if !tokenType.Validate() { + return "", fmt.Errorf("unexpected token type: %v", tokenType) + } + + now := time.Now().UTC() + + expiryTime := expiryTimeAccess + if tokenType == TokenTypeRefresh { + expiryTime = expiryTimeRefresh + } + + claims := &Claims{ + UserID: userID, + RegisteredClaims: jwt.RegisteredClaims{ + ExpiresAt: jwt.NewNumericDate(now.Add(expiryTime)), + IssuedAt: jwt.NewNumericDate(now), + Subject: tokenType.String(), + }, + } + + token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims) + return token.SignedString(m.Secrets.ResolveKey(tokenType)) +} + +var ErrInvalidToken = fmt.Errorf("invalid token") + +func (m *Manager) ClaimsFromToken(token string, tokenType TokenType) (*Claims, error) { + if 
!tokenType.Validate() { + return nil, fmt.Errorf("unexpected token type: %v", tokenType) + } + + claims := new(Claims) + t, err := jwt.ParseWithClaims(token, claims, func(token *jwt.Token) (interface{}, error) { + if _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok { + return nil, fmt.Errorf("unexpected signing method: %v", token.Header["alg"]) + } + + subject, err := token.Claims.GetSubject() + if err != nil { + return nil, fmt.Errorf("getting subject: %w", err) + } + + if subject != tokenType.String() { + return nil, fmt.Errorf("unexpected subject: %v", subject) + } + + return m.Secrets.ResolveKey(tokenType), nil + }, jwt.WithLeeway(5*time.Second)) + if err != nil { + return nil, fmt.Errorf("token parsing: %w", err) + } + + if !t.Valid { + return nil, ErrInvalidToken + } + + return claims, nil +} + +func (m *Manager) ValidateAccessToken(token string) error { + claims, err := m.ClaimsFromToken(token, TokenTypeAccess) + if err != nil { + return fmt.Errorf("parsing claims: %w", err) + } + + if err = m.ValidateClaims(claims); err != nil { + return fmt.Errorf("validating claims: %w", err) + } + + return nil +} + +func (m *Manager) ValidateClaims(claims *Claims) error { + return m.Validator.Validate(claims) +} diff --git a/go/pkg/jwt/jwt_test.go b/go/pkg/jwt/jwt_test.go new file mode 100644 index 00000000..b42ef813 --- /dev/null +++ b/go/pkg/jwt/jwt_test.go @@ -0,0 +1,61 @@ +package jwt + +import ( + "testing" + "time" + + "github.com/stretchr/testify/require" +) + +func TestGenerateTokens(t *testing.T) { + userID := "123" + now := time.Now().UTC() + + m := NewManager([]byte("access_key"), []byte("refresh_key")) + + accessToken, err := m.CreateToken(userID, TokenTypeAccess) + require.NoError(t, err) + require.NotEmpty(t, accessToken) + + refreshToken, err := m.CreateToken(userID, TokenTypeRefresh) + require.NoError(t, err) + require.NotEmpty(t, refreshToken) + + claims, err := m.ClaimsFromToken(accessToken, TokenTypeAccess) + require.NoError(t, err) + 
require.Equal(t, userID, claims.UserID) + require.True(t, claims.ExpiresAt.After(now.Add(expiryTimeAccess-time.Second))) + require.True(t, claims.ExpiresAt.Before(now.Add(expiryTimeAccess+time.Second))) + + claims, err = m.ClaimsFromToken(refreshToken, TokenTypeRefresh) + require.NoError(t, err) + require.Equal(t, userID, claims.UserID) + require.True(t, claims.ExpiresAt.After(now.Add(expiryTimeRefresh-time.Second))) + require.True(t, claims.ExpiresAt.Before(now.Add(expiryTimeRefresh+time.Second))) + + _, err = m.CreateToken(userID, "") + require.Error(t, err) + require.Equal(t, "unexpected token type: ", err.Error()) + + _, err = m.ClaimsFromToken(accessToken, "") + require.Error(t, err) + require.Equal(t, "unexpected token type: ", err.Error()) + + _, err = m.ClaimsFromToken(accessToken, TokenTypeRefresh) + require.Error(t, err) + require.Equal(t, "token parsing: token is unverifiable: error while executing keyfunc: unexpected subject: access_token", err.Error()) + + _, err = m.ClaimsFromToken(refreshToken, TokenTypeAccess) + require.Error(t, err) + require.Equal(t, "token parsing: token is unverifiable: error while executing keyfunc: unexpected subject: refresh_token", err.Error()) + + m2 := NewManager([]byte("access_key2"), []byte("refresh_key2")) + + _, err = m2.ClaimsFromToken(accessToken, TokenTypeAccess) + require.Error(t, err) + require.Equal(t, "token parsing: token signature is invalid: signature is invalid", err.Error()) + + _, err = m2.ClaimsFromToken(refreshToken, TokenTypeRefresh) + require.Error(t, err) + require.Equal(t, "token parsing: token signature is invalid: signature is invalid", err.Error()) +} diff --git a/go/pkg/jwt/module.go b/go/pkg/jwt/module.go new file mode 100644 index 00000000..4679ad5d --- /dev/null +++ b/go/pkg/jwt/module.go @@ -0,0 +1,11 @@ +package jwt + +import "go.uber.org/fx" + +func Module() fx.Option { + return fx.Provide( + func() *Manager { + return NewManager([]byte("access-key"), []byte("refresh-key")) + }, + ) +} diff 
--git a/go/pkg/orm/auth.go b/go/pkg/orm/auth.go new file mode 100644 index 00000000..4c0fc0ec --- /dev/null +++ b/go/pkg/orm/auth.go @@ -0,0 +1,1204 @@ +// Code generated by SQLBoiler 4.16.2 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package orm + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// Auth is an object representing the database table. +type Auth struct { + ID string `boil:"id" json:"id" toml:"id" yaml:"id"` + Email string `boil:"email" json:"email" toml:"email" yaml:"email"` + Password []byte `boil:"password" json:"password" toml:"password" yaml:"password"` + CreatedAt time.Time `boil:"created_at" json:"created_at" toml:"created_at" yaml:"created_at"` + + R *authR `boil:"-" json:"-" toml:"-" yaml:"-"` + L authL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var AuthColumns = struct { + ID string + Email string + Password string + CreatedAt string +}{ + ID: "id", + Email: "email", + Password: "password", + CreatedAt: "created_at", +} + +var AuthTableColumns = struct { + ID string + Email string + Password string + CreatedAt string +}{ + ID: "auth.id", + Email: "auth.email", + Password: "auth.password", + CreatedAt: "auth.created_at", +} + +// Generated where + +type whereHelperstring struct{ field string } + +func (w whereHelperstring) EQ(x string) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.EQ, x) } +func (w whereHelperstring) NEQ(x string) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.NEQ, x) } +func (w whereHelperstring) LT(x string) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.LT, x) } +func (w 
whereHelperstring) LTE(x string) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.LTE, x) } +func (w whereHelperstring) GT(x string) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.GT, x) } +func (w whereHelperstring) GTE(x string) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.GTE, x) } +func (w whereHelperstring) LIKE(x string) qm.QueryMod { return qm.Where(w.field+" LIKE ?", x) } +func (w whereHelperstring) NLIKE(x string) qm.QueryMod { return qm.Where(w.field+" NOT LIKE ?", x) } +func (w whereHelperstring) ILIKE(x string) qm.QueryMod { return qm.Where(w.field+" ILIKE ?", x) } +func (w whereHelperstring) NILIKE(x string) qm.QueryMod { return qm.Where(w.field+" NOT ILIKE ?", x) } +func (w whereHelperstring) IN(slice []string) qm.QueryMod { + values := make([]interface{}, 0, len(slice)) + for _, value := range slice { + values = append(values, value) + } + return qm.WhereIn(fmt.Sprintf("%s IN ?", w.field), values...) +} +func (w whereHelperstring) NIN(slice []string) qm.QueryMod { + values := make([]interface{}, 0, len(slice)) + for _, value := range slice { + values = append(values, value) + } + return qm.WhereNotIn(fmt.Sprintf("%s NOT IN ?", w.field), values...) 
+} + +type whereHelper__byte struct{ field string } + +func (w whereHelper__byte) EQ(x []byte) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.EQ, x) } +func (w whereHelper__byte) NEQ(x []byte) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.NEQ, x) } +func (w whereHelper__byte) LT(x []byte) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.LT, x) } +func (w whereHelper__byte) LTE(x []byte) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.LTE, x) } +func (w whereHelper__byte) GT(x []byte) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.GT, x) } +func (w whereHelper__byte) GTE(x []byte) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.GTE, x) } + +type whereHelpertime_Time struct{ field string } + +func (w whereHelpertime_Time) EQ(x time.Time) qm.QueryMod { + return qmhelper.Where(w.field, qmhelper.EQ, x) +} +func (w whereHelpertime_Time) NEQ(x time.Time) qm.QueryMod { + return qmhelper.Where(w.field, qmhelper.NEQ, x) +} +func (w whereHelpertime_Time) LT(x time.Time) qm.QueryMod { + return qmhelper.Where(w.field, qmhelper.LT, x) +} +func (w whereHelpertime_Time) LTE(x time.Time) qm.QueryMod { + return qmhelper.Where(w.field, qmhelper.LTE, x) +} +func (w whereHelpertime_Time) GT(x time.Time) qm.QueryMod { + return qmhelper.Where(w.field, qmhelper.GT, x) +} +func (w whereHelpertime_Time) GTE(x time.Time) qm.QueryMod { + return qmhelper.Where(w.field, qmhelper.GTE, x) +} + +var AuthWhere = struct { + ID whereHelperstring + Email whereHelperstring + Password whereHelper__byte + CreatedAt whereHelpertime_Time +}{ + ID: whereHelperstring{field: "\"getstronger\".\"auth\".\"id\""}, + Email: whereHelperstring{field: "\"getstronger\".\"auth\".\"email\""}, + Password: whereHelper__byte{field: "\"getstronger\".\"auth\".\"password\""}, + CreatedAt: whereHelpertime_Time{field: "\"getstronger\".\"auth\".\"created_at\""}, +} + +// AuthRels is where relationship names are stored. 
+var AuthRels = struct { + Users string +}{ + Users: "Users", +} + +// authR is where relationships are stored. +type authR struct { + Users UserSlice `boil:"Users" json:"Users" toml:"Users" yaml:"Users"` +} + +// NewStruct creates a new relationship struct +func (*authR) NewStruct() *authR { + return &authR{} +} + +func (r *authR) GetUsers() UserSlice { + if r == nil { + return nil + } + return r.Users +} + +// authL is where Load methods for each relationship are stored. +type authL struct{} + +var ( + authAllColumns = []string{"id", "email", "password", "created_at"} + authColumnsWithoutDefault = []string{"email", "password"} + authColumnsWithDefault = []string{"id", "created_at"} + authPrimaryKeyColumns = []string{"id"} + authGeneratedColumns = []string{} +) + +type ( + // AuthSlice is an alias for a slice of pointers to Auth. + // This should almost always be used instead of []Auth. + AuthSlice []*Auth + // AuthHook is the signature for custom Auth hook methods + AuthHook func(context.Context, boil.ContextExecutor, *Auth) error + + authQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + authType = reflect.TypeOf(&Auth{}) + authMapping = queries.MakeStructMapping(authType) + authPrimaryKeyMapping, _ = queries.BindMapping(authType, authMapping, authPrimaryKeyColumns) + authInsertCacheMut sync.RWMutex + authInsertCache = make(map[string]insertCache) + authUpdateCacheMut sync.RWMutex + authUpdateCache = make(map[string]updateCache) + authUpsertCacheMut sync.RWMutex + authUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. 
+ _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var authAfterSelectMu sync.Mutex +var authAfterSelectHooks []AuthHook + +var authBeforeInsertMu sync.Mutex +var authBeforeInsertHooks []AuthHook +var authAfterInsertMu sync.Mutex +var authAfterInsertHooks []AuthHook + +var authBeforeUpdateMu sync.Mutex +var authBeforeUpdateHooks []AuthHook +var authAfterUpdateMu sync.Mutex +var authAfterUpdateHooks []AuthHook + +var authBeforeDeleteMu sync.Mutex +var authBeforeDeleteHooks []AuthHook +var authAfterDeleteMu sync.Mutex +var authAfterDeleteHooks []AuthHook + +var authBeforeUpsertMu sync.Mutex +var authBeforeUpsertHooks []AuthHook +var authAfterUpsertMu sync.Mutex +var authAfterUpsertHooks []AuthHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *Auth) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range authAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *Auth) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range authBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *Auth) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range authAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. 
+func (o *Auth) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range authBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *Auth) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range authAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *Auth) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range authBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *Auth) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range authAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *Auth) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range authBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. 
+func (o *Auth) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range authAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddAuthHook registers your hook function for all future operations. +func AddAuthHook(hookPoint boil.HookPoint, authHook AuthHook) { + switch hookPoint { + case boil.AfterSelectHook: + authAfterSelectMu.Lock() + authAfterSelectHooks = append(authAfterSelectHooks, authHook) + authAfterSelectMu.Unlock() + case boil.BeforeInsertHook: + authBeforeInsertMu.Lock() + authBeforeInsertHooks = append(authBeforeInsertHooks, authHook) + authBeforeInsertMu.Unlock() + case boil.AfterInsertHook: + authAfterInsertMu.Lock() + authAfterInsertHooks = append(authAfterInsertHooks, authHook) + authAfterInsertMu.Unlock() + case boil.BeforeUpdateHook: + authBeforeUpdateMu.Lock() + authBeforeUpdateHooks = append(authBeforeUpdateHooks, authHook) + authBeforeUpdateMu.Unlock() + case boil.AfterUpdateHook: + authAfterUpdateMu.Lock() + authAfterUpdateHooks = append(authAfterUpdateHooks, authHook) + authAfterUpdateMu.Unlock() + case boil.BeforeDeleteHook: + authBeforeDeleteMu.Lock() + authBeforeDeleteHooks = append(authBeforeDeleteHooks, authHook) + authBeforeDeleteMu.Unlock() + case boil.AfterDeleteHook: + authAfterDeleteMu.Lock() + authAfterDeleteHooks = append(authAfterDeleteHooks, authHook) + authAfterDeleteMu.Unlock() + case boil.BeforeUpsertHook: + authBeforeUpsertMu.Lock() + authBeforeUpsertHooks = append(authBeforeUpsertHooks, authHook) + authBeforeUpsertMu.Unlock() + case boil.AfterUpsertHook: + authAfterUpsertMu.Lock() + authAfterUpsertHooks = append(authAfterUpsertHooks, authHook) + authAfterUpsertMu.Unlock() + } +} + +// One returns a single auth record from the query. 
+func (q authQuery) One(ctx context.Context, exec boil.ContextExecutor) (*Auth, error) { + o := &Auth{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "orm: failed to execute a one query for auth") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all Auth records from the query. +func (q authQuery) All(ctx context.Context, exec boil.ContextExecutor) (AuthSlice, error) { + var o []*Auth + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "orm: failed to assign all query results to Auth slice") + } + + if len(authAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all Auth records in the query. +func (q authQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "orm: failed to count auth rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q authQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "orm: failed to check if auth exists") + } + + return count > 0, nil +} + +// Users retrieves all the user's Users with an executor. +func (o *Auth) Users(mods ...qm.QueryMod) userQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) 
+ } + + queryMods = append(queryMods, + qm.Where("\"getstronger\".\"users\".\"auth_id\"=?", o.ID), + ) + + return Users(queryMods...) +} + +// LoadUsers allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (authL) LoadUsers(ctx context.Context, e boil.ContextExecutor, singular bool, maybeAuth interface{}, mods queries.Applicator) error { + var slice []*Auth + var object *Auth + + if singular { + var ok bool + object, ok = maybeAuth.(*Auth) + if !ok { + object = new(Auth) + ok = queries.SetFromEmbeddedStruct(&object, &maybeAuth) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", object, maybeAuth)) + } + } + } else { + s, ok := maybeAuth.(*[]*Auth) + if ok { + slice = *s + } else { + ok = queries.SetFromEmbeddedStruct(&slice, maybeAuth) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", slice, maybeAuth)) + } + } + } + + args := make(map[interface{}]struct{}) + if singular { + if object.R == nil { + object.R = &authR{} + } + args[object.ID] = struct{}{} + } else { + for _, obj := range slice { + if obj.R == nil { + obj.R = &authR{} + } + args[obj.ID] = struct{}{} + } + } + + if len(args) == 0 { + return nil + } + + argsSlice := make([]interface{}, len(args)) + i := 0 + for arg := range args { + argsSlice[i] = arg + i++ + } + + query := NewQuery( + qm.From(`getstronger.users`), + qm.WhereIn(`getstronger.users.auth_id in ?`, argsSlice...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load users") + } + + var resultSlice []*User + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice users") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on users") + } + if err = results.Err(); err != 
nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for users") + } + + if len(userAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.Users = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &userR{} + } + foreign.R.Auth = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.ID == foreign.AuthID { + local.R.Users = append(local.R.Users, foreign) + if foreign.R == nil { + foreign.R = &userR{} + } + foreign.R.Auth = local + break + } + } + } + + return nil +} + +// AddUsers adds the given related objects to the existing relationships +// of the auth, optionally inserting them as new records. +// Appends related to o.R.Users. +// Sets related.R.Auth appropriately. +func (o *Auth) AddUsers(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*User) error { + var err error + for _, rel := range related { + if insert { + rel.AuthID = o.ID + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"getstronger\".\"users\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 1, []string{"auth_id"}), + strmangle.WhereClause("\"", "\"", 2, userPrimaryKeyColumns), + ) + values := []interface{}{o.ID, rel.ID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.AuthID = o.ID + } + } + + if o.R == nil { + o.R = &authR{ + Users: related, + } + } else { + o.R.Users = append(o.R.Users, related...) 
+ } + + for _, rel := range related { + if rel.R == nil { + rel.R = &userR{ + Auth: o, + } + } else { + rel.R.Auth = o + } + } + return nil +} + +// Auths retrieves all the records using an executor. +func Auths(mods ...qm.QueryMod) authQuery { + mods = append(mods, qm.From("\"getstronger\".\"auth\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"getstronger\".\"auth\".*"}) + } + + return authQuery{q} +} + +// FindAuth retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. +func FindAuth(ctx context.Context, exec boil.ContextExecutor, iD string, selectCols ...string) (*Auth, error) { + authObj := &Auth{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"getstronger\".\"auth\" where \"id\"=$1", sel, + ) + + q := queries.Raw(query, iD) + + err := q.Bind(ctx, exec, authObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "orm: unable to select from auth") + } + + if err = authObj.doAfterSelectHooks(ctx, exec); err != nil { + return authObj, err + } + + return authObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. 
+func (o *Auth) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("orm: no auth provided for insertion") + } + + var err error + if !boil.TimestampsAreSkipped(ctx) { + currTime := time.Now().In(boil.GetLocation()) + + if o.CreatedAt.IsZero() { + o.CreatedAt = currTime + } + } + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(authColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + authInsertCacheMut.RLock() + cache, cached := authInsertCache[key] + authInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + authAllColumns, + authColumnsWithDefault, + authColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(authType, authMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(authType, authMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"getstronger\".\"auth\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"getstronger\".\"auth\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) 
+ } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "orm: unable to insert into auth") + } + + if !cached { + authInsertCacheMut.Lock() + authInsertCache[key] = cache + authInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the Auth. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *Auth) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + authUpdateCacheMut.RLock() + cache, cached := authUpdateCache[key] + authUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + authAllColumns, + authPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("orm: unable to update auth, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"getstronger\".\"auth\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 1, wl), + strmangle.WhereClause("\"", "\"", len(wl)+1, authPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(authType, authMapping, append(wl, authPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) 
+ if err != nil { + return 0, errors.Wrap(err, "orm: unable to update auth row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: failed to get rows affected by update for auth") + } + + if !cached { + authUpdateCacheMut.Lock() + authUpdateCache[key] = cache + authUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q authQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "orm: unable to update all for auth") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: unable to retrieve rows affected for auth") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o AuthSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("orm: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), authPrimaryKeyMapping) + args = append(args, pkeyArgs...) 
+ } + + sql := fmt.Sprintf("UPDATE \"getstronger\".\"auth\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 1, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, authPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "orm: unable to update all in auth slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: unable to retrieve rows affected all in update all auth") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *Auth) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns, opts ...UpsertOptionFunc) error { + if o == nil { + return errors.New("orm: no auth provided for upsert") + } + if !boil.TimestampsAreSkipped(ctx) { + currTime := time.Now().In(boil.GetLocation()) + + if o.CreatedAt.IsZero() { + o.CreatedAt = currTime + } + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(authColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) 
+ } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + authUpsertCacheMut.RLock() + cache, cached := authUpsertCache[key] + authUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, _ := insertColumns.InsertColumnSet( + authAllColumns, + authColumnsWithDefault, + authColumnsWithoutDefault, + nzDefaults, + ) + + update := updateColumns.UpdateColumnSet( + authAllColumns, + authPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("orm: unable to upsert auth, could not build update column list") + } + + ret := strmangle.SetComplement(authAllColumns, strmangle.SetIntersect(insert, update)) + + conflict := conflictColumns + if len(conflict) == 0 && updateOnConflict && len(update) != 0 { + if len(authPrimaryKeyColumns) == 0 { + return errors.New("orm: unable to upsert auth, could not build conflict column list") + } + + conflict = make([]string, len(authPrimaryKeyColumns)) + copy(conflict, authPrimaryKeyColumns) + } + cache.query = buildUpsertQueryPostgres(dialect, "\"getstronger\".\"auth\"", updateOnConflict, ret, update, conflict, insert, opts...) + + cache.valueMapping, err = queries.BindMapping(authType, authMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(authType, authMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) 
+ if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "orm: unable to upsert auth") + } + + if !cached { + authUpsertCacheMut.Lock() + authUpsertCache[key] = cache + authUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single Auth record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *Auth) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("orm: no Auth provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), authPrimaryKeyMapping) + sql := "DELETE FROM \"getstronger\".\"auth\" WHERE \"id\"=$1" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "orm: unable to delete from auth") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: failed to get rows affected by delete for auth") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. 
+func (q authQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("orm: no authQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "orm: unable to delete all from auth") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: failed to get rows affected by deleteall for auth") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o AuthSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(authBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), authPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"getstronger\".\"auth\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 1, authPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "orm: unable to delete all from auth slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: failed to get rows affected by deleteall for auth") + } + + if len(authAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. 
+func (o *Auth) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindAuth(ctx, exec, o.ID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *AuthSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := AuthSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), authPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"getstronger\".\"auth\".* FROM \"getstronger\".\"auth\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 1, authPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "orm: unable to reload all in AuthSlice") + } + + *o = slice + + return nil +} + +// AuthExists checks if the Auth row exists. +func AuthExists(ctx context.Context, exec boil.ContextExecutor, iD string) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"getstronger\".\"auth\" where \"id\"=$1 limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, iD) + } + row := exec.QueryRowContext(ctx, sql, iD) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "orm: unable to check if auth exists") + } + + return exists, nil +} + +// Exists checks if the Auth row exists. 
+func (o *Auth) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + return AuthExists(ctx, exec, o.ID) +} diff --git a/pkg/orm/boil_queries.go b/go/pkg/orm/boil_queries.go similarity index 93% rename from pkg/orm/boil_queries.go rename to go/pkg/orm/boil_queries.go index eccda6f2..f96acb65 100644 --- a/pkg/orm/boil_queries.go +++ b/go/pkg/orm/boil_queries.go @@ -1,4 +1,4 @@ -// Code generated by SQLBoiler 4.16.1 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// Code generated by SQLBoiler 4.16.2 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. // This file is meant to be re-generated in place and/or deleted at any time. package orm diff --git a/go/pkg/orm/boil_table_names.go b/go/pkg/orm/boil_table_names.go new file mode 100644 index 00000000..deb6f9bd --- /dev/null +++ b/go/pkg/orm/boil_table_names.go @@ -0,0 +1,22 @@ +// Code generated by SQLBoiler 4.16.2 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package orm + +var TableNames = struct { + Auth string + Exercises string + RoutineExercises string + Routines string + Sets string + Users string + Workouts string +}{ + Auth: "auth", + Exercises: "exercises", + RoutineExercises: "routine_exercises", + Routines: "routines", + Sets: "sets", + Users: "users", + Workouts: "workouts", +} diff --git a/pkg/orm/boil_types.go b/go/pkg/orm/boil_types.go similarity index 95% rename from pkg/orm/boil_types.go rename to go/pkg/orm/boil_types.go index a3ae19e9..9a593038 100644 --- a/pkg/orm/boil_types.go +++ b/go/pkg/orm/boil_types.go @@ -1,4 +1,4 @@ -// Code generated by SQLBoiler 4.16.1 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// Code generated by SQLBoiler 4.16.2 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. // This file is meant to be re-generated in place and/or deleted at any time. 
package orm diff --git a/pkg/orm/boil_view_names.go b/go/pkg/orm/boil_view_names.go similarity index 70% rename from pkg/orm/boil_view_names.go rename to go/pkg/orm/boil_view_names.go index 467f4ffe..4512bb4c 100644 --- a/pkg/orm/boil_view_names.go +++ b/go/pkg/orm/boil_view_names.go @@ -1,4 +1,4 @@ -// Code generated by SQLBoiler 4.16.1 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// Code generated by SQLBoiler 4.16.2 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. // This file is meant to be re-generated in place and/or deleted at any time. package orm diff --git a/go/pkg/orm/exercises.go b/go/pkg/orm/exercises.go new file mode 100644 index 00000000..63033e04 --- /dev/null +++ b/go/pkg/orm/exercises.go @@ -0,0 +1,1693 @@ +// Code generated by SQLBoiler 4.16.2 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package orm + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/null/v8" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// Exercise is an object representing the database table. 
+type Exercise struct { + ID string `boil:"id" json:"id" toml:"id" yaml:"id"` + UserID string `boil:"user_id" json:"user_id" toml:"user_id" yaml:"user_id"` + Title string `boil:"title" json:"title" toml:"title" yaml:"title"` + SubTitle null.String `boil:"sub_title" json:"sub_title,omitempty" toml:"sub_title" yaml:"sub_title,omitempty"` + CreatedAt time.Time `boil:"created_at" json:"created_at" toml:"created_at" yaml:"created_at"` + + R *exerciseR `boil:"-" json:"-" toml:"-" yaml:"-"` + L exerciseL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var ExerciseColumns = struct { + ID string + UserID string + Title string + SubTitle string + CreatedAt string +}{ + ID: "id", + UserID: "user_id", + Title: "title", + SubTitle: "sub_title", + CreatedAt: "created_at", +} + +var ExerciseTableColumns = struct { + ID string + UserID string + Title string + SubTitle string + CreatedAt string +}{ + ID: "exercises.id", + UserID: "exercises.user_id", + Title: "exercises.title", + SubTitle: "exercises.sub_title", + CreatedAt: "exercises.created_at", +} + +// Generated where + +type whereHelpernull_String struct{ field string } + +func (w whereHelpernull_String) EQ(x null.String) qm.QueryMod { + return qmhelper.WhereNullEQ(w.field, false, x) +} +func (w whereHelpernull_String) NEQ(x null.String) qm.QueryMod { + return qmhelper.WhereNullEQ(w.field, true, x) +} +func (w whereHelpernull_String) LT(x null.String) qm.QueryMod { + return qmhelper.Where(w.field, qmhelper.LT, x) +} +func (w whereHelpernull_String) LTE(x null.String) qm.QueryMod { + return qmhelper.Where(w.field, qmhelper.LTE, x) +} +func (w whereHelpernull_String) GT(x null.String) qm.QueryMod { + return qmhelper.Where(w.field, qmhelper.GT, x) +} +func (w whereHelpernull_String) GTE(x null.String) qm.QueryMod { + return qmhelper.Where(w.field, qmhelper.GTE, x) +} +func (w whereHelpernull_String) LIKE(x null.String) qm.QueryMod { + return qm.Where(w.field+" LIKE ?", x) +} +func (w whereHelpernull_String) NLIKE(x null.String) 
qm.QueryMod { + return qm.Where(w.field+" NOT LIKE ?", x) +} +func (w whereHelpernull_String) ILIKE(x null.String) qm.QueryMod { + return qm.Where(w.field+" ILIKE ?", x) +} +func (w whereHelpernull_String) NILIKE(x null.String) qm.QueryMod { + return qm.Where(w.field+" NOT ILIKE ?", x) +} +func (w whereHelpernull_String) IN(slice []string) qm.QueryMod { + values := make([]interface{}, 0, len(slice)) + for _, value := range slice { + values = append(values, value) + } + return qm.WhereIn(fmt.Sprintf("%s IN ?", w.field), values...) +} +func (w whereHelpernull_String) NIN(slice []string) qm.QueryMod { + values := make([]interface{}, 0, len(slice)) + for _, value := range slice { + values = append(values, value) + } + return qm.WhereNotIn(fmt.Sprintf("%s NOT IN ?", w.field), values...) +} + +func (w whereHelpernull_String) IsNull() qm.QueryMod { return qmhelper.WhereIsNull(w.field) } +func (w whereHelpernull_String) IsNotNull() qm.QueryMod { return qmhelper.WhereIsNotNull(w.field) } + +var ExerciseWhere = struct { + ID whereHelperstring + UserID whereHelperstring + Title whereHelperstring + SubTitle whereHelpernull_String + CreatedAt whereHelpertime_Time +}{ + ID: whereHelperstring{field: "\"getstronger\".\"exercises\".\"id\""}, + UserID: whereHelperstring{field: "\"getstronger\".\"exercises\".\"user_id\""}, + Title: whereHelperstring{field: "\"getstronger\".\"exercises\".\"title\""}, + SubTitle: whereHelpernull_String{field: "\"getstronger\".\"exercises\".\"sub_title\""}, + CreatedAt: whereHelpertime_Time{field: "\"getstronger\".\"exercises\".\"created_at\""}, +} + +// ExerciseRels is where relationship names are stored. +var ExerciseRels = struct { + User string + Routines string + Sets string +}{ + User: "User", + Routines: "Routines", + Sets: "Sets", +} + +// exerciseR is where relationships are stored. 
+type exerciseR struct { + User *User `boil:"User" json:"User" toml:"User" yaml:"User"` + Routines RoutineSlice `boil:"Routines" json:"Routines" toml:"Routines" yaml:"Routines"` + Sets SetSlice `boil:"Sets" json:"Sets" toml:"Sets" yaml:"Sets"` +} + +// NewStruct creates a new relationship struct +func (*exerciseR) NewStruct() *exerciseR { + return &exerciseR{} +} + +func (r *exerciseR) GetUser() *User { + if r == nil { + return nil + } + return r.User +} + +func (r *exerciseR) GetRoutines() RoutineSlice { + if r == nil { + return nil + } + return r.Routines +} + +func (r *exerciseR) GetSets() SetSlice { + if r == nil { + return nil + } + return r.Sets +} + +// exerciseL is where Load methods for each relationship are stored. +type exerciseL struct{} + +var ( + exerciseAllColumns = []string{"id", "user_id", "title", "sub_title", "created_at"} + exerciseColumnsWithoutDefault = []string{"user_id", "title"} + exerciseColumnsWithDefault = []string{"id", "sub_title", "created_at"} + exercisePrimaryKeyColumns = []string{"id"} + exerciseGeneratedColumns = []string{} +) + +type ( + // ExerciseSlice is an alias for a slice of pointers to Exercise. + // This should almost always be used instead of []Exercise. 
+ ExerciseSlice []*Exercise + // ExerciseHook is the signature for custom Exercise hook methods + ExerciseHook func(context.Context, boil.ContextExecutor, *Exercise) error + + exerciseQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + exerciseType = reflect.TypeOf(&Exercise{}) + exerciseMapping = queries.MakeStructMapping(exerciseType) + exercisePrimaryKeyMapping, _ = queries.BindMapping(exerciseType, exerciseMapping, exercisePrimaryKeyColumns) + exerciseInsertCacheMut sync.RWMutex + exerciseInsertCache = make(map[string]insertCache) + exerciseUpdateCacheMut sync.RWMutex + exerciseUpdateCache = make(map[string]updateCache) + exerciseUpsertCacheMut sync.RWMutex + exerciseUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. + _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var exerciseAfterSelectMu sync.Mutex +var exerciseAfterSelectHooks []ExerciseHook + +var exerciseBeforeInsertMu sync.Mutex +var exerciseBeforeInsertHooks []ExerciseHook +var exerciseAfterInsertMu sync.Mutex +var exerciseAfterInsertHooks []ExerciseHook + +var exerciseBeforeUpdateMu sync.Mutex +var exerciseBeforeUpdateHooks []ExerciseHook +var exerciseAfterUpdateMu sync.Mutex +var exerciseAfterUpdateHooks []ExerciseHook + +var exerciseBeforeDeleteMu sync.Mutex +var exerciseBeforeDeleteHooks []ExerciseHook +var exerciseAfterDeleteMu sync.Mutex +var exerciseAfterDeleteHooks []ExerciseHook + +var exerciseBeforeUpsertMu sync.Mutex +var exerciseBeforeUpsertHooks []ExerciseHook +var exerciseAfterUpsertMu sync.Mutex +var exerciseAfterUpsertHooks []ExerciseHook + +// doAfterSelectHooks executes all "after Select" hooks. 
+func (o *Exercise) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range exerciseAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *Exercise) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range exerciseBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *Exercise) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range exerciseAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. +func (o *Exercise) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range exerciseBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *Exercise) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range exerciseAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. 
+func (o *Exercise) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range exerciseBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *Exercise) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range exerciseAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *Exercise) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range exerciseBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. +func (o *Exercise) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range exerciseAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddExerciseHook registers your hook function for all future operations. 
+func AddExerciseHook(hookPoint boil.HookPoint, exerciseHook ExerciseHook) { + switch hookPoint { + case boil.AfterSelectHook: + exerciseAfterSelectMu.Lock() + exerciseAfterSelectHooks = append(exerciseAfterSelectHooks, exerciseHook) + exerciseAfterSelectMu.Unlock() + case boil.BeforeInsertHook: + exerciseBeforeInsertMu.Lock() + exerciseBeforeInsertHooks = append(exerciseBeforeInsertHooks, exerciseHook) + exerciseBeforeInsertMu.Unlock() + case boil.AfterInsertHook: + exerciseAfterInsertMu.Lock() + exerciseAfterInsertHooks = append(exerciseAfterInsertHooks, exerciseHook) + exerciseAfterInsertMu.Unlock() + case boil.BeforeUpdateHook: + exerciseBeforeUpdateMu.Lock() + exerciseBeforeUpdateHooks = append(exerciseBeforeUpdateHooks, exerciseHook) + exerciseBeforeUpdateMu.Unlock() + case boil.AfterUpdateHook: + exerciseAfterUpdateMu.Lock() + exerciseAfterUpdateHooks = append(exerciseAfterUpdateHooks, exerciseHook) + exerciseAfterUpdateMu.Unlock() + case boil.BeforeDeleteHook: + exerciseBeforeDeleteMu.Lock() + exerciseBeforeDeleteHooks = append(exerciseBeforeDeleteHooks, exerciseHook) + exerciseBeforeDeleteMu.Unlock() + case boil.AfterDeleteHook: + exerciseAfterDeleteMu.Lock() + exerciseAfterDeleteHooks = append(exerciseAfterDeleteHooks, exerciseHook) + exerciseAfterDeleteMu.Unlock() + case boil.BeforeUpsertHook: + exerciseBeforeUpsertMu.Lock() + exerciseBeforeUpsertHooks = append(exerciseBeforeUpsertHooks, exerciseHook) + exerciseBeforeUpsertMu.Unlock() + case boil.AfterUpsertHook: + exerciseAfterUpsertMu.Lock() + exerciseAfterUpsertHooks = append(exerciseAfterUpsertHooks, exerciseHook) + exerciseAfterUpsertMu.Unlock() + } +} + +// One returns a single exercise record from the query. 
+func (q exerciseQuery) One(ctx context.Context, exec boil.ContextExecutor) (*Exercise, error) { + o := &Exercise{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "orm: failed to execute a one query for exercises") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all Exercise records from the query. +func (q exerciseQuery) All(ctx context.Context, exec boil.ContextExecutor) (ExerciseSlice, error) { + var o []*Exercise + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "orm: failed to assign all query results to Exercise slice") + } + + if len(exerciseAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all Exercise records in the query. +func (q exerciseQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "orm: failed to count exercises rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q exerciseQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "orm: failed to check if exercises exists") + } + + return count > 0, nil +} + +// User pointed to by the foreign key. 
+func (o *Exercise) User(mods ...qm.QueryMod) userQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"id\" = ?", o.UserID), + } + + queryMods = append(queryMods, mods...) + + return Users(queryMods...) +} + +// Routines retrieves all the routine's Routines with an executor. +func (o *Exercise) Routines(mods ...qm.QueryMod) routineQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.InnerJoin("\"getstronger\".\"routine_exercises\" on \"getstronger\".\"routines\".\"id\" = \"getstronger\".\"routine_exercises\".\"routine_id\""), + qm.Where("\"getstronger\".\"routine_exercises\".\"exercise_id\"=?", o.ID), + ) + + return Routines(queryMods...) +} + +// Sets retrieves all the set's Sets with an executor. +func (o *Exercise) Sets(mods ...qm.QueryMod) setQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"getstronger\".\"sets\".\"exercise_id\"=?", o.ID), + ) + + return Sets(queryMods...) +} + +// LoadUser allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. 
+func (exerciseL) LoadUser(ctx context.Context, e boil.ContextExecutor, singular bool, maybeExercise interface{}, mods queries.Applicator) error { + var slice []*Exercise + var object *Exercise + + if singular { + var ok bool + object, ok = maybeExercise.(*Exercise) + if !ok { + object = new(Exercise) + ok = queries.SetFromEmbeddedStruct(&object, &maybeExercise) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", object, maybeExercise)) + } + } + } else { + s, ok := maybeExercise.(*[]*Exercise) + if ok { + slice = *s + } else { + ok = queries.SetFromEmbeddedStruct(&slice, maybeExercise) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", slice, maybeExercise)) + } + } + } + + args := make(map[interface{}]struct{}) + if singular { + if object.R == nil { + object.R = &exerciseR{} + } + args[object.UserID] = struct{}{} + + } else { + for _, obj := range slice { + if obj.R == nil { + obj.R = &exerciseR{} + } + + args[obj.UserID] = struct{}{} + + } + } + + if len(args) == 0 { + return nil + } + + argsSlice := make([]interface{}, len(args)) + i := 0 + for arg := range args { + argsSlice[i] = arg + i++ + } + + query := NewQuery( + qm.From(`getstronger.users`), + qm.WhereIn(`getstronger.users.id in ?`, argsSlice...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load User") + } + + var resultSlice []*User + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice User") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for users") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for users") + } + + if len(userAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := 
obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.User = foreign + if foreign.R == nil { + foreign.R = &userR{} + } + foreign.R.Exercises = append(foreign.R.Exercises, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.UserID == foreign.ID { + local.R.User = foreign + if foreign.R == nil { + foreign.R = &userR{} + } + foreign.R.Exercises = append(foreign.R.Exercises, local) + break + } + } + } + + return nil +} + +// LoadRoutines allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (exerciseL) LoadRoutines(ctx context.Context, e boil.ContextExecutor, singular bool, maybeExercise interface{}, mods queries.Applicator) error { + var slice []*Exercise + var object *Exercise + + if singular { + var ok bool + object, ok = maybeExercise.(*Exercise) + if !ok { + object = new(Exercise) + ok = queries.SetFromEmbeddedStruct(&object, &maybeExercise) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", object, maybeExercise)) + } + } + } else { + s, ok := maybeExercise.(*[]*Exercise) + if ok { + slice = *s + } else { + ok = queries.SetFromEmbeddedStruct(&slice, maybeExercise) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", slice, maybeExercise)) + } + } + } + + args := make(map[interface{}]struct{}) + if singular { + if object.R == nil { + object.R = &exerciseR{} + } + args[object.ID] = struct{}{} + } else { + for _, obj := range slice { + if obj.R == nil { + obj.R = &exerciseR{} + } + args[obj.ID] = struct{}{} + } + } + + if len(args) == 0 { + return nil + } + + argsSlice := make([]interface{}, len(args)) + i := 0 + for arg := range args { + argsSlice[i] = arg + i++ + } + + query := NewQuery( + qm.Select("\"getstronger\".\"routines\".\"id\", 
\"getstronger\".\"routines\".\"user_id\", \"getstronger\".\"routines\".\"title\", \"getstronger\".\"routines\".\"created_at\", \"a\".\"exercise_id\""), + qm.From("\"getstronger\".\"routines\""), + qm.InnerJoin("\"getstronger\".\"routine_exercises\" as \"a\" on \"getstronger\".\"routines\".\"id\" = \"a\".\"routine_id\""), + qm.WhereIn("\"a\".\"exercise_id\" in ?", argsSlice...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load routines") + } + + var resultSlice []*Routine + + var localJoinCols []string + for results.Next() { + one := new(Routine) + var localJoinCol string + + err = results.Scan(&one.ID, &one.UserID, &one.Title, &one.CreatedAt, &localJoinCol) + if err != nil { + return errors.Wrap(err, "failed to scan eager loaded results for routines") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "failed to plebian-bind eager loaded slice routines") + } + + resultSlice = append(resultSlice, one) + localJoinCols = append(localJoinCols, localJoinCol) + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on routines") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for routines") + } + + if len(routineAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.Routines = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &routineR{} + } + foreign.R.Exercises = append(foreign.R.Exercises, object) + } + return nil + } + + for i, foreign := range resultSlice { + localJoinCol := localJoinCols[i] + for _, local := range slice { + if local.ID == localJoinCol { + local.R.Routines = append(local.R.Routines, foreign) + if foreign.R == nil { + foreign.R = 
&routineR{} + } + foreign.R.Exercises = append(foreign.R.Exercises, local) + break + } + } + } + + return nil +} + +// LoadSets allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (exerciseL) LoadSets(ctx context.Context, e boil.ContextExecutor, singular bool, maybeExercise interface{}, mods queries.Applicator) error { + var slice []*Exercise + var object *Exercise + + if singular { + var ok bool + object, ok = maybeExercise.(*Exercise) + if !ok { + object = new(Exercise) + ok = queries.SetFromEmbeddedStruct(&object, &maybeExercise) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", object, maybeExercise)) + } + } + } else { + s, ok := maybeExercise.(*[]*Exercise) + if ok { + slice = *s + } else { + ok = queries.SetFromEmbeddedStruct(&slice, maybeExercise) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", slice, maybeExercise)) + } + } + } + + args := make(map[interface{}]struct{}) + if singular { + if object.R == nil { + object.R = &exerciseR{} + } + args[object.ID] = struct{}{} + } else { + for _, obj := range slice { + if obj.R == nil { + obj.R = &exerciseR{} + } + args[obj.ID] = struct{}{} + } + } + + if len(args) == 0 { + return nil + } + + argsSlice := make([]interface{}, len(args)) + i := 0 + for arg := range args { + argsSlice[i] = arg + i++ + } + + query := NewQuery( + qm.From(`getstronger.sets`), + qm.WhereIn(`getstronger.sets.exercise_id in ?`, argsSlice...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load sets") + } + + var resultSlice []*Set + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice sets") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on sets") + } + 
if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for sets") + } + + if len(setAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.Sets = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &setR{} + } + foreign.R.Exercise = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.ID == foreign.ExerciseID { + local.R.Sets = append(local.R.Sets, foreign) + if foreign.R == nil { + foreign.R = &setR{} + } + foreign.R.Exercise = local + break + } + } + } + + return nil +} + +// SetUser of the exercise to the related item. +// Sets o.R.User to related. +// Adds o to related.R.Exercises. +func (o *Exercise) SetUser(ctx context.Context, exec boil.ContextExecutor, insert bool, related *User) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"getstronger\".\"exercises\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 1, []string{"user_id"}), + strmangle.WhereClause("\"", "\"", 2, exercisePrimaryKeyColumns), + ) + values := []interface{}{related.ID, o.ID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.UserID = related.ID + if o.R == nil { + o.R = &exerciseR{ + User: related, + } + } else { + o.R.User = related + } + + if related.R == nil { + related.R = &userR{ + Exercises: ExerciseSlice{o}, + } + } else { + related.R.Exercises = append(related.R.Exercises, o) + } + + return nil +} + +// 
AddRoutines adds the given related objects to the existing relationships +// of the exercise, optionally inserting them as new records. +// Appends related to o.R.Routines. +// Sets related.R.Exercises appropriately. +func (o *Exercise) AddRoutines(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Routine) error { + var err error + for _, rel := range related { + if insert { + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + } + + for _, rel := range related { + query := "insert into \"getstronger\".\"routine_exercises\" (\"exercise_id\", \"routine_id\") values ($1, $2)" + values := []interface{}{o.ID, rel.ID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err = exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to insert into join table") + } + } + if o.R == nil { + o.R = &exerciseR{ + Routines: related, + } + } else { + o.R.Routines = append(o.R.Routines, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &routineR{ + Exercises: ExerciseSlice{o}, + } + } else { + rel.R.Exercises = append(rel.R.Exercises, o) + } + } + return nil +} + +// SetRoutines removes all previously related items of the +// exercise replacing them completely with the passed +// in related items, optionally inserting them as new records. +// Sets o.R.Exercises's Routines accordingly. +// Replaces o.R.Routines with related. +// Sets related.R.Exercises's Routines accordingly. 
+func (o *Exercise) SetRoutines(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Routine) error { + query := "delete from \"getstronger\".\"routine_exercises\" where \"exercise_id\" = $1" + values := []interface{}{o.ID} + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err := exec.ExecContext(ctx, query, values...) + if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + + removeRoutinesFromExercisesSlice(o, related) + if o.R != nil { + o.R.Routines = nil + } + + return o.AddRoutines(ctx, exec, insert, related...) +} + +// RemoveRoutines relationships from objects passed in. +// Removes related items from R.Routines (uses pointer comparison, removal does not keep order) +// Sets related.R.Exercises. +func (o *Exercise) RemoveRoutines(ctx context.Context, exec boil.ContextExecutor, related ...*Routine) error { + if len(related) == 0 { + return nil + } + + var err error + query := fmt.Sprintf( + "delete from \"getstronger\".\"routine_exercises\" where \"exercise_id\" = $1 and \"routine_id\" in (%s)", + strmangle.Placeholders(dialect.UseIndexPlaceholders, len(related), 2, 1), + ) + values := []interface{}{o.ID} + for _, rel := range related { + values = append(values, rel.ID) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, query) + fmt.Fprintln(writer, values) + } + _, err = exec.ExecContext(ctx, query, values...) 
+ if err != nil { + return errors.Wrap(err, "failed to remove relationships before set") + } + removeRoutinesFromExercisesSlice(o, related) + if o.R == nil { + return nil + } + + for _, rel := range related { + for i, ri := range o.R.Routines { + if rel != ri { + continue + } + + ln := len(o.R.Routines) + if ln > 1 && i < ln-1 { + o.R.Routines[i] = o.R.Routines[ln-1] + } + o.R.Routines = o.R.Routines[:ln-1] + break + } + } + + return nil +} + +func removeRoutinesFromExercisesSlice(o *Exercise, related []*Routine) { + for _, rel := range related { + if rel.R == nil { + continue + } + for i, ri := range rel.R.Exercises { + if o.ID != ri.ID { + continue + } + + ln := len(rel.R.Exercises) + if ln > 1 && i < ln-1 { + rel.R.Exercises[i] = rel.R.Exercises[ln-1] + } + rel.R.Exercises = rel.R.Exercises[:ln-1] + break + } + } +} + +// AddSets adds the given related objects to the existing relationships +// of the exercise, optionally inserting them as new records. +// Appends related to o.R.Sets. +// Sets related.R.Exercise appropriately. 
+func (o *Exercise) AddSets(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Set) error { + var err error + for _, rel := range related { + if insert { + rel.ExerciseID = o.ID + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"getstronger\".\"sets\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 1, []string{"exercise_id"}), + strmangle.WhereClause("\"", "\"", 2, setPrimaryKeyColumns), + ) + values := []interface{}{o.ID, rel.ID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.ExerciseID = o.ID + } + } + + if o.R == nil { + o.R = &exerciseR{ + Sets: related, + } + } else { + o.R.Sets = append(o.R.Sets, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &setR{ + Exercise: o, + } + } else { + rel.R.Exercise = o + } + } + return nil +} + +// Exercises retrieves all the records using an executor. +func Exercises(mods ...qm.QueryMod) exerciseQuery { + mods = append(mods, qm.From("\"getstronger\".\"exercises\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"getstronger\".\"exercises\".*"}) + } + + return exerciseQuery{q} +} + +// FindExercise retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindExercise(ctx context.Context, exec boil.ContextExecutor, iD string, selectCols ...string) (*Exercise, error) { + exerciseObj := &Exercise{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"getstronger\".\"exercises\" where \"id\"=$1", sel, + ) + + q := queries.Raw(query, iD) + + err := q.Bind(ctx, exec, exerciseObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "orm: unable to select from exercises") + } + + if err = exerciseObj.doAfterSelectHooks(ctx, exec); err != nil { + return exerciseObj, err + } + + return exerciseObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *Exercise) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("orm: no exercises provided for insertion") + } + + var err error + if !boil.TimestampsAreSkipped(ctx) { + currTime := time.Now().In(boil.GetLocation()) + + if o.CreatedAt.IsZero() { + o.CreatedAt = currTime + } + } + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(exerciseColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + exerciseInsertCacheMut.RLock() + cache, cached := exerciseInsertCache[key] + exerciseInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + exerciseAllColumns, + exerciseColumnsWithDefault, + exerciseColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(exerciseType, exerciseMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(exerciseType, exerciseMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { 
+ cache.query = fmt.Sprintf("INSERT INTO \"getstronger\".\"exercises\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"getstronger\".\"exercises\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "orm: unable to insert into exercises") + } + + if !cached { + exerciseInsertCacheMut.Lock() + exerciseInsertCache[key] = cache + exerciseInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the Exercise. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *Exercise) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + exerciseUpdateCacheMut.RLock() + cache, cached := exerciseUpdateCache[key] + exerciseUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + exerciseAllColumns, + exercisePrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("orm: unable to update exercises, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"getstronger\".\"exercises\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 1, wl), + strmangle.WhereClause("\"", "\"", len(wl)+1, exercisePrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(exerciseType, exerciseMapping, append(wl, exercisePrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "orm: unable to update exercises row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: failed to get rows affected by update for exercises") + } + + if !cached { + exerciseUpdateCacheMut.Lock() + exerciseUpdateCache[key] = cache + exerciseUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
+func (q exerciseQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "orm: unable to update all for exercises") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: unable to retrieve rows affected for exercises") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o ExerciseSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("orm: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), exercisePrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"getstronger\".\"exercises\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 1, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, exercisePrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "orm: unable to update all in exercise slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: unable to retrieve rows affected all in update all exercise") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *Exercise) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns, opts ...UpsertOptionFunc) error { + if o == nil { + return errors.New("orm: no exercises provided for upsert") + } + if !boil.TimestampsAreSkipped(ctx) { + currTime := time.Now().In(boil.GetLocation()) + + if o.CreatedAt.IsZero() { + o.CreatedAt = currTime + } + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(exerciseColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + exerciseUpsertCacheMut.RLock() + cache, cached := exerciseUpsertCache[key] + exerciseUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, _ := insertColumns.InsertColumnSet( + exerciseAllColumns, + exerciseColumnsWithDefault, + exerciseColumnsWithoutDefault, 
+ nzDefaults, + ) + + update := updateColumns.UpdateColumnSet( + exerciseAllColumns, + exercisePrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("orm: unable to upsert exercises, could not build update column list") + } + + ret := strmangle.SetComplement(exerciseAllColumns, strmangle.SetIntersect(insert, update)) + + conflict := conflictColumns + if len(conflict) == 0 && updateOnConflict && len(update) != 0 { + if len(exercisePrimaryKeyColumns) == 0 { + return errors.New("orm: unable to upsert exercises, could not build conflict column list") + } + + conflict = make([]string, len(exercisePrimaryKeyColumns)) + copy(conflict, exercisePrimaryKeyColumns) + } + cache.query = buildUpsertQueryPostgres(dialect, "\"getstronger\".\"exercises\"", updateOnConflict, ret, update, conflict, insert, opts...) + + cache.valueMapping, err = queries.BindMapping(exerciseType, exerciseMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(exerciseType, exerciseMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + if err != nil { + return errors.Wrap(err, "orm: unable to upsert exercises") + } + + if !cached { + exerciseUpsertCacheMut.Lock() + exerciseUpsertCache[key] = cache + exerciseUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single Exercise record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *Exercise) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("orm: no Exercise provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), exercisePrimaryKeyMapping) + sql := "DELETE FROM \"getstronger\".\"exercises\" WHERE \"id\"=$1" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "orm: unable to delete from exercises") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: failed to get rows affected by delete for exercises") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. 
+func (q exerciseQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("orm: no exerciseQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "orm: unable to delete all from exercises") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: failed to get rows affected by deleteall for exercises") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o ExerciseSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(exerciseBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), exercisePrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"getstronger\".\"exercises\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 1, exercisePrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "orm: unable to delete all from exercise slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: failed to get rows affected by deleteall for exercises") + } + + if len(exerciseAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *Exercise) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindExercise(ctx, exec, o.ID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *ExerciseSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := ExerciseSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), exercisePrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"getstronger\".\"exercises\".* FROM \"getstronger\".\"exercises\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 1, exercisePrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "orm: unable to reload all in ExerciseSlice") + } + + *o = slice + + return nil +} + +// ExerciseExists checks if the Exercise row exists. 
+func ExerciseExists(ctx context.Context, exec boil.ContextExecutor, iD string) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"getstronger\".\"exercises\" where \"id\"=$1 limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, iD) + } + row := exec.QueryRowContext(ctx, sql, iD) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "orm: unable to check if exercises exists") + } + + return exists, nil +} + +// Exists checks if the Exercise row exists. +func (o *Exercise) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + return ExerciseExists(ctx, exec, o.ID) +} diff --git a/pkg/orm/psql_upsert.go b/go/pkg/orm/psql_upsert.go similarity index 97% rename from pkg/orm/psql_upsert.go rename to go/pkg/orm/psql_upsert.go index 9b78b34f..42cc58f9 100644 --- a/pkg/orm/psql_upsert.go +++ b/go/pkg/orm/psql_upsert.go @@ -1,4 +1,4 @@ -// Code generated by SQLBoiler 4.16.1 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// Code generated by SQLBoiler 4.16.2 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. // This file is meant to be re-generated in place and/or deleted at any time. package orm diff --git a/go/pkg/orm/routines.go b/go/pkg/orm/routines.go new file mode 100644 index 00000000..fbb363cb --- /dev/null +++ b/go/pkg/orm/routines.go @@ -0,0 +1,1445 @@ +// Code generated by SQLBoiler 4.16.2 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. 
+ +package orm + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// Routine is an object representing the database table. +type Routine struct { + ID string `boil:"id" json:"id" toml:"id" yaml:"id"` + UserID string `boil:"user_id" json:"user_id" toml:"user_id" yaml:"user_id"` + Title string `boil:"title" json:"title" toml:"title" yaml:"title"` + CreatedAt time.Time `boil:"created_at" json:"created_at" toml:"created_at" yaml:"created_at"` + + R *routineR `boil:"-" json:"-" toml:"-" yaml:"-"` + L routineL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var RoutineColumns = struct { + ID string + UserID string + Title string + CreatedAt string +}{ + ID: "id", + UserID: "user_id", + Title: "title", + CreatedAt: "created_at", +} + +var RoutineTableColumns = struct { + ID string + UserID string + Title string + CreatedAt string +}{ + ID: "routines.id", + UserID: "routines.user_id", + Title: "routines.title", + CreatedAt: "routines.created_at", +} + +// Generated where + +var RoutineWhere = struct { + ID whereHelperstring + UserID whereHelperstring + Title whereHelperstring + CreatedAt whereHelpertime_Time +}{ + ID: whereHelperstring{field: "\"getstronger\".\"routines\".\"id\""}, + UserID: whereHelperstring{field: "\"getstronger\".\"routines\".\"user_id\""}, + Title: whereHelperstring{field: "\"getstronger\".\"routines\".\"title\""}, + CreatedAt: whereHelpertime_Time{field: "\"getstronger\".\"routines\".\"created_at\""}, +} + +// RoutineRels is where relationship names are stored. +var RoutineRels = struct { + User string + Exercises string +}{ + User: "User", + Exercises: "Exercises", +} + +// routineR is where relationships are stored. 
+type routineR struct { + User *User `boil:"User" json:"User" toml:"User" yaml:"User"` + Exercises ExerciseSlice `boil:"Exercises" json:"Exercises" toml:"Exercises" yaml:"Exercises"` +} + +// NewStruct creates a new relationship struct +func (*routineR) NewStruct() *routineR { + return &routineR{} +} + +func (r *routineR) GetUser() *User { + if r == nil { + return nil + } + return r.User +} + +func (r *routineR) GetExercises() ExerciseSlice { + if r == nil { + return nil + } + return r.Exercises +} + +// routineL is where Load methods for each relationship are stored. +type routineL struct{} + +var ( + routineAllColumns = []string{"id", "user_id", "title", "created_at"} + routineColumnsWithoutDefault = []string{"user_id", "title"} + routineColumnsWithDefault = []string{"id", "created_at"} + routinePrimaryKeyColumns = []string{"id"} + routineGeneratedColumns = []string{} +) + +type ( + // RoutineSlice is an alias for a slice of pointers to Routine. + // This should almost always be used instead of []Routine. + RoutineSlice []*Routine + // RoutineHook is the signature for custom Routine hook methods + RoutineHook func(context.Context, boil.ContextExecutor, *Routine) error + + routineQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + routineType = reflect.TypeOf(&Routine{}) + routineMapping = queries.MakeStructMapping(routineType) + routinePrimaryKeyMapping, _ = queries.BindMapping(routineType, routineMapping, routinePrimaryKeyColumns) + routineInsertCacheMut sync.RWMutex + routineInsertCache = make(map[string]insertCache) + routineUpdateCacheMut sync.RWMutex + routineUpdateCache = make(map[string]updateCache) + routineUpsertCacheMut sync.RWMutex + routineUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. 
+ _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var routineAfterSelectMu sync.Mutex +var routineAfterSelectHooks []RoutineHook + +var routineBeforeInsertMu sync.Mutex +var routineBeforeInsertHooks []RoutineHook +var routineAfterInsertMu sync.Mutex +var routineAfterInsertHooks []RoutineHook + +var routineBeforeUpdateMu sync.Mutex +var routineBeforeUpdateHooks []RoutineHook +var routineAfterUpdateMu sync.Mutex +var routineAfterUpdateHooks []RoutineHook + +var routineBeforeDeleteMu sync.Mutex +var routineBeforeDeleteHooks []RoutineHook +var routineAfterDeleteMu sync.Mutex +var routineAfterDeleteHooks []RoutineHook + +var routineBeforeUpsertMu sync.Mutex +var routineBeforeUpsertHooks []RoutineHook +var routineAfterUpsertMu sync.Mutex +var routineAfterUpsertHooks []RoutineHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *Routine) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range routineAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *Routine) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range routineBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *Routine) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range routineAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. 
+func (o *Routine) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range routineBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *Routine) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range routineAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *Routine) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range routineBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *Routine) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range routineAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *Routine) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range routineBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. 
+func (o *Routine) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range routineAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddRoutineHook registers your hook function for all future operations. +func AddRoutineHook(hookPoint boil.HookPoint, routineHook RoutineHook) { + switch hookPoint { + case boil.AfterSelectHook: + routineAfterSelectMu.Lock() + routineAfterSelectHooks = append(routineAfterSelectHooks, routineHook) + routineAfterSelectMu.Unlock() + case boil.BeforeInsertHook: + routineBeforeInsertMu.Lock() + routineBeforeInsertHooks = append(routineBeforeInsertHooks, routineHook) + routineBeforeInsertMu.Unlock() + case boil.AfterInsertHook: + routineAfterInsertMu.Lock() + routineAfterInsertHooks = append(routineAfterInsertHooks, routineHook) + routineAfterInsertMu.Unlock() + case boil.BeforeUpdateHook: + routineBeforeUpdateMu.Lock() + routineBeforeUpdateHooks = append(routineBeforeUpdateHooks, routineHook) + routineBeforeUpdateMu.Unlock() + case boil.AfterUpdateHook: + routineAfterUpdateMu.Lock() + routineAfterUpdateHooks = append(routineAfterUpdateHooks, routineHook) + routineAfterUpdateMu.Unlock() + case boil.BeforeDeleteHook: + routineBeforeDeleteMu.Lock() + routineBeforeDeleteHooks = append(routineBeforeDeleteHooks, routineHook) + routineBeforeDeleteMu.Unlock() + case boil.AfterDeleteHook: + routineAfterDeleteMu.Lock() + routineAfterDeleteHooks = append(routineAfterDeleteHooks, routineHook) + routineAfterDeleteMu.Unlock() + case boil.BeforeUpsertHook: + routineBeforeUpsertMu.Lock() + routineBeforeUpsertHooks = append(routineBeforeUpsertHooks, routineHook) + routineBeforeUpsertMu.Unlock() + case boil.AfterUpsertHook: + routineAfterUpsertMu.Lock() + routineAfterUpsertHooks = append(routineAfterUpsertHooks, routineHook) + routineAfterUpsertMu.Unlock() + } +} + +// One returns a single routine 
record from the query. +func (q routineQuery) One(ctx context.Context, exec boil.ContextExecutor) (*Routine, error) { + o := &Routine{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "orm: failed to execute a one query for routines") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all Routine records from the query. +func (q routineQuery) All(ctx context.Context, exec boil.ContextExecutor) (RoutineSlice, error) { + var o []*Routine + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "orm: failed to assign all query results to Routine slice") + } + + if len(routineAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all Routine records in the query. +func (q routineQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "orm: failed to count routines rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q routineQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "orm: failed to check if routines exists") + } + + return count > 0, nil +} + +// User pointed to by the foreign key. 
+func (o *Routine) User(mods ...qm.QueryMod) userQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"id\" = ?", o.UserID), + } + + queryMods = append(queryMods, mods...) + + return Users(queryMods...) +} + +// Exercises retrieves all the exercise's Exercises with an executor. +func (o *Routine) Exercises(mods ...qm.QueryMod) exerciseQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.InnerJoin("\"getstronger\".\"routine_exercises\" on \"getstronger\".\"exercises\".\"id\" = \"getstronger\".\"routine_exercises\".\"exercise_id\""), + qm.Where("\"getstronger\".\"routine_exercises\".\"routine_id\"=?", o.ID), + ) + + return Exercises(queryMods...) +} + +// LoadUser allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (routineL) LoadUser(ctx context.Context, e boil.ContextExecutor, singular bool, maybeRoutine interface{}, mods queries.Applicator) error { + var slice []*Routine + var object *Routine + + if singular { + var ok bool + object, ok = maybeRoutine.(*Routine) + if !ok { + object = new(Routine) + ok = queries.SetFromEmbeddedStruct(&object, &maybeRoutine) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", object, maybeRoutine)) + } + } + } else { + s, ok := maybeRoutine.(*[]*Routine) + if ok { + slice = *s + } else { + ok = queries.SetFromEmbeddedStruct(&slice, maybeRoutine) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", slice, maybeRoutine)) + } + } + } + + args := make(map[interface{}]struct{}) + if singular { + if object.R == nil { + object.R = &routineR{} + } + args[object.UserID] = struct{}{} + + } else { + for _, obj := range slice { + if obj.R == nil { + obj.R = &routineR{} + } + + args[obj.UserID] = struct{}{} + + } + } + + if len(args) == 0 { + return nil + } + + argsSlice := make([]interface{}, len(args)) + i := 0 + for 
arg := range args { + argsSlice[i] = arg + i++ + } + + query := NewQuery( + qm.From(`getstronger.users`), + qm.WhereIn(`getstronger.users.id in ?`, argsSlice...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load User") + } + + var resultSlice []*User + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice User") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for users") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for users") + } + + if len(userAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.User = foreign + if foreign.R == nil { + foreign.R = &userR{} + } + foreign.R.Routines = append(foreign.R.Routines, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.UserID == foreign.ID { + local.R.User = foreign + if foreign.R == nil { + foreign.R = &userR{} + } + foreign.R.Routines = append(foreign.R.Routines, local) + break + } + } + } + + return nil +} + +// LoadExercises allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. 
+func (routineL) LoadExercises(ctx context.Context, e boil.ContextExecutor, singular bool, maybeRoutine interface{}, mods queries.Applicator) error { + var slice []*Routine + var object *Routine + + if singular { + var ok bool + object, ok = maybeRoutine.(*Routine) + if !ok { + object = new(Routine) + ok = queries.SetFromEmbeddedStruct(&object, &maybeRoutine) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", object, maybeRoutine)) + } + } + } else { + s, ok := maybeRoutine.(*[]*Routine) + if ok { + slice = *s + } else { + ok = queries.SetFromEmbeddedStruct(&slice, maybeRoutine) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", slice, maybeRoutine)) + } + } + } + + args := make(map[interface{}]struct{}) + if singular { + if object.R == nil { + object.R = &routineR{} + } + args[object.ID] = struct{}{} + } else { + for _, obj := range slice { + if obj.R == nil { + obj.R = &routineR{} + } + args[obj.ID] = struct{}{} + } + } + + if len(args) == 0 { + return nil + } + + argsSlice := make([]interface{}, len(args)) + i := 0 + for arg := range args { + argsSlice[i] = arg + i++ + } + + query := NewQuery( + qm.Select("\"getstronger\".\"exercises\".\"id\", \"getstronger\".\"exercises\".\"user_id\", \"getstronger\".\"exercises\".\"title\", \"getstronger\".\"exercises\".\"sub_title\", \"getstronger\".\"exercises\".\"created_at\", \"a\".\"routine_id\""), + qm.From("\"getstronger\".\"exercises\""), + qm.InnerJoin("\"getstronger\".\"routine_exercises\" as \"a\" on \"getstronger\".\"exercises\".\"id\" = \"a\".\"exercise_id\""), + qm.WhereIn("\"a\".\"routine_id\" in ?", argsSlice...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load exercises") + } + + var resultSlice []*Exercise + + var localJoinCols []string + for results.Next() { + one := new(Exercise) + var localJoinCol string + + err = 
results.Scan(&one.ID, &one.UserID, &one.Title, &one.SubTitle, &one.CreatedAt, &localJoinCol) + if err != nil { + return errors.Wrap(err, "failed to scan eager loaded results for exercises") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "failed to plebian-bind eager loaded slice exercises") + } + + resultSlice = append(resultSlice, one) + localJoinCols = append(localJoinCols, localJoinCol) + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on exercises") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for exercises") + } + + if len(exerciseAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.Exercises = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &exerciseR{} + } + foreign.R.Routines = append(foreign.R.Routines, object) + } + return nil + } + + for i, foreign := range resultSlice { + localJoinCol := localJoinCols[i] + for _, local := range slice { + if local.ID == localJoinCol { + local.R.Exercises = append(local.R.Exercises, foreign) + if foreign.R == nil { + foreign.R = &exerciseR{} + } + foreign.R.Routines = append(foreign.R.Routines, local) + break + } + } + } + + return nil +} + +// SetUser of the routine to the related item. +// Sets o.R.User to related. +// Adds o to related.R.Routines. 
func (o *Routine) SetUser(ctx context.Context, exec boil.ContextExecutor, insert bool, related *User) error {
	var err error
	// Optionally persist the related User first so its generated ID is valid
	// for the foreign-key update below.
	if insert {
		if err = related.Insert(ctx, exec, boil.Infer()); err != nil {
			return errors.Wrap(err, "failed to insert into foreign table")
		}
	}

	// Re-point routines.user_id at the related row.
	// Placeholder order: $1 = new user_id, $2 = this routine's primary key.
	updateQuery := fmt.Sprintf(
		"UPDATE \"getstronger\".\"routines\" SET %s WHERE %s",
		strmangle.SetParamNames("\"", "\"", 1, []string{"user_id"}),
		strmangle.WhereClause("\"", "\"", 2, routinePrimaryKeyColumns),
	)
	values := []interface{}{related.ID, o.ID}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, updateQuery)
		fmt.Fprintln(writer, values)
	}
	if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil {
		return errors.Wrap(err, "failed to update local table")
	}

	// Mirror the database change in memory: update the FK field and link both
	// sides of the relationship structs so eager-loaded views stay consistent.
	o.UserID = related.ID
	if o.R == nil {
		o.R = &routineR{
			User: related,
		}
	} else {
		o.R.User = related
	}

	if related.R == nil {
		related.R = &userR{
			Routines: RoutineSlice{o},
		}
	} else {
		related.R.Routines = append(related.R.Routines, o)
	}

	return nil
}

// AddExercises adds the given related objects to the existing relationships
// of the routine, optionally inserting them as new records.
// Appends related to o.R.Exercises.
// Sets related.R.Routines appropriately.
func (o *Routine) AddExercises(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Exercise) error {
	var err error
	// First pass: optionally persist each related Exercise so its ID exists
	// before the join rows reference it.
	for _, rel := range related {
		if insert {
			if err = rel.Insert(ctx, exec, boil.Infer()); err != nil {
				return errors.Wrap(err, "failed to insert into foreign table")
			}
		}
	}

	// Second pass: one INSERT per pair into the routine_exercises join table.
	for _, rel := range related {
		query := "insert into \"getstronger\".\"routine_exercises\" (\"routine_id\", \"exercise_id\") values ($1, $2)"
		values := []interface{}{o.ID, rel.ID}

		if boil.IsDebug(ctx) {
			writer := boil.DebugWriterFrom(ctx)
			fmt.Fprintln(writer, query)
			fmt.Fprintln(writer, values)
		}
		_, err = exec.ExecContext(ctx, query, values...)
		if err != nil {
			return errors.Wrap(err, "failed to insert into join table")
		}
	}
	// Keep the in-memory relationship caches on both sides in sync.
	if o.R == nil {
		o.R = &routineR{
			Exercises: related,
		}
	} else {
		o.R.Exercises = append(o.R.Exercises, related...)
	}

	for _, rel := range related {
		if rel.R == nil {
			rel.R = &exerciseR{
				Routines: RoutineSlice{o},
			}
		} else {
			rel.R.Routines = append(rel.R.Routines, o)
		}
	}
	return nil
}

// SetExercises removes all previously related items of the
// routine replacing them completely with the passed
// in related items, optionally inserting them as new records.
// Sets o.R.Routines's Exercises accordingly.
// Replaces o.R.Exercises with related.
// Sets related.R.Routines's Exercises accordingly.
func (o *Routine) SetExercises(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Exercise) error {
	// Clear every existing join row for this routine, then re-add the new set.
	query := "delete from \"getstronger\".\"routine_exercises\" where \"routine_id\" = $1"
	values := []interface{}{o.ID}
	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, query)
		fmt.Fprintln(writer, values)
	}
	_, err := exec.ExecContext(ctx, query, values...)
	if err != nil {
		return errors.Wrap(err, "failed to remove relationships before set")
	}

	// Drop stale back-references on the related objects, then reset our own
	// cache; AddExercises rebuilds both sides and the join rows.
	removeExercisesFromRoutinesSlice(o, related)
	if o.R != nil {
		o.R.Exercises = nil
	}

	return o.AddExercises(ctx, exec, insert, related...)
}

// RemoveExercises relationships from objects passed in.
// Removes related items from R.Exercises (uses pointer comparison, removal does not keep order)
// Sets related.R.Routines.
func (o *Routine) RemoveExercises(ctx context.Context, exec boil.ContextExecutor, related ...*Exercise) error {
	if len(related) == 0 {
		return nil
	}

	var err error
	// Single DELETE with an IN(...) list of exercise IDs; placeholders start
	// at $2 because $1 is the routine ID.
	query := fmt.Sprintf(
		"delete from \"getstronger\".\"routine_exercises\" where \"routine_id\" = $1 and \"exercise_id\" in (%s)",
		strmangle.Placeholders(dialect.UseIndexPlaceholders, len(related), 2, 1),
	)
	values := []interface{}{o.ID}
	for _, rel := range related {
		values = append(values, rel.ID)
	}

	if boil.IsDebug(ctx) {
		writer := boil.DebugWriterFrom(ctx)
		fmt.Fprintln(writer, query)
		fmt.Fprintln(writer, values)
	}
	_, err = exec.ExecContext(ctx, query, values...)
	if err != nil {
		return errors.Wrap(err, "failed to remove relationships before set")
	}
	removeExercisesFromRoutinesSlice(o, related)
	if o.R == nil {
		return nil
	}

	// Unlink from our own cache using pointer identity; removal swaps the
	// last element into the hole, so slice order is not preserved.
	for _, rel := range related {
		for i, ri := range o.R.Exercises {
			if rel != ri {
				continue
			}

			ln := len(o.R.Exercises)
			if ln > 1 && i < ln-1 {
				o.R.Exercises[i] = o.R.Exercises[ln-1]
			}
			o.R.Exercises = o.R.Exercises[:ln-1]
			break
		}
	}

	return nil
}

// removeExercisesFromRoutinesSlice drops this routine from each related
// exercise's cached R.Routines slice (matched by ID, swap-with-last removal,
// order not preserved).
func removeExercisesFromRoutinesSlice(o *Routine, related []*Exercise) {
	for _, rel := range related {
		if rel.R == nil {
			continue
		}
		for i, ri := range rel.R.Routines {
			if o.ID != ri.ID {
				continue
			}

			ln := len(rel.R.Routines)
			if ln > 1 && i < ln-1 {
				rel.R.Routines[i] = rel.R.Routines[ln-1]
			}
			rel.R.Routines = rel.R.Routines[:ln-1]
			break
		}
	}
}

// Routines retrieves all the records using an executor.
func Routines(mods ...qm.QueryMod) routineQuery {
	// Scope the query to the routines table; default the projection to all of
	// its columns unless the caller already supplied a SELECT mod.
	mods = append(mods, qm.From("\"getstronger\".\"routines\""))
	q := NewQuery(mods...)
	if len(queries.GetSelect(q)) == 0 {
		queries.SetSelect(q, []string{"\"getstronger\".\"routines\".*"})
	}

	return routineQuery{q}
}

// FindRoutine retrieves a single record by ID with an executor.
// If selectCols is empty Find will return all columns.
func FindRoutine(ctx context.Context, exec boil.ContextExecutor, iD string, selectCols ...string) (*Routine, error) {
	routineObj := &Routine{}

	// Quote any requested columns with the dialect's identifier quotes.
	sel := "*"
	if len(selectCols) > 0 {
		sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",")
	}
	query := fmt.Sprintf(
		"select %s from \"getstronger\".\"routines\" where \"id\"=$1", sel,
	)

	q := queries.Raw(query, iD)

	err := q.Bind(ctx, exec, routineObj)
	if err != nil {
		// Return sql.ErrNoRows unwrapped so callers can compare against it
		// directly rather than unwrapping the error chain.
		if errors.Is(err, sql.ErrNoRows) {
			return nil, sql.ErrNoRows
		}
		return nil, errors.Wrap(err, "orm: unable to select from routines")
	}

	if err = routineObj.doAfterSelectHooks(ctx, exec); err != nil {
		return routineObj, err
	}

	return routineObj, nil
}

// Insert a single record using an executor.
// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts.
+func (o *Routine) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("orm: no routines provided for insertion") + } + + var err error + if !boil.TimestampsAreSkipped(ctx) { + currTime := time.Now().In(boil.GetLocation()) + + if o.CreatedAt.IsZero() { + o.CreatedAt = currTime + } + } + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(routineColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + routineInsertCacheMut.RLock() + cache, cached := routineInsertCache[key] + routineInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + routineAllColumns, + routineColumnsWithDefault, + routineColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(routineType, routineMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(routineType, routineMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"getstronger\".\"routines\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"getstronger\".\"routines\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, 
vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "orm: unable to insert into routines") + } + + if !cached { + routineInsertCacheMut.Lock() + routineInsertCache[key] = cache + routineInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the Routine. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. +func (o *Routine) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + routineUpdateCacheMut.RLock() + cache, cached := routineUpdateCache[key] + routineUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + routineAllColumns, + routinePrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("orm: unable to update routines, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"getstronger\".\"routines\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 1, wl), + strmangle.WhereClause("\"", "\"", len(wl)+1, routinePrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(routineType, routineMapping, append(wl, routinePrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, 
values...) + if err != nil { + return 0, errors.Wrap(err, "orm: unable to update routines row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: failed to get rows affected by update for routines") + } + + if !cached { + routineUpdateCacheMut.Lock() + routineUpdateCache[key] = cache + routineUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. +func (q routineQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "orm: unable to update all for routines") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: unable to retrieve rows affected for routines") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o RoutineSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("orm: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), routinePrimaryKeyMapping) + args = append(args, pkeyArgs...) 
+ } + + sql := fmt.Sprintf("UPDATE \"getstronger\".\"routines\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 1, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, routinePrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "orm: unable to update all in routine slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: unable to retrieve rows affected all in update all routine") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *Routine) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns, opts ...UpsertOptionFunc) error { + if o == nil { + return errors.New("orm: no routines provided for upsert") + } + if !boil.TimestampsAreSkipped(ctx) { + currTime := time.Now().In(boil.GetLocation()) + + if o.CreatedAt.IsZero() { + o.CreatedAt = currTime + } + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(routineColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols 
{ + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + routineUpsertCacheMut.RLock() + cache, cached := routineUpsertCache[key] + routineUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, _ := insertColumns.InsertColumnSet( + routineAllColumns, + routineColumnsWithDefault, + routineColumnsWithoutDefault, + nzDefaults, + ) + + update := updateColumns.UpdateColumnSet( + routineAllColumns, + routinePrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("orm: unable to upsert routines, could not build update column list") + } + + ret := strmangle.SetComplement(routineAllColumns, strmangle.SetIntersect(insert, update)) + + conflict := conflictColumns + if len(conflict) == 0 && updateOnConflict && len(update) != 0 { + if len(routinePrimaryKeyColumns) == 0 { + return errors.New("orm: unable to upsert routines, could not build conflict column list") + } + + conflict = make([]string, len(routinePrimaryKeyColumns)) + copy(conflict, routinePrimaryKeyColumns) + } + cache.query = buildUpsertQueryPostgres(dialect, "\"getstronger\".\"routines\"", updateOnConflict, ret, update, conflict, insert, opts...) 
+ + cache.valueMapping, err = queries.BindMapping(routineType, routineMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(routineType, routineMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "orm: unable to upsert routines") + } + + if !cached { + routineUpsertCacheMut.Lock() + routineUpsertCache[key] = cache + routineUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single Routine record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *Routine) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("orm: no Routine provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), routinePrimaryKeyMapping) + sql := "DELETE FROM \"getstronger\".\"routines\" WHERE \"id\"=$1" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "orm: unable to delete from routines") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: failed to get rows affected by delete for routines") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q routineQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("orm: no routineQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "orm: unable to delete all from routines") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: failed to get rows affected by deleteall for routines") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o RoutineSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(routineBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), routinePrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"getstronger\".\"routines\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 1, routinePrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "orm: unable to delete all from routine slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: failed to get rows affected by deleteall for routines") + } + + if len(routineAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *Routine) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindRoutine(ctx, exec, o.ID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *RoutineSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := RoutineSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), routinePrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"getstronger\".\"routines\".* FROM \"getstronger\".\"routines\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 1, routinePrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "orm: unable to reload all in RoutineSlice") + } + + *o = slice + + return nil +} + +// RoutineExists checks if the Routine row exists. 
+func RoutineExists(ctx context.Context, exec boil.ContextExecutor, iD string) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"getstronger\".\"routines\" where \"id\"=$1 limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, iD) + } + row := exec.QueryRowContext(ctx, sql, iD) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "orm: unable to check if routines exists") + } + + return exists, nil +} + +// Exists checks if the Routine row exists. +func (o *Routine) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + return RoutineExists(ctx, exec, o.ID) +} diff --git a/go/pkg/orm/sets.go b/go/pkg/orm/sets.go new file mode 100644 index 00000000..1828b895 --- /dev/null +++ b/go/pkg/orm/sets.go @@ -0,0 +1,1392 @@ +// Code generated by SQLBoiler 4.16.2 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package orm + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/sqlboiler/v4/types" + "github.com/volatiletech/strmangle" +) + +// Set is an object representing the database table. 
+type Set struct { + ID string `boil:"id" json:"id" toml:"id" yaml:"id"` + WorkoutID string `boil:"workout_id" json:"workout_id" toml:"workout_id" yaml:"workout_id"` + ExerciseID string `boil:"exercise_id" json:"exercise_id" toml:"exercise_id" yaml:"exercise_id"` + Weight types.Decimal `boil:"weight" json:"weight" toml:"weight" yaml:"weight"` + Reps int `boil:"reps" json:"reps" toml:"reps" yaml:"reps"` + CreatedAt time.Time `boil:"created_at" json:"created_at" toml:"created_at" yaml:"created_at"` + + R *setR `boil:"-" json:"-" toml:"-" yaml:"-"` + L setL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var SetColumns = struct { + ID string + WorkoutID string + ExerciseID string + Weight string + Reps string + CreatedAt string +}{ + ID: "id", + WorkoutID: "workout_id", + ExerciseID: "exercise_id", + Weight: "weight", + Reps: "reps", + CreatedAt: "created_at", +} + +var SetTableColumns = struct { + ID string + WorkoutID string + ExerciseID string + Weight string + Reps string + CreatedAt string +}{ + ID: "sets.id", + WorkoutID: "sets.workout_id", + ExerciseID: "sets.exercise_id", + Weight: "sets.weight", + Reps: "sets.reps", + CreatedAt: "sets.created_at", +} + +// Generated where + +type whereHelpertypes_Decimal struct{ field string } + +func (w whereHelpertypes_Decimal) EQ(x types.Decimal) qm.QueryMod { + return qmhelper.Where(w.field, qmhelper.EQ, x) +} +func (w whereHelpertypes_Decimal) NEQ(x types.Decimal) qm.QueryMod { + return qmhelper.Where(w.field, qmhelper.NEQ, x) +} +func (w whereHelpertypes_Decimal) LT(x types.Decimal) qm.QueryMod { + return qmhelper.Where(w.field, qmhelper.LT, x) +} +func (w whereHelpertypes_Decimal) LTE(x types.Decimal) qm.QueryMod { + return qmhelper.Where(w.field, qmhelper.LTE, x) +} +func (w whereHelpertypes_Decimal) GT(x types.Decimal) qm.QueryMod { + return qmhelper.Where(w.field, qmhelper.GT, x) +} +func (w whereHelpertypes_Decimal) GTE(x types.Decimal) qm.QueryMod { + return qmhelper.Where(w.field, qmhelper.GTE, x) +} + +type 
whereHelperint struct{ field string } + +func (w whereHelperint) EQ(x int) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.EQ, x) } +func (w whereHelperint) NEQ(x int) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.NEQ, x) } +func (w whereHelperint) LT(x int) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.LT, x) } +func (w whereHelperint) LTE(x int) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.LTE, x) } +func (w whereHelperint) GT(x int) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.GT, x) } +func (w whereHelperint) GTE(x int) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.GTE, x) } +func (w whereHelperint) IN(slice []int) qm.QueryMod { + values := make([]interface{}, 0, len(slice)) + for _, value := range slice { + values = append(values, value) + } + return qm.WhereIn(fmt.Sprintf("%s IN ?", w.field), values...) +} +func (w whereHelperint) NIN(slice []int) qm.QueryMod { + values := make([]interface{}, 0, len(slice)) + for _, value := range slice { + values = append(values, value) + } + return qm.WhereNotIn(fmt.Sprintf("%s NOT IN ?", w.field), values...) +} + +var SetWhere = struct { + ID whereHelperstring + WorkoutID whereHelperstring + ExerciseID whereHelperstring + Weight whereHelpertypes_Decimal + Reps whereHelperint + CreatedAt whereHelpertime_Time +}{ + ID: whereHelperstring{field: "\"getstronger\".\"sets\".\"id\""}, + WorkoutID: whereHelperstring{field: "\"getstronger\".\"sets\".\"workout_id\""}, + ExerciseID: whereHelperstring{field: "\"getstronger\".\"sets\".\"exercise_id\""}, + Weight: whereHelpertypes_Decimal{field: "\"getstronger\".\"sets\".\"weight\""}, + Reps: whereHelperint{field: "\"getstronger\".\"sets\".\"reps\""}, + CreatedAt: whereHelpertime_Time{field: "\"getstronger\".\"sets\".\"created_at\""}, +} + +// SetRels is where relationship names are stored. 
+var SetRels = struct { + Exercise string + Workout string +}{ + Exercise: "Exercise", + Workout: "Workout", +} + +// setR is where relationships are stored. +type setR struct { + Exercise *Exercise `boil:"Exercise" json:"Exercise" toml:"Exercise" yaml:"Exercise"` + Workout *Workout `boil:"Workout" json:"Workout" toml:"Workout" yaml:"Workout"` +} + +// NewStruct creates a new relationship struct +func (*setR) NewStruct() *setR { + return &setR{} +} + +func (r *setR) GetExercise() *Exercise { + if r == nil { + return nil + } + return r.Exercise +} + +func (r *setR) GetWorkout() *Workout { + if r == nil { + return nil + } + return r.Workout +} + +// setL is where Load methods for each relationship are stored. +type setL struct{} + +var ( + setAllColumns = []string{"id", "workout_id", "exercise_id", "weight", "reps", "created_at"} + setColumnsWithoutDefault = []string{"workout_id", "exercise_id", "weight", "reps"} + setColumnsWithDefault = []string{"id", "created_at"} + setPrimaryKeyColumns = []string{"id"} + setGeneratedColumns = []string{} +) + +type ( + // SetSlice is an alias for a slice of pointers to Set. + // This should almost always be used instead of []Set. + SetSlice []*Set + // SetHook is the signature for custom Set hook methods + SetHook func(context.Context, boil.ContextExecutor, *Set) error + + setQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + setType = reflect.TypeOf(&Set{}) + setMapping = queries.MakeStructMapping(setType) + setPrimaryKeyMapping, _ = queries.BindMapping(setType, setMapping, setPrimaryKeyColumns) + setInsertCacheMut sync.RWMutex + setInsertCache = make(map[string]insertCache) + setUpdateCacheMut sync.RWMutex + setUpdateCache = make(map[string]updateCache) + setUpsertCacheMut sync.RWMutex + setUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. 
+ _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var setAfterSelectMu sync.Mutex +var setAfterSelectHooks []SetHook + +var setBeforeInsertMu sync.Mutex +var setBeforeInsertHooks []SetHook +var setAfterInsertMu sync.Mutex +var setAfterInsertHooks []SetHook + +var setBeforeUpdateMu sync.Mutex +var setBeforeUpdateHooks []SetHook +var setAfterUpdateMu sync.Mutex +var setAfterUpdateHooks []SetHook + +var setBeforeDeleteMu sync.Mutex +var setBeforeDeleteHooks []SetHook +var setAfterDeleteMu sync.Mutex +var setAfterDeleteHooks []SetHook + +var setBeforeUpsertMu sync.Mutex +var setBeforeUpsertHooks []SetHook +var setAfterUpsertMu sync.Mutex +var setAfterUpsertHooks []SetHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *Set) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range setAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *Set) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range setBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *Set) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range setAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. 
+func (o *Set) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range setBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *Set) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range setAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *Set) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range setBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *Set) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range setAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *Set) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range setBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. 
+func (o *Set) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range setAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddSetHook registers your hook function for all future operations. +func AddSetHook(hookPoint boil.HookPoint, setHook SetHook) { + switch hookPoint { + case boil.AfterSelectHook: + setAfterSelectMu.Lock() + setAfterSelectHooks = append(setAfterSelectHooks, setHook) + setAfterSelectMu.Unlock() + case boil.BeforeInsertHook: + setBeforeInsertMu.Lock() + setBeforeInsertHooks = append(setBeforeInsertHooks, setHook) + setBeforeInsertMu.Unlock() + case boil.AfterInsertHook: + setAfterInsertMu.Lock() + setAfterInsertHooks = append(setAfterInsertHooks, setHook) + setAfterInsertMu.Unlock() + case boil.BeforeUpdateHook: + setBeforeUpdateMu.Lock() + setBeforeUpdateHooks = append(setBeforeUpdateHooks, setHook) + setBeforeUpdateMu.Unlock() + case boil.AfterUpdateHook: + setAfterUpdateMu.Lock() + setAfterUpdateHooks = append(setAfterUpdateHooks, setHook) + setAfterUpdateMu.Unlock() + case boil.BeforeDeleteHook: + setBeforeDeleteMu.Lock() + setBeforeDeleteHooks = append(setBeforeDeleteHooks, setHook) + setBeforeDeleteMu.Unlock() + case boil.AfterDeleteHook: + setAfterDeleteMu.Lock() + setAfterDeleteHooks = append(setAfterDeleteHooks, setHook) + setAfterDeleteMu.Unlock() + case boil.BeforeUpsertHook: + setBeforeUpsertMu.Lock() + setBeforeUpsertHooks = append(setBeforeUpsertHooks, setHook) + setBeforeUpsertMu.Unlock() + case boil.AfterUpsertHook: + setAfterUpsertMu.Lock() + setAfterUpsertHooks = append(setAfterUpsertHooks, setHook) + setAfterUpsertMu.Unlock() + } +} + +// One returns a single set record from the query. 
+func (q setQuery) One(ctx context.Context, exec boil.ContextExecutor) (*Set, error) { + o := &Set{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "orm: failed to execute a one query for sets") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all Set records from the query. +func (q setQuery) All(ctx context.Context, exec boil.ContextExecutor) (SetSlice, error) { + var o []*Set + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "orm: failed to assign all query results to Set slice") + } + + if len(setAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all Set records in the query. +func (q setQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "orm: failed to count sets rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q setQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "orm: failed to check if sets exists") + } + + return count > 0, nil +} + +// Exercise pointed to by the foreign key. +func (o *Set) Exercise(mods ...qm.QueryMod) exerciseQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"id\" = ?", o.ExerciseID), + } + + queryMods = append(queryMods, mods...) 
+ + return Exercises(queryMods...) +} + +// Workout pointed to by the foreign key. +func (o *Set) Workout(mods ...qm.QueryMod) workoutQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"id\" = ?", o.WorkoutID), + } + + queryMods = append(queryMods, mods...) + + return Workouts(queryMods...) +} + +// LoadExercise allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (setL) LoadExercise(ctx context.Context, e boil.ContextExecutor, singular bool, maybeSet interface{}, mods queries.Applicator) error { + var slice []*Set + var object *Set + + if singular { + var ok bool + object, ok = maybeSet.(*Set) + if !ok { + object = new(Set) + ok = queries.SetFromEmbeddedStruct(&object, &maybeSet) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", object, maybeSet)) + } + } + } else { + s, ok := maybeSet.(*[]*Set) + if ok { + slice = *s + } else { + ok = queries.SetFromEmbeddedStruct(&slice, maybeSet) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", slice, maybeSet)) + } + } + } + + args := make(map[interface{}]struct{}) + if singular { + if object.R == nil { + object.R = &setR{} + } + args[object.ExerciseID] = struct{}{} + + } else { + for _, obj := range slice { + if obj.R == nil { + obj.R = &setR{} + } + + args[obj.ExerciseID] = struct{}{} + + } + } + + if len(args) == 0 { + return nil + } + + argsSlice := make([]interface{}, len(args)) + i := 0 + for arg := range args { + argsSlice[i] = arg + i++ + } + + query := NewQuery( + qm.From(`getstronger.exercises`), + qm.WhereIn(`getstronger.exercises.id in ?`, argsSlice...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load Exercise") + } + + var resultSlice []*Exercise + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded 
slice Exercise") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for exercises") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for exercises") + } + + if len(exerciseAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Exercise = foreign + if foreign.R == nil { + foreign.R = &exerciseR{} + } + foreign.R.Sets = append(foreign.R.Sets, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.ExerciseID == foreign.ID { + local.R.Exercise = foreign + if foreign.R == nil { + foreign.R = &exerciseR{} + } + foreign.R.Sets = append(foreign.R.Sets, local) + break + } + } + } + + return nil +} + +// LoadWorkout allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. 
+func (setL) LoadWorkout(ctx context.Context, e boil.ContextExecutor, singular bool, maybeSet interface{}, mods queries.Applicator) error { + var slice []*Set + var object *Set + + if singular { + var ok bool + object, ok = maybeSet.(*Set) + if !ok { + object = new(Set) + ok = queries.SetFromEmbeddedStruct(&object, &maybeSet) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", object, maybeSet)) + } + } + } else { + s, ok := maybeSet.(*[]*Set) + if ok { + slice = *s + } else { + ok = queries.SetFromEmbeddedStruct(&slice, maybeSet) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", slice, maybeSet)) + } + } + } + + args := make(map[interface{}]struct{}) + if singular { + if object.R == nil { + object.R = &setR{} + } + args[object.WorkoutID] = struct{}{} + + } else { + for _, obj := range slice { + if obj.R == nil { + obj.R = &setR{} + } + + args[obj.WorkoutID] = struct{}{} + + } + } + + if len(args) == 0 { + return nil + } + + argsSlice := make([]interface{}, len(args)) + i := 0 + for arg := range args { + argsSlice[i] = arg + i++ + } + + query := NewQuery( + qm.From(`getstronger.workouts`), + qm.WhereIn(`getstronger.workouts.id in ?`, argsSlice...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load Workout") + } + + var resultSlice []*Workout + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice Workout") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for workouts") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for workouts") + } + + if len(workoutAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err 
+ } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Workout = foreign + if foreign.R == nil { + foreign.R = &workoutR{} + } + foreign.R.Sets = append(foreign.R.Sets, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.WorkoutID == foreign.ID { + local.R.Workout = foreign + if foreign.R == nil { + foreign.R = &workoutR{} + } + foreign.R.Sets = append(foreign.R.Sets, local) + break + } + } + } + + return nil +} + +// SetExercise of the set to the related item. +// Sets o.R.Exercise to related. +// Adds o to related.R.Sets. +func (o *Set) SetExercise(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Exercise) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"getstronger\".\"sets\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 1, []string{"exercise_id"}), + strmangle.WhereClause("\"", "\"", 2, setPrimaryKeyColumns), + ) + values := []interface{}{related.ID, o.ID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.ExerciseID = related.ID + if o.R == nil { + o.R = &setR{ + Exercise: related, + } + } else { + o.R.Exercise = related + } + + if related.R == nil { + related.R = &exerciseR{ + Sets: SetSlice{o}, + } + } else { + related.R.Sets = append(related.R.Sets, o) + } + + return nil +} + +// SetWorkout of the set to the related item. +// Sets o.R.Workout to related. +// Adds o to related.R.Sets. 
+func (o *Set) SetWorkout(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Workout) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"getstronger\".\"sets\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 1, []string{"workout_id"}), + strmangle.WhereClause("\"", "\"", 2, setPrimaryKeyColumns), + ) + values := []interface{}{related.ID, o.ID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.WorkoutID = related.ID + if o.R == nil { + o.R = &setR{ + Workout: related, + } + } else { + o.R.Workout = related + } + + if related.R == nil { + related.R = &workoutR{ + Sets: SetSlice{o}, + } + } else { + related.R.Sets = append(related.R.Sets, o) + } + + return nil +} + +// Sets retrieves all the records using an executor. +func Sets(mods ...qm.QueryMod) setQuery { + mods = append(mods, qm.From("\"getstronger\".\"sets\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"getstronger\".\"sets\".*"}) + } + + return setQuery{q} +} + +// FindSet retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindSet(ctx context.Context, exec boil.ContextExecutor, iD string, selectCols ...string) (*Set, error) { + setObj := &Set{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"getstronger\".\"sets\" where \"id\"=$1", sel, + ) + + q := queries.Raw(query, iD) + + err := q.Bind(ctx, exec, setObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "orm: unable to select from sets") + } + + if err = setObj.doAfterSelectHooks(ctx, exec); err != nil { + return setObj, err + } + + return setObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *Set) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("orm: no sets provided for insertion") + } + + var err error + if !boil.TimestampsAreSkipped(ctx) { + currTime := time.Now().In(boil.GetLocation()) + + if o.CreatedAt.IsZero() { + o.CreatedAt = currTime + } + } + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(setColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + setInsertCacheMut.RLock() + cache, cached := setInsertCache[key] + setInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + setAllColumns, + setColumnsWithDefault, + setColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(setType, setMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(setType, setMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = fmt.Sprintf("INSERT INTO \"getstronger\".\"sets\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, 
"\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"getstronger\".\"sets\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "orm: unable to insert into sets") + } + + if !cached { + setInsertCacheMut.Lock() + setInsertCache[key] = cache + setInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the Set. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *Set) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + setUpdateCacheMut.RLock() + cache, cached := setUpdateCache[key] + setUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + setAllColumns, + setPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("orm: unable to update sets, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"getstronger\".\"sets\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 1, wl), + strmangle.WhereClause("\"", "\"", len(wl)+1, setPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(setType, setMapping, append(wl, setPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "orm: unable to update sets row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: failed to get rows affected by update for sets") + } + + if !cached { + setUpdateCacheMut.Lock() + setUpdateCache[key] = cache + setUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
+func (q setQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "orm: unable to update all for sets") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: unable to retrieve rows affected for sets") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o SetSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("orm: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), setPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"getstronger\".\"sets\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 1, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, setPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "orm: unable to update all in set slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: unable to retrieve rows affected all in update all set") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. 
+// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *Set) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns, opts ...UpsertOptionFunc) error { + if o == nil { + return errors.New("orm: no sets provided for upsert") + } + if !boil.TimestampsAreSkipped(ctx) { + currTime := time.Now().In(boil.GetLocation()) + + if o.CreatedAt.IsZero() { + o.CreatedAt = currTime + } + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(setColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + setUpsertCacheMut.RLock() + cache, cached := setUpsertCache[key] + setUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, _ := insertColumns.InsertColumnSet( + setAllColumns, + setColumnsWithDefault, + setColumnsWithoutDefault, + nzDefaults, + ) + + update := updateColumns.UpdateColumnSet( + setAllColumns, + setPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("orm: unable to upsert sets, could not build update column list") + } + + ret := strmangle.SetComplement(setAllColumns, strmangle.SetIntersect(insert, update)) + + conflict := conflictColumns + if len(conflict) == 0 && updateOnConflict && 
len(update) != 0 { + if len(setPrimaryKeyColumns) == 0 { + return errors.New("orm: unable to upsert sets, could not build conflict column list") + } + + conflict = make([]string, len(setPrimaryKeyColumns)) + copy(conflict, setPrimaryKeyColumns) + } + cache.query = buildUpsertQueryPostgres(dialect, "\"getstronger\".\"sets\"", updateOnConflict, ret, update, conflict, insert, opts...) + + cache.valueMapping, err = queries.BindMapping(setType, setMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(setType, setMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + if err != nil { + return errors.Wrap(err, "orm: unable to upsert sets") + } + + if !cached { + setUpsertCacheMut.Lock() + setUpsertCache[key] = cache + setUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single Set record with an executor. +// Delete will match against the primary key column to find the record to delete. 
+func (o *Set) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("orm: no Set provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), setPrimaryKeyMapping) + sql := "DELETE FROM \"getstronger\".\"sets\" WHERE \"id\"=$1" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "orm: unable to delete from sets") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: failed to get rows affected by delete for sets") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. +func (q setQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("orm: no setQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "orm: unable to delete all from sets") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: failed to get rows affected by deleteall for sets") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. 
+func (o SetSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(setBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), setPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"getstronger\".\"sets\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 1, setPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "orm: unable to delete all from set slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: failed to get rows affected by deleteall for sets") + } + + if len(setAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. +func (o *Set) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindSet(ctx, exec, o.ID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. 
+func (o *SetSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := SetSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), setPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"getstronger\".\"sets\".* FROM \"getstronger\".\"sets\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 1, setPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "orm: unable to reload all in SetSlice") + } + + *o = slice + + return nil +} + +// SetExists checks if the Set row exists. +func SetExists(ctx context.Context, exec boil.ContextExecutor, iD string) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"getstronger\".\"sets\" where \"id\"=$1 limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, iD) + } + row := exec.QueryRowContext(ctx, sql, iD) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "orm: unable to check if sets exists") + } + + return exists, nil +} + +// Exists checks if the Set row exists. +func (o *Set) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + return SetExists(ctx, exec, o.ID) +} diff --git a/pkg/orm/users.go b/go/pkg/orm/users.go similarity index 56% rename from pkg/orm/users.go rename to go/pkg/orm/users.go index fb764717..1390ae7a 100644 --- a/pkg/orm/users.go +++ b/go/pkg/orm/users.go @@ -1,4 +1,4 @@ -// Code generated by SQLBoiler 4.16.1 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// Code generated by SQLBoiler 4.16.2 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. // This file is meant to be re-generated in place and/or deleted at any time. 
package orm @@ -14,7 +14,6 @@ import ( "time" "github.com/friendsofgo/errors" - "github.com/volatiletech/null/v8" "github.com/volatiletech/sqlboiler/v4/boil" "github.com/volatiletech/sqlboiler/v4/queries" "github.com/volatiletech/sqlboiler/v4/queries/qm" @@ -25,9 +24,9 @@ import ( // User is an object representing the database table. type User struct { ID string `boil:"id" json:"id" toml:"id" yaml:"id"` - Email string `boil:"email" json:"email" toml:"email" yaml:"email"` - Password string `boil:"password" json:"password" toml:"password" yaml:"password"` - CreatedAt null.Time `boil:"created_at" json:"created_at,omitempty" toml:"created_at" yaml:"created_at,omitempty"` + AuthID string `boil:"auth_id" json:"auth_id" toml:"auth_id" yaml:"auth_id"` + Name string `boil:"name" json:"name" toml:"name" yaml:"name"` + CreatedAt time.Time `boil:"created_at" json:"created_at" toml:"created_at" yaml:"created_at"` R *userR `boil:"-" json:"-" toml:"-" yaml:"-"` L userL `boil:"-" json:"-" toml:"-" yaml:"-"` @@ -35,99 +34,61 @@ type User struct { var UserColumns = struct { ID string - Email string - Password string + AuthID string + Name string CreatedAt string }{ ID: "id", - Email: "email", - Password: "password", + AuthID: "auth_id", + Name: "name", CreatedAt: "created_at", } var UserTableColumns = struct { ID string - Email string - Password string + AuthID string + Name string CreatedAt string }{ ID: "users.id", - Email: "users.email", - Password: "users.password", + AuthID: "users.auth_id", + Name: "users.name", CreatedAt: "users.created_at", } // Generated where -type whereHelperstring struct{ field string } - -func (w whereHelperstring) EQ(x string) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.EQ, x) } -func (w whereHelperstring) NEQ(x string) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.NEQ, x) } -func (w whereHelperstring) LT(x string) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.LT, x) } -func (w whereHelperstring) LTE(x string) qm.QueryMod { 
return qmhelper.Where(w.field, qmhelper.LTE, x) } -func (w whereHelperstring) GT(x string) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.GT, x) } -func (w whereHelperstring) GTE(x string) qm.QueryMod { return qmhelper.Where(w.field, qmhelper.GTE, x) } -func (w whereHelperstring) LIKE(x string) qm.QueryMod { return qm.Where(w.field+" LIKE ?", x) } -func (w whereHelperstring) NLIKE(x string) qm.QueryMod { return qm.Where(w.field+" NOT LIKE ?", x) } -func (w whereHelperstring) ILIKE(x string) qm.QueryMod { return qm.Where(w.field+" ILIKE ?", x) } -func (w whereHelperstring) NILIKE(x string) qm.QueryMod { return qm.Where(w.field+" NOT ILIKE ?", x) } -func (w whereHelperstring) IN(slice []string) qm.QueryMod { - values := make([]interface{}, 0, len(slice)) - for _, value := range slice { - values = append(values, value) - } - return qm.WhereIn(fmt.Sprintf("%s IN ?", w.field), values...) -} -func (w whereHelperstring) NIN(slice []string) qm.QueryMod { - values := make([]interface{}, 0, len(slice)) - for _, value := range slice { - values = append(values, value) - } - return qm.WhereNotIn(fmt.Sprintf("%s NOT IN ?", w.field), values...) 
-} - -type whereHelpernull_Time struct{ field string } - -func (w whereHelpernull_Time) EQ(x null.Time) qm.QueryMod { - return qmhelper.WhereNullEQ(w.field, false, x) -} -func (w whereHelpernull_Time) NEQ(x null.Time) qm.QueryMod { - return qmhelper.WhereNullEQ(w.field, true, x) -} -func (w whereHelpernull_Time) LT(x null.Time) qm.QueryMod { - return qmhelper.Where(w.field, qmhelper.LT, x) -} -func (w whereHelpernull_Time) LTE(x null.Time) qm.QueryMod { - return qmhelper.Where(w.field, qmhelper.LTE, x) -} -func (w whereHelpernull_Time) GT(x null.Time) qm.QueryMod { - return qmhelper.Where(w.field, qmhelper.GT, x) -} -func (w whereHelpernull_Time) GTE(x null.Time) qm.QueryMod { - return qmhelper.Where(w.field, qmhelper.GTE, x) -} - -func (w whereHelpernull_Time) IsNull() qm.QueryMod { return qmhelper.WhereIsNull(w.field) } -func (w whereHelpernull_Time) IsNotNull() qm.QueryMod { return qmhelper.WhereIsNotNull(w.field) } - var UserWhere = struct { ID whereHelperstring - Email whereHelperstring - Password whereHelperstring - CreatedAt whereHelpernull_Time + AuthID whereHelperstring + Name whereHelperstring + CreatedAt whereHelpertime_Time }{ ID: whereHelperstring{field: "\"getstronger\".\"users\".\"id\""}, - Email: whereHelperstring{field: "\"getstronger\".\"users\".\"email\""}, - Password: whereHelperstring{field: "\"getstronger\".\"users\".\"password\""}, - CreatedAt: whereHelpernull_Time{field: "\"getstronger\".\"users\".\"created_at\""}, + AuthID: whereHelperstring{field: "\"getstronger\".\"users\".\"auth_id\""}, + Name: whereHelperstring{field: "\"getstronger\".\"users\".\"name\""}, + CreatedAt: whereHelpertime_Time{field: "\"getstronger\".\"users\".\"created_at\""}, } // UserRels is where relationship names are stored. var UserRels = struct { -}{} + Auth string + Exercises string + Routines string + Workouts string +}{ + Auth: "Auth", + Exercises: "Exercises", + Routines: "Routines", + Workouts: "Workouts", +} // userR is where relationships are stored. 
type userR struct { + Auth *Auth `boil:"Auth" json:"Auth" toml:"Auth" yaml:"Auth"` + Exercises ExerciseSlice `boil:"Exercises" json:"Exercises" toml:"Exercises" yaml:"Exercises"` + Routines RoutineSlice `boil:"Routines" json:"Routines" toml:"Routines" yaml:"Routines"` + Workouts WorkoutSlice `boil:"Workouts" json:"Workouts" toml:"Workouts" yaml:"Workouts"` } // NewStruct creates a new relationship struct @@ -135,13 +96,41 @@ func (*userR) NewStruct() *userR { return &userR{} } +func (r *userR) GetAuth() *Auth { + if r == nil { + return nil + } + return r.Auth +} + +func (r *userR) GetExercises() ExerciseSlice { + if r == nil { + return nil + } + return r.Exercises +} + +func (r *userR) GetRoutines() RoutineSlice { + if r == nil { + return nil + } + return r.Routines +} + +func (r *userR) GetWorkouts() WorkoutSlice { + if r == nil { + return nil + } + return r.Workouts +} + // userL is where Load methods for each relationship are stored. type userL struct{} var ( - userAllColumns = []string{"id", "email", "password", "created_at"} - userColumnsWithoutDefault = []string{"id", "email", "password"} - userColumnsWithDefault = []string{"created_at"} + userAllColumns = []string{"id", "auth_id", "name", "created_at"} + userColumnsWithoutDefault = []string{"auth_id", "name"} + userColumnsWithDefault = []string{"id", "created_at"} userPrimaryKeyColumns = []string{"id"} userGeneratedColumns = []string{} ) @@ -451,6 +440,724 @@ func (q userQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, return count > 0, nil } +// Auth pointed to by the foreign key. +func (o *User) Auth(mods ...qm.QueryMod) authQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"id\" = ?", o.AuthID), + } + + queryMods = append(queryMods, mods...) + + return Auths(queryMods...) +} + +// Exercises retrieves all the exercise's Exercises with an executor. 
+func (o *User) Exercises(mods ...qm.QueryMod) exerciseQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"getstronger\".\"exercises\".\"user_id\"=?", o.ID), + ) + + return Exercises(queryMods...) +} + +// Routines retrieves all the routine's Routines with an executor. +func (o *User) Routines(mods ...qm.QueryMod) routineQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"getstronger\".\"routines\".\"user_id\"=?", o.ID), + ) + + return Routines(queryMods...) +} + +// Workouts retrieves all the workout's Workouts with an executor. +func (o *User) Workouts(mods ...qm.QueryMod) workoutQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"getstronger\".\"workouts\".\"user_id\"=?", o.ID), + ) + + return Workouts(queryMods...) +} + +// LoadAuth allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. 
+func (userL) LoadAuth(ctx context.Context, e boil.ContextExecutor, singular bool, maybeUser interface{}, mods queries.Applicator) error { + var slice []*User + var object *User + + if singular { + var ok bool + object, ok = maybeUser.(*User) + if !ok { + object = new(User) + ok = queries.SetFromEmbeddedStruct(&object, &maybeUser) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", object, maybeUser)) + } + } + } else { + s, ok := maybeUser.(*[]*User) + if ok { + slice = *s + } else { + ok = queries.SetFromEmbeddedStruct(&slice, maybeUser) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", slice, maybeUser)) + } + } + } + + args := make(map[interface{}]struct{}) + if singular { + if object.R == nil { + object.R = &userR{} + } + args[object.AuthID] = struct{}{} + + } else { + for _, obj := range slice { + if obj.R == nil { + obj.R = &userR{} + } + + args[obj.AuthID] = struct{}{} + + } + } + + if len(args) == 0 { + return nil + } + + argsSlice := make([]interface{}, len(args)) + i := 0 + for arg := range args { + argsSlice[i] = arg + i++ + } + + query := NewQuery( + qm.From(`getstronger.auth`), + qm.WhereIn(`getstronger.auth.id in ?`, argsSlice...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load Auth") + } + + var resultSlice []*Auth + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice Auth") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for auth") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for auth") + } + + if len(authAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if 
len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.Auth = foreign + if foreign.R == nil { + foreign.R = &authR{} + } + foreign.R.Users = append(foreign.R.Users, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.AuthID == foreign.ID { + local.R.Auth = foreign + if foreign.R == nil { + foreign.R = &authR{} + } + foreign.R.Users = append(foreign.R.Users, local) + break + } + } + } + + return nil +} + +// LoadExercises allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (userL) LoadExercises(ctx context.Context, e boil.ContextExecutor, singular bool, maybeUser interface{}, mods queries.Applicator) error { + var slice []*User + var object *User + + if singular { + var ok bool + object, ok = maybeUser.(*User) + if !ok { + object = new(User) + ok = queries.SetFromEmbeddedStruct(&object, &maybeUser) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", object, maybeUser)) + } + } + } else { + s, ok := maybeUser.(*[]*User) + if ok { + slice = *s + } else { + ok = queries.SetFromEmbeddedStruct(&slice, maybeUser) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", slice, maybeUser)) + } + } + } + + args := make(map[interface{}]struct{}) + if singular { + if object.R == nil { + object.R = &userR{} + } + args[object.ID] = struct{}{} + } else { + for _, obj := range slice { + if obj.R == nil { + obj.R = &userR{} + } + args[obj.ID] = struct{}{} + } + } + + if len(args) == 0 { + return nil + } + + argsSlice := make([]interface{}, len(args)) + i := 0 + for arg := range args { + argsSlice[i] = arg + i++ + } + + query := NewQuery( + qm.From(`getstronger.exercises`), + qm.WhereIn(`getstronger.exercises.user_id in ?`, argsSlice...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != 
nil { + return errors.Wrap(err, "failed to eager load exercises") + } + + var resultSlice []*Exercise + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice exercises") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on exercises") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for exercises") + } + + if len(exerciseAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.Exercises = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &exerciseR{} + } + foreign.R.User = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.ID == foreign.UserID { + local.R.Exercises = append(local.R.Exercises, foreign) + if foreign.R == nil { + foreign.R = &exerciseR{} + } + foreign.R.User = local + break + } + } + } + + return nil +} + +// LoadRoutines allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. 
+func (userL) LoadRoutines(ctx context.Context, e boil.ContextExecutor, singular bool, maybeUser interface{}, mods queries.Applicator) error { + var slice []*User + var object *User + + if singular { + var ok bool + object, ok = maybeUser.(*User) + if !ok { + object = new(User) + ok = queries.SetFromEmbeddedStruct(&object, &maybeUser) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", object, maybeUser)) + } + } + } else { + s, ok := maybeUser.(*[]*User) + if ok { + slice = *s + } else { + ok = queries.SetFromEmbeddedStruct(&slice, maybeUser) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", slice, maybeUser)) + } + } + } + + args := make(map[interface{}]struct{}) + if singular { + if object.R == nil { + object.R = &userR{} + } + args[object.ID] = struct{}{} + } else { + for _, obj := range slice { + if obj.R == nil { + obj.R = &userR{} + } + args[obj.ID] = struct{}{} + } + } + + if len(args) == 0 { + return nil + } + + argsSlice := make([]interface{}, len(args)) + i := 0 + for arg := range args { + argsSlice[i] = arg + i++ + } + + query := NewQuery( + qm.From(`getstronger.routines`), + qm.WhereIn(`getstronger.routines.user_id in ?`, argsSlice...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load routines") + } + + var resultSlice []*Routine + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice routines") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on routines") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for routines") + } + + if len(routineAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return 
err + } + } + } + if singular { + object.R.Routines = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &routineR{} + } + foreign.R.User = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.ID == foreign.UserID { + local.R.Routines = append(local.R.Routines, foreign) + if foreign.R == nil { + foreign.R = &routineR{} + } + foreign.R.User = local + break + } + } + } + + return nil +} + +// LoadWorkouts allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. +func (userL) LoadWorkouts(ctx context.Context, e boil.ContextExecutor, singular bool, maybeUser interface{}, mods queries.Applicator) error { + var slice []*User + var object *User + + if singular { + var ok bool + object, ok = maybeUser.(*User) + if !ok { + object = new(User) + ok = queries.SetFromEmbeddedStruct(&object, &maybeUser) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", object, maybeUser)) + } + } + } else { + s, ok := maybeUser.(*[]*User) + if ok { + slice = *s + } else { + ok = queries.SetFromEmbeddedStruct(&slice, maybeUser) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", slice, maybeUser)) + } + } + } + + args := make(map[interface{}]struct{}) + if singular { + if object.R == nil { + object.R = &userR{} + } + args[object.ID] = struct{}{} + } else { + for _, obj := range slice { + if obj.R == nil { + obj.R = &userR{} + } + args[obj.ID] = struct{}{} + } + } + + if len(args) == 0 { + return nil + } + + argsSlice := make([]interface{}, len(args)) + i := 0 + for arg := range args { + argsSlice[i] = arg + i++ + } + + query := NewQuery( + qm.From(`getstronger.workouts`), + qm.WhereIn(`getstronger.workouts.user_id in ?`, argsSlice...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return 
errors.Wrap(err, "failed to eager load workouts") + } + + var resultSlice []*Workout + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice workouts") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on workouts") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for workouts") + } + + if len(workoutAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + if singular { + object.R.Workouts = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &workoutR{} + } + foreign.R.User = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.ID == foreign.UserID { + local.R.Workouts = append(local.R.Workouts, foreign) + if foreign.R == nil { + foreign.R = &workoutR{} + } + foreign.R.User = local + break + } + } + } + + return nil +} + +// SetAuth of the user to the related item. +// Sets o.R.Auth to related. +// Adds o to related.R.Users. 
+func (o *User) SetAuth(ctx context.Context, exec boil.ContextExecutor, insert bool, related *Auth) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"getstronger\".\"users\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 1, []string{"auth_id"}), + strmangle.WhereClause("\"", "\"", 2, userPrimaryKeyColumns), + ) + values := []interface{}{related.ID, o.ID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.AuthID = related.ID + if o.R == nil { + o.R = &userR{ + Auth: related, + } + } else { + o.R.Auth = related + } + + if related.R == nil { + related.R = &authR{ + Users: UserSlice{o}, + } + } else { + related.R.Users = append(related.R.Users, o) + } + + return nil +} + +// AddExercises adds the given related objects to the existing relationships +// of the user, optionally inserting them as new records. +// Appends related to o.R.Exercises. +// Sets related.R.User appropriately. 
+func (o *User) AddExercises(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Exercise) error { + var err error + for _, rel := range related { + if insert { + rel.UserID = o.ID + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"getstronger\".\"exercises\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 1, []string{"user_id"}), + strmangle.WhereClause("\"", "\"", 2, exercisePrimaryKeyColumns), + ) + values := []interface{}{o.ID, rel.ID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.UserID = o.ID + } + } + + if o.R == nil { + o.R = &userR{ + Exercises: related, + } + } else { + o.R.Exercises = append(o.R.Exercises, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &exerciseR{ + User: o, + } + } else { + rel.R.User = o + } + } + return nil +} + +// AddRoutines adds the given related objects to the existing relationships +// of the user, optionally inserting them as new records. +// Appends related to o.R.Routines. +// Sets related.R.User appropriately. 
+func (o *User) AddRoutines(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Routine) error { + var err error + for _, rel := range related { + if insert { + rel.UserID = o.ID + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"getstronger\".\"routines\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 1, []string{"user_id"}), + strmangle.WhereClause("\"", "\"", 2, routinePrimaryKeyColumns), + ) + values := []interface{}{o.ID, rel.ID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.UserID = o.ID + } + } + + if o.R == nil { + o.R = &userR{ + Routines: related, + } + } else { + o.R.Routines = append(o.R.Routines, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &routineR{ + User: o, + } + } else { + rel.R.User = o + } + } + return nil +} + +// AddWorkouts adds the given related objects to the existing relationships +// of the user, optionally inserting them as new records. +// Appends related to o.R.Workouts. +// Sets related.R.User appropriately. 
+func (o *User) AddWorkouts(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Workout) error { + var err error + for _, rel := range related { + if insert { + rel.UserID = o.ID + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"getstronger\".\"workouts\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 1, []string{"user_id"}), + strmangle.WhereClause("\"", "\"", 2, workoutPrimaryKeyColumns), + ) + values := []interface{}{o.ID, rel.ID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.UserID = o.ID + } + } + + if o.R == nil { + o.R = &userR{ + Workouts: related, + } + } else { + o.R.Workouts = append(o.R.Workouts, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &workoutR{ + User: o, + } + } else { + rel.R.User = o + } + } + return nil +} + // Users retrieves all the records using an executor. 
func Users(mods ...qm.QueryMod) userQuery { mods = append(mods, qm.From("\"getstronger\".\"users\"")) @@ -503,8 +1210,8 @@ func (o *User) Insert(ctx context.Context, exec boil.ContextExecutor, columns bo if !boil.TimestampsAreSkipped(ctx) { currTime := time.Now().In(boil.GetLocation()) - if queries.MustTime(o.CreatedAt).IsZero() { - queries.SetScanner(&o.CreatedAt, currTime) + if o.CreatedAt.IsZero() { + o.CreatedAt = currTime } } @@ -715,8 +1422,8 @@ func (o *User) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnCo if !boil.TimestampsAreSkipped(ctx) { currTime := time.Now().In(boil.GetLocation()) - if queries.MustTime(o.CreatedAt).IsZero() { - queries.SetScanner(&o.CreatedAt, currTime) + if o.CreatedAt.IsZero() { + o.CreatedAt = currTime } } diff --git a/go/pkg/orm/workouts.go b/go/pkg/orm/workouts.go new file mode 100644 index 00000000..7c41a083 --- /dev/null +++ b/go/pkg/orm/workouts.go @@ -0,0 +1,1335 @@ +// Code generated by SQLBoiler 4.16.2 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. +// This file is meant to be re-generated in place and/or deleted at any time. + +package orm + +import ( + "context" + "database/sql" + "fmt" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/friendsofgo/errors" + "github.com/volatiletech/sqlboiler/v4/boil" + "github.com/volatiletech/sqlboiler/v4/queries" + "github.com/volatiletech/sqlboiler/v4/queries/qm" + "github.com/volatiletech/sqlboiler/v4/queries/qmhelper" + "github.com/volatiletech/strmangle" +) + +// Workout is an object representing the database table. 
+type Workout struct { + ID string `boil:"id" json:"id" toml:"id" yaml:"id"` + UserID string `boil:"user_id" json:"user_id" toml:"user_id" yaml:"user_id"` + Date time.Time `boil:"date" json:"date" toml:"date" yaml:"date"` + CreatedAt time.Time `boil:"created_at" json:"created_at" toml:"created_at" yaml:"created_at"` + + R *workoutR `boil:"-" json:"-" toml:"-" yaml:"-"` + L workoutL `boil:"-" json:"-" toml:"-" yaml:"-"` +} + +var WorkoutColumns = struct { + ID string + UserID string + Date string + CreatedAt string +}{ + ID: "id", + UserID: "user_id", + Date: "date", + CreatedAt: "created_at", +} + +var WorkoutTableColumns = struct { + ID string + UserID string + Date string + CreatedAt string +}{ + ID: "workouts.id", + UserID: "workouts.user_id", + Date: "workouts.date", + CreatedAt: "workouts.created_at", +} + +// Generated where + +var WorkoutWhere = struct { + ID whereHelperstring + UserID whereHelperstring + Date whereHelpertime_Time + CreatedAt whereHelpertime_Time +}{ + ID: whereHelperstring{field: "\"getstronger\".\"workouts\".\"id\""}, + UserID: whereHelperstring{field: "\"getstronger\".\"workouts\".\"user_id\""}, + Date: whereHelpertime_Time{field: "\"getstronger\".\"workouts\".\"date\""}, + CreatedAt: whereHelpertime_Time{field: "\"getstronger\".\"workouts\".\"created_at\""}, +} + +// WorkoutRels is where relationship names are stored. +var WorkoutRels = struct { + User string + Sets string +}{ + User: "User", + Sets: "Sets", +} + +// workoutR is where relationships are stored. 
+type workoutR struct { + User *User `boil:"User" json:"User" toml:"User" yaml:"User"` + Sets SetSlice `boil:"Sets" json:"Sets" toml:"Sets" yaml:"Sets"` +} + +// NewStruct creates a new relationship struct +func (*workoutR) NewStruct() *workoutR { + return &workoutR{} +} + +func (r *workoutR) GetUser() *User { + if r == nil { + return nil + } + return r.User +} + +func (r *workoutR) GetSets() SetSlice { + if r == nil { + return nil + } + return r.Sets +} + +// workoutL is where Load methods for each relationship are stored. +type workoutL struct{} + +var ( + workoutAllColumns = []string{"id", "user_id", "date", "created_at"} + workoutColumnsWithoutDefault = []string{"user_id", "date"} + workoutColumnsWithDefault = []string{"id", "created_at"} + workoutPrimaryKeyColumns = []string{"id"} + workoutGeneratedColumns = []string{} +) + +type ( + // WorkoutSlice is an alias for a slice of pointers to Workout. + // This should almost always be used instead of []Workout. + WorkoutSlice []*Workout + // WorkoutHook is the signature for custom Workout hook methods + WorkoutHook func(context.Context, boil.ContextExecutor, *Workout) error + + workoutQuery struct { + *queries.Query + } +) + +// Cache for insert, update and upsert +var ( + workoutType = reflect.TypeOf(&Workout{}) + workoutMapping = queries.MakeStructMapping(workoutType) + workoutPrimaryKeyMapping, _ = queries.BindMapping(workoutType, workoutMapping, workoutPrimaryKeyColumns) + workoutInsertCacheMut sync.RWMutex + workoutInsertCache = make(map[string]insertCache) + workoutUpdateCacheMut sync.RWMutex + workoutUpdateCache = make(map[string]updateCache) + workoutUpsertCacheMut sync.RWMutex + workoutUpsertCache = make(map[string]insertCache) +) + +var ( + // Force time package dependency for automated UpdatedAt/CreatedAt. 
+ _ = time.Second + // Force qmhelper dependency for where clause generation (which doesn't + // always happen) + _ = qmhelper.Where +) + +var workoutAfterSelectMu sync.Mutex +var workoutAfterSelectHooks []WorkoutHook + +var workoutBeforeInsertMu sync.Mutex +var workoutBeforeInsertHooks []WorkoutHook +var workoutAfterInsertMu sync.Mutex +var workoutAfterInsertHooks []WorkoutHook + +var workoutBeforeUpdateMu sync.Mutex +var workoutBeforeUpdateHooks []WorkoutHook +var workoutAfterUpdateMu sync.Mutex +var workoutAfterUpdateHooks []WorkoutHook + +var workoutBeforeDeleteMu sync.Mutex +var workoutBeforeDeleteHooks []WorkoutHook +var workoutAfterDeleteMu sync.Mutex +var workoutAfterDeleteHooks []WorkoutHook + +var workoutBeforeUpsertMu sync.Mutex +var workoutBeforeUpsertHooks []WorkoutHook +var workoutAfterUpsertMu sync.Mutex +var workoutAfterUpsertHooks []WorkoutHook + +// doAfterSelectHooks executes all "after Select" hooks. +func (o *Workout) doAfterSelectHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range workoutAfterSelectHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeInsertHooks executes all "before insert" hooks. +func (o *Workout) doBeforeInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range workoutBeforeInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterInsertHooks executes all "after Insert" hooks. +func (o *Workout) doAfterInsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range workoutAfterInsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpdateHooks executes all "before Update" hooks. 
+func (o *Workout) doBeforeUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range workoutBeforeUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpdateHooks executes all "after Update" hooks. +func (o *Workout) doAfterUpdateHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range workoutAfterUpdateHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeDeleteHooks executes all "before Delete" hooks. +func (o *Workout) doBeforeDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range workoutBeforeDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterDeleteHooks executes all "after Delete" hooks. +func (o *Workout) doAfterDeleteHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range workoutAfterDeleteHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doBeforeUpsertHooks executes all "before Upsert" hooks. +func (o *Workout) doBeforeUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range workoutBeforeUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// doAfterUpsertHooks executes all "after Upsert" hooks. 
+func (o *Workout) doAfterUpsertHooks(ctx context.Context, exec boil.ContextExecutor) (err error) { + if boil.HooksAreSkipped(ctx) { + return nil + } + + for _, hook := range workoutAfterUpsertHooks { + if err := hook(ctx, exec, o); err != nil { + return err + } + } + + return nil +} + +// AddWorkoutHook registers your hook function for all future operations. +func AddWorkoutHook(hookPoint boil.HookPoint, workoutHook WorkoutHook) { + switch hookPoint { + case boil.AfterSelectHook: + workoutAfterSelectMu.Lock() + workoutAfterSelectHooks = append(workoutAfterSelectHooks, workoutHook) + workoutAfterSelectMu.Unlock() + case boil.BeforeInsertHook: + workoutBeforeInsertMu.Lock() + workoutBeforeInsertHooks = append(workoutBeforeInsertHooks, workoutHook) + workoutBeforeInsertMu.Unlock() + case boil.AfterInsertHook: + workoutAfterInsertMu.Lock() + workoutAfterInsertHooks = append(workoutAfterInsertHooks, workoutHook) + workoutAfterInsertMu.Unlock() + case boil.BeforeUpdateHook: + workoutBeforeUpdateMu.Lock() + workoutBeforeUpdateHooks = append(workoutBeforeUpdateHooks, workoutHook) + workoutBeforeUpdateMu.Unlock() + case boil.AfterUpdateHook: + workoutAfterUpdateMu.Lock() + workoutAfterUpdateHooks = append(workoutAfterUpdateHooks, workoutHook) + workoutAfterUpdateMu.Unlock() + case boil.BeforeDeleteHook: + workoutBeforeDeleteMu.Lock() + workoutBeforeDeleteHooks = append(workoutBeforeDeleteHooks, workoutHook) + workoutBeforeDeleteMu.Unlock() + case boil.AfterDeleteHook: + workoutAfterDeleteMu.Lock() + workoutAfterDeleteHooks = append(workoutAfterDeleteHooks, workoutHook) + workoutAfterDeleteMu.Unlock() + case boil.BeforeUpsertHook: + workoutBeforeUpsertMu.Lock() + workoutBeforeUpsertHooks = append(workoutBeforeUpsertHooks, workoutHook) + workoutBeforeUpsertMu.Unlock() + case boil.AfterUpsertHook: + workoutAfterUpsertMu.Lock() + workoutAfterUpsertHooks = append(workoutAfterUpsertHooks, workoutHook) + workoutAfterUpsertMu.Unlock() + } +} + +// One returns a single workout 
record from the query. +func (q workoutQuery) One(ctx context.Context, exec boil.ContextExecutor) (*Workout, error) { + o := &Workout{} + + queries.SetLimit(q.Query, 1) + + err := q.Bind(ctx, exec, o) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "orm: failed to execute a one query for workouts") + } + + if err := o.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + + return o, nil +} + +// All returns all Workout records from the query. +func (q workoutQuery) All(ctx context.Context, exec boil.ContextExecutor) (WorkoutSlice, error) { + var o []*Workout + + err := q.Bind(ctx, exec, &o) + if err != nil { + return nil, errors.Wrap(err, "orm: failed to assign all query results to Workout slice") + } + + if len(workoutAfterSelectHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterSelectHooks(ctx, exec); err != nil { + return o, err + } + } + } + + return o, nil +} + +// Count returns the count of all Workout records in the query. +func (q workoutQuery) Count(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return 0, errors.Wrap(err, "orm: failed to count workouts rows") + } + + return count, nil +} + +// Exists checks if the row exists in the table. +func (q workoutQuery) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + var count int64 + + queries.SetSelect(q.Query, nil) + queries.SetCount(q.Query) + queries.SetLimit(q.Query, 1) + + err := q.Query.QueryRowContext(ctx, exec).Scan(&count) + if err != nil { + return false, errors.Wrap(err, "orm: failed to check if workouts exists") + } + + return count > 0, nil +} + +// User pointed to by the foreign key. 
+func (o *Workout) User(mods ...qm.QueryMod) userQuery { + queryMods := []qm.QueryMod{ + qm.Where("\"id\" = ?", o.UserID), + } + + queryMods = append(queryMods, mods...) + + return Users(queryMods...) +} + +// Sets retrieves all the set's Sets with an executor. +func (o *Workout) Sets(mods ...qm.QueryMod) setQuery { + var queryMods []qm.QueryMod + if len(mods) != 0 { + queryMods = append(queryMods, mods...) + } + + queryMods = append(queryMods, + qm.Where("\"getstronger\".\"sets\".\"workout_id\"=?", o.ID), + ) + + return Sets(queryMods...) +} + +// LoadUser allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for an N-1 relationship. +func (workoutL) LoadUser(ctx context.Context, e boil.ContextExecutor, singular bool, maybeWorkout interface{}, mods queries.Applicator) error { + var slice []*Workout + var object *Workout + + if singular { + var ok bool + object, ok = maybeWorkout.(*Workout) + if !ok { + object = new(Workout) + ok = queries.SetFromEmbeddedStruct(&object, &maybeWorkout) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", object, maybeWorkout)) + } + } + } else { + s, ok := maybeWorkout.(*[]*Workout) + if ok { + slice = *s + } else { + ok = queries.SetFromEmbeddedStruct(&slice, maybeWorkout) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", slice, maybeWorkout)) + } + } + } + + args := make(map[interface{}]struct{}) + if singular { + if object.R == nil { + object.R = &workoutR{} + } + args[object.UserID] = struct{}{} + + } else { + for _, obj := range slice { + if obj.R == nil { + obj.R = &workoutR{} + } + + args[obj.UserID] = struct{}{} + + } + } + + if len(args) == 0 { + return nil + } + + argsSlice := make([]interface{}, len(args)) + i := 0 + for arg := range args { + argsSlice[i] = arg + i++ + } + + query := NewQuery( + qm.From(`getstronger.users`), + qm.WhereIn(`getstronger.users.id in ?`, argsSlice...), + ) + if mods != nil { + 
mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load User") + } + + var resultSlice []*User + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice User") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results of eager load for users") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for users") + } + + if len(userAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { + return err + } + } + } + + if len(resultSlice) == 0 { + return nil + } + + if singular { + foreign := resultSlice[0] + object.R.User = foreign + if foreign.R == nil { + foreign.R = &userR{} + } + foreign.R.Workouts = append(foreign.R.Workouts, object) + return nil + } + + for _, local := range slice { + for _, foreign := range resultSlice { + if local.UserID == foreign.ID { + local.R.User = foreign + if foreign.R == nil { + foreign.R = &userR{} + } + foreign.R.Workouts = append(foreign.R.Workouts, local) + break + } + } + } + + return nil +} + +// LoadSets allows an eager lookup of values, cached into the +// loaded structs of the objects. This is for a 1-M or N-M relationship. 
+func (workoutL) LoadSets(ctx context.Context, e boil.ContextExecutor, singular bool, maybeWorkout interface{}, mods queries.Applicator) error { + var slice []*Workout + var object *Workout + + if singular { + var ok bool + object, ok = maybeWorkout.(*Workout) + if !ok { + object = new(Workout) + ok = queries.SetFromEmbeddedStruct(&object, &maybeWorkout) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", object, maybeWorkout)) + } + } + } else { + s, ok := maybeWorkout.(*[]*Workout) + if ok { + slice = *s + } else { + ok = queries.SetFromEmbeddedStruct(&slice, maybeWorkout) + if !ok { + return errors.New(fmt.Sprintf("failed to set %T from embedded struct %T", slice, maybeWorkout)) + } + } + } + + args := make(map[interface{}]struct{}) + if singular { + if object.R == nil { + object.R = &workoutR{} + } + args[object.ID] = struct{}{} + } else { + for _, obj := range slice { + if obj.R == nil { + obj.R = &workoutR{} + } + args[obj.ID] = struct{}{} + } + } + + if len(args) == 0 { + return nil + } + + argsSlice := make([]interface{}, len(args)) + i := 0 + for arg := range args { + argsSlice[i] = arg + i++ + } + + query := NewQuery( + qm.From(`getstronger.sets`), + qm.WhereIn(`getstronger.sets.workout_id in ?`, argsSlice...), + ) + if mods != nil { + mods.Apply(query) + } + + results, err := query.QueryContext(ctx, e) + if err != nil { + return errors.Wrap(err, "failed to eager load sets") + } + + var resultSlice []*Set + if err = queries.Bind(results, &resultSlice); err != nil { + return errors.Wrap(err, "failed to bind eager loaded slice sets") + } + + if err = results.Close(); err != nil { + return errors.Wrap(err, "failed to close results in eager load on sets") + } + if err = results.Err(); err != nil { + return errors.Wrap(err, "error occurred during iteration of eager loaded relations for sets") + } + + if len(setAfterSelectHooks) != 0 { + for _, obj := range resultSlice { + if err := obj.doAfterSelectHooks(ctx, e); err != nil { 
+ return err + } + } + } + if singular { + object.R.Sets = resultSlice + for _, foreign := range resultSlice { + if foreign.R == nil { + foreign.R = &setR{} + } + foreign.R.Workout = object + } + return nil + } + + for _, foreign := range resultSlice { + for _, local := range slice { + if local.ID == foreign.WorkoutID { + local.R.Sets = append(local.R.Sets, foreign) + if foreign.R == nil { + foreign.R = &setR{} + } + foreign.R.Workout = local + break + } + } + } + + return nil +} + +// SetUser of the workout to the related item. +// Sets o.R.User to related. +// Adds o to related.R.Workouts. +func (o *Workout) SetUser(ctx context.Context, exec boil.ContextExecutor, insert bool, related *User) error { + var err error + if insert { + if err = related.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } + + updateQuery := fmt.Sprintf( + "UPDATE \"getstronger\".\"workouts\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 1, []string{"user_id"}), + strmangle.WhereClause("\"", "\"", 2, workoutPrimaryKeyColumns), + ) + values := []interface{}{related.ID, o.ID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update local table") + } + + o.UserID = related.ID + if o.R == nil { + o.R = &workoutR{ + User: related, + } + } else { + o.R.User = related + } + + if related.R == nil { + related.R = &userR{ + Workouts: WorkoutSlice{o}, + } + } else { + related.R.Workouts = append(related.R.Workouts, o) + } + + return nil +} + +// AddSets adds the given related objects to the existing relationships +// of the workout, optionally inserting them as new records. +// Appends related to o.R.Sets. +// Sets related.R.Workout appropriately. 
+func (o *Workout) AddSets(ctx context.Context, exec boil.ContextExecutor, insert bool, related ...*Set) error { + var err error + for _, rel := range related { + if insert { + rel.WorkoutID = o.ID + if err = rel.Insert(ctx, exec, boil.Infer()); err != nil { + return errors.Wrap(err, "failed to insert into foreign table") + } + } else { + updateQuery := fmt.Sprintf( + "UPDATE \"getstronger\".\"sets\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 1, []string{"workout_id"}), + strmangle.WhereClause("\"", "\"", 2, setPrimaryKeyColumns), + ) + values := []interface{}{o.ID, rel.ID} + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, updateQuery) + fmt.Fprintln(writer, values) + } + if _, err = exec.ExecContext(ctx, updateQuery, values...); err != nil { + return errors.Wrap(err, "failed to update foreign table") + } + + rel.WorkoutID = o.ID + } + } + + if o.R == nil { + o.R = &workoutR{ + Sets: related, + } + } else { + o.R.Sets = append(o.R.Sets, related...) + } + + for _, rel := range related { + if rel.R == nil { + rel.R = &setR{ + Workout: o, + } + } else { + rel.R.Workout = o + } + } + return nil +} + +// Workouts retrieves all the records using an executor. +func Workouts(mods ...qm.QueryMod) workoutQuery { + mods = append(mods, qm.From("\"getstronger\".\"workouts\"")) + q := NewQuery(mods...) + if len(queries.GetSelect(q)) == 0 { + queries.SetSelect(q, []string{"\"getstronger\".\"workouts\".*"}) + } + + return workoutQuery{q} +} + +// FindWorkout retrieves a single record by ID with an executor. +// If selectCols is empty Find will return all columns. 
+func FindWorkout(ctx context.Context, exec boil.ContextExecutor, iD string, selectCols ...string) (*Workout, error) { + workoutObj := &Workout{} + + sel := "*" + if len(selectCols) > 0 { + sel = strings.Join(strmangle.IdentQuoteSlice(dialect.LQ, dialect.RQ, selectCols), ",") + } + query := fmt.Sprintf( + "select %s from \"getstronger\".\"workouts\" where \"id\"=$1", sel, + ) + + q := queries.Raw(query, iD) + + err := q.Bind(ctx, exec, workoutObj) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, sql.ErrNoRows + } + return nil, errors.Wrap(err, "orm: unable to select from workouts") + } + + if err = workoutObj.doAfterSelectHooks(ctx, exec); err != nil { + return workoutObj, err + } + + return workoutObj, nil +} + +// Insert a single record using an executor. +// See boil.Columns.InsertColumnSet documentation to understand column list inference for inserts. +func (o *Workout) Insert(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) error { + if o == nil { + return errors.New("orm: no workouts provided for insertion") + } + + var err error + if !boil.TimestampsAreSkipped(ctx) { + currTime := time.Now().In(boil.GetLocation()) + + if o.CreatedAt.IsZero() { + o.CreatedAt = currTime + } + } + + if err := o.doBeforeInsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(workoutColumnsWithDefault, o) + + key := makeCacheKey(columns, nzDefaults) + workoutInsertCacheMut.RLock() + cache, cached := workoutInsertCache[key] + workoutInsertCacheMut.RUnlock() + + if !cached { + wl, returnColumns := columns.InsertColumnSet( + workoutAllColumns, + workoutColumnsWithDefault, + workoutColumnsWithoutDefault, + nzDefaults, + ) + + cache.valueMapping, err = queries.BindMapping(workoutType, workoutMapping, wl) + if err != nil { + return err + } + cache.retMapping, err = queries.BindMapping(workoutType, workoutMapping, returnColumns) + if err != nil { + return err + } + if len(wl) != 0 { + cache.query = 
fmt.Sprintf("INSERT INTO \"getstronger\".\"workouts\" (\"%s\") %%sVALUES (%s)%%s", strings.Join(wl, "\",\""), strmangle.Placeholders(dialect.UseIndexPlaceholders, len(wl), 1, 1)) + } else { + cache.query = "INSERT INTO \"getstronger\".\"workouts\" %sDEFAULT VALUES%s" + } + + var queryOutput, queryReturning string + + if len(cache.retMapping) != 0 { + queryReturning = fmt.Sprintf(" RETURNING \"%s\"", strings.Join(returnColumns, "\",\"")) + } + + cache.query = fmt.Sprintf(cache.query, queryOutput, queryReturning) + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(queries.PtrsFromMapping(value, cache.retMapping)...) + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) + } + + if err != nil { + return errors.Wrap(err, "orm: unable to insert into workouts") + } + + if !cached { + workoutInsertCacheMut.Lock() + workoutInsertCache[key] = cache + workoutInsertCacheMut.Unlock() + } + + return o.doAfterInsertHooks(ctx, exec) +} + +// Update uses an executor to update the Workout. +// See boil.Columns.UpdateColumnSet documentation to understand column list inference for updates. +// Update does not automatically update the record in case of default values. Use .Reload() to refresh the records. 
+func (o *Workout) Update(ctx context.Context, exec boil.ContextExecutor, columns boil.Columns) (int64, error) { + var err error + if err = o.doBeforeUpdateHooks(ctx, exec); err != nil { + return 0, err + } + key := makeCacheKey(columns, nil) + workoutUpdateCacheMut.RLock() + cache, cached := workoutUpdateCache[key] + workoutUpdateCacheMut.RUnlock() + + if !cached { + wl := columns.UpdateColumnSet( + workoutAllColumns, + workoutPrimaryKeyColumns, + ) + + if !columns.IsWhitelist() { + wl = strmangle.SetComplement(wl, []string{"created_at"}) + } + if len(wl) == 0 { + return 0, errors.New("orm: unable to update workouts, could not build whitelist") + } + + cache.query = fmt.Sprintf("UPDATE \"getstronger\".\"workouts\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 1, wl), + strmangle.WhereClause("\"", "\"", len(wl)+1, workoutPrimaryKeyColumns), + ) + cache.valueMapping, err = queries.BindMapping(workoutType, workoutMapping, append(wl, workoutPrimaryKeyColumns...)) + if err != nil { + return 0, err + } + } + + values := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), cache.valueMapping) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, values) + } + var result sql.Result + result, err = exec.ExecContext(ctx, cache.query, values...) + if err != nil { + return 0, errors.Wrap(err, "orm: unable to update workouts row") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: failed to get rows affected by update for workouts") + } + + if !cached { + workoutUpdateCacheMut.Lock() + workoutUpdateCache[key] = cache + workoutUpdateCacheMut.Unlock() + } + + return rowsAff, o.doAfterUpdateHooks(ctx, exec) +} + +// UpdateAll updates all rows with the specified column values. 
+func (q workoutQuery) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + queries.SetUpdate(q.Query, cols) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "orm: unable to update all for workouts") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: unable to retrieve rows affected for workouts") + } + + return rowsAff, nil +} + +// UpdateAll updates all rows with the specified column values, using an executor. +func (o WorkoutSlice) UpdateAll(ctx context.Context, exec boil.ContextExecutor, cols M) (int64, error) { + ln := int64(len(o)) + if ln == 0 { + return 0, nil + } + + if len(cols) == 0 { + return 0, errors.New("orm: update all requires at least one column argument") + } + + colNames := make([]string, len(cols)) + args := make([]interface{}, len(cols)) + + i := 0 + for name, value := range cols { + colNames[i] = name + args[i] = value + i++ + } + + // Append all of the primary key values for each column + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), workoutPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := fmt.Sprintf("UPDATE \"getstronger\".\"workouts\" SET %s WHERE %s", + strmangle.SetParamNames("\"", "\"", 1, colNames), + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), len(colNames)+1, workoutPrimaryKeyColumns, len(o))) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) 
+ if err != nil { + return 0, errors.Wrap(err, "orm: unable to update all in workout slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: unable to retrieve rows affected all in update all workout") + } + return rowsAff, nil +} + +// Upsert attempts an insert using an executor, and does an update or ignore on conflict. +// See boil.Columns documentation for how to properly use updateColumns and insertColumns. +func (o *Workout) Upsert(ctx context.Context, exec boil.ContextExecutor, updateOnConflict bool, conflictColumns []string, updateColumns, insertColumns boil.Columns, opts ...UpsertOptionFunc) error { + if o == nil { + return errors.New("orm: no workouts provided for upsert") + } + if !boil.TimestampsAreSkipped(ctx) { + currTime := time.Now().In(boil.GetLocation()) + + if o.CreatedAt.IsZero() { + o.CreatedAt = currTime + } + } + + if err := o.doBeforeUpsertHooks(ctx, exec); err != nil { + return err + } + + nzDefaults := queries.NonZeroDefaultSet(workoutColumnsWithDefault, o) + + // Build cache key in-line uglily - mysql vs psql problems + buf := strmangle.GetBuffer() + if updateOnConflict { + buf.WriteByte('t') + } else { + buf.WriteByte('f') + } + buf.WriteByte('.') + for _, c := range conflictColumns { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(updateColumns.Kind)) + for _, c := range updateColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + buf.WriteString(strconv.Itoa(insertColumns.Kind)) + for _, c := range insertColumns.Cols { + buf.WriteString(c) + } + buf.WriteByte('.') + for _, c := range nzDefaults { + buf.WriteString(c) + } + key := buf.String() + strmangle.PutBuffer(buf) + + workoutUpsertCacheMut.RLock() + cache, cached := workoutUpsertCache[key] + workoutUpsertCacheMut.RUnlock() + + var err error + + if !cached { + insert, _ := insertColumns.InsertColumnSet( + workoutAllColumns, + workoutColumnsWithDefault, + workoutColumnsWithoutDefault, + 
nzDefaults, + ) + + update := updateColumns.UpdateColumnSet( + workoutAllColumns, + workoutPrimaryKeyColumns, + ) + + if updateOnConflict && len(update) == 0 { + return errors.New("orm: unable to upsert workouts, could not build update column list") + } + + ret := strmangle.SetComplement(workoutAllColumns, strmangle.SetIntersect(insert, update)) + + conflict := conflictColumns + if len(conflict) == 0 && updateOnConflict && len(update) != 0 { + if len(workoutPrimaryKeyColumns) == 0 { + return errors.New("orm: unable to upsert workouts, could not build conflict column list") + } + + conflict = make([]string, len(workoutPrimaryKeyColumns)) + copy(conflict, workoutPrimaryKeyColumns) + } + cache.query = buildUpsertQueryPostgres(dialect, "\"getstronger\".\"workouts\"", updateOnConflict, ret, update, conflict, insert, opts...) + + cache.valueMapping, err = queries.BindMapping(workoutType, workoutMapping, insert) + if err != nil { + return err + } + if len(ret) != 0 { + cache.retMapping, err = queries.BindMapping(workoutType, workoutMapping, ret) + if err != nil { + return err + } + } + } + + value := reflect.Indirect(reflect.ValueOf(o)) + vals := queries.ValuesFromMapping(value, cache.valueMapping) + var returns []interface{} + if len(cache.retMapping) != 0 { + returns = queries.PtrsFromMapping(value, cache.retMapping) + } + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, cache.query) + fmt.Fprintln(writer, vals) + } + if len(cache.retMapping) != 0 { + err = exec.QueryRowContext(ctx, cache.query, vals...).Scan(returns...) + if errors.Is(err, sql.ErrNoRows) { + err = nil // Postgres doesn't return anything when there's no update + } + } else { + _, err = exec.ExecContext(ctx, cache.query, vals...) 
+ } + if err != nil { + return errors.Wrap(err, "orm: unable to upsert workouts") + } + + if !cached { + workoutUpsertCacheMut.Lock() + workoutUpsertCache[key] = cache + workoutUpsertCacheMut.Unlock() + } + + return o.doAfterUpsertHooks(ctx, exec) +} + +// Delete deletes a single Workout record with an executor. +// Delete will match against the primary key column to find the record to delete. +func (o *Workout) Delete(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if o == nil { + return 0, errors.New("orm: no Workout provided for delete") + } + + if err := o.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + args := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(o)), workoutPrimaryKeyMapping) + sql := "DELETE FROM \"getstronger\".\"workouts\" WHERE \"id\"=$1" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args...) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "orm: unable to delete from workouts") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: failed to get rows affected by delete for workouts") + } + + if err := o.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + + return rowsAff, nil +} + +// DeleteAll deletes all matching rows. 
+func (q workoutQuery) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if q.Query == nil { + return 0, errors.New("orm: no workoutQuery provided for delete all") + } + + queries.SetDelete(q.Query) + + result, err := q.Query.ExecContext(ctx, exec) + if err != nil { + return 0, errors.Wrap(err, "orm: unable to delete all from workouts") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: failed to get rows affected by deleteall for workouts") + } + + return rowsAff, nil +} + +// DeleteAll deletes all rows in the slice, using an executor. +func (o WorkoutSlice) DeleteAll(ctx context.Context, exec boil.ContextExecutor) (int64, error) { + if len(o) == 0 { + return 0, nil + } + + if len(workoutBeforeDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doBeforeDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + var args []interface{} + for _, obj := range o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), workoutPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "DELETE FROM \"getstronger\".\"workouts\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 1, workoutPrimaryKeyColumns, len(o)) + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, args) + } + result, err := exec.ExecContext(ctx, sql, args...) + if err != nil { + return 0, errors.Wrap(err, "orm: unable to delete all from workout slice") + } + + rowsAff, err := result.RowsAffected() + if err != nil { + return 0, errors.Wrap(err, "orm: failed to get rows affected by deleteall for workouts") + } + + if len(workoutAfterDeleteHooks) != 0 { + for _, obj := range o { + if err := obj.doAfterDeleteHooks(ctx, exec); err != nil { + return 0, err + } + } + } + + return rowsAff, nil +} + +// Reload refetches the object from the database +// using the primary keys with an executor. 
+func (o *Workout) Reload(ctx context.Context, exec boil.ContextExecutor) error { + ret, err := FindWorkout(ctx, exec, o.ID) + if err != nil { + return err + } + + *o = *ret + return nil +} + +// ReloadAll refetches every row with matching primary key column values +// and overwrites the original object slice with the newly updated slice. +func (o *WorkoutSlice) ReloadAll(ctx context.Context, exec boil.ContextExecutor) error { + if o == nil || len(*o) == 0 { + return nil + } + + slice := WorkoutSlice{} + var args []interface{} + for _, obj := range *o { + pkeyArgs := queries.ValuesFromMapping(reflect.Indirect(reflect.ValueOf(obj)), workoutPrimaryKeyMapping) + args = append(args, pkeyArgs...) + } + + sql := "SELECT \"getstronger\".\"workouts\".* FROM \"getstronger\".\"workouts\" WHERE " + + strmangle.WhereClauseRepeated(string(dialect.LQ), string(dialect.RQ), 1, workoutPrimaryKeyColumns, len(*o)) + + q := queries.Raw(sql, args...) + + err := q.Bind(ctx, exec, &slice) + if err != nil { + return errors.Wrap(err, "orm: unable to reload all in WorkoutSlice") + } + + *o = slice + + return nil +} + +// WorkoutExists checks if the Workout row exists. +func WorkoutExists(ctx context.Context, exec boil.ContextExecutor, iD string) (bool, error) { + var exists bool + sql := "select exists(select 1 from \"getstronger\".\"workouts\" where \"id\"=$1 limit 1)" + + if boil.IsDebug(ctx) { + writer := boil.DebugWriterFrom(ctx) + fmt.Fprintln(writer, sql) + fmt.Fprintln(writer, iD) + } + row := exec.QueryRowContext(ctx, sql, iD) + + err := row.Scan(&exists) + if err != nil { + return false, errors.Wrap(err, "orm: unable to check if workouts exists") + } + + return exists, nil +} + +// Exists checks if the Workout row exists. 
+func (o *Workout) Exists(ctx context.Context, exec boil.ContextExecutor) (bool, error) { + return WorkoutExists(ctx, exec, o.ID) +} diff --git a/go/pkg/pb/api/v1/apiv1connect/auth.connect.go b/go/pkg/pb/api/v1/apiv1connect/auth.connect.go new file mode 100644 index 00000000..ba4504a3 --- /dev/null +++ b/go/pkg/pb/api/v1/apiv1connect/auth.connect.go @@ -0,0 +1,171 @@ +// Code generated by protoc-gen-connect-go. DO NOT EDIT. +// +// Source: api/v1/auth.proto + +package apiv1connect + +import ( + connect "connectrpc.com/connect" + context "context" + errors "errors" + v1 "github.com/crlssn/getstronger/go/pkg/pb/api/v1" + http "net/http" + strings "strings" +) + +// This is a compile-time assertion to ensure that this generated file and the connect package are +// compatible. If you get a compiler error that this constant is not defined, this code was +// generated with a version of connect newer than the one compiled into your binary. You can fix the +// problem by either regenerating this code with an older version of connect or updating the connect +// version compiled into your binary. +const _ = connect.IsAtLeastVersion1_13_0 + +const ( + // AuthServiceName is the fully-qualified name of the AuthService service. + AuthServiceName = "api.v1.AuthService" +) + +// These constants are the fully-qualified names of the RPCs defined in this package. They're +// exposed at runtime as Spec.Procedure and as the final two segments of the HTTP route. +// +// Note that these are different from the fully-qualified method names used by +// google.golang.org/protobuf/reflect/protoreflect. To convert from these constants to +// reflection-formatted method names, remove the leading slash and convert the remaining slash to a +// period. +const ( + // AuthServiceSignupProcedure is the fully-qualified name of the AuthService's Signup RPC. 
+ AuthServiceSignupProcedure = "/api.v1.AuthService/Signup" + // AuthServiceLoginProcedure is the fully-qualified name of the AuthService's Login RPC. + AuthServiceLoginProcedure = "/api.v1.AuthService/Login" + // AuthServiceRefreshTokenProcedure is the fully-qualified name of the AuthService's RefreshToken + // RPC. + AuthServiceRefreshTokenProcedure = "/api.v1.AuthService/RefreshToken" +) + +// These variables are the protoreflect.Descriptor objects for the RPCs defined in this package. +var ( + authServiceServiceDescriptor = v1.File_api_v1_auth_proto.Services().ByName("AuthService") + authServiceSignupMethodDescriptor = authServiceServiceDescriptor.Methods().ByName("Signup") + authServiceLoginMethodDescriptor = authServiceServiceDescriptor.Methods().ByName("Login") + authServiceRefreshTokenMethodDescriptor = authServiceServiceDescriptor.Methods().ByName("RefreshToken") +) + +// AuthServiceClient is a client for the api.v1.AuthService service. +type AuthServiceClient interface { + Signup(context.Context, *connect.Request[v1.SignupRequest]) (*connect.Response[v1.SignupResponse], error) + Login(context.Context, *connect.Request[v1.LoginRequest]) (*connect.Response[v1.LoginResponse], error) + RefreshToken(context.Context, *connect.Request[v1.RefreshTokenRequest]) (*connect.Response[v1.RefreshTokenResponse], error) +} + +// NewAuthServiceClient constructs a client for the api.v1.AuthService service. By default, it uses +// the Connect protocol with the binary Protobuf Codec, asks for gzipped responses, and sends +// uncompressed requests. To use the gRPC or gRPC-Web protocols, supply the connect.WithGRPC() or +// connect.WithGRPCWeb() options. +// +// The URL supplied here should be the base URL for the Connect or gRPC server (for example, +// http://api.acme.com or https://acme.com/grpc). 
+func NewAuthServiceClient(httpClient connect.HTTPClient, baseURL string, opts ...connect.ClientOption) AuthServiceClient { + baseURL = strings.TrimRight(baseURL, "/") + return &authServiceClient{ + signup: connect.NewClient[v1.SignupRequest, v1.SignupResponse]( + httpClient, + baseURL+AuthServiceSignupProcedure, + connect.WithSchema(authServiceSignupMethodDescriptor), + connect.WithClientOptions(opts...), + ), + login: connect.NewClient[v1.LoginRequest, v1.LoginResponse]( + httpClient, + baseURL+AuthServiceLoginProcedure, + connect.WithSchema(authServiceLoginMethodDescriptor), + connect.WithClientOptions(opts...), + ), + refreshToken: connect.NewClient[v1.RefreshTokenRequest, v1.RefreshTokenResponse]( + httpClient, + baseURL+AuthServiceRefreshTokenProcedure, + connect.WithSchema(authServiceRefreshTokenMethodDescriptor), + connect.WithClientOptions(opts...), + ), + } +} + +// authServiceClient implements AuthServiceClient. +type authServiceClient struct { + signup *connect.Client[v1.SignupRequest, v1.SignupResponse] + login *connect.Client[v1.LoginRequest, v1.LoginResponse] + refreshToken *connect.Client[v1.RefreshTokenRequest, v1.RefreshTokenResponse] +} + +// Signup calls api.v1.AuthService.Signup. +func (c *authServiceClient) Signup(ctx context.Context, req *connect.Request[v1.SignupRequest]) (*connect.Response[v1.SignupResponse], error) { + return c.signup.CallUnary(ctx, req) +} + +// Login calls api.v1.AuthService.Login. +func (c *authServiceClient) Login(ctx context.Context, req *connect.Request[v1.LoginRequest]) (*connect.Response[v1.LoginResponse], error) { + return c.login.CallUnary(ctx, req) +} + +// RefreshToken calls api.v1.AuthService.RefreshToken. +func (c *authServiceClient) RefreshToken(ctx context.Context, req *connect.Request[v1.RefreshTokenRequest]) (*connect.Response[v1.RefreshTokenResponse], error) { + return c.refreshToken.CallUnary(ctx, req) +} + +// AuthServiceHandler is an implementation of the api.v1.AuthService service. 
+type AuthServiceHandler interface { + Signup(context.Context, *connect.Request[v1.SignupRequest]) (*connect.Response[v1.SignupResponse], error) + Login(context.Context, *connect.Request[v1.LoginRequest]) (*connect.Response[v1.LoginResponse], error) + RefreshToken(context.Context, *connect.Request[v1.RefreshTokenRequest]) (*connect.Response[v1.RefreshTokenResponse], error) +} + +// NewAuthServiceHandler builds an HTTP handler from the service implementation. It returns the path +// on which to mount the handler and the handler itself. +// +// By default, handlers support the Connect, gRPC, and gRPC-Web protocols with the binary Protobuf +// and JSON codecs. They also support gzip compression. +func NewAuthServiceHandler(svc AuthServiceHandler, opts ...connect.HandlerOption) (string, http.Handler) { + authServiceSignupHandler := connect.NewUnaryHandler( + AuthServiceSignupProcedure, + svc.Signup, + connect.WithSchema(authServiceSignupMethodDescriptor), + connect.WithHandlerOptions(opts...), + ) + authServiceLoginHandler := connect.NewUnaryHandler( + AuthServiceLoginProcedure, + svc.Login, + connect.WithSchema(authServiceLoginMethodDescriptor), + connect.WithHandlerOptions(opts...), + ) + authServiceRefreshTokenHandler := connect.NewUnaryHandler( + AuthServiceRefreshTokenProcedure, + svc.RefreshToken, + connect.WithSchema(authServiceRefreshTokenMethodDescriptor), + connect.WithHandlerOptions(opts...), + ) + return "/api.v1.AuthService/", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch r.URL.Path { + case AuthServiceSignupProcedure: + authServiceSignupHandler.ServeHTTP(w, r) + case AuthServiceLoginProcedure: + authServiceLoginHandler.ServeHTTP(w, r) + case AuthServiceRefreshTokenProcedure: + authServiceRefreshTokenHandler.ServeHTTP(w, r) + default: + http.NotFound(w, r) + } + }) +} + +// UnimplementedAuthServiceHandler returns CodeUnimplemented from all methods. 
+type UnimplementedAuthServiceHandler struct{} + +func (UnimplementedAuthServiceHandler) Signup(context.Context, *connect.Request[v1.SignupRequest]) (*connect.Response[v1.SignupResponse], error) { + return nil, connect.NewError(connect.CodeUnimplemented, errors.New("api.v1.AuthService.Signup is not implemented")) +} + +func (UnimplementedAuthServiceHandler) Login(context.Context, *connect.Request[v1.LoginRequest]) (*connect.Response[v1.LoginResponse], error) { + return nil, connect.NewError(connect.CodeUnimplemented, errors.New("api.v1.AuthService.Login is not implemented")) +} + +func (UnimplementedAuthServiceHandler) RefreshToken(context.Context, *connect.Request[v1.RefreshTokenRequest]) (*connect.Response[v1.RefreshTokenResponse], error) { + return nil, connect.NewError(connect.CodeUnimplemented, errors.New("api.v1.AuthService.RefreshToken is not implemented")) +} diff --git a/go/pkg/pb/api/v1/auth.pb.go b/go/pkg/pb/api/v1/auth.pb.go new file mode 100644 index 00000000..317a8f7f --- /dev/null +++ b/go/pkg/pb/api/v1/auth.pb.go @@ -0,0 +1,429 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.35.1 +// protoc (unknown) +// source: api/v1/auth.proto + +package apiv1 + +import ( + _ "buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go/buf/validate" + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + _ "google.golang.org/protobuf/types/known/timestamppb" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. 
+ _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type SignupRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Email string `protobuf:"bytes,1,opt,name=email,proto3" json:"email,omitempty"` + Password string `protobuf:"bytes,2,opt,name=password,proto3" json:"password,omitempty"` +} + +func (x *SignupRequest) Reset() { + *x = SignupRequest{} + mi := &file_api_v1_auth_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *SignupRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SignupRequest) ProtoMessage() {} + +func (x *SignupRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_v1_auth_proto_msgTypes[0] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SignupRequest.ProtoReflect.Descriptor instead. 
+func (*SignupRequest) Descriptor() ([]byte, []int) { + return file_api_v1_auth_proto_rawDescGZIP(), []int{0} +} + +func (x *SignupRequest) GetEmail() string { + if x != nil { + return x.Email + } + return "" +} + +func (x *SignupRequest) GetPassword() string { + if x != nil { + return x.Password + } + return "" +} + +type SignupResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *SignupResponse) Reset() { + *x = SignupResponse{} + mi := &file_api_v1_auth_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *SignupResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*SignupResponse) ProtoMessage() {} + +func (x *SignupResponse) ProtoReflect() protoreflect.Message { + mi := &file_api_v1_auth_proto_msgTypes[1] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use SignupResponse.ProtoReflect.Descriptor instead. 
+func (*SignupResponse) Descriptor() ([]byte, []int) { + return file_api_v1_auth_proto_rawDescGZIP(), []int{1} +} + +type LoginRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Email string `protobuf:"bytes,1,opt,name=email,proto3" json:"email,omitempty"` + Password string `protobuf:"bytes,2,opt,name=password,proto3" json:"password,omitempty"` +} + +func (x *LoginRequest) Reset() { + *x = LoginRequest{} + mi := &file_api_v1_auth_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *LoginRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*LoginRequest) ProtoMessage() {} + +func (x *LoginRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_v1_auth_proto_msgTypes[2] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use LoginRequest.ProtoReflect.Descriptor instead. 
+func (*LoginRequest) Descriptor() ([]byte, []int) { + return file_api_v1_auth_proto_rawDescGZIP(), []int{2} +} + +func (x *LoginRequest) GetEmail() string { + if x != nil { + return x.Email + } + return "" +} + +func (x *LoginRequest) GetPassword() string { + if x != nil { + return x.Password + } + return "" +} + +type LoginResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + AccessToken string `protobuf:"bytes,1,opt,name=access_token,json=accessToken,proto3" json:"access_token,omitempty"` + RefreshToken string `protobuf:"bytes,2,opt,name=refresh_token,json=refreshToken,proto3" json:"refresh_token,omitempty"` +} + +func (x *LoginResponse) Reset() { + *x = LoginResponse{} + mi := &file_api_v1_auth_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *LoginResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*LoginResponse) ProtoMessage() {} + +func (x *LoginResponse) ProtoReflect() protoreflect.Message { + mi := &file_api_v1_auth_proto_msgTypes[3] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use LoginResponse.ProtoReflect.Descriptor instead. 
+func (*LoginResponse) Descriptor() ([]byte, []int) { + return file_api_v1_auth_proto_rawDescGZIP(), []int{3} +} + +func (x *LoginResponse) GetAccessToken() string { + if x != nil { + return x.AccessToken + } + return "" +} + +func (x *LoginResponse) GetRefreshToken() string { + if x != nil { + return x.RefreshToken + } + return "" +} + +type RefreshTokenRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + RefreshToken string `protobuf:"bytes,1,opt,name=refresh_token,json=refreshToken,proto3" json:"refresh_token,omitempty"` +} + +func (x *RefreshTokenRequest) Reset() { + *x = RefreshTokenRequest{} + mi := &file_api_v1_auth_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *RefreshTokenRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RefreshTokenRequest) ProtoMessage() {} + +func (x *RefreshTokenRequest) ProtoReflect() protoreflect.Message { + mi := &file_api_v1_auth_proto_msgTypes[4] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RefreshTokenRequest.ProtoReflect.Descriptor instead. 
+func (*RefreshTokenRequest) Descriptor() ([]byte, []int) { + return file_api_v1_auth_proto_rawDescGZIP(), []int{4} +} + +func (x *RefreshTokenRequest) GetRefreshToken() string { + if x != nil { + return x.RefreshToken + } + return "" +} + +type RefreshTokenResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + AccessToken string `protobuf:"bytes,1,opt,name=access_token,json=accessToken,proto3" json:"access_token,omitempty"` +} + +func (x *RefreshTokenResponse) Reset() { + *x = RefreshTokenResponse{} + mi := &file_api_v1_auth_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *RefreshTokenResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*RefreshTokenResponse) ProtoMessage() {} + +func (x *RefreshTokenResponse) ProtoReflect() protoreflect.Message { + mi := &file_api_v1_auth_proto_msgTypes[5] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use RefreshTokenResponse.ProtoReflect.Descriptor instead. 
+func (*RefreshTokenResponse) Descriptor() ([]byte, []int) { + return file_api_v1_auth_proto_rawDescGZIP(), []int{5} +} + +func (x *RefreshTokenResponse) GetAccessToken() string { + if x != nil { + return x.AccessToken + } + return "" +} + +var File_api_v1_auth_proto protoreflect.FileDescriptor + +var file_api_v1_auth_proto_rawDesc = []byte{ + 0x0a, 0x11, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x61, 0x75, 0x74, 0x68, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x12, 0x06, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x1a, 0x14, 0x61, 0x70, 0x69, + 0x2f, 0x76, 0x31, 0x2f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, + 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x1a, 0x1b, 0x62, 0x75, 0x66, 0x2f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, + 0x2f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, + 0x53, 0x0a, 0x0d, 0x53, 0x69, 0x67, 0x6e, 0x75, 0x70, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x1d, 0x0a, 0x05, 0x65, 0x6d, 0x61, 0x69, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, + 0x07, 0xba, 0x48, 0x04, 0x72, 0x02, 0x60, 0x01, 0x52, 0x05, 0x65, 0x6d, 0x61, 0x69, 0x6c, 0x12, + 0x23, 0x0a, 0x08, 0x70, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x42, 0x07, 0xba, 0x48, 0x04, 0x72, 0x02, 0x10, 0x06, 0x52, 0x08, 0x70, 0x61, 0x73, 0x73, + 0x77, 0x6f, 0x72, 0x64, 0x22, 0x10, 0x0a, 0x0e, 0x53, 0x69, 0x67, 0x6e, 0x75, 0x70, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x52, 0x0a, 0x0c, 0x4c, 0x6f, 0x67, 0x69, 0x6e, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x05, 0x65, 0x6d, 0x61, 0x69, 0x6c, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x07, 0xba, 0x48, 0x04, 0x72, 0x02, 0x60, 0x01, 0x52, 0x05, + 0x65, 0x6d, 0x61, 0x69, 0x6c, 0x12, 0x23, 0x0a, 0x08, 0x70, 0x61, 0x73, 0x73, 0x77, 
0x6f, 0x72, + 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x42, 0x07, 0xba, 0x48, 0x04, 0x72, 0x02, 0x10, 0x01, + 0x52, 0x08, 0x70, 0x61, 0x73, 0x73, 0x77, 0x6f, 0x72, 0x64, 0x22, 0x57, 0x0a, 0x0d, 0x4c, 0x6f, + 0x67, 0x69, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x21, 0x0a, 0x0c, 0x61, + 0x63, 0x63, 0x65, 0x73, 0x73, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x0b, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x23, + 0x0a, 0x0d, 0x72, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x72, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x54, 0x6f, + 0x6b, 0x65, 0x6e, 0x22, 0x45, 0x0a, 0x13, 0x52, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x54, 0x6f, + 0x6b, 0x65, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2e, 0x0a, 0x0d, 0x72, 0x65, + 0x66, 0x72, 0x65, 0x73, 0x68, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x42, 0x09, 0xba, 0x48, 0x06, 0x72, 0x04, 0x98, 0x01, 0xdf, 0x01, 0x52, 0x0c, 0x72, 0x65, + 0x66, 0x72, 0x65, 0x73, 0x68, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x22, 0x39, 0x0a, 0x14, 0x52, 0x65, + 0x66, 0x72, 0x65, 0x73, 0x68, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x21, 0x0a, 0x0c, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, 0x5f, 0x74, 0x6f, 0x6b, + 0x65, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x61, 0x63, 0x63, 0x65, 0x73, 0x73, + 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x32, 0xcd, 0x01, 0x0a, 0x0b, 0x41, 0x75, 0x74, 0x68, 0x53, 0x65, + 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x39, 0x0a, 0x06, 0x53, 0x69, 0x67, 0x6e, 0x75, 0x70, 0x12, + 0x15, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x75, 0x70, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, + 0x53, 0x69, 0x67, 0x6e, 0x75, 0x70, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, + 0x12, 0x36, 0x0a, 0x05, 
0x4c, 0x6f, 0x67, 0x69, 0x6e, 0x12, 0x14, 0x2e, 0x61, 0x70, 0x69, 0x2e, + 0x76, 0x31, 0x2e, 0x4c, 0x6f, 0x67, 0x69, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x15, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x6f, 0x67, 0x69, 0x6e, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x4b, 0x0a, 0x0c, 0x52, 0x65, 0x66, 0x72, + 0x65, 0x73, 0x68, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x1b, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, + 0x31, 0x2e, 0x52, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x2e, 0x52, + 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x86, 0x01, 0x0a, 0x0a, 0x63, 0x6f, 0x6d, 0x2e, 0x61, 0x70, + 0x69, 0x2e, 0x76, 0x31, 0x42, 0x09, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, + 0x01, 0x5a, 0x34, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x72, + 0x6c, 0x73, 0x73, 0x6e, 0x2f, 0x67, 0x65, 0x74, 0x73, 0x74, 0x72, 0x6f, 0x6e, 0x67, 0x65, 0x72, + 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x70, 0x62, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, + 0x31, 0x3b, 0x61, 0x70, 0x69, 0x76, 0x31, 0xa2, 0x02, 0x03, 0x41, 0x58, 0x58, 0xaa, 0x02, 0x06, + 0x41, 0x70, 0x69, 0x2e, 0x56, 0x31, 0xca, 0x02, 0x06, 0x41, 0x70, 0x69, 0x5c, 0x56, 0x31, 0xe2, + 0x02, 0x12, 0x41, 0x70, 0x69, 0x5c, 0x56, 0x31, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, + 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x07, 0x41, 0x70, 0x69, 0x3a, 0x3a, 0x56, 0x31, 0x62, 0x06, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_api_v1_auth_proto_rawDescOnce sync.Once + file_api_v1_auth_proto_rawDescData = file_api_v1_auth_proto_rawDesc +) + +func file_api_v1_auth_proto_rawDescGZIP() []byte { + file_api_v1_auth_proto_rawDescOnce.Do(func() { + file_api_v1_auth_proto_rawDescData = 
protoimpl.X.CompressGZIP(file_api_v1_auth_proto_rawDescData) + }) + return file_api_v1_auth_proto_rawDescData +} + +var file_api_v1_auth_proto_msgTypes = make([]protoimpl.MessageInfo, 6) +var file_api_v1_auth_proto_goTypes = []any{ + (*SignupRequest)(nil), // 0: api.v1.SignupRequest + (*SignupResponse)(nil), // 1: api.v1.SignupResponse + (*LoginRequest)(nil), // 2: api.v1.LoginRequest + (*LoginResponse)(nil), // 3: api.v1.LoginResponse + (*RefreshTokenRequest)(nil), // 4: api.v1.RefreshTokenRequest + (*RefreshTokenResponse)(nil), // 5: api.v1.RefreshTokenResponse +} +var file_api_v1_auth_proto_depIdxs = []int32{ + 0, // 0: api.v1.AuthService.Signup:input_type -> api.v1.SignupRequest + 2, // 1: api.v1.AuthService.Login:input_type -> api.v1.LoginRequest + 4, // 2: api.v1.AuthService.RefreshToken:input_type -> api.v1.RefreshTokenRequest + 1, // 3: api.v1.AuthService.Signup:output_type -> api.v1.SignupResponse + 3, // 4: api.v1.AuthService.Login:output_type -> api.v1.LoginResponse + 5, // 5: api.v1.AuthService.RefreshToken:output_type -> api.v1.RefreshTokenResponse + 3, // [3:6] is the sub-list for method output_type + 0, // [0:3] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_api_v1_auth_proto_init() } +func file_api_v1_auth_proto_init() { + if File_api_v1_auth_proto != nil { + return + } + file_api_v1_options_proto_init() + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_api_v1_auth_proto_rawDesc, + NumEnums: 0, + NumMessages: 6, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_api_v1_auth_proto_goTypes, + DependencyIndexes: file_api_v1_auth_proto_depIdxs, + MessageInfos: file_api_v1_auth_proto_msgTypes, + }.Build() + File_api_v1_auth_proto = out.File + file_api_v1_auth_proto_rawDesc = 
nil + file_api_v1_auth_proto_goTypes = nil + file_api_v1_auth_proto_depIdxs = nil +} diff --git a/go/pkg/pb/api/v1/options.pb.go b/go/pkg/pb/api/v1/options.pb.go new file mode 100644 index 00000000..29b33030 --- /dev/null +++ b/go/pkg/pb/api/v1/options.pb.go @@ -0,0 +1,98 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.35.1 +// protoc (unknown) +// source: api/v1/options.proto + +package apiv1 + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + descriptorpb "google.golang.org/protobuf/types/descriptorpb" + reflect "reflect" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +var file_api_v1_options_proto_extTypes = []protoimpl.ExtensionInfo{ + { + ExtendedType: (*descriptorpb.MethodOptions)(nil), + ExtensionType: (*bool)(nil), + Field: 50001, + Name: "api.v1.requires_auth", + Tag: "varint,50001,opt,name=requires_auth", + Filename: "api/v1/options.proto", + }, +} + +// Extension fields to descriptorpb.MethodOptions. 
+var ( + // optional bool requires_auth = 50001; + E_RequiresAuth = &file_api_v1_options_proto_extTypes[0] +) + +var File_api_v1_options_proto protoreflect.FileDescriptor + +var file_api_v1_options_proto_rawDesc = []byte{ + 0x0a, 0x14, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x2f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x06, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x1a, 0x20, + 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, + 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x3a, 0x45, 0x0a, 0x0d, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x73, 0x5f, 0x61, 0x75, 0x74, + 0x68, 0x12, 0x1e, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x62, 0x75, 0x66, 0x2e, 0x4d, 0x65, 0x74, 0x68, 0x6f, 0x64, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x18, 0xd1, 0x86, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0c, 0x72, 0x65, 0x71, 0x75, 0x69, + 0x72, 0x65, 0x73, 0x41, 0x75, 0x74, 0x68, 0x42, 0x89, 0x01, 0x0a, 0x0a, 0x63, 0x6f, 0x6d, 0x2e, + 0x61, 0x70, 0x69, 0x2e, 0x76, 0x31, 0x42, 0x0c, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x50, + 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x34, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, + 0x6f, 0x6d, 0x2f, 0x63, 0x72, 0x6c, 0x73, 0x73, 0x6e, 0x2f, 0x67, 0x65, 0x74, 0x73, 0x74, 0x72, + 0x6f, 0x6e, 0x67, 0x65, 0x72, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x70, 0x62, 0x2f, + 0x61, 0x70, 0x69, 0x2f, 0x76, 0x31, 0x3b, 0x61, 0x70, 0x69, 0x76, 0x31, 0xa2, 0x02, 0x03, 0x41, + 0x58, 0x58, 0xaa, 0x02, 0x06, 0x41, 0x70, 0x69, 0x2e, 0x56, 0x31, 0xca, 0x02, 0x06, 0x41, 0x70, + 0x69, 0x5c, 0x56, 0x31, 0xe2, 0x02, 0x12, 0x41, 0x70, 0x69, 0x5c, 0x56, 0x31, 0x5c, 0x47, 0x50, + 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x07, 0x41, 0x70, 0x69, 0x3a, + 0x3a, 0x56, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var file_api_v1_options_proto_goTypes 
= []any{ + (*descriptorpb.MethodOptions)(nil), // 0: google.protobuf.MethodOptions +} +var file_api_v1_options_proto_depIdxs = []int32{ + 0, // 0: api.v1.requires_auth:extendee -> google.protobuf.MethodOptions + 1, // [1:1] is the sub-list for method output_type + 1, // [1:1] is the sub-list for method input_type + 1, // [1:1] is the sub-list for extension type_name + 0, // [0:1] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_api_v1_options_proto_init() } +func file_api_v1_options_proto_init() { + if File_api_v1_options_proto != nil { + return + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_api_v1_options_proto_rawDesc, + NumEnums: 0, + NumMessages: 0, + NumExtensions: 1, + NumServices: 0, + }, + GoTypes: file_api_v1_options_proto_goTypes, + DependencyIndexes: file_api_v1_options_proto_depIdxs, + ExtensionInfos: file_api_v1_options_proto_extTypes, + }.Build() + File_api_v1_options_proto = out.File + file_api_v1_options_proto_rawDesc = nil + file_api_v1_options_proto_goTypes = nil + file_api_v1_options_proto_depIdxs = nil +} diff --git a/go/pkg/repos/auth.go b/go/pkg/repos/auth.go new file mode 100644 index 00000000..94b1290f --- /dev/null +++ b/go/pkg/repos/auth.go @@ -0,0 +1,61 @@ +package repos + +import ( + "context" + "database/sql" + "fmt" + "github.com/crlssn/getstronger/go/pkg/orm" + "github.com/volatiletech/sqlboiler/v4/boil" + "golang.org/x/crypto/bcrypt" +) + +type Auth struct { + db *sql.DB +} + +func NewAuth(db *sql.DB) *Auth { + return &Auth{db} +} + +var ErrAuthEmailExists = fmt.Errorf("email already exists") + +func (a *Auth) Insert(ctx context.Context, email, password string) error { + exists, err := orm.Auths(orm.AuthWhere.Email.EQ(email)).Exists(ctx, a.db) + if err != nil { + return fmt.Errorf("email exists check: %w", err) + } + if exists { + return ErrAuthEmailExists + } + + 
bcryptPassword, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost) + if err != nil { + return fmt.Errorf("bcrypt password generation: %w", err) + } + + if err = (&orm.Auth{ + Email: email, + Password: bcryptPassword, + }).Insert(ctx, a.db, boil.Infer()); err != nil { + return fmt.Errorf("auth insert: %w", err) + } + + return nil +} + +func (a *Auth) CompareEmailAndPassword(ctx context.Context, email, password string) error { + auth, err := orm.Auths(orm.AuthWhere.Email.EQ(email)).One(ctx, a.db) + if err != nil { + return fmt.Errorf("auth fetch: %w", err) + } + + if err = bcrypt.CompareHashAndPassword(auth.Password, []byte(password)); err != nil { + return fmt.Errorf("hash and password comparision: %w", err) + } + + return nil +} + +func (a *Auth) FromEmail(ctx context.Context, email string) (*orm.Auth, error) { + return orm.Auths(orm.AuthWhere.Email.EQ(email)).One(ctx, a.db) +} diff --git a/go/pkg/xzap/xzap.go b/go/pkg/xzap/xzap.go new file mode 100644 index 00000000..c51253a6 --- /dev/null +++ b/go/pkg/xzap/xzap.go @@ -0,0 +1,7 @@ +package xzap + +import "go.uber.org/zap" + +func FieldRPC(rpc string) zap.Field { + return zap.String("rpc", rpc) +} diff --git a/go/rpc/auth/auth.go b/go/rpc/auth/auth.go new file mode 100644 index 00000000..89b86b6d --- /dev/null +++ b/go/rpc/auth/auth.go @@ -0,0 +1,109 @@ +package auth + +import ( + "context" + "errors" + "strings" + + "connectrpc.com/connect" + "go.uber.org/zap" + + "github.com/crlssn/getstronger/go/pkg/jwt" + v1 "github.com/crlssn/getstronger/go/pkg/pb/api/v1" + "github.com/crlssn/getstronger/go/pkg/pb/api/v1/apiv1connect" + "github.com/crlssn/getstronger/go/pkg/repos" + "github.com/crlssn/getstronger/go/pkg/xzap" +) + +var _ apiv1connect.AuthServiceHandler = (*handler)(nil) + +type handler struct { + log *zap.Logger + repo *repos.Auth + jwt *jwt.Manager +} + +func NewHandler(log *zap.Logger, repo *repos.Auth, jwt *jwt.Manager) apiv1connect.AuthServiceHandler { + return &handler{log, repo, 
jwt} +} + +func (h *handler) Signup(ctx context.Context, req *connect.Request[v1.SignupRequest]) (*connect.Response[v1.SignupResponse], error) { + log := h.log.With(xzap.FieldRPC(apiv1connect.AuthServiceSignupProcedure)) + + email := strings.ReplaceAll(req.Msg.Email, " ", "") + if !strings.Contains(email, "@") { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("invalid email")) + } + + if err := h.repo.Insert(ctx, email, req.Msg.Password); err != nil { + if errors.Is(err, repos.ErrAuthEmailExists) { + log.Warn("email already exists") + // Do not leak registered emails. + return connect.NewResponse(&v1.SignupResponse{}), nil + } + + log.Error("insert failed", zap.Error(err)) + return nil, connect.NewError(connect.CodeInternal, errors.New("")) + } + + log.Info("user signed up") + return connect.NewResponse(&v1.SignupResponse{}), nil +} + +func (h *handler) Login(ctx context.Context, req *connect.Request[v1.LoginRequest]) (*connect.Response[v1.LoginResponse], error) { + log := h.log.With(xzap.FieldRPC(apiv1connect.AuthServiceLoginProcedure)) + + if err := h.repo.CompareEmailAndPassword(ctx, req.Msg.Email, req.Msg.Password); err != nil { + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("invalid credentials")) + } + + auth, err := h.repo.FromEmail(ctx, req.Msg.Email) + if err != nil { + log.Error("fetch failed", zap.Error(err)) + return nil, connect.NewError(connect.CodeInternal, errors.New("")) + } + + accessToken, err := h.jwt.CreateToken(auth.ID, jwt.TokenTypeAccess) + if err != nil { + log.Error("token generation failed", zap.Error(err)) + return nil, connect.NewError(connect.CodeInternal, errors.New("")) + } + + refreshToken, err := h.jwt.CreateToken(auth.ID, jwt.TokenTypeRefresh) + if err != nil { + log.Error("token generation failed", zap.Error(err)) + return nil, connect.NewError(connect.CodeInternal, errors.New("")) + } + + log.Info("logged in") + return connect.NewResponse(&v1.LoginResponse{ + AccessToken: 
accessToken, + RefreshToken: refreshToken, + }), nil +} + +func (h *handler) RefreshToken(ctx context.Context, req *connect.Request[v1.RefreshTokenRequest]) (*connect.Response[v1.RefreshTokenResponse], error) { + log := h.log.With(xzap.FieldRPC(apiv1connect.AuthServiceRefreshTokenProcedure)) + + claims, err := h.jwt.ClaimsFromToken(req.Msg.RefreshToken, jwt.TokenTypeRefresh) + if err != nil { + log.Error("token parsing failed", zap.Error(err)) + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("invalid refresh token")) + } + + if err = h.jwt.ValidateClaims(claims); err != nil { + log.Error("token validation failed", zap.Error(err)) + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("invalid refresh token")) + } + + accessToken, err := h.jwt.CreateToken(claims.UserID, jwt.TokenTypeAccess) + if err != nil { + log.Error("token generation failed", zap.Error(err)) + return nil, connect.NewError(connect.CodeInternal, errors.New("")) + } + + h.log.Info("token refreshed") + return connect.NewResponse(&v1.RefreshTokenResponse{ + AccessToken: accessToken, + }), nil +} diff --git a/go/rpc/interceptors/auth.go b/go/rpc/interceptors/auth.go new file mode 100644 index 00000000..968c0896 --- /dev/null +++ b/go/rpc/interceptors/auth.go @@ -0,0 +1,112 @@ +package interceptors + +import ( + "context" + "errors" + "fmt" + "net/http" + "strings" + + "connectrpc.com/connect" + "go.uber.org/zap" + "google.golang.org/protobuf/proto" + "google.golang.org/protobuf/reflect/protoreflect" + "google.golang.org/protobuf/types/descriptorpb" + + "github.com/crlssn/getstronger/go/pkg/jwt" + apiv1 "github.com/crlssn/getstronger/go/pkg/pb/api/v1" +) + +type auth struct { + log *zap.Logger + jwt *jwt.Manager + methods map[string]bool +} + +func NewAuth(log *zap.Logger, jwt *jwt.Manager) Interceptor { + a := &auth{ + log: log, + jwt: jwt, + methods: make(map[string]bool), + } + a.initMethods() + return a +} + +func (a *auth) initMethods() { + fileDescriptors := 
[]protoreflect.FileDescriptor{ + apiv1.File_api_v1_auth_proto, + } + + for _, fileDescriptor := range fileDescriptors { + // Iterate over the services in the file descriptor. + services := fileDescriptor.Services() + for i := 0; i < services.Len(); i++ { + service := services.Get(i) + methods := service.Methods() + for j := 0; j < methods.Len(); j++ { + method := methods.Get(j) + requiresAuth := false + + // Access the custom options. + options := method.Options().(*descriptorpb.MethodOptions) + if proto.HasExtension(options, apiv1.E_RequiresAuth) { + if ext := proto.GetExtension(options, apiv1.E_RequiresAuth); ext != nil { + if v, ok := ext.(bool); ok { + requiresAuth = v + } + } + } + + // Build the full method name. + fullMethodName := fmt.Sprintf("/%s/%s", service.FullName(), method.Name()) + a.methods[fullMethodName] = requiresAuth + } + } + } +} + +// Unary is the unary interceptor method for authentication. +func (a *auth) Unary() connect.UnaryInterceptorFunc { + interceptor := func(next connect.UnaryFunc) connect.UnaryFunc { + return func( + ctx context.Context, + req connect.AnyRequest, + ) (connect.AnyResponse, error) { + if err := a.authorize(req.Spec().Procedure, req.Header()); err != nil { + a.log.Warn("unauthenticated request", zap.Error(err)) + return nil, connect.NewError(connect.CodeUnauthenticated, err) + } + return next(ctx, req) + } + } + return interceptor +} + +// authorize checks the authorization of the request. 
+func (a *auth) authorize(methodName string, header http.Header) error { + requiresAuth := a.methods[methodName] + if !requiresAuth { + a.log.Info("method does not require authentication", zap.String("method", methodName)) + return nil + } + + authHeader := header.Get("Authorization") + if authHeader == "" { + return errors.New("authorization token is missing") + } + + const bearerPrefix = "Bearer " + if !strings.HasPrefix(authHeader, bearerPrefix) { + return errors.New("invalid authorization header format") + } + + token := strings.TrimPrefix(authHeader, bearerPrefix) + if err := a.jwt.ValidateAccessToken(token); err != nil { + return errors.New("invalid authorization token") + } + + return nil +} + +var _ Interceptor = (*auth)(nil) diff --git a/go/rpc/interceptors/auth_test.go b/go/rpc/interceptors/auth_test.go new file mode 100644 index 00000000..c82cebd3 --- /dev/null +++ b/go/rpc/interceptors/auth_test.go @@ -0,0 +1,95 @@ +package interceptors + +import ( + "context" + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/suite" + "go.uber.org/zap" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/metadata" + "google.golang.org/grpc/status" + + "github.com/crlssn/getstronger/go/pkg/jwt" + "github.com/crlssn/getstronger/go/pkg/pb/api/v1/apiv1connect" +) + +type authSuite struct { + suite.Suite + + jwt *jwt.Manager + interceptor *auth +} + +func TestAuthSuite(t *testing.T) { + suite.Run(t, new(authSuite)) +} + +func (s *authSuite) SetupSuite() { + s.jwt = jwt.NewManager([]byte("access-token"), []byte("refresh-token")) + s.interceptor = NewAuth(zap.NewExample(), s.jwt).(*auth) +} + +func (s *authSuite) TestAuthorise() { + type expected struct { + err error + } + + type test struct { + name string + ctx context.Context + method string + expected expected + } + + accessToken, err := s.jwt.CreateToken(uuid.NewString(), jwt.TokenTypeAccess) + s.Require().NoError(err) + + tests := []test{ + { + name: "ok_valid_access_token", + ctx: 
metadata.NewIncomingContext(context.Background(), metadata.Pairs("authorization", accessToken)), + method: apiv1connect.AuthServiceRefreshTokenProcedure, + expected: expected{ + err: nil, + }, + }, + { + name: "err_missing_metadata", + ctx: context.Background(), + method: apiv1connect.AuthServiceRefreshTokenProcedure, + expected: expected{ + err: status.Error(codes.Unauthenticated, "missing metadata"), + }, + }, + { + name: "err_missing_authorization_token", + ctx: metadata.NewIncomingContext(context.Background(), metadata.Pairs("key", "value")), + method: apiv1connect.AuthServiceRefreshTokenProcedure, + expected: expected{ + err: status.Error(codes.Unauthenticated, "authorization token is missing"), + }, + }, + { + name: "err_invalid_authorization_token", + ctx: metadata.NewIncomingContext(context.Background(), metadata.Pairs("authorization", "")), + method: apiv1connect.AuthServiceRefreshTokenProcedure, + expected: expected{ + err: status.Error(codes.Unauthenticated, "invalid authorization token"), + }, + }, + } + + for _, t := range tests { + s.Run(t.name, func() { + err = s.interceptor.authorize(t.ctx, t.method) + if t.expected.err == nil { + s.Require().Nil(err) + return + } + s.Require().NotNil(err) + s.Require().Equal(t.expected.err, err) + }) + } +} diff --git a/go/rpc/interceptors/interface.go b/go/rpc/interceptors/interface.go new file mode 100644 index 00000000..0ae513b2 --- /dev/null +++ b/go/rpc/interceptors/interface.go @@ -0,0 +1,9 @@ +package interceptors + +import ( + "connectrpc.com/connect" +) + +type Interceptor interface { + Unary() connect.UnaryInterceptorFunc +} diff --git a/go/rpc/interceptors/validator.go b/go/rpc/interceptors/validator.go new file mode 100644 index 00000000..71ef1bb3 --- /dev/null +++ b/go/rpc/interceptors/validator.go @@ -0,0 +1,47 @@ +package interceptors + +import ( + "context" + "errors" + + "connectrpc.com/connect" + "github.com/bufbuild/protovalidate-go" + "go.uber.org/zap" + "google.golang.org/protobuf/proto" +) + 
+type validator struct { + log *zap.Logger + validator *protovalidate.Validator +} + +func NewValidator(log *zap.Logger, v *protovalidate.Validator) Interceptor { + return &validator{ + log: log, + validator: v, + } +} + +func (v *validator) Unary() connect.UnaryInterceptorFunc { + return func(next connect.UnaryFunc) connect.UnaryFunc { + return func( + ctx context.Context, + req connect.AnyRequest, + ) (connect.AnyResponse, error) { + msg, ok := req.Any().(proto.Message) + if !ok { + v.log.Warn("request message is not a proto.Message") + return nil, connect.NewError(connect.CodeInvalidArgument, errors.New("request message is not a proto.Message")) + } + + if err := v.validator.Validate(msg); err != nil { + v.log.Warn("invalid request", zap.Error(err)) + return nil, connect.NewError(connect.CodeInvalidArgument, err) + } + + return next(ctx, req) + } + } +} + +var _ Interceptor = (*validator)(nil) diff --git a/go/rpc/module.go b/go/rpc/module.go new file mode 100644 index 00000000..9e25637b --- /dev/null +++ b/go/rpc/module.go @@ -0,0 +1,87 @@ +package rpc + +import ( + "context" + "net/http" + + "connectrpc.com/connect" + "go.uber.org/fx" + "golang.org/x/net/http2" + "golang.org/x/net/http2/h2c" + + "github.com/crlssn/getstronger/go/pkg/pb/api/v1/apiv1connect" + "github.com/crlssn/getstronger/go/rpc/auth" + "github.com/crlssn/getstronger/go/rpc/interceptors" +) + +type Handler func(opts ...connect.HandlerOption) (string, http.Handler) + +const fxGroupInterceptors = `group:"interceptors"` + +func NewModule() fx.Option { + return fx.Options( + fx.Provide( + fx.Annotate( + interceptors.NewAuth, + fx.ResultTags(fxGroupInterceptors), + ), + fx.Annotate( + interceptors.NewValidator, + fx.ResultTags(fxGroupInterceptors), + ), + fx.Annotate( + newInterceptors, + fx.ParamTags(fxGroupInterceptors), + ), + newHandlers, + auth.NewHandler, + ), + fx.Invoke( + registerHandlers, + ), + ) +} + +func newInterceptors(interceptors []interceptors.Interceptor) []connect.HandlerOption { 
+ var opts []connect.HandlerOption + for _, i := range interceptors { + opts = append(opts, connect.WithInterceptors(i.Unary())) + } + return opts +} + +type Handlers struct { + fx.In + + Auth apiv1connect.AuthServiceHandler +} + +func newHandlers(p Handlers) []Handler { + return []Handler{ + func(options ...connect.HandlerOption) (string, http.Handler) { + return apiv1connect.NewAuthServiceHandler(p.Auth, options...) + }, + } +} + +func registerHandlers(lc fx.Lifecycle, handlers []Handler, options []connect.HandlerOption) { + mux := http.NewServeMux() + for _, h := range handlers { + path, handler := h(options...) + mux.Handle(path, handler) + } + + lc.Append(fx.Hook{ + OnStart: func(_ context.Context) error { + go func() { + if err := http.ListenAndServe(":8080", h2c.NewHandler(mux, &http2.Server{})); err != nil { + panic(err) + } + }() + return nil + }, + OnStop: func(_ context.Context) error { + return nil + }, + }) +} diff --git a/pkg/orm/boil_table_names.go b/pkg/orm/boil_table_names.go deleted file mode 100644 index 87933451..00000000 --- a/pkg/orm/boil_table_names.go +++ /dev/null @@ -1,10 +0,0 @@ -// Code generated by SQLBoiler 4.16.1 (https://github.com/volatiletech/sqlboiler). DO NOT EDIT. -// This file is meant to be re-generated in place and/or deleted at any time. 
- -package orm - -var TableNames = struct { - Users string -}{ - Users: "users", -} diff --git a/proto/api/v1/auth.proto b/proto/api/v1/auth.proto new file mode 100644 index 00000000..0d3babf6 --- /dev/null +++ b/proto/api/v1/auth.proto @@ -0,0 +1,39 @@ +syntax = "proto3"; + +package api.v1; + +import "api/v1/options.proto"; + +import "google/protobuf/timestamp.proto"; +import "buf/validate/validate.proto"; + +message SignupRequest { + string email = 1 [(buf.validate.field).string.email = true]; + string password = 2 [(buf.validate.field).string.min_len = 6]; +} + +message SignupResponse {} + +message LoginRequest { + string email = 1 [(buf.validate.field).string.email = true]; + string password = 2 [(buf.validate.field).string.min_len = 1]; +} + +message LoginResponse { + string access_token = 1; + string refresh_token = 2; +} + +message RefreshTokenRequest { + string refresh_token = 1 [(buf.validate.field).string.len = 223]; +} + +message RefreshTokenResponse { + string access_token = 1; +} + +service AuthService { + rpc Signup(SignupRequest) returns (SignupResponse) {} + rpc Login(LoginRequest) returns (LoginResponse) {} + rpc RefreshToken(RefreshTokenRequest) returns (RefreshTokenResponse) {} +} diff --git a/proto/api/v1/options.proto b/proto/api/v1/options.proto new file mode 100644 index 00000000..3fcc416a --- /dev/null +++ b/proto/api/v1/options.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package api.v1; + +import "google/protobuf/descriptor.proto"; + +extend google.protobuf.MethodOptions { + bool requires_auth = 50001; +} diff --git a/smoke/auth.login.sh b/smoke/auth.login.sh new file mode 100644 index 00000000..a93e32c5 --- /dev/null +++ b/smoke/auth.login.sh @@ -0,0 +1,4 @@ +curl \ + --header "Content-Type: application/json" \ + --data '{"email": "hello@crlssn.com", "password": "123456"}' \ + http://localhost:8080/api.v1.AuthService/Login diff --git a/smoke/auth.refresh_token.sh b/smoke/auth.refresh_token.sh new file mode 100644 index 00000000..d34de1e7 
--- /dev/null +++ b/smoke/auth.refresh_token.sh @@ -0,0 +1,4 @@ +curl \ + --header "Content-Type: application/json" \ + --data '{"refresh_token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VyX2lkIjoiMzMxNDllNDUtNzY4Ny00YTllLTliMTMtYzUxNWM5N2YwNGM0Iiwic3ViIjoicmVmcmVzaF90b2tlbiIsImV4cCI6MTczMTc1NzU2MywiaWF0IjoxNzI5MTY1NTYzfQ.WJAkz_WIqqR7GIz3LqREQSmQVDx9ViYFvn20rPtAhXE"}' \ + http://localhost:8080/api.v1.AuthService/RefreshToken diff --git a/smoke/auth.signup.sh b/smoke/auth.signup.sh new file mode 100644 index 00000000..2d834e68 --- /dev/null +++ b/smoke/auth.signup.sh @@ -0,0 +1,5 @@ +curl \ + --header "Content-Type: application/json" \ + --data '{"email": "hello@crlssn.com", "password": "123456"}' \ + http://localhost:8080/api.v1.AuthService/Signup +