From a6f84f16223f51f29552d5c072b88b7cc13af6af Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Gr=C3=A9goire=20Delattre?=
Date: Sun, 12 Jun 2016 10:51:26 +0200
Subject: [PATCH 1/3] Add external medias SQL schema

---
 Makefile                                    |  7 ++++++-
 sql/migration/0002_external_medias.down.sql |  4 ++++
 sql/migration/0002_external_medias.up.sql   | 15 +++++++++++++++
 3 files changed, 25 insertions(+), 1 deletion(-)
 create mode 100644 sql/migration/0002_external_medias.down.sql
 create mode 100644 sql/migration/0002_external_medias.up.sql

diff --git a/Makefile b/Makefile
index cd35989..ac84aa2 100644
--- a/Makefile
+++ b/Makefile
@@ -53,8 +53,13 @@ docker:
 	$(DOCKER_COMPOSE) up -d
 	sleep 4
 
-migration:
+migration-tool:
+	go get -v github.com/mattes/migrate
+
+migration-schema: docker migration-tool
 	$(MIGRATION) -path $(MIGRATION_SCHEMA) up
+
+migration-dev-data: docker migration-schema
 	$(MIGRATION) -path $(MIGRATION_TEST_DATA) up
 
 dev: docker migration watch
diff --git a/sql/migration/0002_external_medias.down.sql b/sql/migration/0002_external_medias.down.sql
new file mode 100644
index 0000000..e6dbbc4
--- /dev/null
+++ b/sql/migration/0002_external_medias.down.sql
@@ -0,0 +1,4 @@
+DROP TABLE external_medias;
+DROP TYPE media_type;
+DROP TYPE media_category;
+DROP TYPE media_source;
diff --git a/sql/migration/0002_external_medias.up.sql b/sql/migration/0002_external_medias.up.sql
new file mode 100644
index 0000000..6168db5
--- /dev/null
+++ b/sql/migration/0002_external_medias.up.sql
@@ -0,0 +1,15 @@
+CREATE TYPE media_type AS ENUM ('movie', 'show');
+CREATE TYPE media_category AS ENUM ('trending', 'popular', 'anticipated', 'box_office');
+CREATE TYPE media_source AS ENUM ('trakttv');
+
+CREATE TABLE external_medias (
+  id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
+  type media_type NOT NULL,
+  source media_source NOT NULL,
+  category media_category NOT NULL,
+  ids text[][] NOT NULL,
+  LIKE base INCLUDING DEFAULTS
+);
+CREATE TRIGGER update_external_medias_updated_at BEFORE UPDATE ON external_medias FOR EACH ROW EXECUTE PROCEDURE update_updated_at_column();
+CREATE INDEX ON external_medias (type, source, category);
+

From a2adc4ccf56fc779d506e820f82f653daf564e50 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Gr=C3=A9goire=20Delattre?=
Date: Sun, 12 Jun 2016 13:14:22 +0200
Subject: [PATCH 2/3] Add external medias package

---
 .../external_medias/external_medias.go      | 62 +++++++++++++++
 .../external_medias/external_medias_test.go | 75 +++++++++++++++++++
 src/internal/sqly/string_slice.go           | 58 ++++++++++++++
 3 files changed, 195 insertions(+)
 create mode 100644 src/internal/external_medias/external_medias.go
 create mode 100644 src/internal/external_medias/external_medias_test.go
 create mode 100644 src/internal/sqly/string_slice.go

diff --git a/src/internal/external_medias/external_medias.go b/src/internal/external_medias/external_medias.go
new file mode 100644
index 0000000..2cbe7ba
--- /dev/null
+++ b/src/internal/external_medias/external_medias.go
@@ -0,0 +1,62 @@
+package extmedias
+
+import (
+	"github.com/jmoiron/sqlx"
+	"gitlab.quimbo.fr/odwrtw/canape-sql/src/internal/sqly"
+)
+
+const (
+	addExternalMediaQuery    = `INSERT INTO external_medias (type, source, category, ids) VALUES ($1, $2, $3, $4) RETURNING id;`
+	updateExternalMediaQuery = `UPDATE external_medias SET type=:type, source=:source, category=:category, ids=:ids WHERE id=:id RETURNING *;`
+
+	deleteExternalMediaQuery = `DELETE FROM external_medias WHERE id=:id;`
+	getExternalMediaQuery    = `SELECT * FROM external_medias WHERE type=$1 AND source=$2 AND category=$3 LIMIT 1;`
+)
+
+// Media represents an external media
+type Media struct {
+	sqly.BaseModel
+	Type     string           `db:"type"`
+	Source   string           `db:"source"`
+	Category string           `db:"category"`
+	IDs      sqly.StringSlice `db:"ids"`
+}
+
+// Add adds the Media to the database
+func (m *Media) Add(q sqlx.Queryer) error {
+	var id string
+	err := q.QueryRowx(addExternalMediaQuery, m.Type, m.Source, m.Category, m.IDs).Scan(&id)
+	if err != nil {
+		return err
+	}
+	m.ID = id
+	return nil
+}
+
+// Update updates the media in the database
+func (m *Media) Update(ex *sqlx.DB) error {
+	rows, err := ex.NamedQuery(updateExternalMediaQuery, m)
+	if err != nil {
+		return err
+	}
+	for rows.Next() {
+		rows.StructScan(m)
+	}
+	return nil
+}
+
+// Delete deletes the media from the database
+func (m *Media) Delete(ex *sqlx.DB) error {
+	_, err := ex.NamedExec(deleteExternalMediaQuery, m)
+	return err
+}
+
+// Get gets a media
+func Get(q sqlx.Queryer, mtype, msrc, mcat string) (*Media, error) {
+	m := &Media{}
+	if err := q.QueryRowx(getExternalMediaQuery, mtype, msrc, mcat).StructScan(m); err != nil {
+		return nil, err
+	}
+
+	return m, nil
+}
diff --git a/src/internal/external_medias/external_medias_test.go b/src/internal/external_medias/external_medias_test.go
new file mode 100644
index 0000000..2651ba3
--- /dev/null
+++ b/src/internal/external_medias/external_medias_test.go
@@ -0,0 +1,75 @@
+package extmedias
+
+import (
+	"database/sql"
+	"fmt"
+	"os"
+	"testing"
+
+	_ "github.com/mattes/migrate/driver/postgres"
+	"gitlab.quimbo.fr/odwrtw/canape-sql/src/internal/sqly"
+
+	"github.com/jmoiron/sqlx"
+)
+
+var db *sqlx.DB
+var pgdsn string
+
+func init() {
+	var err error
+
+	pgdsn = os.Getenv("POSTGRES_DSN")
+	db, err = sqlx.Connect("postgres", pgdsn)
+
+	if err != nil {
+		fmt.Printf("Unavailable PG tests:\n %v\n", err)
+		os.Exit(1)
+	}
+}
+
+func TestAddExternalMedias(t *testing.T) {
+	sqly.RunWithLastestMigration(db, pgdsn, t, func(db *sqlx.DB, t *testing.T) {
+		media := &Media{
+			Type:     "movie",
+			Source:   "trakttv",
+			Category: "trending",
+			IDs:      []string{"1", "2", "3"},
+		}
+
+		// Add it
+		if err := media.Add(db); err != nil {
+			t.Fatalf("failed to add external media: %q", err)
+		}
+
+		// Update the IDs
+		media.IDs = []string{"1", "2", "3", "4"}
+		if err := media.Update(db); err != nil {
+			t.Fatalf("failed to update the external media: %q", err)
+		}
+
+		// Find it
+		m, err := Get(db, media.Type, media.Source, media.Category)
+		if err != nil {
+			t.Fatalf("failed to get the external media: %q", err)
+		}
+
+		// Small, almost useless check
+		if len(m.IDs) != 4 {
+			t.Fatalf("the media should have 4 ids, only %d found", len(m.IDs))
+		}
+
+		// Delete it
+		if err := media.Delete(db); err != nil {
+			t.Fatalf("failed to delete the external media: %q", err)
+		}
+
+		// Search it and expect that it's not found
+		m, err = Get(db, media.Type, media.Source, media.Category)
+		if err == nil {
+			t.Fatal("there should be an error, was the external media deleted?")
+		}
+		if err != sql.ErrNoRows {
+			t.Fatalf("unexpected error: %q", err)
+		}
+	})
+}
diff --git a/src/internal/sqly/string_slice.go b/src/internal/sqly/string_slice.go
new file mode 100644
index 0000000..14bada0
--- /dev/null
+++ b/src/internal/sqly/string_slice.go
@@ -0,0 +1,58 @@
+package sqly
+
+import (
+	"database/sql/driver"
+	"encoding/csv"
+	"errors"
+	"regexp"
+	"strings"
+)
+
+// This mainly comes from https://gist.github.com/adharris/4163702
+
+// StringSlice represents an array of strings. The custom type is needed because
+// pq does not support slices yet.
+type StringSlice []string
+
+// for replacing escaped quotes except if it is preceded by a literal backslash
+// eg "\\" should translate to a quoted element whose value is \
+
+var quoteEscapeRegex = regexp.MustCompile(`([^\\]([\\]{2})*)\\"`)
+
+// Scan converts to a slice of strings
+// http://www.postgresql.org/docs/9.1/static/arrays.html#ARRAYS-IO
+func (s *StringSlice) Scan(src interface{}) error {
+	asBytes, ok := src.([]byte)
+	if !ok {
+		return error(errors.New("Scan source was not []bytes"))
+	}
+	str := string(asBytes)
+
+	// change quote escapes for csv parser
+	str = quoteEscapeRegex.ReplaceAllString(str, `$1""`)
+	str = strings.Replace(str, `\\`, `\`, -1)
+	// remove braces
+	str = str[1 : len(str)-1]
+	csvReader := csv.NewReader(strings.NewReader(str))
+
+	slice, err := csvReader.Read()
+
+	if err != nil {
+		return err
+	}
+
+	(*s) = StringSlice(slice)
+
+	return nil
+}
+
+// Value implements the Valuer interface
+func (s StringSlice) Value() (driver.Value, error) {
+	// string escapes.
+	// \ => \\\
+	// " => \"
+	for i, elem := range s {
+		s[i] = `"` + strings.Replace(strings.Replace(elem, `\`, `\\\`, -1), `"`, `\"`, -1) + `"`
+	}
+	return "{" + strings.Join(s, ",") + "}", nil
+}

From f0cf5f9b2e880c42eb8d27a70ca6556203f5205a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Gr=C3=A9goire=20Delattre?=
Date: Tue, 14 Jun 2016 14:28:05 +0200
Subject: [PATCH 3/3] Update sqly to get the migration path from the env

---
 Makefile                  |  7 +++----
 src/internal/sqly/sqly.go | 17 ++++++++++++-----
 2 files changed, 15 insertions(+), 9 deletions(-)

diff --git a/Makefile b/Makefile
index ac84aa2..faff1ed 100644
--- a/Makefile
+++ b/Makefile
@@ -53,15 +53,14 @@ docker:
 	$(DOCKER_COMPOSE) up -d
 	sleep 4
 
-migration-tool:
-	go get -v github.com/mattes/migrate
-
-migration-schema: docker migration-tool
+migration-schema: docker
 	$(MIGRATION) -path $(MIGRATION_SCHEMA) up
 
 migration-dev-data: docker migration-schema
 	$(MIGRATION) -path $(MIGRATION_TEST_DATA) up
 
+migration: migration-schema migration-dev-data
+
 dev: docker migration watch
 
 clean:
diff --git a/src/internal/sqly/sqly.go b/src/internal/sqly/sqly.go
index 1584847..faee992 100644
--- a/src/internal/sqly/sqly.go
+++ b/src/internal/sqly/sqly.go
@@ -1,7 +1,7 @@
 package sqly
 
 import (
-	"fmt"
+	"os"
 	"testing"
 	"time"
 
@@ -9,6 +9,15 @@
 	"github.com/mattes/migrate/migrate"
 )
 
+var migrationPath string
+
+func init() {
+	migrationPath = os.Getenv("MIGRATION_PATH")
+	if migrationPath == "" {
+		migrationPath = "../../../sql/migration"
+	}
+}
+
 // BaseModel have to be embeded in all your struct which reflect a table
 type BaseModel struct {
 	ID string
@@ -19,9 +28,8 @@
 // RunWithLastestMigration runs your test with database migration set to the lastest
 func RunWithLastestMigration(db *sqlx.DB, pgdsn string, t *testing.T, test func(db *sqlx.DB, t *testing.T)) {
 	defer func() {
-		allErrors, ok := migrate.DownSync(pgdsn, "../sql")
+		allErrors, ok := migrate.DownSync(pgdsn, migrationPath)
 		if !ok {
-			fmt.Println("Oh no ...")
 			for _, err := range allErrors {
 				t.Log(err)
 				t.Fatal("We get some errors when reset the database schema")
@@ -29,9 +37,8 @@ func RunWithLastestMigration(
 		}
 	}()
 
-	allErrors, ok := migrate.UpSync(pgdsn, "../sql")
+	allErrors, ok := migrate.UpSync(pgdsn, migrationPath)
 	if !ok {
-		fmt.Println("Oh no ...")
 		for _, err := range allErrors {
 			t.Log(err)
 			t.Fatal("Impossible to run test we get some errors when initialize the database schema")