Merge branch 'externalVideos' into 'master'
Add external medias

See merge request !10

Commit 7bb09ed024

Makefile | 6
@@ -53,10 +53,14 @@ docker:
 	$(DOCKER_COMPOSE) up -d
 	sleep 4
 
-migration:
+migration-schema: docker
 	$(MIGRATION) -path $(MIGRATION_SCHEMA) up
+
+migration-dev-data: docker migration-schema
 	$(MIGRATION) -path $(MIGRATION_TEST_DATA) up
 
+migration: migration-schema migration-dev-data
+
 dev: docker migration watch
 
 clean:

sql/migration/0002_external_medias.down.sql | 4 (new file)
@@ -0,0 +1,4 @@
+DROP TABLE external_medias;
+DROP TYPE media_type;
+DROP TYPE media_category;
+DROP TYPE media_source;

sql/migration/0002_external_medias.up.sql | 15 (new file)
@@ -0,0 +1,15 @@
+CREATE TYPE media_type AS ENUM ('movie', 'show');
+CREATE TYPE media_category AS ENUM ('trending', 'popular', 'anticipated', 'box_office');
+CREATE TYPE media_source AS ENUM ('trakttv');
+
+CREATE TABLE external_medias (
+    id uuid PRIMARY KEY DEFAULT gen_random_uuid(),
+    type media_type NOT NULL,
+    source media_source NOT NULL,
+    category media_category NOT NULL,
+    ids text[][] NOT NULL,
+    LIKE base INCLUDING DEFAULTS
+);
+CREATE TRIGGER update_external_medias_updated_at BEFORE UPDATE ON external_medias FOR EACH ROW EXECUTE PROCEDURE update_updated_at_column();
+CREATE INDEX ON external_medias (type, source, category);
+

src/internal/external_medias/external_medias.go | 62 (new file)
@@ -0,0 +1,62 @@
+package extmedias
+
+import (
+	"github.com/jmoiron/sqlx"
+	"gitlab.quimbo.fr/odwrtw/canape-sql/src/internal/sqly"
+)
+
+const (
+	addExternalMediaQuery    = `INSERT INTO external_medias (type, source, category, ids) VALUES ($1, $2, $3, $4) RETURNING id;`
+	updateExternalMediaQuery = `UPDATE external_medias SET type=:type, source=:source, category=:category, ids=:ids WHERE id=:id RETURNING *;`
+
+	deleteExternalMediaQuery = `DELETE FROM external_medias WHERE id=:id;`
+	getExternalMediaQuery    = `SELECT * FROM external_medias WHERE type=$1 AND source=$2 AND category=$3 LIMIT 1;`
+)
+
+// Media represents an external media
+type Media struct {
+	sqly.BaseModel
+	Type     string           `db:"type"`
+	Source   string           `db:"source"`
+	Category string           `db:"category"`
+	IDs      sqly.StringSlice `db:"ids"`
+}
+
+// Add adds the Media in the database
+func (m *Media) Add(q sqlx.Queryer) error {
+	var id string
+	err := q.QueryRowx(addExternalMediaQuery, m.Type, m.Source, m.Category, m.IDs).Scan(&id)
+	if err != nil {
+		return err
+	}
+	m.ID = id
+	return nil
+}
+
+// Update ids only updates the IDs of the media
+func (m *Media) Update(ex *sqlx.DB) error {
+	rows, err := ex.NamedQuery(updateExternalMediaQuery, m)
+	if err != nil {
+		return err
+	}
+	for rows.Next() {
+		rows.StructScan(m)
+	}
+	return nil
+}
+
+// Delete the media from database or raise an error
+func (m *Media) Delete(ex *sqlx.DB) error {
+	_, err := ex.NamedExec(deleteExternalMediaQuery, m)
+	return err
+}
+
+// Get gets a media
+func Get(q sqlx.Queryer, mtype, msrc, mcat string) (*Media, error) {
+	m := &Media{}
+	if err := q.QueryRowx(getExternalMediaQuery, mtype, msrc, mcat).StructScan(m); err != nil {
+		return nil, err
+	}
+
+	return m, nil
+}
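
A minimal usage sketch, not part of this merge request: a hypothetical caller looks the cached list up with Get and falls back to Add when the row does not exist yet. The lib/pq driver, the POSTGRES_DSN environment variable and the placeholder IDs are assumptions made for this example, and the internal import paths mean such code could only live inside this repository.

package main

import (
	"database/sql"
	"log"
	"os"

	"github.com/jmoiron/sqlx"
	_ "github.com/lib/pq" // assumed driver; any registered postgres driver works

	extmedias "gitlab.quimbo.fr/odwrtw/canape-sql/src/internal/external_medias"
	"gitlab.quimbo.fr/odwrtw/canape-sql/src/internal/sqly"
)

func main() {
	// POSTGRES_DSN is assumed to point at a database with the migrations applied.
	db, err := sqlx.Connect("postgres", os.Getenv("POSTGRES_DSN"))
	if err != nil {
		log.Fatal(err)
	}

	// Look up the cached trending movies from trakttv.
	m, err := extmedias.Get(db, "movie", "trakttv", "trending")
	switch err {
	case nil:
		// Row already cached: refresh its IDs (placeholder values).
		m.IDs = sqly.StringSlice{"tt0111161", "tt0068646"}
		if err := m.Update(db); err != nil {
			log.Fatal(err)
		}
	case sql.ErrNoRows:
		// Nothing cached yet: create the row.
		m = &extmedias.Media{
			Type:     "movie",
			Source:   "trakttv",
			Category: "trending",
			IDs:      sqly.StringSlice{"tt0111161", "tt0068646"},
		}
		if err := m.Add(db); err != nil {
			log.Fatal(err)
		}
	default:
		log.Fatal(err)
	}

	log.Printf("external media %s now holds %d ids", m.ID, len(m.IDs))
}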

src/internal/external_medias/external_medias_test.go | 75 (new file)
@@ -0,0 +1,75 @@
+package extmedias
+
+import (
+	"database/sql"
+	"fmt"
+	"os"
+	"testing"
+
+	_ "github.com/mattes/migrate/driver/postgres"
+	"gitlab.quimbo.fr/odwrtw/canape-sql/src/internal/sqly"
+
+	"github.com/jmoiron/sqlx"
+)
+
+var db *sqlx.DB
+var pgdsn string
+
+func init() {
+	var err error
+
+	pgdsn = os.Getenv("POSTGRES_DSN")
+	db, err = sqlx.Connect("postgres", pgdsn)
+
+	if err != nil {
+		fmt.Printf("Unavailable PG tests:\n %v\n", err)
+		os.Exit(1)
+	}
+}
+
+func TestAddExternalMedias(t *testing.T) {
+	sqly.RunWithLastestMigration(db, pgdsn, t, func(db *sqlx.DB, t *testing.T) {
+		media := &Media{
+			Type:     "movie",
+			Source:   "trakttv",
+			Category: "trending",
+			IDs:      []string{"1", "2", "3"},
+		}
+
+		// Add it
+		if err := media.Add(db); err != nil {
+			t.Fatalf("failed to add external media: %q", err)
+		}
+
+		// Update the IDs
+		media.IDs = []string{"1", "2", "3", "4"}
+		if err := media.Update(db); err != nil {
+			t.Fatalf("failed to update the external media: %q", err)
+		}
+
+		// Find it
+		m, err := Get(db, media.Type, media.Source, media.Category)
+		if err != nil {
+			t.Fatalf("failed get the external media: %q", err)
+		}
+
+		// Small, almost useless check
+		if len(m.IDs) != 4 {
+			t.Fatalf("the media should have 4 ids, only %d found", len(m.IDs))
+		}
+
+		// Delete it
+		if err := media.Delete(db); err != nil {
+			t.Fatalf("failed to delete the external media: %q", err)
+		}
+
+		// Search it and expect that it's not found
+		m, err = Get(db, media.Type, media.Source, media.Category)
+		if err == nil {
+			t.Fatal("there should be an error, was the external media deleted ?")
+		}
+		if err != sql.ErrNoRows {
+			t.Fatalf("unexpected error: %q", err)
+		}
+	})
+}

@@ -1,7 +1,7 @@
 package sqly
 
 import (
-	"fmt"
+	"os"
 	"testing"
 	"time"
 
@@ -9,6 +9,15 @@ import (
 	"github.com/mattes/migrate/migrate"
 )
 
+var migrationPath string
+
+func init() {
+	migrationPath = os.Getenv("MIGRATION_PATH")
+	if migrationPath == "" {
+		migrationPath = "../../../sql/migration"
+	}
+}
+
 // BaseModel have to be embeded in all your struct which reflect a table
 type BaseModel struct {
 	ID string
@@ -19,9 +28,8 @@ type BaseModel struct {
 // RunWithLastestMigration runs your test with database migration set to the lastest
 func RunWithLastestMigration(db *sqlx.DB, pgdsn string, t *testing.T, test func(db *sqlx.DB, t *testing.T)) {
 	defer func() {
-		allErrors, ok := migrate.DownSync(pgdsn, "../sql")
+		allErrors, ok := migrate.DownSync(pgdsn, migrationPath)
 		if !ok {
-			fmt.Println("Oh no ...")
 			for _, err := range allErrors {
 				t.Log(err)
 				t.Fatal("We get some errors when reset the database schema")
@@ -29,9 +37,8 @@ func RunWithLastestMigration(db *sqlx.DB, pgdsn string, t *testing.T, test func(
 		}
 	}()
 
-	allErrors, ok := migrate.UpSync(pgdsn, "../sql")
+	allErrors, ok := migrate.UpSync(pgdsn, migrationPath)
 	if !ok {
-		fmt.Println("Oh no ...")
 		for _, err := range allErrors {
 			t.Log(err)
 			t.Fatal("Impossible to run test we get some errors when initialize the database schema")

src/internal/sqly/string_slice.go | 58 (new file)
@@ -0,0 +1,58 @@
+package sqly
+
+import (
+	"database/sql/driver"
+	"encoding/csv"
+	"errors"
+	"regexp"
+	"strings"
+)
+
+// This mainly comes from https://gist.github.com/adharris/4163702
+
+// StringSlice represents an array of string. The custom type is needed because
+// pq does not support slices yet..
+type StringSlice []string
+
+// for replacing escaped quotes except if it is preceded by a literal backslash
+// eg "\\" should translate to a quoted element whose value is \
+
+var quoteEscapeRegex = regexp.MustCompile(`([^\\]([\\]{2})*)\\"`)
+
+// Scan convert to a slice of strings
+// http://www.postgresql.org/docs/9.1/static/arrays.html#ARRAYS-IO
+func (s *StringSlice) Scan(src interface{}) error {
+	asBytes, ok := src.([]byte)
+	if !ok {
+		return error(errors.New("Scan source was not []bytes"))
+	}
+	str := string(asBytes)
+
+	// change quote escapes for csv parser
+	str = quoteEscapeRegex.ReplaceAllString(str, `$1""`)
+	str = strings.Replace(str, `\\`, `\`, -1)
+	// remove braces
+	str = str[1 : len(str)-1]
+	csvReader := csv.NewReader(strings.NewReader(str))
+
+	slice, err := csvReader.Read()
+
+	if err != nil {
+		return err
+	}
+
+	(*s) = StringSlice(slice)
+
+	return nil
+}
+
+// Value implements the Valuer interface
+func (s StringSlice) Value() (driver.Value, error) {
+	// string escapes.
+	// \ => \\\
+	// " => \"
+	for i, elem := range s {
+		s[i] = `"` + strings.Replace(strings.Replace(elem, `\`, `\\\`, -1), `"`, `\"`, -1) + `"`
+	}
+	return "{" + strings.Join(s, ",") + "}", nil
+}
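
To make the escaping comments above concrete, here is a small sketch of the array round-trip, not part of this merge request. It is written as a testable example that would have to live in a _test.go file inside package sqly; the sample values are made up.

package sqly

import "fmt"

// ExampleStringSlice shows how Value renders a Go slice as a Postgres array
// literal and how Scan parses the []byte form of such a literal back.
func ExampleStringSlice() {
	// Value quotes every element and escapes embedded quotes and backslashes.
	s := StringSlice{"1", "2", `say "hi"`}
	v, _ := s.Value()
	fmt.Println(v)

	// Scan is what database/sql calls when reading an ids column back.
	var out StringSlice
	if err := out.Scan([]byte(`{"a","b","c \"quoted\""}`)); err != nil {
		panic(err)
	}
	fmt.Println(out)

	// Output:
	// {"1","2","say \"hi\""}
	// [a b c "quoted"]
}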