mirror of https://github.com/pressly/goose.git
wip
parent
992628d83a
commit
7c5f69e1bf
5
Makefile
5
Makefile
|
@ -128,3 +128,8 @@ docker-turso:
|
|||
-p $(DB_TURSO_PORT):8080 \
|
||||
-l goose_test \
|
||||
ghcr.io/tursodatabase/libsql-server:v0.22.10
|
||||
|
||||
.PHONY: generate
generate:
	# Regenerate the pinned digest of the postgres migration fixtures.
	# Note: paths after the `cd` are relative to ./internal/testdata, and the
	# file list must be sorted so the digest is deterministic (matches the
	# go:generate directive and check_digest script).
	cd ./internal/testdata && \
	find migrations/postgres -type f | sort | xargs sha256sum | sha256sum | cut -c 1-32 > ./migrations/postgres.sha256
|
||||
|
|
|
@ -0,0 +1,66 @@
|
|||
package gooseutil
|
||||
|
||||
import (
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/fs"
|
||||
"sort"
|
||||
)
|
||||
|
||||
// Digest is used to generate a unique identifier for the contents of a directory, which can be used
|
||||
// to determine if the contents of the directory have changed.
|
||||
//
|
||||
// The digest is generated by iterating over the files in the directory, skipping directories,
|
||||
// sorting them, and then hashing the contents of each file. The hash of each file is then hashed to
|
||||
// generate the final digest. The resulting digest is a sha256 hash truncated to 32 characters.
|
||||
//
|
||||
// It is adapted from the dirhash package. Ref:
|
||||
//
|
||||
// https://cs.opensource.google/go/x/mod/+/refs/tags/v0.17.0:sumdb/dirhash/hash.go;l=31-44
|
||||
//
|
||||
// The output of this function is equivalent to the following shell command:
|
||||
//
|
||||
// sha256sum $(find <dir> -type f | sort) | sha256sum | cut -c 1-32
|
||||
func Digest(fsys fs.FS, root string) (string, error) {
|
||||
var files []string
|
||||
err := fs.WalkDir(fsys, root, func(path string, d fs.DirEntry, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if d.IsDir() {
|
||||
if path != root {
|
||||
return fs.SkipDir
|
||||
}
|
||||
// Skip the root directory.
|
||||
return nil
|
||||
}
|
||||
files = append(files, path)
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if len(files) == 0 {
|
||||
return "", fmt.Errorf("no files found")
|
||||
}
|
||||
h := sha256.New()
|
||||
sort.Strings(files)
|
||||
for _, file := range files {
|
||||
f, err := fsys.Open(file)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
hf := sha256.New()
|
||||
_, err = io.Copy(hf, f)
|
||||
f.Close()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
fmt.Fprintf(h, "%x %s\n", hf.Sum(nil), file)
|
||||
fmt.Printf("%x %s\n", hf.Sum(nil), file)
|
||||
}
|
||||
digest := hex.EncodeToString(h.Sum(nil))[:32]
|
||||
return digest, nil
|
||||
}
|
|
@ -0,0 +1,8 @@
|
|||
package gooseutil
|
||||
|
||||
import "testing"
|
||||
|
||||
// TestDigest is currently an empty placeholder.
//
// TODO(review): no assertions yet — add coverage for Digest, e.g. via a
// testing/fstest.MapFS: deterministic 32-character output, a changed digest
// when file contents change, and an error for a directory with no files.
func TestDigest(t *testing.T) {
	t.Parallel()

}
|
|
@ -0,0 +1,8 @@
|
|||
package testdata

// Regenerates the checksum consumed by TestEmbededMigrations. The output file
// must be migrations/postgres.sha256 — the exact name the test reads.
//
// NOTE(review): `go generate` does not run directives through a shell, so the
// $(...) substitution and pipes below will not expand as written; this likely
// needs to be wrapped as `go:generate sh -c "..."` (or delegated to the
// Makefile `generate` target). Confirm before relying on it.
//
//go:generate sha256sum $(find ./migrations/postgres -type f | sort) | sha256sum | cut -c 1-32 > ./migrations/postgres.sha256

import "embed"

// EmbedMigrations holds the SQL migration fixtures used by the tests.
//
// NOTE(review): in embed patterns "**" is not a recursive glob — it matches a
// single path element, like "*" — so this pattern embeds exactly
// migrations/<dialect>/*.sql, one level deep.
//
//go:embed migrations/**/*.sql
var EmbedMigrations embed.FS
|
|
@ -0,0 +1,42 @@
|
|||
package testdata
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
"github.com/pressly/goose/v3/internal/gooseutil"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestEmbededMigrations(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
files, err := EmbedMigrations.ReadDir(".")
|
||||
require.NoError(t, err)
|
||||
require.Len(t, files, 1)
|
||||
require.Equal(t, "migrations", files[0].Name())
|
||||
|
||||
files, err = EmbedMigrations.ReadDir("migrations")
|
||||
require.NoError(t, err)
|
||||
got := make([]string, 0, len(files))
|
||||
for _, file := range files {
|
||||
got = append(got, file.Name())
|
||||
}
|
||||
require.ElementsMatch(t, []string{"postgres"}, got)
|
||||
|
||||
t.Run("postgres", func(t *testing.T) {
|
||||
expected, err := os.ReadFile("migrations/postgres.sha256")
|
||||
require.NoError(t, err)
|
||||
expected = bytes.TrimSpace(expected)
|
||||
files, err := EmbedMigrations.ReadDir("migrations/postgres")
|
||||
require.NoError(t, err)
|
||||
require.Len(t, files, 5)
|
||||
|
||||
digest, err := gooseutil.Digest(EmbedMigrations, "migrations/postgres")
|
||||
require.NoError(t, err)
|
||||
|
||||
require.Equal(t, string(expected), digest)
|
||||
})
|
||||
|
||||
}
|
|
@ -0,0 +1 @@
|
|||
3129d29e299423d908d4ad8e597675b1
|
|
@ -0,0 +1,10 @@
|
|||
-- +goose Up
|
||||
CREATE TABLE users (
|
||||
id INTEGER PRIMARY KEY,
|
||||
username TEXT NOT NULL,
|
||||
email TEXT NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
);
|
||||
|
||||
-- +goose Down
|
||||
DROP TABLE users;
|
|
@ -0,0 +1,14 @@
|
|||
-- +goose Up
|
||||
-- +goose StatementBegin
|
||||
CREATE TABLE posts (
|
||||
id INTEGER PRIMARY KEY,
|
||||
title TEXT NOT NULL,
|
||||
content TEXT NOT NULL,
|
||||
author_id INTEGER NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (author_id) REFERENCES users(id)
|
||||
);
|
||||
-- +goose StatementEnd
|
||||
|
||||
-- +goose Down
|
||||
DROP TABLE posts;
|
|
@ -0,0 +1,13 @@
|
|||
-- +goose Up
|
||||
CREATE TABLE comments (
|
||||
id INTEGER PRIMARY KEY,
|
||||
post_id INTEGER NOT NULL,
|
||||
user_id INTEGER NOT NULL,
|
||||
content TEXT NOT NULL,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (post_id) REFERENCES posts(id),
|
||||
FOREIGN KEY (user_id) REFERENCES users(id)
|
||||
);
|
||||
|
||||
-- +goose Down
|
||||
DROP TABLE comments;
|
|
@ -0,0 +1,23 @@
|
|||
-- +goose Up
|
||||
INSERT INTO users (id, username, email)
|
||||
VALUES
|
||||
(1, 'john_doe', 'john@example.com'),
|
||||
(2, 'jane_smith', 'jane@example.com'),
|
||||
(3, 'alice_wonderland', 'alice@example.com');
|
||||
|
||||
INSERT INTO posts (id, title, content, author_id)
|
||||
VALUES
|
||||
(1, 'Introduction to SQL', 'SQL is a powerful language for managing databases...', 1),
|
||||
(2, 'Data Modeling Techniques', 'Choosing the right data model is crucial...', 2),
|
||||
(3, 'Advanced Query Optimization', 'Optimizing queries can greatly improve...', 1);
|
||||
|
||||
INSERT INTO comments (id, post_id, user_id, content)
|
||||
VALUES
|
||||
(1, 1, 3, 'Great introduction! Looking forward to more.'),
|
||||
(2, 1, 2, 'SQL can be a bit tricky at first, but practice helps.'),
|
||||
(3, 2, 1, 'You covered normalization really well in this post.');
|
||||
|
||||
-- +goose Down
|
||||
DELETE FROM comments;
|
||||
DELETE FROM posts;
|
||||
DELETE FROM users;
|
|
@ -0,0 +1,15 @@
|
|||
-- +goose NO TRANSACTION
|
||||
|
||||
-- +goose Up
|
||||
CREATE VIEW posts_view AS
|
||||
SELECT
|
||||
p.id,
|
||||
p.title,
|
||||
p.content,
|
||||
p.created_at,
|
||||
u.username AS author
|
||||
FROM posts p
|
||||
JOIN users u ON p.author_id = u.id;
|
||||
|
||||
-- +goose Down
|
||||
DROP VIEW posts_view;
|
|
@ -0,0 +1,32 @@
|
|||
#!/bin/bash

set -euo pipefail

# For each immediate subdirectory of the given directory, compute a digest of
# its files (sha256 of the per-file sha256 lines, truncated to 32 chars) and
# write it to <subdir-name>.sha256 inside the given directory.

# Check if the required argument is provided, a directory path
if [ $# -lt 1 ]; then
    echo "Usage: $0 <directory path>"
    exit 1
fi

dir_path="$1"

# Check if the directory path exists
if [ ! -d "$dir_path" ]; then
    echo "Error: $dir_path does not exist"
    exit 1
fi

# Calculate the hash of each directory in the specified directory
for dir in "$dir_path"/*; do
    if [ -d "$dir" ]; then
        # Run each iteration in a subshell so the cd does not leak into the
        # next iteration (a relative $dir_path would otherwise break after the
        # first cd under `set -e`).
        (
            cd "$dir_path"
            base="$(basename "$dir")"
            # Hash paths relative to dir_path so the digest is location-independent.
            all_files=$(find "$base" -type f | sort)
            # Word splitting of $all_files is intentional: one argument per file.
            # shellcheck disable=SC2086
            hashes=$(sha256sum $all_files)
            echo "$hashes"
            # Compute the digest once from the captured per-file hash lines.
            digest=$(echo "$hashes" | sha256sum | cut -c 1-32)
            echo ""
            echo "Hash of $dir: $digest"
            echo "$digest" > "$base.sha256"
            echo ""
        )
    fi
done
|
Loading…
Reference in New Issue