Add test coverage for filestore package
continuous-integration/drone/push Build is passing Details

This commit is contained in:
Rob Watson 2021-12-13 16:51:40 +01:00
parent 34681821e4
commit 4ef5dc4189
7 changed files with 543 additions and 0 deletions

View File

@ -7,6 +7,7 @@ import (
"net/url" "net/url"
"os" "os"
"path/filepath" "path/filepath"
"strings"
) )
// FileSystemStore is a file store that stores files on the local filesystem. // FileSystemStore is a file store that stores files on the local filesystem.
@ -25,6 +26,9 @@ func NewFileSystemStore(rootPath string, baseURL string) (*FileSystemStore, erro
if err != nil { if err != nil {
return nil, fmt.Errorf("error parsing URL: %v", err) return nil, fmt.Errorf("error parsing URL: %v", err)
} }
if !strings.HasSuffix(url.Path, "/") {
url.Path += "/"
}
return &FileSystemStore{rootPath: rootPath, baseURL: url}, nil return &FileSystemStore{rootPath: rootPath, baseURL: url}, nil
} }

View File

@ -0,0 +1,165 @@
package filestore_test
import (
"context"
"io/ioutil"
"os"
"path"
"strings"
"testing"
"git.netflux.io/rob/clipper/filestore"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestFileStoreGetObject verifies that GetObject returns a reader over the
// full contents of an object stored under the local testdata root.
func TestFileStoreGetObject(t *testing.T) {
	fileStore, err := filestore.NewFileSystemStore("testdata/", "/")
	require.NoError(t, err)

	obj, err := fileStore.GetObject(context.Background(), "file.txt")
	require.NoError(t, err)
	defer obj.Close()

	// NOTE(review): ioutil.ReadAll is deprecated since Go 1.16; switch to
	// io.ReadAll once "io" is added to this file's imports.
	got, err := ioutil.ReadAll(obj)
	require.NoError(t, err)
	assert.Equal(t, "hello world", string(got))
}
// TestFileStoreGetObjectWithRange exercises GetObjectWithRange for complete,
// partial, empty and invalid byte ranges against a fixture file containing
// "hello world".
func TestFileStoreGetObjectWithRange(t *testing.T) {
	testCases := []struct {
		name        string
		start, end  int64
		wantErr     string
		wantContent string
	}{
		{
			name:        "happy path, complete object",
			start:       0,
			end:         12,
			wantContent: "hello world",
		},
		{
			name:        "happy path, partial object",
			start:       6,
			end:         10,
			wantContent: "worl",
		},
		{
			name:        "empty range",
			start:       1,
			end:         1,
			wantContent: "",
		},
		{
			name:    "bad range",
			start:   -10,
			end:     0,
			wantErr: "error seeking in file: seek testdata/file.txt: invalid argument",
		},
	}
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			store, err := filestore.NewFileSystemStore("testdata/", "/")
			require.NoError(t, err)

			reader, err := store.GetObjectWithRange(context.Background(), "file.txt", tc.start, tc.end)
			if tc.wantErr == "" {
				// Check the error before deferring Close: if the call failed
				// unexpectedly, reader is nil and a deferred Close would panic
				// during the test's Goexit, masking the real failure.
				require.NoError(t, err)
				defer reader.Close()

				content, readErr := ioutil.ReadAll(reader)
				require.NoError(t, readErr)
				assert.Equal(t, tc.wantContent, string(content))
			} else {
				assert.EqualError(t, err, tc.wantErr)
			}
		})
	}
}
// TestFileStoreGetURL checks that GetURL joins the configured base URL and
// the object key correctly, with and without a trailing slash on the base.
func TestFileStoreGetURL(t *testing.T) {
	testCases := []struct {
		name    string
		baseURL string
		key     string
		wantURL string
	}{
		{
			name:    "URL with host and no path, no trailing slash",
			baseURL: "https://foo.example.com",
			key:     "bar",
			wantURL: "https://foo.example.com/bar",
		},
		{
			name:    "URL with host and no path, trailing slash",
			baseURL: "https://foo.example.com/",
			key:     "bar",
			wantURL: "https://foo.example.com/bar",
		},
		{
			name:    "URL with host and path, no trailing slash",
			baseURL: "https://foo.example.com/bar",
			key:     "baz",
			wantURL: "https://foo.example.com/bar/baz",
		},
		{
			name:    "URL with host and path, trailing slash",
			baseURL: "https://foo.example.com/bar/",
			key:     "baz",
			wantURL: "https://foo.example.com/bar/baz",
		},
	}

	for _, tt := range testCases {
		t.Run(tt.name, func(t *testing.T) {
			fileStore, err := filestore.NewFileSystemStore("testdata/", tt.baseURL)
			require.NoError(t, err)

			got, err := fileStore.GetURL(context.Background(), tt.key)
			require.NoError(t, err)
			assert.Equal(t, tt.wantURL, got)
		})
	}
}
// TestFileStorePutObject verifies that PutObject writes the object to disk
// under the store root (creating intermediate directories as needed) and
// reports the number of bytes written.
func TestFileStorePutObject(t *testing.T) {
	rootPath := t.TempDir()

	// The wantErr field previously declared here was never set or asserted;
	// it has been removed as dead code.
	testCases := []struct {
		name      string
		key       string
		content   string
		wantCount int64
	}{
		{
			name:      "happy path, no created directory",
			key:       "foo.txt",
			content:   "hello world",
			wantCount: 11,
		},
		{
			name:      "happy path, with sub-directories",
			key:       "foo/bar/baz.txt",
			content:   "hello world",
			wantCount: 11,
		},
	}
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			store, err := filestore.NewFileSystemStore(rootPath, "/")
			require.NoError(t, err)

			n, err := store.PutObject(context.Background(), tc.key, strings.NewReader(tc.content), "text/plain")
			require.NoError(t, err)

			// Read the file back from disk to confirm both the reported byte
			// count and the stored contents.
			content, err := os.ReadFile(path.Join(rootPath, tc.key))
			require.NoError(t, err)
			assert.Equal(t, tc.wantCount, n)
			assert.Equal(t, tc.content, string(content))
		})
	}
}

View File

@ -1,5 +1,8 @@
package filestore package filestore
//go:generate mockery --recursive --name S3Client --output ../generated/mocks
//go:generate mockery --recursive --name S3PresignClient --output ../generated/mocks
import ( import (
"bytes" "bytes"
"context" "context"

View File

@ -0,0 +1,156 @@
package filestore_test
import (
"context"
"errors"
"io"
"io/ioutil"
"strings"
"testing"
"time"
"git.netflux.io/rob/clipper/filestore"
"git.netflux.io/rob/clipper/generated/mocks"
signerv4 "github.com/aws/aws-sdk-go-v2/aws/signer/v4"
"github.com/aws/aws-sdk-go-v2/service/s3"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
"go.uber.org/zap"
)
// TestS3GetObject verifies that GetObject requests the expected bucket and
// key from S3 and streams the object body back unchanged.
func TestS3GetObject(t *testing.T) {
	const (
		bucket  = "some-bucket"
		key     = "foo/bar"
		content = "hello world"
	)

	var s3Client mocks.S3Client
	defer s3Client.AssertExpectations(t)

	s3Client.On("GetObject", mock.Anything, mock.MatchedBy(func(input *s3.GetObjectInput) bool {
		return *input.Bucket == bucket && *input.Key == key
	})).Return(&s3.GetObjectOutput{Body: io.NopCloser(strings.NewReader(content))}, nil)

	store := filestore.NewS3FileStore(filestore.S3API{S3Client: &s3Client}, bucket, time.Minute, zap.NewNop().Sugar())

	reader, err := store.GetObject(context.Background(), key)
	require.NoError(t, err)
	defer reader.Close()

	got, err := ioutil.ReadAll(reader)
	require.NoError(t, err)
	assert.Equal(t, content, string(got))
}
// TestS3GetObjectWithRange verifies that GetObjectWithRange translates the
// start/end offsets into an HTTP Range header on the S3 request.
func TestS3GetObjectWithRange(t *testing.T) {
	const (
		bucket  = "some-bucket"
		key     = "foo/bar"
		content = "hello world"
		start   = 256
		end     = 32_768
	)

	var s3Client mocks.S3Client
	defer s3Client.AssertExpectations(t)

	s3Client.On("GetObject", mock.Anything, mock.MatchedBy(func(input *s3.GetObjectInput) bool {
		return *input.Bucket == bucket && *input.Key == key && *input.Range == "bytes=256-32768"
	})).Return(&s3.GetObjectOutput{Body: io.NopCloser(strings.NewReader(content))}, nil)

	store := filestore.NewS3FileStore(filestore.S3API{S3Client: &s3Client}, bucket, time.Minute, zap.NewNop().Sugar())

	reader, err := store.GetObjectWithRange(context.Background(), key, start, end)
	require.NoError(t, err)
	defer reader.Close()

	got, err := ioutil.ReadAll(reader)
	require.NoError(t, err)
	assert.Equal(t, content, string(got))
}
// TestS3GetURL verifies that GetURL returns the presigned URL produced by
// the S3 presign client for the requested key.
func TestS3GetURL(t *testing.T) {
	const (
		bucket    = "some-bucket"
		key       = "foo/bar"
		urlExpiry = time.Minute * 10
		wantURL   = "https://foo.s3.example.com/foo/bar"
	)

	var presignClient mocks.S3PresignClient
	defer presignClient.AssertExpectations(t)

	presignClient.On("PresignGetObject", mock.Anything, mock.MatchedBy(func(input *s3.GetObjectInput) bool {
		return *input.Bucket == bucket && *input.Key == key
	}), mock.Anything).Return(&signerv4.PresignedHTTPRequest{URL: wantURL}, nil)

	store := filestore.NewS3FileStore(filestore.S3API{S3PresignClient: &presignClient}, bucket, urlExpiry, zap.NewNop().Sugar())

	got, err := store.GetURL(context.Background(), key)
	require.NoError(t, err)
	assert.Equal(t, wantURL, got)
}
// testReader is an io.Reader that produces exp total bytes of zero-valued
// data, returning io.EOF together with the read that exhausts it.
type testReader struct {
	count, exp int
}

// Read reports up to len(p) bytes as read, tracking the running total in
// r.count. (The local was renamed from "max", which shadows the Go 1.21
// builtin of the same name.)
func (r *testReader) Read(p []byte) (int, error) {
	remaining := r.exp - r.count
	if remaining > len(p) {
		r.count += len(p)
		return len(p), nil
	}
	// Final (possibly empty) chunk: signal EOF along with the data.
	r.count += remaining
	return remaining, io.EOF
}
// TestS3PutObject exercises the multipart upload flow: the happy path checks
// part sizing and the total byte count, while the failure path checks that a
// failed part upload aborts the multipart upload.
func TestS3PutObject(t *testing.T) {
	const (
		bucket        = "some-bucket"
		key           = "foo/bar"
		contentType   = "audio/mp3"
		contentLength = 20_000_000
	)
	uploadID := "abc123"

	// newMockS3Client returns a mock pre-configured to accept the
	// CreateMultipartUpload call that begins every upload.
	newMockS3Client := func() *mocks.S3Client {
		var s3Client mocks.S3Client
		s3Client.On("CreateMultipartUpload", mock.Anything, mock.MatchedBy(func(input *s3.CreateMultipartUploadInput) bool {
			return *input.Bucket == bucket && *input.Key == key && *input.ContentType == contentType
		})).Return(&s3.CreateMultipartUploadOutput{UploadId: &uploadID}, nil)
		return &s3Client
	}

	t.Run("OK", func(t *testing.T) {
		s3Client := newMockS3Client()
		defer s3Client.AssertExpectations(t)

		eTag := `"foo"`
		// Record the size of each uploaded part as a side effect of matching.
		var partLengths []int64
		s3Client.On("UploadPart", mock.Anything, mock.MatchedBy(func(input *s3.UploadPartInput) bool {
			partLengths = append(partLengths, input.ContentLength)
			return *input.Bucket == bucket && *input.Key == key && *input.UploadId == uploadID
		})).Return(&s3.UploadPartOutput{ETag: &eTag}, nil)
		s3Client.On("CompleteMultipartUpload", mock.Anything, mock.MatchedBy(func(input *s3.CompleteMultipartUploadInput) bool {
			return *input.Bucket == bucket && *input.Key == key && *input.UploadId == uploadID
		})).Return(nil, nil)

		store := filestore.NewS3FileStore(filestore.S3API{S3Client: s3Client}, bucket, time.Hour, zap.NewNop().Sugar())

		n, err := store.PutObject(context.Background(), key, &testReader{exp: contentLength}, contentType)
		require.NoError(t, err)
		assert.Equal(t, int64(contentLength), n)
		assert.Equal(t, []int64{5_242_880, 5_242_880, 5_242_880, 4_271_360}, partLengths)
	})

	t.Run("NOK,UploadPartFailure", func(t *testing.T) {
		s3Client := newMockS3Client()
		defer s3Client.AssertExpectations(t)

		s3Client.On("UploadPart", mock.Anything, mock.Anything).Return(nil, errors.New("boom"))
		s3Client.On("AbortMultipartUpload", mock.Anything, mock.MatchedBy(func(input *s3.AbortMultipartUploadInput) bool {
			return *input.Bucket == bucket && *input.Key == key && *input.UploadId == uploadID
		})).Return(nil, nil)

		store := filestore.NewS3FileStore(filestore.S3API{S3Client: s3Client}, bucket, time.Hour, zap.NewNop().Sugar())

		_, err := store.PutObject(context.Background(), key, &testReader{exp: contentLength}, contentType)
		assert.EqualError(t, err, "error while uploading part: boom")
	})
}

1
backend/filestore/testdata/file.txt vendored Normal file
View File

@ -0,0 +1 @@
hello world

View File

@ -0,0 +1,166 @@
// Code generated by mockery v2.9.4. DO NOT EDIT.
package mocks
import (
context "context"
mock "github.com/stretchr/testify/mock"
s3 "github.com/aws/aws-sdk-go-v2/service/s3"
)
// S3Client is an autogenerated mock type for the S3Client type
//
// NOTE(review): generated by mockery (see the //go:generate directive in the
// filestore package); regenerate rather than editing by hand.
type S3Client struct {
	mock.Mock
}
// AbortMultipartUpload provides a mock function with given fields: _a0, _a1, _a2
func (_m *S3Client) AbortMultipartUpload(_a0 context.Context, _a1 *s3.AbortMultipartUploadInput, _a2 ...func(*s3.Options)) (*s3.AbortMultipartUploadOutput, error) {
	// Flatten the variadic option funcs into the argument list so testify
	// can match expectations against every argument.
	_va := make([]interface{}, len(_a2))
	for _i := range _a2 {
		_va[_i] = _a2[_i]
	}
	var _ca []interface{}
	_ca = append(_ca, _a0, _a1)
	_ca = append(_ca, _va...)
	ret := _m.Called(_ca...)
	// First return value: computed by a caller-supplied function if the
	// expectation provided one, otherwise taken verbatim.
	var r0 *s3.AbortMultipartUploadOutput
	if rf, ok := ret.Get(0).(func(context.Context, *s3.AbortMultipartUploadInput, ...func(*s3.Options)) *s3.AbortMultipartUploadOutput); ok {
		r0 = rf(_a0, _a1, _a2...)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(*s3.AbortMultipartUploadOutput)
		}
	}
	// Second return value: the expectation's error, also optionally dynamic.
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context, *s3.AbortMultipartUploadInput, ...func(*s3.Options)) error); ok {
		r1 = rf(_a0, _a1, _a2...)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// CompleteMultipartUpload provides a mock function with given fields: _a0, _a1, _a2
func (_m *S3Client) CompleteMultipartUpload(_a0 context.Context, _a1 *s3.CompleteMultipartUploadInput, _a2 ...func(*s3.Options)) (*s3.CompleteMultipartUploadOutput, error) {
	// Flatten the variadic option funcs into the argument list so testify
	// can match expectations against every argument.
	_va := make([]interface{}, len(_a2))
	for _i := range _a2 {
		_va[_i] = _a2[_i]
	}
	var _ca []interface{}
	_ca = append(_ca, _a0, _a1)
	_ca = append(_ca, _va...)
	ret := _m.Called(_ca...)
	// First return value: computed by a caller-supplied function if the
	// expectation provided one, otherwise taken verbatim.
	var r0 *s3.CompleteMultipartUploadOutput
	if rf, ok := ret.Get(0).(func(context.Context, *s3.CompleteMultipartUploadInput, ...func(*s3.Options)) *s3.CompleteMultipartUploadOutput); ok {
		r0 = rf(_a0, _a1, _a2...)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(*s3.CompleteMultipartUploadOutput)
		}
	}
	// Second return value: the expectation's error, also optionally dynamic.
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context, *s3.CompleteMultipartUploadInput, ...func(*s3.Options)) error); ok {
		r1 = rf(_a0, _a1, _a2...)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// CreateMultipartUpload provides a mock function with given fields: _a0, _a1, _a2
func (_m *S3Client) CreateMultipartUpload(_a0 context.Context, _a1 *s3.CreateMultipartUploadInput, _a2 ...func(*s3.Options)) (*s3.CreateMultipartUploadOutput, error) {
	// Flatten the variadic option funcs into the argument list so testify
	// can match expectations against every argument.
	_va := make([]interface{}, len(_a2))
	for _i := range _a2 {
		_va[_i] = _a2[_i]
	}
	var _ca []interface{}
	_ca = append(_ca, _a0, _a1)
	_ca = append(_ca, _va...)
	ret := _m.Called(_ca...)
	// First return value: computed by a caller-supplied function if the
	// expectation provided one, otherwise taken verbatim.
	var r0 *s3.CreateMultipartUploadOutput
	if rf, ok := ret.Get(0).(func(context.Context, *s3.CreateMultipartUploadInput, ...func(*s3.Options)) *s3.CreateMultipartUploadOutput); ok {
		r0 = rf(_a0, _a1, _a2...)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(*s3.CreateMultipartUploadOutput)
		}
	}
	// Second return value: the expectation's error, also optionally dynamic.
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context, *s3.CreateMultipartUploadInput, ...func(*s3.Options)) error); ok {
		r1 = rf(_a0, _a1, _a2...)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// GetObject provides a mock function with given fields: _a0, _a1, _a2
func (_m *S3Client) GetObject(_a0 context.Context, _a1 *s3.GetObjectInput, _a2 ...func(*s3.Options)) (*s3.GetObjectOutput, error) {
	// Flatten the variadic option funcs into the argument list so testify
	// can match expectations against every argument.
	_va := make([]interface{}, len(_a2))
	for _i := range _a2 {
		_va[_i] = _a2[_i]
	}
	var _ca []interface{}
	_ca = append(_ca, _a0, _a1)
	_ca = append(_ca, _va...)
	ret := _m.Called(_ca...)
	// First return value: computed by a caller-supplied function if the
	// expectation provided one, otherwise taken verbatim.
	var r0 *s3.GetObjectOutput
	if rf, ok := ret.Get(0).(func(context.Context, *s3.GetObjectInput, ...func(*s3.Options)) *s3.GetObjectOutput); ok {
		r0 = rf(_a0, _a1, _a2...)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(*s3.GetObjectOutput)
		}
	}
	// Second return value: the expectation's error, also optionally dynamic.
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context, *s3.GetObjectInput, ...func(*s3.Options)) error); ok {
		r1 = rf(_a0, _a1, _a2...)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}
// UploadPart provides a mock function with given fields: _a0, _a1, _a2
func (_m *S3Client) UploadPart(_a0 context.Context, _a1 *s3.UploadPartInput, _a2 ...func(*s3.Options)) (*s3.UploadPartOutput, error) {
	// Flatten the variadic option funcs into the argument list so testify
	// can match expectations against every argument.
	_va := make([]interface{}, len(_a2))
	for _i := range _a2 {
		_va[_i] = _a2[_i]
	}
	var _ca []interface{}
	_ca = append(_ca, _a0, _a1)
	_ca = append(_ca, _va...)
	ret := _m.Called(_ca...)
	// First return value: computed by a caller-supplied function if the
	// expectation provided one, otherwise taken verbatim.
	var r0 *s3.UploadPartOutput
	if rf, ok := ret.Get(0).(func(context.Context, *s3.UploadPartInput, ...func(*s3.Options)) *s3.UploadPartOutput); ok {
		r0 = rf(_a0, _a1, _a2...)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(*s3.UploadPartOutput)
		}
	}
	// Second return value: the expectation's error, also optionally dynamic.
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context, *s3.UploadPartInput, ...func(*s3.Options)) error); ok {
		r1 = rf(_a0, _a1, _a2...)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}

View File

@ -0,0 +1,48 @@
// Code generated by mockery v2.9.4. DO NOT EDIT.
package mocks
import (
context "context"
mock "github.com/stretchr/testify/mock"
s3 "github.com/aws/aws-sdk-go-v2/service/s3"
v4 "github.com/aws/aws-sdk-go-v2/aws/signer/v4"
)
// S3PresignClient is an autogenerated mock type for the S3PresignClient type
//
// NOTE(review): generated by mockery (see the //go:generate directive in the
// filestore package); regenerate rather than editing by hand.
type S3PresignClient struct {
	mock.Mock
}
// PresignGetObject provides a mock function with given fields: _a0, _a1, _a2
func (_m *S3PresignClient) PresignGetObject(_a0 context.Context, _a1 *s3.GetObjectInput, _a2 ...func(*s3.PresignOptions)) (*v4.PresignedHTTPRequest, error) {
	// Flatten the variadic presign option funcs into the argument list so
	// testify can match expectations against every argument.
	_va := make([]interface{}, len(_a2))
	for _i := range _a2 {
		_va[_i] = _a2[_i]
	}
	var _ca []interface{}
	_ca = append(_ca, _a0, _a1)
	_ca = append(_ca, _va...)
	ret := _m.Called(_ca...)
	// First return value: computed by a caller-supplied function if the
	// expectation provided one, otherwise taken verbatim.
	var r0 *v4.PresignedHTTPRequest
	if rf, ok := ret.Get(0).(func(context.Context, *s3.GetObjectInput, ...func(*s3.PresignOptions)) *v4.PresignedHTTPRequest); ok {
		r0 = rf(_a0, _a1, _a2...)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(*v4.PresignedHTTPRequest)
		}
	}
	// Second return value: the expectation's error, also optionally dynamic.
	var r1 error
	if rf, ok := ret.Get(1).(func(context.Context, *s3.GetObjectInput, ...func(*s3.PresignOptions)) error); ok {
		r1 = rf(_a0, _a1, _a2...)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}