feat: add S3 file storage support (#1688)

Weijie Zhao
2025-11-06 15:37:04 +08:00
committed by GitHub
parent 08525bcb4b
commit bc1368abcc
10 changed files with 505 additions and 9 deletions


@@ -246,6 +246,48 @@ jobs:
          chmod +x mage-static
          ./mage-static test:${{ matrix.test }}

  test-s3-integration:
    runs-on: ubuntu-latest
    needs:
      - mage
    services:
      test-minio:
        image: bitnamilegacy/minio:latest
        env:
          MINIO_ROOT_USER: vikunja
          MINIO_ROOT_PASSWORD: vikunjatest
          MINIO_DEFAULT_BUCKETS: vikunja-test
        ports:
          - 9000:9000
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
      - name: Download Mage Binary
        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5
        with:
          name: mage_bin
      - name: Set up Go
        uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6
        with:
          go-version: stable
      - name: test S3 file storage integration
        env:
          VIKUNJA_TESTS_USE_CONFIG: 1
          VIKUNJA_DATABASE_TYPE: sqlite
          VIKUNJA_FILES_TYPE: s3
          VIKUNJA_FILES_S3_ENDPOINT: http://localhost:9000
          VIKUNJA_FILES_S3_BUCKET: vikunja-test
          VIKUNJA_FILES_S3_REGION: us-east-1
          VIKUNJA_FILES_S3_ACCESSKEY: vikunja
          VIKUNJA_FILES_S3_SECRETKEY: vikunjatest
          VIKUNJA_FILES_S3_USEPATHSTYLE: true
          VIKUNJA_SERVICE_PUBLICURL: http://127.0.0.1:3456
        run: |
          mkdir -p frontend/dist
          touch frontend/dist/index.html
          chmod +x mage-static
          # Run only the S3 file storage integration tests
          ./mage-static test:filter "TestFileStorageIntegration"

  frontend-lint:
    runs-on: ubuntu-latest
    steps:
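The VIKUNJA_FILES_S3_* variables in this job line up with the files.s3.* keys introduced further down. Vikunja's configuration is viper-backed, and an environment mapping along these lines reproduces that behaviour (a standalone sketch, not the project's actual bootstrap code; only the key and variable names come from this commit):

package main

import (
	"fmt"
	"strings"

	"github.com/spf13/viper"
)

func main() {
	v := viper.New()
	// Variables are read as VIKUNJA_<KEY> with dots replaced by underscores,
	// so VIKUNJA_FILES_S3_ENDPOINT resolves the dotted key "files.s3.endpoint".
	v.SetEnvPrefix("vikunja")
	v.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
	v.AutomaticEnv()

	fmt.Println(v.GetString("files.s3.endpoint")) // http://localhost:9000 in the job above
}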


@@ -484,6 +484,47 @@
"key": "maxsize",
"default_value": "20MB",
"comment": "The maximum size of a file, as a human-readable string.\nWarning: The max size is limited 2^64-1 bytes due to the underlying datatype"
},
{
"key": "type",
"default_value": "local",
"comment": "The type of file storage backend. Supported values are `local` and `s3`."
},
{
"key": "s3",
"comment": "Configuration for S3 storage backend",
"children": [
{
"key": "endpoint",
"default_value": "",
"comment": "The S3 endpoint to use. Can be used with S3-compatible services like MinIO or Backblaze B2."
},
{
"key": "bucket",
"default_value": "",
"comment": "The name of the S3 bucket to store files in."
},
{
"key": "region",
"default_value": "",
"comment": "The S3 region where the bucket is located."
},
{
"key": "accesskey",
"default_value": "",
"comment": "The S3 access key ID."
},
{
"key": "secretkey",
"default_value": "",
"comment": "The S3 secret access key."
},
{
"key": "usepathstyle",
"default_value": "false",
"comment": "Whether to use path-style addressing (e.g., https://s3.amazonaws.com/bucket/key) instead of virtual-hosted-style (e.g., https://bucket.s3.amazonaws.com/key). This is commonly needed for self-hosted S3-compatible services. Some providers only support one style or the other."
}
]
}
]
},
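For reference, the two addressing styles named in the usepathstyle comment differ only in where the bucket appears in the URL. A quick illustration (bucket and key are made-up values):

package main

import "fmt"

func main() {
	bucket, key := "vikunja-test", "files/42"
	// Virtual-hosted-style: the bucket is part of the hostname.
	fmt.Printf("https://%s.s3.amazonaws.com/%s\n", bucket, key)
	// Path-style: the bucket is the first path segment. Self-hosted services
	// such as MinIO behind a single hostname typically need this form.
	fmt.Printf("https://s3.amazonaws.com/%s/%s\n", bucket, key)
}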

go.mod

@@ -90,6 +90,7 @@ require (
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 // indirect
github.com/KyleBanks/depth v1.2.1 // indirect
github.com/apapsch/go-jsonmerge/v2 v2.0.0 // indirect
github.com/aws/aws-sdk-go v1.55.8 // indirect
github.com/aymerick/douceur v0.2.0 // indirect
github.com/beevik/etree v1.1.0 // indirect
github.com/beorn7/perks v1.0.1 // indirect
@@ -102,6 +103,7 @@ require (
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/fatih/color v1.15.0 // indirect
github.com/fclairamb/afero-s3 v0.3.1 // indirect
github.com/fsnotify/fsnotify v1.9.0 // indirect
github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667 // indirect
github.com/go-chi/chi/v5 v5.2.2 // indirect
@@ -118,6 +120,7 @@ require (
github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc // indirect
github.com/huandu/go-clone v1.7.3 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
github.com/josharian/intern v1.0.0 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/laurent22/ical-go v0.1.1-0.20181107184520-7e5d6ade8eef // indirect

go.sum

@@ -33,6 +33,9 @@ github.com/arran4/golang-ical v0.3.2 h1:MGNjcXJFSuCXmYX/RpZhR2HDCYoFuK8vTPFLEdFC
github.com/arran4/golang-ical v0.3.2/go.mod h1:xblDGxxIUMWwFZk9dlECUlc1iXNV65LJZOTHLVwu8bo=
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so=
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw=
github.com/aws/aws-sdk-go v1.42.9/go.mod h1:585smgzpB/KqRA+K3y/NL/oYRqQvpNJYvLm+LY1U59Q=
github.com/aws/aws-sdk-go v1.55.8 h1:JRmEUbU52aJQZ2AjX4q4Wu7t4uZjOu71uyNmaWlUkJQ=
github.com/aws/aws-sdk-go v1.55.8/go.mod h1:ZkViS9AqA6otK+JBBNH2++sx1sgxrPKcSzPPvQkUtXk=
github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk=
github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
github.com/bbrks/go-blurhash v1.1.1 h1:uoXOxRPDca9zHYabUTwvS4KnY++KKUbwFo+Yxb8ME4M=
@@ -99,6 +102,8 @@ github.com/dustinkirkland/golang-petname v0.0.0-20240422154211-76c06c4bde6b h1:+
github.com/dustinkirkland/golang-petname v0.0.0-20240422154211-76c06c4bde6b/go.mod h1:8AuBTZBRSFqEYBPYULd+NN474/zZBLP+6WeT5S9xlAc=
github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs=
github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw=
github.com/fclairamb/afero-s3 v0.3.1 h1:JLxcl42wseOjKAdXfVkz7GoeyNRrvxkZ1jBshuDSDgA=
github.com/fclairamb/afero-s3 v0.3.1/go.mod h1:VZ/bvRox6Bq3U+vTGa12uyDu+5UJb40M7tpIXlByKkc=
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
@@ -306,6 +311,9 @@ github.com/jcmturner/rpc/v2 v2.0.3 h1:7FXXj8Ti1IaVFpSAziCZWNzbNuZmnvw/i6CqLNdWfZ
github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc=
github.com/jinzhu/copier v0.4.0 h1:w3ciUoD19shMCRargcpm0cm91ytaBhDvuRpz1ODO/U8=
github.com/jinzhu/copier v0.4.0/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg=
github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=
github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
@@ -327,6 +335,7 @@ github.com/kolaente/echo/v4 v4.0.0-20250124112709-682dfde74c31 h1:lUUZppO9AB30mf
github.com/kolaente/echo/v4 v4.0.0-20250124112709-682dfde74c31/go.mod h1:o90YNEeQWjDozo584l7AwhJMHN0bOC4tAfg+Xox9q5g=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
@@ -423,6 +432,7 @@ github.com/pkg/browser v0.0.0-20180916011732-0a3d74bf9ce4/go.mod h1:4OwLy04Bl9Ef
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
@@ -489,6 +499,7 @@ github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9yS
github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0=
github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8 h1:+jumHNA0Wrelhe64i8F6HNlS8pkoyMv5sreGx2Ry5Rw=
github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8/go.mod h1:3n1Cwaq1E1/1lhQhtRK2ts/ZwZEhjcQeJQ1RuC6Q/8U=
github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I=
github.com/spf13/afero v1.14.0 h1:9tH6MapGnn/j0eb0yIXiLjERO8RB6xIVZRDCX7PtqWA=
github.com/spf13/afero v1.14.0/go.mod h1:acJQ8t0ohCGuMN3O+Pv0V0hgMxNYDlvdk+VTfyZmbYo=
github.com/spf13/afero v1.15.0 h1:b/YBCLWAJdFWJTN9cLhiXXcD7mzKn9Dm86dNnfyQw1I=
@@ -646,6 +657,7 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v
golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM=
golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk=
golang.org/x/net v0.0.0-20210610132358-84b48f89b13b/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=


@@ -158,6 +158,15 @@ const (
FilesBasePath Key = `files.basepath`
FilesMaxSize Key = `files.maxsize`
FilesType Key = `files.type`
// S3 Configuration
FilesS3Endpoint Key = `files.s3.endpoint`
FilesS3Bucket Key = `files.s3.bucket`
FilesS3Region Key = `files.s3.region`
FilesS3AccessKey Key = `files.s3.accesskey`
FilesS3SecretKey Key = `files.s3.secretkey`
FilesS3UsePathStyle Key = `files.s3.usepathstyle`
MigrationTodoistEnable Key = `migration.todoist.enable`
MigrationTodoistClientID Key = `migration.todoist.clientid`
@@ -426,6 +435,14 @@ func InitDefaultConfig() {
// Files
FilesBasePath.setDefault("files")
FilesMaxSize.setDefault("20MB")
FilesType.setDefault("local")
// S3 Configuration
FilesS3Endpoint.setDefault("")
FilesS3Bucket.setDefault("")
FilesS3Region.setDefault("")
FilesS3AccessKey.setDefault("")
FilesS3SecretKey.setDefault("")
FilesS3UsePathStyle.setDefault(false)
// Cors
CorsEnable.setDefault(true)
CorsOrigins.setDefault([]string{"http://127.0.0.1:*", "http://localhost:*"})
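The new FilesS3* constants follow the existing pattern: a Key is a dotted viper path, and setDefault/GetString/GetBool are thin wrappers around viper. Roughly along these lines (a sketch of the pattern for orientation, not a copy of Vikunja's config package):

package config

import "github.com/spf13/viper"

// Key is a dotted configuration path such as "files.s3.endpoint".
type Key string

// setDefault registers the fallback used when neither the config file nor the
// environment provides a value.
func (k Key) setDefault(v interface{}) { viper.SetDefault(string(k), v) }

// GetString and GetBool return the resolved value, e.g. from a config.yml
// entry or from VIKUNJA_FILES_S3_USEPATHSTYLE.
func (k Key) GetString() string { return viper.GetString(string(k)) }
func (k Key) GetBool() bool     { return viper.GetBool(string(k)) }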


@@ -17,6 +17,8 @@
package files
import (
"errors"
"fmt"
"os"
"path/filepath"
"strings"
@@ -27,6 +29,10 @@ import (
"code.vikunja.io/api/pkg/log"
"code.vikunja.io/api/pkg/modules/keyvalue"
"github.com/aws/aws-sdk-go/aws" //nolint:staticcheck // afero-s3 still requires aws-sdk-go v1
"github.com/aws/aws-sdk-go/aws/credentials" //nolint:staticcheck // afero-s3 still requires aws-sdk-go v1
"github.com/aws/aws-sdk-go/aws/session" //nolint:staticcheck // afero-s3 still requires aws-sdk-go v1
s3 "github.com/fclairamb/afero-s3"
"github.com/spf13/afero"
"github.com/stretchr/testify/require"
)
@@ -35,7 +41,7 @@ import (
var fs afero.Fs
var afs *afero.Afero
func setDefaultLocalConfig() {
if !strings.HasPrefix(config.FilesBasePath.GetString(), "/") {
config.FilesBasePath.Set(filepath.Join(
config.ServiceRootpath.GetString(),
@@ -44,18 +50,73 @@ func setDefaultConfig() {
}
}
// initS3FileHandler initializes the S3 file backend
func initS3FileHandler() error {
// Get S3 configuration
endpoint := config.FilesS3Endpoint.GetString()
bucket := config.FilesS3Bucket.GetString()
region := config.FilesS3Region.GetString()
accessKey := config.FilesS3AccessKey.GetString()
secretKey := config.FilesS3SecretKey.GetString()
if endpoint == "" {
return errors.New("S3 endpoint is not configured. Please set files.s3.endpoint")
}
if bucket == "" {
return errors.New("S3 bucket is not configured. Please set files.s3.bucket")
}
if accessKey == "" {
return errors.New("S3 access key is not configured. Please set files.s3.accesskey")
}
if secretKey == "" {
return errors.New("S3 secret key is not configured. Please set files.s3.secretkey")
}
// Create AWS session for afero-s3
sess, err := session.NewSession(&aws.Config{
Region: aws.String(region),
Credentials: credentials.NewStaticCredentials(accessKey, secretKey, ""),
Endpoint: aws.String(endpoint),
S3ForcePathStyle: aws.Bool(config.FilesS3UsePathStyle.GetBool()),
})
if err != nil {
return fmt.Errorf("failed to create AWS session: %w", err)
}
// Initialize S3 filesystem using afero-s3
fs = s3.NewFs(bucket, sess)
afs = &afero.Afero{Fs: fs}
return nil
}
// initLocalFileHandler initializes the local filesystem backend
func initLocalFileHandler() {
fs = afero.NewOsFs()
afs = &afero.Afero{Fs: fs}
setDefaultLocalConfig()
}
// InitFileHandler creates a new file handler for the file backend we want to use
func InitFileHandler() error {
fileType := config.FilesType.GetString()
switch fileType {
case "s3":
return initS3FileHandler()
case "local":
initLocalFileHandler()
return nil
default:
return fmt.Errorf("invalid file storage type '%s': must be 'local' or 's3'", fileType)
}
}
// InitTestFileHandler initializes a new memory file system for testing
func InitTestFileHandler() {
fs = afero.NewMemMapFs()
afs = &afero.Afero{Fs: fs}
setDefaultLocalConfig()
}
func initFixtures(t *testing.T) {
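Because both backends end up behind the same afero.Afero handle, the rest of the files package stays backend-agnostic. A minimal sketch of that property using afero's WriteReader and Open helpers (the roundTrip helper is illustrative and not part of this commit):

package files

import (
	"bytes"
	"io"
)

// roundTrip writes a blob through the shared afero handle and reads it back.
// The same code path works whether afs wraps the OS filesystem (local backend)
// or the afero-s3 bucket created in initS3FileHandler.
func roundTrip(key string, content []byte) ([]byte, error) {
	if err := afs.WriteReader(key, bytes.NewReader(content)); err != nil {
		return nil, err
	}
	f, err := afs.Open(key)
	if err != nil {
		return nil, err
	}
	defer f.Close()
	return io.ReadAll(f)
}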


@@ -0,0 +1,297 @@
// Vikunja is a to-do list application to facilitate your life.
// Copyright 2018-present Vikunja and contributors. All rights reserved.
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
package files
import (
"bytes"
"io"
"os"
"testing"
"code.vikunja.io/api/pkg/config"
"code.vikunja.io/api/pkg/db"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestFileStorageIntegration tests end-to-end file storage and retrieval
// with S3/MinIO storage backend. This test specifically validates S3 functionality
// and will fail if S3 is not properly configured.
func TestFileStorageIntegration(t *testing.T) {
// Ensure S3 is configured for this test
if config.FilesType.GetString() != "s3" {
t.Skip("Skipping S3 integration tests - VIKUNJA_FILES_TYPE must be set to 's3'")
}
// Validate S3 configuration is present
if config.FilesS3Endpoint.GetString() == "" {
t.Fatal("S3 integration test requires VIKUNJA_FILES_S3_ENDPOINT to be set")
}
t.Run("Initialize file handler with s3", func(t *testing.T) {
err := InitFileHandler()
require.NoError(t, err, "Failed to initialize file handler with type: s3")
assert.NotNil(t, afs, "File system should be initialized")
})
t.Run("Create and retrieve file with s3", func(t *testing.T) {
db.LoadAndAssertFixtures(t)
// Test data
testContent := []byte("This is a test file for storage integration testing with s3")
testFileName := "integration-test-file.txt"
testAuth := &testauth{id: 1}
// Create file
fileReader := bytes.NewReader(testContent)
createdFile, err := Create(fileReader, testFileName, uint64(len(testContent)), testAuth)
require.NoError(t, err, "Failed to create file")
require.NotNil(t, createdFile, "Created file should not be nil")
assert.Positive(t, createdFile.ID, "File ID should be assigned")
assert.Equal(t, testFileName, createdFile.Name, "File name should match")
assert.Equal(t, uint64(len(testContent)), createdFile.Size, "File size should match")
assert.Equal(t, int64(1), createdFile.CreatedByID, "Creator ID should match")
// Load file metadata from database
loadedFile := &File{ID: createdFile.ID}
err = loadedFile.LoadFileMetaByID()
require.NoError(t, err, "Failed to load file metadata")
assert.Equal(t, testFileName, loadedFile.Name, "Loaded file name should match")
assert.Equal(t, uint64(len(testContent)), loadedFile.Size, "Loaded file size should match")
// Load and verify file content
err = loadedFile.LoadFileByID()
require.NoError(t, err, "Failed to load file content")
require.NotNil(t, loadedFile.File, "File handle should not be nil")
retrievedContent, err := io.ReadAll(loadedFile.File)
require.NoError(t, err, "Failed to read file content")
assert.Equal(t, testContent, retrievedContent, "Retrieved content should match original")
_ = loadedFile.File.Close()
// Verify file exists in storage
fileInfo, err := FileStat(loadedFile)
require.NoError(t, err, "File should exist in storage")
assert.NotNil(t, fileInfo, "File info should not be nil")
// Delete file
s := db.NewSession()
defer s.Close()
err = loadedFile.Delete(s)
require.NoError(t, err, "Failed to delete file")
// Verify file is deleted from storage
_, err = FileStat(loadedFile)
require.Error(t, err, "File should not exist after deletion")
assert.True(t, os.IsNotExist(err), "Error should indicate file does not exist")
})
t.Run("Create multiple files with s3", func(t *testing.T) {
db.LoadAndAssertFixtures(t)
testAuth := &testauth{id: 1}
fileIDs := make([]int64, 0, 3)
// Create multiple files
for i := 1; i <= 3; i++ {
content := []byte("Test file content number " + string(rune('0'+i)))
fileName := "test-file-" + string(rune('0'+i)) + ".txt"
file, err := Create(bytes.NewReader(content), fileName, uint64(len(content)), testAuth)
require.NoError(t, err, "Failed to create file %d", i)
fileIDs = append(fileIDs, file.ID)
}
// Verify all files exist and can be retrieved
for i, fileID := range fileIDs {
file := &File{ID: fileID}
err := file.LoadFileByID()
require.NoError(t, err, "Failed to load file %d", i+1)
content, err := io.ReadAll(file.File)
require.NoError(t, err, "Failed to read file %d", i+1)
expectedContent := "Test file content number " + string(rune('0'+i+1))
assert.Equal(t, []byte(expectedContent), content, "Content should match for file %d", i+1)
_ = file.File.Close()
}
// Clean up: delete all files
s := db.NewSession()
defer s.Close()
for _, fileID := range fileIDs {
file := &File{ID: fileID}
err := file.Delete(s)
require.NoError(t, err, "Failed to delete file")
}
})
t.Run("Handle large file with s3", func(t *testing.T) {
db.LoadAndAssertFixtures(t)
testAuth := &testauth{id: 1}
// Create a 1MB file
largeContent := bytes.Repeat([]byte("X"), 1024*1024)
fileName := "large-test-file.bin"
file, err := Create(bytes.NewReader(largeContent), fileName, uint64(len(largeContent)), testAuth)
require.NoError(t, err, "Failed to create large file")
assert.Equal(t, uint64(len(largeContent)), file.Size, "File size should match")
// Retrieve and verify
loadedFile := &File{ID: file.ID}
err = loadedFile.LoadFileByID()
require.NoError(t, err, "Failed to load large file")
retrievedContent, err := io.ReadAll(loadedFile.File)
require.NoError(t, err, "Failed to read large file")
assert.Len(t, retrievedContent, len(largeContent), "Retrieved file size should match")
assert.Equal(t, largeContent, retrievedContent, "Large file content should match")
_ = loadedFile.File.Close()
// Clean up
s := db.NewSession()
defer s.Close()
err = loadedFile.Delete(s)
require.NoError(t, err, "Failed to delete large file")
})
t.Run("File not found with s3", func(t *testing.T) {
db.LoadAndAssertFixtures(t)
// Try to load a file that doesn't exist
nonExistentFile := &File{ID: 999999}
err := nonExistentFile.LoadFileByID()
require.Error(t, err, "Loading non-existent file should error")
assert.True(t, os.IsNotExist(err), "Error should indicate file does not exist")
// Try to load metadata for non-existent file
err = nonExistentFile.LoadFileMetaByID()
require.Error(t, err, "Loading metadata for non-existent file should error")
assert.True(t, IsErrFileDoesNotExist(err), "Error should be ErrFileDoesNotExist")
})
}
// TestInitFileHandler_S3Configuration tests S3 configuration validation
func TestInitFileHandler_S3Configuration(t *testing.T) {
// Save original config values
originalType := config.FilesType.GetString()
originalEndpoint := config.FilesS3Endpoint.GetString()
originalBucket := config.FilesS3Bucket.GetString()
originalRegion := config.FilesS3Region.GetString()
originalAccessKey := config.FilesS3AccessKey.GetString()
originalSecretKey := config.FilesS3SecretKey.GetString()
// Restore config after test
defer func() {
config.FilesType.Set(originalType)
config.FilesS3Endpoint.Set(originalEndpoint)
config.FilesS3Bucket.Set(originalBucket)
config.FilesS3Region.Set(originalRegion)
config.FilesS3AccessKey.Set(originalAccessKey)
config.FilesS3SecretKey.Set(originalSecretKey)
_ = InitFileHandler()
}()
t.Run("valid S3 configuration", func(t *testing.T) {
config.FilesType.Set("s3")
config.FilesS3Endpoint.Set("https://s3.amazonaws.com")
config.FilesS3Bucket.Set("test-bucket")
config.FilesS3Region.Set("us-east-1")
config.FilesS3AccessKey.Set("test-access-key")
config.FilesS3SecretKey.Set("test-secret-key")
// This should not return an error with valid configuration
err := InitFileHandler()
assert.NoError(t, err)
})
t.Run("missing S3 endpoint", func(t *testing.T) {
config.FilesType.Set("s3")
config.FilesS3Endpoint.Set("")
config.FilesS3Bucket.Set("test-bucket")
config.FilesS3AccessKey.Set("test-access-key")
config.FilesS3SecretKey.Set("test-secret-key")
// This should return an error for missing endpoint
err := InitFileHandler()
require.Error(t, err)
assert.Contains(t, err.Error(), "endpoint")
})
t.Run("missing S3 bucket", func(t *testing.T) {
config.FilesType.Set("s3")
config.FilesS3Endpoint.Set("https://s3.amazonaws.com")
config.FilesS3Bucket.Set("")
config.FilesS3AccessKey.Set("test-access-key")
config.FilesS3SecretKey.Set("test-secret-key")
// This should return an error for missing bucket
err := InitFileHandler()
require.Error(t, err)
assert.Contains(t, err.Error(), "bucket")
})
t.Run("missing S3 access key", func(t *testing.T) {
config.FilesType.Set("s3")
config.FilesS3Endpoint.Set("https://s3.amazonaws.com")
config.FilesS3Bucket.Set("test-bucket")
config.FilesS3AccessKey.Set("")
config.FilesS3SecretKey.Set("test-secret-key")
// This should return an error for missing access key
err := InitFileHandler()
require.Error(t, err)
assert.Contains(t, err.Error(), "access key")
})
t.Run("missing S3 secret key", func(t *testing.T) {
config.FilesType.Set("s3")
config.FilesS3Endpoint.Set("https://s3.amazonaws.com")
config.FilesS3Bucket.Set("test-bucket")
config.FilesS3AccessKey.Set("test-access-key")
config.FilesS3SecretKey.Set("")
// This should return an error for missing secret key
err := InitFileHandler()
require.Error(t, err)
assert.Contains(t, err.Error(), "secret key")
})
}
func TestInitFileHandler_LocalFilesystem(t *testing.T) {
// Save original config values
originalType := config.FilesType.GetString()
// Restore config after test
defer func() {
config.FilesType.Set(originalType)
}()
// Test with local filesystem
config.FilesType.Set("local")
// This should not return an error
err := InitFileHandler()
require.NoError(t, err)
// Verify that afs is initialized
assert.NotNil(t, afs)
}


@@ -80,7 +80,10 @@ func FullInitWithoutAsync() {
LightInit()
// Initialize the files handler
err := files.InitFileHandler()
if err != nil {
log.Fatalf("Could not init file handler: %s", err)
}
// Run the migrations
migration.Migrate(nil)
@@ -98,7 +101,7 @@ func FullInitWithoutAsync() {
ldap.InitializeLDAPConnection()
// Check all OpenID Connect providers at startup
_, err = openid.GetAllProviders()
if err != nil {
log.Errorf("Error initializing OpenID Connect providers: %s", err)
}


@@ -110,7 +110,10 @@ func Restore(filename string) error {
// Init the configFile again since the restored configuration is most likely different from the one before
initialize.LightInit()
initialize.InitEngines()
err = files.InitFileHandler()
if err != nil {
return fmt.Errorf("could not init file handler: %w", err)
}
///////
// Restore the db


@@ -18,9 +18,12 @@ package v1
import (
"errors"
"io"
"net/http"
"strconv"
"strings"
"code.vikunja.io/api/pkg/config"
"code.vikunja.io/api/pkg/db"
"code.vikunja.io/api/pkg/models"
auth2 "code.vikunja.io/api/pkg/modules/auth"
@@ -179,7 +182,21 @@ func GetTaskAttachment(c echo.Context) error {
_ = s.Rollback()
return handler.HandleHTTPError(err)
}
if config.FilesType.GetString() == "s3" {
// s3 files cannot use http.ServeContent as it requires a Seekable file
// Set response headers
c.Response().Header().Set("Content-Type", taskAttachment.File.Mime)
c.Response().Header().Set("Content-Disposition", "inline; filename=\""+taskAttachment.File.Name+"\"")
c.Response().Header().Set("Content-Length", strconv.FormatUint(taskAttachment.File.Size, 10))
c.Response().Header().Set("Last-Modified", taskAttachment.File.Created.UTC().Format(http.TimeFormat))
// Stream the file content directly to the response
_, err = io.Copy(c.Response().Writer, taskAttachment.File.File)
if err != nil {
return handler.HandleHTTPError(err)
}
} else {
http.ServeContent(c.Response(), c.Request(), taskAttachment.File.Name, taskAttachment.File.Created, taskAttachment.File.File)
}
return nil
}
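The branch above exists because http.ServeContent takes an io.ReadSeeker: it needs Seek to answer Range requests and to determine the content length, while, as the comment in the handler notes, the S3 file handle is effectively a one-way stream. A short sketch of the two paths (the helper names are illustrative):

package main

import (
	"io"
	"net/http"
	"time"
)

// serveSeekable is the local-file path: an *os.File satisfies io.ReadSeeker,
// so http.ServeContent can handle Range requests and conditional headers itself.
func serveSeekable(w http.ResponseWriter, r *http.Request, name string, modtime time.Time, f io.ReadSeeker) {
	http.ServeContent(w, r, name, modtime, f)
}

// serveStream mirrors the S3 branch: headers are set by hand and the object
// body is copied straight through to the client.
func serveStream(w http.ResponseWriter, name string, body io.Reader) error {
	w.Header().Set("Content-Disposition", `inline; filename="`+name+`"`)
	_, err := io.Copy(w, body)
	return err
}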