diff --git a/backend/cmd/server/main.go b/backend/cmd/server/main.go index eaf096f..d503edd 100644 --- a/backend/cmd/server/main.go +++ b/backend/cmd/server/main.go @@ -12,6 +12,7 @@ import ( "tanabata/backend/internal/db/postgres" "tanabata/backend/internal/handler" "tanabata/backend/internal/service" + "tanabata/backend/internal/storage" "tanabata/backend/migrations" ) @@ -43,9 +44,26 @@ func main() { migDB.Close() slog.Info("migrations applied") + // Storage + diskStorage, err := storage.NewDiskStorage( + cfg.FilesPath, + cfg.ThumbsCachePath, + cfg.ThumbWidth, cfg.ThumbHeight, + cfg.PreviewWidth, cfg.PreviewHeight, + ) + if err != nil { + slog.Error("failed to initialise storage", "err", err) + os.Exit(1) + } + // Repositories userRepo := postgres.NewUserRepo(pool) sessionRepo := postgres.NewSessionRepo(pool) + fileRepo := postgres.NewFileRepo(pool) + mimeRepo := postgres.NewMimeRepo(pool) + aclRepo := postgres.NewACLRepo(pool) + auditRepo := postgres.NewAuditRepo(pool) + transactor := postgres.NewTransactor(pool) // Services authSvc := service.NewAuthService( @@ -55,16 +73,28 @@ func main() { cfg.JWTAccessTTL, cfg.JWTRefreshTTL, ) + aclSvc := service.NewACLService(aclRepo) + auditSvc := service.NewAuditService(auditRepo) + fileSvc := service.NewFileService( + fileRepo, + mimeRepo, + diskStorage, + aclSvc, + auditSvc, + transactor, + cfg.ImportPath, + ) // Handlers authMiddleware := handler.NewAuthMiddleware(authSvc) authHandler := handler.NewAuthHandler(authSvc) + fileHandler := handler.NewFileHandler(fileSvc) - r := handler.NewRouter(authMiddleware, authHandler) + r := handler.NewRouter(authMiddleware, authHandler, fileHandler) slog.Info("starting server", "addr", cfg.ListenAddr) if err := r.Run(cfg.ListenAddr); err != nil { slog.Error("server error", "err", err) os.Exit(1) } -} +} \ No newline at end of file diff --git a/backend/internal/handler/file_handler.go b/backend/internal/handler/file_handler.go new file mode 100644 index 0000000..b810b67 --- 
/dev/null +++ b/backend/internal/handler/file_handler.go @@ -0,0 +1,755 @@ +package handler + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "strconv" + "strings" + "time" + + "github.com/gabriel-vasile/mimetype" + "github.com/gin-gonic/gin" + "github.com/google/uuid" + + "tanabata/backend/internal/domain" + "tanabata/backend/internal/service" +) + +// FileHandler handles all /files endpoints. +type FileHandler struct { + fileSvc *service.FileService +} + +// NewFileHandler creates a FileHandler. +func NewFileHandler(fileSvc *service.FileService) *FileHandler { + return &FileHandler{fileSvc: fileSvc} +} + +// --------------------------------------------------------------------------- +// Response types +// --------------------------------------------------------------------------- + +type tagJSON struct { + ID string `json:"id"` + Name string `json:"name"` + Notes *string `json:"notes"` + Color *string `json:"color"` + CategoryID *string `json:"category_id"` + CategoryName *string `json:"category_name"` + CategoryColor *string `json:"category_color"` + CreatorID int16 `json:"creator_id"` + CreatorName string `json:"creator_name"` + IsPublic bool `json:"is_public"` + CreatedAt string `json:"created_at"` +} + +type fileJSON struct { + ID string `json:"id"` + OriginalName *string `json:"original_name"` + MIMEType string `json:"mime_type"` + MIMEExtension string `json:"mime_extension"` + ContentDatetime string `json:"content_datetime"` + Notes *string `json:"notes"` + Metadata json.RawMessage `json:"metadata"` + EXIF json.RawMessage `json:"exif"` + PHash *int64 `json:"phash"` + CreatorID int16 `json:"creator_id"` + CreatorName string `json:"creator_name"` + IsPublic bool `json:"is_public"` + IsDeleted bool `json:"is_deleted"` + CreatedAt string `json:"created_at"` + Tags []tagJSON `json:"tags"` +} + +func toTagJSON(t domain.Tag) tagJSON { + j := tagJSON{ + ID: t.ID.String(), + Name: t.Name, + Notes: t.Notes, + Color: t.Color, + CategoryName: t.CategoryName, 
+ CategoryColor: t.CategoryColor, + CreatorID: t.CreatorID, + CreatorName: t.CreatorName, + IsPublic: t.IsPublic, + CreatedAt: t.CreatedAt.Format(time.RFC3339), + } + if t.CategoryID != nil { + s := t.CategoryID.String() + j.CategoryID = &s + } + return j +} + +func toFileJSON(f domain.File) fileJSON { + tags := make([]tagJSON, len(f.Tags)) + for i, t := range f.Tags { + tags[i] = toTagJSON(t) + } + exif := f.EXIF + if exif == nil { + exif = json.RawMessage("{}") + } + return fileJSON{ + ID: f.ID.String(), + OriginalName: f.OriginalName, + MIMEType: f.MIMEType, + MIMEExtension: f.MIMEExtension, + ContentDatetime: f.ContentDatetime.Format(time.RFC3339), + Notes: f.Notes, + Metadata: f.Metadata, + EXIF: exif, + PHash: f.PHash, + CreatorID: f.CreatorID, + CreatorName: f.CreatorName, + IsPublic: f.IsPublic, + IsDeleted: f.IsDeleted, + CreatedAt: f.CreatedAt.Format(time.RFC3339), + Tags: tags, + } +} + +// --------------------------------------------------------------------------- +// Helper +// --------------------------------------------------------------------------- + +func parseFileID(c *gin.Context) (uuid.UUID, bool) { + id, err := uuid.Parse(c.Param("id")) + if err != nil { + respondError(c, domain.ErrValidation) + return uuid.UUID{}, false + } + return id, true +} + +// --------------------------------------------------------------------------- +// GET /files +// --------------------------------------------------------------------------- + +func (h *FileHandler) List(c *gin.Context) { + params := domain.FileListParams{ + Cursor: c.Query("cursor"), + Direction: c.DefaultQuery("direction", "forward"), + Sort: c.DefaultQuery("sort", "created"), + Order: c.DefaultQuery("order", "desc"), + Filter: c.Query("filter"), + Search: c.Query("search"), + } + + if limitStr := c.Query("limit"); limitStr != "" { + n, err := strconv.Atoi(limitStr) + if err != nil || n < 1 || n > 200 { + respondError(c, domain.ErrValidation) + return + } + params.Limit = n + } else { + 
params.Limit = 50 + } + + if anchorStr := c.Query("anchor"); anchorStr != "" { + id, err := uuid.Parse(anchorStr) + if err != nil { + respondError(c, domain.ErrValidation) + return + } + params.Anchor = &id + } + + if trashStr := c.Query("trash"); trashStr == "true" || trashStr == "1" { + params.Trash = true + } + + page, err := h.fileSvc.List(c.Request.Context(), params) + if err != nil { + respondError(c, err) + return + } + + items := make([]fileJSON, len(page.Items)) + for i, f := range page.Items { + items[i] = toFileJSON(f) + } + + respondJSON(c, http.StatusOK, gin.H{ + "items": items, + "next_cursor": page.NextCursor, + "prev_cursor": page.PrevCursor, + }) +} + +// --------------------------------------------------------------------------- +// POST /files (multipart upload) +// --------------------------------------------------------------------------- + +func (h *FileHandler) Upload(c *gin.Context) { + fh, err := c.FormFile("file") + if err != nil { + respondError(c, domain.ErrValidation) + return + } + + src, err := fh.Open() + if err != nil { + respondError(c, err) + return + } + defer src.Close() + + // Detect MIME from actual bytes (ignore client-supplied Content-Type). + mt, err := mimetype.DetectReader(src) + if err != nil { + respondError(c, err) + return + } + // Rewind by reopening — FormFile gives a multipart.File which supports Seek. 
 + 	if notes := c.PostForm("notes"); notes != "" { + 		params.Notes = &notes + 	}
*string `json:"original_name"` + ContentDatetime *string `json:"content_datetime"` + Notes *string `json:"notes"` + Metadata json.RawMessage `json:"metadata"` + IsPublic *bool `json:"is_public"` + } + if err := c.ShouldBindJSON(&body); err != nil { + respondError(c, domain.ErrValidation) + return + } + + params := service.UpdateParams{ + OriginalName: body.OriginalName, + Notes: body.Notes, + Metadata: body.Metadata, + IsPublic: body.IsPublic, + } + if body.ContentDatetime != nil { + t, err := time.Parse(time.RFC3339, *body.ContentDatetime) + if err != nil { + respondError(c, domain.ErrValidation) + return + } + params.ContentDatetime = &t + } + + f, err := h.fileSvc.Update(c.Request.Context(), id, params) + if err != nil { + respondError(c, err) + return + } + + respondJSON(c, http.StatusOK, toFileJSON(*f)) +} + +// --------------------------------------------------------------------------- +// DELETE /files/:id (soft-delete) +// --------------------------------------------------------------------------- + +func (h *FileHandler) SoftDelete(c *gin.Context) { + id, ok := parseFileID(c) + if !ok { + return + } + + if err := h.fileSvc.Delete(c.Request.Context(), id); err != nil { + respondError(c, err) + return + } + + c.Status(http.StatusNoContent) +} + +// --------------------------------------------------------------------------- +// GET /files/:id/content +// --------------------------------------------------------------------------- + +func (h *FileHandler) GetContent(c *gin.Context) { + id, ok := parseFileID(c) + if !ok { + return + } + + res, err := h.fileSvc.GetContent(c.Request.Context(), id) + if err != nil { + respondError(c, err) + return + } + defer res.Body.Close() + + c.Header("Content-Type", res.MIMEType) + if res.OriginalName != nil { + c.Header("Content-Disposition", + fmt.Sprintf("attachment; filename=%q", *res.OriginalName)) + } + c.Status(http.StatusOK) + io.Copy(c.Writer, res.Body) //nolint:errcheck +} + +// 
--------------------------------------------------------------------------- +// PUT /files/:id/content (replace) +// --------------------------------------------------------------------------- + +func (h *FileHandler) ReplaceContent(c *gin.Context) { + id, ok := parseFileID(c) + if !ok { + return + } + + fh, err := c.FormFile("file") + if err != nil { + respondError(c, domain.ErrValidation) + return + } + + src, err := fh.Open() + if err != nil { + respondError(c, err) + return + } + defer src.Close() + + mt, err := mimetype.DetectReader(src) + if err != nil { + respondError(c, err) + return + } + if _, err := src.Seek(0, io.SeekStart); err != nil { + respondError(c, err) + return + } + mimeStr := strings.SplitN(mt.String(), ";", 2)[0] + + name := fh.Filename + params := service.UploadParams{ + Reader: src, + MIMEType: mimeStr, + OriginalName: &name, + } + + f, err := h.fileSvc.Replace(c.Request.Context(), id, params) + if err != nil { + respondError(c, err) + return + } + + respondJSON(c, http.StatusOK, toFileJSON(*f)) +} + +// --------------------------------------------------------------------------- +// GET /files/:id/thumbnail +// --------------------------------------------------------------------------- + +func (h *FileHandler) GetThumbnail(c *gin.Context) { + id, ok := parseFileID(c) + if !ok { + return + } + + rc, err := h.fileSvc.GetThumbnail(c.Request.Context(), id) + if err != nil { + respondError(c, err) + return + } + defer rc.Close() + + c.Header("Content-Type", "image/jpeg") + c.Status(http.StatusOK) + io.Copy(c.Writer, rc) //nolint:errcheck +} + +// --------------------------------------------------------------------------- +// GET /files/:id/preview +// --------------------------------------------------------------------------- + +func (h *FileHandler) GetPreview(c *gin.Context) { + id, ok := parseFileID(c) + if !ok { + return + } + + rc, err := h.fileSvc.GetPreview(c.Request.Context(), id) + if err != nil { + respondError(c, err) + return + } + 
defer rc.Close() + + c.Header("Content-Type", "image/jpeg") + c.Status(http.StatusOK) + io.Copy(c.Writer, rc) //nolint:errcheck +} + +// --------------------------------------------------------------------------- +// POST /files/:id/restore +// --------------------------------------------------------------------------- + +func (h *FileHandler) Restore(c *gin.Context) { + id, ok := parseFileID(c) + if !ok { + return + } + + f, err := h.fileSvc.Restore(c.Request.Context(), id) + if err != nil { + respondError(c, err) + return + } + + respondJSON(c, http.StatusOK, toFileJSON(*f)) +} + +// --------------------------------------------------------------------------- +// DELETE /files/:id/permanent +// --------------------------------------------------------------------------- + +func (h *FileHandler) PermanentDelete(c *gin.Context) { + id, ok := parseFileID(c) + if !ok { + return + } + + if err := h.fileSvc.PermanentDelete(c.Request.Context(), id); err != nil { + respondError(c, err) + return + } + + c.Status(http.StatusNoContent) +} + +// --------------------------------------------------------------------------- +// GET /files/:id/tags +// --------------------------------------------------------------------------- + +func (h *FileHandler) ListTags(c *gin.Context) { + id, ok := parseFileID(c) + if !ok { + return + } + + tags, err := h.fileSvc.ListFileTags(c.Request.Context(), id) + if err != nil { + respondError(c, err) + return + } + + items := make([]tagJSON, len(tags)) + for i, t := range tags { + items[i] = toTagJSON(t) + } + respondJSON(c, http.StatusOK, items) +} + +// --------------------------------------------------------------------------- +// PUT /files/:id/tags (replace all) +// --------------------------------------------------------------------------- + +func (h *FileHandler) SetTags(c *gin.Context) { + id, ok := parseFileID(c) + if !ok { + return + } + + var body struct { + TagIDs []string `json:"tag_ids" binding:"required"` + } + if err := 
c.ShouldBindJSON(&body); err != nil { + respondError(c, domain.ErrValidation) + return + } + + tagIDs, err := parseUUIDs(body.TagIDs) + if err != nil { + respondError(c, domain.ErrValidation) + return + } + + tags, err := h.fileSvc.SetFileTags(c.Request.Context(), id, tagIDs) + if err != nil { + respondError(c, err) + return + } + + items := make([]tagJSON, len(tags)) + for i, t := range tags { + items[i] = toTagJSON(t) + } + respondJSON(c, http.StatusOK, items) +} + +// --------------------------------------------------------------------------- +// PUT /files/:id/tags/:tag_id +// --------------------------------------------------------------------------- + +func (h *FileHandler) AddTag(c *gin.Context) { + fileID, ok := parseFileID(c) + if !ok { + return + } + tagID, err := uuid.Parse(c.Param("tag_id")) + if err != nil { + respondError(c, domain.ErrValidation) + return + } + + tags, err := h.fileSvc.AddTag(c.Request.Context(), fileID, tagID) + if err != nil { + respondError(c, err) + return + } + + items := make([]tagJSON, len(tags)) + for i, t := range tags { + items[i] = toTagJSON(t) + } + respondJSON(c, http.StatusOK, items) +} + +// --------------------------------------------------------------------------- +// DELETE /files/:id/tags/:tag_id +// --------------------------------------------------------------------------- + +func (h *FileHandler) RemoveTag(c *gin.Context) { + fileID, ok := parseFileID(c) + if !ok { + return + } + tagID, err := uuid.Parse(c.Param("tag_id")) + if err != nil { + respondError(c, domain.ErrValidation) + return + } + + if err := h.fileSvc.RemoveTag(c.Request.Context(), fileID, tagID); err != nil { + respondError(c, err) + return + } + + c.Status(http.StatusNoContent) +} + +// --------------------------------------------------------------------------- +// POST /files/bulk/tags +// --------------------------------------------------------------------------- + +func (h *FileHandler) BulkSetTags(c *gin.Context) { + var body struct { + 
FileIDs []string `json:"file_ids" binding:"required"` + Action string `json:"action" binding:"required"` + TagIDs []string `json:"tag_ids" binding:"required"` + } + if err := c.ShouldBindJSON(&body); err != nil { + respondError(c, domain.ErrValidation) + return + } + if body.Action != "add" && body.Action != "remove" { + respondError(c, domain.ErrValidation) + return + } + + fileIDs, err := parseUUIDs(body.FileIDs) + if err != nil { + respondError(c, domain.ErrValidation) + return + } + tagIDs, err := parseUUIDs(body.TagIDs) + if err != nil { + respondError(c, domain.ErrValidation) + return + } + + applied, err := h.fileSvc.BulkSetTags(c.Request.Context(), fileIDs, body.Action, tagIDs) + if err != nil { + respondError(c, err) + return + } + + strs := make([]string, len(applied)) + for i, id := range applied { + strs[i] = id.String() + } + respondJSON(c, http.StatusOK, gin.H{"applied_tag_ids": strs}) +} + +// --------------------------------------------------------------------------- +// POST /files/bulk/delete +// --------------------------------------------------------------------------- + +func (h *FileHandler) BulkDelete(c *gin.Context) { + var body struct { + FileIDs []string `json:"file_ids" binding:"required"` + } + if err := c.ShouldBindJSON(&body); err != nil { + respondError(c, domain.ErrValidation) + return + } + + fileIDs, err := parseUUIDs(body.FileIDs) + if err != nil { + respondError(c, domain.ErrValidation) + return + } + + if err := h.fileSvc.BulkDelete(c.Request.Context(), fileIDs); err != nil { + respondError(c, err) + return + } + + c.Status(http.StatusNoContent) +} + +// --------------------------------------------------------------------------- +// POST /files/bulk/common-tags +// --------------------------------------------------------------------------- + +func (h *FileHandler) CommonTags(c *gin.Context) { + var body struct { + FileIDs []string `json:"file_ids" binding:"required"` + } + if err := c.ShouldBindJSON(&body); err != nil { + 
respondError(c, domain.ErrValidation) + return + } + + fileIDs, err := parseUUIDs(body.FileIDs) + if err != nil { + respondError(c, domain.ErrValidation) + return + } + + common, partial, err := h.fileSvc.CommonTags(c.Request.Context(), fileIDs) + if err != nil { + respondError(c, err) + return + } + + toStrs := func(ids []uuid.UUID) []string { + s := make([]string, len(ids)) + for i, id := range ids { + s[i] = id.String() + } + return s + } + + respondJSON(c, http.StatusOK, gin.H{ + "common_tag_ids": toStrs(common), + "partial_tag_ids": toStrs(partial), + }) +} + +// --------------------------------------------------------------------------- +// POST /files/import +// --------------------------------------------------------------------------- + +func (h *FileHandler) Import(c *gin.Context) { + var body struct { + Path string `json:"path"` + } + // Body is optional; ignore bind errors. + _ = c.ShouldBindJSON(&body) + + result, err := h.fileSvc.Import(c.Request.Context(), body.Path) + if err != nil { + respondError(c, err) + return + } + + respondJSON(c, http.StatusOK, result) +} + +// --------------------------------------------------------------------------- +// Helpers +// --------------------------------------------------------------------------- + +func parseUUIDs(strs []string) ([]uuid.UUID, error) { + ids := make([]uuid.UUID, 0, len(strs)) + for _, s := range strs { + id, err := uuid.Parse(s) + if err != nil { + return nil, err + } + ids = append(ids, id) + } + return ids, nil +} \ No newline at end of file diff --git a/backend/internal/handler/router.go b/backend/internal/handler/router.go index a4dd657..0128455 100644 --- a/backend/internal/handler/router.go +++ b/backend/internal/handler/router.go @@ -7,8 +7,7 @@ import ( ) // NewRouter builds and returns a configured Gin engine. -// Additional handlers will be added here as they are implemented. 
-func NewRouter(auth *AuthMiddleware, authHandler *AuthHandler) *gin.Engine { +func NewRouter(auth *AuthMiddleware, authHandler *AuthHandler, fileHandler *FileHandler) *gin.Engine { r := gin.New() r.Use(gin.Logger(), gin.Recovery()) @@ -33,5 +32,35 @@ func NewRouter(auth *AuthMiddleware, authHandler *AuthHandler) *gin.Engine { } } + // File endpoints — all require authentication. + files := v1.Group("/files", auth.Handle()) + { + files.GET("", fileHandler.List) + files.POST("", fileHandler.Upload) + + // Bulk routes must be registered before /:id to avoid ambiguity. + files.POST("/bulk/tags", fileHandler.BulkSetTags) + files.POST("/bulk/delete", fileHandler.BulkDelete) + files.POST("/bulk/common-tags", fileHandler.CommonTags) + files.POST("/import", fileHandler.Import) + + // Per-file routes. + files.GET("/:id", fileHandler.GetMeta) + files.PATCH("/:id", fileHandler.UpdateMeta) + files.DELETE("/:id", fileHandler.SoftDelete) + + files.GET("/:id/content", fileHandler.GetContent) + files.PUT("/:id/content", fileHandler.ReplaceContent) + files.GET("/:id/thumbnail", fileHandler.GetThumbnail) + files.GET("/:id/preview", fileHandler.GetPreview) + files.POST("/:id/restore", fileHandler.Restore) + files.DELETE("/:id/permanent", fileHandler.PermanentDelete) + + files.GET("/:id/tags", fileHandler.ListTags) + files.PUT("/:id/tags", fileHandler.SetTags) + files.PUT("/:id/tags/:tag_id", fileHandler.AddTag) + files.DELETE("/:id/tags/:tag_id", fileHandler.RemoveTag) + } + return r -} +} \ No newline at end of file diff --git a/backend/internal/service/file_service.go b/backend/internal/service/file_service.go index 5332e78..c414481 100644 --- a/backend/internal/service/file_service.go +++ b/backend/internal/service/file_service.go @@ -6,8 +6,11 @@ import ( "encoding/json" "fmt" "io" + "os" + "path/filepath" "time" + "github.com/gabriel-vasile/mimetype" "github.com/google/uuid" "github.com/rwcarlsen/goexif/exif" @@ -23,13 +26,14 @@ const fileObjectTypeID int16 = 1 // UploadParams 
holds the parameters for uploading a new file. type UploadParams struct { - Reader io.Reader - MIMEType string - OriginalName *string - Notes *string - Metadata json.RawMessage - IsPublic bool - TagIDs []uuid.UUID + Reader io.Reader + MIMEType string + OriginalName *string + Notes *string + Metadata json.RawMessage + ContentDatetime *time.Time + IsPublic bool + TagIDs []uuid.UUID } // UpdateParams holds the parameters for updating file metadata. @@ -42,14 +46,35 @@ type UpdateParams struct { TagIDs *[]uuid.UUID // nil means don't change tags } +// ContentResult holds the open reader and metadata for a file download. +type ContentResult struct { + Body io.ReadCloser + MIMEType string + OriginalName *string +} + +// ImportFileError records a failed file during an import operation. +type ImportFileError struct { + Filename string `json:"filename"` + Reason string `json:"reason"` +} + +// ImportResult summarises a directory import. +type ImportResult struct { + Imported int `json:"imported"` + Skipped int `json:"skipped"` + Errors []ImportFileError `json:"errors"` +} + // FileService handles business logic for file records. type FileService struct { - files port.FileRepo - mimes port.MimeRepo - storage port.FileStorage - acl *ACLService - audit *AuditService - tx port.Transactor + files port.FileRepo + mimes port.MimeRepo + storage port.FileStorage + acl *ACLService + audit *AuditService + tx port.Transactor + importPath string // default server-side import directory } // NewFileService creates a FileService. 
@@ -60,19 +85,26 @@ func NewFileService( acl *ACLService, audit *AuditService, tx port.Transactor, + importPath string, ) *FileService { return &FileService{ - files: files, - mimes: mimes, - storage: storage, - acl: acl, - audit: audit, - tx: tx, + files: files, + mimes: mimes, + storage: storage, + acl: acl, + audit: audit, + tx: tx, + importPath: importPath, } } +// --------------------------------------------------------------------------- +// Core CRUD +// --------------------------------------------------------------------------- + // Upload validates the MIME type, saves the file to storage, creates the DB // record, and applies any initial tags — all within a single transaction. +// If ContentDatetime is nil and EXIF DateTimeOriginal is present, it is used. func (s *FileService) Upload(ctx context.Context, p UploadParams) (*domain.File, error) { userID, _, _ := domain.UserFromContext(ctx) @@ -90,7 +122,15 @@ func (s *FileService) Upload(ctx context.Context, p UploadParams) (*domain.File, data := buf.Bytes() // Extract EXIF metadata (best-effort; non-image files will error silently). - exifData := extractEXIF(data) + exifData, exifDatetime := extractEXIFWithDatetime(data) + + // Resolve content datetime: explicit > EXIF > zero value. + var contentDatetime time.Time + if p.ContentDatetime != nil { + contentDatetime = *p.ContentDatetime + } else if exifDatetime != nil { + contentDatetime = *exifDatetime + } // Assign UUID v7 so CreatedAt can be derived from it later. 
fileID, err := uuid.NewV7() @@ -107,15 +147,16 @@ func (s *FileService) Upload(ctx context.Context, p UploadParams) (*domain.File, var created *domain.File txErr := s.tx.WithTx(ctx, func(ctx context.Context) error { f := &domain.File{ - ID: fileID, - OriginalName: p.OriginalName, - MIMEType: mime.Name, - MIMEExtension: mime.Extension, - Notes: p.Notes, - Metadata: p.Metadata, - EXIF: exifData, - CreatorID: userID, - IsPublic: p.IsPublic, + ID: fileID, + OriginalName: p.OriginalName, + MIMEType: mime.Name, + MIMEExtension: mime.Extension, + ContentDatetime: contentDatetime, + Notes: p.Notes, + Metadata: p.Metadata, + EXIF: exifData, + CreatorID: userID, + IsPublic: p.IsPublic, } var createErr error @@ -128,7 +169,6 @@ func (s *FileService) Upload(ctx context.Context, p UploadParams) (*domain.File, if err := s.files.SetTags(ctx, created.ID, p.TagIDs); err != nil { return err } - // Re-fetch to populate Tags on the returned value. tags, err := s.files.ListTags(ctx, created.ID) if err != nil { return err @@ -283,7 +323,7 @@ func (s *FileService) Restore(ctx context.Context, id uuid.UUID) (*domain.File, } // PermanentDelete removes the file record and its stored bytes. Only allowed -// when the file is already in trash. Restricted to admins and the creator. +// when the file is already in trash. func (s *FileService) PermanentDelete(ctx context.Context, id uuid.UUID) error { userID, isAdmin, _ := domain.UserFromContext(ctx) @@ -292,7 +332,7 @@ func (s *FileService) PermanentDelete(ctx context.Context, id uuid.UUID) error { return err } if !f.IsDeleted { - return domain.ErrValidation + return domain.ErrConflict } ok, err := s.acl.CanEdit(ctx, userID, isAdmin, f.CreatorID, fileObjectTypeID, id) @@ -313,9 +353,7 @@ func (s *FileService) PermanentDelete(ctx context.Context, id uuid.UUID) error { return nil } -// Replace swaps the stored bytes for a file with new content. The MIME type -// may change. 
Thumbnail/preview caches are not invalidated here — callers -// should handle that if needed. +// Replace swaps the stored bytes for a file with new content. func (s *FileService) Replace(ctx context.Context, id uuid.UUID, p UploadParams) (*domain.File, error) { userID, isAdmin, _ := domain.UserFromContext(ctx) @@ -342,9 +380,8 @@ func (s *FileService) Replace(ctx context.Context, id uuid.UUID, p UploadParams) return nil, fmt.Errorf("FileService.Replace: read body: %w", err) } data := buf.Bytes() - exifData := extractEXIF(data) + exifData, _ := extractEXIFWithDatetime(data) - // Save new bytes, overwriting the existing stored file. if _, err := s.storage.Save(ctx, id, bytes.NewReader(data)); err != nil { return nil, fmt.Errorf("FileService.Replace: save to storage: %w", err) } @@ -373,20 +410,350 @@ func (s *FileService) List(ctx context.Context, params domain.FileListParams) (* return s.files.List(ctx, params) } +// --------------------------------------------------------------------------- +// Content / thumbnail / preview streaming +// --------------------------------------------------------------------------- + +// GetContent opens the raw file for download, enforcing view ACL. +func (s *FileService) GetContent(ctx context.Context, id uuid.UUID) (*ContentResult, error) { + f, err := s.Get(ctx, id) // ACL checked inside Get + if err != nil { + return nil, err + } + rc, err := s.storage.Read(ctx, id) + if err != nil { + return nil, err + } + return &ContentResult{ + Body: rc, + MIMEType: f.MIMEType, + OriginalName: f.OriginalName, + }, nil +} + +// GetThumbnail returns the thumbnail JPEG, enforcing view ACL. +func (s *FileService) GetThumbnail(ctx context.Context, id uuid.UUID) (io.ReadCloser, error) { + if _, err := s.Get(ctx, id); err != nil { + return nil, err + } + return s.storage.Thumbnail(ctx, id) +} + +// GetPreview returns the preview JPEG, enforcing view ACL. 
+func (s *FileService) GetPreview(ctx context.Context, id uuid.UUID) (io.ReadCloser, error) { + if _, err := s.Get(ctx, id); err != nil { + return nil, err + } + return s.storage.Preview(ctx, id) +} + +// --------------------------------------------------------------------------- +// Tag operations +// --------------------------------------------------------------------------- + +// ListFileTags returns the tags on a file, enforcing view ACL. +func (s *FileService) ListFileTags(ctx context.Context, fileID uuid.UUID) ([]domain.Tag, error) { + if _, err := s.Get(ctx, fileID); err != nil { + return nil, err + } + return s.files.ListTags(ctx, fileID) +} + +// SetFileTags replaces all tags on a file (full replace semantics), enforcing edit ACL. +func (s *FileService) SetFileTags(ctx context.Context, fileID uuid.UUID, tagIDs []uuid.UUID) ([]domain.Tag, error) { + userID, isAdmin, _ := domain.UserFromContext(ctx) + + f, err := s.files.GetByID(ctx, fileID) + if err != nil { + return nil, err + } + ok, err := s.acl.CanEdit(ctx, userID, isAdmin, f.CreatorID, fileObjectTypeID, fileID) + if err != nil { + return nil, err + } + if !ok { + return nil, domain.ErrForbidden + } + + if err := s.files.SetTags(ctx, fileID, tagIDs); err != nil { + return nil, err + } + + objType := fileObjectType + _ = s.audit.Log(ctx, "file_tag_add", &objType, &fileID, nil) + return s.files.ListTags(ctx, fileID) +} + +// AddTag adds a single tag to a file, enforcing edit ACL. 
+func (s *FileService) AddTag(ctx context.Context, fileID, tagID uuid.UUID) ([]domain.Tag, error) { + userID, isAdmin, _ := domain.UserFromContext(ctx) + + f, err := s.files.GetByID(ctx, fileID) + if err != nil { + return nil, err + } + ok, err := s.acl.CanEdit(ctx, userID, isAdmin, f.CreatorID, fileObjectTypeID, fileID) + if err != nil { + return nil, err + } + if !ok { + return nil, domain.ErrForbidden + } + + current, err := s.files.ListTags(ctx, fileID) + if err != nil { + return nil, err + } + // Only add if not already present. + for _, t := range current { + if t.ID == tagID { + return current, nil + } + } + ids := make([]uuid.UUID, 0, len(current)+1) + for _, t := range current { + ids = append(ids, t.ID) + } + ids = append(ids, tagID) + + if err := s.files.SetTags(ctx, fileID, ids); err != nil { + return nil, err + } + + objType := fileObjectType + _ = s.audit.Log(ctx, "file_tag_add", &objType, &fileID, map[string]any{"tag_id": tagID}) + return s.files.ListTags(ctx, fileID) +} + +// RemoveTag removes a single tag from a file, enforcing edit ACL. 
+func (s *FileService) RemoveTag(ctx context.Context, fileID, tagID uuid.UUID) error { + userID, isAdmin, _ := domain.UserFromContext(ctx) + + f, err := s.files.GetByID(ctx, fileID) + if err != nil { + return err + } + ok, err := s.acl.CanEdit(ctx, userID, isAdmin, f.CreatorID, fileObjectTypeID, fileID) + if err != nil { + return err + } + if !ok { + return domain.ErrForbidden + } + + current, err := s.files.ListTags(ctx, fileID) + if err != nil { + return err + } + ids := make([]uuid.UUID, 0, len(current)) + for _, t := range current { + if t.ID != tagID { + ids = append(ids, t.ID) + } + } + + if err := s.files.SetTags(ctx, fileID, ids); err != nil { + return err + } + + objType := fileObjectType + _ = s.audit.Log(ctx, "file_tag_remove", &objType, &fileID, map[string]any{"tag_id": tagID}) + return nil +} + +// --------------------------------------------------------------------------- +// Bulk operations +// --------------------------------------------------------------------------- + +// BulkDelete soft-deletes multiple files. Files the caller cannot edit are silently skipped. +func (s *FileService) BulkDelete(ctx context.Context, fileIDs []uuid.UUID) error { + for _, id := range fileIDs { + if err := s.Delete(ctx, id); err != nil { + // Skip files not found or forbidden; surface real errors. + if err == domain.ErrNotFound || err == domain.ErrForbidden { + continue + } + return err + } + } + return nil +} + +// BulkSetTags adds or removes the given tags on multiple files. +// For "add": tags are appended to each file's existing set. +// For "remove": tags are removed from each file's existing set. +// Returns the tag IDs that were applied (the input tagIDs, for add). 
+func (s *FileService) BulkSetTags(ctx context.Context, fileIDs []uuid.UUID, action string, tagIDs []uuid.UUID) ([]uuid.UUID, error) { + for _, fileID := range fileIDs { + switch action { + case "add": + for _, tagID := range tagIDs { + if _, err := s.AddTag(ctx, fileID, tagID); err != nil { + if err == domain.ErrNotFound || err == domain.ErrForbidden { + continue + } + return nil, err + } + } + case "remove": + for _, tagID := range tagIDs { + if err := s.RemoveTag(ctx, fileID, tagID); err != nil { + if err == domain.ErrNotFound || err == domain.ErrForbidden { + continue + } + return nil, err + } + } + default: + return nil, domain.ErrValidation + } + } + if action == "add" { + return tagIDs, nil + } + return []uuid.UUID{}, nil +} + +// CommonTags loads the tag sets for all given files and splits them into: +// - common: tag IDs present on every file +// - partial: tag IDs present on some but not all files +func (s *FileService) CommonTags(ctx context.Context, fileIDs []uuid.UUID) (common, partial []uuid.UUID, err error) { + if len(fileIDs) == 0 { + return nil, nil, nil + } + + // Count how many files each tag appears on. + counts := map[uuid.UUID]int{} + for _, fid := range fileIDs { + tags, err := s.files.ListTags(ctx, fid) + if err != nil { + return nil, nil, err + } + for _, t := range tags { + counts[t.ID]++ + } + } + + n := len(fileIDs) + for id, cnt := range counts { + if cnt == n { + common = append(common, id) + } else { + partial = append(partial, id) + } + } + if common == nil { + common = []uuid.UUID{} + } + if partial == nil { + partial = []uuid.UUID{} + } + return common, partial, nil +} + +// --------------------------------------------------------------------------- +// Import +// --------------------------------------------------------------------------- + +// Import scans a server-side directory and uploads all supported files. +// If path is empty, the configured default import path is used. 
+func (s *FileService) Import(ctx context.Context, path string) (*ImportResult, error) { + dir := path + if dir == "" { + dir = s.importPath + } + if dir == "" { + return nil, domain.ErrValidation + } + + entries, err := os.ReadDir(dir) + if err != nil { + return nil, fmt.Errorf("FileService.Import: read dir %q: %w", dir, err) + } + + result := &ImportResult{Errors: []ImportFileError{}} + + for _, entry := range entries { + if entry.IsDir() { + result.Skipped++ + continue + } + + fullPath := filepath.Join(dir, entry.Name()) + + mt, err := mimetype.DetectFile(fullPath) + if err != nil { + result.Errors = append(result.Errors, ImportFileError{ + Filename: entry.Name(), + Reason: fmt.Sprintf("MIME detection failed: %s", err), + }) + continue + } + + mimeStr := mt.String() + // Strip parameters (e.g. "text/plain; charset=utf-8" → "text/plain"). + if idx := len(mimeStr); idx > 0 { + for i, c := range mimeStr { + if c == ';' { + mimeStr = mimeStr[:i] + break + } + } + } + + if _, err := s.mimes.GetByName(ctx, mimeStr); err != nil { + result.Skipped++ + continue + } + + f, err := os.Open(fullPath) + if err != nil { + result.Errors = append(result.Errors, ImportFileError{ + Filename: entry.Name(), + Reason: fmt.Sprintf("open failed: %s", err), + }) + continue + } + + name := entry.Name() + _, uploadErr := s.Upload(ctx, UploadParams{ + Reader: f, + MIMEType: mimeStr, + OriginalName: &name, + }) + f.Close() + + if uploadErr != nil { + result.Errors = append(result.Errors, ImportFileError{ + Filename: entry.Name(), + Reason: uploadErr.Error(), + }) + continue + } + result.Imported++ + } + + return result, nil +} + // --------------------------------------------------------------------------- // Internal helpers // --------------------------------------------------------------------------- -// extractEXIF attempts to parse EXIF data from raw bytes and marshal it to -// JSON. Returns nil on any error (non-image files, no EXIF header, etc.). 
-func extractEXIF(data []byte) json.RawMessage { +// extractEXIFWithDatetime parses EXIF from raw bytes, returning both the JSON +// representation and the DateTimeOriginal (if present). Both may be nil. +func extractEXIFWithDatetime(data []byte) (json.RawMessage, *time.Time) { x, err := exif.Decode(bytes.NewReader(data)) if err != nil { - return nil + return nil, nil } b, err := x.MarshalJSON() if err != nil { - return nil + return nil, nil } - return json.RawMessage(b) + var dt *time.Time + if t, err := x.DateTime(); err == nil { + dt = &t + } + return json.RawMessage(b), dt } \ No newline at end of file