Add honeycomb completion heatmap and data migration framework

- Add completion_summary endpoint data to residence detail response
- Track completed_from_column on task completions (overdue/due_soon/upcoming)
- Add GetCompletionSummary repo method with monthly aggregation
- Add one-time data migration framework (data_migrations table + registry)
- Add backfill migration to classify historical completions
- Add standalone backfill script for manual/dry-run usage

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Trey t
2026-03-12 00:05:10 -05:00
parent 739b245ee6
commit 6803f6ec18
12 changed files with 958 additions and 21 deletions

View File

@@ -0,0 +1,94 @@
package database
import (
"sort"
"time"
"github.com/rs/zerolog/log"
"gorm.io/gorm"
)
// DataMigration tracks one-time data migrations that have been applied.
// One row is written per applied migration; the unique index on Name is
// what prevents a migration from ever running twice.
type DataMigration struct {
	ID        uint      `gorm:"primaryKey"`
	Name      string    `gorm:"column:name;uniqueIndex;size:255;not null"` // unique migration identifier, e.g. "20250601_backfill_completion_columns"
	AppliedAt time.Time `gorm:"column:applied_at;not null"`                // UTC time recorded when the migration's transaction committed
}

// TableName overrides GORM's default table name for DataMigration.
func (DataMigration) TableName() string {
	return "data_migrations"
}
// dataMigrationEntry pairs a name with its run function.
type dataMigrationEntry struct {
	Name string                  // sortable migration name (date-prefixed by convention)
	Fn   func(tx *gorm.DB) error // migration body; executed inside a transaction by RunDataMigrations
}

// registry holds all registered one-time data migrations, in order.
var registry []dataMigrationEntry

// RegisterDataMigration adds a one-time migration. Call from init() functions.
// Not safe for concurrent use: registration is expected to happen only during
// package initialization, before RunDataMigrations is invoked.
func RegisterDataMigration(name string, fn func(tx *gorm.DB) error) {
	registry = append(registry, dataMigrationEntry{Name: name, Fn: fn})
}
// RunDataMigrations creates the tracking table and executes any migrations
// that haven't been applied yet. Called once during server startup.
//
// Each migration runs in its own transaction together with the insert of its
// tracking row, so a failed migration is fully rolled back and retried on the
// next startup. Migrations execute in lexicographic name order, which is why
// names are date-prefixed (e.g. "20250601_...").
func RunDataMigrations() error {
	// No-op when the database has not been initialized.
	if db == nil {
		return nil
	}
	// Ensure the tracking table exists.
	if err := db.AutoMigrate(&DataMigration{}); err != nil {
		return err
	}
	if len(registry) == 0 {
		return nil
	}
	// Sort a copy for deterministic order; the original code sorted the
	// package-level registry in place, mutating global registration state.
	pending := make([]dataMigrationEntry, len(registry))
	copy(pending, registry)
	sort.Slice(pending, func(i, j int) bool {
		return pending[i].Name < pending[j].Name
	})
	// Load already-applied migrations.
	var applied []DataMigration
	if err := db.Find(&applied).Error; err != nil {
		return err
	}
	appliedSet := make(map[string]bool, len(applied))
	for _, m := range applied {
		appliedSet[m.Name] = true
	}
	// Run pending migrations.
	for _, entry := range pending {
		if appliedSet[entry.Name] {
			continue
		}
		log.Info().Str("migration", entry.Name).Msg("Running data migration")
		err := db.Transaction(func(tx *gorm.DB) error {
			if err := entry.Fn(tx); err != nil {
				return err
			}
			// Record the migration inside the same transaction so it is only
			// marked applied if it fully succeeded.
			return tx.Create(&DataMigration{
				Name:      entry.Name,
				AppliedAt: time.Now().UTC(),
			}).Error
		})
		if err != nil {
			log.Error().Err(err).Str("migration", entry.Name).Msg("Data migration failed")
			return err
		}
		log.Info().Str("migration", entry.Name).Msg("Data migration completed")
	}
	return nil
}

View File

@@ -188,6 +188,11 @@ func Migrate() error {
return fmt.Errorf("failed to run GoAdmin migrations: %w", err)
}
// Run one-time data migrations (backfills, etc.)
if err := RunDataMigrations(); err != nil {
return fmt.Errorf("failed to run data migrations: %w", err)
}
log.Info().Msg("Database migrations completed successfully")
return nil
}

View File

@@ -0,0 +1,206 @@
package database
import (
"sort"
"time"
"github.com/rs/zerolog/log"
"gorm.io/gorm"
)
// daysThreshold is the "due soon" window in days; matches the app default
// used when categorizing tasks into kanban columns.
const daysThreshold = 30

func init() {
	// Date-prefixed name keeps this ordered correctly in the migration registry.
	RegisterDataMigration("20250601_backfill_completion_columns", backfillCompletionColumns)
}
// backfillCompletionColumns classifies historical task completions into the
// kanban column the task was most likely in when it was completed
// ("overdue_tasks", "due_soon_tasks" or "upcoming_tasks"), based on each
// task's due date and recurrence interval. Completions whose task has no
// usable due date keep the default "completed_tasks" value. Runs once via
// the data migration registry.
func backfillCompletionColumns(tx *gorm.DB) error {
	// Only rows still holding the default/empty value need classification.
	type completionRow struct {
		ID          uint
		TaskID      uint
		CompletedAt time.Time
	}
	var completions []completionRow
	err := tx.Table("task_taskcompletion").
		Select("id, task_id, completed_at").
		Where("completed_from_column = ? OR completed_from_column IS NULL OR completed_from_column = ''", "completed_tasks").
		Order("task_id, completed_at ASC").
		Scan(&completions).Error
	if err != nil {
		return err
	}
	log.Info().Int("total_completions", len(completions)).Msg("Backfill: found completions to classify")
	if len(completions) == 0 {
		log.Info().Msg("Backfill: nothing to backfill")
		return nil
	}
	// Load all tasks referenced by these completions.
	taskIDs := make(map[uint]bool)
	for _, c := range completions {
		taskIDs[c.TaskID] = true
	}
	taskIDList := make([]uint, 0, len(taskIDs))
	for id := range taskIDs {
		taskIDList = append(taskIDList, id)
	}
	type taskRow struct {
		ID                 uint
		DueDate            *time.Time
		NextDueDate        *time.Time
		FrequencyID        *uint
		CustomIntervalDays *int
	}
	var tasks []taskRow
	// NOTE(review): taskIDList goes into a single IN clause; very large
	// datasets could hit SQLite's bound-variable limit — confirm scale.
	err = tx.Table("task_task").
		Select("id, due_date, next_due_date, frequency_id, custom_interval_days").
		Where("id IN ?", taskIDList).
		Scan(&tasks).Error
	if err != nil {
		return err
	}
	taskMap := make(map[uint]*taskRow, len(tasks))
	for i := range tasks {
		taskMap[tasks[i].ID] = &tasks[i]
	}
	// Load frequency definitions so recurring intervals can be resolved.
	type freqRow struct {
		ID   uint
		Name string
		Days *int
	}
	var frequencies []freqRow
	err = tx.Table("task_taskfrequency").Select("id, name, days").Scan(&frequencies).Error
	if err != nil {
		return err
	}
	freqMap := make(map[uint]*freqRow, len(frequencies))
	for i := range frequencies {
		freqMap[frequencies[i].ID] = &frequencies[i]
	}
	// Group completions by task and sort each group chronologically; the
	// recurring-task walk below replays completions in order.
	taskCompletions := make(map[uint][]completionRow)
	for _, c := range completions {
		taskCompletions[c.TaskID] = append(taskCompletions[c.TaskID], c)
	}
	for taskID := range taskCompletions {
		sort.Slice(taskCompletions[taskID], func(i, j int) bool {
			return taskCompletions[taskID][i].CompletedAt.Before(taskCompletions[taskID][j].CompletedAt)
		})
	}
	// Classify each completion.
	type updateEntry struct {
		completionID uint
		column       string
	}
	var updates []updateEntry
	stats := map[string]int{
		"overdue_tasks":  0,
		"due_soon_tasks": 0,
		"upcoming_tasks": 0,
		"no_due_date":    0,
	}
	for taskID, comps := range taskCompletions {
		task, ok := taskMap[taskID]
		if !ok {
			// Orphaned completion (task row missing); leave it untouched.
			continue
		}
		// Resolve the recurrence interval, if any. "Custom" frequencies use
		// the task's own interval; others use the frequency's day count.
		var intervalDays int
		if task.FrequencyID != nil {
			if freq, ok := freqMap[*task.FrequencyID]; ok {
				if freq.Name == "Custom" && task.CustomIntervalDays != nil {
					intervalDays = *task.CustomIntervalDays
				} else if freq.Days != nil {
					intervalDays = *freq.Days
				}
			}
		}
		isRecurring := intervalDays > 0
		if !isRecurring {
			effectiveDate := task.DueDate
			if effectiveDate == nil {
				// No due date: cannot classify, keep the default column.
				for _, c := range comps {
					stats["no_due_date"]++
					updates = append(updates, updateEntry{c.ID, "completed_tasks"})
				}
				continue
			}
			for _, c := range comps {
				column := classifyCompletion(c.CompletedAt, *effectiveDate, daysThreshold)
				stats[column]++
				updates = append(updates, updateEntry{c.ID, column})
			}
		} else {
			// For recurring tasks, estimate the due date in effect before the
			// first recorded completion, then roll it forward per completion.
			effectiveDate := task.DueDate
			if effectiveDate == nil && task.NextDueDate != nil {
				// Heuristic: step back one interval per recorded completion
				// from the task's current next-due date.
				est := task.NextDueDate.AddDate(0, 0, -intervalDays*len(comps))
				effectiveDate = &est
			}
			if effectiveDate == nil {
				for _, c := range comps {
					stats["no_due_date"]++
					updates = append(updates, updateEntry{c.ID, "completed_tasks"})
				}
				continue
			}
			currentDueDate := *effectiveDate
			for _, c := range comps {
				column := classifyCompletion(c.CompletedAt, currentDueDate, daysThreshold)
				stats[column]++
				updates = append(updates, updateEntry{c.ID, column})
				// The next cycle is due one interval after this completion.
				currentDueDate = c.CompletedAt.AddDate(0, 0, intervalDays)
			}
		}
	}
	log.Info().
		Int("overdue", stats["overdue_tasks"]).
		Int("due_soon", stats["due_soon_tasks"]).
		Int("upcoming", stats["upcoming_tasks"]).
		Int("no_due_date", stats["no_due_date"]).
		Int("total_updates", len(updates)).
		Msg("Backfill: classification results")
	// Apply updates in bulk, grouped by target column, instead of issuing one
	// UPDATE per completion row. ID lists are chunked to keep IN clauses small.
	idsByColumn := make(map[string][]uint)
	for _, u := range updates {
		idsByColumn[u.column] = append(idsByColumn[u.column], u.completionID)
	}
	const batchSize = 500
	for column, ids := range idsByColumn {
		for start := 0; start < len(ids); start += batchSize {
			end := start + batchSize
			if end > len(ids) {
				end = len(ids)
			}
			if err := tx.Table("task_taskcompletion").
				Where("id IN ?", ids[start:end]).
				Update("completed_from_column", column).Error; err != nil {
				return err
			}
		}
	}
	log.Info().Int("total_updated", len(updates)).Msg("Backfill: complete")
	return nil
}
// classifyCompletion determines what kanban column a task was in when completed.
func classifyCompletion(completedAt time.Time, dueDate time.Time, threshold int) string {
completedDay := time.Date(completedAt.Year(), completedAt.Month(), completedAt.Day(), 0, 0, 0, 0, time.UTC)
dueDay := time.Date(dueDate.Year(), dueDate.Month(), dueDate.Day(), 0, 0, 0, 0, time.UTC)
if completedDay.After(dueDay) {
return "overdue_tasks"
}
daysBefore := dueDay.Sub(completedDay).Hours() / 24
if daysBefore <= float64(threshold) {
return "due_soon_tasks"
}
return "upcoming_tasks"
}

View File

@@ -48,9 +48,10 @@ type ResidenceResponse struct {
PurchasePrice *decimal.Decimal `json:"purchase_price"`
IsPrimary bool `json:"is_primary"`
IsActive bool `json:"is_active"`
OverdueCount int `json:"overdue_count"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
OverdueCount int `json:"overdue_count"`
CompletionSummary *CompletionSummary `json:"completion_summary,omitempty"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
}
// TotalSummary represents summary statistics for all residences
@@ -114,6 +115,28 @@ type SharePackageResponse struct {
ExpiresAt *time.Time `json:"expires_at,omitempty"`
}
// ColumnCompletionCount represents completions from a specific kanban column.
type ColumnCompletionCount struct {
	Column string `json:"column"` // kanban column name, e.g. "overdue_tasks"
	Color  string `json:"color"`  // hex color for the column, e.g. "#FF3B30"
	Count  int    `json:"count"`  // number of completions recorded from this column
}

// MonthlyCompletionSummary represents completions for a single month.
type MonthlyCompletionSummary struct {
	Month       string                  `json:"month"` // "2025-04" format (YYYY-MM)
	Completions []ColumnCompletionCount `json:"completions"`
	Total       int                     `json:"total"`    // total completions in this month across all columns
	Overflow    int                     `json:"overflow"` // completions beyond the display cap
}

// CompletionSummary represents task completion data for the honeycomb grid.
type CompletionSummary struct {
	TotalAllTime      int                        `json:"total_all_time"`       // all completions ever recorded
	TotalLast12Months int                        `json:"total_last_12_months"` // completions within the 12-month window below
	Months            []MonthlyCompletionSummary `json:"months"`               // 12 entries, oldest month first
}
// === Factory Functions ===
// NewResidenceUserResponse creates a ResidenceUserResponse from a User model

View File

@@ -92,7 +92,8 @@ func (h *ResidenceHandler) GetResidence(c echo.Context) error {
return apperrors.BadRequest("error.invalid_residence_id")
}
response, err := h.residenceService.GetResidence(uint(residenceID), user.ID)
userNow := middleware.GetUserNow(c)
response, err := h.residenceService.GetResidence(uint(residenceID), user.ID, userNow)
if err != nil {
return err
}

View File

@@ -273,7 +273,8 @@ type TaskCompletion struct {
CompletedAt time.Time `gorm:"column:completed_at;not null" json:"completed_at"`
Notes string `gorm:"column:notes;type:text" json:"notes"`
ActualCost *decimal.Decimal `gorm:"column:actual_cost;type:decimal(10,2)" json:"actual_cost"`
Rating *int `gorm:"column:rating" json:"rating"` // 1-5 star rating
Rating *int `gorm:"column:rating" json:"rating"` // 1-5 star rating
CompletedFromColumn string `gorm:"column:completed_from_column;type:varchar(50);default:'completed_tasks'" json:"completed_from_column"`
// Multiple images support
Images []TaskCompletionImage `gorm:"foreignKey:CompletionID" json:"images,omitempty"`

View File

@@ -0,0 +1,174 @@
package repositories
import (
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/treytartt/honeydue-api/internal/models"
"github.com/treytartt/honeydue-api/internal/testutil"
)
// TestGetCompletionSummary_EmptyResidence verifies that a residence with no
// completions still yields twelve month buckets with zero totals.
func TestGetCompletionSummary_EmptyResidence(t *testing.T) {
	db := testutil.SetupTestDB(t)
	repo := NewTaskRepository(db)
	user := testutil.CreateTestUser(t, db, "owner", "owner@test.com", "password")
	residence := testutil.CreateTestResidence(t, db, user.ID, "Test House")
	now := time.Date(2026, 3, 15, 0, 0, 0, 0, time.UTC)
	summary, err := repo.GetCompletionSummary(residence.ID, now, 10)
	require.NoError(t, err)
	assert.Equal(t, 0, summary.TotalAllTime)
	assert.Equal(t, 0, summary.TotalLast12Months)
	// require (not assert) so the index accesses below cannot panic with an
	// out-of-range error when the month list has the wrong length.
	require.Len(t, summary.Months, 12)
	// First month should be March 2025
	assert.Equal(t, "2025-03", summary.Months[0].Month)
	// Last month should be February 2026
	assert.Equal(t, "2026-02", summary.Months[11].Month)
}
// TestGetCompletionSummary_CountsByMonthAndColumn verifies that completions
// are bucketed into the right months and column entries.
func TestGetCompletionSummary_CountsByMonthAndColumn(t *testing.T) {
	db := testutil.SetupTestDB(t)
	repo := NewTaskRepository(db)
	owner := testutil.CreateTestUser(t, db, "owner", "owner@test.com", "password")
	home := testutil.CreateTestResidence(t, db, owner.ID, "Test House")
	fixRoof := testutil.CreateTestTask(t, db, home.ID, owner.ID, "Fix roof")
	reference := time.Date(2026, 3, 15, 0, 0, 0, 0, time.UTC)
	// Seed four completions spread across three months and four columns.
	seed := []struct {
		at     time.Time
		column string
	}{
		{time.Date(2026, 1, 10, 12, 0, 0, 0, time.UTC), "overdue_tasks"},
		{time.Date(2026, 1, 20, 12, 0, 0, 0, time.UTC), "completed_tasks"},
		{time.Date(2026, 2, 5, 12, 0, 0, 0, time.UTC), "due_soon_tasks"},
		{time.Date(2025, 6, 1, 12, 0, 0, 0, time.UTC), "upcoming_tasks"},
	}
	for _, s := range seed {
		completion := models.TaskCompletion{
			TaskID:              fixRoof.ID,
			CompletedByID:       owner.ID,
			CompletedAt:         s.at,
			CompletedFromColumn: s.column,
		}
		require.NoError(t, db.Create(&completion).Error)
	}
	summary, err := repo.GetCompletionSummary(home.ID, reference, 10)
	require.NoError(t, err)
	assert.Equal(t, 4, summary.TotalAllTime)
	assert.Equal(t, 4, summary.TotalLast12Months)
	for _, month := range summary.Months {
		switch month.Month {
		case "2026-01":
			assert.Equal(t, 2, month.Total, "January should have 2 completions")
			assert.Len(t, month.Completions, 2, "January should have 2 column entries")
		case "2026-02":
			assert.Equal(t, 1, month.Total, "February should have 1 completion")
		case "2025-06":
			assert.Equal(t, 1, month.Total, "June 2025 should have 1 completion")
		}
	}
}
// TestGetCompletionSummary_Overflow verifies that completions beyond the
// per-month display cap are reported via the Overflow field.
func TestGetCompletionSummary_Overflow(t *testing.T) {
	db := testutil.SetupTestDB(t)
	repo := NewTaskRepository(db)
	owner := testutil.CreateTestUser(t, db, "owner", "owner@test.com", "password")
	home := testutil.CreateTestResidence(t, db, owner.ID, "Busy House")
	busyTask := testutil.CreateTestTask(t, db, home.ID, owner.ID, "Lots of tasks")
	reference := time.Date(2026, 3, 15, 0, 0, 0, 0, time.UTC)
	// 15 February completions against a cap of 10 should yield 5 overflow.
	for day := 1; day <= 15; day++ {
		completion := models.TaskCompletion{
			TaskID:              busyTask.ID,
			CompletedByID:       owner.ID,
			CompletedAt:         time.Date(2026, 2, day, 12, 0, 0, 0, time.UTC),
			CompletedFromColumn: "completed_tasks",
		}
		require.NoError(t, db.Create(&completion).Error)
	}
	summary, err := repo.GetCompletionSummary(home.ID, reference, 10)
	require.NoError(t, err)
	for _, month := range summary.Months {
		if month.Month != "2026-02" {
			continue
		}
		assert.Equal(t, 15, month.Total)
		assert.Equal(t, 5, month.Overflow)
		return
	}
	t.Fatal("February 2026 not found")
}
// TestGetCompletionSummary_OldCompletionsExcludedFromMonths verifies that
// completions outside the 12-month window still count toward the all-time
// total but not toward the rolling-12-month total.
func TestGetCompletionSummary_OldCompletionsExcludedFromMonths(t *testing.T) {
	db := testutil.SetupTestDB(t)
	repo := NewTaskRepository(db)
	owner := testutil.CreateTestUser(t, db, "owner", "owner@test.com", "password")
	home := testutil.CreateTestResidence(t, db, owner.ID, "Old House")
	oldTask := testutil.CreateTestTask(t, db, home.ID, owner.ID, "Old task")
	reference := time.Date(2026, 3, 15, 0, 0, 0, 0, time.UTC)
	// One completion well outside the window, one inside it.
	for _, at := range []time.Time{
		time.Date(2024, 1, 1, 12, 0, 0, 0, time.UTC),
		time.Date(2026, 2, 1, 12, 0, 0, 0, time.UTC),
	} {
		completion := models.TaskCompletion{
			TaskID:              oldTask.ID,
			CompletedByID:       owner.ID,
			CompletedAt:         at,
			CompletedFromColumn: "completed_tasks",
		}
		require.NoError(t, db.Create(&completion).Error)
	}
	summary, err := repo.GetCompletionSummary(home.ID, reference, 10)
	require.NoError(t, err)
	assert.Equal(t, 2, summary.TotalAllTime)
	assert.Equal(t, 1, summary.TotalLast12Months)
}
// TestGetCompletionSummary_CompletionColors verifies that each column entry
// carries its mapped hex color.
func TestGetCompletionSummary_CompletionColors(t *testing.T) {
	db := testutil.SetupTestDB(t)
	repo := NewTaskRepository(db)
	owner := testutil.CreateTestUser(t, db, "owner", "owner@test.com", "password")
	home := testutil.CreateTestResidence(t, db, owner.ID, "Color House")
	colorTask := testutil.CreateTestTask(t, db, home.ID, owner.ID, "Color task")
	reference := time.Date(2026, 3, 15, 0, 0, 0, 0, time.UTC)
	completion := models.TaskCompletion{
		TaskID:              colorTask.ID,
		CompletedByID:       owner.ID,
		CompletedAt:         time.Date(2026, 1, 5, 12, 0, 0, 0, time.UTC),
		CompletedFromColumn: "overdue_tasks",
	}
	require.NoError(t, db.Create(&completion).Error)
	summary, err := repo.GetCompletionSummary(home.ID, reference, 10)
	require.NoError(t, err)
	// The single January completion should carry the overdue column's red.
	for _, month := range summary.Months {
		if month.Month != "2026-01" {
			continue
		}
		require.Len(t, month.Completions, 1)
		entry := month.Completions[0]
		assert.Equal(t, "overdue_tasks", entry.Column)
		assert.Equal(t, "#FF3B30", entry.Color)
		assert.Equal(t, 1, entry.Count)
		return
	}
	t.Fatal("January 2026 not found")
}
// TestKanbanColumnColor verifies the column→color mapping, including the
// green fallback for unknown column names.
func TestKanbanColumnColor(t *testing.T) {
	cases := map[string]string{
		"overdue_tasks":     "#FF3B30",
		"in_progress_tasks": "#5856D6",
		"due_soon_tasks":    "#FF9500",
		"upcoming_tasks":    "#007AFF",
		"completed_tasks":   "#34C759",
		"cancelled_tasks":   "#8E8E93",
		"unknown_column":    "#34C759",
	}
	for column, expected := range cases {
		t.Run(column, func(t *testing.T) {
			assert.Equal(t, expected, KanbanColumnColor(column))
		})
	}
}

View File

@@ -8,6 +8,7 @@ import (
"github.com/rs/zerolog/log"
"gorm.io/gorm"
"github.com/treytartt/honeydue-api/internal/dto/responses"
"github.com/treytartt/honeydue-api/internal/models"
"github.com/treytartt/honeydue-api/internal/task"
"github.com/treytartt/honeydue-api/internal/task/categorization"
@@ -789,3 +790,125 @@ func (r *TaskRepository) GetOverdueCountByResidence(residenceIDs []uint, now tim
return countMap, nil
}
// kanbanColumnColors maps kanban column names to their hex colors.
var kanbanColumnColors = map[string]string{
	"overdue_tasks":     "#FF3B30",
	"in_progress_tasks": "#5856D6",
	"due_soon_tasks":    "#FF9500",
	"upcoming_tasks":    "#007AFF",
	"completed_tasks":   "#34C759",
	"cancelled_tasks":   "#8E8E93",
}

// KanbanColumnColor returns the hex color for a kanban column name, falling
// back to green (the "completed" color) for unrecognized columns.
func KanbanColumnColor(column string) string {
	color, known := kanbanColumnColors[column]
	if !known {
		return "#34C759" // default to green
	}
	return color
}
// completionAggRow is an internal type for scanning aggregated completion data.
// Field names must match the columns produced by the GROUP BY query in
// GetCompletionSummary.
type completionAggRow struct {
	ResidenceID         uint   // task_task.residence_id
	CompletedFromColumn string // kanban column recorded on the completion
	CompletedMonth      string // "YYYY-MM" as produced by the dialect date expression
	Count               int64  // number of completions in this (month, column) bucket
}
// GetCompletionSummary returns completion summary data for a single residence:
// the all-time completion count plus per-month, per-kanban-column breakdowns
// covering the 12 full calendar months before `now` (the current, partial
// month is excluded). maxPerMonth is the honeycomb display cap; completions
// beyond it are reported in each month's Overflow field.
func (r *TaskRepository) GetCompletionSummary(residenceID uint, now time.Time, maxPerMonth int) (*responses.CompletionSummary, error) {
	// 1. Total all-time completions for this residence.
	var totalAllTime int64
	err := r.db.Model(&models.TaskCompletion{}).
		Joins("JOIN task_task ON task_task.id = task_taskcompletion.task_id").
		Where("task_task.residence_id = ?", residenceID).
		Count(&totalAllTime).Error
	if err != nil {
		return nil, err
	}
	// 2. Monthly breakdown over [first of month 12 months ago, first of the
	// current month). The upper bound keeps current-month rows out of the
	// query entirely; previously they were fetched and then discarded in Go.
	startDate := time.Date(now.Year()-1, now.Month(), 1, 0, 0, 0, 0, now.Location())
	endDate := startDate.AddDate(0, 12, 0) // first day of the current month
	// Use dialect-appropriate date formatting (PostgreSQL vs SQLite)
	dateExpr := "TO_CHAR(task_taskcompletion.completed_at, 'YYYY-MM')"
	if r.db.Dialector.Name() == "sqlite" {
		dateExpr = "strftime('%Y-%m', task_taskcompletion.completed_at)"
	}
	var rows []completionAggRow
	err = r.db.Model(&models.TaskCompletion{}).
		Select(fmt.Sprintf("task_task.residence_id, task_taskcompletion.completed_from_column, %s as completed_month, COUNT(*) as count", dateExpr)).
		Joins("JOIN task_task ON task_task.id = task_taskcompletion.task_id").
		Where("task_task.residence_id = ? AND task_taskcompletion.completed_at >= ? AND task_taskcompletion.completed_at < ?", residenceID, startDate, endDate).
		Group(fmt.Sprintf("task_task.residence_id, task_taskcompletion.completed_from_column, %s", dateExpr)).
		Order("completed_month ASC").
		Scan(&rows).Error
	if err != nil {
		return nil, err
	}
	// Bucket the aggregated rows by month key ("2006-01").
	type monthData struct {
		columns map[string]int
		total   int
	}
	monthMap := make(map[string]*monthData, 12)
	// Initialize all 12 months so empty months still appear in the response.
	for i := 0; i < 12; i++ {
		key := startDate.AddDate(0, i, 0).Format("2006-01")
		monthMap[key] = &monthData{columns: make(map[string]int)}
	}
	// Populate from query results.
	totalLast12 := 0
	for _, row := range rows {
		md, ok := monthMap[row.CompletedMonth]
		if !ok {
			continue // defensive: unexpected month key from the DB
		}
		md.columns[row.CompletedFromColumn] = int(row.Count)
		md.total += int(row.Count)
		totalLast12 += int(row.Count)
	}
	// Convert to response DTOs, oldest month first.
	months := make([]responses.MonthlyCompletionSummary, 0, 12)
	for i := 0; i < 12; i++ {
		key := startDate.AddDate(0, i, 0).Format("2006-01")
		md := monthMap[key]
		// Emit column entries in lexicographic order so the JSON payload is
		// deterministic (Go map iteration order is random). A tiny insertion
		// sort is enough for the handful of kanban columns.
		cols := make([]string, 0, len(md.columns))
		for col := range md.columns {
			cols = append(cols, col)
		}
		for a := 1; a < len(cols); a++ {
			for b := a; b > 0 && cols[b] < cols[b-1]; b-- {
				cols[b], cols[b-1] = cols[b-1], cols[b]
			}
		}
		completions := make([]responses.ColumnCompletionCount, 0, len(cols))
		for _, col := range cols {
			completions = append(completions, responses.ColumnCompletionCount{
				Column: col,
				Color:  KanbanColumnColor(col),
				Count:  md.columns[col],
			})
		}
		overflow := 0
		if md.total > maxPerMonth {
			overflow = md.total - maxPerMonth
		}
		months = append(months, responses.MonthlyCompletionSummary{
			Month:       key,
			Completions: completions,
			Total:       md.total,
			Overflow:    overflow,
		})
	}
	return &responses.CompletionSummary{
		TotalAllTime:      int(totalAllTime),
		TotalLast12Months: totalLast12,
		Months:            months,
	}, nil
}

View File

@@ -58,8 +58,9 @@ func (s *ResidenceService) SetSubscriptionService(subService *SubscriptionServic
s.subscriptionService = subService
}
// GetResidence gets a residence by ID with access check
func (s *ResidenceService) GetResidence(residenceID, userID uint) (*responses.ResidenceResponse, error) {
// GetResidence gets a residence by ID with access check.
// The `now` parameter is used for timezone-aware completion summary aggregation.
func (s *ResidenceService) GetResidence(residenceID, userID uint, now time.Time) (*responses.ResidenceResponse, error) {
// Check access
hasAccess, err := s.residenceRepo.HasAccess(residenceID, userID)
if err != nil {
@@ -78,6 +79,17 @@ func (s *ResidenceService) GetResidence(residenceID, userID uint) (*responses.Re
}
resp := responses.NewResidenceResponse(residence)
// Attach completion summary (honeycomb grid data)
if s.taskRepo != nil {
summary, err := s.taskRepo.GetCompletionSummary(residenceID, now, 10)
if err != nil {
log.Warn().Err(err).Uint("residence_id", residenceID).Msg("Failed to fetch completion summary")
} else {
resp.CompletionSummary = summary
}
}
return &resp, nil
}

View File

@@ -102,7 +102,7 @@ func TestResidenceService_GetResidence(t *testing.T) {
user := testutil.CreateTestUser(t, db, "owner", "owner@test.com", "password")
residence := testutil.CreateTestResidence(t, db, user.ID, "Test House")
resp, err := service.GetResidence(residence.ID, user.ID)
resp, err := service.GetResidence(residence.ID, user.ID, time.Now())
require.NoError(t, err)
assert.Equal(t, residence.ID, resp.ID)
assert.Equal(t, "Test House", resp.Name)
@@ -119,7 +119,7 @@ func TestResidenceService_GetResidence_AccessDenied(t *testing.T) {
otherUser := testutil.CreateTestUser(t, db, "other", "other@test.com", "password")
residence := testutil.CreateTestResidence(t, db, owner.ID, "Test House")
_, err := service.GetResidence(residence.ID, otherUser.ID)
_, err := service.GetResidence(residence.ID, otherUser.ID, time.Now())
testutil.AssertAppError(t, err, http.StatusForbidden, "error.residence_access_denied")
}
@@ -132,7 +132,7 @@ func TestResidenceService_GetResidence_NotFound(t *testing.T) {
user := testutil.CreateTestUser(t, db, "user", "user@test.com", "password")
_, err := service.GetResidence(9999, user.ID)
_, err := service.GetResidence(9999, user.ID, time.Now())
assert.Error(t, err)
}
@@ -210,7 +210,7 @@ func TestResidenceService_DeleteResidence(t *testing.T) {
require.NoError(t, err)
// Should not be found
_, err = service.GetResidence(residence.ID, user.ID)
_, err = service.GetResidence(residence.ID, user.ID, time.Now())
assert.Error(t, err)
}

View File

@@ -17,6 +17,7 @@ import (
"github.com/treytartt/honeydue-api/internal/dto/responses"
"github.com/treytartt/honeydue-api/internal/models"
"github.com/treytartt/honeydue-api/internal/repositories"
"github.com/treytartt/honeydue-api/internal/task/categorization"
)
// Task-related errors (DEPRECATED - kept for reference, use apperrors instead)
@@ -551,13 +552,18 @@ func (s *TaskService) CreateCompletion(req *requests.CreateTaskCompletionRequest
completedAt = *req.CompletedAt
}
// Capture the kanban column BEFORE mutating NextDueDate/InProgress,
// so we know what state the task was in when the user completed it.
completedFromColumn := categorization.DetermineKanbanColumnWithTime(task, 30, now)
completion := &models.TaskCompletion{
TaskID: req.TaskID,
CompletedByID: userID,
CompletedAt: completedAt,
Notes: req.Notes,
ActualCost: req.ActualCost,
Rating: req.Rating,
TaskID: req.TaskID,
CompletedByID: userID,
CompletedAt: completedAt,
Notes: req.Notes,
ActualCost: req.ActualCost,
Rating: req.Rating,
CompletedFromColumn: completedFromColumn,
}
// Determine interval days for NextDueDate calculation before entering the transaction.
@@ -680,11 +686,15 @@ func (s *TaskService) QuickComplete(taskID uint, userID uint) error {
completedAt := time.Now().UTC()
// Capture kanban column before state mutation
completedFromColumn := categorization.DetermineKanbanColumn(task, 30)
completion := &models.TaskCompletion{
TaskID: taskID,
CompletedByID: userID,
CompletedAt: completedAt,
Notes: "Completed from widget",
TaskID: taskID,
CompletedByID: userID,
CompletedAt: completedAt,
Notes: "Completed from widget",
CompletedFromColumn: completedFromColumn,
}
if err := s.taskRepo.CreateCompletion(completion); err != nil {