Add honeycomb completion heatmap and data migration framework
- Add completion_summary endpoint data to residence detail response
- Track completed_from_column on task completions (overdue/due_soon/upcoming)
- Add GetCompletionSummary repo method with monthly aggregation
- Add one-time data migration framework (data_migrations table + registry)
- Add backfill migration to classify historical completions
- Add standalone backfill script for manual/dry-run usage

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
206
internal/database/migration_backfill_completion_columns.go
Normal file
206
internal/database/migration_backfill_completion_columns.go
Normal file
@@ -0,0 +1,206 @@
|
||||
package database
|
||||
|
||||
import (
|
||||
"sort"
|
||||
"time"
|
||||
|
||||
"github.com/rs/zerolog/log"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
// daysThreshold is the "due soon" window in days; matches the app default
// used by the kanban board when placing tasks into columns.
const daysThreshold = 30 // "due soon" window, matches app default

// init registers the backfill as a one-time data migration so the
// data-migration framework runs it exactly once per database.
func init() {
	RegisterDataMigration("20250601_backfill_completion_columns", backfillCompletionColumns)
}
|
||||
|
||||
func backfillCompletionColumns(tx *gorm.DB) error {
|
||||
// Query all completions that still have the default value
|
||||
type completionRow struct {
|
||||
ID uint
|
||||
TaskID uint
|
||||
CompletedAt time.Time
|
||||
}
|
||||
var completions []completionRow
|
||||
err := tx.Table("task_taskcompletion").
|
||||
Select("id, task_id, completed_at").
|
||||
Where("completed_from_column = ? OR completed_from_column IS NULL OR completed_from_column = ''", "completed_tasks").
|
||||
Order("task_id, completed_at ASC").
|
||||
Scan(&completions).Error
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
log.Info().Int("total_completions", len(completions)).Msg("Backfill: found completions to classify")
|
||||
|
||||
if len(completions) == 0 {
|
||||
log.Info().Msg("Backfill: nothing to backfill")
|
||||
return nil
|
||||
}
|
||||
|
||||
// Load all tasks referenced by these completions
|
||||
taskIDs := make(map[uint]bool)
|
||||
for _, c := range completions {
|
||||
taskIDs[c.TaskID] = true
|
||||
}
|
||||
taskIDList := make([]uint, 0, len(taskIDs))
|
||||
for id := range taskIDs {
|
||||
taskIDList = append(taskIDList, id)
|
||||
}
|
||||
|
||||
type taskRow struct {
|
||||
ID uint
|
||||
DueDate *time.Time
|
||||
NextDueDate *time.Time
|
||||
FrequencyID *uint
|
||||
CustomIntervalDays *int
|
||||
}
|
||||
var tasks []taskRow
|
||||
err = tx.Table("task_task").
|
||||
Select("id, due_date, next_due_date, frequency_id, custom_interval_days").
|
||||
Where("id IN ?", taskIDList).
|
||||
Scan(&tasks).Error
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
taskMap := make(map[uint]*taskRow, len(tasks))
|
||||
for i := range tasks {
|
||||
taskMap[tasks[i].ID] = &tasks[i]
|
||||
}
|
||||
|
||||
// Load frequency intervals
|
||||
type freqRow struct {
|
||||
ID uint
|
||||
Name string
|
||||
Days *int
|
||||
}
|
||||
var frequencies []freqRow
|
||||
err = tx.Table("task_taskfrequency").Select("id, name, days").Scan(&frequencies).Error
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
freqMap := make(map[uint]*freqRow, len(frequencies))
|
||||
for i := range frequencies {
|
||||
freqMap[frequencies[i].ID] = &frequencies[i]
|
||||
}
|
||||
|
||||
// Group completions by task
|
||||
taskCompletions := make(map[uint][]completionRow)
|
||||
for _, c := range completions {
|
||||
taskCompletions[c.TaskID] = append(taskCompletions[c.TaskID], c)
|
||||
}
|
||||
for taskID := range taskCompletions {
|
||||
sort.Slice(taskCompletions[taskID], func(i, j int) bool {
|
||||
return taskCompletions[taskID][i].CompletedAt.Before(taskCompletions[taskID][j].CompletedAt)
|
||||
})
|
||||
}
|
||||
|
||||
// Classify each completion
|
||||
type updateEntry struct {
|
||||
completionID uint
|
||||
column string
|
||||
}
|
||||
var updates []updateEntry
|
||||
|
||||
stats := map[string]int{
|
||||
"overdue_tasks": 0,
|
||||
"due_soon_tasks": 0,
|
||||
"upcoming_tasks": 0,
|
||||
"no_due_date": 0,
|
||||
}
|
||||
|
||||
for taskID, comps := range taskCompletions {
|
||||
task, ok := taskMap[taskID]
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
// Determine interval for recurring tasks
|
||||
var intervalDays int
|
||||
if task.FrequencyID != nil {
|
||||
if freq, ok := freqMap[*task.FrequencyID]; ok {
|
||||
if freq.Name == "Custom" && task.CustomIntervalDays != nil {
|
||||
intervalDays = *task.CustomIntervalDays
|
||||
} else if freq.Days != nil {
|
||||
intervalDays = *freq.Days
|
||||
}
|
||||
}
|
||||
}
|
||||
isRecurring := intervalDays > 0
|
||||
|
||||
if !isRecurring {
|
||||
effectiveDate := task.DueDate
|
||||
if effectiveDate == nil {
|
||||
for _, c := range comps {
|
||||
stats["no_due_date"]++
|
||||
updates = append(updates, updateEntry{c.ID, "completed_tasks"})
|
||||
}
|
||||
continue
|
||||
}
|
||||
for _, c := range comps {
|
||||
column := classifyCompletion(c.CompletedAt, *effectiveDate, daysThreshold)
|
||||
stats[column]++
|
||||
updates = append(updates, updateEntry{c.ID, column})
|
||||
}
|
||||
} else {
|
||||
effectiveDate := task.DueDate
|
||||
if effectiveDate == nil && task.NextDueDate != nil {
|
||||
est := task.NextDueDate.AddDate(0, 0, -intervalDays*len(comps))
|
||||
effectiveDate = &est
|
||||
}
|
||||
if effectiveDate == nil {
|
||||
for _, c := range comps {
|
||||
stats["no_due_date"]++
|
||||
updates = append(updates, updateEntry{c.ID, "completed_tasks"})
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
currentDueDate := *effectiveDate
|
||||
for _, c := range comps {
|
||||
column := classifyCompletion(c.CompletedAt, currentDueDate, daysThreshold)
|
||||
stats[column]++
|
||||
updates = append(updates, updateEntry{c.ID, column})
|
||||
currentDueDate = c.CompletedAt.AddDate(0, 0, intervalDays)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
log.Info().
|
||||
Int("overdue", stats["overdue_tasks"]).
|
||||
Int("due_soon", stats["due_soon_tasks"]).
|
||||
Int("upcoming", stats["upcoming_tasks"]).
|
||||
Int("no_due_date", stats["no_due_date"]).
|
||||
Int("total_updates", len(updates)).
|
||||
Msg("Backfill: classification results")
|
||||
|
||||
// Apply updates
|
||||
for _, u := range updates {
|
||||
if err := tx.Table("task_taskcompletion").
|
||||
Where("id = ?", u.completionID).
|
||||
Update("completed_from_column", u.column).Error; err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
log.Info().Int("total_updated", len(updates)).Msg("Backfill: complete")
|
||||
return nil
|
||||
}
|
||||
|
||||
// classifyCompletion determines what kanban column a task was in when completed.
|
||||
func classifyCompletion(completedAt time.Time, dueDate time.Time, threshold int) string {
|
||||
completedDay := time.Date(completedAt.Year(), completedAt.Month(), completedAt.Day(), 0, 0, 0, 0, time.UTC)
|
||||
dueDay := time.Date(dueDate.Year(), dueDate.Month(), dueDate.Day(), 0, 0, 0, 0, time.UTC)
|
||||
|
||||
if completedDay.After(dueDay) {
|
||||
return "overdue_tasks"
|
||||
}
|
||||
|
||||
daysBefore := dueDay.Sub(completedDay).Hours() / 24
|
||||
if daysBefore <= float64(threshold) {
|
||||
return "due_soon_tasks"
|
||||
}
|
||||
|
||||
return "upcoming_tasks"
|
||||
}
|
||||
Reference in New Issue
Block a user