Expand guided reflection with CBT thought record and distortion routing

Adds a 5-step negative-mood reflection flow with an evidence-examination
step, Socratic templated questions that back-reference prior answers, and
a deterministic cognitive-distortion detector that routes the perspective-
check prompt to a distortion-specific reframe. Includes CBT plan docs,
flowchart, stats research notes, and MCP config.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Trey T
2026-04-14 18:49:39 -05:00
parent e6a34a0f25
commit cc4143d3ea
9 changed files with 2235 additions and 33 deletions

View File

@@ -171,10 +171,28 @@ struct GuidedReflectionView: View {
VStack(alignment: .leading, spacing: 24) {
progressSection
// Pre-intensity rating shown only on the first step, once.
// Captures the baseline emotional intensity so we can measure shift.
if currentStepIndex == 0 {
intensityCard(
title: String(localized: "guided_reflection_pre_intensity_title"),
value: preIntensityBinding
)
}
if let step = currentStep {
stepCard(step)
.id(step.id)
}
// Post-intensity rating shown on the final step, below the question.
// Measures how much the reflection shifted the feeling.
if isLastStep {
intensityCard(
title: String(localized: "guided_reflection_post_intensity_title"),
value: postIntensityBinding
)
}
}
.padding(.horizontal)
.padding(.top, 20)
@@ -184,6 +202,62 @@ struct GuidedReflectionView: View {
.onScrollPhaseChange(handleScrollPhaseChange)
}
// MARK: - Intensity Rating UI
/// Bridges the optional `draft.preIntensity` into the non-optional binding the
/// slider needs, substituting the scale midpoint (5) until a value is recorded.
private var preIntensityBinding: Binding<Int> {
    Binding<Int>(
        get: { draft.preIntensity ?? 5 },
        set: { newValue in draft.preIntensity = newValue }
    )
}
/// Bridges the optional `draft.postIntensity` into the non-optional binding the
/// slider needs, substituting the scale midpoint (5) until a value is recorded.
private var postIntensityBinding: Binding<Int> {
    Binding<Int>(
        get: { draft.postIntensity ?? 5 },
        set: { newValue in draft.postIntensity = newValue }
    )
}
/// A rounded card with a 0–10 intensity slider: the title on top, low/high
/// captions flanking the current "n / 10" readout, and the slider below.
///
/// - Parameters:
///   - title: Localized heading shown above the slider.
///   - value: The Int intensity value being edited (0...10).
@ViewBuilder
private func intensityCard(title: String, value: Binding<Int>) -> some View {
    // Bridge the Int model value to the Double the Slider API requires,
    // rounding back to the nearest whole step on write.
    let sliderValue = Binding<Double>(
        get: { Double(value.wrappedValue) },
        set: { value.wrappedValue = Int($0.rounded()) }
    )
    VStack(alignment: .leading, spacing: 12) {
        Text(title)
            .font(.subheadline)
            .fontWeight(.medium)
            .foregroundColor(textColor)
        HStack {
            Text(String(localized: "guided_reflection_intensity_low"))
                .font(.caption)
                .foregroundStyle(.secondary)
            Spacer()
            Text("\(value.wrappedValue) / 10")
                .font(.caption)
                .fontWeight(.semibold)
                .foregroundStyle(accentColor)
            Spacer()
            Text(String(localized: "guided_reflection_intensity_high"))
                .font(.caption)
                .foregroundStyle(.secondary)
        }
        Slider(value: sliderValue, in: 0...10, step: 1)
            .tint(accentColor)
    }
    .padding(16)
    .background(
        RoundedRectangle(cornerRadius: 20)
            .fill(Color(.secondarySystemBackground))
    )
}
@ToolbarContentBuilder
private var navigationToolbar: some ToolbarContent {
ToolbarItem(placement: .cancellationAction) {
@@ -270,7 +344,9 @@ struct GuidedReflectionView: View {
.tracking(1.5)
}
Text(step.question)
// Resolve the template against current answers so Socratic back-references
// (e.g., "Looking at '<your thought>' again...") reflect edits in real time.
Text(draft.resolvedQuestion(for: step))
.font(.title3)
.fontWeight(.medium)
.foregroundColor(textColor)
@@ -279,6 +355,12 @@ struct GuidedReflectionView: View {
editor(for: step)
// Specificity probe: a gentle nudge shown when the Q1 (situation) answer is too vague.
// CBT works better on concrete events than generalized feelings.
if step.id == 0 && needsSpecificityProbe(for: step.answer) {
specificityProbe
}
if let chips = step.chips {
ChipSelectionView(
chips: chips,
@@ -297,6 +379,42 @@ struct GuidedReflectionView: View {
)
}
// MARK: - Specificity Probe
/// Vague answers that should trigger the specificity nudge even when the text
/// is long enough to pass the length check. Matched case-insensitively against
/// the trimmed answer, either as the whole answer or as its leading phrase.
private static let vaguePhrases: Set<String> = [
    "idk", "dunno", "no idea",
    "i don't know", "i dont know",
    "nothing", "nothing really", "everything",
    "same as always", "same old"
]
/// Decides whether the specificity nudge should be shown for the given answer.
///
/// - Parameter answer: Raw editor text for the step.
/// - Returns: `false` while the field is empty (don't nag before they've
///   started); otherwise `true` when the trimmed text is under 25 characters
///   or opens with a known vague phrase.
private func needsSpecificityProbe(for answer: String) -> Bool {
    let trimmed = answer.trimmingCharacters(in: .whitespacesAndNewlines)
    guard !trimmed.isEmpty else { return false } // don't nag before they've started
    if trimmed.count < 25 { return true }
    let lower = trimmed.lowercased()
    // Match a vague phrase as the whole answer or as its leading words.
    // Checking the character *after* the phrase (rather than requiring a
    // trailing space, as before) also catches punctuation boundaries such as
    // "i don't know, it just happened..." — a strict superset of old matches.
    return Self.vaguePhrases.contains { phrase in
        guard lower.hasPrefix(phrase) else { return false }
        guard let boundary = lower.dropFirst(phrase.count).first else { return true }
        return !boundary.isLetter
    }
}
/// Inline hint rendered under the Q1 editor when the answer looks too vague.
/// Purely visual — a lightbulb icon beside localized guidance text in a
/// tinted rounded container.
private var specificityProbe: some View {
HStack(alignment: .top, spacing: 10) {
Image(systemName: "lightbulb.fill")
.foregroundStyle(accentColor)
.font(.footnote)
Text(String(localized: "guided_reflection_specificity_probe"))
.font(.footnote)
.foregroundStyle(.secondary)
// Let the hint wrap to multiple lines instead of truncating.
.fixedSize(horizontal: false, vertical: true)
}
.padding(12)
.frame(maxWidth: .infinity, alignment: .leading)
.background(
RoundedRectangle(cornerRadius: 12)
.fill(accentColor.opacity(0.08))
)
}
private func editor(for step: GuidedReflectionDraft.Step) -> some View {
VStack(alignment: .leading, spacing: 10) {
AutoSizingReflectionTextEditor(
@@ -421,6 +539,14 @@ struct GuidedReflectionView: View {
/// Advances to the next step, clearing editor focus first.
private func navigateForward() {
    guard let nextStepID = draft.stepID(after: currentStepID) else { return }
    focusedStepID = nil
    // Leaving Q2 on the negative path: classify the automatic thought and
    // swap Q3's template to the tailored reframe prompt. Idempotent, so it
    // is safe to run on every forward navigation.
    let leavingAutomaticThought = currentStepID == 1
    if leavingAutomaticThought && draft.moodCategory == .negative {
        draft.recomputeDistortion()
    }
    updateCurrentStep(to: nextStepID)
}
@@ -535,8 +661,11 @@ struct GuidedReflectionView: View {
private struct GuidedReflectionDraft: Equatable {
struct Step: Identifiable, Equatable {
let id: Int
let question: String
let label: String?
/// The template this step renders from. Contains the raw localized text and
/// an optional placeholder reference. The user-visible question is computed by
/// `GuidedReflectionDraft.resolvedQuestion(for:)`, which injects prior answers.
var template: QuestionTemplate
var label: String?
let chips: QuestionChips?
var answer: String
var selectedChips: [String]
@@ -551,7 +680,7 @@ private struct GuidedReflectionDraft: Equatable {
static func == (lhs: Step, rhs: Step) -> Bool {
lhs.id == rhs.id &&
lhs.question == rhs.question &&
lhs.template == rhs.template &&
lhs.label == rhs.label &&
lhs.answer == rhs.answer &&
lhs.selectedChips == rhs.selectedChips
@@ -561,27 +690,86 @@ private struct GuidedReflectionDraft: Equatable {
let moodCategory: MoodCategory
var steps: [Step]
var completedAt: Date?
var preIntensity: Int?
var postIntensity: Int?
var detectedDistortion: CognitiveDistortion?
/// Rebuilds a draft from a persisted `GuidedReflection`, restoring answers,
/// chip selections, intensity ratings, and any detected distortion so an
/// in-progress reflection resumes where it left off.
init(reflection: GuidedReflection) {
moodCategory = reflection.moodCategory
completedAt = reflection.completedAt
preIntensity = reflection.preIntensity
postIntensity = reflection.postIntensity
detectedDistortion = reflection.detectedDistortion
let questions = GuidedReflection.questions(for: reflection.moodCategory)
let templates = GuidedReflection.questionTemplates(for: reflection.moodCategory)
let labels = reflection.moodCategory.stepLabels
steps = questions.enumerated().map { index, question in
steps = templates.enumerated().map { index, template in
// Preserve existing answers if reflection is being resumed.
// Lookup is by response id first; the positional fallback presumably
// covers responses saved before ids were stable -- TODO confirm.
let existingResponse = reflection.responses.first(where: { $0.id == index })
?? (reflection.responses.indices.contains(index) ? reflection.responses[index] : nil)
return Step(
id: index,
question: question,
template: template,
label: labels.indices.contains(index) ? labels[index] : nil,
chips: QuestionChips.chips(for: reflection.moodCategory, questionIndex: index),
answer: existingResponse?.answer ?? "",
selectedChips: existingResponse?.selectedChips ?? []
)
}
// Re-apply any previously-detected distortion so Q3 restores its tailored template.
if let distortion = detectedDistortion, moodCategory == .negative {
applyDistortion(distortion)
}
}
/// All (step id, current answer) pairs in step order — the substitution
/// context handed to `QuestionTemplate.resolved(with:)`.
private var answerTuples: [(index: Int, text: String)] {
    steps.map { step in (index: step.id, text: step.answer) }
}
/// The user-visible question text for `step`, with the latest value of any
/// referenced prior answer injected. Called at render time by the view.
func resolvedQuestion(for step: Step) -> String {
    return step.template.resolved(with: answerTuples)
}
/// Convenience overload: resolves the question by step id, returning an
/// empty string when no step with that id exists.
func resolvedQuestion(forStepID stepID: Int) -> String {
    step(forStepID: stepID).map { resolvedQuestion(for: $0) } ?? ""
}
/// Mutating: detect the cognitive distortion in the current Q2 answer and
/// swap Q3's template to the tailored prompt. Idempotent — safe to call on
/// every forward navigation.
///
/// Only the negative path has distortion routing. Any other mood clears
/// `detectedDistortion` and returns WITHOUT touching the steps: previously
/// the mood check shared a guard with the Q2-empty check, so a non-negative
/// mood fell into the else branch and `applyDistortion(.unknown)` clobbered
/// that path's own Q3 template.
mutating func recomputeDistortion() {
    guard moodCategory == .negative else {
        detectedDistortion = nil
        return
    }
    let q2Answer = steps.first(where: { $0.id == 1 })?.answer ?? ""
    guard !q2Answer.trimmingCharacters(in: .whitespacesAndNewlines).isEmpty else {
        // Q2 was cleared: drop any stale detection and restore the generic
        // perspective-check template.
        detectedDistortion = nil
        applyDistortion(.unknown)
        return
    }
    let distortion = CognitiveDistortionDetector.detect(in: q2Answer)
    detectedDistortion = distortion == .unknown ? nil : distortion
    applyDistortion(distortion)
}
/// Overwrites Q3's template and label to match `distortion`. `.unknown`
/// restores the generic "Perspective Check" label from
/// `MoodCategory.stepLabels`; any other case uses the distortion's own label.
private mutating func applyDistortion(_ distortion: CognitiveDistortion) {
    guard let index = steps.firstIndex(where: { $0.id == 2 }) else { return }
    steps[index].template = distortion.perspectiveCheckTemplate
    switch distortion {
    case .unknown:
        let defaultLabels = moodCategory.stepLabels
        steps[index].label = defaultLabels.count > 2 ? defaultLabels[2] : nil
    default:
        steps[index].label = distortion.stepLabel
    }
}
var firstUnansweredStepID: Int? {
@@ -630,14 +818,19 @@ private struct GuidedReflectionDraft: Equatable {
GuidedReflection(
moodCategory: moodCategory,
responses: steps.map { step in
// Persist the user-visible resolved question text — not the raw template —
// so downstream consumers (AI feedback, history view) see what the user saw.
GuidedReflection.Response(
id: step.id,
question: step.question,
question: resolvedQuestion(for: step),
answer: step.answer,
selectedChips: step.selectedChips
)
},
completedAt: completedAt
completedAt: completedAt,
preIntensity: preIntensity,
postIntensity: postIntensity,
detectedDistortion: detectedDistortion
)
}
}