Phase 4-5: build pipeline + device enrollment

Builder service (Mac mini):
- Build worker: xcodebuild archive + export + fastlane signing + upload to unraid
- /api/build/upload (source archive) and /api/build/git (clone) ingest paths
- SSE-streamed build logs, builds list UI, live status updates
- /api/devices/from-enrollment bridge endpoint (shared-secret auth)

Storefront (unraid):
- /enroll/ public flow: landing page, mobileconfig generator, callback parser
- Forwards extracted UDIDs to the Mac mini builder for ASC registration
- docker-compose.yml now passes BUILDER_URL and BUILDER_SHARED_SECRET

Updated CLAUDE.md with full architecture, deploy flow, and gotchas.
This commit is contained in:
trey
2026-04-11 14:04:32 -05:00
parent e9b6936904
commit 8dbe87da2e
14 changed files with 1203 additions and 44 deletions

View File

@@ -0,0 +1,51 @@
// Shorthand query-selector helper.
const $ = (sel) => document.querySelector(sel);

/**
 * Show a transient toast notification for ~3.5s.
 * @param {string} msg  - text to display
 * @param {string} kind - extra CSS class ('' or 'error')
 */
function toast(msg, kind = '') {
  const el = $('#toast');
  el.textContent = msg;
  el.className = `toast show ${kind}`;
  setTimeout(() => el.classList.remove('show'), 3500);
}
// Upload-form submit: POST the archive as multipart form data, then jump to
// the builds page anchored on the new job id.
$('#upload-form').addEventListener('submit', async (e) => {
  e.preventDefault();
  const fd = new FormData(e.target);
  const btn = e.target.querySelector('button[type=submit]');
  btn.disabled = true;
  btn.textContent = 'Uploading…';
  try {
    const r = await fetch('/api/build/upload', { method: 'POST', body: fd });
    // Fix: tolerate non-JSON error bodies (e.g. a proxy's 413/502 HTML page)
    // instead of surfacing a cryptic JSON parse error in the toast.
    const data = await r.json().catch(() => ({}));
    if (!r.ok) throw new Error(data.error || 'Upload failed');
    location.href = `/builds#${data.job_id}`;
  } catch (err) {
    toast(err.message, 'error');
    // Re-enable so the user can retry after a failure.
    btn.disabled = false;
    btn.textContent = 'Queue Build';
  }
});
// Git-form submit: POST the repo URL (plus optional branch/scheme) as JSON,
// then jump to the builds page anchored on the new job id.
$('#git-form').addEventListener('submit', async (e) => {
  e.preventDefault();
  const body = {
    url: e.target.url.value.trim(),
    branch: e.target.branch.value.trim() || null,
    scheme: e.target.scheme.value.trim() || null,
  };
  // Fix: the URL input has no `required` attribute, so guard here rather than
  // round-tripping an obviously-invalid request to the server.
  if (!body.url) {
    toast('Repository URL is required', 'error');
    return;
  }
  const btn = e.target.querySelector('button[type=submit]');
  btn.disabled = true;
  btn.textContent = 'Cloning…';
  try {
    const r = await fetch('/api/build/git', {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(body),
    });
    // Fix: tolerate non-JSON error bodies (proxy error pages) instead of
    // showing a JSON parse error in the toast.
    const data = await r.json().catch(() => ({}));
    if (!r.ok) throw new Error(data.error || 'Clone failed');
    location.href = `/builds#${data.job_id}`;
  } catch (err) {
    toast(err.message, 'error');
    // Re-enable so the user can retry after a failure.
    btn.disabled = false;
    btn.textContent = 'Queue Build';
  }
});

107
builder/public/js/builds.js Normal file
View File

@@ -0,0 +1,107 @@
// Shorthand DOM query helper.
const $ = (s) => document.querySelector(s);
// HTML-escape untrusted text by round-tripping through a detached element.
const esc = (s) => { const d = document.createElement('div'); d.textContent = s ?? ''; return d.innerHTML; };
// The currently-open SSE log stream, if any (at most one at a time).
let currentEventSource = null;
/**
 * Render a SQLite UTC timestamp ("YYYY-MM-DD HH:MM:SS") in the local locale.
 * @param {?string} s - timestamp from the API, or null/empty.
 * @returns {string} localized date-time, or an em dash when missing.
 */
function formatDate(s) {
  if (!s) return '—';
  // Fix: normalize the space-separated SQLite form to strict ISO-8601
  // ("…T…Z"). Engines like Safari reject "2024-01-01 00:00:00Z" and would
  // render "Invalid Date" here.
  return new Date(s.replace(' ', 'T') + 'Z').toLocaleString();
}
/**
 * Human-readable elapsed time between two SQLite UTC timestamps.
 * Uses "now" when `end` is missing (job still running).
 * @param {?string} start - job start time, or null if not started.
 * @param {?string} end   - job finish time, or null if still running.
 * @returns {string} e.g. "42s" or "3m 7s"; em dash when not started.
 */
function duration(start, end) {
  if (!start) return '—';
  // Fix: normalize the space-separated SQLite form to strict ISO-8601 so
  // Safari (which rejects "YYYY-MM-DD HH:MM:SSZ") parses it too.
  const toMs = (t) => new Date(t.replace(' ', 'T') + 'Z').getTime();
  const s = toMs(start);
  const e = end ? toMs(end) : Date.now();
  // Clamp so clock skew between rows can never show a negative duration.
  const secs = Math.max(0, Math.round((e - s) / 1000));
  if (secs < 60) return `${secs}s`;
  return `${Math.floor(secs / 60)}m ${secs % 60}s`;
}
/**
 * Render a status pill for a build job.
 * @param {string} status - raw job status from the API.
 * @returns {string} HTML span with a badge class.
 */
function statusBadge(status) {
  // Statuses that represent an in-flight build phase.
  const RUNNING_PHASES = new Set(['preparing', 'signing', 'archiving', 'exporting', 'uploading']);
  let cls;
  if (status === 'succeeded') {
    cls = 'succeeded';
  } else if (status === 'failed') {
    cls = 'failed';
  } else if (RUNNING_PHASES.has(status)) {
    cls = 'running';
  } else {
    cls = 'pending';
  }
  return `<span class="badge ${cls}">${status}</span>`;
}
/**
 * Fetch the job list and re-render the table.
 * Called once on load and then every 5 seconds; redirects to /login on 401.
 */
async function loadJobs() {
  const r = await fetch('/api/builds');
  if (r.status === 401) { location.href = '/login'; return; }
  const jobs = await r.json();
  const container = $('#jobs-container');
  if (!jobs.length) {
    container.innerHTML = '<div class="card"><p style="color:var(--text-muted)">No builds yet. Start one from <a href="/build" style="color:var(--accent)">New Build</a>.</p></div>';
    return;
  }
  // All server-supplied text fields go through esc(); only locally generated
  // markup (statusBadge, the Install button) is inserted raw.
  container.innerHTML = `
    <table class="table">
      <thead>
        <tr><th>Status</th><th>Bundle</th><th>Source</th><th>Started</th><th>Duration</th><th></th></tr>
      </thead>
      <tbody>
        ${jobs.map(j => `
          <tr data-id="${j.id}" style="cursor:pointer">
            <td>${statusBadge(j.status)}</td>
            <td class="mono">${esc(j.bundle_id) || '<span style="color:var(--text-muted)">—</span>'}</td>
            <td class="mono">${esc(j.source_kind)}: ${esc((j.source_ref || '').slice(0, 40))}</td>
            <td class="mono">${esc(formatDate(j.started_at))}</td>
            <td class="mono">${esc(duration(j.started_at, j.finished_at))}</td>
            <td>${j.install_url ? `<a href="${esc(j.install_url)}" class="btn-sm" style="background:var(--accent);color:white;padding:5px 12px;border-radius:14px;text-decoration:none;font-size:12px">Install</a>` : ''}</td>
          </tr>
        `).join('')}
      </tbody>
    </table>
  `;
  // Clicking a row opens the detail pane and starts the live log stream.
  container.querySelectorAll('tbody tr').forEach((tr) => {
    tr.addEventListener('click', () => openJob(tr.getAttribute('data-id')));
  });
}
/**
 * Open the detail pane for one job and attach a live SSE log stream.
 * Updates location.hash so reloads / shared links reopen the same job.
 * @param {string} id - build job UUID.
 */
async function openJob(id) {
  location.hash = id;
  const r = await fetch(`/api/builds/${id}`);
  if (!r.ok) return;
  const job = await r.json();
  $('#detail').style.display = 'block';
  $('#detail-title').textContent = `Job ${id.slice(0, 8)} · ${job.status}`;
  $('#detail-meta').innerHTML = `
    <div>bundle: ${esc(job.bundle_id || '—')}</div>
    <div>scheme: ${esc(job.scheme || '—')}</div>
    <div>source: ${esc(job.source_kind)} ${esc(job.source_ref || '')}</div>
    <div>started: ${esc(formatDate(job.started_at))} · finished: ${esc(formatDate(job.finished_at))}</div>
    ${job.install_url ? `<div>install: <a href="${esc(job.install_url)}" style="color:var(--accent)">${esc(job.install_url.slice(0, 80))}…</a></div>` : ''}
    ${job.error ? `<div style="color:var(--danger)">error: ${esc(job.error)}</div>` : ''}
  `;
  const logEl = $('#log-viewer');
  logEl.textContent = '';
  // Only one live stream at a time — close any previous one first.
  if (currentEventSource) { currentEventSource.close(); currentEventSource = null; }
  const es = new EventSource(`/api/builds/${id}/logs`);
  currentEventSource = es;
  es.onmessage = (ev) => {
    logEl.textContent += ev.data + '\n';
    logEl.scrollTop = logEl.scrollHeight;
  };
  es.addEventListener('done', () => {
    es.close();
    currentEventSource = null;
    // Refresh the job list so status pill updates.
    loadJobs();
  });
  es.onerror = () => {
    es.close();
    // Fix: also drop the stale reference — previously currentEventSource kept
    // pointing at this closed/errored stream, which a later openJob() would
    // then "close" again instead of knowing no stream is active.
    if (currentEventSource === es) currentEventSource = null;
  };
}
// Initial render, then poll the job list every 5 seconds.
loadJobs();
setInterval(loadJobs, 5000);
// If arriving with a hash, open that job.
if (location.hash.length > 1) {
  openJob(location.hash.slice(1));
}

199
builder/src/build-routes.js Normal file
View File

@@ -0,0 +1,199 @@
// Build pipeline HTTP routes.
// Attached to the main Express app in server.js via `register(app)`.
const path = require('path');
const fs = require('fs');
const os = require('os');
const { spawn } = require('child_process');
const multer = require('multer');
const { v4: uuidv4 } = require('uuid');
const { db, DATA_DIR } = require('./db');
const buildWorker = require('./build-worker');

// Per-job working directories under the data dir:
//   source/<jobId>  extracted/cloned source trees (consumed by build-worker)
//   builds/         per-job log files (<jobId>.log)
//   tmp/            multer upload staging
const SOURCE_DIR = path.join(DATA_DIR, 'source');
const LOGS_DIR = path.join(DATA_DIR, 'builds');
const TMP_DIR = path.join(DATA_DIR, 'tmp');
[SOURCE_DIR, LOGS_DIR, TMP_DIR].forEach((d) => fs.mkdirSync(d, { recursive: true }));

// Source-archive uploads: staged on disk, capped at 500 MB, extension-gated.
const archiveUpload = multer({
  dest: TMP_DIR,
  limits: { fileSize: 500 * 1024 * 1024 },
  fileFilter: (req, file, cb) => {
    // Extension check only — actual contents are validated by extraction.
    const name = file.originalname.toLowerCase();
    if (name.endsWith('.zip') || name.endsWith('.tar.gz') || name.endsWith('.tgz')) {
      return cb(null, true);
    }
    cb(new Error('Only .zip, .tar.gz, or .tgz archives'));
  },
});
/**
 * Expand a source archive into destDir (created if missing).
 * Dispatches to `unzip` for .zip and `tar` for gzipped tarballs.
 * @returns {Promise<void>} resolves on exit 0, rejects with stderr otherwise.
 */
function extractArchive(archivePath, destDir) {
  return new Promise((resolve, reject) => {
    fs.mkdirSync(destDir, { recursive: true });
    const isZip = archivePath.toLowerCase().endsWith('.zip');
    const cmd = isZip ? '/usr/bin/unzip' : '/usr/bin/tar';
    const args = isZip
      ? ['-q', archivePath, '-d', destDir]
      : ['-xzf', archivePath, '-C', destDir];
    const child = spawn(cmd, args);
    let errOutput = '';
    child.stderr.on('data', (chunk) => { errOutput += chunk.toString(); });
    child.on('error', reject);
    child.on('close', (exitCode) => {
      if (exitCode === 0) {
        resolve();
      } else {
        reject(new Error(`${cmd} exited ${exitCode}: ${errOutput}`));
      }
    });
  });
}
/**
 * Shallow-clone a git repository into destDir, appending output to the job log.
 * @param {{url: string, branch: ?string, destDir: string, logPath: string}} opts
 * @returns {Promise<void>} resolves on exit 0, rejects otherwise.
 */
function cloneGitRepo({ url, branch, destDir, logPath }) {
  return new Promise((resolve, reject) => {
    // Fix (argument injection): a crafted "url" such as "--upload-pack=<cmd>"
    // would previously be parsed by git as an option. Reject option-lookalike
    // input and pass "--" so everything after it is treated as operands.
    if (typeof url !== 'string' || !url || url.startsWith('-')) {
      return reject(new Error('invalid git URL'));
    }
    fs.mkdirSync(destDir, { recursive: true });
    const args = ['clone', '--depth', '1'];
    if (branch) args.push('--branch', branch);
    args.push('--', url, destDir);
    fs.appendFileSync(logPath, `$ git ${args.join(' ')}\n`);
    const child = spawn('/usr/bin/git', args);
    child.stdout.on('data', (c) => fs.appendFileSync(logPath, c));
    child.stderr.on('data', (c) => fs.appendFileSync(logPath, c));
    child.on('close', (code) => code === 0 ? resolve() : reject(new Error(`git clone failed (${code})`)));
    child.on('error', reject);
  });
}
/**
 * Register all build-pipeline pages and API routes on the Express app.
 * @param {object} app - Express application instance.
 * @param {{requireLogin: Function}} deps - session-auth middleware.
 */
function register(app, { requireLogin }) {
  // --- Pages ---
  app.get('/build', requireLogin, (req, res) => {
    res.sendFile(path.join(__dirname, '..', 'views', 'build.html'));
  });
  app.get('/builds', requireLogin, (req, res) => {
    res.sendFile(path.join(__dirname, '..', 'views', 'builds.html'));
  });
  // --- Upload a source archive ---
  // Multer stages the archive in TMP_DIR; we extract it into source/<jobId>,
  // insert a pending row, and wake the worker loop.
  app.post('/api/build/upload', requireLogin, archiveUpload.single('source'), async (req, res) => {
    if (!req.file) return res.status(400).json({ error: 'No source file provided' });
    try {
      const jobId = uuidv4();
      const sourceDir = path.join(SOURCE_DIR, jobId);
      await extractArchive(req.file.path, sourceDir);
      fs.unlinkSync(req.file.path);
      const scheme = (req.body.scheme || '').trim() || null;
      db.prepare(`
        INSERT INTO build_jobs (id, source_kind, source_ref, scheme, status)
        VALUES (?, 'upload', ?, ?, 'pending')
      `).run(jobId, req.file.originalname, scheme);
      buildWorker.kick();
      res.json({ success: true, job_id: jobId });
    } catch (err) {
      // Remove the staged upload on failure so TMP_DIR doesn't accumulate.
      if (req.file && fs.existsSync(req.file.path)) fs.unlinkSync(req.file.path);
      res.status(500).json({ error: err.message });
    }
  });
  // --- Clone a git repo ---
  // The clone runs inline before responding, so a large repo holds the HTTP
  // request open. The row is inserted first so a failed clone is recorded.
  app.post('/api/build/git', requireLogin, async (req, res) => {
    const { url, branch, scheme } = req.body || {};
    if (!url) return res.status(400).json({ error: 'url is required' });
    const jobId = uuidv4();
    const logPath = path.join(LOGS_DIR, `${jobId}.log`);
    fs.writeFileSync(logPath, `Cloning ${url}${branch ? ` (branch ${branch})` : ''}\n`);
    db.prepare(`
      INSERT INTO build_jobs (id, source_kind, source_ref, scheme, status, log_path)
      VALUES (?, 'git', ?, ?, 'pending', ?)
    `).run(jobId, url, scheme || null, logPath);
    try {
      const sourceDir = path.join(SOURCE_DIR, jobId);
      await cloneGitRepo({ url, branch, destDir: sourceDir, logPath });
      buildWorker.kick();
      res.json({ success: true, job_id: jobId });
    } catch (err) {
      db.prepare("UPDATE build_jobs SET status = 'failed', error = ?, finished_at = datetime('now') WHERE id = ?")
        .run(err.message, jobId);
      res.status(500).json({ error: err.message });
    }
  });
  // --- List jobs ---
  // Most recent 100, ordered by start time (falling back to creation time
  // for jobs that haven't started yet).
  app.get('/api/builds', requireLogin, (req, res) => {
    const rows = db.prepare(`
      SELECT id, bundle_id, source_kind, source_ref, scheme, status, started_at, finished_at, error, unraid_build_id, install_url
      FROM build_jobs
      ORDER BY COALESCE(started_at, created_at) DESC
      LIMIT 100
    `).all();
    res.json(rows);
  });
  // --- Get a single job ---
  app.get('/api/builds/:id', requireLogin, (req, res) => {
    const row = db.prepare('SELECT * FROM build_jobs WHERE id = ?').get(req.params.id);
    if (!row) return res.status(404).json({ error: 'not found' });
    res.json(row);
  });
  // --- Stream logs via SSE ---
  // Tails the job's log file: sends everything written so far, then polls for
  // growth once a second until the job reaches a terminal state.
  app.get('/api/builds/:id/logs', requireLogin, (req, res) => {
    const row = db.prepare('SELECT * FROM build_jobs WHERE id = ?').get(req.params.id);
    if (!row) return res.status(404).end();
    const logPath = row.log_path || path.join(LOGS_DIR, `${row.id}.log`);
    res.set({
      'Content-Type': 'text/event-stream',
      'Cache-Control': 'no-cache',
      Connection: 'keep-alive',
    });
    res.flushHeaders?.();
    // Byte offset already delivered to this client.
    let position = 0;
    const sendNew = () => {
      if (!fs.existsSync(logPath)) return;
      const stat = fs.statSync(logPath);
      if (stat.size <= position) return;
      const fd = fs.openSync(logPath, 'r');
      const buf = Buffer.alloc(stat.size - position);
      fs.readSync(fd, buf, 0, buf.length, position);
      fs.closeSync(fd);
      position = stat.size;
      // SSE: prefix every line with "data: "
      const lines = buf.toString('utf8').split('\n');
      for (const line of lines) {
        if (line.length) res.write(`data: ${line}\n\n`);
      }
    };
    sendNew();
    const interval = setInterval(() => {
      sendNew();
      // Check if job finished — send one more time and close after a grace period.
      const current = db.prepare('SELECT status FROM build_jobs WHERE id = ?').get(req.params.id);
      if (current && (current.status === 'succeeded' || current.status === 'failed')) {
        sendNew();
        res.write(`event: done\ndata: ${current.status}\n\n`);
        clearInterval(interval);
        res.end();
      }
    }, 1000);
    // Stop polling when the client disconnects.
    req.on('close', () => clearInterval(interval));
  });
  // --- Rebuild a finished job (reuses the last known source if available) ---
  app.post('/api/builds/:id/rebuild', requireLogin, (req, res) => {
    res.status(501).json({ error: 'rebuild not implemented yet' });
  });
}
module.exports = { register };

334
builder/src/build-worker.js Normal file
View File

@@ -0,0 +1,334 @@
// Build worker — consumes `build_jobs` rows, runs xcodebuild + fastlane + upload.
// Single in-process loop; SQLite is the queue.
const path = require('path');
const fs = require('fs');
const os = require('os');
const { execFile, spawn } = require('child_process');
const { promisify } = require('util');
const execFileAsync = promisify(execFile);
const { db, getSetting, DATA_DIR } = require('./db');
const profileManager = require('./profile-manager');

// Working directories (created at module load):
//   source/<jobId>  job source trees (written by build-routes)
//   build/          xcarchives + export output
//   builds/         per-job log files
const SOURCE_DIR = path.join(DATA_DIR, 'source');
const BUILD_DIR = path.join(DATA_DIR, 'build');
const LOGS_DIR = path.join(DATA_DIR, 'builds');
[SOURCE_DIR, BUILD_DIR, LOGS_DIR].forEach((d) => fs.mkdirSync(d, { recursive: true }));

// How often start() re-checks the queue for pending jobs.
const POLL_INTERVAL_MS = 2000;
// True while the drain loop is active — guards against concurrent loops.
let running = false;
// --- Job state helpers ---
/**
 * Update a job row's status plus any extra column values in one UPDATE.
 * Terminal states ('succeeded'/'failed') also stamp finished_at.
 * @param {string} jobId  - build job UUID.
 * @param {string} status - new status value.
 * @param {Object<string, *>} extra - additional column -> value assignments.
 */
function markStatus(jobId, status, extra = {}) {
  const assignments = ['status = ?'];
  const params = [status];
  for (const [column, value] of Object.entries(extra)) {
    assignments.push(`${column} = ?`);
    params.push(value);
  }
  if (status === 'succeeded' || status === 'failed') {
    assignments.push("finished_at = datetime('now')");
  }
  params.push(jobId);
  db.prepare(`UPDATE build_jobs SET ${assignments.join(', ')} WHERE id = ?`).run(...params);
}
/**
 * Append one line to a log file, adding a trailing newline when missing.
 * @param {string} logPath - absolute path to the job log.
 * @param {string} line    - text to append.
 */
function appendLog(logPath, line) {
  const text = line.endsWith('\n') ? line : `${line}\n`;
  fs.appendFileSync(logPath, text);
}
// Write a visual divider between build phases into the job log.
function section(logPath, title) {
  appendLog(logPath, `\n━━━━━━━━ ${title} ━━━━━━━━`);
}
// --- Command runner that streams to the log file ---
/**
 * Spawn a command, tee its stdout/stderr into the job log, and capture both.
 * @param {string} cmd  - absolute path to the executable.
 * @param {string[]} args - argv (no shell involved).
 * @param {{cwd: string, env: object, logPath: string}} opts
 * @returns {Promise<{stdout: string, stderr: string}>} rejects on spawn
 *   failure or non-zero exit.
 */
function runCommand(cmd, args, { cwd, env, logPath }) {
  return new Promise((resolve, reject) => {
    appendLog(logPath, `$ ${cmd} ${args.join(' ')}`);
    const child = spawn(cmd, args, { cwd, env });
    const captured = { stdout: '', stderr: '' };
    const tee = (stream) => (chunk) => {
      const text = chunk.toString();
      captured[stream] += text;
      fs.appendFileSync(logPath, text);
    };
    child.stdout.on('data', tee('stdout'));
    child.stderr.on('data', tee('stderr'));
    child.on('error', reject);
    child.on('close', (code) => {
      if (code === 0) {
        resolve({ stdout: captured.stdout, stderr: captured.stderr });
      } else {
        reject(new Error(`${cmd} exited with code ${code}`));
      }
    });
  });
}
// --- Project locator ---
// --- Project locator ---
/**
 * Search up to 3 directory levels deep for an Xcode container.
 * A .xcworkspace wins over a .xcodeproj in the same directory (CocoaPods-style
 * projects must be built through the workspace).
 * @param {string} sourceDir - extracted/cloned source tree root.
 * @returns {?{dir: string, type: 'workspace'|'project', name: string}}
 */
function findProjectRoot(sourceDir) {
  const walk = (dir, depth) => {
    if (depth > 3) return null;
    const entries = fs.readdirSync(dir, { withFileTypes: true });
    // Fix: the old filter `!e.name.endsWith('.xcodeproj/project.xcworkspace')`
    // was always true — dirent names never contain '/'. The real intent (don't
    // match the project.xcworkspace nested inside a .xcodeproj bundle) is
    // handled below by never descending into bundle directories.
    const workspace = entries.find((e) => e.isDirectory() && e.name.endsWith('.xcworkspace'));
    if (workspace) return { dir, type: 'workspace', name: workspace.name };
    const project = entries.find((e) => e.isDirectory() && e.name.endsWith('.xcodeproj'));
    if (project) return { dir, type: 'project', name: project.name };
    for (const e of entries) {
      if (!e.isDirectory()) continue;
      // Skip hidden dirs, node_modules, and Xcode bundle directories.
      if (e.name.startsWith('.') || e.name === 'node_modules') continue;
      if (e.name.endsWith('.xcodeproj') || e.name.endsWith('.xcworkspace')) continue;
      const found = walk(path.join(dir, e.name), depth + 1);
      if (found) return found;
    }
    return null;
  };
  return walk(sourceDir, 0);
}
/**
 * List the schemes of a workspace/project via `xcodebuild -list -json`.
 * @returns {Promise<string[]>} scheme names; empty array when the output
 *   isn't parseable JSON.
 */
async function listSchemes({ projectRoot, logPath }) {
  const container = projectRoot.type === 'workspace'
    ? ['-workspace', projectRoot.name]
    : ['-project', projectRoot.name];
  const args = ['-list', '-json', ...container];
  const { stdout } = await runCommand('/usr/bin/xcodebuild', args, { cwd: projectRoot.dir, env: process.env, logPath });
  try {
    const info = JSON.parse(stdout);
    return info.workspace?.schemes || info.project?.schemes || [];
  } catch {
    // Non-JSON output — treat as "no schemes found"; the caller errors out.
    return [];
  }
}
/**
 * Dump resolved build settings (per target) for a scheme as parsed JSON.
 * @returns {Promise<object[]>} xcodebuild's -showBuildSettings -json output.
 * @throws when the output cannot be parsed as JSON.
 */
async function getBuildSettings({ projectRoot, scheme, logPath }) {
  const containerFlag = projectRoot.type === 'workspace' ? '-workspace' : '-project';
  const args = [containerFlag, projectRoot.name, '-showBuildSettings', '-json', '-scheme', scheme];
  const { stdout } = await runCommand('/usr/bin/xcodebuild', args, { cwd: projectRoot.dir, env: process.env, logPath });
  try {
    return JSON.parse(stdout);
  } catch {
    throw new Error('Could not parse xcodebuild -showBuildSettings JSON');
  }
}
/**
 * Collect every PRODUCT_BUNDLE_IDENTIFIER (deduped, in target order) and the
 * first DEVELOPMENT_TEAM found across the targets' build settings.
 * @param {object[]} buildSettings - output of getBuildSettings().
 * @returns {{bundleIds: string[], teamId: ?string}}
 */
function extractBundleIds(buildSettings) {
  const seen = new Set();
  let teamId = null;
  for (const target of buildSettings) {
    const settings = target.buildSettings || {};
    const bid = settings.PRODUCT_BUNDLE_IDENTIFIER;
    if (bid) seen.add(bid);
    if (!teamId && settings.DEVELOPMENT_TEAM) teamId = settings.DEVELOPMENT_TEAM;
  }
  return { bundleIds: [...seen], teamId };
}
// --- ExportOptions.plist generation ---
// Escape a text value for safe embedding in plist XML.
function xmlEscape(value) {
  return String(value)
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;');
}

/**
 * Render an ExportOptions.plist for a manual, ad-hoc export.
 * Fix: interpolated values are now XML-escaped — previously a team id or
 * bundle id containing &, < or > would produce an invalid plist.
 * @param {{teamId: string, profilesByBundleId: Object<string,string>}} opts
 *   profilesByBundleId maps bundle id -> provisioning profile UUID.
 * @returns {string} plist XML.
 */
function buildExportOptions({ teamId, profilesByBundleId }) {
  const entries = Object.entries(profilesByBundleId)
    .map(([bid, uuid]) => `  <key>${xmlEscape(bid)}</key><string>${xmlEscape(uuid)}</string>`)
    .join('\n');
  return `<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
  <key>method</key><string>ad-hoc</string>
  <key>teamID</key><string>${xmlEscape(teamId)}</string>
  <key>signingStyle</key><string>manual</string>
  <key>stripSwiftSymbols</key><true/>
  <key>compileBitcode</key><false/>
  <key>provisioningProfiles</key>
  <dict>
${entries}
  </dict>
</dict>
</plist>
`;
}
// --- Upload to unraid ---
/**
 * POST the finished IPA to the unraid storefront's /api/upload endpoint.
 * Reads `unraid_url` / `unraid_token` from settings; throws when unset or
 * when the storefront rejects the upload.
 * NOTE(review): the IPA is buffered fully in memory (readFileSync) before
 * upload — fine for typical sizes, but a very large IPA will spike RSS;
 * consider streaming if that becomes a problem.
 * @returns {Promise<object>} the storefront's JSON response body.
 */
async function uploadToUnraid({ ipaPath, notes, logPath }) {
  const unraidUrl = getSetting('unraid_url');
  const unraidToken = getSetting('unraid_token');
  if (!unraidUrl || !unraidToken) throw new Error('unraid URL/token not configured');
  appendLog(logPath, `\nUploading IPA to ${unraidUrl}/api/upload`);
  const buf = fs.readFileSync(ipaPath);
  const blob = new Blob([buf], { type: 'application/octet-stream' });
  const form = new FormData();
  form.append('ipa', blob, path.basename(ipaPath));
  if (notes) form.append('notes', notes);
  const res = await fetch(`${unraidUrl}/api/upload`, {
    method: 'POST',
    headers: { 'X-Api-Token': unraidToken },
    body: form,
  });
  // Tolerate non-JSON error bodies; the status check below still fails.
  const body = await res.json().catch(() => ({}));
  if (!res.ok || !body.success) {
    throw new Error(`unraid upload failed (${res.status}): ${JSON.stringify(body)}`);
  }
  appendLog(logPath, `✓ Uploaded: ${JSON.stringify(body)}`);
  return body;
}
// --- Main build function ---
/**
 * Execute one build job end-to-end: locate the Xcode container, ensure
 * provisioning profiles, archive, export an ad-hoc IPA, and upload it to the
 * unraid storefront. Throws on any failure; processJob() records the error.
 * @param {object} job - a `build_jobs` row (id, scheme, …).
 */
async function runBuild(job) {
  const jobId = job.id;
  const logPath = path.join(LOGS_DIR, `${jobId}.log`);
  // NOTE: started_at here binds the literal string "datetime('now')" (SQLite
  // functions can't run through a bound parameter); the UPDATE just below
  // immediately overwrites it with the real timestamp.
  markStatus(jobId, 'preparing', { log_path: logPath, started_at: "datetime('now')" });
  // Reset started_at properly (datetime() doesn't work via binding above).
  db.prepare("UPDATE build_jobs SET started_at = datetime('now') WHERE id = ?").run(jobId);
  // Truncate any previous log (e.g. the git-clone output) and start fresh.
  fs.writeFileSync(logPath, `Build ${jobId} started at ${new Date().toISOString()}\n`);
  section(logPath, 'PREPARING');
  const sourceDir = path.join(SOURCE_DIR, jobId);
  if (!fs.existsSync(sourceDir)) throw new Error(`source dir missing: ${sourceDir}`);
  const projectRoot = findProjectRoot(sourceDir);
  if (!projectRoot) throw new Error('No .xcodeproj or .xcworkspace found in source');
  appendLog(logPath, `Found ${projectRoot.type}: ${projectRoot.name} in ${projectRoot.dir}`);
  // Pick the scheme: the requested one if it exists, else the first listed.
  const schemes = await listSchemes({ projectRoot, logPath });
  if (!schemes.length) throw new Error('No schemes found in project');
  const scheme = job.scheme && schemes.includes(job.scheme)
    ? job.scheme
    : schemes[0];
  appendLog(logPath, `Using scheme: ${scheme} (available: ${schemes.join(', ')})`);
  // Read build settings for bundle IDs + team id
  const buildSettings = await getBuildSettings({ projectRoot, scheme, logPath });
  const { bundleIds, teamId } = extractBundleIds(buildSettings);
  if (!bundleIds.length) throw new Error('Could not determine bundle identifiers');
  if (!teamId) throw new Error('Could not determine development team ID');
  appendLog(logPath, `Team: ${teamId}`);
  appendLog(logPath, `Bundle IDs: ${bundleIds.join(', ')}`);
  // Persist the primary bundle id on the job row
  db.prepare('UPDATE build_jobs SET bundle_id = ?, scheme = ? WHERE id = ?')
    .run(bundleIds[0], scheme, jobId);
  // --- Signing phase ---
  // Every target (app + extensions) needs its own provisioning profile.
  section(logPath, 'SIGNING');
  markStatus(jobId, 'signing');
  const profilesByBundleId = {};
  for (const bid of bundleIds) {
    appendLog(logPath, `Ensuring profile for ${bid}`);
    const info = await profileManager.getProfile(bid);
    profilesByBundleId[bid] = info.profile_uuid;
    appendLog(logPath, `${info.profile_uuid} (${info.fromCache ? 'cache' : 'fresh'}, ${info.device_count} devices, expires ${info.expires_at})`);
  }
  // --- Archiving phase ---
  section(logPath, 'ARCHIVING');
  markStatus(jobId, 'archiving');
  const archivePath = path.join(BUILD_DIR, `${jobId}.xcarchive`);
  const archiveArgs = [
    projectRoot.type === 'workspace' ? '-workspace' : '-project', projectRoot.name,
    '-scheme', scheme,
    '-configuration', 'Release',
    '-destination', 'generic/platform=iOS',
    '-archivePath', archivePath,
    '-allowProvisioningUpdates',
    'CODE_SIGN_STYLE=Manual',
    `DEVELOPMENT_TEAM=${teamId}`,
    'archive',
  ];
  // We can't specify per-target PROVISIONING_PROFILE_SPECIFIER globally, so we rely on
  // xcodebuild finding the installed profiles in ~/Library/MobileDevice/Provisioning Profiles/
  // by matching bundle id + team id.
  await runCommand('/usr/bin/xcodebuild', archiveArgs, { cwd: projectRoot.dir, env: process.env, logPath });
  // --- Exporting phase ---
  section(logPath, 'EXPORTING');
  markStatus(jobId, 'exporting');
  const exportPath = path.join(BUILD_DIR, jobId);
  fs.mkdirSync(exportPath, { recursive: true });
  const exportOptionsPath = path.join(exportPath, 'ExportOptions.plist');
  fs.writeFileSync(exportOptionsPath, buildExportOptions({ teamId, profilesByBundleId }));
  appendLog(logPath, `Wrote ExportOptions.plist:\n${fs.readFileSync(exportOptionsPath, 'utf8')}`);
  const exportArgs = [
    '-exportArchive',
    '-archivePath', archivePath,
    '-exportPath', exportPath,
    '-exportOptionsPlist', exportOptionsPath,
    '-allowProvisioningUpdates',
  ];
  await runCommand('/usr/bin/xcodebuild', exportArgs, { cwd: projectRoot.dir, env: process.env, logPath });
  // Find the produced IPA
  const ipaFile = fs.readdirSync(exportPath).find((f) => f.endsWith('.ipa'));
  if (!ipaFile) throw new Error('Export succeeded but no .ipa produced');
  const ipaPath = path.join(exportPath, ipaFile);
  appendLog(logPath, `IPA produced: ${ipaPath} (${(fs.statSync(ipaPath).size / (1024*1024)).toFixed(1)} MB)`);
  // --- Uploading phase ---
  section(logPath, 'UPLOADING');
  markStatus(jobId, 'uploading', { ipa_path: ipaPath });
  const uploadResult = await uploadToUnraid({
    ipaPath,
    notes: `Built by ${os.hostname()} job ${jobId}`,
    logPath,
  });
  markStatus(jobId, 'succeeded', {
    unraid_build_id: uploadResult.build?.id || null,
    install_url: uploadResult.build?.install_url || null,
  });
  // --- Cleanup: keep log + IPA, remove source + archive ---
  try {
    fs.rmSync(sourceDir, { recursive: true, force: true });
    fs.rmSync(archivePath, { recursive: true, force: true });
  } catch (e) {
    // Best-effort; a leftover source tree only wastes disk.
    appendLog(logPath, `Cleanup warning: ${e.message}`);
  }
  section(logPath, `SUCCEEDED at ${new Date().toISOString()}`);
}
/**
 * Run one job to completion; on any failure, append the error to the job log
 * and mark the row failed so the UI can surface it.
 * @param {object} job - a `build_jobs` row.
 */
async function processJob(job) {
  const logPath = path.join(LOGS_DIR, `${job.id}.log`);
  try {
    await runBuild(job);
  } catch (err) {
    console.error(`[build-worker] job ${job.id} failed:`, err);
    try {
      appendLog(logPath, `\n✗ FAILED: ${err.message}\n${err.stack || ''}`);
    } catch {
      // Log file may be unwritable; the DB update below still records failure.
    }
    markStatus(job.id, 'failed', { error: err.message });
  }
}
/**
 * Drain the pending queue one job at a time (FIFO by created_at).
 * The module-level `running` flag makes this re-entrancy safe: overlapping
 * invocations return immediately instead of processing jobs concurrently.
 */
async function loop() {
  if (running) return;
  running = true;
  try {
    for (;;) {
      const next = db
        .prepare("SELECT * FROM build_jobs WHERE status = 'pending' ORDER BY created_at ASC LIMIT 1")
        .get();
      if (!next) break;
      await processJob(next);
    }
  } finally {
    running = false;
  }
}
// Fire-and-forget: run the drain loop without awaiting it; loop failures are
// logged rather than crashing the caller.
const kick = () => {
  loop().catch((err) => console.error('[build-worker] loop error:', err));
};

// Kick once immediately at boot, then poll for newly-queued jobs.
const start = () => {
  kick();
  setInterval(kick, POLL_INTERVAL_MS);
};

module.exports = { start, kick, runBuild, processJob };

View File

@@ -21,7 +21,7 @@ const path = require('path');
const fs = require('fs');
const { db, getSetting, setSetting, DATA_DIR } = require('./db');
const { requireLogin, validatePassword } = require('./auth');
const { requireLogin, requireBuilderSecret, validatePassword } = require('./auth');
const app = express();
const PORT = process.env.PORT || 3090;
@@ -71,7 +71,7 @@ app.get('/logout', (req, res) => {
// --- Pages ---
app.get('/', requireLogin, (req, res) => {
res.sendFile(path.join(__dirname, '..', 'views', 'index.html'));
res.sendFile(path.join(__dirname, '..', 'views', 'builds.html'));
});
app.get('/settings', requireLogin, (req, res) => {
@@ -137,6 +137,43 @@ app.delete('/api/devices/:udid', requireLogin, (req, res) => {
res.json({ success: true });
});
// --- Enrollment bridge (called by unraid's /enroll/callback over the LAN) ---
// Upsert a device reported by the public enrollment flow, then best-effort
// register it with App Store Connect. Auth is the shared builder secret
// (requireBuilderSecret) — the caller is the unraid storefront, not a browser.
app.post('/api/devices/from-enrollment', requireBuilderSecret, async (req, res) => {
  const { udid, name, model, platform = 'IOS' } = req.body || {};
  if (!udid || typeof udid !== 'string') {
    return res.status(400).json({ error: 'UDID is required' });
  }
  // Upsert: keep the existing name/model when the incoming values are blank.
  db.prepare(`
    INSERT INTO devices (udid, name, model, platform)
    VALUES (?, ?, ?, ?)
    ON CONFLICT(udid) DO UPDATE SET
      name = COALESCE(NULLIF(excluded.name, ''), devices.name),
      model = COALESCE(NULLIF(excluded.model, ''), devices.model),
      platform = excluded.platform
  `).run(udid, name || null, model || null, platform);
  let synced = false;
  try {
    const asc = require('./asc-api');
    const appleDevice = await asc.registerDevice({ udid, name, platform });
    const appleDeviceId = appleDevice?.id || null;
    db.prepare(`
      UPDATE devices
      SET apple_device_id = ?, synced_at = datetime('now')
      WHERE udid = ?
    `).run(appleDeviceId, udid);
    synced = true;
    // New devices invalidate cached provisioning profiles.
    invalidateProfilesForDeviceChange();
  } catch (err) {
    // ASC sync is best-effort: the device row is saved either way, and the
    // caller is told the upstream registration didn't happen.
    console.warn('[enrollment] ASC sync failed:', err.message);
    return res.json({ success: true, synced: false, warning: err.message });
  }
  res.json({ success: true, synced });
});
// --- Settings API ---
const SETTINGS_KEYS = [
@@ -215,6 +252,10 @@ app.post('/api/settings/test-unraid', requireLogin, async (req, res) => {
}
});
// --- Build pipeline ---
require('./build-routes').register(app, { requireLogin });
// --- Profile API ---
app.get('/api/profile/:bundleId', requireLogin, async (req, res) => {
@@ -248,4 +289,7 @@ app.get('/api/health', (req, res) => {
app.listen(PORT, '0.0.0.0', () => {
console.log(`iOS App Store Builder running on port ${PORT}`);
console.log(`Data dir: ${DATA_DIR}`);
// Start the build worker loop.
require('./build-worker').start();
console.log('Build worker started');
});

67
builder/views/build.html Normal file
View File

@@ -0,0 +1,67 @@
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <title>New Build - Builder</title>
  <link rel="stylesheet" href="/css/style.css">
</head>
<body>
  <header>
    <div class="header-left"><h1>🔨 Builder</h1></div>
    <nav>
      <a href="/">Builds</a>
      <a href="/build" class="active">New Build</a>
      <a href="/devices">Devices</a>
      <a href="/settings">Settings</a>
      <a href="/logout" class="logout">Logout</a>
    </nav>
  </header>
  <main>
    <h1 class="page-title">New Build</h1>
    <!-- Option 1: upload a source archive.
         Submitted by js/build.js as multipart to POST /api/build/upload. -->
    <div class="section">
      <h2>From source archive</h2>
      <div class="card">
        <form id="upload-form" enctype="multipart/form-data">
          <label>Archive (.zip or .tar.gz)</label>
          <input type="file" name="source" id="source-input" accept=".zip,.tar.gz,.tgz" required>
          <label>Scheme (optional)</label>
          <input type="text" name="scheme" placeholder="leave blank to use the first scheme">
          <div class="btn-row">
            <button type="submit">Queue Build</button>
          </div>
        </form>
      </div>
    </div>
    <!-- Option 2: clone a git repository.
         Submitted by js/build.js as JSON to POST /api/build/git. -->
    <div class="section">
      <h2>From git URL</h2>
      <div class="card">
        <form id="git-form">
          <label>Repository URL</label>
          <input type="text" name="url" placeholder="git@gitea.treytartt.com:user/repo.git or https://…">
          <div class="field-group">
            <div>
              <label>Branch (optional)</label>
              <input type="text" name="branch" placeholder="main">
            </div>
            <div>
              <label>Scheme (optional)</label>
              <input type="text" name="scheme" placeholder="first scheme">
            </div>
          </div>
          <div class="btn-row">
            <button type="submit">Queue Build</button>
          </div>
        </form>
      </div>
    </div>
    <!-- Transient notification target used by js/build.js toast() -->
    <div id="toast" class="toast"></div>
  </main>
  <script src="/js/build.js"></script>
</body>
</html>

40
builder/views/builds.html Normal file
View File

@@ -0,0 +1,40 @@
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <title>Builds - Builder</title>
  <link rel="stylesheet" href="/css/style.css">
</head>
<body>
  <header>
    <div class="header-left"><h1>🔨 Builder</h1></div>
    <nav>
      <a href="/" class="active">Builds</a>
      <a href="/build">New Build</a>
      <a href="/devices">Devices</a>
      <a href="/settings">Settings</a>
      <a href="/logout" class="logout">Logout</a>
    </nav>
  </header>
  <main>
    <div style="display:flex;align-items:center;justify-content:space-between;margin-bottom:16px">
      <h1 class="page-title" style="margin-bottom:0">Builds</h1>
      <a href="/build" class="btn-sm" style="background:var(--accent);color:white;padding:8px 16px;border-radius:20px;text-decoration:none;font-weight:600;font-size:14px">+ New Build</a>
    </div>
    <!-- Job table rendered by js/builds.js (polled every 5s from /api/builds) -->
    <div id="jobs-container"><div class="card"><p style="color:var(--text-muted)">Loading…</p></div></div>
    <!-- Detail pane: shown when a row is clicked; log streamed over SSE -->
    <div id="detail" class="section" style="display:none">
      <h2 id="detail-title">Job details</h2>
      <div class="card">
        <div id="detail-meta" class="mono" style="font-size:12px;color:var(--text-muted);margin-bottom:12px"></div>
        <pre id="log-viewer" class="log-viewer"></pre>
      </div>
    </div>
  </main>
  <script src="/js/builds.js"></script>
</body>
</html>