Initial commit — OFApp client + server

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Trey t
2026-02-12 20:07:06 -06:00
commit c60de19348
43 changed files with 8679 additions and 0 deletions

129
server/db.js Normal file
View File

@@ -0,0 +1,129 @@
import Database from 'better-sqlite3';
import { mkdirSync, existsSync } from 'fs';
import { dirname } from 'path';
// Resolve the database location and make sure its directory exists before
// better-sqlite3 tries to open the file.
const DB_PATH = process.env.DB_PATH || './data/db/ofapp.db';
const dir = dirname(DB_PATH);
if (!existsSync(dir)) {
  mkdirSync(dir, { recursive: true });
}

const db = new Database(DB_PATH);
// WAL lets concurrent readers proceed while downloads write history rows.
db.pragma('journal_mode = WAL');

// Base schema (idempotent). auth_config is kept to at most one row by
// saveAuthConfig's delete-then-insert.
db.exec(`
CREATE TABLE IF NOT EXISTS auth_config (
user_id TEXT,
cookie TEXT,
x_bc TEXT,
app_token TEXT,
x_of_rev TEXT,
user_agent TEXT
);
CREATE TABLE IF NOT EXISTS download_history (
id INTEGER PRIMARY KEY AUTOINCREMENT,
user_id TEXT,
post_id TEXT,
media_id TEXT,
media_type TEXT,
filename TEXT,
downloaded_at TEXT
);
CREATE TABLE IF NOT EXISTS download_cursors (
user_id TEXT UNIQUE,
cursor TEXT,
posts_downloaded INTEGER
);
CREATE TABLE IF NOT EXISTS settings (
key TEXT PRIMARY KEY,
value TEXT
);
`);

// Migration: add posted_at column if missing (databases created before the
// column existed are upgraded in place).
const cols = db.prepare("PRAGMA table_info(download_history)").all().map((c) => c.name);
if (!cols.includes('posted_at')) {
  db.exec('ALTER TABLE download_history ADD COLUMN posted_at TEXT');
}
// Return the single stored auth configuration row, or null when none exists.
export function getAuthConfig() {
  return db.prepare('SELECT * FROM auth_config LIMIT 1').get() ?? null;
}
// Replace the stored auth configuration atomically: delete any existing row
// and insert the new one inside a single transaction.
export function saveAuthConfig(config) {
  const replace = db.transaction((c) => {
    db.prepare('DELETE FROM auth_config').run();
    db.prepare(
      'INSERT INTO auth_config (user_id, cookie, x_bc, app_token, x_of_rev, user_agent) VALUES (?, ?, ?, ?, ?, ?)'
    ).run(c.user_id, c.cookie, c.x_bc, c.app_token, c.x_of_rev, c.user_agent);
  });
  replace(config);
}
// True when the media id already appears in download_history (dedupe check).
export function isMediaDownloaded(mediaId) {
  const hit = db.prepare('SELECT 1 FROM download_history WHERE media_id = ? LIMIT 1').get(String(mediaId));
  return hit !== undefined;
}
// Insert one completed download into download_history, stamped with the
// current time; postedAt is the original post date (nullable).
export function recordDownload(userId, postId, mediaId, mediaType, filename, postedAt) {
  const insert = db.prepare(
    'INSERT INTO download_history (user_id, post_id, media_id, media_type, filename, downloaded_at, posted_at) VALUES (?, ?, ?, ?, ?, ?, ?)'
  );
  insert.run(String(userId), String(postId), String(mediaId), mediaType, filename, new Date().toISOString(), postedAt || null);
}
// All download_history rows for a user, newest download first.
export function getDownloadHistory(userId) {
  const query = db.prepare('SELECT * FROM download_history WHERE user_id = ? ORDER BY downloaded_at DESC');
  return query.all(String(userId));
}
// Upsert the pagination cursor and cumulative download count for a user.
export function saveCursor(userId, cursor, postsDownloaded) {
  const upsert = db.prepare(
    'INSERT INTO download_cursors (user_id, cursor, posts_downloaded) VALUES (?, ?, ?) ON CONFLICT(user_id) DO UPDATE SET cursor = excluded.cursor, posts_downloaded = excluded.posts_downloaded'
  );
  upsert.run(String(userId), cursor, postsDownloaded);
}
// Fetch the saved cursor row for a user, or null when none was stored.
export function getCursor(userId) {
  const row = db.prepare('SELECT cursor, posts_downloaded FROM download_cursors WHERE user_id = ?').get(String(userId));
  return row || null;
}
// Drop any saved pagination cursor for a user.
export function clearCursor(userId) {
  const remove = db.prepare('DELETE FROM download_cursors WHERE user_id = ?');
  remove.run(String(userId));
}
// Look up the original post date recorded for a downloaded filename;
// null when the file is unknown or no date was recorded.
export function getPostDateByFilename(filename) {
  const row = db.prepare('SELECT posted_at FROM download_history WHERE filename = ? LIMIT 1').get(filename);
  return row ? row.posted_at || null : null;
}
// Read one settings value by key; null when the key is absent.
export function getSetting(key) {
  const row = db.prepare('SELECT value FROM settings WHERE key = ?').get(key);
  return row ? row.value : null;
}
// Write (insert or overwrite) a settings key/value pair.
export function setSetting(key, value) {
  const upsert = db.prepare(
    'INSERT INTO settings (key, value) VALUES (?, ?) ON CONFLICT(key) DO UPDATE SET value = excluded.value'
  );
  upsert.run(key, value);
}
// Return every settings row as a plain { key: value } object.
export function getAllSettings() {
  const rows = db.prepare('SELECT key, value FROM settings').all();
  return Object.fromEntries(rows.map(({ key, value }) => [key, value]));
}
// Per-user aggregates: number of downloaded files and most recent download time.
export function getDownloadStats() {
  const query = db.prepare(
    'SELECT user_id, COUNT(*) as file_count, MAX(downloaded_at) as last_download FROM download_history GROUP BY user_id'
  );
  return query.all();
}

256
server/download.js Normal file
View File

@@ -0,0 +1,256 @@
import { Router } from 'express';
import fetch from 'node-fetch';
import { mkdirSync, createWriteStream } from 'fs';
import { pipeline } from 'stream/promises';
import { extname } from 'path';
import { getAuthConfig, isMediaDownloaded, recordDownload, getDownloadStats, saveCursor, getCursor, clearCursor } from './db.js';
import { createSignedHeaders, getRules } from './signing.js';
const router = Router();
// OnlyFans API origin for all signed requests.
const OF_BASE = 'https://onlyfans.com';
// Download destination root; one subfolder per creator.
const MEDIA_PATH = process.env.MEDIA_PATH || './data/media';
// Politeness delay between successive API calls / file downloads, in ms.
const DOWNLOAD_DELAY = parseInt(process.env.DOWNLOAD_DELAY || '1000', 10);
// In-memory progress: userId -> { total, completed, errors, running }
const progressMap = new Map();
// Assemble the header set for an OnlyFans API request: static auth headers
// from the stored config plus the per-request signed headers, honoring the
// dynamic rules' remove_headers list. (Duplicated in proxy.js — keep in sync.)
function buildHeaders(authConfig, signedHeaders) {
  const rules = getRules();
  const headers = {
    'User-Agent': authConfig.user_agent || 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:148.0) Gecko/20100101 Firefox/148.0',
    'Accept': 'application/json, text/plain, */*',
    'Cookie': authConfig.cookie,
    'user-id': authConfig.user_id,
    'x-bc': authConfig.x_bc,
    'x-of-rev': authConfig.x_of_rev,
    'app-token': rules.app_token,
    ...signedHeaders,
  };
  if (rules.remove_headers) {
    for (const name of rules.remove_headers) {
      delete headers[name];
    }
  }
  return headers;
}
// Perform a signed GET against the OnlyFans API and parse the JSON body.
// FIX: fail fast on non-2xx responses — previously an HTML error page was
// fed straight to res.json(), producing a cryptic SyntaxError far from the
// actual cause. Callers already wrap this in try/catch.
async function fetchOF(ofPath, authConfig) {
  const signedHeaders = createSignedHeaders(ofPath, authConfig.user_id);
  const headers = buildHeaders(authConfig, signedHeaders);
  const res = await fetch(`${OF_BASE}${ofPath}`, { headers });
  if (!res.ok) {
    throw new Error(`OF API ${res.status} for ${ofPath}`);
  }
  return res.json();
}
// Pick the best available URL for a media item: full source first, then the
// full file URL, then the preview. Returns null when nothing is usable.
function getMediaUrl(media) {
  const candidates = [
    media.source?.source,
    media.files?.full?.url,
    media.files?.preview?.url,
  ];
  return candidates.find(Boolean) ?? null;
}
// Derive a file extension (including the dot) from a URL's pathname, falling
// back to '.bin' for unparsable URLs or extension-less paths. (URL.pathname
// never contains the query string, so no extra stripping is needed.)
function getExtFromUrl(url) {
  try {
    const { pathname } = new URL(url);
    return extname(pathname) || '.bin';
  } catch {
    return '.bin';
  }
}
// Format a date string as 'YYYY-MM' (local time); 'unknown' when the input
// is absent or does not parse as a date.
function getYearMonth(dateStr) {
  if (!dateStr) return 'unknown';
  try {
    const parsed = new Date(dateStr);
    if (Number.isNaN(parsed.getTime())) return 'unknown';
    const month = String(parsed.getMonth() + 1).padStart(2, '0');
    return `${parsed.getFullYear()}-${month}`;
  } catch {
    return 'unknown';
  }
}
// Promise-based delay used for politeness throttling between requests.
function sleep(ms) {
  return new Promise((done) => setTimeout(done, ms));
}
// Stream a remote file to disk; rejects on HTTP errors or stream failures.
async function downloadFile(url, dest) {
  const response = await fetch(url);
  if (!response.ok) {
    throw new Error(`Download failed: ${response.status}`);
  }
  await pipeline(response.body, createWriteStream(dest));
}
// Fetch a user's media list from the OF API (phase 1) and download each new
// item to disk (phase 2), updating the in-memory progress entry throughout.
// - postLimit: stop paginating after roughly this many media items (null = all)
// - resume: continue from the cursor saved by a previous limited run
// - username: preferred on-disk folder name (falls back to the numeric id)
async function runDownload(userId, authConfig, postLimit, resume, username) {
  const progress = { total: 0, completed: 0, errors: 0, running: true };
  progressMap.set(String(userId), progress);
  try {
    let beforePublishTime = null;
    let hasMore = true;
    const allMedia = [];
    let postsFetched = 0;
    let priorPostsDownloaded = 0;
    if (resume) {
      const saved = getCursor(String(userId));
      if (saved) {
        beforePublishTime = saved.cursor;
        priorPostsDownloaded = saved.posts_downloaded || 0;
      }
    }
    // Phase 1: paginate media items directly via /posts/medias.
    while (hasMore) {
      const batchSize = postLimit ? Math.min(10, postLimit - postsFetched) : 10;
      if (batchSize <= 0) break;
      let ofPath = `/api2/v2/users/${userId}/posts/medias?limit=${batchSize}&order=publish_date_desc&skip_users=all&format=infinite&pinned=0`;
      if (beforePublishTime) {
        ofPath += `&beforePublishTime=${encodeURIComponent(beforePublishTime)}`;
      }
      const data = await fetchOF(ofPath, authConfig);
      // The endpoint has returned both a flat array and a
      // { list, hasMore, tailMarker } wrapper — handle both shapes.
      const mediaList = Array.isArray(data) ? data : (data.list || []);
      postsFetched += mediaList.length;
      for (const media of mediaList) {
        const postDate = media.postedAt || media.createdAt || media.publishedAt || null;
        const postId = media.postId || media.post_id || media.id;
        allMedia.push({ postId, media, postDate });
      }
      hasMore = Array.isArray(data) ? data.length === batchSize : !!data.hasMore;
      if (!Array.isArray(data)) {
        beforePublishTime = data.tailMarker || null;
      } else if (mediaList.length > 0) {
        // For flat array responses, use the last item's date as the cursor.
        const last = mediaList[mediaList.length - 1];
        beforePublishTime = last.postedAt || last.createdAt || null;
      }
      // Stop if we've hit the limit.
      if (postLimit && postsFetched >= postLimit) break;
      if (hasMore) await sleep(DOWNLOAD_DELAY);
    }
    // Save the cursor for future "continue" downloads; clear it once we
    // reached the end (or ran unlimited).
    if (postLimit && beforePublishTime && hasMore) {
      saveCursor(String(userId), beforePublishTime, priorPostsDownloaded + postsFetched);
    } else {
      clearCursor(String(userId));
    }
    progress.total = allMedia.length;
    // Phase 2: download each media item.
    for (const { postId, media, postDate } of allMedia) {
      try {
        const mediaId = String(media.id);
        // Already-downloaded and locked items still count toward "completed"
        // so the progress bar can reach 100%.
        if (isMediaDownloaded(mediaId) || media.canView === false) {
          progress.completed++;
          continue;
        }
        const url = getMediaUrl(media);
        if (!url) {
          progress.completed++;
          continue;
        }
        const mediaType = media.type || 'unknown';
        const ext = getExtFromUrl(url);
        const filename = `${postId}_${mediaId}_${mediaType}${ext}`;
        const userDir = `${MEDIA_PATH}/${username || userId}`;
        mkdirSync(userDir, { recursive: true });
        // BUG FIX: the destination previously used a corrupted literal
        // ("$(unknown)") instead of the generated filename, so every media
        // item was written to — and overwrote — the same file.
        const dest = `${userDir}/${filename}`;
        await downloadFile(url, dest);
        recordDownload(userId, String(postId), mediaId, mediaType, filename, postDate);
        progress.completed++;
      } catch (err) {
        console.error(`[download] Error downloading media ${media.id}:`, err.message);
        progress.errors++;
        progress.completed++;
      }
      await sleep(DOWNLOAD_DELAY);
    }
  } catch (err) {
    console.error(`[download] Fatal error for user ${userId}:`, err.message);
    progress.errors++;
  } finally {
    progress.running = false;
  }
}
// POST /api/download/:userId — kick off a background download (fire-and-forget)
router.post('/api/download/:userId', (req, res, next) => {
  try {
    const authConfig = getAuthConfig();
    if (!authConfig) return res.status(401).json({ error: 'No auth config' });
    const { userId } = req.params;
    const { limit, resume, username } = req.body;
    const postLimit = limit ? parseInt(limit, 10) : null;
    const current = progressMap.get(String(userId));
    if (current?.running) {
      return res.json({ status: 'already_running', userId, progress: current });
    }
    // Intentionally not awaited: the request returns immediately while the
    // download continues in the background.
    runDownload(userId, authConfig, postLimit, !!resume, username || null).catch((err) =>
      console.error(`[download] Unhandled error for user ${userId}:`, err.message)
    );
    res.json({ status: 'started', userId });
  } catch (err) {
    next(err);
  }
});
// GET /api/download/:userId/status — progress snapshot for one user's download
router.get('/api/download/:userId/status', (req, res) => {
  const progress = progressMap.get(String(req.params.userId));
  if (!progress) return res.json({ status: 'not_started' });
  const status = progress.running ? 'running' : 'completed';
  res.json({ status, ...progress });
});
// GET /api/download/:userId/cursor — whether a resumable cursor exists
router.get('/api/download/:userId/cursor', (req, res) => {
  const saved = getCursor(String(req.params.userId));
  if (!saved) return res.json({ hasCursor: false });
  res.json({ hasCursor: true, postsDownloaded: saved.posts_downloaded });
});
// GET /api/download/active — list all downloads currently in flight
router.get('/api/download/active', (req, res) => {
  const active = [...progressMap.entries()]
    .filter(([, progress]) => progress.running)
    .map(([userId, progress]) => ({ user_id: userId, ...progress }));
  res.json(active);
});
// GET /api/download/history — per-user download aggregates
router.get('/api/download/history', (req, res, next) => {
  try {
    res.json(getDownloadStats());
  } catch (err) {
    next(err);
  }
});
export default router;

143
server/gallery.js Normal file
View File

@@ -0,0 +1,143 @@
import { Router } from 'express';
import { readdirSync, statSync } from 'fs';
import { join, extname } from 'path';
import { getPostDateByFilename, getSetting } from './db.js';
const router = Router();
// Root directory for downloaded media; one subfolder per creator.
const MEDIA_PATH = process.env.MEDIA_PATH || './data/media';
// Recognized media extensions, keyed by category.
const IMAGE_EXTS = new Set(['.jpg', '.jpeg', '.png', '.gif', '.webp', '.bmp']);
const VIDEO_EXTS = new Set(['.mp4', '.mov', '.avi', '.webm', '.mkv', '.m4v']);

// Classify a filename as 'image' or 'video' by extension; null for anything else.
function getMediaType(filename) {
  const ext = extname(filename).toLowerCase();
  return IMAGE_EXTS.has(ext) ? 'image' : VIDEO_EXTS.has(ext) ? 'video' : null;
}
// GET /api/gallery/folders — list creator folders containing at least one
// media file, with per-type counts, sorted by name.
router.get('/api/gallery/folders', (req, res, next) => {
  try {
    const folders = [];
    for (const entry of readdirSync(MEDIA_PATH, { withFileTypes: true })) {
      // Dot/underscore-prefixed directories are internal; skip them.
      if (!entry.isDirectory() || entry.name.startsWith('.') || entry.name.startsWith('_')) continue;
      const mediaFiles = readdirSync(join(MEDIA_PATH, entry.name))
        .filter((name) => !name.startsWith('.') && getMediaType(name) !== null);
      if (mediaFiles.length === 0) continue;
      const images = mediaFiles.filter((name) => getMediaType(name) === 'image').length;
      folders.push({
        name: entry.name,
        total: mediaFiles.length,
        images,
        videos: mediaFiles.length - images,
      });
    }
    folders.sort((a, b) => a.name.localeCompare(b.name));
    res.json(folders);
  } catch (err) {
    next(err);
  }
});
// GET /api/gallery/files?folder=&folders=&type=&sort=&offset=&limit=
// Scan one folder, a comma-separated list, or every folder; filter by media
// type; sort (newest first or shuffled); return one page of results.
router.get('/api/gallery/files', (req, res, next) => {
  try {
    const { folder, type, sort, offset, limit } = req.query;
    const typeFilter = type || 'all'; // all, image, video
    const sortMode = sort || 'latest'; // latest, shuffle
    const offsetNum = parseInt(offset || '0', 10);
    const limitNum = parseInt(limit || '50', 10);
    // PERF FIX: the HLS toggle is loop-invariant — resolve it once instead of
    // hitting the settings table for every video file in the scan.
    const hlsEnabled = (getSetting('hls_enabled') || process.env.HLS_ENABLED) === 'true';
    let allFiles = [];
    const foldersParam = req.query.folders; // comma-separated list
    const foldersToScan = folder
      ? [folder]
      : foldersParam
      ? foldersParam.split(',').map((f) => f.trim()).filter(Boolean)
      : readdirSync(MEDIA_PATH, { withFileTypes: true })
          .filter((e) => e.isDirectory() && !e.name.startsWith('.') && !e.name.startsWith('_'))
          .map((e) => e.name);
    for (const dir of foldersToScan) {
      const dirPath = join(MEDIA_PATH, dir);
      let files;
      try {
        files = readdirSync(dirPath);
      } catch {
        continue; // folder missing or unreadable — skip silently
      }
      for (const file of files) {
        if (file.startsWith('.')) continue;
        const mediaType = getMediaType(file);
        if (!mediaType) continue;
        if (typeFilter !== 'all' && mediaType !== typeFilter) continue;
        const filePath = join(dirPath, file);
        const stat = statSync(filePath);
        const postedAt = getPostDateByFilename(file);
        const fileObj = {
          folder: dir,
          filename: file,
          type: mediaType,
          size: stat.size,
          modified: stat.mtimeMs,
          postedAt: postedAt || null,
          url: `/api/gallery/media/${encodeURIComponent(dir)}/${encodeURIComponent(file)}`,
        };
        if (hlsEnabled && mediaType === 'video') {
          fileObj.hlsUrl = `/api/hls/${encodeURIComponent(dir)}/${encodeURIComponent(file)}/master.m3u8`;
        }
        allFiles.push(fileObj);
      }
    }
    // Sort: Fisher-Yates shuffle, or newest first by post date (mtime fallback).
    if (sortMode === 'shuffle') {
      for (let i = allFiles.length - 1; i > 0; i--) {
        const j = Math.floor(Math.random() * (i + 1));
        [allFiles[i], allFiles[j]] = [allFiles[j], allFiles[i]];
      }
    } else {
      allFiles.sort((a, b) => {
        const aTime = a.postedAt ? new Date(a.postedAt).getTime() : a.modified;
        const bTime = b.postedAt ? new Date(b.postedAt).getTime() : b.modified;
        return bTime - aTime;
      });
    }
    const total = allFiles.length;
    const page = allFiles.slice(offsetNum, offsetNum + limitNum);
    res.json({ total, offset: offsetNum, limit: limitNum, files: page });
  } catch (err) {
    next(err);
  }
});
// GET /api/gallery/media/:folder/:filename — serve a stored media file
router.get('/api/gallery/media/:folder/:filename', (req, res) => {
  const { folder, filename } = req.params;
  // Reject obvious path traversal; sendFile's root option below additionally
  // refuses any path containing '..'.
  if (folder.includes('..') || filename.includes('..')) {
    return res.status(400).json({ error: 'Invalid path' });
  }
  // BUG FIX: the previous `root: '/'` resolved relative MEDIA_PATH values
  // (the default './data/media') against the filesystem root instead of the
  // working directory, so files could never be found outside a container
  // layout. Rooting sendFile at the target folder works for both relative
  // and absolute MEDIA_PATH and confines serving to that folder.
  res.sendFile(filename, { root: join(MEDIA_PATH, folder) }, (err) => {
    if (err && !res.headersSent) {
      res.status(404).json({ error: 'File not found' });
    }
  });
});
export default router;

116
server/hls.js Normal file
View File

@@ -0,0 +1,116 @@
import { Router } from 'express';
import { join } from 'path';
import { existsSync } from 'fs';
import { execFile, spawn } from 'child_process';
import { promisify } from 'util';
import { getSetting } from './db.js';
const execFileAsync = promisify(execFile);
const router = Router();
// Must match the media root used by the downloader and gallery routes.
const MEDIA_PATH = process.env.MEDIA_PATH || './data/media';
// Length of each virtual HLS segment, in seconds.
const SEGMENT_DURATION = 10;
// HLS transcoding is opt-in via the 'hls_enabled' setting or HLS_ENABLED env var.
function isHlsEnabled() {
  return (getSetting('hls_enabled') || process.env.HLS_ENABLED) === 'true';
}
// Resolve (folder, filename) to an on-disk path under MEDIA_PATH, or null
// when either part contains separators/'..' or the file does not exist.
function validatePath(folder, filename) {
  const hasBadChars = (part) =>
    part.includes('..') || part.includes('/') || part.includes('\\');
  if (hasBadChars(folder) || hasBadChars(filename)) return null;
  const filePath = join(MEDIA_PATH, folder, filename);
  return existsSync(filePath) ? filePath : null;
}
// GET /api/hls/:folder/:filename/master.m3u8
// Build a VOD playlist on the fly: probe the source's duration with ffprobe
// and emit fixed-length segment entries. The segments themselves are remuxed
// on demand by the segment route below.
router.get('/api/hls/:folder/:filename/master.m3u8', async (req, res) => {
  if (!isHlsEnabled()) {
    return res.status(404).json({ error: 'HLS not enabled' });
  }
  const { folder, filename } = req.params;
  const filePath = validatePath(folder, filename);
  if (!filePath) {
    return res.status(400).json({ error: 'Invalid path' });
  }
  try {
    // csv=p=0 prints the bare duration value with no key prefix.
    const { stdout } = await execFileAsync('ffprobe', [
      '-v', 'error',
      '-show_entries', 'format=duration',
      '-of', 'csv=p=0',
      filePath,
    ]);
    const duration = parseFloat(stdout.trim());
    if (isNaN(duration) || duration <= 0) {
      return res.status(500).json({ error: 'Could not determine video duration' });
    }
    const segmentCount = Math.ceil(duration / SEGMENT_DURATION);
    let playlist = '#EXTM3U\n#EXT-X-VERSION:3\n';
    playlist += `#EXT-X-TARGETDURATION:${SEGMENT_DURATION}\n`;
    playlist += '#EXT-X-MEDIA-SEQUENCE:0\n';
    for (let i = 0; i < segmentCount; i++) {
      // The final segment is usually shorter than SEGMENT_DURATION.
      const remaining = duration - i * SEGMENT_DURATION;
      const segDuration = Math.min(SEGMENT_DURATION, remaining);
      playlist += `#EXTINF:${segDuration.toFixed(3)},\n`;
      playlist += `segment-${i}.ts\n`;
    }
    playlist += '#EXT-X-ENDLIST\n';
    res.setHeader('Content-Type', 'application/vnd.apple.mpegurl');
    res.send(playlist);
  } catch (err) {
    console.error('[hls] ffprobe error:', err.message);
    res.status(500).json({ error: 'Failed to probe video' });
  }
});
// GET /api/hls/:folder/:filename/segment-:index.ts
// Remux one segment on demand: seek to index*SEGMENT_DURATION and stream
// SEGMENT_DURATION seconds of MPEG-TS to the response.
router.get('/api/hls/:folder/:filename/segment-:index.ts', (req, res) => {
  if (!isHlsEnabled()) {
    return res.status(404).json({ error: 'HLS not enabled' });
  }
  const { folder, filename, index } = req.params;
  const filePath = validatePath(folder, filename);
  if (!filePath) {
    return res.status(400).json({ error: 'Invalid path' });
  }
  const segIndex = parseInt(index, 10);
  if (isNaN(segIndex) || segIndex < 0) {
    return res.status(400).json({ error: 'Invalid segment index' });
  }
  const offset = segIndex * SEGMENT_DURATION;
  // NOTE(review): '-ss' before '-i' combined with '-c copy' snaps the seek to
  // the nearest keyframe, so segment boundaries may not align exactly with
  // the playlist's EXTINF durations — confirm seeking behaves acceptably.
  const ffmpeg = spawn('ffmpeg', [
    '-ss', String(offset),
    '-i', filePath,
    '-t', String(SEGMENT_DURATION),
    '-c', 'copy',
    '-f', 'mpegts',
    'pipe:1',
  ], { stdio: ['ignore', 'pipe', 'ignore'] }); // stderr deliberately discarded
  res.setHeader('Content-Type', 'video/MP2T');
  ffmpeg.stdout.pipe(res);
  // Stop the transcode as soon as the client disconnects.
  req.on('close', () => {
    ffmpeg.kill('SIGKILL');
  });
  ffmpeg.on('error', (err) => {
    console.error('[hls] ffmpeg error:', err.message);
    if (!res.headersSent) {
      res.status(500).json({ error: 'Transcoding failed' });
    }
  });
});
export default router;

90
server/index.js Normal file
View File

@@ -0,0 +1,90 @@
import express from 'express';
import https from 'https';
import cors from 'cors';
import { existsSync, readFileSync, mkdirSync, writeFileSync } from 'fs';
import { execSync } from 'child_process';
import { fileURLToPath } from 'url';
import { dirname, join } from 'path';
import { initRules } from './signing.js';
import proxyRouter from './proxy.js';
import downloadRouter from './download.js';
import galleryRouter from './gallery.js';
import hlsRouter from './hls.js';
import settingsRouter from './settings.js';
// Recreate __dirname for ES-module scope.
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

const app = express();
const PORT = process.env.PORT || 3001;
const HTTPS_PORT = process.env.HTTPS_PORT || 3443;

app.use(cors());
// Parse DRM license request bodies as raw binary BEFORE global JSON parser
// (express.json can interfere with reading the raw body stream)
app.use('/api/drm-license', express.raw({ type: '*/*', limit: '1mb' }));
app.use(express.json());

// API routes
app.use(proxyRouter);
app.use(downloadRouter);
app.use(galleryRouter);
app.use(hlsRouter);
app.use(settingsRouter);

// Serve static client build in production; non-API paths fall through to
// index.html so client-side routing survives a page refresh.
const clientDist = join(__dirname, '..', 'client', 'dist');
if (existsSync(clientDist)) {
  app.use(express.static(clientDist));
  app.get('*', (req, res) => {
    if (req.path.startsWith('/api/')) return res.status(404).json({ error: 'Not found' });
    res.sendFile(join(clientDist, 'index.html'));
  });
}

// Last-resort error handler: log and return the message as JSON.
app.use((err, req, res, _next) => {
  console.error('[server] Error:', err.message);
  res.status(500).json({ error: err.message || 'Internal server error' });
});
// Boot sequence: load signing rules (non-fatal when unavailable), start the
// HTTP listener, then start HTTPS with a self-signed certificate (generated
// on first run) because browser EME/DRM playback requires a secure context.
async function start() {
  try {
    await initRules();
  } catch (err) {
    console.error('[server] Failed to load signing rules:', err.message);
    console.error('[server] Signing will not work until rules are available');
  }
  app.listen(PORT, () => {
    console.log(`[server] Listening on http://localhost:${PORT}`);
  });
  // Start HTTPS server for DRM/EME support (requires secure context)
  try {
    // NOTE(review): cert path is hard-coded to /data/certs — assumes a
    // container-style filesystem layout; confirm for bare-metal runs.
    const certDir = '/data/certs';
    const certPath = `${certDir}/server.crt`;
    const keyPath = `${certDir}/server.key`;
    if (!existsSync(certPath) || !existsSync(keyPath)) {
      mkdirSync(certDir, { recursive: true });
      // Paths interpolated below are the fixed constants above, not user input.
      execSync(`openssl req -x509 -newkey rsa:2048 -keyout ${keyPath} -out ${certPath} -days 3650 -nodes -subj '/CN=ofapp'`);
      console.log('[server] Generated self-signed HTTPS certificate');
    }
    const httpsServer = https.createServer({
      key: readFileSync(keyPath),
      cert: readFileSync(certPath),
    }, app);
    httpsServer.listen(HTTPS_PORT, () => {
      console.log(`[server] HTTPS listening on https://localhost:${HTTPS_PORT}`);
    });
  } catch (err) {
    console.error('[server] HTTPS setup failed:', err.message);
    console.error('[server] DRM video playback will not work without HTTPS');
  }
}
start();

1369
server/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

15
server/package.json Normal file
View File

@@ -0,0 +1,15 @@
{
"name": "ofapp-server",
"version": "1.0.0",
"type": "module",
"scripts": {
"start": "node index.js",
"dev": "node --watch index.js"
},
"dependencies": {
"express": "^4.21.0",
"better-sqlite3": "^11.0.0",
"node-fetch": "^3.3.2",
"cors": "^2.8.5"
}
}

397
server/proxy.js Normal file
View File

@@ -0,0 +1,397 @@
import express, { Router } from 'express';
import fetch from 'node-fetch';
import { getAuthConfig, saveAuthConfig } from './db.js';
import { createSignedHeaders, getRules } from './signing.js';
const router = Router();
// All OnlyFans API requests are proxied to this origin.
const OF_BASE = 'https://onlyfans.com';
// Entity types that may scope a DRM license request.
const DRM_ENTITY_TYPES = new Set(['post', 'message', 'story', 'stream']);

// Lower-case and validate a DRM entity type; null when unrecognized.
function normalizeDrmEntityType(entityType) {
  const lowered = String(entityType || '').toLowerCase();
  return DRM_ENTITY_TYPES.has(lowered) ? lowered : null;
}
// Decode a base64 license payload (standard or URL-safe alphabet) into a
// Buffer. Returns null for non-strings, blank strings, or invalid characters.
function decodeBase64License(value) {
  if (typeof value !== 'string') return null;
  const trimmed = value.trim();
  if (!trimmed) return null;
  // Allow standard and URL-safe base64 alphabets.
  if (!/^[A-Za-z0-9+/_=-]+$/.test(trimmed)) return null;
  try {
    const standard = trimmed.replaceAll('-', '+').replaceAll('_', '/');
    return Buffer.from(standard, 'base64');
  } catch {
    return null;
  }
}
// Assemble the header set for an OnlyFans API request: static auth headers
// from the stored config plus per-request signed headers, honoring the
// dynamic rules' remove_headers list. (Duplicated in download.js — keep in sync.)
function buildHeaders(authConfig, signedHeaders) {
  const rules = getRules();
  const headers = {
    'User-Agent': authConfig.user_agent || 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:148.0) Gecko/20100101 Firefox/148.0',
    'Accept': 'application/json, text/plain, */*',
    'Cookie': authConfig.cookie,
    'user-id': authConfig.user_id,
    'x-bc': authConfig.x_bc,
    'x-of-rev': authConfig.x_of_rev,
    'app-token': rules.app_token,
    ...signedHeaders,
  };
  // Respect remove_headers from dynamic rules
  if (rules.remove_headers) {
    for (const name of rules.remove_headers) {
      delete headers[name];
    }
  }
  return headers;
}
// Signed GET against the OF API; returns both the upstream HTTP status and
// the parsed JSON so route handlers can mirror the status code.
async function proxyGet(ofPath, authConfig) {
  const headers = buildHeaders(authConfig, createSignedHeaders(ofPath, authConfig.user_id));
  const res = await fetch(`${OF_BASE}${ofPath}`, { headers });
  return { status: res.status, data: await res.json() };
}
// GET /api/auth — return the stored auth config (null when unset)
router.get('/api/auth', (req, res) => {
  res.json(getAuthConfig());
});
// POST /api/auth — persist new auth credentials (replaces any existing row)
const AUTH_FIELDS = ['user_id', 'cookie', 'x_bc', 'app_token', 'x_of_rev', 'user_agent'];
router.post('/api/auth', (req, res) => {
  const config = {};
  for (const field of AUTH_FIELDS) {
    config[field] = req.body[field];
  }
  saveAuthConfig(config);
  res.json({ success: true });
});
// GET /api/me — the authenticated user's own profile
router.get('/api/me', async (req, res, next) => {
  try {
    const authConfig = getAuthConfig();
    if (!authConfig) return res.status(401).json({ error: 'No auth config' });
    const result = await proxyGet('/api2/v2/users/me', authConfig);
    res.status(result.status).json(result.data);
  } catch (err) {
    next(err);
  }
});
// GET /api/feed — home feed, optionally paged via ?beforePublishTime=
router.get('/api/feed', async (req, res, next) => {
  try {
    const authConfig = getAuthConfig();
    if (!authConfig) return res.status(401).json({ error: 'No auth config' });
    const { beforePublishTime } = req.query;
    const pageSuffix = beforePublishTime
      ? `&beforePublishTime=${encodeURIComponent(beforePublishTime)}`
      : '';
    const { status, data } = await proxyGet(
      `/api2/v2/posts?limit=10&format=infinite${pageSuffix}`,
      authConfig
    );
    res.status(status).json(data);
  } catch (err) {
    next(err);
  }
});
// GET /api/subscriptions — active subscriptions, 50 at a time (?offset=)
router.get('/api/subscriptions', async (req, res, next) => {
  try {
    const authConfig = getAuthConfig();
    if (!authConfig) return res.status(401).json({ error: 'No auth config' });
    const offset = req.query.offset || 0;
    const { status, data } = await proxyGet(
      `/api2/v2/subscriptions/subscribes?type=active&sort=desc&field=expire_date&limit=50&offset=${offset}`,
      authConfig
    );
    res.status(status).json(data);
  } catch (err) {
    next(err);
  }
});
// GET /api/users/:id/posts — a creator's posts, paged via ?beforePublishTime=
router.get('/api/users/:id/posts', async (req, res, next) => {
  try {
    const authConfig = getAuthConfig();
    if (!authConfig) return res.status(401).json({ error: 'No auth config' });
    const { beforePublishTime } = req.query;
    let ofPath = `/api2/v2/users/${req.params.id}/posts?limit=10&order=publish_date_desc&format=infinite&pinned=0&counters=1`;
    if (beforePublishTime) {
      ofPath += `&beforePublishTime=${encodeURIComponent(beforePublishTime)}`;
    }
    const { status, data } = await proxyGet(ofPath, authConfig);
    res.status(status).json(data);
  } catch (err) {
    next(err);
  }
});
// GET /api/users/:username — resolve a username to a user object
router.get('/api/users/:username', async (req, res, next) => {
  try {
    const authConfig = getAuthConfig();
    if (!authConfig) return res.status(401).json({ error: 'No auth config' });
    const { status, data } = await proxyGet(`/api2/v2/users/${req.params.username}`, authConfig);
    // OF API sometimes returns 200 with an error body instead of a proper HTTP error.
    const looksLikeErrorBody = status === 200 && data && !data.id && data.code !== undefined;
    if (looksLikeErrorBody) {
      return res.status(404).json({ error: data.message || 'User not found' });
    }
    res.status(status).json(data);
  } catch (err) {
    next(err);
  }
});
// GET /api/media-proxy?url= — stream CDN media through the server, forwarding
// Range requests and mirroring the content headers players need for seeking.
router.get('/api/media-proxy', async (req, res) => {
  const url = req.query.url;
  if (!url) return res.status(400).json({ error: 'Missing url parameter' });
  try {
    // SECURITY FIX: a bare endsWith('onlyfans.com') also accepted hostnames
    // like 'evilonlyfans.com'; require an exact match or a true subdomain.
    const { hostname } = new URL(url);
    if (hostname !== 'onlyfans.com' && !hostname.endsWith('.onlyfans.com')) {
      return res.status(403).json({ error: 'Only onlyfans.com URLs allowed' });
    }
    const headers = {};
    if (req.headers.range) {
      headers['Range'] = req.headers.range;
    }
    const upstream = await fetch(url, { headers });
    if (!upstream.ok && upstream.status !== 206) return res.status(upstream.status).end();
    // Pass through the headers a media player relies on for progressive playback.
    for (const name of ['content-type', 'content-length', 'content-range', 'accept-ranges']) {
      const value = upstream.headers.get(name);
      if (value) res.set(name, value);
    }
    res.set('Cache-Control', 'public, max-age=86400');
    res.status(upstream.status);
    upstream.body.pipe(res);
  } catch (err) {
    console.error('[media-proxy] Error:', err.message);
    res.status(500).json({ error: 'Proxy fetch failed' });
  }
});
// POST /api/drm-license — proxy Widevine license requests through OF's DRM resolver.
// Query params: mediaId (required); entityType/entityId optionally scope the
// license to a post/message/story/stream; cp/cs/ck are CloudFront signed-cookie
// components appended to the upstream Cookie header.
router.post('/api/drm-license', async (req, res) => {
  const { mediaId, entityId, entityType, cp, cs, ck } = req.query;
  if (!mediaId) {
    return res.status(400).json({ error: 'Missing mediaId parameter' });
  }
  try {
    const authConfig = getAuthConfig();
    if (!authConfig) return res.status(401).json({ error: 'No auth config' });
    // `express.raw()` handles most requests, but keep a fallback for missing content-type.
    let rawBody = Buffer.isBuffer(req.body) ? req.body : null;
    if (!rawBody) {
      const chunks = [];
      for await (const chunk of req) {
        chunks.push(chunk);
      }
      rawBody = Buffer.concat(chunks);
    }
    // Entity-scoped licenses use a different endpoint than own-media licenses.
    const normalizedEntityType = normalizeDrmEntityType(entityType);
    const parsedEntityId = Number.parseInt(entityId, 10);
    const hasEntityContext = normalizedEntityType
      && Number.isFinite(parsedEntityId)
      && parsedEntityId > 0;
    const drmPath = hasEntityContext
      ? `/api2/v2/users/media/${mediaId}/drm/${normalizedEntityType}/${parsedEntityId}`
      : `/api2/v2/users/media/${mediaId}/drm/`;
    const ofPath = `${drmPath}?type=widevine`;
    console.log(
      '[drm-license] License request mediaId:', mediaId,
      'entityType:', hasEntityContext ? normalizedEntityType : 'own_media',
      'entityId:', hasEntityContext ? parsedEntityId : null,
      'challenge size:', rawBody.length,
      'content-type:', req.headers['content-type'] || 'none'
    );
    const signedHeaders = createSignedHeaders(ofPath, authConfig.user_id);
    const headers = buildHeaders(authConfig, signedHeaders);
    headers['Content-Type'] = 'application/octet-stream';
    // Append CloudFront cookies
    const cfParts = [];
    if (cp) cfParts.push(`CloudFront-Policy=${cp}`);
    if (cs) cfParts.push(`CloudFront-Signature=${cs}`);
    if (ck) cfParts.push(`CloudFront-Key-Pair-Id=${ck}`);
    if (cfParts.length > 0) {
      headers['Cookie'] = [headers['Cookie'], ...cfParts].filter(Boolean).join('; ');
    }
    console.log('[drm-license] Proxying to OF:', ofPath);
    const upstream = await fetch(`${OF_BASE}${ofPath}`, {
      method: 'POST',
      headers,
      body: rawBody.length > 0 ? rawBody : undefined,
    });
    const responseBody = Buffer.from(await upstream.arrayBuffer());
    const upstreamContentType = upstream.headers.get('content-type') || '';
    const isJson = upstreamContentType.includes('application/json');
    // Keep binary license payloads out of the logs.
    const responsePreview = isJson
      ? responseBody.toString('utf8').substring(0, 300)
      : `<binary:${responseBody.length}>`;
    console.log('[drm-license] OF response:', upstream.status, 'size:', responseBody.length,
      'content-type:', upstreamContentType || 'unknown', 'body:', responsePreview);
    let bodyToSend = responseBody;
    let contentType = upstreamContentType || 'application/octet-stream';
    // Some endpoints return a JSON wrapper with a base64 license payload.
    if (upstream.ok && isJson) {
      try {
        const payload = JSON.parse(responseBody.toString('utf8'));
        // The wrapper key varies by endpoint; probe the known variants.
        const maybeLicense =
          payload?.license ||
          payload?.licenseData ||
          payload?.data?.license ||
          payload?.data?.licenseData ||
          payload?.result?.license ||
          payload?.result?.licenseData ||
          null;
        const decoded = decodeBase64License(maybeLicense);
        if (decoded) {
          bodyToSend = decoded;
          contentType = 'application/octet-stream';
        }
      } catch {
        // Keep upstream response unchanged if JSON parsing fails.
      }
    }
    res.status(upstream.status);
    res.set('Content-Type', contentType);
    res.send(bodyToSend);
  } catch (err) {
    console.error('[drm-license] Error:', err.message);
    res.status(500).json({ error: 'License proxy failed' });
  }
});
// GET /api/drm-hls — proxy DRM-protected HLS/DASH streams from OnlyFans CDNs.
// Query params: url (required; must point at onlyfans.com or a subdomain),
// plus optional CloudFront signed-cookie values cp/cs/ck
// (Policy / Signature / Key-Pair-Id) that are re-attached upstream and
// threaded through every rewritten playlist URL.
router.get('/api/drm-hls', async (req, res) => {
  const { url, cp, cs, ck } = req.query;
  if (!url) return res.status(400).json({ error: 'Missing url parameter' });
  // Build a URL that routes a CDN resource back through this endpoint,
  // carrying the CloudFront cookie values along (always present, even empty,
  // to keep the query shape stable).
  const proxyUrl = (abs) =>
    `/api/drm-hls?url=${encodeURIComponent(abs)}&cp=${encodeURIComponent(cp || '')}&cs=${encodeURIComponent(cs || '')}&ck=${encodeURIComponent(ck || '')}`;
  try {
    const parsed = new URL(url);
    // Exact host or subdomain match only. A bare endsWith('onlyfans.com')
    // would also accept e.g. "evil-onlyfans.com" and turn this proxy into an
    // SSRF gadget for arbitrary hosts.
    const host = parsed.hostname.toLowerCase();
    if (host !== 'onlyfans.com' && !host.endsWith('.onlyfans.com')) {
      return res.status(403).json({ error: 'Only onlyfans.com URLs allowed' });
    }
    // Refuse non-HTTP schemes outright (file:, etc.).
    if (parsed.protocol !== 'https:' && parsed.protocol !== 'http:') {
      return res.status(403).json({ error: 'Only onlyfans.com URLs allowed' });
    }
    // Attach CloudFront signed cookies
    const cookieParts = [];
    if (cp) cookieParts.push(`CloudFront-Policy=${cp}`);
    if (cs) cookieParts.push(`CloudFront-Signature=${cs}`);
    if (ck) cookieParts.push(`CloudFront-Key-Pair-Id=${ck}`);
    const headers = {};
    if (cookieParts.length > 0) {
      headers['Cookie'] = cookieParts.join('; ');
    }
    // Forward Range requests so seeking keeps working.
    if (req.headers.range) {
      headers['Range'] = req.headers.range;
    }
    const upstream = await fetch(url, { headers });
    if (!upstream.ok && upstream.status !== 206) {
      console.error(`[drm-hls] Upstream ${upstream.status} for ${url}`);
      return res.status(upstream.status).end();
    }
    const contentType = upstream.headers.get('content-type') || '';
    // DASH manifest — inject BaseURL so Shaka Player resolves segment URLs to CDN
    if (url.endsWith('.mpd') || contentType.includes('dash+xml')) {
      let body = await upstream.text();
      const baseUrl = url.substring(0, url.lastIndexOf('/') + 1);
      // Insert <BaseURL> right after <MPD ...> opening tag so relative URLs resolve to CDN
      body = body.replace(/(<MPD[^>]*>)/, `$1\n <BaseURL>${baseUrl}</BaseURL>`);
      res.set('Content-Type', 'application/dash+xml');
      res.set('Cache-Control', 'no-cache');
      res.send(body);
    }
    // HLS playlist — rewrite URLs to route through this proxy
    else if (url.endsWith('.m3u8') || contentType.includes('mpegurl') || contentType.includes('x-mpegurl')) {
      const body = await upstream.text();
      const rewritten = body.split('\n').map(line => {
        const trimmed = line.trim();
        if (!trimmed) return line;
        // Rewrite URI= attributes in EXT tags (e.g., #EXT-X-KEY, #EXT-X-MAP)
        // Skip non-HTTP URIs like skd:// (FairPlay key identifiers)
        if (trimmed.startsWith('#')) {
          if (trimmed.includes('URI="')) {
            return trimmed.replace(/URI="([^"]+)"/g, (_, uri) => {
              if (!uri.startsWith('http') && !uri.startsWith('/')) return `URI="${uri}"`;
              // Resolve relative AND root-relative refs against the playlist
              // URL; plain string concat mishandles "/path"-style URIs.
              const abs = new URL(uri, url).href;
              return `URI="${proxyUrl(abs)}"`;
            });
          }
          return line;
        }
        // URL line (segment or variant playlist reference)
        return proxyUrl(new URL(trimmed, url).href);
      }).join('\n');
      res.set('Content-Type', 'application/vnd.apple.mpegurl');
      res.set('Cache-Control', 'no-cache');
      res.send(rewritten);
    } else {
      // Binary content (TS segments, init segments) — pipe through with the
      // range/length headers intact.
      for (const name of ['content-type', 'content-length', 'content-range', 'accept-ranges']) {
        const value = upstream.headers.get(name);
        if (value) res.set(name, value);
      }
      res.set('Cache-Control', 'public, max-age=3600');
      res.status(upstream.status);
      // NOTE(review): assumes a node-fetch Node-stream body; WHATWG fetch
      // bodies are web ReadableStreams without .pipe — confirm the import.
      upstream.body.pipe(res);
    }
  } catch (err) {
    console.error('[drm-hls] Error:', err.message);
    res.status(500).json({ error: 'DRM HLS proxy failed' });
  }
});
export default router;

21
server/settings.js Normal file
View File

@@ -0,0 +1,21 @@
import { Router } from 'express';
import { getAllSettings, setSetting } from './db.js';

const router = Router();

// GET /api/settings — return the full persisted key/value settings map.
router.get('/api/settings', (req, res) => {
  const settings = getAllSettings();
  res.json(settings);
});

// PUT /api/settings — upsert one or more settings from a JSON object body,
// then echo the complete settings map back.
router.put('/api/settings', (req, res) => {
  const updates = req.body;
  // Reject non-object bodies (null, arrays, strings, numbers): Object.entries
  // on them either throws (→ opaque 500) or iterates array indexes / string
  // positions and persists junk keys.
  if (typeof updates !== 'object' || updates === null || Array.isArray(updates)) {
    return res.status(400).json({ error: 'Request body must be a JSON object' });
  }
  for (const [key, value] of Object.entries(updates)) {
    // Settings are stored as TEXT; stringify everything explicitly.
    setSetting(key, String(value));
  }
  res.json(getAllSettings());
});

export default router;

84
server/signing.js Normal file
View File

@@ -0,0 +1,84 @@
import { createHash } from 'crypto';
import fetch from 'node-fetch';
// Try multiple community-maintained rule sources in order
const RULES_URLS = [
'https://raw.githubusercontent.com/rafa-9/dynamic-rules/main/rules.json',
'https://raw.githubusercontent.com/datawhores/onlyfans-dynamic-rules/main/dynamicRules.json',
'https://raw.githubusercontent.com/DATAHOARDERS/dynamic-rules/main/onlyfans.json',
];
const REFRESH_INTERVAL = 60 * 60 * 1000; // 1 hour
let rules = null;
let lastFetch = 0;
// Sources disagree on key names ('app_token' vs 'app-token', explicit
// prefix/suffix vs a combined 'format' template string); map whatever a
// source returns onto the single shape the signer consumes.
function normalizeRules(raw) {
  const formatParts = raw.format?.split(':');
  return {
    static_param: raw.static_param,
    checksum_indexes: raw.checksum_indexes,
    checksum_constant: raw.checksum_constant ?? 0,
    // Some sources publish one constant per checksum index instead.
    checksum_constants: raw.checksum_constants ?? null,
    app_token: raw.app_token || raw['app-token'] || '33d57ade8c02dbc5a333db99ff9ae26a',
    prefix: raw.prefix || formatParts?.[0],
    suffix: raw.suffix || formatParts?.at(-1),
    remove_headers: raw.remove_headers ?? [],
  };
}
// Walk the source list in priority order; the first URL that yields
// parseable JSON wins and refreshes the module-level cache (rules/lastFetch).
// Throws only when every source fails.
async function fetchRules() {
  // Fetch and normalize one source; null signals a non-OK HTTP status.
  const loadFrom = async (sourceUrl) => {
    const response = await fetch(sourceUrl);
    if (!response.ok) return null;
    return normalizeRules(await response.json());
  };
  for (const url of RULES_URLS) {
    try {
      const loaded = await loadFrom(url);
      if (loaded === null) continue;
      rules = loaded;
      lastFetch = Date.now();
      console.log(`[signing] Rules loaded from ${url} (prefix: ${rules.prefix})`);
      return rules;
    } catch (err) {
      // Network or parse failure — warn and fall through to the next source.
      console.warn(`[signing] Failed to fetch from ${url}: ${err.message}`);
    }
  }
  throw new Error('All dynamic rules sources failed');
}
// Eagerly load signing rules at startup. Propagates fetchRules' error when
// every source fails, so callers can refuse to start without a working signer.
export async function initRules() {
  await fetchRules();
}
/**
 * Return the cached signing rules, kicking off a fire-and-forget background
 * refresh when the cache is older than REFRESH_INTERVAL. Callers always get
 * the current (possibly stale) rules immediately and never wait on the
 * network; may return null before initRules() has completed.
 */
export function getRules() {
  const stale = Date.now() - lastFetch > REFRESH_INTERVAL;
  // Guard against a refresh stampede: when every source is failing,
  // lastFetch never advances, so without this flag each call would fire a
  // fresh round of overlapping fetches.
  if (stale && !getRules._refreshing) {
    getRules._refreshing = true;
    fetchRules()
      .catch((err) => console.error('[signing] Failed to refresh rules:', err.message))
      .finally(() => {
        getRules._refreshing = false;
      });
  }
  return rules;
}
/**
 * Build the signed request headers OnlyFans expects for an API path.
 * Requires initRules()/fetchRules() to have populated the module rules first.
 *
 * @param {string} path - API path (including query string) being requested.
 * @param {string} userId - Authenticated user id; replaced by "0" when the
 *   rules list 'user-id' under remove_headers.
 * @returns {{sign: string, time: string, 'app-token': string}}
 * @throws {Error} When signing rules have not been initialized.
 */
export function createSignedHeaders(path, userId) {
  if (!rules) throw new Error('Signing rules not initialized');
  const time = Date.now().toString();
  // Use "0" for userId when user-id is in remove_headers
  const effectiveUserId = rules.remove_headers?.includes('user-id') ? '0' : userId;
  const digest = createHash('sha1')
    .update([rules.static_param, time, path, effectiveUserId].join('\n'))
    .digest('hex');
  // Checksum runs over the ASCII codes of the hex digest characters at the
  // rule-specified indexes, plus any per-index and global constants.
  const digestBytes = Buffer.from(digest, 'ascii');
  let sum = rules.checksum_constant;
  rules.checksum_indexes.forEach((idx, i) => {
    sum += digestBytes[idx] + (rules.checksum_constants?.[i] ?? 0);
  });
  const sign = [rules.prefix, digest, Math.abs(sum).toString(16), rules.suffix].join(':');
  return {
    sign,
    time,
    'app-token': rules.app_token,
  };
}