diff --git a/.claude/settings.local.json b/.claude/settings.local.json index f5f1b24..fadcd21 100644 --- a/.claude/settings.local.json +++ b/.claude/settings.local.json @@ -71,7 +71,15 @@ "Bash(git add:*)", "Bash(git rm:*)", "Bash(git check-ignore:*)", - "Bash(git commit:*)" + "Bash(git commit:*)", + "WebFetch(domain:git.ofdl.tools)", + "Bash(gh api:*)", + "WebFetch(domain:emarsden.github.io)", + "WebFetch(domain:go.buydrm.com)", + "WebFetch(domain:shaka-player-demo.appspot.com)", + "WebFetch(domain:winoffrg.medium.com)", + "Bash(npm run build:*)", + "Bash(zsh:*)" ] } } diff --git a/CLAUDE.md b/CLAUDE.md index 7c613c7..feb8942 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -43,10 +43,17 @@ Port mapping: HTTP 3002->3001, HTTPS 3003->3443. Data volumes at `/mnt/user/down | `proxy.js` | OF API proxy with auth headers + media/DRM proxy endpoints | | `signing.js` | Dynamic request signing (fetches rules from GitHub, auto-refreshes hourly) | | `download.js` | Background media download orchestration with resume support | -| `db.js` | SQLite (better-sqlite3, WAL mode) — auth, download history, cursors, settings | -| `gallery.js` | Serves downloaded media as browsable gallery | +| `db.js` | SQLite (better-sqlite3, WAL mode) — auth, download history, cursors, settings, media index | +| `gallery.js` | Gallery API (SQL-backed media index), video thumbnails, duplicate scanning, filesystem scanner | | `hls.js` | On-demand HLS transcoding via FFmpeg | | `settings.js` | Key-value settings API | +| `scrape.js` | Job-based scraping orchestrator — forum + Coomer/Kemono + MediaLink scrapers with progress/logs | +| `scrapers/forum.js` | Cheerio-based forum image scraper (XenForo, generic forums) | +| `scrapers/coomer.js` | Coomer/Kemono API scraper with concurrent downloads | +| `scrapers/medialink.js` | Fapello/gallery-site JSON API scraper — paginates media API, downloads full-size images + videos | +| `widevine.js` | Widevine CDM — WVD parsing, used for browser DRM playback only (not 
downloads) | +| `drm-download.js` | DRM video download pipeline — MPD parsing, pywidevine key acquisition, decrypt, mux | +| `pywidevine_helper.py` | Python helper using pywidevine lib for Widevine license challenges (called as subprocess) | ### Auth & Signing Flow @@ -74,6 +81,86 @@ CDN media from `*.onlyfans.com` is proxied through `/api/media-proxy` to avoid C `download.js` runs background downloads in-memory (not persisted across restarts). It paginates through `/api2/v2/users/:id/posts/medias`, downloads each file to `MEDIA_PATH/:username/`, records in SQLite, and supports cursor-based resume for interrupted downloads. +**DRM Video Downloads**: When a media item has `files.drm.manifest.dash`, the downloader uses `drm-download.js` to: + +1. Fetch & parse the DASH MPD manifest (supports both segmented and on-demand profiles) +2. Extract Widevine PSSH (filters by system ID `edef8ba9-...`, not PlayReady) +3. Call `pywidevine_helper.py` as an async subprocess to get content keys — the helper routes license requests through the local `/api/drm-license` proxy (which handles OF auth/signing) +4. Download encrypted tracks (single file for on-demand, init+segments for segmented) +5. Decrypt with ffmpeg (`-decryption_key`) → mux audio+video + +Requires a `.wvd` (Widevine Device) file at `WVD_PATH` and Python 3 + pywidevine installed. Without the `.wvd`, DRM videos are skipped with a log message. The Dockerfile installs `python3 py3-pip` and `pywidevine`. + +### Gallery Media Index + +The gallery uses a SQLite `media_files` table instead of scanning the filesystem on every request. This makes gallery page loads instant even with tens of thousands of files. + +- **Startup scan**: On server boot, `scanMediaFiles()` walks `MEDIA_PATH`, stats every media file, and upserts into `media_files`. Also prunes rows for deleted files/folders. Takes ~6s for ~38k files. 
+- **Incremental updates**: `download.js`, `scrapers/forum.js`, `scrapers/coomer.js`, and `scrapers/medialink.js` insert into `media_files` after each file is written. +- **Manual rescan**: `POST /api/gallery/rescan` triggers a full re-index (accessible via Settings page "Rescan Media Library" button). Use this after manually adding/removing files from the media folder. +- **Schema**: `media_files(folder, filename, type, size, modified, posted_at)` with indexes on folder, type, modified, posted_at. Unique on `(folder, filename)`. + +### Scraper System + +`scrape.js` provides a job-based scraping system with three scraper types: + +- **Forum scraper** (`scrapers/forum.js`): Scrapes image-hosting forum threads (XenForo-style). Uses Cheerio to parse HTML, finds images in post content areas, derives full-size URLs from thumbnails, filters out avatars/emojis/icons. Supports page range, delay between pages, auto-detection of max page count. +- **Coomer/Kemono scraper** (`scrapers/coomer.js`): Uses the Coomer/Kemono API (`/api/v1/{service}/user/{userId}/posts`) to fetch posts and download attached files. Supports configurable concurrency (1-20 workers) and skips already-downloaded files. +- **MediaLink scraper** (`scrapers/medialink.js`): Scrapes gallery sites like Fapello. Accepts a URL like `https://fapello.to/model/12345`, calls the site's JSON API (`GET /api/media/{userId}/{page}/{order}` with `X-Requested-With: XMLHttpRequest` + `Referer` headers to avoid 403), and downloads `newUrl` (full-size) for images or videos (`type "2"`). Paginates until the API returns empty. Supports configurable concurrency (1-10 workers), delay, and max pages. + +Jobs run in-memory with progress tracking, cancellation support, and per-job log history. The client polls for updates every 2s. + +## Setup Guides + +### Creating a WVD File (Widevine Device) + +DRM downloads require a `.wvd` file containing a Widevine L3 CDM. To create one: + +1. 
**Get CDM files** from a rooted Android device or dumped Chrome CDM: + - `client_id.bin` — the client identification blob + - `private_key.pem` — the RSA private key (PKCS#1 or PKCS#8) + +2. **Install pywidevine** (if not already): + ```bash + pip3 install pywidevine + ``` + +3. **Create the .wvd file**: + ```bash + pywidevine create-device -t ANDROID -l 3 -k private_key.pem -c client_id.bin -o device.wvd + ``` + +4. **Place the file** on the server at `/mnt/user/downloads/OFApp/cdm/device.wvd` (mapped to `/data/cdm/device.wvd` inside Docker). + +Note: Google periodically revokes L3 CDMs. When revoked, you'll need new `client_id.bin` + `private_key.pem` from a different device/source and regenerate the `.wvd`. + +### Updating Auth Cookies + +Auth cookies expire periodically. When you see 401 errors in the logs, re-extract from the browser: + +1. **Open Firefox** (or Chrome) and log into onlyfans.com +2. **Open DevTools** → Network tab +3. **Click any request** to `onlyfans.com/api2/...` +4. **From the request headers, grab**: + - **Cookie** — the full cookie string (contains `sess`, `auth_id`, `st`, `csrf`, etc.) + - **user-id** — your numeric user ID (e.g. `101476031`) + - **x-bc** — browser checksum hash (e.g. `8dbf86e101ff9265acbfbeb648d74e85092b6206`) + - **user-agent** — your browser's UA string + +5. **Update via the app's settings page**, or directly in the DB: + ```bash + sqlite3 /mnt/user/downloads/OFApp/db/ofapp.db + DELETE FROM auth_config; + INSERT INTO auth_config (cookie, user_id, x_bc, user_agent) VALUES ( + 'sess=...; auth_id=...; st=...; ...', + '101476031', + '8dbf86e101ff9265acbfbeb648d74e85092b6206', + 'Mozilla/5.0 ...' + ); + ``` + + In Firefox specifically: DevTools → Storage tab → Cookies → `onlyfans.com` to see individual cookie values, or Network tab → right-click a request → Copy → Copy Request Headers. 
+ ## Environment Variables | Variable | Default | Notes | @@ -84,6 +171,7 @@ CDN media from `*.onlyfans.com` is proxied through `/api/media-proxy` to avoid C | `MEDIA_PATH` | /data/media | Downloaded files | | `DOWNLOAD_DELAY` | 1000 | ms between downloads | | `HLS_ENABLED` | false | FFmpeg HLS transcoding | +| `WVD_PATH` | /data/cdm/device.wvd | Widevine device file for DRM downloads | ## Key Gotchas @@ -92,4 +180,12 @@ CDN media from `*.onlyfans.com` is proxied through `/api/media-proxy` to avoid C - Request signing rules normalize differently across GitHub sources — `signing.js` handles both old format (static_param) and new format (static-param with dashes). - CloudFront cookies for DRM content are IP-locked to the server's public IP (47.185.183.191). They won't work from a different IP. - The `/api/drm-hls` proxy skips `skd://` URIs in HLS manifests (FairPlay key identifiers, Safari only). -- Server log prefixes: `[signing]`, `[drm-license]`, `[drm-hls]`, `[download]`, `[media-proxy]`, `[hls]`. +- Server log prefixes: `[signing]`, `[drm-license]`, `[drm-hls]`, `[download]`, `[drm-download]`, `[widevine]`, `[media-proxy]`, `[hls]`, `[gallery]`. +- DRM downloads require a Widevine L3 device file (`.wvd`). Place it at `/data/cdm/device.wvd` (or set `WVD_PATH`). Without it, DRM videos are silently skipped during bulk downloads. +- The pywidevine subprocess MUST be called with `execAsync` (not `execSync`) because it calls back to the local `/api/drm-license` proxy — `execSync` would deadlock the Node.js event loop. +- MPD PSSH extraction must filter for the Widevine system ID (`edef8ba9-79d6-4ace-a3c8-27dcd51d21ed`), not PlayReady (`9a04f079`). The MPD may contain both. +- OF uses DASH on-demand profile (`isoff-on-demand:2011`) with `` + `` — the entire track is a single encrypted file, not segmented. +- Widevine DRM requires a two-step license flow: first POST `[0x08, 0x04]` for the service certificate, then send the actual challenge. 
PallyCon's license server is strict about challenge format — our custom protobuf CDM was rejected, so we use pywidevine (proven library) instead. +- Auth cookies expire — user needs to re-extract from browser when they get 401 errors. +- Gallery endpoints read from the `media_files` SQLite index, not the filesystem. If files are added/removed manually outside the app, use the "Rescan Media Library" button in Settings (or `POST /api/gallery/rescan`) to re-index. +- The forum scraper requires the `cheerio` npm package (in server dependencies). It's used for HTML parsing only on the server side. diff --git a/Dockerfile b/Dockerfile index e704185..79b765a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -8,7 +8,8 @@ RUN cd client && npm run build # Stage 2 — Production FROM node:20-alpine -RUN apk add --no-cache ffmpeg openssl +RUN apk add --no-cache ffmpeg openssl python3 py3-pip \ + && pip3 install --break-system-packages pywidevine WORKDIR /app COPY server/package*.json ./server/ RUN cd server && npm install --production diff --git a/client/src/App.jsx b/client/src/App.jsx index fea3fd0..79a8cf9 100644 --- a/client/src/App.jsx +++ b/client/src/App.jsx @@ -8,6 +8,8 @@ import UserPosts from './pages/UserPosts' import Downloads from './pages/Downloads' import Search from './pages/Search' import Gallery from './pages/Gallery' +import Duplicates from './pages/Duplicates' +import Scrape from './pages/Scrape' const navItems = [ { to: '/feed', label: 'Feed', icon: FeedIcon }, @@ -15,6 +17,7 @@ const navItems = [ { to: '/search', label: 'Search', icon: SearchIcon }, { to: '/downloads', label: 'Downloads', icon: DownloadIcon }, { to: '/gallery', label: 'Gallery', icon: GalleryNavIcon }, + { to: '/scrape', label: 'Scrape', icon: ScrapeIcon }, { to: '/', label: 'Settings', icon: SettingsIcon }, ] @@ -111,6 +114,8 @@ export default function App() { } /> } /> } /> + } /> + } /> @@ -160,6 +165,14 @@ function GalleryNavIcon({ className }) { ) } +function ScrapeIcon({ className }) { + 
return ( + + + + ) +} + function SettingsIcon({ className }) { return ( diff --git a/client/src/api.js b/client/src/api.js index c998fea..37dc53f 100644 --- a/client/src/api.js +++ b/client/src/api.js @@ -114,3 +114,84 @@ export function getGalleryFiles({ folder, folders, type, sort, offset, limit } = const query = buildQuery({ folder, folders: folders ? folders.join(',') : undefined, type, sort, offset, limit }); return request(`/api/gallery/files${query}`); } + +export function rescanMedia() { + return request('/api/gallery/rescan', { method: 'POST' }); +} + +export function getRescanStatus() { + return request('/api/gallery/rescan/status'); +} + +export function generateThumbs() { + return request('/api/gallery/generate-thumbs', { method: 'POST' }); +} + +export function getThumbsStatus() { + return request('/api/gallery/generate-thumbs/status'); +} + +export function scanDuplicates() { + return request('/api/gallery/scan-duplicates', { method: 'POST' }); +} + +export function getDuplicateScanStatus() { + return request('/api/gallery/scan-duplicates/status'); +} + +export function getDuplicateGroups(offset = 0, limit = 20) { + const query = buildQuery({ offset, limit }); + return request(`/api/gallery/duplicates${query}`); +} + +export function cleanDuplicates() { + return request('/api/gallery/duplicates/clean', { method: 'POST' }); +} + +export function deleteMediaFile(folder, filename) { + return request(`/api/gallery/media/${encodeURIComponent(folder)}/${encodeURIComponent(filename)}`, { method: 'DELETE' }); +} + +export function startForumScrape(config) { + return request('/api/scrape/forum', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(config), + }); +} + +export function startCoomerScrape(config) { + return request('/api/scrape/coomer', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(config), + }); +} + +export function startMediaLinkScrape(config) { + return 
request('/api/scrape/medialink', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(config), + }); +} + +export function getScrapeJobs() { + return request('/api/scrape/jobs'); +} + +export function getScrapeJob(jobId) { + return request(`/api/scrape/jobs/${jobId}`); +} + +export function cancelScrapeJob(jobId) { + return request(`/api/scrape/jobs/${jobId}/cancel`, { method: 'POST' }); +} + +export function detectForumPages(url) { + return request('/api/scrape/forum/detect-pages', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ url }), + }); +} diff --git a/client/src/components/HlsVideo.jsx b/client/src/components/HlsVideo.jsx index 333e462..9524a1a 100644 --- a/client/src/components/HlsVideo.jsx +++ b/client/src/components/HlsVideo.jsx @@ -1,10 +1,13 @@ -import { useEffect, useRef } from 'react' +import { useEffect, useRef, forwardRef, useImperativeHandle } from 'react' import Hls from 'hls.js' -export default function HlsVideo({ hlsSrc, src, autoPlay, ...props }) { +const HlsVideo = forwardRef(function HlsVideo({ hlsSrc, src, autoPlay, ...props }, ref) { const videoRef = useRef(null) const hlsRef = useRef(null) + // Expose the underlying
+ + + {expanded && ( +
+ {logs.length === 0 ? ( + No logs yet... + ) : ( + logs.map((line, i) => ( +
+ {line} +
+ )) + )} +
+
+ )} +
+ ) +} diff --git a/client/src/pages/Downloads.jsx b/client/src/pages/Downloads.jsx index ca84073..4cc605a 100644 --- a/client/src/pages/Downloads.jsx +++ b/client/src/pages/Downloads.jsx @@ -1,10 +1,11 @@ import { useState, useEffect, useRef } from 'react' -import { getDownloadHistory, getActiveDownloads, getUser } from '../api' +import { getDownloadHistory, getActiveDownloads, getUser, getScrapeJobs } from '../api' import Spinner from '../components/Spinner' export default function Downloads() { const [history, setHistory] = useState([]) const [active, setActive] = useState([]) + const [scrapeJobs, setScrapeJobs] = useState([]) const [loading, setLoading] = useState(true) const [error, setError] = useState(null) const pollRef = useRef(null) @@ -23,9 +24,10 @@ export default function Downloads() { setLoading(true) setError(null) - const [histData, activeData] = await Promise.all([ + const [histData, activeData, scrapeData] = await Promise.all([ getDownloadHistory(), getActiveDownloads(), + getScrapeJobs(), ]) if (histData.error) { @@ -37,25 +39,31 @@ export default function Downloads() { const histList = Array.isArray(histData) ? histData : histData.list || [] setHistory(histList) setActive(Array.isArray(activeData) ? activeData : []) + setScrapeJobs(Array.isArray(scrapeData) ? scrapeData.filter(j => j.running) : []) setLoading(false) resolveUsernames(histList) } const startPolling = () => { pollRef.current = setInterval(async () => { - const activeData = await getActiveDownloads() - if (activeData.error) return - - const list = Array.isArray(activeData) ? activeData : [] - setActive((prev) => { - // If something just finished, refresh history - if (prev.length > 0 && list.length < prev.length) { - getDownloadHistory().then((h) => { - if (!h.error) setHistory(Array.isArray(h) ? 
h : h.list || []) - }) - } - return list - }) + const [activeData, scrapeData] = await Promise.all([ + getActiveDownloads(), + getScrapeJobs(), + ]) + if (!activeData.error) { + const list = Array.isArray(activeData) ? activeData : [] + setActive((prev) => { + if (prev.length > 0 && list.length < prev.length) { + getDownloadHistory().then((h) => { + if (!h.error) setHistory(Array.isArray(h) ? h : h.list || []) + }) + } + return list + }) + } + if (!scrapeData.error) { + setScrapeJobs(Array.isArray(scrapeData) ? scrapeData.filter(j => j.running) : []) + } }, 2000) } @@ -159,6 +167,50 @@ export default function Downloads() {
)} + {/* Scrape Jobs */} + {scrapeJobs.length > 0 && ( +
+

+ Scrape Jobs +

+
+ {scrapeJobs.map((job) => { + const progress = job.progress.total > 0 + ? Math.round((job.progress.completed / job.progress.total) * 100) + : 0 + + return ( +
+
+
+ + {job.type} + +

{job.folderName}

+
+ {progress}% +
+

+ {job.progress.completed} / {job.progress.total} {job.type === 'forum' ? 'pages' : 'files'} + {job.progress.errors > 0 && ( + ({job.progress.errors} errors) + )} +

+
+
+
+
+ ) + })} +
+
+ )} + {/* Download History */}

diff --git a/client/src/pages/Duplicates.jsx b/client/src/pages/Duplicates.jsx new file mode 100644 index 0000000..8b276aa --- /dev/null +++ b/client/src/pages/Duplicates.jsx @@ -0,0 +1,257 @@ +import { useState, useEffect, useRef } from 'react' +import { getDuplicateGroups, getDuplicateScanStatus, deleteMediaFile, cleanDuplicates } from '../api' + +export default function Duplicates() { + const [groups, setGroups] = useState([]) + const [total, setTotal] = useState(0) + const [offset, setOffset] = useState(0) + const [loading, setLoading] = useState(true) + const [deleting, setDeleting] = useState(null) + const [cleaning, setCleaning] = useState(false) + const [cleanResult, setCleanResult] = useState(null) + const [scanStatus, setScanStatus] = useState(null) + const pollRef = useRef(null) + const LIMIT = 20 + + const fetchGroups = async (off = 0) => { + setLoading(true) + const data = await getDuplicateGroups(off, LIMIT) + if (!data.error) { + setGroups(data.groups || []) + setTotal(data.total || 0) + setOffset(off) + } + setLoading(false) + } + + useEffect(() => { + // Check if scan is still running + getDuplicateScanStatus().then((s) => { + if (!s.error) setScanStatus(s) + if (s.running) { + pollRef.current = setInterval(async () => { + const st = await getDuplicateScanStatus() + if (!st.error) setScanStatus(st) + if (!st.running) { + clearInterval(pollRef.current) + pollRef.current = null + fetchGroups(0) + } + }, 1000) + } else { + fetchGroups(0) + } + }) + return () => { if (pollRef.current) clearInterval(pollRef.current) } + }, []) + + const handleDelete = async (folder, filename, groupIdx) => { + const key = `${folder}/${filename}` + setDeleting(key) + const result = await deleteMediaFile(folder, filename) + setDeleting(null) + if (result.error) return + + // Update local state — remove file from group, remove group if < 2 + setGroups((prev) => { + const updated = prev.map((group, i) => { + if (i !== groupIdx) return group + return group.filter((f) =>
!(f.folder === folder && f.filename === filename)) + }).filter((g) => g.length > 1) + setTotal((t) => t - (prev.length - updated.length)) + return updated + }) + } + + const totalSaved = groups.reduce((sum, group) => { + const sizes = group.map((f) => f.size).sort((a, b) => b - a) + return sum + sizes.slice(1).reduce((s, sz) => s + sz, 0) + }, 0) + + if (scanStatus?.running) { + return ( +
+

Duplicate Files

+
+

Scanning for duplicates...

+
+ {scanStatus.done} of {scanStatus.total} files checked + {scanStatus.groups} groups found +
+
+
+
+
+
+ ) + } + + return ( +
+
+
+

Duplicate Files

+

+ {total} duplicate group{total !== 1 ? 's' : ''} found + {totalSaved > 0 && ( + · {(totalSaved / (1024 * 1024)).toFixed(1)} MB reclaimable + )} +

+
+
+ {total > 0 && !cleaning && ( + + )} + {cleaning && ( + + + + + + Deleting... + + )} +
+ {total > LIMIT && ( +
+ + + {offset + 1}–{Math.min(offset + LIMIT, total)} of {total} + + +
+ )} +
+ + {loading && ( +
+ + + + +
+ )} + + {cleanResult && ( +
+

+ Deleted {cleanResult.deleted} duplicate files, freed {(cleanResult.freed / (1024 * 1024)).toFixed(1)} MB + {cleanResult.errors > 0 && ({cleanResult.errors} errors)} +

+
+ )} + + {!loading && groups.length === 0 && !cleanResult && ( +
+

No duplicates found. Run a scan from Settings first.

+
+ )} + + {!loading && groups.map((group, groupIdx) => ( +
+
+ + {group.length} copies · {(group[0].size / (1024 * 1024)).toFixed(1)} MB each + + + {group[0].type} + +
+
+ {group.map((file) => { + const key = `${file.folder}/${file.filename}` + return ( +
+
+ {file.type === 'image' ? ( + {file.filename} + ) : file.thumbUrl ? ( + {file.filename} + ) : ( +
No preview
+ )} +
+
+

@{file.folder}

+

{file.path}

+
+ {(file.size / (1024 * 1024)).toFixed(1)} MB + {new Date(file.modified).toLocaleDateString()} +
+ +
+
+ ) + })} +
+
+ ))} + + {/* Bottom pagination */} + {!loading && total > LIMIT && ( +
+ + + {offset + 1}–{Math.min(offset + LIMIT, total)} of {total} + + +
+ )} +
+ ) +} diff --git a/client/src/pages/Gallery.jsx b/client/src/pages/Gallery.jsx index a252b2f..69d304d 100644 --- a/client/src/pages/Gallery.jsx +++ b/client/src/pages/Gallery.jsx @@ -6,6 +6,56 @@ import HlsVideo from '../components/HlsVideo' const PAGE_SIZE = 50 +function GalleryThumbnail({ file }) { + const [loaded, setLoaded] = useState(false) + const [errored, setErrored] = useState(false) + const [retries, setRetries] = useState(0) + + const imgSrc = file.type === 'video' + ? `/api/gallery/thumb/${encodeURIComponent(file.folder)}/${encodeURIComponent(file.filename)}` + : file.url + + // Images — lazy load with retry + const handleError = () => { + if (retries < 2) { + setTimeout(() => setRetries(r => r + 1), 1000 + retries * 1500) + } else { + setErrored(true) + } + } + + return ( + <> + {!loaded && !errored && ( +
+ )} + {errored ? ( +
+ {file.type === 'video' ? ( + + + + ) : ( + + + + )} +
+ ) : ( + setLoaded(true)} + onError={handleError} + className={`w-full h-full object-cover transition-opacity duration-300 ${loaded ? 'opacity-100' : 'opacity-0'}`} + /> + )} + + ) +} + function formatShortDate(dateStr) { if (!dateStr) return '' const d = new Date(dateStr) @@ -307,22 +357,7 @@ export default function Gallery() { className="relative group bg-[#161616] rounded-lg overflow-hidden cursor-pointer aspect-square" onClick={() => setLightbox(file)} > - {file.type === 'video' ? ( -