Initial project: ESP32-S3 e-ink photo frame with web UI

ESP32-S3 firmware (PlatformIO) that fetches JPEGs from a photo server,
decodes on-device with PSRAM, Floyd-Steinberg dithers to the Spectra 6
6-color palette, and displays on a 7.3" GDEP073E01 e-paper panel.
Deep sleeps 1 hour between updates.

Photo server (Python/Flask) with web UI for photo management, Traefik
routing at photos.haunt.house with Google OAuth, and Home Assistant
REST sensor integration.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
2026-03-26 14:33:16 -05:00
commit 4ddda58b43
11 changed files with 859 additions and 0 deletions

17
.gitignore vendored Normal file
View File

@@ -0,0 +1,17 @@
# PlatformIO
firmware/.pio/
firmware/.vscode/
# Python
server/__pycache__/
server/*.pyc
server/.venv/
# Photos (user data, not checked in)
server/photos/
# Secrets
.env
# macOS
.DS_Store

17
firmware/platformio.ini Normal file
View File

@@ -0,0 +1,17 @@
[env:esp32s3]
platform = espressif32
board = esp32-s3-devkitc-1
framework = arduino
monitor_speed = 115200
upload_speed = 921600
board_build.arduino.memory_type = qio_opi
board_build.psram = enabled
build_flags =
-DBOARD_HAS_PSRAM
-DARDUINO_USB_CDC_ON_BOOT=1
lib_deps =
zinggjm/GxEPD2@^1.6.0
bitbank2/JPEGDEC@^1.4.2

28
firmware/src/config.h Normal file
View File

@@ -0,0 +1,28 @@
#pragma once

// Build-time configuration for the e-ink photo frame firmware.
// Fill in WiFi credentials and the photo server URL before flashing.

// WiFi credentials
#define WIFI_SSID "your-wifi-ssid"
#define WIFI_PASSWORD "your-wifi-password"

// Photo server URL — should return a JPEG resized to 800x480
// Use the LAN address to avoid OAuth (ESP32 hits the /photo and /heartbeat
// endpoints directly). If using Traefik, those paths are auth-exempt.
#define PHOTO_SERVER_URL "http://nas.home.network:8473/photo"
// Or via Traefik (HTTPS, auth-exempt paths):
// #define PHOTO_SERVER_URL "https://photos.haunt.house/photo"

// How long to sleep between photo updates (in seconds)
#define SLEEP_DURATION_SEC (60 * 60) // 1 hour

// Display dimensions (7.3" GDEP073E01 panel, landscape)
#define DISPLAY_WIDTH 800
#define DISPLAY_HEIGHT 480

// SPI pin assignments for ESP32-S3-DevKitC-1
// Adjust these to match your wiring
#define PIN_SPI_MOSI 11
#define PIN_SPI_SCLK 12
#define PIN_EPD_CS 10
#define PIN_EPD_DC 8
#define PIN_EPD_RST 9
#define PIN_EPD_BUSY 7

96
firmware/src/dither.h Normal file
View File

@@ -0,0 +1,96 @@
#pragma once
#include <cstdint>
// Spectra 6 palette — 6 colors
// These RGB values approximate the actual E Ink pigment colors.
// Tune these if the output looks off on your specific panel.
// One palette entry: the approximate RGB of an E Ink pigment plus the
// native color index the panel controller expects for it.
struct PaletteColor {
    int16_t r, g, b;
    uint8_t index; // native panel color index
};

// Native panel color indices (what the controller expects)
static constexpr uint8_t COLOR_BLACK = 0;
static constexpr uint8_t COLOR_WHITE = 1;
static constexpr uint8_t COLOR_YELLOW = 2;
static constexpr uint8_t COLOR_RED = 3;
static constexpr uint8_t COLOR_BLUE = 5;
static constexpr uint8_t COLOR_GREEN = 6;

// Approximate RGB values of the actual E Ink pigments
static const PaletteColor PALETTE[] = {
    { 0, 0, 0, COLOR_BLACK },
    { 255, 255, 255, COLOR_WHITE },
    { 200, 30, 30, COLOR_RED },
    { 0, 145, 0, COLOR_GREEN },
    { 0, 0, 160, COLOR_BLUE },
    { 230, 210, 0, COLOR_YELLOW },
};
static constexpr int PALETTE_SIZE = sizeof(PALETTE) / sizeof(PALETTE[0]);

// Returns the PALETTE array index (not the native panel index) whose RGB
// entry is nearest to (r, g, b) by squared Euclidean distance. Ties keep
// the earlier entry, since the comparison is strictly less-than.
inline uint8_t findClosestColor(int16_t r, int16_t g, int16_t b) {
    uint8_t best = 0;
    int32_t bestDist = INT32_MAX;
    for (int i = 0; i < PALETTE_SIZE; ++i) {
        const int32_t dr = r - PALETTE[i].r;
        const int32_t dg = g - PALETTE[i].g;
        const int32_t db = b - PALETTE[i].b;
        const int32_t dist = dr * dr + dg * dg + db * db;
        if (dist < bestDist) {
            bestDist = dist;
            best = (uint8_t)i;
        }
    }
    return best;
}
// Floyd-Steinberg dithering on an RGB888 buffer in-place.
// Writes the result into a 4bpp output buffer (2 pixels per byte, native panel indices).
// rgb must be width*height*3 bytes (allocated in PSRAM).
// output must be width*height/2 bytes.
//
// Marked `inline`: this is a function *definition* in a header, so a
// non-inline version would violate the one-definition rule as soon as
// the header is included from a second translation unit.
inline void ditherFloydSteinberg(uint8_t* rgb, uint8_t* output, int width, int height) {
    // Saturating clamp to [0, 255]. A local helper instead of Arduino's
    // `constrain` macro so this header stands on its own (it only
    // includes <cstdint>).
    const auto clampByte = [](int32_t v) -> uint8_t {
        if (v < 0) return 0;
        if (v > 255) return 255;
        return (uint8_t)v;
    };
    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
            int idx = (y * width + x) * 3;
            // Source bytes are uint8_t, so they are already in [0, 255];
            // no clamp needed on read.
            int16_t r = rgb[idx + 0];
            int16_t g = rgb[idx + 1];
            int16_t b = rgb[idx + 2];
            // Quantize to the nearest palette color.
            uint8_t ci = findClosestColor(r, g, b);
            const PaletteColor& pc = PALETTE[ci];
            // Pack into the 4bpp output (even pixel = high nibble).
            int pixelPos = y * width + x;
            if (pixelPos % 2 == 0) {
                output[pixelPos / 2] = (uint8_t)(pc.index << 4);
            } else {
                output[pixelPos / 2] |= pc.index;
            }
            // Quantization error for this pixel.
            int16_t er = r - pc.r;
            int16_t eg = g - pc.g;
            int16_t eb = b - pc.b;
            // Spread the error onto unvisited neighbors using the classic
            // Floyd-Steinberg weights:
            //   right 7/16, bottom-left 3/16, bottom 5/16, bottom-right 1/16.
            // Each write saturates at [0, 255], so extreme errors are
            // partly absorbed rather than wrapped.
            auto diffuse = [&](int nx, int ny, int16_t fraction) {
                if (nx < 0 || nx >= width || ny < 0 || ny >= height) return;
                int ni = (ny * width + nx) * 3;
                rgb[ni + 0] = clampByte((int32_t)rgb[ni + 0] + (er * fraction / 16));
                rgb[ni + 1] = clampByte((int32_t)rgb[ni + 1] + (eg * fraction / 16));
                rgb[ni + 2] = clampByte((int32_t)rgb[ni + 2] + (eb * fraction / 16));
            };
            diffuse(x + 1, y, 7);
            diffuse(x - 1, y + 1, 3);
            diffuse(x, y + 1, 5);
            diffuse(x + 1, y + 1, 1);
        }
    }
}

View File

@@ -0,0 +1,2 @@
dependencies:
idf: '>=5.1'

240
firmware/src/main.cpp Normal file
View File

@@ -0,0 +1,240 @@
#include <Arduino.h>
#include <WiFi.h>
#include <HTTPClient.h>
#include <JPEGDEC.h>
#include <GxEPD2_EPD.h>
#include <epd7c/GxEPD2_730c_GDEP073E01.h>
#include <SPI.h>
#include "config.h"
#include "dither.h"
// Display instance
// NOTE(review): SPIClass's constructor takes a bus id; SPI2_HOST is an
// esp-idf SPI host enum — presumably it selects the intended bus on the
// S3, but verify against the installed arduino-esp32 core.
SPIClass displaySPI(SPI2_HOST);
GxEPD2_730c_GDEP073E01 epd(PIN_EPD_CS, PIN_EPD_DC, PIN_EPD_RST, PIN_EPD_BUSY);

// Buffers (allocated in PSRAM by setup(); too large for internal SRAM)
static uint8_t* jpegBuf = nullptr;    // raw JPEG data from server
static uint8_t* rgbBuf = nullptr;     // decoded RGB888 (800*480*3 = 1,152,000 bytes)
static uint8_t* displayBuf = nullptr; // dithered 4bpp output (800*480/2 = 192,000 bytes)
static size_t jpegLen = 0;            // number of valid bytes in jpegBuf
static String photoName = "";         // name of the photo currently being displayed
// JPEG decoder callback — expands each decoded RGB565 block into rgbBuf
// as RGB888, clipping anything outside the 800x480 framebuffer.
// Called by JPEGDEC once per MCU block; returns 1 to continue decoding.
static int jpegDrawCallback(JPEGDRAW* pDraw) {
    for (int y = 0; y < pDraw->iHeight; y++) {
        int destY = pDraw->y + y;
        // Rows only increase — once we run off the bottom of the
        // framebuffer, the rest of this block is clipped too, so stop
        // instead of scanning it (was `continue`).
        if (destY >= DISPLAY_HEIGHT) break;
        for (int x = 0; x < pDraw->iWidth; x++) {
            int destX = pDraw->x + x;
            // Same for columns: everything further right is clipped.
            if (destX >= DISPLAY_WIDTH) break;
            // Expand RGB565 to RGB888 by shifting each channel up to
            // 8 bits. Assumes the uint16_t holds a host-byte-order
            // RGB565 value (see the pixel type chosen in decodeJpeg).
            uint16_t pixel = pDraw->pPixels[y * pDraw->iWidth + x];
            uint8_t r = ((pixel >> 11) & 0x1F) << 3;
            uint8_t g = ((pixel >> 5) & 0x3F) << 2;
            uint8_t b = ( pixel & 0x1F) << 3;
            int idx = (destY * DISPLAY_WIDTH + destX) * 3;
            rgbBuf[idx + 0] = r;
            rgbBuf[idx + 1] = g;
            rgbBuf[idx + 2] = b;
        }
    }
    return 1; // continue decoding
}
// Join the configured WiFi network, polling for up to ~30 seconds
// (60 half-second attempts). Returns true once connected.
bool connectWiFi() {
    Serial.printf("Connecting to WiFi '%s'...\n", WIFI_SSID);
    WiFi.begin(WIFI_SSID, WIFI_PASSWORD);
    for (int attempt = 0; attempt < 60; ++attempt) {
        if (WiFi.status() == WL_CONNECTED) break;
        delay(500);
        Serial.print(".");
    }
    if (WiFi.status() != WL_CONNECTED) {
        Serial.println("\nWiFi connection failed.");
        return false;
    }
    Serial.printf("\nConnected! IP: %s\n", WiFi.localIP().toString().c_str());
    return true;
}
bool fetchPhoto() {
HTTPClient http;
http.begin(PHOTO_SERVER_URL);
http.setTimeout(30000);
const char* headerKeys[] = {"X-Photo-Name"};
http.collectHeaders(headerKeys, 1);
Serial.printf("Fetching photo from %s\n", PHOTO_SERVER_URL);
int httpCode = http.GET();
if (httpCode != HTTP_CODE_OK) {
Serial.printf("HTTP error: %d\n", httpCode);
http.end();
return false;
}
// Capture photo name from response header
photoName = http.header("X-Photo-Name");
Serial.printf("Photo: %s\n", photoName.c_str());
jpegLen = http.getSize();
if (jpegLen <= 0 || jpegLen > 2 * 1024 * 1024) {
// Unknown size — read into buffer with a max cap
Serial.println("Reading response (unknown or oversized content-length)...");
WiFiClient* stream = http.getStreamPtr();
jpegLen = 0;
size_t maxSize = 2 * 1024 * 1024;
while (stream->connected() || stream->available()) {
size_t avail = stream->available();
if (avail == 0) { delay(10); continue; }
size_t toRead = min(avail, maxSize - jpegLen);
if (toRead == 0) break;
size_t read = stream->readBytes(jpegBuf + jpegLen, toRead);
jpegLen += read;
}
} else {
Serial.printf("Content-Length: %u bytes\n", jpegLen);
WiFiClient* stream = http.getStreamPtr();
size_t received = 0;
while (received < jpegLen) {
size_t avail = stream->available();
if (avail == 0) { delay(10); continue; }
size_t read = stream->readBytes(jpegBuf + received, min(avail, jpegLen - received));
received += read;
}
}
http.end();
Serial.printf("Received %u bytes of JPEG data\n", jpegLen);
return jpegLen > 0;
}
bool decodeJpeg() {
Serial.println("Decoding JPEG...");
JPEGDEC jpeg;
if (!jpeg.openRAM(jpegBuf, jpegLen, jpegDrawCallback)) {
Serial.println("Failed to open JPEG");
return false;
}
Serial.printf("JPEG: %dx%d\n", jpeg.getWidth(), jpeg.getHeight());
// Use 1/1 scale if the server already resized to 800x480.
// If the image is larger, we could use 1/2 or 1/4 scale, but
// the server should handle resizing.
jpeg.setPixelType(RGB565_BIG_ENDIAN);
if (!jpeg.decode(0, 0, 0)) {
Serial.println("JPEG decode failed");
return false;
}
Serial.println("JPEG decoded successfully");
return true;
}
// Dither the decoded RGB image in rgbBuf down to the panel's 6-color
// palette and push the result to the e-paper display, then put the
// panel controller into hibernate until the next update.
void displayImage() {
    Serial.println("Dithering to 6-color palette...");
    // Zero the 4bpp buffer first: dithering ORs low nibbles in place.
    memset(displayBuf, 0, DISPLAY_WIDTH * DISPLAY_HEIGHT / 2);
    ditherFloydSteinberg(rgbBuf, displayBuf, DISPLAY_WIDTH, DISPLAY_HEIGHT);
    Serial.println("Dithering complete");
    Serial.println("Writing to e-paper display...");
    // NOTE(review): argument meanings assumed to follow GxEPD2's
    // init(serial_diag_bitrate, initial, reset_duration, pulldown_rst_mode)
    // signature — confirm against the installed GxEPD2 version.
    epd.init(115200, true, 50, false);
    // Write raw 4bpp native-encoded pixel data directly to the panel.
    // invert=true tells GxEPD2 to skip its color remapping since our
    // dither output already uses native panel color indices.
    epd.writeNative(displayBuf, nullptr, 0, 0, DISPLAY_WIDTH, DISPLAY_HEIGHT,
                    true, false, false);
    epd.refresh(false); // full refresh
    Serial.println("Waiting for display refresh (~15s)...");
    epd.hibernate();
    Serial.println("Display updated!");
}
void sendHeartbeat() {
HTTPClient http;
String url = PHOTO_SERVER_URL;
// Replace /photo with /heartbeat
url = url.substring(0, url.lastIndexOf('/')) + "/heartbeat";
http.begin(url);
http.addHeader("Content-Type", "application/json");
String body = "{\"photo\":\"" + photoName + "\",\"free_heap\":" + String(ESP.getFreeHeap()) + "}";
int code = http.POST(body);
Serial.printf("Heartbeat sent (%d)\n", code);
http.end();
}
// Shut WiFi down and deep-sleep for SLEEP_DURATION_SEC seconds.
// Does not return — the chip resets into setup() on timer wake.
void enterDeepSleep() {
    Serial.printf("Going to deep sleep for %d seconds...\n", SLEEP_DURATION_SEC);
    WiFi.disconnect(true);
    WiFi.mode(WIFI_OFF);
    const uint64_t sleepUs = (uint64_t)SLEEP_DURATION_SEC * 1000000ULL;
    esp_sleep_enable_timer_wakeup(sleepUs);
    esp_deep_sleep_start();
}
// One full wake cycle: connect, fetch, decode, dither, display, report,
// then deep sleep. Runs on every power-on and deep-sleep wake.
void setup() {
    Serial.begin(115200);
    delay(1000);
    Serial.println("\n=== E-Ink Photo Frame ===");
    // Check PSRAM — the decode and dither buffers don't fit in SRAM.
    if (!psramFound()) {
        Serial.println("ERROR: PSRAM not found! Cannot continue.");
        // Sleep instead of falling through to an idle loop(): a bare
        // `return` would leave the board busy-spinning in loop()
        // forever and drain the battery.
        enterDeepSleep();
        return;
    }
    Serial.printf("PSRAM: %u bytes free\n", ESP.getFreePsram());
    // Allocate buffers in PSRAM
    jpegBuf = (uint8_t*)ps_malloc(2 * 1024 * 1024); // 2MB for JPEG
    rgbBuf = (uint8_t*)ps_malloc(DISPLAY_WIDTH * DISPLAY_HEIGHT * 3); // ~1.1MB
    displayBuf = (uint8_t*)ps_malloc(DISPLAY_WIDTH * DISPLAY_HEIGHT / 2); // 192KB
    if (!jpegBuf || !rgbBuf || !displayBuf) {
        Serial.println("ERROR: Failed to allocate PSRAM buffers!");
        // Same reasoning: sleep rather than spin in loop().
        enterDeepSleep();
        return;
    }
    // Initialize SPI for the display
    displaySPI.begin(PIN_SPI_SCLK, -1, PIN_SPI_MOSI, PIN_EPD_CS);
    epd.selectSPI(displaySPI, SPISettings(20000000, MSBFIRST, SPI_MODE0));
    if (!connectWiFi()) {
        Serial.println("No WiFi — going back to sleep");
        enterDeepSleep();
        return;
    }
    if (!fetchPhoto()) {
        Serial.println("Failed to fetch photo — going back to sleep");
        enterDeepSleep();
        return;
    }
    if (!decodeJpeg()) {
        Serial.println("Failed to decode JPEG — going back to sleep");
        enterDeepSleep();
        return;
    }
    displayImage();
    sendHeartbeat();
    enterDeepSleep();
}
void loop() {
    // Intentionally empty: setup() always ends in esp_deep_sleep_start(),
    // so execution never reaches the Arduino loop.
}

View File

@@ -0,0 +1,36 @@
# Home Assistant configuration for the E-Ink Photo Frame
# Add this to your HA configuration.yaml
rest:
- resource: "http://eink-photo-server:8473/api/status"
scan_interval: 300 # check every 5 minutes
sensor:
- name: "Photo Frame"
value_template: "{{ value_json.state }}"
json_attributes:
- last_update
- current_photo
- frame_ip
- total_photos
- total_updates
template:
- sensor:
- name: "Photo Frame Current Photo"
state: "{{ state_attr('sensor.photo_frame', 'current_photo') or 'unknown' }}"
icon: mdi:image
- name: "Photo Frame Last Update"
state: >
{% set ts = state_attr('sensor.photo_frame', 'last_update') %}
{% if ts %}
{{ as_timestamp(ts) | timestamp_custom('%b %d %I:%M %p') }}
{% else %}
Never
{% endif %}
icon: mdi:clock-outline
- name: "Photo Frame Library Size"
state: "{{ state_attr('sensor.photo_frame', 'total_photos') | int(0) }}"
unit_of_measurement: "photos"
icon: mdi:image-multiple

12
server/Dockerfile Normal file
View File

@@ -0,0 +1,12 @@
FROM python:3.12-slim
WORKDIR /app
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY server.py .
EXPOSE 8473
CMD ["python", "server.py", "--port", "8473", "--photos-dir", "/photos"]

40
server/docker-compose.yml Normal file
View File

@@ -0,0 +1,40 @@
services:
photo-server:
build: .
container_name: eink-photo-server
restart: unless-stopped
networks:
- webgateway
volumes:
- photos:/photos
- state:/data
environment:
- TZ=America/Chicago
labels:
- "traefik.enable=true"
# HTTPS
- "traefik.http.routers.photos-websecure.rule=Host(`photos.haunt.house`)"
- "traefik.http.routers.photos-websecure.entryPoints=websecure"
- "traefik.http.routers.photos-websecure.tls.certresolver=myresolver"
- "traefik.http.routers.photos-websecure.middlewares=hsts,google-auth@docker"
# HTTP redirect
- "traefik.http.routers.photos-http.rule=Host(`photos.haunt.house`)"
- "traefik.http.routers.photos-http.entryPoints=web"
- "traefik.http.routers.photos-http.middlewares=redirect-https"
# Internal port
- "traefik.http.services.photos.loadbalancer.server.port=8473"
# ESP32 endpoint — no auth (frame can't do OAuth)
- "traefik.http.routers.photos-frame.rule=Host(`photos.haunt.house`) && (Path(`/photo`) || Path(`/heartbeat`))"
- "traefik.http.routers.photos-frame.entryPoints=websecure"
- "traefik.http.routers.photos-frame.tls.certresolver=myresolver"
- "traefik.http.routers.photos-frame.middlewares=hsts"
- "traefik.http.routers.photos-frame.priority=100"
- "traefik.http.routers.photos-frame.service=photos"
networks:
webgateway:
external: true
volumes:
photos:
state:

2
server/requirements.txt Normal file
View File

@@ -0,0 +1,2 @@
flask==3.1.*
pillow==11.*

369
server/server.py Normal file
View File

@@ -0,0 +1,369 @@
"""
E-Ink Photo Frame Server
Serves photos for the ESP32 photo frame and provides a web UI for management.
Tracks frame status via heartbeat reports from the ESP32.
Endpoints:
GET / Web UI — gallery, upload, frame status
GET /photo Random JPEG resized to 800x480 (called by ESP32)
POST /heartbeat ESP32 reports what it displayed
GET /api/status Frame status (for Home Assistant REST sensor)
GET /api/photos List all photos as JSON
POST /api/upload Upload new photos
DELETE /api/photos/<name> Delete a photo
GET /health Health check
"""
import argparse
import io
import json
import random
import sys
import time
from datetime import datetime, timezone
from pathlib import Path
from flask import Flask, Response, jsonify, request, render_template_string
from werkzeug.utils import secure_filename
from PIL import Image
app = Flask(__name__)

# Directory scanned for photos; reassigned from --photos-dir in __main__.
PHOTOS_DIR: Path = Path(".")

# Output size served to the frame (the panel's resolution).
TARGET_WIDTH = 800
TARGET_HEIGHT = 480

# File extensions accepted for upload and serving.
SUPPORTED_EXTENSIONS = {".jpg", ".jpeg", ".png", ".webp", ".heic", ".bmp", ".tiff"}

# Upload size limit.
MAX_UPLOAD_SIZE = 20 * 1024 * 1024  # 20MB

# Frame state — persisted to disk so it survives restarts
STATE_FILE = Path("/data/frame_state.json")
def load_state() -> dict:
    """Read persisted frame state from STATE_FILE.

    Any problem (missing file, unreadable file, bad JSON) yields a fresh
    default state instead of raising.
    """
    defaults = {"last_update": None, "current_photo": None, "ip": None, "updates": 0}
    try:
        if not STATE_FILE.exists():
            return defaults
        return json.loads(STATE_FILE.read_text())
    except Exception:
        return defaults
def save_state(state: dict):
    """Persist frame state to STATE_FILE (best effort — never raises)."""
    try:
        STATE_FILE.parent.mkdir(parents=True, exist_ok=True)
        STATE_FILE.write_text(json.dumps(state))
    except Exception as exc:
        print(f"Warning: could not save state: {exc}", file=sys.stderr)


# In-memory frame state, loaded once at import time and updated by
# the /heartbeat endpoint.
frame_state = load_state()
def get_photo_list() -> list[Path]:
    """All supported image files under PHOTOS_DIR (recursive), sorted by name."""
    matches = [
        candidate
        for candidate in PHOTOS_DIR.rglob("*")
        if candidate.is_file() and candidate.suffix.lower() in SUPPORTED_EXTENSIONS
    ]
    matches.sort(key=lambda p: p.name)
    return matches
def resize_and_crop(img: Image.Image) -> Image.Image:
    """Scale the image to cover the 800x480 target, then center-crop.

    The dimension that limits coverage is matched to the target and the
    overflow along the other axis is trimmed equally from both sides.
    """
    wider_than_target = (img.width / img.height) > (TARGET_WIDTH / TARGET_HEIGHT)
    if wider_than_target:
        scaled_h = TARGET_HEIGHT
        scaled_w = int(img.width * (TARGET_HEIGHT / img.height))
    else:
        scaled_w = TARGET_WIDTH
        scaled_h = int(img.height * (TARGET_WIDTH / img.width))
    scaled = img.resize((scaled_w, scaled_h), Image.LANCZOS)
    x0 = (scaled_w - TARGET_WIDTH) // 2
    y0 = (scaled_h - TARGET_HEIGHT) // 2
    return scaled.crop((x0, y0, x0 + TARGET_WIDTH, y0 + TARGET_HEIGHT))
def make_thumbnail(img: Image.Image, size: int = 300) -> bytes:
    """JPEG-encode an in-place thumbnail of ``img`` (mutates the image)."""
    img.thumbnail((size, size), Image.LANCZOS)
    out = io.BytesIO()
    img.save(out, format="JPEG", quality=75)
    return out.getvalue()
# --- ESP32 endpoints ---


@app.route("/photo")
def random_photo():
    """Pick a random photo, fit it to 800x480, and return it as JPEG.

    The original filename is exposed via the X-Photo-Name header so the
    frame can report it back in its heartbeat.
    """
    photos = get_photo_list()
    if not photos:
        return jsonify({"error": "No photos found"}), 404
    chosen = random.choice(photos)
    try:
        prepared = resize_and_crop(Image.open(chosen).convert("RGB"))
        buf = io.BytesIO()
        prepared.save(buf, format="JPEG", quality=85)
        return Response(
            buf.getvalue(),
            mimetype="image/jpeg",
            headers={"X-Photo-Name": chosen.name},
        )
    except Exception as e:
        print(f"Error processing {chosen}: {e}", file=sys.stderr)
        return jsonify({"error": str(e)}), 500
@app.route("/heartbeat", methods=["POST"])
def heartbeat():
"""ESP32 reports after displaying a photo."""
global frame_state
data = request.get_json(silent=True) or {}
frame_state["last_update"] = datetime.now(timezone.utc).isoformat()
frame_state["current_photo"] = data.get("photo", None)
frame_state["ip"] = request.remote_addr
frame_state["free_heap"] = data.get("free_heap", None)
frame_state["updates"] = frame_state.get("updates", 0) + 1
save_state(frame_state)
return jsonify({"ok": True})
# --- API endpoints ---


@app.route("/api/status")
def api_status():
    """Frame status JSON consumed by the Home Assistant REST sensor."""
    online = bool(frame_state.get("last_update"))
    return jsonify({
        "state": "online" if online else "waiting",
        "last_update": frame_state.get("last_update"),
        "current_photo": frame_state.get("current_photo"),
        "frame_ip": frame_state.get("ip"),
        "total_photos": len(get_photo_list()),
        "total_updates": frame_state.get("updates", 0),
    })
@app.route("/api/photos")
def api_photos():
photos = get_photo_list()
return jsonify([{"name": p.name, "size": p.stat().st_size} for p in photos])
@app.route("/api/upload", methods=["POST"])
def api_upload():
uploaded = []
for f in request.files.getlist("photos"):
if not f.filename:
continue
fname = secure_filename(f.filename)
ext = Path(fname).suffix.lower()
if ext not in SUPPORTED_EXTENSIONS:
continue
dest = PHOTOS_DIR / fname
f.save(str(dest))
uploaded.append(fname)
return jsonify({"uploaded": uploaded, "count": len(uploaded)})
@app.route("/api/photos/<name>", methods=["DELETE"])
def api_delete(name: str):
fname = secure_filename(name)
path = PHOTOS_DIR / fname
if not path.exists():
return jsonify({"error": "not found"}), 404
path.unlink()
return jsonify({"deleted": fname})
@app.route("/thumb/<name>")
def thumbnail(name: str):
fname = secure_filename(name)
path = PHOTOS_DIR / fname
if not path.exists():
return "", 404
try:
img = Image.open(path).convert("RGB")
data = make_thumbnail(img)
return Response(data, mimetype="image/jpeg")
except Exception:
return "", 500
# --- Web UI ---


@app.route("/")
def index():
    """Render the single-page gallery / upload / status UI."""
    photos = get_photo_list()
    return render_template_string(
        WEB_UI,
        photos=photos,
        state=frame_state,
        photo_count=len(photos),
    )
@app.route("/health")
def health():
photos = get_photo_list()
return jsonify({"status": "ok", "photo_count": len(photos)})
WEB_UI = """<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Photo Frame</title>
<style>
:root { --bg: #111; --card: #1a1a1a; --text: #eee; --accent: #6c8; --border: #333; }
* { box-sizing: border-box; margin: 0; padding: 0; }
body { font-family: -apple-system, system-ui, sans-serif; background: var(--bg); color: var(--text); min-height: 100vh; }
.container { max-width: 960px; margin: 0 auto; padding: 1.5rem; }
h1 { font-size: 1.5rem; margin-bottom: 1.5rem; }
h1 span { color: var(--accent); }
.status-card {
background: var(--card); border: 1px solid var(--border); border-radius: 8px;
padding: 1rem 1.25rem; margin-bottom: 1.5rem; display: flex; gap: 2rem; flex-wrap: wrap;
}
.status-card .item { display: flex; flex-direction: column; gap: 0.2rem; }
.status-card .label { font-size: 0.75rem; text-transform: uppercase; color: #888; }
.status-card .value { font-size: 0.95rem; }
.status-card .online { color: var(--accent); }
.status-card .waiting { color: #f80; }
.upload-area {
background: var(--card); border: 2px dashed var(--border); border-radius: 8px;
padding: 2rem; text-align: center; margin-bottom: 1.5rem; cursor: pointer;
transition: border-color 0.2s;
}
.upload-area:hover, .upload-area.dragover { border-color: var(--accent); }
.upload-area input { display: none; }
.gallery { display: grid; grid-template-columns: repeat(auto-fill, minmax(200px, 1fr)); gap: 0.75rem; }
.photo-card {
background: var(--card); border: 1px solid var(--border); border-radius: 6px;
overflow: hidden; position: relative;
}
.photo-card img { width: 100%; aspect-ratio: 5/3; object-fit: cover; display: block; }
.photo-card .info {
padding: 0.5rem 0.6rem; display: flex; justify-content: space-between; align-items: center;
font-size: 0.8rem;
}
.photo-card .name { overflow: hidden; text-overflow: ellipsis; white-space: nowrap; max-width: 70%; }
.photo-card .delete {
background: none; border: none; color: #f55; cursor: pointer; font-size: 0.85rem;
padding: 0.2rem 0.4rem; border-radius: 4px;
}
.photo-card .delete:hover { background: rgba(255,85,85,0.15); }
.photo-card.current { border-color: var(--accent); }
.empty { text-align: center; padding: 3rem; color: #666; }
</style>
</head>
<body>
<div class="container">
<h1><span>&#9632;</span> Photo Frame</h1>
<div class="status-card">
<div class="item">
<span class="label">Frame Status</span>
<span class="value {{ 'online' if state.get('last_update') else 'waiting' }}">
{{ 'Online' if state.get('last_update') else 'Waiting for first update' }}
</span>
</div>
{% if state.get('last_update') %}
<div class="item">
<span class="label">Last Update</span>
<span class="value" data-utc="{{ state.last_update }}">{{ state.last_update[:19] }}</span>
</div>
<div class="item">
<span class="label">Showing</span>
<span class="value">{{ state.get('current_photo', 'Unknown') }}</span>
</div>
{% endif %}
<div class="item">
<span class="label">Photos</span>
<span class="value">{{ photo_count }}</span>
</div>
<div class="item">
<span class="label">Total Refreshes</span>
<span class="value">{{ state.get('updates', 0) }}</span>
</div>
</div>
<div class="upload-area" id="upload-area" onclick="document.getElementById('file-input').click()">
<input type="file" id="file-input" multiple accept="image/*">
<p>Drop photos here or click to upload</p>
</div>
{% if photos %}
<div class="gallery">
{% for p in photos %}
<div class="photo-card {{ 'current' if state.get('current_photo') == p.name else '' }}" data-name="{{ p.name }}">
<img src="/thumb/{{ p.name }}" loading="lazy" alt="{{ p.name }}">
<div class="info">
<span class="name" title="{{ p.name }}">{{ p.name }}</span>
<button class="delete" onclick="deletePhoto('{{ p.name }}')" title="Delete">&times;</button>
</div>
</div>
{% endfor %}
</div>
{% else %}
<div class="empty">No photos yet. Upload some!</div>
{% endif %}
</div>
<script>
const area = document.getElementById('upload-area');
const input = document.getElementById('file-input');
['dragenter','dragover'].forEach(e => area.addEventListener(e, ev => { ev.preventDefault(); area.classList.add('dragover'); }));
['dragleave','drop'].forEach(e => area.addEventListener(e, ev => { ev.preventDefault(); area.classList.remove('dragover'); }));
area.addEventListener('drop', ev => { uploadFiles(ev.dataTransfer.files); });
input.addEventListener('change', () => { uploadFiles(input.files); });
function uploadFiles(files) {
const fd = new FormData();
for (const f of files) fd.append('photos', f);
fetch('/api/upload', { method: 'POST', body: fd })
.then(r => r.json())
.then(d => { if (d.count > 0) location.reload(); });
}
function deletePhoto(name) {
if (!confirm('Delete ' + name + '?')) return;
fetch('/api/photos/' + encodeURIComponent(name), { method: 'DELETE' })
.then(r => r.json())
.then(() => {
document.querySelector('[data-name="' + name + '"]')?.remove();
});
}
// Convert UTC timestamps to local time
document.querySelectorAll('[data-utc]').forEach(el => {
const d = new Date(el.dataset.utc);
el.textContent = d.toLocaleString();
});
</script>
</body>
</html>
"""
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="E-Ink Photo Frame Server")
parser.add_argument("--port", type=int, default=8473)
parser.add_argument("--photos-dir", type=str, default="./photos")
args = parser.parse_args()
PHOTOS_DIR = Path(args.photos_dir)
if not PHOTOS_DIR.exists():
PHOTOS_DIR.mkdir(parents=True, exist_ok=True)
print(f"Serving photos from: {PHOTOS_DIR.resolve()}")
print(f"Found {len(get_photo_list())} photos")
print(f"Listening on port {args.port}")
app.run(host="0.0.0.0", port=args.port)