Series: Pure Node.js — Zero Dependencies

Post #11: File Uploads & Image Serving

📅 March 2026 ⏱ 11 min read 🏷 Node.js, File Uploads, MIME, Streams

📚 Pure Node.js Series — Zero Dependencies

#1: Build an HTTP Server From Scratch #2: Manual Routing — No Express Needed #3: Serving Static Files (HTML, CSS, JS) #4: Handling Forms & POST Requests #5: JSON Files as a Database #6: HTML Templating with String Interpolation #7: Vanilla JS Charts & Dynamic UI #8: WebSockets From Scratch #9: Chat Rooms & Broadcast Messages #10: Sessions & Cookies Without Packages → #11: File Uploads & Image Serving (you are here) #12: Deploy Pure Node.js to a VPS

How File Uploads Work

When an HTML form has enctype="multipart/form-data", the browser encodes files and text fields together in a special format, separated by a randomly generated boundary string. Packages like multer parse this for you. We'll do it ourselves.

The HTML Upload Form

<form action="/products" method="POST" enctype="multipart/form-data">
  <input type="text" name="name" placeholder="Product name" required />
  <input type="number" name="price" placeholder="Price" required />
  <input type="file" name="image" accept="image/*" />
  <button type="submit">Add Product</button>
</form>

Parsing multipart/form-data

// upload.js
// Zero-dependency multipart/form-data handling: parsing, validation,
// and storage of uploaded image files.
const fs = require('fs');
const path = require('path');
const crypto = require('crypto'); // randomUUID() for safe stored filenames

// All uploads live in ./uploads next to this module; create the
// directory up front so later writes never fail on a missing folder.
const UPLOAD_DIR = path.join(__dirname, 'uploads');
fs.mkdirSync(UPLOAD_DIR, { recursive: true });

const MAX_FILE_SIZE = 5 * 1024 * 1024; // 5MB per uploaded file
// MIME whitelist — checked against the part's Content-Type header,
// never against the user-supplied file extension.
const ALLOWED_TYPES = ['image/jpeg', 'image/png', 'image/webp', 'image/gif'];

// Buffer the full request body, enforce a size cap, and parse it as
// multipart/form-data.
//
// req     - http.IncomingMessage (anything emitting 'data'/'end'/'error'
//           with a `headers` object works)
// returns - Promise resolving to { fields, files } from parseMultipartBody
// rejects - when the Content-Type has no boundary, the body exceeds the
//           cap, or the request stream errors
async function parseMultipart(req) {
  return new Promise((resolve, reject) => {
    const contentType = req.headers['content-type'] || '';

    // RFC 2046 allows the boundary parameter to be quoted
    // (boundary="----abc"); accept both the quoted and bare forms.
    // The old /boundary=(.+)$/ kept the quotes, producing a delimiter
    // that never matches the actual body.
    const boundaryMatch = contentType.match(/boundary="?([^";]+)"?/i);

    if (!boundaryMatch) {
      reject(new Error('No boundary in multipart request'));
      return;
    }

    // Parts in the body are delimited by "--" + boundary.
    const boundary = '--' + boundaryMatch[1];
    const chunks = [];
    let totalSize = 0;
    let failed = false; // stop buffering once we have rejected

    req.on('data', chunk => {
      if (failed) return;
      totalSize += chunk.length;
      // Cap the whole request at 2x the per-file limit so a client
      // cannot exhaust memory before per-file validation runs later.
      if (totalSize > MAX_FILE_SIZE * 2) {
        failed = true;
        req.destroy();
        reject(new Error('Request too large'));
        return;
      }
      chunks.push(chunk);
    });

    req.on('end', () => {
      if (failed) return;
      const body = Buffer.concat(chunks);
      resolve(parseMultipartBody(body, boundary));
    });

    req.on('error', reject);
  });
}

// Split a buffered multipart body into text fields and file parts.
//
// body     - Buffer containing the entire request body
// boundary - delimiter string, already prefixed with "--"
// returns  - { fields: {name: string},
//              files:  {name: {filename, mimeType, data: Buffer}} }
function parseMultipartBody(body, boundary) {
  const fields = {};
  const files = {};

  // Split body on boundary
  const boundaryBuf = Buffer.from(boundary);
  const parts = splitBuffer(body, boundaryBuf);

  for (const part of parts) {
    // Each part has headers, blank line, then content
    const headerEnd = part.indexOf('\r\n\r\n');
    if (headerEnd === -1) continue; // e.g. the closing "--\r\n" marker

    const headerStr = part.slice(0, headerEnd).toString();
    const content = part.slice(headerEnd + 4);

    // Remove trailing \r\n that precedes the next boundary
    const data = content.slice(0, content.length - 2);

    // Parse the Content-Disposition header. filename uses * not +:
    // browsers send filename="" when the file input is left empty,
    // and that part must still be classified as a (zero-byte) file,
    // not silently turned into a text field.
    const nameMatch = headerStr.match(/name="([^"]+)"/);
    const filenameMatch = headerStr.match(/filename="([^"]*)"/);
    const typeMatch = headerStr.match(/Content-Type: (.+)/i);

    if (!nameMatch) continue;
    const fieldName = nameMatch[1];

    if (filenameMatch) {
      // File field (possibly empty — callers check data.length)
      files[fieldName] = {
        filename: filenameMatch[1],
        mimeType: typeMatch ? typeMatch[1].trim() : 'application/octet-stream',
        data
      };
    } else {
      // Text field
      fields[fieldName] = data.toString('utf8');
    }
  }

  return { fields, files };
}

// Helper: split `buffer` into the chunks that appear between
// occurrences of `delimiter`. Chunks of two bytes or fewer (the bare
// "\r\n" left between back-to-back boundaries) are discarded.
function splitBuffer(buffer, delimiter) {
  const pieces = [];
  let cursor = 0;

  for (;;) {
    const hit = buffer.indexOf(delimiter, cursor);
    if (hit === -1) break;
    if (hit > cursor) pieces.push(buffer.slice(cursor, hit));
    cursor = hit + delimiter.length;
  }

  if (cursor < buffer.length) pieces.push(buffer.slice(cursor));
  return pieces.filter(piece => piece.length > 2); // filter empty boundary parts
}

module.exports = { parseMultipart, UPLOAD_DIR, ALLOWED_TYPES, MAX_FILE_SIZE };

Saving the Uploaded File

// Validate an uploaded file and persist it under a freshly generated
// name. Returns the stored filename (not the full path).
//
// file - { filename, mimeType, data: Buffer } as produced by the
//        multipart parser
// Throws when the MIME type is not whitelisted or the payload
// exceeds MAX_FILE_SIZE.
async function saveUploadedFile(file) {
  const { mimeType, data } = file;

  if (!ALLOWED_TYPES.includes(mimeType)) {
    throw new Error(`File type ${mimeType} not allowed`);
  }

  if (data.length > MAX_FILE_SIZE) {
    throw new Error('File too large (max 5MB)');
  }

  // Derive the extension from the validated MIME type — never trust
  // the user-supplied filename.
  const subtype = mimeType.split('/')[1];
  const extension = subtype.replace('jpeg', 'jpg');
  const storedName = `${crypto.randomUUID()}.${extension}`;

  await fs.promises.writeFile(path.join(UPLOAD_DIR, storedName), data);
  return storedName;
}
Never use the user's original filename. A user could upload a file named ../../server.js or malicious.php. Always generate a new UUID-based filename and validate the MIME type from the Content-Type header (not the file extension).

Serving Uploaded Images

const MIME_TYPES = {
  '.jpg': 'image/jpeg',
  '.jpeg': 'image/jpeg',
  '.png': 'image/png',
  '.gif': 'image/gif',
  '.webp': 'image/webp',
};

// Serve a previously uploaded image by filename with long-lived caching.
// Responds 400 for suspicious names, 415 for unknown extensions, 304
// when the client's cached copy is current, 404 when the file is
// missing, and 200 with a streamed body otherwise.
async function serveUpload(req, res, filename) {
  // Sanitize: only allow alphanumeric, dash, dot — no path separators,
  // so a crafted name cannot escape UPLOAD_DIR.
  if (!/^[\w\-.]+$/.test(filename)) {
    res.writeHead(400); res.end('Bad filename');
    return;
  }

  const ext = path.extname(filename).toLowerCase();
  const mimeType = MIME_TYPES[ext];

  if (!mimeType) {
    res.writeHead(415); res.end('Unsupported media type');
    return;
  }

  const filePath = path.join(UPLOAD_DIR, filename);

  try {
    const stat = await fs.promises.stat(filePath);
    const etag = `"${stat.mtimeMs}"`;

    // Honor conditional requests: advertising an ETag is pointless
    // unless we also answer If-None-Match with 304 Not Modified.
    if (req.headers['if-none-match'] === etag) {
      res.writeHead(304, { 'ETag': etag });
      res.end();
      return;
    }

    res.writeHead(200, {
      'Content-Type': mimeType,
      'Content-Length': stat.size,
      'Cache-Control': 'public, max-age=31536000', // 1 year
      'ETag': etag,
    });

    // Stream the file — don't load it all into memory. If the read
    // fails mid-transfer the headers are already sent, so the best we
    // can do is drop the connection.
    fs.createReadStream(filePath)
      .on('error', () => res.destroy())
      .pipe(res);
  } catch {
    res.writeHead(404); res.end('Not found');
  }
}

The Upload Handler

// handlers/products.js
const { parseMultipart, saveUploadedFile } = require('../upload');
const db = require('../db/products');

// POST /products — create a product from a multipart form.
// Responds 400 on a malformed body or rejected image, 422 on a
// missing name / invalid price, and 303 (redirect to the listing)
// on success.
async function handleCreateProduct(req, res) {
  let parsed;
  try {
    // A malformed body (no boundary, oversized request) makes
    // parseMultipart reject; without this catch that rejection would
    // crash the request instead of producing a 4xx response.
    parsed = await parseMultipart(req);
  } catch (err) {
    res.writeHead(400, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ error: err.message }));
    return;
  }
  const { fields, files } = parsed;

  const name = fields.name?.trim();
  const price = parseFloat(fields.price);

  // Number.isNaN avoids the coercing global; negative prices are
  // rejected as invalid too.
  if (!name || Number.isNaN(price) || price < 0) {
    res.writeHead(422, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ error: 'Name and valid price required' }));
    return;
  }

  let imageFilename = null;
  // The parser records an empty-file part with data.length === 0, so
  // this guard skips "no file selected" submissions.
  if (files.image && files.image.data.length > 0) {
    try {
      imageFilename = await saveUploadedFile(files.image);
    } catch (err) {
      res.writeHead(400, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({ error: err.message }));
      return;
    }
  }

  await db.create({ name, price, image: imageFilename });
  res.writeHead(303, { Location: '/products' });
  res.end();
}
Key takeaways:
  • Parse the boundary from the Content-Type header, then split the body on it
  • Always generate a new UUID filename — never use the user's filename
  • Validate MIME type from header, not file extension
  • Stream files with fs.createReadStream().pipe(res) — don't load large files into memory
  • Set Cache-Control: max-age on static assets for browser caching

What's Next

In the final post — Post #12 — we deploy our pure Node.js store to a VPS, set up nginx as a reverse proxy, and configure systemd to keep the server running forever.

Building along? Share on X/Twitter or GitHub.