Skip to main content
Photo of DeepakNess DeepakNess

Better Handling Images on My 11ty Blog

Last year I migrated away from WordPress and am now using the 11ty SSG for my blog deepakness.com, and while I was mostly happy with everything, I have also occasionally had second thoughts about using 11ty — mainly because of the increasing Netlify build times as my blog grew; it was taking up to 1 min 23 seconds for a build. Once or twice I even hit my Netlify monthly 300-minute build time limit.

Old Netlify build time

So I had to find a better way for the site, and I had the following two options in my mind:

  1. Ditch 11ty and start using WordPress or something similar
  2. Stay on 11ty and optimize the build time on Netlify

I went with the #2 option as it required much less work than migrating back to WordPress, and started looking into what's causing the build times to increase so much.

And it was the @11ty/eleventy-img plugin that I was using to dynamically optimize and transform images during build time. While it worked great, it wasted so much compute as it ran image transformations during each build. And I tried using netlify-plugin-cache for caching already transformed images, but it didn't work properly because I am co-locating all images in separate blog folders, as you see below:

blog/
└── some-post/
    ├── index.md
    ├── image-1.png
    └── image-2.png

So... what was the solution here?

I removed the @11ty/eleventy-img plugin and decided to locally process all images using the sharp image library, before sending them to Netlify, so that build times would be faster.

This worked as expected, and here's what I am doing with images:

  1. Resize original images to 1600px, compress, and keep them in their original format
  2. Create a .webp version of images (1200px wide), and save with the same filename
  3. Track already optimized images in the .image-cache.json file via MD5 hash

And all this happens with the following script I created:

import fs from 'fs';
import path from 'path';
import crypto from 'crypto';
import sharp from 'sharp';
import { fileURLToPath } from 'url';

// Recreate CommonJS-style __filename/__dirname in an ES module context.
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// All paths are resolved relative to the repo root (one level above this script).
const ROOT_DIR = path.resolve(__dirname, '..');
const BLOG_DIR = path.join(ROOT_DIR, 'content', 'blog');
const CACHE_FILE = path.join(ROOT_DIR, '.image-cache.json');

// Configuration
const CONFIG = {
  webp: { maxWidth: 1200, quality: 80 }, // WebP derivative: max width / quality
  original: { maxWidth: 1600 }, // in-place resize cap for the original file
  extensions: ['.jpg', '.jpeg', '.png'], // file types eligible for optimization
};

// Read the persisted image cache from disk; fall back to an empty object
// when the file is missing or contains invalid JSON (e.g. on first run).
function loadCache() {
  let cache = {};
  try {
    cache = JSON.parse(fs.readFileSync(CACHE_FILE, 'utf-8'));
  } catch {
    // No cache yet (or unreadable) — start with a fresh one.
  }
  return cache;
}

// Persist the cache map to disk as pretty-printed (2-space) JSON.
function saveCache(cache) {
  const serialized = JSON.stringify(cache, null, 2);
  fs.writeFileSync(CACHE_FILE, serialized);
}

// MD5 digest (hex) of a file's contents — used purely as a
// change-detection key, not for anything security-sensitive.
function getFileHash(filePath) {
  const hasher = crypto.createHash('md5');
  hasher.update(fs.readFileSync(filePath));
  return hasher.digest('hex');
}

// Collect every optimizable image (per CONFIG.extensions) sitting directly
// inside each post folder under BLOG_DIR. Returns absolute file paths;
// returns an empty list (with a log line) when BLOG_DIR does not exist.
function findImages() {
  if (!fs.existsSync(BLOG_DIR)) {
    console.log('Blog directory not found:', BLOG_DIR);
    return [];
  }

  return fs
    .readdirSync(BLOG_DIR, { withFileTypes: true })
    .filter((entry) => entry.isDirectory())
    .flatMap((entry) => {
      const postDir = path.join(BLOG_DIR, entry.name);
      return fs
        .readdirSync(postDir)
        .filter((file) => {
          const ext = path.extname(file).toLowerCase();
          // `.webp` is never in CONFIG.extensions, so the second check is a
          // belt-and-suspenders guard against reprocessing derivatives.
          return CONFIG.extensions.includes(ext) && !file.endsWith('.webp');
        })
        .map((file) => path.join(postDir, file));
    });
}

/**
 * Optimize a single image: emit a sibling WebP derivative and cap the
 * original's width in place.
 *
 * Runs only when the file's MD5 differs from the cached hash. Steps:
 *   1. Write `<basename>.webp` next to the original, resized to at most
 *      CONFIG.webp.maxWidth wide (sharp's resize preserves aspect ratio
 *      when only a width is given).
 *   2. If the original is wider than CONFIG.original.maxWidth, resize it
 *      in place via a temp file, keeping its original format.
 *   3. Record the post-optimization hash plus final width/height in
 *      `cache` (mutated in place; the caller persists it to disk).
 *
 * @param {string} imagePath Absolute path to a .jpg/.jpeg/.png file.
 * @param {Object} cache Cache map keyed by repo-relative path; mutated.
 * @returns {Promise<boolean>} true if processed, false if skipped or errored.
 */
async function optimizeImage(imagePath, cache) {
  const relativePath = path.relative(ROOT_DIR, imagePath);
  const hash = getFileHash(imagePath);
  
  // Skip if already optimized with same hash
  if (cache[relativePath]?.hash === hash) {
    console.log(`⏭️  Skipping (unchanged): ${relativePath}`);
    return false;
  }

  const dir = path.dirname(imagePath);
  const ext = path.extname(imagePath);
  const basename = path.basename(imagePath, ext);
  const webpPath = path.join(dir, `${basename}.webp`);

  try {
    const image = sharp(imagePath);
    // NOTE(review): metadata.width/height are assumed present for jpg/png
    // inputs — confirm if other formats are ever added to CONFIG.extensions.
    const metadata = await image.metadata();

    // Create WebP version (max 1200px, quality 80)
    let webpPipeline = sharp(imagePath);
    let finalWidth = metadata.width;
    let finalHeight = metadata.height;

    if (metadata.width > CONFIG.webp.maxWidth) {
      const ratio = CONFIG.webp.maxWidth / metadata.width;
      finalWidth = CONFIG.webp.maxWidth;
      finalHeight = Math.round(metadata.height * ratio);
      webpPipeline = webpPipeline.resize(CONFIG.webp.maxWidth);
    }
    await webpPipeline
      .webp({ quality: CONFIG.webp.quality })
      .toFile(webpPath);

    // Optimize original (max 1600px, keep format)
    if (metadata.width > CONFIG.original.maxWidth) {
      // sharp does not support writing to the same path it is reading from,
      // so write to a temp file and swap it into place afterwards.
      const tempPath = imagePath + '.tmp';
      await sharp(imagePath)
        .resize(CONFIG.original.maxWidth)
        .toFile(tempPath);
      fs.renameSync(tempPath, imagePath);
      // Recalculate dimensions after resize
      const newMeta = await sharp(imagePath).metadata();
      finalWidth = newMeta.width;
      finalHeight = newMeta.height;
    }

    // Update cache with dimensions (width/height are later injected into
    // <img> tags by the Eleventy transform to avoid layout shift).
    cache[relativePath] = {
      hash: getFileHash(imagePath), // Recalculate after optimization
      width: finalWidth,
      height: finalHeight,
      optimizedAt: new Date().toISOString(),
    };

    console.log(`✅ Optimized: ${relativePath} (${finalWidth}x${finalHeight})`);
    return true;
  } catch (error) {
    // Best-effort: log and move on so one bad image doesn't abort the run.
    console.error(`❌ Error optimizing ${relativePath}:`, error.message);
    return false;
  }
}

// Entry point: scan the blog folders, optimize each image (skipping
// unchanged ones via the hash cache), then persist the updated cache.
async function main() {
  console.log('\n🖼️  Starting image optimization...\n');

  const cache = loadCache();
  const images = findImages();

  console.log(`Found ${images.length} images to check\n`);

  const counts = { optimized: 0, skipped: 0 };

  // Sequential on purpose: keeps memory bounded and log output ordered.
  for (const imagePath of images) {
    if (await optimizeImage(imagePath, cache)) {
      counts.optimized += 1;
    } else {
      counts.skipped += 1;
    }
  }

  saveCache(cache);

  console.log(`\n✨ Done! Optimized: ${counts.optimized}, Skipped: ${counts.skipped}\n`);
}

main().catch(console.error);

The script optimizes the original image, creates a WebP version of the image, and updates the .image-cache.json file to avoid re-optimization of the image(s). The script also stores the width and height of the images in the JSON cache file, to be later used for the width and height attributes in the img tags to avoid any cumulative layout shift on the pages.

Local image optimization using sharp library

And after this my Netlify build time was reduced to just 20 seconds, as you see in the screenshot below:

New Netlify build time

And I use the HTML <picture> tag to show images as you see below.

<picture>
  <source srcset="./image.webp" type="image/webp">
  <img alt="alt text" decoding="async" width="1200" height="630" src="./image.png">
</picture>

The browser first tries to load the .webp version of the image, and if it's not supported then loads the optimized version in the original format (PNG/JPEG). I only link to the original image in my Markdown file as ![alt](./image.png) and the above picture tag with WebP is automatically added in the final HTML via the following code in the eleventy.config.js file:

	// Picture tag transform: wrap <img> with <picture> for WebP + fallback
// Post-process rendered blog HTML: wrap every <img> pointing at a
// jpg/jpeg/png in a <picture> element with a WebP <source>, and inject
// decoding="async" plus width/height (from .image-cache.json) so the
// browser can reserve layout space and avoid cumulative layout shift.
eleventyConfig.addTransform("pictureTag", function(content, outputPath) {
  // Only apply to blog pages
  if (!outputPath || !outputPath.endsWith(".html") || !outputPath.includes("/blog/")) {
    return content;
  }

  // Load image dimensions cache produced by the local sharp script
  let imageCache = {};
  try {
    const cacheContent = fs.readFileSync('.image-cache.json', 'utf-8');
    imageCache = JSON.parse(cacheContent);
  } catch {
    // Cache not found, continue without dimensions
  }

  // Extract blog slug from outputPath (e.g., "_site/blog/better-images/index.html" -> "better-images")
  const blogMatch = outputPath.match(/\/blog\/([^/]+)\//);
  const blogSlug = blogMatch ? blogMatch[1] : null;

  // Match <img> tags with jpg/jpeg/png src
  return content.replace(
    /<img\s+([^>]*?)src=["']([^"']+\.(jpg|jpeg|png))["']([^>]*)>/gi,
    (match, before, src, ext, after) => {
      // Opt-out hook: leave any <img data-no-picture ...> untouched
      if (match.includes('data-no-picture')) {
        return match;
      }

      // The sharp script writes a sibling .webp with the same basename
      const webpSrc = src.replace(/\.(jpg|jpeg|png)$/i, '.webp');

      // Keep existing attributes, add decoding async if not present
      let attrs = before + after;
      if (!attrs.includes('decoding=')) {
        attrs += ' decoding="async"';
      }

      // Add width/height from cache if available
      // Convert relative src (e.g., "./image.png") to cache key (e.g., "content/blog/slug/image.png")
      if (blogSlug && !attrs.includes('width=')) {
        const imageName = src.replace(/^\.\//, ''); // Remove leading ./
        const cacheKey = `content/blog/${blogSlug}/${imageName}`;
        const cached = imageCache[cacheKey];
        if (cached?.width && cached?.height) {
          attrs += ` width="${cached.width}" height="${cached.height}"`;
        }
      }

      return `<picture>
      <source srcset="${webpSrc}" type="image/webp">
      <img ${attrs.trim()} src="${src}">
    </picture>`;
    }
  );
});

It's a bit complicated setup, but I am very much satisfied with the setup, and will stay with 11ty, for now.

Comment via email