Open djforth opened 8 years ago
I've pushed a fix for this: https://github.com/imagemin/imagemin/pull/192
Would really like to see this make its way into imagemin. An ideal workflow for us is to run imagemin on a directory and simply overwrite all of the files with the minified versions and still preserve the folder tree.
👍
I'd really like to see https://github.com/imagemin/imagemin/pull/225 merged soon. With one condition - for me I had to add the path.sep in the middle of the string concat. Then it worked just fine.
👍
👍
Is there any movement? I switched from gulp-imagemin to this library and this issue is introducing breaking changes.
Please see new approach in #262 👍
Any update?
I'm wondering why "no one else" has this problem, it's so obvious when you use a glob pattern for input. Maybe the package managers out there hide the problem, but I'm using npm scripts, to be as plugin independent as possible.
My current ugly solution is, stop using the glob pattern **/*
and creating an imagemin task for every subdirectory and run them with npm-run-all: "dev:build:img": "npm-run-all --parallel dev:build:img:*"
My fork imagemin-keep-folder of imagemin.
It supports keeping and customizing the folder structure.
$ npm install --save-dev imagemin-keep-folder
// as usual
const imagemin = require('imagemin-keep-folder');
imagemin(['images/*.{jpg,png}'], 'build/images', {
}).then(files => {
console.log(files);
//=> [{data: <Buffer 89 50 4e …>, path: 'build/images/foo.jpg'}, …]
});
// keep folder structure as input
const imagemin = require('imagemin-keep-folder');
imagemin(['images/**/*.{jpg,png}'], {
});
// for example
// images/a.jpg => images/a.jpg
// images/foo/a.jpg => images/foo/a.jpg
// images/foo/bar/a.jpg => images/foo/bar/a.jpg
// keep folder structure as input use imagemin-webp
const imagemin = require('imagemin-keep-folder');
const imageminWebp = require("imagemin-webp");
imagemin(['images/**/*.{jpg,png}'], {
use: [
imageminWebp({})
]
});
// for example
// images/a.jpg => images/a.webp
// images/foo/a.jpg => images/foo/a.webp
// images/foo/bar/a.jpg => images/foo/bar/a.webp
// customize folder structure as input use imagemin-webp
const imagemin = require('imagemin-keep-folder');
const imageminWebp = require("imagemin-webp");
imagemin(['images/**/*.{jpg,png}'], {
use: [
imageminWebp({})
],
replaceOutputDir: output => {
return output.replace(/images\//, '.webp/')
}
});
// for example
// images/a.jpg => .webp/a.webp
// images/foo/a.jpg => .webp/foo/a.webp
// images/foo/bar/a.jpg => .webp/foo/bar/a.webp
Any movement on this?
This was my workaround to keep the same subfolder structure in output folder:
const imagemin = require('imagemin');
const imageminMozjpeg = require('imagemin-mozjpeg');
const imageminPngquant = require('imagemin-pngquant');
const imageminSvgo = require('imagemin-svgo');
const { lstatSync, readdirSync } = require('fs');
const { join } = require('path');
/**
* @description
* Script for compressing all our static images.
* (Maintains current folder structure)
*
* ie. images_folder => compressed/images_folder
*/
/**
* Output directory
* Where all the compressed images will go
*/
const OUTPUT_DIR = 'compressed';
/**
* List of input directories
*/
const INPUT_DIRS = [
'images_folder',
// ADD NEW FOLDERS HERE
// ...
];
/**
* Helper functions to get directories / sub-directories
*
* @see https://stackoverflow.com/a/40896897/4364074
*/
/**
 * Synchronous helpers for walking a directory tree.
 * @see https://stackoverflow.com/a/40896897/4364074
 */
const isDirectory = (source) => lstatSync(source).isDirectory();

// Immediate child directories of `source`, as joined paths.
const getDirectories = (source) => {
  const children = readdirSync(source).map((name) => join(source, name));
  return children.filter(isDirectory);
};

// `source` itself followed by every nested subdirectory, depth-first.
const getDirectoriesRecursive = (source) => {
  const subtrees = getDirectories(source).map(getDirectoriesRecursive);
  return [source].concat(...subtrees);
};
/**
 * Entry point: walks every directory under INPUT_DIRS recursively and runs
 * imagemin on each one, mirroring the tree under OUTPUT_DIR.
 *
 * NOTE(review): `options` (mozjpeg/pngquant/svgo settings) is defined in the
 * full version of this script (see the linked pastebin) — it is not visible
 * in this excerpt.
 */
console.log('Beginning image compression...');
(async () => {
  // Flatten the recursive directory listings of all input roots.
  const imageDirs = INPUT_DIRS.flatMap(getDirectoriesRecursive);
  /**
   * Loop through all subfolders, and recursively run imagemin,
   * outputting to the same subfolders inside OUTPUT_DIR folder
   */
  for (const [i, dir] of imageDirs.entries()) {
    await imagemin([`${dir}/*.{jpg,png,svg,gif}`], join(OUTPUT_DIR, dir), {
      plugins: [
        imageminMozjpeg(options['mozjpegOptions']),
        imageminPngquant(options['pngquantOptions']),
        imageminSvgo(options['svgoOptions'])
      ]
    });
    console.log(`...${(((i + 1) / imageDirs.length) * 100).toFixed(0)}%`);
  }
  console.log('Finished compressing all images!');
})().catch((e) => {
  // A synchronous try/catch around an async IIFE never sees its rejections;
  // errors must be handled on the returned promise instead.
  console.log(e);
});
Full version here: pastebin.
Please like if this worked (took me a whole day to figure out)!
Would love if this was fixed so the CLI could make use of it.
It seems convenient if destination
can be set as a function, like
destinatin: sourcePath => 'minified/' + sourcePath,
Currently, I move output files based on their sourcePath.
const { exec } = require('child_process');
const imagemin = require('imagemin');
const imageminPngquant = require('imagemin-pngquant');
const imageminJpegtran = require('imagemin-jpegtran');
// const imageminOptipng = require('imagemin-optipng');
/**
 * Minify one image path/glob into ./minified, then move each output file so
 * the source directory structure is preserved under the destination.
 * @param {string} path - file path or glob passed straight to imagemin.
 */
async function minify(path) {
  const destination = 'minified';
  const files = await imagemin([path], {
    destination,
    plugins: [
      imageminJpegtran(),
      imageminPngquant({
        quality: [0.6, 0.8],
      }),
      // imageminOptipng(),
    ]
  })
  console.log(files.map(file => file.sourcePath + ' -> ' + file.destinationPath));
  files.forEach(file => {
    // Directory portion of the source path (strip the trailing file name).
    const targetPath = destination + '/' + file.sourcePath.replace(/[^\\/]*$/, '');
    // Quote the paths: unquoted shell arguments break on spaces and allow
    // command injection through crafted file names.
    const cmd = `mkdir -p "${targetPath}" && mv "${file.destinationPath}" "${targetPath}"`;
    console.log(cmd);
    exec(cmd, (error, output) => {
      if (error) {
        console.log(error);
      }
    })
  })
}
@leafOfTree where from do you import exec
?
@JustFly1984 exec
is from node builtin module child_process
. My comment is updated.
const { exec } = require('child_process');
Based on @brothatru answer (thank you, you saved my day), I have modified some parts of the script for my needs (I want compressed files on another directory).
The script didn't work for me on a Windows environment because imagemin path params need forward slashes. I have also adapted the input params for imagemin to the latest version (the destination must now go under the destination key).
The script does not suit all cases with INPUT_DIR and OUTPUT_DIR, but it can be modified for any specific case. This is my approach (it supports having same file name on different folders):
const imagemin = require('imagemin');
// Lossy Plugins
const imageminMozjpeg = require('imagemin-mozjpeg');
const imageminPngquant = require('imagemin-pngquant');
const imageminGiflossy = require('imagemin-giflossy');
const imageminWebp = require('imagemin-webp');
const imageminSvgo = require('imagemin-svgo');
// Lossless Plugins
const imageminJpegtran = require('imagemin-jpegtran');
const imageminOptipng = require('imagemin-optipng');
const imageminGifsicle = require('imagemin-gifsicle');
const { lstatSync, readdirSync } = require('fs');
const { join, normalize } = require('path');
// Source directory for images to be optimized
const INPUT_DIR = 'static-src/img';
// Destination for compressed images
const OUTPUT_DIR = 'static/img';
// Colors for console.log messages
const COLORS = {
yellow: '\x1b[33m%s\x1b[0m'
};
/**
* Return true if source is a directory.
* @param {string} source Directory.
*/
/**
 * Return true if source is a directory.
 * @param {string} source Directory.
 */
const isDirectory = (source) => lstatSync(source).isDirectory();

/**
 * Get the immediate child directories of a given directory.
 * @param {string} source Directory.
 */
const getDirectories = (source) => {
  const children = readdirSync(source).map((name) => join(source, name));
  return children.filter(isDirectory);
};

/**
 * Recursively list a directory and all of its subdirectories, depth-first,
 * starting with the (normalized) directory itself.
 * @param {string} source Root directory.
 */
const getDirectoriesRecursive = (source) => {
  const subtrees = getDirectories(source).map(getDirectoriesRecursive);
  return [normalize(source)].concat(...subtrees);
};
/**
* Convert Windows backslash paths to slash paths.
* @param {string} path
*/
/**
 * Convert Windows backslash paths to forward-slash paths.
 * Extended-length paths (\\?\...) and paths containing non-ASCII characters
 * are returned unchanged.
 * @param {string} path
 */
const converToSlash = (path) => {
  if (/^\\\\\?\\/.test(path)) {
    return path; // extended-length Windows path
  }
  if (/[^\u0000-\u0080]+/.test(path)) {
    return path; // contains non-ASCII characters
  }
  return path.replace(/\\/g, '/');
};
console.log(COLORS.yellow, 'Beginning image compression.');
/**
 * Walk INPUT_DIR recursively and run imagemin per subdirectory, writing to
 * the mirrored subdirectory inside OUTPUT_DIR.
 */
(async () => {
  const imageDirs = getDirectoriesRecursive(INPUT_DIR);
  let imagesOptimized = 0;
  /**
   * Loop through all subfolders, and recursively run imagemin,
   * outputting to the same subfolders inside OUTPUT_DIR folder.
   */
  for (const dir of imageDirs) {
    /**
     * imagemin needs paths with forward slashes. converToSlash is needed
     * on Windows environment.
     *
     * Remove INPUT_DIR in OUTPUT_DIR for just getting the part of folder wanted.
     * If not replaced, the output would be: static/img/static-src/img/**
     */
    const destiny = converToSlash(join(OUTPUT_DIR, dir)).replace(INPUT_DIR, '');
    const files = await imagemin([`${converToSlash(dir)}/*.{jpg,png,svg,gif}`], {
      destination: normalize(destiny),
      plugins: [
        imageminJpegtran(),
        imageminPngquant({
          quality: [0.6, 0.8]
        }),
        imageminGifsicle(),
        imageminSvgo({
          plugins: [{ removeViewBox: false }]
        })
      ]
    });
    imagesOptimized += files.length;
  }
  console.log(COLORS.yellow, `Image compression finished. Total images compressed: ${imagesOptimized}`);
})().catch((e) => {
  // A bare async IIFE would turn any failure into an unhandled rejection.
  console.error(e);
});
All the forks are pretty outdated at this point so having it in the main app would be nice
Have created a small wrapper module that preserves directory structure, whilst not impacting imagemin's native API: https://github.com/adamduncan/imagemin-dir (alpha)
Hopefully can be of use until this issue is resolved. Feedback and corrections/improvements welcomed! 🚀
@adamduncan Hi, thank you for your ponyfill. I love your idea, but imagemin-dir
seems not to satisfy my needs.
input: 'source/images/**/*'
destination: 'dist/imgs'
What I expect:
source/images/foo.jpg -> dist/imgs/foo.jpg
source/images/foo/bar.jpg -> dist/imgs/foo/bar.jpg
source/images/foo/bar/baz.jpg -> dist/imgs/foo/bar/baz.jpg
but actually:
source/images/foo.jpg -> dist/imgs/images/foo.jpg
source/images/foo/bar.jpg -> dist/imgs/images/foo/bar.jpg
source/images/foo/bar/baz.jpg -> dist/imgs/images/foo/bar/baz.jpg
I made improvements on @ixkaito/imagemin
inspired by imagemin-dir
. Hope this helps someone who has the same needs as me.
https://github.com/imagemin/imagemin/blob/cfc8ff20979ac24931aa1bbdc3eb9b97a875775a/index.js#L32-L34
if destinationPath
is null, then will not writeFile
, so we can do this:
const util = require('util');
const path = require('path');
const fs = require('graceful-fs');
const makeDir = require('make-dir');
const writeFile = util.promisify(fs.writeFile);

const srcdir = 'src/images';
const distdir = 'dist/images';

/**
 * Compress every jpg/jpeg/png under srcdir. Because no `destination` is
 * passed, imagemin does not write files itself (destinationPath stays null),
 * so each result is written manually under distdir, mirroring the source
 * directory structure.
 */
require('imagemin')([srcdir + '/**/*.{jpg,jpeg,png}'], {
  plugins: [
    require('imagemin-jpegtran')({
      progressive: true
    }),
    require('imagemin-pngquant')({
      speed: 4,
      quality: '65-90'
    })
  ]
}).then(async files => {
  // for...of instead of forEach(async ...): forEach discards the returned
  // promises, so failures would be lost and completion could not be awaited.
  // (The original snippet was also missing the closing `})` of the callback.)
  for (const v of files) {
    const source = path.parse(v.sourcePath);
    v.destinationPath = `${source.dir.replace(srcdir, distdir)}/${source.name}${source.ext}`;
    await makeDir(path.dirname(v.destinationPath));
    await writeFile(v.destinationPath, v.data);
  }
}).catch(console.error);
This feature would be merged any time soon?
I'd also appreciate seeing this feature merged. Really.
Any comment on why this isn't being merged?
Any updates? Really looking forward to this feature
Hello, Is there any news regarding the essential feature, please ?
@loskael Thank you for your great workaround. Since imagemin changed to ESM in v8.0.0, I have updated the code.
require()
to import
.
.js
to .mjs
quality
option to an array for imagemin-pngquant v7.0.0+.forEach()
$ npm i -D imagemin imagemin-jpegtran imagemin-pngquant
import imagemin from 'imagemin';
import imageminJpegtran from 'imagemin-jpegtran';
import imageminPngquant from 'imagemin-pngquant';
import { promises as fsPromises } from 'node:fs';
import { promisify } from 'node:util';
import path from 'node:path';
import fs from 'graceful-fs';

const writeFile = promisify(fs.writeFile);
const srcdir = 'src/images';
const distdir = 'dist/images';

/**
 * Compress all jpg/jpeg/png images under srcdir and write the results under
 * distdir, preserving the source folder structure. No `destination` option
 * is passed, so imagemin returns the data and we persist each file manually.
 */
imagemin([srcdir + '/**/*.{jpg,jpeg,png}'], {
  plugins: [
    imageminJpegtran({
      progressive: true
    }),
    imageminPngquant({
      speed: 4,
      quality: [0.65, 0.9]
    })
  ]
}).then(async files => {
  // for...of instead of forEach(async ...): forEach ignores the promises its
  // callback returns, so writes could still be in flight (and failures
  // unobserved) when the chain resolves.
  for (const v of files) {
    const source = path.parse(v.sourcePath);
    v.destinationPath = `${source.dir.replace(srcdir, distdir)}/${source.name}${source.ext}`;
    await fsPromises.mkdir(path.dirname(v.destinationPath), { recursive: true });
    await writeFile(v.destinationPath, v.data);
  }
}).catch(console.error);
import imagemin from 'imagemin';
import imageminJpegtran from 'imagemin-jpegtran';
import imageminPngquant from 'imagemin-pngquant';
import { promises as fsPromises } from 'node:fs';
import { promisify } from 'node:util';
import path from 'node:path';
import fs from 'graceful-fs';

const writeFile = promisify(fs.writeFile);
const srcdir = 'src/images';
const distdir = 'dist/images';

/**
 * Compress all jpg/jpeg/png images under srcdir and write the results under
 * distdir, preserving the source folder structure. No `destination` option
 * is passed, so imagemin returns the data and we persist each file manually.
 */
imagemin([srcdir + '/**/*.{jpg,jpeg,png}'], {
  plugins: [
    imageminJpegtran({
      progressive: true
    }),
    imageminPngquant({
      speed: 4,
      quality: [0.65, 0.9]
    })
  ]
}).then(async files => {
  // for...of instead of forEach(async ...): forEach ignores the promises its
  // callback returns, so writes could still be in flight (and failures
  // unobserved) when the chain resolves.
  for (const v of files) {
    const source = path.parse(v.sourcePath);
    v.destinationPath = `${source.dir.replace(srcdir, distdir)}/${source.name}${source.ext}`;
    await fsPromises.mkdir(path.dirname(v.destinationPath), { recursive: true });
    await writeFile(v.destinationPath, v.data);
  }
}).catch(console.error);
This seems to have trouble when doing more than 500 MB worth of images. (I'm trying to do +20 GB worth) Is there a way to modify the code to get it to work with a larger quantity of images?
I get an error that looks like this:
node:internal/process/promises:279 triggerUncaughtException(err, true / fromPromise /); ^
[Error: EIO: i/o error, write] { errno: -5, code: 'EIO', syscall: 'write' }
or this:
node:internal/process/promises:279 triggerUncaughtException(err, true / fromPromise /); ^
Error: read ENOTCONN at tryReadStart (node:net:614:20) at Socket._read (node:net:625:5) at Socket.Readable.read (node:internal/streams/readable:487:10) at Socket.read (node:net:666:39) at new Socket (node:net:415:12) at Object.Socket (node:net:286:41) at createSocket (node:internal/child_process:328:14) at ChildProcess.spawn (node:internal/child_process:445:23) at Object.spawn (node:child_process:700:9) at execa (file:///mnt/d/Projects/GH_2022/imagemin-mozjpeg/node_modules/execa/index.js:84:26) { errno: -107, code: 'ENOTCONN', syscall: 'read', originalMessage: 'read ENOTCONN', shortMessage: 'Command failed with ENOTCONN: /mnt/d/Projects/GH_2022/imagemin-mozjpeg/node_modules/mozjpeg/vendor/cjpeg -quality 75\n' + 'read ENOTCONN', command: '/mnt/d/Projects/GH_2022/imagemin-mozjpeg/node_modules/mozjpeg/vendor/cjpeg -quality 75', escapedCommand: '"/mnt/d/Projects/GH_2022/imagemin-mozjpeg/node_modules/mozjpeg/vendor/cjpeg" -quality 75', exitCode: undefined, signal: undefined, signalDescription: undefined, stdout: Buffer(0) [Uint8Array] [], stderr: Buffer(0) [Uint8Array] [], failed: true, timedOut: false, isCanceled: false, killed: false }
So it looks like imagemin grabs all the images at once before it ever gets to the for loop and crashes so I just made it so it process all the images synchronously.
It's slow but robust and can do as many images as needed in one go. It also replicates the folder structure too to make things easy. Hopefully it helps someone else that wants to optimize a ton of jpeg files using the Mozjpeg encoder for their own images. (You can also easily change it for other plugins too if you want like imageminJpegtran
, imageminPngquant
, etc.)
Here's what I came up with:
// mozjpegify.mjs
import path from 'path';
import glob from 'glob';
import imagemin from 'imagemin';
import imageminMozjpeg from 'imagemin-mozjpeg';
const srcdir = 'images/source';
const distdir = 'images/dist';
// Kick off the pipeline.
Main();

/**
 * Entry point: collect every path under srcdir and hand the list to
 * GetPathCallback for filtering and optimization.
 */
async function Main() {
GetPath(srcdir, GetPathCallback);
}

/**
 * List every file and folder under src via a recursive glob and invoke
 * callback with glob's standard (err, matches) signature.
 * @param {string} src Root directory.
 * @param {Function} callback Node-style (err, matches) callback.
 */
function GetPath( src, callback ) {
glob(src + '/**/*', callback);
};
/**
 * For each globbed path, optimize jpg/jpeg/png files one at a time.
 * The sequential await is deliberate: it keeps memory bounded for very
 * large image sets (the author hit I/O errors when everything ran at once).
 * @param {Error|null} err Error from glob, if any.
 * @param {string[]} filePath All paths matched under srcdir (files and dirs).
 */
async function GetPathCallback(err, filePath) {
if (!err) {
for(let i=0; i<filePath.length; i++) {
//console.log( filePath[i] + ' -> ' + filePath[i].replace(srcdir, distdir) ); // source -> target
let ext = filePath[i].split('.').pop(); // get file extension
if( ext == 'jpg' || ext == 'jpeg' || ext == 'png' ) { // make sure it's an image and not a folder or something
// Mirror the source path under distdir; ParsePath strips the file name.
await Optimize( filePath[i], ParsePath(filePath[i].replace(srcdir, distdir)) );
}
}
}
else {
console.log('Error:', err);
}
}
/**
 * Run imagemin-mozjpeg on a single source image, writing the result into
 * destFolder.
 * @param {string} src Path of one image file.
 * @param {string} destFolder Output directory mirroring the source tree.
 */
async function Optimize( src, destFolder ) {
// The returned file list is not used further; kept for potential debugging.
const files = await imagemin(
[src],
{
destination: destFolder,
plugins: [
imageminMozjpeg({ quality: 75 })
]
}
);
console.log( src + '...Done' );
}
/**
 * Return the directory portion of a file path (no trailing separator).
 * @param {string} filepath
 * @returns {string}
 */
function ParsePath(filepath) {
  const { dir } = path.parse(filepath);
  return dir;
}
You can use the following code:
import fs from 'fs';
import imagemin from 'imagemin';
import imageminJpegtran from "imagemin-jpegtran";
import imageminPngquant from "imagemin-pngquant";
import path from 'path';
const INPUT = "input";
const OUTPUT = "output";
/**
 * Recursively pair every directory under `input` with its mirror under
 * `output`. The result always begins with the root pair itself.
 * @param {string} input Source directory (must exist).
 * @param {string} output Mirrored destination directory (need not exist).
 * @returns {{input: string, output: string}[]}
 */
function getInOut(input, output) {
  const pairs = [{ input, output }];
  for (const entry of fs.readdirSync(input)) {
    const childIn = path.join(input, entry);
    if (fs.statSync(childIn).isDirectory()) {
      pairs.push(...getInOut(childIn, path.join(output, entry)));
    }
  }
  return pairs;
}
/**
 * Walk every (input, output) directory pair and run imagemin on the images
 * directly inside each input directory, writing to the mirrored output path.
 */
(async () => {
  const input = path.join(process.cwd(), INPUT);
  const output = path.join(process.cwd(), OUTPUT);
  const dirs = getInOut(input, output);
  for (const item of dirs) {
    // Non-recursive glob: subdirectories are covered by their own pair.
    await imagemin([`${item.input}/*.{jpg,png}`], {
      destination: item.output,
      plugins: [
        imageminJpegtran(),
        imageminPngquant({
          quality: [0.6, 0.8],
        }),
      ],
    });
  }
  console.log('output success');
})().catch((e) => {
  // A bare async IIFE would swallow failures as unhandled rejections.
  console.error(e);
});
Think this may be related to https://github.com/imagemin/imagemin/issues/87.
If I set my input glob to /imgs/**/*.{png,gif,jpg,svg} and set my destination to /build/imgs then I would expect:
/imgs/foo/bar.jpg -> /build/imgs/foo/bar.jpg
but is actually creating it as:
/imgs/foo/bar.jpg -> /build/imgs/bar.jpg