Compare commits


No commits in common. "master" and "v6.0.0" have entirely different histories.

10 changed files with 2901 additions and 17260 deletions

(GitHub Actions workflow)

@@ -1,6 +1,6 @@
name: Test template output
on: [push, pull_request]
on: [push]
jobs:
  test:
@@ -9,8 +9,9 @@ jobs:
    strategy:
      matrix:
        node_version: [10, 11, 12, 13, 14]
        os: [ubuntu-latest, windows-latest, macOS-latest]
        node_version: [10, 11, 12, 13]
        os: [ubuntu-latest, macOS-latest]
        # os: [ubuntu-latest, windows-latest, macOS-latest]
    steps:
      - uses: actions/checkout@v1

.nvmrc (2 lines changed)

@@ -1 +1 @@
v12
v10

gulpfile.js

@@ -23,6 +23,7 @@ require('./tasks/lint')(options);
require('./tasks/postcss')(options);
require('./tasks/sass')(options);
require('./tasks/check-for-unused').checkForUnusedTask(options);
require('./tasks/check-deps')(options);
/* Runs the entire pipeline once. */
gulp.task(

package-lock.json (generated; 19923 lines changed). File diff suppressed because it is too large.

package.json

@@ -1,6 +1,6 @@
{
"name": "responsive-html-email-signature",
"version": "6.1.0",
"version": "6.0.0",
"description": "Responsive template for emails & email signatures.",
"main": "index.js",
"repository": {
@@ -23,27 +23,25 @@
},
"homepage": "https://github.com/danmindru/responsive-html-email-signature#readme",
"scripts": {
"start": "./node_modules/.bin/gulp",
"once": "./node_modules/.bin/gulp run-pipeline",
"start": "node ./node_modules/.bin/gulp",
"once": "node ./node_modules/.bin/gulp run-pipeline",
"deploy": "npm run test && cp -r dist demo && git push origin `git subtree split --prefix demo develop`:gh-pages --force",
"test": "npm run once && npm run _test",
"test:watch": "npm run once && npm run _test:watch",
"format": "./node_modules/.bin/prettier {tasks,tests}/**/*.js gulpfile.js .eslintrc.js --write",
"lint": "./node_modules/.bin/eslint ./**/*.js gulpfile.js",
"_test": "./node_modules/.bin/ava",
"_test:watch": "./node_modules/.bin/ava --watch"
"test": "npm run once && node ./node_modules/.bin/ava",
"test:watch": "npm run once && node ./node_modules/.bin/ava --watch",
"format": "node ./node_modules/.bin/prettier {tasks,tests}/**/*.js gulpfile.js .eslintrc.js --write",
"lint": "node ./node_modules/.bin/eslint ./**/*.js gulpfile.js"
},
"dependencies": {
"autoprefixer": "^9.6.1",
"chalk": "^2.4.2",
"cheerio": "^0.22.0",
"del": "^5.1.0",
"graceful-fs": "^4.2.3",
"gulp": "^4.0.2",
"gulp-autoprefixer": "^7.0.1",
"gulp-david": "^1.0.1",
"gulp-inline-css": "^3.5.0",
"gulp-inline-images-no-http": "^1.3.3",
"gulp-jsonlint": "^1.3.2",
"gulp-inline-images-no-http": "^1.3.0",
"gulp-jsonlint": "^1.3.1",
"gulp-less": "^4.0.1",
"gulp-minify-html": "~1.0.5",
"gulp-minify-inline": "^1.1.0",
@@ -51,32 +49,28 @@
"gulp-postcss": "^8.0.0",
"gulp-preprocess": "^3.0.3",
"gulp-rename": "^2.0.0",
"gulp-sass": "^4.1.0",
"gulp-sass": "^4.0.2",
"klaw": "^3.0.0",
"node-sass": "^7.0.0",
"plugin-error": "^1.0.1",
"through2": "^2.0.5"
"node-sass": "^4.13.1"
},
"resolutions": {
"graceful-fs": "^4.1.15"
},
"devDependencies": {
"ava": "^2.4.0",
"eslint": "^6.8.0",
"eslint-config-prettier": "^6.11.0",
"eslint-config-standard": "^14.1.1",
"eslint-plugin-import": "^2.22.0",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-prettier": "^3.1.4",
"eslint-config-prettier": "^6.10.0",
"eslint-config-standard": "^14.1.0",
"eslint-plugin-import": "^2.20.1",
"eslint-plugin-node": "^11.0.0",
"eslint-plugin-prettier": "^3.1.2",
"eslint-plugin-promise": "^4.2.1",
"eslint-plugin-standard": "^4.0.1",
"graceful-fs": "^4.2.4",
"gulp-cli": "^2.3.0",
"opencollective-postinstall": "^2.0.3",
"gulp-cli": "^2.2.0",
"opencollective-postinstall": "^2.0.2",
"prettier": "^1.19.1",
"pretty-quick": "^2.0.1"
},
"resolutions": {
"graceful-fs": "^4.2.4",
"vinyl-fs": "^3.0.3"
},
"browserslist": [
"last 5 versions"
],

tasks/build.js

@@ -5,7 +5,7 @@ const minifyInline = require('gulp-minify-inline');
const preprocess = require('gulp-preprocess');
const rename = require('gulp-rename');
const del = require('del');
const { inlineImg } = require('./check-for-image-url');
const inlineimg = require('gulp-inline-images-no-http');
const { getConfigsForDir, getFilePathsForDir, getCssLinkTagsFromFilelist } = require('./util/util');
function buildTask(options) {
@@ -34,7 +34,7 @@ function buildTask(options) {
  return options
    .src([cwd + '/**/*.html', '!' + cwd + '/**/*.inc.html'])
    .pipe(preprocess({ context }))
    .pipe(inlineImg({ getHTTP: confItems[0]['inlineRemoteUrl'] }))
    .pipe(inlineimg())
    .pipe(
      inlineCss({
        applyTableAttributes: true,

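For context on the confItems[0]['inlineRemoteUrl'] option the master-side build task reads above, here is a minimal sketch of what a per-template conf entry could look like. Only the keys id and inlineRemoteUrl appear elsewhere in this comparison; the file name and everything else in the sketch is an assumption.

// Hypothetical conf file for one template directory (shape assumed, not taken from this diff).
module.exports = [
  {
    id: 'my-signature', // hypothetical template id; 'id' does appear in the check-for-unused task below
    inlineRemoteUrl: true // when true, remote images are downloaded and inlined as base64 via inlineImg({ getHTTP: ... })
  }
];
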
tasks/check-deps.js (new file; 10 lines)

@@ -0,0 +1,10 @@
const gulp = require('gulp');
const david = require('gulp-david');

function checkDepsTask() {
  gulp.task('check-deps', function checkDeps() {
    gulp.src('package.json').pipe(david());
  });
}

module.exports = checkDepsTask;

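The gulpfile hunk earlier in this comparison wires this module up with require('./tasks/check-deps')(options), so the task ends up registered as check-deps. Below is a minimal sketch, assuming gulp 4 and the local gulp-cli from devDependencies; returning the stream is a common convention so gulp can detect completion, not something the diffed file itself does.

// Sketch only: the same task, but returning the stream so gulp 4 can signal async completion.
const gulp = require('gulp');
const david = require('gulp-david');

function checkDepsTask() {
  gulp.task('check-deps', function checkDeps() {
    // gulp-david reports outdated dependencies listed in package.json.
    return gulp.src('package.json').pipe(david());
  });
}

module.exports = checkDepsTask;

// Run with the project's local CLI, e.g.: node ./node_modules/.bin/gulp check-deps
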
tasks/check-for-image-url.js

@@ -1,155 +0,0 @@
'use strict';

const https = require('https');
const http = require('http');
const path = require('path');
const url = require('url');
const fs = require('fs');

const PluginError = require('plugin-error');
const through = require('through2');
const cheerio = require('cheerio');
const { log } = require('./util/util');

const PLUGIN_NAME = 'gulp-inline-images';
const MIME_TYPE_REGEX = /.+\/([^\s]*)/;
const INLINE_ATTR = 'inline';
const NOT_INLINE_ATTR = `!${INLINE_ATTR}`;

function inlineImg(options = {}) {
  const selector = options.selector || 'img[src]';
  const attribute = options.attribute || 'src';
  const getHTTP = options.getHTTP || false;

  return through.obj(function(file, encoding, callback) {
    if (file.isStream()) {
      this.emit('error', new PluginError(PLUGIN_NAME, 'Streams are not supported!'));
      return callback();
    }

    if (file.isBuffer()) {
      const contents = file.contents.toString(encoding);

      // Load it into cheerio's virtual DOM for easy manipulation
      const $ = cheerio.load(contents);
      const inlineFlag = $(`img[${INLINE_ATTR}]`);

      // If images with an inline attr are found that is the selection we want
      const imgTags = inlineFlag.length ? inlineFlag : $(selector);
      let count = 0;

      imgTags.each(function() {
        const $img = $(this);
        const src = $img.attr(attribute);

        // Save the file format from the extension
        const extFormat = path.extname(src).substr(1);

        // If inlineFlag tags were found we want to remove the inline tag
        if (inlineFlag.length) {
          $img.removeAttr(INLINE_ATTR);
        }

        // Find !inline attribute
        const notInlineFlag = $img.attr(NOT_INLINE_ATTR);
        if (typeof notInlineFlag !== typeof undefined && notInlineFlag !== false) {
          // Remove the tag and don't process this file
          return $img.removeAttr(NOT_INLINE_ATTR);
        }

        // Count async ops
        count++;

        getSrcBase64(options.basedir || file.base, getHTTP, src, (err, result, resFormat, skipFormatting) => {
          if (err) {
            log.warn(`Failed to load http image. Check the format of ${src}.`);
            log.error(err);
          } else {
            // Need a format in and a result for this to work
            if (!skipFormatting) {
              if (result && (extFormat || resFormat)) {
                $img.attr('src', `data:image/${extFormat};base64,${result}`);
              } else {
                $img.attr('src', ``);
                $img.attr('alt', `Image not found, Please check Url`);
                log.warn(`Failed to read image. Check the format of ${src}.`);
              }
            }

            if (!--count) {
              file.contents = Buffer.from($.html());
              callback(null, file);
            }
          }
        });
      });

      // If no files are processing we don't need to wait as none were ever started
      if (!imgTags.length) {
        file.contents = Buffer.from($.html());
        callback(null, file);
      }
    }
  });
}

function getHTTPBase64(url, callback) {
  // Get applicable library
  const lib = url.startsWith('https') ? https : http;

  // Initiate a git request to our URL
  const req = lib.get(url, res => {
    // Check for redirect
    if (res.statusCode >= 301 && res.statusCode < 400 && res.headers.location) {
      // Redirect
      return getHTTPBase64(res.headers.location, callback);
    }

    // Check for HTTP errors
    if (res.statusCode < 200 || res.statusCode >= 400) {
      return callback(new Error('Failed to load page, status code: ' + res.statusCode));
    }

    // Get file format
    let format;
    if (res.headers['content-type']) {
      const matches = res.headers['content-type'].match(MIME_TYPE_REGEX);
      if (matches) {
        format = matches[1];
      }
    }

    // Create an empty buffer to store the body in
    let body = Buffer.from([]);

    // Append each chunk to the body
    res.on('data', chunk => (body = Buffer.concat([body, chunk])));

    // Done callback
    res.on('end', () => callback(null, body.toString('base64'), format));
  });

  // Listen for network errors
  req.on('error', err => callback(err));
}

function getSrcBase64(base, getHTTP, src, callback) {
  // TODO: @deprecated — since v11.0.0 url.parse should be replaced with url.URL() ctor
  if (!url.parse(src).hostname) {
    // Get local file
    const filePath = path.join(base, src);
    if (fs.existsSync(filePath)) {
      fs.readFile(filePath, 'base64', callback);
    } else {
      callback(null);
    }
  } else {
    // Get remote file
    if (getHTTP) {
      return getHTTPBase64(src, callback);
    } else {
      callback(null, src, null, true);
    }
  }
}

module.exports = {
  inlineImg,
  getHTTPBase64,
  getSrcBase64
};

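To make the removed module's role concrete, here is a minimal usage sketch. It assumes the file lives at tasks/check-for-image-url.js (the path the build task requires it from) and that gulp 4 is installed; the getHTTP option comes from the source above, while the task name and globs are illustrative.

// Sketch only. inlineImg() rewrites <img src> values as base64 data URIs.
// If any <img> carries an `inline` attribute, only those are processed;
// individual images can opt out with `!inline`. Remote http(s) sources are
// fetched only when getHTTP is true, otherwise they are left untouched.
const gulp = require('gulp');
const { inlineImg } = require('./tasks/check-for-image-url');

gulp.task('inline-images', () =>
  gulp
    .src('src/**/*.html')
    .pipe(inlineImg({ getHTTP: true }))
    .pipe(gulp.dest('dist'))
);
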
tasks/check-for-unused.js

@@ -28,9 +28,7 @@ const outputWarningsForUnusedItems = (unusedItems, configs) => {
  const { dir } = configs[index];

  unusedInConfigs.forEach(unusedInConfItems => {
    const unusedItemsToLog = unusedInConfItems
      .filter(item => item !== `${OUTPUT_KEYWORD} id`)
      .filter(item => item !== '@echo inlineRemoteUrl');
    const unusedItemsToLog = unusedInConfItems.filter(item => item !== `${OUTPUT_KEYWORD} id`);

    if (unusedItemsToLog.length) {
      log.warn(

tasks/util/util.js

@@ -25,9 +25,8 @@ const getConfigsForDir = (rootDir, configFileName) => {
  let current = null;
  let confItems;

  const resolvedPath = path.resolve(rootDir, confPath);
  delete require.cache[resolvedPath]; // NB: For 'watch' to properly work, the cache needs to be deleted before each require.
  current = require(resolvedPath);
  delete require.cache[require.resolve(rootDir, confPath)]; // NB: For 'watch' to properly work, the cache needs to be deleted before each require.
  current = require(path.resolve(rootDir, confPath));

  // Handle single objects or arrays of configs.
  if (current && current.length) {
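The hunk above is about cache-busting config modules so that watch rebuilds pick up edits to conf files. Here is a minimal sketch of the pattern the master side uses, assuming confPath includes the file extension and resolves to a real file under rootDir; the helper name and the example path are illustrative.

// Sketch only: require() caches modules by their absolute resolved path,
// so deleting that cache entry before requiring returns a fresh copy.
const path = require('path');

function loadConfigFresh(rootDir, confPath) {
  const resolvedPath = path.resolve(rootDir, confPath); // absolute path, the key used in require.cache
  delete require.cache[resolvedPath];
  return require(resolvedPath);
}

// e.g. const confItems = loadConfigFresh(process.cwd(), 'src/my-template/conf.js'); // hypothetical path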