revert: build(aio): implement prerendering (#15346)
This reverts commit d0bc83ca27.
Protractor-based prerendering is flaky on Travis and takes several minutes to
complete, slowing down the build. Prerendering has a lower impact now that we
use a ServiceWorker. We will revisit in the future (probably using a
`PlatformServer`-based approach).
PR Close #15346
This commit is contained in:

committed by
Miško Hevery

parent
90d2518d9a
commit
1bcbcfd56f
@ -1,30 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
// Imports
|
||||
const path = require('path');
|
||||
|
||||
// Constants
|
||||
const BROWSER_INSTANCES = 3;
|
||||
|
||||
const PORT = 4201;
|
||||
const BASE_URL = `http://localhost:${PORT}`;
|
||||
|
||||
const ROOT_DIR = path.join(__dirname, '../..');
|
||||
const DIST_DIR = path.join(ROOT_DIR, 'dist');
|
||||
const CONTENT_DIR = path.join(DIST_DIR, 'content');
|
||||
const INPUT_DIR = path.join(CONTENT_DIR, 'docs');
|
||||
const TMP_SPECS_DIR = path.join(ROOT_DIR, 'tmp/docs-prerender-specs');
|
||||
const TMP_OUTPUT_DIR = path.join(ROOT_DIR, 'tmp/docs-prerendered');
|
||||
|
||||
// Exports
|
||||
module.exports = {
|
||||
BASE_URL,
|
||||
BROWSER_INSTANCES,
|
||||
CONTENT_DIR,
|
||||
DIST_DIR,
|
||||
INPUT_DIR,
|
||||
PORT,
|
||||
ROOT_DIR,
|
||||
TMP_OUTPUT_DIR,
|
||||
TMP_SPECS_DIR
|
||||
};
|
@ -1,10 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
// Imports
|
||||
const sh = require('shelljs');
|
||||
const { CONTENT_DIR, TMP_OUTPUT_DIR } = require('./constants');
|
||||
|
||||
sh.config.fatal = true;
|
||||
|
||||
// Run
|
||||
sh.cp('-r', TMP_OUTPUT_DIR, CONTENT_DIR);
|
@ -1,89 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
// Imports
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const sh = require('shelljs');
|
||||
const { BASE_URL, BROWSER_INSTANCES, INPUT_DIR, PORT, TMP_OUTPUT_DIR, TMP_SPECS_DIR } = require('./constants');
|
||||
|
||||
sh.config.fatal = true;
|
||||
|
||||
// Helpers
|
||||
const chunkArray = (items, numChunks) => {
|
||||
numChunks = Math.min(numChunks, items.length);
|
||||
const itemsPerChunk = Math.ceil(items.length / numChunks);
|
||||
const chunks = new Array(numChunks);
|
||||
|
||||
console.log(`Chunking ${items.length} items into ${numChunks} chunks.`);
|
||||
|
||||
for (let i = 0; i < numChunks; i++) {
|
||||
chunks[i] = items.slice(i * itemsPerChunk, (i + 1) * itemsPerChunk);
|
||||
}
|
||||
|
||||
return chunks;
|
||||
};
|
||||
|
||||
const getAllFiles = rootDir => fs.readdirSync(rootDir).reduce((files, file) => {
|
||||
const absolutePath = path.join(rootDir, file);
|
||||
const isFile = fs.lstatSync(absolutePath).isFile();
|
||||
|
||||
return files.concat(isFile ? absolutePath : getAllFiles(absolutePath));
|
||||
}, []);
|
||||
|
||||
const getAllUrls = rootDir => getAllFiles(rootDir).
|
||||
filter(absolutePath => path.extname(absolutePath) === '.json').
|
||||
map(absolutePath => absolutePath.slice(0, -5)).
|
||||
map(absolutePath => path.relative(INPUT_DIR, absolutePath)).
|
||||
map(relativePath => `${BASE_URL}/${relativePath}`);
|
||||
|
||||
const getTestForChunk = (chunk, idx) => `
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const protractor = require('protractor');
|
||||
const sh = require('shelljs');
|
||||
const url = require('url');
|
||||
|
||||
const browser = protractor.browser;
|
||||
sh.config.fatal = true;
|
||||
|
||||
describe('chunk ${idx}', () => ${JSON.stringify(chunk)}.forEach(urlToPage => {
|
||||
const parsedUrl = url.parse(urlToPage);
|
||||
|
||||
it(\`should render \${parsedUrl.path}\`, done => {
|
||||
browser.get(urlToPage);
|
||||
browser.getPageSource()
|
||||
.then(source => {
|
||||
if (/document not found/i.test(source) && !/file-not-found/i.test(urlToPage)) {
|
||||
return Promise.reject(\`404 for \${urlToPage}\`);
|
||||
}
|
||||
|
||||
const relativeFilePath = parsedUrl.path.replace(/\\/$/, '/index').replace(/^\\//, '') + '.html';
|
||||
const absoluteFilePath = path.resolve('${TMP_OUTPUT_DIR}', relativeFilePath);
|
||||
const absoluteDirPath = path.dirname(absoluteFilePath);
|
||||
|
||||
console.log(\`Writing to \${absoluteFilePath}...\`);
|
||||
|
||||
sh.mkdir('-p', absoluteDirPath);
|
||||
fs.writeFileSync(absoluteFilePath, source);
|
||||
})
|
||||
.then(done, done.fail);
|
||||
});
|
||||
}));
|
||||
`;
|
||||
|
||||
// Run
|
||||
const docsUrls = getAllUrls(INPUT_DIR);
|
||||
const chunked = chunkArray(docsUrls, BROWSER_INSTANCES);
|
||||
|
||||
sh.rm('-rf', TMP_OUTPUT_DIR);
|
||||
sh.rm('-rf', TMP_SPECS_DIR);
|
||||
sh.mkdir('-p', TMP_SPECS_DIR);
|
||||
|
||||
chunked.forEach((chunk, idx) => {
|
||||
const outputFile = path.join(TMP_SPECS_DIR, `chunk${idx}.spec.js`);
|
||||
const testContent = getTestForChunk(chunk, idx);
|
||||
|
||||
fs.writeFileSync(outputFile, testContent);
|
||||
});
|
@ -1,35 +0,0 @@
|
||||
// Protractor configuration file, see link for more information
|
||||
// https://github.com/angular/protractor/blob/master/lib/config.ts
|
||||
'use strict';
|
||||
|
||||
/*global jasmine */
|
||||
const { SpecReporter } = require('jasmine-spec-reporter');
|
||||
const path = require('path');
|
||||
const { BASE_URL, BROWSER_INSTANCES, TMP_SPECS_DIR } = require('./constants');
|
||||
|
||||
exports.config = {
|
||||
allScriptsTimeout: 11000,
|
||||
specs: [
|
||||
path.join(TMP_SPECS_DIR, 'chunk*.spec.js')
|
||||
],
|
||||
capabilities: {
|
||||
browserName: 'chrome',
|
||||
shardTestFiles: true,
|
||||
maxInstances: BROWSER_INSTANCES,
|
||||
// For Travis
|
||||
chromeOptions: {
|
||||
binary: process.env.CHROME_BIN
|
||||
}
|
||||
},
|
||||
directConnect: true,
|
||||
baseUrl: BASE_URL,
|
||||
framework: 'jasmine',
|
||||
jasmineNodeOpts: {
|
||||
showColors: true,
|
||||
defaultTimeoutInterval: 30000,
|
||||
print: function() {}
|
||||
},
|
||||
onPrepare() {
|
||||
jasmine.getEnv().addReporter(new SpecReporter({spec: {displayStacktrace: true}}));
|
||||
}
|
||||
};
|
@ -1,63 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
// Imports
|
||||
const fs = require('fs');
|
||||
const http = require('http');
|
||||
const path = require('path');
|
||||
const { BASE_URL, DIST_DIR, PORT } = require('./constants');
|
||||
|
||||
// Constants
|
||||
const CONTENT_TYPES = {
|
||||
'.css': 'text/css',
|
||||
'.html': 'text/html',
|
||||
'.ico': 'image/x-icon',
|
||||
'.jpg': 'image/jpeg',
|
||||
'.js': 'text/javascript',
|
||||
'.json': 'application/json',
|
||||
'.png': 'image/png',
|
||||
'.svg': 'image/svg+xml'
|
||||
};
|
||||
const CACHE = {};
|
||||
const VERBOSE = process.argv.includes('--verbose');
|
||||
|
||||
// Helpers
|
||||
const urlToFile = url => path.join(DIST_DIR, url);
|
||||
|
||||
const getFile = filePath => new Promise((resolve, reject) => CACHE.hasOwnProperty(filePath) ?
|
||||
resolve(CACHE[filePath]) :
|
||||
fs.readFile(filePath, 'utf-8', (err, content) => err ? reject(err) : resolve(CACHE[filePath] = content)));
|
||||
|
||||
const middleware = (req, res) => {
|
||||
const method = req.method;
|
||||
let url = req.url;
|
||||
|
||||
if (VERBOSE) console.log(`Request: ${method} ${url}`);
|
||||
if (method !== 'GET') return;
|
||||
|
||||
if (url.endsWith('/')) url += 'index';
|
||||
if (!url.includes('.')) url += '.html';
|
||||
|
||||
let filePath = urlToFile(url);
|
||||
if (!fs.existsSync(filePath)) filePath = urlToFile('index.html');
|
||||
|
||||
getFile(filePath).
|
||||
then(content => {
|
||||
const contentType = CONTENT_TYPES[path.extname(filePath)] || 'application/octet-stream';
|
||||
res.setHeader('Content-Type', contentType);
|
||||
res.end(content);
|
||||
}).
|
||||
catch(err => {
|
||||
console.error(err);
|
||||
res.statusCode = 500;
|
||||
res.end(http.STATUS_CODES[500]);
|
||||
});
|
||||
};
|
||||
|
||||
// Run
|
||||
const server = http.
|
||||
createServer(middleware).
|
||||
on('error', err => console.error(err)).
|
||||
on('listening', () => console.log(`Server listening at ${BASE_URL}.`)).
|
||||
listen(PORT);
|
||||
|
||||
|
Reference in New Issue
Block a user