build(aio): refactor dgeni packages

This is to tidy up the `authors-package`, which currently duplicates a lot
of the configuration in the main packages. We need to DRY this up so that a
change made in one package is not silently missed in the other.
Author: Peter Bacon Darwin
Date: 2017-04-21 13:10:52 +01:00
Committed by: Pete Bacon Darwin
Parent: 7a8bd99ab1
Commit: 3cad5da5a4
66 changed files with 480 additions and 634 deletions
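
To illustrate the goal of the refactoring: once the shared `angular-base-package` below exists, a derived package (for example the authors tooling) can be composed from it instead of copying its configuration. The sketch below is illustrative only and is not a file in this commit; the package name, source-file path and doc type are assumptions.

const Package = require('dgeni').Package;
const basePackage = require('./angular-base-package');

// Hypothetical derived package: it inherits the processors, readers and
// rendering setup from the shared base and only overrides what differs.
module.exports = new Package('authors-watch', [basePackage])
  .config(function(readFilesProcessor, convertToJsonProcessor) {
    readFilesProcessor.sourceFiles = [
      { include: 'content/guide/some-guide.md', basePath: 'content' }  // assumed path
    ];
    convertToJsonProcessor.docTypes = ['content'];  // assumed doc type
  });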


@@ -0,0 +1,701 @@
a
able
about
above
abst
accordance
according
accordingly
across
act
actually
added
adj
adopted
affected
affecting
affects
after
afterwards
again
against
ah
all
almost
alone
along
already
also
although
always
am
among
amongst
an
and
announce
another
any
anybody
anyhow
anymore
anyone
anything
anyway
anyways
anywhere
apparently
approximately
are
aren
arent
arise
around
as
aside
ask
asking
at
auth
available
away
awfully
b
back
be
became
because
become
becomes
becoming
been
before
beforehand
begin
beginning
beginnings
begins
behind
being
believe
below
beside
besides
between
beyond
biol
both
brief
briefly
but
by
c
ca
came
can
cannot
can't
cant
cause
causes
certain
certainly
co
com
come
comes
contain
containing
contains
could
couldnt
d
date
did
didn't
didnt
different
do
does
doesn't
doesnt
doing
done
don't
dont
down
downwards
due
during
e
each
ed
edu
effect
eg
eight
eighty
either
else
elsewhere
end
ending
enough
especially
et
et-al
etc
even
ever
every
everybody
everyone
everything
everywhere
ex
except
f
far
few
ff
fifth
first
five
fix
followed
following
follows
for
former
formerly
forth
found
four
from
further
furthermore
g
gave
get
gets
getting
give
given
gives
giving
go
goes
gone
got
gotten
h
had
happens
hardly
has
hasn't
hasnt
have
haven't
havent
having
he
hed
hence
her
here
hereafter
hereby
herein
heres
hereupon
hers
herself
hes
hi
hid
him
himself
his
hither
home
how
howbeit
however
hundred
i
id
ie
if
i'll
ill
im
immediate
immediately
importance
important
in
inc
indeed
index
information
instead
into
invention
inward
is
isn't
isnt
it
itd
it'll
itll
its
itself
i've
ive
j
just
k
keep
keeps
kept
keys
kg
km
know
known
knows
l
largely
last
lately
later
latter
latterly
least
less
lest
let
lets
like
liked
likely
line
little
'll
'll
look
looking
looks
ltd
m
made
mainly
make
makes
many
may
maybe
me
mean
means
meantime
meanwhile
merely
mg
might
million
miss
ml
more
moreover
most
mostly
mr
mrs
much
mug
must
my
myself
n
na
name
namely
nay
nd
near
nearly
necessarily
necessary
need
needs
neither
never
nevertheless
new
next
nine
ninety
no
nobody
non
none
nonetheless
noone
nor
normally
nos
not
noted
nothing
now
nowhere
o
obtain
obtained
obviously
of
off
often
oh
ok
okay
old
omitted
on
once
one
ones
only
onto
or
ord
other
others
otherwise
ought
our
ours
ourselves
out
outside
over
overall
owing
own
p
page
pages
part
particular
particularly
past
per
perhaps
placed
please
plus
poorly
possible
possibly
potentially
pp
predominantly
present
previously
primarily
probably
promptly
proud
provides
put
q
que
quickly
quite
qv
r
ran
rather
rd
re
readily
really
recent
recently
ref
refs
regarding
regardless
regards
related
relatively
research
respectively
resulted
resulting
results
right
run
s
said
same
saw
say
saying
says
sec
section
see
seeing
seem
seemed
seeming
seems
seen
self
selves
sent
seven
several
shall
she
shed
she'll
shell
shes
should
shouldn't
shouldnt
show
showed
shown
showns
shows
significant
significantly
similar
similarly
since
six
slightly
so
some
somebody
somehow
someone
somethan
something
sometime
sometimes
somewhat
somewhere
soon
sorry
specifically
specified
specify
specifying
state
states
still
stop
strongly
sub
substantially
successfully
such
sufficiently
suggest
sup
sure
t
take
taken
taking
tell
tends
th
than
thank
thanks
thanx
that
that'll
thatll
thats
that've
thatve
the
their
theirs
them
themselves
then
thence
there
thereafter
thereby
thered
therefore
therein
there'll
therell
thereof
therere
theres
thereto
thereupon
there've
thereve
these
they
theyd
they'll
theyll
theyre
they've
theyve
think
this
those
thou
though
thoughh
thousand
throug
through
throughout
thru
thus
til
tip
to
together
too
took
toward
towards
tried
tries
truly
try
trying
ts
twice
two
u
un
under
unfortunately
unless
unlike
unlikely
until
unto
up
upon
ups
us
use
used
useful
usefully
usefulness
uses
using
usually
v
value
various
've
've
very
via
viz
vol
vols
vs
w
want
wants
was
wasn't
wasnt
way
we
wed
welcome
we'll
well
went
were
weren't
werent
we've
weve
what
whatever
what'll
whatll
whats
when
whence
whenever
where
whereafter
whereas
whereby
wherein
wheres
whereupon
wherever
whether
which
while
whim
whither
who
whod
whoever
whole
who'll
wholl
whom
whomever
whos
whose
why
widely
will
willing
wish
with
within
without
won't
wont
words
would
wouldn't
wouldnt
www
x
y
yes
yet
you
youd
you'll
youll
your
youre
yours
yourself
yourselves
you've
youve
z
zero


@@ -0,0 +1,116 @@
/**
 * @license
 * Copyright Google Inc. All Rights Reserved.
 *
 * Use of this source code is governed by an MIT-style license that can be
 * found in the LICENSE file at https://angular.io/license
 */
const path = require('path');
const Package = require('dgeni').Package;

const jsdocPackage = require('dgeni-packages/jsdoc');
const nunjucksPackage = require('dgeni-packages/nunjucks');
const linksPackage = require('../links-package');
const examplesPackage = require('../examples-package');
const targetPackage = require('../target-package');
const remarkPackage = require('../remark-package');

const { PROJECT_ROOT, DOCS_OUTPUT_PATH, TEMPLATES_PATH, requireFolder } = require('../config');

module.exports = new Package('angular-base', [
  jsdocPackage, nunjucksPackage, linksPackage, examplesPackage, targetPackage, remarkPackage
])

  // Register the processors
  .processor(require('./processors/generateKeywords'))
  .processor(require('./processors/createOverviewDump'))
  .processor(require('./processors/checkUnbalancedBackTicks'))
  .processor(require('./processors/convertToJson'))
  .processor(require('./processors/fixInternalDocumentLinks'))

  // overrides base packageInfo and returns the one for the 'angular/angular' repo.
  .factory('packageInfo', function() { return require(path.resolve(PROJECT_ROOT, 'package.json')); })
  .factory(require('./readers/json'))

  .config(function(checkAnchorLinksProcessor) {
    // TODO: re-enable
    checkAnchorLinksProcessor.$enabled = false;
  })

  // Where do we get the source files?
  .config(function(readFilesProcessor, collectExamples, generateKeywordsProcessor, jsonFileReader) {
    readFilesProcessor.fileReaders.push(jsonFileReader);
    readFilesProcessor.basePath = PROJECT_ROOT;
    readFilesProcessor.sourceFiles = [];
    collectExamples.exampleFolders = [];
    generateKeywordsProcessor.ignoreWordsFile = path.resolve(__dirname, 'ignore.words');
    generateKeywordsProcessor.docTypesToIgnore = ['example-region'];
  })

  // Where do we write the output files?
  .config(function(writeFilesProcessor) { writeFilesProcessor.outputFolder = DOCS_OUTPUT_PATH; })

  // Target environments
  .config(function(targetEnvironments) {
    const ALLOWED_LANGUAGES = ['ts', 'js', 'dart'];
    const TARGET_LANGUAGE = 'ts';

    ALLOWED_LANGUAGES.forEach(target => targetEnvironments.addAllowed(target));
    targetEnvironments.activate(TARGET_LANGUAGE);
  })

  // Configure nunjucks rendering of docs via templates
  .config(function(
      renderDocsProcessor, templateFinder, templateEngine, getInjectables) {
    // Where to find the templates for the doc rendering
    templateFinder.templateFolders = [TEMPLATES_PATH];

    // Standard patterns for matching docs to templates
    templateFinder.templatePatterns = [
      '${ doc.template }', '${ doc.id }.${ doc.docType }.template.html',
      '${ doc.id }.template.html', '${ doc.docType }.template.html',
      '${ doc.id }.${ doc.docType }.template.js', '${ doc.id }.template.js',
      '${ doc.docType }.template.js', '${ doc.id }.${ doc.docType }.template.json',
      '${ doc.id }.template.json', '${ doc.docType }.template.json', 'common.template.html'
    ];

    // Nunjucks and Angular conflict in their template bindings so change Nunjucks
    templateEngine.config.tags = {variableStart: '{$', variableEnd: '$}'};

    templateEngine.filters =
        templateEngine.filters.concat(getInjectables(requireFolder(__dirname, './rendering')));

    // helpers are made available to the nunjucks templates
    renderDocsProcessor.helpers.relativePath = function(from, to) {
      return path.relative(from, to);
    };
  })

  // We are not going to be relaxed about ambiguous links
  .config(function(getLinkInfo) {
    getLinkInfo.useFirstAmbiguousLink = false;
  })

  .config(function(computePathsProcessor, generateKeywordsProcessor) {
    generateKeywordsProcessor.outputFolder = 'app';

    // Replace any path templates inherited from other packages
    // (we want full and transparent control)
    computePathsProcessor.pathTemplates = [
      {docTypes: ['example-region'], getOutputPath: function() {}},
    ];
  })

  .config(function(convertToJsonProcessor) {
    convertToJsonProcessor.docTypes = [];
  });
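
Note that the base package deliberately leaves `readFilesProcessor.sourceFiles`, `collectExamples.exampleFolders` and `convertToJsonProcessor.docTypes` empty; a package that extends it is expected to fill these in. A minimal sketch of running such a package through Dgeni (standard Dgeni API; the require path is an assumption):

const Dgeni = require('dgeni');
const basePackage = require('./angular-base-package');  // assumed relative path

new Dgeni([basePackage])
    .generate()
    .then(docs => console.log('Generated ' + docs.length + ' docs'))
    .catch(err => {
      console.error(err);
      process.exit(1);
    });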


@@ -0,0 +1,33 @@
var _ = require('lodash');

/**
 * @dgProcessor checkUnbalancedBackTicks
 * @description
 * Searches the rendered content for an odd number of (```) backticks,
 * which would indicate an unbalanced pair and potentially a typo in the
 * source content.
 */
module.exports = function checkUnbalancedBackTicks(log, createDocMessage) {
  var BACKTICK_REGEX = /^ *```/gm;

  return {
    // $runAfter: ['checkAnchorLinksProcessor'],
    $runAfter: ['inlineTagProcessor'],
    $runBefore: ['writeFilesProcessor'],
    $process: function(docs) {
      _.forEach(docs, function(doc) {
        if (doc.renderedContent) {
          var matches = doc.renderedContent.match(BACKTICK_REGEX);
          if (matches && matches.length % 2 !== 0) {
            doc.unbalancedBackTicks = true;
            log.warn(createDocMessage(
                'checkUnbalancedBackTicks processor: unbalanced backticks found in rendered content',
                doc));
            log.warn(doc.renderedContent);
          }
        }
      });
    }
  };
};


@@ -0,0 +1,29 @@
var testPackage = require('../../helpers/test-package');
var Dgeni = require('dgeni');

describe('checkUnbalancedBackTicks', function() {
  var dgeni, injector, processor, log;

  beforeEach(function() {
    dgeni = new Dgeni([testPackage('angular-base-package')]);
    injector = dgeni.configureInjector();
    processor = injector.get('checkUnbalancedBackTicks');
    log = injector.get('log');
  });

  it('should warn if there are an odd number of back ticks in the rendered content', function() {
    var docs = [{
      renderedContent: '```\n' +
          'code block\n' +
          '```\n' +
          '```\n' +
          'code block with missing closing back ticks\n'
    }];
    processor.$process(docs);
    expect(log.warn).toHaveBeenCalledWith(
        'checkUnbalancedBackTicks processor: unbalanced backticks found in rendered content - doc');
    expect(docs[0].unbalancedBackTicks).toBe(true);
  });
});


@@ -0,0 +1,40 @@
module.exports = function convertToJsonProcessor(log, createDocMessage) {
  return {
    $runAfter: ['checkUnbalancedBackTicks'],
    $runBefore: ['writeFilesProcessor'],
    docTypes: [],
    $process: function(docs) {
      const docTypes = this.docTypes;
      docs.forEach((doc) => {
        if (docTypes.indexOf(doc.docType) !== -1) {
          let contents = doc.renderedContent || '';

          let title = doc.title;

          // We do allow an empty `title` but resort to `name` if it is not even defined
          if (title === undefined) {
            title = doc.name;
          }

          // If there is no title then try to extract it from the first h1 in the renderedContent
          if (title === undefined) {
            const match = /<h1[^>]*>(.+?)<\/h1>/.exec(contents);
            if (match) {
              title = match[1];
            }
          }

          // If there is still no title then log a warning
          if (title === undefined) {
            title = '';
            log.warn(createDocMessage('Title property expected', doc));
          }

          doc.renderedContent = JSON.stringify({ id: doc.path, title, contents }, null, 2);
        }
      });
    }
  };
};


@@ -0,0 +1,71 @@
var testPackage = require('../../helpers/test-package');
var Dgeni = require('dgeni');

describe('convertToJson processor', () => {
  var dgeni, injector, processor, log;

  beforeAll(function() {
    dgeni = new Dgeni([testPackage('angular-base-package')]);
    injector = dgeni.configureInjector();
    processor = injector.get('convertToJsonProcessor');
    log = injector.get('log');
    processor.docTypes = ['test-doc'];
  });

  it('should be part of the dgeni package', () => {
    expect(processor).toBeDefined();
  });

  it('should convert the renderedContent to JSON', () => {
    const docs = [{
      docType: 'test-doc',
      title: 'The Title',
      name: 'The Name',
      path: 'test/doc',
      renderedContent: 'Some Content'
    }];
    processor.$process(docs);
    expect(JSON.parse(docs[0].renderedContent).id).toEqual('test/doc');
    expect(JSON.parse(docs[0].renderedContent).title).toEqual('The Title');
    expect(JSON.parse(docs[0].renderedContent).contents).toEqual('Some Content');
  });

  it('should get the title from name if no title is specified', () => {
    const docs = [{ docType: 'test-doc', name: 'The Name' }];
    processor.$process(docs);
    expect(JSON.parse(docs[0].renderedContent).title).toEqual('The Name');
  });

  it('should accept an empty title', () => {
    const docs = [{ docType: 'test-doc', title: '' }];
    processor.$process(docs);
    expect(JSON.parse(docs[0].renderedContent).title).toEqual('');
    expect(log.warn).not.toHaveBeenCalled();
  });

  it('should accept an empty name if title is not provided', () => {
    const docs = [{ docType: 'test-doc', name: '' }];
    processor.$process(docs);
    expect(JSON.parse(docs[0].renderedContent).title).toEqual('');
    expect(log.warn).not.toHaveBeenCalled();
  });

  it('should get the title from the first `h1` if no title or name is specified', () => {
    const docs = [{
      docType: 'test-doc',
      renderedContent: '<div><h1 class="title">Some title</h1><article><h1>Article 1</h1></article></div>'
    }];
    processor.$process(docs);
    expect(JSON.parse(docs[0].renderedContent).contents)
        .toEqual('<div><h1 class="title">Some title</h1><article><h1>Article 1</h1></article></div>');
    expect(JSON.parse(docs[0].renderedContent).title).toEqual('Some title');
  });

  it('should set missing titles to empty', () => {
    const docs = [{ docType: 'test-doc' }];
    processor.$process(docs);
    expect(JSON.parse(docs[0].renderedContent).title).toBe('');
  });

  it('should log a warning if the title is missing', () => {
    const docs = [{ docType: 'test-doc' }];
    processor.$process(docs);
    expect(log.warn).toHaveBeenCalled();
  });
});


@@ -0,0 +1,24 @@
var _ = require('lodash');

module.exports = function createOverviewDump() {
  return {
    $runAfter: ['processing-docs'],
    $runBefore: ['docs-processed'],
    $process: function(docs) {
      var overviewDoc = {
        id: 'overview-dump',
        aliases: ['overview-dump'],
        path: 'overview-dump',
        outputPath: 'overview-dump.html',
        modules: []
      };
      _.forEach(docs, function(doc) {
        if (doc.docType === 'module') {
          overviewDoc.modules.push(doc);
        }
      });
      docs.push(overviewDoc);
    }
  };
};
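
There is no spec for this processor in the commit; the following usage sketch (with made-up docs) follows directly from the code above:

const createOverviewDump = require('./createOverviewDump');  // assumed relative path
const processor = createOverviewDump();

const docs = [
  { docType: 'module', id: 'core' },       // hypothetical docs
  { docType: 'class', id: 'SomeClass' }
];
processor.$process(docs);

// docs now contains a third entry: the 'overview-dump' doc, whose `modules`
// array holds only the 'module' docs (here, just the 'core' doc).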


@@ -0,0 +1,24 @@
/**
 * @dgProcessor fixInternalDocumentLinks
 * @description
 * Add in the document path to links that start with a hash.
 * This is important when the web app has a base href in place,
 * since links like: `<a href="#some-id">` would get mapped to
 * the URL `base/#some-id` even if the current location is `base/some/doc`.
 */
module.exports = function fixInternalDocumentLinks() {
  var INTERNAL_LINK = /(<a [^>]*href=")(#[^"]*)/g;

  return {
    $runAfter: ['inlineTagProcessor'],
    $runBefore: ['convertToJsonProcessor'],
    $process: function(docs) {
      docs.forEach(doc => {
        doc.renderedContent = doc.renderedContent.replace(INTERNAL_LINK, (_, pre, hash) => {
          return pre + doc.path + hash;
        });
      });
    }
  };
};


@@ -0,0 +1,52 @@
const testPackage = require('../../helpers/test-package');
const processorFactory = require('./fixInternalDocumentLinks');
const Dgeni = require('dgeni');

describe('fixInternalDocumentLinks processor', () => {
  it('should be available on the injector', () => {
    const dgeni = new Dgeni([testPackage('angular-base-package')]);
    const injector = dgeni.configureInjector();
    const processor = injector.get('fixInternalDocumentLinks');
    expect(processor.$process).toBeDefined();
  });

  it('should run before the correct processor', () => {
    const processor = processorFactory();
    expect(processor.$runBefore).toEqual(['convertToJsonProcessor']);
  });

  it('should run after the correct processor', () => {
    const processor = processorFactory();
    expect(processor.$runAfter).toEqual(['inlineTagProcessor']);
  });

  it('should prefix internal hash links with the current doc path', () => {
    const processor = processorFactory();
    const docs = [
      {
        path: 'some/doc',
        renderedContent: `
          <a href="http://google.com#q=angular">Google</a>
          <a href="some/relative/path#some-id">Some Id</a>
          <a href="#some-internal-id">Link to heading</a>
          <a class="important" href="#some-internal-id">Link to heading</a>
          <a href="#some-internal-id" target="_blank">Link to heading</a>
        `
      },
    ];
    processor.$process(docs);
    expect(docs).toEqual([
      {
        path: 'some/doc',
        renderedContent: `
          <a href="http://google.com#q=angular">Google</a>
          <a href="some/relative/path#some-id">Some Id</a>
          <a href="some/doc#some-internal-id">Link to heading</a>
          <a class="important" href="some/doc#some-internal-id">Link to heading</a>
          <a href="some/doc#some-internal-id" target="_blank">Link to heading</a>
        `
      },
    ]);
  });
});


@@ -0,0 +1,142 @@
'use strict';

var fs = require('fs');
var path = require('canonical-path');

/**
 * @dgProcessor generateKeywordsProcessor
 * @description
 * This processor extracts all the keywords from each document and creates
 * a new document that will be rendered as a JavaScript file containing all
 * this data.
 */
module.exports = function generateKeywordsProcessor(log, readFilesProcessor) {
  return {
    ignoreWordsFile: undefined,
    propertiesToIgnore: [],
    docTypesToIgnore: [],
    outputFolder: '',
    $validate: {
      ignoreWordsFile: {},
      docTypesToIgnore: {},
      propertiesToIgnore: {},
      outputFolder: {presence: true}
    },
    $runAfter: ['paths-computed'],
    $runBefore: ['rendering-docs'],
    $process: function(docs) {

      // Keywords to ignore
      var wordsToIgnore = [];
      var propertiesToIgnore;
      var docTypesToIgnore;

      // Keywords start with "ng:" or one of $, _ or a letter
      var KEYWORD_REGEX = /^((ng:|[$_a-z])[\w\-_]+)/;

      // Load up the keywords to ignore, if specified in the config
      if (this.ignoreWordsFile) {
        var ignoreWordsPath = path.resolve(readFilesProcessor.basePath, this.ignoreWordsFile);
        wordsToIgnore = fs.readFileSync(ignoreWordsPath, 'utf8').toString().split(/[,\s\n\r]+/gm);

        log.debug('Loaded ignore words from "' + ignoreWordsPath + '"');
        log.silly(wordsToIgnore);
      }

      propertiesToIgnore = convertToMap(this.propertiesToIgnore);
      log.debug('Properties to ignore', propertiesToIgnore);

      docTypesToIgnore = convertToMap(this.docTypesToIgnore);
      log.debug('Doc types to ignore', docTypesToIgnore);

      var ignoreWordsMap = convertToMap(wordsToIgnore);

      // If the title contains a name starting with ng, e.g. "ngController", then add the
      // module name without the ng to the title text, e.g. "controller".
      function extractTitleWords(title) {
        var match = /ng([A-Z]\w*)/.exec(title);
        if (match) {
          title = title + ' ' + match[1].toLowerCase();
        }
        return title;
      }

      function extractWords(text, words, keywordMap) {
        var tokens = text.toLowerCase().split(/[.\s,`'"#]+/mg);
        tokens.forEach(function(token) {
          var match = token.match(KEYWORD_REGEX);
          if (match) {
            var key = match[1];
            if (!keywordMap[key]) {
              keywordMap[key] = true;
              words.push(key);
            }
          }
        });
      }

      const filteredDocs = docs
          // We are not interested in some docTypes
          .filter(function(doc) { return !docTypesToIgnore[doc.docType]; })
          // Ignore internals and private exports (indicated by the ɵ prefix)
          .filter(function(doc) { return !doc.internal && !doc.privateExport; });

      filteredDocs.forEach(function(doc) {

        var words = [];
        var keywordMap = Object.assign({}, ignoreWordsMap);
        var members = [];
        var membersMap = {};

        // Search each top level property of the document for search terms
        Object.keys(doc).forEach(function(key) {
          const value = doc[key];

          if (isString(value) && !propertiesToIgnore[key]) {
            extractWords(value, words, keywordMap);
          }

          if (key === 'methods' || key === 'properties' || key === 'events') {
            value.forEach(function(member) { extractWords(member.name, members, membersMap); });
          }
        });

        doc.searchTerms = {
          titleWords: extractTitleWords(doc.title || doc.name),
          keywords: words.sort().join(' '),
          members: members.sort().join(' ')
        };
      });

      var searchData =
          filteredDocs.filter(function(page) { return page.searchTerms; }).map(function(page) {
            return Object.assign(
                {path: page.path, title: page.name || page.title, type: page.docType}, page.searchTerms);
          });

      docs.push({
        docType: 'json-doc',
        id: 'search-data-json',
        template: 'json-doc.template.json',
        path: this.outputFolder + '/search-data.json',
        outputPath: this.outputFolder + '/search-data.json',
        data: searchData
      });
    }
  };
};

function isString(value) {
  return typeof value == 'string';
}

function convertToMap(collection) {
  const obj = {};
  collection.forEach(key => { obj[key] = true; });
  return obj;
}


@@ -0,0 +1,41 @@
const testPackage = require('../../helpers/test-package');
const mockLogger = require('dgeni/lib/mocks/log')(false);
const processorFactory = require('./generateKeywords');
const Dgeni = require('dgeni');

const mockReadFilesProcessor = {
  basePath: 'base/path'
};

describe('generateKeywords processor', () => {
  it('should be available on the injector', () => {
    const dgeni = new Dgeni([testPackage('angular-base-package')]);
    const injector = dgeni.configureInjector();
    const processor = injector.get('generateKeywordsProcessor');
    expect(processor.$process).toBeDefined();
  });

  it('should run after the correct processor', () => {
    const processor = processorFactory(mockLogger, mockReadFilesProcessor);
    expect(processor.$runAfter).toEqual(['paths-computed']);
  });

  it('should run before the correct processor', () => {
    const processor = processorFactory(mockLogger, mockReadFilesProcessor);
    expect(processor.$runBefore).toEqual(['rendering-docs']);
  });

  it('should ignore internal and private exports', () => {
    const processor = processorFactory(mockLogger, mockReadFilesProcessor);
    const docs = [
      { docType: 'class', name: 'PublicExport' },
      { docType: 'class', name: 'PrivateExport', privateExport: true },
      { docType: 'class', name: 'InternalExport', internal: true }
    ];
    processor.$process(docs);
    expect(docs[docs.length - 1].data).toEqual([
      jasmine.objectContaining({ title: 'PublicExport', type: 'class'})
    ]);
  });
});


@@ -0,0 +1,19 @@
/**
 * Read in JSON files
 */
module.exports = function jsonFileReader() {
  return {
    name: 'jsonFileReader',
    getDocs: function(fileInfo) {
      // We return a single element array because content files only contain one document
      return [{
        docType: fileInfo.baseName + '-json',
        data: JSON.parse(fileInfo.content),
        template: 'json-doc.template.json',
        id: fileInfo.baseName,
        aliases: [fileInfo.baseName, fileInfo.relativePath]
      }];
    }
  };
};
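
A sketch of what this reader produces for a hypothetical JSON content file (the fileInfo values are made up; the shape follows directly from `getDocs` above):

const jsonFileReader = require('./json')();  // assumed relative path

const docs = jsonFileReader.getDocs({
  baseName: 'navigation',                    // hypothetical file info
  relativePath: 'content/navigation.json',
  content: '{"SideNav": []}'
});

// docs[0] is:
// {
//   docType: 'navigation-json',
//   data: { SideNav: [] },
//   template: 'json-doc.template.json',
//   id: 'navigation',
//   aliases: ['navigation', 'content/navigation.json']
// }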


@@ -0,0 +1,62 @@
module.exports = function() {
  // var MIXIN_PATTERN = /\S*\+\S*\(.*/;
  return {
    name: 'indentForMarkdown',
    process: function(str, width) {
      if (str == null || str.length === 0) {
        return '';
      }
      width = width || 4;
      var lines = str.split('\n');
      var newLines = [];
      var sp = spaces(width);
      var spMixin = spaces(width - 2);
      var isAfterMarkdownTag = true;
      lines.forEach(function(line) {
        // indent {@example ...} lines by 2 less than the specified width
        if (line.indexOf('{@example') >= 0) {
          if (isAfterMarkdownTag) {
            // happens if example follows example
            if (newLines.length > 0) {
              newLines.pop();
            } else {
              // weird case - the first expression in str is an {@example}
              // in this case the :marked appears above the str passed in,
              // so we need to put 'something' into the markdown tag.
              newLines.push(sp + '.'); // '.' is a dummy char
            }
          }
          newLines.push(spMixin + line);
          // after a mixin line we need to re-enter markdown.
          newLines.push(spMixin + ':marked');
          isAfterMarkdownTag = true;
        } else {
          if ((!isAfterMarkdownTag) || (line.trim().length > 0)) {
            newLines.push(sp + line);
            isAfterMarkdownTag = false;
          }
        }
      });
      if (isAfterMarkdownTag) {
        if (newLines.length > 0) {
          // if the last line is a markdown tag, remove it.
          newLines.pop();
        }
      }
      // ensure the result ends with a newline.
      if (newLines.length > 0) newLines.push('');
      var res = newLines.join('\n');
      return res;
    }
  };

  function spaces(n) {
    var str = '';
    for (var i = 0; i < n; i++) {
      str += ' ';
    }
    return str;
  }
};
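
There is no spec for this filter in the commit; the examples below (inputs made up, outputs traced from the code above) show the two interesting cases: plain indentation, and re-entering `:marked` after an `{@example}` line.

const indentForMarkdown = require('./indentForMarkdown')();  // assumed relative path

indentForMarkdown.process('first line\nsecond line', 2);
// => '  first line\n  second line\n'

indentForMarkdown.process('some text\n{@example foo}\nmore text', 4);
// => '    some text\n  {@example foo}\n  :marked\n    more text\n'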


@@ -0,0 +1,6 @@
module.exports = function toId() {
  return {
    name: 'toId',
    process: function(str) { return str.replace(/[^(a-z)(A-Z)(0-9)._-]/g, '-'); }
  };
};


@@ -0,0 +1,14 @@
var factory = require('./toId');

describe('toId filter', function() {
  var filter;
  beforeEach(function() { filter = factory(); });
  it('should be called "toId"', function() { expect(filter.name).toEqual('toId'); });
  it('should convert a string to make it appropriate for use as an HTML id', function() {
    expect(filter.process('This is a big string with €bad#characters¢\nAnd even NewLines'))
        .toEqual('This-is-a-big-string-with--bad-characters--And-even-NewLines');
  });
});


@@ -0,0 +1,15 @@
module.exports = function() {
  return {
    name: 'trimBlankLines',
    process: function(str) {
      var lines = str.split(/\r?\n/);
      while (lines.length && (lines[0].trim() === '')) {
        lines.shift();
      }
      while (lines.length && (lines[lines.length - 1].trim() === '')) {
        lines.pop();
      }
      return lines.join('\n');
    }
  };
};


@@ -0,0 +1,15 @@
var factory = require('./trimBlankLines');

describe('trimBlankLines filter', function() {
  var filter;
  beforeEach(function() { filter = factory(); });

  it('should be called "trimBlankLines"',
     function() { expect(filter.name).toEqual('trimBlankLines'); });

  it('should remove empty lines from the start and end of the string', function() {
    expect(filter.process('\n \n\nsome text\n \nmore text\n \n'))
        .toEqual('some text\n \nmore text');
  });
});