forked from sandywu/codepainter
-
Notifications
You must be signed in to change notification settings - Fork 2
/
codepainter.js
95 lines (73 loc) · 2.3 KB
/
codepainter.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
const fs = require('fs'),
Pipe = require('./lib/Pipe'),
rules = require('./lib/rules'),
Serializer = require('./lib/Serializer'),
Tokenizer = require('./lib/Tokenizer');
/**
* Converts the style string into an object.
*
* First tries to parse the style string as a JSON string. If that does not work,
* tries interpret the style string as the name of a predefined style and load
* the respective style file. If that does not work either, throws an error.
*
*/
function convertStyle ( style ) {
if ( ( typeof style === 'string' ) ) {
try {
style = JSON.parse(style);
} catch (e) {
try {
style = require ( __dirname + '/lib/styles/' + style + '.json' );
} catch (e) {
msg = style + ' is not a valid style.\n\nValid predefined styles are:\n';
var files = fs.readdirSync( __dirname + '/lib/styles/' );
for ( var i in files ) {
msg += ' ' + files[i].slice(0, -5) + '\n';
}
throw new Error(msg);
}
}
}
return style;
}
/**
 * Infers the coding style of a code sample.
 *
 * Pipes the sample stream through a tokenizer, lets every rule inspect the
 * token stream and report its inferred setting, and collects those settings
 * into a single style object. When the tokenizer emits 'end', the assembled
 * style object is handed to the callback.
 *
 * @param {Stream} sample - Readable stream containing sample code.
 * @param {Function} callback - Invoked with the inferred style object.
 */
module.exports.infer = function (sample, callback) {
    var inferredStyle = {};
    var tokenizer = new Tokenizer();

    sample.pipe(tokenizer);

    rules.forEach(function (rule) {
        rule.infer(tokenizer, function (value) {
            inferredStyle[rule.name] = value;
        });
    });

    tokenizer.on('end', function () {
        callback(inferredStyle);
    });

    sample.resume();
};
/**
 * Transforms an input code stream to match the given style.
 *
 * The style may be a style object, a JSON string, or the name of a
 * predefined style (resolved via convertStyle). Every rule that has a
 * non-null setting in the style is enabled; enabled rules are chained
 * between the tokenizer and the serializer through intermediate Pipe
 * streams. With no enabled rules, tokens pass straight through.
 *
 * @param {Stream} input - Readable stream of source code.
 * @param {string|Object} style - Style object, JSON string, or style name.
 * @param {Stream} output - Writable stream receiving the transformed code.
 */
module.exports.transform = function (input, style, output) {
    var enabledRules = [];
    var tokenizer = new Tokenizer();
    var serializer = new Serializer();
    var streams = [];
    var i; // single declaration; was redeclared by two `var i` loops before

    style = convertStyle(style);

    // A rule is enabled only when the style explicitly configures it
    // (null/undefined settings leave the rule disabled).
    rules.forEach(function (rule) {
        if (typeof style[rule.name] !== 'undefined' && style[rule.name] !== null)
            enabledRules.push(rule);
    });

    input.pipe(tokenizer);
    serializer.pipe(output);

    if (enabledRules.length > 0) {
        tokenizer.registerRules( enabledRules );

        // Build the chain tokenizer -> Pipe x (n-1) -> serializer so each
        // of the n rules gets its own (input, output) stream pair.
        streams.push(tokenizer);
        for (i = 0; i < enabledRules.length - 1; i++)
            streams.push(new Pipe());
        streams.push(serializer);

        for (i = 0; i < enabledRules.length; i++) {
            var rule = enabledRules[i];
            // FIXME: transform errors are silently discarded by this no-op
            // callback; consider surfacing them to the caller.
            rule.transform(streams[i], style[rule.name], streams[i + 1], function (error) { });
        }
    } else {
        // No rules enabled: pass tokens straight through.
        tokenizer.pipe(serializer);
    }

    input.resume();
};