@@ -140,19 +140,12 @@ function compileGrammar(grammarFile, additionalGrammars) {
140140}
141141
142142
143- function testFile ( file , grammar , options ) {
144- function padRight ( str , pad ) {
145- if ( str . length < pad ) {
146- return str + ( new Array ( pad - str . length ) ) . join ( ' ' ) ;
147- }
148-
149- return str ;
150- }
// Strip leading and trailing runs of newlines from `str`, leaving any
// interior blank lines untouched. Used to normalize the source/test halves
// of a grammar test file before tokenizing/comparing.
function stripnl(str) {
    return str.replace(/^\n+|\n+$/g, '');
}
151146
152- function stripnl ( str ) {
153- return str . replace ( / ^ \n + / , '' ) . replace ( / \n + $ / , '' ) ;
154- }
155147
148+ function readTestFile ( file ) {
156149 function rpartition ( str , separator ) {
157150 if ( ! separator ) {
158151 throw new Error ( 'empty separator' )
@@ -171,8 +164,50 @@ function testFile(file, grammar, options) {
171164 }
172165 }
173166
167+ var buf = fs . readFileSync ( file , 'utf8' ) ,
168+ parts = rpartition ( buf , '\n\n\n' ) ,
169+ source = parts [ 0 ] ,
170+ test = parts [ 2 ] ;
171+
172+ if ( test ) {
173+ test = stripnl ( test ) ;
174+ }
175+
176+ if ( source ) {
177+ source = stripnl ( source ) ;
178+ }
179+
180+ if ( ! test ) {
181+ test = null ;
182+ }
183+
184+ if ( ! source ) {
185+ source = null ;
186+ }
187+
188+ if ( ! source && test ) {
189+ source = test ;
190+ test = null ;
191+ }
192+
193+ return {
194+ source : source ,
195+ test : test
196+ }
197+ }
198+
199+
200+ function testFile ( file , grammar , options ) {
201+ function padRight ( str , pad ) {
202+ if ( str . length < pad ) {
203+ return str + ( new Array ( pad - str . length ) ) . join ( ' ' ) ;
204+ }
205+
206+ return str ;
207+ }
208+
174209 function tokenize ( lines ) {
175- var lines = grammar . tokenizeLines ( stripnl ( lines ) ) ,
210+ var lines = grammar . tokenizeLines ( lines ) ,
176211 result = [ ] ;
177212
178213 result = [ ]
@@ -235,23 +270,9 @@ function testFile(file, grammar, options) {
235270 return result . join ( '\n' ) ;
236271 }
237272
238- var buf = fs . readFileSync ( file , 'utf8' ) ,
239- parts = rpartition ( buf , '\n\n\n' ) ,
240- source = parts [ 0 ] ,
241- test = parts [ 2 ] ;
242-
243- if ( test ) {
244- test = stripnl ( test ) ;
245- }
246-
247- if ( ! test ) {
248- test = null ;
249- }
250-
251- if ( ! source && test ) {
252- source = test ;
253- test = null ;
254- }
273+ var testFile = readTestFile ( file ) ,
274+ source = testFile . source ,
275+ test = testFile . test ;
255276
256277 if ( ! source ) {
257278 return {
@@ -336,6 +357,82 @@ function test(testFiles, grammarFile, options) {
336357}
337358
338359
// Generate a Jasmine/Atom spec file that replays every grammar test file as a
// set of token-by-token `expect(...)` assertions, and write it to
// `options.out`. Files lacking either a source or a test half are skipped.
function generateAtomSpec(testFiles, grammarFile, options) {
    var grammar = compileGrammar(grammarFile, options.add_syntaxes);
    var specLines = [];

    console.log(grammar.scopeName);

    for (var fileIdx = 0; fileIdx < testFiles.length; fileIdx++) {
        var fileName = testFiles[fileIdx];
        var parsed = readTestFile(fileName);

        if (!parsed.test || !parsed.source) {
            continue;
        }

        // Tokenize the source now so the generated spec can assert against
        // the exact values/scopes the current grammar produces.
        var tokenizedLines = grammar.tokenizeLines(parsed.source);
        var assertions = [];

        assertions.push(
            'tokens = grammar.tokenizeLines(' +
            JSON.stringify(parsed.source) +
            ')')

        for (var i = 0; i < tokenizedLines.length; i++) {
            var tokens = tokenizedLines[i];

            for (var j = 0; j < tokens.length; j++) {
                assertions.push(
                    'expect(tokens[' + i + '][' + j + '].value)' +
                    '.toBe(' + JSON.stringify(tokens[j].value) + ');')

                assertions.push(
                    'expect(tokens[' + i + '][' + j + '].scopes)' +
                    '.toEqual(' + JSON.stringify(tokens[j].scopes) + ');')
            }
        }

        specLines.push({
            file: fileName,
            lines: assertions
        })
    }

    // Boilerplate spec header: activate the package, then look the grammar up
    // by scope name inside beforeEach.
    var out = [
        '// !!! autogenerated; do not edit !!!\n\n\n',
        'describe("Grammar Tests", function() {',
        '  var grammar = null;',
        '  beforeEach(function() {',
        '    waitsForPromise(function() {',
        '      return atom.packages.activatePackage(' +
            JSON.stringify(options.packageName) + ')',
        '    });',
        '    runs(function() {',
        '      grammar = atom.grammars.grammarForScopeName(' +
            JSON.stringify(grammar.scopeName) + ')',
        '    });',
        '  });',
        ''
    ];

    // One `it(...)` block per test file.
    for (i = 0; i < specLines.length; i++) {
        out.push('  it(' + JSON.stringify(specLines[i].file) + ', ');
        out.push('  function() {')
        out.push('    ' + specLines[i].lines.join('\n    '));
        out.push('  });');
    }

    out.push('});\n');

    fs.writeFileSync(options.out, out.join('\n') + '\n');
}
434+
435+
339436function buildCson ( inName , outName ) {
340437 var yamlSchema = readGrammarFile ( inName , false ) ,
341438 csonSource = cson . createCSONString ( yamlSchema , { indent : 2 } ) ;
@@ -401,6 +498,7 @@ function listScopes(grammarFile) {
401498
402499module . exports = {
403500 test : test ,
501+ generateAtomSpec : generateAtomSpec ,
404502 buildCson : buildCson ,
405503 buildPList : buildPList ,
406504 listScopes : listScopes
0 commit comments