moving print_tokens into the actual coffee-script module, so we can use it separately from the command-line

Jeremy Ashkenas
2010-02-15 18:09:01 -05:00
parent e7291f57ba
commit 9c3040b704
4 changed files with 23 additions and 18 deletions


@@ -1,7 +1,7 @@
 # Set up for both the browser and the server.
 if process?
   process.mixin require './nodes'
-  path: require('path')
+  path: require 'path'
   lexer: new (require('./lexer').Lexer)()
   parser: require('./parser').parser
 else
@@ -42,6 +42,12 @@ exports.tokenize: (code) ->
 exports.tree: (code) ->
   parser.parse lexer.tokenize code

+# Pretty-print a token stream.
+exports.print_tokens: (tokens) ->
+  strings: for token in tokens
+    '[' + token[0] + ' ' + token[1].toString().replace(/\n/, '\\n') + ']'
+  strings.join(' ')
+
 #---------- Below this line is obsolete, for the Ruby compiler. ----------------
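
With exports.print_tokens now living in the module itself, any script that requires coffee-script can pretty-print a token stream without going through the command-line tool. A minimal sketch of such a caller, written in the same early syntax and not part of this commit (the require paths and the sys.puts output helper are assumptions):

# Hypothetical caller script, separate from the 'coffee' command-line tool.
sys:    require 'sys'
coffee: require './coffee-script'
# Tokenize some source, then render the stream as '[tag value]' pairs.
sys.puts coffee.print_tokens coffee.tokenize "cube: (x) -> x * x * x"

The command-line change below does exactly this, dropping its private tokenize helper in favor of the exported function.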


@@ -68,7 +68,7 @@ compile_scripts: ->
 return unless source: sources.shift()
 opts: options
 posix.cat(source).addCallback (code) ->
-  if opts.tokens then puts tokenize(code)
+  if opts.tokens then puts coffee.print_tokens coffee.tokenize code
   else if opts.tree then puts coffee.tree(code).toString()
   else
     js: coffee.compile code
@@ -86,12 +86,6 @@ write_js: (source, js) ->
 posix.open(js_path, process.O_CREAT | process.O_WRONLY | process.O_TRUNC, parseInt('0755', 8)).addCallback (fd) ->
   posix.write(fd, js)

-# Pretty-print the token stream.
-tokenize: (code) ->
-  strings: coffee.tokenize(code).map (token) ->
-    '[' + token[0] + ' ' + token[1].toString().replace(/\n/, '\\n') + ']'
-  strings.join(' ')
-
 # Pipe compiled JS through JSLint (requires a working 'jsl' command).
 lint: (js) ->
   jsl: process.createChildProcess('jsl', ['-nologo', '-stdin'])