Mirror of https://github.com/jashkenas/coffeescript.git, synced 2026-02-16 10:34:55 -05:00
added and= and or= to the language.
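For context: and= and or= are conditional-assignment shorthands for the and/or keyword operators. A minimal usage sketch (hypothetical variable names; behaviour inferred from the diff below):

    options or= {}          # roughly: options = options || {}
    user   and= user.name   # roughly: user = user && user.name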
lib/lexer.js (34 changed lines)

@@ -1,5 +1,5 @@
 (function(){
-  var ASSIGNED, CALLABLE, CODE, COFFEE_ALIASES, COFFEE_KEYWORDS, COMMENT, CONVERSIONS, HALF_ASSIGNMENTS, HEREDOC, HEREDOC_INDENT, IDENTIFIER, INTERPOLATION, JS_CLEANER, JS_FORBIDDEN, JS_KEYWORDS, LAST_DENT, LAST_DENTS, LINE_BREAK, Lexer, MULTILINER, MULTI_DENT, NEXT_CHARACTER, NOT_REGEX, NO_NEWLINE, NUMBER, OPERATOR, REGEX_END, REGEX_ESCAPE, REGEX_INTERPOLATION, REGEX_START, RESERVED, Rewriter, STRING_NEWLINES, WHITESPACE, _a, _b, _c, compact, count, helpers, include, starts;
+  var ASSIGNED, CALLABLE, CODE, COFFEE_ALIASES, COFFEE_KEYWORDS, COMMENT, CONVERSIONS, HEREDOC, HEREDOC_INDENT, IDENTIFIER, INTERPOLATION, JS_CLEANER, JS_FORBIDDEN, JS_KEYWORDS, LAST_DENT, LAST_DENTS, LINE_BREAK, Lexer, MULTILINER, MULTI_DENT, NEXT_CHARACTER, NOT_REGEX, NO_NEWLINE, NUMBER, OPERATOR, REGEX_END, REGEX_ESCAPE, REGEX_INTERPOLATION, REGEX_START, RESERVED, Rewriter, STRING_NEWLINES, WHITESPACE, _a, _b, _c, compact, count, helpers, include, starts;
   var __slice = Array.prototype.slice;
   if (typeof process !== "undefined" && process !== null) {
     _a = require('./rewriter');
@@ -105,9 +105,6 @@
       if (include(COFFEE_ALIASES, id)) {
         tag = (id = CONVERSIONS[id]);
       }
-      if (this.prev() && this.prev()[0] === 'ASSIGN' && include(HALF_ASSIGNMENTS, tag)) {
-        return this.tagHalfAssignment(tag);
-      }
     }
     this.token(tag, id);
     if (close_index) {
@@ -299,7 +296,7 @@
     return true;
   };
   Lexer.prototype.literalToken = function() {
-    var match, prevSpaced, space, tag, value;
+    var _d, match, prevSpaced, space, tag, value;
     match = this.chunk.match(OPERATOR);
     value = match && match[1];
     space = match && match[2];
@@ -307,10 +304,16 @@
       this.tagParameters();
     }
     value = value || this.chunk.substr(0, 1);
+    this.i += value.length;
     prevSpaced = this.prev() && this.prev().spaced;
     tag = value;
-    if (value === '=' && include(JS_FORBIDDEN, this.value)) {
-      this.assignmentError();
+    if (value === '=') {
+      if (include(JS_FORBIDDEN, this.value())) {
+        this.assignmentError();
+      }
+      if (('or' === (_d = this.value()) || 'and' === _d)) {
+        return this.tag(1, CONVERSIONS[this.value()] + '=');
+      }
     }
     if (value === ';') {
       tag = 'TERMINATOR';
@@ -329,10 +332,6 @@
         }
       }
     }
-    this.i += value.length;
-    if (space && prevSpaced && this.prev()[0] === 'ASSIGN' && include(HALF_ASSIGNMENTS, tag)) {
-      return this.tagHalfAssignment(tag);
-    }
     this.token(tag, value);
     return true;
   };
@@ -371,15 +370,6 @@
     }
     return doc.replace(MULTILINER, "\\n").replace(new RegExp(options.quote, 'g'), ("\\" + options.quote));
   };
-  Lexer.prototype.tagHalfAssignment = function(tag) {
-    var last;
-    if (tag === 'OP?') {
-      tag = '?';
-    }
-    last = this.tokens.pop();
-    this.tokens.push([("" + tag + "="), ("" + tag + "="), last[2]]);
-    return true;
-  };
   Lexer.prototype.tagParameters = function() {
     var _d, i, tok;
     if (this.tag() !== ')') {
@@ -616,12 +606,12 @@
   NOT_REGEX = ['NUMBER', 'REGEX', '++', '--', 'FALSE', 'NULL', 'TRUE', ']'];
   CALLABLE = ['IDENTIFIER', 'SUPER', ')', ']', '}', 'STRING', '@', 'THIS', '?', '::'];
   LINE_BREAK = ['INDENT', 'OUTDENT', 'TERMINATOR'];
-  HALF_ASSIGNMENTS = ['-', '+', '/', '*', '%', '||', '&&', '?', 'OP?'];
   CONVERSIONS = {
     'and': '&&',
     'or': '||',
     'is': '==',
     'isnt': '!=',
-    'not': '!'
+    'not': '!',
+    '===': '=='
   };
 })();

@@ -30,7 +30,7 @@ lexer = new Lexer
 # Compile a string of CoffeeScript code to JavaScript, using the Coffee/Jison
 # compiler.
 exports.compile = compile = (code, options) ->
-  options ||= {}
+  options or= {}
   try
     (parser.parse lexer.tokenize code).compile options
   catch err

@@ -156,7 +156,7 @@ printTokens = (tokens) ->
 parseOptions = ->
   optionParser = new optparse.OptionParser SWITCHES, BANNER
   o = options = optionParser.parse(process.argv[2...process.argv.length])
-  options.compile ||= !!o.output
+  options.compile or= !!o.output
   options.run = not (o.compile or o.print or o.lint)
   options.print = !! (o.print or (o.eval or o.stdio and o.compile))
   sources = options.arguments

@@ -100,7 +100,6 @@ exports.Lexer = class Lexer
       @identifierError id
     unless forcedIdentifier
       tag = id = CONVERSIONS[id] if include COFFEE_ALIASES, id
-      return @tagHalfAssignment tag if @prev() and @prev()[0] is 'ASSIGN' and include HALF_ASSIGNMENTS, tag
     @token tag, id
     @token ']', ']' if close_index
     true
@@ -258,10 +257,14 @@ exports.Lexer = class Lexer
     value = match and match[1]
     space = match and match[2]
     @tagParameters() if value and value.match CODE
-    value ||= @chunk.substr 0, 1
+    value or= @chunk.substr 0, 1
+    @i += value.length
     prevSpaced = @prev() and @prev().spaced
     tag = value
-    @assignmentError() if value is '=' and include JS_FORBIDDEN, @value
+    if value is '='
+      @assignmentError() if include JS_FORBIDDEN, @value()
+      if @value() in ['or', 'and']
+        return @tag 1, CONVERSIONS[@value()] + '='
     if value is ';'
       tag = 'TERMINATOR'
     else if value is '?' and prevSpaced
@@ -273,8 +276,6 @@ exports.Lexer = class Lexer
         tag = 'INDEX_START'
         @tag 1, 'INDEX_SOAK' if @tag() is '?'
         @tag 1, 'INDEX_PROTO' if @tag() is '::'
-    @i += value.length
-    return @tagHalfAssignment tag if space and prevSpaced and @prev()[0] is 'ASSIGN' and include HALF_ASSIGNMENTS, tag
     @token tag, value
     true
 
@@ -309,13 +310,6 @@ exports.Lexer = class Lexer
     doc.replace(MULTILINER, "\\n")
        .replace(new RegExp(options.quote, 'g'), "\\$options.quote")
 
-  # Tag a half assignment.
-  tagHalfAssignment: (tag) ->
-    tag = '?' if tag is 'OP?'
-    last = @tokens.pop()
-    @tokens.push ["$tag=", "$tag=", last[2]]
-    true
-
   # A source of ambiguity in our grammar used to be parameter lists in function
   # definitions versus argument lists in function calls. Walk backwards, tagging
   # parameters specially in order to make things easier for the parser.
@@ -351,7 +345,7 @@ exports.Lexer = class Lexer
   # contents of the string. This method allows us to have strings within
   # interpolations within strings, ad infinitum.
   balancedString: (str, delimited, options) ->
-    options ||= {}
+    options or= {}
     slash = delimited[0][0] is '/'
     levels = []
     i = 0
@@ -388,7 +382,7 @@ exports.Lexer = class Lexer
   # new Lexer, tokenize the interpolated contents, and merge them into the
   # token stream.
   interpolateString: (str, options) ->
-    options ||= {}
+    options or= {}
     if str.length < 3 or not starts str, '"'
       @token 'STRING', str
     else
@@ -556,9 +550,6 @@ CALLABLE = ['IDENTIFIER', 'SUPER', ')', ']', '}', 'STRING', '@', 'THIS', '?', '::']
 # avoid an ambiguity in the grammar.
 LINE_BREAK = ['INDENT', 'OUTDENT', 'TERMINATOR']
 
-# Half-assignments...
-HALF_ASSIGNMENTS = ['-', '+', '/', '*', '%', '||', '&&', '?', 'OP?']
-
 # Conversions from CoffeeScript operators into JavaScript ones.
 CONVERSIONS = {
   'and': '&&'
@@ -566,4 +557,5 @@ CONVERSIONS = {
   'is': '=='
   'isnt': '!='
   'not': '!'
+  '===': '=='
 }

@@ -112,7 +112,7 @@ exports.BaseNode = class BaseNode
   # `toString` representation of the node, for inspecting the parse tree.
   # This is what `coffee --nodes` prints out.
   toString: (idt, override) ->
-    idt ||= ''
+    idt or= ''
     children = (child.toString idt + TAB for child in @collectChildren()).join('')
     '\n' + idt + (override or @class) + children
 
@@ -193,7 +193,7 @@ exports.Expressions = class Expressions extends BaseNode
 
   # An **Expressions** is the only node that can serve as the root.
   compile: (o) ->
-    o ||= {}
+    o or= {}
     if o.scope then super(o) else @compileRoot(o)
 
   compileNode: (o) ->
@@ -354,7 +354,7 @@ exports.ValueNode = class ValueNode extends BaseNode
     only = del o, 'onlyFirst'
     op = del o, 'operation'
     props = if only then @properties[0...@properties.length - 1] else @properties
-    o.chainRoot ||= this
+    o.chainRoot or= this
     baseline = @base.compile o
     baseline = "($baseline)" if @hasProperties() and (@base instanceof ObjectNode or @isNumber())
     complete = @last = baseline
@@ -502,7 +502,7 @@ exports.AccessorNode = class AccessorNode extends BaseNode
 
   compileNode: (o) ->
     name = @name.compile o
-    o.chainRoot.wrapped ||= @soakNode
+    o.chainRoot.wrapped or= @soakNode
     namePart = if name.match(IS_STRING) then "[$name]" else ".$name"
     @prototype + namePart
 
@@ -518,7 +518,7 @@ exports.IndexNode = class IndexNode extends BaseNode
     @index = index
 
   compileNode: (o) ->
-    o.chainRoot.wrapped ||= @soakNode
+    o.chainRoot.wrapped or= @soakNode
     idx = @index.compile o
     prefix = if @proto then '.prototype' else ''
     "$prefix[$idx]"
@@ -570,7 +570,7 @@ exports.RangeNode = class RangeNode extends BaseNode
     [from, to] = [parseInt(@fromNum, 10), parseInt(@toNum, 10)]
     idx = del o, 'index'
     step = del o, 'step'
-    step &&= "$idx += ${step.compile(o)}"
+    step and= "$idx += ${step.compile(o)}"
     if from <= to
       "$idx = $from; $idx <$@equals $to; ${step or "$idx++"}"
     else
@@ -721,8 +721,8 @@ exports.ClassNode = class ClassNode extends BaseNode
         continue
       if func instanceof CodeNode and func.bound
         func.bound = false
-        constScope ||= new Scope(o.scope, constructor.body, constructor)
-        me ||= constScope.freeVariable()
+        constScope or= new Scope(o.scope, constructor.body, constructor)
+        me or= constScope.freeVariable()
         pname = pvar.compile(o)
         constructor.body.push new ReturnNode literal 'this' if constructor.body.empty()
         constructor.body.unshift literal "this.${pname} = function(){ return ${className}.prototype.${pname}.apply($me, arguments); }"
@@ -902,7 +902,7 @@ exports.CodeNode = class CodeNode extends BaseNode
   traverseChildren: (crossScope, func) -> super(crossScope, func) if crossScope
 
   toString: (idt) ->
-    idt ||= ''
+    idt or= ''
     children = (child.toString(idt + TAB) for child in @collectChildren()).join('')
     "\n$idt$children"
 
@@ -1385,7 +1385,7 @@ exports.IfNode = class IfNode extends BaseNode
   # The **IfNode** only compiles into a statement if either of its bodies needs
   # to be a statement. Otherwise a ternary is safe.
   isStatement: ->
-    @statement ||= !!(@tags.statement or @bodyNode().isStatement() or (@elseBody and @elseBodyNode().isStatement()))
+    @statement or= !!(@tags.statement or @bodyNode().isStatement() or (@elseBody and @elseBodyNode().isStatement()))
 
   compileCondition: (o) ->
     (cond.compile(o) for cond in flatten([@condition])).join(' || ')
@@ -1395,8 +1395,8 @@ exports.IfNode = class IfNode extends BaseNode
 
   makeReturn: ->
     if @isStatement()
-      @body &&= @ensureExpressions(@body.makeReturn())
-      @elseBody &&= @ensureExpressions(@elseBody.makeReturn())
+      @body and= @ensureExpressions(@body.makeReturn())
+      @elseBody and= @ensureExpressions(@elseBody.makeReturn())
       this
     else
       new ReturnNode this

@@ -198,7 +198,7 @@ exports.Rewriter = class Rewriter
     @scanTokens (prev, token, post, i) =>
       for pair in pairs
         [open, close] = pair
-        levels[open] ||= 0
+        levels[open] or= 0
         if token[0] is open
           openLine[open] = token[2] if levels[open] == 0
           levels[open] += 1