Mirror of https://github.com/atom/atom.git
Rename parentScopes to openScopes
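Aside (illustrative, not part of this commit): the rename makes the field's meaning explicit. Each tokenized line carries an array of numeric scope ids describing which scopes are still open when the line begins, not the scopes of some parent node. The hunks below suggest these ids are odd negative integers handed out by the grammar registry (grammar.idForScope) and mapped back to names with atom.grammars.scopeForId. A purely illustrative CoffeeScript sketch with invented ids:

    # Illustrative only: the ids below are invented stand-ins.
    rootScopeId = -257               # stands in for grammar.idForScope('source.coffee')
    openScopes  = [rootScopeId]      # an untokenized line starts with only the root scope open
    openScopes.push(-259)            # e.g. a string scope opened on an earlier row is still open
    console.log openScopes           # => [-257, -259]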
@@ -154,7 +154,7 @@ class TokenizedBuffer extends Model
     row = startRow
     loop
       previousStack = @stackForRow(row)
-      @tokenizedLines[row] = @buildTokenizedLineForRow(row, @stackForRow(row - 1), @parentScopesForRow(row))
+      @tokenizedLines[row] = @buildTokenizedLineForRow(row, @stackForRow(row - 1), @openScopesForRow(row))
       if --rowsRemaining is 0
         filledRegion = false
         endRow = row
@@ -214,7 +214,7 @@ class TokenizedBuffer extends Model

     @updateInvalidRows(start, end, delta)
     previousEndStack = @stackForRow(end) # used in spill detection below
-    newTokenizedLines = @buildTokenizedLinesForRows(start, end + delta, @stackForRow(start - 1), @parentScopesForRow(start))
+    newTokenizedLines = @buildTokenizedLinesForRows(start, end + delta, @stackForRow(start - 1), @openScopesForRow(start))
     _.spliceWithArray(@tokenizedLines, start, end - start + 1, newTokenizedLines)

     start = @retokenizeWhitespaceRowsIfIndentLevelChanged(start - 1, -1)
@@ -235,7 +235,7 @@ class TokenizedBuffer extends Model
     line = @tokenizedLines[row]
     if line?.isOnlyWhitespace() and @indentLevelForRow(row) isnt line.indentLevel
       while line?.isOnlyWhitespace()
-        @tokenizedLines[row] = @buildTokenizedLineForRow(row, @stackForRow(row - 1), @parentScopesForRow(row))
+        @tokenizedLines[row] = @buildTokenizedLineForRow(row, @stackForRow(row - 1), @openScopesForRow(row))
         row += increment
         line = @tokenizedLines[row]

@@ -277,17 +277,17 @@ class TokenizedBuffer extends Model
       @tokenizedLineForRow(row).isComment() and
       @tokenizedLineForRow(nextRow).isComment()

-  buildTokenizedLinesForRows: (startRow, endRow, startingStack, startingParentScopes) ->
+  buildTokenizedLinesForRows: (startRow, endRow, startingStack, startingopenScopes) ->
     ruleStack = startingStack
-    parentScopes = startingParentScopes
+    openScopes = startingopenScopes
     stopTokenizingAt = startRow + @chunkSize
     tokenizedLines = for row in [startRow..endRow]
       if (ruleStack or row is 0) and row < stopTokenizingAt
-        tokenizedLine = @buildTokenizedLineForRow(row, ruleStack, parentScopes)
+        tokenizedLine = @buildTokenizedLineForRow(row, ruleStack, openScopes)
         ruleStack = tokenizedLine.ruleStack
-        parentScopes = @scopesFromTags(parentScopes, tokenizedLine.tags)
+        openScopes = @scopesFromTags(openScopes, tokenizedLine.tags)
       else
-        tokenizedLine = @buildPlaceholderTokenizedLineForRow(row, parentScopes)
+        tokenizedLine = @buildPlaceholderTokenizedLineForRow(row, openScopes)
       tokenizedLine

     if endRow >= stopTokenizingAt
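Aside (illustrative, not part of this commit): buildTokenizedLinesForRows threads openScopes through the chunk — after each line is tokenized, @scopesFromTags folds that line's tags into the open-scope list so the next row starts with the correct scopes. scopesFromTags itself is not shown in this diff; the sketch below only illustrates the bookkeeping it is assumed to perform, mirroring the push/pop logic visible in the TokenizedLine hunks further down (odd negative tags open a scope, other negative tags close the most recent one, non-negative tags are plain text lengths):

    # Assumed sketch, not the implementation shipped in this commit.
    scopesFromTags = (startingScopes, tags) ->
      scopes = startingScopes.slice()
      for tag in tags when tag < 0
        if (tag % 2) is -1
          scopes.push(tag)   # a scope opened on this line and is still open
        else
          scopes.pop()       # the most recently opened scope was closed
      scopes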
@@ -300,23 +300,23 @@ class TokenizedBuffer extends Model
     @buildPlaceholderTokenizedLineForRow(row) for row in [startRow..endRow]

   buildPlaceholderTokenizedLineForRow: (row) ->
-    parentScopes = [@grammar.idForScope(@grammar.scopeName)]
+    openScopes = [@grammar.idForScope(@grammar.scopeName)]
     text = @buffer.lineForRow(row)
     tags = [text.length]
     tabLength = @getTabLength()
     indentLevel = @indentLevelForRow(row)
     lineEnding = @buffer.lineEndingForRow(row)
-    new TokenizedLine({parentScopes, text, tags, tabLength, indentLevel, @invisibles, lineEnding})
+    new TokenizedLine({openScopes, text, tags, tabLength, indentLevel, @invisibles, lineEnding})

-  buildTokenizedLineForRow: (row, ruleStack, parentScopes) ->
-    @buildTokenizedLineForRowWithText(row, @buffer.lineForRow(row), ruleStack, parentScopes)
+  buildTokenizedLineForRow: (row, ruleStack, openScopes) ->
+    @buildTokenizedLineForRowWithText(row, @buffer.lineForRow(row), ruleStack, openScopes)

-  buildTokenizedLineForRowWithText: (row, text, ruleStack = @stackForRow(row - 1), parentScopes = @parentScopesForRow(row)) ->
+  buildTokenizedLineForRowWithText: (row, text, ruleStack = @stackForRow(row - 1), openScopes = @openScopesForRow(row)) ->
     lineEnding = @buffer.lineEndingForRow(row)
     tabLength = @getTabLength()
     indentLevel = @indentLevelForRow(row)
     {tags, ruleStack} = @grammar.tokenizeLine(text, ruleStack, row is 0)
-    new TokenizedLine({parentScopes, text, tags, ruleStack, tabLength, lineEnding, indentLevel, @invisibles})
+    new TokenizedLine({openScopes, text, tags, ruleStack, tabLength, lineEnding, indentLevel, @invisibles})

   tokenizedLineForRow: (bufferRow) ->
     @tokenizedLines[bufferRow]
@@ -324,10 +324,10 @@ class TokenizedBuffer extends Model
   stackForRow: (bufferRow) ->
     @tokenizedLines[bufferRow]?.ruleStack

-  parentScopesForRow: (bufferRow) ->
+  openScopesForRow: (bufferRow) ->
     if bufferRow > 0
       precedingLine = @tokenizedLines[bufferRow - 1]
-      @scopesFromTags(precedingLine.parentScopes, precedingLine.tags)
+      @scopesFromTags(precedingLine.openScopes, precedingLine.tags)
     else
       []

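Aside (illustrative, not part of this commit): openScopesForRow is the renamed accessor — row 0 starts with nothing open, and every other row derives its list from the preceding tokenized line plus that line's own tags. A hedged usage sketch of the call pattern that appears in the hunks above, assuming a tokenizedBuffer whose lines up to the given row are already tokenized:

    # Sketch: rebuild one row with the rule stack and open scopes it starts with.
    retokenizeRow = (tokenizedBuffer, row) ->
      ruleStack  = tokenizedBuffer.stackForRow(row - 1)    # undefined for row 0
      openScopes = tokenizedBuffer.openScopesForRow(row)   # [] for row 0
      tokenizedBuffer.buildTokenizedLineForRow(row, ruleStack, openScopes)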
@@ -394,8 +394,8 @@ class TokenizedBuffer extends Model
     selector = new ScopeSelector(selector.replace(/^\./, ''))
     position = Point.fromObject(position)

-    {parentScopes, tags} = @tokenizedLines[position.row]
-    scopes = parentScopes.map (tag) -> atom.grammars.scopeForId(tag)
+    {openScopes, tags} = @tokenizedLines[position.row]
+    scopes = openScopes.map (tag) -> atom.grammars.scopeForId(tag)

     startColumn = 0
     for tag, tokenIndex in tags
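Aside (illustrative, not part of this commit): this hunk is where the rename surfaces to consumers — a position's scope names are obtained by mapping the numeric open-scope ids back through atom.grammars.scopeForId. A minimal sketch of that id-to-name step, built only from calls that appear in this diff:

    # Sketch: the scope names open at the start of a row.
    scopeNamesForRow = (tokenizedBuffer, row) ->
      ids = tokenizedBuffer.openScopesForRow(row)
      ids.map (id) -> atom.grammars.scopeForId(id)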
@@ -26,7 +26,7 @@ class TokenizedLine
     return unless properties?

     @specialTokens = {}
-    {@parentScopes, @text, @tags, @lineEnding, @ruleStack} = properties
+    {@openScopes, @text, @tags, @lineEnding, @ruleStack} = properties
     {@startBufferColumn, @fold, @tabLength, @indentLevel, @invisibles} = properties

     @startBufferColumn ?= 0
@@ -148,7 +148,7 @@ class TokenizedLine
   Object.defineProperty @prototype, 'tokens', get: ->
     offset = 0

-    atom.grammars.decodeTokens @text, @tags, @parentScopes.slice(), (tokenProperties, index) =>
+    atom.grammars.decodeTokens @text, @tags, @openScopes.slice(), (tokenProperties, index) =>
       switch @specialTokens[index]
         when SoftTab
           tokenProperties.isAtomic = true
@@ -191,7 +191,7 @@ class TokenizedLine
   copy: ->
     copy = new TokenizedLine
     copy.indentLevel = @indentLevel
-    copy.parentScopes = @parentScopes
+    copy.openScopes = @openScopes
     copy.text = @text
     copy.tags = @tags
     copy.specialTokens = @specialTokens
@@ -338,7 +338,7 @@ class TokenizedLine
     leftSpecialTokens = {}
     rightSpecialTokens = {}

-    rightParentScopes = @parentScopes.slice()
+    rightopenScopes = @openScopes.slice()

     screenColumn = 0

@@ -382,20 +382,20 @@ class TokenizedLine
       else if (tag % 2) is -1
         if screenColumn < column
           leftTags.push(tag)
-          rightParentScopes.push(tag)
+          rightopenScopes.push(tag)
         else
           rightTags.push(tag)
       else
         if screenColumn < column
           leftTags.push(tag)
-          rightParentScopes.pop()
+          rightopenScopes.pop()
         else
           rightTags.push(tag)

     splitBufferColumn = @bufferColumnForScreenColumn(column)

     leftFragment = new TokenizedLine
-    leftFragment.parentScopes = @parentScopes
+    leftFragment.openScopes = @openScopes
     leftFragment.text = leftText
     leftFragment.tags = leftTags
     leftFragment.specialTokens = leftSpecialTokens
@@ -410,7 +410,7 @@ class TokenizedLine
     leftFragment.firstTrailingWhitespaceIndex = Math.min(column, @firstTrailingWhitespaceIndex)

     rightFragment = new TokenizedLine
-    rightFragment.parentScopes = rightParentScopes
+    rightFragment.openScopes = rightopenScopes
     rightFragment.text = rightText
     rightFragment.tags = rightTags
     rightFragment.specialTokens = rightSpecialTokens