Stop tokenizing buffer when it is destroyed

This commit is contained in:
Kevin Sawicki
2013-09-27 10:12:13 -07:00
parent a6cd8e7c85
commit 438de204ca
2 changed files with 15 additions and 2 deletions

View File

@@ -26,6 +26,18 @@ describe "TokenizedBuffer", ->
expect(tokenizedBuffer2.buffer).toBe tokenizedBuffer1.buffer
expect(tokenizedBuffer2.getTabLength()).toBe tokenizedBuffer1.getTabLength()
+  describe "when the buffer is destroyed", ->
+    beforeEach ->
+      buffer = project.bufferForPath('sample.js')
+      tokenizedBuffer = new TokenizedBuffer({buffer})
+      startTokenizing(tokenizedBuffer)
+
+    it "stops tokenization", ->
+      tokenizedBuffer.destroy()
+      spyOn(tokenizedBuffer, 'tokenizeNextChunk')
+      advanceClock()
+      expect(tokenizedBuffer.tokenizeNextChunk).not.toHaveBeenCalled()
describe "when the buffer contains soft-tabs", ->
beforeEach ->
buffer = project.bufferForPath('sample.js')

View File

@@ -99,11 +99,11 @@ class TokenizedBuffer
@trigger "changed", { start: 0, end: lastRow, delta: 0 }
tokenizeInBackground: ->
-    return if not @visible or @pendingChunk
+    return if not @visible or @pendingChunk or @destroyed
@pendingChunk = true
_.defer =>
@pendingChunk = false
-      @tokenizeNextChunk()
+      @tokenizeNextChunk() unless @destroyed
tokenizeNextChunk: ->
rowsRemaining = @chunkSize
@@ -249,6 +249,7 @@ class TokenizedBuffer
destroy: ->
@unsubscribe()
+    @destroyed = true
iterateTokensInBufferRange: (bufferRange, iterator) ->
bufferRange = Range.fromObject(bufferRange)