From caed3d39de694361ed8b61845f5409e0cbfd9c7a Mon Sep 17 00:00:00 2001
From: Corey Johnson & Nathan Sobo
Date: Tue, 23 Apr 2013 13:50:02 -0700
Subject: [PATCH] Revert "Don't tokenize files that have more than 10000 lines"

This was a premature optimization. The real problem is with the
spell-check package.

This reverts commit 987d1da233c4514be86b78b70aa103a6fa82f9d6.
---
 src/app/tokenized-buffer.coffee | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/src/app/tokenized-buffer.coffee b/src/app/tokenized-buffer.coffee
index b6ee2c8c2..5510273e9 100644
--- a/src/app/tokenized-buffer.coffee
+++ b/src/app/tokenized-buffer.coffee
@@ -62,11 +62,8 @@ class TokenizedBuffer
     @invalidateRow(0)
     @trigger "changed", { start: 0, end: lastRow, delta: 0 }
 
-  tooBigToTokenize: ->
-    @screenLines.length > 10000
-
   tokenizeInBackground: ->
-    return if not @visible or @pendingChunk or @tooBigToTokenize()
+    return if not @visible or @pendingChunk
    @pendingChunk = true
    _.defer =>
      @pendingChunk = false
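
For context, the tokenizeInBackground method touched above uses a
deferred-chunk pattern: a @pendingChunk flag guards against queueing more
than one deferred callback at a time. Below is a minimal standalone sketch
of that pattern, not the actual TokenizedBuffer implementation; the
BackgroundTokenizer class and tokenizeNextChunk are hypothetical stand-ins,
and only underscore's _.defer is assumed from the real code.

_ = require 'underscore'

class BackgroundTokenizer
  constructor: ->
    @pendingChunk = false

  tokenizeInBackground: ->
    # Bail out if a chunk is already scheduled, so repeated calls
    # collapse into a single deferred callback.
    return if @pendingChunk
    @pendingChunk = true
    _.defer =>
      @pendingChunk = false
      @tokenizeNextChunk()

  tokenizeNextChunk: ->
    # Hypothetical: tokenize a bounded slice of rows here, then call
    # tokenizeInBackground() again until no invalid rows remain.
    console.log 'tokenized one chunk'

Because each chunk defers the next one via _.defer, tokenization yields to
the event loop between chunks rather than blocking the UI on large files,
which is presumably why the hard 10000-line cutoff could be reverted once
the real cost was traced to the spell-check package.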