From b99008d2a0a097fd2d07a78b3a9052b83489736c Mon Sep 17 00:00:00 2001 From: Steve Slaven Date: Fri, 7 Aug 2009 15:48:45 -0700 Subject: Don't re-read the entire file in case we are extending it when we don't have enough blocks, just start from the last block. This increases performance significantly for large files diff --git a/FuseArchive/ChunkFile.py b/FuseArchive/ChunkFile.py index e19a9fb..b9f114f 100644 --- a/FuseArchive/ChunkFile.py +++ b/FuseArchive/ChunkFile.py @@ -217,7 +217,7 @@ class ChunkFile(object): self.chunks[ self.chunk_index ] = self.chunk # Flush if we have too many dirty chunks - if self.dirty_chunks > dirty_flush: + if self.dirty_chunks >= dirty_flush: self._flush_chunks() # This flushes any cached chunks @@ -288,7 +288,12 @@ class ChunkFile(object): if len( self.chunks ) - 1 < index: logging.debug( "Not enough chunks %d, need %d, extending" % ( len( self.chunks ), index + 1 ) ) - this_index = 0 + + # Start with our last block, in case we need to null pad it out + this_index = len( self.chunks ) - 1 + if this_index < 0: + this_index = 0 + while this_index < index: self._load_chunk( this_index ) fill_null = self.chunk_size - len(self.chunk) -- cgit v0.10.2