author     Steve Slaven <bpk@hoopajoo.net>    2009-08-04 05:52:14 (GMT)
committer  Steve Slaven <bpk@hoopajoo.net>    2009-08-04 05:52:14 (GMT)
commit     642fc898d83e5b534cbd957381ae884b853c35c9 (patch)
tree       259e87abe1123b87ab9b26029443b419ad924740 /FuseArchive
parent     6e472c5db75d07ef43e1cbe0015655f444bb3b0d (diff)
Added more logging, fixed error with storage size for block length
Diffstat (limited to 'FuseArchive')
-rw-r--r--    FuseArchive/Chunk.py    21
1 file changed, 18 insertions, 3 deletions
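
The hformat change below is the "storage size for block length" fix from the commit message: the payload-length field moves from 'H' (unsigned short, capped at 65535) to the wider 'L', the filler shrinks from 52 to 48 pad bytes so the header stays 64 bytes, and a new assert guards that size at import time. A minimal sketch of the old field's limitation, using only the standard struct module (the 70000-byte length is a made-up value for illustration, not from the repository):

    import struct

    # A payload length larger than an unsigned short can represent.
    block_len = 70000

    try:
        struct.pack( 'H', block_len )   # old 'H' field: max value is 65535
    except struct.error as e:
        print( "old 'H' field overflows: %s" % e )

    # The new 'L' field stores the same length without error.
    packed = struct.pack( 'L', block_len )
    print( "new 'L' field packs %d into %d bytes" % ( block_len, len( packed ) ) )
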
diff --git a/FuseArchive/Chunk.py b/FuseArchive/Chunk.py
index 2ceb7ed..9f12f83 100644
--- a/FuseArchive/Chunk.py
+++ b/FuseArchive/Chunk.py
@@ -1,13 +1,17 @@
-import struct, zlib
+import struct, zlib, logging
 
 # Format version
 # Payload block size (so you don't have to uncompress it to see)
 # Data compression type (0 = none, 1 = zlib)
 # Ref count (number of files that use this)
 # filler to pad out ot 64 bytes for future expansion
-hformat = 'HHBL52x'
+hformat = 'HLBL48x'
 compress_level = 6
 
+assert struct.calcsize( hformat ) == 64, \
+    "Header struct must be 64 bytes not %d bytes" % \
+    struct.calcsize( hformat )
+
 # This handles serialization and deserialization of compressed chunks with
 # some header data
 class Chunk:
@@ -24,25 +28,35 @@ class Chunk:
     # Returns a string representing the serialized class to be sent to a
     # file
     def serialize(self, compression = 1):
+        l = len( self.chunk )
+
+        logging.debug( "Packing header: len: %d compression: %d count: %d" %
+            ( l, compression, self.count ) )
+
         data = struct.pack( hformat,
             0,
-            len( self.chunk ),
+            l,
             compression,
             self.count
         )
 
         if compression == 0:
+            logging.debug( "Saving chunk data raw" )
             data += self.chunk
         elif compression == 1:
+            logging.debug( "Saving chunk data using zlib" )
             data += zlib.compress( self.chunk, compress_level )
         else:
             raise ValueError( "Invalid compression type: %d" % compression )
 
+        logging.debug( "Returning serialized block, size is %d" % len( data) )
+
         return data
 
     # Converts the output of serialize back to a chunk object
     @staticmethod
     def deserialize(data):
+        logging.debug( "Deserializing data of length %d" % len( data ) )
         hd = Chunk.parse_header( data[ :64 ] )
         obj = Chunk()
         obj.count = hd[ 'count' ]
@@ -62,6 +76,7 @@ class Chunk:
     # involve potentially uncompressing some data
     @staticmethod
     def parse_header(data):
+        logging.debug( "Parsing header of length %d" % len( data ) )
         fields = struct.unpack( hformat, data )
         return {
             'version': fields[ 0 ],
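
The new logging.debug calls are silent unless the application configures the logging module. A minimal usage sketch under that assumption: Chunk() taking no arguments and the .chunk/.count attributes follow the diff above, while the payload and ref count values are made up for illustration.

    import logging
    from FuseArchive.Chunk import Chunk

    # Enable debug-level output so the new messages are visible.
    logging.basicConfig( level = logging.DEBUG )

    c = Chunk()
    c.chunk = "x" * 100000   # hypothetical payload; .chunk holds the raw data
    c.count = 1              # hypothetical ref count, as used by serialize()

    blob = c.serialize( compression = 1 )   # logs "Packing header: ..." and friends
    restored = Chunk.deserialize( blob )    # logs "Deserializing data of length ..."
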