@@ -9,15 +9,29 @@
 import tempfile
 import os
 import sys
-import stat
 import subprocess
 import glob
 from cStringIO import StringIO
 
-from typ import *
+from stat import (
+    S_ISLNK,
+    S_ISDIR,
+    S_IFMT,
+    S_IFDIR,
+    S_IFLNK,
+    S_IFREG
+)
+
+from typ import (
+    BaseIndexEntry,
+    IndexEntry,
+    CE_NAMEMASK,
+    CE_STAGESHIFT
+)
+
 from util import (
     TemporaryFileSwap,
-    clear_cache,
+    post_clear_cache,
     default_index,
     pack,
     unpack
@@ -75,7 +89,7 @@ class IndexFile(LazyMixin, diff.Diffable, Serializable):
     """
     __slots__ = ("repo", "version", "entries", "_extension_data", "_file_path")
     _VERSION = 2            # latest version we support
-    S_IFGITLINK = 0160000
+    S_IFGITLINK = 0160000   # a submodule
 
     def __init__(self, repo, file_path=None):
         """
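
Note on the hunks that follow: _read_entry now stores the raw 16-bit flags word in the IndexEntry instead of the pre-shifted stage. In git's index format the low 12 bits of flags hold the path length (CE_NAMEMASK, 0x0FFF) and bits 12-13 hold the merge stage (CE_STAGESHIFT, 12). A minimal sketch of that encoding, assuming a 0x3 stage mask after the shift (the IndexEntry.stage property is not shown in this diff but presumably does the same):

    CE_NAMEMASK = 0x0FFF        # low 12 bits: length of the entry's path
    CE_STAGESHIFT = 12          # bits 12-13: merge stage (0 normal, 1 base, 2 ours, 3 theirs)

    def stage_of(flags):
        return (flags >> CE_STAGESHIFT) & 0x3

    def flags_for(path, stage):
        plen = len(path) & CE_NAMEMASK
        return (stage << CE_STAGESHIFT) | plen
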
@@ -141,12 +155,12 @@ def _read_entry(cls, stream):
         mtime = unpack(">8s", stream.read(8))[0]
         (dev, ino, mode, uid, gid, size, sha, flags) = \
             unpack(">LLLLLL20sH", stream.read(20 + 4 * 6 + 2))
-        path_size = flags & 0x0fff
+        path_size = flags & CE_NAMEMASK
         path = stream.read(path_size)
 
         real_size = ((stream.tell() - beginoffset + 8) & ~7)
         data = stream.read((beginoffset + real_size) - stream.tell())
-        return IndexEntry((mode, binascii.hexlify(sha), flags >> 12, path, ctime, mtime, dev, ino, uid, gid, size))
+        return IndexEntry((mode, binascii.hexlify(sha), flags, path, ctime, mtime, dev, ino, uid, gid, size))
 
     @classmethod
     def _read_header(cls, stream):
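
The real_size arithmetic kept unchanged above implements the on-disk padding rule: every index entry (62 fixed bytes plus the path) is NUL-padded to the next multiple of eight bytes, always leaving at least one NUL after the path. A quick, purely illustrative check of the rounding:

    def padded_size(raw):
        # (raw + 8) & ~7 rounds up to a multiple of 8 with 1-8 trailing NUL bytes
        return (raw + 8) & ~7

    assert padded_size(70) == 72    # two NUL bytes of padding
    assert padded_size(72) == 80    # already aligned, so a full block of eight NULs
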
@@ -198,7 +212,7 @@ def _serialize(self, stream, ignore_tree_extension_data=False):
 
         # body
         entries_sorted = self.entries.values()
-        entries_sorted.sort(key=lambda e: (e[3], e[2]))         # use path/stage as sort key
+        entries_sorted.sort(key=lambda e: (e[3], e.stage))      # use path/stage as sort key
         for entry in entries_sorted:
             self._write_cache_entry(stream, entry)
         # END for each entry
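
Entries are serialized sorted by path first and stage second; the sort key now reads the stage through the entry's stage property, since tuple slot 2 holds the raw flags after this change. Assuming entries that expose path and stage, a conflicted file simply yields its stages in order:

    ordered = sorted(index.entries.values(), key=lambda e: (e.path, e.stage))
    # an unmerged path appears as stage 1 (base), 2 (ours), 3 (theirs) in sequence
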
@@ -226,17 +240,18 @@ def _serialize(self, stream, ignore_tree_extension_data=False):
     def _write_cache_entry(cls, stream, entry):
         """ Write an IndexEntry to a stream """
         beginoffset = stream.tell()
-        stream.write(entry[4])          # ctime
-        stream.write(entry[5])          # mtime
+        write = stream.write
+        write(entry[4])                 # ctime
+        write(entry[5])                 # mtime
         path = entry[3]
-        plen = len(path) & 0x0fff       # path length
+        plen = len(path) & CE_NAMEMASK  # path length
         assert plen == len(path), "Path %s too long to fit into index" % entry[3]
-        flags = plen | (entry[2] << 12) # stage and path length are 2 byte flags
-        stream.write(pack(">LLLLLL20sH", entry[6], entry[7], entry[0],
+        flags = plen | entry[2]
+        write(pack(">LLLLLL20sH", entry[6], entry[7], entry[0],
                     entry[8], entry[9], entry[10], binascii.unhexlify(entry[1]), flags))
-        stream.write(path)
+        write(path)
         real_size = ((stream.tell() - beginoffset + 8) & ~7)
-        stream.write("\0" * ((beginoffset + real_size) - stream.tell()))
+        write("\0" * ((beginoffset + real_size) - stream.tell()))
 
     def write(self, file_path = None, ignore_tree_extension_data=False):
         """
@@ -272,7 +287,7 @@ def write(self, file_path = None, ignore_tree_extension_data=False):
         if file_path is not None:
             self._file_path = file_path
 
-    @clear_cache
+    @post_clear_cache
     @default_index
     def merge_tree(self, rhs, base=None):
         """Merge the given rhs treeish into the current index, possibly taking
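
clear_cache is renamed to post_clear_cache here and on remove, move and reset below; the name signals that the cached entries are dropped after the wrapped git operation, so the next access lazily re-reads the index file. A rough sketch of such a decorator, assuming the cache is the entries attribute and is rebuilt on demand by LazyMixin (neither is shown in this diff):

    def post_clear_cache(func):
        def wrapper(self, *args, **kwargs):
            result = func(self, *args, **kwargs)
            try:
                del self.entries        # invalidate; LazyMixin re-reads lazily on next access
            except AttributeError:
                pass
            return result
        wrapper.__name__ = func.__name__
        return wrapper
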
@@ -383,24 +398,14 @@ def from_tree(cls, repo, *treeish, **kwargs):
         return index
 
     @classmethod
-    def _index_mode_to_tree_index_mode(cls, index_mode):
-        """
-        Cleanup a index_mode value.
-        This will return a index_mode that can be stored in a tree object.
-
-        ``index_mode``
-            Index_mode to clean up.
-        """
-        if stat.S_ISLNK(index_mode):
-            return stat.S_IFLNK
-        elif stat.S_ISDIR(index_mode):
-            return stat.S_IFDIR
-        elif stat.S_IFMT(index_mode) == cls.S_IFGITLINK:
+    def _stat_mode_to_index_mode(cls, mode):
+        """Convert the given mode from a stat call to the corresponding index mode
+        and return it"""
+        if S_ISLNK(mode):       # symlinks
+            return S_IFLNK
+        if S_ISDIR(mode) or S_IFMT(mode) == cls.S_IFGITLINK:   # submodules
             return cls.S_IFGITLINK
-        ret = stat.S_IFREG | 0644
-        ret |= (index_mode & 0111)
-        return ret
-
+        return S_IFREG | 0644 | (mode & 0100)                  # blobs with or without executable bit
 
     # UTILITIES
     def _iter_expand_paths(self, paths):
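
The tree-oriented helper is replaced by _stat_mode_to_index_mode, which maps a raw os.lstat() mode onto the few modes git actually records in the index: 0120000 for symlinks, 0160000 (S_IFGITLINK) for submodules, and a regular blob mode with or without the owner-executable bit. Illustrative values only (Python 2 octal literals):

    from stat import S_IFLNK, S_IFREG

    print oct(S_IFLNK)              # 0120000 -> symlink entry
    print oct(0160000)              # 0160000 -> gitlink / submodule entry
    print oct(S_IFREG | 0644)       # 0100644 -> plain, non-executable blob
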
@@ -479,7 +484,9 @@ def iter_blobs(self, predicate = lambda t: True):
             only if they match a given list of paths.
         """
         for entry in self.entries.itervalues():
-            mode = self._index_mode_to_tree_index_mode(entry.mode)
+            # TODO: is it necessary to convert the mode ? We did that when adding
+            # it to the index, right ?
+            mode = self._stat_mode_to_index_mode(entry.mode)
             blob = Blob(self.repo, entry.sha, mode, entry.path)
             blob.size = entry.size
             output = (entry.stage, blob)
@@ -636,7 +643,6 @@ def _preprocess_add_items(self, items):
         # END for each item
         return (paths, entries)
 
-    @clear_cache
     @default_index
     def add(self, items, force=True, fprogress=lambda *args: None, path_rewriter=None):
         """Add files from the working tree, specific blobs or BaseIndexEntries
@@ -739,7 +745,7 @@ def store_path(filepath):
             """Store file at filepath in the database and return the base index entry"""
             st = os.lstat(filepath)         # handles non-symlinks as well
             stream = None
-            if stat.S_ISLNK(st.st_mode):
+            if S_ISLNK(st.st_mode):
                 stream = StringIO(os.readlink(filepath))
             else:
                 stream = open(filepath, 'rb')
@@ -759,13 +765,6 @@ def store_path(filepath):
             for filepath in self._iter_expand_paths(paths):
                 entries_added.append(store_path(filepath))
             # END for each filepath
-
-            # add the new entries to this instance, and write it
-            for entry in entries_added:
-                self.entries[(entry.path, 0)] = IndexEntry.from_base(entry)
-
-            # finally write the changed index
-            self.write()
         # END path handling
 
 
@@ -823,6 +822,14 @@ def store_path(filepath):
             self._flush_stdin_and_wait(proc, ignore_stdout=True)
             entries_added.extend(entries)
         # END if there are base entries
+
+        # FINALIZE
+        # add the new entries to this instance, and write it
+        for entry in entries_added:
+            self.entries[(entry.path, 0)] = IndexEntry.from_base(entry)
+
+        # finally write the changed index
+        self.write()
 
         return entries_added
 
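
Registering the new entries and writing the index used to happen only in the path-handling branch (removed a few hunks above); it now runs once at the end of add(), so entries produced from plain paths, Blob objects and BaseIndexEntries all land in self.entries and are flushed in a single write(). A hedged usage sketch with placeholder file names and a pre-existing Repo instance:

    index = IndexFile(repo)                         # the repository's default index
    added = index.add(['README', 'doc/intro.rst'])  # placeholder paths in the working tree
    assert ('README', 0) in index.entries           # registered at stage 0 and written out
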
@@ -840,7 +847,7 @@ def _items_to_rela_paths(self, items):
         # END for each item
         return paths
 
-    @clear_cache
+    @post_clear_cache
     @default_index
     def remove(self, items, working_tree=False, **kwargs):
         """
@@ -893,7 +900,7 @@ def remove(self, items, working_tree=False, **kwargs):
         # rm 'path'
         return [p[4:-1] for p in removed_paths]
 
-    @clear_cache
+    @post_clear_cache
     @default_index
     def move(self, items, skip_errors=False, **kwargs):
         """
@@ -1127,7 +1134,7 @@ def handle_stderr(proc, iter_checked_out_files):
         # END paths handling
         assert "Should not reach this point"
 
-    @clear_cache
+    @post_clear_cache
     @default_index
     def reset(self, commit='HEAD', working_tree=False, paths=None, head=False, **kwargs):
         """