     LazyMixin,
     LockedFD,
     join_path_native,
-    file_contents_ro
+    file_contents_ro,
+    LockFile
     )


 from gitdb.base import (
     IStream
     )

+from fun import (
+    write_cache,
+    read_cache,
+    entry_key
+    )
+
 __all__ = ( 'IndexFile', 'CheckoutError' )

@@ -84,7 +91,7 @@ class IndexFile(LazyMixin, diff.Diffable, Serializable):
     to facilitate access.

     You may read the entries dict or manipulate it using IndexEntry instance, i.e.::
-        index.entries[index.get_entries_key(index_entry_instance)] = index_entry_instance
+        index.entries[index.entry_key(index_entry_instance)] = index_entry_instance
     Otherwise changes to it will be lost when changing the index using its methods.
     """
     __slots__ = ("repo", "version", "entries", "_extension_data", "_file_path")
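To make the docstring's advice concrete, here is a minimal usage sketch (hedged: the repository path is hypothetical, the index is assumed to already contain at least one entry, and entry_key is the classmethod introduced further down in this diff):

    from git import Repo

    repo = Repo("/path/to/repo")        # hypothetical repository location
    index = repo.index

    # re-insert an existing entry under its canonical (path, stage) key,
    # then write the index back so the manual manipulation is not lost
    entry = index.entries.values()[0]
    index.entries[index.entry_key(entry)] = entry
    index.write()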
@@ -147,123 +154,34 @@ def _delete_entries_cache(self):
             pass
         # END exception handling

-    @classmethod
-    def _read_entry(cls, stream):
-        """Return: One entry of the given stream"""
-        beginoffset = stream.tell()
-        ctime = unpack(">8s", stream.read(8))[0]
-        mtime = unpack(">8s", stream.read(8))[0]
-        (dev, ino, mode, uid, gid, size, sha, flags) = \
-            unpack(">LLLLLL20sH", stream.read(20 + 4 * 6 + 2))
-        path_size = flags & CE_NAMEMASK
-        path = stream.read(path_size)
-
-        real_size = ((stream.tell() - beginoffset + 8) & ~7)
-        data = stream.read((beginoffset + real_size) - stream.tell())
-        return IndexEntry((mode, binascii.hexlify(sha), flags, path, ctime, mtime, dev, ino, uid, gid, size))
-
-    @classmethod
-    def _read_header(cls, stream):
-        """Return tuple(version_long, num_entries) from the given stream"""
-        type_id = stream.read(4)
-        if type_id != "DIRC":
-            raise AssertionError("Invalid index file header: %r" % type_id)
-        version, num_entries = unpack(">LL", stream.read(4 * 2))
-        assert version in (1, 2)
-        return version, num_entries
-
     #{ Serializable Interface

     def _deserialize(self, stream):
         """ Initialize this instance with index values read from the given stream """
-        self.version, num_entries = self._read_header(stream)
-        count = 0
-        self.entries = dict()
-        while count < num_entries:
-            entry = self._read_entry(stream)
-            self.entries[self.get_entries_key(entry)] = entry
-            count += 1
-        # END for each entry
-
-        # the footer contains extension data and a sha on the content so far
-        # Keep the extension footer, and verify we have a sha in the end
-        # Extension data format is:
-        #   4 bytes ID
-        #   4 bytes length of chunk
-        #   repeated 0 - N times
-        self._extension_data = stream.read(~0)
-        assert len(self._extension_data) > 19, "Index Footer was not at least a sha on content as it was only %i bytes in size" % len(self._extension_data)
-
-        content_sha = self._extension_data[-20:]
-
-        # truncate the sha in the end as we will dynamically create it anyway
-        self._extension_data = self._extension_data[:-20]
-
+        self.version, self.entries, self._extension_data, content_sha = read_cache(stream)
         return self

     def _serialize(self, stream, ignore_tree_extension_data=False):
-
-        # wrap the stream into a compatible writer
-        stream = IndexFileSHA1Writer(stream)
-
-        # header
-        stream.write("DIRC")
-        stream.write(pack(">LL", self.version, len(self.entries)))
-
-        # body
         entries_sorted = self.entries.values()
         entries_sorted.sort(key=lambda e: (e[3], e.stage))        # use path/stage as sort key
-        for entry in entries_sorted:
-            self._write_cache_entry(stream, entry)
-        # END for each entry
-
-        stored_ext_data = None
-        if ignore_tree_extension_data and self._extension_data and self._extension_data[:4] == 'TREE':
-            stored_ext_data = self._extension_data
-            self._extension_data = ''
-        # END extension data special handling
-
-        # write previously cached extensions data
-        stream.write(self._extension_data)
-
-        if stored_ext_data:
-            self._extension_data = stored_ext_data
-        # END reset previous ext data
-
-        # write the sha over the content
-        stream.write_sha()
+        write_cache(entries_sorted,
+                    stream,
+                    (ignore_tree_extension_data and None) or self._extension_data)
         return self
-
+
+
     #} END serializable interface

-    @classmethod
-    def _write_cache_entry(cls, stream, entry):
-        """ Write an IndexEntry to a stream """
-        beginoffset = stream.tell()
-        write = stream.write
-        write(entry[4])            # ctime
-        write(entry[5])            # mtime
-        path = entry[3]
-        plen = len(path) & CE_NAMEMASK        # path length
-        assert plen == len(path), "Path %s too long to fit into index" % entry[3]
-        flags = plen | entry[2]
-        write(pack(">LLLLLL20sH", entry[6], entry[7], entry[0],
-                    entry[8], entry[9], entry[10], binascii.unhexlify(entry[1]), flags))
-        write(path)
-        real_size = ((stream.tell() - beginoffset + 8) & ~7)
-        write("\0" * ((beginoffset + real_size) - stream.tell()))
-
     def write(self, file_path = None, ignore_tree_extension_data=False):
-        """
-        Write the current state to our file path or to the given one
+        """Write the current state to our file path or to the given one

-        ``file_path``
+        :param file_path:
             If None, we will write to our stored file path from which we have
             been initialized. Otherwise we write to the given file path.
             Please note that this will change the file_path of this index to
             the one you gave.

-        ``ignore_tree_extension_data``
+        :param ignore_tree_extension_data:
             If True, the TREE type extension data read in the index will not
             be written to disk. Use this if you have altered the index and
             would like to use git-write-tree afterwards to create a tree
@@ -273,12 +191,10 @@ def write(self, file_path = None, ignore_tree_extension_data=False):
             Alternatively, use IndexFile.write_tree() to handle this case
             automatically

-        Returns
-            self
-        """
+        :return: self"""
         lfd = LockedFD(file_path or self._file_path)
         stream = lfd.open(write=True, stream=True)
-
+
         self._serialize(stream, ignore_tree_extension_data)

         lfd.commit()
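For illustration, a hedged sketch of how the rewritten write() is typically driven (it reuses the repo and index objects from the earlier sketch; the alternative target path is made up):

    # write to the index's own file path (normally .git/index)
    index.write()

    # or write to an alternative file; note this re-binds the index's file_path,
    # and TREE extension data is dropped so git-write-tree can be used afterwards
    alt_path = "/tmp/index.tmp"       # hypothetical target
    index.write(alt_path, ignore_tree_extension_data=True)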
@@ -516,19 +432,8 @@ def unmerged_blobs(self):
         return path_map

     @classmethod
-    def get_entries_key(cls, *entry):
-        """
-        Returns
-            Key suitable to be used for the index.entries dictionary
-
-        ``entry``
-            One instance of type BaseIndexEntry or the path and the stage
-        """
-        if len(entry) == 1:
-            return (entry[0].path, entry[0].stage)
-        else:
-            return tuple(entry)
-
+    def entry_key(cls, *entry):
+        return entry_key(*entry)

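The replacement entry_key classmethod simply forwards to fun.entry_key; judging from the removed get_entries_key, it accepts either a single entry-like object or an explicit path plus stage. A small hedged sketch, reusing the index from above:

    entry = index.entries.values()[0]

    key_from_entry = index.entry_key(entry)          # -> (entry.path, entry.stage)
    key_from_parts = index.entry_key(entry.path, 0)  # stage 0: regular, non-conflicting entry

    assert key_from_entry in index.entries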
     def resolve_blobs(self, iter_blobs):
         """
@@ -585,26 +490,31 @@ def update(self):
         # allows to lazily reread on demand
         return self

-    def write_tree(self, missing_ok=False):
-        """
-        Writes the Index in self to a corresponding Tree file into the repository
-        object database and returns it as corresponding Tree object.
+    def _write_tree(self, missing_ok=False):
+        """Writes this index to a corresponding Tree object into the repository's
+        object database and returns it.

-        ``missing_ok``
+        :param missing_ok:
             If True, missing objects referenced by this index will not result
             in an error.

-        Returns
-            Tree object representing this index
-        """
+        :return: Tree object representing this index"""
+        # we obtain no lock as we just flush our contents to disk as tree
+        if not self.entries:
+            raise ValueError("Cannot write empty index")
+
+
+
+        return Tree(self.repo, tree_sha, 0, '')
+
+    def write_tree(self, missing_ok=False):
         index_path = self._index_path()
         tmp_index_mover = TemporaryFileSwap(index_path)
-
+
         self.write(index_path, ignore_tree_extension_data=True)
         tree_sha = self.repo.git.write_tree(missing_ok=missing_ok)
-
-        del(tmp_index_mover)        # as soon as possible
-
+
+        del(tmp_index_mover)        # as soon as possible
         return Tree(self.repo, tree_sha, 0, '')

     def _process_diff_args(self, args):
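A short hedged example of the write_tree round trip after this change (it assumes the repo and index from the earlier sketches; with the default missing_ok=False, every blob the index references must already exist in the object database):

    # serialize the index to a temporary location and let git-write-tree build the Tree;
    # the original .git/index is swapped back in afterwards by TemporaryFileSwap
    tree = index.write_tree()

    # with missing_ok=True, referenced-but-absent objects do not raise
    tree = index.write_tree(missing_ok=True)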
@@ -837,11 +747,10 @@ def _items_to_rela_paths(self, items):
     @post_clear_cache
     @default_index
     def remove(self, items, working_tree=False, **kwargs):
-        """
-        Remove the given items from the index and optionally from
+        """Remove the given items from the index and optionally from
         the working tree as well.

-        ``items``
+        :param items:
             Multiple types of items are supported which may be freely mixed.

             - path string
@@ -859,21 +768,20 @@ def remove(self, items, working_tree=False, **kwargs):
             - BaseIndexEntry or compatible type
                 The only relevant information here is the path. The stage is ignored.

-        ``working_tree``
+        :param working_tree:
             If True, the entry will also be removed from the working tree, physically
             removing the respective file. This may fail if there are uncommitted changes
             in it.

-        ``**kwargs``
+        :param **kwargs:
             Additional keyword arguments to be passed to git-rm, such
             as 'r' to allow recursive removal of

-        Returns
+        :return:
             List(path_string, ...) list of repository relative paths that have
             been removed effectively.
             This is interesting to know in case you have provided a directory or
-            globs. Paths are relative to the repository.
-        """
+            globs. Paths are relative to the repository. """
         args = list()
         if not working_tree:
             args.append("--cached")
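Finally, a hedged sketch of remove() as documented above (the file and directory names are hypothetical; 'r' is forwarded to git-rm to allow recursive removal of a directory):

    # drop a file from the index only; it stays in the working tree
    removed = index.remove(["doc/old_chapter.txt"])

    # drop a whole directory from both the index and the working tree
    removed += index.remove(["build"], working_tree=True, r=True)

    # 'removed' now holds the repository-relative paths that were effectively removed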