
github.com/kornelski/7z.git
Diffstat (limited to 'CPP/7zip/Archive/7z/7zUpdate.cpp')
-rw-r--r-- (was -rwxr-xr-x)  CPP/7zip/Archive/7z/7zUpdate.cpp | 503
1 file changed, 358 insertions(+), 145 deletions(-)
diff --git a/CPP/7zip/Archive/7z/7zUpdate.cpp b/CPP/7zip/Archive/7z/7zUpdate.cpp
index e63b09d2..96befa23 100755..100644
--- a/CPP/7zip/Archive/7z/7zUpdate.cpp
+++ b/CPP/7zip/Archive/7z/7zUpdate.cpp
@@ -4,10 +4,11 @@
#include "../../../../C/CpuArch.h"
-#include "../../Common/LimitedStreams.h"
-#include "../../Common/ProgressUtils.h"
+#include "../../../Common/Wildcard.h"
#include "../../Common/CreateCoder.h"
+#include "../../Common/LimitedStreams.h"
+#include "../../Common/ProgressUtils.h"
#include "../../Compress/CopyCoder.h"
@@ -58,19 +59,20 @@ int CUpdateItem::GetExtensionPos() const
int slashPos = GetReverseSlashPos(Name);
int dotPos = Name.ReverseFind(L'.');
if (dotPos < 0 || (dotPos < slashPos && slashPos >= 0))
- return Name.Length();
+ return Name.Len();
return dotPos + 1;
}
UString CUpdateItem::GetExtension() const
{
- return Name.Mid(GetExtensionPos());
+ return Name.Ptr(GetExtensionPos());
}
#define RINOZ(x) { int __tt = (x); if (__tt != 0) return __tt; }
#define RINOZ_COMP(a, b) RINOZ(MyCompare(a, b))
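
The RINOZ/RINOZ_COMP pair above turns a chain of key comparisons into a comparator that returns at the first deciding key; the comparators in this file (CompareFolderRepacks, CompareEmptyItems, CompareUpdateItems) are built from it. A minimal self-contained sketch of the pattern, with a hypothetical Rec type (the macros are repeated so the sketch stands alone):

    #define RINOZ(x) { int __tt = (x); if (__tt != 0) return __tt; }
    #define RINOZ_COMP(a, b) RINOZ(MyCompare(a, b))

    // Three-way compare in the style of 7-Zip's MyCompare.
    template <class T>
    static int MyCompare(const T &a, const T &b)
      { return a < b ? -1 : (a == b ? 0 : 1); }

    struct Rec { int Group; unsigned Size; };

    static int CompareRecs(const Rec &r1, const Rec &r2)
    {
      RINOZ_COMP(r1.Group, r2.Group); // primary key: first nonzero result returns
      RINOZ_COMP(r1.Size, r2.Size);   // secondary key
      return 0;                       // all keys equal
    }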
+/*
static int CompareBuffers(const CByteBuffer &a1, const CByteBuffer &a2)
{
size_t c1 = a1.GetCapacity();
@@ -110,11 +112,12 @@ static int CompareFolders(const CFolder &f1, const CFolder &f2)
RINOZ(CompareBindPairs(f1.BindPairs[i], f2.BindPairs[i]));
return 0;
}
+*/
/*
static int CompareFiles(const CFileItem &f1, const CFileItem &f2)
{
- return MyStringCompareNoCase(f1.Name, f2.Name);
+ return CompareFileNames(f1.Name, f2.Name);
}
*/
@@ -125,15 +128,19 @@ struct CFolderRepack
CNum NumCopyFiles;
};
-static int CompareFolderRepacks(const CFolderRepack *p1, const CFolderRepack *p2, void *param)
+static int CompareFolderRepacks(const CFolderRepack *p1, const CFolderRepack *p2, void * /* param */)
{
RINOZ_COMP(p1->Group, p2->Group);
int i1 = p1->FolderIndex;
int i2 = p2->FolderIndex;
- const CArchiveDatabaseEx &db = *(const CArchiveDatabaseEx *)param;
+ /*
+ // In this version we don't parse folders here, so we don't compare them;
+ // this should probably be improved in the future.
+ const CDbEx &db = *(const CDbEx *)param;
RINOZ(CompareFolders(
db.Folders[i1],
db.Folders[i2]));
+ */
return MyCompare(i1, i2);
/*
RINOZ_COMP(
@@ -147,25 +154,31 @@ static int CompareFolderRepacks(const CFolderRepack *p1, const CFolderRepack *p2
*/
}
-////////////////////////////////////////////////////////////
+/*
+ we sort empty files and dirs in this order:
+ - Dir.NonAnti (name sorted)
+ - File.NonAnti (name sorted)
+ - File.Anti (name sorted)
+ - Dir.Anti (reverse name sorted)
+*/
static int CompareEmptyItems(const int *p1, const int *p2, void *param)
{
const CObjectVector<CUpdateItem> &updateItems = *(const CObjectVector<CUpdateItem> *)param;
const CUpdateItem &u1 = updateItems[*p1];
const CUpdateItem &u2 = updateItems[*p2];
+ // NonAnti < Anti
+ if (u1.IsAnti != u2.IsAnti)
+ return (u1.IsAnti ? 1 : -1);
if (u1.IsDir != u2.IsDir)
- return (u1.IsDir) ? 1 : -1;
- if (u1.IsDir)
{
- if (u1.IsAnti != u2.IsAnti)
+ // Dir.NonAnti < File < Dir.Anti
+ if (u1.IsDir)
return (u1.IsAnti ? 1 : -1);
- int n = MyStringCompareNoCase(u1.Name, u2.Name);
- return -n;
+ return (u2.IsAnti ? -1 : 1);
}
- if (u1.IsAnti != u2.IsAnti)
- return (u1.IsAnti ? 1 : -1);
- return MyStringCompareNoCase(u1.Name, u2.Name);
+ int n = CompareFileNames(u1.Name, u2.Name);
+ return (u1.IsDir && u1.IsAnti) ? -n : n;
}
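
Restated outside the diff, the new ordering above can be exercised directly. A self-contained sketch (Item and compareNames are stand-ins for CUpdateItem and CompareFileNames; the real collation lives in Wildcard.cpp):

    #include <string>

    struct Item { bool IsDir; bool IsAnti; std::wstring Name; };

    // Stand-in for CompareFileNames.
    static int compareNames(const std::wstring &a, const std::wstring &b)
      { return a.compare(b); }

    static int CompareEmpty(const Item &u1, const Item &u2)
    {
      if (u1.IsAnti != u2.IsAnti)
        return u1.IsAnti ? 1 : -1;             // NonAnti before Anti
      if (u1.IsDir != u2.IsDir)
      {
        if (u1.IsDir)                          // Dir.NonAnti < File < Dir.Anti
          return u1.IsAnti ? 1 : -1;
        return u2.IsAnti ? -1 : 1;
      }
      int n = compareNames(u1.Name, u2.Name);
      return (u1.IsDir && u1.IsAnti) ? -n : n; // anti dirs sort in reverse
    }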
static const char *g_Exts =
@@ -198,7 +211,7 @@ static const char *g_Exts =
" exe dll ocx vbx sfx sys tlb awx com obj lib out o so "
" pdb pch idb ncb opt";
-int GetExtIndex(const char *ext)
+static int GetExtIndex(const char *ext)
{
int extIndex = 1;
const char *p = g_Exts;
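
The hunk cuts GetExtIndex short, but g_Exts above is a single space-separated string and the function walks it token by token. A rough equivalent in standard C++ (a sketch, not the truncated original; returning 0 on a miss is an assumption, since the original's miss handling is outside this hunk):

    #include <sstream>
    #include <string>

    // Each space-separated token gets the next index, starting at 1.
    static int GetExtIndexSketch(const char *table, const std::string &ext)
    {
      std::istringstream iss(table);
      std::string token;
      for (int extIndex = 1; iss >> token; extIndex++)
        if (token == ext)
          return extIndex;
      return 0; // assumed "unknown extension" value
    }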
@@ -237,7 +250,9 @@ struct CRefItem
UInt32 Index;
UInt32 ExtensionPos;
UInt32 NamePos;
- int ExtensionIndex;
+ unsigned ExtensionIndex;
+
+ CRefItem() {};
CRefItem(UInt32 index, const CUpdateItem &ui, bool sortByType):
UpdateItem(&ui),
Index(index),
@@ -248,64 +263,134 @@ struct CRefItem
if (sortByType)
{
int slashPos = GetReverseSlashPos(ui.Name);
- NamePos = ((slashPos >= 0) ? (slashPos + 1) : 0);
+ NamePos = slashPos + 1;
int dotPos = ui.Name.ReverseFind(L'.');
- if (dotPos < 0 || (dotPos < slashPos && slashPos >= 0))
- ExtensionPos = ui.Name.Length();
+ if (dotPos < 0 || dotPos < slashPos)
+ ExtensionPos = ui.Name.Len();
else
{
ExtensionPos = dotPos + 1;
- UString us = ui.Name.Mid(ExtensionPos);
- if (!us.IsEmpty())
+ if (ExtensionPos != ui.Name.Len())
{
- us.MakeLower();
- int i;
AString s;
- for (i = 0; i < us.Length(); i++)
+ for (unsigned pos = ExtensionPos;; pos++)
{
- wchar_t c = us[i];
+ wchar_t c = ui.Name[pos];
if (c >= 0x80)
break;
- s += (char)c;
+ if (c == 0)
+ {
+ ExtensionIndex = GetExtIndex(s);
+ break;
+ }
+ s += (char)MyCharLower_Ascii((char)c);
}
- if (i == us.Length())
- ExtensionIndex = GetExtIndex(s);
- else
- ExtensionIndex = 0;
}
}
}
}
};
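
The rewritten constructor body above replaces the old Mid()+MakeLower() substring with a single in-place walk: copy lowercased ASCII until the terminating NUL, and abandon the extension lookup at the first non-ASCII character. The same walk, isolated (a sketch; AsciiLowerExt is a hypothetical name):

    #include <string>

    // Returns true and fills `out` if the tail of `name` starting at
    // extPos is pure ASCII; returns false (no extension group) otherwise.
    static bool AsciiLowerExt(const wchar_t *name, unsigned extPos, std::string &out)
    {
      for (unsigned pos = extPos;; pos++)
      {
        wchar_t c = name[pos];
        if (c == 0)
          return true;            // whole extension captured
        if (c >= 0x80)
          return false;           // non-ASCII: bail out, as the diff does
        char lc = (char)c;
        if (lc >= 'A' && lc <= 'Z')
          lc = (char)(lc - 'A' + 'a');
        out += lc;                // lowercase ASCII on the fly
      }
    }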
+struct CSortParam
+{
+ // const CObjectVector<CTreeFolder> *TreeFolders;
+ bool SortByType;
+};
+
+/*
+ we sort files in this order:
+ - Dir.NonAnti (name sorted)
+ - alt streams
+ - Dirs
+ - Dir.Anti (reverse name sorted)
+*/
+
+
static int CompareUpdateItems(const CRefItem *p1, const CRefItem *p2, void *param)
{
const CRefItem &a1 = *p1;
const CRefItem &a2 = *p2;
const CUpdateItem &u1 = *a1.UpdateItem;
const CUpdateItem &u2 = *a2.UpdateItem;
- int n;
+
+ /*
+ if (u1.IsAltStream != u2.IsAltStream)
+ return u1.IsAltStream ? 1 : -1;
+ */
+
+ // Actually there are no dirs at this point; they were stored in other steps.
+ // So this code may be unused?
if (u1.IsDir != u2.IsDir)
- return (u1.IsDir) ? 1 : -1;
+ return u1.IsDir ? 1 : -1;
if (u1.IsDir)
{
if (u1.IsAnti != u2.IsAnti)
return (u1.IsAnti ? 1 : -1);
- n = MyStringCompareNoCase(u1.Name, u2.Name);
+ int n = CompareFileNames(u1.Name, u2.Name);
return -n;
}
- bool sortByType = *(bool *)param;
+
+ // bool sortByType = *(bool *)param;
+ const CSortParam *sortParam = (const CSortParam *)param;
+ bool sortByType = sortParam->SortByType;
if (sortByType)
{
RINOZ_COMP(a1.ExtensionIndex, a2.ExtensionIndex);
- RINOZ(MyStringCompareNoCase(u1.Name + a1.ExtensionPos, u2.Name + a2.ExtensionPos));
- RINOZ(MyStringCompareNoCase(u1.Name + a1.NamePos, u2.Name + a2.NamePos));
+ RINOZ(CompareFileNames(u1.Name.Ptr(a1.ExtensionPos), u2.Name.Ptr(a2.ExtensionPos)));
+ RINOZ(CompareFileNames(u1.Name.Ptr(a1.NamePos), u2.Name.Ptr(a2.NamePos)));
if (!u1.MTimeDefined && u2.MTimeDefined) return 1;
if (u1.MTimeDefined && !u2.MTimeDefined) return -1;
if (u1.MTimeDefined && u2.MTimeDefined) RINOZ_COMP(u1.MTime, u2.MTime);
RINOZ_COMP(u1.Size, u2.Size);
}
- return MyStringCompareNoCase(u1.Name, u2.Name);
+ /*
+ int par1 = a1.UpdateItem->ParentFolderIndex;
+ int par2 = a2.UpdateItem->ParentFolderIndex;
+ const CTreeFolder &tf1 = (*sortParam->TreeFolders)[par1];
+ const CTreeFolder &tf2 = (*sortParam->TreeFolders)[par2];
+
+ int b1 = tf1.SortIndex, e1 = tf1.SortIndexEnd;
+ int b2 = tf2.SortIndex, e2 = tf2.SortIndexEnd;
+ if (b1 < b2)
+ {
+ if (e1 <= b2)
+ return -1;
+ // p2 in p1
+ int par = par2;
+ for (;;)
+ {
+ const CTreeFolder &tf = (*sortParam->TreeFolders)[par];
+ par = tf.Parent;
+ if (par == par1)
+ {
+ RINOZ(CompareFileNames(u1.Name, tf.Name));
+ break;
+ }
+ }
+ }
+ else if (b2 < b1)
+ {
+ if (e2 <= b1)
+ return 1;
+ // p1 in p2
+ int par = par1;
+ for (;;)
+ {
+ const CTreeFolder &tf = (*sortParam->TreeFolders)[par];
+ par = tf.Parent;
+ if (par == par2)
+ {
+ RINOZ(CompareFileNames(tf.Name, u2.Name));
+ break;
+ }
+ }
+ }
+ */
+ // RINOZ_COMP(a1.UpdateItem->ParentSortIndex, a2.UpdateItem->ParentSortIndex);
+ RINOK(CompareFileNames(u1.Name, u2.Name));
+ RINOZ_COMP(a1.UpdateItem->IndexInClient, a2.UpdateItem->IndexInClient);
+ RINOZ_COMP(a1.UpdateItem->IndexInArchive, a2.UpdateItem->IndexInArchive);
+ return 0;
}
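
Note the new tail of CompareUpdateItems: after the name comparison it falls through to IndexInClient and IndexInArchive, so items with equal names no longer compare equal. Ending a comparator with a per-item index makes the order total and therefore deterministic even under a non-stable sort. The idea in miniature (hypothetical Ref type):

    #include <string>

    struct Ref { std::wstring Name; unsigned IndexInClient; };

    static bool LessRef(const Ref &a, const Ref &b)
    {
      int n = a.Name.compare(b.Name);
      if (n != 0)
        return n < 0;
      return a.IndexInClient < b.IndexInClient; // unique key: total order
    }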
struct CSolidGroup
@@ -313,19 +398,19 @@ struct CSolidGroup
CRecordVector<UInt32> Indices;
};
-static wchar_t *g_ExeExts[] =
+static const wchar_t *g_ExeExts[] =
{
- L"dll",
- L"exe",
- L"ocx",
- L"sfx",
- L"sys"
+ L"dll"
+ , L"exe"
+ , L"ocx"
+ , L"sfx"
+ , L"sys"
};
-static bool IsExeExt(const UString &ext)
+static bool IsExeExt(const wchar_t *ext)
{
- for (int i = 0; i < sizeof(g_ExeExts) / sizeof(g_ExeExts[0]); i++)
- if (ext.CompareNoCase(g_ExeExts[i]) == 0)
+ for (int i = 0; i < ARRAY_SIZE(g_ExeExts); i++)
+ if (MyStringCompareNoCase(ext, g_ExeExts[i]) == 0)
return true;
return false;
}
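
Two small hardening changes land here: the table becomes const-correct (string literals are const wchar_t *), and the hand-written sizeof division becomes ARRAY_SIZE, so the loop bound can never drift from the table. The pattern, self-contained (ARRAY_SIZE_SKETCH stands in for 7-Zip's ARRAY_SIZE; std::wcscmp is case-sensitive, unlike the MyStringCompareNoCase used above):

    #include <cwchar>

    #define ARRAY_SIZE_SKETCH(a) (sizeof(a) / sizeof((a)[0]))

    static const wchar_t *kExeExts[] = { L"dll", L"exe", L"ocx", L"sfx", L"sys" };

    static bool IsExeExtSketch(const wchar_t *ext)
    {
      for (unsigned i = 0; i < ARRAY_SIZE_SKETCH(kExeExts); i++)
        if (std::wcscmp(ext, kExeExts[i]) == 0) // original compares case-insensitively
          return true;
      return false;
    }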
@@ -402,7 +487,6 @@ static void MakeExeMethod(CCompressionMethodMode &mode,
static void FromUpdateItemToFileItem(const CUpdateItem &ui,
CFileItem &file, CFileItem2 &file2)
{
- file.Name = NItemName::MakeLegalName(ui.Name);
if (ui.AttribDefined)
file.SetAttrib(ui.Attrib);
@@ -410,11 +494,13 @@ static void FromUpdateItemToFileItem(const CUpdateItem &ui,
file2.ATime = ui.ATime; file2.ATimeDefined = ui.ATimeDefined;
file2.MTime = ui.MTime; file2.MTimeDefined = ui.MTimeDefined;
file2.IsAnti = ui.IsAnti;
+ // file2.IsAux = false;
file2.StartPosDefined = false;
file.Size = ui.Size;
file.IsDir = ui.IsDir;
file.HasStream = ui.HasStream();
+ // file.IsAltStream = ui.IsAltStream;
}
class CFolderOutStream2:
@@ -423,11 +509,11 @@ class CFolderOutStream2:
{
COutStreamWithCRC *_crcStreamSpec;
CMyComPtr<ISequentialOutStream> _crcStream;
- const CArchiveDatabaseEx *_db;
+ const CDbEx *_db;
const CBoolVector *_extractStatuses;
CMyComPtr<ISequentialOutStream> _outStream;
UInt32 _startIndex;
- int _currentIndex;
+ unsigned _currentIndex;
bool _fileIsOpen;
UInt64 _rem;
@@ -444,7 +530,7 @@ public:
_crcStream = _crcStreamSpec;
}
- HRESULT Init(const CArchiveDatabaseEx *db, UInt32 startIndex,
+ HRESULT Init(const CDbEx *db, UInt32 startIndex,
const CBoolVector *extractStatuses, ISequentialOutStream *outStream);
void ReleaseOutStream();
HRESULT CheckFinishedState() const { return (_currentIndex == _extractStatuses->Size()) ? S_OK: E_FAIL; }
@@ -452,7 +538,7 @@ public:
STDMETHOD(Write)(const void *data, UInt32 size, UInt32 *processedSize);
};
-HRESULT CFolderOutStream2::Init(const CArchiveDatabaseEx *db, UInt32 startIndex,
+HRESULT CFolderOutStream2::Init(const CDbEx *db, UInt32 startIndex,
const CBoolVector *extractStatuses, ISequentialOutStream *outStream)
{
_db = db;
@@ -551,13 +637,13 @@ public:
CMyComPtr<ISequentialOutStream> Fos;
UInt64 StartPos;
- const UInt64 *PackSizes;
- const CFolder *Folder;
+ const CFolders *Folders;
+ int FolderIndex;
#ifndef _NO_CRYPTO
- CMyComPtr<ICryptoGetTextPassword> GetTextPassword;
+ CMyComPtr<ICryptoGetTextPassword> getTextPassword;
#endif
- DECL_EXTERNAL_CODECS_VARS
+ DECL_EXTERNAL_CODECS_LOC_VARS2;
CDecoder Decoder;
#ifndef _7ZIP_ST
@@ -585,21 +671,20 @@ void CThreadDecoder::Execute()
try
{
#ifndef _NO_CRYPTO
- bool passwordIsDefined;
+ bool isEncrypted = false;
+ bool passwordIsDefined = false;
#endif
+
Result = Decoder.Decode(
- EXTERNAL_CODECS_VARS
+ EXTERNAL_CODECS_LOC_VARS
InStream,
StartPos,
- PackSizes,
- *Folder,
+ *Folders, FolderIndex,
Fos,
NULL
- #ifndef _NO_CRYPTO
- , GetTextPassword, passwordIsDefined
- #endif
+ _7Z_DECODER_CRYPRO_VARS
#ifndef _7ZIP_ST
- , MtMode, NumThreads
+ , MtMode, NumThreads
#endif
);
}
@@ -614,7 +699,7 @@ void CThreadDecoder::Execute()
bool static Is86FilteredFolder(const CFolder &f)
{
- for (int i = 0; i < f.Coders.Size(); i++)
+ FOR_VECTOR(i, f.Coders)
{
CMethodId m = f.Coders[i].MethodID;
if (m == k_BCJ || m == k_BCJ2)
@@ -650,13 +735,26 @@ static bool IsEncryptedGroup(int group) { return (group & 2) != 0; }
static int GetGroupIndex(bool encrypted, int bcjFiltered)
{ return (encrypted ? 2 : 0) + (bcjFiltered ? 1 : 0); }
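
GetGroupIndex packs the two stream properties into a 2-bit code: bit 1 for encryption, bit 0 for BCJ filtering, which yields the four buckets iterated later as kNumGroupsMax. A sketch of the encoding and its inverse predicates (the value 4 and the name Is86Group are assumptions; only IsEncryptedGroup is visible in this diff):

    static const int kNumGroupsMaxSketch = 4;   // 2 bits => 4 buckets (assumed value)

    static int GetGroupIndexSketch(bool encrypted, bool bcjFiltered)
      { return (encrypted ? 2 : 0) + (bcjFiltered ? 1 : 0); }

    static bool IsEncryptedGroupSketch(int group) { return (group & 2) != 0; }
    static bool Is86GroupSketch(int group)        { return (group & 1) != 0; }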
+static void GetFile(const CDatabase &inDb, int index, CFileItem &file, CFileItem2 &file2)
+{
+ file = inDb.Files[index];
+ file2.CTimeDefined = inDb.CTime.GetItem(index, file2.CTime);
+ file2.ATimeDefined = inDb.ATime.GetItem(index, file2.ATime);
+ file2.MTimeDefined = inDb.MTime.GetItem(index, file2.MTime);
+ file2.StartPosDefined = inDb.StartPos.GetItem(index, file2.StartPos);
+ file2.IsAnti = inDb.IsItemAnti(index);
+ // file2.IsAux = inDb.IsItemAux(index);
+}
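
The new GetFile helper reflects the reworked database layout: per-file timestamps and start positions now live in optional "columns" whose GetItem(index, value) reports whether the slot was defined. A sketch of that column shape (names and layout are assumptions, not the actual CDatabase types):

    #include <cstdint>
    #include <vector>

    struct SparseColumn
    {
      std::vector<bool> Defined;      // per-index "has a value" flag
      std::vector<uint64_t> Vals;     // parallel values; meaningless when !Defined[i]

      bool GetItem(size_t index, uint64_t &value) const
      {
        if (index >= Defined.size() || !Defined[index])
          return false;
        value = Vals[index];
        return true;
      }
    };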
+
HRESULT Update(
DECL_EXTERNAL_CODECS_LOC_VARS
IInStream *inStream,
- const CArchiveDatabaseEx *db,
+ const CDbEx *db,
const CObjectVector<CUpdateItem> &updateItems,
+ // const CObjectVector<CTreeFolder> &treeFolders,
+ // const CUniqBlocks &secureBlocks,
COutArchive &archive,
- CArchiveDatabase &newDatabase,
+ CArchiveDatabaseOut &newDatabase,
ISequentialOutStream *seqOutStream,
IArchiveUpdateCallback *updateCallback,
const CUpdateOptions &options
@@ -668,6 +766,9 @@ HRESULT Update(
UInt64 numSolidFiles = options.NumSolidFiles;
if (numSolidFiles == 0)
numSolidFiles = 1;
+
+ // size_t totalSecureDataSize = (size_t)secureBlocks.GetTotalSizeInBytes();
+
/*
CMyComPtr<IOutStream> outStream;
RINOK(seqOutStream->QueryInterface(IID_IOutStream, (void **)&outStream));
@@ -675,23 +776,23 @@ HRESULT Update(
return E_NOTIMPL;
*/
- UInt64 startBlockSize = db != 0 ? db->ArchiveInfo.StartPosition: 0;
+ UInt64 startBlockSize = db != 0 ? db->ArcInfo.StartPosition: 0;
if (startBlockSize > 0 && !options.RemoveSfxBlock)
{
RINOK(WriteRange(inStream, seqOutStream, 0, startBlockSize, NULL));
}
- CRecordVector<int> fileIndexToUpdateIndexMap;
+ CIntArr fileIndexToUpdateIndexMap;
CRecordVector<CFolderRepack> folderRefs;
UInt64 complexity = 0;
UInt64 inSizeForReduce2 = 0;
bool needEncryptedRepack = false;
if (db != 0)
{
- fileIndexToUpdateIndexMap.Reserve(db->Files.Size());
- int i;
+ fileIndexToUpdateIndexMap.Alloc(db->Files.Size());
+ unsigned i;
for (i = 0; i < db->Files.Size(); i++)
- fileIndexToUpdateIndexMap.Add(-1);
+ fileIndexToUpdateIndexMap[i] = -1;
for (i = 0; i < updateItems.Size(); i++)
{
@@ -700,7 +801,7 @@ HRESULT Update(
fileIndexToUpdateIndexMap[index] = i;
}
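
These two loops build the inverse of the update-item to archive-index relation: an array sized to every file in the old archive, -1 everywhere, then overwritten wherever an update item points back into the archive (the change here swaps a growable CRecordVector for a preallocated CIntArr). The same construction with standard containers, as a sketch:

    #include <vector>

    // itemToArc[i] is the archive index referenced by update item i, or -1.
    static std::vector<int> BuildInverseMap(size_t numArcFiles,
                                            const std::vector<int> &itemToArc)
    {
      std::vector<int> arcToItem(numArcFiles, -1); // -1 = not touched by this update
      for (size_t i = 0; i < itemToArc.size(); i++)
      {
        int arc = itemToArc[i];
        if (arc >= 0)
          arcToItem[(size_t)arc] = (int)i;
      }
      return arcToItem;
    }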
- for (i = 0; i < db->Folders.Size(); i++)
+ for (i = 0; i < (int)db->NumFolders; i++)
{
CNum indexInFolder = 0;
CNum numCopyItems = 0;
@@ -727,7 +828,8 @@ HRESULT Update(
CFolderRepack rep;
rep.FolderIndex = i;
rep.NumCopyFiles = numCopyItems;
- const CFolder &f = db->Folders[i];
+ CFolder f;
+ db->ParseFolderInfo(i, f);
bool isEncrypted = f.IsEncrypted();
rep.Group = GetGroupIndex(isEncrypted, Is86FilteredFolder(f));
folderRefs.Add(rep);
@@ -746,7 +848,7 @@ HRESULT Update(
}
UInt64 inSizeForReduce = 0;
- int i;
+ unsigned i;
for (i = 0; i < updateItems.Size(); i++)
{
const CUpdateItem &ui = updateItems[i];
@@ -776,18 +878,17 @@ HRESULT Update(
if (!folderRefs.IsEmpty())
{
#ifdef EXTERNAL_CODECS
- threadDecoder._codecsInfo = codecsInfo;
- threadDecoder._externalCodecs = *externalCodecs;
+ threadDecoder.__externalCodecs = __externalCodecs;
#endif
RINOK(threadDecoder.Create());
}
CObjectVector<CSolidGroup> groups;
for (i = 0; i < kNumGroupsMax; i++)
- groups.Add(CSolidGroup());
+ groups.AddNew();
{
- // ---------- Split files to 2 groups ----------
+ // ---------- Split files to groups ----------
bool useFilters = options.UseFilters;
const CCompressionMethodMode &method = *options.Method;
@@ -803,7 +904,7 @@ HRESULT Update(
{
int dotPos = ui.Name.ReverseFind(L'.');
if (dotPos >= 0)
- filteredGroup = IsExeExt(ui.Name.Mid(dotPos + 1));
+ filteredGroup = IsExeExt(ui.Name.Ptr(dotPos + 1));
}
groups[GetGroupIndex(method.PasswordIsDefined, filteredGroup)].Indices.Add(i);
}
@@ -815,7 +916,7 @@ HRESULT Update(
if (needEncryptedRepack)
{
getPasswordSpec = new CCryptoGetTextPassword;
- threadDecoder.GetTextPassword = getPasswordSpec;
+ threadDecoder.getTextPassword = getPasswordSpec;
if (options.Method->PasswordIsDefined)
getPasswordSpec->Password = options.Method->Password;
@@ -825,18 +926,111 @@ HRESULT Update(
return E_NOTIMPL;
CMyComBSTR password;
RINOK(getDecoderPassword->CryptoGetTextPassword(&password));
- getPasswordSpec->Password = password;
+ if ((BSTR)password)
+ getPasswordSpec->Password = password;
}
}
#endif
+
// ---------- Compress ----------
RINOK(archive.Create(seqOutStream, false));
RINOK(archive.SkipPrefixArchiveHeader());
- int folderRefIndex = 0;
+ /*
+ CIntVector treeFolderToArcIndex;
+ treeFolderToArcIndex.Reserve(treeFolders.Size());
+ for (i = 0; i < treeFolders.Size(); i++)
+ treeFolderToArcIndex.Add(-1);
+ // ---------- Write Tree (only AUX dirs) ----------
+ for (i = 1; i < treeFolders.Size(); i++)
+ {
+ const CTreeFolder &treeFolder = treeFolders[i];
+ CFileItem file;
+ CFileItem2 file2;
+ file2.Init();
+ int secureID = 0;
+ if (treeFolder.UpdateItemIndex < 0)
+ {
+ // we can store a virtual dir item without attrib, but we want all items to have attrib.
+ file.SetAttrib(FILE_ATTRIBUTE_DIRECTORY);
+ file2.IsAux = true;
+ }
+ else
+ {
+ const CUpdateItem &ui = updateItems[treeFolder.UpdateItemIndex];
+ // if item is not dir, then it's parent for alt streams.
+ // we will write such items later
+ if (!ui.IsDir)
+ continue;
+ secureID = ui.SecureIndex;
+ if (ui.NewProps)
+ FromUpdateItemToFileItem(ui, file, file2);
+ else
+ GetFile(*db, ui.IndexInArchive, file, file2);
+ }
+ file.Size = 0;
+ file.HasStream = false;
+ file.IsDir = true;
+ file.Parent = treeFolder.Parent;
+
+ treeFolderToArcIndex[i] = newDatabase.Files.Size();
+ newDatabase.AddFile(file, file2, treeFolder.Name);
+
+ if (totalSecureDataSize != 0)
+ newDatabase.SecureIDs.Add(secureID);
+ }
+ */
+
+ {
+ /* ---------- Write non-AUX dirs and Empty files ---------- */
+ CRecordVector<int> emptyRefs;
+ for (i = 0; i < updateItems.Size(); i++)
+ {
+ const CUpdateItem &ui = updateItems[i];
+ if (ui.NewData)
+ {
+ if (ui.HasStream())
+ continue;
+ }
+ else if (ui.IndexInArchive != -1 && db->Files[ui.IndexInArchive].HasStream)
+ continue;
+ /*
+ if (ui.TreeFolderIndex >= 0)
+ continue;
+ */
+ emptyRefs.Add(i);
+ }
+ emptyRefs.Sort(CompareEmptyItems, (void *)&updateItems);
+ for (i = 0; i < emptyRefs.Size(); i++)
+ {
+ const CUpdateItem &ui = updateItems[emptyRefs[i]];
+ CFileItem file;
+ CFileItem2 file2;
+ UString name;
+ if (ui.NewProps)
+ {
+ FromUpdateItemToFileItem(ui, file, file2);
+ name = ui.Name;
+ }
+ else
+ {
+ GetFile(*db, ui.IndexInArchive, file, file2);
+ name = db->GetName(ui.IndexInArchive);
+ }
+
+ /*
+ if (totalSecureDataSize != 0)
+ newDatabase.SecureIDs.Add(ui.SecureIndex);
+ file.Parent = ui.ParentFolderIndex;
+ */
+ newDatabase.AddFile(file, file2, name);
+ }
+ }
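
This added block is the "write empty entries first" pass; the old code ran the equivalent loop last (its former location is deleted further down, in the "Write Folders & Empty Files" hunk). It collects every item that carries no data stream, sorts with CompareEmptyItems, and emits them before any packed streams. The selection step, isolated (a sketch with stand-in types):

    #include <vector>

    struct UpdItem { bool NewData; bool HasStream; int IndexInArchive; };

    // arcHasStream[k]: does archive file k carry a data stream?
    static std::vector<int> CollectEmptyRefs(const std::vector<UpdItem> &items,
                                             const std::vector<bool> &arcHasStream)
    {
      std::vector<int> emptyRefs;
      for (size_t i = 0; i < items.size(); i++)
      {
        const UpdItem &ui = items[i];
        if (ui.NewData)
        {
          if (ui.HasStream)
            continue;            // new content with a stream: handled later
        }
        else if (ui.IndexInArchive != -1 && arcHasStream[(size_t)ui.IndexInArchive])
          continue;              // existing entry keeps its stream: handled later
        emptyRefs.push_back((int)i);
      }
      return emptyRefs;          // caller sorts these with CompareEmptyItems
    }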
+
+ unsigned folderRefIndex = 0;
lps->ProgressOffset = 0;
for (int groupIndex = 0; groupIndex < kNumGroupsMax; groupIndex++)
@@ -879,15 +1073,20 @@ HRESULT Update(
db->GetFolderStreamPos(folderIndex, 0), packSize, progress));
lps->ProgressOffset += packSize;
- const CFolder &folder = db->Folders[folderIndex];
- CNum startIndex = db->FolderStartPackStreamIndex[folderIndex];
- for (int j = 0; j < folder.PackStreams.Size(); j++)
+ CFolder &folder = newDatabase.Folders.AddNew();
+ db->ParseFolderInfo(folderIndex, folder);
+ CNum startIndex = db->FoStartPackStreamIndex[folderIndex];
+ for (unsigned j = 0; j < folder.PackStreams.Size(); j++)
{
- newDatabase.PackSizes.Add(db->PackSizes[startIndex + j]);
+ newDatabase.PackSizes.Add(db->GetStreamPackSize(startIndex + j));
// newDatabase.PackCRCsDefined.Add(db.PackCRCsDefined[startIndex + j]);
// newDatabase.PackCRCs.Add(db.PackCRCs[startIndex + j]);
}
- newDatabase.Folders.Add(folder);
+
+ UInt32 indexStart = db->FoToCoderUnpackSizes[folderIndex];
+ UInt32 indexEnd = db->FoToCoderUnpackSizes[folderIndex + 1];
+ for (; indexStart < indexEnd; indexStart++)
+ newDatabase.CoderUnpackSizes.Add(db->CoderUnpackSizes[indexStart]);
}
else
{
@@ -909,8 +1108,8 @@ HRESULT Update(
extractStatuses.Add(needExtract);
}
- int startPackIndex = newDatabase.PackSizes.Size();
- CFolder newFolder;
+ unsigned startPackIndex = newDatabase.PackSizes.Size();
+ UInt64 curUnpackSize;
{
CMyComPtr<ISequentialInStream> sbInStream;
{
@@ -921,16 +1120,17 @@ HRESULT Update(
}
threadDecoder.InStream = inStream;
- threadDecoder.Folder = &db->Folders[folderIndex];
- threadDecoder.StartPos = db->GetFolderStreamPos(folderIndex, 0);
- threadDecoder.PackSizes = &db->PackSizes[db->FolderStartPackStreamIndex[folderIndex]];
+ threadDecoder.Folders = (const CFolders *)db;
+ threadDecoder.FolderIndex = folderIndex;
+ threadDecoder.StartPos = db->ArcInfo.DataStartPosition; // db->GetFolderStreamPos(folderIndex, 0);
threadDecoder.Start();
RINOK(encoder.Encode(
- EXTERNAL_CODECS_LOC_VARS
- sbInStream, NULL, &inSizeForReduce, newFolder,
- archive.SeqStream, newDatabase.PackSizes, progress));
+ EXTERNAL_CODECS_LOC_VARS
+ sbInStream, NULL, &inSizeForReduce,
+ newDatabase.Folders.AddNew(), newDatabase.CoderUnpackSizes, curUnpackSize,
+ archive.SeqStream, newDatabase.PackSizes, progress));
threadDecoder.WaitExecuteFinish();
}
@@ -939,9 +1139,7 @@ HRESULT Update(
for (; startPackIndex < newDatabase.PackSizes.Size(); startPackIndex++)
lps->OutSize += newDatabase.PackSizes[startPackIndex];
- lps->InSize += newFolder.GetUnpackSize();
-
- newDatabase.Folders.Add(newFolder);
+ lps->InSize += curUnpackSize;
}
newDatabase.NumUnpackStreamsVector.Add(rep.NumCopyFiles);
@@ -953,7 +1151,8 @@ HRESULT Update(
{
CFileItem file;
CFileItem2 file2;
- db->GetFile(fi, file, file2);
+ GetFile(*db, fi, file, file2);
+ UString name = db->GetName(fi);
if (file.HasStream)
{
indexInFolder++;
@@ -972,30 +1171,40 @@ HRESULT Update(
uf.CrcDefined = file.CrcDefined;
uf.HasStream = file.HasStream;
file = uf;
+ name = ui.Name;
}
- newDatabase.AddFile(file, file2);
+ /*
+ file.Parent = ui.ParentFolderIndex;
+ if (ui.TreeFolderIndex >= 0)
+ treeFolderToArcIndex[ui.TreeFolderIndex] = newDatabase.Files.Size();
+ if (totalSecureDataSize != 0)
+ newDatabase.SecureIDs.Add(ui.SecureIndex);
+ */
+ newDatabase.AddFile(file, file2, name);
}
}
}
}
- int numFiles = group.Indices.Size();
+ unsigned numFiles = group.Indices.Size();
if (numFiles == 0)
continue;
CRecordVector<CRefItem> refItems;
- refItems.Reserve(numFiles);
+ refItems.ClearAndSetSize(numFiles);
bool sortByType = (numSolidFiles > 1);
for (i = 0; i < numFiles; i++)
- refItems.Add(CRefItem(group.Indices[i], updateItems[group.Indices[i]], sortByType));
- refItems.Sort(CompareUpdateItems, (void *)&sortByType);
+ refItems[i] = CRefItem(group.Indices[i], updateItems[group.Indices[i]], sortByType);
+ CSortParam sortParam;
+ // sortParam.TreeFolders = &treeFolders;
+ sortParam.SortByType = sortByType;
+ refItems.Sort(CompareUpdateItems, (void *)&sortParam);
- CRecordVector<UInt32> indices;
- indices.Reserve(numFiles);
+ CObjArray<UInt32> indices(numFiles);
for (i = 0; i < numFiles; i++)
{
UInt32 index = refItems[i].Index;
- indices.Add(index);
+ indices[i] = index;
/*
const CUpdateItem &ui = updateItems[index];
CFileItem file;
@@ -1027,7 +1236,7 @@ HRESULT Update(
if (numSubFiles == 0)
prevExtension = ext;
else
- if (ext.CompareNoCase(prevExtension) != 0)
+ if (!ext.IsEqualToNoCase(prevExtension))
break;
}
}
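
The loop this hunk touches decides how many consecutive, already type-sorted files go into one solid folder: with sorting by type enabled, the run is cut at the first extension that differs case-insensitively, so unrelated file types do not share a compression dictionary. A sketch of just that cut rule (the real loop also caps the run by numSolidFiles and numSolidBytes):

    #include <cctype>
    #include <string>
    #include <vector>

    static bool EqNoCase(const std::string &a, const std::string &b)
    {
      if (a.size() != b.size())
        return false;
      for (size_t i = 0; i != a.size(); i++)
        if (std::tolower((unsigned char)a[i]) != std::tolower((unsigned char)b[i]))
          return false;
      return true;
    }

    // Number of leading entries sharing the first entry's extension.
    static size_t CountSameExtRun(const std::vector<std::string> &exts)
    {
      size_t n = 0;
      while (n != exts.size() && (n == 0 || EqNoCase(exts[n], exts[0])))
        n++;
      return n;
    }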
@@ -1038,34 +1247,39 @@ HRESULT Update(
CMyComPtr<ISequentialInStream> solidInStream(inStreamSpec);
inStreamSpec->Init(updateCallback, &indices[i], numSubFiles);
- CFolder folderItem;
-
- int startPackIndex = newDatabase.PackSizes.Size();
+ unsigned startPackIndex = newDatabase.PackSizes.Size();
+ UInt64 curFolderUnpackSize;
RINOK(encoder.Encode(
EXTERNAL_CODECS_LOC_VARS
- solidInStream, NULL, &inSizeForReduce, folderItem,
+ solidInStream, NULL, &inSizeForReduce,
+ newDatabase.Folders.AddNew(), newDatabase.CoderUnpackSizes, curFolderUnpackSize,
archive.SeqStream, newDatabase.PackSizes, progress));
for (; startPackIndex < newDatabase.PackSizes.Size(); startPackIndex++)
lps->OutSize += newDatabase.PackSizes[startPackIndex];
- lps->InSize += folderItem.GetUnpackSize();
+ lps->InSize += curFolderUnpackSize;
// for ()
// newDatabase.PackCRCsDefined.Add(false);
// newDatabase.PackCRCs.Add(0);
- newDatabase.Folders.Add(folderItem);
-
CNum numUnpackStreams = 0;
for (int subIndex = 0; subIndex < numSubFiles; subIndex++)
{
const CUpdateItem &ui = updateItems[indices[i + subIndex]];
CFileItem file;
CFileItem2 file2;
+ UString name;
if (ui.NewProps)
+ {
FromUpdateItemToFileItem(ui, file, file2);
+ name = ui.Name;
+ }
else
- db->GetFile(ui.IndexInArchive, file, file2);
+ {
+ GetFile(*db, ui.IndexInArchive, file, file2);
+ name = db->GetName(ui.IndexInArchive);
+ }
if (file2.IsAnti || file.IsDir)
return E_FAIL;
@@ -1092,7 +1306,14 @@ HRESULT Update(
file.CrcDefined = false;
file.HasStream = false;
}
- newDatabase.AddFile(file, file2);
+ /*
+ file.Parent = ui.ParentFolderIndex;
+ if (ui.TreeFolderIndex >= 0)
+ treeFolderToArcIndex[ui.TreeFolderIndex] = newDatabase.Files.Size();
+ if (totalSecureDataSize != 0)
+ newDatabase.SecureIDs.Add(ui.SecureIndex);
+ */
+ newDatabase.AddFile(file, file2, name);
}
// numUnpackStreams = 0 is very bad case for locked files
// v3.13 doesn't understand it.
@@ -1112,36 +1333,28 @@ HRESULT Update(
groups.ClearAndFree();
*/
+ /*
+ for (i = 0; i < newDatabase.Files.Size(); i++)
{
- // ---------- Write Folders & Empty Files ----------
-
- CRecordVector<int> emptyRefs;
- for (i = 0; i < updateItems.Size(); i++)
- {
- const CUpdateItem &ui = updateItems[i];
- if (ui.NewData)
- {
- if (ui.HasStream())
- continue;
- }
- else if (ui.IndexInArchive != -1 && db->Files[ui.IndexInArchive].HasStream)
- continue;
- emptyRefs.Add(i);
- }
- emptyRefs.Sort(CompareEmptyItems, (void *)&updateItems);
- for (i = 0; i < emptyRefs.Size(); i++)
+ CFileItem &file = newDatabase.Files[i];
+ file.Parent = treeFolderToArcIndex[file.Parent];
+ }
+
+ if (totalSecureDataSize != 0)
+ {
+ newDatabase.SecureBuf.SetCapacity(totalSecureDataSize);
+ size_t pos = 0;
+ newDatabase.SecureSizes.Reserve(secureBlocks.Sorted.Size());
+ for (i = 0; i < secureBlocks.Sorted.Size(); i++)
{
- const CUpdateItem &ui = updateItems[emptyRefs[i]];
- CFileItem file;
- CFileItem2 file2;
- if (ui.NewProps)
- FromUpdateItemToFileItem(ui, file, file2);
- else
- db->GetFile(ui.IndexInArchive, file, file2);
- newDatabase.AddFile(file, file2);
+ const CByteBuffer &buf = secureBlocks.Bufs[secureBlocks.Sorted[i]];
+ size_t size = buf.GetCapacity();
+ memcpy(newDatabase.SecureBuf + pos, buf, size);
+ newDatabase.SecureSizes.Add((UInt32)size);
+ pos += size;
}
}
-
+ */
newDatabase.ReserveDown();
return S_OK;
}