Trim memory usage of IReadOnlyPackage implementations.

These implementations are often backed by a Dictionary, and tend to live a long time after being loaded. Ensure TrimExcess is called on the backing dictionaries to reduce the long-term memory usage. In some cases, we can also preallocate the dictionary capacity for efficiency.
This commit is contained in:
RoosterDragon
2024-04-01 14:29:37 +01:00
committed by Gustas
parent ed5c7bb836
commit a4bb58007f
6 changed files with 23 additions and 11 deletions

View File

@@ -25,7 +25,7 @@ namespace OpenRA.Mods.Cnc.FileSystem
public string Name { get; } public string Name { get; }
public IEnumerable<string> Contents => index.Keys; public IEnumerable<string> Contents => index.Keys;
readonly Dictionary<string, Entry> index = new(); readonly Dictionary<string, Entry> index;
readonly Stream s; readonly Stream s;
public BigFile(Stream s, string filename) public BigFile(Stream s, string filename)
@@ -48,6 +48,7 @@ namespace OpenRA.Mods.Cnc.FileSystem
// and we don't have to try seeking there since the entries typically start next in EA's .big files. // and we don't have to try seeking there since the entries typically start next in EA's .big files.
s.ReadUInt32(); s.ReadUInt32();
index = new Dictionary<string, Entry>((int)entryCount);
for (var i = 0; i < entryCount; i++) for (var i = 0; i < entryCount; i++)
{ {
var entry = new Entry(s); var entry = new Entry(s);

View File

@@ -51,7 +51,7 @@ namespace OpenRA.Mods.Cnc.FileSystem
{ {
readonly Stream s; readonly Stream s;
readonly Dictionary<string, (uint Offset, int Length)> contents = new(); readonly Dictionary<string, (uint Offset, int Length)> contents;
public MegFile(Stream s, string filename) public MegFile(Stream s, string filename)
{ {
@@ -84,6 +84,7 @@ namespace OpenRA.Mods.Cnc.FileSystem
throw new Exception("File name table in .meg file inconsistent"); throw new Exception("File name table in .meg file inconsistent");
// Now we load each file entry and associated info // Now we load each file entry and associated info
contents = new Dictionary<string, (uint Offset, int Length)>((int)numFiles);
for (var i = 0; i < numFiles; i++) for (var i = 0; i < numFiles; i++)
{ {
// Ignore flags, crc, index // Ignore flags, crc, index
@@ -94,6 +95,8 @@ namespace OpenRA.Mods.Cnc.FileSystem
contents[filenames[nameIndex]] = (offset, (int)size); contents[filenames[nameIndex]] = (offset, (int)size);
} }
contents.TrimExcess();
if (s.Position != headerSize) if (s.Position != headerSize)
throw new Exception("Expected to be at data start offset"); throw new Exception("Expected to be at data start offset");
} }

View File

@@ -66,8 +66,6 @@ namespace OpenRA.Mods.Cnc.FileSystem
Dictionary<string, PackageEntry> ParseIndex(Dictionary<uint, PackageEntry> entries, string[] globalFilenames) Dictionary<string, PackageEntry> ParseIndex(Dictionary<uint, PackageEntry> entries, string[] globalFilenames)
{ {
var classicIndex = new Dictionary<string, PackageEntry>();
var crcIndex = new Dictionary<string, PackageEntry>();
var allPossibleFilenames = new HashSet<string>(globalFilenames); var allPossibleFilenames = new HashSet<string>(globalFilenames);
// Try and find a local mix database // Try and find a local mix database
@@ -88,6 +86,9 @@ namespace OpenRA.Mods.Cnc.FileSystem
} }
} }
var classicIndex = new Dictionary<string, PackageEntry>(entries.Count);
var crcIndex = new Dictionary<string, PackageEntry>(entries.Count);
foreach (var filename in allPossibleFilenames) foreach (var filename in allPossibleFilenames)
{ {
var classicHash = PackageEntry.HashFilename(filename, PackageHashType.Classic); var classicHash = PackageEntry.HashFilename(filename, PackageHashType.Classic);
@@ -106,6 +107,7 @@ namespace OpenRA.Mods.Cnc.FileSystem
if (unknown > 0) if (unknown > 0)
Log.Write("debug", $"{Name}: failed to resolve filenames for {unknown} unknown hashes"); Log.Write("debug", $"{Name}: failed to resolve filenames for {unknown} unknown hashes");
bestIndex.TrimExcess();
return bestIndex; return bestIndex;
} }

View File

@@ -50,12 +50,11 @@ namespace OpenRA.Mods.Cnc.FileSystem
var length = (next == 0 ? (uint)stream.Length : next) - offset; var length = (next == 0 ? (uint)stream.Length : next) - offset;
// Ignore duplicate files // Ignore duplicate files
if (index.ContainsKey(file)) if (index.TryAdd(file, new Entry { Offset = offset, Length = length, Filename = file }))
continue; offset = next;
index.Add(file, new Entry { Offset = offset, Length = length, Filename = file });
offset = next;
} }
index.TrimExcess();
} }
catch catch
{ {

View File

@@ -37,7 +37,7 @@ namespace OpenRA.Mods.Common.FileSystem
public string Name { get; } public string Name { get; }
public IEnumerable<string> Contents => index.Keys; public IEnumerable<string> Contents => index.Keys;
readonly Dictionary<string, Entry> index = new(); readonly Dictionary<string, Entry> index;
readonly Stream s; readonly Stream s;
readonly long dataStart = 255; readonly long dataStart = 255;
@@ -63,7 +63,8 @@ namespace OpenRA.Mods.Common.FileSystem
s.Position = tocAddress; s.Position = tocAddress;
// Parse directories // Parse directories
var directories = new Dictionary<string, uint>(); var directories = new Dictionary<string, uint>(dirCount);
var totalFileCount = 0;
for (var i = 0; i < dirCount; i++) for (var i = 0; i < dirCount; i++)
{ {
// Parse directory header // Parse directory header
@@ -75,12 +76,16 @@ namespace OpenRA.Mods.Common.FileSystem
// Skip to the end of the chunk // Skip to the end of the chunk
s.Position += chunkSize - nameLength - 6; s.Position += chunkSize - nameLength - 6;
directories.Add(dirName, fileCount); directories.Add(dirName, fileCount);
totalFileCount += fileCount;
} }
// Parse files // Parse files
index = new Dictionary<string, Entry>(totalFileCount);
foreach (var dir in directories) foreach (var dir in directories)
for (var i = 0; i < dir.Value; i++) for (var i = 0; i < dir.Value; i++)
ParseFile(dir.Key); ParseFile(dir.Key);
index.TrimExcess();
} }
catch catch
{ {

View File

@@ -54,6 +54,8 @@ namespace OpenRA.Mods.D2k.PackageLoaders
var length = s.ReadUInt32(); var length = s.ReadUInt32();
index.Add(name, new Entry(offset, length)); index.Add(name, new Entry(offset, length));
} }
index.TrimExcess();
} }
catch catch
{ {