From 2eb5f8772f3e9fe318ebc9a8fb8c48f91fa277c7 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Wed, 9 Aug 2023 17:41:17 +0200 Subject: [PATCH 001/139] Fix BG3 LSJ serialization bugs --- LSLib/LS/Resource.cs | 79 +++++++++++++++++++ .../LS/Resources/LSJ/LSJResourceConverter.cs | 52 ++++++++++-- LSLib/LS/Resources/LSX/LSXReader.cs | 42 +--------- LSLib/LS/Resources/LSX/LSXWriter.cs | 40 +--------- 4 files changed, 129 insertions(+), 84 deletions(-) diff --git a/LSLib/LS/Resource.cs b/LSLib/LS/Resource.cs index 23a82464..3846b635 100644 --- a/LSLib/LS/Resource.cs +++ b/LSLib/LS/Resource.cs @@ -90,6 +90,85 @@ public struct LSBHeader public LSMetadata Metadata; } + public static class AttributeTypeMaps + { + public static Dictionary TypeToId = new Dictionary + { + { "None", NodeAttribute.DataType.DT_None }, + { "uint8", NodeAttribute.DataType.DT_Byte }, + { "int16", NodeAttribute.DataType.DT_Short }, + { "uint16", NodeAttribute.DataType.DT_UShort }, + { "int32", NodeAttribute.DataType.DT_Int }, + { "uint32", NodeAttribute.DataType.DT_UInt }, + { "float", NodeAttribute.DataType.DT_Float }, + { "double", NodeAttribute.DataType.DT_Double }, + { "ivec2", NodeAttribute.DataType.DT_IVec2 }, + { "ivec3", NodeAttribute.DataType.DT_IVec3 }, + { "ivec4", NodeAttribute.DataType.DT_IVec4 }, + { "fvec2", NodeAttribute.DataType.DT_Vec2 }, + { "fvec3", NodeAttribute.DataType.DT_Vec3 }, + { "fvec4", NodeAttribute.DataType.DT_Vec4 }, + { "mat2x2", NodeAttribute.DataType.DT_Mat2 }, + { "mat3x3", NodeAttribute.DataType.DT_Mat3 }, + { "mat3x4", NodeAttribute.DataType.DT_Mat3x4 }, + { "mat4x3", NodeAttribute.DataType.DT_Mat4x3 }, + { "mat4x4", NodeAttribute.DataType.DT_Mat4 }, + { "bool", NodeAttribute.DataType.DT_Bool }, + { "string", NodeAttribute.DataType.DT_String }, + { "path", NodeAttribute.DataType.DT_Path }, + { "FixedString", NodeAttribute.DataType.DT_FixedString }, + { "LSString", NodeAttribute.DataType.DT_LSString }, + { "uint64", NodeAttribute.DataType.DT_ULongLong }, + { "ScratchBuffer", NodeAttribute.DataType.DT_ScratchBuffer }, + { "old_int64", NodeAttribute.DataType.DT_Long }, + { "int8", NodeAttribute.DataType.DT_Int8 }, + { "TranslatedString", NodeAttribute.DataType.DT_TranslatedString }, + { "WString", NodeAttribute.DataType.DT_WString }, + { "LSWString", NodeAttribute.DataType.DT_LSWString }, + { "guid", NodeAttribute.DataType.DT_UUID }, + { "int64", NodeAttribute.DataType.DT_Int64 }, + { "TranslatedFSString", NodeAttribute.DataType.DT_TranslatedFSString }, + }; + + public static Dictionary IdToType = new Dictionary + { + { NodeAttribute.DataType.DT_None, "None" }, + { NodeAttribute.DataType.DT_Byte, "uint8" }, + { NodeAttribute.DataType.DT_Short, "int16" }, + { NodeAttribute.DataType.DT_UShort, "uint16" }, + { NodeAttribute.DataType.DT_Int, "int32" }, + { NodeAttribute.DataType.DT_UInt, "uint32" }, + { NodeAttribute.DataType.DT_Float, "float" }, + { NodeAttribute.DataType.DT_Double, "double" }, + { NodeAttribute.DataType.DT_IVec2, "ivec2" }, + { NodeAttribute.DataType.DT_IVec3, "ivec3" }, + { NodeAttribute.DataType.DT_IVec4, "ivec4" }, + { NodeAttribute.DataType.DT_Vec2, "fvec2" }, + { NodeAttribute.DataType.DT_Vec3, "fvec3" }, + { NodeAttribute.DataType.DT_Vec4, "fvec4" }, + { NodeAttribute.DataType.DT_Mat2, "mat2x2" }, + { NodeAttribute.DataType.DT_Mat3, "mat3x3" }, + { NodeAttribute.DataType.DT_Mat3x4, "mat3x4" }, + { NodeAttribute.DataType.DT_Mat4x3, "mat4x3" }, + { NodeAttribute.DataType.DT_Mat4, "mat4x4" }, + { NodeAttribute.DataType.DT_Bool, "bool" }, + { NodeAttribute.DataType.DT_String, 
"string" }, + { NodeAttribute.DataType.DT_Path, "path" }, + { NodeAttribute.DataType.DT_FixedString, "FixedString" }, + { NodeAttribute.DataType.DT_LSString, "LSString" }, + { NodeAttribute.DataType.DT_ULongLong, "uint64" }, + { NodeAttribute.DataType.DT_ScratchBuffer, "ScratchBuffer" }, + { NodeAttribute.DataType.DT_Long, "old_int64" }, + { NodeAttribute.DataType.DT_Int8, "int8" }, + { NodeAttribute.DataType.DT_TranslatedString, "TranslatedString" }, + { NodeAttribute.DataType.DT_WString, "WString" }, + { NodeAttribute.DataType.DT_LSWString, "LSWString" }, + { NodeAttribute.DataType.DT_UUID, "guid" }, + { NodeAttribute.DataType.DT_Int64, "int64" }, + { NodeAttribute.DataType.DT_TranslatedFSString, "TranslatedFSString" }, + }; + } + public class Resource { public LSMetadata Metadata; diff --git a/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs b/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs index 4508a243..b6705439 100644 --- a/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs +++ b/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs @@ -9,6 +9,8 @@ namespace LSLib.LS { public class LSJResourceConverter : JsonConverter { + private LSMetadata Metadata; + public override bool CanConvert(Type objectType) { return objectType == typeof(Node) @@ -72,7 +74,14 @@ private TranslatedFSString ReadTranslatedFSString(JsonReader reader) { if (key == "value") { - fs.Value = reader.Value.ToString(); + if (reader.Value != null) + { + fs.Value = reader.Value.ToString(); + } + else + { + fs.Value = null; + } } else if (key == "handle") { @@ -145,8 +154,28 @@ private NodeAttribute ReadAttribute(JsonReader reader) { if (key == "type") { - var type = (NodeAttribute.DataType)Convert.ToUInt32(reader.Value); - attribute = new NodeAttribute(type); + uint type; + if (!UInt32.TryParse((string)reader.Value, out type)) + { + type = (uint)AttributeTypeMaps.TypeToId[(string)reader.Value]; + } + + attribute = new NodeAttribute((NodeAttribute.DataType)type); + if (type == (uint)NodeAttribute.DataType.DT_TranslatedString) + { + attribute.Value = new TranslatedString + { + Handle = handle + }; + } + else if (type == (uint)NodeAttribute.DataType.DT_TranslatedFSString) + { + attribute.Value = new TranslatedFSString + { + Handle = handle, + Arguments = fsStringArguments + }; + } } else if (key == "value") { @@ -231,7 +260,12 @@ private NodeAttribute ReadAttribute(JsonReader reader) case NodeAttribute.DataType.DT_TranslatedFSString: { - var fsString = new TranslatedFSString(); + if (attribute.Value == null) + { + attribute.Value = new TranslatedFSString(); + } + + var fsString = (TranslatedFSString)attribute.Value; fsString.Value = reader.Value.ToString(); fsString.Handle = handle; fsString.Arguments = fsStringArguments; @@ -521,6 +555,7 @@ public override object ReadJson(JsonReader reader, Type objectType, object exist private void WriteResource(JsonWriter writer, Resource resource, JsonSerializer serializer) { + Metadata = resource.Metadata; writer.WriteStartObject(); writer.WritePropertyName("save"); @@ -591,7 +626,14 @@ private void WriteNode(JsonWriter writer, Node node, JsonSerializer serializer) writer.WritePropertyName(attribute.Key); writer.WriteStartObject(); writer.WritePropertyName("type"); - writer.WriteValue((int)attribute.Value.Type); + if (Metadata.MajorVersion >= 4) + { + writer.WriteValue(AttributeTypeMaps.IdToType[attribute.Value.Type]); + } + else + { + writer.WriteValue((int)attribute.Value.Type); + } if (attribute.Value.Type != NodeAttribute.DataType.DT_TranslatedString) { diff --git 
a/LSLib/LS/Resources/LSX/LSXReader.cs b/LSLib/LS/Resources/LSX/LSXReader.cs index a72aabd8..497e8e1a 100644 --- a/LSLib/LS/Resources/LSX/LSXReader.cs +++ b/LSLib/LS/Resources/LSX/LSXReader.cs @@ -10,44 +10,6 @@ namespace LSLib.LS { public class LSXReader : IDisposable { - private static Dictionary TypeNames = new Dictionary - { - { "None", NodeAttribute.DataType.DT_None }, - { "uint8", NodeAttribute.DataType.DT_Byte }, - { "int16", NodeAttribute.DataType.DT_Short }, - { "uint16", NodeAttribute.DataType.DT_UShort }, - { "int32", NodeAttribute.DataType.DT_Int }, - { "uint32", NodeAttribute.DataType.DT_UInt }, - { "float", NodeAttribute.DataType.DT_Float }, - { "double", NodeAttribute.DataType.DT_Double }, - { "ivec2", NodeAttribute.DataType.DT_IVec2 }, - { "ivec3", NodeAttribute.DataType.DT_IVec3 }, - { "ivec4", NodeAttribute.DataType.DT_IVec4 }, - { "fvec2", NodeAttribute.DataType.DT_Vec2 }, - { "fvec3", NodeAttribute.DataType.DT_Vec3 }, - { "fvec4", NodeAttribute.DataType.DT_Vec4 }, - { "mat2x2", NodeAttribute.DataType.DT_Mat2 }, - { "mat3x3", NodeAttribute.DataType.DT_Mat3 }, - { "mat3x4", NodeAttribute.DataType.DT_Mat3x4 }, - { "mat4x3", NodeAttribute.DataType.DT_Mat4x3 }, - { "mat4x4", NodeAttribute.DataType.DT_Mat4 }, - { "bool", NodeAttribute.DataType.DT_Bool }, - { "string", NodeAttribute.DataType.DT_String }, - { "path", NodeAttribute.DataType.DT_Path }, - { "FixedString", NodeAttribute.DataType.DT_FixedString }, - { "LSString", NodeAttribute.DataType.DT_LSString }, - { "uint64", NodeAttribute.DataType.DT_ULongLong }, - { "ScratchBuffer", NodeAttribute.DataType.DT_ScratchBuffer }, - { "old_int64", NodeAttribute.DataType.DT_Long }, - { "int8", NodeAttribute.DataType.DT_Int8 }, - { "TranslatedString", NodeAttribute.DataType.DT_TranslatedString }, - { "WString", NodeAttribute.DataType.DT_WString }, - { "LSWString", NodeAttribute.DataType.DT_LSWString }, - { "guid", NodeAttribute.DataType.DT_UUID }, - { "int64", NodeAttribute.DataType.DT_Int64 }, - { "TranslatedFSString", NodeAttribute.DataType.DT_TranslatedFSString }, - }; - private Stream stream; private XmlReader reader; private Resource resource; @@ -185,13 +147,13 @@ private void ReadElement() UInt32 attrTypeId; if (Version >= LSXVersion.V4) { - attrTypeId = (uint)TypeNames[reader["type"]]; + attrTypeId = (uint)AttributeTypeMaps.TypeToId[reader["type"]]; } else { if (!UInt32.TryParse(reader["type"], out attrTypeId)) { - attrTypeId = (uint)TypeNames[reader["type"]]; + attrTypeId = (uint)AttributeTypeMaps.TypeToId[reader["type"]]; } } diff --git a/LSLib/LS/Resources/LSX/LSXWriter.cs b/LSLib/LS/Resources/LSX/LSXWriter.cs index 5c29afbf..4a8f6b86 100644 --- a/LSLib/LS/Resources/LSX/LSXWriter.cs +++ b/LSLib/LS/Resources/LSX/LSXWriter.cs @@ -7,44 +7,6 @@ namespace LSLib.LS { public class LSXWriter { - private static Dictionary TypeNames = new Dictionary - { - { NodeAttribute.DataType.DT_None, "None" }, - { NodeAttribute.DataType.DT_Byte, "uint8" }, - { NodeAttribute.DataType.DT_Short, "int16" }, - { NodeAttribute.DataType.DT_UShort, "uint16" }, - { NodeAttribute.DataType.DT_Int, "int32" }, - { NodeAttribute.DataType.DT_UInt, "uint32" }, - { NodeAttribute.DataType.DT_Float, "float" }, - { NodeAttribute.DataType.DT_Double, "double" }, - { NodeAttribute.DataType.DT_IVec2, "ivec2" }, - { NodeAttribute.DataType.DT_IVec3, "ivec3" }, - { NodeAttribute.DataType.DT_IVec4, "ivec4" }, - { NodeAttribute.DataType.DT_Vec2, "fvec2" }, - { NodeAttribute.DataType.DT_Vec3, "fvec3" }, - { NodeAttribute.DataType.DT_Vec4, "fvec4" }, - { 
NodeAttribute.DataType.DT_Mat2, "mat2x2" }, - { NodeAttribute.DataType.DT_Mat3, "mat3x3" }, - { NodeAttribute.DataType.DT_Mat3x4, "mat3x4" }, - { NodeAttribute.DataType.DT_Mat4x3, "mat4x3" }, - { NodeAttribute.DataType.DT_Mat4, "mat4x4" }, - { NodeAttribute.DataType.DT_Bool, "bool" }, - { NodeAttribute.DataType.DT_String, "string" }, - { NodeAttribute.DataType.DT_Path, "path" }, - { NodeAttribute.DataType.DT_FixedString, "FixedString" }, - { NodeAttribute.DataType.DT_LSString, "LSString" }, - { NodeAttribute.DataType.DT_ULongLong, "uint64" }, - { NodeAttribute.DataType.DT_ScratchBuffer, "ScratchBuffer" }, - { NodeAttribute.DataType.DT_Long, "old_int64" }, - { NodeAttribute.DataType.DT_Int8, "int8" }, - { NodeAttribute.DataType.DT_TranslatedString, "TranslatedString" }, - { NodeAttribute.DataType.DT_WString, "WString" }, - { NodeAttribute.DataType.DT_LSWString, "LSWString" }, - { NodeAttribute.DataType.DT_UUID, "guid" }, - { NodeAttribute.DataType.DT_Int64, "int64" }, - { NodeAttribute.DataType.DT_TranslatedFSString, "TranslatedFSString" }, - }; - private Stream stream; private XmlWriter writer; @@ -132,7 +94,7 @@ private void WriteNode(Node node) writer.WriteAttributeString("id", attribute.Key); if (Version >= LSXVersion.V4) { - writer.WriteAttributeString("type", TypeNames[attribute.Value.Type]); + writer.WriteAttributeString("type", AttributeTypeMaps.IdToType[attribute.Value.Type]); } else { From 8480d96437c77200a92a6d4f5e050ae3e6ff5c60 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sat, 12 Aug 2023 11:17:38 +0200 Subject: [PATCH 002/139] Add support for hotfix 2 savegames --- ConverterApp/OsirisPane.cs | 86 +++--------------- LSLib/LS/Save/SavegameHelpers.cs | 149 +++++++++++++++++++++++++++++++ LSLib/LS/Story/Common.cs | 4 +- LSLib/LS/Story/Story.cs | 4 +- LSLib/LSLib.csproj | 1 + 5 files changed, 167 insertions(+), 77 deletions(-) create mode 100644 LSLib/LS/Save/SavegameHelpers.cs diff --git a/ConverterApp/OsirisPane.cs b/ConverterApp/OsirisPane.cs index f3351c41..55a09d67 100644 --- a/ConverterApp/OsirisPane.cs +++ b/ConverterApp/OsirisPane.cs @@ -7,6 +7,7 @@ using System.Windows.Forms; using LSLib.LS; using LSLib.LS.Enums; +using LSLib.LS.Save; using LSLib.LS.Story; using Node = LSLib.LS.Story.Node; @@ -54,11 +55,8 @@ private void goalPathBrowseBtn_Click(object sender, EventArgs e) } } - private void LoadStory(Stream s) + private void LoadStory() { - var reader = new StoryReader(); - _story = reader.Read(s); - _databaseItems.Clear(); uint index = 0; @@ -117,13 +115,11 @@ private void loadStoryBtn_Click(object sender, EventArgs e) { case ".lsv": { - var resource = LoadResourceFromSave(storyFilePath.Text); - if (resource == null) return; - - LSLib.LS.Node storyNode = resource.Regions["Story"].Children["Story"][0]; - var storyStream = new MemoryStream(storyNode.Attributes["Story"].Value as byte[] ?? 
throw new InvalidOperationException("Cannot proceed with null Story node")); - - LoadStory(storyStream); + using (var saveHelpers = new SavegameHelpers(storyFilePath.Text)) + { + _story = saveHelpers.LoadStory(); + LoadStory(); + } MessageBox.Show("Save game database loaded successfully."); break; @@ -132,7 +128,9 @@ private void loadStoryBtn_Click(object sender, EventArgs e) { using (var file = new FileStream(storyFilePath.Text, FileMode.Open, FileAccess.Read, FileShare.Read)) { - LoadStory(file); + var reader = new StoryReader(); + reader.Read(file); + LoadStory(); } MessageBox.Show("Story file loaded successfully."); @@ -148,69 +146,11 @@ private void loadStoryBtn_Click(object sender, EventArgs e) private void SaveSavegameDatabase() { - var conversionParams = ResourceConversionParameters.FromGameVersion(Game); - var packageReader = new PackageReader(storyFilePath.Text); - Package package = packageReader.Read(); - - AbstractFileInfo globalsLsf = package.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == "globals.lsf"); - if (globalsLsf == null) - { - MessageBox.Show("The specified package is not a valid savegame (globals.lsf not found)", "Load Failed", MessageBoxButtons.OK, MessageBoxIcon.Error); - return; - } - - // Load globals.lsf - Resource resource; - Stream rsrcStream = globalsLsf.MakeStream(); - try - { - using (var rsrcReader = new LSFReader(rsrcStream)) - { - resource = rsrcReader.Read(); - } - } - finally - { - globalsLsf.ReleaseStream(); - } - - // Save story resource and pack into the Story.Story attribute in globals.lsf - using (var storyStream = new MemoryStream()) + using (var saveHelpers = new SavegameHelpers(storyFilePath.Text)) { - var storyWriter = new StoryWriter(); - storyWriter.Write(storyStream, _story); - - LSLib.LS.Node storyNode = resource.Regions["Story"].Children["Story"][0]; - storyNode.Attributes["Story"].Value = storyStream.ToArray(); - } - - // Save globals.lsf - var rewrittenStream = new MemoryStream(); - var rsrcWriter = new LSFWriter(rewrittenStream); - rsrcWriter.Version = conversionParams.LSF; - rsrcWriter.EncodeSiblingData = false; - rsrcWriter.Write(resource); - rewrittenStream.Seek(0, SeekOrigin.Begin); - - // Re-package global.lsf - var rewrittenPackage = new Package(); - StreamFileInfo globalsRepacked = StreamFileInfo.CreateFromStream(rewrittenStream, globalsLsf.Name); - rewrittenPackage.Files.Add(globalsRepacked); - - List files = package.Files.Where(x => x.Name.ToLowerInvariant() != "globals.lsf").ToList(); - rewrittenPackage.Files.AddRange(files); - - using (var packageWriter = new PackageWriter(rewrittenPackage, $"{storyFilePath.Text}.tmp")) - { - packageWriter.Version = conversionParams.PAKVersion; - packageWriter.Compression = CompressionMethod.Zlib; - packageWriter.CompressionLevel = CompressionLevel.DefaultCompression; - packageWriter.Write(); + saveHelpers.ResaveStory(_story, Game, $"{storyFilePath.Text}.tmp"); } - rewrittenStream.Dispose(); - packageReader.Dispose(); - // Create a backup of the original .lsf string backupPath = $"{storyFilePath.Text}.backup"; if (!File.Exists(backupPath)) @@ -231,7 +171,7 @@ private void SaveStory() using (var file = new FileStream(storyFilePath.Text, FileMode.Create, FileAccess.Write)) { var writer = new StoryWriter(); - writer.Write(file, _story); + writer.Write(file, _story, false); } } diff --git a/LSLib/LS/Save/SavegameHelpers.cs b/LSLib/LS/Save/SavegameHelpers.cs new file mode 100644 index 00000000..d2adbc2a --- /dev/null +++ b/LSLib/LS/Save/SavegameHelpers.cs @@ -0,0 +1,149 @@ +using 
LSLib.Granny; +using LSLib.LS.Enums; +using LSLib.LS.Story; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; + +namespace LSLib.LS.Save +{ + public class SavegameHelpers : IDisposable + { + private PackageReader Reader; + private Package Package; + + public SavegameHelpers(string path) + { + Reader = new PackageReader(path); + Package = Reader.Read(); + } + + public void Dispose() + { + Reader.Dispose(); + } + + public Resource LoadGlobals() + { + AbstractFileInfo globalsInfo = Package.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == "globals.lsf"); + if (globalsInfo == null) + { + throw new InvalidDataException("The specified package is not a valid savegame (globals.lsf not found)"); + } + + Resource resource; + Stream rsrcStream = globalsInfo.MakeStream(); + try + { + using (var rsrcReader = new LSFReader(rsrcStream)) + { + resource = rsrcReader.Read(); + } + } + finally + { + globalsInfo.ReleaseStream(); + } + + return resource; + } + + public Story.Story LoadStory(Stream s) + { + var reader = new StoryReader(); + return reader.Read(s); + } + + public Story.Story LoadStory() + { + AbstractFileInfo storyInfo = Package.Files.FirstOrDefault(p => p.Name == "StorySave.bin"); + if (storyInfo != null) + { + Stream rsrcStream = storyInfo.MakeStream(); + try + { + return LoadStory(rsrcStream); + } + finally + { + storyInfo.ReleaseStream(); + } + } + else + { + var globals = LoadGlobals(); + + Node storyNode = globals.Regions["Story"].Children["Story"][0]; + var storyStream = new MemoryStream(storyNode.Attributes["Story"].Value as byte[] ?? throw new InvalidOperationException("Cannot proceed with null Story node")); + return LoadStory(storyStream); + } + } + + public MemoryStream ResaveStoryToGlobals(Story.Story story, ResourceConversionParameters conversionParams) + { + var globals = LoadGlobals(); + + // Save story resource and pack into the Story.Story attribute in globals.lsf + using (var storyStream = new MemoryStream()) + { + var storyWriter = new StoryWriter(); + storyWriter.Write(storyStream, story, true); + + var storyNode = globals.Regions["Story"].Children["Story"][0]; + storyNode.Attributes["Story"].Value = storyStream.ToArray(); + } + + // Save globals.lsf + var rewrittenStream = new MemoryStream(); + var rsrcWriter = new LSFWriter(rewrittenStream); + rsrcWriter.Version = conversionParams.LSF; + rsrcWriter.EncodeSiblingData = false; + rsrcWriter.Write(globals); + rewrittenStream.Seek(0, SeekOrigin.Begin); + return rewrittenStream; + } + + public void ResaveStory(Story.Story story, Game game, string path) + { + // Re-package global.lsf/StorySave.bin + var rewrittenPackage = new Package(); + var conversionParams = ResourceConversionParameters.FromGameVersion(game); + + AbstractFileInfo storyBin = Package.Files.FirstOrDefault(p => p.Name == "StorySave.bin"); + if (storyBin == null) + { + var globalsStream = ResaveStoryToGlobals(story, conversionParams); + + AbstractFileInfo globalsLsf = Package.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == "globals.lsf"); + StreamFileInfo globalsRepacked = StreamFileInfo.CreateFromStream(globalsStream, globalsLsf.Name); + rewrittenPackage.Files.Add(globalsRepacked); + + List files = Package.Files.Where(x => x.Name.ToLowerInvariant() != "globals.lsf").ToList(); + rewrittenPackage.Files.AddRange(files); + } + else + { + // Save story resource and pack into the Story.Story attribute in globals.lsf + var storyStream = new MemoryStream(); + var storyWriter = new StoryWriter(); + 
storyWriter.Write(storyStream, story, true); + storyStream.Seek(0, SeekOrigin.Begin); + + StreamFileInfo storyRepacked = StreamFileInfo.CreateFromStream(storyStream, "StorySave.bin"); + rewrittenPackage.Files.Add(storyRepacked); + + List files = Package.Files.Where(x => x.Name.ToLowerInvariant() != "StorySave.bin").ToList(); + rewrittenPackage.Files.AddRange(files); + } + + using (var packageWriter = new PackageWriter(rewrittenPackage, path)) + { + packageWriter.Version = conversionParams.PAKVersion; + packageWriter.Compression = CompressionMethod.Zlib; + packageWriter.CompressionLevel = CompressionLevel.DefaultCompression; + packageWriter.Write(); + } + } + } +} diff --git a/LSLib/LS/Story/Common.cs b/LSLib/LS/Story/Common.cs index 72f6c29c..7047c734 100644 --- a/LSLib/LS/Story/Common.cs +++ b/LSLib/LS/Story/Common.cs @@ -227,8 +227,8 @@ public uint Ver get { return ((uint)MajorVersion << 8) | (uint)MinorVersion; } } - public OsiWriter(Stream stream) - : base(stream) + public OsiWriter(Stream stream, bool leaveOpen) + : base(stream, Encoding.UTF8, leaveOpen) { } diff --git a/LSLib/LS/Story/Story.cs b/LSLib/LS/Story/Story.cs index 0c34ecd4..bf11b970 100644 --- a/LSLib/LS/Story/Story.cs +++ b/LSLib/LS/Story/Story.cs @@ -467,9 +467,9 @@ private void WriteGoals(Dictionary goals) } } - public void Write(Stream stream, Story story) + public void Write(Stream stream, Story story, bool leaveOpen) { - using (Writer = new OsiWriter(stream)) + using (Writer = new OsiWriter(stream, leaveOpen)) { foreach (var node in story.Nodes) { diff --git a/LSLib/LSLib.csproj b/LSLib/LSLib.csproj index cc44ad97..f0233113 100644 --- a/LSLib/LSLib.csproj +++ b/LSLib/LSLib.csproj @@ -74,6 +74,7 @@ + From 998c6144aace98c3af5b9f9ad13304304c83deea Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sat, 12 Aug 2023 11:56:29 +0200 Subject: [PATCH 003/139] Fix GR2 mixed marshalling --- LSLib/Granny/GR2/Format.cs | 4 +++ LSLib/Granny/GR2/Writer.cs | 2 +- LSLib/Granny/Model/Mesh.cs | 4 +-- LSLib/Granny/Model/Vertex.cs | 30 ++++++++++++++++++----- LSLib/Granny/Model/VertexSerialization.cs | 2 +- LSLib/LS/Common.cs | 2 +- StoryCompiler/ModCompiler.cs | 2 +- 7 files changed, 34 insertions(+), 12 deletions(-) diff --git a/LSLib/Granny/GR2/Format.cs b/LSLib/Granny/GR2/Format.cs index c8b4408e..80bc7903 100644 --- a/LSLib/Granny/GR2/Format.cs +++ b/LSLib/Granny/GR2/Format.cs @@ -1275,6 +1275,10 @@ public class SerializationAttribute : System.Attribute /// Maximum GR2 file version this member should be exported to /// public UInt32 MaxVersion = 0; + /// + /// Should we do mixed marshalling on this struct? 
+ /// + public bool MixedMarshal = false; } /// diff --git a/LSLib/Granny/GR2/Writer.cs b/LSLib/Granny/GR2/Writer.cs index 68dba0e9..4dd1d039 100644 --- a/LSLib/Granny/GR2/Writer.cs +++ b/LSLib/Granny/GR2/Writer.cs @@ -232,7 +232,7 @@ internal void WriteStruct(Type type, object node, bool allowRecursion = true, bo WriteStruct(GR2.LookupStructDefinition(type, node), node, allowRecursion, allowAlign); } - internal void StoreObjectOffset(object o) + public void StoreObjectOffset(object o) { if (Writer == MainWriter) { diff --git a/LSLib/Granny/Model/Mesh.cs b/LSLib/Granny/Model/Mesh.cs index ae74d063..60894aa3 100644 --- a/LSLib/Granny/Model/Mesh.cs +++ b/LSLib/Granny/Model/Mesh.cs @@ -188,9 +188,9 @@ public SectionType SelectSection(MemberDefinition member, Type type, object obj) public class VertexData { - [Serialization(Type = MemberType.ReferenceToVariantArray, + [Serialization(Type = MemberType.ReferenceToVariantArray, MixedMarshal = true, TypeSelector = typeof(VertexSerializer), Serializer = typeof(VertexSerializer), - Kind = SerializationKind.UserElement)] + Kind = SerializationKind.UserMember)] public List Vertices; public List VertexComponentNames; public List VertexAnnotationSets; diff --git a/LSLib/Granny/Model/Vertex.cs b/LSLib/Granny/Model/Vertex.cs index 12654eb1..8d073e0e 100644 --- a/LSLib/Granny/Model/Vertex.cs +++ b/LSLib/Granny/Model/Vertex.cs @@ -286,7 +286,7 @@ public Vertex CreateInstance() } } - [StructSerialization(TypeSelector = typeof(VertexDefinitionSelector))] + [StructSerialization(TypeSelector = typeof(VertexDefinitionSelector), MixedMarshal = true)] public class Vertex { public const int MaxUVs = 4; @@ -581,7 +581,14 @@ public VertexDescriptor ConstructDescriptor(MemberDefinition memberDefn, StructD return desc; } - public object Read(GR2Reader reader, StructDefinition definition, MemberDefinition member, uint arraySize, object parent) + public Vertex ReadVertex(GR2Reader reader, VertexDescriptor descriptor) + { + var vertex = descriptor.CreateInstance(); + vertex.Unserialize(reader); + return vertex; + } + + public object Read(GR2Reader gr2, StructDefinition definition, MemberDefinition member, uint arraySize, object parent) { VertexDescriptor descriptor; if (!VertexTypeCache.TryGetValue(parent, out descriptor)) @@ -590,14 +597,25 @@ public object Read(GR2Reader reader, StructDefinition definition, MemberDefiniti VertexTypeCache.Add(parent, descriptor); } - var vertex = descriptor.CreateInstance(); - vertex.Unserialize(reader); - return vertex; + var vertices = new List((int)arraySize); + for (int i = 0; i < arraySize; i++) + vertices.Add(ReadVertex(gr2, descriptor)); + return vertices; } public void Write(GR2Writer writer, WritableSection section, MemberDefinition member, object obj) { - (obj as Vertex).Serialize(section); + var items = obj as List; + + if (items.Count > 0) + { + section.StoreObjectOffset(items[0]); + } + + for (int i = 0; i < items.Count; i++) + { + items[i].Serialize(section); + } } } } diff --git a/LSLib/Granny/Model/VertexSerialization.cs b/LSLib/Granny/Model/VertexSerialization.cs index 3a784f71..bba43d3e 100644 --- a/LSLib/Granny/Model/VertexSerialization.cs +++ b/LSLib/Granny/Model/VertexSerialization.cs @@ -542,7 +542,7 @@ public StructDefinition CreateStructDefinition(object instance) var defn = new StructDefinition { Members = new List(), - MixedMarshal = false, + MixedMarshal = true, Type = typeof(Vertex) }; diff --git a/LSLib/LS/Common.cs b/LSLib/LS/Common.cs index bf412faa..e2848c54 100644 --- a/LSLib/LS/Common.cs +++ 
b/LSLib/LS/Common.cs @@ -9,7 +9,7 @@ public static class Common public const int MinorVersion = 8; - public const int PatchVersion = 2; + public const int PatchVersion = 3; // Version of LSTools profile data in generated DAE files public const int ColladaMetadataVersion = 2; diff --git a/StoryCompiler/ModCompiler.cs b/StoryCompiler/ModCompiler.cs index b1990a67..e4a4b636 100644 --- a/StoryCompiler/ModCompiler.cs +++ b/StoryCompiler/ModCompiler.cs @@ -496,7 +496,7 @@ public bool Compile(string outputPath, string debugInfoPath, List mods) using (var file = new FileStream(outputPath, FileMode.Create, FileAccess.Write)) { var writer = new StoryWriter(); - writer.Write(file, story); + writer.Write(file, story, false); } Logger.TaskFinished(); From 9c88b30868f161ce72064dc148ace842d1de92e9 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sat, 12 Aug 2023 11:59:21 +0200 Subject: [PATCH 004/139] Fix import of GR2 mesh extended properties --- LSLib/Granny/Model/ColladaImporter.cs | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/LSLib/Granny/Model/ColladaImporter.cs b/LSLib/Granny/Model/ColladaImporter.cs index 4ca94d1d..479c584c 100644 --- a/LSLib/Granny/Model/ColladaImporter.cs +++ b/LSLib/Granny/Model/ColladaImporter.cs @@ -301,22 +301,24 @@ private technique FindExporterExtraData(extra[] extras) return null; } - private void LoadLSLibProfileMeshType(DivinityMeshProperties props, string meshType) + private void LoadLSLibProfileMeshType(DivinityMeshExtendedData props, string meshType) { + var meshProps = props.UserMeshProperties; + switch (meshType) { // Compatibility flag, not used anymore case "Normal": break; - case "Cloth": props.MeshFlags |= DivinityModelFlag.Cloth; break; - case "Rigid": props.MeshFlags |= DivinityModelFlag.Rigid; break; - case "MeshProxy": props.MeshFlags |= DivinityModelFlag.MeshProxy | DivinityModelFlag.HasProxyGeometry; break; - case "ProxyGeometry": props.MeshFlags |= DivinityModelFlag.HasProxyGeometry; break; - case "Spring": props.MeshFlags |= DivinityModelFlag.Spring; break; - case "Occluder": props.MeshFlags |= DivinityModelFlag.Occluder; break; - case "Cloth01": props.ClothFlags |= DivinityClothFlag.Cloth01; break; - case "Cloth02": props.ClothFlags |= DivinityClothFlag.Cloth02; break; - case "Cloth04": props.ClothFlags |= DivinityClothFlag.Cloth04; break; - case "ClothPhysics": props.ClothFlags |= DivinityClothFlag.ClothPhysics; break; + case "Cloth": meshProps.MeshFlags |= DivinityModelFlag.Cloth; props.Cloth = 1; break; + case "Rigid": meshProps.MeshFlags |= DivinityModelFlag.Rigid; props.Rigid = 1; break; + case "MeshProxy": meshProps.MeshFlags |= DivinityModelFlag.MeshProxy | DivinityModelFlag.HasProxyGeometry; props.MeshProxy = 1; break; + case "ProxyGeometry": meshProps.MeshFlags |= DivinityModelFlag.HasProxyGeometry; break; + case "Spring": meshProps.MeshFlags |= DivinityModelFlag.Spring; props.Spring = 1; break; + case "Occluder": meshProps.MeshFlags |= DivinityModelFlag.Occluder; props.Occluder = 1; break; + case "Cloth01": meshProps.ClothFlags |= DivinityClothFlag.Cloth01; break; + case "Cloth02": meshProps.ClothFlags |= DivinityClothFlag.Cloth02; break; + case "Cloth04": meshProps.ClothFlags |= DivinityClothFlag.Cloth04; break; + case "ClothPhysics": meshProps.ClothFlags |= DivinityClothFlag.ClothPhysics; break; default: Utils.Warn($"Unrecognized model type in tag: {meshType}"); break; @@ -403,7 +405,7 @@ private void LoadColladaLSLibProfileData(mesh mesh, Mesh loaded) switch (setting.LocalName) { case 
"DivModelType": - LoadLSLibProfileMeshType(meshProps, setting.InnerText.Trim()); + LoadLSLibProfileMeshType(loaded.ExtendedData, setting.InnerText.Trim()); break; case "IsImpostor": From e5d90a31e1775df1d59f53c1db3095038c666783 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sat, 12 Aug 2023 13:33:40 +0200 Subject: [PATCH 005/139] Add support for importing/exporting bone order --- LSLib/Granny/Model/ColladaExporter.cs | 2 +- LSLib/Granny/Model/ColladaImporter.cs | 10 ++- LSLib/Granny/Model/Root.cs | 7 +- LSLib/Granny/Model/Skeleton.cs | 92 ++++++++++++++++++++++++++- LSLib/LS/Common.cs | 2 +- 5 files changed, 101 insertions(+), 12 deletions(-) diff --git a/LSLib/Granny/Model/ColladaExporter.cs b/LSLib/Granny/Model/ColladaExporter.cs index aeba41b9..2231ff20 100644 --- a/LSLib/Granny/Model/ColladaExporter.cs +++ b/LSLib/Granny/Model/ColladaExporter.cs @@ -523,7 +523,7 @@ private skin ExportSkin(Mesh mesh, List bones, Dictionary na private node ExportBone(Skeleton skeleton, string name, int index, Bone bone) { - var node = bone.MakeCollada(name); + var node = bone.MakeCollada(Xml); var children = new List(); for (int i = 0; i < skeleton.Bones.Count; i++) { diff --git a/LSLib/Granny/Model/ColladaImporter.cs b/LSLib/Granny/Model/ColladaImporter.cs index 479c584c..d20ef062 100644 --- a/LSLib/Granny/Model/ColladaImporter.cs +++ b/LSLib/Granny/Model/ColladaImporter.cs @@ -259,11 +259,12 @@ private void FindRootBones(List parents, node node, List roo { if (node.type == NodeType.JOINT) { - rootBones.Add(new RootBoneInfo + var root = new RootBoneInfo { Bone = node, Parents = parents.Select(a => a).ToList() - }); + }; + rootBones.Add(root); } else if (node.type == NodeType.NODE) { @@ -279,7 +280,7 @@ private void FindRootBones(List parents, node node, List roo } } - private technique FindExporterExtraData(extra[] extras) + public static technique FindExporterExtraData(extra[] extras) { if (extras != null) { @@ -942,6 +943,7 @@ public Root Import(string inputPath) var collGeometries = new List(); var collSkins = new List(); + var collNodes = new List(); var collAnimations = new List(); var rootBones = new List(); @@ -979,6 +981,7 @@ public Root Import(string inputPath) { foreach (var node in scene.node) { + collNodes.Add(node); FindRootBones(new List(), node, rootBones); } } @@ -1022,6 +1025,7 @@ public Root Import(string inputPath) var skeleton = Skeleton.FromCollada(bone.Bone); var rootTransform = NodeHelpers.GetTransformHierarchy(bone.Parents); skeleton.TransformRoots(rootTransform.Inverted()); + skeleton.ReorderBones(); root.Skeletons.Add(skeleton); } diff --git a/LSLib/Granny/Model/Root.cs b/LSLib/Granny/Model/Root.cs index 3a2f1b2d..09b1cc0a 100644 --- a/LSLib/Granny/Model/Root.cs +++ b/LSLib/Granny/Model/Root.cs @@ -141,12 +141,7 @@ public void PostLoad(UInt32 tag) foreach (var skeleton in Skeletons ?? 
Enumerable.Empty()) { - var hasSkinnedMeshes = Models.Any((model) => model.Skeleton == skeleton); - if (!hasSkinnedMeshes || skeleton.Bones.Count == 1) - { - skeleton.IsDummy = true; - Utils.Info(String.Format("Skeleton '{0}' marked as dummy", skeleton.Name)); - } + skeleton.PostLoad(this); } // Upgrade legacy animation formats diff --git a/LSLib/Granny/Model/Skeleton.cs b/LSLib/Granny/Model/Skeleton.cs index 97effdcd..38091b07 100644 --- a/LSLib/Granny/Model/Skeleton.cs +++ b/LSLib/Granny/Model/Skeleton.cs @@ -3,6 +3,9 @@ using System.Linq; using OpenTK; using LSLib.Granny.GR2; +using System.Xml; +using LSLib.LS.Story; +using System.Reflection; namespace LSLib.Granny.Model { @@ -29,6 +32,8 @@ public class Bone public Matrix4 OriginalTransform; [Serialization(Kind = SerializationKind.None)] public Matrix4 WorldTransform; + [Serialization(Kind = SerializationKind.None)] + public int ExportIndex = -1; public bool IsRoot { get { return ParentIndex == -1; } } @@ -54,6 +59,27 @@ public void UpdateWorldTransforms(List bones) }; } + private void ImportLSLibProfile(node node) + { + var extraData = ColladaImporter.FindExporterExtraData(node.extra); + if (extraData != null && extraData.Any != null) + { + foreach (var setting in extraData.Any) + { + switch (setting.LocalName) + { + case "BoneIndex": + ExportIndex = Int32.Parse(setting.InnerText.Trim()); + break; + + default: + Utils.Warn($"Unrecognized LSLib bone attribute: {setting.LocalName}"); + break; + } + } + } + } + public static Bone FromCollada(node bone, int parentIndex, List bones, Dictionary boneSIDs, Dictionary boneIDs) { var transMat = ColladaHelpers.TransformFromNode(bone); @@ -73,6 +99,7 @@ public static Bone FromCollada(node bone, int parentIndex, List bones, Dic colladaBone.OriginalTransform = transMat.transform; colladaBone.Transform = Transform.FromMatrix4(transMat.transform); colladaBone.UpdateWorldTransforms(bones); + colladaBone.ImportLSLibProfile(bone); if (bone.node1 != null) { @@ -87,8 +114,22 @@ public static Bone FromCollada(node bone, int parentIndex, List bones, Dic return colladaBone; } + private technique ExportLSLibProfile(XmlDocument Xml) + { + var profile = new technique() + { + profile = "LSTools" + }; - public node MakeCollada(string parentName) + var props = new List(); + var prop = Xml.CreateElement("BoneIndex"); + prop.InnerText = ExportIndex.ToString(); + props.Add(prop); + profile.Any = props.ToArray(); + return profile; + } + + public node MakeCollada(XmlDocument Xml) { var node = new node(); node.id = "Bone_" + Name.Replace(' ', '_'); @@ -114,6 +155,18 @@ public node MakeCollada(string parentName) node.Items = transforms.ToArray(); node.ItemsElementName = transformTypes.ToArray(); + + node.extra = new extra[] + { + new extra + { + technique = new technique[] + { + ExportLSLibProfile(Xml) + } + } + }; + return node; } } @@ -191,5 +244,42 @@ public void UpdateWorldTransforms() bone.UpdateWorldTransforms(Bones); } } + + public void ReorderBones() + { + // Reorder bones based on their ExportOrder + if (Bones.Any(m => m.ExportIndex > -1)) + { + var newBones = Bones.ToList(); + newBones.Sort((a, b) => a.ExportIndex - b.ExportIndex); + + // Fix up parent indices + foreach (var bone in newBones) + { + if (bone.ParentIndex != -1) + { + var parent = Bones[bone.ParentIndex]; + bone.ParentIndex = newBones.IndexOf(parent); + } + } + + Bones = newBones; + } + } + + public void PostLoad(Root root) + { + var hasSkinnedMeshes = root.Models.Any((model) => model.Skeleton == this); + if (!hasSkinnedMeshes || Bones.Count == 1) 
+ { + IsDummy = true; + Utils.Info(String.Format("Skeleton '{0}' marked as dummy", this.Name)); + } + + for (var i = 0; i < Bones.Count; i++) + { + Bones[i].ExportIndex = i; + } + } } } diff --git a/LSLib/LS/Common.cs b/LSLib/LS/Common.cs index e2848c54..8483da4e 100644 --- a/LSLib/LS/Common.cs +++ b/LSLib/LS/Common.cs @@ -12,7 +12,7 @@ public static class Common public const int PatchVersion = 3; // Version of LSTools profile data in generated DAE files - public const int ColladaMetadataVersion = 2; + public const int ColladaMetadataVersion = 3; /// /// Returns the version number of the LSLib library From 720975532fecf00b4e577d3521fcd85ca434d1c4 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sun, 13 Aug 2023 09:20:22 +0200 Subject: [PATCH 006/139] Fix new format savegames not being rewritten properly --- ConverterApp/OsirisPane.cs | 23 ++++++++++++++++------- LSLib/LS/Common.cs | 2 +- LSLib/LS/Save/SavegameHelpers.cs | 2 +- 3 files changed, 18 insertions(+), 9 deletions(-) diff --git a/ConverterApp/OsirisPane.cs b/ConverterApp/OsirisPane.cs index 55a09d67..dafb5606 100644 --- a/ConverterApp/OsirisPane.cs +++ b/ConverterApp/OsirisPane.cs @@ -76,6 +76,7 @@ private void LoadStory() } databaseSelectorCb_FilterDropdownList(); + RefreshDataGrid(); } public Resource LoadResourceFromSave(string path) @@ -299,21 +300,29 @@ private void databaseSelectorCb_FilterDropdownList() } } - private void databaseSelectorCb_SelectedIndexChanged(object sender, EventArgs e) + private void RefreshDataGrid() { databaseGrid.DataSource = null; databaseGrid.Columns.Clear(); - var selectedIndex = ((KeyValuePair)databaseSelectorCb.SelectedItem).Key; - Database database = _story.Databases[selectedIndex + 1]; - databaseGrid.DataSource = database.Facts; - - for (var i = 0; i < database.Parameters.Types.Count; i++) + if (databaseSelectorCb.SelectedItem != null) { - databaseGrid.Columns[i].HeaderText = $"{i} ({_story.Types[database.Parameters.Types[i]].Name})"; + var selectedIndex = ((KeyValuePair)databaseSelectorCb.SelectedItem).Key; + Database database = _story.Databases[selectedIndex + 1]; + databaseGrid.DataSource = database.Facts; + + for (var i = 0; i < database.Parameters.Types.Count; i++) + { + databaseGrid.Columns[i].HeaderText = $"{i} ({_story.Types[database.Parameters.Types[i]].Name})"; + } } } + private void databaseSelectorCb_SelectedIndexChanged(object sender, EventArgs e) + { + RefreshDataGrid(); + } + private void btnDebugExport_Click(object sender, EventArgs e) { string filePath = Path.Combine(goalPath.Text, "debug.json"); diff --git a/LSLib/LS/Common.cs b/LSLib/LS/Common.cs index 8483da4e..a386b6f1 100644 --- a/LSLib/LS/Common.cs +++ b/LSLib/LS/Common.cs @@ -9,7 +9,7 @@ public static class Common public const int MinorVersion = 8; - public const int PatchVersion = 3; + public const int PatchVersion = 4; // Version of LSTools profile data in generated DAE files public const int ColladaMetadataVersion = 3; diff --git a/LSLib/LS/Save/SavegameHelpers.cs b/LSLib/LS/Save/SavegameHelpers.cs index d2adbc2a..58985ce5 100644 --- a/LSLib/LS/Save/SavegameHelpers.cs +++ b/LSLib/LS/Save/SavegameHelpers.cs @@ -133,7 +133,7 @@ public void ResaveStory(Story.Story story, Game game, string path) StreamFileInfo storyRepacked = StreamFileInfo.CreateFromStream(storyStream, "StorySave.bin"); rewrittenPackage.Files.Add(storyRepacked); - List files = Package.Files.Where(x => x.Name.ToLowerInvariant() != "StorySave.bin").ToList(); + List files = Package.Files.Where(x => x.Name != "StorySave.bin").ToList(); 
rewrittenPackage.Files.AddRange(files); } From 17fbc050bc6fee26b272c2c99c28b560014c47a7 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sun, 13 Aug 2023 09:20:31 +0200 Subject: [PATCH 007/139] Fix debug dumping hotfix 2 savegames --- ConverterApp/DebugDumper.cs | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/ConverterApp/DebugDumper.cs b/ConverterApp/DebugDumper.cs index 9f449b80..08fd240f 100644 --- a/ConverterApp/DebugDumper.cs +++ b/ConverterApp/DebugDumper.cs @@ -255,8 +255,18 @@ private void RunTasks() } ReportProgress(70, "Loading story ..."); - LSLib.LS.Node storyNode = SaveGlobals.Regions["Story"].Children["Story"][0]; - var storyStream = new MemoryStream(storyNode.Attributes["Story"].Value as byte[]); + AbstractFileInfo storySave = SavePackage.Files.FirstOrDefault(p => p.Name == "StorySave.bin"); + Stream storyStream; + if (storySave != null) + { + storyStream = storySave.MakeStream(); + } + else + { + LSLib.LS.Node storyNode = SaveGlobals.Regions["Story"].Children["Story"][0]; + storyStream = new MemoryStream(storyNode.Attributes["Story"].Value as byte[]); + } + var reader = new StoryReader(); SaveStory = reader.Read(storyStream); From 5005b58ba7e61d541317a85748d441f6cde961cf Mon Sep 17 00:00:00 2001 From: Norbyte Date: Mon, 14 Aug 2023 18:08:41 +0200 Subject: [PATCH 008/139] Fix loading story.div.osi --- ConverterApp/OsirisPane.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ConverterApp/OsirisPane.cs b/ConverterApp/OsirisPane.cs index dafb5606..a3ccefeb 100644 --- a/ConverterApp/OsirisPane.cs +++ b/ConverterApp/OsirisPane.cs @@ -130,7 +130,7 @@ private void loadStoryBtn_Click(object sender, EventArgs e) using (var file = new FileStream(storyFilePath.Text, FileMode.Open, FileAccess.Read, FileShare.Read)) { var reader = new StoryReader(); - reader.Read(file); + _story = reader.Read(file); LoadStory(); } From 676aa2510d2eb2b8705639ec5244e680d28ac352 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Mon, 14 Aug 2023 18:09:21 +0200 Subject: [PATCH 009/139] Fix version numbering jank --- LSLib/LS/Common.cs | 4 ++-- LSLib/Properties/AssemblyInfo.cs | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/LSLib/LS/Common.cs b/LSLib/LS/Common.cs index a386b6f1..0a6c39ed 100644 --- a/LSLib/LS/Common.cs +++ b/LSLib/LS/Common.cs @@ -7,9 +7,9 @@ public static class Common { public const int MajorVersion = 1; - public const int MinorVersion = 8; + public const int MinorVersion = 18; - public const int PatchVersion = 4; + public const int PatchVersion = 5; // Version of LSTools profile data in generated DAE files public const int ColladaMetadataVersion = 3; diff --git a/LSLib/Properties/AssemblyInfo.cs b/LSLib/Properties/AssemblyInfo.cs index 2bb0f7d3..b2be3572 100644 --- a/LSLib/Properties/AssemblyInfo.cs +++ b/LSLib/Properties/AssemblyInfo.cs @@ -31,5 +31,5 @@ // You can specify all the values or you can default the Build and Revision Numbers // by using the '*' as shown below: // [assembly: AssemblyVersion("1.0.*")] -[assembly: AssemblyVersion("1.15.14.0")] -[assembly: AssemblyFileVersion("1.15.14.0")] +[assembly: AssemblyVersion("1.18.5.0")] +[assembly: AssemblyFileVersion("1.18.5.0")] From 8df591aa480945933ca41402552ce581d1094714 Mon Sep 17 00:00:00 2001 From: soleera <25336289+soleera@users.noreply.github.com> Date: Wed, 16 Aug 2023 16:44:22 +1200 Subject: [PATCH 010/139] Enable X-flip export options for DAE --- LSLib/Granny/Model/Exporter.cs | 3 +-- LSLib/Granny/Model/Mesh.cs | 28 ++++++++++++++++++++++++---- 
LSLib/Granny/Model/Skeleton.cs | 12 ++---------- LSLib/LSLib.csproj | 12 ++++++++++++ LSLib/packages.config | 4 ++++ 5 files changed, 43 insertions(+), 16 deletions(-) diff --git a/LSLib/Granny/Model/Exporter.cs b/LSLib/Granny/Model/Exporter.cs index a2019810..00091e97 100644 --- a/LSLib/Granny/Model/Exporter.cs +++ b/LSLib/Granny/Model/Exporter.cs @@ -758,8 +758,7 @@ public void Export() GenerateDummySkeleton(Root); } - if (Options.OutputFormat == ExportFormat.GR2 && - (Options.FlipMesh || Options.FlipSkeleton)) + if (Options.FlipMesh || Options.FlipSkeleton) { Root.Flip(Options.FlipMesh, Options.FlipSkeleton); } diff --git a/LSLib/Granny/Model/Mesh.cs b/LSLib/Granny/Model/Mesh.cs index 60894aa3..18d1bfbc 100644 --- a/LSLib/Granny/Model/Mesh.cs +++ b/LSLib/Granny/Model/Mesh.cs @@ -2,6 +2,7 @@ using System.Collections.Generic; using System.Linq; using System.Text; +using System.Reflection; using OpenTK; using LSLib.Granny.GR2; @@ -369,10 +370,29 @@ public void Flip() { foreach (var vertex in Vertices) { - vertex.Position.X = -vertex.Position.X; - vertex.Normal = new Vector3(-vertex.Normal.X, vertex.Normal.Y, vertex.Normal.Z); - vertex.Tangent = new Vector3(-vertex.Tangent.X, vertex.Tangent.Y, vertex.Tangent.Z); - vertex.Binormal = new Vector3(-vertex.Binormal.X, vertex.Binormal.Y, vertex.Binormal.Z); + vertex.Position.X *= -1; + vertex.Normal.X *= -1; + vertex.Tangent.X *= -1; + vertex.Binormal.X *= -1; + } + + if (Deduplicator == null) return; + + // Implements CollectionsMarshal.AsSpan from .NET 5 by reflection + Span AsSpan(List list) => new Span(list.GetType() + .GetField("_items", BindingFlags.NonPublic | BindingFlags.Instance) + .GetValue(list) as T[], 0, list.Count); + + foreach (ref SkinnedVertex vertex in AsSpan(Deduplicator.Vertices.Uniques)) + { + vertex.Position.X *= -1; + } + + foreach (ref Matrix3 matrix in AsSpan(Deduplicator.Normals.Uniques)) + { + matrix.Row0.X *= -1; // vertex.Normal.X + matrix.Row1.X *= -1; // vertex.Tangent.X + matrix.Row2.X *= -1; // vertex.Binormal.X } } } diff --git a/LSLib/Granny/Model/Skeleton.cs b/LSLib/Granny/Model/Skeleton.cs index 38091b07..c97da639 100644 --- a/LSLib/Granny/Model/Skeleton.cs +++ b/LSLib/Granny/Model/Skeleton.cs @@ -221,17 +221,9 @@ public void TransformRoots(Matrix4 transform) public void Flip() { - foreach (var bone in Bones) + foreach (var bone in Bones) if (bone.IsRoot) { - if (bone.IsRoot) - { - bone.Transform.Flags |= (uint)Transform.TransformFlags.HasScaleShear; - bone.Transform.ScaleShear = new Matrix3( - -1.0f, 0.0f, 0.0f, - 0.0f, 1.0f, 0.0f, - 0.0f, 0.0f, 1.0f - ); - } + bone.Transform.SetScale(new Vector3(-1, 1, 1)); } UpdateWorldTransforms(); diff --git a/LSLib/LSLib.csproj b/LSLib/LSLib.csproj index f0233113..cdfd02b5 100644 --- a/LSLib/LSLib.csproj +++ b/LSLib/LSLib.csproj @@ -60,8 +60,20 @@ ..\external\gppg\binaries\QUT.ShiftReduceParser.dll + + ..\packages\System.Buffers.4.4.0\lib\netstandard2.0\System.Buffers.dll + + + ..\packages\System.Memory.4.5.3\lib\netstandard2.0\System.Memory.dll + + + ..\packages\System.Numerics.Vectors.4.4.0\lib\net46\System.Numerics.Vectors.dll + + + ..\packages\System.Runtime.CompilerServices.Unsafe.4.5.2\lib\netstandard2.0\System.Runtime.CompilerServices.Unsafe.dll + diff --git a/LSLib/packages.config b/LSLib/packages.config index 1ccf9bc1..dd71e77f 100644 --- a/LSLib/packages.config +++ b/LSLib/packages.config @@ -3,5 +3,9 @@ + + + + \ No newline at end of file From e70709338f0534f9ad99205dd5b3eada802abd61 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Fri, 18 Aug 2023 14:52:57 +0200 
Subject: [PATCH 011/139] Handle missing version tag when converting .loca --- ConverterApp/ConverterApp.csproj | 4 ++++ LSLib/LS/Localization.cs | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/ConverterApp/ConverterApp.csproj b/ConverterApp/ConverterApp.csproj index 8d87ea7d..64948aed 100644 --- a/ConverterApp/ConverterApp.csproj +++ b/ConverterApp/ConverterApp.csproj @@ -197,6 +197,10 @@ {46372c50-4288-4b8e-af21-c934560600e0} LSLib + + {ABB9DB44-14F2-46E0-A4B8-B46C300CA982} + OpenTK + diff --git a/LSLib/LS/Localization.cs b/LSLib/LS/Localization.cs index ad001981..cdcccec1 100644 --- a/LSLib/LS/Localization.cs +++ b/LSLib/LS/Localization.cs @@ -187,7 +187,7 @@ private void ReadElement() case "content": var key = reader["contentuid"]; - var version = UInt16.Parse(reader["version"]); + var version = reader["version"] != null ? UInt16.Parse(reader["version"]) : (UInt16)1; var text = reader.ReadString(); resource.Entries.Add(new LocalizedText From f0a04e39c0ed18945469bed31c865100f5cb7caa Mon Sep 17 00:00:00 2001 From: Norbyte Date: Fri, 18 Aug 2023 14:58:13 +0200 Subject: [PATCH 012/139] Work around unconvertable characters when exporting to LSX --- LSLib/LS/Resources/LSX/LSXWriter.cs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/LSLib/LS/Resources/LSX/LSXWriter.cs b/LSLib/LS/Resources/LSX/LSXWriter.cs index 4a8f6b86..99f92302 100644 --- a/LSLib/LS/Resources/LSX/LSXWriter.cs +++ b/LSLib/LS/Resources/LSX/LSXWriter.cs @@ -122,7 +122,8 @@ private void WriteNode(Node node) } else { - writer.WriteAttributeString("value", attribute.Value.ToString()); + // Replace bogus 001F characters found in certain LSF nodes + writer.WriteAttributeString("value", attribute.Value.ToString().Replace("\x1f", "")); } writer.WriteEndElement(); From 46ffad3abe6016d89bfa0f9f004d80c71da47137 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Fri, 18 Aug 2023 15:10:48 +0200 Subject: [PATCH 013/139] TranslatedFSString LSJ serialization fixes --- .../LS/Resources/LSJ/LSJResourceConverter.cs | 26 ++++++++++++++----- 1 file changed, 19 insertions(+), 7 deletions(-) diff --git a/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs b/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs index b6705439..1d47a7ae 100644 --- a/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs +++ b/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs @@ -150,7 +150,8 @@ private NodeAttribute ReadAttribute(JsonReader reader) else if (reader.TokenType == JsonToken.String || reader.TokenType == JsonToken.Integer || reader.TokenType == JsonToken.Float - || reader.TokenType == JsonToken.Boolean) + || reader.TokenType == JsonToken.Boolean + || reader.TokenType == JsonToken.Null) { if (key == "type") { @@ -266,7 +267,7 @@ private NodeAttribute ReadAttribute(JsonReader reader) } var fsString = (TranslatedFSString)attribute.Value; - fsString.Value = reader.Value.ToString(); + fsString.Value = reader.Value != null ? 
reader.Value.ToString() : null; fsString.Handle = handle; fsString.Arguments = fsStringArguments; attribute.Value = fsString; @@ -329,15 +330,26 @@ private NodeAttribute ReadAttribute(JsonReader reader) } else if (key == "handle") { - if (attribute != null && attribute.Type == NodeAttribute.DataType.DT_TranslatedString) + if (attribute != null) { - if (attribute.Value == null) + if (attribute.Type == NodeAttribute.DataType.DT_TranslatedString) { - attribute.Value = new TranslatedString(); + if (attribute.Value == null) + { + attribute.Value = new TranslatedString(); + } + + ((TranslatedString)attribute.Value).Handle = reader.Value.ToString(); } + else if (attribute.Type == NodeAttribute.DataType.DT_TranslatedFSString) + { + if (attribute.Value == null) + { + attribute.Value = new TranslatedFSString(); + } - var ts = (TranslatedString)attribute.Value; - ts.Handle = reader.Value.ToString(); + ((TranslatedFSString)attribute.Value).Handle = reader.Value.ToString(); + } } else { From 7e05210497c2cf640efdd725797fdae183cfc8b5 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Fri, 18 Aug 2023 15:17:07 +0200 Subject: [PATCH 014/139] Add commandline conversion support for .loca files --- Divine/CLI/CommandLineActions.cs | 6 ++++++ Divine/CLI/CommandLineArguments.cs | 2 +- Divine/CLI/CommandLineDataProcessor.cs | 21 +++++++++++++++++++++ 3 files changed, 28 insertions(+), 1 deletion(-) diff --git a/Divine/CLI/CommandLineActions.cs b/Divine/CLI/CommandLineActions.cs index a0e24af1..505c3abc 100644 --- a/Divine/CLI/CommandLineActions.cs +++ b/Divine/CLI/CommandLineActions.cs @@ -185,6 +185,12 @@ private static void Process(CommandLineArguments args) { CommandLineDataProcessor.Convert(); break; + } + + case "convert-loca": + { + CommandLineDataProcessor.ConvertLoca(); + break; } case "extract-packages": diff --git a/Divine/CLI/CommandLineArguments.cs b/Divine/CLI/CommandLineArguments.cs index c9012fc3..3d58dbe5 100644 --- a/Divine/CLI/CommandLineArguments.cs +++ b/Divine/CLI/CommandLineArguments.cs @@ -81,7 +81,7 @@ public class CommandLineArguments [EnumeratedValueArgument(typeof(string), 'a', "action", Description = "Set action to execute", DefaultValue = "extract-package", - AllowedValues = "create-package;list-package;extract-single-file;extract-package;extract-packages;convert-model;convert-models;convert-resource;convert-resources", + AllowedValues = "create-package;list-package;extract-single-file;extract-package;extract-packages;convert-model;convert-models;convert-resource;convert-resources;convert-loca", ValueOptional = false, Optional = false )] diff --git a/Divine/CLI/CommandLineDataProcessor.cs b/Divine/CLI/CommandLineDataProcessor.cs index d9303027..0b2fc10e 100644 --- a/Divine/CLI/CommandLineDataProcessor.cs +++ b/Divine/CLI/CommandLineDataProcessor.cs @@ -38,6 +38,27 @@ private static void ConvertResource(string sourcePath, string destinationPath, R } } + public static void ConvertLoca() + { + ConvertLoca(CommandLineActions.SourcePath, CommandLineActions.DestinationPath); + } + + private static void ConvertLoca(string sourcePath, string destinationPath) + { + try + { + var loca = LocaUtils.Load(sourcePath); + LocaUtils.Save(loca, destinationPath); + CommandLineLogger.LogInfo($"Wrote localization to: {destinationPath}"); + } + catch (Exception e) + { + CommandLineLogger.LogFatal($"Failed to convert localization file: {e.Message}", 2); + CommandLineLogger.LogTrace($"{e.StackTrace}"); + } + } + + private static void BatchConvertResource(string sourcePath, string destinationPath, ResourceFormat 
inputFormat, ResourceFormat outputFormat, ResourceConversionParameters conversionParams) { try From a0e4298457936c4018e53fbb2a03a0f57316e1ba Mon Sep 17 00:00:00 2001 From: Baaleos Date: Sat, 26 Aug 2023 16:00:40 +0100 Subject: [PATCH 015/139] Attempt at getting package priority exposed to command line --- Divine/CLI/CommandLineArguments.cs | 10 ++++++++++ Divine/CLI/CommandLinePackageProcessor.cs | 2 ++ 2 files changed, 12 insertions(+) diff --git a/Divine/CLI/CommandLineArguments.cs b/Divine/CLI/CommandLineArguments.cs index 3d58dbe5..602beedd 100644 --- a/Divine/CLI/CommandLineArguments.cs +++ b/Divine/CLI/CommandLineArguments.cs @@ -125,6 +125,16 @@ public class CommandLineArguments )] public string ConformPath; + // @formatter:off + [ValueArgument(typeof(int), "package-priority", + Description = "Set a custom package priority", + DefaultValue = 0, + ValueOptional = true, + Optional = true + )] + public int PackagePriority; + + // @formatter:off [SwitchArgument("use-package-name", false, Description = "Use package name for destination folder", diff --git a/Divine/CLI/CommandLinePackageProcessor.cs b/Divine/CLI/CommandLinePackageProcessor.cs index 193da5bb..ff0c15af 100644 --- a/Divine/CLI/CommandLinePackageProcessor.cs +++ b/Divine/CLI/CommandLinePackageProcessor.cs @@ -157,6 +157,8 @@ private static void CreatePackageResource(string file = "") var options = new PackageCreationOptions(); options.Version = CommandLineActions.PackageVersion; + options.Priority = CommandLineActions.PackagePriority; + Dictionary compressionOptions = CommandLineArguments.GetCompressionOptions(Path.GetExtension(file)?.ToLower() == ".lsv" ? "zlib" : Args.CompressionMethod, options.Version); options.Compression = (CompressionMethod)compressionOptions["Compression"]; From 86d8f042a1ac0db19d5c0f54e49296578dc676ce Mon Sep 17 00:00:00 2001 From: Baaleos Date: Sat, 26 Aug 2023 16:24:12 +0100 Subject: [PATCH 016/139] Updated - confirmed it works locally --- Divine/CLI/CommandLineActions.cs | 2 ++ Divine/CLI/CommandLinePackageProcessor.cs | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/Divine/CLI/CommandLineActions.cs b/Divine/CLI/CommandLineActions.cs index 505c3abc..2f4ba998 100644 --- a/Divine/CLI/CommandLineActions.cs +++ b/Divine/CLI/CommandLineActions.cs @@ -20,6 +20,7 @@ internal class CommandLineActions public static ResourceFormat InputFormat; public static ResourceFormat OutputFormat; public static PackageVersion PackageVersion; + public static int PackagePriority; public static Dictionary GR2Options; // TODO: OSI support @@ -83,6 +84,7 @@ private static void SetUpAndValidate(CommandLineArguments args) if (args.Action == "create-package") { PackageVersion = Game.PAKVersion(); + PackagePriority = 25; CommandLineLogger.LogDebug($"Using package version: {PackageVersion}"); } diff --git a/Divine/CLI/CommandLinePackageProcessor.cs b/Divine/CLI/CommandLinePackageProcessor.cs index ff0c15af..3ec4dfdc 100644 --- a/Divine/CLI/CommandLinePackageProcessor.cs +++ b/Divine/CLI/CommandLinePackageProcessor.cs @@ -157,7 +157,7 @@ private static void CreatePackageResource(string file = "") var options = new PackageCreationOptions(); options.Version = CommandLineActions.PackageVersion; - options.Priority = CommandLineActions.PackagePriority; + options.Priority = (byte)CommandLineActions.PackagePriority; Dictionary compressionOptions = CommandLineArguments.GetCompressionOptions(Path.GetExtension(file)?.ToLower() == ".lsv" ? 
"zlib" : Args.CompressionMethod, options.Version); From c1957e4e25956e1e255f01de205aa586e6bcc62f Mon Sep 17 00:00:00 2001 From: Baaleos Date: Sun, 27 Aug 2023 17:21:49 +0100 Subject: [PATCH 017/139] Removed the hardcoded 25 - I was wondering why my icons were not showing up :D --- Divine/CLI/CommandLineActions.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Divine/CLI/CommandLineActions.cs b/Divine/CLI/CommandLineActions.cs index 2f4ba998..72262a0f 100644 --- a/Divine/CLI/CommandLineActions.cs +++ b/Divine/CLI/CommandLineActions.cs @@ -83,8 +83,8 @@ private static void SetUpAndValidate(CommandLineArguments args) if (args.Action == "create-package") { + PackagePriority = args.PackagePriority; PackageVersion = Game.PAKVersion(); - PackagePriority = 25; CommandLineLogger.LogDebug($"Using package version: {PackageVersion}"); } From c8eb22d2d877f3421017d37db0579a0b9ffdf6f5 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Mon, 18 Sep 2023 16:23:34 +0200 Subject: [PATCH 018/139] Undo dependencies on different system package versions --- LSLib/Granny/Model/Mesh.cs | 21 ++++++++++----------- LSLib/LSLib.csproj | 12 ------------ LSLib/packages.config | 4 ---- 3 files changed, 10 insertions(+), 27 deletions(-) diff --git a/LSLib/Granny/Model/Mesh.cs b/LSLib/Granny/Model/Mesh.cs index 18d1bfbc..621184af 100644 --- a/LSLib/Granny/Model/Mesh.cs +++ b/LSLib/Granny/Model/Mesh.cs @@ -378,21 +378,20 @@ public void Flip() if (Deduplicator == null) return; - // Implements CollectionsMarshal.AsSpan from .NET 5 by reflection - Span AsSpan(List list) => new Span(list.GetType() - .GetField("_items", BindingFlags.NonPublic | BindingFlags.Instance) - .GetValue(list) as T[], 0, list.Count); - - foreach (ref SkinnedVertex vertex in AsSpan(Deduplicator.Vertices.Uniques)) + for (var i = 0; i < Deduplicator.Vertices.Uniques.Count; i++) { - vertex.Position.X *= -1; + var vert = Deduplicator.Vertices.Uniques[i]; + vert.Position.X *= -1; + Deduplicator.Vertices.Uniques[i] = vert; } - foreach (ref Matrix3 matrix in AsSpan(Deduplicator.Normals.Uniques)) + for (var i = 0; i < Deduplicator.Normals.Uniques.Count; i++) { - matrix.Row0.X *= -1; // vertex.Normal.X - matrix.Row1.X *= -1; // vertex.Tangent.X - matrix.Row2.X *= -1; // vertex.Binormal.X + var normal = Deduplicator.Normals.Uniques[i]; + normal.Row0.X *= -1; // vertex.Normal.X + normal.Row1.X *= -1; // vertex.Tangent.X + normal.Row2.X *= -1; // vertex.Binormal.X + Deduplicator.Normals.Uniques[i] = normal; } } } diff --git a/LSLib/LSLib.csproj b/LSLib/LSLib.csproj index cdfd02b5..f0233113 100644 --- a/LSLib/LSLib.csproj +++ b/LSLib/LSLib.csproj @@ -60,20 +60,8 @@ ..\external\gppg\binaries\QUT.ShiftReduceParser.dll - - ..\packages\System.Buffers.4.4.0\lib\netstandard2.0\System.Buffers.dll - - - ..\packages\System.Memory.4.5.3\lib\netstandard2.0\System.Memory.dll - - - ..\packages\System.Numerics.Vectors.4.4.0\lib\net46\System.Numerics.Vectors.dll - - - ..\packages\System.Runtime.CompilerServices.Unsafe.4.5.2\lib\netstandard2.0\System.Runtime.CompilerServices.Unsafe.dll - diff --git a/LSLib/packages.config b/LSLib/packages.config index dd71e77f..1ccf9bc1 100644 --- a/LSLib/packages.config +++ b/LSLib/packages.config @@ -3,9 +3,5 @@ - - - - \ No newline at end of file From 538e1278d6da6600c790b1564746e4cbfbbfcc45 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Mon, 18 Sep 2023 17:15:55 +0200 Subject: [PATCH 019/139] Fix possible NPE while releasing package streams --- LSLib/LS/PackageReader.cs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git 
a/LSLib/LS/PackageReader.cs b/LSLib/LS/PackageReader.cs index fe61325a..d30fcd1b 100644 --- a/LSLib/LS/PackageReader.cs +++ b/LSLib/LS/PackageReader.cs @@ -42,7 +42,10 @@ public void Dispose() { foreach (Stream stream in _streams) { - stream.Dispose(); + if (stream != null) + { + stream.Dispose(); + } } } } From c9f4fac1beca785728f897f0d0255d52bfe58b20 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Mon, 18 Sep 2023 18:00:00 +0200 Subject: [PATCH 020/139] Fix UUID serialization format --- ConverterApp/DebugDumper.cs | 3 +- ConverterApp/ResourcePane.Designer.cs | 166 +++++++++++++----- ConverterApp/ResourcePane.cs | 26 +-- Divine/CLI/CommandLineActions.cs | 3 + Divine/CLI/CommandLineArguments.cs | 6 + Divine/CLI/CommandLineDataProcessor.cs | 18 +- LSLib/LS/Common.cs | 2 +- LSLib/LS/NodeAttribute.cs | 69 +++++++- LSLib/LS/ResourceUtils.cs | 51 +++++- LSLib/LS/Resources/LSJ/LSJReader.cs | 3 +- .../LS/Resources/LSJ/LSJResourceConverter.cs | 26 ++- LSLib/LS/Resources/LSJ/LSJWriter.cs | 3 +- LSLib/LS/Resources/LSX/LSXReader.cs | 5 +- LSLib/LS/Resources/LSX/LSXWriter.cs | 4 +- TerrainFixup/Program.cs | 3 +- 15 files changed, 304 insertions(+), 84 deletions(-) diff --git a/ConverterApp/DebugDumper.cs b/ConverterApp/DebugDumper.cs index 08fd240f..85db14f4 100644 --- a/ConverterApp/DebugDumper.cs +++ b/ConverterApp/DebugDumper.cs @@ -72,6 +72,7 @@ private void DoExtractPackage() private void DoLsxConversion() { var conversionParams = ResourceConversionParameters.FromGameVersion(GameVersion); + var loadParams = ResourceLoadParameters.FromGameVersion(GameVersion); var lsfList = SavePackage.Files.Where(p => p.Name.EndsWith(".lsf")); var numProcessed = 0; @@ -81,7 +82,7 @@ private void DoLsxConversion() var lsxPath = Path.Combine(ExtractionPath, lsf.Name.Substring(0, lsf.Name.Length - 4) + ".lsx"); ReportProgress(20 + (numProcessed * 30 / lsfList.Count()), "Converting to LSX: " + lsf.Name); - var resource = ResourceUtils.LoadResource(lsfPath, ResourceFormat.LSF); + var resource = ResourceUtils.LoadResource(lsfPath, ResourceFormat.LSF, loadParams); ResourceUtils.SaveResource(resource, lsxPath, ResourceFormat.LSX, conversionParams); numProcessed++; } diff --git a/ConverterApp/ResourcePane.Designer.cs b/ConverterApp/ResourcePane.Designer.cs index 00c1ee38..fc5378d5 100644 --- a/ConverterApp/ResourcePane.Designer.cs +++ b/ConverterApp/ResourcePane.Designer.cs @@ -55,25 +55,31 @@ private void InitializeComponent() this.resourceOutputFileDlg = new System.Windows.Forms.SaveFileDialog(); this.resourceInputPathDlg = new System.Windows.Forms.FolderBrowserDialog(); this.resourceOutputPathDlg = new System.Windows.Forms.FolderBrowserDialog(); + this.grpGlobalSettings = new System.Windows.Forms.GroupBox(); + this.legacyGuids = new System.Windows.Forms.CheckBox(); + this.lblGuidSerializationInfo = new System.Windows.Forms.Label(); this.groupBox5.SuspendLayout(); this.groupBox4.SuspendLayout(); + this.grpGlobalSettings.SuspendLayout(); this.SuspendLayout(); // // resourceConversionProgress // this.resourceConversionProgress.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) | System.Windows.Forms.AnchorStyles.Right))); - this.resourceConversionProgress.Location = new System.Drawing.Point(12, 175); + this.resourceConversionProgress.Location = new System.Drawing.Point(16, 215); + this.resourceConversionProgress.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4); this.resourceConversionProgress.Name = "resourceConversionProgress"; - 
this.resourceConversionProgress.Size = new System.Drawing.Size(872, 23); + this.resourceConversionProgress.Size = new System.Drawing.Size(1163, 28); this.resourceConversionProgress.TabIndex = 65; // // label17 // this.label17.AutoSize = true; - this.label17.Location = new System.Drawing.Point(9, 159); + this.label17.Location = new System.Drawing.Point(12, 196); + this.label17.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); this.label17.Name = "label17"; - this.label17.Size = new System.Drawing.Size(51, 13); + this.label17.Size = new System.Drawing.Size(65, 16); this.label17.TabIndex = 66; this.label17.Text = "Progress:"; // @@ -86,17 +92,19 @@ private void InitializeComponent() "LSB (binary) file", "LSF (binary) file", "LSJ (JSON) file"}); - this.resourceOutputFormatCb.Location = new System.Drawing.Point(181, 131); + this.resourceOutputFormatCb.Location = new System.Drawing.Point(241, 161); + this.resourceOutputFormatCb.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4); this.resourceOutputFormatCb.Name = "resourceOutputFormatCb"; - this.resourceOutputFormatCb.Size = new System.Drawing.Size(151, 21); + this.resourceOutputFormatCb.Size = new System.Drawing.Size(200, 24); this.resourceOutputFormatCb.TabIndex = 64; // // label16 // this.label16.AutoSize = true; - this.label16.Location = new System.Drawing.Point(178, 114); + this.label16.Location = new System.Drawing.Point(237, 140); + this.label16.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); this.label16.Name = "label16"; - this.label16.Size = new System.Drawing.Size(74, 13); + this.label16.Size = new System.Drawing.Size(88, 16); this.label16.TabIndex = 63; this.label16.Text = "Output format:"; // @@ -109,25 +117,28 @@ private void InitializeComponent() "LSB (binary) file", "LSF (binary) file", "LSJ (JSON) file"}); - this.resourceInputFormatCb.Location = new System.Drawing.Point(12, 131); + this.resourceInputFormatCb.Location = new System.Drawing.Point(16, 161); + this.resourceInputFormatCb.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4); this.resourceInputFormatCb.Name = "resourceInputFormatCb"; - this.resourceInputFormatCb.Size = new System.Drawing.Size(151, 21); + this.resourceInputFormatCb.Size = new System.Drawing.Size(200, 24); this.resourceInputFormatCb.TabIndex = 62; // // label15 // this.label15.AutoSize = true; - this.label15.Location = new System.Drawing.Point(9, 114); + this.label15.Location = new System.Drawing.Point(12, 140); + this.label15.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); this.label15.Name = "label15"; - this.label15.Size = new System.Drawing.Size(66, 13); + this.label15.Size = new System.Drawing.Size(78, 16); this.label15.TabIndex = 61; this.label15.Text = "Input format:"; // // resourceBulkConvertBtn // - this.resourceBulkConvertBtn.Location = new System.Drawing.Point(348, 131); + this.resourceBulkConvertBtn.Location = new System.Drawing.Point(464, 161); + this.resourceBulkConvertBtn.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4); this.resourceBulkConvertBtn.Name = "resourceBulkConvertBtn"; - this.resourceBulkConvertBtn.Size = new System.Drawing.Size(151, 23); + this.resourceBulkConvertBtn.Size = new System.Drawing.Size(201, 28); this.resourceBulkConvertBtn.TabIndex = 60; this.resourceBulkConvertBtn.Text = "Convert"; this.resourceBulkConvertBtn.UseVisualStyleBackColor = true; @@ -136,9 +147,10 @@ private void InitializeComponent() // resourceOutputPathBrowseBtn // this.resourceOutputPathBrowseBtn.Anchor = 
((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right))); - this.resourceOutputPathBrowseBtn.Location = new System.Drawing.Point(844, 86); + this.resourceOutputPathBrowseBtn.Location = new System.Drawing.Point(1125, 106); + this.resourceOutputPathBrowseBtn.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4); this.resourceOutputPathBrowseBtn.Name = "resourceOutputPathBrowseBtn"; - this.resourceOutputPathBrowseBtn.Size = new System.Drawing.Size(41, 22); + this.resourceOutputPathBrowseBtn.Size = new System.Drawing.Size(55, 27); this.resourceOutputPathBrowseBtn.TabIndex = 59; this.resourceOutputPathBrowseBtn.Text = "..."; this.resourceOutputPathBrowseBtn.UseVisualStyleBackColor = true; @@ -147,9 +159,10 @@ private void InitializeComponent() // resourceInputPathBrowseBtn // this.resourceInputPathBrowseBtn.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right))); - this.resourceInputPathBrowseBtn.Location = new System.Drawing.Point(843, 38); + this.resourceInputPathBrowseBtn.Location = new System.Drawing.Point(1124, 47); + this.resourceInputPathBrowseBtn.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4); this.resourceInputPathBrowseBtn.Name = "resourceInputPathBrowseBtn"; - this.resourceInputPathBrowseBtn.Size = new System.Drawing.Size(41, 22); + this.resourceInputPathBrowseBtn.Size = new System.Drawing.Size(55, 27); this.resourceInputPathBrowseBtn.TabIndex = 56; this.resourceInputPathBrowseBtn.Text = "..."; this.resourceInputPathBrowseBtn.UseVisualStyleBackColor = true; @@ -159,17 +172,19 @@ private void InitializeComponent() // this.resourceInputDir.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) | System.Windows.Forms.AnchorStyles.Right))); - this.resourceInputDir.Location = new System.Drawing.Point(11, 39); + this.resourceInputDir.Location = new System.Drawing.Point(15, 48); + this.resourceInputDir.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4); this.resourceInputDir.Name = "resourceInputDir"; - this.resourceInputDir.Size = new System.Drawing.Size(834, 20); + this.resourceInputDir.Size = new System.Drawing.Size(1111, 22); this.resourceInputDir.TabIndex = 54; // // label13 // this.label13.AutoSize = true; - this.label13.Location = new System.Drawing.Point(8, 23); + this.label13.Location = new System.Drawing.Point(11, 28); + this.label13.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); this.label13.Name = "label13"; - this.label13.Size = new System.Drawing.Size(77, 13); + this.label13.Size = new System.Drawing.Size(93, 16); this.label13.TabIndex = 55; this.label13.Text = "Input directory:"; // @@ -177,25 +192,28 @@ private void InitializeComponent() // this.resourceOutputDir.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) | System.Windows.Forms.AnchorStyles.Right))); - this.resourceOutputDir.Location = new System.Drawing.Point(11, 87); + this.resourceOutputDir.Location = new System.Drawing.Point(15, 107); + this.resourceOutputDir.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4); this.resourceOutputDir.Name = "resourceOutputDir"; - this.resourceOutputDir.Size = new System.Drawing.Size(834, 20); + this.resourceOutputDir.Size = new System.Drawing.Size(1111, 22); this.resourceOutputDir.TabIndex = 57; // // label14 // this.label14.AutoSize = true; - this.label14.Location = new 
System.Drawing.Point(8, 71); + this.label14.Location = new System.Drawing.Point(11, 87); + this.label14.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); this.label14.Name = "label14"; - this.label14.Size = new System.Drawing.Size(85, 13); + this.label14.Size = new System.Drawing.Size(103, 16); this.label14.TabIndex = 58; this.label14.Text = "Output directory:"; // // resourceConvertBtn // - this.resourceConvertBtn.Location = new System.Drawing.Point(12, 116); + this.resourceConvertBtn.Location = new System.Drawing.Point(16, 143); + this.resourceConvertBtn.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4); this.resourceConvertBtn.Name = "resourceConvertBtn"; - this.resourceConvertBtn.Size = new System.Drawing.Size(151, 23); + this.resourceConvertBtn.Size = new System.Drawing.Size(201, 28); this.resourceConvertBtn.TabIndex = 60; this.resourceConvertBtn.Text = "Convert"; this.resourceConvertBtn.UseVisualStyleBackColor = true; @@ -204,9 +222,10 @@ private void InitializeComponent() // resourceOutputBrowseBtn // this.resourceOutputBrowseBtn.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right))); - this.resourceOutputBrowseBtn.Location = new System.Drawing.Point(844, 86); + this.resourceOutputBrowseBtn.Location = new System.Drawing.Point(1125, 106); + this.resourceOutputBrowseBtn.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4); this.resourceOutputBrowseBtn.Name = "resourceOutputBrowseBtn"; - this.resourceOutputBrowseBtn.Size = new System.Drawing.Size(41, 22); + this.resourceOutputBrowseBtn.Size = new System.Drawing.Size(55, 27); this.resourceOutputBrowseBtn.TabIndex = 59; this.resourceOutputBrowseBtn.Text = "..."; this.resourceOutputBrowseBtn.UseVisualStyleBackColor = true; @@ -215,9 +234,10 @@ private void InitializeComponent() // resourceInputBrowseBtn // this.resourceInputBrowseBtn.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right))); - this.resourceInputBrowseBtn.Location = new System.Drawing.Point(844, 38); + this.resourceInputBrowseBtn.Location = new System.Drawing.Point(1125, 47); + this.resourceInputBrowseBtn.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4); this.resourceInputBrowseBtn.Name = "resourceInputBrowseBtn"; - this.resourceInputBrowseBtn.Size = new System.Drawing.Size(41, 22); + this.resourceInputBrowseBtn.Size = new System.Drawing.Size(55, 27); this.resourceInputBrowseBtn.TabIndex = 56; this.resourceInputBrowseBtn.Text = "..."; this.resourceInputBrowseBtn.UseVisualStyleBackColor = true; @@ -227,17 +247,19 @@ private void InitializeComponent() // this.resourceInputPath.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) | System.Windows.Forms.AnchorStyles.Right))); - this.resourceInputPath.Location = new System.Drawing.Point(11, 39); + this.resourceInputPath.Location = new System.Drawing.Point(15, 48); + this.resourceInputPath.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4); this.resourceInputPath.Name = "resourceInputPath"; - this.resourceInputPath.Size = new System.Drawing.Size(834, 20); + this.resourceInputPath.Size = new System.Drawing.Size(1111, 22); this.resourceInputPath.TabIndex = 54; // // label11 // this.label11.AutoSize = true; - this.label11.Location = new System.Drawing.Point(8, 23); + this.label11.Location = new System.Drawing.Point(11, 28); + this.label11.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); 
this.label11.Name = "label11"; - this.label11.Size = new System.Drawing.Size(74, 13); + this.label11.Size = new System.Drawing.Size(87, 16); this.label11.TabIndex = 55; this.label11.Text = "Input file path:"; // @@ -245,17 +267,19 @@ private void InitializeComponent() // this.resourceOutputPath.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) | System.Windows.Forms.AnchorStyles.Right))); - this.resourceOutputPath.Location = new System.Drawing.Point(11, 87); + this.resourceOutputPath.Location = new System.Drawing.Point(15, 107); + this.resourceOutputPath.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4); this.resourceOutputPath.Name = "resourceOutputPath"; - this.resourceOutputPath.Size = new System.Drawing.Size(834, 20); + this.resourceOutputPath.Size = new System.Drawing.Size(1111, 22); this.resourceOutputPath.TabIndex = 57; // // label12 // this.label12.AutoSize = true; - this.label12.Location = new System.Drawing.Point(8, 71); + this.label12.Location = new System.Drawing.Point(11, 87); + this.label12.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); this.label12.Name = "label12"; - this.label12.Size = new System.Drawing.Size(82, 13); + this.label12.Size = new System.Drawing.Size(97, 16); this.label12.TabIndex = 58; this.label12.Text = "Output file path:"; // @@ -277,9 +301,11 @@ private void InitializeComponent() this.groupBox5.Controls.Add(this.label13); this.groupBox5.Controls.Add(this.resourceOutputDir); this.groupBox5.Controls.Add(this.label14); - this.groupBox5.Location = new System.Drawing.Point(2, 169); + this.groupBox5.Location = new System.Drawing.Point(3, 208); + this.groupBox5.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4); this.groupBox5.Name = "groupBox5"; - this.groupBox5.Size = new System.Drawing.Size(895, 207); + this.groupBox5.Padding = new System.Windows.Forms.Padding(4, 4, 4, 4); + this.groupBox5.Size = new System.Drawing.Size(1193, 255); this.groupBox5.TabIndex = 62; this.groupBox5.TabStop = false; this.groupBox5.Text = "Batch Convert"; @@ -287,9 +313,10 @@ private void InitializeComponent() // resourceProgressLabel // this.resourceProgressLabel.AutoSize = true; - this.resourceProgressLabel.Location = new System.Drawing.Point(65, 159); + this.resourceProgressLabel.Location = new System.Drawing.Point(87, 196); + this.resourceProgressLabel.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); this.resourceProgressLabel.Name = "resourceProgressLabel"; - this.resourceProgressLabel.Size = new System.Drawing.Size(0, 13); + this.resourceProgressLabel.Size = new System.Drawing.Size(0, 16); this.resourceProgressLabel.TabIndex = 67; // // groupBox4 @@ -303,9 +330,11 @@ private void InitializeComponent() this.groupBox4.Controls.Add(this.label11); this.groupBox4.Controls.Add(this.resourceOutputPath); this.groupBox4.Controls.Add(this.label12); - this.groupBox4.Location = new System.Drawing.Point(1, 6); + this.groupBox4.Location = new System.Drawing.Point(1, 7); + this.groupBox4.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4); this.groupBox4.Name = "groupBox4"; - this.groupBox4.Size = new System.Drawing.Size(895, 152); + this.groupBox4.Padding = new System.Windows.Forms.Padding(4, 4, 4, 4); + this.groupBox4.Size = new System.Drawing.Size(1193, 187); this.groupBox4.TabIndex = 61; this.groupBox4.TabStop = false; this.groupBox4.Text = "Convert LSX / LSB / LSF / LSJ files "; @@ -319,19 +348,57 @@ private void InitializeComponent() // this.resourceOutputFileDlg.Filter = "LS 
files|*.lsx;*.lsb;*.lsf;*.lsj;*.lsfx;*.lsbc;*.lsbs"; this.resourceOutputFileDlg.Title = "Select Output File"; + // + // grpGlobalSettings + // + this.grpGlobalSettings.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) + | System.Windows.Forms.AnchorStyles.Right))); + this.grpGlobalSettings.Controls.Add(this.lblGuidSerializationInfo); + this.grpGlobalSettings.Controls.Add(this.legacyGuids); + this.grpGlobalSettings.Location = new System.Drawing.Point(3, 471); + this.grpGlobalSettings.Name = "grpGlobalSettings"; + this.grpGlobalSettings.Size = new System.Drawing.Size(1193, 86); + this.grpGlobalSettings.TabIndex = 64; + this.grpGlobalSettings.TabStop = false; + this.grpGlobalSettings.Text = "Settings"; + // + // legacyGuids + // + this.legacyGuids.AutoSize = true; + this.legacyGuids.Location = new System.Drawing.Point(16, 47); + this.legacyGuids.Name = "legacyGuids"; + this.legacyGuids.Size = new System.Drawing.Size(196, 20); + this.legacyGuids.TabIndex = 64; + this.legacyGuids.Text = "Read using old UUID format"; + this.legacyGuids.UseVisualStyleBackColor = true; + // + // lblGuidSerializationInfo + // + this.lblGuidSerializationInfo.AutoSize = true; + this.lblGuidSerializationInfo.Font = new System.Drawing.Font("Microsoft Sans Serif", 7.8F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(0))); + this.lblGuidSerializationInfo.Location = new System.Drawing.Point(11, 22); + this.lblGuidSerializationInfo.Name = "lblGuidSerializationInfo"; + this.lblGuidSerializationInfo.Size = new System.Drawing.Size(690, 16); + this.lblGuidSerializationInfo.TabIndex = 65; + this.lblGuidSerializationInfo.Text = "UUID format has changed in v1.18.6! Check this to force LSX files to be read usin" + + "g the old (non-byteswapped) format."; // // ResourcePane // - this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F); + this.AutoScaleDimensions = new System.Drawing.SizeF(8F, 16F); this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font; + this.Controls.Add(this.grpGlobalSettings); this.Controls.Add(this.groupBox5); this.Controls.Add(this.groupBox4); + this.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4); this.Name = "ResourcePane"; - this.Size = new System.Drawing.Size(905, 391); + this.Size = new System.Drawing.Size(1207, 717); this.groupBox5.ResumeLayout(false); this.groupBox5.PerformLayout(); this.groupBox4.ResumeLayout(false); this.groupBox4.PerformLayout(); + this.grpGlobalSettings.ResumeLayout(false); + this.grpGlobalSettings.PerformLayout(); this.ResumeLayout(false); } @@ -365,5 +432,8 @@ private void InitializeComponent() private System.Windows.Forms.SaveFileDialog resourceOutputFileDlg; private System.Windows.Forms.FolderBrowserDialog resourceInputPathDlg; private System.Windows.Forms.FolderBrowserDialog resourceOutputPathDlg; + private System.Windows.Forms.GroupBox grpGlobalSettings; + private System.Windows.Forms.Label lblGuidSerializationInfo; + private System.Windows.Forms.CheckBox legacyGuids; } } diff --git a/ConverterApp/ResourcePane.cs b/ConverterApp/ResourcePane.cs index 029eab27..4a4bbed1 100644 --- a/ConverterApp/ResourcePane.cs +++ b/ConverterApp/ResourcePane.cs @@ -30,19 +30,21 @@ public ResourcePane(MainForm form) private void resourceConvertBtn_Click(object sender, EventArgs e) { - //try - //{ - _resource = ResourceUtils.LoadResource(resourceInputPath.Text); + try + { + var loadParams = ResourceLoadParameters.FromGameVersion(_form.GetGame()); + loadParams.ByteSwapGuids 
= !legacyGuids.Checked; + _resource = ResourceUtils.LoadResource(resourceInputPath.Text, loadParams); ResourceFormat format = ResourceUtils.ExtensionToResourceFormat(resourceOutputPath.Text); var conversionParams = ResourceConversionParameters.FromGameVersion(_form.GetGame()); ResourceUtils.SaveResource(_resource, resourceOutputPath.Text, format, conversionParams); MessageBox.Show("Resource saved successfully."); - //} - //catch (Exception exc) - //{ - // MessageBox.Show($"Internal error!{Environment.NewLine}{Environment.NewLine}{exc}", "Conversion Failed", MessageBoxButtons.OK, MessageBoxIcon.Error); - //} + } + catch (Exception exc) + { + MessageBox.Show($"Internal error!{Environment.NewLine}{Environment.NewLine}{exc}", "Conversion Failed", MessageBoxButtons.OK, MessageBoxIcon.Error); + } } private void resourceInputBrowseBtn_Click(object sender, EventArgs e) @@ -92,6 +94,10 @@ public void ResourceError(string path, Exception e) private void resourceBulkConvertBtn_Click(object sender, EventArgs e) { + var loadParams = ResourceLoadParameters.FromGameVersion(_form.GetGame()); + loadParams.ByteSwapGuids = !legacyGuids.Checked; + var conversionParams = ResourceConversionParameters.FromGameVersion(_form.GetGame()); + var inputFormat = ResourceFormat.LSX; switch (resourceInputFormatCb.SelectedIndex) { @@ -118,8 +124,6 @@ private void resourceBulkConvertBtn_Click(object sender, EventArgs e) } var outputFormat = ResourceFormat.LSF; - var conversionParams = ResourceConversionParameters.FromGameVersion(_form.GetGame()); - switch (resourceOutputFormatCb.SelectedIndex) { case 0: @@ -150,7 +154,7 @@ private void resourceBulkConvertBtn_Click(object sender, EventArgs e) var utils = new ResourceUtils(); utils.progressUpdate += ResourceProgressUpdate; utils.errorDelegate += ResourceError; - utils.ConvertResources(resourceInputDir.Text, resourceOutputDir.Text, inputFormat, outputFormat, conversionParams); + utils.ConvertResources(resourceInputDir.Text, resourceOutputDir.Text, inputFormat, outputFormat, loadParams, conversionParams); MessageBox.Show("Resources converted successfully."); } diff --git a/Divine/CLI/CommandLineActions.cs b/Divine/CLI/CommandLineActions.cs index 72262a0f..53d20617 100644 --- a/Divine/CLI/CommandLineActions.cs +++ b/Divine/CLI/CommandLineActions.cs @@ -21,6 +21,7 @@ internal class CommandLineActions public static ResourceFormat OutputFormat; public static PackageVersion PackageVersion; public static int PackagePriority; + public static bool LegacyGuids; public static Dictionary GR2Options; // TODO: OSI support @@ -61,6 +62,8 @@ private static void SetUpAndValidate(CommandLineArguments args) Game = CommandLineArguments.GetGameByString(args.Game); CommandLineLogger.LogDebug($"Using game: {Game}"); + LegacyGuids = args.LegacyGuids; + if (batchActions.Any(args.Action.Contains)) { if (args.InputFormat == null || args.OutputFormat == null) diff --git a/Divine/CLI/CommandLineArguments.cs b/Divine/CLI/CommandLineArguments.cs index 602beedd..dafdd6ad 100644 --- a/Divine/CLI/CommandLineArguments.cs +++ b/Divine/CLI/CommandLineArguments.cs @@ -134,6 +134,12 @@ public class CommandLineArguments )] public int PackagePriority; + // @formatter:off + [SwitchArgument("legacy-guids", false, + Description = "Use legacy GUID serialization format when serializing LSX/LSJ files", + Optional = true + )] + public bool LegacyGuids; // @formatter:off [SwitchArgument("use-package-name", false, diff --git a/Divine/CLI/CommandLineDataProcessor.cs b/Divine/CLI/CommandLineDataProcessor.cs index 
0b2fc10e..62960e65 100644 --- a/Divine/CLI/CommandLineDataProcessor.cs +++ b/Divine/CLI/CommandLineDataProcessor.cs @@ -9,23 +9,28 @@ internal class CommandLineDataProcessor public static void Convert() { var conversionParams = ResourceConversionParameters.FromGameVersion(CommandLineActions.Game); - ConvertResource(CommandLineActions.SourcePath, CommandLineActions.DestinationPath, conversionParams); + var loadParams = ResourceLoadParameters.FromGameVersion(CommandLineActions.Game); + loadParams.ByteSwapGuids = !CommandLineActions.LegacyGuids; + ConvertResource(CommandLineActions.SourcePath, CommandLineActions.DestinationPath, loadParams, conversionParams); } public static void BatchConvert() { var conversionParams = ResourceConversionParameters.FromGameVersion(CommandLineActions.Game); - BatchConvertResource(CommandLineActions.SourcePath, CommandLineActions.DestinationPath, CommandLineActions.InputFormat, CommandLineActions.OutputFormat, conversionParams); + var loadParams = ResourceLoadParameters.FromGameVersion(CommandLineActions.Game); + loadParams.ByteSwapGuids = !CommandLineActions.LegacyGuids; + BatchConvertResource(CommandLineActions.SourcePath, CommandLineActions.DestinationPath, CommandLineActions.InputFormat, CommandLineActions.OutputFormat, loadParams, conversionParams); } - private static void ConvertResource(string sourcePath, string destinationPath, ResourceConversionParameters conversionParams) + private static void ConvertResource(string sourcePath, string destinationPath, + ResourceLoadParameters loadParams, ResourceConversionParameters conversionParams) { try { ResourceFormat resourceFormat = ResourceUtils.ExtensionToResourceFormat(destinationPath); CommandLineLogger.LogDebug($"Using destination extension: {resourceFormat}"); - Resource resource = ResourceUtils.LoadResource(sourcePath); + Resource resource = ResourceUtils.LoadResource(sourcePath, loadParams); ResourceUtils.SaveResource(resource, destinationPath, resourceFormat, conversionParams); @@ -59,14 +64,15 @@ private static void ConvertLoca(string sourcePath, string destinationPath) } - private static void BatchConvertResource(string sourcePath, string destinationPath, ResourceFormat inputFormat, ResourceFormat outputFormat, ResourceConversionParameters conversionParams) + private static void BatchConvertResource(string sourcePath, string destinationPath, ResourceFormat inputFormat, ResourceFormat outputFormat, + ResourceLoadParameters loadParams, ResourceConversionParameters conversionParams) { try { CommandLineLogger.LogDebug($"Using destination extension: {outputFormat}"); var resourceUtils = new ResourceUtils(); - resourceUtils.ConvertResources(sourcePath, destinationPath, inputFormat, outputFormat, conversionParams); + resourceUtils.ConvertResources(sourcePath, destinationPath, inputFormat, outputFormat, loadParams, conversionParams); CommandLineLogger.LogInfo($"Wrote resources to: {destinationPath}"); } diff --git a/LSLib/LS/Common.cs b/LSLib/LS/Common.cs index 0a6c39ed..b80cf3ce 100644 --- a/LSLib/LS/Common.cs +++ b/LSLib/LS/Common.cs @@ -9,7 +9,7 @@ public static class Common public const int MinorVersion = 18; - public const int PatchVersion = 5; + public const int PatchVersion = 6; // Version of LSTools profile data in generated DAE files public const int ColladaMetadataVersion = 3; diff --git a/LSLib/LS/NodeAttribute.cs b/LSLib/LS/NodeAttribute.cs index 64ce7252..28dea121 100644 --- a/LSLib/LS/NodeAttribute.cs +++ b/LSLib/LS/NodeAttribute.cs @@ -1,5 +1,6 @@ using System; using System.Collections.Generic; 
+using System.Linq; namespace LSLib.LS { @@ -27,6 +28,37 @@ public class TranslatedFSString : TranslatedString public List Arguments; } + public class NodeSerializationSettings + { + public bool DefaultByteSwapGuids = true; + public bool ByteSwapGuids = true; + + public void InitFromMeta(string meta) + { + if (meta.Length == 0) + { + // No metadata available, use defaults + ByteSwapGuids = DefaultByteSwapGuids; + } + else + { + var tags = meta.Split(','); + ByteSwapGuids = tags.Contains("bswap_guids"); + } + } + + public string BuildMeta() + { + List tags = new List { "v1" }; + if (ByteSwapGuids) + { + tags.Add("bswap_guids"); + } + + return String.Join(",", tags); + } + } + public class NodeAttribute { public enum DataType @@ -90,6 +122,22 @@ public NodeAttribute(DataType type) } public override string ToString() + { + throw new NotImplementedException("ToString() is not safe to use anymore, AsString(settings) instead"); + } + + public static Guid ByteSwapGuid(Guid g) + { + var bytes = g.ToByteArray(); + for (var i = 8; i < 16; i += 2) + { + (bytes[i + 1], bytes[i]) = (bytes[i], bytes[i + 1]); + } + + return new Guid(bytes); + } + + public string AsString(NodeSerializationSettings settings) { switch (this.type) { @@ -107,6 +155,16 @@ public override string ToString() case DataType.DT_Vec4: return String.Join(" ", new List((float[])this.value).ConvertAll(i => i.ToString()).ToArray()); + case DataType.DT_UUID: + if (settings.ByteSwapGuids) + { + return ByteSwapGuid((Guid)this.value).ToString(); + } + else + { + return this.value.ToString(); + } + default: return this.value.ToString(); } @@ -180,7 +238,7 @@ public bool IsNumeric() || this.type == DataType.DT_Int8; } - public void FromString(string str) + public void FromString(string str, NodeSerializationSettings settings) { if (IsNumeric()) { @@ -326,7 +384,14 @@ public void FromString(string str) break; case DataType.DT_UUID: - value = new Guid(str); + if (settings.ByteSwapGuids) + { + value = ByteSwapGuid(new Guid(str)); + } + else + { + value = new Guid(str); + } break; default: diff --git a/LSLib/LS/ResourceUtils.cs b/LSLib/LS/ResourceUtils.cs index 73c3c2e2..90aa5832 100644 --- a/LSLib/LS/ResourceUtils.cs +++ b/LSLib/LS/ResourceUtils.cs @@ -9,6 +9,26 @@ namespace LSLib.LS { + public class ResourceLoadParameters + { + /// + /// Byte-swap the last 8 bytes of GUIDs when serializing to/from string + /// + public bool ByteSwapGuids = true; + + public static ResourceLoadParameters FromGameVersion(Game game) + { + var p = new ResourceLoadParameters(); + // No game-specific settings yet + return p; + } + + public void ToSerializationSettings(NodeSerializationSettings settings) + { + settings.DefaultByteSwapGuids = ByteSwapGuids; + } + } + public class ResourceConversionParameters { /// @@ -46,14 +66,24 @@ public class ResourceConversionParameters /// public CompressionLevel CompressionLevel = CompressionLevel.DefaultCompression; + /// + /// Byte-swap the last 8 bytes of GUIDs when serializing to/from string + /// + public bool ByteSwapGuids = true; + public static ResourceConversionParameters FromGameVersion(Game game) { - ResourceConversionParameters p = new ResourceConversionParameters(); + var p = new ResourceConversionParameters(); p.PAKVersion = game.PAKVersion(); p.LSF = game.LSFVersion(); p.LSX = game.LSXVersion(); return p; } + + public void ToSerializationSettings(NodeSerializationSettings settings) + { + settings.DefaultByteSwapGuids = ByteSwapGuids; + } } public class ResourceUtils @@ -90,20 +120,20 @@ public static ResourceFormat 
ExtensionToResourceFormat(string path) } } - public static Resource LoadResource(string inputPath) + public static Resource LoadResource(string inputPath, ResourceLoadParameters loadParams) { - return LoadResource(inputPath, ExtensionToResourceFormat(inputPath)); + return LoadResource(inputPath, ExtensionToResourceFormat(inputPath), loadParams); } - public static Resource LoadResource(string inputPath, ResourceFormat format) + public static Resource LoadResource(string inputPath, ResourceFormat format, ResourceLoadParameters loadParams) { using (var stream = File.Open(inputPath, FileMode.Open, FileAccess.Read, FileShare.Read)) { - return LoadResource(stream, format); + return LoadResource(stream, format, loadParams); } } - public static Resource LoadResource(Stream stream, ResourceFormat format) + public static Resource LoadResource(Stream stream, ResourceFormat format, ResourceLoadParameters loadParams) { switch (format) { @@ -111,6 +141,7 @@ public static Resource LoadResource(Stream stream, ResourceFormat format) { using (var reader = new LSXReader(stream)) { + loadParams.ToSerializationSettings(reader.SerializationSettings); return reader.Read(); } } @@ -135,6 +166,7 @@ public static Resource LoadResource(Stream stream, ResourceFormat format) { using (var reader = new LSJReader(stream)) { + loadParams.ToSerializationSettings(reader.SerializationSettings); return reader.Read(); } } @@ -162,6 +194,7 @@ public static void SaveResource(Resource resource, string outputPath, ResourceFo var writer = new LSXWriter(file); writer.Version = conversionParams.LSX; writer.PrettyPrint = conversionParams.PrettyPrint; + conversionParams.ToSerializationSettings(writer.SerializationSettings); writer.Write(resource); break; } @@ -188,6 +221,7 @@ public static void SaveResource(Resource resource, string outputPath, ResourceFo { var writer = new LSJWriter(file); writer.PrettyPrint = conversionParams.PrettyPrint; + conversionParams.ToSerializationSettings(writer.SerializationSettings); writer.Write(resource); break; } @@ -233,7 +267,8 @@ private void EnumerateFiles(List paths, string rootPath, string currentP } } - public void ConvertResources(string inputDir, string outputDir, ResourceFormat inputFormat, ResourceFormat outputFormat, ResourceConversionParameters conversionParams) + public void ConvertResources(string inputDir, string outputDir, ResourceFormat inputFormat, ResourceFormat outputFormat, + ResourceLoadParameters loadParams, ResourceConversionParameters conversionParams) { this.progressUpdate("Enumerating files ...", 0, 1); var paths = new List(); @@ -251,7 +286,7 @@ public void ConvertResources(string inputDir, string outputDir, ResourceFormat i this.progressUpdate("Converting: " + inPath, i, paths.Count); try { - var resource = LoadResource(inPath, inputFormat); + var resource = LoadResource(inPath, inputFormat, loadParams); SaveResource(resource, outPath, outputFormat, conversionParams); } catch (Exception ex) diff --git a/LSLib/LS/Resources/LSJ/LSJReader.cs b/LSLib/LS/Resources/LSJ/LSJReader.cs index f035faed..12b6f6cc 100644 --- a/LSLib/LS/Resources/LSJ/LSJReader.cs +++ b/LSLib/LS/Resources/LSJ/LSJReader.cs @@ -8,6 +8,7 @@ public class LSJReader : IDisposable { private Stream stream; private JsonTextReader reader; + public NodeSerializationSettings SerializationSettings = new NodeSerializationSettings(); public LSJReader(Stream stream) { @@ -22,7 +23,7 @@ public void Dispose() public Resource Read() { var settings = new JsonSerializerSettings(); - settings.Converters.Add(new 
LSJResourceConverter()); + settings.Converters.Add(new LSJResourceConverter(SerializationSettings)); var serializer = JsonSerializer.Create(settings); using (var streamReader = new StreamReader(stream)) diff --git a/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs b/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs index 1d47a7ae..5ae0a52d 100644 --- a/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs +++ b/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs @@ -4,12 +4,20 @@ using System.Text.RegularExpressions; using System.Numerics; using System.Collections.Generic; +using LSLib.LS.Story; +using System.Runtime; namespace LSLib.LS { public class LSJResourceConverter : JsonConverter { private LSMetadata Metadata; + private NodeSerializationSettings SerializationSettings; + + public LSJResourceConverter(NodeSerializationSettings settings) + { + SerializationSettings = settings; + } public override bool CanConvert(Type objectType) { @@ -275,7 +283,14 @@ private NodeAttribute ReadAttribute(JsonReader reader) } case NodeAttribute.DataType.DT_UUID: - attribute.Value = new Guid(reader.Value.ToString()); + if (SerializationSettings.ByteSwapGuids) + { + attribute.Value = NodeAttribute.ByteSwapGuid(new Guid(reader.Value.ToString())); + } + else + { + attribute.Value = new Guid(reader.Value.ToString()); + } break; case NodeAttribute.DataType.DT_IVec2: @@ -742,7 +757,14 @@ private void WriteNode(JsonWriter writer, Node node, JsonSerializer serializer) } case NodeAttribute.DataType.DT_UUID: - writer.WriteValue(((Guid)attribute.Value.Value).ToString()); + if (SerializationSettings.ByteSwapGuids) + { + writer.WriteValue((NodeAttribute.ByteSwapGuid((Guid)attribute.Value.Value)).ToString()); + } + else + { + writer.WriteValue(((Guid)attribute.Value.Value).ToString()); + } break; // TODO: haven't seen any vectors/matrices in D:OS JSON files so far diff --git a/LSLib/LS/Resources/LSJ/LSJWriter.cs b/LSLib/LS/Resources/LSJ/LSJWriter.cs index 6f095ab4..575616d6 100644 --- a/LSLib/LS/Resources/LSJ/LSJWriter.cs +++ b/LSLib/LS/Resources/LSJ/LSJWriter.cs @@ -8,6 +8,7 @@ public class LSJWriter private Stream stream; private JsonTextWriter writer; public bool PrettyPrint = false; + public NodeSerializationSettings SerializationSettings = new NodeSerializationSettings(); public LSJWriter(Stream stream) { @@ -18,7 +19,7 @@ public void Write(Resource rsrc) { var settings = new JsonSerializerSettings(); settings.Formatting = Newtonsoft.Json.Formatting.Indented; - settings.Converters.Add(new LSJResourceConverter()); + settings.Converters.Add(new LSJResourceConverter(SerializationSettings)); var serializer = JsonSerializer.Create(settings); using (var streamWriter = new StreamWriter(stream)) diff --git a/LSLib/LS/Resources/LSX/LSXReader.cs b/LSLib/LS/Resources/LSX/LSXReader.cs index 497e8e1a..ab6891b8 100644 --- a/LSLib/LS/Resources/LSX/LSXReader.cs +++ b/LSLib/LS/Resources/LSX/LSXReader.cs @@ -17,6 +17,7 @@ public class LSXReader : IDisposable private List stack; private int lastLine, lastColumn; private LSXVersion Version = LSXVersion.V3; + public NodeSerializationSettings SerializationSettings = new NodeSerializationSettings(); public LSXReader(Stream stream) { @@ -103,6 +104,8 @@ private void ReadElement() resource.Metadata.Revision = Convert.ToUInt32(reader["revision"]); resource.Metadata.BuildNumber = Convert.ToUInt32(reader["build"]); Version = (resource.Metadata.MajorVersion >= 4) ? 
LSXVersion.V4 : LSXVersion.V3; + var lslibMeta = reader["lslib_meta"]; + SerializationSettings.InitFromMeta(lslibMeta); break; case "region": @@ -167,7 +170,7 @@ private void ReadElement() var attrValue = reader["value"]; if (attrValue != null) { - attr.FromString(attrValue); + attr.FromString(attrValue, SerializationSettings); } if (attr.Type == NodeAttribute.DataType.DT_TranslatedString) diff --git a/LSLib/LS/Resources/LSX/LSXWriter.cs b/LSLib/LS/Resources/LSX/LSXWriter.cs index 99f92302..fc7ffcc0 100644 --- a/LSLib/LS/Resources/LSX/LSXWriter.cs +++ b/LSLib/LS/Resources/LSX/LSXWriter.cs @@ -12,6 +12,7 @@ public class LSXWriter public bool PrettyPrint = false; public LSXVersion Version = LSXVersion.V3; + public NodeSerializationSettings SerializationSettings = new NodeSerializationSettings(); public LSXWriter(Stream stream) { @@ -34,6 +35,7 @@ public void Write(Resource rsrc) writer.WriteAttributeString("minor", rsrc.Metadata.MinorVersion.ToString()); writer.WriteAttributeString("revision", rsrc.Metadata.Revision.ToString()); writer.WriteAttributeString("build", rsrc.Metadata.BuildNumber.ToString()); + writer.WriteAttributeString("lslib_meta", SerializationSettings.BuildMeta()); writer.WriteEndElement(); WriteRegions(rsrc); @@ -123,7 +125,7 @@ private void WriteNode(Node node) else { // Replace bogus 001F characters found in certain LSF nodes - writer.WriteAttributeString("value", attribute.Value.ToString().Replace("\x1f", "")); + writer.WriteAttributeString("value", attribute.Value.AsString(SerializationSettings).Replace("\x1f", "")); } writer.WriteEndElement(); diff --git a/TerrainFixup/Program.cs b/TerrainFixup/Program.cs index c350121e..9cbab7c6 100644 --- a/TerrainFixup/Program.cs +++ b/TerrainFixup/Program.cs @@ -128,7 +128,8 @@ private static void LoadTerrainsFromPath(string path, string patchDir, Dictionar private static void LoadTerrainsFromLSF(string path, string patchDir, Dictionary terrains) { - var terrainRes = ResourceUtils.LoadResource(path); + var loadParams = ResourceLoadParameters.FromGameVersion(LSLib.LS.Enums.Game.DivinityOriginalSin2DE); + var terrainRes = ResourceUtils.LoadResource(path, loadParams); var tmpls = terrainRes.Regions["Templates"]; if (tmpls.Children.TryGetValue("GameObjects", out List terrainTemplates)) { From 114114ec16dad9e4d92ac0690ca76618c139cbf1 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Mon, 18 Sep 2023 22:46:58 +0200 Subject: [PATCH 021/139] Fix possible NPE during LSX serialization --- LSLib/LS/Resources/LSX/LSXReader.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/LSLib/LS/Resources/LSX/LSXReader.cs b/LSLib/LS/Resources/LSX/LSXReader.cs index ab6891b8..eb7b2f56 100644 --- a/LSLib/LS/Resources/LSX/LSXReader.cs +++ b/LSLib/LS/Resources/LSX/LSXReader.cs @@ -105,7 +105,7 @@ private void ReadElement() resource.Metadata.BuildNumber = Convert.ToUInt32(reader["build"]); Version = (resource.Metadata.MajorVersion >= 4) ? LSXVersion.V4 : LSXVersion.V3; var lslibMeta = reader["lslib_meta"]; - SerializationSettings.InitFromMeta(lslibMeta); + SerializationSettings.InitFromMeta(lslibMeta ?? 
""); break; case "region": From aced075b3eaaf6be5276a15126627ecce556854c Mon Sep 17 00:00:00 2001 From: Norbyte Date: Fri, 22 Sep 2023 21:34:21 +0200 Subject: [PATCH 022/139] Add LSF v7 --- LSLib/LS/Enums/LSFVersion.cs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/LSLib/LS/Enums/LSFVersion.cs b/LSLib/LS/Enums/LSFVersion.cs index 7e30f8fb..8e0aac22 100644 --- a/LSLib/LS/Enums/LSFVersion.cs +++ b/LSLib/LS/Enums/LSFVersion.cs @@ -32,10 +32,15 @@ public enum LSFVersion /// VerBG3AdditionalBlob = 0x06, + /// + /// BG3 Patch 3 version with unknown additions + /// + VerBG3Patch3 = 0x07, + /// /// Latest input version supported by this library /// - MaxReadVersion = 0x06, + MaxReadVersion = 0x07, /// /// Latest output version supported by this library From 9b5a8ab966c43e40d40b86985ecedc7c9d17b4f0 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sun, 24 Sep 2023 10:16:08 +0200 Subject: [PATCH 023/139] Fix string serialization gotcha --- LSLib/LS/Resources/LSJ/LSJResourceConverter.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs b/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs index 5ae0a52d..bcd1ebde 100644 --- a/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs +++ b/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs @@ -707,7 +707,7 @@ private void WriteNode(JsonWriter writer, Node node, JsonSerializer serializer) case NodeAttribute.DataType.DT_LSString: case NodeAttribute.DataType.DT_WString: case NodeAttribute.DataType.DT_LSWString: - writer.WriteValue(attribute.Value.ToString()); + writer.WriteValue(attribute.Value.AsString(SerializationSettings)); break; case NodeAttribute.DataType.DT_ULongLong: From e7510c0433fa726d1709069bc7e2f694eab8dea3 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Wed, 27 Sep 2023 16:00:20 +0200 Subject: [PATCH 024/139] Version bump --- LSLib/LS/Common.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/LSLib/LS/Common.cs b/LSLib/LS/Common.cs index b80cf3ce..90349980 100644 --- a/LSLib/LS/Common.cs +++ b/LSLib/LS/Common.cs @@ -9,7 +9,7 @@ public static class Common public const int MinorVersion = 18; - public const int PatchVersion = 6; + public const int PatchVersion = 7; // Version of LSTools profile data in generated DAE files public const int ColladaMetadataVersion = 3; From 0354342705d23ee770452e653d5f8bb293a60296 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Wed, 27 Sep 2023 16:07:32 +0200 Subject: [PATCH 025/139] Type ID/name compatibility improvement when serializing LSX --- LSLib/LS/Resources/LSX/LSXReader.cs | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/LSLib/LS/Resources/LSX/LSXReader.cs b/LSLib/LS/Resources/LSX/LSXReader.cs index eb7b2f56..22f9e5f9 100644 --- a/LSLib/LS/Resources/LSX/LSXReader.cs +++ b/LSLib/LS/Resources/LSX/LSXReader.cs @@ -148,17 +148,10 @@ private void ReadElement() case "attribute": UInt32 attrTypeId; - if (Version >= LSXVersion.V4) + if (!UInt32.TryParse(reader["type"], out attrTypeId)) { attrTypeId = (uint)AttributeTypeMaps.TypeToId[reader["type"]]; } - else - { - if (!UInt32.TryParse(reader["type"], out attrTypeId)) - { - attrTypeId = (uint)AttributeTypeMaps.TypeToId[reader["type"]]; - } - } var attrName = reader["id"]; if (attrTypeId > (int)NodeAttribute.DataType.DT_Max) From 4069a11048e1e045dc0540f909c9f13b201d2ab7 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Thu, 23 Nov 2023 07:51:27 +0100 Subject: [PATCH 026/139] Update stat validator for BG3 --- LSLib/LS/Mods/ModResources.cs | 64 +- 
LSLib/LS/Stats/Parser/PropertyDefinitions.cs | 33 +- LSLib/LS/Stats/Parser/StatLua.lex | 68 + LSLib/LS/Stats/Parser/StatLua.yy | 101 ++ LSLib/LS/Stats/Parser/StatLuaParser.cs | 24 + LSLib/LS/Stats/Parser/StatNodes.cs | 1 + LSLib/LS/Stats/Parser/StatProperty.lex | 87 +- LSLib/LS/Stats/Parser/StatProperty.yy | 274 ++-- LSLib/LS/Stats/Parser/StatPropertyParser.cs | 283 ++-- LSLib/LS/Stats/StatDefinitions.cs | 1417 +++++++++++++++--- LSLib/LS/Stats/StatFileParser.cs | 230 +-- LSLib/LS/Stats/StatPropertyParsers.cs | 357 ----- LSLib/LS/Stats/StatValueParsers.cs | 752 ++++++++++ LSLib/LSLib.csproj | 10 +- StatParser/Arguments.cs | 13 +- StatParser/Program.cs | 7 +- StatParser/StatChecker.cs | 73 +- StatProperty.lst | 238 +++ 18 files changed, 2958 insertions(+), 1074 deletions(-) create mode 100644 LSLib/LS/Stats/Parser/StatLua.lex create mode 100644 LSLib/LS/Stats/Parser/StatLua.yy create mode 100644 LSLib/LS/Stats/Parser/StatLuaParser.cs delete mode 100644 LSLib/LS/Stats/StatPropertyParsers.cs create mode 100644 LSLib/LS/Stats/StatValueParsers.cs create mode 100644 StatProperty.lst diff --git a/LSLib/LS/Mods/ModResources.cs b/LSLib/LS/Mods/ModResources.cs index d867fb74..11f681fa 100644 --- a/LSLib/LS/Mods/ModResources.cs +++ b/LSLib/LS/Mods/ModResources.cs @@ -1,7 +1,7 @@ -using LSLib.LS.Story.Compiler; +using Alphaleonis.Win32.Filesystem; +using LSLib.LS.Story.Compiler; using System; using System.Collections.Generic; -using System.IO; using System.Linq; using System.Text.RegularExpressions; @@ -18,6 +18,11 @@ public class ModInfo public AbstractFileInfo OrphanQueryIgnoreList; public AbstractFileInfo StoryHeaderFile; public AbstractFileInfo TypeCoercionWhitelistFile; + public AbstractFileInfo ModifiersFile; + public AbstractFileInfo ValueListsFile; + public AbstractFileInfo ActionResourcesFile; + public AbstractFileInfo ActionResourceGroupsFile; + public List TagFiles = new List(); public ModInfo(string name) { @@ -42,6 +47,8 @@ public class ModPathVisitor private static readonly Regex metaRe = new Regex("^Mods/([^/]+)/meta\\.lsx$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled); private static readonly Regex scriptRe = new Regex("^Mods/([^/]+)/Story/RawFiles/Goals/(.*\\.txt)$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled); private static readonly Regex statRe = new Regex("^Public/([^/]+)/Stats/Generated/Data/(.*\\.txt)$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled); + private static readonly Regex staticLsxRe = new Regex("^Public/([^/]+)/(.*\\.lsx)$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled); + private static readonly Regex statStructureRe = new Regex("^Public/([^/]+)/Stats/Generated/Structure/(.*\\.txt)$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled); private static readonly Regex orphanQueryIgnoresRe = new Regex("^Mods/([^/]+)/Story/story_orphanqueries_ignore_local\\.txt$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled); private static readonly Regex storyDefinitionsRe = new Regex("^Mods/([^/]+)/Story/RawFiles/story_header\\.div$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled); private static readonly Regex typeCoercionWhitelistRe = new Regex("^Mods/([^/]+)/Story/RawFiles/TypeCoercionWhitelist\\.txt$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled); @@ -50,12 +57,13 @@ public class ModPathVisitor // Pattern 
for excluding subsequent parts of a multi-part archive public static readonly Regex archivePartRe = new Regex("^(.*)_[0-9]+\\.pak$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled); - private readonly ModResources Resources; + public readonly ModResources Resources; public bool CollectStoryGoals = false; public bool CollectStats = false; public bool CollectGlobals = false; public bool CollectLevels = false; + public bool CollectGuidResources = false; public bool LoadPackages = true; public TargetGame Game = TargetGame.DOS2; @@ -173,12 +181,53 @@ private void DiscoverPackagedFile(AbstractFileInfo file) if (CollectStats) { - if (file.Name.EndsWith(".txt", StringComparison.Ordinal) && file.Name.Contains("/Stats/Generated/Data")) + if (file.Name.EndsWith(".txt", StringComparison.Ordinal)) { - var match = statRe.Match(file.Name); + if (file.Name.Contains("/Stats/Generated/Data")) + { + var match = statRe.Match(file.Name); + if (match != null && match.Success) + { + AddStatToMod(match.Groups[1].Value, match.Groups[2].Value, file); + } + } + else if (file.Name.Contains("/Stats/Generated/Structure")) + { + var match = statStructureRe.Match(file.Name); + if (match != null && match.Success) + { + if (file.Name.EndsWith("Modifiers.txt")) + { + GetMod(match.Groups[1].Value).ModifiersFile = file; + } + else if (file.Name.EndsWith("ValueLists.txt")) + { + GetMod(match.Groups[1].Value).ValueListsFile = file; + } + } + } + } + } + + if (CollectGuidResources) + { + if (file.Name.EndsWith(".lsx", StringComparison.Ordinal)) + { + var match = staticLsxRe.Match(file.Name); if (match != null && match.Success) { - AddStatToMod(match.Groups[1].Value, match.Groups[2].Value, file); + if (match.Groups[2].Value == "ActionResourceDefinitions/ActionResourceDefinitions.lsx") + { + GetMod(match.Groups[1].Value).ActionResourcesFile = file; + } + else if (match.Groups[2].Value == "ActionResourceGroupDefinitions/ActionResourceGroupDefinitions.lsx") + { + GetMod(match.Groups[1].Value).ActionResourceGroupsFile = file; + } + else if (match.Groups[2].Value.StartsWith("Tags/")) + { + GetMod(match.Groups[1].Value).TagFiles.Add(file); + } } } } @@ -233,12 +282,15 @@ public void DiscoverBuiltinPackages(string gameDataPath) "EngineShaders.pak", "Game.pak", "GamePlatform.pak", + "Gustav_NavCloud.pak", "Gustav_Textures.pak", + "Gustav_Video.pak", "Icons.pak", "LowTex.pak", "Materials.pak", "Minimaps.pak", "Models.pak", + "PsoCache.pak", "SharedSoundBanks.pak", "SharedSounds.pak", "Textures.pak", diff --git a/LSLib/LS/Stats/Parser/PropertyDefinitions.cs b/LSLib/LS/Stats/Parser/PropertyDefinitions.cs index 69d60325..24418698 100644 --- a/LSLib/LS/Stats/Parser/PropertyDefinitions.cs +++ b/LSLib/LS/Stats/Parser/PropertyDefinitions.cs @@ -15,19 +15,10 @@ public class Requirement public string TagParam; } - public enum PropertyContext - { - None, - Self, - SelfOnHit, - SelfOnEquip, - Target, - AoE - } - public class Property { - public PropertyContext Context; + public string TextKey; + public string Context; public object Condition; public PropertyAction Action; } @@ -35,25 +26,7 @@ public class Property public class PropertyAction { public string Action; - public List Arguments; - } - - public class PropertyStatusBoost : PropertyAction - { - public StatusBoost Boost; - } - - public enum StatusBoostType - { - None, - AoE, - Surface - }; - - public class StatusBoost - { - public StatusBoostType Type; - public List SurfaceTypes; + public List Arguments; } public enum ConditionOperator diff --git 
a/LSLib/LS/Stats/Parser/StatLua.lex b/LSLib/LS/Stats/Parser/StatLua.lex new file mode 100644 index 00000000..c0be153d --- /dev/null +++ b/LSLib/LS/Stats/Parser/StatLua.lex @@ -0,0 +1,68 @@ +%namespace LSLib.LS.Stats.Lua +%visibility public +%scannertype StatLuaScanner +%scanbasetype StatLuaScanBase +%tokentype StatLuaTokens + +letter [a-zA-Z_] +digit [0-9] +namechar [a-zA-Z0-9_] +nonseparator [^,;:()\[\]!+*/^&%~|><=.# ] + +%% + +/* Special characters */ +":" return (int)':'; +"(" return (int)'('; +")" return (int)')'; +"[" return (int)'['; +"]" return (int)']'; +"," return (int)','; +";" return (int)';'; +"." return (int)'.'; +[ ] ; + + +"nil" return (int)StatLuaTokens.LUA_RESERVED_VAL; +"false" return (int)StatLuaTokens.LUA_RESERVED_VAL; +"true" return (int)StatLuaTokens.LUA_RESERVED_VAL; +"..." return (int)StatLuaTokens.LUA_RESERVED_VAL; + +"+" return (int)StatLuaTokens.BINOP; +"*" return (int)StatLuaTokens.BINOP; +"/" return (int)StatLuaTokens.BINOP; +"//" return (int)StatLuaTokens.BINOP; +"^" return (int)StatLuaTokens.BINOP; +"%" return (int)StatLuaTokens.BINOP; +"&" return (int)StatLuaTokens.BINOP; +"|" return (int)StatLuaTokens.BINOP; +">>" return (int)StatLuaTokens.BINOP; +"<<" return (int)StatLuaTokens.BINOP; +".." return (int)StatLuaTokens.BINOP; +"<" return (int)StatLuaTokens.BINOP; +"<=" return (int)StatLuaTokens.BINOP; +">" return (int)StatLuaTokens.BINOP; +">=" return (int)StatLuaTokens.BINOP; +"==" return (int)StatLuaTokens.BINOP; +"~=" return (int)StatLuaTokens.BINOP; +"and" return (int)StatLuaTokens.BINOP; +"or" return (int)StatLuaTokens.BINOP; + +"not" return (int)StatLuaTokens.UNOP; +"#" return (int)StatLuaTokens.UNOP; +"!" return (int)StatLuaTokens.UNOP; + +"~" return (int)StatLuaTokens.BIN_OR_UNOP; +"-" return (int)StatLuaTokens.BIN_OR_UNOP; + +\"[^']*\" { yylval = yytext; return (int)StatLuaTokens.LITERAL_STRING; } +'[^']*' { yylval = yytext; return (int)StatLuaTokens.LITERAL_STRING; } +{letter}({namechar})+ { yylval = yytext; return (int)StatLuaTokens.NAME; } +{digit}({digit})* { yylval = yytext; return (int)StatLuaTokens.INTEGER; } +{digit}{digit}*d{digit}{digit}* { yylval = yytext; return (int)StatLuaTokens.DICE_ROLL; } + +. return ((int)StatLuaTokens.BAD); + +%{ + yylloc = new QUT.Gppg.LexLocation(tokLin, tokCol, tokELin, tokECol); +%} diff --git a/LSLib/LS/Stats/Parser/StatLua.yy b/LSLib/LS/Stats/Parser/StatLua.yy new file mode 100644 index 00000000..ce15f3c8 --- /dev/null +++ b/LSLib/LS/Stats/Parser/StatLua.yy @@ -0,0 +1,101 @@ +%namespace LSLib.LS.Stats.Lua +%partial +%visibility public +%parsertype StatLuaParser +%tokentype StatLuaTokens +%YYSTYPE System.Object + +%start Root + +/* Special token for invalid characters */ +%token BAD + +/* Integer literal */ +%token INTEGER +/* Text-like (unquoted) literal */ +%token NAME +/* eg. 1d10 */ +%token DICE_ROLL + +/* Lua binary operator */ +%token BINOP +/* Lua unary operator */ +%token UNOP +/* Lua binary or unary operator */ +%token BIN_OR_UNOP +/* nil, true, false */ +%token LUA_RESERVED_VAL +/* quoted strings */ +%token LITERAL_STRING + +%% + +Root : LExp; + +LName : NAME; + +LVar : LName + | LPrefixExp '[' LExp ']' + | LPrefixExp '.' 
LName + ; + +LOptionalExpList : /* empty */ + | LExpList + ; + +LExpList : LExp + | LExpList ',' LExp + ; + +LExp : LExpNoUnOp + | LUnOp LExp + ; + +LExpNoUnOp : LUA_RESERVED_VAL + | INTEGER + | LITERAL_STRING + | DICE_ROLL + | LPrefixExp + | LTableConstructor + | LExpNoUnOp LBinOp LExp + ; + +LPrefixExp : LVar + | LFunctionCall + | '(' LExp ')' + ; + +LFunctionCall : LPrefixExp LArgs + | LPrefixExp ':' LName LArgs + ; + +LArgs : '(' LOptionalExpList ')' + | LTableConstructor + | LITERAL_STRING + ; + +LTableConstructor : '{' LOptionalFieldList '}'; + +LOptionalFieldList : /* empty */ + | LFieldList + ; + +LFieldList : LField + | LFieldList LFieldSep LField + ; + +LField : '[' LExp ']' '=' LExp + | LName '=' LExp + | LExp + ; + +LFieldSep : ','; +// Why was there ';' ? + +LBinOp : BINOP + | BIN_OR_UNOP + ; + +LUnOp : UNOP + | BIN_OR_UNOP + ; diff --git a/LSLib/LS/Stats/Parser/StatLuaParser.cs b/LSLib/LS/Stats/Parser/StatLuaParser.cs new file mode 100644 index 00000000..9aa624b4 --- /dev/null +++ b/LSLib/LS/Stats/Parser/StatLuaParser.cs @@ -0,0 +1,24 @@ +using QUT.Gppg; + +namespace LSLib.LS.Stats.Lua +{ + public partial class StatLuaScanner + { + public LexLocation LastLocation() + { + return new LexLocation(tokLin, tokCol, tokELin, tokECol); + } + } + + public abstract class StatLuaScanBase : AbstractScanner + { + protected virtual bool yywrap() { return true; } + } + + public partial class StatLuaParser + { + public StatLuaParser(StatLuaScanner scnr) : base(scnr) + { + } + } +} \ No newline at end of file diff --git a/LSLib/LS/Stats/Parser/StatNodes.cs b/LSLib/LS/Stats/Parser/StatNodes.cs index eeeb55b6..ed29f3e2 100644 --- a/LSLib/LS/Stats/Parser/StatNodes.cs +++ b/LSLib/LS/Stats/Parser/StatNodes.cs @@ -12,6 +12,7 @@ public class StatDeclaration public CodeLocation Location; public Dictionary Properties = new Dictionary(); public Dictionary PropertyLocations = new Dictionary(); + public bool WasInstantiated = false; } /// diff --git a/LSLib/LS/Stats/Parser/StatProperty.lex b/LSLib/LS/Stats/Parser/StatProperty.lex index 1f606a3c..40739323 100644 --- a/LSLib/LS/Stats/Parser/StatProperty.lex +++ b/LSLib/LS/Stats/Parser/StatProperty.lex @@ -7,60 +7,87 @@ letter [a-zA-Z_] digit [0-9] namechar [a-zA-Z0-9_] -nonseparator [^,;:()|!& ] +nonseparator [^,;:()\[\]! 
] %% /* Special trigger words to determine expression type */ -"__TYPE_Properties__" return (int)StatPropertyTokens.EXPR_PROPERTIES; -"__TYPE_Conditions__" return (int)StatPropertyTokens.EXPR_CONDITIONS; +"__TYPE_Properties__" return (int)StatPropertyTokens.EXPR_PROPERTIES; +"__TYPE_DescriptionParams__" return (int)StatPropertyTokens.EXPR_DESCRIPTION_PARAMS; "__TYPE_Requirements__" return (int)StatPropertyTokens.EXPR_REQUIREMENTS; -/* Property Contexts */ -[Ss][Ee][Ll][Ff] return (int)StatPropertyTokens.CTX_SELF; -"TARGET" return (int)StatPropertyTokens.CTX_TARGET; -"AOE" return (int)StatPropertyTokens.CTX_AOE; -"OnHit" return (int)StatPropertyTokens.CTX_ON_HIT; -"OnEquip" return (int)StatPropertyTokens.CTX_ON_EQUIP; - /* Reserved words */ "IF" return (int)StatPropertyTokens.IF; +/* Text keys */ +"CastOffhand" return (int)StatPropertyTokens.TEXT_KEY; +"Cast2" return (int)StatPropertyTokens.TEXT_KEY; +"Cast3" return (int)StatPropertyTokens.TEXT_KEY; + +/* Stats contexts */ +"ABILITY_CHECK" return (int)StatPropertyTokens.CONTEXT; +"ACTION_RESOURCES_CHANGED" return (int)StatPropertyTokens.CONTEXT; +"AI_IGNORE" return (int)StatPropertyTokens.CONTEXT; +"AI_ONLY" return (int)StatPropertyTokens.CONTEXT; +"AOE" return (int)StatPropertyTokens.CONTEXT; +"ATTACK" return (int)StatPropertyTokens.CONTEXT; +"ATTACKED" return (int)StatPropertyTokens.CONTEXT; +"ATTACKED_IN_MELEE_RANGE" return (int)StatPropertyTokens.CONTEXT; +"ATTACKING_IN_MELEE_RANGE" return (int)StatPropertyTokens.CONTEXT; +"CAST" return (int)StatPropertyTokens.CONTEXT; +"CAST_RESOLVED" return (int)StatPropertyTokens.CONTEXT; +"COMBAT_ENDED" return (int)StatPropertyTokens.CONTEXT; +"CREATE_2" return (int)StatPropertyTokens.CONTEXT; +"DAMAGE" return (int)StatPropertyTokens.CONTEXT; +"DAMAGED" return (int)StatPropertyTokens.CONTEXT; +"DAMAGE_PREVENTED" return (int)StatPropertyTokens.CONTEXT; +"DAMAGED_PREVENTED" return (int)StatPropertyTokens.CONTEXT; +"ENTER_ATTACK_RANGE" return (int)StatPropertyTokens.CONTEXT; +"EQUIP" return (int)StatPropertyTokens.CONTEXT; +"LOCKPICKING_SUCCEEDED" return (int)StatPropertyTokens.CONTEXT; +"GROUND" return (int)StatPropertyTokens.CONTEXT; +"HEAL" return (int)StatPropertyTokens.CONTEXT; +"HEALED" return (int)StatPropertyTokens.CONTEXT; +"INTERRUPT_USED" return (int)StatPropertyTokens.CONTEXT; +"INVENTORY_CHANGED" return (int)StatPropertyTokens.CONTEXT; +"LEAVE_ATTACK_RANGE" return (int)StatPropertyTokens.CONTEXT; +"LONG_REST" return (int)StatPropertyTokens.CONTEXT; +"MOVED_DISTANCE" return (int)StatPropertyTokens.CONTEXT; +"OBSCURITY_CHANGED" return (int)StatPropertyTokens.CONTEXT; +"PROFICIENCY_CHANGED" return (int)StatPropertyTokens.CONTEXT; +"PROJECTILE" return (int)StatPropertyTokens.CONTEXT; +"PUSH" return (int)StatPropertyTokens.CONTEXT; +"PUSHED" return (int)StatPropertyTokens.CONTEXT; +"SELF" return (int)StatPropertyTokens.CONTEXT; +"SHORT_REST" return (int)StatPropertyTokens.CONTEXT; +"STATUS_APPLIED" return (int)StatPropertyTokens.CONTEXT; +"STATUS_APPLY" return (int)StatPropertyTokens.CONTEXT; +"STATUS_REMOVE" return (int)StatPropertyTokens.CONTEXT; +"STATUS_REMOVED" return (int)StatPropertyTokens.CONTEXT; +"SURFACE_ENTER" return (int)StatPropertyTokens.CONTEXT; +"TARGET" return (int)StatPropertyTokens.CONTEXT; +"TURN" return (int)StatPropertyTokens.CONTEXT; + /* Special characters */ ":" return (int)':'; "(" return (int)'('; ")" return (int)')'; +"[" return (int)'['; +"]" return (int)']'; "," return (int)','; ";" return (int)';'; -"|" return (int)'|'; -"&" return (int)'&'; "!" 
return (int)'!'; +"-" return (int)'-'; +"." return (int)'.'; [ ] ; -"Resurrect" { yylval = MakeLiteral(yytext); return (int)StatPropertyTokens.ACT_RESURRECT; } -"Sabotage" { yylval = MakeLiteral(yytext); return (int)StatPropertyTokens.ACT_SABOTAGE; } -"Summon" { yylval = MakeLiteral(yytext); return (int)StatPropertyTokens.ACT_SUMMON; } -"Force" { yylval = MakeLiteral(yytext); return (int)StatPropertyTokens.ACT_FORCE; } -"CLEANSE" { yylval = MakeLiteral(yytext); return (int)StatPropertyTokens.ACT_CLEANSE; } -"AOEBOOST" { yylval = MakeLiteral(yytext); return (int)StatPropertyTokens.ACT_AOEBOOST; } -"SURFACEBOOST" { yylval = MakeLiteral(yytext); return (int)StatPropertyTokens.ACT_SURFACEBOOST; } - -(AlwaysBackstab|Unbreakable|CanBackstab|AlwaysHighGround) { yylval = MakeLiteral(yytext); return (int)StatPropertyTokens.ACT_CUSTOM_PROPERTY; } -(Ignite|Melt|Freeze|Electrify|Bless|Curse|Condense|Vaporize|Bloodify|Contaminate|Oilify|Shatter) { yylval = MakeLiteral(yytext); return (int)StatPropertyTokens.ACT_SURFACE_CHANGE; } -(CreateSurface|TargetCreateSurface|CreateConeSurface) { yylval = MakeLiteral(yytext); return (int)StatPropertyTokens.ACT_CREATE_SURFACE; } -"Douse" { yylval = MakeLiteral(yytext); return (int)StatPropertyTokens.ACT_GAME_ACTION; } -"SwapPlaces" { yylval = MakeLiteral(yytext); return (int)StatPropertyTokens.ACT_SWAP_PLACES; } -"Equalize" { yylval = MakeLiteral(yytext); return (int)StatPropertyTokens.ACT_EQUALIZE; } -"Pickup" { yylval = MakeLiteral(yytext); return (int)StatPropertyTokens.ACT_PICKUP; } - "Tag" { yylval = MakeLiteral(yytext); return (int)StatPropertyTokens.REQUIREMENT_TAG; } {letter}({namechar})+ { yylval = MakeLiteral(yytext); return (int)StatPropertyTokens.NAME; } (-)?{digit}({digit})* { yylval = MakeLiteral(yytext); return (int)StatPropertyTokens.INTEGER; } +{digit}{digit}*d{digit}{digit}* { yylval = yytext; return (int)StatPropertyTokens.DICE_ROLL; } ({nonseparator})+ { yylval = MakeLiteral(yytext); return (int)StatPropertyTokens.TEXT; } -. return ((int)StatPropertyTokens.BAD); - %{ yylloc = new QUT.Gppg.LexLocation(tokLin, tokCol, tokELin, tokECol); %} diff --git a/LSLib/LS/Stats/Parser/StatProperty.yy b/LSLib/LS/Stats/Parser/StatProperty.yy index 497953aa..a68b2030 100644 --- a/LSLib/LS/Stats/Parser/StatProperty.yy +++ b/LSLib/LS/Stats/Parser/StatProperty.yy @@ -9,7 +9,7 @@ /* Trigger Lexemes */ %token EXPR_PROPERTIES -%token EXPR_CONDITIONS +%token EXPR_DESCRIPTION_PARAMS %token EXPR_REQUIREMENTS /* Requirements */ @@ -18,53 +18,31 @@ /* Reserved words */ %token IF -/* Property Contexts */ -%token CTX_SELF -%token CTX_TARGET -%token CTX_AOE -%token CTX_ON_HIT -%token CTX_ON_EQUIP - -/* Action Types */ -%token ACT_CUSTOM_PROPERTY -%token ACT_SURFACE_CHANGE -%token ACT_GAME_ACTION -%token ACT_CREATE_SURFACE -%token ACT_SWAP_PLACES -%token ACT_EQUALIZE -%token ACT_PICKUP -%token ACT_RESURRECT -%token ACT_SABOTAGE -%token ACT_SUMMON -%token ACT_FORCE -%token ACT_CLEANSE -%token ACT_AOEBOOST -%token ACT_SURFACEBOOST - -/* Special token for invalid characters */ -%token BAD - +/* Functor Context */ +%token CONTEXT /* Status/Tag name */ %token NAME +/* Known text keys */ +%token TEXT_KEY /* Integer literal */ %token INTEGER /* Text-like (unquoted) literal */ %token TEXT +/* eg. 
1d10 */ +%token DICE_ROLL %% /* A special "trigger word" is prepended to support parsing multiple types from the same lexer/parser */ Root : EXPR_PROPERTIES Properties { $$ = $2; } - | EXPR_CONDITIONS Conditions { $$ = $2; } + | EXPR_DESCRIPTION_PARAMS OptionalFunctorArgs { $$ = $2; } | EXPR_REQUIREMENTS Requirements { $$ = $2; } ; /****************************************************************** * - * - * REQUIREMENTS PARSING - * + * REQUIREMENTS PARSING * ******************************************************************/ @@ -87,9 +65,31 @@ Requirement : NAME { $$ = MakeRequirement($1); } /****************************************************************** * + * DESCRIPTION PARAM PARSING * - * PROPERTY PARSING + ******************************************************************/ + +OptionalDescriptionParams : /* empty */ { $$ = MakePropertyList(); } + | DescriptionParams + ; + +DescriptionParams : DescriptionParam { $$ = AddProperty(MakePropertyList(), $1); } + | DescriptionParams ';' + | DescriptionParams ';' DescriptionParam { $$ = AddProperty($1, $3); } + ; + +DescriptionParam : FunctorName '(' OptionalFunctorArgs ')' { $$ = MakeProperty(null, null, MakeAction($1, $3)); } + | INTEGER + | '-' INTEGER + | NAME + | DICE_ROLL + | '-' DICE_ROLL + ; + + +/****************************************************************** * + * PROPERTY PARSING * ******************************************************************/ @@ -99,181 +99,79 @@ Properties : /* empty */ { $$ = MakePropertyList(); } | Properties ';' Property { $$ = AddProperty($1, $3); } ; -Property : PropContext PropCondition PropAction { $$ = MakeProperty($1, $2, $3); }; - -PropContext : { $$ = PropertyContext.None; } /* empty */ - | CTX_SELF ':' { $$ = PropertyContext.Self; } - | CTX_SELF ':' PropSelfContext ':' { $$ = $3; } - | CTX_TARGET ':' { $$ = PropertyContext.Target; } - | CTX_AOE ':' { $$ = PropertyContext.AoE; } - ; +TextKeyProperties : TEXT_KEY '[' Properties ']' { $$ = SetTextKey($3, $1); }; -PropSelfContext : CTX_ON_HIT { $$ = PropertyContext.SelfOnHit; } - | CTX_ON_EQUIP { $$ = PropertyContext.SelfOnEquip; } - ; - -PropCondition : /* empty */ - | IF '(' ConditionExpr ')' ':' { $$ = $3; } - ; - -PropAction : ActCustomProperty - | ActSurfaceChange - | ActGameAction - | ActCreateSurface - | ActSwapPlaces - | ActPickup - | ActEqualize - | ActResurrect - | ActSabotage - | ActSummon - | ActForce - | ActCleanse - | ActStatus - ; - -ActCustomProperty : ACT_CUSTOM_PROPERTY { $$ = MakeAction($1, MakeArgumentList()); }; -ActSurfaceChange : ACT_SURFACE_CHANGE SurfaceChangeArgs { $$ = MakeAction($1, $2); }; -ActGameAction : ACT_GAME_ACTION GameActionArgs { $$ = MakeAction($1, $2); }; -ActCreateSurface : ACT_CREATE_SURFACE CreateSurfaceArgs { $$ = MakeAction($1, $2); }; -ActSwapPlaces : ACT_SWAP_PLACES SwapPlacesArgs { $$ = MakeAction($1, $2); }; -ActPickup : ACT_PICKUP PickupArgs { $$ = MakeAction($1, $2); }; -/* Args: HealType */ -ActEqualize : ACT_EQUALIZE ',' TextArg { $$ = MakeAction($1, MakeArgumentList($3)); }; -ActResurrect : ACT_RESURRECT ResurrectArgs { $$ = MakeAction($1, $2); }; -ActSabotage : ACT_SABOTAGE SabotageArgs { $$ = MakeAction($1, $2); }; -ActSummon : ACT_SUMMON ',' TextArg SummonOptArgs { $$ = MakeAction($1, PrependArgumentList($3, $4)); }; -ActForce : ACT_FORCE ',' IntArg { $$ = MakeAction($1, MakeArgumentList($3)); }; -ActCleanse : ACT_CLEANSE ':' NAME { $$ = MakeAction($1, MakeArgumentList($3)); }; -ActStatus : StatusBoost StatusName StatusArgs { $$ = MakeStatusBoost($1, $2, $3); }; - -SurfaceChangeArgs : /* 
empty */ { $$ = MakeArgumentList(); } - | ',' IntArg { $$ = MakeArgumentList($2); } - | ',' IntArg ',' OptionalIntArg { $$ = MakeArgumentList($2, $4); } - | ',' IntArg ',' OptionalIntArg ',' IntArg { $$ = MakeArgumentList($2, $4, $6); } - | ',' IntArg ',' OptionalIntArg ',' IntArg ',' IntArg { $$ = MakeArgumentList($2, $4, $6, $8); } - ; - -/* TODO -- specific arg checks for each action !!! */ -GameActionArgs : /* empty */ { $$ = MakeArgumentList(); } - | ',' IntArg { $$ = MakeArgumentList($2); } - | ',' IntArg ',' OptionalIntArg { $$ = MakeArgumentList($2, $4); } - | ',' IntArg ',' OptionalIntArg ',' OptionalTextArg { $$ = MakeArgumentList($2, $4, $6); } - | ',' IntArg ',' OptionalIntArg ',' OptionalTextArg ',' IntArg { $$ = MakeArgumentList($2, $4, $6, $8); } - | ',' IntArg ',' OptionalIntArg ',' OptionalTextArg ',' IntArg ',' OptionalIntArg { $$ = MakeArgumentList($2, $4, $6, $8, $10); } - ; - -/* Radius; Duration; SurfaceType/DamageType; %Chance */ -CreateSurfaceArgs : /* empty */ { $$ = MakeArgumentList(); } - | ',' IntArg { $$ = MakeArgumentList($2); } - | ',' IntArg ',' OptionalIntArg { $$ = MakeArgumentList($2, $4); } - | ',' IntArg ',' OptionalIntArg ',' OptionalTextArg { $$ = MakeArgumentList($2, $4, $6); } - | ',' IntArg ',' OptionalIntArg ',' OptionalTextArg ',' IntArg { $$ = MakeArgumentList($2, $4, $6, $8); } - | ',' IntArg ',' OptionalIntArg ',' OptionalTextArg ',' IntArg ',' OptionalIntArg { $$ = MakeArgumentList($2, $4, $6, $8, $10); } - ; +Property : PropContexts PropCondition FunctorCall { $$ = MakeProperty($1, $2, $3); } + | TextKeyProperties + ; -/* -; -; CasterEffect:TargetEffect */ -SwapPlacesArgs : /* empty */ { $$ = MakeArgumentList(); } - | ',' OptionalIntArg { $$ = MakeArgumentList($2); } - | ',' OptionalIntArg ',' OptionalIntArg { $$ = MakeArgumentList($2, $4); } - | ',' OptionalIntArg ',' OptionalIntArg ',' OptionalTextArg { $$ = MakeArgumentList($2, $4, $6); } - | ',' OptionalIntArg ',' OptionalIntArg ',' OptionalTextArg ':' OptionalTextArg { $$ = MakeArgumentList($2, $4, $6, $8); } - ; - -/* -; -; TargetEffect */ -PickupArgs : /* empty */ { $$ = MakeArgumentList(); } - | ',' OptionalIntArg { $$ = MakeArgumentList($2); } - | ',' OptionalIntArg ',' OptionalIntArg { $$ = MakeArgumentList($2, $4); } - | ',' OptionalIntArg ',' OptionalIntArg ',' OptionalTextArg { $$ = MakeArgumentList($2, $4, $6); } - ; - -ResurrectArgs : /* empty */ { $$ = MakeArgumentList(); } - | ',' IntArg { $$ = MakeArgumentList($2); } - | ',' IntArg ',' IntArg { $$ = MakeArgumentList($2, $4); } - ; - -SabotageArgs : /* empty */ { $$ = MakeArgumentList(); } - | ',' IntArg { $$ = MakeArgumentList($2); } +PropContexts : /* empty */ + | PropContextList { $$ = $1; } ; - -/* TODO - Arg #2 - TotemArg */ -SummonOptArgs : /* empty */ { $$ = MakeArgumentList(); } - | ',' IntArg { $$ = MakeArgumentList($2); } - | ',' IntArg ',' OptionalTextArg { $$ = MakeArgumentList($2, $4); } - | ',' IntArg ',' OptionalTextArg ',' TextArg { $$ = MakeArgumentList($2, $4, $6); } - ; - -StatusArgs : /* empty */ { $$ = MakeArgumentList(); } - | ',' IntArg { $$ = MakeArgumentList($2); } - | ',' IntArg ',' OptionalIntArg { $$ = MakeArgumentList($2, $4); } - | ',' IntArg ',' OptionalIntArg ',' OptionalTextArg { $$ = MakeArgumentList($2, $4, $6); } - | ',' IntArg ',' OptionalIntArg ',' OptionalTextArg ',' IntArg { $$ = MakeArgumentList($2, $4, $6, $8); } - | ',' IntArg ',' OptionalIntArg ',' OptionalTextArg ',' IntArg ',' IntArg { $$ = MakeArgumentList($2, $4, $6, $8, $10); } - ; -IntArg : INTEGER { $$ = Int32.Parse($1 as 
string); }; - -OptionalIntArg : /* empty */ - | IntArg - ; +PropContextList : PropContext { $$ = $1; } + | PropContextList PropContext { $$ = $1; } + ; -TextArg : INTEGER - | NAME - | TEXT - | ACT_SURFACE_CHANGE - | REQUIREMENT_TAG - ; +PropContext : CONTEXT ':' { $$ = $1; }; -OptionalTextArg : /* empty */ { $$ = ""; } - | TextArg - ; +PropCondition : /* empty */ + | IF '(' NonEmptyFunctorArg ')' ':' { $$ = $3; } + ; -StatusName : NAME; +FunctorCall : FunctorName OptionalFunctorArgList { $$ = MakeAction($1, $2); }; -StatusBoost : /* empty */ { $$ = MakeStatusBoostType(StatusBoostType.None, null); } - | ACT_AOEBOOST ':' { $$ = MakeStatusBoostType(StatusBoostType.AoE, null); } - | ACT_SURFACEBOOST '(' SurfaceList ')' ':' { $$ = MakeStatusBoostType(StatusBoostType.Surface, $3); } +FunctorName : NAME + | REQUIREMENT_TAG ; -SurfaceList : Surface { $$ = AddSurface(MakeSurfaceList(), $1); } - | SurfaceList '|' Surface { $$ = AddSurface($1, $3); } - ; +OptionalFunctorArgList : /* empty */ { $$ = MakeArgumentList(); } + | '(' OptionalFunctorArgs ')' { $$ = $2; } + ; -Surface : NAME { $$ = MakeSurface($1); }; +OptionalFunctorArgs : /* empty */ { $$ = MakeArgumentList(); } + | FunctorArgs + ; -/****************************************************************** - * - * - * CONDITION PARSING - * - * - ******************************************************************/ +FunctorArgs : NonEmptyFunctorArg { $$ = AddArgument(MakeArgumentList(), $1); } + | FunctorArgs ',' FunctorArg { $$ = AddArgument($1, $3); } + ; -Conditions : /* empty */ - | ConditionExpr +FunctorArg : /* empty */ + | NonEmptyFunctorArg ; -Condition : NAME { $$ = MakeCondition($1, null); } - | CTX_SELF { $$ = MakeCondition("Self", null); } /* Conflict with "SELF" action context token */ - | ACT_SUMMON { $$ = MakeCondition($1, null); } /* Token conflict between actions and the condition "Summon" */ - | NAME ':' TextArg { $$ = MakeCondition($1, $3); } - ; +NonEmptyFunctorArg : FunctorArgStart LuaRoot FunctorArgEnd { $$ = $3; }; -UnaryCondition : ConditionBlock - | UnaryOperator ConditionBlock { $$ = MakeNotCondition($2); } - ; +FunctorArgStart : /* empty */ { InitLiteral(); }; -ConditionBlock : BracketedConditionExpr - | Condition - ; +FunctorArgEnd : /* empty */ { $$ = MakeLiteral(); }; -BracketedConditionExpr : '(' ConditionExpr ')' { $$ = $2; }; +LuaRoot : LuaRootSymbol + | LuaRoot LuaRootSymbol + | LuaRoot '(' LuaExpr ')' + | LuaRoot '(' ')' + | '(' LuaExpr ')' + ; -UnaryOperator : '!'; +LuaExpr : LuaSymbol + | LuaExpr LuaSymbol + | LuaExpr '(' LuaExpr ')' + | '(' LuaExpr ')' + | LuaExpr '(' ')' + ; -ConditionExpr : UnaryCondition - | ConditionExpr BinaryOperator UnaryCondition { $$ = MakeBinaryCondition($1, $2, $3); } +LuaRootSymbol : NAME + | INTEGER + | TEXT + | CONTEXT + | DICE_ROLL + | ':' + | '!' 
+ | ';' + | '-' ; -BinaryOperator : '|' { $$ = ConditionOperator.Or; } - | '&' { $$ = ConditionOperator.And; } - ; \ No newline at end of file +LuaSymbol : LuaRootSymbol + | ',' + ; diff --git a/LSLib/LS/Stats/Parser/StatPropertyParser.cs b/LSLib/LS/Stats/Parser/StatPropertyParser.cs index 721e38a9..f94c0124 100644 --- a/LSLib/LS/Stats/Parser/StatPropertyParser.cs +++ b/LSLib/LS/Stats/Parser/StatPropertyParser.cs @@ -1,6 +1,10 @@ -using QUT.Gppg; +using LSLib.Granny; +using QUT.Gppg; using System; using System.Collections.Generic; +using System.Linq; +using System.Text; +using static LSLib.Granny.Model.CurveData.AnimationCurveData; namespace LSLib.LS.Stats.Properties { @@ -11,6 +15,16 @@ public LexLocation LastLocation() return new LexLocation(tokLin, tokCol, tokELin, tokECol); } + public int TokenStartPos() + { + return tokPos; + } + + public int TokenEndPos() + { + return tokEPos; + } + private object MakeLiteral(string s) => s; } @@ -19,59 +33,116 @@ public abstract class StatPropertyScanBase : AbstractScanner functors = null; + switch (ExprType) + { + case ExpressionType.Boost: functors = Definitions.Boosts; break; + case ExpressionType.Functor: functors = Definitions.Functors; break; + case ExpressionType.DescriptionParams: functors = Definitions.DescriptionParams; break; + } - if (definitions != null) + if (!functors.TryGetValue(action.Action, out StatFunctorType functor)) { - var surfaceTypeEnum = definitions.Enumerations["Surface Type"]; - SurfaceTypeParser = new EnumParser(surfaceTypeEnum); + if (ExprType != ExpressionType.DescriptionParams) + { + OnError($"'{action.Action}' is not a valid {ExprType}"); + } - var conditionSurfaceTypeEnum = definitions.Enumerations["CUSTOM_ConditionSurfaceType"]; - ConditionSurfaceTypeParser = new EnumParser(conditionSurfaceTypeEnum); + return; + } - var surfaceStateEnum = definitions.Enumerations["CUSTOM_SurfaceState"]; - SurfaceStateParser = new EnumParser(surfaceStateEnum); + // Strip property contexts + var firstArg = 0; + while (firstArg < action.Arguments.Count) + { + var arg = action.Arguments[firstArg]; + if (arg == "SELF" + || arg == "OWNER" + || arg == "SWAP" + || arg == "OBSERVER_OBSERVER" + || arg == "OBSERVER_TARGET" + || arg == "OBSERVER_SOURCE") + { + firstArg++; + } + else + { + break; + } + } - var skillTargetConditionEnum = definitions.Enumerations["SkillTargetCondition"]; - SkillTargetConditionParser = new EnumParser(skillTargetConditionEnum); + var args = action.Arguments.GetRange(firstArg, action.Arguments.Count - firstArg); - SkillConditionsWithArgument = definitions.Enumerations["CUSTOM_SkillCondition_1arg"]; + if (args.Count > functor.Args.Count) + { + OnError($"Too many arguments to '{action.Action}'; {args.Count} passed, expected at most {functor.Args.Count}"); + return; + } - var requirementEnum = definitions.Enumerations["CUSTOM_Requirement"]; - RequirementParser = new EnumParser(requirementEnum); + if (args.Count < functor.RequiredArgs) + { + OnError($"Not enough arguments to '{action.Action}'; {args.Count} passed, expected at least {functor.RequiredArgs}"); + return; + } - RequirementsWithArgument = definitions.Enumerations["CUSTOM_Requirement_1arg"]; - EngineStatuses = definitions.Enumerations["CUSTOM_EngineStatus"]; + for (var i = 0; i < Math.Min(args.Count, functor.Args.Count); i++) + { + bool succeeded = false; + string errorText = null; - StatusParser = parserFactory.CreateReferenceParser(new List + var arg = functor.Args[i]; + if (arg.Type.Length > 0) { - new StatReferenceConstraint + var parser = 
ParserFactory.CreateParser(arg.Type, null, null, Definitions); + parser.Parse(args[i], ref succeeded, ref errorText); + if (!succeeded) { - StatType = "StatusData" + OnError($"'{action.Action}' argument {i + 1}: {errorText}"); } - }); + } } } + } + + public partial class StatPropertyParser + { + private IStatValueParser RequirementParser; + private StatEnumeration RequirementsWithArgument; + private int LiteralStart; + private StatActionValidator ActionValidator; + private byte[] Source; + + public delegate void ErrorReportingDelegate(string message); + public event ErrorReportingDelegate OnError; + + private StatPropertyScanner StatScanner; + + public StatPropertyParser(StatPropertyScanner scnr, StatDefinitionRepository definitions, + StatValueParserFactory parserFactory, byte[] source, ExpressionType type) : base(scnr) + { + StatScanner = scnr; + Source = source; + ActionValidator = new StatActionValidator(definitions, parserFactory, type); + ActionValidator.OnError += (message) => { OnError(message); }; + } public object GetParsedObject() { @@ -139,6 +210,24 @@ private Requirement MakeTagRequirement(object name, object tag) private List MakePropertyList() => new List(); + private List SetTextKey(object properties, object textKey) + { + var props = properties as List; + var tk = (string)textKey; + foreach (var property in props) + { + property.TextKey = tk; + } + return props; + } + + private List MergeProperties(object properties, object properties2) + { + var props = properties as List; + props.Concat(properties2 as List); + return props; + } + private List AddProperty(object properties, object property) { var props = properties as List; @@ -148,55 +237,29 @@ private List AddProperty(object properties, object property) private Property MakeProperty(object context, object condition, object action) => new Property { - Context = (PropertyContext)context, + Context = (string)context, Condition = condition as object, Action = action as PropertyAction }; - private PropertyAction MakeAction(object action, object arguments) => new PropertyAction - { - Action = action as string, - Arguments = arguments as List - }; - - private PropertyAction MakeStatusBoost(object boost, object status, object arguments) - { - var statusName = status as string; - if (!EngineStatuses.ValueToIndexMap.ContainsKey(statusName)) - { - Validate(StatusParser, statusName); - } - - return new PropertyStatusBoost - { - Boost = boost as StatusBoost, - Action = statusName, - Arguments = arguments as List - }; - } - - private List MakeArgumentList(params object[] args) => new List(args); + private List MakeArgumentList() => new List(); - private List PrependArgumentList(object argument, object arguments) + private List AddArgument(object arguments, object arg) { - var args = arguments as List; - args.Insert(0, argument); + var args = arguments as List; + args.Add(arg == null ? 
"" : (string)arg); return args; } - private StatusBoost MakeStatusBoostType(object type, object surfaces) => new StatusBoost + private PropertyAction MakeAction(object action, object arguments) { - Type = (StatusBoostType)type, - SurfaceTypes = surfaces as List - }; - - private List MakeSurfaceList() => new List(); - - private List AddSurface(object surfaces, object surface) - { - var surfs = surfaces as List; - surfs.Add(surface as string); - return surfs; + var act = new PropertyAction + { + Action = action as string, + Arguments = arguments as List + }; + ActionValidator.Validate(act); + return act; } private void Validate(IStatValueParser parser, string value) @@ -214,80 +277,16 @@ private void Validate(IStatValueParser parser, string value) } } - private string MakeSurfaceType(object type) - { - var surfaceType = type as string; - Validate(ConditionSurfaceTypeParser, surfaceType); - return surfaceType; - } - - private string MakeSurfaceState(object state) + private object InitLiteral() { - var surfaceState = state as string; - Validate(SurfaceStateParser, surfaceState); - return surfaceState; - } - - private string MakeSurface(object type) - { - var surfaceType = type as string; - Validate(SurfaceTypeParser, surfaceType); - return surfaceType; - } - - private UnaryCondition MakeCondition(object type, object arg) - { - var conditionType = type as string; - var conditionArg = arg as string; - - Validate(SkillTargetConditionParser, conditionType); - - var hasArg = SkillConditionsWithArgument.ValueToIndexMap.ContainsKey(conditionType); - if (hasArg && arg == null) - { - OnError?.Invoke($"Condition '{conditionType}' needs an argument"); - } - else if (!hasArg && arg != null) - { - OnError?.Invoke($"Condition '{conditionType}' doesn't need any arguments"); - } - else - { - switch (conditionType) - { - case "InSurface": - Validate(ConditionSurfaceTypeParser, conditionArg); - break; - - case "Surface": - Validate(SurfaceStateParser, conditionArg); - break; - - case "HasStatus": - // FIXME - add status name validation - break; - } - } - - return new UnaryCondition - { - ConditionType = conditionType, - Argument = conditionArg - }; + LiteralStart = StatScanner.TokenStartPos(); + return null; } - private Condition MakeNotCondition(object condition) + private string MakeLiteral() { - var cond = condition as Condition; - cond.Not = true; - return cond; + var val = Encoding.UTF8.GetString(Source, LiteralStart, StatScanner.TokenStartPos() - LiteralStart); + return val; } - - private BinaryCondition MakeBinaryCondition(object lhs, object oper, object rhs) => new BinaryCondition - { - Left = lhs as Condition, - Operator = (ConditionOperator)oper, - Right = rhs as Condition - }; } } \ No newline at end of file diff --git a/LSLib/LS/Stats/StatDefinitions.cs b/LSLib/LS/Stats/StatDefinitions.cs index affde2a9..31d4c6d2 100644 --- a/LSLib/LS/Stats/StatDefinitions.cs +++ b/LSLib/LS/Stats/StatDefinitions.cs @@ -1,5 +1,10 @@ -using System; +using LSLib.LS.Enums; +using OpenTK; +using System; using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Runtime.InteropServices; using System.Xml.Linq; namespace LSLib.LS.Stats @@ -25,7 +30,17 @@ public void AddItem(int index, string value) } Values.Add(value); - ValueToIndexMap.Add(value, index); + + // Some vanilla enums are bogus and contain names multiple times + if (!ValueToIndexMap.ContainsKey(value)) + { + ValueToIndexMap.Add(value, index); + } + } + + public void AddItem(string value) + { + AddItem(Values.Count, value); } } @@ 
-49,260 +64,1280 @@ public IStatValueParser GetParser(StatValueParserFactory factory, StatDefinition } } - public class StatSubtypeDefinition + public class StatEntryType { - public readonly StatTypeDefinition Type; public readonly string Name; + public readonly string NameProperty; + public readonly string BasedOnProperty; public readonly Dictionary Fields; - public readonly Dictionary SubObjects; - public StatSubtypeDefinition(StatTypeDefinition type, string name) + public StatEntryType(string name, string nameProperty, string basedOnProperty) { - Type = type; Name = name; + NameProperty = nameProperty; + BasedOnProperty = basedOnProperty; Fields = new Dictionary(); - SubObjects = new Dictionary(); } } - public class StatTypeDefinition + public class StatFunctorArgumentType { - public readonly string Name; - public readonly string SubtypeProperty; - public string NameProperty; - public string BaseClassProperty; - public readonly Dictionary Subtypes; + public string Name; + public string Type; + } + + public class StatFunctorType + { + public string Name; + public int RequiredArgs; + public List Args; + } + + public class StatDefinitionRepository + { + // Version of modified Enumerations.xml and StatObjectDefinitions.sod we expect + public const string CustomizationsVersion = "1"; - public bool CanInstantiate + public readonly Dictionary Enumerations = new Dictionary(); + public readonly Dictionary Types = new Dictionary(); + public readonly Dictionary Functors = new Dictionary(); + public readonly Dictionary Boosts = new Dictionary(); + public readonly Dictionary DescriptionParams = new Dictionary(); + + private StatField AddField(StatEntryType defn, string name, string typeName) { - get + var field = new StatField + { + Name = name, + Type = typeName + }; + + if (Enumerations.TryGetValue(typeName, out StatEnumeration enumType) && enumType.Values.Count > 0) { - return NameProperty != null; + field.EnumType = enumType; } + + defn.Fields.Add(name, field); + return field; } - public StatTypeDefinition(string name, string subtypeProperty) + private void AddEnumeration(string name, List labels) { - Name = name; - SubtypeProperty = subtypeProperty; - Subtypes = new Dictionary(); + var enumType = new StatEnumeration(name); + foreach (var label in labels) + { + enumType.AddItem(label); + } + Enumerations.Add(name, enumType); } - } - public class StatDefinitionRepository - { - // Version of modified Enumerations.xml and StatObjectDefinitions.sod we expect - public const string CustomizationsVersion = "1"; + private StatFunctorArgumentType MakeFunctorArg(string name, string type) + { + return new StatFunctorArgumentType + { + Name = name, + Type = type + }; + } - public readonly Dictionary Enumerations = new Dictionary(); - public readonly Dictionary Definitions = new Dictionary(); + public void AddBoost(string name, int requiredArgs, List args) + { + AddFunctor(Boosts, name, requiredArgs, args); + } + + public void AddFunctor(string name, int requiredArgs, List args) + { + AddFunctor(Functors, name, requiredArgs, args); + } + + public void AddDescriptionParams(string name, int requiredArgs, List args) + { + AddFunctor(DescriptionParams, name, requiredArgs, args); + } - private void AddField(StatTypeDefinition definition, StatSubtypeDefinition subtype, XElement field) + public void AddFunctor(Dictionary dict, string name, int requiredArgs, List argDescs) { - if (field.Attribute("export_name").Value == "") + var args = new List(); + for (int i = 0; i < argDescs.Count; i += 2) { - return; + 
args.Add(MakeFunctorArg(argDescs[i], argDescs[i + 1])); } - var fieldName = field.Attribute("export_name").Value; - var typeName = field.Attribute("type").Value; - StatEnumeration enumeration = null; - List referenceConstraints = null; + AddFunctor(dict, name, requiredArgs, args); + } - switch (typeName) + public void AddFunctor(Dictionary dict, string name, int requiredArgs, IEnumerable args) + { + var functor = new StatFunctorType { - case "Enumeration": - case "EnumerationList": - var enumName = field.Attribute("enumeration_type_name").Value; - enumeration = Enumerations[enumName]; - break; + Name = name, + RequiredArgs = requiredArgs, + Args = args.ToList() + }; - case "Name": - if (definition.NameProperty == null) - { - definition.NameProperty = fieldName; - } - else if (definition.NameProperty != fieldName) - { - throw new Exception($"Conflicting Name property for type '{definition.Name}': First seen using '{definition.NameProperty}', now seen using '{fieldName}'."); - } - break; + dict.Add(name, functor); + } - case "BaseClass": - if (definition.BaseClassProperty == null) + public void LoadDefinitions(Stream stream) + { + StatEntryType defn = null; + string line; + + using (var reader = new StreamReader(stream)) + while ((line = reader.ReadLine()) != null) + { + var trimmed = line.Trim(); + if (trimmed.Length > 0) + { + if (trimmed.StartsWith("modifier type ")) { - definition.BaseClassProperty = fieldName; + var name = trimmed.Substring(15, trimmed.Length - 16); + defn = new StatEntryType(name, "Name", "Using"); + Types.Add(defn.Name, defn); + AddField(defn, "Name", "FixedString"); + var usingRef = AddField(defn, "Using", "StatReference"); + usingRef.ReferenceTypes = new List + { + new StatReferenceConstraint + { + StatType = name + } + }; } - else if (definition.BaseClassProperty != fieldName) + else if (trimmed.StartsWith("modifier \"")) { - throw new Exception($"Conflicting BaseClass for type '{definition.Name}': First seen using '{definition.BaseClassProperty}', now seen using '{fieldName}'."); + var nameEnd = trimmed.IndexOf('"', 10); + var name = trimmed.Substring(10, nameEnd - 10); + var typeName = trimmed.Substring(nameEnd + 3, trimmed.Length - nameEnd - 4); + AddField(defn, name, typeName); } - break; + } + } - case "StatReference": - case "StatReferences": - referenceConstraints = new List(); - var descriptions = field.Element("stat_descriptions"); - if (descriptions == null) - { - throw new Exception("Field of type 'StatReference' must have a list of stat types in the node"); - } + // Add builtins + var itemColor = new StatEntryType("ItemColor", "ItemColorName", null); + Types.Add(itemColor.Name, itemColor); + AddField(itemColor, "ItemColorName", "FixedString"); + AddField(itemColor, "Primary Color", "FixedString"); + AddField(itemColor, "Secondary Color", "FixedString"); + AddField(itemColor, "Tertiary Color", "FixedString"); - var descs = descriptions.Elements("description"); - foreach (var desc in descs) - { - var constraint = new StatReferenceConstraint - { - StatType = desc.Attribute("stat_type").Value, - StatSubtype = desc.Attribute("stat_subtype")?.Value ?? 
null - }; - referenceConstraints.Add(constraint); - } + var itemProgressionName = new StatEntryType("ItemProgressionNames", "Name", null); + Types.Add(itemProgressionName.Name, itemProgressionName); + AddField(itemProgressionName, "Name", "FixedString"); + AddField(itemProgressionName, "Names", "Passthrough"); - break; - - case "Boolean": - case "Integer": - case "Float": - case "String": - case "TranslatedString": - case "RootTemplate": - case "Comment": - case "Color": - case "Requirements": - case "Properties": - case "Conditions": - case "Passthrough": - case "UUID": - break; - - default: - throw new Exception($"Unsupported stat field type: '{typeName}'"); - } + var itemProgressionVisual = new StatEntryType("ItemProgressionVisuals", "Name", null); + Types.Add(itemProgressionVisual.Name, itemProgressionVisual); + AddField(itemProgressionVisual, "Name", "FixedString"); + // FIXME + AddField(itemProgressionVisual, "LevelGroups", "Passthrough"); + AddField(itemProgressionVisual, "NameGroups", "Passthrough"); + AddField(itemProgressionVisual, "RootGroups", "Passthrough"); + + var dataType = new StatEntryType("Data", "Key", null); + Types.Add(dataType.Name, dataType); + AddField(dataType, "Key", "FixedString"); + AddField(dataType, "Value", "FixedString"); - var statField = new StatField + AddEnumeration("ResurrectType", new List { - Name = fieldName, - Type = typeName, - EnumType = enumeration, - ReferenceTypes = referenceConstraints - }; - subtype.Fields.Add(fieldName, statField); + "Living", + "Guaranteed", + "Construct", + "Undead" + }); - if (typeName == "TranslatedString") + AddEnumeration("SetStatusDurationType", new List { - var translatedKeyRefField = new StatField - { - Name = fieldName + "Ref", - Type = typeName, - EnumType = enumeration - }; - subtype.Fields.Add(fieldName + "Ref", translatedKeyRefField); - } - } + "SetMinimum", + "ForceSet", + "Add", + "Multiply" + }); - private void AddSubtype(StatTypeDefinition definition, string subtypeName, IEnumerable fields) - { - var subtype = new StatSubtypeDefinition(definition, subtypeName); + AddEnumeration("ExecuteWeaponFunctorsType", new List + { + "MainHand", + "OffHand", + "BothHands" + }); - foreach (var field in fields) + AddEnumeration("SpellCooldownType", new List { - AddField(definition, subtype, field); - } - - definition.Subtypes.Add(subtypeName, subtype); - } + "Default", + "OncePerTurn", + "OncePerCombat", + "UntilRest", + "OncePerTurnNoRealtime", + "UntilShortRest", + "UntilPerRestPerItem", + "OncePerShortRestPerItem" + }); - private void AddDefinition(XElement defn) - { - var name = defn.Attribute("name").Value; - var parentName = defn.Attribute("export_type")?.Value ?? name; + AddEnumeration("SummonDuration", new List + { + "UntilLongRest", + "Permanent" + }); - if (!Definitions.TryGetValue(parentName, out StatTypeDefinition definition)) + AddEnumeration("ForceFunctorOrigin", new List { - var subtypeProperty = defn.Attribute("subtype_property")?.Value ?? 
null; - definition = new StatTypeDefinition(parentName, subtypeProperty); - Definitions.Add(parentName, definition); - } + "OriginToEntity", + "OriginToTarget", + "TargetToEntity" + }); - var fields = defn.Element("field_definitions").Elements("field_definition"); - AddSubtype(definition, name, fields); - } + AddEnumeration("ForceFunctorAggression", new List + { + "Aggressive", + "Friendly", + "Neutral" + }); - private void AddEnumeration(XElement enumEle) - { - var name = enumEle.Attribute("name").Value; - if (Enumerations.ContainsKey(name)) + AddEnumeration("StatItemSlot", new List { - throw new Exception($"Enumeration '{name}' defined multiple times!"); - } + "Helmet", + "Breast", + "Cloak", + "MeleeMainHand", + "MeleeOffHand", + "RangedMainHand", + "RangedOffHand", + "Ring", + "Underwear", + "Boots", + "Gloves", + "Amulet", + "Ring2", + "Wings", + "Horns", + "Overhead", + "MusicalInstrument", + "VanityBody", + "VanityBoots", + "MainHand", + "OffHand" + }); - var enumType = new StatEnumeration(name); - - var items = enumEle.Element("items").Elements("item"); + AddEnumeration("Magical", new List + { + "Magical", + "Nonmagical" + }); - foreach (var item in items) + AddEnumeration("Nonlethal", new List { - var index = Int32.Parse(item.Attribute("index").Value); - var value = item.Attribute("value").Value; - enumType.AddItem(index, value); - } + "Lethal", + "Nonlethal" + }); - Enumerations.Add(name, enumType); - } + AddEnumeration("AllEnum", new List + { + "All" + }); - public void LoadDefinitions(string definitionsPath) - { - // NOTE: This function uses a modified version of StatObjectDefinitions.sod as there - // are too many deviations in the vanilla .sod file from the actual .txt format, and - // vital stat fields are sometimes missing. - // The changes required are: - // 1) Add "subtype_property" attribute to StatusData and SkillData types - // 2) Fix export_type of ExtraData to Data - // 3) Add SkillType and StatusType fields to StatusData and SkillData types - // 4) Add type="Passthrough" fields for subobjects where required - // 5) Adjusted fields to use proper enumeration labels in many places - // etc etc. 
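The plain-text format consumed by the new LoadDefinitions(Stream) above can be inferred from its StartsWith/Substring logic; the sketch below (C#) feeds it a tiny hand-written sample. The specific modifier and type names are illustrative rather than copied from game data, and System.IO/System.Text using directives are implied.

    // Minimal sample of the "modifier type" / "modifier" format parsed above.
    var sample = string.Join("\n", new[]
    {
        "modifier type \"Weapon\"",
        "modifier \"Damage Type\",\"Damage Type\"",
        "modifier \"WeaponRange\",\"ConstantInt\""
    });

    var definitions = new StatDefinitionRepository();
    using (var stream = new MemoryStream(Encoding.UTF8.GetBytes(sample)))
    {
        definitions.LoadDefinitions(stream);
    }

    // definitions.Types["Weapon"] now contains the implicit "Name" (FixedString) and
    // "Using" (StatReference constrained back to "Weapon") fields plus the two fields
    // declared above; a field's type is linked to an entry in Enumerations when one
    // with the same name exists.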
- - var root = XElement.Load(definitionsPath); - var customizationVer = root.Attribute("lslib_customizations")?.Value; - if (customizationVer == null) - { - throw new Exception("Can only load StatObjectDefinitions.sod with LSLib-specific modifications"); - } - else if (customizationVer != CustomizationsVersion) + AddEnumeration("ZoneShape", new List { - throw new Exception($"Needs StatObjectDefinitions.sod with customization version '{CustomizationsVersion}'; got version '{customizationVer}'"); - } + "Cone", + "Square", + }); - var defnRoot = root.Element("stat_object_definitions"); - var defns = defnRoot.Elements("stat_object_definition"); + AddEnumeration("SurfaceLayer", new List + { + "Ground", + "Cloud", + }); - foreach (var defn in defns) + AddEnumeration("RollAdjustmentType", new List { - AddDefinition(defn); - } - } + "All", + "Distribute", + }); - public void LoadEnumerations(string enumsPath) - { - var root = XElement.Load(enumsPath); - var customizationVer = root.Attribute("lslib_customizations")?.Value; - if (customizationVer == null) + AddEnumeration("StatsRollType", new List { - throw new Exception("Can only load Enumerations.xml with LSLib-specific modifications"); - } - else if (customizationVer != CustomizationsVersion) + "Attack", + "MeleeWeaponAttack", + "RangedWeaponAttack", + "MeleeSpellAttack", + "RangedSpellAttack", + "MeleeUnarmedAttack", + "RangedUnarmedAttack", + "SkillCheck", + "SavingThrow", + "RawAbility", + "Damage", + "MeleeOffHandWeaponAttack", + "RangedOffHandWeaponAttack", + "DeathSavingThrow", + "MeleeWeaponDamage", + "RangedWeaponDamage", + "MeleeSpellDamage", + "RangedSpellDamage", + "MeleeUnarmedDamage", + "RangedUnarmedDamage", + }); + + AddEnumeration("AdvantageType", new List { - throw new Exception($"Needs Enumerations.xml with customization version '{CustomizationsVersion}'; got version '{customizationVer}'"); - } + "AttackRoll", + "AttackTarget", + "SavingThrow", + "AllSavingThrows", + "Ability", + "AllAbilities", + "Skill", + "AllSkills", + "SourceDialogue", + "DeathSavingThrow", + "Concentration", + }); + + AddEnumeration("SkillType", new List + { + "Deception", + "Intimidation", + "Performance", + "Persuasion", + "Acrobatics", + "SleightOfHand", + "Stealth", + "Arcana", + "History", + "Investigation", + "Nature", + "Religion", + "Athletics", + "AnimalHandling", + "Insight", + "Medicine", + "Perception", + "Survival", + }); - var defnRoot = root.Element("enumerations"); - var enums = defnRoot.Elements("enumeration"); + AddEnumeration("CriticalHitType", new List + { + "AttackTarget", + "AttackRoll" + }); + + AddEnumeration("Result", new List + { + "Success", + "Failure" + }); + + AddEnumeration("CriticalHitResult", new List + { + "Success", + "Failure" + }); + + AddEnumeration("CriticalHitWhen", new List + { + "Never", + "Always", + "ForcedAlways" + }); + + AddEnumeration("MovementSpeedType", new List + { + "Stroll", + "Walk", + "Run", + "Sprint", + }); - foreach (var enum_ in enums) + AddEnumeration("DamageReductionType", new List { - AddEnumeration(enum_); + "Half", + "Flat", + "Threshold" + }); + + AddEnumeration("AttackRollAbility", new List + { + "SpellCastingAbility", + "UnarmedMeleeAbility", + "AttackAbility" + }); + + AddEnumeration("HealingDirection", new List + { + "Incoming", + "Outgoing" + }); + + AddEnumeration("ResistanceBoostFlags", new List + { + "None", + "Resistant", + "Immune", + "Vulnerable", + "BelowDamageThreshold", + "ResistantToMagical", + "ImmuneToMagical", + "VulnerableToMagical", + "ResistantToNonMagical", + 
"ImmuneToNonMagical", + "VulnerableToNonMagical", + }); + + AddEnumeration("UnlockSpellType", new List + { + "Singular", + "AddChildren", + "MostPowerful" + }); + + AddEnumeration("ProficiencyBonusBoostType", new List + { + "AttackRoll", + "AttackTarget", + "SavingThrow", + "AllSavingThrows", + "Ability", + "AllAbilities", + "Skill", + "AllSkills", + "SourceDialogue", + "WeaponActionDC" + }); + + AddEnumeration("ResourceReplenishType", new List + { + "Never", + "Default", + "Combat", + "Rest", + "ShortRest", + "FullRest", + "ExhaustedRest" + }); + + AddEnumeration("AttackType", new List + { + "DirectHit", + "MeleeWeaponAttack", + "RangedWeaponAttack", + "MeleeOffHandWeaponAttack", + "RangedOffHandWeaponAttack", + "MeleeSpellAttack", + "RangedSpellAttack", + "MeleeUnarmedAttack", + "RangedUnarmedAttack" + }); + + AddEnumeration("DealDamageWeaponDamageType", new List + { + "MainWeaponDamageType", + "OffhandWeaponDamageType", + "MainMeleeWeaponDamageType", + "OffhandMeleeWeaponDamageType", + "MainRangedWeaponDamageType", + "OffhandRangedWeaponDamageType", + "SourceWeaponDamageType", + "ThrownWeaponDamageType", + }); + + AddEnumeration("EngineStatusType", new List + { + "DYING", + "HEAL", + "KNOCKED_DOWN", + "TELEPORT_FALLING", + "BOOST", + "REACTION", + "STORY_FROZEN", + "SNEAKING", + "UNLOCK", + "FEAR", + "SMELLY", + "INVISIBLE", + "ROTATE", + "MATERIAL", + "CLIMBING", + "INCAPACITATED", + "INSURFACE", + "POLYMORPHED", + "EFFECT", + "DEACTIVATED", + "DOWNED", + }); + + + // Add functors + AddFunctor("ApplyStatus", 1, new List { + "StatusId", "StatusId", + "Chance", "Int", + "Duration", "Lua", + "StatusSpecificParam1", "String", + "StatusSpecificParam2", "Int", + "StatusSpecificParam3", "Int", + "StatsConditions", "Conditions", + "RequiresConcentration", "Boolean" + }); + AddFunctor("SurfaceChange", 1, new List { + "SurfaceChange", "Surface Change", + "Chance", "Float", + "Arg3", "Float", + "Arg4", "Float", + "Arg5", "Float" + }); + AddFunctor("Resurrect", 0, new List { + "Chance", "Float", + "HealthPercentage", "Float", + "Type", "ResurrectType" + }); + AddFunctor("Sabotage", 0, new List { + "Amount", "Int" + }); + AddFunctor("Summon", 1, new List { + "Template", "Guid", // Root template GUID + "Duration", "SummonDurationOrInt", + "AIHelper", "SpellId", + "Arg4", "Boolean", + "StackId", "String", + "StatusToApply1", "StatusId", + "StatusToApply2", "StatusId", + "StatusToApply3", "StatusId", + "StatusToApply4", "StatusId", + "Arg10", "Boolean", + }); + AddFunctor("Force", 1, new List { + "Distance", "Lua", + "Origin", "ForceFunctorOrigin", + "Aggression", "ForceFunctorAggression", + "Arg4", "Boolean", + "Arg5", "Boolean", + }); + AddFunctor("Douse", 0, new List { + "Arg1", "Float", + "Arg2", "Float" + }); + AddFunctor("SwapPlaces", 0, new List { + "Animation", "String", + "Arg2", "Boolean", + "Arg3", "Boolean" + }); + AddFunctor("Pickup", 0, new List { + "Arg1", "String" + }); + AddFunctor("CreateSurface", 3, new List { + "Radius", "Float", + "Duration", "Float", + "SurfaceType", "Surface Type", + "IsControlledByConcentration", "Boolean", + "Arg5", "Float", + "Arg6", "Boolean" + }); + AddFunctor("CreateConeSurface", 3, new List { + "Radius", "Float", + "Duration", "Float", + "SurfaceType", "Surface Type", + "IsControlledByConcentration", "Boolean", + "Arg5", "Float", + "Arg6", "Boolean" + }); + AddFunctor("RemoveStatus", 1, new List { + "StatusId", "StatusIdOrGroup" + }); + AddFunctor("DealDamage", 1, new List { + "Damage", "Lua", + "DamageType", "DamageTypeOrDealDamageWeaponDamageType", + 
"Magical", "Magical", + "Nonlethal", "Nonlethal", + "Arg5", "Int", + "Tooltip", "Guid", + }); + AddFunctor("ExecuteWeaponFunctors", 0, new List { + "WeaponType", "ExecuteWeaponFunctorsType" + }); + AddFunctor("RegainHitPoints", 1, new List { + "HitPoints", "Lua", + "Type", "ResurrectType" + }); + AddFunctor("TeleportSource", 0, new List { + "Arg1", "Boolean", + "Arg2", "Boolean", + }); + AddFunctor("SetStatusDuration", 2, new List { + "StatusId", "StatusId", + "Duration", "Float", + "ChangeType", "SetStatusDurationType", + }); + AddFunctor("UseSpell", 1, new List { + "SpellId", "SpellId", + "IgnoreHasSpell", "Boolean", + "IgnoreChecks", "Boolean", + "Arg4", "Boolean", + "SpellCastGuid", "Guid", + }); + AddFunctor("UseActionResource", 1, new List { + "ActionResource", "String", // Action resource name + "Amount", "String", // Float or percentage + "Level", "Int", + "Arg4", "Boolean" + }); + AddFunctor("UseAttack", 0, new List { + "IgnoreChecks", "Boolean" + }); + AddFunctor("CreateExplosion", 0, new List { + "SpellId", "SpellId" + }); + AddFunctor("BreakConcentration", 0, new List {}); + AddFunctor("ApplyEquipmentStatus", 2, new List { + "ItemSlot", "StatItemSlot", + "StatusId", "StatusId", + "Chance", "Int", + "Duration", "Lua", + "StatusSpecificParam1", "String", + "StatusSpecificParam2", "Int", + "StatusSpecificParam3", "Int", + "StatsConditions", "Conditions", + "RequiresConcentration", "Boolean" + }); + AddFunctor("RestoreResource", 2, new List { + "ActionResource", "String", // Action resource name + "Amount", "Lua", // or percentage? + "Level", "Int" + }); + AddFunctor("Spawn", 1, new List { + "TemplateId", "Guid", // Root template Guid + "AiHelper", "String", // Should be SpellId, but seemingly defunct? + "StatusToApply1", "StatusId", + "StatusToApply2", "StatusId", + "StatusToApply3", "StatusId", + "StatusToApply4", "StatusId", + "Arg7", "Boolean" + }); + AddFunctor("Stabilize", 0, new List{}); + AddFunctor("Unlock", 0, new List{}); + AddFunctor("ResetCombatTurn", 0, new List{}); + AddFunctor("RemoveAuraByChildStatus", 1, new List { + "StatusId", "StatusId" + }); + AddFunctor("SummonInInventory", 1, new List { + "TemplateId", "Guid", // Root template Guid + "Duration", "SummonDurationOrInt", + "Arg3", "Int", + "Arg4", "Boolean", + "Arg5", "Boolean", + "Arg6", "Boolean", + "Arg7", "Boolean", + "Arg8", "String", + "Arg9", "String", + "Arg10", "String", + "Arg11", "String", // etc. + }); + AddFunctor("SpawnInInventory", 1, new List { + "TemplateId", "Guid", // Root template Guid + "Arg2", "Int", + "Arg3", "Boolean", + "Arg4", "Boolean", + "Arg5", "Boolean", + "Arg6", "String", + "Arg7", "String", + "Arg8", "String", // etc. 
+ }); + AddFunctor("RemoveUniqueStatus", 1, new List { + "StatusId", "StatusId" + }); + AddFunctor("DisarmWeapon", 0, new List { }); + AddFunctor("DisarmAndStealWeapon", 0, new List { }); + AddFunctor("SwitchDeathType", 1, new List { + "DeathType", "Death Type" + }); + AddFunctor("TriggerRandomCast", 2, new List { + "Arg1", "Int", + "Arg2", "Float", + "Arg3", "String", // RandomCastOutcomesID resource + "Arg4", "String", // RandomCastOutcomesID resource + "Arg5", "String", // RandomCastOutcomesID resource + "Arg6", "String", // RandomCastOutcomesID resource + }); + AddFunctor("GainTemporaryHitPoints", 1, new List { + "Amount", "Lua" + }); + AddFunctor("FireProjectile", 1, new List { + "Arg1", "String" + }); + AddFunctor("ShortRest", 0, new List {}); + AddFunctor("CreateZone", 0, new List { + "Shape", "ZoneShape", + "Arg2", "Float", + "Duration", "Float", + "Arg4", "String", + "Arg5", "Boolean", + }); + AddFunctor("DoTeleport", 0, new List { + "Arg1", "Float" + }); + AddFunctor("RegainTemporaryHitPoints", 1, new List { + "Amount", "Lua" + }); + AddFunctor("RemoveStatusByLevel", 1, new List { + "StatusId", "StatusIdOrGroup", + "Arg2", "Int", + "Arg3", "Ability" + }); + AddFunctor("SurfaceClearLayer", 0, new List { + "Layer1", "SurfaceLayer", + "Layer2", "SurfaceLayer", + }); + AddFunctor("Unsummon", 0, new List { }); + AddFunctor("CreateWall", 0, new List { }); + AddFunctor("Counterspell", 0, new List { }); + AddFunctor("AdjustRoll", 1, new List { + "Amount", "Lua", + "Type", "RollAdjustmentType", + "DamageType", "Damage Type", + }); + AddFunctor("SpawnExtraProjectiles", 0, new List { + "Arg1", "String", // ProjectileTypeId + }); + AddFunctor("Kill", 0, new List { }); + AddFunctor("TutorialEvent", 0, new List { + "Event", "Guid", + }); + AddFunctor("Drop", 0, new List { + "Arg1", "String", + }); + AddFunctor("ResetCooldowns", 1, new List { + "Type", "SpellCooldownType", + }); + AddFunctor("SetRoll", 1, new List { + "Roll", "Int", + "DistributionOrDamageType", "RollAdjustmentTypeOrDamageType" + }); + AddFunctor("SetDamageResistance", 1, new List { + "DamageType", "Damage Type", + }); + AddFunctor("SetReroll", 0, new List { + "Roll", "Int", + "Arg2", "Boolean" + }); + AddFunctor("SetAdvantage", 0, new List { }); + AddFunctor("SetDisadvantage", 0, new List { }); + AddFunctor("MaximizeRoll", 1, new List { + "DamageType", "Damage Type" + }); + AddFunctor("CameraWait", 0, new List { + "Arg1", "Float" + }); + + + + AddDescriptionParams("DealDamage", 1, new List { + "Damage", "Lua", + "DamageType", "DamageTypeOrDealDamageWeaponDamageType", + "Magical", "Magical", + "Nonlethal", "Nonlethal", + "Arg5", "Int", + "Tooltip", "Guid", + }); + AddDescriptionParams("RegainHitPoints", 1, new List { + "HitPoints", "Lua", + "Tooltip", "Guid", + }); + AddDescriptionParams("Distance", 1, new List { + "Distance", "Float" + }); + AddDescriptionParams("GainTemporaryHitPoints", 1, new List { + "Amount", "Lua" + }); + AddDescriptionParams("LevelMapValue", 1, new List { + "LevelMap", "String" + }); + AddDescriptionParams("ApplyStatus", 1, new List { + "StatusId", "StatusId", + "Chance", "Int", + "Duration", "Lua", + "StatusSpecificParam1", "String", + "StatusSpecificParam2", "Int", + "StatusSpecificParam3", "Int", + "StatsConditions", "Conditions", + "RequiresConcentration", "Boolean" + }); + + + + AddBoost("AC", 1, new List { + "AC", "Int" + }); + AddBoost("Ability", 2, new List { + "Ability", "Ability", + "Amount", "Int", + "Arg3", "Int", + }); + AddBoost("RollBonus", 2, new List { + "RollType", "StatsRollType", + 
"Bonus", "Lua", + "Arg3", "String", + }); + AddBoost("Advantage", 1, new List { + "Type", "AdvantageType", + "Arg2", "String", // Depends on type + "Tag1", "String", // TagManager resource + "Tag2", "String", // TagManager resource + "Tag3", "String", // TagManager resource + }); + AddBoost("Disadvantage", 1, new List { + "Type", "AdvantageType", + "Arg2", "String", // Depends on type + "Tag1", "String", // TagManager resource + "Tag2", "String", // TagManager resource + "Tag3", "String", // TagManager resource + }); + AddBoost("ActionResource", 2, new List { + "Resource", "String", // Action resource name + "Amount", "Float", + "Level", "Int", + "DieType", "DieType", + }); + AddBoost("CriticalHit", 3, new List { + "Type", "CriticalHitType", + "Result", "CriticalHitResult", + "When", "CriticalHitWhen", + "Arg4", "Float", + }); + AddBoost("AbilityFailedSavingThrow", 1, new List { + "Ability", "Ability" + }); + AddBoost("Resistance", 2, new List { + "DamageType", "AllOrDamageType", + "ResistanceBoostFlags", "ResistanceBoostFlags" + }); + AddBoost("WeaponDamageResistance", 1, new List { + "DamageType1", "Damage Type", + "DamageType2", "Damage Type", + "DamageType3", "Damage Type", + }); + AddBoost("ProficiencyBonusOverride", 1, new List { + "Bonus", "Lua" + }); + AddBoost("ActionResourceOverride", 2, new List { + "Resource", "String", // Action resource name + "Amount", "Float", + "Level", "Int", + "DieType", "DieType", + }); + AddBoost("AddProficiencyToAC", 0, new List {}); + AddBoost("JumpMaxDistanceMultiplier", 1, new List { + "Multiplier", "Float" + }); + AddBoost("AddProficiencyToDamage", 0, new List {}); + AddBoost("ActionResourceConsumeMultiplier", 3, new List { + "Resource", "String", // Action resource name + "Multiplier", "Float", + "Level", "Int", + }); + AddBoost("BlockVerbalComponent", 0, new List {}); + AddBoost("BlockSomaticComponent", 0, new List {}); + AddBoost("HalveWeaponDamage", 1, new List { + "Ability", "Ability" + }); + AddBoost("UnlockSpell", 1, new List { + "SpellId", "SpellId", + "Type", "UnlockSpellType", + "SpellGuid", "String", // "None" or GUID or "" + "Cooldown", "SpellCooldownType", + "Ability", "Ability" + }); + AddBoost("SourceAdvantageOnAttack", 0, new List { + "Arg1", "Float" + }); + AddBoost("ProficiencyBonus", 1, new List { + "Type", "ProficiencyBonusBoostType", + "Arg2", "String" + }); + AddBoost("BlockSpellCast", 0, new List { + "Arg1", "Float" + }); + AddBoost("Proficiency", 1, new List { + "Arg1", "ProficiencyGroupFlags", + "Arg2", "ProficiencyGroupFlags", + "Arg3", "ProficiencyGroupFlags", + }); + AddBoost("SourceAllyAdvantageOnAttack", 0, new List {}); + AddBoost("IncreaseMaxHP", 1, new List { + "Amount", "String" // Lua or % + }); + AddBoost("ActionResourceBlock", 1, new List { + "Resource", "String", // Action resource name + "Level", "Int", + }); + AddBoost("StatusImmunity", 1, new List { + "StatusId", "StatusIdOrGroup", + "Tag1", "String", // Tag resource name + "Tag2", "String", // Tag resource name + "Tag3", "String", // Tag resource name + "Tag4", "String", // Tag resource name + "Tag5", "String", // Tag resource name + }); + AddBoost("UseBoosts", 1, new List { + "Arg1", "StatsFunctors" + }); + AddBoost("CannotHarmCauseEntity", 1, new List { + "Arg1", "String" + }); + AddBoost("TemporaryHP", 1, new List { + "Amount", "Lua" + }); + AddBoost("Weight", 1, new List { + "Weight", "Float" + }); + AddBoost("WeightCategory", 1, new List { + "Category", "Int" + }); + AddBoost("FactionOverride", 1, new List { + "Faction", "String" // Faction resource 
GUID or "Source" + }); + AddBoost("ActionResourceMultiplier", 2, new List { + "Resource", "String", // Action resource name + "Multiplier", "Int", + "Level", "Int", + }); + AddBoost("BlockRegainHP", 0, new List { + "Type", "ResurrectTypes" + }); + AddBoost("Initiative", 1, new List { + "Initiative", "Int" + }); + AddBoost("DarkvisionRange", 1, new List { + "Range", "Float" + }); + AddBoost("DarkvisionRangeMin", 1, new List { + "Range", "Float" + }); + AddBoost("DarkvisionRangeOverride", 1, new List { + "Range", "Float" + }); + AddBoost("Tag", 1, new List { + "Arg1", "String" // Tag resource name + }); + AddBoost("IgnoreDamageThreshold", 2, new List { + "DamageType", "AllOrDamageType", + "Threshold", "Int" + }); + AddBoost("Skill", 2, new List { + "Skill", "SkillType", + "Amount", "Lua" + }); + AddBoost("WeaponDamage", 2, new List { + "Amount", "Lua", + "DamageType", "Damage Type", + "Arg3", "Boolean" + }); + AddBoost("NullifyAbilityScore", 1, new List { + "Ability", "Ability" + }); + AddBoost("IgnoreFallDamage", 0, new List {}); + AddBoost("Reroll", 3, new List { + "RollType", "StatsRollType", + "RollBelow", "Int", + "Arg3", "Boolean" + }); + AddBoost("DownedStatus", 1, new List { + "StatusId", "StatusId", + "Arg2", "Int" + }); + AddBoost("Invulnerable", 0, new List {}); + AddBoost("WeaponEnchantment", 1, new List { + "Enchantment", "Int" + }); + AddBoost("GuaranteedChanceRollOutcome", 1, new List { + "Arg1", "Boolean" + }); + AddBoost("Attribute", 1, new List { + "Flags", "AttributeFlags" + }); + AddBoost("IgnoreLeaveAttackRange", 0, new List {}); + AddBoost("GameplayLight", 2, new List { + "Arg1", "Float", + "Arg2", "Boolean", + "Arg3", "Float", + "Arg4", "Boolean" + }); + AddBoost("DialogueBlock", 0, new List {}); + AddBoost("DualWielding", 1, new List { + "DW", "Boolean" + }); + AddBoost("Savant", 1, new List { + "SpellSchool", "SpellSchool" + }); + AddBoost("MinimumRollResult", 2, new List { + "RollType", "StatsRollType", + "MinResult", "Int" + }); + AddBoost("Lootable", 0, new List {}); + AddBoost("CharacterWeaponDamage", 1, new List { + "Amount", "Lua", + "DamageType", "Damage Type" + }); + AddBoost("ProjectileDeflect", 0, new List { + "Type1", "String", + "Type2", "String", + }); + AddBoost("AbilityOverrideMinimum", 2, new List { + "Ability", "Ability", + "Minimum", "Int" + }); + AddBoost("ACOverrideFormula", 2, new List { + "AC", "Int", + "Arg2", "Boolean", + "Ability1", "Ability", + "Ability2", "Ability", + "Ability3", "Ability", + }); + AddBoost("FallDamageMultiplier", 1, new List { + "Multiplier", "Float" + }); + AddBoost("ActiveCharacterLight", 1, new List { + "Light", "String" + }); + AddBoost("Invisibility", 0, new List {}); + AddBoost("TwoWeaponFighting", 0, new List {}); + AddBoost("WeaponAttackTypeOverride", 1, new List { + "Type", "AttackType" + }); + AddBoost("WeaponDamageDieOverride", 1, new List { + "DamageDie", "String", // die, eg. 
1d10 + }); + AddBoost("CarryCapacityMultiplier", 1, new List { + "Multiplier", "Float" + }); + AddBoost("WeaponProperty", 1, new List { + "Flags1", "WeaponFlags" + }); + AddBoost("WeaponAttackRollAbilityOverride", 1, new List { + "Ability", "AbilityOrAttackRollAbility" + }); + AddBoost("BlockTravel", 0, new List {}); + AddBoost("BlockGatherAtCamp", 0, new List {}); + AddBoost("BlockAbilityModifierDamageBonus", 0, new List {}); + AddBoost("VoicebarkBlock", 0, new List {}); + AddBoost("HiddenDuringCinematic", 0, new List {}); + AddBoost("SightRangeAdditive", 1, new List { + "Range", "Float" + }); + AddBoost("SightRangeMinimum", 1, new List { + "Range", "Float" + }); + AddBoost("SightRangeMaximum", 1, new List { + "Range", "Float" + }); + AddBoost("SightRangeOverride", 1, new List { + "Range", "Float" + }); + AddBoost("CannotBeDisarmed", 0, new List {}); + AddBoost("MovementSpeedLimit", 1, new List { + "Type", "MovementSpeedType" + }); + AddBoost("NonLethal", 0, new List {}); + AddBoost("UnlockSpellVariant", 1, new List { + "Modification1", "Lua", // TODO - add Modification parser? + "Modification2", "Lua", + "Modification3", "Lua", + "Modification4", "Lua", + "Modification5", "Lua", + "Modification6", "Lua", + "Modification7", "Lua", + "Modification8", "Lua", + "Modification9", "Lua", + "Modification10", "Lua", + "Modification11", "Lua", + "Modification12", "Lua", + "Modification13", "Lua", + "Modification14", "Lua", + "Modification15", "Lua" + }); + AddBoost("DetectDisturbancesBlock", 1, new List { + "Arg1", "Boolean" + }); + AddBoost("BlockAbilityModifierFromAC", 1, new List { + "Ability", "Ability" + }); + AddBoost("ScaleMultiplier", 0, new List { + "Multiplier", "Float" + }); + AddBoost("CriticalDamageOnHit", 0, new List {}); + AddBoost("DamageReduction", 2, new List { + "DamageType", "AllOrDamageType", + "ReductionType", "DamageReductionType", + "Amount", "Lua" + }); + AddBoost("ReduceCriticalAttackThreshold", 1, new List { + "Threshold", "Int", + "StatusId", "StatusIdOrGroup" + }); + AddBoost("PhysicalForceRangeBonus", 1, new List { + "Arg1", "String" + }); + AddBoost("ObjectSize", 1, new List { + "Size", "Int" + }); + AddBoost("ObjectSizeOverride", 1, new List { + "Size", "String" + }); + AddBoost("ItemReturnToOwner", 0, new List {}); + AddBoost("AiArchetypeOverride", 1, new List { + "Archetype", "String", + "Arg2", "Int" + }); + AddBoost("ExpertiseBonus", 1, new List { + "Skill", "SkillType" + }); + AddBoost("EntityThrowDamage", 1, new List { + "Die", "String", + "DamageType", "Damage Type" + }); + AddBoost("WeaponDamageTypeOverride", 1, new List { + "DamageType", "Damage Type" + }); + AddBoost("MaximizeHealing", 1, new List { + "Direction", "HealingDirection", + "Type", "ResurrectType" + }); + AddBoost("IgnoreEnterAttackRange", 0, new List {}); + AddBoost("DamageBonus", 1, new List { + "Amount", "Lua", + "DamageType", "Damage Type", + "Arg3", "Boolean" + }); + AddBoost("Detach", 0, new List {}); + AddBoost("ConsumeItemBlock", 0, new List {}); + AddBoost("AdvanceSpells", 1, new List { + "SpellId", "SpellId", + "Arg2", "Int" + }); + AddBoost("SpellResistance", 1, new List { + "Resistance", "ResistanceBoostFlags" + }); + AddBoost("WeaponAttackRollBonus", 1, new List { + "Amount", "Lua" + }); + AddBoost("SpellSaveDC", 1, new List { + "DC", "Int" + }); + AddBoost("RedirectDamage", 1, new List { + "Arg1", "Float", + "DamageType", "Damage Type", + "DamageType2", "Damage Type", + "Arg4", "Boolean" + }); + AddBoost("CanSeeThrough", 1, new List { + "CanSeeThrough", "Boolean" + }); + 
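+            // Sketch of how these registrations are read (an inference from the call pattern,
+            // not taken from the game data): AddBoost(name, requiredArgs, { paramName, paramType, ... })
+            // pairs each parameter name with the parser type used to validate it. A stats entry
+            // boost list such as
+            //
+            //     data "Boosts" "AC(2);Ability(Strength,2)"
+            //
+            // would then presumably be checked against AddBoost("AC", 1, ...) and
+            // AddBoost("Ability", 2, ...), with each argument handed to the parser registered
+            // for its declared type ("Int", "Ability", ...).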
AddBoost("CanShootThrough", 1, new List { + "CanShootThrough", "Boolean" + }); + AddBoost("CanWalkThrough", 1, new List { + "CanWalkThrough", "Boolean" + }); + AddBoost("MonkWeaponAttackOverride", 0, new List {}); + AddBoost("MonkWeaponDamageDiceOverride", 1, new List { + "Arg1", "Lua" + }); + AddBoost("IntrinsicSummonerProficiency", 0, new List {}); + AddBoost("HorizontalFOVOverride", 1, new List { + "FOV", "Float" + }); + AddBoost("CharacterUnarmedDamage", 1, new List { + "Damage", "Lua", + "DamageType", "Damage Type" + }); + AddBoost("UnarmedMagicalProperty", 0, new List {}); + AddBoost("ActionResourceReplenishTypeOverride", 2, new List { + "ActionResource", "String", // Action resource name + "ReplenishType", "ResourceReplenishType" + }); + AddBoost("AreaDamageEvade", 0, new List {}); + AddBoost("ActionResourcePreventReduction", 1, new List { + "ActionResource", "String", // Action resource name + "Level", "Int" + }); + AddBoost("AttackSpellOverride", 1, new List { + "AttackSpell", "SpellId", + "OriginalSpell", "SpellId" + }); + AddBoost("Lock", 0, new List { + "DC", "Guid" + }); + AddBoost("NoAOEDamageOnLand", 0, new List {}); + AddBoost("IgnorePointBlankDisadvantage", 1, new List { + "Flags", "WeaponFlags" + }); + AddBoost("CriticalHitExtraDice", 1, new List { + "ExtraDice", "Int", + "AttackType", "AttackType" + }); + AddBoost("DodgeAttackRoll", 2, new List { + "Arg1", "Int", + "Arg2", "Int", + "Status", "StatusIdOrGroup" + }); + AddBoost("GameplayObscurity", 1, new List { + "Obscurity", "Float" + }); + AddBoost("MaximumRollResult", 2, new List { + "RollType", "StatsRollType", + "MinResult", "Int" + }); + AddBoost("UnlockInterrupt", 1, new List { + "Interrupt", "Interrupt" + }); + AddBoost("IntrinsicSourceProficiency", 0, new List {}); + AddBoost("JumpMaxDistanceBonus", 1, new List { + "Bonus", "Float" + }); + AddBoost("ArmorAbilityModifierCapOverride", 2, new List { + "ArmorType", "ArmorType", + "Cap", "Int" + }); + AddBoost("IgnoreResistance", 2, new List { + "DamageType", "Damage Type", + "Flags", "ResistanceBoostFlags" + }); + AddBoost("ConcentrationIgnoreDamage", 1, new List { + "SpellSchool", "SpellSchool" + }); + AddBoost("LeaveTriggers", 0, new List {}); + AddBoost("IgnoreLowGroundPenalty", 1, new List { + "RollType", "StatsRollType" + }); + AddBoost("IgnoreSurfaceCover", 1, new List { + "SurfaceType", "String" // Surface type + }); + AddBoost("EnableBasicItemInteractions", 0, new List {}); + AddBoost("SoundsBlocked", 0, new List {}); + } + + public void LoadEnumerations(Stream stream) + { + StatEnumeration curEnum = null; + + string line; + + using (var reader = new StreamReader(stream)) + while ((line = reader.ReadLine()) != null) + { + var trimmed = line.Trim(); + if (trimmed.Length > 0) + { + if (trimmed.StartsWith("valuelist ")) + { + var name = trimmed.Substring(11, trimmed.Length - 12); + curEnum = new StatEnumeration(name); + Enumerations.Add(curEnum.Name, curEnum); + } + else if (trimmed.StartsWith("value ")) + { + var label = trimmed.Substring(7, trimmed.Length - 8); + curEnum.AddItem(label); + } + } } } } diff --git a/LSLib/LS/Stats/StatFileParser.cs b/LSLib/LS/Stats/StatFileParser.cs index 7976ff3d..2084fff9 100644 --- a/LSLib/LS/Stats/StatFileParser.cs +++ b/LSLib/LS/Stats/StatFileParser.cs @@ -2,16 +2,18 @@ using LSLib.LS.Story.GoalParser; using System; using System.Collections.Generic; +using System.Data; using System.IO; using System.Linq; +using System.Xml; namespace LSLib.LS.Stats { - public class StatEntity + public class StatEntry { public string Name; - 
public StatSubtypeDefinition Type; - public StatEntity BaseClass; + public StatEntryType Type; + public StatEntry BasedOn; public CodeLocation Location; public Dictionary Properties = new Dictionary(); public Dictionary PropertyLocations = new Dictionary(); @@ -72,6 +74,7 @@ public class StatLoadingContext public List Errors = new List(); public Dictionary> DeclarationsByType = new Dictionary>(); public Dictionary> ResolvedDeclarationsByType = new Dictionary>(); + public Dictionary> GuidResources = new Dictionary>(); public void LogError(string code, string message, string path = null, int line = 0, string statObjectName = null) { @@ -86,7 +89,7 @@ public void LogError(string code, string message, string path = null, int line = } } - class StatBaseClassResolver + class StatEntryReferenceResolver { private readonly StatLoadingContext Context; public bool AllowMappingErrors = false; @@ -97,37 +100,37 @@ private class BaseClassMapping public StatDeclaration BaseClass; } - public StatBaseClassResolver(StatLoadingContext context) + public StatEntryReferenceResolver(StatLoadingContext context) { Context = context; } - public bool ResolveBaseClass( - StatTypeDefinition definition, StatDeclaration declaration, + public bool ResolveUsageRef( + StatEntryType type,StatDeclaration declaration, Dictionary declarations, - out StatDeclaration baseClassDeclaration) + out StatDeclaration basedOn) { var props = declaration.Properties; - var name = (string)props[definition.NameProperty]; - if (definition.BaseClassProperty != null && props.ContainsKey(definition.BaseClassProperty)) + var name = (string)props[type.NameProperty]; + if (type.BasedOnProperty != null && props.ContainsKey(type.BasedOnProperty)) { - var baseClass = (string)props[definition.BaseClassProperty]; + var baseClass = (string)props[type.BasedOnProperty]; if (declarations.TryGetValue(baseClass, out StatDeclaration baseDeclaration)) { - baseClassDeclaration = baseDeclaration; + basedOn = baseDeclaration; return true; } else { - Context.LogError(DiagnosticCode.StatBaseClassNotKnown, $"Stat declaration '{name}' references nonexistent base class '{baseClass}'", + Context.LogError(DiagnosticCode.StatBaseClassNotKnown, $"Stats entry '{name}' references nonexistent base '{baseClass}'", declaration.Location.FileName, declaration.Location.StartLine, name); - baseClassDeclaration = null; + basedOn = null; return false; } } - baseClassDeclaration = null; + basedOn = null; return true; } @@ -154,14 +157,16 @@ private void PropagateInheritedProperties(List mappings) } } - public Dictionary ResolveBaseClasses(StatTypeDefinition definition, Dictionary declarations) + public Dictionary ResolveUsageRefs(StatEntryType type, Dictionary declarations) { var mappings = new List(); var resolved = new Dictionary(); foreach (var declaration in declarations) { - var succeeded = ResolveBaseClass(definition, declaration.Value, declarations, out StatDeclaration baseClass); + if (declaration.Value.WasInstantiated) continue; + + var succeeded = ResolveUsageRef(type, declaration.Value, declarations, out StatDeclaration baseClass); if (succeeded && baseClass != null) { mappings.Add(new BaseClassMapping @@ -192,31 +197,21 @@ public StatLoaderReferenceValidator(StatLoadingContext ctx) Context = ctx; } - public bool IsValidReference(string reference, string statType, string statSubtype) + public bool IsValidReference(string reference, string statType) { if (Context.DeclarationsByType.TryGetValue(statType, out var stats)) { - if (stats.TryGetValue(reference, out var stat)) - { 
- if (statSubtype == null) - { - return true; - } - else - { - var subtypeProperty = Context.Definitions.Definitions[statType].SubtypeProperty; - if (subtypeProperty == null) - { - throw new Exception($"Reference constraint found for stat type '{statType}' that has no subtype."); - } + return stats.TryGetValue(reference, out var stat); + } - var subtype = (string)stat.Properties[subtypeProperty]; - if (statSubtype == subtype) - { - return true; - } - } - } + return false; + } + + public bool IsValidGuidResource(string name, string resourceType) + { + if (Context.GuidResources.TryGetValue(resourceType, out var resources)) + { + return resources.TryGetValue(name, out var resource); } return false; @@ -263,24 +258,16 @@ private void AddDeclarations(string path, List declarations) } var statType = declaration.Properties["EntityType"].ToString(); - if (statType == "CraftingStations") - { - statType = "CraftingStationsItemComboPreviewData"; - } - if (statType == "ObjectCategories") - { - statType = "ObjectCategoriesItemComboPreviewData"; - } - if (!Context.Definitions.Definitions.TryGetValue(statType, out StatTypeDefinition definition)) + if (!Context.Definitions.Types.TryGetValue(statType, out StatEntryType type)) { Context.LogError(DiagnosticCode.StatEntityTypeUnknown, $"No definition exists for stat type '{statType}'", declaration.Location.FileName, declaration.Location.StartLine); continue; } - if (!declaration.Properties.ContainsKey(definition.NameProperty)) + if (!declaration.Properties.ContainsKey(type.NameProperty)) { - Context.LogError(DiagnosticCode.StatNameMissing, $"Stat entry has no '{definition.NameProperty}' property", declaration.Location.FileName, declaration.Location.StartLine); + Context.LogError(DiagnosticCode.StatNameMissing, $"Stat entry has no '{type.NameProperty}' property", declaration.Location.FileName, declaration.Location.StartLine); continue; } @@ -292,7 +279,7 @@ private void AddDeclarations(string path, List declarations) } // TODO - duplicate declaration check? 
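+                // For orientation, a declaration built from a stats .txt entry along the lines of
+                // (names here are illustrative):
+                //
+                //     new entry "ARM_Leather_Body"
+                //     type "Armor"
+                //     using "_Body"
+                //
+                // is expected to carry "EntityType" = "Armor", which selects the StatEntryType
+                // above; the entry name is then read from the type's name property just below.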
- var name = declaration.Properties[definition.NameProperty].ToString(); + var name = declaration.Properties[type.NameProperty].ToString(); declarationsByType[name] = declaration; } } @@ -306,51 +293,22 @@ public void LoadStatsFromStream(string path, Stream stream) } } - public void ResolveBaseClasses() + public void ResolveUsageRef() { + var resolver = new StatEntryReferenceResolver(Context); foreach (var type in Context.DeclarationsByType) { - var resolver = new StatBaseClassResolver(Context); - var definition = Context.Definitions.Definitions[type.Key]; - Context.ResolvedDeclarationsByType[type.Key] = resolver.ResolveBaseClasses(definition, type.Value); - } - } - - private StatSubtypeDefinition FindSubtype(StatTypeDefinition type, string declarationName, StatDeclaration declaration) - { - if (type.SubtypeProperty == null) - { - return type.Subtypes.Values.First(); - } - - if (declaration.Properties.TryGetValue(type.SubtypeProperty, out object subtypeName)) - { - var name = (string)subtypeName; - if (type.Subtypes.TryGetValue(name, out StatSubtypeDefinition subtype)) - { - return subtype; - } - else - { - Context.LogError(DiagnosticCode.StatSubtypeMissing, $"Stat declaration '{declarationName}' references unknown subtype '{name}'", - declaration.Location.FileName, declaration.Location.StartLine); - return null; - } - } - else - { - Context.LogError(DiagnosticCode.StatSubtypeMissing, $"Stat declaration '{declarationName}' is missing subtype property '{type.SubtypeProperty}'", - declaration.Location.FileName, declaration.Location.StartLine); - return null; + var typeDefn = Context.Definitions.Types[type.Key]; + Context.ResolvedDeclarationsByType[type.Key] = resolver.ResolveUsageRefs(typeDefn, type.Value); } } - private object ParseProperty(StatSubtypeDefinition subtype, string propertyName, object value, CodeLocation location, + private object ParseProperty(StatEntryType type, string propertyName, object value, CodeLocation location, string declarationName) { - if (!subtype.Fields.TryGetValue(propertyName, out StatField field)) + if (!type.Fields.TryGetValue(propertyName, out StatField field)) { - Context.LogError(DiagnosticCode.StatPropertyUnsupported, $"Property '{propertyName}' is not supported on {subtype.Name} '{declarationName}'", + Context.LogError(DiagnosticCode.StatPropertyUnsupported, $"Property '{propertyName}' is not supported on {type.Name} '{declarationName}'", location?.FileName, location?.StartLine ?? 0, declarationName); return null; } @@ -359,7 +317,13 @@ private object ParseProperty(StatSubtypeDefinition subtype, string propertyName, string errorText = null; object parsed; - if (field.Type != "Passthrough") + if (value is String && propertyName.Length + ((string)value).Length > 4085) + { + parsed = null; + Context.LogError(DiagnosticCode.StatPropertyValueInvalid, $"{type.Name} '{declarationName}' has invalid {propertyName}: Line cannot be longer than 4095 characters", + location?.FileName, location?.StartLine ?? 0, declarationName); + } + else if (field.Type != "Passthrough") { var parser = field.GetParser(ParserFactory, Context.Definitions); parsed = parser.Parse((string)value, ref succeeded, ref errorText); @@ -372,8 +336,16 @@ private object ParseProperty(StatSubtypeDefinition subtype, string propertyName, if (errorText != null) { - Context.LogError(DiagnosticCode.StatPropertyValueInvalid, $"{subtype.Name} '{declarationName}' has invalid {propertyName}: '{value}' ({errorText})", - location?.FileName, location?.StartLine ?? 
0, declarationName); + if (value is string && ((string)value).Length > 500) + { + Context.LogError(DiagnosticCode.StatPropertyValueInvalid, $"{type.Name} '{declarationName}' has invalid {propertyName}: {errorText}", + location?.FileName, location?.StartLine ?? 0, declarationName); + } + else + { + Context.LogError(DiagnosticCode.StatPropertyValueInvalid, $"{type.Name} '{declarationName}' has invalid {propertyName}: '{value}' ({errorText})", + location?.FileName, location?.StartLine ?? 0, declarationName); + } } if (succeeded) @@ -386,20 +358,20 @@ private object ParseProperty(StatSubtypeDefinition subtype, string propertyName, } } - private StatEntity InstantiateEntity(StatSubtypeDefinition subtype, string declarationName, StatDeclaration declaration) + private StatEntry InstantiateEntry(StatEntryType type, string declarationName, StatDeclaration declaration) { - return InstantiateEntityInternal(subtype, declarationName, declaration.Location, + return InstantiateEntryInternal(type, declarationName, declaration.Location, declaration.Properties, declaration.PropertyLocations); } - private StatEntity InstantiateEntityInternal(StatSubtypeDefinition subtype, string declarationName, + private StatEntry InstantiateEntryInternal(StatEntryType type, string declarationName, CodeLocation location, Dictionary properties, Dictionary propertyLocations) { - var entity = new StatEntity + var entity = new StatEntry { Name = declarationName, - Type = subtype, - BaseClass = null, // FIXME + Type = type, + BasedOn = null, // FIXME Location = location, Properties = new Dictionary(), PropertyLocations = propertyLocations @@ -413,7 +385,7 @@ private StatEntity InstantiateEntityInternal(StatSubtypeDefinition subtype, stri } propertyLocations.TryGetValue(property.Key, out CodeLocation propLocation); - var parsed = ParseProperty(subtype, property.Key, property.Value, propLocation, declarationName); + var parsed = ParseProperty(type, property.Key, property.Value, propLocation, declarationName); if (parsed != null) { entity.Properties.Add(property.Key, parsed); @@ -423,21 +395,77 @@ private StatEntity InstantiateEntityInternal(StatSubtypeDefinition subtype, stri return entity; } - public void InstantiateEntities() + public void InstantiateEntries() { foreach (var type in Context.ResolvedDeclarationsByType) { - var definition = Context.Definitions.Definitions[type.Key]; + var typeDefn = Context.Definitions.Types[type.Key]; foreach (var declaration in type.Value) { - var subtype = FindSubtype(definition, declaration.Key, declaration.Value); - if (subtype != null) + if (!declaration.Value.WasInstantiated) + { + InstantiateEntry(typeDefn, declaration.Key, declaration.Value); + declaration.Value.WasInstantiated = true; + } + } + } + } + + private void LoadGuidResources(Dictionary guidResources, XmlNodeList nodes) + { + foreach (var node in nodes) + { + var attributes = (node as XmlElement).GetElementsByTagName("attribute"); + foreach (var attribute in attributes) + { + var attr = attribute as XmlElement; + if (attr.GetAttribute("id") == "Name") + { + var name = attr.GetAttribute("value"); + guidResources[name] = name; + break; + } + } + } + } + + public void LoadGuidResources(XmlDocument doc, string typeName, string regionName) + { + Dictionary guidResources; + if (!Context.GuidResources.TryGetValue(typeName, out guidResources)) + { + guidResources = new Dictionary(); + Context.GuidResources[typeName] = guidResources; + } + + var regions = doc.DocumentElement.GetElementsByTagName("region"); + foreach (var region in 
regions) + { + if ((region as XmlElement).GetAttribute("id") == regionName) + { + var root = (region as XmlElement).GetElementsByTagName("node"); + if (root.Count > 0) { - InstantiateEntity(subtype, declaration.Key, declaration.Value); + var children = (root[0] as XmlElement).GetElementsByTagName("children"); + if (children.Count > 0) + { + var resources = (children[0] as XmlElement).GetElementsByTagName("node"); + LoadGuidResources(guidResources, resources); + } } } } } + + public void LoadActionResources(XmlDocument doc) + { + LoadGuidResources(doc, "ActionResource", "ActionResourceDefinitions"); + } + + public void LoadActionResourceGroups(XmlDocument doc) + { + LoadGuidResources(doc, "ActionResourceGroup", "ActionResourceGroupDefinitions"); + } } } diff --git a/LSLib/LS/Stats/StatPropertyParsers.cs b/LSLib/LS/Stats/StatPropertyParsers.cs deleted file mode 100644 index 03aa400a..00000000 --- a/LSLib/LS/Stats/StatPropertyParsers.cs +++ /dev/null @@ -1,357 +0,0 @@ -using LSLib.LS.Stats.Properties; -using System; -using System.Collections.Generic; -using System.Globalization; -using System.IO; -using System.Linq; -using System.Text; - -namespace LSLib.LS.Stats -{ - public interface IStatValueParser - { - object Parse(string value, ref bool succeeded, ref string errorText); - } - - public class StatReferenceConstraint - { - public string StatType; - public string StatSubtype; - } - - public interface IStatReferenceValidator - { - bool IsValidReference(string reference, string statType, string statSubtype); - } - - public class BooleanParser : IStatValueParser - { - public object Parse(string value, ref bool succeeded, ref string errorText) - { - if (value == "Yes" || value == "No") - { - succeeded = true; - return (value == "Yes"); - } - else - { - succeeded = false; - errorText = "expected boolean value 'Yes' or 'No'"; - return null; - } - } - } - - public class Int32Parser : IStatValueParser - { - public object Parse(string value, ref bool succeeded, ref string errorText) - { - if (Int32.TryParse(value, out int intval)) - { - succeeded = true; - return intval; - } - else - { - succeeded = false; - errorText = "expected an integer value"; - return null; - } - } - } - - public class FloatParser : IStatValueParser - { - public object Parse(string value, ref bool succeeded, ref string errorText) - { - if (Single.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out float floatval)) - { - succeeded = true; - return floatval; - } - else - { - succeeded = false; - errorText = "expected a float value"; - return null; - } - } - } - - public class EnumParser : IStatValueParser - { - private readonly StatEnumeration Enumeration; - - public EnumParser(StatEnumeration enumeration) - { - Enumeration = enumeration ?? 
throw new ArgumentNullException(); - } - - public object Parse(string value, ref bool succeeded, ref string errorText) - { - if (value == null || value == "") - { - value = "None"; - } - - if (Enumeration.ValueToIndexMap.ContainsKey(value)) - { - succeeded = true; - return value; - } - else - { - succeeded = false; - errorText = "expected one of: " + String.Join(", ", Enumeration.Values.Take(4)) + ", ..."; - return null; - } - } - } - - public class MultiValueEnumParser : IStatValueParser - { - private readonly EnumParser Parser; - - public MultiValueEnumParser(StatEnumeration enumeration) - { - Parser = new EnumParser(enumeration); - } - - public object Parse(string value, ref bool succeeded, ref string errorText) - { - succeeded = true; - - foreach (var item in value.Split(new char[] { ';' })) - { - Parser.Parse(item.Trim(new char[] { ' ' }), ref succeeded, ref errorText); - if (!succeeded) - { - errorText = $"Value '{item}' not supported; {errorText}"; - return null; - } - } - - return value; - } - } - - public class StringParser : IStatValueParser - { - public object Parse(string value, ref bool succeeded, ref string errorText) - { - succeeded = true; - return value; - } - } - - public class UUIDParser : IStatValueParser - { - public object Parse(string value, ref bool succeeded, ref string errorText) - { - if (Guid.TryParseExact(value, "D", out Guid parsed)) - { - succeeded = true; - return parsed; - } - else - { - errorText = $"'{value}' is not a valid UUID"; - succeeded = false; - return null; - } - } - } - - public class StatReferenceParser : IStatValueParser - { - private IStatReferenceValidator Validator; - private List Constraints; - - public StatReferenceParser(IStatReferenceValidator validator, List constraints) - { - Validator = validator; - Constraints = constraints; - } - - public object Parse(string value, ref bool succeeded, ref string errorText) - { - foreach (var constraint in Constraints) - { - if (Validator.IsValidReference(value, constraint.StatType, constraint.StatSubtype)) - { - succeeded = true; - return value; - } - } - - var refTypes = String.Join("/", Constraints.Select(c => c.StatType)); - errorText = $"'{value}' is not a valid {refTypes} reference"; - succeeded = false; - return null; - } - } - - public class MultiValueStatReferenceParser : IStatValueParser - { - private readonly StatReferenceParser Parser; - - public MultiValueStatReferenceParser(IStatReferenceValidator validator, List constraints) - { - Parser = new StatReferenceParser(validator, constraints); - } - - public object Parse(string value, ref bool succeeded, ref string errorText) - { - succeeded = true; - - foreach (var item in value.Split(new char[] { ';' })) - { - var trimmed = item.Trim(new char[] { ' ' }); - if (trimmed.Length > 0) - { - Parser.Parse(trimmed, ref succeeded, ref errorText); - if (!succeeded) - { - return null; - } - } - } - - return value; - } - } - - public class ExpressionParser : IStatValueParser - { - private readonly String ExpressionType; - private readonly StatDefinitionRepository Definitions; - private readonly StatValueParserFactory ParserFactory; - - public ExpressionParser(String expressionType, StatDefinitionRepository definitions, - StatValueParserFactory parserFactory) - { - ExpressionType = expressionType; - Definitions = definitions; - ParserFactory = parserFactory; - } - - public virtual object Parse(string value, ref bool succeeded, ref string errorText) - { - var valueBytes = Encoding.UTF8.GetBytes("__TYPE_" + ExpressionType + "__ " + value); - using (var 
buf = new MemoryStream(valueBytes)) - { - List errorTexts = new List(); - - var scanner = new StatPropertyScanner(); - scanner.SetSource(buf); - var parser = new StatPropertyParser(scanner, Definitions, ParserFactory); - parser.OnError += (string message) => errorTexts.Add(message); - succeeded = parser.Parse(); - if (!succeeded) - { - var location = scanner.LastLocation(); - var column = location.StartColumn - 10 - ExpressionType.Length + 1; - errorText = $"Syntax error at or near character {column}"; - return null; - } - else if (errorTexts.Count > 0) - { - succeeded = false; - errorText = String.Join("; ", errorTexts); - return null; - } - else - { - succeeded = true; - return parser.GetParsedObject(); - } - } - } - } - - public class ConditionsParser : IStatValueParser - { - private readonly ExpressionParser ExprParser; - - public ConditionsParser(StatDefinitionRepository definitions, StatValueParserFactory parserFactory) - { - ExprParser = new ExpressionParser("Conditions", definitions, parserFactory); - } - - public object Parse(string value, ref bool succeeded, ref string errorText) - { - value = value - .Replace(" ", "") - .Replace(";", "&") - .Trim(new char[] { '&' }); - - return ExprParser.Parse(value, ref succeeded, ref errorText); - } - } - - public class StatValueParserFactory - { - private readonly IStatReferenceValidator ReferenceValidator; - - public StatValueParserFactory(IStatReferenceValidator referenceValidator) - { - ReferenceValidator = referenceValidator; - } - - public IStatValueParser CreateReferenceParser(List constraints) - { - return new StatReferenceParser(ReferenceValidator, constraints); - } - - public IStatValueParser CreateParser(StatField field, StatDefinitionRepository definitions) - { - switch (field.Type) - { - case "Requirements": - return new ExpressionParser("Requirements", definitions, this); - - case "Properties": - return new ExpressionParser("Properties", definitions, this); - - case "Conditions": - return new ConditionsParser(definitions, this); - - case "Enumeration": - return new EnumParser(field.EnumType); - - case "EnumerationList": - return new MultiValueEnumParser(field.EnumType); - - case "Boolean": - return new BooleanParser(); - - case "Integer": - return new Int32Parser(); - - case "Float": - return new FloatParser(); - - case "UUID": - case "RootTemplate": - return new UUIDParser(); - - case "StatReference": - return new StatReferenceParser(ReferenceValidator, field.ReferenceTypes); - - case "StatReferences": - return new MultiValueStatReferenceParser(ReferenceValidator, field.ReferenceTypes); - - case "BaseClass": - case "Name": - case "String": - case "TranslatedString": - case "Comment": - case "Color": - return new StringParser(); - - default: - throw new ArgumentException($"Could not create parser for type '{field.Type}'"); - } - } - } -} \ No newline at end of file diff --git a/LSLib/LS/Stats/StatValueParsers.cs b/LSLib/LS/Stats/StatValueParsers.cs new file mode 100644 index 00000000..e36503e1 --- /dev/null +++ b/LSLib/LS/Stats/StatValueParsers.cs @@ -0,0 +1,752 @@ +using LSLib.LS.Stats.Properties; +using System; +using System.Collections.Generic; +using System.Globalization; +using System.IO; +using System.Linq; +using System.Text; + +namespace LSLib.LS.Stats +{ + public interface IStatValueParser + { + object Parse(string value, ref bool succeeded, ref string errorText); + } + + public class StatReferenceConstraint + { + public string StatType; + } + + public interface IStatReferenceValidator + { + bool IsValidReference(string 
reference, string statType); + bool IsValidGuidResource(string name, string resourceType); + } + + public class BooleanParser : IStatValueParser + { + public object Parse(string value, ref bool succeeded, ref string errorText) + { + if (value == "true" || value == "false" || value == "") + { + succeeded = true; + return (value == "true"); + } + else + { + succeeded = false; + errorText = "expected boolean value 'true' or 'false'"; + return null; + } + } + } + + public class Int32Parser : IStatValueParser + { + public object Parse(string value, ref bool succeeded, ref string errorText) + { + if (value == "") + { + succeeded = true; + return 0; + } + else if (Int32.TryParse(value, out int intval)) + { + succeeded = true; + return intval; + } + else + { + succeeded = false; + errorText = "expected an integer value"; + return null; + } + } + } + + public class FloatParser : IStatValueParser + { + public object Parse(string value, ref bool succeeded, ref string errorText) + { + if (value == "") + { + succeeded = true; + return 0.0f; + } + else if (Single.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out float floatval)) + { + succeeded = true; + return floatval; + } + else + { + succeeded = false; + errorText = "expected a float value"; + return null; + } + } + } + + public class EnumParser : IStatValueParser + { + private readonly StatEnumeration Enumeration; + + public EnumParser(StatEnumeration enumeration) + { + Enumeration = enumeration ?? throw new ArgumentNullException(); + } + + public object Parse(string value, ref bool succeeded, ref string errorText) + { + if (value == null || value == "") + { + value = Enumeration.Values[0]; + } + + if (Enumeration.ValueToIndexMap.ContainsKey(value)) + { + succeeded = true; + return value; + } + else + { + succeeded = false; + if (Enumeration.Values.Count > 4) + { + errorText = "expected one of: " + String.Join(", ", Enumeration.Values.Take(4)) + ", ..."; + } + else + { + errorText = "expected one of: " + String.Join(", ", Enumeration.Values); + } + return null; + } + } + } + + public class MultiValueEnumParser : IStatValueParser + { + private readonly EnumParser Parser; + + public MultiValueEnumParser(StatEnumeration enumeration) + { + Parser = new EnumParser(enumeration); + } + + public object Parse(string value, ref bool succeeded, ref string errorText) + { + succeeded = true; + + if (value.Length == 0) + { + return true; + } + + foreach (var item in value.Split(new char[] { ';' })) + { + Parser.Parse(item.Trim(new char[] { ' ' }), ref succeeded, ref errorText); + if (!succeeded) + { + errorText = $"Value '{item}' not supported; {errorText}"; + return null; + } + } + + return value; + } + } + + public class StringParser : IStatValueParser + { + public object Parse(string value, ref bool succeeded, ref string errorText) + { + if (value.Length > 2048) + { + errorText = "Value cannot be longer than 2048 characters"; + succeeded = false; + return null; + } + else + { + errorText = null; + succeeded = true; + return value; + } + } + } + + public class UUIDParser : IStatValueParser + { + public object Parse(string value, ref bool succeeded, ref string errorText) + { + if (value == "") + { + succeeded = true; + return Guid.Empty; + } + else if (Guid.TryParseExact(value, "D", out Guid parsed)) + { + succeeded = true; + return parsed; + } + else + { + errorText = $"'{value}' is not a valid UUID"; + succeeded = false; + return null; + } + } + } + + public class StatReferenceParser : IStatValueParser + { + private IStatReferenceValidator 
Validator;
+        private List<StatReferenceConstraint> Constraints;
+
+        public StatReferenceParser(IStatReferenceValidator validator, List<StatReferenceConstraint> constraints)
+        {
+            Validator = validator;
+            Constraints = constraints;
+        }
+
+        public object Parse(string value, ref bool succeeded, ref string errorText)
+        {
+            if (value == "")
+            {
+                succeeded = true;
+                return value;
+            }
+
+            foreach (var constraint in Constraints)
+            {
+                if (Validator.IsValidReference(value, constraint.StatType))
+                {
+                    succeeded = true;
+                    return value;
+                }
+            }
+
+            var refTypes = String.Join("/", Constraints.Select(c => c.StatType));
+            errorText = $"'{value}' is not a valid {refTypes} reference";
+            succeeded = false;
+            return null;
+        }
+    }
+
+    public class MultiValueStatReferenceParser : IStatValueParser
+    {
+        private readonly StatReferenceParser Parser;
+
+        public MultiValueStatReferenceParser(IStatReferenceValidator validator, List<StatReferenceConstraint> constraints)
+        {
+            Parser = new StatReferenceParser(validator, constraints);
+        }
+
+        public object Parse(string value, ref bool succeeded, ref string errorText)
+        {
+            succeeded = true;
+
+            foreach (var item in value.Split(new char[] { ';' }))
+            {
+                var trimmed = item.Trim(new char[] { ' ' });
+                if (trimmed.Length > 0)
+                {
+                    Parser.Parse(trimmed, ref succeeded, ref errorText);
+                    if (!succeeded)
+                    {
+                        return null;
+                    }
+                }
+            }
+
+            return value;
+        }
+    }
+
+    public enum ExpressionType
+    {
+        Boost,
+        Functor,
+        DescriptionParams
+    };
+
+    public class ExpressionParser : IStatValueParser
+    {
+        private readonly String ValidatorType;
+        private readonly StatDefinitionRepository Definitions;
+        private readonly StatValueParserFactory ParserFactory;
+        private readonly ExpressionType ExprType;
+
+        public ExpressionParser(String validatorType, StatDefinitionRepository definitions,
+            StatValueParserFactory parserFactory, ExpressionType type)
+        {
+            ValidatorType = validatorType;
+            Definitions = definitions;
+            ParserFactory = parserFactory;
+            ExprType = type;
+        }
+
+        public virtual object Parse(string value, ref bool succeeded, ref string errorText)
+        {
+            var valueBytes = Encoding.UTF8.GetBytes("__TYPE_" + ValidatorType + "__ " + value.TrimEnd());
+            using (var buf = new MemoryStream(valueBytes))
+            {
+                List<string> errorTexts = new List<string>();
+
+                var scanner = new StatPropertyScanner();
+                scanner.SetSource(buf);
+                var parser = new StatPropertyParser(scanner, Definitions, ParserFactory, valueBytes, ExprType);
+                parser.OnError += (string message) => errorTexts.Add(message);
+                succeeded = parser.Parse();
+                if (!succeeded)
+                {
+                    var location = scanner.LastLocation();
+                    var column = location.StartColumn - 10 - ValidatorType.Length + 1;
+                    errorText = $"Syntax error at or near character {column}";
+                    return null;
+                }
+                else if (errorTexts.Count > 0)
+                {
+                    succeeded = false;
+                    errorText = String.Join("; ", errorTexts);
+                    return null;
+                }
+                else
+                {
+                    succeeded = true;
+                    return parser.GetParsedObject();
+                }
+            }
+        }
+    }
+
+    public class LuaExpressionParser : IStatValueParser
+    {
+        public virtual object Parse(string value, ref bool succeeded, ref string errorText)
+        {
+            // Validate that the value is a syntactically correct Lua-style expression;
+            // only the success/failure outcome is used, the parsed result is discarded.
+            var valueBytes = Encoding.UTF8.GetBytes(value);
+            using (var buf = new MemoryStream(valueBytes))
+            {
+                var scanner = new Lua.StatLuaScanner();
+                scanner.SetSource(buf);
+                var parser = new Lua.StatLuaParser(scanner);
+                succeeded = parser.Parse();
+                if (!succeeded)
+                {
+                    var location = scanner.LastLocation();
+                    errorText = $"Syntax error at or near character {location.StartColumn}";
+                    return null;
+                }
+                else
+                {
+                    succeeded = true;
+                    return null;
+                }
+            }
+        }
+    }
+
+    // Validates ';'-separated action resource costs of the form Resource:Amount, with up to two
+    // trailing integer fields (reported as levels); Amount may also be "Distance" or
+    // "Distance*<multiplier>", and Resource must name a known ActionResource or
+    // ActionResourceGroup (e.g. "ActionPoint:1"; illustrative value only).
+    public 
class UseCostsParser : IStatValueParser + { + private readonly IStatReferenceValidator Validator; + + public UseCostsParser(IStatReferenceValidator validator) + { + Validator = validator; + } + + public virtual object Parse(string value, ref bool succeeded, ref string errorText) + { + if (value.Length == 0) return value; + + foreach (var resource in value.Split(';')) + { + var res = resource.Trim(); + if (res.Length == 0) continue; + + var parts = res.Split(':'); + if (parts.Length < 2 || parts.Length > 4) + { + errorText = $"Malformed use costs"; + return null; + } + + if (!Validator.IsValidGuidResource(parts[0], "ActionResource") && !Validator.IsValidGuidResource(parts[0], "ActionResourceGroup")) + { + errorText = $"Nonexistent action resource or action resource group: {parts[0]}"; + return null; + } + + var distanceExpr = parts[1].Split('*'); + if (distanceExpr[0] == "Distance") + { + if (distanceExpr.Length > 1 && !Single.TryParse(distanceExpr[1], NumberStyles.Float, CultureInfo.InvariantCulture, out float floatval)) + { + errorText = $"Malformed distance multiplier: {distanceExpr[1]}"; + return null; + } + + } + else if (!Single.TryParse(parts[1], NumberStyles.Float, CultureInfo.InvariantCulture, out float floatval)) + { + errorText = $"Malformed resource amount: {parts[1]}"; + return null; + } + + if (parts.Length == 3 && !Int32.TryParse(parts[2], NumberStyles.Integer, CultureInfo.InvariantCulture, out int intval)) + { + errorText = $"Malformed level: {parts[2]}"; + return null; + } + + if (parts.Length == 4 && !Int32.TryParse(parts[3], NumberStyles.Integer, CultureInfo.InvariantCulture, out intval)) + { + errorText = $"Malformed level: {parts[3]}"; + return null; + } + } + + succeeded = true; + return value; + } + } + + public class DiceRollParser : IStatValueParser + { + public virtual object Parse(string value, ref bool succeeded, ref string errorText) + { + if (value.Length == 0) return value; + + var parts = value.Split('d'); + if (parts.Length != 2 + || !Int32.TryParse(parts[0], NumberStyles.Integer, CultureInfo.InvariantCulture, out int numDice) + || !Int32.TryParse(parts[1], NumberStyles.Integer, CultureInfo.InvariantCulture, out int dieSize)) + { + errorText = $"Malformed dice roll"; + return null; + } + + if (dieSize != 4 && dieSize != 6 && dieSize != 8 && dieSize != 10 && dieSize != 12 && dieSize != 20 && dieSize != 100) + { + errorText = $"Invalid die size: {dieSize}"; + return null; + } + + succeeded = true; + return value; + } + } + + public class AnyParser : IStatValueParser + { + private readonly List Parsers; + private readonly String Message; + + public AnyParser(IEnumerable parsers, string message = null) + { + Parsers = parsers.ToList(); + Message = message; + } + + public object Parse(string value, ref bool succeeded, ref string errorText) + { + List errors = new List(); + foreach (var parser in Parsers) + { + succeeded = false; + string error = null; + var result = parser.Parse(value, ref succeeded, ref error); + if (succeeded) + { + return result; + } + else + { + errors.Add(error); + } + } + + if (Message != null && Message.Length > 0) + { + errorText = $"'{value}': {Message}"; + } + else + { + errorText = String.Join("; ", errors); + } + + return null; + } + } + + public class AnyType + { + public List Types; + public string Message; + } + + public class StatValueParserFactory + { + private readonly IStatReferenceValidator ReferenceValidator; + + public StatValueParserFactory(IStatReferenceValidator referenceValidator) + { + ReferenceValidator = 
referenceValidator; + } + + public IStatValueParser CreateReferenceParser(List constraints) + { + return new StatReferenceParser(ReferenceValidator, constraints); + } + + public IStatValueParser CreateParser(StatField field, StatDefinitionRepository definitions) + { + switch (field.Name) + { + case "Boosts": + case "DefaultBoosts": + case "BoostsOnEquipMainHand": + case "BoostsOnEquipOffHand": + return new ExpressionParser("Properties", definitions, this, ExpressionType.Boost); + + case "TooltipDamage": + case "TooltipDamageList": + case "TooltipStatusApply": + case "TooltipConditionalDamage": + return new ExpressionParser("Properties", definitions, this, ExpressionType.DescriptionParams); + + case "DescriptionParams": + case "ExtraDescriptionParams": + case "ShortDescriptionParams": + case "TooltipUpcastDescriptionParams": + return new ExpressionParser("DescriptionParams", definitions, this, ExpressionType.DescriptionParams); + + case "ConcentrationSpellID": + case "CombatAIOverrideSpell": + case "SpellContainerID": + case "FollowUpOriginalSpell": + case "RootSpellID": + return new StatReferenceParser(ReferenceValidator, new List + { + new StatReferenceConstraint{ StatType = "SpellData" } + }); + + case "ContainerSpells": + return new MultiValueStatReferenceParser(ReferenceValidator, new List + { + new StatReferenceConstraint{ StatType = "SpellData" } + }); + + case "InterruptPrototype": + return new StatReferenceParser(ReferenceValidator, new List + { + new StatReferenceConstraint{ StatType = "InterruptData" } + }); + + case "Passives": + case "PassivesOnEquip": + case "PassivesMainHand": + case "PassivesOffHand": + return new MultiValueStatReferenceParser(ReferenceValidator, new List + { + new StatReferenceConstraint{ StatType = "PassiveData" } + }); + + case "StatusOnEquip": + case "StatusInInventory": + return new MultiValueStatReferenceParser(ReferenceValidator, new List + { + new StatReferenceConstraint{ StatType = "StatusData" } + }); + + case "Cost": + case "UseCosts": + case "DualWieldingUseCosts": + case "ActionResources": + case "TooltipUseCosts": + case "RitualCosts": + case "HitCosts": + return new UseCostsParser(ReferenceValidator); + + case "Damage": + case "VersatileDamage": + case "StableRoll": + return new DiceRollParser(); + + case "Template": + case "StatusEffectOverride": + case "StatusEffectOnTurn": + case "ManagedStatusEffectGroup": + case "ApplyEffect": + case "SpellEffect": + case "StatusEffect": + case "DisappearEffect": + case "PreviewEffect": + case "PositionEffect": + case "HitEffect": + case "TargetEffect": + case "BeamEffect": + case "CastEffect": + case "PrepareEffect": + case "TooltipOnSave": + return new UUIDParser(); + + case "AmountOfTargets": + return new LuaExpressionParser(); + } + + return CreateParser(field.Type, field.EnumType, field.ReferenceTypes, definitions); + } + + public IStatValueParser CreateParser(string type, StatEnumeration enumType, List constraints, StatDefinitionRepository definitions) + { + if (enumType == null && definitions.Enumerations.TryGetValue(type, out StatEnumeration enumInfo) && enumInfo.Values.Count > 0) + { + enumType = enumInfo; + } + + if (enumType != null) + { + if (type == "SpellFlagList" + || type == "SpellCategoryFlags" + || type == "CinematicArenaFlags" + || type == "RestErrorFlags" + || type == "AuraFlags" + || type == "StatusEvent" + || type == "AIFlags" + || type == "WeaponFlags" + || type == "ProficiencyGroupFlags" + || type == "InterruptContext" + || type == "InterruptDefaultValue" + || type == 
"AttributeFlags" + || type == "PassiveFlags" + || type == "ResistanceFlags" + || type == "LineOfSightFlags" + || type == "StatusPropertyFlags" + || type == "StatusGroupFlags" + || type == "StatsFunctorContext") + { + return new MultiValueEnumParser(enumType); + } + else + { + return new EnumParser(enumType); + } + } + + switch (type) + { + case "Boolean": + return new BooleanParser(); + + case "ConstantInt": + case "Int": + return new Int32Parser(); + + case "ConstantFloat": + case "Float": + return new FloatParser(); + + case "String": + case "FixedString": + // FIXME - add TranslatedStringParser "guid;ver" + case "TranslatedString": + return new StringParser(); + + case "Guid": + return new UUIDParser(); + + case "Requirements": + return new ExpressionParser("Requirements", definitions, this, ExpressionType.Functor); + + case "StatsFunctors": + return new ExpressionParser("Properties", definitions, this, ExpressionType.Functor); + + case "Lua": + case "RollConditions": + case "TargetConditions": + case "Conditions": + return new LuaExpressionParser(); + + case "UseCosts": + return new UseCostsParser(ReferenceValidator); + + case "StatReference": + return new StatReferenceParser(ReferenceValidator, constraints); + + case "StatusId": + return new AnyParser(new List { + new EnumParser(definitions.Enumerations["EngineStatusType"]), + new StatReferenceParser(ReferenceValidator, new List + { + new StatReferenceConstraint{ StatType = "StatusData" } + }) + }, "Expected a status name"); + + case "ResurrectTypes": + return new MultiValueEnumParser(definitions.Enumerations["ResurrectType"]); + + case "StatusIdOrGroup": + return new AnyParser(new List { + new EnumParser(definitions.Enumerations["StatusGroupFlags"]), + new EnumParser(definitions.Enumerations["EngineStatusType"]), + new StatReferenceParser(ReferenceValidator, new List + { + new StatReferenceConstraint{ StatType = "StatusData" } + }) + }, "Expected a status or StatusGroup name"); + + case "SummonDurationOrInt": + return new AnyParser(new List { + new EnumParser(definitions.Enumerations["SummonDuration"]), + new Int32Parser() + }); + + case "AllOrDamageType": + return new AnyParser(new List { + new EnumParser(definitions.Enumerations["AllEnum"]), + new EnumParser(definitions.Enumerations["Damage Type"]), + }); + + case "RollAdjustmentTypeOrDamageType": + return new AnyParser(new List { + new EnumParser(definitions.Enumerations["RollAdjustmentType"]), + new EnumParser(definitions.Enumerations["Damage Type"]), + }); + + case "AbilityOrAttackRollAbility": + return new AnyParser(new List { + new EnumParser(definitions.Enumerations["Ability"]), + new EnumParser(definitions.Enumerations["AttackRollAbility"]), + }); + + case "DamageTypeOrDealDamageWeaponDamageType": + return new AnyParser(new List { + new EnumParser(definitions.Enumerations["Damage Type"]), + new EnumParser(definitions.Enumerations["DealDamageWeaponDamageType"]), + }); + + case "SpellId": + return new StatReferenceParser(ReferenceValidator, new List + { + new StatReferenceConstraint{ StatType = "SpellData" } + }); + + case "Interrupt": + return new StatReferenceParser(ReferenceValidator, new List + { + new StatReferenceConstraint{ StatType = "InterruptData" } + }); + + // THESE NEED TO BE FIXED! 
+ case "StatusIDs": + return new StringParser(); + + default: + throw new ArgumentException($"Could not create parser for type '{type}'"); + } + } + } +} \ No newline at end of file diff --git a/LSLib/LSLib.csproj b/LSLib/LSLib.csproj index f0233113..2f697402 100644 --- a/LSLib/LSLib.csproj +++ b/LSLib/LSLib.csproj @@ -75,6 +75,9 @@ + + + @@ -82,6 +85,8 @@ + + @@ -170,7 +175,7 @@ - + @@ -239,6 +244,9 @@ "$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(ProjectDir)\LS\Stats\Parser\StatProperty.lex.cs" "$(ProjectDir)\LS\Stats\Parser\StatProperty.lex" "$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(ProjectDir)\LS\Stats\Parser\StatProperty.yy.cs" "$(ProjectDir)\LS\Stats\Parser\StatProperty.yy" + +"$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(ProjectDir)\LS\Stats\Parser\StatLua.lex.cs" "$(ProjectDir)\LS\Stats\Parser\StatLua.lex" +"$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(ProjectDir)\LS\Stats\Parser\StatLua.yy.cs" "$(ProjectDir)\LS\Stats\Parser\StatLua.yy" + + + + + \ No newline at end of file diff --git a/ConverterApp/ConverterAppSettings.cs b/ConverterApp/ConverterAppSettings.cs index b5bcae71..3dfb1464 100644 --- a/ConverterApp/ConverterAppSettings.cs +++ b/ConverterApp/ConverterAppSettings.cs @@ -76,7 +76,7 @@ public DebugPaneSettings Debugging set { debugSettings = value; } } - private Game selectedGame = Game.DivinityOriginalSin2DE; + private Game selectedGame = Game.BaldursGate3; public int SelectedGame { diff --git a/ConverterApp/MainForm.cs b/ConverterApp/MainForm.cs index 3117fd03..fd173ebd 100644 --- a/ConverterApp/MainForm.cs +++ b/ConverterApp/MainForm.cs @@ -1,10 +1,10 @@ using System; -using Alphaleonis.Win32.Filesystem; using System.Windows.Forms; using LSLib.LS; using LSLib.LS.Enums; using Newtonsoft.Json; using System.ComponentModel; +using System.IO; namespace ConverterApp { diff --git a/ConverterApp/PackagePane.cs b/ConverterApp/PackagePane.cs index defd2034..0db0ef7f 100644 --- a/ConverterApp/PackagePane.cs +++ b/ConverterApp/PackagePane.cs @@ -1,7 +1,7 @@ using System; using System.Diagnostics; +using System.IO; using System.Windows.Forms; -using Alphaleonis.Win32.Filesystem; using LSLib.LS; using LSLib.LS.Enums; diff --git a/ConverterApp/Program.cs b/ConverterApp/Program.cs index 00334a96..db9175b5 100644 --- a/ConverterApp/Program.cs +++ b/ConverterApp/Program.cs @@ -1,6 +1,8 @@ using System; +using System.Runtime.Versioning; using System.Windows.Forms; +[assembly: SupportedOSPlatform("windows")] namespace ConverterApp { static class Program diff --git a/ConverterApp/Properties/AssemblyInfo.cs b/ConverterApp/Properties/AssemblyInfo.cs deleted file mode 100644 index 7dfb1673..00000000 --- a/ConverterApp/Properties/AssemblyInfo.cs +++ /dev/null @@ -1,35 +0,0 @@ -using System.Reflection; -using System.Runtime.InteropServices; - -// General Information about an assembly is controlled through the following -// set of attributes. Change these attribute values to modify the information -// associated with an assembly. -[assembly: AssemblyTitle("LSLib Toolkit - Main Application")] -[assembly: AssemblyDescription("")] -[assembly: AssemblyConfiguration("")] -[assembly: AssemblyCompany("")] -[assembly: AssemblyProduct("LSLib")] -[assembly: AssemblyCopyright("Copyright © Norbyte 2012-2023")] -[assembly: AssemblyTrademark("")] -[assembly: AssemblyCulture("")] - -// Setting ComVisible to false makes the types in this assembly not visible -// to COM components. 
If you need to access a type in this assembly from -// COM, set the ComVisible attribute to true on that type. -[assembly: ComVisible(false)] - -// The following GUID is for the ID of the typelib if this project is exposed to COM -[assembly: Guid("565dd5e7-6720-469d-b9f4-a922a7014747")] - -// Version information for an assembly consists of the following four values: -// -// Major Version -// Minor Version -// Build Number -// Revision -// -// You can specify all the values or you can default the Build and Revision Numbers -// by using the '*' as shown below: -// [assembly: AssemblyVersion("1.0.*")] -[assembly: AssemblyVersion("1.15.14.0")] -[assembly: AssemblyFileVersion("1.15.14.0")] diff --git a/ConverterApp/VirtualTexturesPane.Designer.cs b/ConverterApp/VirtualTexturesPane.Designer.cs index 69fa5488..ba424168 100644 --- a/ConverterApp/VirtualTexturesPane.Designer.cs +++ b/ConverterApp/VirtualTexturesPane.Designer.cs @@ -28,166 +28,312 @@ protected override void Dispose(bool disposing) /// private void InitializeComponent() { - this.groupBox1 = new System.Windows.Forms.GroupBox(); - this.extractTileSetBtn = new System.Windows.Forms.Button(); - this.destinationPathBrowseBtn = new System.Windows.Forms.Button(); - this.gtsBrowseBtn = new System.Windows.Forms.Button(); - this.gtsPath = new System.Windows.Forms.TextBox(); - this.label4 = new System.Windows.Forms.Label(); - this.destinationPath = new System.Windows.Forms.TextBox(); - this.label3 = new System.Windows.Forms.Label(); - this.destinationPathDlg = new System.Windows.Forms.FolderBrowserDialog(); - this.gtsFileDlg = new System.Windows.Forms.OpenFileDialog(); - this.actionProgressLabel = new System.Windows.Forms.Label(); - this.actionProgress = new System.Windows.Forms.ProgressBar(); - this.label5 = new System.Windows.Forms.Label(); - this.groupBox1.SuspendLayout(); - this.SuspendLayout(); + groupBox1 = new System.Windows.Forms.GroupBox(); + extractTileSetBtn = new System.Windows.Forms.Button(); + destinationPathBrowseBtn = new System.Windows.Forms.Button(); + gtsBrowseBtn = new System.Windows.Forms.Button(); + gtsPath = new System.Windows.Forms.TextBox(); + label4 = new System.Windows.Forms.Label(); + destinationPath = new System.Windows.Forms.TextBox(); + label3 = new System.Windows.Forms.Label(); + destinationPathDlg = new System.Windows.Forms.FolderBrowserDialog(); + gtsFileDlg = new System.Windows.Forms.OpenFileDialog(); + actionProgressLabel = new System.Windows.Forms.Label(); + actionProgress = new System.Windows.Forms.ProgressBar(); + label5 = new System.Windows.Forms.Label(); + groupBox2 = new System.Windows.Forms.GroupBox(); + modRootPathBrowseBtn = new System.Windows.Forms.Button(); + tileSetBrowseBtn = new System.Windows.Forms.Button(); + tileSetBuildBtn = new System.Windows.Forms.Button(); + button1 = new System.Windows.Forms.Button(); + button2 = new System.Windows.Forms.Button(); + button3 = new System.Windows.Forms.Button(); + tileSetConfigPath = new System.Windows.Forms.TextBox(); + label1 = new System.Windows.Forms.Label(); + modRootPath = new System.Windows.Forms.TextBox(); + label2 = new System.Windows.Forms.Label(); + modRootPathDlg = new System.Windows.Forms.FolderBrowserDialog(); + tileSetConfigDlg = new System.Windows.Forms.OpenFileDialog(); + groupBox1.SuspendLayout(); + groupBox2.SuspendLayout(); + SuspendLayout(); // // groupBox1 // - this.groupBox1.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) - | 
System.Windows.Forms.AnchorStyles.Right))); - this.groupBox1.Controls.Add(this.extractTileSetBtn); - this.groupBox1.Controls.Add(this.destinationPathBrowseBtn); - this.groupBox1.Controls.Add(this.gtsBrowseBtn); - this.groupBox1.Controls.Add(this.gtsPath); - this.groupBox1.Controls.Add(this.label4); - this.groupBox1.Controls.Add(this.destinationPath); - this.groupBox1.Controls.Add(this.label3); - this.groupBox1.Location = new System.Drawing.Point(9, 16); - this.groupBox1.Margin = new System.Windows.Forms.Padding(4); - this.groupBox1.Name = "groupBox1"; - this.groupBox1.Padding = new System.Windows.Forms.Padding(4); - this.groupBox1.Size = new System.Drawing.Size(1167, 170); - this.groupBox1.TabIndex = 66; - this.groupBox1.TabStop = false; - this.groupBox1.Text = "Extract Virtual Textures"; + groupBox1.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left | System.Windows.Forms.AnchorStyles.Right; + groupBox1.Controls.Add(extractTileSetBtn); + groupBox1.Controls.Add(destinationPathBrowseBtn); + groupBox1.Controls.Add(gtsBrowseBtn); + groupBox1.Controls.Add(gtsPath); + groupBox1.Controls.Add(label4); + groupBox1.Controls.Add(destinationPath); + groupBox1.Controls.Add(label3); + groupBox1.Location = new System.Drawing.Point(9, 20); + groupBox1.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + groupBox1.Name = "groupBox1"; + groupBox1.Padding = new System.Windows.Forms.Padding(4, 5, 4, 5); + groupBox1.Size = new System.Drawing.Size(1167, 212); + groupBox1.TabIndex = 66; + groupBox1.TabStop = false; + groupBox1.Text = "Extract Virtual Textures"; // // extractTileSetBtn // - this.extractTileSetBtn.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Right))); - this.extractTileSetBtn.Location = new System.Drawing.Point(945, 134); - this.extractTileSetBtn.Margin = new System.Windows.Forms.Padding(4); - this.extractTileSetBtn.Name = "extractTileSetBtn"; - this.extractTileSetBtn.Size = new System.Drawing.Size(213, 28); - this.extractTileSetBtn.TabIndex = 62; - this.extractTileSetBtn.Text = "Extract Textures"; - this.extractTileSetBtn.UseVisualStyleBackColor = true; - this.extractTileSetBtn.Click += new System.EventHandler(this.extractTileSetBtn_Click); + extractTileSetBtn.Anchor = System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Right; + extractTileSetBtn.Location = new System.Drawing.Point(945, 168); + extractTileSetBtn.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + extractTileSetBtn.Name = "extractTileSetBtn"; + extractTileSetBtn.Size = new System.Drawing.Size(213, 35); + extractTileSetBtn.TabIndex = 62; + extractTileSetBtn.Text = "Extract Textures"; + extractTileSetBtn.UseVisualStyleBackColor = true; + extractTileSetBtn.Click += extractTileSetBtn_Click; // // destinationPathBrowseBtn // - this.destinationPathBrowseBtn.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right))); - this.destinationPathBrowseBtn.Location = new System.Drawing.Point(1105, 94); - this.destinationPathBrowseBtn.Margin = new System.Windows.Forms.Padding(4); - this.destinationPathBrowseBtn.Name = "destinationPathBrowseBtn"; - this.destinationPathBrowseBtn.Size = new System.Drawing.Size(55, 28); - this.destinationPathBrowseBtn.TabIndex = 61; - this.destinationPathBrowseBtn.Text = "..."; - this.destinationPathBrowseBtn.UseVisualStyleBackColor = true; - this.destinationPathBrowseBtn.Click += new 
System.EventHandler(this.destinationPathBrowseBtn_Click); + destinationPathBrowseBtn.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right; + destinationPathBrowseBtn.Location = new System.Drawing.Point(1105, 118); + destinationPathBrowseBtn.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + destinationPathBrowseBtn.Name = "destinationPathBrowseBtn"; + destinationPathBrowseBtn.Size = new System.Drawing.Size(55, 35); + destinationPathBrowseBtn.TabIndex = 61; + destinationPathBrowseBtn.Text = "..."; + destinationPathBrowseBtn.UseVisualStyleBackColor = true; + destinationPathBrowseBtn.Click += destinationPathBrowseBtn_Click; // // gtsBrowseBtn // - this.gtsBrowseBtn.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right))); - this.gtsBrowseBtn.Location = new System.Drawing.Point(1105, 37); - this.gtsBrowseBtn.Margin = new System.Windows.Forms.Padding(4); - this.gtsBrowseBtn.Name = "gtsBrowseBtn"; - this.gtsBrowseBtn.Size = new System.Drawing.Size(55, 28); - this.gtsBrowseBtn.TabIndex = 58; - this.gtsBrowseBtn.Text = "..."; - this.gtsBrowseBtn.UseVisualStyleBackColor = true; - this.gtsBrowseBtn.Click += new System.EventHandler(this.gtpBrowseBtn_Click); + gtsBrowseBtn.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right; + gtsBrowseBtn.Location = new System.Drawing.Point(1105, 46); + gtsBrowseBtn.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + gtsBrowseBtn.Name = "gtsBrowseBtn"; + gtsBrowseBtn.Size = new System.Drawing.Size(55, 35); + gtsBrowseBtn.TabIndex = 58; + gtsBrowseBtn.Text = "..."; + gtsBrowseBtn.UseVisualStyleBackColor = true; + gtsBrowseBtn.Click += gtpBrowseBtn_Click; // // gtsPath // - this.gtsPath.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) - | System.Windows.Forms.AnchorStyles.Right))); - this.gtsPath.Location = new System.Drawing.Point(12, 39); - this.gtsPath.Margin = new System.Windows.Forms.Padding(4); - this.gtsPath.Name = "gtsPath"; - this.gtsPath.Size = new System.Drawing.Size(1093, 22); - this.gtsPath.TabIndex = 56; + gtsPath.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left | System.Windows.Forms.AnchorStyles.Right; + gtsPath.Location = new System.Drawing.Point(12, 49); + gtsPath.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + gtsPath.Name = "gtsPath"; + gtsPath.Size = new System.Drawing.Size(1093, 27); + gtsPath.TabIndex = 56; // // label4 // - this.label4.AutoSize = true; - this.label4.Location = new System.Drawing.Point(8, 20); - this.label4.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); - this.label4.Name = "label4"; - this.label4.Size = new System.Drawing.Size(119, 16); - this.label4.TabIndex = 57; - this.label4.Text = "Tileset (GTS) path:"; + label4.AutoSize = true; + label4.Location = new System.Drawing.Point(8, 25); + label4.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); + label4.Name = "label4"; + label4.Size = new System.Drawing.Size(129, 20); + label4.TabIndex = 57; + label4.Text = "Tileset (GTS) path:"; // // destinationPath // - this.destinationPath.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) - | System.Windows.Forms.AnchorStyles.Right))); - this.destinationPath.Location = new System.Drawing.Point(12, 96); - this.destinationPath.Margin = new System.Windows.Forms.Padding(4); - 
this.destinationPath.Name = "destinationPath"; - this.destinationPath.Size = new System.Drawing.Size(1093, 22); - this.destinationPath.TabIndex = 59; + destinationPath.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left | System.Windows.Forms.AnchorStyles.Right; + destinationPath.Location = new System.Drawing.Point(12, 120); + destinationPath.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + destinationPath.Name = "destinationPath"; + destinationPath.Size = new System.Drawing.Size(1093, 27); + destinationPath.TabIndex = 59; // // label3 // - this.label3.AutoSize = true; - this.label3.Location = new System.Drawing.Point(8, 76); - this.label3.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); - this.label3.Name = "label3"; - this.label3.Size = new System.Drawing.Size(106, 16); - this.label3.TabIndex = 60; - this.label3.Text = "Destination path:"; + label3.AutoSize = true; + label3.Location = new System.Drawing.Point(8, 95); + label3.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); + label3.Name = "label3"; + label3.Size = new System.Drawing.Size(122, 20); + label3.TabIndex = 60; + label3.Text = "Destination path:"; // // gtsFileDlg // - this.gtsFileDlg.Filter = "Virtual Texture Set (.gts)|*.gts"; + gtsFileDlg.Filter = "Virtual Texture Set (.gts)|*.gts"; // // actionProgressLabel // - this.actionProgressLabel.AutoSize = true; - this.actionProgressLabel.Location = new System.Drawing.Point(94, 195); - this.actionProgressLabel.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); - this.actionProgressLabel.Name = "actionProgressLabel"; - this.actionProgressLabel.Size = new System.Drawing.Size(0, 16); - this.actionProgressLabel.TabIndex = 67; + actionProgressLabel.AutoSize = true; + actionProgressLabel.Location = new System.Drawing.Point(88, 478); + actionProgressLabel.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); + actionProgressLabel.Name = "actionProgressLabel"; + actionProgressLabel.Size = new System.Drawing.Size(0, 20); + actionProgressLabel.TabIndex = 67; // // actionProgress // - this.actionProgress.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) - | System.Windows.Forms.AnchorStyles.Right))); - this.actionProgress.Location = new System.Drawing.Point(9, 212); - this.actionProgress.Margin = new System.Windows.Forms.Padding(4); - this.actionProgress.Name = "actionProgress"; - this.actionProgress.Size = new System.Drawing.Size(1168, 28); - this.actionProgress.TabIndex = 65; + actionProgress.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left | System.Windows.Forms.AnchorStyles.Right; + actionProgress.Location = new System.Drawing.Point(9, 501); + actionProgress.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + actionProgress.Name = "actionProgress"; + actionProgress.Size = new System.Drawing.Size(1168, 35); + actionProgress.TabIndex = 65; // // label5 // - this.label5.AutoSize = true; - this.label5.Location = new System.Drawing.Point(5, 193); - this.label5.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); - this.label5.Name = "label5"; - this.label5.Size = new System.Drawing.Size(65, 16); - this.label5.TabIndex = 66; - this.label5.Text = "Progress:"; + label5.AutoSize = true; + label5.Location = new System.Drawing.Point(5, 477); + label5.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); + label5.Name = "label5"; + label5.Size = new System.Drawing.Size(68, 20); + label5.TabIndex = 66; + label5.Text = 
"Progress:"; + // + // groupBox2 + // + groupBox2.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left | System.Windows.Forms.AnchorStyles.Right; + groupBox2.Controls.Add(modRootPathBrowseBtn); + groupBox2.Controls.Add(tileSetBrowseBtn); + groupBox2.Controls.Add(tileSetBuildBtn); + groupBox2.Controls.Add(button1); + groupBox2.Controls.Add(button2); + groupBox2.Controls.Add(button3); + groupBox2.Controls.Add(tileSetConfigPath); + groupBox2.Controls.Add(label1); + groupBox2.Controls.Add(modRootPath); + groupBox2.Controls.Add(label2); + groupBox2.Location = new System.Drawing.Point(10, 252); + groupBox2.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + groupBox2.Name = "groupBox2"; + groupBox2.Padding = new System.Windows.Forms.Padding(4, 5, 4, 5); + groupBox2.Size = new System.Drawing.Size(1167, 212); + groupBox2.TabIndex = 68; + groupBox2.TabStop = false; + groupBox2.Text = "Build Tile Set"; + // + // modRootPathBrowseBtn + // + modRootPathBrowseBtn.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right; + modRootPathBrowseBtn.Location = new System.Drawing.Point(1104, 119); + modRootPathBrowseBtn.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + modRootPathBrowseBtn.Name = "modRootPathBrowseBtn"; + modRootPathBrowseBtn.Size = new System.Drawing.Size(55, 35); + modRootPathBrowseBtn.TabIndex = 64; + modRootPathBrowseBtn.Text = "..."; + modRootPathBrowseBtn.UseVisualStyleBackColor = true; + modRootPathBrowseBtn.Click += modRootPathBrowseBtn_Click; + // + // tileSetBrowseBtn + // + tileSetBrowseBtn.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right; + tileSetBrowseBtn.Location = new System.Drawing.Point(1104, 47); + tileSetBrowseBtn.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + tileSetBrowseBtn.Name = "tileSetBrowseBtn"; + tileSetBrowseBtn.Size = new System.Drawing.Size(55, 35); + tileSetBrowseBtn.TabIndex = 63; + tileSetBrowseBtn.Text = "..."; + tileSetBrowseBtn.UseVisualStyleBackColor = true; + tileSetBrowseBtn.Click += tileSetConfigBrowseBtn_Click; + // + // tileSetBuildBtn + // + tileSetBuildBtn.Anchor = System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Right; + tileSetBuildBtn.Location = new System.Drawing.Point(944, 167); + tileSetBuildBtn.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + tileSetBuildBtn.Name = "tileSetBuildBtn"; + tileSetBuildBtn.Size = new System.Drawing.Size(213, 35); + tileSetBuildBtn.TabIndex = 63; + tileSetBuildBtn.Text = "Build"; + tileSetBuildBtn.UseVisualStyleBackColor = true; + tileSetBuildBtn.Click += tileSetBuildBtn_Click; + // + // button1 + // + button1.Anchor = System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Right; + button1.Location = new System.Drawing.Point(1911, 278); + button1.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + button1.Name = "button1"; + button1.Size = new System.Drawing.Size(213, 35); + button1.TabIndex = 62; + button1.Text = "Extract Textures"; + button1.UseVisualStyleBackColor = true; + // + // button2 + // + button2.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right; + button2.Location = new System.Drawing.Point(2071, 120); + button2.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + button2.Name = "button2"; + button2.Size = new System.Drawing.Size(55, 35); + button2.TabIndex = 61; + button2.Text = "..."; + button2.UseVisualStyleBackColor = true; + // + // button3 + // + button3.Anchor = 
System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right; + button3.Location = new System.Drawing.Point(2071, 48); + button3.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + button3.Name = "button3"; + button3.Size = new System.Drawing.Size(55, 35); + button3.TabIndex = 58; + button3.Text = "..."; + button3.UseVisualStyleBackColor = true; + // + // tileSetConfigPath + // + tileSetConfigPath.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left | System.Windows.Forms.AnchorStyles.Right; + tileSetConfigPath.Location = new System.Drawing.Point(13, 51); + tileSetConfigPath.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + tileSetConfigPath.Name = "tileSetConfigPath"; + tileSetConfigPath.Size = new System.Drawing.Size(1091, 27); + tileSetConfigPath.TabIndex = 56; + // + // label1 + // + label1.AutoSize = true; + label1.Location = new System.Drawing.Point(9, 27); + label1.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); + label1.Name = "label1"; + label1.Size = new System.Drawing.Size(156, 20); + label1.TabIndex = 57; + label1.Text = "Tile Set Configuration:"; + // + // modRootPath + // + modRootPath.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left | System.Windows.Forms.AnchorStyles.Right; + modRootPath.Location = new System.Drawing.Point(13, 122); + modRootPath.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + modRootPath.Name = "modRootPath"; + modRootPath.Size = new System.Drawing.Size(1091, 27); + modRootPath.TabIndex = 59; + // + // label2 + // + label2.AutoSize = true; + label2.Location = new System.Drawing.Point(9, 97); + label2.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); + label2.Name = "label2"; + label2.Size = new System.Drawing.Size(109, 20); + label2.TabIndex = 60; + label2.Text = "Mod root path:"; + // + // tileSetConfigDlg + // + tileSetConfigDlg.Filter = "Virtual Texture Set Configuration (.xml)|*.xml"; // // VirtualTexturesPane // - this.AutoScaleDimensions = new System.Drawing.SizeF(8F, 16F); - this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font; - this.Controls.Add(this.actionProgressLabel); - this.Controls.Add(this.groupBox1); - this.Controls.Add(this.actionProgress); - this.Controls.Add(this.label5); - this.Name = "VirtualTexturesPane"; - this.Size = new System.Drawing.Size(1188, 378); - this.groupBox1.ResumeLayout(false); - this.groupBox1.PerformLayout(); - this.ResumeLayout(false); - this.PerformLayout(); - + AutoScaleDimensions = new System.Drawing.SizeF(8F, 20F); + AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font; + Controls.Add(groupBox2); + Controls.Add(actionProgressLabel); + Controls.Add(groupBox1); + Controls.Add(actionProgress); + Controls.Add(label5); + Margin = new System.Windows.Forms.Padding(3, 4, 3, 4); + Name = "VirtualTexturesPane"; + Size = new System.Drawing.Size(1188, 570); + groupBox1.ResumeLayout(false); + groupBox1.PerformLayout(); + groupBox2.ResumeLayout(false); + groupBox2.PerformLayout(); + ResumeLayout(false); + PerformLayout(); } #endregion @@ -205,5 +351,18 @@ private void InitializeComponent() private System.Windows.Forms.Label actionProgressLabel; private System.Windows.Forms.ProgressBar actionProgress; private System.Windows.Forms.Label label5; + private System.Windows.Forms.GroupBox groupBox2; + private System.Windows.Forms.Button button1; + private System.Windows.Forms.Button button2; + private System.Windows.Forms.Button button3; + private System.Windows.Forms.TextBox tileSetConfigPath; + 
private System.Windows.Forms.Label label1; + private System.Windows.Forms.TextBox modRootPath; + private System.Windows.Forms.Label label2; + private System.Windows.Forms.FolderBrowserDialog modRootPathDlg; + private System.Windows.Forms.OpenFileDialog tileSetConfigDlg; + private System.Windows.Forms.Button modRootPathBrowseBtn; + private System.Windows.Forms.Button tileSetBrowseBtn; + private System.Windows.Forms.Button tileSetBuildBtn; } } diff --git a/ConverterApp/VirtualTexturesPane.cs b/ConverterApp/VirtualTexturesPane.cs index caad02b5..0782e3a5 100644 --- a/ConverterApp/VirtualTexturesPane.cs +++ b/ConverterApp/VirtualTexturesPane.cs @@ -1,14 +1,7 @@ -using Alphaleonis.Win32.Filesystem; -using LSLib.LS; -using LSLib.VirtualTextures; +using LSLib.VirtualTextures; using System; -using System.Collections.Generic; -using System.ComponentModel; -using System.Data; -using System.Drawing; +using System.IO; using System.Linq; -using System.Text; -using System.Threading.Tasks; using System.Windows.Forms; namespace ConverterApp @@ -46,11 +39,13 @@ private void extractTileSetBtn_Click(object sender, EventArgs e) try { var tileSet = new VirtualTileSet(gtsPath.Text); - for (var pfIdx = 0; pfIdx < tileSet.PageFileInfos.Count; pfIdx++) + var textures = tileSet.FourCCMetadata.ExtractTextureMetadata(); + + var i = 0; + foreach (var texture in textures) { - var fileInfo = tileSet.PageFileInfos[pfIdx]; - actionProgressLabel.Text = fileInfo.FileName; - actionProgress.Value = pfIdx * 100 / tileSet.PageFileInfos.Count; + actionProgressLabel.Text = "GTex: " + texture.Name; + actionProgress.Value = i++ * 100 / textures.Count; Application.DoEvents(); for (var layer = 0; layer < tileSet.TileSetLayers.Length; layer++) @@ -59,13 +54,13 @@ private void extractTileSetBtn_Click(object sender, EventArgs e) var level = 0; do { - tex = tileSet.ExtractPageFileTexture(pfIdx, level, layer); + tex = tileSet.ExtractTexture(level, layer, texture); level++; } while (tex == null && level < tileSet.TileSetLevels.Length); if (tex != null) { - var outputPath = destinationPath.Text + Path.DirectorySeparator + Path.GetFileNameWithoutExtension(fileInfo.FileName) + $"_{layer}.dds"; + var outputPath = destinationPath.Text + Path.PathSeparator + texture.Name + $"_{layer}.dds"; tex.SaveDDS(outputPath); } } @@ -87,5 +82,69 @@ private void extractTileSetBtn_Click(object sender, EventArgs e) extractTileSetBtn.Enabled = true; } } + + private void tileSetConfigBrowseBtn_Click(object sender, EventArgs e) + { + if (tileSetConfigDlg.ShowDialog(this) == DialogResult.OK) + { + tileSetConfigPath.Text = tileSetConfigDlg.FileName; + } + } + + private void modRootPathBrowseBtn_Click(object sender, EventArgs e) + { + DialogResult result = modRootPathDlg.ShowDialog(this); + if (result == DialogResult.OK) + { + modRootPath.Text = modRootPathDlg.SelectedPath; + } + } + + private void tileSetBuildBtn_Click(object sender, EventArgs ev) + { + try + { + var descriptor = new TileSetDescriptor(); + descriptor.RootPath = modRootPath.Text; + descriptor.Load(tileSetConfigPath.Text); + + var builder = new TileSetBuilder(descriptor.Config); + builder.OnStepStarted += (step) => { + actionProgressLabel.Text = step; + Application.DoEvents(); + }; + builder.OnStepProgress += (numerator, denumerator) => { + actionProgress.Maximum = denumerator; + actionProgress.Value = numerator; + Application.DoEvents(); + }; + + builder.OnStepStarted("Adding textures"); + foreach (var texture in descriptor.Textures) + { + var layerPaths = texture.Layers.Select(name => name != null 
? Path.Combine(descriptor.SourceTexturePath, name) : null).ToList(); + builder.AddTexture(texture.Name, layerPaths); + } + + builder.Build(descriptor.VirtualTexturePath); + + MessageBox.Show("Tile set build completed."); + } + catch (InvalidDataException e) + { + MessageBox.Show($"{e.Message}", "Tile Set Build Failed", MessageBoxButtons.OK, MessageBoxIcon.Error); + } + catch (FileNotFoundException e) + { + MessageBox.Show($"{e.Message}", "Tile Set Build Failed", MessageBoxButtons.OK, MessageBoxIcon.Error); + } + catch (Exception e) + { + MessageBox.Show($"Internal error!{Environment.NewLine}{Environment.NewLine}{e}", "Tile Set Build Failed", MessageBoxButtons.OK, MessageBoxIcon.Error); + } + + actionProgressLabel.Text = ""; + actionProgress.Value = 0; + } } } diff --git a/ConverterApp/VirtualTexturesPane.resx b/ConverterApp/VirtualTexturesPane.resx index 2f94ab20..3d97df92 100644 --- a/ConverterApp/VirtualTexturesPane.resx +++ b/ConverterApp/VirtualTexturesPane.resx @@ -1,17 +1,17 @@  - @@ -118,9 +118,15 @@ System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089 - 1045, 20 + 53, 22 - 1202, 20 + 234, 23 + + + 415, 25 + + + 581, 25 \ No newline at end of file diff --git a/ConverterApp/packages.config b/ConverterApp/packages.config deleted file mode 100644 index 1a84d93a..00000000 --- a/ConverterApp/packages.config +++ /dev/null @@ -1,5 +0,0 @@ - - - - - \ No newline at end of file diff --git a/DebuggerFrontend/DebugInfoLoader.cs b/DebuggerFrontend/DebugInfoLoader.cs index ef642757..07fc859f 100644 --- a/DebuggerFrontend/DebugInfoLoader.cs +++ b/DebuggerFrontend/DebugInfoLoader.cs @@ -1,5 +1,4 @@ using System; -using Google.Protobuf; using System.IO; using LSLib.LS; using LSLib.LS.Story.Compiler; @@ -204,7 +203,7 @@ public StoryDebugInfo Load(byte[] msgPayload) var compressed = new byte[msgPayload.Length - 4]; Array.Copy(msgPayload, 0, compressed, 0, msgPayload.Length - 4); - byte flags = BinUtils.MakeCompressionFlags(LSLib.LS.Enums.CompressionMethod.LZ4, LSLib.LS.Enums.CompressionLevel.FastCompression); + byte flags = BinUtils.MakeCompressionFlags(LSLib.LS.Enums.CompressionMethod.LZ4, LSLib.LS.Enums.LSCompressionLevel.FastCompression); byte[] decompressed = BinUtils.Decompress(compressed, (int)decompressedSize, flags); var msg = StoryDebugInfoMsg.Parser.ParseFrom(decompressed); var debugInfo = FromProtobuf(msg); diff --git a/DebuggerFrontend/DebuggerFrontend.csproj b/DebuggerFrontend/DebuggerFrontend.csproj index 1edc4e61..8d69683b 100644 --- a/DebuggerFrontend/DebuggerFrontend.csproj +++ b/DebuggerFrontend/DebuggerFrontend.csproj @@ -1,105 +1,35 @@ - - - + - Debug - AnyCPU - {31E71543-CBCF-43BB-AF77-D210D548118E} + net8.0 Exe LSTools.DebuggerFrontend - DebuggerFrontend - v4.7.2 - 512 - true - - - - x64 - true - full - false - bin\Debug\ - DEBUG;TRACE - prompt - 4 - - + false x64 - pdbonly - true - bin\Release\ - TRACE - prompt - 4 - false - - ..\packages\CommandLineArgumentsParser.3.0.19\lib\net452\CommandLineArgumentsParser.dll - - - ..\packages\Google.Protobuf.3.6.1\lib\net45\Google.Protobuf.dll - - - ..\packages\Newtonsoft.Json.13.0.1\lib\net45\Newtonsoft.Json.dll - - - ..\External\gppg\binaries\QUT.ShiftReduceParser.dll - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + - - - - - + - - {46372c50-4288-4b8e-af21-c934560600e0} - LSLib - + + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + + + + - - $(SolutionDir)\External\protoc\bin\protoc.exe --proto_path=$(ProjectDir) 
--csharp_out=$(ProjectDir) DbgProtocol.proto -$(SolutionDir)\External\protoc\bin\protoc.exe --proto_path=$(ProjectDir)..\StoryCompiler\ --csharp_out=$(ProjectDir) debuginfo.proto -"$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(ProjectDir)\ExpressionParser\Expression.lex.cs" "$(ProjectDir)\ExpressionParser\Expression.lex" -"$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(ProjectDir)\ExpressionParser\Expression.yy.cs" "$(ProjectDir)\ExpressionParser\Expression.yy" + "$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(SolutionDir)\DebuggerFrontend\ExpressionParser\Expression.lex.cs" "$(SolutionDir)\DebuggerFrontend\ExpressionParser\Expression.lex" +"$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(SolutionDir)\DebuggerFrontend\ExpressionParser\Expression.yy.cs" "$(SolutionDir)\DebuggerFrontend\ExpressionParser\Expression.yy" + Osiris VS Code Debugger Frontend + LSLib + Copyright © Norbyte 2012-2018 + 1.0.0.0 + 1.0.0.0 \ No newline at end of file diff --git a/DebuggerFrontend/Properties/AssemblyInfo.cs b/DebuggerFrontend/Properties/AssemblyInfo.cs index 3499587c..04588128 100644 --- a/DebuggerFrontend/Properties/AssemblyInfo.cs +++ b/DebuggerFrontend/Properties/AssemblyInfo.cs @@ -1,16 +1,7 @@ using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; - -// General Information about an assembly is controlled through the following -// set of attributes. Change these attribute values to modify the information -// associated with an assembly. -[assembly: AssemblyTitle("Osiris VS Code Debugger Frontend")] -[assembly: AssemblyDescription("")] -[assembly: AssemblyConfiguration("")] -[assembly: AssemblyCompany("")] -[assembly: AssemblyProduct("LSLib")] -[assembly: AssemblyCopyright("Copyright © Norbyte 2012-2018")] +using System.Runtime.Versioning; [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] @@ -20,17 +11,4 @@ [assembly: ComVisible(false)] // The following GUID is for the ID of the typelib if this project is exposed to COM -[assembly: Guid("31e71543-cbcf-43bb-af77-d210d548118e")] - -// Version information for an assembly consists of the following four values: -// -// Major Version -// Minor Version -// Build Number -// Revision -// -// You can specify all the values or you can default the Build and Revision Numbers -// by using the '*' as shown below: -// [assembly: AssemblyVersion("1.0.*")] -[assembly: AssemblyVersion("1.0.0.0")] -[assembly: AssemblyFileVersion("1.0.0.0")] +[assembly: Guid("31e71543-cbcf-43bb-af77-d210d548118e")] \ No newline at end of file diff --git a/DebuggerFrontend/debuginfo.proto b/DebuggerFrontend/debuginfo.proto new file mode 100644 index 00000000..14914e97 --- /dev/null +++ b/DebuggerFrontend/debuginfo.proto @@ -0,0 +1,89 @@ +syntax = "proto3"; + +package LSTools.StoryCompiler; + +option optimize_for = LITE_RUNTIME; + +message DatabaseDebugInfoMsg { + uint32 id = 1; + string name = 2; + repeated uint32 param_types = 3; +} + +message ActionDebugInfoMsg { + uint32 line = 1; +} + +message GoalDebugInfoMsg { + uint32 id = 1; + string name = 2; + string path = 3; + repeated ActionDebugInfoMsg init_actions = 4; + repeated ActionDebugInfoMsg exit_actions = 5; +} + +message RuleVariableDebugInfoMsg { + uint32 index = 1; + uint32 type = 2; + string name = 3; + bool unused = 4; +} + +message RuleDebugInfoMsg { + uint32 id = 1; + uint32 goal_id = 2; + repeated RuleVariableDebugInfoMsg variables = 3; + string name = 4; + repeated ActionDebugInfoMsg actions = 5; + uint32 conditions_start_line = 6; + uint32 
conditions_end_line = 7; + uint32 actions_start_line = 8; + uint32 actions_end_line = 9; +} + +message NodeDebugInfoMsg { + enum NodeType { + UNUSED = 0; + DATABASE = 1; + PROC = 2; + DIV_QUERY = 3; + AND = 4; + NOT_AND = 5; + REL_OP = 6; + RULE = 7; + INTERNAL_QUERY = 8; + USER_QUERY = 9; + } + + uint32 id = 1; + uint32 rule_id = 2; + uint32 line = 3; + map column_maps = 4; + uint32 database_id = 5; + string name = 6; + NodeType type = 7; + uint32 parent_node_id = 8; + string function_name = 9; + uint32 function_arity = 10; +} + +message FunctionParamDebugInfoMsg { + uint32 type_id = 1; + string name = 2; + bool out = 3; +} + +message FunctionDebugInfoMsg { + string name = 1; + repeated FunctionParamDebugInfoMsg params = 2; + uint32 type_id = 3; +} + +message StoryDebugInfoMsg { + repeated DatabaseDebugInfoMsg databases = 1; + repeated GoalDebugInfoMsg goals = 2; + repeated RuleDebugInfoMsg rules = 3; + repeated NodeDebugInfoMsg nodes = 4; + repeated FunctionDebugInfoMsg functions = 5; + uint32 version = 6; +} diff --git a/DebuggerFrontend/packages.config b/DebuggerFrontend/packages.config deleted file mode 100644 index 23b37023..00000000 --- a/DebuggerFrontend/packages.config +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - \ No newline at end of file diff --git a/Divine/CLI/CommandLineActions.cs b/Divine/CLI/CommandLineActions.cs index 53d20617..a2874c10 100644 --- a/Divine/CLI/CommandLineActions.cs +++ b/Divine/CLI/CommandLineActions.cs @@ -1,8 +1,8 @@ using System; using System.Collections.Generic; +using System.IO; using System.Linq; using System.Text.RegularExpressions; -using Alphaleonis.Win32.Filesystem; using LSLib.LS; using LSLib.LS.Enums; diff --git a/Divine/Divine.csproj b/Divine/Divine.csproj index f2e2e89c..e4a8d2e9 100644 --- a/Divine/Divine.csproj +++ b/Divine/Divine.csproj @@ -1,81 +1,19 @@ - - - + - Debug - AnyCPU - {CBFEE38F-5F12-4D6F-B4FB-267FB68A6BEA} + net8.0 Exe - Properties - Divine - divine - v4.7.2 - 512 - true - - - - x64 - true - full - false - bin\Debug\ - DEBUG;TRACE - prompt - 4 - false - - + false x64 - pdbonly - true - bin\Release\ - TRACE - prompt - 4 + LSLib - Divine Commandline Tool + LSLib + 1.15.14.0 + 1.15.14.0 - - ..\packages\AlphaFS.2.2.6\lib\net452\AlphaFS.dll - - - ..\packages\CommandLineArgumentsParser.3.0.19\lib\net452\CommandLineArgumentsParser.dll - - - - - - - - - - - - - - - - - - - - - - - + - - {46372c50-4288-4b8e-af21-c934560600e0} - LSLib - + + - - \ No newline at end of file diff --git a/Divine/Properties/AssemblyInfo.cs b/Divine/Properties/AssemblyInfo.cs index 2f7fd57d..48400ce6 100644 --- a/Divine/Properties/AssemblyInfo.cs +++ b/Divine/Properties/AssemblyInfo.cs @@ -1,16 +1,6 @@ using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; - -// General Information about an assembly is controlled through the following -// set of attributes. Change these attribute values to modify the information -// associated with an assembly. 
-[assembly: AssemblyTitle("LSLib - Divine Commandline Tool")] -[assembly: AssemblyDescription("")] -[assembly: AssemblyConfiguration("")] -[assembly: AssemblyCompany("")] -[assembly: AssemblyProduct("LSLib")] -[assembly: AssemblyCopyright("")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] @@ -21,16 +11,3 @@ // The following GUID is for the ID of the typelib if this project is exposed to COM [assembly: Guid("cbfee38f-5f12-4d6f-b4fb-267fb68a6bea")] - -// Version information for an assembly consists of the following four values: -// -// Major Version -// Minor Version -// Build Number -// Revision -// -// You can specify all the values or you can default the Build and Revision Numbers -// by using the '*' as shown below: -// [assembly: AssemblyVersion("1.0.*")] -[assembly: AssemblyVersion("1.15.14.0")] -[assembly: AssemblyFileVersion("1.15.14.0")] diff --git a/Divine/packages.config b/Divine/packages.config deleted file mode 100644 index 60c919ae..00000000 --- a/Divine/packages.config +++ /dev/null @@ -1,5 +0,0 @@ - - - - - \ No newline at end of file diff --git a/LSLib/Granny/Collada.cs b/LSLib/Granny/Collada.cs index 9013b1c4..14e61362 100644 --- a/LSLib/Granny/Collada.cs +++ b/LSLib/Granny/Collada.cs @@ -41,15 +41,18 @@ public static source MakeFloatSource(string parentName, string name, string[] co parentName = string.Join("", hash.Select(c => ((int)c).ToString("X2"))); } - var positions = new float_array(); - positions.id = parentName + "-" + name + "-array"; - - var source = new source(); - source.id = parentName + "-" + name; - source.name = name; + var positions = new float_array + { + id = parentName + "-" + name + "-array", + count = (ulong)values.Length, + Values = values.Select(x => (double)x).ToArray() + }; - positions.count = (ulong)values.Length; - positions.Values = values.Select(x => (double)x).ToArray(); + var source = new source + { + id = parentName + "-" + name, + name = name + }; var technique = MakeAccessor(type, components, stride, values.Length / components.Length, positions.id); source.technique_common = technique; @@ -59,18 +62,21 @@ public static source MakeFloatSource(string parentName, string name, string[] co public static source MakeNameSource(string parentName, string name, string[] components, string[] values, string type = "name") { - var names = new Name_array(); - names.id = parentName + "-" + name + "-array"; - - var source = new source(); - source.id = parentName + "-" + name; - source.name = name; - - names.count = (ulong)values.Length; var varNames = from v in values select v.Replace(' ', '_'); - names.Values = varNames.ToArray(); + var names = new Name_array + { + id = parentName + "-" + name + "-array", + count = (ulong)values.Length, + Values = varNames.ToArray() + }; + + var source = new source + { + id = parentName + "-" + name, + name = name + }; var technique = MakeAccessor(type, components, 1, values.Length / components.Length, names.id); source.technique_common = technique; diff --git a/LSLib/Granny/ColladaAnimation.cs b/LSLib/Granny/ColladaAnimation.cs index 09fd98dd..12705cbb 100644 --- a/LSLib/Granny/ColladaAnimation.cs +++ b/LSLib/Granny/ColladaAnimation.cs @@ -3,8 +3,7 @@ using System.Linq; using LSLib.Granny.GR2; using LSLib.Granny.Model; -using LSLib.Granny.Model.CurveData; -using OpenTK; +using OpenTK.Mathematics; namespace LSLib.Granny { @@ -23,7 +22,7 @@ public Single Duration private void ImportSources() { - Sources = new Dictionary(); + Sources = []; foreach (var item in Animation.Items) { if (item is source) @@ 
-55,8 +54,7 @@ private void ImportSampler() if (input.source[0] != '#') throw new ParsingException("Only ID references are supported for animation input sources"); - ColladaSource source; - if (!Sources.TryGetValue(input.source.Substring(1), out source)) + if (!Sources.TryGetValue(input.source.Substring(1), out ColladaSource source)) throw new ParsingException("Animation sampler " + input.semantic + " references nonexistent source: " + input.source); switch (input.semantic) @@ -119,14 +117,13 @@ private void ImportChannel(Skeleton skeleton) if (channel == null) throw new ParsingException("Animation " + Animation.id + " has no channel!"); - var parts = channel.target.Split(new char[] { '/' }); + var parts = channel.target.Split(['/']); if (parts.Length != 2) throw new ParsingException("Unsupported channel target format: " + channel.target); if (skeleton != null) { - Bone bone = null; - if (!skeleton.BonesByID.TryGetValue(parts[0], out bone)) + if (!skeleton.BonesByID.TryGetValue(parts[0], out Bone bone)) throw new ParsingException("Animation channel references nonexistent bone: " + parts[0]); if (bone.TransformSID != parts[1]) diff --git a/LSLib/Granny/ColladaSchema.cs b/LSLib/Granny/ColladaSchema.cs index c90142e8..84570078 100644 --- a/LSLib/Granny/ColladaSchema.cs +++ b/LSLib/Granny/ColladaSchema.cs @@ -25,6 +25,8 @@ using System.Xml; using System.Xml.Serialization; +#pragma warning disable 8981 + namespace LSLib.Granny { // @@ -9974,4 +9976,6 @@ public void Save(Stream stream) xSerializer.Serialize(writer, this); } } -} \ No newline at end of file +} + +#pragma warning restore 8981 diff --git a/LSLib/Granny/GR2/Format.cs b/LSLib/Granny/GR2/Format.cs index 80bc7903..edfcb208 100644 --- a/LSLib/Granny/GR2/Format.cs +++ b/LSLib/Granny/GR2/Format.cs @@ -3,7 +3,7 @@ using System.Diagnostics; using System.Linq; using System.Text; -using OpenTK; +using OpenTK.Mathematics; using System.IO; using System.Reflection; @@ -183,29 +183,7 @@ public Matrix4 ToMatrix4() public override string ToString() { - var sb = new StringBuilder(); - sb.Append("Rotation: ("); - sb.Append(Rotation.X); - sb.Append(", "); - sb.Append(Rotation.Y); - sb.Append(", "); - sb.Append(Rotation.Z); - sb.Append(", "); - sb.Append(Rotation.W); - sb.Append("); Translation: ("); - sb.Append(Translation.X); - sb.Append(", "); - sb.Append(Translation.Y); - sb.Append(", "); - sb.Append(Translation.Z); - sb.Append("); Scale: ("); - sb.Append(ScaleShear[0, 0]); - sb.Append(", "); - sb.Append(ScaleShear[1, 1]); - sb.Append(", "); - sb.Append(ScaleShear[2, 2]); - sb.Append(")"); - return sb.ToString(); + return $"Rotation: ({Rotation.X}, {Rotation.Y}, {Rotation.Z}); Translation: ({Translation.X}, {Translation.Y}, {Translation.Z}); Scale: ({ScaleShear[0, 0]}, {ScaleShear[1, 1]}, {ScaleShear[2, 2]})"; } } @@ -217,47 +195,47 @@ public class Magic /// /// Magic value used for version 7 little-endian 32-bit Granny files /// - private static byte[] LittleEndian32Magic = new byte[] { 0x29, 0xDE, 0x6C, 0xC0, 0xBA, 0xA4, 0x53, 0x2B, 0x25, 0xF5, 0xB7, 0xA5, 0xF6, 0x66, 0xE2, 0xEE }; + private static readonly byte[] LittleEndian32Magic = [0x29, 0xDE, 0x6C, 0xC0, 0xBA, 0xA4, 0x53, 0x2B, 0x25, 0xF5, 0xB7, 0xA5, 0xF6, 0x66, 0xE2, 0xEE]; /// /// Magic value used for version 7 little-endian 32-bit Granny files /// - private static byte[] LittleEndian32Magic2 = new byte[] { 0x29, 0x75, 0x31, 0x82, 0xBA, 0x02, 0x11, 0x77, 0x25, 0x3A, 0x60, 0x2F, 0xF6, 0x6A, 0x8C, 0x2E }; + private static readonly byte[] LittleEndian32Magic2 = [0x29, 0x75, 0x31, 0x82, 
0xBA, 0x02, 0x11, 0x77, 0x25, 0x3A, 0x60, 0x2F, 0xF6, 0x6A, 0x8C, 0x2E]; /// /// Magic value used for version 6 little-endian 32-bit Granny files /// - private static byte[] LittleEndian32MagicV6 = new byte[] { 0xB8, 0x67, 0xB0, 0xCA, 0xF8, 0x6D, 0xB1, 0x0F, 0x84, 0x72, 0x8C, 0x7E, 0x5E, 0x19, 0x00, 0x1E }; + private static readonly byte[] LittleEndian32MagicV6 = [0xB8, 0x67, 0xB0, 0xCA, 0xF8, 0x6D, 0xB1, 0x0F, 0x84, 0x72, 0x8C, 0x7E, 0x5E, 0x19, 0x00, 0x1E]; /// /// Magic value used for version 7 big-endian 32-bit Granny files /// - private static byte[] BigEndian32Magic = new byte[] { 0x0E, 0x11, 0x95, 0xB5, 0x6A, 0xA5, 0xB5, 0x4B, 0xEB, 0x28, 0x28, 0x50, 0x25, 0x78, 0xB3, 0x04 }; + private static readonly byte[] BigEndian32Magic = [0x0E, 0x11, 0x95, 0xB5, 0x6A, 0xA5, 0xB5, 0x4B, 0xEB, 0x28, 0x28, 0x50, 0x25, 0x78, 0xB3, 0x04]; /// /// Magic value used for version 7 big-endian 32-bit Granny files /// - private static byte[] BigEndian32Magic2 = new byte[] { 0x0E, 0x74, 0xA2, 0x0A, 0x6A, 0xEB, 0xEB, 0x64, 0xEB, 0x4E, 0x1E, 0xAB, 0x25, 0x91, 0xDB, 0x8F }; + private static readonly byte[] BigEndian32Magic2 = [0x0E, 0x74, 0xA2, 0x0A, 0x6A, 0xEB, 0xEB, 0x64, 0xEB, 0x4E, 0x1E, 0xAB, 0x25, 0x91, 0xDB, 0x8F]; /// /// Magic value used for version 7 little-endian 64-bit Granny files /// - private static byte[] LittleEndian64Magic = new byte[] { 0xE5, 0x9B, 0x49, 0x5E, 0x6F, 0x63, 0x1F, 0x14, 0x1E, 0x13, 0xEB, 0xA9, 0x90, 0xBE, 0xED, 0xC4 }; + private static readonly byte[] LittleEndian64Magic = [0xE5, 0x9B, 0x49, 0x5E, 0x6F, 0x63, 0x1F, 0x14, 0x1E, 0x13, 0xEB, 0xA9, 0x90, 0xBE, 0xED, 0xC4]; /// /// Magic value used for version 7 little-endian 64-bit Granny files /// - private static byte[] LittleEndian64Magic2 = new byte[] { 0xE5, 0x2F, 0x4A, 0xE1, 0x6F, 0xC2, 0x8A, 0xEE, 0x1E, 0xD2, 0xB4, 0x4C, 0x90, 0xD7, 0x55, 0xAF }; + private static readonly byte[] LittleEndian64Magic2 = [0xE5, 0x2F, 0x4A, 0xE1, 0x6F, 0xC2, 0x8A, 0xEE, 0x1E, 0xD2, 0xB4, 0x4C, 0x90, 0xD7, 0x55, 0xAF]; /// /// Magic value used for version 7 big-endian 64-bit Granny files /// - private static byte[] BigEndian64Magic = new byte[] { 0x31, 0x95, 0xD4, 0xE3, 0x20, 0xDC, 0x4F, 0x62, 0xCC, 0x36, 0xD0, 0x3A, 0xB1, 0x82, 0xFF, 0x89 }; + private static readonly byte[] BigEndian64Magic = [0x31, 0x95, 0xD4, 0xE3, 0x20, 0xDC, 0x4F, 0x62, 0xCC, 0x36, 0xD0, 0x3A, 0xB1, 0x82, 0xFF, 0x89]; /// /// Magic value used for version 7 big-endian 64-bit Granny files /// - private static byte[] BigEndian64Magic2 = new byte[] { 0x31, 0xC2, 0x4E, 0x7C, 0x20, 0x40, 0xA3, 0x25, 0xCC, 0xE1, 0xC2, 0x7A, 0xB1, 0x32, 0x49, 0xF3 }; + private static readonly byte[] BigEndian64Magic2 = [0x31, 0xC2, 0x4E, 0x7C, 0x20, 0x40, 0xA3, 0x25, 0xCC, 0xE1, 0xC2, 0x7A, 0xB1, 0x32, 0x49, 0xF3]; /// /// Size of magic value structure, in bytes @@ -344,23 +322,14 @@ public static Format FormatFromSignature(byte[] sig) public static byte[] SignatureFromFormat(Format format) { - switch (format) + return format switch { - case Format.LittleEndian32: - return LittleEndian32Magic; - - case Format.LittleEndian64: - return LittleEndian64Magic; - - case Format.BigEndian32: - return BigEndian32Magic; - - case Format.BigEndian64: - return BigEndian64Magic; - - default: - throw new ArgumentException(); - } + Format.LittleEndian32 => LittleEndian32Magic, + Format.LittleEndian64 => LittleEndian64Magic, + Format.BigEndian32 => BigEndian32Magic, + Format.BigEndian64 => BigEndian64Magic, + _ => throw new ArgumentException(), + }; } public void SetFormat(Format format, bool alternateSignature) @@ 
-369,45 +338,23 @@ public void SetFormat(Format format, bool alternateSignature) if (alternateSignature) { - switch (format) + this.signature = format switch { - case Format.LittleEndian32: - this.signature = LittleEndian32Magic2; - break; - - case Format.LittleEndian64: - this.signature = LittleEndian64Magic2; - break; - - case Format.BigEndian32: - this.signature = BigEndian32Magic2; - break; - - case Format.BigEndian64: - this.signature = BigEndian64Magic2; - break; - } + Format.LittleEndian32 => LittleEndian32Magic2, + Format.LittleEndian64 => LittleEndian64Magic2, + Format.BigEndian32 => BigEndian32Magic2, + Format.BigEndian64 => BigEndian64Magic2 + }; } else { - switch (format) + this.signature = format switch { - case Format.LittleEndian32: - this.signature = LittleEndian32Magic; - break; - - case Format.LittleEndian64: - this.signature = LittleEndian64Magic; - break; - - case Format.BigEndian32: - this.signature = BigEndian32Magic; - break; - - case Format.BigEndian64: - this.signature = BigEndian64Magic; - break; - } + Format.LittleEndian32 => LittleEndian32Magic, + Format.LittleEndian64 => LittleEndian64Magic, + Format.BigEndian32 => BigEndian32Magic, + Format.BigEndian64 => BigEndian64Magic + }; } } } @@ -494,14 +441,12 @@ public class Header public UInt32 Size() { - UInt32 headerSize; - switch (version) + var headerSize = version switch { - case 6: headerSize = HeaderSize_V6; break; - case 7: headerSize = HeaderSize_V7; break; - default: throw new InvalidDataException("Cannot calculate CRC for unknown header versions."); - } - + 6 => HeaderSize_V6, + 7 => HeaderSize_V7, + _ => throw new InvalidDataException("Cannot calculate CRC for unknown header versions."), + }; return headerSize; } @@ -642,8 +587,7 @@ public override bool Equals(object o) if (o == null) return false; - var reference = o as SectionReference; - return reference != null && reference.Section == Section && reference.Offset == Offset; + return o is SectionReference reference && reference.Section == Section && reference.Offset == Offset; } public bool Equals(SectionReference reference) @@ -680,8 +624,7 @@ public override bool Equals(object o) if (o == null) return false; - var reference = o as RelocatableReference; - return reference != null && reference.Offset == Offset; + return o is RelocatableReference reference && reference.Offset == Offset; } public bool Equals(RelocatableReference reference) @@ -792,7 +735,7 @@ public List Resolve(GR2Reader gr2) #endif var originalPos = gr2.Stream.Position; gr2.Seek(this); - Items = new List(); + Items = []; for (int i = 0; i < Size; i++) { Items.Add(gr2.ReadReference()); @@ -863,61 +806,37 @@ public bool IsScalar public UInt32 Size(GR2Reader gr2) { - switch (Type) + return Type switch { - case MemberType.Inline: - return Definition.Resolve(gr2).Size(gr2); + MemberType.Inline => Definition.Resolve(gr2).Size(gr2), - case MemberType.Int8: - case MemberType.BinormalInt8: - case MemberType.UInt8: - case MemberType.NormalUInt8: - return 1; + MemberType.Int8 => 1, + MemberType.BinormalInt8 => 1, + MemberType.UInt8 => 1, + MemberType.NormalUInt8 => 1, - case MemberType.Int16: - case MemberType.BinormalInt16: - case MemberType.UInt16: - case MemberType.NormalUInt16: - case MemberType.Real16: - return 2; + MemberType.Int16 => 2, + MemberType.BinormalInt16 => 2, + MemberType.UInt16 => 2, + MemberType.NormalUInt16 => 2, + MemberType.Real16 => 2, - case MemberType.Reference: - if (gr2.Magic.Is32Bit) - return 4; - else - return 8; + MemberType.Reference => gr2.Magic.Is32Bit ? 
4u : 8, - case MemberType.String: - case MemberType.Real32: - case MemberType.Int32: - case MemberType.UInt32: - return 4; - - case MemberType.VariantReference: - if (gr2.Magic.Is32Bit) - return 8; - else - return 16; + MemberType.String => 4, + MemberType.Real32 => 4, + MemberType.Int32 => 4, + MemberType.UInt32 => 4, - case MemberType.ArrayOfReferences: - case MemberType.ReferenceToArray: - if (gr2.Magic.Is32Bit) - return 8; - else - return 12; + MemberType.VariantReference => gr2.Magic.Is32Bit ? 8u : 16, + MemberType.ArrayOfReferences => gr2.Magic.Is32Bit ? 8u : 12, + MemberType.ReferenceToArray => gr2.Magic.Is32Bit ? 8u : 12, + MemberType.ReferenceToVariantArray => gr2.Magic.Is32Bit ? 12u : 20, - case MemberType.ReferenceToVariantArray: - if (gr2.Magic.Is32Bit) - return 12; - else - return 20; + MemberType.Transform => 17 * 4, - case MemberType.Transform: - return 17 * 4; - - default: - throw new ParsingException(String.Format("Unhandled member type: {0}", Type.ToString())); - } + _ => throw new ParsingException($"Unhandled member type: {Type}") + }; } public UInt32 MarshallingSize() @@ -1028,7 +947,7 @@ public static MemberDefinition CreateFromFieldInfo(FieldInfo info, GR2Writer wri var type = info.FieldType; member.Name = info.Name; member.GrannyName = info.Name; - member.Extra = new UInt32[] { 0, 0, 0 }; + member.Extra = [0, 0, 0]; member.CachedField = info; member.HasCachedField = true; @@ -1055,7 +974,7 @@ public static MemberDefinition CreateFromFieldInfo(FieldInfo info, GR2Writer wri member.Type = MemberType.Int32; else if (type == typeof(UInt32)) member.Type = MemberType.UInt32; - else if (type == typeof(Half)) + else if (type == typeof(OpenTK.Mathematics.Half)) member.Type = MemberType.Real16; else if (type == typeof(Single)) member.Type = MemberType.Real32; diff --git a/LSLib/Granny/GR2/Helpers.cs b/LSLib/Granny/GR2/Helpers.cs index 62f32907..f6999b00 100644 --- a/LSLib/Granny/GR2/Helpers.cs +++ b/LSLib/Granny/GR2/Helpers.cs @@ -6,8 +6,8 @@ namespace LSLib.Granny.GR2 { public static class Helpers { - private static Dictionary CachedConstructors = new Dictionary(); - private static Dictionary CachedArrayConstructors = new Dictionary(); + private static readonly Dictionary CachedConstructors = []; + private static readonly Dictionary CachedArrayConstructors = []; public delegate object ObjectCtor(); public delegate object ArrayCtor(int size); @@ -34,8 +34,7 @@ public static object CreateInstance(Type type) public static object CreateArrayInstance(Type type, int size) { - ArrayCtor ctor; - if (!CachedArrayConstructors.TryGetValue(type, out ctor)) + if (!CachedArrayConstructors.TryGetValue(type, out ArrayCtor ctor)) { var typeCtor = type.GetConstructor(new Type[] { typeof(int) }); var sizeParam = Expression.Parameter(typeof(int), ""); diff --git a/LSLib/Granny/GR2/Reader.cs b/LSLib/Granny/GR2/Reader.cs index 35784ee0..a1b8076d 100644 --- a/LSLib/Granny/GR2/Reader.cs +++ b/LSLib/Granny/GR2/Reader.cs @@ -11,26 +11,23 @@ namespace LSLib.Granny.GR2 { - public class ParsingException : Exception + public class ParsingException(string message) : Exception(message) { - public ParsingException(string message) - : base(message) - { } } - public class GR2Reader + public class GR2Reader(Stream stream) { - internal Stream InputStream; + internal Stream InputStream = stream; internal BinaryReader InputReader; internal Stream Stream; internal BinaryReader Reader; internal Magic Magic; internal Header Header; - internal List
<Section> Sections = new List<Section>(); - internal Dictionary Types = new Dictionary(); - private Dictionary CachedStructs = new Dictionary(); + internal List<Section>
Sections = []; + internal Dictionary Types = []; + private readonly Dictionary CachedStructs = []; #if DEBUG_GR2_SERIALIZATION - private HashSet DebugPendingResolve = new HashSet(); + private HashSet DebugPendingResolve = []; #endif public UInt32 Tag @@ -38,15 +35,9 @@ public UInt32 Tag get { return Header.tag; } } - public GR2Reader(Stream stream) - { - this.InputStream = stream; - } - public void Dispose() { - if (Stream != null) - Stream.Dispose(); + Stream?.Dispose(); } public void Read(object root) @@ -61,8 +52,10 @@ public void Read(object root) Header = ReadHeader(); for (int i = 0; i < Header.numSections; i++) { - var section = new Section(); - section.Header = ReadSectionHeader(); + var section = new Section + { + Header = ReadSectionHeader() + }; Sections.Add(section); } @@ -84,8 +77,10 @@ public void Read(object root) } } - var rootStruct = new StructReference(); - rootStruct.Offset = Sections[(int)Header.rootType.Section].Header.offsetInFile + Header.rootType.Offset; + var rootStruct = new StructReference + { + Offset = Sections[(int)Header.rootType.Section].Header.offsetInFile + Header.rootType.Offset + }; Seek(Header.rootNode); ReadStruct(rootStruct.Resolve(this), MemberType.Inline, root, null); @@ -94,15 +89,16 @@ public void Read(object root) private Magic ReadMagic() { - var magic = new Magic(); - magic.signature = InputReader.ReadBytes(16); + var magic = new Magic + { + signature = InputReader.ReadBytes(16), + headersSize = InputReader.ReadUInt32(), + headerFormat = InputReader.ReadUInt32(), + reserved1 = InputReader.ReadUInt32(), + reserved2 = InputReader.ReadUInt32() + }; magic.format = Magic.FormatFromSignature(magic.signature); - magic.headersSize = InputReader.ReadUInt32(); - magic.headerFormat = InputReader.ReadUInt32(); - magic.reserved1 = InputReader.ReadUInt32(); - magic.reserved2 = InputReader.ReadUInt32(); - if (magic.headerFormat != 0) throw new ParsingException("Compressed GR2 files are not supported"); @@ -120,16 +116,18 @@ private Magic ReadMagic() private Header ReadHeader() { - var header = new Header(); - header.version = InputReader.ReadUInt32(); - header.fileSize = InputReader.ReadUInt32(); - header.crc = InputReader.ReadUInt32(); - header.sectionsOffset = InputReader.ReadUInt32(); - header.numSections = InputReader.ReadUInt32(); - header.rootType = ReadSectionReferenceUnchecked(); - header.rootNode = ReadSectionReferenceUnchecked(); - header.tag = InputReader.ReadUInt32(); - header.extraTags = new UInt32[Header.ExtraTagCount]; + var header = new Header + { + version = InputReader.ReadUInt32(), + fileSize = InputReader.ReadUInt32(), + crc = InputReader.ReadUInt32(), + sectionsOffset = InputReader.ReadUInt32(), + numSections = InputReader.ReadUInt32(), + rootType = ReadSectionReferenceUnchecked(), + rootNode = ReadSectionReferenceUnchecked(), + tag = InputReader.ReadUInt32(), + extraTags = new UInt32[Header.ExtraTagCount] + }; for (int i = 0; i < Header.ExtraTagCount; i++) header.extraTags[i] = InputReader.ReadUInt32(); @@ -173,18 +171,20 @@ private Header ReadHeader() private SectionHeader ReadSectionHeader() { - var header = new SectionHeader(); - header.compression = InputReader.ReadUInt32(); - header.offsetInFile = InputReader.ReadUInt32(); - header.compressedSize = InputReader.ReadUInt32(); - header.uncompressedSize = InputReader.ReadUInt32(); - header.alignment = InputReader.ReadUInt32(); - header.first16bit = InputReader.ReadUInt32(); - header.first8bit = InputReader.ReadUInt32(); - header.relocationsOffset = InputReader.ReadUInt32(); - 
header.numRelocations = InputReader.ReadUInt32(); - header.mixedMarshallingDataOffset = InputReader.ReadUInt32(); - header.numMixedMarshallingData = InputReader.ReadUInt32(); + var header = new SectionHeader + { + compression = InputReader.ReadUInt32(), + offsetInFile = InputReader.ReadUInt32(), + compressedSize = InputReader.ReadUInt32(), + uncompressedSize = InputReader.ReadUInt32(), + alignment = InputReader.ReadUInt32(), + first16bit = InputReader.ReadUInt32(), + first8bit = InputReader.ReadUInt32(), + relocationsOffset = InputReader.ReadUInt32(), + numRelocations = InputReader.ReadUInt32(), + mixedMarshallingDataOffset = InputReader.ReadUInt32(), + numMixedMarshallingData = InputReader.ReadUInt32() + }; Debug.Assert(header.offsetInFile <= Header.fileSize); @@ -271,25 +271,23 @@ private void ReadSectionRelocationsInternal(Section section, Stream relocationsS System.Console.WriteLine(String.Format(" ===== Relocations for section at {0:X8} ===== ", section.Header.offsetInFile)); #endif - using (var relocationsReader = new BinaryReader(relocationsStream, Encoding.Default, true)) + using var relocationsReader = new BinaryReader(relocationsStream, Encoding.Default, true); + for (int i = 0; i < section.Header.numRelocations; i++) { - for (int i = 0; i < section.Header.numRelocations; i++) - { - UInt32 offsetInSection = relocationsReader.ReadUInt32(); - Debug.Assert(offsetInSection <= section.Header.uncompressedSize); - var reference = ReadSectionReference(relocationsReader); + UInt32 offsetInSection = relocationsReader.ReadUInt32(); + Debug.Assert(offsetInSection <= section.Header.uncompressedSize); + var reference = ReadSectionReference(relocationsReader); - Stream.Position = section.Header.offsetInFile + offsetInSection; - var fixupAddress = Sections[(int)reference.Section].Header.offsetInFile + reference.Offset; - Stream.Write(BitConverter.GetBytes(fixupAddress), 0, 4); + Stream.Position = section.Header.offsetInFile + offsetInSection; + var fixupAddress = Sections[(int)reference.Section].Header.offsetInFile + reference.Offset; + Stream.Write(BitConverter.GetBytes(fixupAddress), 0, 4); #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" LOCAL {0:X8} --> {1}:{2:X8}", offsetInSection, (SectionType)reference.Section, reference.Offset)); - System.Console.WriteLine(String.Format(" GLOBAL {0:X8} --> {1:X8}", - offsetInSection + section.Header.offsetInFile, - reference.Offset + Sections[(int)reference.Section].Header.offsetInFile)); + System.Console.WriteLine(String.Format(" LOCAL {0:X8} --> {1}:{2:X8}", offsetInSection, (SectionType)reference.Section, reference.Offset)); + System.Console.WriteLine(String.Format(" GLOBAL {0:X8} --> {1:X8}", + offsetInSection + section.Header.offsetInFile, + reference.Offset + Sections[(int)reference.Section].Header.offsetInFile)); #endif - } } } @@ -300,17 +298,13 @@ private void ReadSectionRelocations(Section section) InputStream.Seek(section.Header.relocationsOffset, SeekOrigin.Begin); if (section.Header.compression == 4) { - using (var reader = new BinaryReader(InputStream, Encoding.Default, true)) - { - UInt32 compressedSize = reader.ReadUInt32(); - byte[] compressed = reader.ReadBytes((int)compressedSize); - var uncompressed = Granny2Compressor.Decompress4( - compressed, (int)(section.Header.numRelocations * 12)); - using (var ms = new MemoryStream(uncompressed)) - { - ReadSectionRelocationsInternal(section, ms); - } - } + using var reader = new BinaryReader(InputStream, Encoding.Default, true); + UInt32 compressedSize = 
reader.ReadUInt32(); + byte[] compressed = reader.ReadBytes((int)compressedSize); + var uncompressed = Granny2Compressor.Decompress4( + compressed, (int)(section.Header.numRelocations * 12)); + using var ms = new MemoryStream(uncompressed); + ReadSectionRelocationsInternal(section, ms); } else { @@ -361,24 +355,24 @@ private void ReadSectionMixedMarshallingRelocationsInternal(Section section, Str System.Console.WriteLine(String.Format(" ===== Mixed marshalling relocations for section at {0:X8} ===== ", section.Header.offsetInFile)); #endif - using (var relocationsReader = new BinaryReader(relocationsStream, Encoding.Default, true)) + using var relocationsReader = new BinaryReader(relocationsStream, Encoding.Default, true); + for (int i = 0; i < section.Header.numMixedMarshallingData; i++) { - for (int i = 0; i < section.Header.numMixedMarshallingData; i++) + UInt32 count = relocationsReader.ReadUInt32(); + UInt32 offsetInSection = relocationsReader.ReadUInt32(); + Debug.Assert(offsetInSection <= section.Header.uncompressedSize); + var type = ReadSectionReference(relocationsReader); + var typeDefn = new StructReference { - UInt32 count = relocationsReader.ReadUInt32(); - UInt32 offsetInSection = relocationsReader.ReadUInt32(); - Debug.Assert(offsetInSection <= section.Header.uncompressedSize); - var type = ReadSectionReference(relocationsReader); - var typeDefn = new StructReference(); - typeDefn.Offset = Sections[(int)type.Section].Header.offsetInFile + type.Offset; + Offset = Sections[(int)type.Section].Header.offsetInFile + type.Offset + }; - Seek(section, offsetInSection); - MixedMarshal(count, typeDefn.Resolve(this)); + Seek(section, offsetInSection); + MixedMarshal(count, typeDefn.Resolve(this)); #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" {0:X8} [{1}] --> {2}:{3:X8}", offsetInSection, count, (SectionType)type.Section, type.Offset)); + System.Console.WriteLine(String.Format(" {0:X8} [{1}] --> {2}:{3:X8}", offsetInSection, count, (SectionType)type.Section, type.Offset)); #endif - } } } @@ -389,17 +383,13 @@ private void ReadSectionMixedMarshallingRelocations(Section section) InputStream.Seek(section.Header.mixedMarshallingDataOffset, SeekOrigin.Begin); if (section.Header.compression == 4) { - using (var reader = new BinaryReader(InputStream, Encoding.Default, true)) - { - UInt32 compressedSize = reader.ReadUInt32(); - byte[] compressed = reader.ReadBytes((int)compressedSize); - var uncompressed = Granny2Compressor.Decompress4( - compressed, (int)(section.Header.numMixedMarshallingData * 16)); - using (var ms = new MemoryStream(uncompressed)) - { - ReadSectionMixedMarshallingRelocationsInternal(section, ms); - } - } + using var reader = new BinaryReader(InputStream, Encoding.Default, true); + UInt32 compressedSize = reader.ReadUInt32(); + byte[] compressed = reader.ReadBytes((int)compressedSize); + var uncompressed = Granny2Compressor.Decompress4( + compressed, (int)(section.Header.numMixedMarshallingData * 16)); + using var ms = new MemoryStream(uncompressed); + ReadSectionMixedMarshallingRelocationsInternal(section, ms); } else { @@ -409,10 +399,11 @@ private void ReadSectionMixedMarshallingRelocations(Section section) public SectionReference ReadSectionReferenceUnchecked(BinaryReader reader) { - var reference = new SectionReference(); - reference.Section = reader.ReadUInt32(); - reference.Offset = reader.ReadUInt32(); - return reference; + return new SectionReference + { + Section = reader.ReadUInt32(), + Offset = reader.ReadUInt32() + }; } public 
SectionReference ReadSectionReferenceUnchecked() @@ -465,8 +456,10 @@ public StringReference ReadStringReference() public ArrayReference ReadArrayReference() { - var reference = new ArrayReference(); - reference.Size = Reader.ReadUInt32(); + var reference = new ArrayReference + { + Size = Reader.ReadUInt32() + }; if (Magic.Is32Bit) reference.Offset = Reader.ReadUInt32(); else @@ -476,8 +469,10 @@ public ArrayReference ReadArrayReference() public ArrayIndicesReference ReadArrayIndicesReference() { - var reference = new ArrayIndicesReference(); - reference.Size = Reader.ReadUInt32(); + var reference = new ArrayIndicesReference + { + Size = Reader.ReadUInt32() + }; if (Magic.Is32Bit) reference.Offset = Reader.ReadUInt32(); else @@ -506,7 +501,7 @@ public MemberDefinition ReadMemberDefinition() // Remove "The Divinity Engine" prefix from LSM fields if (defn.Name.StartsWith("The Divinity Engine", StringComparison.Ordinal)) { - defn.Name = defn.Name.Substring(19); + defn.Name = defn.Name[19..]; } defn.GrannyName = defn.Name; @@ -728,11 +723,9 @@ internal object ReadInstance(MemberDefinition definition, object node, Type prop else { // For non-native arrays we always assume the property is an IList - if (node == null) - node = Helpers.CreateInstance(propertyType); + node ??= Helpers.CreateInstance(propertyType); var items = node as System.Collections.IList; - var type = items.GetType().GetGenericArguments().Single(); for (int i = 0; i < definition.ArraySize; i++) { items.Add(ReadElement(definition, null, elementType, parent)); diff --git a/LSLib/Granny/GR2/Writer.cs b/LSLib/Granny/GR2/Writer.cs index 4dd1d039..2a5374af 100644 --- a/LSLib/Granny/GR2/Writer.cs +++ b/LSLib/Granny/GR2/Writer.cs @@ -27,11 +27,11 @@ public class WritableSection : Section public BinaryWriter Writer; public GR2Writer GR2; - public Dictionary Fixups = new Dictionary(); + public Dictionary Fixups = []; // Fixups for the data area that we'll need to update after serialization is finished - public Dictionary DataFixups = new Dictionary(); + public Dictionary DataFixups = []; - public List MixedMarshalling = new List(); + public List MixedMarshalling = []; public WritableSection(SectionType type, GR2Writer writer) { @@ -61,20 +61,20 @@ public void Finish() private SectionHeader InitHeader() { - var header = new SectionHeader(); - header.compression = 0; - header.offsetInFile = 0; // Set after serialization is finished - header.compressedSize = 0; // Set after serialization is finished - header.uncompressedSize = 0; // Set after serialization is finished - header.alignment = 4; - header.first16bit = 0; // Set after serialization is finished - header.first8bit = 0; // Set after serialization is finished - header.relocationsOffset = 0; // Set after serialization is finished - header.numRelocations = 0; // Set after serialization is finished - header.mixedMarshallingDataOffset = 0; // Set after serialization is finished - header.numMixedMarshallingData = 0; // Set after serialization is finished - - return header; + return new SectionHeader + { + compression = 0, + offsetInFile = 0, // Set after serialization is finished + compressedSize = 0, // Set after serialization is finished + uncompressedSize = 0, // Set after serialization is finished + alignment = 4, + first16bit = 0, // Set after serialization is finished + first8bit = 0, // Set after serialization is finished + relocationsOffset = 0, // Set after serialization is finished + numRelocations = 0, // Set after serialization is finished + mixedMarshallingDataOffset = 0, // 
Set after serialization is finished + numMixedMarshallingData = 0 // Set after serialization is finished + }; } public void AddFixup(object o) @@ -91,10 +91,12 @@ public void AddFixup(object o) internal void AddMixedMarshalling(object o, UInt32 count, StructDefinition type) { - var marshal = new MixedMarshallingData(); - marshal.Obj = o; - marshal.Count = count; - marshal.Type = type; + var marshal = new MixedMarshallingData + { + Obj = o, + Count = count, + Type = type + }; MixedMarshalling.Add(marshal); } @@ -133,11 +135,7 @@ public void WriteStructReference(StructDefinition defn) if (defn != null) { AddFixup(defn); - - if (!GR2.Types.ContainsKey(defn.Type)) - { - GR2.Types.Add(defn.Type, defn); - } + GR2.Types.TryAdd(defn.Type, defn); } if (GR2.Magic.Is32Bit) @@ -216,9 +214,11 @@ public void WriteStructDefinition(StructDefinition defn) } } - var end = new MemberDefinition(); - end.Type = MemberType.None; - end.Extra = new UInt32[] { 0, 0, 0 }; + var end = new MemberDefinition + { + Type = MemberType.None, + Extra = [0, 0, 0] + }; WriteMemberDefinition(end); } @@ -369,35 +369,32 @@ internal void WriteInstance(MemberDefinition definition, Type propertyType, obje { if (definition.ArraySize == 0) { - WriteElement(definition, propertyType, node); + WriteElement(definition, node); return; } if (propertyType.IsArray) { // If the property is a native array (ie. SomeType[]), create an array instance and set its values - var elementType = propertyType.GetElementType(); - Array arr = node as Array; Debug.Assert(arr.Length == definition.ArraySize); for (int i = 0; i < definition.ArraySize; i++) { - WriteElement(definition, elementType, arr.GetValue(i)); + WriteElement(definition, arr.GetValue(i)); } } else { // For non-native arrays we always assume the property is an IList var items = node as System.Collections.IList; - var elementType = items.GetType().GetGenericArguments().Single(); foreach (var element in items) { - WriteElement(definition, elementType, element); + WriteElement(definition, element); } } } - private void WriteElement(MemberDefinition definition, Type propertyType, object node) + private void WriteElement(MemberDefinition definition, object node) { var type = definition.CachedField.FieldType; bool dataArea = definition.DataArea || (Writer == DataWriter); @@ -679,17 +676,17 @@ struct QueuedStringSerialization internal Magic Magic; internal Header Header; internal WritableSection CurrentSection; - internal List Sections = new List(); - internal Dictionary Types = new Dictionary(); + internal List Sections = []; + internal Dictionary Types = []; internal RelocationArea Relocations; - private List StructWrites = new List(); - private List ArrayWrites = new List(); - private List StringWrites = new List(); + private List StructWrites = []; + private List ArrayWrites = []; + private List StringWrites = []; - internal Dictionary ObjectOffsets = new Dictionary(); - internal Dictionary DataObjectOffsets = new Dictionary(); - internal HashSet Strings = new HashSet(); + internal Dictionary ObjectOffsets = []; + internal Dictionary DataObjectOffsets = []; + internal HashSet Strings = []; // Version tag that will be written to the GR2 file public UInt32 VersionTag = Header.DefaultTag; @@ -720,9 +717,9 @@ public void FlushPendingWrites() var arrayWrites = ArrayWrites; var structWrites = StructWrites; var stringWrites = StringWrites; - ArrayWrites = new List(); - StructWrites = new List(); - StringWrites = new List(); + ArrayWrites = []; + StructWrites = []; + StringWrites = []; foreach (var 
write in structWrites) { @@ -896,22 +893,26 @@ private void WriteMagic(Magic magic) private Header InitHeader(uint numCustomSections) { - var header = new Header(); - header.version = Header.Version; - header.fileSize = 0; // Set after serialization is finished - header.crc = 0; // Set after serialization is finished + var header = new Header + { + version = Header.Version, + fileSize = 0, // Set after serialization is finished + crc = 0, // Set after serialization is finished + rootType = new SectionReference(), // Updated after serialization is finished + rootNode = new SectionReference(), // Updated after serialization is finished + numSections = (UInt32)SectionType.FirstVertexData + numCustomSections, + tag = VersionTag, + extraTags = new UInt32[Header.ExtraTagCount], + stringTableCrc = 0, + reserved1 = 0, + reserved2 = 0, + reserved3 = 0 + }; + header.sectionsOffset = header.Size(); - header.rootType = new SectionReference(); // Updated after serialization is finished - header.rootNode = new SectionReference(); // Updated after serialization is finished - header.numSections = (UInt32)SectionType.FirstVertexData + numCustomSections; - header.tag = VersionTag; - header.extraTags = new UInt32[Header.ExtraTagCount]; + for (int i = 0; i < Header.ExtraTagCount; i++) header.extraTags[i] = 0; - header.stringTableCrc = 0; - header.reserved1 = 0; - header.reserved2 = 0; - header.reserved3 = 0; return header; } @@ -957,8 +958,7 @@ public void WriteSectionReference(SectionReference r) internal StructDefinition LookupStructDefinition(Type type, object instance) { - StructDefinition defn = null; - if (Types.TryGetValue(type, out defn)) + if (Types.TryGetValue(type, out StructDefinition defn)) { return defn; } diff --git a/LSLib/Granny/GR2Utils.cs b/LSLib/Granny/GR2Utils.cs index 41a2b793..a38d334a 100644 --- a/LSLib/Granny/GR2Utils.cs +++ b/LSLib/Granny/GR2Utils.cs @@ -5,9 +5,6 @@ using LSLib.Granny.GR2; using LSLib.Granny.Model; using LSLib.LS; -using Directory = Alphaleonis.Win32.Filesystem.Directory; -using Path = Alphaleonis.Win32.Filesystem.Path; -using File = Alphaleonis.Win32.Filesystem.File; namespace LSLib.Granny { @@ -24,18 +21,12 @@ public static ExportFormat ExtensionToModelFormat(string path) { string extension = Path.GetExtension(path)?.ToLower(); - switch (extension) + return extension switch { - case ".gr2": - case ".lsm": - return ExportFormat.GR2; - - case ".dae": - return ExportFormat.DAE; - - default: - throw new ArgumentException($"Unrecognized model file extension: {extension}"); - } + ".gr2" or ".lsm" => ExportFormat.GR2, + ".dae" => ExportFormat.DAE, + _ => throw new ArgumentException($"Unrecognized model file extension: {extension}"), + }; } public static Root LoadModel(string inputPath) @@ -53,14 +44,12 @@ public static Root LoadModel(string inputPath, ExporterOptions options) { case ExportFormat.GR2: { - using (var fs = File.Open(inputPath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite)) - { - var root = new Root(); - var gr2 = new GR2Reader(fs); - gr2.Read(root); - root.PostLoad(gr2.Tag); - return root; - } + using var fs = File.Open(inputPath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite); + var root = new Root(); + var gr2 = new GR2Reader(fs); + gr2.Read(root); + root.PostLoad(gr2.Tag); + return root; } case ExportFormat.DAE: diff --git a/LSLib/Granny/Model/Animation.cs b/LSLib/Granny/Model/Animation.cs index e13827d0..b3eca439 100644 --- a/LSLib/Granny/Model/Animation.cs +++ b/LSLib/Granny/Model/Animation.cs @@ -1,7 +1,7 @@ using System; using 
System.Collections.Generic; using System.Linq; -using OpenTK; +using OpenTK.Mathematics; using LSLib.Granny.Model.CurveData; using LSLib.Granny.GR2; @@ -47,11 +47,14 @@ public void UpgradeToGr7() if (this.Degree == 0) { // Degree 0 curves are identities in all cases - var curve = new DaIdentity(); - curve.CurveDataHeader_DaIdentity = new CurveDataHeader(); - curve.CurveDataHeader_DaIdentity.Format = (byte)CurveFormat.DaIdentity; - curve.CurveDataHeader_DaIdentity.Degree = 0; - this.CurveData = curve; + CurveData = new DaIdentity + { + CurveDataHeader_DaIdentity = new CurveDataHeader + { + Format = (byte)CurveFormat.DaIdentity, + Degree = 0 + } + }; } else if (this.Degree == 2) { @@ -61,13 +64,16 @@ public void UpgradeToGr7() } // Degree 2 curves are stored in K32fC32f (v6 didn't support multiple curve formats) - var curve = new DaK32fC32f(); - curve.CurveDataHeader_DaK32fC32f = new CurveDataHeader(); - curve.CurveDataHeader_DaK32fC32f.Format = (byte)CurveFormat.DaK32fC32f; - curve.CurveDataHeader_DaK32fC32f.Degree = 2; - curve.Controls = Controls; - curve.Knots = Knots; - this.CurveData = curve; + CurveData = new DaK32fC32f + { + CurveDataHeader_DaK32fC32f = new CurveDataHeader + { + Format = (byte)CurveFormat.DaK32fC32f, + Degree = 2 + }, + Controls = Controls, + Knots = Knots + }; } else { @@ -105,14 +111,13 @@ public void FromTransform(Transform transform) public class KeyframeTrack { - public SortedList Keyframes = new SortedList(); + public SortedList Keyframes = []; private static Int32 FindFrame(IList list, T value, IComparer comparer = null) { - if (list == null) - throw new ArgumentNullException("list"); + ArgumentNullException.ThrowIfNull(list); - comparer = comparer ?? Comparer.Default; + comparer ??= Comparer.Default; Int32 lower = 0; Int32 upper = list.Count - 1; @@ -372,7 +377,7 @@ public void RemoveTrivialRotations() } } - if (keyframesToRemove == transforms.Count - 2 && (transforms[0] - transforms[transforms.Count - 1]).Length < 0.0001f) + if (keyframesToRemove == transforms.Count - 2 && (transforms[0] - transforms[^1]).Length < 0.0001f) { for (int i = 1; i < times.Count; i++) { @@ -534,16 +539,20 @@ public class TransformTrack public static TransformTrack FromKeyframes(KeyframeTrack keyframes) { - var track = new TransformTrack(); - track.Flags = 0; - + var track = new TransformTrack + { + Flags = 0 + }; + var translateTimes = keyframes.Keyframes.Where(f => f.Value.HasTranslation).Select(f => f.Key).ToList(); var translations = keyframes.Keyframes.Where(f => f.Value.HasTranslation).Select(f => f.Value.Translation).ToList(); if (translateTimes.Count == 1) { - var posCurve = new D3Constant32f(); - posCurve.CurveDataHeader_D3Constant32f = new CurveDataHeader { Format = (int)CurveFormat.D3Constant32f, Degree = 2 }; - posCurve.Controls = new float[3] { translations[0].X, translations[0].Y, translations[0].Z }; + var posCurve = new D3Constant32f + { + CurveDataHeader_D3Constant32f = new CurveDataHeader { Format = (int)CurveFormat.D3Constant32f, Degree = 2 }, + Controls = new float[3] { translations[0].X, translations[0].Y, translations[0].Z } + }; track.PositionCurve = new AnimationCurve { CurveData = posCurve }; } else @@ -559,9 +568,11 @@ public static TransformTrack FromKeyframes(KeyframeTrack keyframes) var rotations = keyframes.Keyframes.Where(f => f.Value.HasRotation).Select(f => f.Value.Rotation).ToList(); if (rotationTimes.Count == 1) { - var rotCurve = new D4Constant32f(); - rotCurve.CurveDataHeader_D4Constant32f = new CurveDataHeader { Format = 
(int)CurveFormat.D4Constant32f, Degree = 2 }; - rotCurve.Controls = new float[4] { rotations[0].X, rotations[0].Y, rotations[0].Z, rotations[0].W }; + var rotCurve = new D4Constant32f + { + CurveDataHeader_D4Constant32f = new CurveDataHeader { Format = (int)CurveFormat.D4Constant32f, Degree = 2 }, + Controls = new float[4] { rotations[0].X, rotations[0].Y, rotations[0].Z, rotations[0].W } + }; track.OrientationCurve = new AnimationCurve { CurveData = rotCurve }; } else @@ -580,12 +591,12 @@ public static TransformTrack FromKeyframes(KeyframeTrack keyframes) var scaleCurve = new DaConstant32f(); scaleCurve.CurveDataHeader_DaConstant32f = new CurveDataHeader { Format = (int)CurveFormat.DaConstant32f, Degree = 2 }; var m = scales[0]; - scaleCurve.Controls = new List - { + scaleCurve.Controls = + [ m[0, 0], m[0, 1], m[0, 2], m[1, 0], m[1, 1], m[1, 2], m[2, 0], m[2, 1], m[2, 2] - }; + ]; track.ScaleShearCurve = new AnimationCurve { CurveData = scaleCurve }; } else diff --git a/LSLib/Granny/Model/ColladaExporter.cs b/LSLib/Granny/Model/ColladaExporter.cs index 2231ff20..21a98420 100644 --- a/LSLib/Granny/Model/ColladaExporter.cs +++ b/LSLib/Granny/Model/ColladaExporter.cs @@ -3,29 +3,22 @@ using System.Linq; using LSLib.Granny.GR2; using LSLib.LS; -using Alphaleonis.Win32.Filesystem; using System.Xml; using System.Xml.Linq; using LSLib.LS.Enums; +using System.IO; namespace LSLib.Granny.Model { - public class ColladaMeshExporter + public class ColladaMeshExporter(Mesh mesh, ExporterOptions options) { - private Mesh ExportedMesh; - private ExporterOptions Options; + private Mesh ExportedMesh = mesh; + private ExporterOptions Options = options; private List Sources; private List Inputs; private List InputOffsets; private ulong LastInputOffset = 0; - private XmlDocument Xml = new XmlDocument(); - - - public ColladaMeshExporter(Mesh mesh, ExporterOptions options) - { - ExportedMesh = mesh; - Options = options; - } + private XmlDocument Xml = new(); private void AddInput(source collSource, string inputSemantic, string localInputSemantic = null, ulong setIndex = 0) { @@ -36,18 +29,22 @@ private void AddInput(source collSource, string inputSemantic, string localInput if (inputSemantic != null) { - var input = new InputLocal(); - input.semantic = inputSemantic; - input.source = "#" + collSource.id; + var input = new InputLocal + { + semantic = inputSemantic, + source = "#" + collSource.id + }; Inputs.Add(input); } if (localInputSemantic != null) { - var vertexInputOff = new InputLocalOffset(); - vertexInputOff.semantic = localInputSemantic; - vertexInputOff.source = "#" + collSource.id; - vertexInputOff.offset = LastInputOffset++; + var vertexInputOff = new InputLocalOffset + { + semantic = localInputSemantic, + source = "#" + collSource.id, + offset = LastInputOffset++ + }; if (localInputSemantic == "TEXCOORD" || localInputSemantic == "COLOR") { vertexInputOff.set = setIndex; @@ -109,7 +106,7 @@ private void DetermineInputsFromComponentNames(List componentNames) { if (Options.ExportUVs) { - int uvIndex = Int32.Parse(component.Substring(component.Length - 1)); + int uvIndex = Int32.Parse(component[^1..]); var uvs = ExportedMesh.PrimaryVertexData.MakeColladaUVs(ExportedMesh.Name, uvIndex, Options.FlipUVs); AddInput(uvs, null, "TEXCOORD", (ulong)uvIndex); } @@ -125,7 +122,7 @@ private void DetermineInputsFromComponentNames(List componentNames) { if (Options.ExportUVs) { - int uvIndex = Int32.Parse(component.Substring(component.Length - 1)) - 1; + int uvIndex = Int32.Parse(component[^1..]) - 1; var uvs = 
ExportedMesh.PrimaryVertexData.MakeColladaUVs(ExportedMesh.Name, uvIndex, Options.FlipUVs); AddInput(uvs, null, "TEXCOORD", (ulong)uvIndex); } @@ -309,9 +306,9 @@ private technique ExportLSLibProfile() public mesh Export() { // Jank we need to create XMLElements on the fly - Sources = new List(); - Inputs = new List(); - InputOffsets = new List(); + Sources = []; + Inputs = []; + InputOffsets = []; LastInputOffset = 0; var vertexData = ExportedMesh.PrimaryVertexData; @@ -336,21 +333,25 @@ public mesh Export() vertexData.Deduplicator.Colors.Select(color => color.DeduplicationMap).ToList() ); - var colladaMesh = new mesh(); - colladaMesh.vertices = new vertices(); - colladaMesh.vertices.id = ExportedMesh.Name + "-vertices"; - colladaMesh.vertices.input = Inputs.ToArray(); - colladaMesh.source = Sources.ToArray(); - colladaMesh.Items = new object[] { triangles }; - colladaMesh.extra = new extra[] + var colladaMesh = new mesh { - new extra + vertices = new vertices { - technique = new technique[] + id = ExportedMesh.Name + "-vertices", + input = Inputs.ToArray() + }, + source = Sources.ToArray(), + Items = [triangles], + extra = + [ + new extra { - ExportLSLibProfile() + technique = + [ + ExportLSLibProfile() + ] } - } + ] }; return colladaMesh; @@ -361,18 +362,20 @@ public mesh Export() public class ColladaExporter { [Serialization(Kind = SerializationKind.None)] - public ExporterOptions Options = new ExporterOptions(); + public ExporterOptions Options = new(); - private XmlDocument Xml = new XmlDocument(); + private XmlDocument Xml = new(); private void ExportMeshBinding(Model model, string skelRef, MeshBinding meshBinding, List geometries, List controllers, List geomNodes) { var exporter = new ColladaMeshExporter(meshBinding.Mesh, Options); var mesh = exporter.Export(); - var geom = new geometry(); - geom.id = meshBinding.Mesh.Name + "-geom"; - geom.name = meshBinding.Mesh.Name; - geom.Item = mesh; + var geom = new geometry + { + id = meshBinding.Mesh.Name + "-geom", + name = meshBinding.Mesh.Name, + Item = mesh + }; geometries.Add(geom); bool hasSkin = skelRef != null && meshBinding.Mesh.IsSkinned(); @@ -387,30 +390,38 @@ private void ExportMeshBinding(Model model, string skelRef, MeshBinding meshBind } skin = ExportSkin(meshBinding.Mesh, model.Skeleton.Bones, boneNames, geom.id); - ctrl = new controller(); - ctrl.id = meshBinding.Mesh.Name + "-skin"; - ctrl.name = meshBinding.Mesh.Name + "_Skin"; - ctrl.Item = skin; + ctrl = new controller + { + id = meshBinding.Mesh.Name + "-skin", + name = meshBinding.Mesh.Name + "_Skin", + Item = skin + }; controllers.Add(ctrl); } - var geomNode = new node(); - geomNode.id = geom.name + "-node"; - geomNode.name = geom.name; - geomNode.type = NodeType.NODE; + var geomNode = new node + { + id = geom.name + "-node", + name = geom.name, + type = NodeType.NODE + }; if (hasSkin) { - var controllerInstance = new instance_controller(); - controllerInstance.url = "#" + ctrl.id; - controllerInstance.skeleton = new string[] { "#" + skelRef }; - geomNode.instance_controller = new instance_controller[] { controllerInstance }; + var controllerInstance = new instance_controller + { + url = "#" + ctrl.id, + skeleton = ["#" + skelRef] + }; + geomNode.instance_controller = [controllerInstance]; } else { - var geomInstance = new instance_geometry(); - geomInstance.url = "#" + geom.id; - geomNode.instance_geometry = new instance_geometry[] { geomInstance }; + var geomInstance = new instance_geometry + { + url = "#" + geom.id + }; + geomNode.instance_geometry = 
[geomInstance]; } geomNodes.Add(geomNode); @@ -459,8 +470,8 @@ private skin ExportSkin(Mesh mesh, List bones, Dictionary na }); } - var jointSource = ColladaUtils.MakeNameSource(mesh.Name, "joints", new string[] { "JOINT" }, joints.ToArray()); - var poseSource = ColladaUtils.MakeFloatSource(mesh.Name, "poses", new string[] { "TRANSFORM" }, poses.ToArray(), 16, "float4x4"); + var jointSource = ColladaUtils.MakeNameSource(mesh.Name, "joints", ["JOINT"], joints.ToArray()); + var poseSource = ColladaUtils.MakeFloatSource(mesh.Name, "poses", ["TRANSFORM"], poses.ToArray(), 16, "float4x4"); var weightsSource = mesh.PrimaryVertexData.MakeBoneWeights(mesh.Name); var vertices = mesh.PrimaryVertexData.Deduplicator.Vertices.Uniques; @@ -485,38 +496,52 @@ private skin ExportSkin(Mesh mesh, List bones, Dictionary na vertexInfluenceCounts.Add(influences); } - var jointOffsets = new InputLocalOffset(); - jointOffsets.semantic = "JOINT"; - jointOffsets.source = "#" + jointSource.id; - jointOffsets.offset = 0; + var jointOffsets = new InputLocalOffset + { + semantic = "JOINT", + source = "#" + jointSource.id, + offset = 0 + }; - var weightOffsets = new InputLocalOffset(); - weightOffsets.semantic = "WEIGHT"; - weightOffsets.source = "#" + weightsSource.id; - weightOffsets.offset = 1; + var weightOffsets = new InputLocalOffset + { + semantic = "WEIGHT", + source = "#" + weightsSource.id, + offset = 1 + }; - var vertWeights = new skinVertex_weights(); - vertWeights.count = (ulong)vertices.Count; - vertWeights.input = new InputLocalOffset[] { jointOffsets, weightOffsets }; - vertWeights.v = string.Join(" ", vertexInfluences.Select(x => x.ToString()).ToArray()); - vertWeights.vcount = string.Join(" ", vertexInfluenceCounts.Select(x => x.ToString()).ToArray()); + var vertWeights = new skinVertex_weights + { + count = (ulong)vertices.Count, + input = [jointOffsets, weightOffsets], + v = string.Join(" ", vertexInfluences.Select(x => x.ToString()).ToArray()), + vcount = string.Join(" ", vertexInfluenceCounts.Select(x => x.ToString()).ToArray()) + }; - var skin = new skin(); - skin.source1 = "#" + geometryId; - skin.bind_shape_matrix = "1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1"; + var skin = new skin + { + source1 = "#" + geometryId, + bind_shape_matrix = "1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1", - var skinJoints = new skinJoints(); - var skinJointInput = new InputLocal(); - skinJointInput.semantic = "JOINT"; - skinJointInput.source = "#" + jointSource.id; - var skinInvBindInput = new InputLocal(); - skinInvBindInput.semantic = "INV_BIND_MATRIX"; - skinInvBindInput.source = "#" + poseSource.id; - skinJoints.input = new InputLocal[] { skinJointInput, skinInvBindInput }; + joints = new skinJoints + { + input = [ + new InputLocal + { + semantic = "JOINT", + source = "#" + jointSource.id + }, + new InputLocal + { + semantic = "INV_BIND_MATRIX", + source = "#" + poseSource.id + } + ] + }, - skin.joints = skinJoints; - skin.source = new source[] { jointSource, poseSource, weightsSource }; - skin.vertex_weights = vertWeights; + source = [jointSource, poseSource, weightsSource], + vertex_weights = vertWeights + }; return skin; } @@ -619,7 +644,6 @@ public List ExportKeyframeTrack(TransformTrack transformTrack, BG3Tra track.InterpolateFrames(); var anims = new List(); - var inputs = new List(); var outputs = new List(track.Keyframes.Count * 16); foreach (var keyframe in track.Keyframes.Values) @@ -658,53 +682,60 @@ public List ExportKeyframeTrack(TransformTrack transformTrack, BG3Tra interpolations.Add(interpolations[0]); } - var 
knotsSource = ColladaUtils.MakeFloatSource(name, "inputs", new string[] { "TIME" }, knots.ToArray()); - var knotsInput = new InputLocal(); - knotsInput.semantic = "INPUT"; - knotsInput.source = "#" + knotsSource.id; - inputs.Add(knotsInput); - - var outSource = ColladaUtils.MakeFloatSource(name, "outputs", new string[] { "TRANSFORM" }, outputs.ToArray(), 16, "float4x4"); - var outInput = new InputLocal(); - outInput.semantic = "OUTPUT"; - outInput.source = "#" + outSource.id; - inputs.Add(outInput); - - var interpSource = ColladaUtils.MakeNameSource(name, "interpolations", new string[] { "INTERPOLATION" }, interpolations.ToArray()); + var knotsSource = ColladaUtils.MakeFloatSource(name, "inputs", ["TIME"], knots.ToArray()); + var outSource = ColladaUtils.MakeFloatSource(name, "outputs", ["TRANSFORM"], outputs.ToArray(), 16, "float4x4"); + var interpSource = ColladaUtils.MakeNameSource(name, "interpolations", ["INTERPOLATION"], interpolations.ToArray()); - var interpInput = new InputLocal(); - interpInput.semantic = "INTERPOLATION"; - interpInput.source = "#" + interpSource.id; - inputs.Add(interpInput); - - var sampler = new sampler(); - sampler.id = name + "_sampler"; - sampler.input = inputs.ToArray(); - - var channel = new channel(); - channel.source = "#" + sampler.id; - channel.target = target; - - var animation = new animation(); - animation.id = name; - animation.name = name; - var animItems = new List(); - animItems.Add(knotsSource); - animItems.Add(outSource); - animItems.Add(interpSource); - animItems.Add(sampler); - animItems.Add(channel); - animation.Items = animItems.ToArray(); + var sampler = new sampler + { + id = name + "_sampler", + input = + [ + new InputLocal + { + semantic = "INTERPOLATION", + source = "#" + interpSource.id + }, + new InputLocal + { + semantic = "OUTPUT", + source = "#" + outSource.id + }, + new InputLocal + { + semantic = "INPUT", + source = "#" + knotsSource.id + } + ] + }; - animation.extra = new extra[] + var channel = new channel { - new extra - { - technique = new technique[] + source = "#" + sampler.id, + target = target + }; + + var animation = new animation + { + id = name, + name = name, + Items = [ + knotsSource, + outSource, + interpSource, + sampler, + channel + ], + extra = + [ + new extra { - ExportAnimationLSLibProfile(extData) + technique = + [ + ExportAnimationLSLibProfile(extData) + ] } - } + ] }; anims.Add(animation); @@ -832,25 +863,29 @@ private technique ExportRootLSLibProfile(Root root) public void Export(Root root, string outputPath) { - var collada = new COLLADA(); - var asset = new asset(); var contributor = new assetContributor(); if (root.ArtToolInfo != null) contributor.authoring_tool = root.ArtToolInfo.FromArtToolName; else contributor.authoring_tool = "LSLib COLLADA Exporter v" + Common.LibraryVersion(); - asset.contributor = new assetContributor[] { contributor }; - asset.created = DateTime.Now; - asset.modified = DateTime.Now; - asset.unit = new assetUnit(); - asset.unit.name = "meter"; + + var asset = new asset + { + contributor = [contributor], + created = DateTime.Now, + modified = DateTime.Now, + unit = new assetUnit + { + name = "meter" + }, + up_axis = UpAxisType.Y_UP + }; + // TODO: Handle up vector, etc. properly? 
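// Illustrative sketch (not from LSLib): the hunk above is one instance of a
// pattern applied throughout this series — sequential property assignments
// collapsed into a single C# object initializer. AssetInfo and its members
// below are assumed names for demonstration only.
using System;

public sealed class AssetInfo
{
    public string AuthoringTool { get; set; } = "";
    public DateTime Created { get; set; }
    public float UnitsPerMeter { get; set; } = 1.0f;
}

public static class ObjectInitializerSketch
{
    public static AssetInfo Build()
    {
        // Old style: construct, then assign each property in turn.
        var before = new AssetInfo();
        before.AuthoringTool = "Example Tool";
        before.Created = DateTime.Now;

        // New style, as adopted in the hunk above: one initializer expression.
        var after = new AssetInfo
        {
            AuthoringTool = before.AuthoringTool,
            Created = before.Created,
            UnitsPerMeter = 1.0f
        };
        return after;
    }
}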
if (root.ArtToolInfo != null) asset.unit.meter = root.ArtToolInfo.UnitsPerMeter; else asset.unit.meter = 1; - asset.up_axis = UpAxisType.Y_UP; - collada.asset = asset; var geometries = new List(); var controllers = new List(); @@ -864,18 +899,22 @@ public void Export(Root root, string outputPath) { var anims = ExportAnimations(anim); animations.AddRange(anims); - var clip = new animation_clip(); - clip.id = anim.Name + "_Animation"; - clip.name = anim.Name; - clip.start = 0.0; - clip.end = anim.Duration; - clip.endSpecified = true; + var clip = new animation_clip + { + id = anim.Name + "_Animation", + name = anim.Name, + start = 0.0, + end = anim.Duration, + endSpecified = true + }; var animInstances = new List(); foreach (var animChannel in anims) { - var instance = new InstanceWithExtra(); - instance.url = "#" + animChannel.id; + var instance = new InstanceWithExtra + { + url = "#" + animChannel.id + }; animInstances.Add(instance); } @@ -887,65 +926,79 @@ public void Export(Root root, string outputPath) if (animations.Count > 0) { - var animationLib = new library_animations(); - animationLib.animation = animations.ToArray(); + var animationLib = new library_animations + { + animation = animations.ToArray() + }; rootElements.Add(animationLib); } if (animationClips.Count > 0) { - var animationClipLib = new library_animation_clips(); - animationClipLib.animation_clip = animationClips.ToArray(); + var animationClipLib = new library_animation_clips + { + animation_clip = animationClips.ToArray() + }; rootElements.Add(animationClipLib); } if (geometries.Count > 0) { - var geometryLib = new library_geometries(); - geometryLib.geometry = geometries.ToArray(); + var geometryLib = new library_geometries + { + geometry = geometries.ToArray() + }; rootElements.Add(geometryLib); } if (controllers.Count > 0) { - var controllerLib = new library_controllers(); - controllerLib.controller = controllers.ToArray(); + var controllerLib = new library_controllers + { + controller = controllers.ToArray() + }; rootElements.Add(controllerLib); } var visualScenes = new library_visual_scenes(); - var visualScene = new visual_scene(); - visualScene.id = "DefaultVisualScene"; - visualScene.name = "unnamed"; - - visualScene.node = geomNodes.ToArray(); - visualScenes.visual_scene = new visual_scene[] { visualScene }; + var visualScene = new visual_scene + { + id = "DefaultVisualScene", + name = "unnamed", + node = geomNodes.ToArray() + }; + visualScenes.visual_scene = [visualScene]; - var visualSceneInstance = new InstanceWithExtra(); - visualSceneInstance.url = "#DefaultVisualScene"; + var visualSceneInstance = new InstanceWithExtra + { + url = "#DefaultVisualScene" + }; rootElements.Add(visualScenes); - var scene = new COLLADAScene(); - scene.instance_visual_scene = visualSceneInstance; - collada.scene = scene; - - collada.Items = rootElements.ToArray(); + var scene = new COLLADAScene + { + instance_visual_scene = visualSceneInstance + }; - collada.extra = new extra[] + var collada = new COLLADA { - new extra - { - technique = new technique[] + asset = asset, + scene = scene, + Items = rootElements.ToArray(), + extra = + [ + new extra { - ExportRootLSLibProfile(root) + technique = + [ + ExportRootLSLibProfile(root) + ] } - } + ] }; - using (var stream = File.Open(outputPath, System.IO.FileMode.Create)) - { - collada.Save(stream); - } + using var stream = File.Open(outputPath, FileMode.Create); + collada.Save(stream); } } } diff --git a/LSLib/Granny/Model/ColladaHelpers.cs b/LSLib/Granny/Model/ColladaHelpers.cs index 
feb1659a..b024793b 100644 --- a/LSLib/Granny/Model/ColladaHelpers.cs +++ b/LSLib/Granny/Model/ColladaHelpers.cs @@ -1,5 +1,5 @@ using LSLib.Granny.GR2; -using OpenTK; +using OpenTK.Mathematics; using System; using System.Collections.Generic; @@ -38,15 +38,13 @@ public static Matrix4 ToMatrix4(this rotate r) public static Matrix4 TranslationToMatrix4(this TargetableFloat3 t) { - Matrix4 trans; - Matrix4.CreateTranslation((float)t.Values[0], (float)t.Values[1], (float)t.Values[2], out trans); + Matrix4.CreateTranslation((float)t.Values[0], (float)t.Values[1], (float)t.Values[2], out Matrix4 trans); return trans; } public static Matrix4 ScaleToMatrix4(this TargetableFloat3 t) { - Matrix4 scale; - Matrix4.CreateScale((float)t.Values[0], (float)t.Values[1], (float)t.Values[2], out scale); + Matrix4.CreateScale((float)t.Values[0], (float)t.Values[1], (float)t.Values[2], out Matrix4 scale); return scale; } @@ -59,27 +57,14 @@ public static Matrix4 GetLocalTransform(this node n) for (var i = 0; i < n.ItemsElementName.Length; i++) { var name = n.ItemsElementName[i]; - switch (name) + accum = name switch { - case ItemsChoiceType2.matrix: - accum = (n.Items[i] as matrix).ToMatrix4() * Matrix4.Identity; - break; - - case ItemsChoiceType2.translate: - accum = (n.Items[i] as TargetableFloat3).TranslationToMatrix4() * Matrix4.Identity; - break; - - case ItemsChoiceType2.rotate: - accum = (n.Items[i] as rotate).ToMatrix4() * Matrix4.Identity; - break; - - case ItemsChoiceType2.scale: - accum = (n.Items[i] as TargetableFloat3).ScaleToMatrix4() * Matrix4.Identity; - break; - - default: - throw new Exception("Unsupported Collada NODE transform: " + name); - } + ItemsChoiceType2.matrix => (n.Items[i] as matrix).ToMatrix4() * Matrix4.Identity, + ItemsChoiceType2.translate => (n.Items[i] as TargetableFloat3).TranslationToMatrix4() * Matrix4.Identity, + ItemsChoiceType2.rotate => (n.Items[i] as rotate).ToMatrix4() * Matrix4.Identity, + ItemsChoiceType2.scale => (n.Items[i] as TargetableFloat3).ScaleToMatrix4() * Matrix4.Identity, + _ => throw new Exception("Unsupported Collada NODE transform: " + name), + }; } } @@ -203,14 +188,14 @@ public static List StringsToIntegers(String s) { if (startingPos != -1) { - floats.Add(int.Parse(s.Substring(startingPos, i - startingPos))); + floats.Add(int.Parse(s[startingPos..i])); startingPos = -1; } } } if (startingPos != -1) - floats.Add(int.Parse(s.Substring(startingPos, s.Length - startingPos))); + floats.Add(int.Parse(s[startingPos..])); return floats; } diff --git a/LSLib/Granny/Model/ColladaImporter.cs b/LSLib/Granny/Model/ColladaImporter.cs index d20ef062..5c38f59d 100644 --- a/LSLib/Granny/Model/ColladaImporter.cs +++ b/LSLib/Granny/Model/ColladaImporter.cs @@ -1,10 +1,10 @@ using System; using System.Collections.Generic; +using System.IO; using System.Linq; -using Alphaleonis.Win32.Filesystem; using LSLib.Granny.GR2; using LSLib.LS; -using OpenTK; +using OpenTK.Mathematics; namespace LSLib.Granny.Model { @@ -17,8 +17,10 @@ internal class ColladaSource public static ColladaSource FromCollada(source src) { - var source = new ColladaSource(); - source.id = src.id; + var source = new ColladaSource + { + id = src.id + }; var accessor = src.technique_common.accessor; // TODO: check src.#ID? 
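// Illustrative sketch (not from LSLib): the next hunk rewrites the
// "empty arrays deserialize as null" workaround with the null-coalescing
// assignment operator (??=) and a C# 12 collection expression ([]); this
// assumes the project targets a language version that supports both.
// FloatArray below is an assumed stand-in for the COLLADA float_array type.
public sealed class FloatArray
{
    public double[] Values { get; set; }
}

public static class NullArrayWorkaroundSketch
{
    public static int CountValues(FloatArray floats)
    {
        // Equivalent to: if (floats.Values == null) floats.Values = new double[] { };
        floats.Values ??= [];
        return floats.Values.Length;
    }
}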
@@ -29,8 +31,7 @@ public static ColladaSource FromCollada(source src) { floats = src.Item as float_array; // Workaround for empty arrays being null - if (floats.Values == null) - floats.Values = new double[] { }; + floats.Values ??= []; if ((int)floats.count != floats.Values.Length || floats.count < accessor.stride * accessor.count + accessor.offset) throw new ParsingException("Float source data size mismatch. Check source and accessor item counts."); @@ -39,8 +40,7 @@ public static ColladaSource FromCollada(source src) { names = src.Item as Name_array; // Workaround for empty arrays being null - if (names.Values == null) - names.Values = new string[] { }; + names.Values ??= []; if ((int)names.count != names.Values.Length || names.count < accessor.stride * accessor.count + accessor.offset) throw new ParsingException("Name source data size mismatch. Check source and accessor item counts."); @@ -51,8 +51,7 @@ public static ColladaSource FromCollada(source src) var paramOffset = 0; foreach (var param in accessor.param) { - if (param.name == null) - param.name = "default"; + param.name ??= "default"; if (param.type == "float" || param.type == "double") { var items = new List((int)accessor.count); @@ -115,7 +114,7 @@ class RootBoneInfo public class ColladaImporter { [Serialization(Kind = SerializationKind.None)] - public ExporterOptions Options = new ExporterOptions(); + public ExporterOptions Options = new(); private bool ZUp = false; @@ -128,12 +127,14 @@ public class ColladaImporter private ArtToolInfo ImportArtToolInfo(COLLADA collada) { ZUp = false; - var toolInfo = new ArtToolInfo(); - toolInfo.FromArtToolName = "Unknown"; - toolInfo.ArtToolMajorRevision = 1; - toolInfo.ArtToolMinorRevision = 0; - toolInfo.ArtToolPointerSize = Options.Is64Bit ? 64 : 32; - toolInfo.Origin = new float[] { 0, 0, 0 }; + var toolInfo = new ArtToolInfo + { + FromArtToolName = "Unknown", + ArtToolMajorRevision = 1, + ArtToolMinorRevision = 0, + ArtToolPointerSize = Options.Is64Bit ? 64 : 32, + Origin = [0, 0, 0] + }; toolInfo.SetYUp(); if (collada.asset != null) @@ -176,13 +177,14 @@ private ArtToolInfo ImportArtToolInfo(COLLADA collada) private ExporterInfo ImportExporterInfo(COLLADA collada) { - var exporterInfo = new ExporterInfo(); - exporterInfo.ExporterName = String.Format("LSLib GR2 Exporter v{0}", Common.LibraryVersion()); - exporterInfo.ExporterMajorRevision = Common.MajorVersion; - exporterInfo.ExporterMinorRevision = Common.MinorVersion; - exporterInfo.ExporterBuildNumber = 0; - exporterInfo.ExporterCustomization = Common.PatchVersion; - return exporterInfo; + return new ExporterInfo + { + ExporterName = $"LSLib GR2 Exporter v{Common.LibraryVersion()}", + ExporterMajorRevision = Common.MajorVersion, + ExporterMinorRevision = Common.MinorVersion, + ExporterBuildNumber = 0, + ExporterCustomization = Common.PatchVersion + }; } private DivinityModelFlag DetermineSkeletonModelFlagsFromModels(Root root, Skeleton skeleton, DivinityModelFlag meshFlagOverrides) @@ -219,10 +221,7 @@ private void BuildExtendedData(Root root) modelFlags = mesh.ExtendedData.UserMeshProperties.MeshFlags; } - if (mesh.ExtendedData == null) - { - mesh.ExtendedData = DivinityMeshExtendedData.Make(); - } + mesh.ExtendedData ??= DivinityMeshExtendedData.Make(); mesh.ExtendedData.UserMeshProperties.MeshFlags = modelFlags; mesh.ExtendedData.UpdateFromModelInfo(mesh, Options.ModelInfoFormat); } @@ -242,11 +241,7 @@ private void BuildExtendedData(Root root) foreach (var bone in skeleton.Bones ?? 
Enumerable.Empty()) { - if (bone.ExtendedData == null) - { - bone.ExtendedData = new DivinityBoneExtendedData(); - } - + bone.ExtendedData ??= new DivinityBoneExtendedData(); var userDefinedProperties = UserDefinedPropertiesHelpers.MeshFlagsToUserDefinedProperties(accumulatedFlags); bone.ExtendedData.UserDefinedProperties = userDefinedProperties; bone.ExtendedData.IsRigid = (accumulatedFlags.IsRigid()) ? 1 : 0; @@ -282,19 +277,13 @@ private void FindRootBones(List parents, node node, List roo public static technique FindExporterExtraData(extra[] extras) { - if (extras != null) + foreach (var extra in extras ?? Enumerable.Empty()) { - foreach (var extra in extras) + foreach (var technique in extra.technique ?? Enumerable.Empty()) { - if (extra.technique != null) + if (technique.profile == "LSTools") { - foreach (var technique in extra.technique) - { - if (technique.profile == "LSTools") - { - return technique; - } - } + return technique; } } } @@ -445,7 +434,7 @@ private void ValidateLSLibProfileMetadataVersion(string ver) } } - private void LoadColladaLSLibProfileData(Root root, COLLADA collada) + private void LoadColladaLSLibProfileData(COLLADA collada) { var technique = FindExporterExtraData(collada.extra); if (technique == null || technique.Any == null) return; @@ -476,12 +465,32 @@ private Mesh ImportMesh(geometry geom, mesh mesh, VertexDescriptor vertexFormat) bool isSkinned = SkinnedMeshes.Contains(geom.id); collada.ImportFromCollada(mesh, vertexFormat, isSkinned, Options); - var m = new Mesh(); - m.VertexFormat = collada.InternalVertexType; - m.Name = "Unnamed"; + var m = new Mesh + { + VertexFormat = collada.InternalVertexType, + Name = "Unnamed", + + PrimaryVertexData = new VertexData + { + Vertices = collada.ConsolidatedVertices + }, + + PrimaryTopology = new TriTopology + { + Indices = collada.ConsolidatedIndices, + Groups = [ + new TriTopologyGroup + { + MaterialIndex = 0, + TriFirst = 0, + TriCount = collada.TriangleCount + } + ] + }, - m.PrimaryVertexData = new VertexData(); - m.PrimaryVertexData.Vertices = collada.ConsolidatedVertices; + MaterialBindings = [new MaterialBinding()], + OriginalToConsolidatedVertexIndexMap = collada.OriginalToConsolidatedVertexIndexMap + }; if (!Options.StripMetadata) { @@ -493,22 +502,6 @@ private Mesh ImportMesh(geometry geom, mesh mesh, VertexDescriptor vertexFormat) m.PrimaryVertexData.VertexComponentNames = null; } - m.PrimaryTopology = new TriTopology(); - m.PrimaryTopology.Indices = collada.ConsolidatedIndices; - m.PrimaryTopology.Groups = new List(); - var triGroup = new TriTopologyGroup(); - triGroup.MaterialIndex = 0; - triGroup.TriFirst = 0; - triGroup.TriCount = collada.TriangleCount; - m.PrimaryTopology.Groups.Add(triGroup); - - m.MaterialBindings = new List(); - m.MaterialBindings.Add(new MaterialBinding()); - - // m.BoneBindings; - TODO - - m.OriginalToConsolidatedVertexIndexMap = collada.OriginalToConsolidatedVertexIndexMap; - MakeExtendedData(mesh, m); Utils.Info(String.Format("Imported {0} mesh ({1} tri groups, {2} tris)", @@ -534,8 +527,7 @@ private void ImportSkin(Root root, skin skin) if (skin.source1[0] != '#') throw new ParsingException("Only ID references are supported for skin geometries"); - Mesh mesh = null; - if (!ColladaGeometries.TryGetValue(skin.source1.Substring(1), out mesh)) + if (!ColladaGeometries.TryGetValue(skin.source1[1..], out Mesh mesh)) throw new ParsingException("Skin references nonexistent mesh: " + skin.source1); if (!mesh.VertexFormat.HasBoneWeights) @@ -559,8 +551,7 @@ private void ImportSkin(Root 
root, skin skin) if (input.source[0] != '#') throw new ParsingException("Only ID references are supported for joint input sources"); - ColladaSource inputSource = null; - if (!sources.TryGetValue(input.source.Substring(1), out inputSource)) + if (!sources.TryGetValue(input.source.Substring(1), out ColladaSource inputSource)) throw new ParsingException("Joint input source does not exist: " + input.source); if (input.semantic == "JOINT") @@ -570,12 +561,11 @@ private void ImportSkin(Root root, skin skin) throw new ParsingException("Joint input source 'JOINT' must contain array of names."); var skeleton = root.Skeletons[0]; - joints = new List(); + joints = []; foreach (var name in jointNames) { - Bone bone = null; var lookupName = name.Replace("_x0020_", " "); - if (!skeleton.BonesBySID.TryGetValue(lookupName, out bone)) + if (!skeleton.BonesBySID.TryGetValue(lookupName, out Bone bone)) throw new ParsingException("Joint name list references nonexistent bone: " + lookupName); joints.Add(bone); @@ -628,8 +618,7 @@ private void ImportSkin(Root root, skin skin) if (input.source[0] != '#') throw new ParsingException("Only ID references are supported for weight input sources"); - ColladaSource inputSource = null; - if (!sources.TryGetValue(input.source.Substring(1), out inputSource)) + if (!sources.TryGetValue(input.source[1..], out ColladaSource inputSource)) throw new ParsingException("Weight input source does not exist: " + input.source); if (!inputSource.FloatParams.TryGetValue("WEIGHT", out weights)) @@ -658,8 +647,7 @@ private void ImportSkin(Root root, skin skin) var weightIndex = influences[offset + weightInputIndex]; var joint = joints[jointIndex]; var weight = weights[weightIndex]; - if (!boundBones.Contains(joint)) - boundBones.Add(joint); + boundBones.Add(joint); offset += stride; } @@ -667,7 +655,7 @@ private void ImportSkin(Root root, skin skin) if (boundBones.Count > 127) throw new ParsingException("D:OS supports at most 127 bound bones per mesh."); - mesh.BoneBindings = new List(); + mesh.BoneBindings = []; var boneToIndexMaps = new Dictionary(); for (var i = 0; i < joints.Count; i++) { @@ -678,22 +666,24 @@ private void ImportSkin(Root root, skin skin) // Hopefully the Collada ones are all equal ... var iwt = invBindMatrices[i]; // iwt.Transpose(); - joints[i].InverseWorldTransform = new float[] { + joints[i].InverseWorldTransform = [ iwt[0, 0], iwt[1, 0], iwt[2, 0], iwt[3, 0], iwt[0, 1], iwt[1, 1], iwt[2, 1], iwt[3, 1], iwt[0, 2], iwt[1, 2], iwt[2, 2], iwt[3, 2], iwt[0, 3], iwt[1, 3], iwt[2, 3], iwt[3, 3] - }; + ]; // Bind all bones that affect vertices to the mesh, so we can reference them // later from the vertexes BoneIndices. 
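// Illustrative sketch (not from LSLib): two related cleanups recur in this
// importer — TryGetValue lookups now declare their out variable inline, and
// Substring() calls on '#'-prefixed ID references become range-operator
// slices. The dictionary and reference strings below are assumed examples.
using System;
using System.Collections.Generic;

public static class IdLookupSketch
{
    public static string Resolve(Dictionary<string, string> sources, string reference)
    {
        if (reference.Length == 0 || reference[0] != '#')
            throw new ArgumentException("Only ID references are supported", nameof(reference));

        // reference[1..] is equivalent to reference.Substring(1).
        if (!sources.TryGetValue(reference[1..], out string value))
            throw new KeyNotFoundException("Referenced source does not exist: " + reference);

        return value;
    }
}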
- var binding = new BoneBinding(); - binding.BoneName = joints[i].Name; - // TODO - // Use small bounding box values, as it interferes with object placement - // in D:OS 2 (after the Gift Bag 2 update) - binding.OBBMin = new float[] { -0.1f, -0.1f, -0.1f }; - binding.OBBMax = new float[] { 0.1f, 0.1f, 0.1f }; + var binding = new BoneBinding + { + BoneName = joints[i].Name, + // TODO + // Use small bounding box values, as it interferes with object placement + // in D:OS 2 (after the Gift Bag 2 update) + OBBMin = [-0.1f, -0.1f, -0.1f], + OBBMax = [0.1f, 0.1f, 0.1f] + }; mesh.BoneBindings.Add(binding); boneToIndexMaps.Add(joints[i], boneToIndexMaps.Count); } @@ -748,7 +738,7 @@ private void ImportSkin(Root root, skin skin) if (skin.bind_shape_matrix != null) { - var bindShapeFloats = skin.bind_shape_matrix.Trim().Split(new char[] { ' ' }).Select(s => Single.Parse(s)).ToArray(); + var bindShapeFloats = skin.bind_shape_matrix.Trim().Split([' ']).Select(s => Single.Parse(s)).ToArray(); var bindShapeMat = ColladaHelpers.FloatsToMatrix(bindShapeFloats); bindShapeMat.Transpose(); @@ -794,7 +784,7 @@ private void UpdateOBBs(Skeleton skeleton, Mesh mesh) var bone = skeleton.GetBoneByName(mesh.BoneBindings[bi].BoneName); var invWorldTransform = ColladaHelpers.FloatsToMatrix(bone.InverseWorldTransform); - var transformed = Vector3.Transform(vert.Position, invWorldTransform); + var transformed = Vector3.TransformPosition(vert.Position, invWorldTransform); obb.Min.X = Math.Min(obb.Min.X, transformed.X); obb.Min.Y = Math.Min(obb.Min.Y, transformed.Y); @@ -812,13 +802,13 @@ private void UpdateOBBs(Skeleton skeleton, Mesh mesh) var obb = obbs[i]; if (obb.NumVerts > 0) { - mesh.BoneBindings[i].OBBMin = new float[] { obb.Min.X, obb.Min.Y, obb.Min.Z }; - mesh.BoneBindings[i].OBBMax = new float[] { obb.Max.X, obb.Max.Y, obb.Max.Z }; + mesh.BoneBindings[i].OBBMin = [obb.Min.X, obb.Min.Y, obb.Min.Z]; + mesh.BoneBindings[i].OBBMax = [obb.Max.X, obb.Max.Y, obb.Max.Z]; } else { - mesh.BoneBindings[i].OBBMin = new float[] { 0.0f, 0.0f, 0.0f }; - mesh.BoneBindings[i].OBBMax = new float[] { 0.0f, 0.0f, 0.0f }; + mesh.BoneBindings[i].OBBMin = [0.0f, 0.0f, 0.0f]; + mesh.BoneBindings[i].OBBMax = [0.0f, 0.0f, 0.0f]; } } } @@ -851,10 +841,10 @@ public void ImportAnimations(IEnumerable anims, Root root, Skeleton s var trackGroup = new TrackGroup { Name = (skeleton != null) ? 
skeleton.Name : "Dummy_Root", - TransformTracks = new List(), + TransformTracks = [], InitialPlacement = new Transform(), AccumulationFlags = 2, - LoopTranslation = new float[] { 0, 0, 0 } + LoopTranslation = [0, 0, 0] }; var animation = new Animation @@ -865,7 +855,7 @@ public void ImportAnimations(IEnumerable anims, Root root, Skeleton s DefaultLoopCount = 1, Flags = 1, Duration = .0f, - TrackGroups = new List { trackGroup } + TrackGroups = [trackGroup] }; foreach (var colladaTrack in anims) @@ -899,7 +889,7 @@ public void ImportAnimation(animation colladaAnim, Animation animation, TrackGro var duration = .0f; if (childAnims < colladaAnim.Items.Length) { - ColladaAnimation importAnim = new ColladaAnimation(); + ColladaAnimation importAnim = new(); if (importAnim.ImportFromCollada(colladaAnim, skeleton)) { duration = Math.Max(duration, importAnim.Duration); @@ -920,26 +910,26 @@ public Root Import(string inputPath) collada = COLLADA.Load(stream); } - var root = new Root(); - LoadColladaLSLibProfileData(root, collada); - root.ArtToolInfo = ImportArtToolInfo(collada); - if (!Options.StripMetadata) - { - root.ExporterInfo = ImportExporterInfo(collada); - } - - root.FromFileName = inputPath; + LoadColladaLSLibProfileData(collada); - root.Skeletons = new List(); - root.VertexDatas = new List(); - root.TriTopologies = new List(); - root.Meshes = new List(); - root.Models = new List(); - root.TrackGroups = new List(); - root.Animations = new List(); + var root = new Root + { + ArtToolInfo = ImportArtToolInfo(collada), + ExporterInfo = Options.StripMetadata ? null : ImportExporterInfo(collada), + + FromFileName = inputPath, + + Skeletons = [], + VertexDatas = [], + TriTopologies = [], + Meshes = [], + Models = [], + TrackGroups = [], + Animations = [] + }; - ColladaGeometries = new Dictionary(); - SkinnedMeshes = new HashSet(); + ColladaGeometries = []; + SkinnedMeshes = []; var collGeometries = new List(); var collSkins = new List(); @@ -961,7 +951,7 @@ public Root Import(string inputPath) if (controller.Item is skin) { collSkins.Add(controller.Item as skin); - SkinnedMeshes.Add((controller.Item as skin).source1.Substring(1)); + SkinnedMeshes.Add((controller.Item as skin).source1[1..]); } else { @@ -982,7 +972,7 @@ public Root Import(string inputPath) foreach (var node in scene.node) { collNodes.Add(node); - FindRootBones(new List(), node, rootBones); + FindRootBones([], node, rootBones); } } } @@ -1016,7 +1006,7 @@ public Root Import(string inputPath) } else { - Utils.Warn(String.Format("Library {0} is unsupported and will be ignored", item.GetType().Name)); + Utils.Warn($"Library {item.GetType().Name} is unsupported and will be ignored"); } } @@ -1031,9 +1021,8 @@ public Root Import(string inputPath) foreach (var geometry in collGeometries) { - VertexDescriptor vertexFormat = null; // Use the override vertex format, if one was specified - Options.VertexFormats.TryGetValue(geometry.name, out vertexFormat); + Options.VertexFormats.TryGetValue(geometry.name, out VertexDescriptor vertexFormat); var mesh = ImportMesh(root, geometry.name, geometry, geometry.Item as mesh, vertexFormat); ColladaGeometries.Add(geometry.id, mesh); } diff --git a/LSLib/Granny/Model/ColladaMesh.cs b/LSLib/Granny/Model/ColladaMesh.cs index a8a05126..387f2258 100644 --- a/LSLib/Granny/Model/ColladaMesh.cs +++ b/LSLib/Granny/Model/ColladaMesh.cs @@ -1,7 +1,7 @@ using System; using System.Collections.Generic; using System.Linq; -using OpenTK; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model @@ 
-24,8 +24,8 @@ public class ColladaMesh private int NormalsInputIndex = -1; private int TangentsInputIndex = -1; private int BinormalsInputIndex = -1; - private List UVInputIndices = new List(); - private List ColorInputIndices = new List(); + private List UVInputIndices = []; + private List ColorInputIndices = []; private VertexDescriptor InputVertexType; private VertexDescriptor OutputVertexType; private bool HasNormals = false; @@ -77,7 +77,7 @@ public int GetHashCode(int[] obj) void computeTangents() { // Check if the vertex format has at least one UV set - if (ConsolidatedVertices.Count() > 0) + if (ConsolidatedVertices.Count > 0) { var v = ConsolidatedVertices[0]; if (v.Format.TextureCoordinates == 0) @@ -194,7 +194,7 @@ private void computeNormals() { for (var vertexIdx = 0; vertexIdx < Vertices.Count; vertexIdx++) { - Vector3 N = new Vector3(0, 0, 0); + Vector3 N = new(0, 0, 0); var numIndices = VertexIndexCount(); for (int triVertIdx = 0; triVertIdx < numIndices; triVertIdx++) { @@ -206,7 +206,7 @@ private void computeNormals() VertexIndex(baseIdx + 1), VertexIndex(baseIdx + 2) }; - N = N + triangleNormalFromVertex(indices, triVertIdx - baseIdx); + N += triangleNormalFromVertex(indices, triVertIdx - baseIdx); } } @@ -266,8 +266,7 @@ private ColladaSource FindSource(string id) if (id.Length == 0 || id[0] != '#') throw new ParsingException("Only ID references are supported for input sources: " + id); - ColladaSource inputSource = null; - if (!Sources.TryGetValue(id.Substring(1), out inputSource)) + if (!Sources.TryGetValue(id.Substring(1), out ColladaSource inputSource)) throw new ParsingException("Input source does not exist: " + id); return inputSource; @@ -361,7 +360,7 @@ private void ImportVertices() private void ImportColors() { ColorInputIndices.Clear(); - Colors = new List>(); + Colors = []; foreach (var input in Inputs) { if (input.semantic == "COLOR") @@ -402,7 +401,7 @@ private void ImportUVs() { bool flip = Options.FlipUVs; UVInputIndices.Clear(); - UVs = new List>(); + UVs = []; foreach (var input in Inputs) { if (input.semantic == "TEXCOORD") @@ -413,7 +412,7 @@ private void ImportUVs() throw new ParsingException("Only ID references are supported for UV input sources"); ColladaSource inputSource = null; - if (!Sources.TryGetValue(input.source.Substring(1), out inputSource)) + if (!Sources.TryGetValue(input.source[1..], out inputSource)) throw new ParsingException("UV input source does not exist: " + input.source); List s = null, t = null; @@ -434,7 +433,7 @@ private void ImportUVs() private void ImportSources() { - Sources = new Dictionary(); + Sources = []; foreach (var source in Mesh.source) { var src = ColladaSource.FromCollada(source); @@ -444,8 +443,10 @@ private void ImportSources() private VertexDescriptor FindVertexFormat(bool isSkinned) { - var desc = new VertexDescriptor(); - desc.PositionType = PositionType.Float3; + var desc = new VertexDescriptor + { + PositionType = PositionType.Float3 + }; if (isSkinned) { desc.HasBoneWeights = true; @@ -497,10 +498,7 @@ public void ImportFromCollada(mesh mesh, VertexDescriptor vertexFormat, bool isS ImportSources(); ImportFaces(); - if (vertexFormat == null) - { - vertexFormat = FindVertexFormat(isSkinned); - } + vertexFormat ??= FindVertexFormat(isSkinned); InputVertexType = vertexFormat; OutputVertexType = new VertexDescriptor @@ -534,13 +532,13 @@ public void ImportFromCollada(mesh mesh, VertexDescriptor vertexFormat, bool isS ImportColors(); ImportUVs(); - if (UVInputIndices.Count() > 0 || ColorInputIndices.Count() > 
0 + if (UVInputIndices.Count > 0 || ColorInputIndices.Count > 0 || NormalsInputIndex != -1 || TangentsInputIndex != -1 || BinormalsInputIndex != -1) { var outVertexIndices = new Dictionary(new VertexIndexComparer()); ConsolidatedIndices = new List(TriangleCount * 3); ConsolidatedVertices = new List(Vertices.Count); - OriginalToConsolidatedVertexIndexMap = new Dictionary>(); + OriginalToConsolidatedVertexIndexMap = []; for (var vert = 0; vert < TriangleCount * 3; vert++) { var index = new int[InputOffsetCount]; @@ -549,8 +547,7 @@ public void ImportFromCollada(mesh mesh, VertexDescriptor vertexFormat, bool isS index[i] = Indices[vert * InputOffsetCount + i]; } - int consolidatedIndex; - if (!outVertexIndices.TryGetValue(index, out consolidatedIndex)) + if (!outVertexIndices.TryGetValue(index, out int consolidatedIndex)) { var vertexIndex = index[VertexInputIndex]; consolidatedIndex = ConsolidatedVertices.Count; @@ -567,21 +564,20 @@ public void ImportFromCollada(mesh mesh, VertexDescriptor vertexFormat, bool isS { vertex.Binormal = Binormals[index[BinormalsInputIndex]]; } - for (int uv = 0; uv < UVInputIndices.Count(); uv++ ) + for (int uv = 0; uv < UVInputIndices.Count; uv++) { vertex.SetUV(uv, UVs[uv][index[UVInputIndices[uv]]]); } - for (int color = 0; color < ColorInputIndices.Count(); color++) + for (int color = 0; color < ColorInputIndices.Count; color++) { vertex.SetColor(color, Colors[color][index[ColorInputIndices[color]]]); } outVertexIndices.Add(index, consolidatedIndex); ConsolidatedVertices.Add(vertex); - List mappedIndices = null; - if (!OriginalToConsolidatedVertexIndexMap.TryGetValue(vertexIndex, out mappedIndices)) + if (!OriginalToConsolidatedVertexIndexMap.TryGetValue(vertexIndex, out List mappedIndices)) { - mappedIndices = new List(); + mappedIndices = []; OriginalToConsolidatedVertexIndexMap.Add(vertexIndex, mappedIndices); } @@ -603,9 +599,9 @@ public void ImportFromCollada(mesh mesh, VertexDescriptor vertexFormat, bool isS for (var vert = 0; vert < TriangleCount * 3; vert++) ConsolidatedIndices.Add(VertexIndex(vert)); - OriginalToConsolidatedVertexIndexMap = new Dictionary>(); + OriginalToConsolidatedVertexIndexMap = []; for (var i = 0; i < Vertices.Count; i++) - OriginalToConsolidatedVertexIndexMap.Add(i, new List { i }); + OriginalToConsolidatedVertexIndexMap.Add(i, [i]); } if ((InputVertexType.TangentType == NormalType.None diff --git a/LSLib/Granny/Model/CurveData/AnimationCurveData.cs b/LSLib/Granny/Model/CurveData/AnimationCurveData.cs index c4f096fd..29782f07 100644 --- a/LSLib/Granny/Model/CurveData/AnimationCurveData.cs +++ b/LSLib/Granny/Model/CurveData/AnimationCurveData.cs @@ -1,6 +1,6 @@ using System; using System.Collections.Generic; -using OpenTK; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData @@ -23,8 +23,8 @@ private static void Init() return; } - TypeToFormatMap = new Dictionary(); - NameToTypeMap = new Dictionary(); + TypeToFormatMap = []; + NameToTypeMap = []; Register(typeof(DaKeyframes32f), CurveFormat.DaKeyframes32f); Register(typeof(DaK32fC32f), CurveFormat.DaK32fC32f); @@ -58,8 +58,7 @@ public static Type Resolve(String name) { Init(); - Type type = null; - if (!NameToTypeMap.TryGetValue(name, out type)) + if (!NameToTypeMap.TryGetValue(name, out Type type)) throw new ParsingException("Unsupported curve type: " + name); return type; @@ -181,10 +180,10 @@ public Type SelectType(MemberDefinition member, object node) public Type SelectType(MemberDefinition member, StructDefinition defn, object parent) { 
var fieldName = defn.Members[0].Name; - if (fieldName.Substring(0, 16) != "CurveDataHeader_") + if (fieldName[..16] != "CurveDataHeader_") throw new ParsingException("Unrecognized curve data header type: " + fieldName); - var curveType = fieldName.Substring(16); + var curveType = fieldName[16..]; return CurveRegistry.Resolve(curveType); } } @@ -204,7 +203,7 @@ public enum ExportType protected float ConvertOneOverKnotScaleTrunc(UInt16 oneOverKnotScaleTrunc) { - UInt32[] i = new UInt32[] { (UInt32)oneOverKnotScaleTrunc << 16 }; + UInt32[] i = [(UInt32)oneOverKnotScaleTrunc << 16]; float[] f = new float[1]; Buffer.BlockCopy(i, 0, f, 0, i.Length * 4); return f[0]; @@ -231,7 +230,7 @@ public virtual List GetMatrices() public virtual List GetQuaternions() { var matrices = GetMatrices(); - List quats = new List(matrices.Count); + List quats = new(matrices.Count); foreach (var matrix in matrices) { // Check that the matrix is orthogonal diff --git a/LSLib/Granny/Model/CurveData/D3Constant32f.cs b/LSLib/Granny/Model/CurveData/D3Constant32f.cs index d9ce2ec5..adbc18cb 100644 --- a/LSLib/Granny/Model/CurveData/D3Constant32f.cs +++ b/LSLib/Granny/Model/CurveData/D3Constant32f.cs @@ -1,6 +1,6 @@ using System; using System.Collections.Generic; -using OpenTK; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData @@ -20,12 +20,12 @@ public override int NumKnots() public override List GetKnots() { - return new List() { 0.0f }; + return [0.0f]; } public override List GetPoints() { - return new List() { new Vector3(Controls[0], Controls[1], Controls[2]) }; + return [new Vector3(Controls[0], Controls[1], Controls[2])]; } } } diff --git a/LSLib/Granny/Model/CurveData/D3I1K16uC16u.cs b/LSLib/Granny/Model/CurveData/D3I1K16uC16u.cs index ebaa8106..dcc0783a 100644 --- a/LSLib/Granny/Model/CurveData/D3I1K16uC16u.cs +++ b/LSLib/Granny/Model/CurveData/D3I1K16uC16u.cs @@ -1,6 +1,6 @@ using System; using System.Collections.Generic; -using OpenTK; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData diff --git a/LSLib/Granny/Model/CurveData/D3I1K32fC32f.cs b/LSLib/Granny/Model/CurveData/D3I1K32fC32f.cs index 886de201..40856eb9 100644 --- a/LSLib/Granny/Model/CurveData/D3I1K32fC32f.cs +++ b/LSLib/Granny/Model/CurveData/D3I1K32fC32f.cs @@ -1,6 +1,6 @@ using System; using System.Collections.Generic; -using OpenTK; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData diff --git a/LSLib/Granny/Model/CurveData/D3I1K8uC8u.cs b/LSLib/Granny/Model/CurveData/D3I1K8uC8u.cs index 0953e02d..115d79f0 100644 --- a/LSLib/Granny/Model/CurveData/D3I1K8uC8u.cs +++ b/LSLib/Granny/Model/CurveData/D3I1K8uC8u.cs @@ -1,6 +1,6 @@ using System; using System.Collections.Generic; -using OpenTK; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData diff --git a/LSLib/Granny/Model/CurveData/D3K16uC16u.cs b/LSLib/Granny/Model/CurveData/D3K16uC16u.cs index b9b79ee6..6a74d99e 100644 --- a/LSLib/Granny/Model/CurveData/D3K16uC16u.cs +++ b/LSLib/Granny/Model/CurveData/D3K16uC16u.cs @@ -1,6 +1,6 @@ using System; using System.Collections.Generic; -using OpenTK; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData diff --git a/LSLib/Granny/Model/CurveData/D3K8uC8u.cs b/LSLib/Granny/Model/CurveData/D3K8uC8u.cs index a4f875c9..d562324b 100644 --- a/LSLib/Granny/Model/CurveData/D3K8uC8u.cs +++ b/LSLib/Granny/Model/CurveData/D3K8uC8u.cs @@ -1,6 +1,6 @@ using System; using 
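ConvertOneOverKnotScaleTrunc above reconstructs a float whose upper 16 bits come from the stored value and whose lower 16 bits are zero. A sketch of the same reinterpretation without the temporary arrays, assuming a runtime that provides BitConverter.Int32BitsToSingle (the helper name is illustrative):

using System;

static class KnotScale
{
    // The 16-bit value is the truncated upper half of the float's bit pattern;
    // shifting it left by 16 and reinterpreting the bits recovers the scale.
    public static float FromTrunc(ushort oneOverKnotScaleTrunc)
        => BitConverter.Int32BitsToSingle((int)((uint)oneOverKnotScaleTrunc << 16));
}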
System.Collections.Generic; -using OpenTK; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData diff --git a/LSLib/Granny/Model/CurveData/D4Constant32f.cs b/LSLib/Granny/Model/CurveData/D4Constant32f.cs index 2ce96118..df13f86f 100644 --- a/LSLib/Granny/Model/CurveData/D4Constant32f.cs +++ b/LSLib/Granny/Model/CurveData/D4Constant32f.cs @@ -1,6 +1,6 @@ using System; using System.Collections.Generic; -using OpenTK; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData @@ -20,13 +20,13 @@ public override int NumKnots() public override List GetKnots() { - return new List() { 0.0f }; + return [0.0f]; } // TODO: GetMatrices public override List GetQuaternions() { - return new List() { new Quaternion(Controls[0], Controls[1], Controls[2], Controls[3]) }; + return [new Quaternion(Controls[0], Controls[1], Controls[2], Controls[3])]; } } } diff --git a/LSLib/Granny/Model/CurveData/D4nK16uC15u.cs b/LSLib/Granny/Model/CurveData/D4nK16uC15u.cs index 108556f8..8cd7774f 100644 --- a/LSLib/Granny/Model/CurveData/D4nK16uC15u.cs +++ b/LSLib/Granny/Model/CurveData/D4nK16uC15u.cs @@ -1,25 +1,25 @@ using System; using System.Collections.Generic; -using OpenTK; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData { public class D4nK16uC15u : AnimationCurveData { - private static float[] ScaleTable = { + private readonly static float[] ScaleTable = [ 1.4142135f, 0.70710677f, 0.35355338f, 0.35355338f, 0.35355338f, 0.17677669f, 0.17677669f, 0.17677669f, -1.4142135f, -0.70710677f, -0.35355338f, -0.35355338f, -0.35355338f, -0.17677669f, -0.17677669f, -0.17677669f - }; + ]; - private static float[] OffsetTable = { + private readonly static float[] OffsetTable = [ -0.70710677f, -0.35355338f, -0.53033006f, -0.17677669f, 0.17677669f, -0.17677669f, -0.088388346f, 0.0f, 0.70710677f, 0.35355338f, 0.53033006f, 0.17677669f, -0.17677669f, 0.17677669f, 0.088388346f, -0.0f - }; + ]; [Serialization(Type = MemberType.Inline)] public CurveDataHeader CurveDataHeader_D4nK16uC15u; diff --git a/LSLib/Granny/Model/CurveData/D4nK8uC7u.cs b/LSLib/Granny/Model/CurveData/D4nK8uC7u.cs index 99eed782..2a58537d 100644 --- a/LSLib/Granny/Model/CurveData/D4nK8uC7u.cs +++ b/LSLib/Granny/Model/CurveData/D4nK8uC7u.cs @@ -1,25 +1,25 @@ using System; using System.Collections.Generic; -using OpenTK; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData { public class D4nK8uC7u : AnimationCurveData { - private static float[] ScaleTable = { + private static readonly float[] ScaleTable = [ 1.4142135f, 0.70710677f, 0.35355338f, 0.35355338f, 0.35355338f, 0.17677669f, 0.17677669f, 0.17677669f, -1.4142135f, -0.70710677f, -0.35355338f, -0.35355338f, -0.35355338f, -0.17677669f, -0.17677669f, -0.17677669f - }; + ]; - private static float[] OffsetTable = { + private static readonly float[] OffsetTable = [ -0.70710677f, -0.35355338f, -0.53033006f, -0.17677669f, 0.17677669f, -0.17677669f, -0.088388346f, 0.0f, 0.70710677f, 0.35355338f, 0.53033006f, 0.17677669f, -0.17677669f, 0.17677669f, 0.088388346f, -0.0f - }; + ]; [Serialization(Type = MemberType.Inline)] public CurveDataHeader CurveDataHeader_D4nK8uC7u; diff --git a/LSLib/Granny/Model/CurveData/D9I1K16uC16u.cs b/LSLib/Granny/Model/CurveData/D9I1K16uC16u.cs index 28e19437..e700b6bb 100644 --- a/LSLib/Granny/Model/CurveData/D9I1K16uC16u.cs +++ b/LSLib/Granny/Model/CurveData/D9I1K16uC16u.cs @@ -1,6 +1,6 @@ using System; using System.Collections.Generic; -using OpenTK; 
+using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData diff --git a/LSLib/Granny/Model/CurveData/D9I1K8uC8u.cs b/LSLib/Granny/Model/CurveData/D9I1K8uC8u.cs index 5a507cc9..bb9ed368 100644 --- a/LSLib/Granny/Model/CurveData/D9I1K8uC8u.cs +++ b/LSLib/Granny/Model/CurveData/D9I1K8uC8u.cs @@ -1,6 +1,6 @@ using System; using System.Collections.Generic; -using OpenTK; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData diff --git a/LSLib/Granny/Model/CurveData/D9I3K16uC16u.cs b/LSLib/Granny/Model/CurveData/D9I3K16uC16u.cs index 07600bd6..b51b8bd2 100644 --- a/LSLib/Granny/Model/CurveData/D9I3K16uC16u.cs +++ b/LSLib/Granny/Model/CurveData/D9I3K16uC16u.cs @@ -1,6 +1,6 @@ using System; using System.Collections.Generic; -using OpenTK; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData diff --git a/LSLib/Granny/Model/CurveData/D9I3K8uC8u.cs b/LSLib/Granny/Model/CurveData/D9I3K8uC8u.cs index d59d7e10..9b2fa0ff 100644 --- a/LSLib/Granny/Model/CurveData/D9I3K8uC8u.cs +++ b/LSLib/Granny/Model/CurveData/D9I3K8uC8u.cs @@ -1,6 +1,6 @@ using System; using System.Collections.Generic; -using OpenTK; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData diff --git a/LSLib/Granny/Model/CurveData/DaConstant32f.cs b/LSLib/Granny/Model/CurveData/DaConstant32f.cs index 10a53e48..53d8b4f2 100644 --- a/LSLib/Granny/Model/CurveData/DaConstant32f.cs +++ b/LSLib/Granny/Model/CurveData/DaConstant32f.cs @@ -1,6 +1,6 @@ using System; using System.Collections.Generic; -using OpenTK; +using OpenTK.Mathematics; using LSLib.Granny.GR2; using System.Diagnostics; @@ -21,20 +21,20 @@ public override int NumKnots() public override List GetKnots() { - return new List() { 0.0f }; + return [0.0f]; } public override List GetMatrices() { Debug.Assert(Controls.Count == 9); var m = Controls; - Matrix3 mat = new Matrix3( + Matrix3 mat = new( m[0], m[1], m[2], m[3], m[4], m[5], m[6], m[7], m[8] ); - return new List { mat }; + return [mat]; } } } diff --git a/LSLib/Granny/Model/CurveData/DaIdentity.cs b/LSLib/Granny/Model/CurveData/DaIdentity.cs index 6f08cf1c..0179b378 100644 --- a/LSLib/Granny/Model/CurveData/DaIdentity.cs +++ b/LSLib/Granny/Model/CurveData/DaIdentity.cs @@ -1,6 +1,6 @@ using System; using System.Collections.Generic; -using OpenTK; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData @@ -18,17 +18,17 @@ public override int NumKnots() public override List GetKnots() { - return new List() { 0.0f }; + return [0.0f]; } public override List GetPoints() { - return new List() { new Vector3(0.0f, 0.0f, 0.0f) }; + return [new Vector3(0.0f, 0.0f, 0.0f)]; } public override List GetMatrices() { - return new List() { Matrix3.Identity }; + return [Matrix3.Identity]; } } } diff --git a/LSLib/Granny/Model/CurveData/DaK16uC16u.cs b/LSLib/Granny/Model/CurveData/DaK16uC16u.cs index 8305caf9..cfbac448 100644 --- a/LSLib/Granny/Model/CurveData/DaK16uC16u.cs +++ b/LSLib/Granny/Model/CurveData/DaK16uC16u.cs @@ -1,6 +1,6 @@ using System; using System.Collections.Generic; -using OpenTK; +using OpenTK.Mathematics; using System.Diagnostics; using LSLib.Granny.GR2; diff --git a/LSLib/Granny/Model/CurveData/DaK32fC32f.cs b/LSLib/Granny/Model/CurveData/DaK32fC32f.cs index 8b628c0d..c12d4829 100644 --- a/LSLib/Granny/Model/CurveData/DaK32fC32f.cs +++ b/LSLib/Granny/Model/CurveData/DaK32fC32f.cs @@ -1,7 +1,7 @@ using System; using System.Collections.Generic; using 
System.Linq; -using OpenTK; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData diff --git a/LSLib/Granny/Model/CurveData/DaK8uC8u.cs b/LSLib/Granny/Model/CurveData/DaK8uC8u.cs index 3300661a..fb730f70 100644 --- a/LSLib/Granny/Model/CurveData/DaK8uC8u.cs +++ b/LSLib/Granny/Model/CurveData/DaK8uC8u.cs @@ -1,6 +1,6 @@ using System; using System.Collections.Generic; -using OpenTK; +using OpenTK.Mathematics; using System.Diagnostics; using LSLib.Granny.GR2; diff --git a/LSLib/Granny/Model/CurveData/DaKeyframes32f.cs b/LSLib/Granny/Model/CurveData/DaKeyframes32f.cs index b69a349e..6b065dd2 100644 --- a/LSLib/Granny/Model/CurveData/DaKeyframes32f.cs +++ b/LSLib/Granny/Model/CurveData/DaKeyframes32f.cs @@ -1,7 +1,7 @@ using System; using System.Collections.Generic; using System.Linq; -using OpenTK; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData diff --git a/LSLib/Granny/Model/DivinityMesh.cs b/LSLib/Granny/Model/DivinityMesh.cs index 2e9a0f70..2aa64c10 100644 --- a/LSLib/Granny/Model/DivinityMesh.cs +++ b/LSLib/Granny/Model/DivinityMesh.cs @@ -130,12 +130,12 @@ private static DivinityFormatDesc Make(DivinityVertexUsage usage, DivinityVertex { return new DivinityFormatDesc { - Stream = new SByte[] { 0 }, - Usage = new Byte[] { (byte)usage }, - UsageIndex = new Byte[] { usageIndex }, - RefType = new Byte[] { 0 }, - Format = new Byte[] { (byte)format }, - Size = new Byte[] { size } + Stream = [0], + Usage = [(byte)usage], + UsageIndex = [usageIndex], + RefType = [0], + Format = [(byte)format], + Size = [size] }; } @@ -278,12 +278,12 @@ public static DivinityMeshExtendedData Make() UserDefinedProperties = "", UserMeshProperties = new DivinityMeshProperties { - Flags = new UInt32[] { 0, 0, 0, 0 }, - Lod = new Int32[] { -1 }, + Flags = [0, 0, 0, 0], + Lod = [-1], FormatDescs = null, ExtendedData = null, - LodDistance = new float[] { 3.40282347E+38f }, - IsImpostor = new Int32[] { 0 } + LodDistance = [3.40282347E+38f], + IsImpostor = [0] }, LSMVersion = CurrentLSMVersion }; @@ -336,7 +336,7 @@ public void UpdateFromModelInfo(Mesh mesh, DivinityModelInfoFormat format) else { LSMVersion = 0; - UserMeshProperties.FormatDescs = new List(); + UserMeshProperties.FormatDescs = []; } } } @@ -357,7 +357,7 @@ public static class UserDefinedPropertiesHelpers public static string MeshFlagsToUserDefinedProperties(DivinityModelFlag meshFlags) { - List properties = new List(); + List properties = new(); if (meshFlags.IsRigid()) { properties.Add(UserDefinedProperties_Rigid); diff --git a/LSLib/Granny/Model/Exporter.cs b/LSLib/Granny/Model/Exporter.cs index 00091e97..3418a879 100644 --- a/LSLib/Granny/Model/Exporter.cs +++ b/LSLib/Granny/Model/Exporter.cs @@ -4,19 +4,12 @@ using System.IO; using System.Linq; using LSLib.LS; -using OpenTK; -using Alphaleonis.Win32.Filesystem; -using File = Alphaleonis.Win32.Filesystem.File; using LSLib.LS.Enums; -using System.Numerics; namespace LSLib.Granny.Model { - public class ExportException : Exception + public class ExportException(string message) : Exception(message) { - public ExportException(string message) - : base(message) - { } } public enum ExportFormat @@ -93,7 +86,7 @@ public class ExporterOptions public bool ConformAnimations = true; public bool ConformMeshBoneBindings = true; public bool ConformModels = true; - public Dictionary VertexFormats = new Dictionary(); + public Dictionary VertexFormats = []; // Extended model info format to use when exporting to D:OS public 
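The ExportException and ExporterOptions changes above rely on C# 12 shorthand; a minimal sketch of what that shorthand expands to (Sample* names and the generic arguments are purely illustrative — the generic parameters are not visible in this diff view):

using System;
using System.Collections.Generic;

// Primary constructor form, as used for ExportException above...
public class SampleException(string message) : Exception(message) { }

// ...is shorthand for the explicit constructor it replaces:
public class SampleExceptionExpanded : Exception
{
    public SampleExceptionExpanded(string message) : base(message) { }
}

// Empty collection expressions initialize a collection of the field's declared type:
public class SampleOptions
{
    public List<string> DisabledAnimations = [];        // same as new List<string>()
    public Dictionary<string, int> VertexFormats = [];  // same as new Dictionary<string, int>()
}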
DivinityModelInfoFormat ModelInfoFormat = DivinityModelInfoFormat.None; // Model flags to use when exporting @@ -117,9 +110,9 @@ public class ExporterOptions // See: Spherical Skinning with Dual-Quaternions and QTangents, Crytek R&D public bool EnableQTangents = true; - public List DisabledAnimations = new List(); - public List DisabledModels = new List(); - public List DisabledSkeletons = new List(); + public List DisabledAnimations = []; + public List DisabledModels = []; + public List DisabledSkeletons = []; public void LoadGameSettings(Game game) { @@ -168,9 +161,9 @@ public class Exporter private Root LoadGR2(string inPath) { - var root = new LSLib.Granny.Model.Root(); - FileStream fs = File.Open(inPath, FileMode.Open, System.IO.FileAccess.Read, FileShare.ReadWrite); - var gr2 = new LSLib.Granny.GR2.GR2Reader(fs); + var root = new Root(); + FileStream fs = File.Open(inPath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite); + var gr2 = new GR2Reader(fs); gr2.Read(root); root.PostLoad(gr2.Tag); fs.Close(); @@ -180,8 +173,10 @@ private Root LoadGR2(string inPath) private Root LoadDAE(string inPath) { - var importer = new ColladaImporter(); - importer.Options = Options; + var importer = new ColladaImporter + { + Options = Options + }; return importer.Import(inPath); } @@ -203,12 +198,12 @@ private Root Load(string inPath, ExportFormat format) private void SaveGR2(string outPath, Root root) { root.PreSave(); - var writer = new LSLib.Granny.GR2.GR2Writer(); - - writer.Format = Options.Is64Bit ? Magic.Format.LittleEndian64 : Magic.Format.LittleEndian32; - writer.AlternateMagic = Options.AlternateSignature; - writer.VersionTag = Options.VersionTag; - + var writer = new GR2Writer + { + Format = Options.Is64Bit ? Magic.Format.LittleEndian64 : Magic.Format.LittleEndian32, + AlternateMagic = Options.AlternateSignature, + VersionTag = Options.VersionTag + }; if (Options.UseObsoleteVersionTag) { @@ -227,8 +222,10 @@ private void SaveGR2(string outPath, Root root) private void SaveDAE(Root root, ExporterOptions options) { - var exporter = new ColladaExporter(); - exporter.Options = options; + var exporter = new ColladaExporter + { + Options = options + }; exporter.Export(root, options.OutputPath); } @@ -257,17 +254,21 @@ private void GenerateDummySkeleton(Root root) if (model.Skeleton == null) { Utils.Info($"Generating dummy skeleton for model '{model.Name}'"); - var skeleton = new Skeleton(); - skeleton.Name = model.Name; - skeleton.LODType = 1; - skeleton.IsDummy = true; - root.Skeletons.Add(skeleton); + var bone = new Bone + { + Name = model.Name, + ParentIndex = -1, + Transform = new Transform() + }; - var bone = new Bone(); - bone.Name = model.Name; - bone.ParentIndex = -1; - skeleton.Bones = new List { bone }; - bone.Transform = new Transform(); + var skeleton = new Skeleton + { + Name = model.Name, + LODType = 1, + IsDummy = true, + Bones = [bone] + }; + root.Skeletons.Add(skeleton); // TODO: Transform / IWT is not always identity on dummy bones! skeleton.UpdateWorldTransforms(); @@ -280,14 +281,16 @@ private void GenerateDummySkeleton(Root root) throw new ParsingException("Failed to generate dummy skeleton: Mesh already has bone bindings."); } - var binding = new BoneBinding(); - binding.BoneName = bone.Name; - // TODO: Calculate bounding box! 
- // Use small bounding box values, as it interferes with object placement - // in D:OS 2 (after the Gift Bag 2 update) - binding.OBBMin = new float[] { -0.1f, -0.1f, -0.1f }; - binding.OBBMax = new float[] { 0.1f, 0.1f, 0.1f }; - mesh.Mesh.BoneBindings = new List { binding }; + var binding = new BoneBinding + { + BoneName = bone.Name, + // TODO: Calculate bounding box! + // Use small bounding box values, as it interferes with object placement + // in D:OS 2 (after the Gift Bag 2 update) + OBBMin = [-0.1f, -0.1f, -0.1f], + OBBMax = [0.1f, 0.1f, 0.1f] + }; + mesh.Mesh.BoneBindings = [binding]; } } } @@ -303,11 +306,8 @@ private void ConformAnimationBindPoses(Skeleton skeleton, Skeleton conformToSkel { var track = trackGroup.TransformTracks[i]; var bone = skeleton.GetBoneByName(track.Name); - if(bone == null) - { - //Dummy_Foot -> Dummy_Foot_01 - bone = skeleton.GetBoneByName(track.Name + "_01"); - } + //Dummy_Foot -> Dummy_Foot_01 + bone ??= skeleton.GetBoneByName(track.Name + "_01"); if (bone == null) { @@ -393,9 +393,9 @@ private void ConformSkeletonAnimations(Skeleton skeleton) { foreach (var track in trackGroup.TransformTracks) { - var bone = skeleton.GetBoneByName(track.Name); //Dummy_Foot -> Dummy_Foot_01 - if (bone == null) bone = skeleton.GetBoneByName(track.Name + "_01"); + var bone = skeleton.GetBoneByName(track.Name) ?? skeleton.GetBoneByName(track.Name + "_01"); + if (bone == null) { throw new ExportException($"Animation track references bone '{track.Name}' that cannot be found in the skeleton '{skeleton.Name}'."); @@ -423,15 +423,14 @@ private void ConformSkeletons(IEnumerable skeletons) // Generate a dummy model if there isn't one, otherwise we won't // be able to bind the animations to anything - if (Root.Models == null) - { - Root.Models = new List(); - var model = new Model(); - model.InitialPlacement = new Transform(); - model.Name = skeleton.Name; - model.Skeleton = skeleton; - Root.Models.Add(model); - } + Root.Models ??= [ + new Model + { + InitialPlacement = new Transform(), + Name = skeleton.Name, + Skeleton = skeleton + } + ]; ConformSkeletonAnimations(skeleton); } @@ -453,7 +452,7 @@ private void ConformSkeletons(IEnumerable skeletons) } // Allow name mismatches if there is only 1 skeleton in each file - if (conformingSkel == null && skeletons.Count() == 1 && Root.Skeletons.Count() == 1) + if (conformingSkel == null && skeletons.Count() == 1 && Root.Skeletons.Count == 1) { conformingSkel = skeletons.First(); } @@ -469,10 +468,7 @@ private void ConformSkeletons(IEnumerable skeletons) private void ConformMeshBoneBindings(Mesh mesh, Mesh conformToMesh) { - if (mesh.BoneBindings == null) - { - mesh.BoneBindings = new List(); - } + mesh.BoneBindings ??= []; foreach (var conformBone in conformToMesh.BoneBindings) { @@ -489,8 +485,10 @@ private void ConformMeshBoneBindings(Mesh mesh, Mesh conformToMesh) if (inputBone == null) { // Create a new "dummy" binding if it does not exist in the new mesh - inputBone = new BoneBinding(); - inputBone.BoneName = conformBone.BoneName; + inputBone = new BoneBinding + { + BoneName = conformBone.BoneName + }; mesh.BoneBindings.Add(inputBone); } @@ -533,30 +531,35 @@ private Mesh GenerateDummyMesh(MeshBinding meshBinding) var vertexData = new VertexData(); vertexData.VertexComponentNames = meshBinding.Mesh.PrimaryVertexData.VertexComponentNames .Select(name => new GrannyString(name.String)).ToList(); - vertexData.Vertices = new List(); + vertexData.Vertices = []; var dummyVertex = meshBinding.Mesh.VertexFormat.CreateInstance(); 
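The conform passes above fall back to a "_01"-suffixed bone name when a track name has no direct match in the skeleton; a sketch of that lookup in isolation (ResolveTrackBone is an illustrative helper, not part of LSLib):

// Dummy_Foot -> Dummy_Foot_01, as in ConformAnimationBindPoses / ConformSkeletonAnimations.
static Bone ResolveTrackBone(Skeleton skeleton, string trackName)
{
    var bone = skeleton.GetBoneByName(trackName)
               ?? skeleton.GetBoneByName(trackName + "_01");
    return bone ?? throw new ExportException(
        $"Animation track references bone '{trackName}' that cannot be found in the skeleton '{skeleton.Name}'.");
}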
vertexData.Vertices.Add(dummyVertex); Root.VertexDatas.Add(vertexData); - var topology = new TriTopology(); - topology.Groups = new List(); - var group = new TriTopologyGroup(); - group.MaterialIndex = 0; - group.TriCount = 0; - group.TriFirst = 0; - topology.Groups.Add(group); - - topology.Indices = new List(); + var topology = new TriTopology + { + Groups = [ + new TriTopologyGroup + { + MaterialIndex = 0, + TriCount = 0, + TriFirst = 0 + } + ], + Indices = [] + }; Root.TriTopologies.Add(topology); - var mesh = new Mesh(); - mesh.Name = meshBinding.Mesh.Name; - mesh.VertexFormat = meshBinding.Mesh.VertexFormat; - mesh.PrimaryTopology = topology; - mesh.PrimaryVertexData = vertexData; + var mesh = new Mesh + { + Name = meshBinding.Mesh.Name, + VertexFormat = meshBinding.Mesh.VertexFormat, + PrimaryTopology = topology, + PrimaryVertexData = vertexData + }; if (meshBinding.Mesh.BoneBindings != null) { - mesh.BoneBindings = new List(); + mesh.BoneBindings = []; ConformMeshBoneBindings(mesh, meshBinding.Mesh); } @@ -565,9 +568,11 @@ private Mesh GenerateDummyMesh(MeshBinding meshBinding) private Model MakeDummyModel(Model original) { - var newModel = new Model(); - newModel.InitialPlacement = original.InitialPlacement; - newModel.Name = original.Name; + var newModel = new Model + { + InitialPlacement = original.InitialPlacement, + Name = original.Name + }; if (original.Skeleton != null) { @@ -582,7 +587,7 @@ private Model MakeDummyModel(Model original) if (original.MeshBindings != null) { - newModel.MeshBindings = new List(); + newModel.MeshBindings = []; foreach (var meshBinding in original.MeshBindings) { // Try to bind the original mesh, if it exists in the source file. @@ -594,8 +599,10 @@ private Model MakeDummyModel(Model original) Root.Meshes.Add(mesh); } - var binding = new MeshBinding(); - binding.Mesh = mesh; + var binding = new MeshBinding + { + Mesh = mesh + }; newModel.MeshBindings.Add(binding); } } @@ -614,7 +621,7 @@ private void ConformModels(IEnumerable models) // Rebuild the model list to match the order used in the original GR2 // If a model is missing, generate a dummy model & mesh. 
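ConformModels (continued below) rebuilds Root.Models in the conforming file's order and substitutes dummies for anything missing. A rough sketch of that shape; the matching criterion is an assumption here, since the full lookup logic is not shown in this hunk:

// Requires System.Linq. ConformModelOrder is an illustrative helper, not an LSLib method.
List<Model> ConformModelOrder(List<Model> original, IEnumerable<Model> conformTo)
{
    var rebuilt = new List<Model>();
    foreach (var refModel in conformTo)
    {
        // Assumed: models are matched by name; missing ones get a dummy model & mesh.
        var match = original.FirstOrDefault(m => m.Name == refModel.Name);
        rebuilt.Add(match ?? MakeDummyModel(refModel));
    }
    return rebuilt;
}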
var originalModels = Root.Models; - Root.Models = new List(); + Root.Models = []; foreach (var model in models) { diff --git a/LSLib/Granny/Model/Mesh.cs b/LSLib/Granny/Model/Mesh.cs index 621184af..790afc91 100644 --- a/LSLib/Granny/Model/Mesh.cs +++ b/LSLib/Granny/Model/Mesh.cs @@ -2,22 +2,16 @@ using System.Collections.Generic; using System.Linq; using System.Text; -using System.Reflection; -using OpenTK; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model { - public class Deduplicator + public class Deduplicator(IEqualityComparer comparer) { - private IEqualityComparer Comparer; - public Dictionary DeduplicationMap = new Dictionary(); - public List Uniques = new List(); - - public Deduplicator(IEqualityComparer comparer) - { - Comparer = comparer; - } + private readonly IEqualityComparer Comparer = comparer; + public Dictionary DeduplicationMap = []; + public List Uniques = []; public void MakeIdentityMapping(IEnumerable items) { @@ -36,8 +30,7 @@ public void Deduplicate(IEnumerable items) var i = 0; foreach (var item in items) { - int mappedIndex; - if (!uniqueItems.TryGetValue(item, out mappedIndex)) + if (!uniqueItems.TryGetValue(item, out int mappedIndex)) { mappedIndex = uniqueItems.Count; uniqueItems.Add(item, mappedIndex); @@ -84,14 +77,14 @@ public override int GetHashCode() public class VertexDeduplicator { - public Deduplicator Vertices = new Deduplicator(new GenericEqualityComparer()); - public Deduplicator Normals = new Deduplicator(new GenericEqualityComparer()); - public List> UVs = new List>(); - public List> Colors = new List>(); + public Deduplicator Vertices = new(new GenericEqualityComparer()); + public Deduplicator Normals = new(new GenericEqualityComparer()); + public List> UVs = []; + public List> Colors = []; public void MakeIdentityMapping(List vertices) { - if (vertices.Count() == 0) return; + if (vertices.Count == 0) return; var format = vertices[0].Format; @@ -127,7 +120,7 @@ public void MakeIdentityMapping(List vertices) public void Deduplicate(List vertices) { - if (vertices.Count() == 0) return; + if (vertices.Count == 0) return; var format = vertices[0].Format; @@ -176,9 +169,9 @@ public class VertexDataSectionSelector : SectionSelector { public SectionType SelectSection(MemberDefinition member, Type type, object obj) { - if (obj is VertexData) + if (obj is VertexData data) { - return ((VertexData)obj).SerializationSection; + return data.SerializationSection; } else { @@ -206,7 +199,7 @@ public void PostLoad() // Fix missing vertex component names if (VertexComponentNames == null) { - VertexComponentNames = new List(); + VertexComponentNames = []; if (Vertices.Count > 0) { var components = Vertices[0].Format.ComponentNames(); @@ -250,7 +243,7 @@ public source MakeColladaPositions(string name) positions[index++] = pos[2]; } - return ColladaUtils.MakeFloatSource(name, "positions", new string[] { "X", "Y", "Z" }, positions); + return ColladaUtils.MakeFloatSource(name, "positions", ["X", "Y", "Z"], positions); } public source MakeColladaNormals(string name) @@ -267,7 +260,7 @@ public source MakeColladaNormals(string name) normals[index++] = normal[2]; } - return ColladaUtils.MakeFloatSource(name, "normals", new string[] { "X", "Y", "Z" }, normals); + return ColladaUtils.MakeFloatSource(name, "normals", ["X", "Y", "Z"], normals); } public source MakeColladaTangents(string name) @@ -284,7 +277,7 @@ public source MakeColladaTangents(string name) tangents[index++] = tangent[2]; } - return ColladaUtils.MakeFloatSource(name, "tangents", 
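The Deduplicator above (its generic parameters are elided in this diff view) maps every incoming item to the index of the first equal item seen. A sketch of the shape with the type parameter written out as T — the exact signature and map types in LSLib may differ:

public class Deduplicator<T>(IEqualityComparer<T> comparer)
{
    private readonly IEqualityComparer<T> Comparer = comparer;
    public Dictionary<int, int> DeduplicationMap = [];  // source index -> unique index
    public List<T> Uniques = [];

    public void Deduplicate(IEnumerable<T> items)
    {
        var uniqueItems = new Dictionary<T, int>(Comparer);
        var i = 0;
        foreach (var item in items)
        {
            if (!uniqueItems.TryGetValue(item, out int mappedIndex))
            {
                mappedIndex = uniqueItems.Count;
                uniqueItems.Add(item, mappedIndex);
                Uniques.Add(item);
            }
            DeduplicationMap.Add(i, mappedIndex);
            i++;
        }
    }
}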
new string[] { "X", "Y", "Z" }, tangents); + return ColladaUtils.MakeFloatSource(name, "tangents", ["X", "Y", "Z"], tangents); } public source MakeColladaBinormals(string name) @@ -301,7 +294,7 @@ public source MakeColladaBinormals(string name) binormals[index++] = binormal[2]; } - return ColladaUtils.MakeFloatSource(name, "binormals", new string[] { "X", "Y", "Z" }, binormals); + return ColladaUtils.MakeFloatSource(name, "binormals", ["X", "Y", "Z"], binormals); } public source MakeColladaUVs(string name, int uvIndex, bool flip) @@ -319,7 +312,7 @@ public source MakeColladaUVs(string name, int uvIndex, bool flip) uvs[index++] = uv[1]; } - return ColladaUtils.MakeFloatSource(name, "uvs" + uvIndex.ToString(), new string[] { "S", "T" }, uvs); + return ColladaUtils.MakeFloatSource(name, "uvs" + uvIndex.ToString(), ["S", "T"], uvs); } public source MakeColladaColors(string name, int setIndex) @@ -335,7 +328,7 @@ public source MakeColladaColors(string name, int setIndex) colors[index++] = color[2]; } - return ColladaUtils.MakeFloatSource(name, "colors" + setIndex.ToString(), new string[] { "R", "G", "B" }, colors); + return ColladaUtils.MakeFloatSource(name, "colors" + setIndex.ToString(), ["R", "G", "B"], colors); } public source MakeBoneWeights(string name) @@ -353,7 +346,7 @@ public source MakeBoneWeights(string name) } } - return ColladaUtils.MakeFloatSource(name, "weights", new string[] { "WEIGHT" }, weights.ToArray()); + return ColladaUtils.MakeFloatSource(name, "weights", ["WEIGHT"], weights.ToArray()); } public void Transform(Matrix4 transformation) @@ -514,11 +507,13 @@ public triangles MakeColladaTriangles(InputLocalOffset[] inputs, int numTris = (from grp in Groups select grp.TriCount).Sum(); - var tris = new triangles(); - tris.count = (ulong)numTris; - tris.input = inputs; + var tris = new triangles + { + count = (ulong)numTris, + input = inputs + }; - List> inputMaps = new List>(); + List> inputMaps = []; int uvIndex = 0, colorIndex = 0; for (int i = 0; i < inputs.Length; i++) { diff --git a/LSLib/Granny/Model/Root.cs b/LSLib/Granny/Model/Root.cs index 09b1cc0a..e8c44a03 100644 --- a/LSLib/Granny/Model/Root.cs +++ b/LSLib/Granny/Model/Root.cs @@ -2,7 +2,7 @@ using System.Collections.Generic; using System.Linq; using LSLib.Granny.GR2; -using OpenTK; +using OpenTK.Mathematics; namespace LSLib.Granny.Model { @@ -71,10 +71,7 @@ public void ConvertToYUp(bool transformSkeletons) TransformSkeletons(transform); } - if (ArtToolInfo != null) - { - ArtToolInfo.SetYUp(); - } + ArtToolInfo?.SetYUp(); ZUp = false; } @@ -125,10 +122,7 @@ public void PostLoad(UInt32 tag) triTopology.PostLoad(); } - if (Meshes != null) - { - Meshes.ForEach(m => m.PostLoad()); - } + Meshes?.ForEach(m => m.PostLoad()); var modelIndex = 0; foreach (var model in Models ?? 
Enumerable.Empty()) diff --git a/LSLib/Granny/Model/Skeleton.cs b/LSLib/Granny/Model/Skeleton.cs index c97da639..0ad614f4 100644 --- a/LSLib/Granny/Model/Skeleton.cs +++ b/LSLib/Granny/Model/Skeleton.cs @@ -1,11 +1,9 @@ using System; using System.Collections.Generic; using System.Linq; -using OpenTK; +using OpenTK.Mathematics; using LSLib.Granny.GR2; using System.Xml; -using LSLib.LS.Story; -using System.Reflection; namespace LSLib.Granny.Model { @@ -51,12 +49,12 @@ public void UpdateWorldTransforms(List bones) } var iwt = WorldTransform.Inverted(); - InverseWorldTransform = new float[] { + InverseWorldTransform = [ iwt[0, 0], iwt[0, 1], iwt[0, 2], iwt[0, 3], iwt[1, 0], iwt[1, 1], iwt[1, 2], iwt[1, 3], iwt[2, 0], iwt[2, 1], iwt[2, 2], iwt[2, 3], iwt[3, 0], iwt[3, 1], iwt[3, 2], iwt[3, 3] - }; + ]; } private void ImportLSLibProfile(node node) @@ -83,21 +81,25 @@ private void ImportLSLibProfile(node node) public static Bone FromCollada(node bone, int parentIndex, List bones, Dictionary boneSIDs, Dictionary boneIDs) { var transMat = ColladaHelpers.TransformFromNode(bone); - var colladaBone = new Bone(); - colladaBone.TransformSID = transMat.TransformSID; var myIndex = bones.Count; - bones.Add(colladaBone); - boneSIDs.Add(bone.sid, colladaBone); + var colladaBone = new Bone + { + TransformSID = transMat.TransformSID, + ParentIndex = parentIndex, + Name = bone.name, + LODError = 0, // TODO + OriginalTransform = transMat.transform, + Transform = Transform.FromMatrix4(transMat.transform) + }; + if (bone.id != null) { boneIDs.Add(bone.id, colladaBone); } - colladaBone.ParentIndex = parentIndex; - colladaBone.Name = bone.name; - colladaBone.LODError = 0; // TODO - colladaBone.OriginalTransform = transMat.transform; - colladaBone.Transform = Transform.FromMatrix4(transMat.transform); + bones.Add(colladaBone); + boneSIDs.Add(bone.sid, colladaBone); + colladaBone.UpdateWorldTransforms(bones); colladaBone.ImportLSLibProfile(bone); @@ -131,43 +133,41 @@ private technique ExportLSLibProfile(XmlDocument Xml) public node MakeCollada(XmlDocument Xml) { - var node = new node(); - node.id = "Bone_" + Name.Replace(' ', '_'); - node.name = Name; // .Replace(' ', '_'); - node.sid = Name.Replace(' ', '_'); - node.type = NodeType.JOINT; - - var transforms = new List(); - var transformTypes = new List(); - - var transform = new matrix(); - transform.sid = "Transform"; var mat = Transform.ToMatrix4(); mat.Transpose(); - transform.Values = new double[] { - mat[0, 0], mat[0, 1], mat[0, 2], mat[0, 3], - mat[1, 0], mat[1, 1], mat[1, 2], mat[1, 3], - mat[2, 0], mat[2, 1], mat[2, 2], mat[2, 3], - mat[3, 0], mat[3, 1], mat[3, 2], mat[3, 3] - }; - transforms.Add(transform); - transformTypes.Add(ItemsChoiceType2.matrix); - - node.Items = transforms.ToArray(); - node.ItemsElementName = transformTypes.ToArray(); - node.extra = new extra[] + return new node { - new extra - { - technique = new technique[] + id = "Bone_" + Name.Replace(' ', '_'), + name = Name, // .Replace(' ', '_'); + sid = Name.Replace(' ', '_'), + type = NodeType.JOINT, + + Items = [ + new matrix { - ExportLSLibProfile(Xml) + sid = "Transform", + Values = [ + mat[0, 0], mat[0, 1], mat[0, 2], mat[0, 3], + mat[1, 0], mat[1, 1], mat[1, 2], mat[1, 3], + mat[2, 0], mat[2, 1], mat[2, 2], mat[2, 3], + mat[3, 0], mat[3, 1], mat[3, 2], mat[3, 3] + ] } - } - }; + ], + ItemsElementName = [ItemsChoiceType2.matrix], - return node; + extra = + [ + new extra + { + technique = + [ + ExportLSLibProfile(Xml) + ] + } + ] + }; } } @@ -190,12 +190,14 @@ public class Skeleton 
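Bone.UpdateWorldTransforms above stores the inverse world transform as a flat 16-element array, reading the matrix [row, column]-wise exactly as written in the hunk. A sketch of that flattening as a helper (Flatten is illustrative, not an LSLib method):

using OpenTK.Mathematics;

static float[] Flatten(Matrix4 m) =>
[
    m[0, 0], m[0, 1], m[0, 2], m[0, 3],
    m[1, 0], m[1, 1], m[1, 2], m[1, 3],
    m[2, 0], m[2, 1], m[2, 2], m[2, 3],
    m[3, 0], m[3, 1], m[3, 2], m[3, 3]
];

// Usage, mirroring Bone.UpdateWorldTransforms:
//   InverseWorldTransform = Flatten(WorldTransform.Inverted());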
public static Skeleton FromCollada(node root) { - var skeleton = new Skeleton(); - skeleton.Bones = new List(); - skeleton.LODType = 1; - skeleton.Name = root.name; - skeleton.BonesBySID = new Dictionary(); - skeleton.BonesByID = new Dictionary(); + var skeleton = new Skeleton + { + Bones = [], + LODType = 1, + Name = root.name, + BonesBySID = [], + BonesByID = [] + }; Bone.FromCollada(root, -1, skeleton.Bones, skeleton.BonesBySID, skeleton.BonesByID); return skeleton; } diff --git a/LSLib/Granny/Model/Vertex.cs b/LSLib/Granny/Model/Vertex.cs index 8d073e0e..15840380 100644 --- a/LSLib/Granny/Model/Vertex.cs +++ b/LSLib/Granny/Model/Vertex.cs @@ -1,5 +1,5 @@ using LSLib.Granny.GR2; -using OpenTK; +using OpenTK.Mathematics; using System; using System.Collections.Generic; @@ -312,16 +312,16 @@ protected Vertex() { } public Vector2 GetUV(int index) { - switch (index) + return index switch { - case 0: return TextureCoordinates0; - case 1: return TextureCoordinates1; - case 2: return TextureCoordinates2; - case 3: return TextureCoordinates3; - case 4: return TextureCoordinates4; - case 5: return TextureCoordinates5; - default: throw new ArgumentException($"At most {MaxUVs} UVs are supported."); - } + 0 => TextureCoordinates0, + 1 => TextureCoordinates1, + 2 => TextureCoordinates2, + 3 => TextureCoordinates3, + 4 => TextureCoordinates4, + 5 => TextureCoordinates5, + _ => throw new ArgumentException($"At most {MaxUVs} UVs are supported."), + }; } public void SetUV(int index, Vector2 uv) @@ -340,12 +340,12 @@ public void SetUV(int index, Vector2 uv) public Vector4 GetColor(int index) { - switch (index) + return index switch { - case 0: return Color0; - case 1: return Color1; - default: throw new ArgumentException($"At most {MaxColors} color maps are supported."); - } + 0 => Color0, + 1 => Color1, + _ => throw new ArgumentException($"At most {MaxColors} color maps are supported."), + }; } public void SetColor(int index, Vector4 color) @@ -590,8 +590,7 @@ public Vertex ReadVertex(GR2Reader reader, VertexDescriptor descriptor) public object Read(GR2Reader gr2, StructDefinition definition, MemberDefinition member, uint arraySize, object parent) { - VertexDescriptor descriptor; - if (!VertexTypeCache.TryGetValue(parent, out descriptor)) + if (!VertexTypeCache.TryGetValue(parent, out VertexDescriptor descriptor)) { descriptor = ConstructDescriptor(member, definition, parent); VertexTypeCache.Add(parent, descriptor); diff --git a/LSLib/Granny/Model/VertexSerialization.cs b/LSLib/Granny/Model/VertexSerialization.cs index bba43d3e..543b6d23 100644 --- a/LSLib/Granny/Model/VertexSerialization.cs +++ b/LSLib/Granny/Model/VertexSerialization.cs @@ -1,5 +1,5 @@ using LSLib.Granny.GR2; -using OpenTK; +using OpenTK.Mathematics; using System; using System.Collections.Generic; using System.Reflection; @@ -55,12 +55,13 @@ public static Vector3 ReadHalfVector4As3(GR2Reader reader) public static Quaternion ReadBinormalShortVector4(GR2Reader reader) { - Quaternion v = new Quaternion(); - v.X = reader.Reader.ReadInt16() / 32767.0f; - v.Y = reader.Reader.ReadInt16() / 32767.0f; - v.Z = reader.Reader.ReadInt16() / 32767.0f; - v.W = reader.Reader.ReadInt16() / 32767.0f; - return v; + return new Quaternion + { + X = reader.Reader.ReadInt16() / 32767.0f, + Y = reader.Reader.ReadInt16() / 32767.0f, + Z = reader.Reader.ReadInt16() / 32767.0f, + W = reader.Reader.ReadInt16() / 32767.0f + }; } public static Vector4 ReadVector4(GR2Reader reader) @@ -444,14 +445,12 @@ public static void Unserialize(GR2Reader reader, Vertex v) { 
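ReadBinormalShortVector4 above decodes signed 16-bit components by dividing by 32767. A sketch of that normalization for a single component (DecodeSnorm16 is an illustrative helper):

// Maps the Int16 range [-32767, 32767] onto roughly [-1, 1];
// -32768 slightly undershoots -1, matching the divide-by-32767 convention above.
static float DecodeSnorm16(short value) => value / 32767.0f;

// e.g. one quantized quaternion component read from the GR2 stream:
//   var x = DecodeSnorm16(reader.Reader.ReadInt16());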
for (var i = 0; i < d.ColorMaps; i++) { - Vector4 color; - switch (d.ColorMapType) + var color = d.ColorMapType switch { - case ColorMapType.Float4: color = ReadVector4(reader); break; - case ColorMapType.Byte4: color = ReadNormalByteVector4(reader); break; - default: throw new Exception($"Cannot unserialize color map: Unsupported format {d.ColorMapType}"); - } - + ColorMapType.Float4 => ReadVector4(reader), + ColorMapType.Byte4 => ReadNormalByteVector4(reader), + _ => throw new Exception($"Cannot unserialize color map: Unsupported format {d.ColorMapType}"), + }; v.SetColor(i, color); } } @@ -460,14 +459,12 @@ public static void Unserialize(GR2Reader reader, Vertex v) { for (var i = 0; i < d.TextureCoordinates; i++) { - Vector2 uv; - switch (d.TextureCoordinateType) + var uv = d.TextureCoordinateType switch { - case TextureCoordinateType.Float2: uv = ReadVector2(reader); break; - case TextureCoordinateType.Half2: uv = ReadHalfVector2(reader); break; - default: throw new Exception($"Cannot unserialize UV map: Unsupported format {d.TextureCoordinateType}"); - } - + TextureCoordinateType.Float2 => ReadVector2(reader), + TextureCoordinateType.Half2 => ReadHalfVector2(reader), + _ => throw new Exception($"Cannot unserialize UV map: Unsupported format {d.TextureCoordinateType}"), + }; v.SetUV(i, uv); } } @@ -486,8 +483,8 @@ private static ModuleBuilder GetModuleBuilder() } var an = new AssemblyName("VertexFactoryAssembly"); - AssemblyBuilder assemblyBuilder = AppDomain.CurrentDomain.DefineDynamicAssembly(an, AssemblyBuilderAccess.Run); - ModuleBuilder moduleBuilder = assemblyBuilder.DefineDynamicModule("VertexFactoryClasses"); + var assemblyBuilder = AssemblyBuilder.DefineDynamicAssembly(an, AssemblyBuilderAccess.Run); + var moduleBuilder = assemblyBuilder.DefineDynamicModule("VertexFactoryClasses"); ModBuilder = moduleBuilder; return ModBuilder; } @@ -530,7 +527,7 @@ private void AddMember(StructDefinition defn, String name, MemberType type, UInt GrannyName = name, Definition = null, ArraySize = arraySize, - Extra = new UInt32[] { 0, 0, 0 }, + Extra = [0, 0, 0], Unknown = 0 }; defn.Members.Add(member); @@ -541,7 +538,7 @@ public StructDefinition CreateStructDefinition(object instance) var desc = (instance as Vertex).Format; var defn = new StructDefinition { - Members = new List(), + Members = [], MixedMarshal = true, Type = typeof(Vertex) }; diff --git a/LSLib/LS/BinUtils.cs b/LSLib/LS/BinUtils.cs index 9cfd04db..ceb30d6e 100644 --- a/LSLib/LS/BinUtils.cs +++ b/LSLib/LS/BinUtils.cs @@ -1,9 +1,9 @@ -using zlib; -using LZ4; +using LZ4; using System; using System.IO; using System.Runtime.InteropServices; using LSLib.LS.Enums; +using System.IO.Compression; namespace LSLib.LS { @@ -270,41 +270,27 @@ public static void WriteAttribute(BinaryWriter writer, NodeAttribute attr) public static CompressionMethod CompressionFlagsToMethod(byte flags) { - switch (flags & 0x0f) + return (flags & 0x0f) switch { - case (int)CompressionMethod.None: - return CompressionMethod.None; - - case (int)CompressionMethod.Zlib: - return CompressionMethod.Zlib; - - case (int)CompressionMethod.LZ4: - return CompressionMethod.LZ4; - - default: - throw new ArgumentException("Invalid compression method"); - } + (int)CompressionMethod.None => CompressionMethod.None, + (int)CompressionMethod.Zlib => CompressionMethod.Zlib, + (int)CompressionMethod.LZ4 => CompressionMethod.LZ4, + _ => throw new ArgumentException("Invalid compression method") + }; } - public static CompressionLevel CompressionFlagsToLevel(byte flags) + public static 
LSCompressionLevel CompressionFlagsToLevel(byte flags) { - switch (flags & 0xf0) + return (flags & 0xf0) switch { - case (int)CompressionFlags.FastCompress: - return CompressionLevel.FastCompression; - - case (int)CompressionFlags.DefaultCompress: - return CompressionLevel.DefaultCompression; - - case (int)CompressionFlags.MaxCompressionLevel: - return CompressionLevel.MaxCompression; - - default: - throw new ArgumentException("Invalid compression flags"); - } + (int)CompressionFlags.FastCompress => LSCompressionLevel.FastCompression, + (int)CompressionFlags.DefaultCompress => LSCompressionLevel.DefaultCompression, + (int)CompressionFlags.MaxCompressionLevel => LSCompressionLevel.MaxCompression, + _ => throw new ArgumentException("Invalid compression flags") + }; } - public static byte MakeCompressionFlags(CompressionMethod method, CompressionLevel level) + public static byte MakeCompressionFlags(CompressionMethod method, LSCompressionLevel level) { if (method == CompressionMethod.None) { @@ -317,11 +303,11 @@ public static byte MakeCompressionFlags(CompressionMethod method, CompressionLev else if (method == CompressionMethod.LZ4) flags = 0x2; - if (level == CompressionLevel.FastCompression) + if (level == LSCompressionLevel.FastCompression) flags |= 0x10; - else if (level == CompressionLevel.DefaultCompression) + else if (level == LSCompressionLevel.DefaultCompression) flags |= 0x20; - else if (level == CompressionLevel.MaxCompression) + else if (level == LSCompressionLevel.MaxCompression) flags |= 0x40; return flags; @@ -329,7 +315,7 @@ public static byte MakeCompressionFlags(CompressionMethod method, CompressionLev public static byte[] Decompress(byte[] compressed, int decompressedSize, byte compressionFlags, bool chunked = false) { - switch ((CompressionMethod)(compressionFlags & 0x0F)) + switch (CompressionFlagsToMethod(compressionFlags)) { case CompressionMethod.None: return compressed; @@ -338,11 +324,11 @@ public static byte[] Decompress(byte[] compressed, int decompressedSize, byte co { using (var compressedStream = new MemoryStream(compressed)) using (var decompressedStream = new MemoryStream()) - using (var stream = new ZInputStream(compressedStream)) + using (var stream = new ZLibStream(compressedStream, CompressionMode.Decompress)) { byte[] buf = new byte[0x10000]; int length = 0; - while ((length = stream.read(buf, 0, buf.Length)) > 0) + while ((length = stream.Read(buf, 0, buf.Length)) > 0) { decompressedStream.Write(buf, 0, length); } @@ -377,58 +363,42 @@ public static byte[] Compress(byte[] uncompressed, byte compressionFlags) return Compress(uncompressed, (CompressionMethod)(compressionFlags & 0x0F), CompressionFlagsToLevel(compressionFlags)); } - public static byte[] Compress(byte[] uncompressed, CompressionMethod method, CompressionLevel compressionLevel, bool chunked = false) + public static byte[] Compress(byte[] uncompressed, CompressionMethod method, LSCompressionLevel compressionLevel, bool chunked = false) { - switch (method) + return method switch { - case CompressionMethod.None: - return uncompressed; - - case CompressionMethod.Zlib: - return CompressZlib(uncompressed, compressionLevel); - - case CompressionMethod.LZ4: - return CompressLZ4(uncompressed, compressionLevel, chunked); - - default: - throw new ArgumentException("Invalid compression method specified"); - } + CompressionMethod.None => uncompressed, + CompressionMethod.Zlib => CompressZlib(uncompressed, compressionLevel), + CompressionMethod.LZ4 => CompressLZ4(uncompressed, compressionLevel, chunked), 
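MakeCompressionFlags / CompressionFlagsToMethod / CompressionFlagsToLevel above pack the compression method into the low nibble and the level into the high nibble of a single byte. A small round-trip usage sketch of that layout, using the bit values that MakeCompressionFlags assigns:

// flags byte:  high nibble = level  (0x10 fast, 0x20 default, 0x40 max)
//              low  nibble = method (0x0 none, 0x1 zlib, 0x2 LZ4)
byte flags = BinUtils.MakeCompressionFlags(CompressionMethod.LZ4, LSCompressionLevel.MaxCompression);
// flags == 0x42
var method = BinUtils.CompressionFlagsToMethod(flags);  // CompressionMethod.LZ4
var level  = BinUtils.CompressionFlagsToLevel(flags);   // LSCompressionLevel.MaxCompression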
+ _ => throw new ArgumentException("Invalid compression method specified") + }; } - public static byte[] CompressZlib(byte[] uncompressed, CompressionLevel compressionLevel) + public static byte[] CompressZlib(byte[] uncompressed, LSCompressionLevel compressionLevel) { - int level = zlib.zlibConst.Z_DEFAULT_COMPRESSION; - switch (compressionLevel) + var level = compressionLevel switch { - case CompressionLevel.FastCompression: - level = zlib.zlibConst.Z_BEST_SPEED; - break; - - case CompressionLevel.DefaultCompression: - level = zlib.zlibConst.Z_DEFAULT_COMPRESSION; - break; - - case CompressionLevel.MaxCompression: - level = zlib.zlibConst.Z_BEST_COMPRESSION; - break; - } - - using (var outputStream = new MemoryStream()) - using (var compressor = new ZOutputStream(outputStream, level)) - { - compressor.Write(uncompressed, 0, uncompressed.Length); - compressor.finish(); - return outputStream.ToArray(); - } + LSCompressionLevel.FastCompression => CompressionLevel.Fastest, + LSCompressionLevel.DefaultCompression => CompressionLevel.Optimal, + LSCompressionLevel.MaxCompression => CompressionLevel.SmallestSize, + _ => throw new ArgumentException() + }; + + using var outputStream = new MemoryStream(); + using var compressor = new ZLibStream(outputStream, level); + + compressor.Write(uncompressed, 0, uncompressed.Length); + compressor.Flush(); + return outputStream.ToArray(); } - public static byte[] CompressLZ4(byte[] uncompressed, CompressionLevel compressionLevel, bool chunked = false) + public static byte[] CompressLZ4(byte[] uncompressed, LSCompressionLevel compressionLevel, bool chunked = false) { if (chunked) { return Native.LZ4FrameCompressor.Compress(uncompressed); } - else if (compressionLevel == CompressionLevel.FastCompression) + else if (compressionLevel == LSCompressionLevel.FastCompression) { return LZ4Codec.Encode(uncompressed, 0, uncompressed.Length); } diff --git a/LSLib/LS/Common.cs b/LSLib/LS/Common.cs index 90349980..a3c59c04 100644 --- a/LSLib/LS/Common.cs +++ b/LSLib/LS/Common.cs @@ -7,9 +7,9 @@ public static class Common { public const int MajorVersion = 1; - public const int MinorVersion = 18; + public const int MinorVersion = 19; - public const int PatchVersion = 7; + public const int PatchVersion = 0; // Version of LSTools profile data in generated DAE files public const int ColladaMetadataVersion = 3; diff --git a/LSLib/LS/Enums/CompressionLevel.cs b/LSLib/LS/Enums/CompressionLevel.cs index 81684d7d..0119e48a 100644 --- a/LSLib/LS/Enums/CompressionLevel.cs +++ b/LSLib/LS/Enums/CompressionLevel.cs @@ -1,6 +1,6 @@ namespace LSLib.LS.Enums { - public enum CompressionLevel + public enum LSCompressionLevel { FastCompression, DefaultCompression, diff --git a/LSLib/LS/FileManager.cs b/LSLib/LS/FileManager.cs index d5877e39..478a2692 100644 --- a/LSLib/LS/FileManager.cs +++ b/LSLib/LS/FileManager.cs @@ -1,5 +1,5 @@ using System; -using Alphaleonis.Win32.Filesystem; +using System.IO; namespace LSLib.LS { diff --git a/LSLib/LS/Localization.cs b/LSLib/LS/Localization.cs index cdcccec1..a2a2a550 100644 --- a/LSLib/LS/Localization.cs +++ b/LSLib/LS/Localization.cs @@ -1,15 +1,9 @@ using System; using System.IO; -using System.Linq; using System.Runtime.InteropServices; using System.Text; -using LZ4; -using File = Alphaleonis.Win32.Filesystem.File; -using LSLib.LS.Enums; using System.Collections.Generic; -using LSLib.Granny; using System.Xml; -using System.Diagnostics; namespace LSLib.LS { @@ -67,14 +61,9 @@ public class LocaResource } - public class LocaReader : IDisposable + 
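The zlib.net ZInputStream/ZOutputStream usages are replaced with System.IO.Compression.ZLibStream in the BinUtils changes above; a self-contained round-trip sketch of that API (helper names are illustrative):

using System.IO;
using System.IO.Compression;

static byte[] ZlibCompress(byte[] data, CompressionLevel level)
{
    using var output = new MemoryStream();
    using (var z = new ZLibStream(output, level, leaveOpen: true))
    {
        z.Write(data, 0, data.Length);
    } // disposing the ZLibStream emits the final block and zlib trailer
    return output.ToArray();
}

static byte[] ZlibDecompress(byte[] compressed)
{
    using var input = new MemoryStream(compressed);
    using var z = new ZLibStream(input, CompressionMode.Decompress);
    using var output = new MemoryStream();
    z.CopyTo(output);
    return output.ToArray();
}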
public class LocaReader(Stream stream) : IDisposable { - private Stream Stream; - - public LocaReader(Stream stream) - { - this.Stream = stream; - } + private readonly Stream Stream = stream; public void Dispose() { @@ -83,95 +72,82 @@ public void Dispose() public LocaResource Read() { - using (var reader = new BinaryReader(Stream)) + using var reader = new BinaryReader(Stream); + var loca = new LocaResource { - var loca = new LocaResource - { - Entries = new List() - }; - var header = BinUtils.ReadStruct(reader); + Entries = [] + }; + var header = BinUtils.ReadStruct(reader); - if (header.Signature != (ulong)LocaHeader.DefaultSignature) - { - throw new InvalidDataException("Incorrect signature in localization file"); - } + if (header.Signature != (ulong)LocaHeader.DefaultSignature) + { + throw new InvalidDataException("Incorrect signature in localization file"); + } - var entries = new LocaEntry[header.NumEntries]; - BinUtils.ReadStructs(reader, entries); + var entries = new LocaEntry[header.NumEntries]; + BinUtils.ReadStructs(reader, entries); - Stream.Position = header.TextsOffset; - foreach (var entry in entries) + Stream.Position = header.TextsOffset; + foreach (var entry in entries) + { + var text = Encoding.UTF8.GetString(reader.ReadBytes((int)entry.Length - 1)); + loca.Entries.Add(new LocalizedText { - var text = Encoding.UTF8.GetString(reader.ReadBytes((int)entry.Length - 1)); - loca.Entries.Add(new LocalizedText - { - Key = entry.KeyString, - Version = entry.Version, - Text = text - }); - reader.ReadByte(); - } - - return loca; + Key = entry.KeyString, + Version = entry.Version, + Text = text + }); + reader.ReadByte(); } + + return loca; } } - public class LocaWriter + public class LocaWriter(Stream stream) { - private Stream stream; - - public LocaWriter(Stream stream) - { - this.stream = stream; - } + private readonly Stream stream = stream; public void Write(LocaResource res) { - using (var writer = new BinaryWriter(stream)) + using var writer = new BinaryWriter(stream); + var header = new LocaHeader { - var header = new LocaHeader { - Signature = LocaHeader.DefaultSignature, - NumEntries = (uint)res.Entries.Count, - TextsOffset = (uint)(Marshal.SizeOf(typeof(LocaHeader)) + Marshal.SizeOf(typeof(LocaEntry)) * res.Entries.Count) - }; - BinUtils.WriteStruct(writer, ref header); + Signature = LocaHeader.DefaultSignature, + NumEntries = (uint)res.Entries.Count, + TextsOffset = (uint)(Marshal.SizeOf(typeof(LocaHeader)) + Marshal.SizeOf(typeof(LocaEntry)) * res.Entries.Count) + }; + BinUtils.WriteStruct(writer, ref header); - var entries = new LocaEntry[header.NumEntries]; - for (var i = 0; i < entries.Length; i++) + var entries = new LocaEntry[header.NumEntries]; + for (var i = 0; i < entries.Length; i++) + { + var entry = res.Entries[i]; + entries[i] = new LocaEntry { - var entry = res.Entries[i]; - entries[i] = new LocaEntry - { - KeyString = entry.Key, - Version = entry.Version, - Length = (uint)Encoding.UTF8.GetByteCount(entry.Text) + 1 - }; - } + KeyString = entry.Key, + Version = entry.Version, + Length = (uint)Encoding.UTF8.GetByteCount(entry.Text) + 1 + }; + } - BinUtils.WriteStructs(writer, entries); + BinUtils.WriteStructs(writer, entries); - foreach (var entry in res.Entries) - { - var bin = Encoding.UTF8.GetBytes(entry.Text); - writer.Write(bin); - writer.Write((Byte)0); - } + foreach (var entry in res.Entries) + { + var bin = Encoding.UTF8.GetBytes(entry.Text); + writer.Write(bin); + writer.Write((Byte)0); } } } - public class LocaXmlReader : IDisposable + public class 
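LocaWriter above lays the file out as a fixed header, a table of fixed-size entries, then the texts as null-terminated UTF-8 strings. A sketch of the offset/length bookkeeping, with type names as they appear in the diff (the helper methods themselves are illustrative):

using System.Runtime.InteropServices;
using System.Text;

// Texts start immediately after the header and the entry table:
//   TextsOffset = sizeof(LocaHeader) + sizeof(LocaEntry) * NumEntries
static uint TextsOffset(int numEntries) =>
    (uint)(Marshal.SizeOf(typeof(LocaHeader)) + Marshal.SizeOf(typeof(LocaEntry)) * numEntries);

// Each entry's Length counts the UTF-8 bytes plus the trailing 0 byte, which is
// why LocaReader reads Length - 1 bytes and then skips one terminator byte.
static uint EntryLength(string text) =>
    (uint)Encoding.UTF8.GetByteCount(text) + 1;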
LocaXmlReader(Stream stream) : IDisposable { - private Stream stream; + private readonly Stream stream = stream; private XmlReader reader; private LocaResource resource; - public LocaXmlReader(Stream stream) - { - this.stream = stream; - } - public void Dispose() { stream.Dispose(); @@ -207,7 +183,7 @@ public LocaResource Read() { resource = new LocaResource { - Entries = new List() + Entries = [] }; using (this.reader = XmlReader.Create(stream)) @@ -226,38 +202,32 @@ public LocaResource Read() } - public class LocaXmlWriter + public class LocaXmlWriter(Stream stream) { - private Stream stream; - private XmlWriter writer; - - public LocaXmlWriter(Stream stream) - { - this.stream = stream; - } + private readonly Stream stream = stream; public void Write(LocaResource res) { - var settings = new XmlWriterSettings(); - settings.Indent = true; - settings.IndentChars = "\t"; - - using (this.writer = XmlWriter.Create(stream, settings)) + var settings = new XmlWriterSettings { - writer.WriteStartElement("contentList"); + Indent = true, + IndentChars = "\t" + }; - foreach (var entry in res.Entries) - { - writer.WriteStartElement("content"); - writer.WriteAttributeString("contentuid", entry.Key); - writer.WriteAttributeString("version", entry.Version.ToString()); - writer.WriteString(entry.Text); - writer.WriteEndElement(); - } + using var writer = XmlWriter.Create(stream, settings); + writer.WriteStartElement("contentList"); + foreach (var entry in res.Entries) + { + writer.WriteStartElement("content"); + writer.WriteAttributeString("contentuid", entry.Key); + writer.WriteAttributeString("version", entry.Version.ToString()); + writer.WriteString(entry.Text); writer.WriteEndElement(); - writer.Flush(); } + + writer.WriteEndElement(); + writer.Flush(); } } @@ -273,17 +243,12 @@ public static LocaFormat ExtensionToFileFormat(string path) { var extension = Path.GetExtension(path).ToLower(); - switch (extension) + return extension switch { - case ".loca": - return LocaFormat.Loca; - - case ".xml": - return LocaFormat.Xml; - - default: - throw new ArgumentException("Unrecognized file extension: " + extension); - } + ".loca" => LocaFormat.Loca, + ".xml" => LocaFormat.Xml, + _ => throw new ArgumentException("Unrecognized file extension: " + extension), + }; } public static LocaResource Load(string inputPath) @@ -293,10 +258,8 @@ public static LocaResource Load(string inputPath) public static LocaResource Load(string inputPath, LocaFormat format) { - using (var stream = File.Open(inputPath, FileMode.Open, FileAccess.Read, FileShare.Read)) - { - return Load(stream, format); - } + using var stream = File.Open(inputPath, FileMode.Open, FileAccess.Read, FileShare.Read); + return Load(stream, format); } public static LocaResource Load(Stream stream, LocaFormat format) @@ -305,18 +268,14 @@ public static LocaResource Load(Stream stream, LocaFormat format) { case LocaFormat.Loca: { - using (var reader = new LocaReader(stream)) - { - return reader.Read(); - } + using var reader = new LocaReader(stream); + return reader.Read(); } case LocaFormat.Xml: { - using (var reader = new LocaXmlReader(stream)) - { - return reader.Read(); - } + using var reader = new LocaXmlReader(stream); + return reader.Read(); } default: @@ -333,27 +292,25 @@ public static void Save(LocaResource resource, string outputPath, LocaFormat for { FileManager.TryToCreateDirectory(outputPath); - using (var file = File.Open(outputPath, FileMode.Create, FileAccess.Write)) + using var file = File.Open(outputPath, FileMode.Create, FileAccess.Write); + 
switch (format) { - switch (format) - { - case LocaFormat.Loca: - { - var writer = new LocaWriter(file); - writer.Write(resource); - break; - } - - case LocaFormat.Xml: - { - var writer = new LocaXmlWriter(file); - writer.Write(resource); - break; - } - - default: - throw new ArgumentException("Invalid loca format"); - } + case LocaFormat.Loca: + { + var writer = new LocaWriter(file); + writer.Write(resource); + break; + } + + case LocaFormat.Xml: + { + var writer = new LocaXmlWriter(file); + writer.Write(resource); + break; + } + + default: + throw new ArgumentException("Invalid loca format"); } } } diff --git a/LSLib/LS/Mods/ModResources.cs b/LSLib/LS/Mods/ModResources.cs index 11f681fa..f4fb4af3 100644 --- a/LSLib/LS/Mods/ModResources.cs +++ b/LSLib/LS/Mods/ModResources.cs @@ -1,20 +1,20 @@ -using Alphaleonis.Win32.Filesystem; -using LSLib.LS.Story.Compiler; +using LSLib.LS.Story.Compiler; using System; using System.Collections.Generic; +using System.IO; using System.Linq; using System.Text.RegularExpressions; namespace LSLib.LS { - public class ModInfo + public class ModInfo(string name) { - public string Name; + public string Name = name; public AbstractFileInfo Meta; - public Dictionary Scripts = new Dictionary(); - public Dictionary Stats = new Dictionary(); - public Dictionary Globals = new Dictionary(); - public Dictionary LevelObjects = new Dictionary(); + public Dictionary Scripts = []; + public Dictionary Stats = []; + public Dictionary Globals = []; + public Dictionary LevelObjects = []; public AbstractFileInfo OrphanQueryIgnoreList; public AbstractFileInfo StoryHeaderFile; public AbstractFileInfo TypeCoercionWhitelistFile; @@ -22,18 +22,13 @@ public class ModInfo public AbstractFileInfo ValueListsFile; public AbstractFileInfo ActionResourcesFile; public AbstractFileInfo ActionResourceGroupsFile; - public List TagFiles = new List(); - - public ModInfo(string name) - { - Name = name; - } + public List TagFiles = []; } public class ModResources : IDisposable { - public Dictionary Mods = new Dictionary(); - public List LoadedPackages = new List(); + public Dictionary Mods = []; + public List LoadedPackages = []; public void Dispose() { @@ -42,20 +37,20 @@ public void Dispose() } } - public class ModPathVisitor + public partial class ModPathVisitor { - private static readonly Regex metaRe = new Regex("^Mods/([^/]+)/meta\\.lsx$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled); - private static readonly Regex scriptRe = new Regex("^Mods/([^/]+)/Story/RawFiles/Goals/(.*\\.txt)$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled); - private static readonly Regex statRe = new Regex("^Public/([^/]+)/Stats/Generated/Data/(.*\\.txt)$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled); - private static readonly Regex staticLsxRe = new Regex("^Public/([^/]+)/(.*\\.lsx)$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled); - private static readonly Regex statStructureRe = new Regex("^Public/([^/]+)/Stats/Generated/Structure/(.*\\.txt)$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled); - private static readonly Regex orphanQueryIgnoresRe = new Regex("^Mods/([^/]+)/Story/story_orphanqueries_ignore_local\\.txt$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled); - private static readonly Regex storyDefinitionsRe = new Regex("^Mods/([^/]+)/Story/RawFiles/story_header\\.div$", RegexOptions.IgnoreCase | 
RegexOptions.CultureInvariant | RegexOptions.Compiled); - private static readonly Regex typeCoercionWhitelistRe = new Regex("^Mods/([^/]+)/Story/RawFiles/TypeCoercionWhitelist\\.txt$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled); - private static readonly Regex globalsRe = new Regex("^Mods/([^/]+)/Globals/.*/.*/.*\\.lsf$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled); - private static readonly Regex levelObjectsRe = new Regex("^Mods/([^/]+)/Levels/.*/(Characters|Items|Triggers)/.*\\.lsf$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled); + private static readonly Regex metaRe = MetaRegex(); + private static readonly Regex scriptRe = ScriptRegex(); + private static readonly Regex statRe = StatRegex(); + private static readonly Regex staticLsxRe = StaticLsxRegex(); + private static readonly Regex statStructureRe = StatStructureRegex(); + private static readonly Regex orphanQueryIgnoresRe = OrphanQueryIgnoresRegex(); + private static readonly Regex storyDefinitionsRe = StoryDefinitionsRegex(); + private static readonly Regex typeCoercionWhitelistRe = TypeCoercionWhitelistRegex(); + private static readonly Regex globalsRe = GlobalsRegex(); + private static readonly Regex levelObjectsRe = LevelObjectsRegex(); // Pattern for excluding subsequent parts of a multi-part archive - public static readonly Regex archivePartRe = new Regex("^(.*)_[0-9]+\\.pak$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled); + public static readonly Regex archivePartRe = ArchivePartRegex(); public readonly ModResources Resources; @@ -76,10 +71,10 @@ private static void EnumerateFiles(List paths, string rootPath, string c { foreach (string filePath in Directory.GetFiles(currentPath, pattern)) { - var relativePath = filePath.Substring(rootPath.Length); + var relativePath = filePath[rootPath.Length..]; if (relativePath[0] == '/' || relativePath[0] == '\\') { - relativePath = relativePath.Substring(1); + relativePath = relativePath[1..]; } paths.Add(relativePath); @@ -274,8 +269,8 @@ public void DiscoverBuiltinPackages(string gameDataPath) // List of packages we won't ever load // These packages don't contain any mod resources, but have a large // file table that makes loading unneccessarily slow. 
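// A self-contained sketch of the [GeneratedRegex] pattern that the regex fields above now
// delegate to (available since .NET 7): the source generator emits the matcher at compile
// time, so RegexOptions.Compiled is no longer needed. ArchiveNameMatcher is a hypothetical
// demo type; the pattern is the multi-part archive filter used in this file.
// Requires: using System.Text.RegularExpressions;
public static partial class ArchiveNameMatcher
{
    [GeneratedRegex("^(.*)_[0-9]+\\.pak$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant)]
    private static partial Regex ArchivePartRegex();

    // "Textures_1.pak" is a subsequent part of a multi-part archive and matches; "Textures.pak" does not.
    public static bool IsSubsequentArchivePart(string fileName) => ArchivePartRegex().IsMatch(fileName);
}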
- HashSet packageBlacklist = new HashSet - { + HashSet packageBlacklist = + [ "Assets.pak", "Effects.pak", "Engine.pak", @@ -295,7 +290,7 @@ public void DiscoverBuiltinPackages(string gameDataPath) "SharedSounds.pak", "Textures.pak", "VirtualTextures.pak" - }; + ]; // Collect priority value from headers var packagePriorities = new List>(); @@ -344,7 +339,7 @@ private void DiscoverModGoals(string modName, string modPath) var goalPath = modPath + @"\Story\RawFiles\Goals"; if (!Directory.Exists(goalPath)) return; - List goalFiles = new List(); + List goalFiles = []; EnumerateFiles(goalFiles, goalPath, goalPath, "*.txt"); foreach (var goalFile in goalFiles) @@ -363,7 +358,7 @@ private void DiscoverModStats(string modName, string modPublicPath) var statsPath = modPublicPath + @"\Stats\Generated\Data"; if (!Directory.Exists(statsPath)) return; - List statFiles = new List(); + List statFiles = []; EnumerateFiles(statFiles, statsPath, statsPath, "*.txt"); foreach (var statFile in statFiles) @@ -382,7 +377,7 @@ private void DiscoverModGlobals(string modName, string modPath) var globalsPath = modPath + @"\Globals"; if (!Directory.Exists(globalsPath)) return; - List globalFiles = new List(); + List globalFiles = []; EnumerateFiles(globalFiles, globalsPath, globalsPath, "*.lsf"); foreach (var globalFile in globalFiles) @@ -401,10 +396,10 @@ private void DiscoverModLevelObjects(string modName, string modPath) var levelsPath = modPath + @"\Levels"; if (!Directory.Exists(levelsPath)) return; - List levelFiles = new List(); + List levelFiles = []; EnumerateFiles(levelFiles, levelsPath, levelsPath, "*.lsf"); - var levelObjectsRe = new Regex("^(Characters|Items|Triggers)/.*\\.lsf$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant); + var levelObjectsRe = LevelObjectsLocalRegex(); foreach (var levelFile in levelFiles) { var fileInfo = new FilesystemFileInfo @@ -505,5 +500,41 @@ public void Discover(String gameDataPath) DiscoverMods(gameDataPath); } + + [GeneratedRegex("^Mods/([^/]+)/meta\\.lsx$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] + private static partial Regex MetaRegex(); + + [GeneratedRegex("^Mods/([^/]+)/Story/RawFiles/Goals/(.*\\.txt)$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] + private static partial Regex ScriptRegex(); + + [GeneratedRegex("^Public/([^/]+)/Stats/Generated/Data/(.*\\.txt)$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] + private static partial Regex StatRegex(); + + [GeneratedRegex("^Public/([^/]+)/(.*\\.lsx)$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] + private static partial Regex StaticLsxRegex(); + + [GeneratedRegex("^Public/([^/]+)/Stats/Generated/Structure/(.*\\.txt)$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] + private static partial Regex StatStructureRegex(); + + [GeneratedRegex("^Mods/([^/]+)/Story/story_orphanqueries_ignore_local\\.txt$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] + private static partial Regex OrphanQueryIgnoresRegex(); + + [GeneratedRegex("^Mods/([^/]+)/Story/RawFiles/story_header\\.div$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] + private static partial Regex StoryDefinitionsRegex(); + + [GeneratedRegex("^Mods/([^/]+)/Story/RawFiles/TypeCoercionWhitelist\\.txt$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] + private static 
partial Regex TypeCoercionWhitelistRegex(); + + [GeneratedRegex("^Mods/([^/]+)/Globals/.*/.*/.*\\.lsf$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] + private static partial Regex GlobalsRegex(); + + [GeneratedRegex("^Mods/([^/]+)/Levels/.*/(Characters|Items|Triggers)/.*\\.lsf$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] + private static partial Regex LevelObjectsRegex(); + + [GeneratedRegex("^(.*)_[0-9]+\\.pak$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] + private static partial Regex ArchivePartRegex(); + + [GeneratedRegex("^(Characters|Items|Triggers)/.*\\.lsf$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant)] + private static partial Regex LevelObjectsLocalRegex(); } } diff --git a/LSLib/LS/NodeAttribute.cs b/LSLib/LS/NodeAttribute.cs index 28dea121..801e2b14 100644 --- a/LSLib/LS/NodeAttribute.cs +++ b/LSLib/LS/NodeAttribute.cs @@ -49,7 +49,7 @@ public void InitFromMeta(string meta) public string BuildMeta() { - List tags = new List { "v1" }; + List tags = [ "v1" ]; if (ByteSwapGuids) { tags.Add("bswap_guids"); @@ -59,7 +59,7 @@ public string BuildMeta() } } - public class NodeAttribute + public class NodeAttribute(NodeAttribute.DataType type) { public enum DataType { @@ -102,7 +102,7 @@ public enum DataType DT_Max = DT_TranslatedFSString }; - private DataType type; + private readonly DataType type = type; private object value; public DataType Type @@ -116,11 +116,6 @@ public object Value set { this.value = value; } } - public NodeAttribute(DataType type) - { - this.type = type; - } - public override string ToString() { throw new NotImplementedException("ToString() is not safe to use anymore, AsString(settings) instead"); @@ -139,34 +134,34 @@ public static Guid ByteSwapGuid(Guid g) public string AsString(NodeSerializationSettings settings) { - switch (this.type) + switch (type) { case DataType.DT_ScratchBuffer: // ScratchBuffer is a special case, as its stored as byte[] and ToString() doesn't really do what we want - return Convert.ToBase64String((byte[])this.value); + return Convert.ToBase64String((byte[])value); case DataType.DT_IVec2: case DataType.DT_IVec3: case DataType.DT_IVec4: - return String.Join(" ", new List((int[])this.value).ConvertAll(i => i.ToString()).ToArray()); + return String.Join(" ", new List((int[])value).ConvertAll(i => i.ToString()).ToArray()); case DataType.DT_Vec2: case DataType.DT_Vec3: case DataType.DT_Vec4: - return String.Join(" ", new List((float[])this.value).ConvertAll(i => i.ToString()).ToArray()); + return String.Join(" ", new List((float[])value).ConvertAll(i => i.ToString()).ToArray()); case DataType.DT_UUID: if (settings.ByteSwapGuids) { - return ByteSwapGuid((Guid)this.value).ToString(); + return ByteSwapGuid((Guid)value).ToString(); } else { - return this.value.ToString(); + return value.ToString(); } default: - return this.value.ToString(); + return value.ToString(); } } @@ -248,9 +243,9 @@ public void FromString(string str, NodeSerializationSettings settings) str = "0"; } // Handle hexadecimal integers in XML files - else if (str.Length > 2 && str.Substring(0, 2) == "0x") + else if (str.Length > 2 && str[..2] == "0x") { - str = Convert.ToUInt64(str.Substring(2), 16).ToString(); + str = Convert.ToUInt64(str[2..], 16).ToString(); } } @@ -351,8 +346,7 @@ public void FromString(string str, NodeSerializationSettings settings) case DataType.DT_TranslatedString: // We'll only set the value part of the translated string, not 
the TranslatedStringKey / Handle part // That can be changed separately via attribute.Value.Handle - if (value == null) - value = new TranslatedString(); + value ??= new TranslatedString(); ((TranslatedString)value).Value = str; break; @@ -360,8 +354,7 @@ public void FromString(string str, NodeSerializationSettings settings) case DataType.DT_TranslatedFSString: // We'll only set the value part of the translated string, not the TranslatedStringKey / Handle part // That can be changed separately via attribute.Value.Handle - if (value == null) - value = new TranslatedFSString(); + value ??= new TranslatedFSString(); ((TranslatedFSString)value).Value = str; break; diff --git a/LSLib/LS/PackageCommon.cs b/LSLib/LS/PackageCommon.cs index 56308ceb..ba701226 100644 --- a/LSLib/LS/PackageCommon.cs +++ b/LSLib/LS/PackageCommon.cs @@ -6,11 +6,6 @@ using System.Text; using LSLib.LS.Enums; using LSLib.Native; -using Alphaleonis.Win32.Filesystem; -using Path = Alphaleonis.Win32.Filesystem.Path; -using FileInfo = Alphaleonis.Win32.Filesystem.FileInfo; -using Directory = Alphaleonis.Win32.Filesystem.Directory; -using File = Alphaleonis.Win32.Filesystem.File; namespace LSLib.LS { @@ -440,7 +435,7 @@ internal static PackagedFileInfo CreateFromEntry(FileEntry7 entry, Stream dataSt info.ArchivePart = entry.ArchivePart; info.Crc = 0; - info.Flags = entry.UncompressedSize > 0 ? BinUtils.MakeCompressionFlags(CompressionMethod.Zlib, CompressionLevel.DefaultCompression) : (uint) 0; + info.Flags = entry.UncompressedSize > 0 ? BinUtils.MakeCompressionFlags(CompressionMethod.Zlib, LSCompressionLevel.DefaultCompression) : (uint) 0; return info; } @@ -537,15 +532,11 @@ public void Dispose() public override UInt32 CRC() => throw new NotImplementedException("!"); - public override Stream MakeStream() => _stream ?? 
(_stream = File.Open(FilesystemPath, FileMode.Open, FileAccess.Read, FileShare.Read)); + public override Stream MakeStream() => _stream ??= File.Open(FilesystemPath, FileMode.Open, FileAccess.Read, FileShare.Read); public override void ReleaseStream() { - if (_stream == null) - { - return; - } - _stream.Dispose(); + _stream?.Dispose(); _stream = null; } @@ -602,16 +593,10 @@ public class Package { public const PackageVersion CurrentVersion = PackageVersion.V18; - public static byte[] Signature = - { - 0x4C, - 0x53, - 0x50, - 0x4B - }; - - public PackageMetadata Metadata = new PackageMetadata(); - public List Files = new List(); + public readonly static byte[] Signature = [ 0x4C, 0x53, 0x50, 0x4B ]; + + public PackageMetadata Metadata = new(); + public List Files = []; public PackageVersion Version; public static string MakePartFilename(string path, int part) @@ -676,16 +661,12 @@ public void UncompressPackage(Package package, string outputPath, Func 0) { - using (FileStream outFile = File.Open(outPath, FileMode.Create, FileAccess.Write)) - { - int read; - while ((read = inReader.Read(buffer, 0, buffer.Length)) > 0) - { - outFile.Write(buffer, 0, read); - } - } + outFile.Write(buffer, 0, read); } } finally @@ -698,11 +679,9 @@ public void UncompressPackage(Package package, string outputPath, Func filter = null) { ProgressUpdate("Reading package headers ...", 0, 1, null); - using (var reader = new PackageReader(packagePath)) - { - Package package = reader.Read(); - UncompressPackage(package, outputPath, filter); - } + using var reader = new PackageReader(packagePath); + Package package = reader.Read(); + UncompressPackage(package, outputPath, filter); } private static Package CreatePackageFromPath(string path) @@ -737,14 +716,12 @@ public void CreatePackage(string packagePath, string inputPath, PackageCreationO package.Metadata.Priority = options.Priority; ProgressUpdate("Creating archive ...", 0, 1, null); - using (var writer = new PackageWriter(package, packagePath)) - { - writer.WriteProgress += WriteProgressUpdate; - writer.Version = options.Version; - writer.Compression = options.Compression; - writer.CompressionLevel = options.FastCompression ? CompressionLevel.FastCompression : CompressionLevel.DefaultCompression; - writer.Write(); - } + using var writer = new PackageWriter(package, packagePath); + writer.WriteProgress += WriteProgressUpdate; + writer.Version = options.Version; + writer.Compression = options.Compression; + writer.LSCompressionLevel = options.FastCompression ? LSCompressionLevel.FastCompression : LSCompressionLevel.DefaultCompression; + writer.Write(); } } } diff --git a/LSLib/LS/PackageReader.cs b/LSLib/LS/PackageReader.cs index d30fcd1b..2d235663 100644 --- a/LSLib/LS/PackageReader.cs +++ b/LSLib/LS/PackageReader.cs @@ -4,7 +4,6 @@ using System.Runtime.InteropServices; using System.Text; using LZ4; -using File = Alphaleonis.Win32.Filesystem.File; using LSLib.LS.Enums; namespace LSLib.LS @@ -24,29 +23,15 @@ public NotAPackageException(string message, Exception innerException) : base(mes } } - public class PackageReader : IDisposable + public class PackageReader(string path, bool metadataOnly = false) : IDisposable { - private readonly String _path; - private readonly bool _metadataOnly; private Stream[] _streams; - public PackageReader(string path, bool metadataOnly = false) - { - this._path = path; - this._metadataOnly = metadataOnly; - } - public void Dispose() { - if (_streams != null) + foreach (Stream stream in _streams ?? 
[]) { - foreach (Stream stream in _streams) - { - if (stream != null) - { - stream.Dispose(); - } - } + stream?.Dispose(); } } @@ -58,7 +43,7 @@ private void OpenStreams(FileStream mainStream, int numParts) for (var part = 1; part < numParts; part++) { - string partPath = Package.MakePartFilename(_path, part); + string partPath = Package.MakePartFilename(path, part); _streams[part] = File.Open(partPath, FileMode.Open, FileAccess.Read, FileShare.Read); } } @@ -73,7 +58,7 @@ private Package ReadPackageV7(FileStream mainStream, BinaryReader reader) package.Metadata.Priority = 0; package.Version = PackageVersion.V7; - if (_metadataOnly) return package; + if (metadataOnly) return package; OpenStreams(mainStream, (int) header.NumParts); for (uint i = 0; i < header.NumFiles; i++) @@ -99,7 +84,7 @@ private Package ReadPackageV10(FileStream mainStream, BinaryReader reader) package.Metadata.Priority = header.Priority; package.Version = PackageVersion.V10; - if (_metadataOnly) return package; + if (metadataOnly) return package; OpenStreams(mainStream, header.NumParts); for (uint i = 0; i < header.NumFiles; i++) @@ -133,7 +118,7 @@ private Package ReadPackageV13(FileStream mainStream, BinaryReader reader) package.Metadata.Priority = header.Priority; package.Version = PackageVersion.V13; - if (_metadataOnly) return package; + if (metadataOnly) return package; OpenStreams(mainStream, header.NumParts); mainStream.Seek(header.FileListOffset, SeekOrigin.Begin); @@ -288,7 +273,7 @@ private Package ReadPackageV15(FileStream mainStream, BinaryReader reader) package.Metadata.Priority = header.Priority; package.Version = PackageVersion.V15; - if (_metadataOnly) return package; + if (metadataOnly) return package; OpenStreams(mainStream, 1); mainStream.Seek((long)header.FileListOffset, SeekOrigin.Begin); @@ -312,7 +297,7 @@ private Package ReadPackageV16(FileStream mainStream, BinaryReader reader) package.Metadata.Priority = header.Priority; package.Version = PackageVersion.V16; - if (_metadataOnly) return package; + if (metadataOnly) return package; OpenStreams(mainStream, header.NumParts); mainStream.Seek((long)header.FileListOffset, SeekOrigin.Begin); @@ -336,7 +321,7 @@ private Package ReadPackageV18(FileStream mainStream, BinaryReader reader) package.Metadata.Priority = header.Priority; package.Version = PackageVersion.V18; - if (_metadataOnly) return package; + if (metadataOnly) return package; OpenStreams(mainStream, header.NumParts); mainStream.Seek((long)header.FileListOffset, SeekOrigin.Begin); @@ -347,62 +332,60 @@ private Package ReadPackageV18(FileStream mainStream, BinaryReader reader) public Package Read() { - var mainStream = File.Open(_path, FileMode.Open, FileAccess.Read, FileShare.Read); + var mainStream = File.Open(path, FileMode.Open, FileAccess.Read, FileShare.Read); + using var reader = new BinaryReader(mainStream, new UTF8Encoding(), true); + + // Check for v13 package headers + mainStream.Seek(-8, SeekOrigin.End); + Int32 headerSize = reader.ReadInt32(); + byte[] signature = reader.ReadBytes(4); + if (Package.Signature.SequenceEqual(signature)) + { + mainStream.Seek(-headerSize, SeekOrigin.End); + return ReadPackageV13(mainStream, reader); + } - using (var reader = new BinaryReader(mainStream, new UTF8Encoding(), true)) + // Check for v10 package headers + mainStream.Seek(0, SeekOrigin.Begin); + signature = reader.ReadBytes(4); + Int32 version; + if (Package.Signature.SequenceEqual(signature)) { - // Check for v13 package headers - mainStream.Seek(-8, SeekOrigin.End); - Int32 headerSize = 
reader.ReadInt32(); - byte[] signature = reader.ReadBytes(4); - if (Package.Signature.SequenceEqual(signature)) + version = reader.ReadInt32(); + if (version == 10) { - mainStream.Seek(-headerSize, SeekOrigin.End); - return ReadPackageV13(mainStream, reader); + return ReadPackageV10(mainStream, reader); } - - // Check for v10 package headers - mainStream.Seek(0, SeekOrigin.Begin); - signature = reader.ReadBytes(4); - Int32 version; - if (Package.Signature.SequenceEqual(signature)) + else if (version == 15) { - version = reader.ReadInt32(); - if (version == 10) - { - return ReadPackageV10(mainStream, reader); - } - else if (version == 15) - { - mainStream.Seek(4, SeekOrigin.Begin); - return ReadPackageV15(mainStream, reader); - } - else if (version == 16) - { - mainStream.Seek(4, SeekOrigin.Begin); - return ReadPackageV16(mainStream, reader); - } - else if (version == 18) - { - mainStream.Seek(4, SeekOrigin.Begin); - return ReadPackageV18(mainStream, reader); - } - else - { - throw new InvalidDataException($"Package version v{version} not supported"); - } + mainStream.Seek(4, SeekOrigin.Begin); + return ReadPackageV15(mainStream, reader); } - - // Check for v9 and v7 package headers - mainStream.Seek(0, SeekOrigin.Begin); - version = reader.ReadInt32(); - if (version == 7 || version == 9) + else if (version == 16) { - return ReadPackageV7(mainStream, reader); + mainStream.Seek(4, SeekOrigin.Begin); + return ReadPackageV16(mainStream, reader); } + else if (version == 18) + { + mainStream.Seek(4, SeekOrigin.Begin); + return ReadPackageV18(mainStream, reader); + } + else + { + throw new InvalidDataException($"Package version v{version} not supported"); + } + } - throw new NotAPackageException("No valid signature found in package file"); + // Check for v9 and v7 package headers + mainStream.Seek(0, SeekOrigin.Begin); + version = reader.ReadInt32(); + if (version == 7 || version == 9) + { + return ReadPackageV7(mainStream, reader); } + + throw new NotAPackageException("No valid signature found in package file"); } } } diff --git a/LSLib/LS/PackageWriter.cs b/LSLib/LS/PackageWriter.cs index 82db1c09..8ba81383 100644 --- a/LSLib/LS/PackageWriter.cs +++ b/LSLib/LS/PackageWriter.cs @@ -8,32 +8,21 @@ using LSLib.LS.Enums; using LSLib.Native; using LZ4; -using Alphaleonis.Win32.Filesystem; -using File = Alphaleonis.Win32.Filesystem.File; namespace LSLib.LS { - public class PackageWriter : IDisposable + public class PackageWriter(Package package, string path) : IDisposable { public delegate void WriteProgressDelegate(AbstractFileInfo abstractFile, long numerator, long denominator); private const long MaxPackageSizeDOS = 0x40000000; private const long MaxPackageSizeBG3 = 0x100000000; public CompressionMethod Compression = CompressionMethod.None; - public CompressionLevel CompressionLevel = CompressionLevel.DefaultCompression; - - private readonly Package _package; - private readonly String _path; - private readonly List _streams = new List(); + public LSCompressionLevel LSCompressionLevel = LSCompressionLevel.DefaultCompression; + private readonly List _streams = []; public PackageVersion Version = Package.CurrentVersion; public WriteProgressDelegate WriteProgress = delegate { }; - public PackageWriter(Package package, string path) - { - this._package = package; - this._path = path; - } - public void Dispose() { foreach (Stream stream in _streams) @@ -52,11 +41,20 @@ public PackagedFileInfo WriteFile(AbstractFileInfo info) || (Version >= PackageVersion.V16 && _streams.Last().Position + size > 
MaxPackageSizeBG3)) { // Start a new package file if the current one is full. - string partPath = Package.MakePartFilename(_path, _streams.Count); + string partPath = Package.MakePartFilename(path, _streams.Count); var nextPart = File.Open(partPath, FileMode.Create, FileAccess.Write); _streams.Add(nextPart); } + var compression = Compression; + var compressionLevel = LSCompressionLevel; + + if (info.Name.EndsWith(".gts") || info.Name.EndsWith(".gtp")) + { + compression = CompressionMethod.None; + compressionLevel = LSCompressionLevel.FastCompression; + } + Stream stream = _streams.Last(); var packaged = new PackagedFileInfo { @@ -65,19 +63,17 @@ public PackagedFileInfo WriteFile(AbstractFileInfo info) UncompressedSize = (ulong)size, ArchivePart = (UInt32) (_streams.Count - 1), OffsetInFile = (UInt32) stream.Position, - Flags = BinUtils.MakeCompressionFlags(Compression, CompressionLevel) + Flags = BinUtils.MakeCompressionFlags(compression, compressionLevel) }; Stream packagedStream = info.MakeStream(); byte[] compressed; try { - using (var reader = new BinaryReader(packagedStream, Encoding.UTF8, true)) - { - byte[] uncompressed = reader.ReadBytes((int) reader.BaseStream.Length); - compressed = BinUtils.Compress(uncompressed, Compression, CompressionLevel); - stream.Write(compressed, 0, compressed.Length); - } + using var reader = new BinaryReader(packagedStream, Encoding.UTF8, true); + byte[] uncompressed = reader.ReadBytes((int)reader.BaseStream.Length); + compressed = BinUtils.Compress(uncompressed, compression, compressionLevel); + stream.Write(compressed, 0, compressed.Length); } finally { @@ -87,7 +83,7 @@ public PackagedFileInfo WriteFile(AbstractFileInfo info) packaged.SizeOnDisk = (UInt64) (stream.Position - (long)packaged.OffsetInFile); packaged.Crc = Crc32.Compute(compressed, 0); - if ((_package.Metadata.Flags & PackageFlags.Solid) == 0) + if ((package.Metadata.Flags & PackageFlags.Solid) == 0) { int padLength = PaddingLength(); long alignTo; @@ -121,164 +117,158 @@ public void WriteV7(FileStream mainStream) throw new ArgumentException("LZ4 compression is only supported by V10 and later package versions"); } - using (var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true)) + using var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true); + var header = new LSPKHeader7 { - var header = new LSPKHeader7 - { - Version = (uint) Version, - NumFiles = (UInt32) _package.Files.Count, - FileListSize = (UInt32) (Marshal.SizeOf(typeof(FileEntry7)) * _package.Files.Count) - }; - header.DataOffset = (UInt32) Marshal.SizeOf(typeof(LSPKHeader7)) + header.FileListSize; - int paddingLength = PaddingLength(); - if (header.DataOffset % paddingLength > 0) - { - header.DataOffset += (UInt32) (paddingLength - header.DataOffset % paddingLength); - } + Version = (uint)Version, + NumFiles = (UInt32)package.Files.Count, + FileListSize = (UInt32)(Marshal.SizeOf(typeof(FileEntry7)) * package.Files.Count) + }; + header.DataOffset = (UInt32)Marshal.SizeOf(typeof(LSPKHeader7)) + header.FileListSize; + int paddingLength = PaddingLength(); + if (header.DataOffset % paddingLength > 0) + { + header.DataOffset += (UInt32)(paddingLength - header.DataOffset % paddingLength); + } - // Write a placeholder instead of the actual headers; we'll write them after we - // compressed and flushed all files to disk - var placeholder = new byte[header.DataOffset]; - writer.Write(placeholder); + // Write a placeholder instead of the actual headers; we'll write them after we + // compressed and flushed all 
files to disk + var placeholder = new byte[header.DataOffset]; + writer.Write(placeholder); - long totalSize = _package.Files.Sum(p => (long) p.Size()); - long currentSize = 0; - var writtenFiles = new List(); - foreach (AbstractFileInfo file in _package.Files) - { - WriteProgress(file, currentSize, totalSize); - writtenFiles.Add(WriteFile(file)); - currentSize += (long)file.Size(); - } + long totalSize = package.Files.Sum(p => (long)p.Size()); + long currentSize = 0; + var writtenFiles = new List(); + foreach (AbstractFileInfo file in package.Files) + { + WriteProgress(file, currentSize, totalSize); + writtenFiles.Add(WriteFile(file)); + currentSize += (long)file.Size(); + } - mainStream.Seek(0, SeekOrigin.Begin); - header.LittleEndian = 0; - header.NumParts = (UInt16) _streams.Count; - BinUtils.WriteStruct(writer, ref header); + mainStream.Seek(0, SeekOrigin.Begin); + header.LittleEndian = 0; + header.NumParts = (UInt16)_streams.Count; + BinUtils.WriteStruct(writer, ref header); - foreach (PackagedFileInfo file in writtenFiles) + foreach (PackagedFileInfo file in writtenFiles) + { + FileEntry7 entry = file.MakeEntryV7(); + if (entry.ArchivePart == 0) { - FileEntry7 entry = file.MakeEntryV7(); - if (entry.ArchivePart == 0) - { - entry.OffsetInFile -= header.DataOffset; - } - - BinUtils.WriteStruct(writer, ref entry); + entry.OffsetInFile -= header.DataOffset; } + + BinUtils.WriteStruct(writer, ref entry); } } public void WriteV10(FileStream mainStream) { - using (var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true)) + using var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true); + var header = new LSPKHeader10 { - var header = new LSPKHeader10 - { - Version = (uint) Version, - NumFiles = (UInt32) _package.Files.Count, - FileListSize = (UInt32) (Marshal.SizeOf(typeof(FileEntry13)) * _package.Files.Count) - }; - header.DataOffset = (UInt32) Marshal.SizeOf(typeof(LSPKHeader10)) + 4 + header.FileListSize; - int paddingLength = PaddingLength(); - if (header.DataOffset % paddingLength > 0) - { - header.DataOffset += (UInt32) (paddingLength - header.DataOffset % paddingLength); - } + Version = (uint)Version, + NumFiles = (UInt32)package.Files.Count, + FileListSize = (UInt32)(Marshal.SizeOf(typeof(FileEntry13)) * package.Files.Count) + }; + header.DataOffset = (UInt32)Marshal.SizeOf(typeof(LSPKHeader10)) + 4 + header.FileListSize; + int paddingLength = PaddingLength(); + if (header.DataOffset % paddingLength > 0) + { + header.DataOffset += (UInt32)(paddingLength - header.DataOffset % paddingLength); + } - // Write a placeholder instead of the actual headers; we'll write them after we - // compressed and flushed all files to disk - var placeholder = new byte[header.DataOffset]; - writer.Write(placeholder); + // Write a placeholder instead of the actual headers; we'll write them after we + // compressed and flushed all files to disk + var placeholder = new byte[header.DataOffset]; + writer.Write(placeholder); - long totalSize = _package.Files.Sum(p => (long) p.Size()); - long currentSize = 0; - var writtenFiles = new List(); - foreach (AbstractFileInfo file in _package.Files) - { - WriteProgress(file, currentSize, totalSize); - writtenFiles.Add(WriteFile(file)); - currentSize += (long)file.Size(); - } + long totalSize = package.Files.Sum(p => (long)p.Size()); + long currentSize = 0; + var writtenFiles = new List(); + foreach (AbstractFileInfo file in package.Files) + { + WriteProgress(file, currentSize, totalSize); + writtenFiles.Add(WriteFile(file)); + currentSize += 
(long)file.Size(); + } - mainStream.Seek(0, SeekOrigin.Begin); - writer.Write(Package.Signature); - header.NumParts = (UInt16) _streams.Count; - header.Priority = _package.Metadata.Priority; - header.Flags = (byte)_package.Metadata.Flags; - BinUtils.WriteStruct(writer, ref header); + mainStream.Seek(0, SeekOrigin.Begin); + writer.Write(Package.Signature); + header.NumParts = (UInt16)_streams.Count; + header.Priority = package.Metadata.Priority; + header.Flags = (byte)package.Metadata.Flags; + BinUtils.WriteStruct(writer, ref header); - foreach (PackagedFileInfo file in writtenFiles) + foreach (PackagedFileInfo file in writtenFiles) + { + FileEntry13 entry = file.MakeEntryV13(); + if (entry.ArchivePart == 0) { - FileEntry13 entry = file.MakeEntryV13(); - if (entry.ArchivePart == 0) - { - entry.OffsetInFile -= header.DataOffset; - } - - // v10 packages don't support compression level in the flags field - entry.Flags &= 0x0f; - BinUtils.WriteStruct(writer, ref entry); + entry.OffsetInFile -= header.DataOffset; } + + // v10 packages don't support compression level in the flags field + entry.Flags &= 0x0f; + BinUtils.WriteStruct(writer, ref entry); } } public void WriteV13(FileStream mainStream) { - long totalSize = _package.Files.Sum(p => (long) p.Size()); + long totalSize = package.Files.Sum(p => (long) p.Size()); long currentSize = 0; var writtenFiles = new List(); - foreach (AbstractFileInfo file in _package.Files) + foreach (AbstractFileInfo file in package.Files) { WriteProgress(file, currentSize, totalSize); writtenFiles.Add(WriteFile(file)); currentSize += (long)file.Size(); } - using (var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true)) + using var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true); + var header = new LSPKHeader13 { - var header = new LSPKHeader13 - { - Version = (uint) Version, - FileListOffset = (UInt32) mainStream.Position - }; + Version = (uint)Version, + FileListOffset = (UInt32)mainStream.Position + }; - writer.Write((UInt32) writtenFiles.Count); + writer.Write((UInt32)writtenFiles.Count); - var fileList = new MemoryStream(); - var fileListWriter = new BinaryWriter(fileList); - foreach (PackagedFileInfo file in writtenFiles) - { - FileEntry13 entry = file.MakeEntryV13(); - BinUtils.WriteStruct(fileListWriter, ref entry); - } + var fileList = new MemoryStream(); + var fileListWriter = new BinaryWriter(fileList); + foreach (PackagedFileInfo file in writtenFiles) + { + FileEntry13 entry = file.MakeEntryV13(); + BinUtils.WriteStruct(fileListWriter, ref entry); + } - byte[] fileListBuf = fileList.ToArray(); - fileListWriter.Dispose(); - byte[] compressedFileList = LZ4Codec.EncodeHC(fileListBuf, 0, fileListBuf.Length); + byte[] fileListBuf = fileList.ToArray(); + fileListWriter.Dispose(); + byte[] compressedFileList = LZ4Codec.EncodeHC(fileListBuf, 0, fileListBuf.Length); - writer.Write(compressedFileList); + writer.Write(compressedFileList); - header.FileListSize = (UInt32) mainStream.Position - header.FileListOffset; - header.NumParts = (UInt16) _streams.Count; - header.Priority = _package.Metadata.Priority; - header.Flags = (byte)_package.Metadata.Flags; - header.Md5 = ComputeArchiveHash(); - BinUtils.WriteStruct(writer, ref header); + header.FileListSize = (UInt32)mainStream.Position - header.FileListOffset; + header.NumParts = (UInt16)_streams.Count; + header.Priority = package.Metadata.Priority; + header.Flags = (byte)package.Metadata.Flags; + header.Md5 = ComputeArchiveHash(); + BinUtils.WriteStruct(writer, ref header); - 
writer.Write((UInt32) (8 + Marshal.SizeOf(typeof(LSPKHeader13)))); - writer.Write(Package.Signature); - } + writer.Write((UInt32)(8 + Marshal.SizeOf(typeof(LSPKHeader13)))); + writer.Write(Package.Signature); } private List PackFiles() { - long totalSize = _package.Files.Sum(p => (long)p.Size()); + long totalSize = package.Files.Sum(p => (long)p.Size()); long currentSize = 0; var writtenFiles = new List(); - foreach (AbstractFileInfo file in _package.Files) + foreach (AbstractFileInfo file in package.Files) { WriteProgress(file, currentSize, totalSize); writtenFiles.Add(WriteFile(file)); @@ -331,8 +321,8 @@ public void WriteV15(FileStream mainStream) WriteFileListV15(writer, writtenFiles); header.FileListSize = (UInt32)(mainStream.Position - (long)header.FileListOffset); - header.Priority = _package.Metadata.Priority; - header.Flags = (byte)_package.Metadata.Flags; + header.Priority = package.Metadata.Priority; + header.Flags = (byte)package.Metadata.Flags; header.Md5 = ComputeArchiveHash(); mainStream.Seek(4, SeekOrigin.Begin); BinUtils.WriteStruct(writer, ref header); @@ -360,8 +350,8 @@ public void WriteV16(FileStream mainStream) WriteFileListV15(writer, writtenFiles); header.FileListSize = (UInt32)(mainStream.Position - (long)header.FileListOffset); - header.Priority = _package.Metadata.Priority; - header.Flags = (byte)_package.Metadata.Flags; + header.Priority = package.Metadata.Priority; + header.Flags = (byte)package.Metadata.Flags; header.Md5 = ComputeArchiveHash(); header.NumParts = (UInt16)_streams.Count; mainStream.Seek(4, SeekOrigin.Begin); @@ -412,8 +402,8 @@ public void WriteV18(FileStream mainStream) WriteFileListV18(writer, writtenFiles); header.FileListSize = (UInt32)(mainStream.Position - (long)header.FileListOffset); - header.Priority = _package.Metadata.Priority; - header.Flags = (byte)_package.Metadata.Flags; + header.Priority = package.Metadata.Priority; + header.Flags = (byte)package.Metadata.Flags; header.Md5 = ComputeArchiveHash(); header.NumParts = (UInt16)_streams.Count; mainStream.Seek(4, SeekOrigin.Begin); @@ -424,47 +414,45 @@ public void WriteV18(FileStream mainStream) public byte[] ComputeArchiveHash() { // MD5 is computed over the contents of all files in an alphabetically sorted order - List orderedFileList = _package.Files.Select(item => item).ToList(); + List orderedFileList = package.Files.Select(item => item).ToList(); if (Version < PackageVersion.V15) { orderedFileList.Sort((a, b) => String.CompareOrdinal(a.Name, b.Name)); } - using (MD5 md5 = MD5.Create()) + using MD5 md5 = MD5.Create(); + foreach (AbstractFileInfo file in orderedFileList) { - foreach (AbstractFileInfo file in orderedFileList) + Stream packagedStream = file.MakeStream(); + try { - Stream packagedStream = file.MakeStream(); - try - { - using (var reader = new BinaryReader(packagedStream)) - { - byte[] uncompressed = reader.ReadBytes((int) reader.BaseStream.Length); - md5.TransformBlock(uncompressed, 0, uncompressed.Length, uncompressed, 0); - } - } - finally + using (var reader = new BinaryReader(packagedStream)) { - file.ReleaseStream(); + byte[] uncompressed = reader.ReadBytes((int)reader.BaseStream.Length); + md5.TransformBlock(uncompressed, 0, uncompressed.Length, uncompressed, 0); } } - - md5.TransformFinalBlock(new byte[0], 0, 0); - byte[] hash = md5.Hash; - - // All hash bytes are incremented by 1 - for (var i = 0; i < hash.Length; i++) + finally { - hash[i] += 1; + file.ReleaseStream(); } + } + + md5.TransformFinalBlock(new byte[0], 0, 0); + byte[] hash = md5.Hash; - return 
hash; + // All hash bytes are incremented by 1 + for (var i = 0; i < hash.Length; i++) + { + hash[i] += 1; } + + return hash; } public void Write() { - var mainStream = File.Open(_path, FileMode.Create, FileAccess.Write); + var mainStream = File.Open(path, FileMode.Create, FileAccess.Write); _streams.Add(mainStream); switch (Version) diff --git a/LSLib/LS/Resource.cs b/LSLib/LS/Resource.cs index 3846b635..5d6837d3 100644 --- a/LSLib/LS/Resource.cs +++ b/LSLib/LS/Resource.cs @@ -5,11 +5,8 @@ namespace LSLib.LS { - public class InvalidFormatException : Exception + public class InvalidFormatException(string message) : Exception(message) { - public InvalidFormatException(string message) - : base(message) - { } } public struct PackedVersion @@ -41,7 +38,7 @@ public static PackedVersion FromInt32(Int32 packed) }; } - public Int32 ToVersion32() + public readonly Int32 ToVersion32() { return (Int32)((Major & 0x0f) << 28 | (Minor & 0x0f) << 24 | @@ -49,7 +46,7 @@ public Int32 ToVersion32() (Build & 0xffff) << 0); } - public Int64 ToVersion64() + public readonly Int64 ToVersion64() { return (Int64)(((Int64)Major & 0x7f) << 55 | ((Int64)Minor & 0xff) << 47 | @@ -76,7 +73,7 @@ public struct LSBHeader /// /// LSB file signature since BG3 /// - public static byte[] SignatureBG3 = new byte[] { 0x4C, 0x53, 0x46, 0x4D }; + public readonly static byte[] SignatureBG3 = "LSFM"u8.ToArray(); /// /// LSB signature up to FW3 (DOS2 DE) @@ -92,7 +89,7 @@ public struct LSBHeader public static class AttributeTypeMaps { - public static Dictionary TypeToId = new Dictionary + public readonly static Dictionary TypeToId = new() { { "None", NodeAttribute.DataType.DT_None }, { "uint8", NodeAttribute.DataType.DT_Byte }, @@ -130,7 +127,7 @@ public static class AttributeTypeMaps { "TranslatedFSString", NodeAttribute.DataType.DT_TranslatedFSString }, }; - public static Dictionary IdToType = new Dictionary + public readonly static Dictionary IdToType = new() { { NodeAttribute.DataType.DT_None, "None" }, { NodeAttribute.DataType.DT_Byte, "uint8" }, @@ -172,7 +169,7 @@ public static class AttributeTypeMaps public class Resource { public LSMetadata Metadata; - public Dictionary Regions = new Dictionary(); + public Dictionary Regions = []; public Resource() { @@ -189,8 +186,8 @@ public class Node { public string Name; public Node Parent; - public Dictionary Attributes = new Dictionary(); - public Dictionary> Children = new Dictionary>(); + public Dictionary Attributes = []; + public Dictionary> Children = []; public int ChildCount { @@ -218,10 +215,9 @@ public int TotalChildCount() public void AppendChild(Node child) { - List children; - if (!Children.TryGetValue(child.Name, out children)) + if (!Children.TryGetValue(child.Name, out List children)) { - children = new List(); + children = []; Children.Add(child.Name, children); } diff --git a/LSLib/LS/ResourceUtils.cs b/LSLib/LS/ResourceUtils.cs index 90aa5832..1a0584c8 100644 --- a/LSLib/LS/ResourceUtils.cs +++ b/LSLib/LS/ResourceUtils.cs @@ -2,10 +2,6 @@ using System.Collections.Generic; using System.IO; using LSLib.LS.Enums; -using Alphaleonis.Win32.Filesystem; -using Path = Alphaleonis.Win32.Filesystem.Path; -using Directory = Alphaleonis.Win32.Filesystem.Directory; -using File = Alphaleonis.Win32.Filesystem.File; namespace LSLib.LS { @@ -64,7 +60,7 @@ public class ResourceConversionParameters /// /// LSF/LSB compression level (i.e. 
size/compression time tradeoff) /// - public CompressionLevel CompressionLevel = CompressionLevel.DefaultCompression; + public LSCompressionLevel LSCompressionLevel = LSCompressionLevel.DefaultCompression; /// /// Byte-swap the last 8 bytes of GUIDs when serializing to/from string @@ -73,11 +69,12 @@ public class ResourceConversionParameters public static ResourceConversionParameters FromGameVersion(Game game) { - var p = new ResourceConversionParameters(); - p.PAKVersion = game.PAKVersion(); - p.LSF = game.LSFVersion(); - p.LSX = game.LSXVersion(); - return p; + return new ResourceConversionParameters + { + PAKVersion = game.PAKVersion(), + LSF = game.LSFVersion(), + LSX = game.LSXVersion() + }; } public void ToSerializationSettings(NodeSerializationSettings settings) @@ -98,26 +95,14 @@ public static ResourceFormat ExtensionToResourceFormat(string path) { var extension = Path.GetExtension(path).ToLower(); - switch (extension) + return extension switch { - case ".lsx": - return ResourceFormat.LSX; - - case ".lsb": - return ResourceFormat.LSB; - - case ".lsf": - case ".lsfx": - case ".lsbc": - case ".lsbs": - return ResourceFormat.LSF; - - case ".lsj": - return ResourceFormat.LSJ; - - default: - throw new ArgumentException("Unrecognized file extension: " + extension); - } + ".lsx" => ResourceFormat.LSX, + ".lsb" => ResourceFormat.LSB, + ".lsf" or ".lsfx" or ".lsbc" or ".lsbs" => ResourceFormat.LSF, + ".lsj" => ResourceFormat.LSJ, + _ => throw new ArgumentException("Unrecognized file extension: " + extension), + }; } public static Resource LoadResource(string inputPath, ResourceLoadParameters loadParams) @@ -127,10 +112,8 @@ public static Resource LoadResource(string inputPath, ResourceLoadParameters loa public static Resource LoadResource(string inputPath, ResourceFormat format, ResourceLoadParameters loadParams) { - using (var stream = File.Open(inputPath, FileMode.Open, FileAccess.Read, FileShare.Read)) - { - return LoadResource(stream, format, loadParams); - } + using var stream = File.Open(inputPath, FileMode.Open, FileAccess.Read, FileShare.Read); + return LoadResource(stream, format, loadParams); } public static Resource LoadResource(Stream stream, ResourceFormat format, ResourceLoadParameters loadParams) @@ -139,36 +122,28 @@ public static Resource LoadResource(Stream stream, ResourceFormat format, Resour { case ResourceFormat.LSX: { - using (var reader = new LSXReader(stream)) - { - loadParams.ToSerializationSettings(reader.SerializationSettings); - return reader.Read(); - } + using var reader = new LSXReader(stream); + loadParams.ToSerializationSettings(reader.SerializationSettings); + return reader.Read(); } case ResourceFormat.LSB: { - using (var reader = new LSBReader(stream)) - { - return reader.Read(); - } + using var reader = new LSBReader(stream); + return reader.Read(); } case ResourceFormat.LSF: { - using (var reader = new LSFReader(stream)) - { - return reader.Read(); - } + using var reader = new LSFReader(stream); + return reader.Read(); } case ResourceFormat.LSJ: { - using (var reader = new LSJReader(stream)) - { - loadParams.ToSerializationSettings(reader.SerializationSettings); - return reader.Read(); - } + using var reader = new LSJReader(stream); + loadParams.ToSerializationSettings(reader.SerializationSettings); + return reader.Read(); } default: @@ -185,64 +160,68 @@ public static void SaveResource(Resource resource, string outputPath, ResourceFo { FileManager.TryToCreateDirectory(outputPath); - using (var file = File.Open(outputPath, FileMode.Create, 
FileAccess.Write)) + using var file = File.Open(outputPath, FileMode.Create, FileAccess.Write); + switch (format) { - switch (format) - { - case ResourceFormat.LSX: - { - var writer = new LSXWriter(file); - writer.Version = conversionParams.LSX; - writer.PrettyPrint = conversionParams.PrettyPrint; - conversionParams.ToSerializationSettings(writer.SerializationSettings); - writer.Write(resource); - break; - } - - case ResourceFormat.LSB: + case ResourceFormat.LSX: + { + var writer = new LSXWriter(file) { - var writer = new LSBWriter(file); - writer.Write(resource); - break; - } + Version = conversionParams.LSX, + PrettyPrint = conversionParams.PrettyPrint + }; + conversionParams.ToSerializationSettings(writer.SerializationSettings); + writer.Write(resource); + break; + } + + case ResourceFormat.LSB: + { + var writer = new LSBWriter(file); + writer.Write(resource); + break; + } - case ResourceFormat.LSF: + case ResourceFormat.LSF: + { + var writer = new LSFWriter(file) { - var writer = new LSFWriter(file); - writer.Version = conversionParams.LSF; - writer.EncodeSiblingData = conversionParams.LSFEncodeSiblingData; - writer.Compression = conversionParams.Compression; - writer.CompressionLevel = conversionParams.CompressionLevel; - writer.Write(resource); - break; - } - - case ResourceFormat.LSJ: + Version = conversionParams.LSF, + EncodeSiblingData = conversionParams.LSFEncodeSiblingData, + Compression = conversionParams.Compression, + LSCompressionLevel = conversionParams.LSCompressionLevel + }; + writer.Write(resource); + break; + } + + case ResourceFormat.LSJ: + { + var writer = new LSJWriter(file) { - var writer = new LSJWriter(file); - writer.PrettyPrint = conversionParams.PrettyPrint; - conversionParams.ToSerializationSettings(writer.SerializationSettings); - writer.Write(resource); - break; - } - - default: - throw new ArgumentException("Invalid resource format"); - } + PrettyPrint = conversionParams.PrettyPrint + }; + conversionParams.ToSerializationSettings(writer.SerializationSettings); + writer.Write(resource); + break; + } + + default: + throw new ArgumentException("Invalid resource format"); } } private bool IsA(string path, ResourceFormat format) { var extension = Path.GetExtension(path).ToLower(); - switch (format) + return format switch { - case ResourceFormat.LSX: return extension == ".lsx"; - case ResourceFormat.LSB: return extension == ".lsb"; - case ResourceFormat.LSF: return extension == ".lsf" || extension == ".lsbc" || extension == ".lsfx"; - case ResourceFormat.LSJ: return extension == ".lsj"; - default: return false; - } + ResourceFormat.LSX => extension == ".lsx", + ResourceFormat.LSB => extension == ".lsb", + ResourceFormat.LSF => extension == ".lsf" || extension == ".lsbc" || extension == ".lsfx", + ResourceFormat.LSJ => extension == ".lsj", + _ => false, + }; } private void EnumerateFiles(List paths, string rootPath, string currentPath, ResourceFormat format) @@ -251,10 +230,10 @@ private void EnumerateFiles(List paths, string rootPath, string currentP { if (IsA(filePath, format)) { - var relativePath = filePath.Substring(rootPath.Length); + var relativePath = filePath[rootPath.Length..]; if (relativePath[0] == '/' || relativePath[0] == '\\') { - relativePath = relativePath.Substring(1); + relativePath = relativePath[1..]; } paths.Add(relativePath); diff --git a/LSLib/LS/Resources/LSB/LSBReader.cs b/LSLib/LS/Resources/LSB/LSBReader.cs index ffa651ad..9df8ab78 100644 --- a/LSLib/LS/Resources/LSB/LSBReader.cs +++ b/LSLib/LS/Resources/LSB/LSBReader.cs @@ -4,18 +4,12 
@@ namespace LSLib.LS { - public class LSBReader : IDisposable + public class LSBReader(Stream stream) : IDisposable { - private Stream stream; private BinaryReader reader; - private Dictionary staticStrings = new Dictionary(); + private Dictionary staticStrings = []; private bool IsBG3; - public LSBReader(Stream stream) - { - this.stream = stream; - } - public void Dispose() { stream.Dispose(); @@ -40,8 +34,10 @@ public Resource Read() IsBG3 = (header.Signature == BitConverter.ToUInt32(LSBHeader.SignatureBG3, 0)); ReadStaticStrings(); - Resource rsrc = new Resource(); - rsrc.Metadata = header.Metadata; + Resource rsrc = new Resource + { + Metadata = header.Metadata + }; ReadRegions(rsrc); return rsrc; } @@ -55,8 +51,10 @@ private void ReadRegions(Resource rsrc) UInt32 regionNameId = reader.ReadUInt32(); UInt32 regionOffset = reader.ReadUInt32(); - Region rgn = new Region(); - rgn.RegionName = staticStrings[regionNameId]; + Region rgn = new Region + { + RegionName = staticStrings[regionNameId] + }; var lastRegionPos = stream.Position; stream.Seek(regionOffset, SeekOrigin.Begin); @@ -85,8 +83,10 @@ private void ReadNode(Node node) for (UInt32 i = 0; i < childCount; i++) { - Node child = new Node(); - child.Parent = node; + Node child = new Node + { + Parent = node + }; ReadNode(child); node.AppendChild(child); } @@ -101,16 +101,20 @@ private NodeAttribute ReadAttribute(NodeAttribute.DataType type) case NodeAttribute.DataType.DT_FixedString: case NodeAttribute.DataType.DT_LSString: { - var attr = new NodeAttribute(type); - attr.Value = ReadString(true); + var attr = new NodeAttribute(type) + { + Value = ReadString(true) + }; return attr; } case NodeAttribute.DataType.DT_WString: case NodeAttribute.DataType.DT_LSWString: { - var attr = new NodeAttribute(type); - attr.Value = ReadWideString(true); + var attr = new NodeAttribute(type) + { + Value = ReadWideString(true) + }; return attr; } diff --git a/LSLib/LS/Resources/LSB/LSBWriter.cs b/LSLib/LS/Resources/LSB/LSBWriter.cs index ad9cab83..c45e7903 100644 --- a/LSLib/LS/Resources/LSB/LSBWriter.cs +++ b/LSLib/LS/Resources/LSB/LSBWriter.cs @@ -4,19 +4,13 @@ namespace LSLib.LS { - public class LSBWriter + public class LSBWriter(Stream stream) { - private Stream stream; private BinaryWriter writer; - private Dictionary staticStrings = new Dictionary(); + private Dictionary staticStrings = []; private UInt32 nextStaticStringId = 0; private UInt32 Version; - public LSBWriter(Stream stream) - { - this.stream = stream; - } - public void Write(Resource rsrc) { Version = rsrc.Metadata.MajorVersion; @@ -62,7 +56,7 @@ private void WriteRegions(Resource rsrc) writer.Write((UInt32)0); // Offset of region, will be updater after we finished serializing } - List regionPositions = new List(); + List regionPositions = []; foreach (var rgn in rsrc.Regions) { regionPositions.Add((UInt32)stream.Position); diff --git a/LSLib/LS/Resources/LSF/LSFCommon.cs b/LSLib/LS/Resources/LSF/LSFCommon.cs index 1de8bc1a..70141db9 100644 --- a/LSLib/LS/Resources/LSF/LSFCommon.cs +++ b/LSLib/LS/Resources/LSF/LSFCommon.cs @@ -9,7 +9,7 @@ internal struct LSFMagic /// /// LSOF file signature /// - public static byte[] Signature = new byte[] { 0x4C, 0x53, 0x4F, 0x46 }; + public readonly static byte[] Signature = "LSOF"u8.ToArray(); /// /// LSOF file signature; should be the same as LSFHeader.Signature diff --git a/LSLib/LS/Resources/LSF/LSFReader.cs b/LSLib/LS/Resources/LSF/LSFReader.cs index c095c0e6..20c53190 100644 --- a/LSLib/LS/Resources/LSF/LSFReader.cs +++ 
b/LSLib/LS/Resources/LSF/LSFReader.cs @@ -9,12 +9,12 @@ namespace LSLib.LS { - public class LSFReader : IDisposable + public class LSFReader(Stream stream) : IDisposable { /// /// Input stream /// - private Stream Stream; + private readonly Stream Stream = stream; /// /// Static string hash map @@ -46,11 +46,6 @@ public class LSFReader : IDisposable private PackedVersion GameVersion; private LSFMetadataV6 Metadata; - public LSFReader(Stream stream) - { - this.Stream = stream; - } - public void Dispose() { Stream.Dispose(); @@ -72,25 +67,23 @@ private void ReadNames(Stream s) // L x 16-bit string length (S) // [S bytes of UTF-8 string data] - using (var reader = new BinaryReader(s)) + using var reader = new BinaryReader(s); + var numHashEntries = reader.ReadUInt32(); + while (numHashEntries-- > 0) { - var numHashEntries = reader.ReadUInt32(); - while (numHashEntries-- > 0) - { - var hash = new List(); - Names.Add(hash); + var hash = new List(); + Names.Add(hash); - var numStrings = reader.ReadUInt16(); - while (numStrings-- > 0) - { - var nameLen = reader.ReadUInt16(); - byte[] bytes = reader.ReadBytes(nameLen); - var name = System.Text.Encoding.UTF8.GetString(bytes); - hash.Add(name); + var numStrings = reader.ReadUInt16(); + while (numStrings-- > 0) + { + var nameLen = reader.ReadUInt16(); + byte[] bytes = reader.ReadBytes(nameLen); + var name = System.Text.Encoding.UTF8.GetString(bytes); + hash.Add(name); #if DEBUG_LSF_SERIALIZATION - Console.WriteLine(String.Format("{0,3:X}/{1}: {2}", Names.Count - 1, hash.Count - 1, name)); + Console.WriteLine(String.Format("{0,3:X}/{1}: {2}", Names.Count - 1, hash.Count - 1, name)); #endif - } } } } @@ -106,44 +99,42 @@ private void ReadNodes(Stream s, bool longNodes) Console.WriteLine(" ----- DUMP OF NODE TABLE -----"); #endif - using (var reader = new BinaryReader(s)) + using var reader = new BinaryReader(s); + Int32 index = 0; + while (s.Position < s.Length) { - Int32 index = 0; - while (s.Position < s.Length) - { - var resolved = new LSFNodeInfo(); + var resolved = new LSFNodeInfo(); #if DEBUG_LSF_SERIALIZATION var pos = s.Position; #endif - if (longNodes) - { - var item = BinUtils.ReadStruct(reader); - resolved.ParentIndex = item.ParentIndex; - resolved.NameIndex = item.NameIndex; - resolved.NameOffset = item.NameOffset; - resolved.FirstAttributeIndex = item.FirstAttributeIndex; - } - else - { - var item = BinUtils.ReadStruct(reader); - resolved.ParentIndex = item.ParentIndex; - resolved.NameIndex = item.NameIndex; - resolved.NameOffset = item.NameOffset; - resolved.FirstAttributeIndex = item.FirstAttributeIndex; - } + if (longNodes) + { + var item = BinUtils.ReadStruct(reader); + resolved.ParentIndex = item.ParentIndex; + resolved.NameIndex = item.NameIndex; + resolved.NameOffset = item.NameOffset; + resolved.FirstAttributeIndex = item.FirstAttributeIndex; + } + else + { + var item = BinUtils.ReadStruct(reader); + resolved.ParentIndex = item.ParentIndex; + resolved.NameIndex = item.NameIndex; + resolved.NameOffset = item.NameOffset; + resolved.FirstAttributeIndex = item.FirstAttributeIndex; + } #if DEBUG_LSF_SERIALIZATION - Console.WriteLine(String.Format( - "{0}: {1} @ {2:X} (parent {3}, firstAttribute {4})", - index, Names[resolved.NameIndex][resolved.NameOffset], pos, resolved.ParentIndex, - resolved.FirstAttributeIndex - )); + Console.WriteLine(String.Format( + "{0}: {1} @ {2:X} (parent {3}, firstAttribute {4})", + index, Names[resolved.NameIndex][resolved.NameOffset], pos, resolved.ParentIndex, + resolved.FirstAttributeIndex + )); #endif - 
Nodes.Add(resolved); - index++; - } + Nodes.Add(resolved); + index++; } } @@ -153,79 +144,79 @@ private void ReadNodes(Stream s, bool longNodes) /// Stream to read the attribute headers from private void ReadAttributesV2(Stream s) { - using (var reader = new BinaryReader(s)) - { + using var reader = new BinaryReader(s); #if DEBUG_LSF_SERIALIZATION - var rawAttributes = new List(); + var rawAttributes = new List(); #endif - var prevAttributeRefs = new List(); - UInt32 dataOffset = 0; - Int32 index = 0; - while (s.Position < s.Length) + var prevAttributeRefs = new List(); + UInt32 dataOffset = 0; + Int32 index = 0; + while (s.Position < s.Length) + { + var attribute = BinUtils.ReadStruct(reader); + + var resolved = new LSFAttributeInfo { - var attribute = BinUtils.ReadStruct(reader); - - var resolved = new LSFAttributeInfo(); - resolved.NameIndex = attribute.NameIndex; - resolved.NameOffset = attribute.NameOffset; - resolved.TypeId = attribute.TypeId; - resolved.Length = attribute.Length; - resolved.DataOffset = dataOffset; - resolved.NextAttributeIndex = -1; - - var nodeIndex = attribute.NodeIndex + 1; - if (prevAttributeRefs.Count > nodeIndex) - { - if (prevAttributeRefs[nodeIndex] != -1) - { - Attributes[prevAttributeRefs[nodeIndex]].NextAttributeIndex = index; - } + NameIndex = attribute.NameIndex, + NameOffset = attribute.NameOffset, + TypeId = attribute.TypeId, + Length = attribute.Length, + DataOffset = dataOffset, + NextAttributeIndex = -1 + }; - prevAttributeRefs[nodeIndex] = index; - } - else + var nodeIndex = attribute.NodeIndex + 1; + if (prevAttributeRefs.Count > nodeIndex) + { + if (prevAttributeRefs[nodeIndex] != -1) { - while (prevAttributeRefs.Count < nodeIndex) - { - prevAttributeRefs.Add(-1); - } + Attributes[prevAttributeRefs[nodeIndex]].NextAttributeIndex = index; + } - prevAttributeRefs.Add(index); + prevAttributeRefs[nodeIndex] = index; + } + else + { + while (prevAttributeRefs.Count < nodeIndex) + { + prevAttributeRefs.Add(-1); } + prevAttributeRefs.Add(index); + } + #if DEBUG_LSF_SERIALIZATION rawAttributes.Add(attribute); #endif - dataOffset += resolved.Length; - Attributes.Add(resolved); - index++; - } + dataOffset += resolved.Length; + Attributes.Add(resolved); + index++; + } #if DEBUG_LSF_SERIALIZATION - Console.WriteLine(" ----- DUMP OF ATTRIBUTE REFERENCES -----"); - for (int i = 0; i < prevAttributeRefs.Count; i++) - { - Console.WriteLine(String.Format("Node {0}: last attribute {1}", i, prevAttributeRefs[i])); - } + Console.WriteLine(" ----- DUMP OF ATTRIBUTE REFERENCES -----"); + for (int i = 0; i < prevAttributeRefs.Count; i++) + { + Console.WriteLine(String.Format("Node {0}: last attribute {1}", i, prevAttributeRefs[i])); + } - Console.WriteLine(" ----- DUMP OF V2 ATTRIBUTE TABLE -----"); - for (int i = 0; i < Attributes.Count; i++) - { - var resolved = Attributes[i]; - var attribute = rawAttributes[i]; - - var debug = String.Format( - "{0}: {1} (offset {2:X}, typeId {3}, nextAttribute {4}, node {5})", - i, Names[resolved.NameIndex][resolved.NameOffset], resolved.DataOffset, - resolved.TypeId, resolved.NextAttributeIndex, attribute.NodeIndex - ); - Console.WriteLine(debug); - } -#endif + Console.WriteLine(" ----- DUMP OF V2 ATTRIBUTE TABLE -----"); + for (int i = 0; i < Attributes.Count; i++) + { + var resolved = Attributes[i]; + var attribute = rawAttributes[i]; + + var debug = String.Format( + "{0}: {1} (offset {2:X}, typeId {3}, nextAttribute {4}, node {5})", + i, Names[resolved.NameIndex][resolved.NameOffset], resolved.DataOffset, + resolved.TypeId, 
resolved.NextAttributeIndex, attribute.NodeIndex + ); + Console.WriteLine(debug); } +#endif } /// @@ -234,38 +225,38 @@ private void ReadAttributesV2(Stream s) /// Stream to read the attribute headers from private void ReadAttributesV3(Stream s) { - using (var reader = new BinaryReader(s)) + using var reader = new BinaryReader(s); + while (s.Position < s.Length) { - while (s.Position < s.Length) - { - var attribute = BinUtils.ReadStruct(reader); + var attribute = BinUtils.ReadStruct(reader); - var resolved = new LSFAttributeInfo(); - resolved.NameIndex = attribute.NameIndex; - resolved.NameOffset = attribute.NameOffset; - resolved.TypeId = attribute.TypeId; - resolved.Length = attribute.Length; - resolved.DataOffset = attribute.Offset; - resolved.NextAttributeIndex = attribute.NextAttributeIndex; + var resolved = new LSFAttributeInfo + { + NameIndex = attribute.NameIndex, + NameOffset = attribute.NameOffset, + TypeId = attribute.TypeId, + Length = attribute.Length, + DataOffset = attribute.Offset, + NextAttributeIndex = attribute.NextAttributeIndex + }; - Attributes.Add(resolved); - } + Attributes.Add(resolved); + } #if DEBUG_LSF_SERIALIZATION - Console.WriteLine(" ----- DUMP OF V3 ATTRIBUTE TABLE -----"); - for (int i = 0; i < Attributes.Count; i++) - { - var resolved = Attributes[i]; - - var debug = String.Format( - "{0}: {1} (offset {2:X}, typeId {3}, nextAttribute {4})", - i, Names[resolved.NameIndex][resolved.NameOffset], resolved.DataOffset, - resolved.TypeId, resolved.NextAttributeIndex - ); - Console.WriteLine(debug); - } -#endif + Console.WriteLine(" ----- DUMP OF V3 ATTRIBUTE TABLE -----"); + for (int i = 0; i < Attributes.Count; i++) + { + var resolved = Attributes[i]; + + var debug = String.Format( + "{0}: {1} (offset {2:X}, typeId {3}, nextAttribute {4})", + i, Names[resolved.NameIndex][resolved.NameOffset], resolved.DataOffset, + resolved.TypeId, resolved.NextAttributeIndex + ); + Console.WriteLine(debug); } +#endif } private MemoryStream Decompress(BinaryReader reader, uint sizeOnDisk, uint uncompressedSize, string debugDumpTo, bool allowChunked) @@ -370,60 +361,58 @@ private void ReadHeaders(BinaryReader reader) public Resource Read() { - using (var reader = new BinaryReader(Stream)) + using var reader = new BinaryReader(Stream); + ReadHeaders(reader); + + Names = []; + var namesStream = Decompress(reader, Metadata.StringsSizeOnDisk, Metadata.StringsUncompressedSize, "strings.bin", false); + using (namesStream) { - ReadHeaders(reader); + ReadNames(namesStream); + } - Names = new List>(); - var namesStream = Decompress(reader, Metadata.StringsSizeOnDisk, Metadata.StringsUncompressedSize, "strings.bin", false); - using (namesStream) - { - ReadNames(namesStream); - } + Nodes = []; + var nodesStream = Decompress(reader, Metadata.NodesSizeOnDisk, Metadata.NodesUncompressedSize, "nodes.bin", true); + using (nodesStream) + { + var longNodes = Version >= LSFVersion.VerExtendedNodes + && Metadata.HasSiblingData == 1; + ReadNodes(nodesStream, longNodes); + } - Nodes = new List(); - var nodesStream = Decompress(reader, Metadata.NodesSizeOnDisk, Metadata.NodesUncompressedSize, "nodes.bin", true); - using (nodesStream) + Attributes = []; + var attributesStream = Decompress(reader, Metadata.AttributesSizeOnDisk, Metadata.AttributesUncompressedSize, "attributes.bin", true); + using (attributesStream) + { + var hasSiblingData = Version >= LSFVersion.VerExtendedNodes + && Metadata.HasSiblingData == 1; + if (hasSiblingData) { - var longNodes = Version >= LSFVersion.VerExtendedNodes - && 
Metadata.HasSiblingData == 1; - ReadNodes(nodesStream, longNodes); + ReadAttributesV3(attributesStream); } - - Attributes = new List(); - var attributesStream = Decompress(reader, Metadata.AttributesSizeOnDisk, Metadata.AttributesUncompressedSize, "attributes.bin", true); - using (attributesStream) + else { - var hasSiblingData = Version >= LSFVersion.VerExtendedNodes - && Metadata.HasSiblingData == 1; - if (hasSiblingData) - { - ReadAttributesV3(attributesStream); - } - else - { - ReadAttributesV2(attributesStream); - } + ReadAttributesV2(attributesStream); } + } - this.Values = Decompress(reader, Metadata.ValuesSizeOnDisk, Metadata.ValuesUncompressedSize, "values.bin", true); + this.Values = Decompress(reader, Metadata.ValuesSizeOnDisk, Metadata.ValuesUncompressedSize, "values.bin", true); - Resource resource = new Resource(); - ReadRegions(resource); + Resource resource = new(); + ReadRegions(resource); - resource.Metadata.MajorVersion = GameVersion.Major; - resource.Metadata.MinorVersion = GameVersion.Minor; - resource.Metadata.Revision = GameVersion.Revision; - resource.Metadata.BuildNumber = GameVersion.Build; + resource.Metadata.MajorVersion = GameVersion.Major; + resource.Metadata.MinorVersion = GameVersion.Minor; + resource.Metadata.Revision = GameVersion.Revision; + resource.Metadata.BuildNumber = GameVersion.Build; - return resource; - } + return resource; } private void ReadRegions(Resource resource) { var attrReader = new BinaryReader(Values); - NodeInstances = new List(); + NodeInstances = []; for (int i = 0; i < Nodes.Count; i++) { var defn = Nodes[i]; @@ -493,8 +482,10 @@ private NodeAttribute ReadAttribute(NodeAttribute.DataType type, BinaryReader re case NodeAttribute.DataType.DT_WString: case NodeAttribute.DataType.DT_LSWString: { - var attr = new NodeAttribute(type); - attr.Value = ReadString(reader, (int)length); + var attr = new NodeAttribute(type) + { + Value = ReadString(reader, (int)length) + }; return attr; } @@ -526,15 +517,19 @@ private NodeAttribute ReadAttribute(NodeAttribute.DataType type, BinaryReader re case NodeAttribute.DataType.DT_TranslatedFSString: { - var attr = new NodeAttribute(type); - attr.Value = ReadTranslatedFSString(reader); + var attr = new NodeAttribute(type) + { + Value = ReadTranslatedFSString(reader) + }; return attr; } case NodeAttribute.DataType.DT_ScratchBuffer: { - var attr = new NodeAttribute(type); - attr.Value = reader.ReadBytes((int)length); + var attr = new NodeAttribute(type) + { + Value = reader.ReadBytes((int)length) + }; return attr; } @@ -600,7 +595,7 @@ private string ReadString(BinaryReader reader, int length) private string ReadString(BinaryReader reader) { - List bytes = new List(); + List bytes = []; while (true) { var b = reader.ReadByte(); diff --git a/LSLib/LS/Resources/LSF/LSFWriter.cs b/LSLib/LS/Resources/LSF/LSFWriter.cs index 477d3fd6..ae4df84f 100644 --- a/LSLib/LS/Resources/LSF/LSFWriter.cs +++ b/LSLib/LS/Resources/LSF/LSFWriter.cs @@ -6,11 +6,11 @@ namespace LSLib.LS { - public class LSFWriter + public class LSFWriter(Stream stream) { - private static int StringHashMapSize = 0x200; + private readonly static int StringHashMapSize = 0x200; - private Stream Stream; + private readonly Stream Stream = stream; private BinaryWriter Writer; private LSMetadata Meta; @@ -32,12 +32,7 @@ public class LSFWriter public LSFVersion Version = LSFVersion.MaxWriteVersion; public bool EncodeSiblingData = false; public CompressionMethod Compression = CompressionMethod.LZ4; - public CompressionLevel CompressionLevel = 
CompressionLevel.DefaultCompression; - - public LSFWriter(Stream stream) - { - this.Stream = stream; - } + public LSCompressionLevel LSCompressionLevel = LSCompressionLevel.DefaultCompression; public void Write(Resource resource) { @@ -59,12 +54,12 @@ public void Write(Resource resource) { NextNodeIndex = 0; NextAttributeIndex = 0; - NodeIndices = new Dictionary(); + NodeIndices = []; NextSiblingIndices = null; StringHashMap = new List>(StringHashMapSize); while (StringHashMap.Count < StringHashMapSize) { - StringHashMap.Add(new List()); + StringHashMap.Add([]); } if (EncodeSiblingData) @@ -86,12 +81,14 @@ public void Write(Resource resource) var attributeBuffer = AttributeStream.ToArray(); var valueBuffer = ValueStream.ToArray(); - var magic = new LSFMagic(); - magic.Magic = BitConverter.ToUInt32(LSFMagic.Signature, 0); - magic.Version = (uint)Version; + var magic = new LSFMagic + { + Magic = BitConverter.ToUInt32(LSFMagic.Signature, 0), + Version = (uint)Version + }; BinUtils.WriteStruct(Writer, ref magic); - PackedVersion gameVersion = new PackedVersion + PackedVersion gameVersion = new() { Major = resource.Metadata.MajorVersion, Minor = resource.Metadata.MinorVersion, @@ -101,30 +98,36 @@ public void Write(Resource resource) if (Version < LSFVersion.VerBG3ExtendedHeader) { - var header = new LSFHeader(); - header.EngineVersion = gameVersion.ToVersion32(); + var header = new LSFHeader + { + EngineVersion = gameVersion.ToVersion32() + }; BinUtils.WriteStruct(Writer, ref header); } else { - var header = new LSFHeaderV5(); - header.EngineVersion = gameVersion.ToVersion64(); + var header = new LSFHeaderV5 + { + EngineVersion = gameVersion.ToVersion64() + }; BinUtils.WriteStruct(Writer, ref header); } bool chunked = Version >= LSFVersion.VerChunkedCompress; - byte[] stringsCompressed = BinUtils.Compress(stringBuffer, Compression, CompressionLevel); - byte[] nodesCompressed = BinUtils.Compress(nodeBuffer, Compression, CompressionLevel, chunked); - byte[] attributesCompressed = BinUtils.Compress(attributeBuffer, Compression, CompressionLevel, chunked); - byte[] valuesCompressed = BinUtils.Compress(valueBuffer, Compression, CompressionLevel, chunked); + byte[] stringsCompressed = BinUtils.Compress(stringBuffer, Compression, LSCompressionLevel); + byte[] nodesCompressed = BinUtils.Compress(nodeBuffer, Compression, LSCompressionLevel, chunked); + byte[] attributesCompressed = BinUtils.Compress(attributeBuffer, Compression, LSCompressionLevel, chunked); + byte[] valuesCompressed = BinUtils.Compress(valueBuffer, Compression, LSCompressionLevel, chunked); if (Version < LSFVersion.VerBG3AdditionalBlob) { - var meta = new LSFMetadataV5(); - meta.StringsUncompressedSize = (UInt32)stringBuffer.Length; - meta.NodesUncompressedSize = (UInt32)nodeBuffer.Length; - meta.AttributesUncompressedSize = (UInt32)attributeBuffer.Length; - meta.ValuesUncompressedSize = (UInt32)valueBuffer.Length; + var meta = new LSFMetadataV5 + { + StringsUncompressedSize = (UInt32)stringBuffer.Length, + NodesUncompressedSize = (UInt32)nodeBuffer.Length, + AttributesUncompressedSize = (UInt32)attributeBuffer.Length, + ValuesUncompressedSize = (UInt32)valueBuffer.Length + }; if (Compression == CompressionMethod.None) { @@ -141,7 +144,7 @@ public void Write(Resource resource) meta.ValuesSizeOnDisk = (UInt32)valuesCompressed.Length; } - meta.CompressionFlags = BinUtils.MakeCompressionFlags(Compression, CompressionLevel); + meta.CompressionFlags = BinUtils.MakeCompressionFlags(Compression, LSCompressionLevel); meta.Unknown2 = 0; 
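Aside for readers, not part of the patch: the rename of CompressionLevel to LSCompressionLevel in this file changes how callers configure the writer. A minimal re-save sketch assuming only the members visible in this diff (Version, Compression, LSCompressionLevel and Write(Resource)); "resource" is a Resource obtained elsewhere, for example from LSFReader.Read(), the output file name is hypothetical, and using directives are omitted.

    using (var output = File.Create("resaved.lsf"))          // hypothetical output file
    {
        var writer = new LSFWriter(output)
        {
            Version = LSFVersion.MaxWriteVersion,
            Compression = CompressionMethod.LZ4,
            LSCompressionLevel = LSCompressionLevel.DefaultCompression
        };
        writer.Write(resource);
    }
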
meta.Unknown3 = 0; meta.HasSiblingData = EncodeSiblingData ? 1u : 0u; @@ -150,11 +153,13 @@ public void Write(Resource resource) } else { - var meta = new LSFMetadataV6(); - meta.StringsUncompressedSize = (UInt32)stringBuffer.Length; - meta.NodesUncompressedSize = (UInt32)nodeBuffer.Length; - meta.AttributesUncompressedSize = (UInt32)attributeBuffer.Length; - meta.ValuesUncompressedSize = (UInt32)valueBuffer.Length; + var meta = new LSFMetadataV6 + { + StringsUncompressedSize = (UInt32)stringBuffer.Length, + NodesUncompressedSize = (UInt32)nodeBuffer.Length, + AttributesUncompressedSize = (UInt32)attributeBuffer.Length, + ValuesUncompressedSize = (UInt32)valueBuffer.Length + }; if (Compression == CompressionMethod.None) { @@ -172,7 +177,7 @@ public void Write(Resource resource) } meta.Unknown = 0; - meta.CompressionFlags = BinUtils.MakeCompressionFlags(Compression, CompressionLevel); + meta.CompressionFlags = BinUtils.MakeCompressionFlags(Compression, LSCompressionLevel); meta.Unknown2 = 0; meta.Unknown3 = 0; meta.HasSiblingData = EncodeSiblingData ? 1u : 0u; @@ -214,7 +219,7 @@ private int ComputeSiblingIndices(Node node) private void ComputeSiblingIndices(Resource resource) { NextNodeIndex = 0; - NextSiblingIndices = new List(); + NextSiblingIndices = []; int lastRegionIndex = -1; foreach (var region in resource.Regions) @@ -484,14 +489,14 @@ private void WriteStaticStrings(BinaryWriter writer) private void WriteStaticString(BinaryWriter writer, string s) { - byte[] utf = System.Text.Encoding.UTF8.GetBytes(s); + byte[] utf = Encoding.UTF8.GetBytes(s); writer.Write((UInt16)utf.Length); writer.Write(utf); } private void WriteStringWithLength(BinaryWriter writer, string s) { - byte[] utf = System.Text.Encoding.UTF8.GetBytes(s); + byte[] utf = Encoding.UTF8.GetBytes(s); writer.Write((Int32)(utf.Length + 1)); writer.Write(utf); writer.Write((Byte)0); @@ -499,7 +504,7 @@ private void WriteStringWithLength(BinaryWriter writer, string s) private void WriteString(BinaryWriter writer, string s) { - byte[] utf = System.Text.Encoding.UTF8.GetBytes(s); + byte[] utf = Encoding.UTF8.GetBytes(s); writer.Write(utf); writer.Write((Byte)0); } diff --git a/LSLib/LS/Resources/LSJ/LSJReader.cs b/LSLib/LS/Resources/LSJ/LSJReader.cs index 12b6f6cc..1d89c98c 100644 --- a/LSLib/LS/Resources/LSJ/LSJReader.cs +++ b/LSLib/LS/Resources/LSJ/LSJReader.cs @@ -4,16 +4,10 @@ namespace LSLib.LS { - public class LSJReader : IDisposable + public class LSJReader(Stream stream) : IDisposable { - private Stream stream; - private JsonTextReader reader; - public NodeSerializationSettings SerializationSettings = new NodeSerializationSettings(); - - public LSJReader(Stream stream) - { - this.stream = stream; - } + private readonly Stream stream = stream; + public NodeSerializationSettings SerializationSettings = new(); public void Dispose() { @@ -26,11 +20,9 @@ public Resource Read() settings.Converters.Add(new LSJResourceConverter(SerializationSettings)); var serializer = JsonSerializer.Create(settings); - using (var streamReader = new StreamReader(stream)) - using (this.reader = new JsonTextReader(streamReader)) - { - return serializer.Deserialize(this.reader); - } + using var streamReader = new StreamReader(stream); + using var reader = new JsonTextReader(streamReader); + return serializer.Deserialize(reader); } } } diff --git a/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs b/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs index bcd1ebde..d3739b82 100644 --- a/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs +++ 
b/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs @@ -4,20 +4,13 @@ using System.Text.RegularExpressions; using System.Numerics; using System.Collections.Generic; -using LSLib.LS.Story; -using System.Runtime; namespace LSLib.LS { - public class LSJResourceConverter : JsonConverter + public class LSJResourceConverter(NodeSerializationSettings settings) : JsonConverter { private LSMetadata Metadata; - private NodeSerializationSettings SerializationSettings; - - public LSJResourceConverter(NodeSerializationSettings settings) - { - SerializationSettings = settings; - } + private readonly NodeSerializationSettings SerializationSettings = settings; public override bool CanConvert(Type objectType) { @@ -163,8 +156,7 @@ private NodeAttribute ReadAttribute(JsonReader reader) { if (key == "type") { - uint type; - if (!UInt32.TryParse((string)reader.Value, out type)) + if (!UInt32.TryParse((string)reader.Value, out uint type)) { type = (uint)AttributeTypeMaps.TypeToId[(string)reader.Value]; } @@ -256,10 +248,7 @@ private NodeAttribute ReadAttribute(JsonReader reader) case NodeAttribute.DataType.DT_TranslatedString: { - if (attribute.Value == null) - { - attribute.Value = new TranslatedString(); - } + attribute.Value ??= new TranslatedString(); var ts = (TranslatedString)attribute.Value; ts.Value = reader.Value.ToString(); @@ -269,13 +258,10 @@ private NodeAttribute ReadAttribute(JsonReader reader) case NodeAttribute.DataType.DT_TranslatedFSString: { - if (attribute.Value == null) - { - attribute.Value = new TranslatedFSString(); - } + attribute.Value ??= new TranslatedFSString(); var fsString = (TranslatedFSString)attribute.Value; - fsString.Value = reader.Value != null ? reader.Value.ToString() : null; + fsString.Value = reader.Value?.ToString(); fsString.Handle = handle; fsString.Arguments = fsStringArguments; attribute.Value = fsString; @@ -349,19 +335,13 @@ private NodeAttribute ReadAttribute(JsonReader reader) { if (attribute.Type == NodeAttribute.DataType.DT_TranslatedString) { - if (attribute.Value == null) - { - attribute.Value = new TranslatedString(); - } + attribute.Value ??= new TranslatedString(); ((TranslatedString)attribute.Value).Handle = reader.Value.ToString(); } else if (attribute.Type == NodeAttribute.DataType.DT_TranslatedFSString) { - if (attribute.Value == null) - { - attribute.Value = new TranslatedFSString(); - } + attribute.Value ??= new TranslatedFSString(); ((TranslatedFSString)attribute.Value).Handle = reader.Value.ToString(); } @@ -373,10 +353,7 @@ private NodeAttribute ReadAttribute(JsonReader reader) } else if (key == "version") { - if (attribute.Value == null) - { - attribute.Value = new TranslatedString(); - } + attribute.Value ??= new TranslatedString(); var ts = (TranslatedString)attribute.Value; ts.Version = UInt16.Parse(reader.Value.ToString()); @@ -437,8 +414,10 @@ private Node ReadNode(JsonReader reader, Node node) } else if (reader.TokenType == JsonToken.StartObject) { - var childNode = new Node(); - childNode.Name = key; + var childNode = new Node + { + Name = key + }; ReadNode(reader, childNode); node.AppendChild(childNode); childNode.Parent = node; @@ -460,7 +439,7 @@ private Node ReadNode(JsonReader reader, Node node) private Resource ReadResource(JsonReader reader, Resource resource) { - if (resource == null) resource = new Resource(); + resource ??= new Resource(); if (!reader.Read() || reader.TokenType != JsonToken.PropertyName || !reader.Value.Equals("save")) { diff --git a/LSLib/LS/Resources/LSJ/LSJWriter.cs b/LSLib/LS/Resources/LSJ/LSJWriter.cs index 
575616d6..64ca368f 100644 --- a/LSLib/LS/Resources/LSJ/LSJWriter.cs +++ b/LSLib/LS/Resources/LSJ/LSJWriter.cs @@ -3,32 +3,26 @@ namespace LSLib.LS { - public class LSJWriter + public class LSJWriter(Stream stream) { - private Stream stream; - private JsonTextWriter writer; + private readonly Stream stream = stream; public bool PrettyPrint = false; - public NodeSerializationSettings SerializationSettings = new NodeSerializationSettings(); - - public LSJWriter(Stream stream) - { - this.stream = stream; - } + public NodeSerializationSettings SerializationSettings = new(); public void Write(Resource rsrc) { - var settings = new JsonSerializerSettings(); - settings.Formatting = Newtonsoft.Json.Formatting.Indented; + var settings = new JsonSerializerSettings + { + Formatting = Formatting.Indented + }; settings.Converters.Add(new LSJResourceConverter(SerializationSettings)); var serializer = JsonSerializer.Create(settings); - using (var streamWriter = new StreamWriter(stream)) - using (this.writer = new JsonTextWriter(streamWriter)) - { - writer.IndentChar = '\t'; - writer.Indentation = 1; - serializer.Serialize(writer, rsrc); - } + using var streamWriter = new StreamWriter(stream); + using var writer = new JsonTextWriter(streamWriter); + writer.IndentChar = '\t'; + writer.Indentation = 1; + serializer.Serialize(writer, rsrc); } } } diff --git a/LSLib/LS/Resources/LSX/LSXReader.cs b/LSLib/LS/Resources/LSX/LSXReader.cs index 22f9e5f9..c8d495f0 100644 --- a/LSLib/LS/Resources/LSX/LSXReader.cs +++ b/LSLib/LS/Resources/LSX/LSXReader.cs @@ -8,21 +8,16 @@ namespace LSLib.LS { - public class LSXReader : IDisposable + public class LSXReader(Stream stream) : IDisposable { - private Stream stream; + private Stream stream = stream; private XmlReader reader; private Resource resource; private Region currentRegion; private List stack; private int lastLine, lastColumn; private LSXVersion Version = LSXVersion.V3; - public NodeSerializationSettings SerializationSettings = new NodeSerializationSettings(); - - public LSXReader(Stream stream) - { - this.stream = stream; - } + public NodeSerializationSettings SerializationSettings = new(); public void Dispose() { @@ -55,9 +50,11 @@ private void ReadTranslatedFSString(TranslatedFSString fs) throw new InvalidFormatException(String.Format("Expected : {0}", reader.Name)); } - var arg = new TranslatedFSStringArgument(); - arg.Key = reader["key"]; - arg.Value = reader["value"]; + var arg = new TranslatedFSStringArgument + { + Key = reader["key"], + Value = reader["value"] + }; while (reader.Read() && reader.NodeType != XmlNodeType.Element); if (reader.Name != "string") @@ -88,7 +85,7 @@ private void ReadElement() { case "save": // Root element - if (stack.Count() > 0) + if (stack.Count > 0) throw new InvalidFormatException("Node was unexpected."); break; @@ -125,7 +122,7 @@ private void ReadElement() throw new InvalidFormatException("A must be located inside a region."); Node node; - if (stack.Count() == 0) + if (stack.Count == 0) { // The node is the root node of the region node = currentRegion; @@ -133,14 +130,15 @@ private void ReadElement() else { // New node under the current parent - node = new Node(); - node.Parent = stack.Last(); + node = new Node + { + Parent = stack.Last() + }; } node.Name = reader["id"]; Debug.Assert(node.Name != null); - if (node.Parent != null) - node.Parent.AppendChild(node); + node.Parent?.AppendChild(node); if (!reader.IsEmptyElement) stack.Add(node); @@ -168,10 +166,7 @@ private void ReadElement() if (attr.Type == 
NodeAttribute.DataType.DT_TranslatedString) { - if (attr.Value == null) - { - attr.Value = new TranslatedString(); - } + attr.Value ??= new TranslatedString(); var ts = ((TranslatedString)attr.Value); ts.Handle = reader["handle"]; @@ -258,7 +253,7 @@ public Resource Read() { resource = new Resource(); currentRegion = null; - stack = new List(); + stack = []; lastLine = lastColumn = 0; var resultResource = resource; diff --git a/LSLib/LS/Resources/LSX/LSXWriter.cs b/LSLib/LS/Resources/LSX/LSXWriter.cs index e765ec52..49488ae4 100644 --- a/LSLib/LS/Resources/LSX/LSXWriter.cs +++ b/LSLib/LS/Resources/LSX/LSXWriter.cs @@ -5,19 +5,14 @@ namespace LSLib.LS { - public class LSXWriter + public class LSXWriter(Stream stream) { - private Stream stream; + private readonly Stream stream = stream; private XmlWriter writer; public bool PrettyPrint = false; public LSXVersion Version = LSXVersion.V3; - public NodeSerializationSettings SerializationSettings = new NodeSerializationSettings(); - - public LSXWriter(Stream stream) - { - this.stream = stream; - } + public NodeSerializationSettings SerializationSettings = new(); public void Write(Resource rsrc) { @@ -26,9 +21,11 @@ public void Write(Resource rsrc) throw new InvalidDataException("Cannot resave a BG3 (v4.x) resource in D:OS2 (v3.x) file format, maybe you have the wrong game selected?"); } - var settings = new XmlWriterSettings(); - settings.Indent = PrettyPrint; - settings.IndentChars = "\t"; + var settings = new XmlWriterSettings + { + Indent = PrettyPrint, + IndentChars = "\t" + }; using (this.writer = XmlWriter.Create(stream, settings)) { diff --git a/LSLib/LS/Save/SavegameHelpers.cs b/LSLib/LS/Save/SavegameHelpers.cs index 58985ce5..938a8c43 100644 --- a/LSLib/LS/Save/SavegameHelpers.cs +++ b/LSLib/LS/Save/SavegameHelpers.cs @@ -10,8 +10,8 @@ namespace LSLib.LS.Save { public class SavegameHelpers : IDisposable { - private PackageReader Reader; - private Package Package; + private readonly PackageReader Reader; + private readonly Package Package; public SavegameHelpers(string path) { @@ -36,10 +36,8 @@ public Resource LoadGlobals() Stream rsrcStream = globalsInfo.MakeStream(); try { - using (var rsrcReader = new LSFReader(rsrcStream)) - { - resource = rsrcReader.Read(); - } + using var rsrcReader = new LSFReader(rsrcStream); + resource = rsrcReader.Read(); } finally { @@ -96,9 +94,11 @@ public MemoryStream ResaveStoryToGlobals(Story.Story story, ResourceConversionPa // Save globals.lsf var rewrittenStream = new MemoryStream(); - var rsrcWriter = new LSFWriter(rewrittenStream); - rsrcWriter.Version = conversionParams.LSF; - rsrcWriter.EncodeSiblingData = false; + var rsrcWriter = new LSFWriter(rewrittenStream) + { + Version = conversionParams.LSF, + EncodeSiblingData = false + }; rsrcWriter.Write(globals); rewrittenStream.Seek(0, SeekOrigin.Begin); return rewrittenStream; @@ -141,7 +141,7 @@ public void ResaveStory(Story.Story story, Game game, string path) { packageWriter.Version = conversionParams.PAKVersion; packageWriter.Compression = CompressionMethod.Zlib; - packageWriter.CompressionLevel = CompressionLevel.DefaultCompression; + packageWriter.LSCompressionLevel = LSCompressionLevel.DefaultCompression; packageWriter.Write(); } } diff --git a/LSLib/LS/Save/VariableManager.cs b/LSLib/LS/Save/VariableManager.cs index 4e91e49a..bbc3476b 100644 --- a/LSLib/LS/Save/VariableManager.cs +++ b/LSLib/LS/Save/VariableManager.cs @@ -1,19 +1,17 @@ -using OpenTK; +using OpenTK.Mathematics; using System; using System.Collections.Generic; using System.IO; 
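Aside for readers, not part of the patch: the SavegameHelpers changes above keep that class's public surface intact. A minimal sketch of reading a savegame's globals resource, assuming only the members shown in this diff (a path-taking constructor, LoadGlobals() returning a Resource, and IDisposable); the save path is hypothetical and using directives for System, LSLib.LS and LSLib.LS.Save are omitted.

    using var helpers = new SavegameHelpers(@"C:\Saves\Sample.lsv");   // hypothetical save path
    Resource globals = helpers.LoadGlobals();
    Console.WriteLine($"Save engine version: {globals.Metadata.MajorVersion}.{globals.Metadata.MinorVersion}");
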
-using System.Linq; using System.Runtime.InteropServices; using System.Text; -using System.Threading.Tasks; namespace LSLib.LS.Save { public class OsirisVariableHelper { private Int32 NumericStringId; - private Dictionary IdentifierToKey = new Dictionary(); - private Dictionary KeyToIdentifier = new Dictionary(); + private Dictionary IdentifierToKey = []; + private Dictionary KeyToIdentifier = []; public void Load(Node helper) { @@ -41,14 +39,14 @@ public string GetName(Int32 variableIndex) abstract public class VariableHolder { - protected List Values = new List(); - private List Remaps = new List(); + protected List Values = []; + private List Remaps = []; public TValue GetRaw(int index) { if (index == 0) { - return default(TValue); + return default; } var valueSlot = Remaps[index - 1]; @@ -64,13 +62,11 @@ public void Load(Node variableList) Remaps.Clear(); Remaps.Capacity = remaps.Length / 2; - using (var ms = new MemoryStream(remaps)) - using (var reader = new BinaryReader(ms)) + using var ms = new MemoryStream(remaps); + using var reader = new BinaryReader(ms); + for (var i = 0; i < remaps.Length / 2; i++) { - for (var i = 0; i < remaps.Length/2; i++) - { - Remaps.Add(reader.ReadUInt16()); - } + Remaps.Add(reader.ReadUInt16()); } } @@ -100,13 +96,11 @@ override protected void LoadVariables(Node variableList) Values.Clear(); Values.Capacity = numVars; - using (var ms = new MemoryStream(variables)) - using (var reader = new BinaryReader(ms)) + using var ms = new MemoryStream(variables); + using var reader = new BinaryReader(ms); + for (var i = 0; i < numVars; i++) { - for (var i = 0; i < numVars; i++) - { - Values.Add(reader.ReadInt32()); - } + Values.Add(reader.ReadInt32()); } } } @@ -134,13 +128,11 @@ override protected void LoadVariables(Node variableList) Values.Clear(); Values.Capacity = numVars; - using (var ms = new MemoryStream(variables)) - using (var reader = new BinaryReader(ms)) + using var ms = new MemoryStream(variables); + using var reader = new BinaryReader(ms); + for (var i = 0; i < numVars; i++) { - for (var i = 0; i < numVars; i++) - { - Values.Add(reader.ReadInt64()); - } + Values.Add(reader.ReadInt64()); } } } @@ -169,13 +161,11 @@ override protected void LoadVariables(Node variableList) Values.Clear(); Values.Capacity = numVars; - using (var ms = new MemoryStream(variables)) - using (var reader = new BinaryReader(ms)) + using var ms = new MemoryStream(variables); + using var reader = new BinaryReader(ms); + for (var i = 0; i < numVars; i++) { - for (var i = 0; i < numVars; i++) - { - Values.Add(reader.ReadSingle()); - } + Values.Add(reader.ReadSingle()); } } } @@ -199,21 +189,19 @@ override protected void LoadVariables(Node variableList) { var variables = (byte[])variableList.Attributes["Variables"].Value; - using (var ms = new MemoryStream(variables)) - using (var reader = new BinaryReader(ms)) - { - var numVars = reader.ReadInt32(); + using var ms = new MemoryStream(variables); + using var reader = new BinaryReader(ms); + var numVars = reader.ReadInt32(); - Values.Clear(); - Values.Capacity = numVars; + Values.Clear(); + Values.Capacity = numVars; - for (var i = 0; i < numVars; i++) - { - var length = reader.ReadUInt16(); - var bytes = reader.ReadBytes(length); - var str = Encoding.UTF8.GetString(bytes); - Values.Add(str); - } + for (var i = 0; i < numVars; i++) + { + var length = reader.ReadUInt16(); + var bytes = reader.ReadBytes(length); + var str = Encoding.UTF8.GetString(bytes); + Values.Add(str); } } } @@ -242,19 +230,17 @@ override protected void 
LoadVariables(Node variableList) Values.Clear(); Values.Capacity = numVars; - using (var ms = new MemoryStream(variables)) - using (var reader = new BinaryReader(ms)) + using var ms = new MemoryStream(variables); + using var reader = new BinaryReader(ms); + for (var i = 0; i < numVars; i++) { - for (var i = 0; i < numVars; i++) + Vector3 vec = new() { - Vector3 vec = new Vector3 - { - X = reader.ReadSingle(), - Y = reader.ReadSingle(), - Z = reader.ReadSingle() - }; - Values.Add(vec); - } + X = reader.ReadSingle(), + Y = reader.ReadSingle(), + Z = reader.ReadSingle() + }; + Values.Add(vec); } } } @@ -305,28 +291,22 @@ public VariableType ValueType } }; - public class VariableManager + public class VariableManager(OsirisVariableHelper variableHelper) { - private readonly OsirisVariableHelper VariableHelper; - private readonly Dictionary Keys = new Dictionary(); - private readonly IntVariableHolder IntList = new IntVariableHolder(); - private readonly Int64VariableHolder Int64List = new Int64VariableHolder(); - private readonly FloatVariableHolder FloatList = new FloatVariableHolder(); - private readonly StringVariableHolder StringList = new StringVariableHolder(); - private readonly StringVariableHolder FixedStringList = new StringVariableHolder(); - private readonly Float3VariableHolder Float3List = new Float3VariableHolder(); - - public VariableManager(OsirisVariableHelper variableHelper) - { - VariableHelper = variableHelper; - } + private readonly Dictionary Keys = []; + private readonly IntVariableHolder IntList = new(); + private readonly Int64VariableHolder Int64List = new(); + private readonly FloatVariableHolder FloatList = new(); + private readonly StringVariableHolder StringList = new(); + private readonly StringVariableHolder FixedStringList = new(); + private readonly Float3VariableHolder Float3List = new(); public Dictionary GetAll(bool includeDeleted = false) { var variables = new Dictionary(); foreach (var key in Keys.Values) { - var name = VariableHelper.GetName((int)key.NameIndex); + var name = variableHelper.GetName((int)key.NameIndex); var value = includeDeleted ? 
GetRaw(key.ValueType, key.ValueIndex) : Get(key.ValueType, key.ValueIndex); if (value != null) { @@ -339,59 +319,57 @@ public Dictionary GetAll(bool includeDeleted = false) public object Get(string name) { - var index = VariableHelper.GetKey(name); + var index = variableHelper.GetKey(name); var key = Keys[index]; return Get(key.ValueType, key.ValueIndex); } private object Get(VariableType type, int index) { - switch (type) + return type switch { - case VariableType.Int: return IntList.Get(index); - case VariableType.Int64: return Int64List.Get(index); - case VariableType.Float: return FloatList.Get(index); - case VariableType.String: return StringList.Get(index); - case VariableType.FixedString: return FixedStringList.Get(index); - case VariableType.Float3: return Float3List.Get(index); - default: throw new ArgumentException("Unsupported variable type"); - } + VariableType.Int => IntList.Get(index), + VariableType.Int64 => Int64List.Get(index), + VariableType.Float => FloatList.Get(index), + VariableType.String => StringList.Get(index), + VariableType.FixedString => FixedStringList.Get(index), + VariableType.Float3 => Float3List.Get(index), + _ => throw new ArgumentException("Unsupported variable type"), + }; } public object GetRaw(string name) { - var index = VariableHelper.GetKey(name); + var index = variableHelper.GetKey(name); var key = Keys[index]; return GetRaw(key.ValueType, key.ValueIndex); } private object GetRaw(VariableType type, int index) { - switch (type) + return type switch { - case VariableType.Int: return IntList.GetRaw(index); - case VariableType.Int64: return Int64List.GetRaw(index); - case VariableType.Float: return FloatList.GetRaw(index); - case VariableType.String: return StringList.GetRaw(index); - case VariableType.FixedString: return FixedStringList.GetRaw(index); - case VariableType.Float3: return Float3List.GetRaw(index); - default: throw new ArgumentException("Unsupported variable type"); - } + VariableType.Int => IntList.GetRaw(index), + VariableType.Int64 => Int64List.GetRaw(index), + VariableType.Float => FloatList.GetRaw(index), + VariableType.String => StringList.GetRaw(index), + VariableType.FixedString => FixedStringList.GetRaw(index), + VariableType.Float3 => Float3List.GetRaw(index), + _ => throw new ArgumentException("Unsupported variable type"), + }; } private void LoadKeys(byte[] handleList) { Keys.Clear(); - using (var ms = new MemoryStream(handleList)) - using (var reader = new BinaryReader(ms)) + using var ms = new MemoryStream(handleList); + using var reader = new BinaryReader(ms); + var numHandles = reader.ReadInt32(); + for (var i = 0; i < numHandles; i++) { - var numHandles = reader.ReadInt32(); - for (var i = 0; i < numHandles; i++) - { - var entry = BinUtils.ReadStruct(reader); - Keys.Add((int)entry.NameIndex, entry); - } + var entry = BinUtils.ReadStruct(reader); + Keys.Add((int)entry.NameIndex, entry); } } diff --git a/LSLib/LS/Stats/ShiftReduceParser.cs b/LSLib/LS/Stats/ShiftReduceParser.cs new file mode 100644 index 00000000..f6aa09cf --- /dev/null +++ b/LSLib/LS/Stats/ShiftReduceParser.cs @@ -0,0 +1,923 @@ +// Gardens Point Parser Generator +// Copyright (c) Wayne Kelly, QUT 2005-2014 +// (see accompanying GPPGcopyright.rtf) + +using System; +using System.Text; +using System.Globalization; +using System.Collections.Generic; +using System.Runtime.Serialization; +using System.Diagnostics.CodeAnalysis; + +namespace QUT.Gppg +{ + /// + /// Abstract class for GPPG shift-reduce parsers. 
+ /// Parsers generated by GPPG derive from this base + /// class, overriding the abstract Initialize() and + /// DoAction() methods. + /// + /// Semantic value type + /// Location type +#if EXPORT_GPPG + public abstract class ShiftReduceParser +#else + internal abstract class ShiftReduceParser +#endif + where TSpan : IMerge, new() + { + private AbstractScanner scanner; + /// + /// The abstract scanner for this parser. + /// + protected AbstractScanner Scanner + { + get { return scanner; } + set { scanner = value; } + } + + /// + /// Constructor for base class + /// + /// Scanner instance for this parser + protected ShiftReduceParser(AbstractScanner scanner) + { + this.scanner = scanner; + } + + // ============================================================== + // TECHNICAL EXPLANATION. + // Why the next two fields are not exposed via properties. + // ============================================================== + // These fields are of the generic parameter types, and are + // frequently instantiated as struct types in derived classes. + // Semantic actions are defined in the derived classes and refer + // to instance fields of these structs. In such cases the code + // "get_CurrentSemanticValue().myField = blah;" will fail since + // the getter pushes the value of the field, not the reference. + // So, in the presence of properties, gppg would need to encode + // such field accesses as ... + // "tmp = get_CurrentSemanticValue(); // Fetch value + // tmp.myField = blah; // update + // set_CurrentSemanticValue(tmp); " // Write update back. + // There is no issue if TValue is restricted to be a ref type. + // The same explanation applies to scanner.yylval. + // ============================================================== + /// + /// The current value of the "$$" symbolic variable in the parser + /// + [SuppressMessage("Microsoft.Design", "CA1051:DoNotDeclareVisibleInstanceFields")] + protected TValue CurrentSemanticValue; + + /// + /// The current value of the "@$" symbolic variable in the parser + /// + [SuppressMessage("Microsoft.Design", "CA1051:DoNotDeclareVisibleInstanceFields")] + protected TSpan CurrentLocationSpan; + protected int NextToken; + + private TSpan LastSpan; + private State FsaState; + private bool recovering; + private int tokensSinceLastError; + + private PushdownPrefixState StateStack = new PushdownPrefixState(); + private PushdownPrefixState valueStack = new PushdownPrefixState(); + private PushdownPrefixState locationStack = new PushdownPrefixState(); + + /// + /// The stack of semantic value (YYSTYPE) values. + /// + protected PushdownPrefixState ValueStack { get { return valueStack; } } + + /// + /// The stack of location value (YYLTYPE) varlues. + /// + protected PushdownPrefixState LocationStack { get { return locationStack; } } + + private int errorToken; + private int endOfFileToken; + private string[] nonTerminals; + private State[] states; + private Rule[] rules; + + /// + /// Initialization method to allow derived classes + /// to insert the rule list into this base class. + /// + /// The array of Rule objects + protected void InitRules(Rule[] rules) { this.rules = rules; } + + /// + /// Initialization method to allow derived classes to + /// insert the states table into this base class. 
+ /// + /// The pre-initialized states table + protected void InitStates(State[] states) { this.states = states; } + + /// + /// OBSOLETE FOR VERSION 1.4.0 + /// + /// + protected void InitStateTable(int size) { states = new State[size]; } + + /// + /// Initialization method to allow derived classes + /// to insert the special value for the error and EOF tokens. + /// + /// The error state ordinal + /// The EOF stat ordinal + protected void InitSpecialTokens(int err, int end) + { + errorToken = err; + endOfFileToken = end; + } + + /// + /// Initialization method to allow derived classes to + /// insert the non-terminal symbol names into this base class. + /// + /// Non-terminal symbol names + protected void InitNonTerminals(string[] names) { nonTerminals = names; } + + #region YYAbort, YYAccept etcetera. + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1064:ExceptionsShouldBePublic")] + // Reason for FxCop message suppression - + // This exception cannot escape from the local context + private class AcceptException : Exception + { + internal AcceptException() { } + } + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1064:ExceptionsShouldBePublic")] + // Reason for FxCop message suppression - + // This exception cannot escape from the local context + private class AbortException : Exception + { + internal AbortException() { } + } + [Serializable] + [SuppressMessage("Microsoft.Design", "CA1064:ExceptionsShouldBePublic")] + // Reason for FxCop message suppression - + // This exception cannot escape from the local context + private class ErrorException : Exception + { + internal ErrorException() { } + } + + // The following methods are only called from within + // a semantic action. The thrown exceptions can never + // propagate outside the ShiftReduceParser class in + // which they are nested. + + /// + /// Force parser to terminate, returning "true" + /// + protected static void YYAccept() { throw new AcceptException(); } + + /// + /// Force parser to terminate, returning "false" + /// + protected static void YYAbort() { throw new AbortException(); } + + /// + /// Force parser to terminate, returning + /// "false" if error recovery fails. + /// + protected static void YYError() { throw new ErrorException(); } + + /// + /// Check if parser in error recovery state. + /// + protected bool YYRecovering { get { return recovering; } } + #endregion + + /// + /// Abstract base method. ShiftReduceParser calls this + /// to initialize the base class data structures. Concrete + /// parser classes must override this method. + /// + protected abstract void Initialize(); + + /// + /// Main entry point of the Shift-Reduce Parser. + /// + /// True if parse succeeds, else false for + /// unrecoverable errors + public bool Parse() + { + Initialize(); // allow derived classes to instantiate rules, states and nonTerminals + + NextToken = 0; + FsaState = states[0]; + + StateStack.Push(FsaState); + valueStack.Push(CurrentSemanticValue); + LocationStack.Push(CurrentLocationSpan); + + while (true) + { +#if TRACE_ACTIONS + Console.Error.WriteLine("Entering state {0} ", FsaState.number); + DisplayStack(); +#endif + int action = FsaState.defaultAction; + + if (FsaState.ParserTable != null) + { + if (NextToken == 0) + { + // We save the last token span, so that the location span + // of production right hand sides that begin or end with a + // nullable production will be correct. 
+ LastSpan = scanner.yylloc; + NextToken = scanner.yylex(); +#if TRACE_ACTIONS + Console.Error.WriteLine( "Reading: Next token is {0}", TerminalToString( NextToken ) ); +#endif + } +#if TRACE_ACTIONS + else + Console.Error.WriteLine( "Next token is still {0}", TerminalToString( NextToken ) ); +#endif + if (FsaState.ParserTable.ContainsKey(NextToken)) + action = FsaState.ParserTable[NextToken]; + } + + if (action > 0) // shift + { + Shift(action); + } + else if (action < 0) // reduce + { + try + { + Reduce(-action); + if (action == -1) // accept + return true; + } + catch (Exception x) + { + if (x is AbortException) + return false; + else if (x is AcceptException) + return true; + else if (x is ErrorException && !ErrorRecovery()) + return false; + else + throw; // Rethrow x, preserving information. + + } + } + else if (action == 0) // error + if (!ErrorRecovery()) + return false; + } + } + + private void Shift(int stateIndex) + { +#if TRACE_ACTIONS + Console.Error.Write("Shifting token {0}, ", TerminalToString(NextToken)); +#endif + FsaState = states[stateIndex]; + + valueStack.Push(scanner.yylval); + StateStack.Push(FsaState); + LocationStack.Push(scanner.yylloc); + + if (recovering) + { + if (NextToken != errorToken) + tokensSinceLastError++; + + if (tokensSinceLastError > 5) + recovering = false; + } + + if (NextToken != endOfFileToken) + NextToken = 0; + } + + private void Reduce(int ruleNumber) + { +#if TRACE_ACTIONS + DisplayRule(ruleNumber); +#endif + Rule rule = rules[ruleNumber]; + int rhLen = rule.RightHandSide.Length; + // + // Default actions for unit productions. + // + if (rhLen == 1) + { + CurrentSemanticValue = valueStack.TopElement(); // Default action: $$ = $1; + CurrentLocationSpan = LocationStack.TopElement(); // Default action "@$ = @1; + } + else if (rhLen == 0) + { + // Create a new blank value. + // Explicit semantic action may mutate this value + CurrentSemanticValue = default(TValue); + // The location span for an empty production will start with the + // beginning of the next lexeme, and end with the finish of the + // previous lexeme. This gives the correct behaviour when this + // nonsense value is used in later Merge operations. + CurrentLocationSpan = (scanner.yylloc != null && LastSpan != null ? + scanner.yylloc.Merge(LastSpan) : + default(TSpan)); + } + else + { + // Default action: $$ = $1; + CurrentSemanticValue = valueStack[LocationStack.Depth - rhLen]; + // Default action "@$ = @1.Merge(@N)" for location info. + TSpan at1 = LocationStack[LocationStack.Depth - rhLen]; + TSpan atN = LocationStack[LocationStack.Depth - 1]; + CurrentLocationSpan = + ((at1 != null && atN != null) ? at1.Merge(atN) : default(TSpan)); + } + + DoAction(ruleNumber); + + for (int i = 0; i < rule.RightHandSide.Length; i++) + { + StateStack.Pop(); + valueStack.Pop(); + LocationStack.Pop(); + } + FsaState = StateStack.TopElement(); + + if (FsaState.Goto.ContainsKey(rule.LeftHandSide)) + FsaState = states[FsaState.Goto[rule.LeftHandSide]]; + + StateStack.Push(FsaState); + valueStack.Push(CurrentSemanticValue); + LocationStack.Push(CurrentLocationSpan); + } + + /// + /// Execute the selected action from array. + /// Must be overriden in derived classes. 
+ /// + /// Index of the action to perform + protected abstract void DoAction(int actionNumber); + + private bool ErrorRecovery() + { + bool discard; + + if (!recovering) // if not recovering from previous error + ReportError(); + + if (!FindErrorRecoveryState()) + return false; + // + // The interim fix for the "looping in error recovery" + // artifact involved moving the setting of the recovering + // bool until after invalid tokens have been discarded. + // + ShiftErrorToken(); + discard = DiscardInvalidTokens(); + recovering = true; + tokensSinceLastError = 0; + return discard; + } + + private void ReportError() + { + StringBuilder errorMsg = new StringBuilder(); + errorMsg.AppendFormat("Syntax error, unexpected {0}", TerminalToString(NextToken)); + + if (FsaState.ParserTable.Count < 7) + { + bool first = true; + foreach (int terminal in FsaState.ParserTable.Keys) + { + if (first) + errorMsg.Append(", expecting "); + else + errorMsg.Append(", or "); + + errorMsg.Append(TerminalToString(terminal)); + first = false; + } + } + scanner.yyerror(errorMsg.ToString()); + } + + private void ShiftErrorToken() + { + int old_next = NextToken; + NextToken = errorToken; + + Shift(FsaState.ParserTable[NextToken]); + +#if TRACE_ACTIONS + Console.Error.WriteLine("Entering state {0} ", FsaState.number); +#endif + NextToken = old_next; + } + + private bool FindErrorRecoveryState() + { + while (true) // pop states until one found that accepts error token + { + if (FsaState.ParserTable != null && + FsaState.ParserTable.ContainsKey(errorToken) && + FsaState.ParserTable[errorToken] > 0) // shift + return true; + +#if TRACE_ACTIONS + Console.Error.WriteLine("Error: popping state {0}", StateStack.TopElement().number); +#endif + StateStack.Pop(); + valueStack.Pop(); + LocationStack.Pop(); + +#if TRACE_ACTIONS + DisplayStack(); +#endif + if (StateStack.IsEmpty()) + { +#if TRACE_ACTIONS + Console.Error.WriteLine("Aborting: didn't find a state that accepts error token"); +#endif + return false; + } + else + FsaState = StateStack.TopElement(); + } + } + + private bool DiscardInvalidTokens() + { + + int action = FsaState.defaultAction; + + if (FsaState.ParserTable != null) + { + // Discard tokens until find one that works ... + while (true) + { + if (NextToken == 0) + { +#if TRACE_ACTIONS + Console.Error.Write("Reading a token: "); +#endif + NextToken = scanner.yylex(); + } +#if TRACE_ACTIONS + Console.Error.WriteLine("Next token is {0}", TerminalToString(NextToken)); +#endif + if (NextToken == endOfFileToken) + return false; + + if (FsaState.ParserTable.ContainsKey(NextToken)) + action = FsaState.ParserTable[NextToken]; + + if (action != 0) + return true; + else + { +#if TRACE_ACTIONS + Console.Error.WriteLine("Error: Discarding {0}", TerminalToString(NextToken)); +#endif + NextToken = 0; + } + } + } + else if (recovering && tokensSinceLastError == 0) + { + // + // Boolean recovering is not set until after the first + // error token has been shifted. Thus if we get back + // here with recovering set and no tokens read we are + // looping on the same error recovery action. This + // happens if current_state.ParserTable is null because + // the state has an LR(0) reduction, but not all + // lookahead tokens are valid. This only occurs for + // error productions that *end* on "error". + // + // This action discards tokens one at a time until + // the looping stops. 
Another attack would be to always + // use the LALR(1) table if a production ends on "error" + // +#if TRACE_ACTIONS + Console.Error.WriteLine("Error: panic discard of {0}", TerminalToString(NextToken)); +#endif + if (NextToken == endOfFileToken) + return false; + NextToken = 0; + return true; + } + else + return true; + } + + /// + /// Traditional YACC method. Discards the next input token. + /// + [SuppressMessage("Microsoft.Naming", "CA1709:IdentifiersShouldBeCasedCorrectly", MessageId = "yyclearin")] + [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "yyclearin")] + // Reason for FxCop message suppression - + // This is a traditional name for YACC-like functionality + protected void yyclearin() { NextToken = 0; } + + /// + /// Tradional YACC method. Clear the "recovering" flag. + /// + [SuppressMessage("Microsoft.Naming", "CA1709:IdentifiersShouldBeCasedCorrectly", MessageId = "yyerrok")] + [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "yyerrok")] + // Reason for FxCop message suppression - + // This is a traditional name for YACC-like functionality + protected void yyerrok() { recovering = false; } + + /// + /// OBSOLETE FOR VERSION 1.4.0 + /// Method used by derived types to insert new + /// state instances in the "states" array. + /// + /// index of the state + /// data for the state + protected void AddState(int stateNumber, State state) + { + states[stateNumber] = state; + state.number = stateNumber; + } + + private void DisplayStack() + { + Console.Error.Write("State stack is now:"); + for (int i = 0; i < StateStack.Depth; i++) + Console.Error.Write(" {0}", StateStack[i].number); + Console.Error.WriteLine(); + } + + private void DisplayRule(int ruleNumber) + { + Console.Error.Write("Reducing stack by rule {0}, ", ruleNumber); + DisplayProduction(rules[ruleNumber]); + } + + private void DisplayProduction(Rule rule) + { + if (rule.RightHandSide.Length == 0) + Console.Error.Write("/* empty */ "); + else + foreach (int symbol in rule.RightHandSide) + Console.Error.Write("{0} ", SymbolToString(symbol)); + + Console.Error.WriteLine("-> {0}", SymbolToString(rule.LeftHandSide)); + } + + /// + /// Abstract state class naming terminal symbols. + /// This is overridden by derived classes with the + /// name (or alias) to be used in error messages. + /// + /// The terminal ordinal + /// + protected abstract string TerminalToString(int terminal); + + private string SymbolToString(int symbol) + { + if (symbol < 0) + return nonTerminals[-symbol - 1]; + else + return TerminalToString(symbol); + } + + /// + /// Return text representation of argument character + /// + /// The character to convert + /// String representation of the character + protected static string CharToString(char input) + { + switch (input) + { + case '\a': return @"'\a'"; + case '\b': return @"'\b'"; + case '\f': return @"'\f'"; + case '\n': return @"'\n'"; + case '\r': return @"'\r'"; + case '\t': return @"'\t'"; + case '\v': return @"'\v'"; + case '\0': return @"'\0'"; + default: return string.Format(CultureInfo.InvariantCulture, "'{0}'", input); + } + } + } + + /// + /// Classes implementing this interface must supply a + /// method that merges two location objects to return + /// a new object of the same type. + /// GPPG-generated parsers have the default location + /// action equivalent to "@$ = @1.Merge(@N);" where N + /// is the right-hand-side length of the production. 
+ /// + /// The Location type +#if EXPORT_GPPG + public interface IMerge +#else + internal interface IMerge +#endif + { + /// + /// Interface method that creates a location object from + /// the current and last object. Typically used to create + /// a location object extending from the start of the @1 + /// object to the end of the @N object. + /// + /// The lexically last object to merge + /// The merged location object + TSpan Merge(TSpan last); + } + + /// + /// This is the default class that carries location + /// information from the scanner to the parser. + /// If you don't declare "%YYLTYPE Foo" the parser + /// will expect to deal with this type. + /// +#if EXPORT_GPPG + public class LexLocation : IMerge +#else + [SuppressMessage( "Microsoft.Performance", "CA1812:AvoidUninstantiatedInternalClasses" )] + internal class LexLocation : IMerge +#endif + { + private int startLine; // start line + private int startColumn; // start column + private int endLine; // end line + private int endColumn; // end column + + /// + /// The line at which the text span starts. + /// + public int StartLine { get { return startLine; } } + + /// + /// The column at which the text span starts. + /// + public int StartColumn { get { return startColumn; } } + + /// + /// The line on which the text span ends. + /// + public int EndLine { get { return endLine; } } + + /// + /// The column of the first character + /// beyond the end of the text span. + /// + public int EndColumn { get { return endColumn; } } + + /// + /// Default no-arg constructor. + /// + public LexLocation() { } + + /// + /// Constructor for text-span with given start and end. + /// + /// start line + /// start column + /// end line + /// end column + public LexLocation(int sl, int sc, int el, int ec) { startLine = sl; startColumn = sc; endLine = el; endColumn = ec; } + + /// + /// Create a text location which spans from the + /// start of "this" to the end of the argument "last" + /// + /// The last location in the result span + /// The merged span + public LexLocation Merge(LexLocation last) { return new LexLocation(this.startLine, this.startColumn, last.endLine, last.endColumn); } + } + + /// + /// Abstract scanner class that GPPG expects its scanners to + /// extend. + /// + /// Semantic value type YYSTYPE + /// Source location type YYLTYPE +#if EXPORT_GPPG + public abstract class AbstractScanner +#else + internal abstract class AbstractScanner +#endif + where TSpan : IMerge + { + /// + /// Lexical value optionally set by the scanner. The value + /// is of the %YYSTYPE type declared in the parser spec. + /// + [SuppressMessage("Microsoft.Design", "CA1051:DoNotDeclareVisibleInstanceFields")] + [SuppressMessage("Microsoft.Naming", "CA1709:IdentifiersShouldBeCasedCorrectly", MessageId = "yylval")] + [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "yylval")] + // Reason for FxCop message suppression - + // This is a traditional name for YACC-like functionality + // A field must be declared for this value of parametric type, + // since it may be instantiated by a value struct. If it were + // implemented as a property, machine generated code in derived + // types would not be able to select on the returned value. +#pragma warning disable 649 + public TValue yylval; // Lexical value: set by scanner +#pragma warning restore 649 + + /// + /// Current scanner location property. The value is of the + /// type declared by %YYLTYPE in the parser specification. 
+ /// + [SuppressMessage("Microsoft.Naming", "CA1709:IdentifiersShouldBeCasedCorrectly", MessageId = "yylloc")] + [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "yylloc")] + // Reason for FxCop message suppression - + // This is a traditional name for YACC-like functionality + public virtual TSpan yylloc + { + get { return default(TSpan); } // Empty implementation allowing + set { /* skip */ } // yylloc to be ignored entirely. + } + + /// + /// Main call point for LEX-like scanners. Returns an int + /// corresponding to the token recognized by the scanner. + /// + /// An int corresponding to the token + [SuppressMessage("Microsoft.Naming", "CA1709:IdentifiersShouldBeCasedCorrectly", MessageId = "yylex")] + [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "yylex")] + // Reason for FxCop message suppression - + // This is a traditional name for YACC-like functionality + public abstract int yylex(); + + /// + /// Traditional error reporting provided by LEX-like scanners + /// to their YACC-like clients. + /// + /// Message format string + /// Optional array of args + [SuppressMessage("Microsoft.Naming", "CA1709:IdentifiersShouldBeCasedCorrectly", MessageId = "yyerror")] + [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "yyerror")] + // Reason for FxCop message suppression - + // This is a traditional name for YACC-like functionality + public virtual void yyerror(string format, params object[] args) { } + } + + /// + /// Encapsulated state for the parser. + /// Opaque to users, visible to the tool-generated code. + /// +#if EXPORT_GPPG + public class State + { + /// + /// The number of states in the automaton. + /// + public int number; +#else + internal class State { + /// + /// The index of this state in the states array. + /// + internal int number; +#endif + internal Dictionary ParserTable; // Terminal -> ParseAction + internal Dictionary Goto; // NonTerminal -> State; + internal int defaultAction; // = 0; // ParseAction + + /// + /// State transition data for this state. Pairs of elements of the + /// goto array associate symbol ordinals with next state indices. + /// The actions array is passed to another constructor. + /// + /// The action listc + /// Next state data + public State(int[] actions, int[] goToList) + : this(actions) + { + Goto = new Dictionary(); + for (int i = 0; i < goToList.Length; i += 2) + Goto.Add(goToList[i], goToList[i + 1]); + } + + /// + /// Action data for this state. Pairs of elements of the + /// action array associate action ordinals with each of + /// those symbols that have actions in the current state. + /// + /// The action array + public State(int[] actions) + { + ParserTable = new Dictionary(); + for (int i = 0; i < actions.Length; i += 2) + ParserTable.Add(actions[i], actions[i + 1]); + } + + /// + /// Set the default action for this state. + /// + /// Ordinal of the default action + public State(int defaultAction) + { + this.defaultAction = defaultAction; + } + + /// + /// Set the default action and the state transition table. + /// + /// The default action + /// Transitions from this state + public State(int defaultAction, int[] goToList) + : this(defaultAction) + { + Goto = new Dictionary(); + for (int i = 0; i < goToList.Length; i += 2) + Goto.Add(goToList[i], goToList[i + 1]); + } + } + + /// + /// Rule representation at runtime. 
+    /// </summary>
+#if EXPORT_GPPG
+    public class Rule
+#else
+    internal class Rule
+#endif
+    {
+        internal int LeftHandSide; // symbol
+        internal int[] RightHandSide; // symbols
+
+        /// <summary>
+        /// Rule constructor. This holds the ordinal of
+        /// the left hand side symbol, and the list of
+        /// right hand side symbols, in lexical order.
+        /// </summary>
+        /// <param name="left">The LHS non-terminal</param>
+        /// <param name="right">The RHS symbols, in lexical order</param>
+        public Rule(int left, int[] right)
+        {
+            this.LeftHandSide = left;
+            this.RightHandSide = right;
+        }
+    }
+
+    /// <summary>
+    /// Stack utility for the shift-reduce parser.
+    /// GPPG parsers have three instances:
+    /// (1) The parser state stack, T = QUT.Gppg.State,
+    /// (2) The semantic value stack, T = TValue,
+    /// (3) The location stack, T = TSpan.
+    /// </summary>
+    /// <typeparam name="T"></typeparam>
+#if EXPORT_GPPG
+    public class PushdownPrefixState<T>
+#else
+    internal class PushdownPrefixState<T>
+#endif
+    {
+        // Note that we cannot use the BCL Stack class
+        // here as derived types need to index into stacks.
+        //
+        private T[] array = new T[8];
+        private int tos = 0;
+
+        /// <summary>
+        /// Indexer for values of the stack below the top.
+        /// </summary>
+        /// <param name="index">index of the element, starting from the bottom</param>
+        /// <returns>the selected element</returns>
+        public T this[int index] { get { return array[index]; } }
+
+        /// <summary>
+        /// The current depth of the stack.
+        /// </summary>
+        public int Depth { get { return tos; } }
+
+        internal void Push(T value)
+        {
+            if (tos >= array.Length)
+            {
+                T[] newarray = new T[array.Length * 2];
+                System.Array.Copy(array, newarray, tos);
+                array = newarray;
+            }
+            array[tos++] = value;
+        }
+
+        internal T Pop()
+        {
+            T rslt = array[--tos];
+            array[tos] = default(T);
+            return rslt;
+        }
+
+        internal T TopElement() { return array[tos - 1]; }
+
+        internal bool IsEmpty() { return tos == 0; }
+    }
+}
diff --git a/LSLib/LS/Stats/StatDefinitions.cs b/LSLib/LS/Stats/StatDefinitions.cs
index 51046ec8..3feddf40 100644
--- a/LSLib/LS/Stats/StatDefinitions.cs
+++ b/LSLib/LS/Stats/StatDefinitions.cs
@@ -1,26 +1,15 @@
-using LSLib.LS.Enums;
-using OpenTK;
-using System;
+using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Linq;
-using System.Runtime.InteropServices;
-using System.Xml.Linq;

 namespace LSLib.LS.Stats
 {
-    public class StatEnumeration
+    public class StatEnumeration(string name)
     {
-        public readonly string Name;
-        public readonly List<string> Values;
-        public readonly Dictionary<string, int> ValueToIndexMap;
-
-        public StatEnumeration(string name)
-        {
-            Name = name;
-            Values = new List<string>();
-            ValueToIndexMap = new Dictionary<string, int>();
-        }
+        public readonly string Name = name;
+        public readonly List<string> Values = [];
+        public readonly Dictionary<string, int> ValueToIndexMap = [];

         public void AddItem(int index, string value)
         {
@@ -32,10 +21,7 @@ public void AddItem(int index, string value)
             Values.Add(value);

             // Some vanilla enums are bogus and contain names multiple times
-            if (!ValueToIndexMap.ContainsKey(value))
-            {
-                ValueToIndexMap.Add(value, index);
-            }
+            ValueToIndexMap.TryAdd(value, index);
         }

         public void AddItem(string value)
@@ -55,29 +41,17 @@ public class StatField

         public IStatValueParser GetParser(StatValueParserFactory factory, StatDefinitionRepository definitions)
         {
-            if (parser == null)
-            {
-                parser = factory.CreateParser(this, definitions);
-            }
-
+            parser ??= factory.CreateParser(this, definitions);
             return parser;
         }
     }

-    public class StatEntryType
+    public class StatEntryType(string name, string nameProperty, string basedOnProperty)
     {
-        public readonly string Name;
-        public readonly string NameProperty;
-        public readonly string BasedOnProperty;
-        public readonly Dictionary<string, StatField> Fields;
-
-
public StatEntryType(string name, string nameProperty, string basedOnProperty) - { - Name = name; - NameProperty = nameProperty; - BasedOnProperty = basedOnProperty; - Fields = new Dictionary(); - } + public readonly string Name = name; + public readonly string NameProperty = nameProperty; + public readonly string BasedOnProperty = basedOnProperty; + public readonly Dictionary Fields = []; } public class StatFunctorArgumentType @@ -98,11 +72,11 @@ public class StatDefinitionRepository // Version of modified Enumerations.xml and StatObjectDefinitions.sod we expect public const string CustomizationsVersion = "1"; - public readonly Dictionary Enumerations = new Dictionary(); - public readonly Dictionary Types = new Dictionary(); - public readonly Dictionary Functors = new Dictionary(); - public readonly Dictionary Boosts = new Dictionary(); - public readonly Dictionary DescriptionParams = new Dictionary(); + public readonly Dictionary Enumerations = []; + public readonly Dictionary Types = []; + public readonly Dictionary Functors = []; + public readonly Dictionary Boosts = []; + public readonly Dictionary DescriptionParams = []; private StatField AddField(StatEntryType defn, string name, string typeName) { @@ -191,23 +165,23 @@ public void LoadDefinitions(Stream stream) { if (trimmed.StartsWith("modifier type ")) { - var name = trimmed.Substring(15, trimmed.Length - 16); + var name = trimmed[15..^1]; defn = new StatEntryType(name, "Name", "Using"); Types.Add(defn.Name, defn); AddField(defn, "Name", "FixedString"); var usingRef = AddField(defn, "Using", "StatReference"); - usingRef.ReferenceTypes = new List - { + usingRef.ReferenceTypes = + [ new StatReferenceConstraint { StatType = name } - }; + ]; } else if (trimmed.StartsWith("modifier \"")) { var nameEnd = trimmed.IndexOf('"', 10); - var name = trimmed.Substring(10, nameEnd - 10); + var name = trimmed[10..nameEnd]; var typeName = trimmed.Substring(nameEnd + 3, trimmed.Length - nameEnd - 4); AddField(defn, name, typeName); } @@ -240,31 +214,31 @@ public void LoadDefinitions(Stream stream) AddField(dataType, "Key", "FixedString"); AddField(dataType, "Value", "FixedString"); - AddEnumeration("ResurrectType", new List - { + AddEnumeration("ResurrectType", + [ "Living", "Guaranteed", "Construct", "Undead" - }); + ]); - AddEnumeration("SetStatusDurationType", new List - { + AddEnumeration("SetStatusDurationType", + [ "SetMinimum", "ForceSet", "Add", "Multiply" - }); + ]); - AddEnumeration("ExecuteWeaponFunctorsType", new List - { + AddEnumeration("ExecuteWeaponFunctorsType", + [ "MainHand", "OffHand", "BothHands" - }); + ]); - AddEnumeration("SpellCooldownType", new List - { + AddEnumeration("SpellCooldownType", + [ "Default", "OncePerTurn", "OncePerCombat", @@ -273,30 +247,30 @@ public void LoadDefinitions(Stream stream) "UntilShortRest", "UntilPerRestPerItem", "OncePerShortRestPerItem" - }); + ]); - AddEnumeration("SummonDuration", new List - { + AddEnumeration("SummonDuration", + [ "UntilLongRest", "Permanent" - }); + ]); - AddEnumeration("ForceFunctorOrigin", new List - { + AddEnumeration("ForceFunctorOrigin", + [ "OriginToEntity", "OriginToTarget", "TargetToEntity" - }); + ]); - AddEnumeration("ForceFunctorAggression", new List - { + AddEnumeration("ForceFunctorAggression", + [ "Aggressive", "Friendly", "Neutral" - }); + ]); - AddEnumeration("StatItemSlot", new List - { + AddEnumeration("StatItemSlot", + [ "Helmet", "Breast", "Cloak", @@ -318,45 +292,45 @@ public void LoadDefinitions(Stream stream) "VanityBoots", "MainHand", "OffHand" - 
}); + ]); - AddEnumeration("Magical", new List - { + AddEnumeration("Magical", + [ "Magical", "Nonmagical" - }); + ]); - AddEnumeration("Nonlethal", new List - { + AddEnumeration("Nonlethal", + [ "Lethal", "Nonlethal" - }); + ]); - AddEnumeration("AllEnum", new List - { + AddEnumeration("AllEnum", + [ "All" - }); + ]); - AddEnumeration("ZoneShape", new List - { + AddEnumeration("ZoneShape", + [ "Cone", "Square", - }); + ]); - AddEnumeration("SurfaceLayer", new List - { + AddEnumeration("SurfaceLayer", + [ "Ground", "Cloud", - }); + ]); - AddEnumeration("RollAdjustmentType", new List - { + AddEnumeration("RollAdjustmentType", + [ "All", "Distribute", - }); + ]); - AddEnumeration("StatsRollType", new List - { + AddEnumeration("StatsRollType", + [ "Attack", "MeleeWeaponAttack", "RangedWeaponAttack", @@ -377,10 +351,10 @@ public void LoadDefinitions(Stream stream) "RangedSpellDamage", "MeleeUnarmedDamage", "RangedUnarmedDamage", - }); + ]); - AddEnumeration("AdvantageType", new List - { + AddEnumeration("AdvantageType", + [ "AttackRoll", "AttackTarget", "SavingThrow", @@ -392,10 +366,10 @@ public void LoadDefinitions(Stream stream) "SourceDialogue", "DeathSavingThrow", "Concentration", - }); + ]); - AddEnumeration("SkillType", new List - { + AddEnumeration("SkillType", + [ "Deception", "Intimidation", "Performance", @@ -414,63 +388,63 @@ public void LoadDefinitions(Stream stream) "Medicine", "Perception", "Survival", - }); + ]); - AddEnumeration("CriticalHitType", new List - { + AddEnumeration("CriticalHitType", + [ "AttackTarget", "AttackRoll" - }); + ]); - AddEnumeration("Result", new List - { + AddEnumeration("Result", + [ "Success", "Failure" - }); + ]); - AddEnumeration("CriticalHitResult", new List - { + AddEnumeration("CriticalHitResult", + [ "Success", "Failure" - }); + ]); - AddEnumeration("CriticalHitWhen", new List - { + AddEnumeration("CriticalHitWhen", + [ "Never", "Always", "ForcedAlways" - }); + ]); - AddEnumeration("MovementSpeedType", new List - { + AddEnumeration("MovementSpeedType", + [ "Stroll", "Walk", "Run", "Sprint", - }); + ]); - AddEnumeration("DamageReductionType", new List - { + AddEnumeration("DamageReductionType", + [ "Half", "Flat", "Threshold" - }); + ]); - AddEnumeration("AttackRollAbility", new List - { + AddEnumeration("AttackRollAbility", + [ "SpellCastingAbility", "UnarmedMeleeAbility", "AttackAbility" - }); + ]); - AddEnumeration("HealingDirection", new List - { + AddEnumeration("HealingDirection", + [ "Incoming", "Outgoing" - }); + ]); - AddEnumeration("ResistanceBoostFlags", new List - { + AddEnumeration("ResistanceBoostFlags", + [ "None", "Resistant", "Immune", @@ -482,17 +456,17 @@ public void LoadDefinitions(Stream stream) "ResistantToNonMagical", "ImmuneToNonMagical", "VulnerableToNonMagical", - }); + ]); - AddEnumeration("UnlockSpellType", new List - { + AddEnumeration("UnlockSpellType", + [ "Singular", "AddChildren", "MostPowerful" - }); + ]); - AddEnumeration("ProficiencyBonusBoostType", new List - { + AddEnumeration("ProficiencyBonusBoostType", + [ "AttackRoll", "AttackTarget", "SavingThrow", @@ -503,10 +477,10 @@ public void LoadDefinitions(Stream stream) "AllSkills", "SourceDialogue", "WeaponActionDC" - }); + ]); - AddEnumeration("ResourceReplenishType", new List - { + AddEnumeration("ResourceReplenishType", + [ "Never", "Default", "Combat", @@ -514,10 +488,10 @@ public void LoadDefinitions(Stream stream) "ShortRest", "FullRest", "ExhaustedRest" - }); + ]); - AddEnumeration("AttackType", new List - { + AddEnumeration("AttackType", + [ 
"DirectHit", "MeleeWeaponAttack", "RangedWeaponAttack", @@ -527,10 +501,10 @@ public void LoadDefinitions(Stream stream) "RangedSpellAttack", "MeleeUnarmedAttack", "RangedUnarmedAttack" - }); + ]); - AddEnumeration("DealDamageWeaponDamageType", new List - { + AddEnumeration("DealDamageWeaponDamageType", + [ "MainWeaponDamageType", "OffhandWeaponDamageType", "MainMeleeWeaponDamageType", @@ -539,10 +513,10 @@ public void LoadDefinitions(Stream stream) "OffhandRangedWeaponDamageType", "SourceWeaponDamageType", "ThrownWeaponDamageType", - }); + ]); - AddEnumeration("EngineStatusType", new List - { + AddEnumeration("EngineStatusType", + [ "DYING", "HEAL", "KNOCKED_DOWN", @@ -564,11 +538,11 @@ public void LoadDefinitions(Stream stream) "EFFECT", "DEACTIVATED", "DOWNED", - }); + ]); // Add functors - AddFunctor("ApplyStatus", 1, new List { + AddFunctor("ApplyStatus", 1, [ "StatusId", "StatusId", "Chance", "Int", "Duration", "Lua", @@ -577,23 +551,23 @@ public void LoadDefinitions(Stream stream) "StatusSpecificParam3", "Int", "StatsConditions", "Conditions", "RequiresConcentration", "Boolean" - }); - AddFunctor("SurfaceChange", 1, new List { + ]); + AddFunctor("SurfaceChange", 1, [ "SurfaceChange", "Surface Change", "Chance", "Float", "Arg3", "Float", "Arg4", "Float", "Arg5", "Float" - }); - AddFunctor("Resurrect", 0, new List { + ]); + AddFunctor("Resurrect", 0, [ "Chance", "Float", "HealthPercentage", "Float", "Type", "ResurrectType" - }); - AddFunctor("Sabotage", 0, new List { + ]); + AddFunctor("Sabotage", 0, [ "Amount", "Int" - }); - AddFunctor("Summon", 1, new List { + ]); + AddFunctor("Summon", 1, [ "Template", "Guid", // Root template GUID "Duration", "SummonDurationOrInt", "AIHelper", "SpellId", @@ -604,46 +578,46 @@ public void LoadDefinitions(Stream stream) "StatusToApply3", "StatusId", "StatusToApply4", "StatusId", "Arg10", "Boolean", - }); - AddFunctor("Force", 1, new List { + ]); + AddFunctor("Force", 1, [ "Distance", "Lua", "Origin", "ForceFunctorOrigin", "Aggression", "ForceFunctorAggression", "Arg4", "Boolean", "Arg5", "Boolean", - }); - AddFunctor("Douse", 0, new List { + ]); + AddFunctor("Douse", 0, [ "Arg1", "Float", "Arg2", "Float" - }); - AddFunctor("SwapPlaces", 0, new List { + ]); + AddFunctor("SwapPlaces", 0, [ "Animation", "String", "Arg2", "Boolean", "Arg3", "Boolean" - }); - AddFunctor("Pickup", 0, new List { + ]); + AddFunctor("Pickup", 0, [ "Arg1", "String" - }); - AddFunctor("CreateSurface", 3, new List { + ]); + AddFunctor("CreateSurface", 3, [ "Radius", "Float", "Duration", "Float", "SurfaceType", "Surface Type", "IsControlledByConcentration", "Boolean", "Arg5", "Float", "Arg6", "Boolean" - }); - AddFunctor("CreateConeSurface", 3, new List { + ]); + AddFunctor("CreateConeSurface", 3, [ "Radius", "Float", "Duration", "Float", "SurfaceType", "Surface Type", "IsControlledByConcentration", "Boolean", "Arg5", "Float", "Arg6", "Boolean" - }); - AddFunctor("RemoveStatus", 1, new List { + ]); + AddFunctor("RemoveStatus", 1, [ "StatusId", "StatusIdOrGroup" - }); - AddFunctor("DealDamage", 1, new List { + ]); + AddFunctor("DealDamage", 1, [ "Damage", "Lua", "DamageType", "DamageTypeOrDealDamageWeaponDamageType", "Magical", "Magical", @@ -654,44 +628,44 @@ public void LoadDefinitions(Stream stream) "Arg8", "Boolean", "Arg9", "Boolean", "Arg10", "Boolean", - }); - AddFunctor("ExecuteWeaponFunctors", 0, new List { + ]); + AddFunctor("ExecuteWeaponFunctors", 0, [ "WeaponType", "ExecuteWeaponFunctorsType" - }); - AddFunctor("RegainHitPoints", 1, new List { + ]); + 
AddFunctor("RegainHitPoints", 1, [ "HitPoints", "Lua", "Type", "ResurrectType" - }); - AddFunctor("TeleportSource", 0, new List { + ]); + AddFunctor("TeleportSource", 0, [ "Arg1", "Boolean", "Arg2", "Boolean", - }); - AddFunctor("SetStatusDuration", 2, new List { + ]); + AddFunctor("SetStatusDuration", 2, [ "StatusId", "StatusId", "Duration", "Float", "ChangeType", "SetStatusDurationType", - }); - AddFunctor("UseSpell", 1, new List { + ]); + AddFunctor("UseSpell", 1, [ "SpellId", "SpellId", "IgnoreHasSpell", "Boolean", "IgnoreChecks", "Boolean", "Arg4", "Boolean", "SpellCastGuid", "Guid", - }); - AddFunctor("UseActionResource", 1, new List { + ]); + AddFunctor("UseActionResource", 1, [ "ActionResource", "String", // Action resource name "Amount", "String", // Float or percentage "Level", "Int", "Arg4", "Boolean" - }); - AddFunctor("UseAttack", 0, new List { + ]); + AddFunctor("UseAttack", 0, [ "IgnoreChecks", "Boolean" - }); - AddFunctor("CreateExplosion", 0, new List { + ]); + AddFunctor("CreateExplosion", 0, [ "SpellId", "SpellId" - }); - AddFunctor("BreakConcentration", 0, new List {}); - AddFunctor("ApplyEquipmentStatus", 2, new List { + ]); + AddFunctor("BreakConcentration", 0, []); + AddFunctor("ApplyEquipmentStatus", 2, [ "ItemSlot", "StatItemSlot", "StatusId", "StatusId", "Chance", "Int", @@ -701,13 +675,13 @@ public void LoadDefinitions(Stream stream) "StatusSpecificParam3", "Int", "StatsConditions", "Conditions", "RequiresConcentration", "Boolean" - }); - AddFunctor("RestoreResource", 2, new List { + ]); + AddFunctor("RestoreResource", 2, [ "ActionResource", "String", // Action resource name "Amount", "Lua", // or percentage? "Level", "Int" - }); - AddFunctor("Spawn", 1, new List { + ]); + AddFunctor("Spawn", 1, [ "TemplateId", "Guid", // Root template Guid "AiHelper", "String", // Should be SpellId, but seemingly defunct? "StatusToApply1", "StatusId", @@ -715,14 +689,14 @@ public void LoadDefinitions(Stream stream) "StatusToApply3", "StatusId", "StatusToApply4", "StatusId", "Arg7", "Boolean" - }); - AddFunctor("Stabilize", 0, new List{}); - AddFunctor("Unlock", 0, new List{}); - AddFunctor("ResetCombatTurn", 0, new List{}); - AddFunctor("RemoveAuraByChildStatus", 1, new List { + ]); + AddFunctor("Stabilize", 0, []); + AddFunctor("Unlock", 0, []); + AddFunctor("ResetCombatTurn", 0, []); + AddFunctor("RemoveAuraByChildStatus", 1, [ "StatusId", "StatusId" - }); - AddFunctor("SummonInInventory", 1, new List { + ]); + AddFunctor("SummonInInventory", 1, [ "TemplateId", "Guid", // Root template Guid "Duration", "SummonDurationOrInt", "Arg3", "Int", @@ -734,8 +708,8 @@ public void LoadDefinitions(Stream stream) "Arg9", "String", "Arg10", "String", "Arg11", "String", // etc. - }); - AddFunctor("SpawnInInventory", 1, new List { + ]); + AddFunctor("SpawnInInventory", 1, [ "TemplateId", "Guid", // Root template Guid "Arg2", "Int", "Arg3", "Boolean", @@ -744,117 +718,117 @@ public void LoadDefinitions(Stream stream) "Arg6", "String", "Arg7", "String", "Arg8", "String", // etc. 
- }); - AddFunctor("RemoveUniqueStatus", 1, new List { + ]); + AddFunctor("RemoveUniqueStatus", 1, [ "StatusId", "StatusId" - }); - AddFunctor("DisarmWeapon", 0, new List { }); - AddFunctor("DisarmAndStealWeapon", 0, new List { }); - AddFunctor("SwitchDeathType", 1, new List { + ]); + AddFunctor("DisarmWeapon", 0, []); + AddFunctor("DisarmAndStealWeapon", 0, []); + AddFunctor("SwitchDeathType", 1, [ "DeathType", "Death Type" - }); - AddFunctor("TriggerRandomCast", 2, new List { + ]); + AddFunctor("TriggerRandomCast", 2, [ "Arg1", "Int", "Arg2", "Float", "Arg3", "String", // RandomCastOutcomesID resource "Arg4", "String", // RandomCastOutcomesID resource "Arg5", "String", // RandomCastOutcomesID resource "Arg6", "String", // RandomCastOutcomesID resource - }); - AddFunctor("GainTemporaryHitPoints", 1, new List { + ]); + AddFunctor("GainTemporaryHitPoints", 1, [ "Amount", "Lua" - }); - AddFunctor("FireProjectile", 1, new List { + ]); + AddFunctor("FireProjectile", 1, [ "Arg1", "String" - }); - AddFunctor("ShortRest", 0, new List {}); - AddFunctor("CreateZone", 0, new List { + ]); + AddFunctor("ShortRest", 0, []); + AddFunctor("CreateZone", 0, [ "Shape", "ZoneShape", "Arg2", "Float", "Duration", "Float", "Arg4", "String", "Arg5", "Boolean", - }); - AddFunctor("DoTeleport", 0, new List { + ]); + AddFunctor("DoTeleport", 0, [ "Arg1", "Float" - }); - AddFunctor("RegainTemporaryHitPoints", 1, new List { + ]); + AddFunctor("RegainTemporaryHitPoints", 1, [ "Amount", "Lua" - }); - AddFunctor("RemoveStatusByLevel", 1, new List { + ]); + AddFunctor("RemoveStatusByLevel", 1, [ "StatusId", "StatusIdOrGroup", "Arg2", "Int", "Arg3", "Ability" - }); - AddFunctor("SurfaceClearLayer", 0, new List { + ]); + AddFunctor("SurfaceClearLayer", 0, [ "Layer1", "SurfaceLayer", "Layer2", "SurfaceLayer", - }); - AddFunctor("Unsummon", 0, new List { }); - AddFunctor("CreateWall", 0, new List { }); - AddFunctor("Counterspell", 0, new List { }); - AddFunctor("AdjustRoll", 1, new List { + ]); + AddFunctor("Unsummon", 0, []); + AddFunctor("CreateWall", 0, []); + AddFunctor("Counterspell", 0, []); + AddFunctor("AdjustRoll", 1, [ "Amount", "Lua", "Type", "RollAdjustmentType", "DamageType", "Damage Type", - }); - AddFunctor("SpawnExtraProjectiles", 0, new List { + ]); + AddFunctor("SpawnExtraProjectiles", 0, [ "Arg1", "String", // ProjectileTypeId - }); - AddFunctor("Kill", 0, new List { }); - AddFunctor("TutorialEvent", 0, new List { + ]); + AddFunctor("Kill", 0, []); + AddFunctor("TutorialEvent", 0, [ "Event", "Guid", - }); - AddFunctor("Drop", 0, new List { + ]); + AddFunctor("Drop", 0, [ "Arg1", "String", - }); - AddFunctor("ResetCooldowns", 1, new List { + ]); + AddFunctor("ResetCooldowns", 1, [ "Type", "SpellCooldownType", - }); - AddFunctor("SetRoll", 1, new List { + ]); + AddFunctor("SetRoll", 1, [ "Roll", "Int", "DistributionOrDamageType", "RollAdjustmentTypeOrDamageType" - }); - AddFunctor("SetDamageResistance", 1, new List { + ]); + AddFunctor("SetDamageResistance", 1, [ "DamageType", "Damage Type", - }); - AddFunctor("SetReroll", 0, new List { + ]); + AddFunctor("SetReroll", 0, [ "Roll", "Int", "Arg2", "Boolean" - }); - AddFunctor("SetAdvantage", 0, new List { }); - AddFunctor("SetDisadvantage", 0, new List { }); - AddFunctor("MaximizeRoll", 1, new List { + ]); + AddFunctor("SetAdvantage", 0, []); + AddFunctor("SetDisadvantage", 0, []); + AddFunctor("MaximizeRoll", 1, [ "DamageType", "Damage Type" - }); - AddFunctor("CameraWait", 0, new List { + ]); + AddFunctor("CameraWait", 0, [ "Arg1", "Float" - }); + ]); - 
AddDescriptionParams("DealDamage", 1, new List { + AddDescriptionParams("DealDamage", 1, [ "Damage", "Lua", "DamageType", "DamageTypeOrDealDamageWeaponDamageType", "Magical", "Magical", "Nonlethal", "Nonlethal", "Arg5", "Int", "Tooltip", "Guid", - }); - AddDescriptionParams("RegainHitPoints", 1, new List { + ]); + AddDescriptionParams("RegainHitPoints", 1, [ "HitPoints", "Lua", "Tooltip", "Guid", - }); - AddDescriptionParams("Distance", 1, new List { + ]); + AddDescriptionParams("Distance", 1, [ "Distance", "Float" - }); - AddDescriptionParams("GainTemporaryHitPoints", 1, new List { + ]); + AddDescriptionParams("GainTemporaryHitPoints", 1, [ "Amount", "Lua" - }); - AddDescriptionParams("LevelMapValue", 1, new List { + ]); + AddDescriptionParams("LevelMapValue", 1, [ "LevelMap", "String" - }); - AddDescriptionParams("ApplyStatus", 1, new List { + ]); + AddDescriptionParams("ApplyStatus", 1, [ "StatusId", "StatusId", "Chance", "Int", "Duration", "Lua", @@ -863,284 +837,284 @@ public void LoadDefinitions(Stream stream) "StatusSpecificParam3", "Int", "StatsConditions", "Conditions", "RequiresConcentration", "Boolean" - }); + ]); - AddBoost("AC", 1, new List { + AddBoost("AC", 1, [ "AC", "Int" - }); - AddBoost("Ability", 2, new List { + ]); + AddBoost("Ability", 2, [ "Ability", "Ability", "Amount", "Int", "Arg3", "Int", - }); - AddBoost("RollBonus", 2, new List { + ]); + AddBoost("RollBonus", 2, [ "RollType", "StatsRollType", "Bonus", "Lua", "Arg3", "String", - }); - AddBoost("Advantage", 1, new List { + ]); + AddBoost("Advantage", 1, [ "Type", "AdvantageType", "Arg2", "String", // Depends on type "Tag1", "String", // TagManager resource "Tag2", "String", // TagManager resource "Tag3", "String", // TagManager resource - }); - AddBoost("Disadvantage", 1, new List { + ]); + AddBoost("Disadvantage", 1, [ "Type", "AdvantageType", "Arg2", "String", // Depends on type "Tag1", "String", // TagManager resource "Tag2", "String", // TagManager resource "Tag3", "String", // TagManager resource - }); - AddBoost("ActionResource", 2, new List { + ]); + AddBoost("ActionResource", 2, [ "Resource", "String", // Action resource name "Amount", "Float", "Level", "Int", "DieType", "DieType", - }); - AddBoost("CriticalHit", 3, new List { + ]); + AddBoost("CriticalHit", 3, [ "Type", "CriticalHitType", "Result", "CriticalHitResult", "When", "CriticalHitWhen", "Arg4", "Float", - }); - AddBoost("AbilityFailedSavingThrow", 1, new List { + ]); + AddBoost("AbilityFailedSavingThrow", 1, [ "Ability", "Ability" - }); - AddBoost("Resistance", 2, new List { + ]); + AddBoost("Resistance", 2, [ "DamageType", "AllOrDamageType", "ResistanceBoostFlags", "ResistanceBoostFlags" - }); - AddBoost("WeaponDamageResistance", 1, new List { + ]); + AddBoost("WeaponDamageResistance", 1, [ "DamageType1", "Damage Type", "DamageType2", "Damage Type", "DamageType3", "Damage Type", - }); - AddBoost("ProficiencyBonusOverride", 1, new List { + ]); + AddBoost("ProficiencyBonusOverride", 1, [ "Bonus", "Lua" - }); - AddBoost("ActionResourceOverride", 2, new List { + ]); + AddBoost("ActionResourceOverride", 2, [ "Resource", "String", // Action resource name "Amount", "Float", "Level", "Int", "DieType", "DieType", - }); - AddBoost("AddProficiencyToAC", 0, new List {}); - AddBoost("JumpMaxDistanceMultiplier", 1, new List { + ]); + AddBoost("AddProficiencyToAC", 0, []); + AddBoost("JumpMaxDistanceMultiplier", 1, [ "Multiplier", "Float" - }); - AddBoost("AddProficiencyToDamage", 0, new List {}); - AddBoost("ActionResourceConsumeMultiplier", 3, new List { + 
]); + AddBoost("AddProficiencyToDamage", 0, []); + AddBoost("ActionResourceConsumeMultiplier", 3, [ "Resource", "String", // Action resource name "Multiplier", "Float", "Level", "Int", - }); - AddBoost("BlockVerbalComponent", 0, new List {}); - AddBoost("BlockSomaticComponent", 0, new List {}); - AddBoost("HalveWeaponDamage", 1, new List { + ]); + AddBoost("BlockVerbalComponent", 0, []); + AddBoost("BlockSomaticComponent", 0, []); + AddBoost("HalveWeaponDamage", 1, [ "Ability", "Ability" - }); - AddBoost("UnlockSpell", 1, new List { + ]); + AddBoost("UnlockSpell", 1, [ "SpellId", "SpellId", "Type", "UnlockSpellType", "SpellGuid", "String", // "None" or GUID or "" "Cooldown", "SpellCooldownType", "Ability", "Ability" - }); - AddBoost("SourceAdvantageOnAttack", 0, new List { + ]); + AddBoost("SourceAdvantageOnAttack", 0, [ "Arg1", "Float" - }); - AddBoost("ProficiencyBonus", 1, new List { + ]); + AddBoost("ProficiencyBonus", 1, [ "Type", "ProficiencyBonusBoostType", "Arg2", "String" - }); - AddBoost("BlockSpellCast", 0, new List { + ]); + AddBoost("BlockSpellCast", 0, [ "Arg1", "Float" - }); - AddBoost("Proficiency", 1, new List { + ]); + AddBoost("Proficiency", 1, [ "Arg1", "ProficiencyGroupFlags", "Arg2", "ProficiencyGroupFlags", "Arg3", "ProficiencyGroupFlags", - }); - AddBoost("SourceAllyAdvantageOnAttack", 0, new List {}); - AddBoost("IncreaseMaxHP", 1, new List { + ]); + AddBoost("SourceAllyAdvantageOnAttack", 0, []); + AddBoost("IncreaseMaxHP", 1, [ "Amount", "String" // Lua or % - }); - AddBoost("ActionResourceBlock", 1, new List { + ]); + AddBoost("ActionResourceBlock", 1, [ "Resource", "String", // Action resource name "Level", "Int", - }); - AddBoost("StatusImmunity", 1, new List { + ]); + AddBoost("StatusImmunity", 1, [ "StatusId", "StatusIdOrGroup", "Tag1", "String", // Tag resource name "Tag2", "String", // Tag resource name "Tag3", "String", // Tag resource name "Tag4", "String", // Tag resource name "Tag5", "String", // Tag resource name - }); - AddBoost("UseBoosts", 1, new List { + ]); + AddBoost("UseBoosts", 1, [ "Arg1", "StatsFunctors" - }); - AddBoost("CannotHarmCauseEntity", 1, new List { + ]); + AddBoost("CannotHarmCauseEntity", 1, [ "Arg1", "String" - }); - AddBoost("TemporaryHP", 1, new List { + ]); + AddBoost("TemporaryHP", 1, [ "Amount", "Lua" - }); - AddBoost("Weight", 1, new List { + ]); + AddBoost("Weight", 1, [ "Weight", "Float" - }); - AddBoost("WeightCategory", 1, new List { + ]); + AddBoost("WeightCategory", 1, [ "Category", "Int" - }); - AddBoost("FactionOverride", 1, new List { + ]); + AddBoost("FactionOverride", 1, [ "Faction", "String" // Faction resource GUID or "Source" - }); - AddBoost("ActionResourceMultiplier", 2, new List { + ]); + AddBoost("ActionResourceMultiplier", 2, [ "Resource", "String", // Action resource name "Multiplier", "Int", "Level", "Int", - }); - AddBoost("BlockRegainHP", 0, new List { + ]); + AddBoost("BlockRegainHP", 0, [ "Type", "ResurrectTypes" - }); - AddBoost("Initiative", 1, new List { + ]); + AddBoost("Initiative", 1, [ "Initiative", "Int" - }); - AddBoost("DarkvisionRange", 1, new List { + ]); + AddBoost("DarkvisionRange", 1, [ "Range", "Float" - }); - AddBoost("DarkvisionRangeMin", 1, new List { + ]); + AddBoost("DarkvisionRangeMin", 1, [ "Range", "Float" - }); - AddBoost("DarkvisionRangeOverride", 1, new List { + ]); + AddBoost("DarkvisionRangeOverride", 1, [ "Range", "Float" - }); - AddBoost("Tag", 1, new List { + ]); + AddBoost("Tag", 1, [ "Arg1", "String" // Tag resource name - }); - AddBoost("IgnoreDamageThreshold", 2, 
new List { + ]); + AddBoost("IgnoreDamageThreshold", 2, [ "DamageType", "AllOrDamageType", "Threshold", "Int" - }); - AddBoost("Skill", 2, new List { + ]); + AddBoost("Skill", 2, [ "Skill", "SkillType", "Amount", "Lua" - }); - AddBoost("WeaponDamage", 2, new List { + ]); + AddBoost("WeaponDamage", 2, [ "Amount", "Lua", "DamageType", "Damage Type", "Arg3", "Boolean" - }); - AddBoost("NullifyAbilityScore", 1, new List { + ]); + AddBoost("NullifyAbilityScore", 1, [ "Ability", "Ability" - }); - AddBoost("IgnoreFallDamage", 0, new List {}); - AddBoost("Reroll", 3, new List { + ]); + AddBoost("IgnoreFallDamage", 0, []); + AddBoost("Reroll", 3, [ "RollType", "StatsRollType", "RollBelow", "Int", "Arg3", "Boolean" - }); - AddBoost("DownedStatus", 1, new List { + ]); + AddBoost("DownedStatus", 1, [ "StatusId", "StatusId", "Arg2", "Int" - }); - AddBoost("Invulnerable", 0, new List {}); - AddBoost("WeaponEnchantment", 1, new List { + ]); + AddBoost("Invulnerable", 0, []); + AddBoost("WeaponEnchantment", 1, [ "Enchantment", "Int" - }); - AddBoost("GuaranteedChanceRollOutcome", 1, new List { + ]); + AddBoost("GuaranteedChanceRollOutcome", 1, [ "Arg1", "Boolean" - }); - AddBoost("Attribute", 1, new List { + ]); + AddBoost("Attribute", 1, [ "Flags", "AttributeFlags" - }); - AddBoost("IgnoreLeaveAttackRange", 0, new List {}); - AddBoost("GameplayLight", 2, new List { + ]); + AddBoost("IgnoreLeaveAttackRange", 0, []); + AddBoost("GameplayLight", 2, [ "Arg1", "Float", "Arg2", "Boolean", "Arg3", "Float", "Arg4", "Boolean" - }); - AddBoost("DialogueBlock", 0, new List {}); - AddBoost("DualWielding", 1, new List { + ]); + AddBoost("DialogueBlock", 0, []); + AddBoost("DualWielding", 1, [ "DW", "Boolean" - }); - AddBoost("Savant", 1, new List { + ]); + AddBoost("Savant", 1, [ "SpellSchool", "SpellSchool" - }); - AddBoost("MinimumRollResult", 2, new List { + ]); + AddBoost("MinimumRollResult", 2, [ "RollType", "StatsRollType", "MinResult", "Int" - }); - AddBoost("Lootable", 0, new List {}); - AddBoost("CharacterWeaponDamage", 1, new List { + ]); + AddBoost("Lootable", 0, []); + AddBoost("CharacterWeaponDamage", 1, [ "Amount", "Lua", "DamageType", "Damage Type" - }); - AddBoost("ProjectileDeflect", 0, new List { + ]); + AddBoost("ProjectileDeflect", 0, [ "Type1", "String", "Type2", "String", - }); - AddBoost("AbilityOverrideMinimum", 2, new List { + ]); + AddBoost("AbilityOverrideMinimum", 2, [ "Ability", "Ability", "Minimum", "Int" - }); - AddBoost("ACOverrideFormula", 2, new List { + ]); + AddBoost("ACOverrideFormula", 2, [ "AC", "Int", "Arg2", "Boolean", "Ability1", "Ability", "Ability2", "Ability", "Ability3", "Ability", - }); - AddBoost("FallDamageMultiplier", 1, new List { + ]); + AddBoost("FallDamageMultiplier", 1, [ "Multiplier", "Float" - }); - AddBoost("ActiveCharacterLight", 1, new List { + ]); + AddBoost("ActiveCharacterLight", 1, [ "Light", "String" - }); - AddBoost("Invisibility", 0, new List {}); - AddBoost("TwoWeaponFighting", 0, new List {}); - AddBoost("WeaponAttackTypeOverride", 1, new List { + ]); + AddBoost("Invisibility", 0, []); + AddBoost("TwoWeaponFighting", 0, []); + AddBoost("WeaponAttackTypeOverride", 1, [ "Type", "AttackType" - }); - AddBoost("WeaponDamageDieOverride", 1, new List { + ]); + AddBoost("WeaponDamageDieOverride", 1, [ "DamageDie", "String", // die, eg. 
1d10 - }); - AddBoost("CarryCapacityMultiplier", 1, new List { + ]); + AddBoost("CarryCapacityMultiplier", 1, [ "Multiplier", "Float" - }); - AddBoost("WeaponProperty", 1, new List { + ]); + AddBoost("WeaponProperty", 1, [ "Flags1", "WeaponFlags" - }); - AddBoost("WeaponAttackRollAbilityOverride", 1, new List { + ]); + AddBoost("WeaponAttackRollAbilityOverride", 1, [ "Ability", "AbilityOrAttackRollAbility" - }); - AddBoost("BlockTravel", 0, new List {}); - AddBoost("BlockGatherAtCamp", 0, new List {}); - AddBoost("BlockAbilityModifierDamageBonus", 0, new List {}); - AddBoost("VoicebarkBlock", 0, new List {}); - AddBoost("HiddenDuringCinematic", 0, new List {}); - AddBoost("SightRangeAdditive", 1, new List { + ]); + AddBoost("BlockTravel", 0, []); + AddBoost("BlockGatherAtCamp", 0, []); + AddBoost("BlockAbilityModifierDamageBonus", 0, []); + AddBoost("VoicebarkBlock", 0, []); + AddBoost("HiddenDuringCinematic", 0, []); + AddBoost("SightRangeAdditive", 1, [ "Range", "Float" - }); - AddBoost("SightRangeMinimum", 1, new List { + ]); + AddBoost("SightRangeMinimum", 1, [ "Range", "Float" - }); - AddBoost("SightRangeMaximum", 1, new List { + ]); + AddBoost("SightRangeMaximum", 1, [ "Range", "Float" - }); - AddBoost("SightRangeOverride", 1, new List { + ]); + AddBoost("SightRangeOverride", 1, [ "Range", "Float" - }); - AddBoost("CannotBeDisarmed", 0, new List {}); - AddBoost("MovementSpeedLimit", 1, new List { + ]); + AddBoost("CannotBeDisarmed", 0, []); + AddBoost("MovementSpeedLimit", 1, [ "Type", "MovementSpeedType" - }); - AddBoost("NonLethal", 0, new List {}); - AddBoost("UnlockSpellVariant", 1, new List { + ]); + AddBoost("NonLethal", 0, []); + AddBoost("UnlockSpellVariant", 1, [ "Modification1", "Lua", // TODO - add Modification parser? "Modification2", "Lua", "Modification3", "Lua", @@ -1156,166 +1130,166 @@ public void LoadDefinitions(Stream stream) "Modification13", "Lua", "Modification14", "Lua", "Modification15", "Lua" - }); - AddBoost("DetectDisturbancesBlock", 1, new List { + ]); + AddBoost("DetectDisturbancesBlock", 1, [ "Arg1", "Boolean" - }); - AddBoost("BlockAbilityModifierFromAC", 1, new List { + ]); + AddBoost("BlockAbilityModifierFromAC", 1, [ "Ability", "Ability" - }); - AddBoost("ScaleMultiplier", 0, new List { + ]); + AddBoost("ScaleMultiplier", 0, [ "Multiplier", "Float" - }); - AddBoost("CriticalDamageOnHit", 0, new List {}); - AddBoost("DamageReduction", 2, new List { + ]); + AddBoost("CriticalDamageOnHit", 0, []); + AddBoost("DamageReduction", 2, [ "DamageType", "AllOrDamageType", "ReductionType", "DamageReductionType", "Amount", "Lua" - }); - AddBoost("ReduceCriticalAttackThreshold", 1, new List { + ]); + AddBoost("ReduceCriticalAttackThreshold", 1, [ "Threshold", "Int", "StatusId", "StatusIdOrGroup" - }); - AddBoost("PhysicalForceRangeBonus", 1, new List { + ]); + AddBoost("PhysicalForceRangeBonus", 1, [ "Arg1", "String" - }); - AddBoost("ObjectSize", 1, new List { + ]); + AddBoost("ObjectSize", 1, [ "Size", "Int" - }); - AddBoost("ObjectSizeOverride", 1, new List { + ]); + AddBoost("ObjectSizeOverride", 1, [ "Size", "String" - }); - AddBoost("ItemReturnToOwner", 0, new List {}); - AddBoost("AiArchetypeOverride", 1, new List { + ]); + AddBoost("ItemReturnToOwner", 0, []); + AddBoost("AiArchetypeOverride", 1, [ "Archetype", "String", "Arg2", "Int" - }); - AddBoost("ExpertiseBonus", 1, new List { + ]); + AddBoost("ExpertiseBonus", 1, [ "Skill", "SkillType" - }); - AddBoost("EntityThrowDamage", 1, new List { + ]); + AddBoost("EntityThrowDamage", 1, [ "Die", "String", 
"DamageType", "Damage Type" - }); - AddBoost("WeaponDamageTypeOverride", 1, new List { + ]); + AddBoost("WeaponDamageTypeOverride", 1, [ "DamageType", "Damage Type" - }); - AddBoost("MaximizeHealing", 1, new List { + ]); + AddBoost("MaximizeHealing", 1, [ "Direction", "HealingDirection", "Type", "ResurrectType" - }); - AddBoost("IgnoreEnterAttackRange", 0, new List {}); - AddBoost("DamageBonus", 1, new List { + ]); + AddBoost("IgnoreEnterAttackRange", 0, []); + AddBoost("DamageBonus", 1, [ "Amount", "Lua", "DamageType", "Damage Type", "Arg3", "Boolean" - }); - AddBoost("Detach", 0, new List {}); - AddBoost("ConsumeItemBlock", 0, new List {}); - AddBoost("AdvanceSpells", 1, new List { + ]); + AddBoost("Detach", 0, []); + AddBoost("ConsumeItemBlock", 0, []); + AddBoost("AdvanceSpells", 1, [ "SpellId", "SpellId", "Arg2", "Int" - }); - AddBoost("SpellResistance", 1, new List { + ]); + AddBoost("SpellResistance", 1, [ "Resistance", "ResistanceBoostFlags" - }); - AddBoost("WeaponAttackRollBonus", 1, new List { + ]); + AddBoost("WeaponAttackRollBonus", 1, [ "Amount", "Lua" - }); - AddBoost("SpellSaveDC", 1, new List { + ]); + AddBoost("SpellSaveDC", 1, [ "DC", "Int" - }); - AddBoost("RedirectDamage", 1, new List { + ]); + AddBoost("RedirectDamage", 1, [ "Arg1", "Float", "DamageType", "Damage Type", "DamageType2", "Damage Type", "Arg4", "Boolean" - }); - AddBoost("CanSeeThrough", 1, new List { + ]); + AddBoost("CanSeeThrough", 1, [ "CanSeeThrough", "Boolean" - }); - AddBoost("CanShootThrough", 1, new List { + ]); + AddBoost("CanShootThrough", 1, [ "CanShootThrough", "Boolean" - }); - AddBoost("CanWalkThrough", 1, new List { + ]); + AddBoost("CanWalkThrough", 1, [ "CanWalkThrough", "Boolean" - }); - AddBoost("MonkWeaponAttackOverride", 0, new List {}); - AddBoost("MonkWeaponDamageDiceOverride", 1, new List { + ]); + AddBoost("MonkWeaponAttackOverride", 0, []); + AddBoost("MonkWeaponDamageDiceOverride", 1, [ "Arg1", "Lua" - }); - AddBoost("IntrinsicSummonerProficiency", 0, new List {}); - AddBoost("HorizontalFOVOverride", 1, new List { + ]); + AddBoost("IntrinsicSummonerProficiency", 0, []); + AddBoost("HorizontalFOVOverride", 1, [ "FOV", "Float" - }); - AddBoost("CharacterUnarmedDamage", 1, new List { + ]); + AddBoost("CharacterUnarmedDamage", 1, [ "Damage", "Lua", "DamageType", "Damage Type" - }); - AddBoost("UnarmedMagicalProperty", 0, new List {}); - AddBoost("ActionResourceReplenishTypeOverride", 2, new List { + ]); + AddBoost("UnarmedMagicalProperty", 0, []); + AddBoost("ActionResourceReplenishTypeOverride", 2, [ "ActionResource", "String", // Action resource name "ReplenishType", "ResourceReplenishType" - }); - AddBoost("AreaDamageEvade", 0, new List {}); - AddBoost("ActionResourcePreventReduction", 1, new List { + ]); + AddBoost("AreaDamageEvade", 0, []); + AddBoost("ActionResourcePreventReduction", 1, [ "ActionResource", "String", // Action resource name "Level", "Int" - }); - AddBoost("AttackSpellOverride", 1, new List { + ]); + AddBoost("AttackSpellOverride", 1, [ "AttackSpell", "SpellId", "OriginalSpell", "SpellId" - }); - AddBoost("Lock", 0, new List { + ]); + AddBoost("Lock", 0, [ "DC", "Guid" - }); - AddBoost("NoAOEDamageOnLand", 0, new List {}); - AddBoost("IgnorePointBlankDisadvantage", 1, new List { + ]); + AddBoost("NoAOEDamageOnLand", 0, []); + AddBoost("IgnorePointBlankDisadvantage", 1, [ "Flags", "WeaponFlags" - }); - AddBoost("CriticalHitExtraDice", 1, new List { + ]); + AddBoost("CriticalHitExtraDice", 1, [ "ExtraDice", "Int", "AttackType", "AttackType" - }); - 
AddBoost("DodgeAttackRoll", 2, new List { + ]); + AddBoost("DodgeAttackRoll", 2, [ "Arg1", "Int", "Arg2", "Int", "Status", "StatusIdOrGroup" - }); - AddBoost("GameplayObscurity", 1, new List { + ]); + AddBoost("GameplayObscurity", 1, [ "Obscurity", "Float" - }); - AddBoost("MaximumRollResult", 2, new List { + ]); + AddBoost("MaximumRollResult", 2, [ "RollType", "StatsRollType", "MinResult", "Int" - }); - AddBoost("UnlockInterrupt", 1, new List { + ]); + AddBoost("UnlockInterrupt", 1, [ "Interrupt", "Interrupt" - }); - AddBoost("IntrinsicSourceProficiency", 0, new List {}); - AddBoost("JumpMaxDistanceBonus", 1, new List { + ]); + AddBoost("IntrinsicSourceProficiency", 0, []); + AddBoost("JumpMaxDistanceBonus", 1, [ "Bonus", "Float" - }); - AddBoost("ArmorAbilityModifierCapOverride", 2, new List { + ]); + AddBoost("ArmorAbilityModifierCapOverride", 2, [ "ArmorType", "ArmorType", "Cap", "Int" - }); - AddBoost("IgnoreResistance", 2, new List { + ]); + AddBoost("IgnoreResistance", 2, [ "DamageType", "Damage Type", "Flags", "ResistanceBoostFlags" - }); - AddBoost("ConcentrationIgnoreDamage", 1, new List { + ]); + AddBoost("ConcentrationIgnoreDamage", 1, [ "SpellSchool", "SpellSchool" - }); - AddBoost("LeaveTriggers", 0, new List {}); - AddBoost("IgnoreLowGroundPenalty", 1, new List { + ]); + AddBoost("LeaveTriggers", 0, []); + AddBoost("IgnoreLowGroundPenalty", 1, [ "RollType", "StatsRollType" - }); - AddBoost("IgnoreSurfaceCover", 1, new List { + ]); + AddBoost("IgnoreSurfaceCover", 1, [ "SurfaceType", "String" // Surface type - }); - AddBoost("EnableBasicItemInteractions", 0, new List {}); - AddBoost("SoundsBlocked", 0, new List {}); + ]); + AddBoost("EnableBasicItemInteractions", 0, []); + AddBoost("SoundsBlocked", 0, []); } public void LoadEnumerations(Stream stream) @@ -1324,7 +1298,7 @@ public void LoadEnumerations(Stream stream) string line; - using (var reader = new StreamReader(stream)) + using var reader = new StreamReader(stream); while ((line = reader.ReadLine()) != null) { var trimmed = line.Trim(); @@ -1332,13 +1306,13 @@ public void LoadEnumerations(Stream stream) { if (trimmed.StartsWith("valuelist ")) { - var name = trimmed.Substring(11, trimmed.Length - 12); + var name = trimmed[11..^1]; curEnum = new StatEnumeration(name); Enumerations.Add(curEnum.Name, curEnum); } else if (trimmed.StartsWith("value ")) { - var label = trimmed.Substring(7, trimmed.Length - 8); + var label = trimmed[7..^1]; curEnum.AddItem(label); } } diff --git a/LSLib/LS/Stats/StatFileParser.cs b/LSLib/LS/Stats/StatFileParser.cs index 9b8cd17e..bc72c6c5 100644 --- a/LSLib/LS/Stats/StatFileParser.cs +++ b/LSLib/LS/Stats/StatFileParser.cs @@ -15,8 +15,8 @@ public class StatEntry public StatEntryType Type; public StatEntry BasedOn; public CodeLocation Location; - public Dictionary Properties = new Dictionary(); - public Dictionary PropertyLocations = new Dictionary(); + public Dictionary Properties = []; + public Dictionary PropertyLocations = []; } /// @@ -71,10 +71,10 @@ public class StatLoadingError public class StatLoadingContext { public StatDefinitionRepository Definitions; - public List Errors = new List(); - public Dictionary> DeclarationsByType = new Dictionary>(); - public Dictionary> ResolvedDeclarationsByType = new Dictionary>(); - public Dictionary> GuidResources = new Dictionary>(); + public List Errors = []; + public Dictionary> DeclarationsByType = []; + public Dictionary> ResolvedDeclarationsByType = []; + public Dictionary> GuidResources = []; public void LogError(string code, string message, 
string path = null, int line = 0, string statObjectName = null) { @@ -89,9 +89,8 @@ public void LogError(string code, string message, string path = null, int line = } } - class StatEntryReferenceResolver + class StatEntryReferenceResolver(StatLoadingContext context) { - private readonly StatLoadingContext Context; public bool AllowMappingErrors = false; private class BaseClassMapping @@ -99,11 +98,6 @@ private class BaseClassMapping public StatDeclaration Declaration; public StatDeclaration BaseClass; } - - public StatEntryReferenceResolver(StatLoadingContext context) - { - Context = context; - } public bool ResolveUsageRef( StatEntryType type,StatDeclaration declaration, @@ -112,9 +106,9 @@ public bool ResolveUsageRef( { var props = declaration.Properties; var name = (string)props[type.NameProperty]; - if (type.BasedOnProperty != null && props.ContainsKey(type.BasedOnProperty)) + if (type.BasedOnProperty != null && props.TryGetValue(type.BasedOnProperty, out object value)) { - var baseClass = (string)props[type.BasedOnProperty]; + var baseClass = (string)value; if (declarations.TryGetValue(baseClass, out StatDeclaration baseDeclaration)) { @@ -123,7 +117,7 @@ public bool ResolveUsageRef( } else { - Context.LogError(DiagnosticCode.StatBaseClassNotKnown, $"Stats entry '{name}' references nonexistent base '{baseClass}'", + context.LogError(DiagnosticCode.StatBaseClassNotKnown, $"Stats entry '{name}' references nonexistent base '{baseClass}'", declaration.Location.FileName, declaration.Location.StartLine, name); basedOn = null; return false; @@ -188,20 +182,13 @@ public Dictionary ResolveUsageRefs(StatEntryType type, } } - class StatLoaderReferenceValidator : IStatReferenceValidator + class StatLoaderReferenceValidator(StatLoadingContext ctx) : IStatReferenceValidator { - private readonly StatLoadingContext Context; - - public StatLoaderReferenceValidator(StatLoadingContext ctx) - { - Context = ctx; - } - public bool IsValidReference(string reference, string statType) { - if (Context.DeclarationsByType.TryGetValue(statType, out var stats)) + if (ctx.DeclarationsByType.TryGetValue(statType, out var stats)) { - return stats.TryGetValue(reference, out var stat); + return stats.TryGetValue(reference, out _); } return false; @@ -209,9 +196,9 @@ public bool IsValidReference(string reference, string statType) public bool IsValidGuidResource(string name, string resourceType) { - if (Context.GuidResources.TryGetValue(resourceType, out var resources)) + if (ctx.GuidResources.TryGetValue(resourceType, out var resources)) { - return resources.TryGetValue(name, out var resource); + return resources.TryGetValue(name, out _); } return false; @@ -221,8 +208,8 @@ public bool IsValidGuidResource(string name, string resourceType) public class StatLoader { private readonly StatLoadingContext Context; - private StatValueParserFactory ParserFactory; - private StatLoaderReferenceValidator ReferenceValidator; + private readonly StatValueParserFactory ParserFactory; + private readonly StatLoaderReferenceValidator ReferenceValidator; public StatLoader(StatLoadingContext ctx) { @@ -246,7 +233,7 @@ private List ParseStatStream(string path, Stream stream) return parsed ? 
parser.GetDeclarations() : null; } - private void AddDeclarations(string path, List declarations) + private void AddDeclarations(List declarations) { foreach (var declaration in declarations) { @@ -270,11 +257,10 @@ private void AddDeclarations(string path, List declarations) Context.LogError(DiagnosticCode.StatNameMissing, $"Stat entry has no '{type.NameProperty}' property", declaration.Location.FileName, declaration.Location.StartLine); continue; } - - Dictionary declarationsByType; - if (!Context.DeclarationsByType.TryGetValue(statType, out declarationsByType)) + + if (!Context.DeclarationsByType.TryGetValue(statType, out Dictionary declarationsByType)) { - declarationsByType = new Dictionary(); + declarationsByType = []; Context.DeclarationsByType[statType] = declarationsByType; } @@ -289,7 +275,7 @@ public void LoadStatsFromStream(string path, Stream stream) var stats = ParseStatStream(path, stream); if (stats != null) { - AddDeclarations(path, stats); + AddDeclarations(stats); } } @@ -336,7 +322,7 @@ private object ParseProperty(StatEntryType type, string propertyName, object val if (errorText != null) { - if (value is string && ((string)value).Length > 500) + if (value is string v && v.Length > 500) { Context.LogError(DiagnosticCode.StatPropertyValueInvalid, $"{type.Name} '{declarationName}' has invalid {propertyName}: {errorText}", location?.FileName, location?.StartLine ?? 0, declarationName); @@ -373,7 +359,7 @@ private StatEntry InstantiateEntryInternal(StatEntryType type, string declaratio Type = type, BasedOn = null, // FIXME Location = location, - Properties = new Dictionary(), + Properties = [], PropertyLocations = propertyLocations }; @@ -431,10 +417,9 @@ private void LoadGuidResources(Dictionary guidResources, XmlNode public void LoadGuidResources(XmlDocument doc, string typeName, string regionName) { - Dictionary guidResources; - if (!Context.GuidResources.TryGetValue(typeName, out guidResources)) + if (!Context.GuidResources.TryGetValue(typeName, out Dictionary guidResources)) { - guidResources = new Dictionary(); + guidResources = []; Context.GuidResources[typeName] = guidResources; } diff --git a/LSLib/LS/Stats/StatValueParsers.cs b/LSLib/LS/Stats/StatValueParsers.cs index e36503e1..fce4b975 100644 --- a/LSLib/LS/Stats/StatValueParsers.cs +++ b/LSLib/LS/Stats/StatValueParsers.cs @@ -88,14 +88,9 @@ public object Parse(string value, ref bool succeeded, ref string errorText) } } - public class EnumParser : IStatValueParser + public class EnumParser(StatEnumeration enumeration) : IStatValueParser { - private readonly StatEnumeration Enumeration; - - public EnumParser(StatEnumeration enumeration) - { - Enumeration = enumeration ?? throw new ArgumentNullException(); - } + private readonly StatEnumeration Enumeration = enumeration ?? 
throw new ArgumentNullException(); public object Parse(string value, ref bool succeeded, ref string errorText) { @@ -125,14 +120,9 @@ public object Parse(string value, ref bool succeeded, ref string errorText) } } - public class MultiValueEnumParser : IStatValueParser + public class MultiValueEnumParser(StatEnumeration enumeration) : IStatValueParser { - private readonly EnumParser Parser; - - public MultiValueEnumParser(StatEnumeration enumeration) - { - Parser = new EnumParser(enumeration); - } + private readonly EnumParser Parser = new(enumeration); public object Parse(string value, ref bool succeeded, ref string errorText) { @@ -143,9 +133,9 @@ public object Parse(string value, ref bool succeeded, ref string errorText) return true; } - foreach (var item in value.Split(new char[] { ';' })) + foreach (var item in value.Split([';'])) { - Parser.Parse(item.Trim(new char[] { ' ' }), ref succeeded, ref errorText); + Parser.Parse(item.Trim([' ']), ref succeeded, ref errorText); if (!succeeded) { errorText = $"Value '{item}' not supported; {errorText}"; @@ -199,17 +189,8 @@ public object Parse(string value, ref bool succeeded, ref string errorText) } } - public class StatReferenceParser : IStatValueParser + public class StatReferenceParser(IStatReferenceValidator validator, List constraints) : IStatValueParser { - private IStatReferenceValidator Validator; - private List Constraints; - - public StatReferenceParser(IStatReferenceValidator validator, List constraints) - { - Validator = validator; - Constraints = constraints; - } - public object Parse(string value, ref bool succeeded, ref string errorText) { if (value == "") @@ -218,38 +199,33 @@ public object Parse(string value, ref bool succeeded, ref string errorText) return value; } - foreach (var constraint in Constraints) + foreach (var constraint in constraints) { - if (Validator.IsValidReference(value, constraint.StatType)) + if (validator.IsValidReference(value, constraint.StatType)) { succeeded = true; return value; } } - var refTypes = String.Join("/", Constraints.Select(c => c.StatType)); + var refTypes = String.Join("/", constraints.Select(c => c.StatType)); errorText = $"'{value}' is not a valid {refTypes} reference"; succeeded = false; return null; } } - public class MultiValueStatReferenceParser : IStatValueParser + public class MultiValueStatReferenceParser(IStatReferenceValidator validator, List constraints) : IStatValueParser { - private readonly StatReferenceParser Parser; - - public MultiValueStatReferenceParser(IStatReferenceValidator validator, List constraints) - { - Parser = new StatReferenceParser(validator, constraints); - } + private readonly StatReferenceParser Parser = new(validator, constraints); public object Parse(string value, ref bool succeeded, ref string errorText) { succeeded = true; - foreach (var item in value.Split(new char[] { ';' })) + foreach (var item in value.Split([';'])) { - var trimmed = item.Trim(new char[] { ' ' }); + var trimmed = item.Trim([' ']); if (trimmed.Length > 0) { Parser.Parse(trimmed, ref succeeded, ref errorText); @@ -271,52 +247,37 @@ public enum ExpressionType DescriptionParams }; - public class ExpressionParser : IStatValueParser + public class ExpressionParser(String validatorType, StatDefinitionRepository definitions, + StatValueParserFactory parserFactory, ExpressionType type) : IStatValueParser { - private readonly String ValidatorType; - private readonly StatDefinitionRepository Definitions; - private readonly StatValueParserFactory ParserFactory; - private readonly 
ExpressionType ExprType; - - public ExpressionParser(String validatorType, StatDefinitionRepository definitions, - StatValueParserFactory parserFactory, ExpressionType type) - { - ValidatorType = validatorType; - Definitions = definitions; - ParserFactory = parserFactory; - ExprType = type; - } - public virtual object Parse(string value, ref bool succeeded, ref string errorText) { - var valueBytes = Encoding.UTF8.GetBytes("__TYPE_" + ValidatorType + "__ " + value.TrimEnd()); - using (var buf = new MemoryStream(valueBytes)) + var valueBytes = Encoding.UTF8.GetBytes("__TYPE_" + validatorType + "__ " + value.TrimEnd()); + using var buf = new MemoryStream(valueBytes); + List errorTexts = []; + + var scanner = new StatPropertyScanner(); + scanner.SetSource(buf); + var parser = new StatPropertyParser(scanner, definitions, parserFactory, valueBytes, type); + parser.OnError += (string message) => errorTexts.Add(message); + succeeded = parser.Parse(); + if (!succeeded) + { + var location = scanner.LastLocation(); + var column = location.StartColumn - 10 - validatorType.Length + 1; + errorText = $"Syntax error at or near character {column}"; + return null; + } + else if (errorTexts.Count > 0) { - List errorTexts = new List(); - - var scanner = new StatPropertyScanner(); - scanner.SetSource(buf); - var parser = new StatPropertyParser(scanner, Definitions, ParserFactory, valueBytes, ExprType); - parser.OnError += (string message) => errorTexts.Add(message); - succeeded = parser.Parse(); - if (!succeeded) - { - var location = scanner.LastLocation(); - var column = location.StartColumn - 10 - ValidatorType.Length + 1; - errorText = $"Syntax error at or near character {column}"; - return null; - } - else if (errorTexts.Count > 0) - { - succeeded = false; - errorText = String.Join("; ", errorTexts); - return null; - } - else - { - succeeded = true; - return parser.GetParsedObject(); - } + succeeded = false; + errorText = String.Join("; ", errorTexts); + return null; + } + else + { + succeeded = true; + return parser.GetParsedObject(); } } } @@ -327,36 +288,27 @@ public virtual object Parse(string value, ref bool succeeded, ref string errorTe { value = "BHAALS_BOON_SLAYER.Duration-1"; var valueBytes = Encoding.UTF8.GetBytes(value); - using (var buf = new MemoryStream(valueBytes)) + using var buf = new MemoryStream(valueBytes); + var scanner = new Lua.StatLuaScanner(); + scanner.SetSource(buf); + var parser = new Lua.StatLuaParser(scanner); + succeeded = parser.Parse(); + if (!succeeded) + { + var location = scanner.LastLocation(); + errorText = $"Syntax error at or near character {location.StartColumn}"; + return null; + } + else { - var scanner = new Lua.StatLuaScanner(); - scanner.SetSource(buf); - var parser = new Lua.StatLuaParser(scanner); - succeeded = parser.Parse(); - if (!succeeded) - { - var location = scanner.LastLocation(); - errorText = $"Syntax error at or near character {location.StartColumn}"; - return null; - } - else - { - succeeded = true; - return null; - } + succeeded = true; + return null; } } } - public class UseCostsParser : IStatValueParser + public class UseCostsParser(IStatReferenceValidator validator) : IStatValueParser { - private readonly IStatReferenceValidator Validator; - - public UseCostsParser(IStatReferenceValidator validator) - { - Validator = validator; - } - public virtual object Parse(string value, ref bool succeeded, ref string errorText) { if (value.Length == 0) return value; @@ -373,7 +325,7 @@ public virtual object Parse(string value, ref bool succeeded, ref 
string errorTe return null; } - if (!Validator.IsValidGuidResource(parts[0], "ActionResource") && !Validator.IsValidGuidResource(parts[0], "ActionResourceGroup")) + if (!validator.IsValidGuidResource(parts[0], "ActionResource") && !validator.IsValidGuidResource(parts[0], "ActionResourceGroup")) { errorText = $"Nonexistent action resource or action resource group: {parts[0]}"; return null; @@ -439,20 +391,13 @@ public virtual object Parse(string value, ref bool succeeded, ref string errorTe } } - public class AnyParser : IStatValueParser + public class AnyParser(IEnumerable parsers, string message = null) : IStatValueParser { - private readonly List Parsers; - private readonly String Message; - - public AnyParser(IEnumerable parsers, string message = null) - { - Parsers = parsers.ToList(); - Message = message; - } + private readonly List Parsers = parsers.ToList(); public object Parse(string value, ref bool succeeded, ref string errorText) { - List errors = new List(); + List errors = []; foreach (var parser in Parsers) { succeeded = false; @@ -468,9 +413,9 @@ public object Parse(string value, ref bool succeeded, ref string errorText) } } - if (Message != null && Message.Length > 0) + if (message != null && message.Length > 0) { - errorText = $"'{value}': {Message}"; + errorText = $"'{value}': {message}"; } else { @@ -487,18 +432,11 @@ public class AnyType public string Message; } - public class StatValueParserFactory + public class StatValueParserFactory(IStatReferenceValidator referenceValidator) { - private readonly IStatReferenceValidator ReferenceValidator; - - public StatValueParserFactory(IStatReferenceValidator referenceValidator) - { - ReferenceValidator = referenceValidator; - } - public IStatValueParser CreateReferenceParser(List constraints) { - return new StatReferenceParser(ReferenceValidator, constraints); + return new StatReferenceParser(referenceValidator, constraints); } public IStatValueParser CreateParser(StatField field, StatDefinitionRepository definitions) @@ -528,38 +466,38 @@ public IStatValueParser CreateParser(StatField field, StatDefinitionRepository d case "SpellContainerID": case "FollowUpOriginalSpell": case "RootSpellID": - return new StatReferenceParser(ReferenceValidator, new List - { + return new StatReferenceParser(referenceValidator, + [ new StatReferenceConstraint{ StatType = "SpellData" } - }); + ]); case "ContainerSpells": - return new MultiValueStatReferenceParser(ReferenceValidator, new List - { + return new MultiValueStatReferenceParser(referenceValidator, + [ new StatReferenceConstraint{ StatType = "SpellData" } - }); + ]); case "InterruptPrototype": - return new StatReferenceParser(ReferenceValidator, new List - { + return new StatReferenceParser(referenceValidator, + [ new StatReferenceConstraint{ StatType = "InterruptData" } - }); + ]); case "Passives": case "PassivesOnEquip": case "PassivesMainHand": case "PassivesOffHand": - return new MultiValueStatReferenceParser(ReferenceValidator, new List - { + return new MultiValueStatReferenceParser(referenceValidator, + [ new StatReferenceConstraint{ StatType = "PassiveData" } - }); + ]); case "StatusOnEquip": case "StatusInInventory": - return new MultiValueStatReferenceParser(ReferenceValidator, new List - { + return new MultiValueStatReferenceParser(referenceValidator, + [ new StatReferenceConstraint{ StatType = "StatusData" } - }); + ]); case "Cost": case "UseCosts": @@ -568,7 +506,7 @@ public IStatValueParser CreateParser(StatField field, StatDefinitionRepository d case "TooltipUseCosts": case 
"RitualCosts": case "HitCosts": - return new UseCostsParser(ReferenceValidator); + return new UseCostsParser(referenceValidator); case "Damage": case "VersatileDamage": @@ -636,117 +574,66 @@ public IStatValueParser CreateParser(string type, StatEnumeration enumType, List } } - switch (type) + return type switch { - case "Boolean": - return new BooleanParser(); - - case "ConstantInt": - case "Int": - return new Int32Parser(); - - case "ConstantFloat": - case "Float": - return new FloatParser(); - - case "String": - case "FixedString": - // FIXME - add TranslatedStringParser "guid;ver" - case "TranslatedString": - return new StringParser(); - - case "Guid": - return new UUIDParser(); - - case "Requirements": - return new ExpressionParser("Requirements", definitions, this, ExpressionType.Functor); - - case "StatsFunctors": - return new ExpressionParser("Properties", definitions, this, ExpressionType.Functor); - - case "Lua": - case "RollConditions": - case "TargetConditions": - case "Conditions": - return new LuaExpressionParser(); - - case "UseCosts": - return new UseCostsParser(ReferenceValidator); - - case "StatReference": - return new StatReferenceParser(ReferenceValidator, constraints); - - case "StatusId": - return new AnyParser(new List { + "Boolean" => new BooleanParser(), + "ConstantInt" or "Int" => new Int32Parser(), + "ConstantFloat" or "Float" => new FloatParser(), + "String" or "FixedString" or "TranslatedString" => new StringParser(), + "Guid" => new UUIDParser(), + "Requirements" => new ExpressionParser("Requirements", definitions, this, ExpressionType.Functor), + "StatsFunctors" => new ExpressionParser("Properties", definitions, this, ExpressionType.Functor), + "Lua" or "RollConditions" or "TargetConditions" or "Conditions" => new LuaExpressionParser(), + "UseCosts" => new UseCostsParser(referenceValidator), + "StatReference" => new StatReferenceParser(referenceValidator, constraints), + "StatusId" => new AnyParser(new List { new EnumParser(definitions.Enumerations["EngineStatusType"]), - new StatReferenceParser(ReferenceValidator, new List - { + new StatReferenceParser(referenceValidator, + [ new StatReferenceConstraint{ StatType = "StatusData" } - }) - }, "Expected a status name"); - - case "ResurrectTypes": - return new MultiValueEnumParser(definitions.Enumerations["ResurrectType"]); - - case "StatusIdOrGroup": - return new AnyParser(new List { + ]) + }, "Expected a status name"), + "ResurrectTypes" => new MultiValueEnumParser(definitions.Enumerations["ResurrectType"]), + "StatusIdOrGroup" => new AnyParser(new List { new EnumParser(definitions.Enumerations["StatusGroupFlags"]), new EnumParser(definitions.Enumerations["EngineStatusType"]), - new StatReferenceParser(ReferenceValidator, new List - { + new StatReferenceParser(referenceValidator, + [ new StatReferenceConstraint{ StatType = "StatusData" } - }) - }, "Expected a status or StatusGroup name"); - - case "SummonDurationOrInt": - return new AnyParser(new List { + ]) + }, "Expected a status or StatusGroup name"), + "SummonDurationOrInt" => new AnyParser(new List { new EnumParser(definitions.Enumerations["SummonDuration"]), new Int32Parser() - }); - - case "AllOrDamageType": - return new AnyParser(new List { + }), + "AllOrDamageType" => new AnyParser(new List { new EnumParser(definitions.Enumerations["AllEnum"]), new EnumParser(definitions.Enumerations["Damage Type"]), - }); - - case "RollAdjustmentTypeOrDamageType": - return new AnyParser(new List { + }), + "RollAdjustmentTypeOrDamageType" => new AnyParser(new List { 
new EnumParser(definitions.Enumerations["RollAdjustmentType"]), new EnumParser(definitions.Enumerations["Damage Type"]), - }); - - case "AbilityOrAttackRollAbility": - return new AnyParser(new List { + }), + "AbilityOrAttackRollAbility" => new AnyParser(new List { new EnumParser(definitions.Enumerations["Ability"]), new EnumParser(definitions.Enumerations["AttackRollAbility"]), - }); - - case "DamageTypeOrDealDamageWeaponDamageType": - return new AnyParser(new List { + }), + "DamageTypeOrDealDamageWeaponDamageType" => new AnyParser(new List { new EnumParser(definitions.Enumerations["Damage Type"]), new EnumParser(definitions.Enumerations["DealDamageWeaponDamageType"]), - }); - - case "SpellId": - return new StatReferenceParser(ReferenceValidator, new List - { + }), + "SpellId" => new StatReferenceParser(referenceValidator, + [ new StatReferenceConstraint{ StatType = "SpellData" } - }); - - case "Interrupt": - return new StatReferenceParser(ReferenceValidator, new List - { + ]), + "Interrupt" => new StatReferenceParser(referenceValidator, + [ new StatReferenceConstraint{ StatType = "InterruptData" } - }); - + ]), // THESE NEED TO BE FIXED! - case "StatusIDs": - return new StringParser(); - - default: - throw new ArgumentException($"Could not create parser for type '{type}'"); - } + "StatusIDs" => new StringParser(), + _ => throw new ArgumentException($"Could not create parser for type '{type}'"), + }; } } -} \ No newline at end of file +} diff --git a/LSLib/LSLib.csproj b/LSLib/LSLib.csproj index 2f697402..ee8119d0 100644 --- a/LSLib/LSLib.csproj +++ b/LSLib/LSLib.csproj @@ -1,38 +1,16 @@ - - - + - Debug - AnyCPU - {46372C50-4288-4B8E-AF21-C934560600E0} + net8.0 Library - Properties - LSLib - LSLib - v4.7.2 - 512 - + false - true - full - false - bin\Debug\ TRACE;DEBUG;EXPORT_GPPG - prompt - 4 - false x64 true - pdbonly - true - bin\Release\ EXPORT_GPPG - prompt - 4 - false x64 true @@ -41,219 +19,40 @@ TRACE;EXPORT_GPPG true true - pdbonly x86 - prompt MinimumRecommendedRules.ruleset - - ..\packages\AlphaFS.2.2.6\lib\net452\AlphaFS.dll - - - ..\packages\lz4net.1.0.15.93\lib\net4-client\LZ4.dll - - - ..\packages\Newtonsoft.Json.13.0.1\lib\net45\Newtonsoft.Json.dll - - - ..\external\gppg\binaries\QUT.ShiftReduceParser.dll - - - - - - - - - - - False - ..\packages\zlib.net.1.0.4.0\lib\zlib.net.dll - + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + - - {d8b26b12-e45c-47ea-88f7-56628eb2ccd1} - LSLibNative - - - {abb9db44-14f2-46e0-a4b8-b46c300ca982} - OpenTK - + - - - - - - + - "$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(ProjectDir)\LS\Story\GoalParser\Goal.lex.cs" "$(ProjectDir)\LS\Story\GoalParser\Goal.lex" -"$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(ProjectDir)\LS\Story\GoalParser\Goal.yy.cs" "$(ProjectDir)\LS\Story\GoalParser\Goal.yy" + "$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(SolutionDir)\LSLib\LS\Story\GoalParser\Goal.lex.cs" "$(SolutionDir)\LSLib\LS\Story\GoalParser\Goal.lex" +"$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(SolutionDir)\LSLib\LS\Story\GoalParser\Goal.yy.cs" "$(SolutionDir)\LSLib\LS\Story\GoalParser\Goal.yy" -"$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(ProjectDir)\LS\Story\HeaderParser\StoryHeader.lex.cs" "$(ProjectDir)\LS\Story\HeaderParser\StoryHeader.lex" 
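For the stat expression parser changes above, a minimal usage sketch; the namespaces, the host-provided validator and definitions objects, and the functor string are assumptions, not part of the patch:

    static void ValidateFunctors(StatDefinitionRepository definitions, IStatReferenceValidator validator)
    {
        var factory = new StatValueParserFactory(validator);
        // "Properties" selects the __TYPE_Properties__ prelude; the parser subtracts its
        // length again when it reports the error column.
        var parser = new ExpressionParser("Properties", definitions, factory, ExpressionType.Functor);

        bool ok = false;
        string error = null;
        var functors = parser.Parse("DealDamage(1d6,Fire)", ref ok, ref error); // value is illustrative
        Console.WriteLine(ok ? "parsed OK" : error);
    }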
-"$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(ProjectDir)\LS\Story\HeaderParser\StoryHeader.yy.cs" "$(ProjectDir)\LS\Story\HeaderParser\StoryHeader.yy" +"$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(SolutionDir)\LSLib\LS\Story\HeaderParser\StoryHeader.lex.cs" "$(SolutionDir)\LSLib\LS\Story\HeaderParser\StoryHeader.lex" +"$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(SolutionDir)\LSLib\LS\Story\HeaderParser\StoryHeader.yy.cs" "$(SolutionDir)\LSLib\LS\Story\HeaderParser\StoryHeader.yy" -"$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(ProjectDir)\LS\Stats\Parser\Stat.lex.cs" "$(ProjectDir)\LS\Stats\Parser\Stat.lex" -"$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(ProjectDir)\LS\Stats\Parser\Stat.yy.cs" "$(ProjectDir)\LS\Stats\Parser\Stat.yy" +"$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(SolutionDir)\LSLib\LS\Stats\Parser\Stat.lex.cs" "$(SolutionDir)\LSLib\LS\Stats\Parser\Stat.lex" +"$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(SolutionDir)\LSLib\LS\Stats\Parser\Stat.yy.cs" "$(SolutionDir)\LSLib\LS\Stats\Parser\Stat.yy" -"$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(ProjectDir)\LS\Stats\Parser\StatProperty.lex.cs" "$(ProjectDir)\LS\Stats\Parser\StatProperty.lex" -"$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(ProjectDir)\LS\Stats\Parser\StatProperty.yy.cs" "$(ProjectDir)\LS\Stats\Parser\StatProperty.yy" +"$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(SolutionDir)\LSLib\LS\Stats\Parser\StatProperty.lex.cs" "$(SolutionDir)\LSLib\LS\Stats\Parser\StatProperty.lex" +"$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(SolutionDir)\LSLib\LS\Stats\Parser\StatProperty.yy.cs" "$(SolutionDir)\LSLib\LS\Stats\Parser\StatProperty.yy" -"$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(ProjectDir)\LS\Stats\Parser\StatLua.lex.cs" "$(ProjectDir)\LS\Stats\Parser\StatLua.lex" -"$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(ProjectDir)\LS\Stats\Parser\StatLua.yy.cs" "$(ProjectDir)\LS\Stats\Parser\StatLua.yy" +"$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(SolutionDir)\LSLib\LS\Stats\Parser\StatLua.lex.cs" "$(SolutionDir)\LSLib\LS\Stats\Parser\StatLua.lex" +"$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(SolutionDir)\LSLib\LS\Stats\Parser\StatLua.yy.cs" "$(SolutionDir)\LSLib\LS\Stats\Parser\StatLua.yy" + LSLib + LSLib + Copyright © Norbyte 2012-2023 + 1.18.5.0 + 1.18.5.0 - \ No newline at end of file diff --git a/LSLib/Properties/AssemblyInfo.cs b/LSLib/Properties/AssemblyInfo.cs index b2be3572..91962f52 100644 --- a/LSLib/Properties/AssemblyInfo.cs +++ b/LSLib/Properties/AssemblyInfo.cs @@ -1,15 +1,6 @@ -using System.Reflection; +using System.Diagnostics.CodeAnalysis; +using System.Reflection; using System.Runtime.InteropServices; - -// General Information about an assembly is controlled through the following -// set of attributes. Change these attribute values to modify the information -// associated with an assembly. 
-[assembly: AssemblyTitle("LSLib")] -[assembly: AssemblyDescription("")] -[assembly: AssemblyConfiguration("")] -[assembly: AssemblyCompany("")] -[assembly: AssemblyProduct("LSLib")] -[assembly: AssemblyCopyright("Copyright © Norbyte 2012-2023")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] @@ -20,16 +11,3 @@ // The following GUID is for the ID of the typelib if this project is exposed to COM [assembly: Guid("cfb98b54-f577-4855-86a3-0c3988b5df8f")] - -// Version information for an assembly consists of the following four values: -// -// Major Version -// Minor Version -// Build Number -// Revision -// -// You can specify all the values or you can default the Build and Revision Numbers -// by using the '*' as shown below: -// [assembly: AssemblyVersion("1.0.*")] -[assembly: AssemblyVersion("1.18.5.0")] -[assembly: AssemblyFileVersion("1.18.5.0")] diff --git a/LSLib/VirtualTextures/BC5Image.cs b/LSLib/VirtualTextures/BC5Image.cs index da93f0d8..37b0aae1 100644 --- a/LSLib/VirtualTextures/BC5Image.cs +++ b/LSLib/VirtualTextures/BC5Image.cs @@ -71,28 +71,28 @@ public void CopyTo(BC5Image destination, int srcX, int srcY, int dstX, int dstY, public void SaveDDS(string path) { - var header = new DDSHeader(); - header.dwMagic = DDSHeader.DDSMagic; - header.dwSize = DDSHeader.HeaderSize; - header.dwFlags = 0x1007; - header.dwWidth = (uint)Width; - header.dwHeight = (uint)Height; - header.dwPitchOrLinearSize = (uint)(Width * Height); - header.dwDepth = 1; - header.dwMipMapCount = 1; - - header.dwPFSize = 32; - header.dwPFFlags = 0x04; - header.dwFourCC = DDSHeader.FourCC_DXT5; - - header.dwCaps = 0x1000; - - using (var pagef = new FileStream(path, FileMode.Create, FileAccess.Write)) - using (var bw = new BinaryWriter(pagef)) + var header = new DDSHeader { - BinUtils.WriteStruct(bw, ref header); - bw.Write(Data, 0, Data.Length); - } + dwMagic = DDSHeader.DDSMagic, + dwSize = DDSHeader.HeaderSize, + dwFlags = 0x1007, + dwWidth = (uint)Width, + dwHeight = (uint)Height, + dwPitchOrLinearSize = (uint)(Width * Height), + dwDepth = 1, + dwMipMapCount = 1, + + dwPFSize = 32, + dwPFFlags = 0x04, + dwFourCC = DDSHeader.FourCC_DXT5, + + dwCaps = 0x1000 + }; + + using var pagef = new FileStream(path, FileMode.Create, FileAccess.Write); + using var bw = new BinaryWriter(pagef); + BinUtils.WriteStruct(bw, ref header); + bw.Write(Data, 0, Data.Length); } } @@ -102,57 +102,55 @@ public class BC5Mips public void LoadDDS(string path) { - using (var f = new FileStream(path, FileMode.Open, FileAccess.Read)) - using (var reader = new BinaryReader(f)) - { - var header = BinUtils.ReadStruct(reader); - Mips = new List(); + using var f = new FileStream(path, FileMode.Open, FileAccess.Read); + using var reader = new BinaryReader(f); + var header = BinUtils.ReadStruct(reader); + Mips = []; - if (header.dwMagic != DDSHeader.DDSMagic) - { - throw new InvalidDataException($"{path}: Incorrect DDS signature, or file is not a DDS file"); - } + if (header.dwMagic != DDSHeader.DDSMagic) + { + throw new InvalidDataException($"{path}: Incorrect DDS signature, or file is not a DDS file"); + } - if (header.dwSize != DDSHeader.HeaderSize) - { - throw new InvalidDataException($"{path}: Incorrect DDS header size"); - } + if (header.dwSize != DDSHeader.HeaderSize) + { + throw new InvalidDataException($"{path}: Incorrect DDS header size"); + } - if ((header.dwFlags & 0xffff) != 0x1007) - { - throw new InvalidDataException($"{path}: Incorrect DDS texture flags"); - } + if ((header.dwFlags & 0xffff) != 0x1007) + { + throw 
new InvalidDataException($"{path}: Incorrect DDS texture flags"); + } - if (header.dwDepth != 0 && header.dwDepth != 1) - { - throw new InvalidDataException($"{path}: Only single-layer textures are supported"); - } + if (header.dwDepth != 0 && header.dwDepth != 1) + { + throw new InvalidDataException($"{path}: Only single-layer textures are supported"); + } - if ((header.dwPFFlags & 4) != 4) - { - throw new InvalidDataException($"{path}: DDS does not have a valid FourCC code"); - } + if ((header.dwPFFlags & 4) != 4) + { + throw new InvalidDataException($"{path}: DDS does not have a valid FourCC code"); + } - if (header.FourCCName != "DXT5") - { - throw new InvalidDataException($"{path}: Expected a DXT5 encoded texture, got: " + header.FourCCName); - } + if (header.FourCCName != "DXT5") + { + throw new InvalidDataException($"{path}: Expected a DXT5 encoded texture, got: " + header.FourCCName); + } - Int32 mips = 1; - if ((header.dwFlags & 0x20000) == 0x20000) - { - mips = (Int32)header.dwMipMapCount; - } + Int32 mips = 1; + if ((header.dwFlags & 0x20000) == 0x20000) + { + mips = (Int32)header.dwMipMapCount; + } - Mips = new List(mips); - for (var i = 0; i < mips; i++) - { - var width = Math.Max((int)header.dwWidth >> i, 1); - var height = Math.Max((int)header.dwHeight >> i, 1); - var bytes = Math.Max(width / 4, 1) * Math.Max(height / 4, 1) * 16; - var blob = reader.ReadBytes(bytes); - Mips.Add(new BC5Image(blob, width, height)); - } + Mips = new List(mips); + for (var i = 0; i < mips; i++) + { + var width = Math.Max((int)header.dwWidth >> i, 1); + var height = Math.Max((int)header.dwHeight >> i, 1); + var bytes = Math.Max(width / 4, 1) * Math.Max(height / 4, 1) * 16; + var blob = reader.ReadBytes(bytes); + Mips.Add(new BC5Image(blob, width, height)); } } } diff --git a/LSLib/VirtualTextures/Build.cs b/LSLib/VirtualTextures/Build.cs new file mode 100644 index 00000000..9585f054 --- /dev/null +++ b/LSLib/VirtualTextures/Build.cs @@ -0,0 +1,1132 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Runtime.InteropServices; +using System.Xml; + +namespace LSLib.VirtualTextures +{ + public class TextureDescriptor + { + public string Name; + public List Layers; + } + + public class TileSetDescriptor + { + public string Name; + public List Textures = []; + public TileSetConfiguration Config = new(); + public string RootPath; + public string SourceTexturePath; + public string VirtualTexturePath; + + public void Load(string path) + { + using var f = new FileStream(path, FileMode.Open, FileAccess.Read); + var doc = new XmlDocument(); + doc.Load(f); + Load(doc); + } + + public void Load(XmlDocument doc) + { + var version = doc.DocumentElement.GetAttribute("Version"); + if (version == null || !Int32.TryParse(version, out int versionNum) || versionNum != 2) + { + throw new InvalidDataException("Expected TileSet XML descriptor version 2"); + } + + Name = doc.DocumentElement.GetAttribute("Name"); + Config.GTSName = Name; + Config.Layers = []; + + var tileSetConfig = doc.DocumentElement.GetElementsByTagName("TileSetConfig"); + foreach (var node in (tileSetConfig[0] as XmlElement).ChildNodes) + { + if (node is XmlElement) + { + var key = (node as XmlElement).Name; + var value = (node as XmlElement).InnerText; + + switch (key) + { + case "TileWidth": Config.TileWidth = Int32.Parse(value); break; + case "TileHeight": Config.TileHeight = Int32.Parse(value); break; + case "TileBorder": Config.TileBorder = Int32.Parse(value); break; + case "Compression": 
Config.Compression = (TileCompressionPreference)Enum.Parse(typeof(TileCompressionPreference), value); break; + case "PageSize": Config.PageSize = Int32.Parse(value); break; + case "OneFilePerGTex": Config.OneFilePerGTex = Boolean.Parse(value); break; + case "BackfillPages": Config.BackfillPages = Boolean.Parse(value); break; + case "EmbedMips": Config.EmbedMips = Boolean.Parse(value); break; + case "EmbedTopLevelMips": Config.EmbedTopLevelMips = Boolean.Parse(value); break; + default: throw new InvalidDataException($"Unsupported configuration key: {key}"); + } + } + } + + var paths = doc.DocumentElement.GetElementsByTagName("Paths"); + foreach (var node in (paths[0] as XmlElement).ChildNodes) + { + if (node is XmlElement) + { + var key = (node as XmlElement).Name; + var value = (node as XmlElement).InnerText; + + switch (key) + { + case "SourceTextures": SourceTexturePath = Path.Combine(RootPath, value); break; + case "VirtualTextures": VirtualTexturePath = Path.Combine(RootPath, value); break; + default: throw new InvalidDataException($"Unsupported path type: {key}"); + } + } + } + + var layers = doc.DocumentElement.GetElementsByTagName("Layers"); + foreach (var node in (layers[0] as XmlElement).GetElementsByTagName("Layer")) + { + Config.Layers.Add(new BuildLayer + { + DataType = (GTSDataType)Enum.Parse(typeof(GTSDataType), (node as XmlElement).GetAttribute("Type")), + Name = (node as XmlElement).GetAttribute("Name") + }); + } + + if (Config.Layers.Count == 0) + { + throw new InvalidDataException("No tile set layers specified"); + } + + var textures = doc.DocumentElement.GetElementsByTagName("Texture"); + foreach (var texture in textures) + { + var tex = new TextureDescriptor() + { + Name = (texture as XmlElement).GetAttribute("Name"), + Layers = [] + }; + Textures.Add(tex); + + foreach (var layer in Config.Layers) + { + tex.Layers.Add(null); + } + + var texLayers = (texture as XmlElement).GetElementsByTagName("Layer"); + foreach (var layerNode in texLayers) + { + var name = (layerNode as XmlElement).GetAttribute("Name"); + var index = Config.Layers.FindIndex(ly => ly.Name == name); + if (index == -1) + { + throw new InvalidDataException($"Layer does not exist: '{name}'"); + } + + tex.Layers[index] = (layerNode as XmlElement).GetAttribute("Source"); + } + } + } + } + + public class BuildTile + { + public BC5Image Image; + public BC5Image EmbeddedMip; + public CompressedTile Compressed; + + // Set during initialization + public int Layer; + public GTSCodec Codec; + public GTSDataType DataType; + + // Set during layout + public int Level; + public int X; + public int Y; + + // Set during page file build + public bool AddedToPageFile = false; + public int PageFileIndex; + public int PageIndex; + public int ChunkIndex; + } + + public class BuildLayer + { + public GTSDataType DataType; + public string Name; + + public List Levels; + } + + public class TileSetConfiguration + { + public string GTSName; + public Int32 TileWidth = 0x80; + public Int32 TileHeight = 0x80; + public Int32 TileBorder = 8; + public List Layers; + public TileCompressionPreference Compression = TileCompressionPreference.Best; + public Int32 PageSize = 0x100000; + public bool OneFilePerGTex = true; + public bool BackfillPages = false; + public bool EmbedMips = true; + public bool EmbedTopLevelMips = false; + } + + public class BuildLayerTexture + { + public string Path; + public int FirstMip; + public BC5Mips Mips; + } + + public class BuildLevel + { + public int Level; // Level index (0..n) + public int Width; + public 
int Height; + public int TilesX; + public int TilesY; + public int PaddedTileWidth; + public int PaddedTileHeight; + public BuildTile[] Tiles; + + public BuildTile Get(int x, int y) + { + if (x >= TilesX || y >= TilesY) + { + throw new ArgumentException("Invalid tile index"); + } + + var off = x + TilesX * y; + return Tiles[off]; + } + + public BuildTile GetOrCreateTile(int x, int y, int layer, GTSCodec codec, GTSDataType dataType) + { + if (x >= TilesX || y >= TilesY) + { + throw new ArgumentException("Invalid tile index"); + } + + var off = x + TilesX * y; + if (Tiles[off] == null) + { + Tiles[off] = new BuildTile + { + Image = new BC5Image(PaddedTileWidth, PaddedTileHeight), + Layer = layer, + Codec = codec, + DataType = dataType + }; + } + + return Tiles[off]; + } + } + + public class BuildTexture + { + public string Name; + public int Width; + public int Height; + // Position at level 0 (including FirstMip) + public int X; + public int Y; + public List Layers; + } + + public class TileSetBuildData + { + public List Layers; + public string GTSName; + // Size of tile including borders + public int PaddedTileWidth; + public int PaddedTileHeight; + // Size of tile excluding borders from adjacent tiles + public int RawTileWidth; + public int RawTileHeight; + // Size of tile border + public int TileBorder; + // Total size of tileset in pixels + public int TotalWidth; + public int TotalHeight; + // Number of mip levels to save in page files + public int PageFileLevels; + // Number of mip levels to generate + public int BuildLevels; + // First mip level to save in a separate mip page file + public int MipFileStartLevel; + } + + public class ParameterBlock + { + public GTSCodec Codec; + public GTSDataType DataType; + public TileCompressionMethod Compression; + public UInt32 ParameterBlockID; + } + + public class ParameterBlockContainer + { + public List ParameterBlocks = []; + private UInt32 NextParameterBlockID = 1; + + public ParameterBlock GetOrAdd(GTSCodec codec, GTSDataType dataType, TileCompressionMethod compression) + { + foreach (var block in ParameterBlocks) + { + if (block.Codec == codec && block.DataType == dataType && block.Compression == compression) + { + return block; + } + } + + var newBlock = new ParameterBlock + { + Codec = codec, + DataType = dataType, + Compression = compression, + ParameterBlockID = NextParameterBlockID++ + }; + ParameterBlocks.Add(newBlock); + + return newBlock; + } + } + + public class TileSetBuilder + { + private readonly TileSetBuildData BuildData; + private readonly TileSetConfiguration Config; + private readonly TileCompressor Compressor; + private readonly ParameterBlockContainer ParameterBlocks; + + public VirtualTileSet TileSet; + public List Textures; + public List PageFiles; + + public delegate void BuildStepDelegate(string step); + public BuildStepDelegate OnStepStarted = delegate { }; + public delegate void BuildStepProgressDelegate(int numerator, int denumerator); + public BuildStepProgressDelegate OnStepProgress = delegate { }; + + private List PerLevelFlatTiles; + + public TileSetBuilder(TileSetConfiguration config) + { + BuildData = new TileSetBuildData + { + Layers = config.Layers, + GTSName = config.GTSName, + PaddedTileWidth = config.TileWidth + 2 * config.TileBorder, + PaddedTileHeight = config.TileHeight + 2 * config.TileBorder, + RawTileWidth = config.TileWidth, + RawTileHeight = config.TileHeight, + TileBorder = config.TileBorder + }; + Config = config; + + Compressor = new TileCompressor(); + ParameterBlocks = new 
ParameterBlockContainer(); + Compressor.Preference = Config.Compression; + Compressor.ParameterBlocks = ParameterBlocks; + + Textures = []; + } + + public void AddTexture(string name, List texturePaths) + { + var tex = new BuildTexture + { + Name = name, + Width = 0, + Height = 0, + X = 0, + Y = 0, + Layers = [] + }; + + foreach (var path in texturePaths) + { + if (path != null) + { + var mips = new BC5Mips(); + mips.LoadDDS(path); + if (mips.Mips.Count <= 1) + { + throw new InvalidDataException($"Texture must include mipmaps: {path}"); + } + + var mip = mips.Mips[0]; + if ((mip.Width % BuildData.RawTileWidth) != 0 + || (mip.Height % BuildData.RawTileHeight) != 0) + { + throw new InvalidDataException($"Texture {path} size ({mip.Width}x{mip.Height}) must be a multiple of the virtual tile size ({BuildData.RawTileWidth}x{BuildData.RawTileHeight})"); + } + + if ((mip.Width & (mip.Width - 1)) != 0 + || (mip.Height & (mip.Height - 1)) != 0) + { + throw new InvalidDataException($"Texture {path} size ({mip.Width}x{mip.Height}) must be a multiple of two"); + } + + tex.Layers.Add(new BuildLayerTexture + { + Path = path, + FirstMip = 0, + Mips = mips + }); + } + else + { + tex.Layers.Add(null); + } + } + + // Figure out top-level size for texture across all layers + foreach (var layer in tex.Layers) + { + if (layer == null) continue; + + tex.Width = Math.Max(tex.Width, layer.Mips.Mips[0].Width); + tex.Height = Math.Max(tex.Height, layer.Mips.Mips[0].Height); + } + + // Adjust first layer index for textures + foreach (var layer in tex.Layers) + { + if (layer == null) continue; + + var mip = layer.Mips.Mips[0]; + if (mip.Width > tex.Width || mip.Height > tex.Height) + { + throw new InvalidDataException($"Top-level texture size mismatch; texture {layer.Path} is {mip.Width}x{mip.Height}, size across all layers is {tex.Width}x{tex.Height}"); + } + + var mulW = tex.Width / mip.Width; + var mulH = tex.Height / mip.Height; + + if ((tex.Width % mip.Width) != 0 || (tex.Height % mip.Height) != 0 + || mulW != mulH + // Check if total layer size size is a power-of-two of the texture size + || (mulW & (mulW - 1)) != 0) + { + throw new InvalidDataException($"Texture sizes within all layers should be multiples of each other; texture {layer.Path} is {mip.Width}x{mip.Height}, size across all layers is {tex.Width}x{tex.Height}"); + } + + // Adjust first mip index based on texture size + while (mulW > 1) + { + mulW >>= 1; + layer.FirstMip++; + } + } + + Console.WriteLine($"Added GTex {tex.Name} ({tex.Width}x{tex.Height})"); + Textures.Add(tex); + } + + private void BuildParameterBlocks() + { + var blocks = ParameterBlocks.ParameterBlocks; + TileSet.ParameterBlockHeaders = new GTSParameterBlockHeader[blocks.Count]; + TileSet.ParameterBlocks = []; + + for (var i = 0; i < blocks.Count; i++) + { + var block = blocks[i]; + ref var header = ref TileSet.ParameterBlockHeaders[i]; + + header.ParameterBlockID = block.ParameterBlockID; + header.Codec = block.Codec; + + switch (block.Codec) + { + case GTSCodec.BC: + header.ParameterBlockSize = (uint)Marshal.SizeOf(typeof(GTSBCParameterBlock)); + + string compression1, compression2; + switch (block.Compression) + { + case TileCompressionMethod.Raw: + compression1 = "raw"; + compression2 = ""; + break; + + case TileCompressionMethod.LZ4: + compression1 = "lz4"; + compression2 = "lz40.1.0"; + break; + + case TileCompressionMethod.LZ77: + compression1 = "lz77"; + compression2 = "fastlz0.1.0"; + break; + + default: + throw new ArgumentException("Unsupported compression method"); + } + + 
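A worked example of the FirstMip adjustment performed by AddTexture above; the layer sizes are illustrative only:

    // One layer's top mip is 2048px wide, another's only 512px, so the smaller
    // layer starts two mip levels down (the ratio must be a power of two).
    int texWidth = 2048, layerMip0Width = 512;
    int mul = texWidth / layerMip0Width;          // 4
    int firstMip = 0;
    while (mul > 1) { mul >>= 1; firstMip++; }    // firstMip == 2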
TileSet.ParameterBlocks[block.ParameterBlockID] = new GTSBCParameterBlock + { + Version = 0x238e, + CompressionName1 = compression1, + CompressionName2 = compression2, + B = 0, + C1 = 0, + C2 = 0, + BCField3 = 0, + DataType = (Byte)block.DataType, + D = 0, + FourCC = 0x20334342, + E1 = 0, + SaveMip = 1, + E3 = 0, + E4 = 0, + F = 0 + }; + break; + + case GTSCodec.Uniform: + header.ParameterBlockSize = (uint)Marshal.SizeOf(typeof(GTSUniformParameterBlock)); + TileSet.ParameterBlocks[block.ParameterBlockID] = new GTSUniformParameterBlock + { + Version = 0x42, + A_Unused = 0, + Width = 4, + Height = 1, + DataType = block.DataType + }; + break; + + default: + throw new ArgumentException("Unsupported codec type"); + } + } + } + + private void BuildFourCC() + { + var fourCC = new TileSetFourCC(); + var meta = FourCCElement.Make("META"); + fourCC.Root = meta; + + var atlas = FourCCElement.Make("ATLS"); + meta.Children.Add(atlas); + + var textures = FourCCElement.Make("TXTS"); + atlas.Children.Add(textures); + + foreach (var texture in Textures) + { + var tex = FourCCElement.Make("TXTR"); + textures.Children.Add(tex); + tex.Children.Add(FourCCElement.Make("NAME", texture.Name)); + tex.Children.Add(FourCCElement.Make("WDTH", (uint)texture.Width)); + tex.Children.Add(FourCCElement.Make("HGHT", (uint)texture.Height)); + tex.Children.Add(FourCCElement.Make("XXXX", (uint)texture.X)); + tex.Children.Add(FourCCElement.Make("YYYY", (uint)texture.Y)); + tex.Children.Add(FourCCElement.Make("ADDR", "None")); + tex.Children.Add(FourCCElement.Make("SRGB", FourCCElementType.BinaryInt, [ + 0x01, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00 + ])); + tex.Children.Add(FourCCElement.Make("THMB", FourCCElementType.BinaryGuid, [ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + ])); + } + + var project = FourCCElement.Make("PROJ", ""); + meta.Children.Add(project); + + var layers = FourCCElement.Make("LINF"); + meta.Children.Add(layers); + + for (var i = 0; i < BuildData.Layers.Count; i++) + { + var layerInfo = FourCCElement.Make("LAYR"); + layers.Children.Add(layerInfo); + layerInfo.Children.Add(FourCCElement.Make("INDX", (uint)i)); + layerInfo.Children.Add(FourCCElement.Make("TYPE", "BC3")); + layerInfo.Children.Add(FourCCElement.Make("NAME", BuildData.Layers[i].Name)); + } + + var info = FourCCElement.Make("INFO"); + meta.Children.Add(info); + + var compiler = FourCCElement.Make("COMP"); + info.Children.Add(compiler); + + var compVer = FourCCElement.Make("CMPW"); + compiler.Children.Add(compVer); + compVer.Children.Add(FourCCElement.Make("MAJR", 5)); + compVer.Children.Add(FourCCElement.Make("MINR", 0)); + + var buildVer = FourCCElement.Make("BLDV"); + compiler.Children.Add(buildVer); + buildVer.Children.Add(FourCCElement.Make("MAJR", 5)); + buildVer.Children.Add(FourCCElement.Make("MINR", 1)); + buildVer.Children.Add(FourCCElement.Make("BINF", "LSLib")); + + info.Children.Add(FourCCElement.Make("DATE", "02-08-2023 07:49:30.7662814 PM +02:00")); + info.Children.Add(FourCCElement.Make("BLKS", "4096")); + info.Children.Add(FourCCElement.Make("TILE", "Software")); + info.Children.Add(FourCCElement.Make("BDPR", "default")); + info.Children.Add(FourCCElement.Make("LTMP", 0)); + + TileSet.FourCCMetadata = fourCC; + } + + private void 
CalculateGeometry() + { + var geom = new TileSetGeometryCalculator + { + BuildData = BuildData, + Textures = Textures + }; + geom.Update(); + + Console.WriteLine($"Tile set geometry: {BuildData.TotalWidth}x{BuildData.TotalHeight} ({BuildData.TotalWidth/BuildData.RawTileWidth}x{BuildData.TotalHeight/BuildData.RawTileHeight} tiles), {BuildData.RawTileWidth}x{BuildData.RawTileHeight} tile size, {BuildData.PaddedTileWidth}x{BuildData.PaddedTileHeight} tile size with adjacency data"); + } + + private static int Clamp(int x, int min, int max) + { + return Math.Min(max, Math.Max(x, min)); + } + + private void StitchPartialTile(BuildTile tile, BC5Image source, int tileX, int tileY, int sourceX, int sourceY, int width, int height) + { + source.CopyTo( + tile.Image, + sourceX, sourceY, + tileX + BuildData.TileBorder, + tileY + BuildData.TileBorder, + width, height + ); + } + + private void StitchTiles(BuildLevel level, int layer, int x, int y, BC5Image mip) + { + var layerInfo = BuildData.Layers[layer]; + var firstTileX = x / BuildData.RawTileWidth; + var firstTileY = y / BuildData.RawTileHeight; + var lastTileX = (x + mip.Width - 1) / BuildData.RawTileWidth; + var lastTileY = (y + mip.Height - 1) / BuildData.RawTileHeight; + + int sourceY = 0; + for (var tileY = firstTileY; tileY <= lastTileY; tileY++) + { + int sourceX = 0; + for (var tileX = firstTileX; tileX <= lastTileX; tileX++) + { + var tileXPixelsMin = tileX * BuildData.RawTileWidth; + var tileYPixelsMin = tileY * BuildData.RawTileHeight; + var tileXPixelsMax = tileXPixelsMin + BuildData.RawTileWidth; + var tileYPixelsMax = tileYPixelsMin + BuildData.RawTileHeight; + + var stitchXMin = Clamp(x, tileXPixelsMin, tileXPixelsMax); + var stitchYMin = Clamp(y, tileYPixelsMin, tileYPixelsMax); + var stitchXMax = Clamp(x + mip.Width, tileXPixelsMin, tileXPixelsMax); + var stitchYMax = Clamp(y + mip.Height, tileYPixelsMin, tileYPixelsMax); + + var stitchW = stitchXMax - stitchXMin; + var stitchH = stitchYMax - stitchYMin; + + // GIGA JANK + if (stitchW >= 4 && stitchH >= 4) + { + var tile = level.GetOrCreateTile(tileX, tileY, layer, GTSCodec.BC, layerInfo.DataType); + StitchPartialTile(tile, mip, + stitchXMin - tileXPixelsMin, + stitchYMin - tileYPixelsMin, + sourceX, sourceY, + stitchXMax - stitchXMin, + stitchYMax - stitchYMin + ); + } + + sourceX += BuildData.RawTileWidth; + } + + sourceY += BuildData.RawTileHeight; + } + } + + private void BuildTextureTiles(BuildTexture texture, int level, int layerIndex, BuildLayer layer, BC5Image mip) + { + var x = texture.X >> level; + var y = texture.Y >> level; + StitchTiles(layer.Levels[level], layerIndex, x, y, mip); + } + + private void BuildTextureTiles(BuildTexture texture, int layerIndex, BuildLayerTexture texLayer, BuildLayer layer) + { + if (texLayer.FirstMip + texLayer.Mips.Mips.Count < BuildData.BuildLevels) + { + throw new InvalidDataException($"Insufficient mip layers in texture '{texture.Name}', layer '{layer.Name}'; got {texLayer.FirstMip}+{texLayer.Mips.Mips.Count}, virtual texture has {BuildData.BuildLevels}"); + } + + for (var i = texLayer.FirstMip; i < BuildData.BuildLevels; i++) + { + BuildTextureTiles(texture, i, layerIndex, layer, texLayer.Mips.Mips[i - texLayer.FirstMip]); + } + } + + private void BuildTiles() + { + foreach (var texture in Textures) + { + for (var layerIdx = 0; layerIdx < texture.Layers.Count; layerIdx++) + { + if (texture.Layers[layerIdx] != null) + { + BuildTextureTiles(texture, layerIdx, texture.Layers[layerIdx], BuildData.Layers[layerIdx]); + } + } + } + } + + 
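For reference, the tile indexing used by StitchTiles above with the default TileSetConfiguration values; the texture placement is illustrative only:

    int rawTile = 0x80;                         // default TileWidth / TileHeight
    int padded  = rawTile + 2 * 8;              // PaddedTileWidth = 144 with the 8px border

    // A mip placed at x = 256 with width 512 touches tile columns 2..5:
    int x = 256, width = 512;
    int firstTileX = x / rawTile;               // 2
    int lastTileX  = (x + width - 1) / rawTile; // 5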
private void BuildTileBorders(BuildLevel level) + { + for (var y = 0; y < level.TilesY; y++) + { + for (var x = 0; x < level.TilesX; x++) + { + var tile = level.Get(x, y); + if (tile == null) continue; + + // Left + if (x > 0) + { + level.Get(x - 1, y)?.Image.CopyTo(tile.Image, + BuildData.RawTileWidth, 0, + 0, 0, + BuildData.TileBorder, BuildData.PaddedTileHeight); + } + + // Right + if (x + 1 < level.TilesX) + { + level.Get(x + 1, y)?.Image.CopyTo(tile.Image, + BuildData.TileBorder, 0, + BuildData.RawTileWidth + BuildData.TileBorder, 0, + BuildData.TileBorder, BuildData.PaddedTileHeight); + } + + // Top + if (y > 0) + { + level.Get(x, y - 1)?.Image.CopyTo(tile.Image, + 0, BuildData.RawTileHeight, + 0, 0, + BuildData.PaddedTileWidth, BuildData.TileBorder); + } + + // Bottom + if (y + 1 < level.TilesY) + { + level.Get(x, y + 1)?.Image.CopyTo(tile.Image, + 0, BuildData.TileBorder, + 0, BuildData.RawTileHeight + BuildData.TileBorder, + BuildData.PaddedTileWidth, BuildData.TileBorder); + + // Bottom Left corner + if (x > 0) + { + level.Get(x - 1, y + 1)?.Image.CopyTo(tile.Image, + BuildData.RawTileWidth, BuildData.TileBorder, + 0, BuildData.RawTileHeight + BuildData.TileBorder, + BuildData.TileBorder, BuildData.TileBorder); + } + + // Bottom Right corner + if (x + 1 < level.TilesX) + { + level.Get(x + 1, y + 1)?.Image.CopyTo(tile.Image, + BuildData.TileBorder, BuildData.TileBorder, + BuildData.RawTileWidth + BuildData.TileBorder, BuildData.RawTileHeight + BuildData.TileBorder, + BuildData.TileBorder, BuildData.TileBorder); + } + } + } + } + } + + private void BuildTileBorders() + { + foreach (var layer in BuildData.Layers) + { + foreach (var level in layer.Levels) + { + BuildTileBorders(level); + } + } + } + + private void EmbedTileMips(BuildLayer layer, BuildLevel level) + { + for (var y = 0; y < level.TilesY; y++) + { + for (var x = 0; x < level.TilesX; x++) + { + var tile = level.Get(x, y); + if (tile != null) + { + if (level.Level + 1 < BuildData.BuildLevels) + { + var nextLevelTile = layer.Levels[level.Level + 1].Get(x / 2, y / 2); + if (nextLevelTile != null) + { + var nextMip = new BC5Image(BuildData.PaddedTileWidth / 2, BuildData.PaddedTileHeight / 2); + var mipX = (x & 1) * (BuildData.RawTileWidth / 2) + BuildData.TileBorder / 2; + var mipY = (y & 1) * (BuildData.RawTileHeight / 2) + BuildData.TileBorder / 2; + nextLevelTile.Image.CopyTo(nextMip, mipX, mipY, 0, 0, BuildData.PaddedTileWidth / 2, BuildData.PaddedTileHeight / 2); + tile.EmbeddedMip = nextMip; + } + } + } + } + } + } + + private void EmbedTileMips() + { + foreach (var layer in BuildData.Layers) + { + foreach (var level in layer.Levels) + { + if (level.Level > 0 || Config.EmbedTopLevelMips) + { + EmbedTileMips(layer, level); + } + } + } + } + + private void BuildGTSHeaders() + { + // Configuration-independent defaults + ref GTSHeader header = ref TileSet.Header; + header.Magic = GTSHeader.GRPGMagic; + header.Version = GTSHeader.CurrentVersion; + header.Unused = 0; + header.GUID = Guid.NewGuid(); + header.I6 = 0; + header.I7 = 0; + header.M = 0; + header.N = 0; + header.O = 0; + header.P = 0; + header.Q = 0; + header.R = 0; + header.S = 0; + header.PageSize = (UInt32)Config.PageSize; + header.XJJ = 0; + header.XKK = 0; + header.XLL = 0; + header.XMM = 0; + + header.TileWidth = BuildData.PaddedTileWidth; + header.TileHeight = BuildData.PaddedTileHeight; + header.TileBorder = BuildData.TileBorder; + } + + private void BuildPageFiles() + { + var builder = new PageFileSetBuilder(BuildData, Config); + if (Config.OneFilePerGTex) 
+ { + PageFiles = builder.BuildFilePerGTex(Textures); + } + else + { + PageFiles = builder.BuildSingleFile(); + } + + TileSet.PageFileInfos = []; + uint firstPageIndex = 0; + foreach (var file in PageFiles) + { + var fileInfo = new PageFileInfo + { + Meta = new GTSPageFileInfo + { + FileName = file.FileName, + NumPages = (uint)file.Pages.Count, + Checksum = file.Checksum, + F = 2 + }, + FirstPageIndex = firstPageIndex, + FileName = file.FileName + }; + TileSet.PageFileInfos.Add(fileInfo); + firstPageIndex += (uint)file.Pages.Count; + } + } + + private void BuildGTS() + { + TileSet = new VirtualTileSet(); + BuildGTSHeaders(); + + TileSet.TileSetLayers = new GTSTileSetLayer[BuildData.Layers.Count]; + for (int i = 0; i < BuildData.Layers.Count; i++) + { + var layer = BuildData.Layers[i]; + ref var gtsLayer = ref TileSet.TileSetLayers[i]; + gtsLayer.DataType = layer.DataType; + gtsLayer.B = -1; + } + + var levels = BuildData.Layers[0].Levels; + + TileSet.TileSetLevels = new GTSTileSetLevel[BuildData.PageFileLevels]; + for (int i = 0; i < BuildData.PageFileLevels; i++) + { + var level = levels[i]; + ref var gtsLevel = ref TileSet.TileSetLevels[i]; + gtsLevel.Width = (uint)level.TilesX; + gtsLevel.Height = (uint)level.TilesY; + } + + OnStepStarted("Generating tile lists"); + BuildFlatTileList(); + OnStepStarted("Encoding tiles"); + CompressTiles(); + + OnStepStarted("Building page files"); + BuildPageFiles(); + + OnStepStarted("Building metadata"); + BuildTileInfos(); + BuildTileDownsampleInfos(); + + BuildParameterBlocks(); + BuildFourCC(); + } + + public void BuildFlatTileList() + { + PerLevelFlatTiles = new List(BuildData.PageFileLevels); + + for (var level = 0; level < BuildData.PageFileLevels; level++) + { + var levelInfo = BuildData.Layers[0].Levels[level]; + var flatTiles = new BuildTile[levelInfo.TilesX * levelInfo.TilesY * BuildData.Layers.Count]; + PerLevelFlatTiles.Add(flatTiles); + + var tileIdx = 0; + for (var y = 0; y < levelInfo.TilesY; y++) + { + for (var x = 0; x < levelInfo.TilesX; x++) + { + for (var layer = 0; layer < BuildData.Layers.Count; layer++) + { + var tile = BuildData.Layers[layer].Levels[level].Get(x, y); + if (tile != null) + { + tile.Layer = layer; + tile.Level = level; + tile.X = x; + tile.Y = y; + flatTiles[tileIdx] = tile; + } + else + { + flatTiles[tileIdx] = null; + } + + tileIdx++; + } + } + } + } + } + + public void CompressTiles() + { + var numTiles = PerLevelFlatTiles.Sum(tiles => tiles.Length); + var nextTile = 0; + + foreach (var level in PerLevelFlatTiles) + { + foreach (var tile in level) + { + OnStepProgress(nextTile++, numTiles); + if (tile != null) + { + Compressor.Compress(tile); + } + } + } + } + + public void BuildTileInfos() + { + TileSet.PerLevelFlatTileIndices = new List(BuildData.PageFileLevels); + PerLevelFlatTiles = new List(BuildData.PageFileLevels); + + var flatTileInfos = new List(); + var packedTileIds = new List(); + + for (var level = 0; level < BuildData.PageFileLevels; level++) + { + var levelInfo = BuildData.Layers[0].Levels[level]; + var flatTileIndices = new UInt32[levelInfo.TilesX * levelInfo.TilesY * BuildData.Layers.Count]; + TileSet.PerLevelFlatTileIndices.Add(flatTileIndices); + + var flatTiles = new BuildTile[levelInfo.TilesX * levelInfo.TilesY * BuildData.Layers.Count]; + PerLevelFlatTiles.Add(flatTiles); + + var tileIdx = 0; + for (var y = 0; y < levelInfo.TilesY; y++) + { + for (var x = 0; x < levelInfo.TilesX; x++) + { + for (var layer = 0; layer < BuildData.Layers.Count; layer++) + { + var tile = 
BuildData.Layers[layer].Levels[level].Get(x, y); + if (tile != null) + { + var flatTileIdx = (uint)flatTileInfos.Count; + var packedTileIdx = (uint)packedTileIds.Count; + + var packedTile = new GTSPackedTileID((uint)layer, (uint)level, (uint)x, (uint)y); + packedTileIds.Add(packedTile); + + var tileInfo = new GTSFlatTileInfo + { + PageFileIndex = (UInt16)tile.PageFileIndex, + PageIndex = (UInt16)tile.PageIndex, + ChunkIndex = (UInt16)tile.ChunkIndex, + D = 1, + PackedTileIndex = packedTileIdx + }; + flatTileInfos.Add(tileInfo); + + flatTileIndices[tileIdx] = flatTileIdx; + flatTiles[tileIdx] = tile; + } + else + { + flatTileIndices[tileIdx] = 0xFFFFFFFF; + flatTiles[tileIdx] = null; + } + + tileIdx++; + } + } + } + } + + TileSet.PackedTileIDs = packedTileIds.ToArray(); + TileSet.FlatTileInfos = flatTileInfos.ToArray(); + } + + public void BuildTileDownsampleInfos() + { + for (var level = 0; level < BuildData.PageFileLevels; level++) + { + var levelInfo = BuildData.Layers[0].Levels[level]; + var flatTileIndices = TileSet.PerLevelFlatTileIndices[level]; + + var tileIdx = 0; + for (var y = 0; y < levelInfo.TilesY; y++) + { + for (var x = 0; x < levelInfo.TilesX; x++) + { + for (var layer = 0; layer < BuildData.Layers.Count; layer++) + { + if (flatTileIndices[tileIdx] == 0xFFFFFFFF) + { + for (var downsampleLevel = level + 1; downsampleLevel < BuildData.PageFileLevels; downsampleLevel++) + { + var downsampleX = x >> (downsampleLevel - level); + var downsampleY = y >> (downsampleLevel - level); + + var dsIndices = TileSet.PerLevelFlatTileIndices[downsampleLevel]; + var dsIndex = dsIndices[layer + BuildData.Layers.Count * (downsampleX + downsampleY * BuildData.Layers[layer].Levels[downsampleLevel].TilesX)]; + if ((dsIndex & 0x80000000) == 0) + { + flatTileIndices[tileIdx] = dsIndex | 0x80000000; + break; + } + } + } + + tileIdx++; + } + } + } + } + } + + public void Build(string dir) + { + OnStepStarted("Calculating geometry"); + CalculateGeometry(); + OnStepStarted("Building tiles"); + BuildTiles(); + OnStepStarted("Building tile borders"); + BuildTileBorders(); + OnStepStarted("Embedding tile mipmaps"); + if (Config.EmbedMips) + { + EmbedTileMips(); + } + + BuildGTS(); + + long tileBytes = 0, embeddedMipBytes = 0, tileCompressedBytes = 0, pages = 0, chunks = 0, levelTiles = 0; + + foreach (var pageFile in PageFiles) + { + pages += pageFile.Pages.Count; + foreach (var page in pageFile.Pages) + { + chunks += page.Chunks.Count; + } + } + + foreach (var level in PerLevelFlatTiles) + { + levelTiles += level.Length; + foreach (var tile in level) + { + if (tile != null) + { + tileBytes += tile.Image.Data.Length; + if (tile.EmbeddedMip != null) + { + embeddedMipBytes += tile.EmbeddedMip.Data.Length; + } + + tileCompressedBytes += tile.Compressed.Data.Length; + } + } + } + + Console.WriteLine($"Flat tiles: {levelTiles} total, {TileSet.FlatTileInfos.Length} in use"); + Console.WriteLine($"Generated {PageFiles.Count} page files, {pages} pages, {chunks} chunks"); + Console.WriteLine($"Raw tile data: {tileBytes / 1024} KB tiles, {embeddedMipBytes / 1024} KB embedded mips, {tileCompressedBytes / 1024} KB transcoded, {pages*Config.PageSize/1024} KB pages total"); + + OnStepStarted("Saving tile set"); + TileSet.Save(dir + "\\" + BuildData.GTSName + ".gts"); + + foreach (var file in PageFiles) + { + OnStepStarted($"Saving page file: {file.FileName}"); + file.Save(dir + "\\" + file.FileName); + } + } + } +} diff --git a/LSLib/VirtualTextures/Compression.cs b/LSLib/VirtualTextures/Compression.cs new file mode 
100644 index 00000000..8a59fad6 --- /dev/null +++ b/LSLib/VirtualTextures/Compression.cs @@ -0,0 +1,154 @@ +using System; +using System.IO; +using LZ4; + +namespace LSLib.VirtualTextures +{ + public enum TileCompressionMethod + { + Raw, + LZ4, + LZ77 + }; + + public enum TileCompressionPreference + { + Uncompressed, + Best, + LZ4, + LZ77 + }; + + public class CompressedTile + { + public TileCompressionMethod Method; + public UInt32 ParameterBlockID; + public byte[] Data; + } + + public class TileCompressor + { + public ParameterBlockContainer ParameterBlocks; + public TileCompressionPreference Preference = TileCompressionPreference.Best; + + private byte[] GetRawBytes(BuildTile tile) + { + if (tile.EmbeddedMip == null) + { + return tile.Image.Data; + } + else + { + var data = new byte[tile.Image.Data.Length + tile.EmbeddedMip.Data.Length]; + Array.Copy(tile.Image.Data, 0, data, 0, tile.Image.Data.Length); + Array.Copy(tile.EmbeddedMip.Data, 0, data, tile.Image.Data.Length, tile.EmbeddedMip.Data.Length); + return data; + } + } + + public static byte[] CompressLZ4(byte[] raw) + { + return LZ4Codec.EncodeHC(raw, 0, raw.Length); + } + + public static byte[] CompressLZ77(byte[] raw) + { + return Native.FastLZCompressor.Compress(raw, 2); + } + + public byte[] Compress(byte[] uncompressed, out TileCompressionMethod method) + { + switch (Preference) + { + case TileCompressionPreference.Uncompressed: + method = TileCompressionMethod.Raw; + return uncompressed; + + case TileCompressionPreference.Best: + var lz4 = CompressLZ4(uncompressed); + var lz77 = CompressLZ77(uncompressed); + if (lz4.Length <= lz77.Length) + { + method = TileCompressionMethod.LZ4; + return lz4; + } + else + { + method = TileCompressionMethod.LZ77; + return lz77; + } + + case TileCompressionPreference.LZ4: + method = TileCompressionMethod.LZ4; + return CompressLZ4(uncompressed); + + case TileCompressionPreference.LZ77: + method = TileCompressionMethod.LZ77; + return CompressLZ77(uncompressed); + + default: + throw new ArgumentException("Invalid compression preference"); + } + } + + public CompressedTile Compress(BuildTile tile) + { + if (tile.Compressed != null) + { + return tile.Compressed; + } + + var uncompressed = GetRawBytes(tile); + var compressed = new CompressedTile(); + compressed.Data = Compress(uncompressed, out compressed.Method); + + var paramBlock = ParameterBlocks.GetOrAdd(tile.Codec, tile.DataType, compressed.Method); + compressed.ParameterBlockID = paramBlock.ParameterBlockID; + + tile.Compressed = compressed; + return compressed; + } + + public TileCompressionMethod GetMethod(string method1, string method2) + { + if (method1 == "lz77" && method2 == "fastlz0.1.0") + { + return TileCompressionMethod.LZ77; + } + else if (method1 == "lz4" && method2 == "lz40.1.0") + { + return TileCompressionMethod.LZ4; + } + else if (method1 == "raw") + { + return TileCompressionMethod.Raw; + } + else + { + throw new InvalidDataException($"Unsupported compression format: '{method1}', '{method2}'"); + } + } + + public byte[] Decompress(byte[] compressed, int outputSize, string method1, string method2) + { + return Decompress(compressed, outputSize, GetMethod(method1, method2)); + } + + public byte[] Decompress(byte[] compressed, int outputSize, TileCompressionMethod method) + { + switch (method) + { + case TileCompressionMethod.Raw: + return compressed; + case TileCompressionMethod.LZ4: + var decompressed = new byte[outputSize]; + LZ4Codec.Decode(compressed, 0, compressed.Length, decompressed, 0, outputSize, true); + return 
decompressed; + case TileCompressionMethod.LZ77: + return Native.FastLZCompressor.Decompress(compressed, outputSize); + default: + throw new ArgumentException(); + } + } + } +} diff --git a/LSLib/VirtualTextures/Geometry.cs b/LSLib/VirtualTextures/Geometry.cs new file mode 100644 index 00000000..53fee2bc --- /dev/null +++ b/LSLib/VirtualTextures/Geometry.cs @@ -0,0 +1,183 @@ +using System; +using System.Collections.Generic; + +namespace LSLib.VirtualTextures +{ + public class TileSetGeometryCalculator + { + public List Textures; + public TileSetBuildData BuildData; + + private int PlacementTileWidth = 0x1000; + private int PlacementTileHeight = 0x1000; + private int PlacementGridWidth; + private int PlacementGridHeight; + private BuildTexture[] PlacementGrid; + + private void ResizePlacementGrid(int w, int h) + { + PlacementGridWidth = w; + PlacementGridHeight = h; + PlacementGrid = new BuildTexture[w * h]; + } + + private void GrowPlacementGrid() + { + if (PlacementGridWidth <= PlacementGridHeight) + { + ResizePlacementGrid(PlacementGridWidth * 2, PlacementGridHeight); + } + else + { + ResizePlacementGrid(PlacementGridWidth, PlacementGridHeight * 2); + } + } + + private bool TryToPlaceTexture(BuildTexture texture, int texX, int texY) + { + var width = texture.Width / BuildData.RawTileWidth / PlacementTileWidth; + var height = texture.Height / BuildData.RawTileHeight / PlacementTileHeight; + + for (var y = texY; y < texY + height; y++) + { + for (var x = texX; x < texX + width; x++) + { + if (PlacementGrid[x + y * PlacementGridWidth] != null) + { + return false; + } + } + } + + texture.X = texX * PlacementTileWidth * BuildData.RawTileWidth; + texture.Y = texY * PlacementTileHeight * BuildData.RawTileHeight; + + for (var y = texY; y < texY + height; y++) + { + for (var x = texX; x < texX + width; x++) + { + PlacementGrid[x + y * PlacementGridWidth] = texture; + } + } + + return true; + } + + private bool TryToPlaceTexture(BuildTexture texture) + { + var width = texture.Width / BuildData.RawTileWidth / PlacementTileWidth; + var height = texture.Height / BuildData.RawTileHeight / PlacementTileHeight; + + for (var y = 0; y < PlacementGridHeight - height + 1; y++) + { + for (var x = 0; x < PlacementGridWidth - width + 1; x++) + { + if (TryToPlaceTexture(texture, x, y)) + { + return true; + } + } + } + + return false; + } + + private bool PlaceAllTextures() + { + foreach (var tex in Textures) + { + if (!TryToPlaceTexture(tex)) + { + return false; + } + } + + return true; + } + + private void DoAutoPlacement() + { + var startingX = 0; + var startingY = 0; + + foreach (var tex in Textures) + { + PlacementTileWidth = Math.Min(PlacementTileWidth, tex.Width / BuildData.RawTileWidth); + PlacementTileHeight = Math.Min(PlacementTileHeight, tex.Height / BuildData.RawTileHeight); + startingX = Math.Max(startingX, tex.Width / BuildData.RawTileWidth); + startingY = Math.Max(startingY, tex.Height / BuildData.RawTileHeight); + } + + ResizePlacementGrid(startingX / PlacementTileWidth, startingY / PlacementTileHeight); + + while (!PlaceAllTextures()) + { + GrowPlacementGrid(); + } + + BuildData.TotalWidth = PlacementTileWidth * PlacementGridWidth * BuildData.RawTileWidth; + BuildData.TotalHeight = PlacementTileHeight * PlacementGridHeight * BuildData.RawTileWidth; + } + + private void UpdateGeometry() + { + var minTexSize = 0x10000; + foreach (var tex in Textures) + { + minTexSize = Math.Min(minTexSize, Math.Min(tex.Height, tex.Width)); + } + + BuildData.MipFileStartLevel = 0; + while (minTexSize >= 
BuildData.RawTileHeight) + { + BuildData.MipFileStartLevel++; + minTexSize >>= 1; + } + + // Max W/H of all textures + var maxSize = Math.Max(BuildData.TotalWidth, BuildData.TotalHeight); + BuildData.PageFileLevels = 0; + while (maxSize >= BuildData.RawTileHeight) + { + BuildData.PageFileLevels++; + maxSize >>= 1; + } + + BuildData.BuildLevels = BuildData.PageFileLevels + 1; + + foreach (var layer in BuildData.Layers) + { + var levelWidth = BuildData.TotalWidth; + var levelHeight = BuildData.TotalHeight; + + layer.Levels = new List(BuildData.BuildLevels); + for (var i = 0; i < BuildData.BuildLevels; i++) + { + var tilesX = levelWidth / BuildData.RawTileWidth + (((levelWidth % BuildData.RawTileWidth) > 0) ? 1 : 0); + var tilesY = levelHeight / BuildData.RawTileHeight + (((levelHeight % BuildData.RawTileHeight) > 0) ? 1 : 0); + var level = new BuildLevel + { + Level = i, + Width = tilesX * BuildData.RawTileWidth, + Height = tilesY * BuildData.RawTileHeight, + TilesX = tilesX, + TilesY = tilesY, + PaddedTileWidth = BuildData.PaddedTileWidth, + PaddedTileHeight = BuildData.PaddedTileHeight, + Tiles = new BuildTile[tilesX * tilesY] + }; + layer.Levels.Add(level); + + levelWidth = Math.Max(1, levelWidth >> 1); + levelHeight = Math.Max(1, levelHeight >> 1); + } + } + } + + public void Update() + { + DoAutoPlacement(); + UpdateGeometry(); + } + } +} diff --git a/LSLib/VirtualTextures/PageFile.cs b/LSLib/VirtualTextures/PageFile.cs index bc00b98f..68f8f434 100644 --- a/LSLib/VirtualTextures/PageFile.cs +++ b/LSLib/VirtualTextures/PageFile.cs @@ -2,19 +2,16 @@ using System; using System.Collections.Generic; using System.IO; -using System.Linq; -using System.Text; -using System.Threading.Tasks; namespace LSLib.VirtualTextures { public class PageFile : IDisposable { - private VirtualTileSet TileSet; - private FileStream Stream; - private BinaryReader Reader; + private readonly VirtualTileSet TileSet; + private readonly FileStream Stream; + private readonly BinaryReader Reader; public GTPHeader Header; - private List ChunkOffsets; + private readonly List ChunkOffsets; public PageFile(VirtualTileSet tileset, string path) { @@ -25,7 +22,7 @@ public PageFile(VirtualTileSet tileset, string path) Header = BinUtils.ReadStruct(Reader); var numPages = Stream.Length / tileset.Header.PageSize; - ChunkOffsets = new List(); + ChunkOffsets = []; for (var page = 0; page < numPages; page++) { @@ -44,22 +41,11 @@ public void Dispose() Stream.Dispose(); } - private byte[] DoUnpackTileBC(GTPChunkHeader header, int outputSize) + private byte[] DoUnpackTileBC(GTPChunkHeader header, int outputSize, TileCompressor compressor) { var parameterBlock = (GTSBCParameterBlock)TileSet.ParameterBlocks[header.ParameterBlockID]; - if (parameterBlock.CompressionName1 == "lz77" && parameterBlock.CompressionName2 == "fastlz0.1.0") - { - var buf = Reader.ReadBytes((int)header.Size); - return Native.FastLZCompressor.Decompress(buf, outputSize); - } - else if (parameterBlock.CompressionName1 == "raw") - { - return Reader.ReadBytes((int)header.Size); - } - else - { - throw new InvalidDataException($"Unsupported BC compression format: '{parameterBlock.CompressionName1}', '{parameterBlock.CompressionName2}'"); - } + var compressed = Reader.ReadBytes((int)header.Size); + return compressor.Decompress(compressed, outputSize, parameterBlock.CompressionName1, parameterBlock.CompressionName2); } private byte[] DoUnpackTileUniform(GTPChunkHeader header) @@ -71,23 +57,23 @@ private byte[] DoUnpackTileUniform(GTPChunkHeader header) return img; } - 
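A worked example of the level bookkeeping in UpdateGeometry above, assuming the default 128px tile, a smallest source texture of 512px and a packed atlas of 4096px:

    int rawTile = 0x80;
    int mipFileStartLevel = 0;                       // 512, 256, 128 -> 3
    for (var s = 512; s >= rawTile; s >>= 1) mipFileStartLevel++;

    int pageFileLevels = 0;                          // 4096 ... 128 -> 6
    for (var s = 4096; s >= rawTile; s >>= 1) pageFileLevels++;

    int buildLevels = pageFileLevels + 1;            // 7; one more level is generated than the page files store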
public byte[] UnpackTile(int pageIndex, int chunkIndex, int outputSize) + public byte[] UnpackTile(int pageIndex, int chunkIndex, int outputSize, TileCompressor compressor) { Stream.Position = ChunkOffsets[pageIndex][chunkIndex] + (pageIndex * TileSet.Header.PageSize); var chunkHeader = BinUtils.ReadStruct(Reader); - switch (chunkHeader.Codec) + return chunkHeader.Codec switch { - case GTSCodec.Uniform: return DoUnpackTileUniform(chunkHeader); - case GTSCodec.BC: return DoUnpackTileBC(chunkHeader, outputSize); - default: throw new InvalidDataException($"Unsupported codec: {chunkHeader.Codec}"); - } + GTSCodec.Uniform => DoUnpackTileUniform(chunkHeader), + GTSCodec.BC => DoUnpackTileBC(chunkHeader, outputSize, compressor), + _ => throw new InvalidDataException($"Unsupported codec: {chunkHeader.Codec}"), + }; } - public BC5Image UnpackTileBC5(int pageIndex, int chunkIndex) + public BC5Image UnpackTileBC5(int pageIndex, int chunkIndex, TileCompressor compressor) { var compressedSize = 16 * ((TileSet.Header.TileWidth + 3) / 4) * ((TileSet.Header.TileHeight + 3) / 4) + 16 * ((TileSet.Header.TileWidth/2 + 3) / 4) * ((TileSet.Header.TileHeight/2 + 3) / 4); - var chunk = UnpackTile(pageIndex, chunkIndex, compressedSize); + var chunk = UnpackTile(pageIndex, chunkIndex, compressedSize, compressor); return new BC5Image(chunk, TileSet.Header.TileWidth, TileSet.Header.TileHeight); } } diff --git a/LSLib/VirtualTextures/PageFileBuild.cs b/LSLib/VirtualTextures/PageFileBuild.cs new file mode 100644 index 00000000..d6a7a50b --- /dev/null +++ b/LSLib/VirtualTextures/PageFileBuild.cs @@ -0,0 +1,289 @@ +using LSLib.LS; +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Runtime.InteropServices; + +namespace LSLib.VirtualTextures +{ + + public class BuiltChunk + { + public GTSCodec Codec; + public UInt32 ParameterBlockID; + public byte[] EncodedBlob; + public int ChunkIndex; + public UInt32 OffsetInPage; + } + + public class PageBuilder + { + public PageFileBuilder PageFile; + public List Chunks; + public int PageFileIndex; + public int PageIndex; + public int Budget = 0; + + public PageBuilder() + { + Chunks = []; + } + + public bool TryAdd(BuildTile tile) + { + if (tile.AddedToPageFile) + { + throw new InvalidOperationException("Tried to add tile to page file multiple times"); + } + + var chunkSize = 4 + Marshal.SizeOf(typeof(GTPChunkHeader)) + tile.Compressed.Data.Length; + if (Budget + chunkSize > PageFile.Config.PageSize) + { + return false; + } + + var chunk = new BuiltChunk + { + Codec = GTSCodec.BC, + ParameterBlockID = tile.Compressed.ParameterBlockID, + EncodedBlob = tile.Compressed.Data, + ChunkIndex = Chunks.Count + }; + + tile.AddedToPageFile = true; + tile.PageFileIndex = PageFileIndex; + tile.PageIndex = PageIndex; + tile.ChunkIndex = chunk.ChunkIndex; + Chunks.Add(chunk); + Budget += chunkSize; + return true; + } + } + + public class PageFileBuilder(TileSetConfiguration config) + { + public readonly TileSetConfiguration Config = config; + public List Pages = []; + public string Name; + public string FileName; + public Guid Checksum; + public int PageFileIndex; + + public void AddTile(BuildTile tile) + { + if (Config.BackfillPages) + { + foreach (var page in Pages) + { + if (page.TryAdd(tile)) + { + return; + } + } + } + + if (Pages.Count == 0 || !Pages.Last().TryAdd(tile)) + { + var newPage = new PageBuilder + { + PageFile = this, + PageFileIndex = PageFileIndex, + PageIndex = Pages.Count + }; + + if (newPage.PageIndex == 0) + { + 
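With the compressor now passed in explicitly, reading a tile back looks roughly like this; the tile set instance, file name and indices are placeholders:

    var compressor = new TileCompressor();
    using var pages = new PageFile(tileSet, @"Assets_Example.gtp");   // tileSet: a loaded VirtualTileSet
    BC5Image tile = pages.UnpackTileBC5(0, 0, compressor);            // page 0, chunk 0
    tile.SaveDDS("tile_0_0.dds");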
newPage.Budget += Marshal.SizeOf(typeof(GTPHeader)); + } + + Pages.Add(newPage); + newPage.TryAdd(tile); + } + } + + public void Save(string path) + { + using var stream = new FileStream(path, FileMode.Create, FileAccess.ReadWrite); + using var writer = new BinaryWriter(stream); + Save(stream, writer); + } + + public void SaveChunk(BinaryWriter writer, BuiltChunk chunk) + { + var header = new GTPChunkHeader + { + Codec = chunk.Codec, + ParameterBlockID = chunk.ParameterBlockID, + Size = (UInt32)chunk.EncodedBlob.Length + }; + BinUtils.WriteStruct(writer, ref header); + writer.Write(chunk.EncodedBlob); + } + + public void Save(Stream s, BinaryWriter writer) + { + var header = new GTPHeader + { + Magic = GTPHeader.HeaderMagic, + Version = GTPHeader.DefaultVersion, + GUID = Checksum + }; + BinUtils.WriteStruct(writer, ref header); + + for (var i = 0; i < Pages.Count; i++) + { + var page = Pages[i]; + + writer.Write((UInt32)page.Chunks.Count); + foreach (var chunk in page.Chunks) + { + writer.Write(chunk.OffsetInPage); + } + + foreach (var chunk in page.Chunks) + { + chunk.OffsetInPage = (uint)(s.Position % Config.PageSize); + SaveChunk(writer, chunk); + } + + var padSize = (Config.PageSize - (s.Position % Config.PageSize) % Config.PageSize); + if (padSize > 0) + { + var pad = new byte[padSize]; + Array.Clear(pad, 0, (int)padSize); + writer.Write(pad); + } + } + + for (var i = 0; i < Pages.Count; i++) + { + var page = Pages[i]; + s.Position = (i * Config.PageSize); + if (i == 0) + { + s.Position += Marshal.SizeOf(typeof(GTPHeader)); + } + + writer.Write((UInt32)page.Chunks.Count); + foreach (var chunk in page.Chunks) + { + writer.Write(chunk.OffsetInPage); + } + } + } + } + + public class PageFileSetBuilder(TileSetBuildData buildData, TileSetConfiguration config) + { + private readonly TileSetBuildData BuildData = buildData; + private readonly TileSetConfiguration Config = config; + + private void BuildPageFile(PageFileBuilder file, int level, int minTileX, int minTileY, int maxTileX, int maxTileY) + { + for (var y = minTileY; y <= maxTileY; y++) + { + for (var x = minTileX; x <= maxTileX; x++) + { + for (var layer = 0; layer < BuildData.Layers.Count; layer++) + { + var tile = BuildData.Layers[layer].Levels[level].Get(x, y); + if (tile != null) + { + file.AddTile(tile); + } + } + } + } + } + + private void BuildPageFile(PageFileBuilder file, BuildTexture texture) + { + for (var level = 0; level < BuildData.MipFileStartLevel; level++) + { + var x = texture.X >> level; + var y = texture.Y >> level; + var width = texture.Width >> level; + var height = texture.Height >> level; + + var minTileX = x / BuildData.RawTileWidth; + var minTileY = y / BuildData.RawTileHeight; + var maxTileX = (x + width - 1) / BuildData.RawTileWidth; + var maxTileY = (y + height - 1) / BuildData.RawTileHeight; + + BuildPageFile(file, level, minTileX, minTileY, maxTileX, maxTileY); + } + } + + private void BuildMipPageFile(PageFileBuilder file) + { + for (var level = BuildData.MipFileStartLevel; level < BuildData.PageFileLevels; level++) + { + var lvl = BuildData.Layers[0].Levels[level]; + BuildPageFile(file, level, 0, 0, lvl.TilesX - 1, lvl.TilesY - 1); + } + } + + private void BuildFullPageFile(PageFileBuilder file) + { + for (var level = 0; level < BuildData.PageFileLevels; level++) + { + var lvl = BuildData.Layers[0].Levels[level]; + BuildPageFile(file, level, 0, 0, lvl.TilesX - 1, lvl.TilesY - 1); + } + } + + public List BuildFilePerGTex(List textures) + { + var pageFiles = new List(); + + uint firstPageIndex = 0; + 
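A note on the page padding in Save above: as written, padSize evaluates to a full Config.PageSize when the stream position is already page-aligned, so an aligned page is followed by one extra page of zeros. If strict alignment with no extra page is what is wanted, the boundary-safe form of the arithmetic looks like this (standalone sketch with assumed values, not LSLib code):

    using System;

    // Bytes of zero padding needed to reach the next page boundary; 0 when already aligned.
    long PadToBoundary(long position, long pageSize)
    {
        long remainder = position % pageSize;
        return remainder == 0 ? 0 : pageSize - remainder;
    }

    Console.WriteLine(PadToBoundary(0x40000, 0x40000)); // 0: already aligned
    Console.WriteLine(PadToBoundary(0x40010, 0x40000)); // 262128 (0x3FFF0) bytes of zeros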
foreach (var texture in textures) + { + var file = new PageFileBuilder(Config) + { + Name = texture.Name, + FileName = BuildData.GTSName + "_" + texture.Name + ".gtp", + Checksum = Guid.NewGuid(), + PageFileIndex = pageFiles.Count + }; + pageFiles.Add(file); + BuildPageFile(file, texture); + + firstPageIndex += (uint)file.Pages.Count; + } + + if (BuildData.MipFileStartLevel < BuildData.PageFileLevels) + { + var file = new PageFileBuilder(Config) + { + Name = "Mips", + FileName = BuildData.GTSName + "_Mips.gtp", + Checksum = Guid.NewGuid(), + PageFileIndex = pageFiles.Count + }; + pageFiles.Add(file); + BuildMipPageFile(file); + } + + return pageFiles; + } + + public List BuildSingleFile() + { + var pageFiles = new List(); + + var file = new PageFileBuilder(Config) + { + Name = "Global", + FileName = BuildData.GTSName + ".gtp", + Checksum = Guid.NewGuid(), + PageFileIndex = pageFiles.Count + }; + pageFiles.Add(file); + BuildFullPageFile(file); + + return pageFiles; + } + } +} diff --git a/LSLib/VirtualTextures/VirtualTexture.cs b/LSLib/VirtualTextures/VirtualTexture.cs index 4021b420..99b2faf6 100644 --- a/LSLib/VirtualTextures/VirtualTexture.cs +++ b/LSLib/VirtualTextures/VirtualTexture.cs @@ -3,9 +3,7 @@ using System.Collections.Generic; using System.Diagnostics; using System.IO; -using System.Linq; using System.Text; -using System.Threading.Tasks; namespace LSLib.VirtualTextures { @@ -40,7 +38,7 @@ public static FourCCElement Make(string fourCC) { Type = FourCCElementType.Node, FourCC = fourCC, - Children = new List() + Children = [] }; } @@ -73,6 +71,28 @@ public static FourCCElement Make(string fourCC, FourCCElementType type, byte[] v Blob = value }; } + + public FourCCElement GetChild(string fourCC) + { + foreach (var child in Children) + { + if (child.FourCC == fourCC) + { + return child; + } + } + + return null; + } + } + + public class FourCCTextureMeta + { + public string Name; + public int X; + public int Y; + public int Width; + public int Height; } public class TileSetFourCC @@ -98,7 +118,7 @@ public void Read(Stream fs, BinaryReader reader, long length, List(); + cc.Children = []; Read(fs, reader, valueSize, cc.Children); break; } @@ -159,6 +179,27 @@ public void Read(Stream fs, BinaryReader reader, long length, List ExtractTextureMetadata() + { + var metaList = new List(); + var textures = Root.GetChild("ATLS").GetChild("TXTS").Children; + foreach (var tex in textures) + { + var meta = new FourCCTextureMeta + { + Name = tex.GetChild("NAME").Str, + Width = (int)tex.GetChild("WDTH").UInt, + Height = (int)tex.GetChild("HGHT").UInt, + X = (int)tex.GetChild("XXXX").UInt, + Y = (int)tex.GetChild("YYYY").UInt + }; + metaList.Add(meta); + } + + return metaList; + } + public void Write(Stream fs, BinaryWriter writer) { Write(fs, writer, Root); @@ -166,58 +207,29 @@ public void Write(Stream fs, BinaryWriter writer) public void Write(Stream fs, BinaryWriter writer, FourCCElement element) { - var header = new GTSFourCCMetadata(); - header.FourCCName = element.FourCC; - - UInt32 length; - switch (element.Type) + var header = new GTSFourCCMetadata { - case FourCCElementType.Node: - length = 0x10000000; - break; - - case FourCCElementType.Int: - length = 4; - break; - - case FourCCElementType.String: - length = (UInt32)Encoding.Unicode.GetBytes(element.Str).Length + 2; - break; - - case FourCCElementType.BinaryInt: - case FourCCElementType.BinaryGuid: - length = (UInt32)element.Blob.Length; - break; - - default: - throw new InvalidDataException($"Unsupported FourCC value type: 
{element.Type}"); - } + FourCCName = element.FourCC + }; - switch (element.Type) + var length = element.Type switch { - case FourCCElementType.Node: - header.Format = 1; - break; - - case FourCCElementType.Int: - header.Format = 3; - break; - - case FourCCElementType.String: - header.Format = 2; - break; - - case FourCCElementType.BinaryInt: - header.Format = 8; - break; - - case FourCCElementType.BinaryGuid: - header.Format = 0xD; - break; + FourCCElementType.Node => (uint)0x10000000, + FourCCElementType.Int => (uint)4, + FourCCElementType.String => (UInt32)Encoding.Unicode.GetBytes(element.Str).Length + 2, + FourCCElementType.BinaryInt or FourCCElementType.BinaryGuid => (UInt32)element.Blob.Length, + _ => throw new InvalidDataException($"Unsupported FourCC value type: {element.Type}"), + }; - default: - throw new InvalidDataException($"Unsupported FourCC value type: {element.Type}"); - } + header.Format = element.Type switch + { + FourCCElementType.Node => 1, + FourCCElementType.Int => 3, + FourCCElementType.String => 2, + FourCCElementType.BinaryInt => 8, + FourCCElementType.BinaryGuid => 0xD, + _ => throw new InvalidDataException($"Unsupported FourCC value type: {element.Type}"), + }; header.Length = (UInt16)(length & 0xffff); if (length > 0xffff) @@ -294,17 +306,17 @@ public class VirtualTileSet : IDisposable public GTSPackedTileID[] PackedTileIDs; public GTSFlatTileInfo[] FlatTileInfos; - private Dictionary PageFiles = new Dictionary(); + private readonly Dictionary PageFiles = []; + private readonly TileCompressor Compressor; public VirtualTileSet(string path, string pagePath) { PagePath = pagePath; + Compressor = new TileCompressor(); - using (var fs = new FileStream(path, FileMode.Open, FileAccess.Read)) - using (var reader = new BinaryReader(fs)) - { - LoadFromStream(fs, reader, false); - } + using var fs = new FileStream(path, FileMode.Open, FileAccess.Read); + using var reader = new BinaryReader(fs); + LoadFromStream(fs, reader, false); } public VirtualTileSet(string path) : this(path, Path.GetDirectoryName(path)) @@ -317,11 +329,9 @@ public VirtualTileSet() public void Save(string path) { - using (var fs = new FileStream(path, FileMode.Create, FileAccess.Write)) - using (var writer = new BinaryWriter(fs)) - { - SaveToStream(fs, writer); - } + using var fs = new FileStream(path, FileMode.Create, FileAccess.Write); + using var writer = new BinaryWriter(fs); + SaveToStream(fs, writer); } public void Dispose() @@ -383,7 +393,7 @@ public void LoadFromStream(Stream fs, BinaryReader reader, bool loadThumbnails) TileSetLevels = new GTSTileSetLevel[Header.NumLevels]; BinUtils.ReadStructs(reader, TileSetLevels); - PerLevelFlatTileIndices = new List(); + PerLevelFlatTileIndices = []; foreach (var level in TileSetLevels) { fs.Position = (uint)level.FlatTileIndicesOffset; @@ -396,7 +406,7 @@ public void LoadFromStream(Stream fs, BinaryReader reader, bool loadThumbnails) ParameterBlockHeaders = new GTSParameterBlockHeader[Header.ParameterBlockHeadersCount]; BinUtils.ReadStructs(reader, ParameterBlockHeaders); - ParameterBlocks = new Dictionary(); + ParameterBlocks = []; foreach (var hdr in ParameterBlockHeaders) { fs.Position = (uint)hdr.FileInfoOffset; @@ -409,15 +419,15 @@ public void LoadFromStream(Stream fs, BinaryReader reader, bool loadThumbnails) Debug.Assert(bc.B == 0); Debug.Assert(bc.C1 == 0); Debug.Assert(bc.C2 == 0); - Debug.Assert(bc.DataType == (Byte)GTSDataType.R8G8B8A8_SRGB || bc.DataType == (Byte)GTSDataType.X8Y8Z8W8); Debug.Assert(bc.BCField3 == 0); + 
Debug.Assert(bc.DataType == (Byte)GTSDataType.R8G8B8A8_SRGB || bc.DataType == (Byte)GTSDataType.X8Y8Z8W8); + Debug.Assert(bc.D == 0); + Debug.Assert(bc.FourCC == 0x20334342); Debug.Assert(bc.E1 == 0); - Debug.Assert(bc.E3 == 0); Debug.Assert(bc.SaveMip == 1); + Debug.Assert(bc.E3 == 0); Debug.Assert(bc.E4 == 0); - Debug.Assert(bc.D == 0); Debug.Assert(bc.F == 0); - Debug.Assert(bc.FourCC == 0x20334342); } else { @@ -438,7 +448,7 @@ public void LoadFromStream(Stream fs, BinaryReader reader, bool loadThumbnails) var pageFileInfos = new GTSPageFileInfo[Header.NumPageFiles]; BinUtils.ReadStructs(reader, pageFileInfos); - PageFileInfos = new List(); + PageFileInfos = []; uint nextPageIndex = 0; foreach (var info in pageFileInfos) { @@ -504,15 +514,6 @@ public void SaveToStream(Stream fs, BinaryWriter writer) if (hdr.Codec == GTSCodec.BC) { var block = (GTSBCParameterBlock)ParameterBlocks[hdr.ParameterBlockID]; - hdr.ParameterBlockSize = 0x38; - block.Version = 0x238e; - var comp1 = Encoding.UTF8.GetBytes("lz77"); - Array.Copy(comp1, block.Compression1, comp1.Length); - var comp2 = Encoding.UTF8.GetBytes("fastlz0.1.0"); - Array.Copy(comp2, block.Compression2, comp2.Length); - block.DataType = (Byte)GTSDataType.R8G8B8A8_SRGB; // X8Y8Z8W8 for normal/phys - block.SaveMip = 1; - block.FourCC = 0x20334342; BinUtils.WriteStruct(writer, ref block); } else @@ -521,11 +522,6 @@ public void SaveToStream(Stream fs, BinaryWriter writer) hdr.ParameterBlockSize = 0x10; var block = (GTSUniformParameterBlock)ParameterBlocks[hdr.ParameterBlockID]; - block.Version = 0x42; - block.A_Unused = 0; - block.Width = 4; - block.Height = 1; - block.DataType = GTSDataType.R8G8B8A8_SRGB; // X8Y8Z8W8 for normal/phys BinUtils.WriteStruct(writer, ref block); } } @@ -544,8 +540,10 @@ public void SaveToStream(Stream fs, BinaryWriter writer) Header.FourCCListSize = (uint)((ulong)fs.Position - Header.FourCCListOffset); Header.ThumbnailsOffset = (ulong)fs.Position; - var thumbHdr = new GTSThumbnailInfoHeader(); - thumbHdr.NumThumbnails = 0; + var thumbHdr = new GTSThumbnailInfoHeader + { + NumThumbnails = 0 + }; BinUtils.WriteStruct(writer, ref thumbHdr); Header.PackedTileIDsOffset = (ulong)fs.Position; @@ -581,8 +579,7 @@ public bool GetTileInfo(int level, int layer, int x, int y, ref GTSFlatTileInfo public PageFile GetOrLoadPageFile(int pageFileIdx) { - PageFile file; - if (!PageFiles.TryGetValue(pageFileIdx, out file)) + if (!PageFiles.TryGetValue(pageFileIdx, out PageFile file)) { var meta = PageFileInfos[pageFileIdx]; file = new PageFile(this, PagePath + Path.DirectorySeparatorChar + meta.FileName); @@ -596,7 +593,7 @@ public void StitchTexture(int level, int layer, int minX, int minY, int maxX, in { var tileWidth = Header.TileWidth - Header.TileBorder * 2; var tileHeight = Header.TileHeight - Header.TileBorder * 2; - GTSFlatTileInfo tileInfo = new GTSFlatTileInfo(); + GTSFlatTileInfo tileInfo = new(); for (var y = minY; y <= maxY; y++) { for (var x = minX; x <= maxX; x++) @@ -604,7 +601,7 @@ public void StitchTexture(int level, int layer, int minX, int minY, int maxX, in if (GetTileInfo(level, layer, x, y, ref tileInfo)) { var pageFile = GetOrLoadPageFile(tileInfo.PageFileIndex); - var tile = pageFile.UnpackTileBC5(tileInfo.PageIndex, tileInfo.ChunkIndex); + var tile = pageFile.UnpackTileBC5(tileInfo.PageIndex, tileInfo.ChunkIndex, Compressor); tile.CopyTo(output, 8, 8, (x - minX) * tileWidth, (y - minY) * tileHeight, tileWidth, tileHeight); } } @@ -638,50 +635,39 @@ public void ReleasePageFiles() this.PageFiles.Clear(); } - 
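StitchTexture above walks a rectangle of tiles and places each one on a grid of border-stripped tiles, so the stitched output spans (tiles across) x (usable tile width) by (tiles down) x (usable tile height). A worked example with assumed header values (not LSLib code):

    using System;

    int tileWidth = 136, tileHeight = 136, tileBorder = 4;   // assumed GTS header values
    int usableW = tileWidth - tileBorder * 2;                // 128, as computed in StitchTexture
    int usableH = tileHeight - tileBorder * 2;               // 128

    int minX = 2, maxX = 5, minY = 1, maxY = 3;              // tile range being stitched
    int outputW = (maxX - minX + 1) * usableW;               // 4 * 128 = 512
    int outputH = (maxY - minY + 1) * usableH;               // 3 * 128 = 384
    Console.WriteLine($"stitched image: {outputW} x {outputH} texels");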
public BC5Image ExtractPageFileTexture(int pageFileIndex, int levelIndex, int layer) + public BC5Image ExtractTexture(int level, int layer, FourCCTextureMeta tex) { - int minX = 0, maxX = 0, minY = 0, maxY = 0; - bool foundPages = false; + var tlW = Header.TileWidth - Header.TileBorder * 2; + var tlH = Header.TileHeight - Header.TileBorder * 2; + var tX = tex.X / tlW; + var tY = tex.Y / tlH; + var tW = tex.Width / tlW; + var tH = tex.Height / tlH; + var lv = (1 << level); + + var minX = (tX / lv) + ((tX % lv) > 0 ? 1 : 0); + var minY = (tY / lv) + ((tY % lv) > 0 ? 1 : 0); + var maxX = ((tX+tW) / lv) + (((tX + tW) % lv) > 0 ? 1 : 0) - 1; + var maxY = ((tY+tH) / lv) + (((tY + tH) % lv) > 0 ? 1 : 0) - 1; + + return ExtractTextureIfExists(level, layer, minX, minY, maxX, maxY); + } - GTSFlatTileInfo tile = new GTSFlatTileInfo(); - var level = TileSetLevels[levelIndex]; - for (var x = 0; x < level.Width; x++) + public BC5Image ExtractTextureIfExists(int levelIndex, int layer, int minX, int minY, int maxX, int maxY) + { + GTSFlatTileInfo tile = new(); + for (var x = minX; x <= maxX; x++) { - for (var y = 0; y < level.Height; y++) + for (var y = minY; y <= maxY; y++) { - if (GetTileInfo(levelIndex, layer, x, y, ref tile)) + if (!GetTileInfo(levelIndex, layer, x, y, ref tile)) { - if (tile.PageFileIndex == pageFileIndex) - { - if (!foundPages) - { - minX = x; - maxX = x; - minY = y; - maxY = y; - foundPages = true; - } - else - { - minX = Math.Min(minX, x); - maxX = Math.Max(maxX, x); - minY = Math.Min(minY, y); - maxY = Math.Max(maxY, y); - } - } + return null; } } } - // Temporary workaround for page files that contain split textures - if (!foundPages || (maxX - minX) > 16 || (maxY - minY) > 16) - { - return null; - } - else - { - return ExtractTexture(levelIndex, layer, minX, minY, maxX, maxY); - } + return ExtractTexture(levelIndex, layer, minX, minY, maxX, maxY); } } } diff --git a/LSLib/VirtualTextures/VirtualTextureFormats.cs b/LSLib/VirtualTextures/VirtualTextureFormats.cs index f93a51a9..0a7f6a4d 100644 --- a/LSLib/VirtualTextures/VirtualTextureFormats.cs +++ b/LSLib/VirtualTextures/VirtualTextureFormats.cs @@ -1,4 +1,5 @@ -using System; +using LSLib.Granny; +using System; using System.Runtime.InteropServices; using System.Text; @@ -7,9 +8,9 @@ namespace LSLib.VirtualTextures [StructLayout(LayoutKind.Sequential, Pack = 1)] public struct DDSHeader { - public static UInt32 DDSMagic = 0x20534444; - public static UInt32 HeaderSize = 0x7c; - public static UInt32 FourCC_DXT5 = 0x35545844; + public const UInt32 DDSMagic = 0x20534444; + public const UInt32 HeaderSize = 0x7c; + public const UInt32 FourCC_DXT5 = 0x35545844; public UInt32 dwMagic; public UInt32 dwSize; @@ -107,6 +108,9 @@ public enum GTSCodec : UInt32 [StructLayout(LayoutKind.Sequential, Pack = 1)] public struct GTSHeader { + public const UInt32 GRPGMagic = 0x47505247; // 'GRPG' + public const UInt32 CurrentVersion = 5; + public UInt32 Magic; public UInt32 Version; public UInt32 Unused; @@ -194,6 +198,13 @@ public string CompressionName1 for (len = 0; len < Compression1.Length && Compression1[len] != 0; len ++) {} return Encoding.UTF8.GetString(Compression1, 0, len); } + set + { + Compression1 = new byte[0x10]; + Array.Clear(Compression1, 0, 0x10); + byte[] encoded = Encoding.UTF8.GetBytes(value); + Array.Copy(encoded, Compression1, encoded.Length); + } } public string CompressionName2 @@ -204,6 +215,13 @@ public string CompressionName2 for (len = 0; len < Compression2.Length && Compression2[len] != 0; len ++) {} return 
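ExtractTexture above converts a texture's placement from level-0 tile coordinates into a tile range at a given mip level: the minimum is rounded up, and the maximum is the rounded-up end minus one. A worked example of that arithmetic with assumed numbers (not LSLib code):

    using System;

    int tileX = 8, tileSpan = 6, level = 2;   // assumed level-0 tile column and span
    int factor = 1 << level;                  // 4: each level-2 tile covers 4 level-0 tiles
    int minX = tileX / factor + (tileX % factor > 0 ? 1 : 0);         // ceil(8/4)  = 2
    int maxX = (tileX + tileSpan) / factor
             + ((tileX + tileSpan) % factor > 0 ? 1 : 0) - 1;         // ceil(14/4) - 1 = 3
    Console.WriteLine($"level {level}: tile columns {minX}..{maxX}");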
Encoding.UTF8.GetString(Compression2, 0, len); } + set + { + Compression2 = new byte[0x10]; + Array.Clear(Compression2, 0, 0x10); + byte[] encoded = Encoding.UTF8.GetBytes(value); + Array.Copy(encoded, Compression2, encoded.Length); + } } public UInt32 B; @@ -250,6 +268,13 @@ public string FileName } return Encoding.Unicode.GetString(FileNameBuf, 0, nameLen); } + set + { + FileNameBuf = new byte[512]; + Array.Clear(FileNameBuf, 0, 512); + byte[] encoded = Encoding.Unicode.GetBytes(value); + Array.Copy(encoded, FileNameBuf, encoded.Length); + } } } @@ -301,9 +326,12 @@ public struct GTSThumbnailInfo } [StructLayout(LayoutKind.Sequential, Pack = 1)] - public struct GTSPackedTileID + public struct GTSPackedTileID(UInt32 layer, UInt32 level, UInt32 x, UInt32 y) { - public UInt32 Val; + public UInt32 Val = (layer & 0xF) + | ((level & 0xF) << 4) + | ((y & 0xFFF) << 8) + | ((x & 0xFFF) << 20); public UInt32 Layer { @@ -352,6 +380,9 @@ public struct GTSFlatTileInfo [StructLayout(LayoutKind.Sequential, Pack = 1)] public struct GTPHeader { + public const UInt32 HeaderMagic = 0x50415247; + public const UInt32 DefaultVersion = 4; + public UInt32 Magic; public UInt32 Version; public Guid GUID; diff --git a/LSLib/packages.config b/LSLib/packages.config deleted file mode 100644 index 1ccf9bc1..00000000 --- a/LSLib/packages.config +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - - \ No newline at end of file diff --git a/LSTools.sln b/LSTools.sln index 40a847bd..ab953a61 100644 --- a/LSTools.sln +++ b/LSTools.sln @@ -1,37 +1,35 @@ - Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio Version 16 -VisualStudioVersion = 16.0.30330.147 +# Visual Studio Version 17 +VisualStudioVersion = 17.8.34309.116 MinimumVisualStudioVersion = 10.0.40219.1 -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LSLib", "LSLib\LSLib.csproj", "{46372C50-4288-4B8E-AF21-C934560600E0}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LSLib", "LSLib\LSLib.csproj", "{46372C50-4288-4B8E-AF21-C934560600E0}" EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "LSLibNative", "LSLibNative\LSLibNative.vcxproj", "{D8B26B12-E45C-47EA-88F7-56628EB2CCD1}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "OpenTK", "OpenTK\OpenTK.csproj", "{ABB9DB44-14F2-46E0-A4B8-B46C300CA982}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ConverterApp", "ConverterApp\ConverterApp.csproj", "{FAD67294-6223-47E0-8838-E4E7FBC53ED2}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ConverterApp", "ConverterApp\ConverterApp.csproj", "{FAD67294-6223-47E0-8838-E4E7FBC53ED2}" ProjectSection(ProjectDependencies) = postProject {D8B26B12-E45C-47EA-88F7-56628EB2CCD1} = {D8B26B12-E45C-47EA-88F7-56628EB2CCD1} EndProjectSection EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Divine", "Divine\Divine.csproj", "{CBFEE38F-5F12-4D6F-B4FB-267FB68A6BEA}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Divine", "Divine\Divine.csproj", "{CBFEE38F-5F12-4D6F-B4FB-267FB68A6BEA}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "RconClient", "RconClient\RconClient.csproj", "{EF82C289-53D6-41C8-B5C3-72B37655C7F3}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "RconClient", "RconClient\RconClient.csproj", "{EF82C289-53D6-41C8-B5C3-72B37655C7F3}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StoryCompiler", "StoryCompiler\StoryCompiler.csproj", "{32F08B9A-F50B-4C2E-AB56-533FED066DDE}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "StoryCompiler", 
"StoryCompiler\StoryCompiler.csproj", "{32F08B9A-F50B-4C2E-AB56-533FED066DDE}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DebuggerFrontend", "DebuggerFrontend\DebuggerFrontend.csproj", "{31E71543-CBCF-43BB-AF77-D210D548118E}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "DebuggerFrontend", "DebuggerFrontend\DebuggerFrontend.csproj", "{31E71543-CBCF-43BB-AF77-D210D548118E}" ProjectSection(ProjectDependencies) = postProject {46372C50-4288-4B8E-AF21-C934560600E0} = {46372C50-4288-4B8E-AF21-C934560600E0} EndProjectSection EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StoryDecompiler", "StoryDecompiler\StoryDecompiler.csproj", "{E4B4F95E-F027-44D7-AB93-B96EF2E661B6}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StatParser", "StatParser\StatParser.csproj", "{94D900D1-EC77-4170-8942-56E3736E44DE}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "StoryDecompiler", "StoryDecompiler\StoryDecompiler.csproj", "{E4B4F95E-F027-44D7-AB93-B96EF2E661B6}" EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "UnpackGrid", "UnpackGrid\UnpackGrid.csproj", "{1948A6AF-41EE-4711-B617-B5CBA8434013}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "StatParser", "StatParser\StatParser.csproj", "{94D900D1-EC77-4170-8942-56E3736E44DE}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "TerrainFixup", "TerrainFixup\TerrainFixup.csproj", "{C3B44D0D-D7D8-4080-859B-90B391F8D5B1}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "VTexTool", "VTexTool\VTexTool.csproj", "{67E646C2-3C3C-4327-A0B4-40C1DB32579F}" + ProjectSection(ProjectDependencies) = postProject + {D8B26B12-E45C-47EA-88F7-56628EB2CCD1} = {D8B26B12-E45C-47EA-88F7-56628EB2CCD1} + EndProjectSection EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution @@ -64,18 +62,6 @@ Global {D8B26B12-E45C-47EA-88F7-56628EB2CCD1}.RelWithDebInfo|Any CPU.ActiveCfg = Release|x64 {D8B26B12-E45C-47EA-88F7-56628EB2CCD1}.RelWithDebInfo|Any CPU.Build.0 = Release|x64 {D8B26B12-E45C-47EA-88F7-56628EB2CCD1}.RelWithDebInfo|x64.ActiveCfg = Release|x64 - {ABB9DB44-14F2-46E0-A4B8-B46C300CA982}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {ABB9DB44-14F2-46E0-A4B8-B46C300CA982}.Debug|Any CPU.Build.0 = Debug|Any CPU - {ABB9DB44-14F2-46E0-A4B8-B46C300CA982}.Debug|x64.ActiveCfg = Debug|Any CPU - {ABB9DB44-14F2-46E0-A4B8-B46C300CA982}.Debug|x64.Build.0 = Debug|Any CPU - {ABB9DB44-14F2-46E0-A4B8-B46C300CA982}.Release|Any CPU.ActiveCfg = Release|Any CPU - {ABB9DB44-14F2-46E0-A4B8-B46C300CA982}.Release|Any CPU.Build.0 = Release|Any CPU - {ABB9DB44-14F2-46E0-A4B8-B46C300CA982}.Release|x64.ActiveCfg = Release|Any CPU - {ABB9DB44-14F2-46E0-A4B8-B46C300CA982}.Release|x64.Build.0 = Release|Any CPU - {ABB9DB44-14F2-46E0-A4B8-B46C300CA982}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any CPU - {ABB9DB44-14F2-46E0-A4B8-B46C300CA982}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU - {ABB9DB44-14F2-46E0-A4B8-B46C300CA982}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU - {ABB9DB44-14F2-46E0-A4B8-B46C300CA982}.RelWithDebInfo|x64.Build.0 = Release|Any CPU {FAD67294-6223-47E0-8838-E4E7FBC53ED2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {FAD67294-6223-47E0-8838-E4E7FBC53ED2}.Debug|Any CPU.Build.0 = Debug|Any CPU {FAD67294-6223-47E0-8838-E4E7FBC53ED2}.Debug|x64.ActiveCfg = Debug|Any CPU @@ -160,30 +146,18 @@ Global {94D900D1-EC77-4170-8942-56E3736E44DE}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU {94D900D1-EC77-4170-8942-56E3736E44DE}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU 
{94D900D1-EC77-4170-8942-56E3736E44DE}.RelWithDebInfo|x64.Build.0 = Release|Any CPU - {1948A6AF-41EE-4711-B617-B5CBA8434013}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {1948A6AF-41EE-4711-B617-B5CBA8434013}.Debug|Any CPU.Build.0 = Debug|Any CPU - {1948A6AF-41EE-4711-B617-B5CBA8434013}.Debug|x64.ActiveCfg = Debug|Any CPU - {1948A6AF-41EE-4711-B617-B5CBA8434013}.Debug|x64.Build.0 = Debug|Any CPU - {1948A6AF-41EE-4711-B617-B5CBA8434013}.Release|Any CPU.ActiveCfg = Release|Any CPU - {1948A6AF-41EE-4711-B617-B5CBA8434013}.Release|Any CPU.Build.0 = Release|Any CPU - {1948A6AF-41EE-4711-B617-B5CBA8434013}.Release|x64.ActiveCfg = Release|Any CPU - {1948A6AF-41EE-4711-B617-B5CBA8434013}.Release|x64.Build.0 = Release|Any CPU - {1948A6AF-41EE-4711-B617-B5CBA8434013}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any CPU - {1948A6AF-41EE-4711-B617-B5CBA8434013}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU - {1948A6AF-41EE-4711-B617-B5CBA8434013}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU - {1948A6AF-41EE-4711-B617-B5CBA8434013}.RelWithDebInfo|x64.Build.0 = Release|Any CPU - {C3B44D0D-D7D8-4080-859B-90B391F8D5B1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {C3B44D0D-D7D8-4080-859B-90B391F8D5B1}.Debug|Any CPU.Build.0 = Debug|Any CPU - {C3B44D0D-D7D8-4080-859B-90B391F8D5B1}.Debug|x64.ActiveCfg = Debug|Any CPU - {C3B44D0D-D7D8-4080-859B-90B391F8D5B1}.Debug|x64.Build.0 = Debug|Any CPU - {C3B44D0D-D7D8-4080-859B-90B391F8D5B1}.Release|Any CPU.ActiveCfg = Release|Any CPU - {C3B44D0D-D7D8-4080-859B-90B391F8D5B1}.Release|Any CPU.Build.0 = Release|Any CPU - {C3B44D0D-D7D8-4080-859B-90B391F8D5B1}.Release|x64.ActiveCfg = Release|Any CPU - {C3B44D0D-D7D8-4080-859B-90B391F8D5B1}.Release|x64.Build.0 = Release|Any CPU - {C3B44D0D-D7D8-4080-859B-90B391F8D5B1}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any CPU - {C3B44D0D-D7D8-4080-859B-90B391F8D5B1}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU - {C3B44D0D-D7D8-4080-859B-90B391F8D5B1}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU - {C3B44D0D-D7D8-4080-859B-90B391F8D5B1}.RelWithDebInfo|x64.Build.0 = Release|Any CPU + {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.Debug|x64.ActiveCfg = Debug|Any CPU + {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.Debug|x64.Build.0 = Debug|Any CPU + {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.Release|Any CPU.Build.0 = Release|Any CPU + {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.Release|x64.ActiveCfg = Release|Any CPU + {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.Release|x64.Build.0 = Release|Any CPU + {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any CPU + {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU + {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU + {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.RelWithDebInfo|x64.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/OpenTK/Math/BezierCurve.cs b/OpenTK/Math/BezierCurve.cs deleted file mode 100644 index 4e3035bd..00000000 --- a/OpenTK/Math/BezierCurve.cs +++ /dev/null @@ -1,261 +0,0 @@ -#region --- License --- -/* Licensed under the MIT/X11 license. - * Copyright (c) 2006-2008 the OpenTK Team. - * This notice may not be removed from any source distribution. 
- * See license.txt for licensing detailed licensing details. - * - * Contributions by Georg W�chter. - */ -#endregion - -using System; -using System.Collections.Generic; -using System.Text; - -namespace OpenTK -{ - /// - /// Represents a bezier curve with as many points as you want. - /// - [Serializable] - public struct BezierCurve - { - #region Fields - - private List points; - - /// - /// The parallel value. - /// - /// This value defines whether the curve should be calculated as a - /// parallel curve to the original bezier curve. A value of 0.0f represents - /// the original curve, 5.0f i.e. stands for a curve that has always a distance - /// of 5.0f to the orignal curve at any point. - public float Parallel; - - #endregion - - #region Properties - - /// - /// Gets the points of this curve. - /// - /// The first point and the last points represent the anchor points. - public IList Points - { - get - { - return points; - } - } - - #endregion - - #region Constructors - - /// - /// Constructs a new . - /// - /// The points. - public BezierCurve(IEnumerable points) - { - if (points == null) - throw new ArgumentNullException("points", "Must point to a valid list of Vector2 structures."); - - this.points = new List(points); - this.Parallel = 0.0f; - } - - /// - /// Constructs a new . - /// - /// The points. - public BezierCurve(params Vector2[] points) - { - if (points == null) - throw new ArgumentNullException("points", "Must point to a valid list of Vector2 structures."); - - this.points = new List(points); - this.Parallel = 0.0f; - } - - /// - /// Constructs a new . - /// - /// The parallel value. - /// The points. - public BezierCurve(float parallel, params Vector2[] points) - { - if (points == null) - throw new ArgumentNullException("points", "Must point to a valid list of Vector2 structures."); - - this.Parallel = parallel; - this.points = new List(points); - } - - /// - /// Constructs a new . - /// - /// The parallel value. - /// The points. - public BezierCurve(float parallel, IEnumerable points) - { - if (points == null) - throw new ArgumentNullException("points", "Must point to a valid list of Vector2 structures."); - - this.Parallel = parallel; - this.points = new List(points); - } - - #endregion - - #region Functions - - - /// - /// Calculates the point with the specified t. - /// - /// The t value, between 0.0f and 1.0f. - /// Resulting point. - public Vector2 CalculatePoint(float t) - { - return BezierCurve.CalculatePoint(points, t, Parallel); - } - - /// - /// Calculates the length of this bezier curve. - /// - /// The precision. - /// Length of curve. - /// The precision gets better as the - /// value gets smaller. - public float CalculateLength(float precision) - { - return BezierCurve.CalculateLength(points, precision, Parallel); - } - - #region Static methods - - /// - /// Calculates the length of the specified bezier curve. - /// - /// The points. - /// The precision value. - /// The precision gets better as the - /// value gets smaller. - public static float CalculateLength(IList points, float precision) - { - return BezierCurve.CalculateLength(points, precision, 0.0f); - } - - /// - /// Calculates the length of the specified bezier curve. - /// - /// The points. - /// The precision value. - /// The parallel value. - /// Length of curve. - /// The precision gets better as the - /// value gets smaller. - /// The parameter defines whether the curve should be calculated as a - /// parallel curve to the original bezier curve. 
A value of 0.0f represents - /// the original curve, 5.0f represents a curve that has always a distance - /// of 5.0f to the orignal curve. - public static float CalculateLength(IList points, float precision, float parallel) - { - float length = 0.0f; - Vector2 old = BezierCurve.CalculatePoint(points, 0.0f, parallel); - - for (float i = precision; i < (1.0f + precision); i += precision) - { - Vector2 n = CalculatePoint(points, i, parallel); - length += (n - old).Length; - old = n; - } - - return length; - } - - /// - /// Calculates the point on the given bezier curve with the specified t parameter. - /// - /// The points. - /// The t parameter, a value between 0.0f and 1.0f. - /// Resulting point. - public static Vector2 CalculatePoint(IList points, float t) - { - return BezierCurve.CalculatePoint(points, t, 0.0f); - } - - /// - /// Calculates the point on the given bezier curve with the specified t parameter. - /// - /// The points. - /// The t parameter, a value between 0.0f and 1.0f. - /// The parallel value. - /// Resulting point. - /// The parameter defines whether the curve should be calculated as a - /// parallel curve to the original bezier curve. A value of 0.0f represents - /// the original curve, 5.0f represents a curve that has always a distance - /// of 5.0f to the orignal curve. - public static Vector2 CalculatePoint(IList points, float t, float parallel) - { - Vector2 r = new Vector2(); - double c = 1.0d - (double)t; - float temp; - int i = 0; - - foreach (Vector2 pt in points) - { - temp = (float)MathHelper.BinomialCoefficient(points.Count - 1, i) * (float)(System.Math.Pow(t, i) * - System.Math.Pow(c, (points.Count - 1) - i)); - - r.X += temp * pt.X; - r.Y += temp * pt.Y; - i++; - } - - if (parallel == 0.0f) - return r; - - Vector2 perpendicular = new Vector2(); - - if (t != 0.0f) - perpendicular = r - BezierCurve.CalculatePointOfDerivative(points, t); - else - perpendicular = points[1] - points[0]; - - return r + Vector2.Normalize(perpendicular).PerpendicularRight * parallel; - } - - /// - /// Calculates the point with the specified t of the derivative of the given bezier function. - /// - /// The points. - /// The t parameter, value between 0.0f and 1.0f. - /// Resulting point. - private static Vector2 CalculatePointOfDerivative(IList points, float t) - { - Vector2 r = new Vector2(); - double c = 1.0d - (double)t; - float temp; - int i = 0; - - foreach (Vector2 pt in points) - { - temp = (float)MathHelper.BinomialCoefficient(points.Count - 2, i) * (float)(System.Math.Pow(t, i) * - System.Math.Pow(c, (points.Count - 2) - i)); - - r.X += temp * pt.X; - r.Y += temp * pt.Y; - i++; - } - - return r; - } - - #endregion - - #endregion - } -} diff --git a/OpenTK/Math/BezierCurveCubic.cs b/OpenTK/Math/BezierCurveCubic.cs deleted file mode 100644 index 149bbf7a..00000000 --- a/OpenTK/Math/BezierCurveCubic.cs +++ /dev/null @@ -1,163 +0,0 @@ -#region --- License --- -/* Licensed under the MIT/X11 license. - * Copyright (c) 2006-2008 the OpenTK Team. - * This notice may not be removed from any source distribution. - * See license.txt for licensing detailed licensing details. - * - * Contributions by Georg W�chter. - */ -#endregion - -using System; -using System.Collections.Generic; -using System.Text; - -namespace OpenTK -{ - /// - /// Represents a cubic bezier curve with two anchor and two control points. - /// - [Serializable] - public struct BezierCurveCubic - { - #region Fields - - /// - /// Start anchor point. - /// - public Vector2 StartAnchor; - - /// - /// End anchor point. 
- /// - public Vector2 EndAnchor; - - /// - /// First control point, controls the direction of the curve start. - /// - public Vector2 FirstControlPoint; - - /// - /// Second control point, controls the direction of the curve end. - /// - public Vector2 SecondControlPoint; - - /// - /// Gets or sets the parallel value. - /// - /// This value defines whether the curve should be calculated as a - /// parallel curve to the original bezier curve. A value of 0.0f represents - /// the original curve, 5.0f i.e. stands for a curve that has always a distance - /// of 5.f to the orignal curve at any point. - public float Parallel; - - #endregion - - #region Constructors - - /// - /// Constructs a new . - /// - /// The start anchor point. - /// The end anchor point. - /// The first control point. - /// The second control point. - public BezierCurveCubic(Vector2 startAnchor, Vector2 endAnchor, Vector2 firstControlPoint, Vector2 secondControlPoint) - { - this.StartAnchor = startAnchor; - this.EndAnchor = endAnchor; - this.FirstControlPoint = firstControlPoint; - this.SecondControlPoint = secondControlPoint; - this.Parallel = 0.0f; - } - - /// - /// Constructs a new . - /// - /// The parallel value. - /// The start anchor point. - /// The end anchor point. - /// The first control point. - /// The second control point. - public BezierCurveCubic(float parallel, Vector2 startAnchor, Vector2 endAnchor, Vector2 firstControlPoint, Vector2 secondControlPoint) - { - this.Parallel = parallel; - this.StartAnchor = startAnchor; - this.EndAnchor = endAnchor; - this.FirstControlPoint = firstControlPoint; - this.SecondControlPoint = secondControlPoint; - } - - #endregion - - #region Functions - - /// - /// Calculates the point with the specified t. - /// - /// The t value, between 0.0f and 1.0f. - /// Resulting point. - public Vector2 CalculatePoint(float t) - { - Vector2 r = new Vector2(); - float c = 1.0f - t; - - r.X = (StartAnchor.X * c * c * c) + (FirstControlPoint.X * 3 * t * c * c) + (SecondControlPoint.X * 3 * t * t * c) - + EndAnchor.X * t * t * t; - r.Y = (StartAnchor.Y * c * c * c) + (FirstControlPoint.Y * 3 * t * c * c) + (SecondControlPoint.Y * 3 * t * t * c) - + EndAnchor.Y * t * t * t; - - if (Parallel == 0.0f) - return r; - - Vector2 perpendicular = new Vector2(); - - if (t == 0.0f) - perpendicular = FirstControlPoint - StartAnchor; - else - perpendicular = r - CalculatePointOfDerivative(t); - - return r + Vector2.Normalize(perpendicular).PerpendicularRight * Parallel; - } - - /// - /// Calculates the point with the specified t of the derivative of this function. - /// - /// The t, value between 0.0f and 1.0f. - /// Resulting point. - private Vector2 CalculatePointOfDerivative(float t) - { - Vector2 r = new Vector2(); - float c = 1.0f - t; - - r.X = (c * c * StartAnchor.X) + (2 * t * c * FirstControlPoint.X) + (t * t * SecondControlPoint.X); - r.Y = (c * c * StartAnchor.Y) + (2 * t * c * FirstControlPoint.Y) + (t * t * SecondControlPoint.Y); - - return r; - } - - /// - /// Calculates the length of this bezier curve. - /// - /// The precision. - /// Length of the curve. - /// The precision gets better when the - /// value gets smaller. 
- public float CalculateLength(float precision) - { - float length = 0.0f; - Vector2 old = CalculatePoint(0.0f); - - for (float i = precision; i < (1.0f + precision); i += precision) - { - Vector2 n = CalculatePoint(i); - length += (n - old).Length; - old = n; - } - - return length; - } - - #endregion - } -} diff --git a/OpenTK/Math/BezierCurveQuadric.cs b/OpenTK/Math/BezierCurveQuadric.cs deleted file mode 100644 index 500b7fff..00000000 --- a/OpenTK/Math/BezierCurveQuadric.cs +++ /dev/null @@ -1,151 +0,0 @@ -#region --- License --- -/* Licensed under the MIT/X11 license. - * Copyright (c) 2006-2008 the OpenTK Team. - * This notice may not be removed from any source distribution. - * See license.txt for licensing detailed licensing details. - * - * Contributions by Georg W�chter. - */ -#endregion - -using System; -using System.Collections.Generic; -using System.Text; - -namespace OpenTK -{ - /// - /// Represents a quadric bezier curve with two anchor and one control point. - /// - [Serializable] - public struct BezierCurveQuadric - { - #region Fields - - /// - /// Start anchor point. - /// - public Vector2 StartAnchor; - - /// - /// End anchor point. - /// - public Vector2 EndAnchor; - - /// - /// Control point, controls the direction of both endings of the curve. - /// - public Vector2 ControlPoint; - - /// - /// The parallel value. - /// - /// This value defines whether the curve should be calculated as a - /// parallel curve to the original bezier curve. A value of 0.0f represents - /// the original curve, 5.0f i.e. stands for a curve that has always a distance - /// of 5.f to the orignal curve at any point. - public float Parallel; - - #endregion - - #region Constructors - - /// - /// Constructs a new . - /// - /// The start anchor. - /// The end anchor. - /// The control point. - public BezierCurveQuadric(Vector2 startAnchor, Vector2 endAnchor, Vector2 controlPoint) - { - this.StartAnchor = startAnchor; - this.EndAnchor = endAnchor; - this.ControlPoint = controlPoint; - this.Parallel = 0.0f; - } - - /// - /// Constructs a new . - /// - /// The parallel value. - /// The start anchor. - /// The end anchor. - /// The control point. - public BezierCurveQuadric(float parallel, Vector2 startAnchor, Vector2 endAnchor, Vector2 controlPoint) - { - this.Parallel = parallel; - this.StartAnchor = startAnchor; - this.EndAnchor = endAnchor; - this.ControlPoint = controlPoint; - } - - #endregion - - #region Functions - - /// - /// Calculates the point with the specified t. - /// - /// The t value, between 0.0f and 1.0f. - /// Resulting point. - public Vector2 CalculatePoint(float t) - { - Vector2 r = new Vector2(); - float c = 1.0f - t; - - r.X = (c * c * StartAnchor.X) + (2 * t * c * ControlPoint.X) + (t * t * EndAnchor.X); - r.Y = (c * c * StartAnchor.Y) + (2 * t * c * ControlPoint.Y) + (t * t * EndAnchor.Y); - - if (Parallel == 0.0f) - return r; - - Vector2 perpendicular = new Vector2(); - - if (t == 0.0f) - perpendicular = ControlPoint - StartAnchor; - else - perpendicular = r - CalculatePointOfDerivative(t); - - return r + Vector2.Normalize(perpendicular).PerpendicularRight * Parallel; - } - - /// - /// Calculates the point with the specified t of the derivative of this function. - /// - /// The t, value between 0.0f and 1.0f. - /// Resulting point. 
- private Vector2 CalculatePointOfDerivative(float t) - { - Vector2 r = new Vector2(); - - r.X = (1.0f - t) * StartAnchor.X + t * ControlPoint.X; - r.Y = (1.0f - t) * StartAnchor.Y + t * ControlPoint.Y; - - return r; - } - - /// - /// Calculates the length of this bezier curve. - /// - /// The precision. - /// Length of curve. - /// The precision gets better when the - /// value gets smaller. - public float CalculateLength(float precision) - { - float length = 0.0f; - Vector2 old = CalculatePoint(0.0f); - - for (float i = precision; i < (1.0f + precision); i += precision) - { - Vector2 n = CalculatePoint(i); - length += (n - old).Length; - old = n; - } - - return length; - } - - #endregion - } -} diff --git a/OpenTK/Math/Box2.cs b/OpenTK/Math/Box2.cs deleted file mode 100644 index 4c54b622..00000000 --- a/OpenTK/Math/Box2.cs +++ /dev/null @@ -1,99 +0,0 @@ -#region --- License --- -/* Copyright (c) 2006, 2007 Stefanos Apostolopoulos - * See license.txt for license info - */ -#endregion - -using System; -using System.Collections.Generic; -using System.Text; -using System.Runtime.InteropServices; -namespace OpenTK -{ - /// - /// Defines a 2d box (rectangle). - /// - [StructLayout(LayoutKind.Sequential)] - public struct Box2 - { - /// - /// The left boundary of the structure. - /// - public float Left; - - /// - /// The right boundary of the structure. - /// - public float Right; - - /// - /// The top boundary of the structure. - /// - public float Top; - - /// - /// The bottom boundary of the structure. - /// - public float Bottom; - - /// - /// Constructs a new Box2 with the specified dimensions. - /// - /// AnOpenTK.Vector2 describing the top-left corner of the Box2. - /// An OpenTK.Vector2 describing the bottom-right corner of the Box2. - public Box2(Vector2 topLeft, Vector2 bottomRight) - { - Left = topLeft.X; - Top = topLeft.Y; - Right = bottomRight.X; - Bottom = bottomRight.Y; - } - - /// - /// Constructs a new Box2 with the specified dimensions. - /// - /// The position of the left boundary. - /// The position of the top boundary. - /// The position of the right boundary. - /// The position of the bottom boundary. - public Box2(float left, float top, float right, float bottom) - { - Left = left; - Top = top; - Right = right; - Bottom = bottom; - } - - /// - /// Creates a new Box2 with the specified dimensions. - /// - /// The position of the top boundary. - /// The position of the left boundary. - /// The position of the right boundary. - /// The position of the bottom boundary. - /// A new OpenTK.Box2 with the specfied dimensions. - public static Box2 FromTLRB(float top, float left, float right, float bottom) - { - return new Box2(left, top, right, bottom); - } - - /// - /// Gets a float describing the width of the Box2 structure. - /// - public float Width { get { return (float)System.Math.Abs(Right - Left); } } - - /// - /// Gets a float describing the height of the Box2 structure. - /// - public float Height { get { return (float)System.Math.Abs(Bottom - Top); } } - - /// - /// Returns a describing the current instance. - /// - /// - public override string ToString() - { - return String.Format("({0},{1})-({2},{3})", Left, Top, Right, Bottom); - } - } -} diff --git a/OpenTK/Math/Functions.cs b/OpenTK/Math/Functions.cs deleted file mode 100644 index 15fd0f60..00000000 --- a/OpenTK/Math/Functions.cs +++ /dev/null @@ -1,371 +0,0 @@ -#region --- License --- -/* Licensed under the MIT/X11 license. - * Copyright (c) 2006-2008 the OpenTK Team. 
- * This notice may not be removed from any source distribution. - * See license.txt for licensing detailed licensing details. - * - * Contributions by Andy Gill, James Talton and Georg Wächter. - */ -#endregion - -using System; -using System.Collections.Generic; -using System.Text; - -namespace OpenTK -{ - /// - /// Contains mathematical functions for the OpenTK.Math toolkit. - /// - [Obsolete("Use OpenTK.MathHelper instead.")] - public static class Functions - { - #region NextPowerOfTwo - - /// - /// Returns the next power of two that is larger than the specified number. - /// - /// The specified number. - /// The next power of two. - public static long NextPowerOfTwo(long n) - { - if (n < 0) throw new ArgumentOutOfRangeException("n", "Must be positive."); - return (long)System.Math.Pow(2, System.Math.Ceiling(System.Math.Log((double)n, 2))); - } - - /// - /// Returns the next power of two that is larger than the specified number. - /// - /// The specified number. - /// The next power of two. - public static int NextPowerOfTwo(int n) - { - if (n < 0) throw new ArgumentOutOfRangeException("n", "Must be positive."); - return (int)System.Math.Pow(2, System.Math.Ceiling(System.Math.Log((double)n, 2))); - } - - /// - /// Returns the next power of two that is larger than the specified number. - /// - /// The specified number. - /// The next power of two. - public static float NextPowerOfTwo(float n) - { - if (n < 0) throw new ArgumentOutOfRangeException("n", "Must be positive."); - return (float)System.Math.Pow(2, System.Math.Ceiling(System.Math.Log((double)n, 2))); - } - - /// - /// Returns the next power of two that is larger than the specified number. - /// - /// The specified number. - /// The next power of two. - public static double NextPowerOfTwo(double n) - { - if (n < 0) throw new ArgumentOutOfRangeException("n", "Must be positive."); - return System.Math.Pow(2, System.Math.Ceiling(System.Math.Log((double)n, 2))); - } - - #endregion - - #region Factorial - - /// Calculates the factorial of a given natural number. - /// - /// The number. - /// n! - public static long Factorial(int n) - { - long result = 1; - - for (; n > 1; n--) - result *= n; - - return result; - } - - #endregion - - #region BinomialCoefficient - - /// - /// Calculates the binomial coefficient above . - /// - /// The n. - /// The k. - /// n! / (k! * (n - k)!) - public static long BinomialCoefficient(int n, int k) - { - return Factorial(n) / (Factorial(k) * Factorial(n - k)); - } - - #endregion - - #region InverseSqrtFast - - /// - /// Returns an approximation of the inverse square root of left number. - /// - /// A number. - /// An approximation of the inverse square root of the specified number, with an upper error bound of 0.001 - /// - /// This is an improved implementation of the the method known as Carmack's inverse square root - /// which is found in the Quake III source code. This implementation comes from - /// http://www.codemaestro.com/reviews/review00000105.html. For the history of this method, see - /// http://www.beyond3d.com/content/articles/8/ - /// - public static float InverseSqrtFast(float x) - { - unsafe - { - float xhalf = 0.5f * x; - int i = *(int*)&x; // Read bits as integer. - i = 0x5f375a86 - (i >> 1); // Make an initial guess for Newton-Raphson approximation - x = *(float*)&i; // Convert bits back to float - x = x * (1.5f - xhalf * x * x); // Perform left single Newton-Raphson step. - return x; - } - } - - /// - /// Returns an approximation of the inverse square root of left number. 
- /// - /// A number. - /// An approximation of the inverse square root of the specified number, with an upper error bound of 0.001 - /// - /// This is an improved implementation of the the method known as Carmack's inverse square root - /// which is found in the Quake III source code. This implementation comes from - /// http://www.codemaestro.com/reviews/review00000105.html. For the history of this method, see - /// http://www.beyond3d.com/content/articles/8/ - /// - public static double InverseSqrtFast(double x) - { - return InverseSqrtFast((float)x); - // TODO: The following code is wrong. Fix it, to improve precision. -#if false - unsafe - { - double xhalf = 0.5f * x; - int i = *(int*)&x; // Read bits as integer. - i = 0x5f375a86 - (i >> 1); // Make an initial guess for Newton-Raphson approximation - x = *(float*)&i; // Convert bits back to float - x = x * (1.5f - xhalf * x * x); // Perform left single Newton-Raphson step. - return x; - } -#endif - } - - #endregion - - #region DegreesToRadians - - /// - /// Convert degrees to radians - /// - /// An angle in degrees - /// The angle expressed in radians - public static float DegreesToRadians(float degrees) - { - const float degToRad = (float)System.Math.PI / 180.0f; - return degrees * degToRad; - } - - /// - /// Convert radians to degrees - /// - /// An angle in radians - /// The angle expressed in degrees - public static float RadiansToDegrees(float radians) - { - const float radToDeg = 180.0f / (float)System.Math.PI; - return radians * radToDeg; - } - - #endregion - - #region Mathematical constants - - /// - /// Obsolete. Do not use. - /// - public static readonly float PIF = 3.141592653589793238462643383279502884197169399375105820974944592307816406286208998628034825342117067982148086513282306647093844609550582231725359408128481117450284102701938521105559644622948954930382f; - - /// - /// Obsolete. Do not use. - /// - public static readonly float RTODF = 180.0f / PIF; - - /// - /// Obsolete. Do not use. - /// - public static readonly float DTORF = PIF / 180.0f; - - /// - /// Obsolete. Do not use. - /// - public static readonly double PI = 3.141592653589793238462643383279502884197169399375105820974944592307816406286208998628034825342117067982148086513282306647093844609550582231725359408128481117450284102701938521105559644622948954930382d; - - /// - /// Obsolete. Do not use. - /// - public static readonly double RTOD = 180.0d / PIF; - - /// - /// Obsolete. Do not use. - /// - public static readonly double DTOR = PIF / 180.0d; - - #endregion - - #region Swap - - /// - /// Swaps two float values. - /// - /// The first value. - /// The second value. - public static void Swap(ref double a, ref double b) - { - double temp = a; - a = b; - b = temp; - } - - /// - /// Swaps two float values. - /// - /// The first value. - /// The second value. - public static void Swap(ref float a, ref float b) - { - float temp = a; - a = b; - b = temp; - } - - #endregion - } - -#if false - public static partial class Math - { - #region --- Vectors --- - - #region --- Addition --- - - /// - /// Adds the given Vector2 to the current Vector3. - /// - /// The right operand of the addition. - /// A new Vector3 containing the result of the addition. - public static Vector2 Add(Vector2 left, Vector2 right) - { - return new Vector2(left).Add(right); - } - - /// - /// Adds the given Vector3 to the current Vector3. - /// - /// The right operand of the addition. - /// A new Vector3 containing the result of the addition. 
- public static Vector3 Add(Vector2 left, Vector3 right) - { - return new Vector3(left).Add(right); - } - - /// - /// Adds the given Vector4 to the current Vector3. W-coordinate remains unaffected. - /// - /// The right operand of the addition. - /// A new Vector4 containing the result of the addition. - public static Vector4 Add(Vector2 left, Vector4 right) - { - return new Vector4(left).Add(right); - } - - /// - /// Adds the given Vector2 to the current Vector3. - /// - /// The right operand of the addition. - /// A new Vector3 containing the result of the addition. - public static Vector3 Add(Vector3 left, Vector2 right) - { - return new Vector3(left).Add(right); - } - - /// - /// Adds the given Vector3 to the current Vector3. - /// - /// The right operand of the addition. - /// A new Vector3 containing the result of the addition. - public static Vector3 Add(Vector3 left, Vector3 right) - { - return new Vector3(left).Add(right); - } - - /// - /// Adds the given Vector4 to the current Vector3. W-coordinate remains unaffected. - /// - /// The right operand of the addition. - /// A new Vector4 containing the result of the addition. - public static Vector4 Add(Vector3 left, Vector4 right) - { - return new Vector4(left).Add(right); - } - - /// - /// Adds the given Vector2 to the current Vector3. - /// - /// The right operand of the addition. - /// A new Vector3 containing the result of the addition. - public static Vector4 Add(Vector4 left, Vector2 right) - { - return new Vector4(left).Add(right); - } - - /// - /// Adds the given Vector3 to the current Vector3. - /// - /// The right operand of the addition. - /// A new Vector3 containing the result of the addition. - public static Vector4 Add(Vector4 left, Vector3 right) - { - return new Vector4(left).Add(right); - } - - /// - /// Adds the given Vector4 to the current Vector3. W-coordinate remains unaffected. - /// - /// The right operand of the addition. - /// A new Vector4 containing the result of the addition. - public static Vector4 Add(Vector4 left, Vector4 right) - { - return new Vector4(left).Add(right); - } - - #endregion - - #region --- Subtraction --- - - - - #endregion - - #region --- Cross --- - - /// - /// Computes the cross product between the current and the given Vector3. The current Vector3 is set to the result of the computation. - /// - /// The right operand of the cross product - /// The current - public static Vector3 Cross(Vector3 left, Vector3 right) - { - return new Vector3(left).Cross(right); - } - - #endregion - - #endregion - } -#endif -} diff --git a/OpenTK/Math/Half.cs b/OpenTK/Math/Half.cs deleted file mode 100644 index 06ff241c..00000000 --- a/OpenTK/Math/Half.cs +++ /dev/null @@ -1,588 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - */ -/* -The conversion functions are derived from OpenEXR's implementation and are -governed by the following license: - -Copyright (c) 2002, Industrial Light & Magic, a division of Lucas -Digital Ltd. LLC - -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: -* Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. -* Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. -* Neither the name of Industrial Light & Magic nor the names of -its contributors may be used to endorse or promote products derived -from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -#endregion --- License --- - -using System; -using System.IO; -using System.Runtime.InteropServices; -using System.Runtime.Serialization; - -namespace OpenTK -{ - - /// - /// The name Half is derived from half-precision floating-point number. - /// It occupies only 16 bits, which are split into 1 Sign bit, 5 Exponent bits and 10 Mantissa bits. - /// - /// - /// Quote from ARB_half_float_pixel specification: - /// Any representable 16-bit floating-point value is legal as input to a GL command that accepts 16-bit floating-point data. The - /// result of providing a value that is not a floating-point number (such as infinity or NaN) to such a command is unspecified, - /// but must not lead to GL interruption or termination. Providing a denormalized number or negative zero to GL must yield - /// predictable results. - /// - [Serializable, StructLayout(LayoutKind.Sequential)] - public struct Half : ISerializable, IComparable, IFormattable, IEquatable - { - #region Internal Field - - UInt16 bits; - - #endregion Internal Field - - #region Properties - - /// Returns true if the Half is zero. 
- public bool IsZero { get { return (bits == 0) || (bits == 0x8000); } } - - /// Returns true if the Half represents Not A Number (NaN) - public bool IsNaN { get { return (((bits & 0x7C00) == 0x7C00) && (bits & 0x03FF) != 0x0000); } } - - /// Returns true if the Half represents positive infinity. - public bool IsPositiveInfinity { get { return (bits == 31744); } } - - /// Returns true if the Half represents negative infinity. - public bool IsNegativeInfinity { get { return (bits == 64512); } } - - #endregion Properties - - #region Constructors - - /// - /// The new Half instance will convert the parameter into 16-bit half-precision floating-point. - /// - /// 32-bit single-precision floating-point number. - public Half(Single f) - : this() - { - unsafe - { - bits = SingleToHalf(*(int*)&f); - } - } - - /// - /// The new Half instance will convert the parameter into 16-bit half-precision floating-point. - /// - /// 32-bit single-precision floating-point number. - /// Enable checks that will throw if the conversion result is not meaningful. - public Half(Single f, bool throwOnError) - : this(f) - { - if (throwOnError) - { - // handle cases that cause overflow rather than silently ignoring it - if (f > Half.MaxValue) throw new ArithmeticException("Half: Positive maximum value exceeded."); - if (f < -Half.MaxValue) throw new ArithmeticException("Half: Negative minimum value exceeded."); - - // handle cases that make no sense - if (Single.IsNaN(f)) throw new ArithmeticException("Half: Input is not a number (NaN)."); - if (Single.IsPositiveInfinity(f)) throw new ArithmeticException("Half: Input is positive infinity."); - if (Single.IsNegativeInfinity(f)) throw new ArithmeticException("Half: Input is negative infinity."); - } - } - - /// - /// The new Half instance will convert the parameter into 16-bit half-precision floating-point. - /// - /// 64-bit double-precision floating-point number. - public Half(Double d) : this((Single)d) { } - - /// - /// The new Half instance will convert the parameter into 16-bit half-precision floating-point. - /// - /// 64-bit double-precision floating-point number. - /// Enable checks that will throw if the conversion result is not meaningful. - public Half(Double d, bool throwOnError) : this((Single)d, throwOnError) { } - - #endregion Constructors - - #region Single -> Half - - /// Ported from OpenEXR's IlmBase 1.0.1 - private UInt16 SingleToHalf(Int32 si32) - { - // Our floating point number, F, is represented by the bit pattern in integer i. - // Disassemble that bit pattern into the sign, S, the exponent, E, and the significand, M. - // Shift S into the position where it will go in in the resulting half number. - // Adjust E, accounting for the different exponent bias of float and half (127 versus 15). - - Int32 sign = (si32 >> 16) & 0x00008000; - Int32 exponent = ((si32 >> 23) & 0x000000ff) - (127 - 15); - Int32 mantissa = si32 & 0x007fffff; - - // Now reassemble S, E and M into a half: - - if (exponent <= 0) - { - if (exponent < -10) - { - // E is less than -10. The absolute value of F is less than Half.MinValue - // (F may be a small normalized float, a denormalized float or a zero). - // - // We convert F to a half zero with the same sign as F. - - return (UInt16)sign; - } - - // E is between -10 and 0. F is a normalized float whose magnitude is less than Half.MinNormalizedValue. - // - // We convert F to a denormalized half. - - // Add an explicit leading 1 to the significand. 
- - mantissa = mantissa | 0x00800000; - - // Round to M to the nearest (10+E)-bit value (with E between -10 and 0); in case of a tie, round to the nearest even value. - // - // Rounding may cause the significand to overflow and make our number normalized. Because of the way a half's bits - // are laid out, we don't have to treat this case separately; the code below will handle it correctly. - - Int32 t = 14 - exponent; - Int32 a = (1 << (t - 1)) - 1; - Int32 b = (mantissa >> t) & 1; - - mantissa = (mantissa + a + b) >> t; - - // Assemble the half from S, E (==zero) and M. - - return (UInt16)(sign | mantissa); - } - else if (exponent == 0xff - (127 - 15)) - { - if (mantissa == 0) - { - // F is an infinity; convert F to a half infinity with the same sign as F. - - return (UInt16)(sign | 0x7c00); - } - else - { - // F is a NAN; we produce a half NAN that preserves the sign bit and the 10 leftmost bits of the - // significand of F, with one exception: If the 10 leftmost bits are all zero, the NAN would turn - // into an infinity, so we have to set at least one bit in the significand. - - mantissa >>= 13; - return (UInt16)(sign | 0x7c00 | mantissa | ((mantissa == 0) ? 1 : 0)); - } - } - else - { - // E is greater than zero. F is a normalized float. We try to convert F to a normalized half. - - // Round to M to the nearest 10-bit value. In case of a tie, round to the nearest even value. - - mantissa = mantissa + 0x00000fff + ((mantissa >> 13) & 1); - - if ((mantissa & 0x00800000) != 0) - { - mantissa = 0; // overflow in significand, - exponent += 1; // adjust exponent - } - - // exponent overflow - if (exponent > 30) throw new ArithmeticException("Half: Hardware floating-point overflow."); - - // Assemble the half from S, E and M. - - return (UInt16)(sign | (exponent << 10) | (mantissa >> 13)); - } - } - - #endregion Single -> Half - - #region Half -> Single - - /// Converts the 16-bit half to 32-bit floating-point. - /// A single-precision floating-point number. - public Single ToSingle() - { - int i = HalfToFloat(bits); - - unsafe - { - return *(float*)&i; - } - } - - /// Ported from OpenEXR's IlmBase 1.0.1 - private Int32 HalfToFloat(UInt16 ui16) - { - - Int32 sign = (ui16 >> 15) & 0x00000001; - Int32 exponent = (ui16 >> 10) & 0x0000001f; - Int32 mantissa = ui16 & 0x000003ff; - - if (exponent == 0) - { - if (mantissa == 0) - { - // Plus or minus zero - - return sign << 31; - } - else - { - // Denormalized number -- renormalize it - - while ((mantissa & 0x00000400) == 0) - { - mantissa <<= 1; - exponent -= 1; - } - - exponent += 1; - mantissa &= ~0x00000400; - } - } - else if (exponent == 31) - { - if (mantissa == 0) - { - // Positive or negative infinity - - return (sign << 31) | 0x7f800000; - } - else - { - // Nan -- preserve sign and significand bits - - return (sign << 31) | 0x7f800000 | (mantissa << 13); - } - } - - // Normalized number - - exponent = exponent + (127 - 15); - mantissa = mantissa << 13; - - // Assemble S, E and M. - - return (sign << 31) | (exponent << 23) | mantissa; - } - - #endregion Half -> Single - - #region Conversions - - /// - /// Converts a System.Single to a OpenTK.Half. - /// - /// The value to convert. - /// A - /// - /// The result of the conversion. - /// A - /// - public static explicit operator Half(float f) - { - return new Half(f); - } - - /// - /// Converts a System.Double to a OpenTK.Half. - /// - /// The value to convert. - /// A - /// - /// The result of the conversion. 
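A minimal usage sketch of the Half conversions deleted above, assuming compilation against the pre-removal OpenTK sources (the demo class and Main method are illustrative, not part of the library). 1.0f encodes as 0x3C00 in binary16 — sign 0, biased exponent 01111, mantissa 0 — and the GetBytes helper defined further down in this file exposes exactly those bits.

using System;
using OpenTK;

static class HalfRoundTripDemo
{
    static void Main()
    {
        Half h = (Half)1.0f;                 // explicit float -> Half operator above
        byte[] bits = Half.GetBytes(h);      // on a little-endian machine: 0x00, 0x3C
        Console.WriteLine("0x{0:X2}{1:X2}", bits[1], bits[0]);   // 0x3C00

        float back = h;                      // implicit Half -> float operator above
        Console.WriteLine(back);             // 1
    }
}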
- /// A - /// - public static explicit operator Half(double d) - { - return new Half(d); - } - - /// - /// Converts a OpenTK.Half to a System.Single. - /// - /// The value to convert. - /// A - /// - /// The result of the conversion. - /// A - /// - public static implicit operator float(Half h) - { - return h.ToSingle(); - } - - /// - /// Converts a OpenTK.Half to a System.Double. - /// - /// The value to convert. - /// A - /// - /// The result of the conversion. - /// A - /// - public static implicit operator double(Half h) - { - return (double)h.ToSingle(); - } - - #endregion Conversions - - #region Constants - - /// The size in bytes for an instance of the Half struct. - public static readonly Int32 SizeInBytes = 2; - - /// Smallest positive half - public static readonly Single MinValue = 5.96046448e-08f; - - /// Smallest positive normalized half - public static readonly Single MinNormalizedValue = 6.10351562e-05f; - - /// Largest positive half - public static readonly Single MaxValue = 65504.0f; - - /// Smallest positive e for which half (1.0 + e) != half (1.0) - public static readonly Single Epsilon = 0.00097656f; - - #endregion Constants - - #region ISerializable - - /// Constructor used by ISerializable to deserialize the object. - /// - /// - public Half(SerializationInfo info, StreamingContext context) - { - this.bits = (ushort)info.GetValue("bits", typeof(ushort)); - } - - /// Used by ISerialize to serialize the object. - /// - /// - public void GetObjectData(SerializationInfo info, StreamingContext context) - { - info.AddValue("bits", this.bits); - } - - #endregion ISerializable - - #region Binary dump - - /// Updates the Half by reading from a Stream. - /// A BinaryReader instance associated with an open Stream. - public void FromBinaryStream(BinaryReader bin) - { - this.bits = bin.ReadUInt16(); - - } - - /// Writes the Half into a Stream. - /// A BinaryWriter instance associated with an open Stream. - public void ToBinaryStream(BinaryWriter bin) - { - bin.Write(this.bits); - } - - #endregion Binary dump - - #region IEquatable Members - - const int maxUlps = 1; - - /// - /// Returns a value indicating whether this instance is equal to a specified OpenTK.Half value. - /// - /// OpenTK.Half object to compare to this instance.. - /// True, if other is equal to this instance; false otherwise. - public bool Equals(Half other) - { - short aInt, bInt; - unchecked { aInt = (short)other.bits; } - unchecked { bInt = (short)this.bits; } - - // Make aInt lexicographically ordered as a twos-complement int - if (aInt < 0) - aInt = (short)(0x8000 - aInt); - - // Make bInt lexicographically ordered as a twos-complement int - if (bInt < 0) - bInt = (short)(0x8000 - bInt); - - short intDiff = System.Math.Abs((short)(aInt - bInt)); - - if (intDiff <= maxUlps) - return true; - - return false; - } - - #endregion - - #region IComparable Members - - /// - /// Compares this instance to a specified half-precision floating-point number - /// and returns an integer that indicates whether the value of this instance - /// is less than, equal to, or greater than the value of the specified half-precision - /// floating-point number. - /// - /// A half-precision floating-point number to compare. - /// - /// A signed number indicating the relative values of this instance and value. If the number is: - /// Less than zero, then this instance is less than other, or this instance is not a number - /// (OpenTK.Half.NaN) and other is a number. 
- /// Zero: this instance is equal to value, or both this instance and other - /// are not a number (OpenTK.Half.NaN), OpenTK.Half.PositiveInfinity, or - /// OpenTK.Half.NegativeInfinity. - /// Greater than zero: this instance is greater than othrs, or this instance is a number - /// and other is not a number (OpenTK.Half.NaN). - /// - public int CompareTo(Half other) - { - return ((float)this).CompareTo((float)other); - } - - #endregion IComparable Members - - #region IFormattable Members - - /// Converts this Half into a human-legible string representation. - /// The string representation of this instance. - public override string ToString() - { - return this.ToSingle().ToString(); - } - - /// Converts this Half into a human-legible string representation. - /// Formatting for the output string. - /// Culture-specific formatting information. - /// The string representation of this instance. - public string ToString(string format, IFormatProvider formatProvider) - { - return this.ToSingle().ToString(format, formatProvider); - } - - #endregion IFormattable Members - - #region String -> Half - - /// Converts the string representation of a number to a half-precision floating-point equivalent. - /// String representation of the number to convert. - /// A new Half instance. - public static Half Parse(string s) - { - return (Half)Single.Parse(s); - } - - /// Converts the string representation of a number to a half-precision floating-point equivalent. - /// String representation of the number to convert. - /// Specifies the format of s. - /// Culture-specific formatting information. - /// A new Half instance. - public static Half Parse(string s, System.Globalization.NumberStyles style, IFormatProvider provider) - { - return (Half)Single.Parse(s, style, provider); - } - - /// Converts the string representation of a number to a half-precision floating-point equivalent. Returns success. - /// String representation of the number to convert. - /// The Half instance to write to. - /// Success. - public static bool TryParse(string s, out Half result) - { - float f; - bool b = Single.TryParse(s, out f); - result = (Half)f; - return b; - } - - /// Converts the string representation of a number to a half-precision floating-point equivalent. Returns success. - /// String representation of the number to convert. - /// Specifies the format of s. - /// Culture-specific formatting information. - /// The Half instance to write to. - /// Success. - public static bool TryParse(string s, System.Globalization.NumberStyles style, IFormatProvider provider, out Half result) - { - float f; - bool b = Single.TryParse(s, style, provider, out f); - result = (Half)f; - return b; - } - - #endregion String -> Half - - #region BitConverter - - /// Returns the Half as an array of bytes. - /// The Half to convert. - /// The input as byte array. - public static byte[] GetBytes(Half h) - { - return BitConverter.GetBytes(h.bits); - } - - /// Converts an array of bytes into Half. - /// A Half in it's byte[] representation. - /// The starting position within value. - /// A new Half instance. 
- public static Half FromBytes(byte[] value, int startIndex) - { - Half h; - h.bits = BitConverter.ToUInt16(value, startIndex); - return h; - } - - #endregion BitConverter - } -} \ No newline at end of file diff --git a/OpenTK/Math/MathHelper.cs b/OpenTK/Math/MathHelper.cs deleted file mode 100644 index 87fad768..00000000 --- a/OpenTK/Math/MathHelper.cs +++ /dev/null @@ -1,333 +0,0 @@ -#region --- License --- -/* Licensed under the MIT/X11 license. - * Copyright (c) 2006-2008 the OpenTK Team. - * This notice may not be removed from any source distribution. - * See license.txt for licensing detailed licensing details. - * - * Contributions by Andy Gill, James Talton and Georg Wächter. - */ -#endregion - -using System; -using System.Collections.Generic; -using System.Text; - -namespace OpenTK -{ - /// - /// Contains common mathematical functions and constants. - /// - public static class MathHelper - { - #region Fields - - /// - /// Defines the value of Pi as a . - /// - public const float Pi = 3.141592653589793238462643383279502884197169399375105820974944592307816406286208998628034825342117067982148086513282306647093844609550582231725359408128481117450284102701938521105559644622948954930382f; - - /// - /// Defines the value of Pi divided by two as a . - /// - public const float PiOver2 = Pi / 2; - - /// - /// Defines the value of Pi divided by three as a . - /// - public const float PiOver3 = Pi / 3; - - /// - /// Definesthe value of Pi divided by four as a . - /// - public const float PiOver4 = Pi / 4; - - /// - /// Defines the value of Pi divided by six as a . - /// - public const float PiOver6 = Pi / 6; - - /// - /// Defines the value of Pi multiplied by two as a . - /// - public const float TwoPi = 2 * Pi; - - /// - /// Defines the value of Pi multiplied by 3 and divided by two as a . - /// - public const float ThreePiOver2 = 3 * Pi / 2; - - /// - /// Defines the value of E as a . - /// - public const float E = 2.71828182845904523536f; - - /// - /// Defines the base-10 logarithm of E. - /// - public const float Log10E = 0.434294482f; - - /// - /// Defines the base-2 logarithm of E. - /// - public const float Log2E = 1.442695041f; - - #endregion - - #region Public Members - - #region NextPowerOfTwo - - /// - /// Returns the next power of two that is larger than the specified number. - /// - /// The specified number. - /// The next power of two. - public static long NextPowerOfTwo(long n) - { - if (n < 0) throw new ArgumentOutOfRangeException("n", "Must be positive."); - return (long)System.Math.Pow(2, System.Math.Ceiling(System.Math.Log((double)n, 2))); - } - - /// - /// Returns the next power of two that is larger than the specified number. - /// - /// The specified number. - /// The next power of two. - public static int NextPowerOfTwo(int n) - { - if (n < 0) throw new ArgumentOutOfRangeException("n", "Must be positive."); - return (int)System.Math.Pow(2, System.Math.Ceiling(System.Math.Log((double)n, 2))); - } - - /// - /// Returns the next power of two that is larger than the specified number. - /// - /// The specified number. - /// The next power of two. - public static float NextPowerOfTwo(float n) - { - if (n < 0) throw new ArgumentOutOfRangeException("n", "Must be positive."); - return (float)System.Math.Pow(2, System.Math.Ceiling(System.Math.Log((double)n, 2))); - } - - /// - /// Returns the next power of two that is larger than the specified number. - /// - /// The specified number. - /// The next power of two. 
- public static double NextPowerOfTwo(double n) - { - if (n < 0) throw new ArgumentOutOfRangeException("n", "Must be positive."); - return System.Math.Pow(2, System.Math.Ceiling(System.Math.Log((double)n, 2))); - } - - #endregion - - #region Factorial - - /// Calculates the factorial of a given natural number. - /// - /// The number. - /// n! - public static long Factorial(int n) - { - long result = 1; - - for (; n > 1; n--) - result *= n; - - return result; - } - - #endregion - - #region BinomialCoefficient - - /// - /// Calculates the binomial coefficient above . - /// - /// The n. - /// The k. - /// n! / (k! * (n - k)!) - public static long BinomialCoefficient(int n, int k) - { - return Factorial(n) / (Factorial(k) * Factorial(n - k)); - } - - #endregion - - #region InverseSqrtFast - - /// - /// Returns an approximation of the inverse square root of left number. - /// - /// A number. - /// An approximation of the inverse square root of the specified number, with an upper error bound of 0.001 - /// - /// This is an improved implementation of the the method known as Carmack's inverse square root - /// which is found in the Quake III source code. This implementation comes from - /// http://www.codemaestro.com/reviews/review00000105.html. For the history of this method, see - /// http://www.beyond3d.com/content/articles/8/ - /// - public static float InverseSqrtFast(float x) - { - unsafe - { - float xhalf = 0.5f * x; - int i = *(int*)&x; // Read bits as integer. - i = 0x5f375a86 - (i >> 1); // Make an initial guess for Newton-Raphson approximation - x = *(float*)&i; // Convert bits back to float - x = x * (1.5f - xhalf * x * x); // Perform left single Newton-Raphson step. - return x; - } - } - - /// - /// Returns an approximation of the inverse square root of left number. - /// - /// A number. - /// An approximation of the inverse square root of the specified number, with an upper error bound of 0.001 - /// - /// This is an improved implementation of the the method known as Carmack's inverse square root - /// which is found in the Quake III source code. This implementation comes from - /// http://www.codemaestro.com/reviews/review00000105.html. For the history of this method, see - /// http://www.beyond3d.com/content/articles/8/ - /// - public static double InverseSqrtFast(double x) - { - return InverseSqrtFast((float)x); - // TODO: The following code is wrong. Fix it, to improve precision. -#if false - unsafe - { - double xhalf = 0.5f * x; - int i = *(int*)&x; // Read bits as integer. - i = 0x5f375a86 - (i >> 1); // Make an initial guess for Newton-Raphson approximation - x = *(float*)&i; // Convert bits back to float - x = x * (1.5f - xhalf * x * x); // Perform left single Newton-Raphson step. 
- return x; - } -#endif - } - - #endregion - - #region DegreesToRadians - - /// - /// Convert degrees to radians - /// - /// An angle in degrees - /// The angle expressed in radians - public static float DegreesToRadians(float degrees) - { - const float degToRad = (float)System.Math.PI / 180.0f; - return degrees * degToRad; - } - - /// - /// Convert radians to degrees - /// - /// An angle in radians - /// The angle expressed in degrees - public static float RadiansToDegrees(float radians) - { - const float radToDeg = 180.0f / (float)System.Math.PI; - return radians * radToDeg; - } - - /// - /// Convert degrees to radians - /// - /// An angle in degrees - /// The angle expressed in radians - public static double DegreesToRadians(double degrees) - { - const double degToRad = System.Math.PI / 180.0; - return degrees * degToRad; - } - - /// - /// Convert radians to degrees - /// - /// An angle in radians - /// The angle expressed in degrees - public static double RadiansToDegrees(double radians) - { - const double radToDeg = 180.0 / System.Math.PI; - return radians * radToDeg; - } - - #endregion - - #region Swap - - /// - /// Swaps two double values. - /// - /// The first value. - /// The second value. - public static void Swap(ref double a, ref double b) - { - double temp = a; - a = b; - b = temp; - } - - /// - /// Swaps two float values. - /// - /// The first value. - /// The second value. - public static void Swap(ref float a, ref float b) - { - float temp = a; - a = b; - b = temp; - } - - #endregion - - #region Clamp - - /// - /// Clamps a number between a minimum and a maximum. - /// - /// The number to clamp. - /// The minimum allowed value. - /// The maximum allowed value. - /// min, if n is lower than min; max, if n is higher than max; n otherwise. - public static int Clamp(int n, int min, int max) - { - return Math.Max(Math.Min(n, max), min); - } - - /// - /// Clamps a number between a minimum and a maximum. - /// - /// The number to clamp. - /// The minimum allowed value. - /// The maximum allowed value. - /// min, if n is lower than min; max, if n is higher than max; n otherwise. - public static float Clamp(float n, float min, float max) - { - return Math.Max(Math.Min(n, max), min); - } - - /// - /// Clamps a number between a minimum and a maximum. - /// - /// The number to clamp. - /// The minimum allowed value. - /// The maximum allowed value. - /// min, if n is lower than min; max, if n is higher than max; n otherwise. - public static double Clamp(double n, double min, double max) - { - return Math.Max(Math.Min(n, max), min); - } - - #endregion - - #endregion - } -} diff --git a/OpenTK/Math/Matrix2.cs b/OpenTK/Math/Matrix2.cs deleted file mode 100644 index 0a0cb382..00000000 --- a/OpenTK/Math/Matrix2.cs +++ /dev/null @@ -1,764 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - */ -#endregion - -using System; -using System.Runtime.InteropServices; - -namespace OpenTK -{ - /// - /// Represents a 2x2 matrix - /// - public struct Matrix2 : IEquatable - { - #region Fields - - /// - /// Top row of the matrix. - /// - public Vector2 Row0; - - /// - /// Bottom row of the matrix. - /// - public Vector2 Row1; - - /// - /// The identity matrix. - /// - public static readonly Matrix2 Identity = new Matrix2(Vector2.UnitX, Vector2.UnitY); - - /// - /// The zero matrix. - /// - public static readonly Matrix2 Zero = new Matrix2(Vector2.Zero, Vector2.Zero); - - #endregion - - #region Constructors - - /// - /// Constructs a new instance. - /// - /// Top row of the matrix. - /// Bottom row of the matrix. - public Matrix2(Vector2 row0, Vector2 row1) - { - Row0 = row0; - Row1 = row1; - } - - /// - /// Constructs a new instance - /// - /// First item of the first row of the matrix. - /// Second item of the first row of the matrix. - /// First item of the second row of the matrix. - /// Second item of the second row of the matrix. - public Matrix2( - float m00, float m01, - float m10, float m11) - { - Row0 = new Vector2(m00, m01); - Row1 = new Vector2(m10, m11); - } - - #endregion - - #region Public Members - - #region Properties - - /// - /// Gets the determinant of this matrix. - /// - public float Determinant - { - get - { - float m11 = Row0.X, m12 = Row0.Y, - m21 = Row1.X, m22 = Row1.Y; - - return m11 * m22 - m12 * m21; - } - } - - /// - /// Gets or sets the first column of this matrix. - /// - public Vector2 Column0 - { - get { return new Vector2(Row0.X, Row1.X); } - set { Row0.X = value.X; Row1.X = value.Y; } - } - - /// - /// Gets or sets the second column of this matrix. - /// - public Vector2 Column1 - { - get { return new Vector2(Row0.Y, Row1.Y); } - set { Row0.Y = value.X; Row1.Y = value.Y; } - } - - /// - /// Gets or sets the value at row 1, column 1 of this instance. - /// - public float M11 { get { return Row0.X; } set { Row0.X = value; } } - - /// - /// Gets or sets the value at row 1, column 2 of this instance. - /// - public float M12 { get { return Row0.Y; } set { Row0.Y = value; } } - - /// - /// Gets or sets the value at row 2, column 1 of this instance. - /// - public float M21 { get { return Row1.X; } set { Row1.X = value; } } - - /// - /// Gets or sets the value at row 2, column 2 of this instance. - /// - public float M22 { get { return Row1.Y; } set { Row1.Y = value; } } - - /// - /// Gets or sets the values along the main diagonal of the matrix. - /// - public Vector2 Diagonal - { - get - { - return new Vector2(Row0.X, Row1.Y); - } - set - { - Row0.X = value.X; - Row1.Y = value.Y; - } - } - - /// - /// Gets the trace of the matrix, the sum of the values along the diagonal. - /// - public float Trace { get { return Row0.X + Row1.Y; } } - - #endregion - - #region Indexers - - /// - /// Gets or sets the value at a specified row and column. 
- /// - public float this[int rowIndex, int columnIndex] - { - get - { - if (rowIndex == 0) return Row0[columnIndex]; - else if (rowIndex == 1) return Row1[columnIndex]; - throw new IndexOutOfRangeException("You tried to access this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - set - { - if (rowIndex == 0) Row0[columnIndex] = value; - else if (rowIndex == 1) Row1[columnIndex] = value; - else throw new IndexOutOfRangeException("You tried to set this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - } - - #endregion - - #region Instance - - #region public void Transpose() - - /// - /// Converts this instance to it's transpose. - /// - public void Transpose() - { - this = Matrix2.Transpose(this); - } - - #endregion - - #region public void Invert() - - /// - /// Converts this instance into its inverse. - /// - public void Invert() - { - this = Matrix2.Invert(this); - } - - #endregion - - #endregion - - #region Static - - #region CreateRotation - - /// - /// Builds a rotation matrix. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix2 instance. - public static void CreateRotation(float angle, out Matrix2 result) - { - float cos = (float)System.Math.Cos(angle); - float sin = (float)System.Math.Sin(angle); - - result.Row0.X = cos; - result.Row0.Y = sin; - result.Row1.X = -sin; - result.Row1.Y = cos; - } - - /// - /// Builds a rotation matrix. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix2 instance. - public static Matrix2 CreateRotation(float angle) - { - Matrix2 result; - CreateRotation(angle, out result); - return result; - } - - #endregion - - #region CreateScale - - /// - /// Creates a scale matrix. - /// - /// Single scale factor for the x, y, and z axes. - /// A scale matrix. - public static void CreateScale(float scale, out Matrix2 result) - { - result.Row0.X = scale; - result.Row0.Y = 0; - result.Row1.X = 0; - result.Row1.Y = scale; - } - - /// - /// Creates a scale matrix. - /// - /// Single scale factor for the x and y axes. - /// A scale matrix. - public static Matrix2 CreateScale(float scale) - { - Matrix2 result; - CreateScale(scale, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factors for the x and y axes. - /// A scale matrix. - public static void CreateScale(Vector2 scale, out Matrix2 result) - { - result.Row0.X = scale.X; - result.Row0.Y = 0; - result.Row1.X = 0; - result.Row1.Y = scale.Y; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factors for the x and y axes. - /// A scale matrix. - public static Matrix2 CreateScale(Vector2 scale) - { - Matrix2 result; - CreateScale(scale, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factor for the x axis. - /// Scale factor for the y axis. - /// A scale matrix. - public static void CreateScale(float x, float y, out Matrix2 result) - { - result.Row0.X = x; - result.Row0.Y = 0; - result.Row1.X = 0; - result.Row1.Y = y; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factor for the x axis. - /// Scale factor for the y axis. - /// A scale matrix. - public static Matrix2 CreateScale(float x, float y) - { - Matrix2 result; - CreateScale(x, y, out result); - return result; - } - - #endregion - - #region Multiply Functions - - /// - /// Multiplies and instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. 
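A short worked example of the CreateRotation path above, assuming the pre-removal OpenTK sources and the usual row-vector convention for these row-major matrices (v' = v * M); the demo class is illustrative. CreateRotation stores cos/sin row-wise, so a 90-degree rotation gives Row0 of approximately (0, 1) and Row1 of (-1, 0), which maps the x axis onto the y axis; the transform is computed by hand because the removed Matrix2 type exposes no vector-transform helper.

using System;
using OpenTK;

static class Matrix2RotationDemo
{
    static void Main()
    {
        Matrix2 r = Matrix2.CreateRotation(MathHelper.PiOver2);
        Console.WriteLine(r);   // ToString above prints Row0, then Row1

        // Row-vector transform v' = v * M, written out element by element.
        float x = 1f, y = 0f;
        float xPrime = x * r.M11 + y * r.M21;   // ~0
        float yPrime = x * r.M12 + y * r.M22;   // ~1
        Console.WriteLine("({0}, {1})", xPrime, yPrime);
    }
}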
- public static void Mult(ref Matrix2 left, float right, out Matrix2 result) - { - result.Row0.X = left.Row0.X * right; - result.Row0.Y = left.Row0.Y * right; - result.Row1.X = left.Row1.X * right; - result.Row1.Y = left.Row1.Y * right; - } - - /// - /// Multiplies and instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix2 Mult(Matrix2 left, float right) - { - Matrix2 result; - Mult(ref left, right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static void Mult(ref Matrix2 left, ref Matrix2 right, out Matrix2 result) - { - float lM11 = left.Row0.X, lM12 = left.Row0.Y, - lM21 = left.Row1.X, lM22 = left.Row1.Y, - rM11 = right.Row0.X, rM12 = right.Row0.Y, - rM21 = right.Row1.X, rM22 = right.Row1.Y; - - result.Row0.X = (lM11 * rM11) + (lM12 * rM21); - result.Row0.Y = (lM11 * rM12) + (lM12 * rM22); - result.Row1.X = (lM21 * rM11) + (lM22 * rM21); - result.Row1.Y = (lM21 * rM12) + (lM22 * rM22); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix2 Mult(Matrix2 left, Matrix2 right) - { - Matrix2 result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static void Mult(ref Matrix2 left, ref Matrix2x3 right, out Matrix2x3 result) - { - float lM11 = left.Row0.X, lM12 = left.Row0.Y, - lM21 = left.Row1.X, lM22 = left.Row1.Y, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z; - - result.Row0.X = (lM11 * rM11) + (lM12 * rM21); - result.Row0.Y = (lM11 * rM12) + (lM12 * rM22); - result.Row0.Z = (lM11 * rM13) + (lM12 * rM23); - result.Row1.X = (lM21 * rM11) + (lM22 * rM21); - result.Row1.Y = (lM21 * rM12) + (lM22 * rM22); - result.Row1.Z = (lM21 * rM13) + (lM22 * rM23); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix2x3 Mult(Matrix2 left, Matrix2x3 right) - { - Matrix2x3 result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. 
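The 2x2-by-2x3 overload above is an ordinary matrix product; for example, [[1,2],[3,4]] * [[5,6,7],[8,9,10]] = [[21,24,27],[47,54,61]]. A minimal sketch against the pre-removal OpenTK sources (the Matrix2x3 constructor and element accessors used here are the ones defined near the end of this hunk; the demo class is illustrative):

using System;
using OpenTK;

static class Matrix2TimesMatrix2x3Demo
{
    static void Main()
    {
        var a = new Matrix2(1f, 2f,
                            3f, 4f);
        var b = new Matrix2x3(5f, 6f, 7f,
                              8f, 9f, 10f);

        Matrix2x3 c = Matrix2.Mult(a, b);                        // the overload above
        Console.WriteLine("{0} {1} {2}", c.M11, c.M12, c.M13);   // 21 24 27
        Console.WriteLine("{0} {1} {2}", c.M21, c.M22, c.M23);   // 47 54 61
    }
}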
- public static void Mult(ref Matrix2 left, ref Matrix2x4 right, out Matrix2x4 result) - { - float lM11 = left.Row0.X, lM12 = left.Row0.Y, - lM21 = left.Row1.X, lM22 = left.Row1.Y, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, rM14 = right.Row0.W, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z, rM24 = right.Row1.W; - - result.Row0.X = (lM11 * rM11) + (lM12 * rM21); - result.Row0.Y = (lM11 * rM12) + (lM12 * rM22); - result.Row0.Z = (lM11 * rM13) + (lM12 * rM23); - result.Row0.W = (lM11 * rM14) + (lM12 * rM24); - result.Row1.X = (lM21 * rM11) + (lM22 * rM21); - result.Row1.Y = (lM21 * rM12) + (lM22 * rM22); - result.Row1.Z = (lM21 * rM13) + (lM22 * rM23); - result.Row1.W = (lM21 * rM14) + (lM22 * rM24); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix2x4 Mult(Matrix2 left, Matrix2x4 right) - { - Matrix2x4 result; - Mult(ref left, ref right, out result); - return result; - } - - #endregion - - #region Add - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. - public static void Add(ref Matrix2 left, ref Matrix2 right, out Matrix2 result) - { - result.Row0.X = left.Row0.X + right.Row0.X; - result.Row0.Y = left.Row0.Y + right.Row0.Y; - result.Row1.X = left.Row1.X + right.Row1.X; - result.Row1.Y = left.Row1.Y + right.Row1.Y; - } - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. - public static Matrix2 Add(Matrix2 left, Matrix2 right) - { - Matrix2 result; - Add(ref left, ref right, out result); - return result; - } - - #endregion - - #region Subtract - - /// - /// Subtracts two instances. - /// - /// The left operand of the subtraction. - /// The right operand of the subtraction. - /// A new instance that is the result of the subtraction. - public static void Subtract(ref Matrix2 left, ref Matrix2 right, out Matrix2 result) - { - result.Row0.X = left.Row0.X - right.Row0.X; - result.Row0.Y = left.Row0.Y - right.Row0.Y; - result.Row1.X = left.Row1.X - right.Row1.X; - result.Row1.Y = left.Row1.Y - right.Row1.Y; - } - - /// - /// Subtracts two instances. - /// - /// The left operand of the subtraction. - /// The right operand of the subtraction. - /// A new instance that is the result of the subtraction. - public static Matrix2 Subtract(Matrix2 left, Matrix2 right) - { - Matrix2 result; - Subtract(ref left, ref right, out result); - return result; - } - - #endregion - - #region Invert Functions - - /// - /// Calculate the inverse of the given matrix - /// - /// The matrix to invert - /// The inverse of the given matrix if it has one, or the input if it is singular - /// Thrown if the Matrix2 is singular. 
- public static void Invert(ref Matrix2 mat, out Matrix2 result) - { - float det = mat.Determinant; - - if (det == 0) - throw new InvalidOperationException("Matrix is singular and cannot be inverted."); - - float invDet = 1f / det; - - result.Row0.X = mat.Row1.Y * invDet; - result.Row0.Y = -mat.Row0.Y * invDet; - result.Row1.X = -mat.Row1.X * invDet; - result.Row1.Y = mat.Row0.X * invDet; - } - - /// - /// Calculate the inverse of the given matrix - /// - /// The matrix to invert - /// The inverse of the given matrix if it has one, or the input if it is singular - /// Thrown if the Matrix2 is singular. - public static Matrix2 Invert(Matrix2 mat) - { - Matrix2 result; - Invert(ref mat, out result); - return result; - } - - #endregion - - #region Transpose - - /// - /// Calculate the transpose of the given matrix. - /// - /// The matrix to transpose. - /// The transpose of the given matrix. - public static void Transpose(ref Matrix2 mat, out Matrix2 result) - { - result.Row0.X = mat.Row0.X; - result.Row0.Y = mat.Row1.X; - result.Row1.X = mat.Row0.Y; - result.Row1.Y = mat.Row1.Y; - } - - /// - /// Calculate the transpose of the given matrix. - /// - /// The matrix to transpose. - /// The transpose of the given matrix. - public static Matrix2 Transpose(Matrix2 mat) - { - Matrix2 result; - Transpose(ref mat, out result); - return result; - } - - #endregion - - #endregion - - #region Operators - - /// - /// Scalar multiplication. - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2 which holds the result of the multiplication - public static Matrix2 operator *(float left, Matrix2 right) - { - return Mult(right, left); - } - - /// - /// Scalar multiplication. - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2 which holds the result of the multiplication - public static Matrix2 operator *(Matrix2 left, float right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2 which holds the result of the multiplication - public static Matrix2 operator *(Matrix2 left, Matrix2 right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2x3 which holds the result of the multiplication - public static Matrix2x3 operator *(Matrix2 left, Matrix2x3 right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2x4 which holds the result of the multiplication - public static Matrix2x4 operator *(Matrix2 left, Matrix2x4 right) - { - return Mult(left, right); - } - - /// - /// Matrix addition - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2 which holds the result of the addition - public static Matrix2 operator +(Matrix2 left, Matrix2 right) - { - return Add(left, right); - } - - /// - /// Matrix subtraction - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2 which holds the result of the subtraction - public static Matrix2 operator -(Matrix2 left, Matrix2 right) - { - return Subtract(left, right); - } - - /// - /// Compares two instances for equality. - /// - /// The first instance. - /// The second instance. - /// True, if left equals right; false otherwise. - public static bool operator ==(Matrix2 left, Matrix2 right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for inequality. - /// - /// The first instance. 
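A worked example of the Invert path above, assuming the pre-removal OpenTK sources (the demo class is illustrative). For [[4,7],[2,6]] the determinant is 4*6 - 7*2 = 10, so the inverse is [[0.6,-0.7],[-0.2,0.4]], and multiplying back with the * operator defined in this file recovers the identity. Note that, despite the summary saying the input is returned for a singular matrix, the implementation throws InvalidOperationException when the determinant is zero.

using System;
using OpenTK;

static class Matrix2InvertDemo
{
    static void Main()
    {
        var m = new Matrix2(4f, 7f,
                            2f, 6f);

        Matrix2 inv = Matrix2.Invert(m);   // throws InvalidOperationException if det == 0
        Console.WriteLine(inv);            // Row0 ~ (0.6, -0.7), Row1 ~ (-0.2, 0.4)

        Console.WriteLine(m * inv);        // ~identity: Row0 = (1, 0), Row1 = (0, 1)
    }
}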
- /// The second instance. - /// True, if left does not equal right; false otherwise. - public static bool operator !=(Matrix2 left, Matrix2 right) - { - return !left.Equals(right); - } - - #endregion - - #region Overrides - - #region public override string ToString() - - /// - /// Returns a System.String that represents the current Matrix4. - /// - /// The string representation of the matrix. - public override string ToString() - { - return String.Format("{0}\n{1}", Row0, Row1); - } - - #endregion - - #region public override int GetHashCode() - - /// - /// Returns the hashcode for this instance. - /// - /// A System.Int32 containing the unique hashcode for this instance. - public override int GetHashCode() - { - return Row0.GetHashCode() ^ Row1.GetHashCode(); - } - - #endregion - - #region public override bool Equals(object obj) - - /// - /// Indicates whether this instance and a specified object are equal. - /// - /// The object to compare to. - /// True if the instances are equal; false otherwise. - public override bool Equals(object obj) - { - if (!(obj is Matrix2)) - return false; - - return this.Equals((Matrix2)obj); - } - - #endregion - - #endregion - - #endregion - - #region IEquatable Members - - /// Indicates whether the current matrix is equal to another matrix. - /// An matrix to compare with this matrix. - /// true if the current matrix is equal to the matrix parameter; otherwise, false. - public bool Equals(Matrix2 other) - { - return - Row0 == other.Row0 && - Row1 == other.Row1; - } - - #endregion - } -} diff --git a/OpenTK/Math/Matrix2d.cs b/OpenTK/Math/Matrix2d.cs deleted file mode 100644 index 999f105e..00000000 --- a/OpenTK/Math/Matrix2d.cs +++ /dev/null @@ -1,764 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - */ -#endregion - -using System; -using System.Runtime.InteropServices; - -namespace OpenTK -{ - /// - /// Represents a 2x2 matrix - /// - public struct Matrix2d : IEquatable - { - #region Fields - - /// - /// Top row of the matrix. - /// - public Vector2d Row0; - - /// - /// Bottom row of the matrix. - /// - public Vector2d Row1; - - /// - /// The identity matrix. - /// - public static readonly Matrix2d Identity = new Matrix2d(Vector2d.UnitX, Vector2d.UnitY); - - /// - /// The zero matrix. - /// - public static readonly Matrix2d Zero = new Matrix2d(Vector2d.Zero, Vector2d.Zero); - - #endregion - - #region Constructors - - /// - /// Constructs a new instance. - /// - /// Top row of the matrix. 
- /// Bottom row of the matrix. - public Matrix2d(Vector2d row0, Vector2d row1) - { - Row0 = row0; - Row1 = row1; - } - - /// - /// Constructs a new instance - /// - /// First item of the first row of the matrix. - /// Second item of the first row of the matrix. - /// First item of the second row of the matrix. - /// Second item of the second row of the matrix. - public Matrix2d( - double m00, double m01, - double m10, double m11) - { - Row0 = new Vector2d(m00, m01); - Row1 = new Vector2d(m10, m11); - } - - #endregion - - #region Public Members - - #region Properties - - /// - /// Gets the determinant of this matrix. - /// - public double Determinant - { - get - { - double m11 = Row0.X, m12 = Row0.Y, - m21 = Row1.X, m22 = Row1.Y; - - return m11 * m22 - m12 * m21; - } - } - - /// - /// Gets or sets the first column of this matrix. - /// - public Vector2d Column0 - { - get { return new Vector2d(Row0.X, Row1.X); } - set { Row0.X = value.X; Row1.X = value.Y; } - } - - /// - /// Gets or sets the second column of this matrix. - /// - public Vector2d Column1 - { - get { return new Vector2d(Row0.Y, Row1.Y); } - set { Row0.Y = value.X; Row1.Y = value.Y; } - } - - /// - /// Gets or sets the value at row 1, column 1 of this instance. - /// - public double M11 { get { return Row0.X; } set { Row0.X = value; } } - - /// - /// Gets or sets the value at row 1, column 2 of this instance. - /// - public double M12 { get { return Row0.Y; } set { Row0.Y = value; } } - - /// - /// Gets or sets the value at row 2, column 1 of this instance. - /// - public double M21 { get { return Row1.X; } set { Row1.X = value; } } - - /// - /// Gets or sets the value at row 2, column 2 of this instance. - /// - public double M22 { get { return Row1.Y; } set { Row1.Y = value; } } - - /// - /// Gets or sets the values along the main diagonal of the matrix. - /// - public Vector2d Diagonal - { - get - { - return new Vector2d(Row0.X, Row1.Y); - } - set - { - Row0.X = value.X; - Row1.Y = value.Y; - } - } - - /// - /// Gets the trace of the matrix, the sum of the values along the diagonal. - /// - public double Trace { get { return Row0.X + Row1.Y; } } - - #endregion - - #region Indexers - - /// - /// Gets or sets the value at a specified row and column. - /// - public double this[int rowIndex, int columnIndex] - { - get - { - if (rowIndex == 0) return Row0[columnIndex]; - else if (rowIndex == 1) return Row1[columnIndex]; - throw new IndexOutOfRangeException("You tried to access this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - set - { - if (rowIndex == 0) Row0[columnIndex] = value; - else if (rowIndex == 1) Row1[columnIndex] = value; - else throw new IndexOutOfRangeException("You tried to set this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - } - - #endregion - - #region Instance - - #region public void Transpose() - - /// - /// Converts this instance to it's transpose. - /// - public void Transpose() - { - this = Matrix2d.Transpose(this); - } - - #endregion - - #region public void Invert() - - /// - /// Converts this instance into its inverse. - /// - public void Invert() - { - this = Matrix2d.Invert(this); - } - - #endregion - - #endregion - - #region Static - - #region CreateRotation - - /// - /// Builds a rotation matrix. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix2d instance. 
- public static void CreateRotation(double angle, out Matrix2d result) - { - double cos = (double)System.Math.Cos(angle); - double sin = (double)System.Math.Sin(angle); - - result.Row0.X = cos; - result.Row0.Y = sin; - result.Row1.X = -sin; - result.Row1.Y = cos; - } - - /// - /// Builds a rotation matrix. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix2d instance. - public static Matrix2d CreateRotation(double angle) - { - Matrix2d result; - CreateRotation(angle, out result); - return result; - } - - #endregion - - #region CreateScale - - /// - /// Creates a scale matrix. - /// - /// Single scale factor for the x, y, and z axes. - /// A scale matrix. - public static void CreateScale(double scale, out Matrix2d result) - { - result.Row0.X = scale; - result.Row0.Y = 0; - result.Row1.X = 0; - result.Row1.Y = scale; - } - - /// - /// Creates a scale matrix. - /// - /// Single scale factor for the x and y axes. - /// A scale matrix. - public static Matrix2d CreateScale(double scale) - { - Matrix2d result; - CreateScale(scale, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factors for the x and y axes. - /// A scale matrix. - public static void CreateScale(Vector2d scale, out Matrix2d result) - { - result.Row0.X = scale.X; - result.Row0.Y = 0; - result.Row1.X = 0; - result.Row1.Y = scale.Y; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factors for the x and y axes. - /// A scale matrix. - public static Matrix2d CreateScale(Vector2d scale) - { - Matrix2d result; - CreateScale(scale, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factor for the x axis. - /// Scale factor for the y axis. - /// A scale matrix. - public static void CreateScale(double x, double y, out Matrix2d result) - { - result.Row0.X = x; - result.Row0.Y = 0; - result.Row1.X = 0; - result.Row1.Y = y; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factor for the x axis. - /// Scale factor for the y axis. - /// A scale matrix. - public static Matrix2d CreateScale(double x, double y) - { - Matrix2d result; - CreateScale(x, y, out result); - return result; - } - - #endregion - - #region Multiply Functions - - /// - /// Multiplies and instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static void Mult(ref Matrix2d left, double right, out Matrix2d result) - { - result.Row0.X = left.Row0.X * right; - result.Row0.Y = left.Row0.Y * right; - result.Row1.X = left.Row1.X * right; - result.Row1.Y = left.Row1.Y * right; - } - - /// - /// Multiplies and instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix2d Mult(Matrix2d left, double right) - { - Matrix2d result; - Mult(ref left, right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. 
- public static void Mult(ref Matrix2d left, ref Matrix2d right, out Matrix2d result) - { - double lM11 = left.Row0.X, lM12 = left.Row0.Y, - lM21 = left.Row1.X, lM22 = left.Row1.Y, - rM11 = right.Row0.X, rM12 = right.Row0.Y, - rM21 = right.Row1.X, rM22 = right.Row1.Y; - - result.Row0.X = (lM11 * rM11) + (lM12 * rM21); - result.Row0.Y = (lM11 * rM12) + (lM12 * rM22); - result.Row1.X = (lM21 * rM11) + (lM22 * rM21); - result.Row1.Y = (lM21 * rM12) + (lM22 * rM22); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix2d Mult(Matrix2d left, Matrix2d right) - { - Matrix2d result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static void Mult(ref Matrix2d left, ref Matrix2x3d right, out Matrix2x3d result) - { - double lM11 = left.Row0.X, lM12 = left.Row0.Y, - lM21 = left.Row1.X, lM22 = left.Row1.Y, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z; - - result.Row0.X = (lM11 * rM11) + (lM12 * rM21); - result.Row0.Y = (lM11 * rM12) + (lM12 * rM22); - result.Row0.Z = (lM11 * rM13) + (lM12 * rM23); - result.Row1.X = (lM21 * rM11) + (lM22 * rM21); - result.Row1.Y = (lM21 * rM12) + (lM22 * rM22); - result.Row1.Z = (lM21 * rM13) + (lM22 * rM23); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix2x3d Mult(Matrix2d left, Matrix2x3d right) - { - Matrix2x3d result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static void Mult(ref Matrix2d left, ref Matrix2x4d right, out Matrix2x4d result) - { - double lM11 = left.Row0.X, lM12 = left.Row0.Y, - lM21 = left.Row1.X, lM22 = left.Row1.Y, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, rM14 = right.Row0.W, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z, rM24 = right.Row1.W; - - result.Row0.X = (lM11 * rM11) + (lM12 * rM21); - result.Row0.Y = (lM11 * rM12) + (lM12 * rM22); - result.Row0.Z = (lM11 * rM13) + (lM12 * rM23); - result.Row0.W = (lM11 * rM14) + (lM12 * rM24); - result.Row1.X = (lM21 * rM11) + (lM22 * rM21); - result.Row1.Y = (lM21 * rM12) + (lM22 * rM22); - result.Row1.Z = (lM21 * rM13) + (lM22 * rM23); - result.Row1.W = (lM21 * rM14) + (lM22 * rM24); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix2x4d Mult(Matrix2d left, Matrix2x4d right) - { - Matrix2x4d result; - Mult(ref left, ref right, out result); - return result; - } - - #endregion - - #region Add - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. 
- public static void Add(ref Matrix2d left, ref Matrix2d right, out Matrix2d result) - { - result.Row0.X = left.Row0.X + right.Row0.X; - result.Row0.Y = left.Row0.Y + right.Row0.Y; - result.Row1.X = left.Row1.X + right.Row1.X; - result.Row1.Y = left.Row1.Y + right.Row1.Y; - } - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. - public static Matrix2d Add(Matrix2d left, Matrix2d right) - { - Matrix2d result; - Add(ref left, ref right, out result); - return result; - } - - #endregion - - #region Subtract - - /// - /// Subtracts two instances. - /// - /// The left operand of the subtraction. - /// The right operand of the subtraction. - /// A new instance that is the result of the subtraction. - public static void Subtract(ref Matrix2d left, ref Matrix2d right, out Matrix2d result) - { - result.Row0.X = left.Row0.X - right.Row0.X; - result.Row0.Y = left.Row0.Y - right.Row0.Y; - result.Row1.X = left.Row1.X - right.Row1.X; - result.Row1.Y = left.Row1.Y - right.Row1.Y; - } - - /// - /// Subtracts two instances. - /// - /// The left operand of the subtraction. - /// The right operand of the subtraction. - /// A new instance that is the result of the subtraction. - public static Matrix2d Subtract(Matrix2d left, Matrix2d right) - { - Matrix2d result; - Subtract(ref left, ref right, out result); - return result; - } - - #endregion - - #region Invert Functions - - /// - /// Calculate the inverse of the given matrix - /// - /// The matrix to invert - /// The inverse of the given matrix if it has one, or the input if it is singular - /// Thrown if the Matrix2d is singular. - public static void Invert(ref Matrix2d mat, out Matrix2d result) - { - double det = mat.Determinant; - - if (det == 0) - throw new InvalidOperationException("Matrix is singular and cannot be inverted."); - - double invDet = 1f / det; - - result.Row0.X = mat.Row1.Y * invDet; - result.Row0.Y = -mat.Row0.Y * invDet; - result.Row1.X = -mat.Row1.X * invDet; - result.Row1.Y = mat.Row0.X * invDet; - } - - /// - /// Calculate the inverse of the given matrix - /// - /// The matrix to invert - /// The inverse of the given matrix if it has one, or the input if it is singular - /// Thrown if the Matrix2d is singular. - public static Matrix2d Invert(Matrix2d mat) - { - Matrix2d result; - Invert(ref mat, out result); - return result; - } - - #endregion - - #region Transpose - - /// - /// Calculate the transpose of the given matrix. - /// - /// The matrix to transpose. - /// The transpose of the given matrix. - public static void Transpose(ref Matrix2d mat, out Matrix2d result) - { - result.Row0.X = mat.Row0.X; - result.Row0.Y = mat.Row1.X; - result.Row1.X = mat.Row0.Y; - result.Row1.Y = mat.Row1.Y; - } - - /// - /// Calculate the transpose of the given matrix. - /// - /// The matrix to transpose. - /// The transpose of the given matrix. - public static Matrix2d Transpose(Matrix2d mat) - { - Matrix2d result; - Transpose(ref mat, out result); - return result; - } - - #endregion - - #endregion - - #region Operators - - /// - /// Scalar multiplication. - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2d which holds the result of the multiplication - public static Matrix2d operator *(double left, Matrix2d right) - { - return Mult(right, left); - } - - /// - /// Scalar multiplication. 
- /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2d which holds the result of the multiplication - public static Matrix2d operator *(Matrix2d left, double right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2d which holds the result of the multiplication - public static Matrix2d operator *(Matrix2d left, Matrix2d right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2x3d which holds the result of the multiplication - public static Matrix2x3d operator *(Matrix2d left, Matrix2x3d right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2x4d which holds the result of the multiplication - public static Matrix2x4d operator *(Matrix2d left, Matrix2x4d right) - { - return Mult(left, right); - } - - /// - /// Matrix addition - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2d which holds the result of the addition - public static Matrix2d operator +(Matrix2d left, Matrix2d right) - { - return Add(left, right); - } - - /// - /// Matrix subtraction - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2d which holds the result of the subtraction - public static Matrix2d operator -(Matrix2d left, Matrix2d right) - { - return Subtract(left, right); - } - - /// - /// Compares two instances for equality. - /// - /// The first instance. - /// The second instance. - /// True, if left equals right; false otherwise. - public static bool operator ==(Matrix2d left, Matrix2d right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for inequality. - /// - /// The first instance. - /// The second instance. - /// True, if left does not equal right; false otherwise. - public static bool operator !=(Matrix2d left, Matrix2d right) - { - return !left.Equals(right); - } - - #endregion - - #region Overrides - - #region public override string ToString() - - /// - /// Returns a System.String that represents the current Matrix4. - /// - /// The string representation of the matrix. - public override string ToString() - { - return String.Format("{0}\n{1}", Row0, Row1); - } - - #endregion - - #region public override int GetHashCode() - - /// - /// Returns the hashcode for this instance. - /// - /// A System.Int32 containing the unique hashcode for this instance. - public override int GetHashCode() - { - return Row0.GetHashCode() ^ Row1.GetHashCode(); - } - - #endregion - - #region public override bool Equals(object obj) - - /// - /// Indicates whether this instance and a specified object are equal. - /// - /// The object to compare to. - /// True if the instances are equal; false otherwise. - public override bool Equals(object obj) - { - if (!(obj is Matrix2d)) - return false; - - return this.Equals((Matrix2d)obj); - } - - #endregion - - #endregion - - #endregion - - #region IEquatable Members - - /// Indicates whether the current matrix is equal to another matrix. - /// An matrix to compare with this matrix. - /// true if the current matrix is equal to the matrix parameter; otherwise, false. 
- public bool Equals(Matrix2d other) - { - return - Row0 == other.Row0 && - Row1 == other.Row1; - } - - #endregion - } -} diff --git a/OpenTK/Math/Matrix2x3.cs b/OpenTK/Math/Matrix2x3.cs deleted file mode 100644 index bd2236ca..00000000 --- a/OpenTK/Math/Matrix2x3.cs +++ /dev/null @@ -1,724 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - */ -#endregion - -using System; -using System.Collections.Generic; - -namespace OpenTK -{ - /// - /// Represents a 2x3 matrix. - /// - public struct Matrix2x3 : IEquatable - { - #region Fields - - /// - /// Top row of the matrix. - /// - public Vector3 Row0; - - /// - /// Bottom row of the matrix. - /// - public Vector3 Row1; - - /// - /// The zero matrix. - /// - public static readonly Matrix2x3 Zero = new Matrix2x3(Vector3.Zero, Vector3.Zero); - - #endregion - - #region Constructors - - /// - /// Constructs a new instance. - /// - /// Top row of the matrix. - /// Bottom row of the matrix. - public Matrix2x3(Vector3 row0, Vector3 row1) - { - Row0 = row0; - Row1 = row1; - } - - /// - /// Constructs a new instance - /// - /// First item of the first row of the matrix. - /// Second item of the first row of the matrix. - /// Third item of the first row of the matrix. - /// First item of the second row of the matrix. - /// Second item of the second row of the matrix. - /// Third item of the second row of the matrix. - public Matrix2x3( - float m00, float m01, float m02, - float m10, float m11, float m12) - { - Row0 = new Vector3(m00, m01, m02); - Row1 = new Vector3(m10, m11, m12); - } - - #endregion - - #region Public Members - - #region Properties - - /// - /// Gets or sets the first column of this matrix. - /// - public Vector2 Column0 - { - get { return new Vector2(Row0.X, Row1.X); } - set { Row0.X = value.X; Row1.X = value.Y; } - } - - /// - /// Gets or sets the second column of this matrix. - /// - public Vector2 Column1 - { - get { return new Vector2(Row0.Y, Row1.Y); } - set { Row0.Y = value.X; Row1.Y = value.Y; } - } - - /// - /// Gets or sets the third column of this matrix. - /// - public Vector2 Column2 - { - get { return new Vector2(Row0.Z, Row1.Z); } - set { Row0.Z = value.X; Row1.Z = value.Y; } - } - - /// - /// Gets or sets the value at row 1, column 1 of this instance. - /// - public float M11 { get { return Row0.X; } set { Row0.X = value; } } - - /// - /// Gets or sets the value at row 1, column 2 of this instance. 
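For reference, a small worked example of the Matrix2d Invert pair removed a little further up: a 2x2 with determinant 10 inverts cleanly, while a zero determinant makes Invert throw InvalidOperationException (the remark about returning the input for a singular matrix is stale relative to the code). Values are assigned through the public Row fields:

    var m = new Matrix2d();
    m.Row0.X = 4; m.Row0.Y = 7;
    m.Row1.X = 2; m.Row1.Y = 6;
    // Determinant = 4*6 - 7*2 = 10, so the matrix is invertible.
    Matrix2d inv = Matrix2d.Invert(m);
    // inv rows are (0.6, -0.7) and (-0.2, 0.4); multiplying back gives the identity.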
- /// - public float M12 { get { return Row0.Y; } set { Row0.Y = value; } } - - /// - /// Gets or sets the value at row 1, column 3 of this instance. - /// - public float M13 { get { return Row0.Z; } set { Row0.Z = value; } } - - /// - /// Gets or sets the value at row 2, column 1 of this instance. - /// - public float M21 { get { return Row1.X; } set { Row1.X = value; } } - - /// - /// Gets or sets the value at row 2, column 2 of this instance. - /// - public float M22 { get { return Row1.Y; } set { Row1.Y = value; } } - - /// - /// Gets or sets the value at row 2, column 3 of this instance. - /// - public float M23 { get { return Row1.Z; } set { Row1.Z = value; } } - - /// - /// Gets or sets the values along the main diagonal of the matrix. - /// - public Vector2 Diagonal - { - get - { - return new Vector2(Row0.X, Row1.Y); - } - set - { - Row0.X = value.X; - Row1.Y = value.Y; - } - } - - /// - /// Gets the trace of the matrix, the sum of the values along the diagonal. - /// - public float Trace { get { return Row0.X + Row1.Y; } } - - #endregion - - #region Indexers - - /// - /// Gets or sets the value at a specified row and column. - /// - public float this[int rowIndex, int columnIndex] - { - get - { - if (rowIndex == 0) return Row0[columnIndex]; - else if (rowIndex == 1) return Row1[columnIndex]; - throw new IndexOutOfRangeException("You tried to access this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - set - { - if (rowIndex == 0) Row0[columnIndex] = value; - else if (rowIndex == 1) Row1[columnIndex] = value; - else throw new IndexOutOfRangeException("You tried to set this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - } - - #endregion - - #region Instance - #endregion - - #region Static - - #region CreateRotation - - /// - /// Builds a rotation matrix. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix2x3 instance. - public static void CreateRotation(float angle, out Matrix2x3 result) - { - float cos = (float)System.Math.Cos(angle); - float sin = (float)System.Math.Sin(angle); - - result.Row0.X = cos; - result.Row0.Y = sin; - result.Row0.Z = 0; - result.Row1.X = -sin; - result.Row1.Y = cos; - result.Row1.Z = 0; - } - - /// - /// Builds a rotation matrix. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix2x3 instance. - public static Matrix2x3 CreateRotation(float angle) - { - Matrix2x3 result; - CreateRotation(angle, out result); - return result; - } - - #endregion - - #region CreateScale - - /// - /// Creates a scale matrix. - /// - /// Single scale factor for the x, y, and z axes. - /// A scale matrix. - public static void CreateScale(float scale, out Matrix2x3 result) - { - result.Row0.X = scale; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row1.X = 0; - result.Row1.Y = scale; - result.Row1.Z = 0; - } - - /// - /// Creates a scale matrix. - /// - /// Single scale factor for the x and y axes. - /// A scale matrix. - public static Matrix2x3 CreateScale(float scale) - { - Matrix2x3 result; - CreateScale(scale, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factors for the x and y axes. - /// A scale matrix. - public static void CreateScale(Vector2 scale, out Matrix2x3 result) - { - result.Row0.X = scale.X; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row1.X = 0; - result.Row1.Y = scale.Y; - result.Row1.Z = 0; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factors for the x and y axes. - /// A scale matrix. 
- public static Matrix2x3 CreateScale(Vector2 scale) - { - Matrix2x3 result; - CreateScale(scale, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factor for the x axis. - /// Scale factor for the y axis. - /// A scale matrix. - public static void CreateScale(float x, float y, out Matrix2x3 result) - { - result.Row0.X = x; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row1.X = 0; - result.Row1.Y = y; - result.Row1.Z = 0; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factor for the x axis. - /// Scale factor for the y axis. - /// A scale matrix. - public static Matrix2x3 CreateScale(float x, float y) - { - Matrix2x3 result; - CreateScale(x, y, out result); - return result; - } - - #endregion - - #region Multiply Functions - - /// - /// Multiplies and instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static void Mult(ref Matrix2x3 left, float right, out Matrix2x3 result) - { - result.Row0.X = left.Row0.X * right; - result.Row0.Y = left.Row0.Y * right; - result.Row0.Z = left.Row0.Z * right; - result.Row1.X = left.Row1.X * right; - result.Row1.Y = left.Row1.Y * right; - result.Row1.Z = left.Row1.Z * right; - } - - /// - /// Multiplies and instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix2x3 Mult(Matrix2x3 left, float right) - { - Matrix2x3 result; - Mult(ref left, right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static void Mult(ref Matrix2x3 left, ref Matrix3x2 right, out Matrix2 result) - { - float lM11 = left.Row0.X, lM12 = left.Row0.Y, lM13 = left.Row0.Z, - lM21 = left.Row1.X, lM22 = left.Row1.Y, lM23 = left.Row1.Z, - rM11 = right.Row0.X, rM12 = right.Row0.Y, - rM21 = right.Row1.X, rM22 = right.Row1.Y, - rM31 = right.Row2.X, rM32 = right.Row2.Y; - - result.Row0.X = ((lM11 * rM11) + (lM12 * rM21)) + (lM13 * rM31); - result.Row0.Y = ((lM11 * rM12) + (lM12 * rM22)) + (lM13 * rM32); - result.Row1.X = ((lM21 * rM11) + (lM22 * rM21)) + (lM23 * rM31); - result.Row1.Y = ((lM21 * rM12) + (lM22 * rM22)) + (lM23 * rM32); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix2 Mult(Matrix2x3 left, Matrix3x2 right) - { - Matrix2 result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. 
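Since the mixed-shape overloads read a little abstractly, here is a hedged sketch of the (2x3)·(3x2) path using only members defined in this file; the transpose of a 2x3 (defined further down in this hunk) supplies the 3x2 right operand:

    var a = new Matrix2x3(1, 2, 3,
                          4, 5, 6);
    Matrix3x2 aT = Matrix2x3.Transpose(a);   // 3x2
    Matrix2 gram = a * aT;                   // (2x3)·(3x2) -> 2x2
    // gram rows are (14, 32) and (32, 77): the pairwise dot products of the rows of a.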
- public static void Mult(ref Matrix2x3 left, ref Matrix3 right, out Matrix2x3 result) - { - float lM11 = left.Row0.X, lM12 = left.Row0.Y, lM13 = left.Row0.Z, - lM21 = left.Row1.X, lM22 = left.Row1.Y, lM23 = left.Row1.Z, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z, - rM31 = right.Row2.X, rm32 = right.Row2.Y, rM33 = right.Row2.Z; - - result.Row0.X = ((lM11 * rM11) + (lM12 * rM21)) + (lM13 * rM31); - result.Row0.Y = ((lM11 * rM12) + (lM12 * rM22)) + (lM13 * rm32); - result.Row0.Z = ((lM11 * rM13) + (lM12 * rM23)) + (lM13 * rM33); - result.Row1.X = ((lM21 * rM11) + (lM22 * rM21)) + (lM23 * rM31); - result.Row1.Y = ((lM21 * rM12) + (lM22 * rM22)) + (lM23 * rm32); - result.Row1.Z = ((lM21 * rM13) + (lM22 * rM23)) + (lM23 * rM33); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix2x3 Mult(Matrix2x3 left, Matrix3 right) - { - Matrix2x3 result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static void Mult(ref Matrix2x3 left, ref Matrix3x4 right, out Matrix2x4 result) - { - float lM11 = left.Row0.X, lM12 = left.Row0.Y, lM13 = left.Row0.Z, - lM21 = left.Row1.X, lM22 = left.Row1.Y, lM23 = left.Row1.Z, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, rM14 = right.Row0.W, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z, rM24 = right.Row1.W, - rM31 = right.Row2.X, rm32 = right.Row2.Y, rM33 = right.Row2.Z, rM34 = right.Row2.W; - - result.Row0.X = ((lM11 * rM11) + (lM12 * rM21)) + (lM13 * rM31); - result.Row0.Y = ((lM11 * rM12) + (lM12 * rM22)) + (lM13 * rm32); - result.Row0.Z = ((lM11 * rM13) + (lM12 * rM23)) + (lM13 * rM33); - result.Row0.W = ((lM11 * rM14) + (lM12 * rM24)) + (lM13 * rM34); - result.Row1.X = ((lM21 * rM11) + (lM22 * rM21)) + (lM23 * rM31); - result.Row1.Y = ((lM21 * rM12) + (lM22 * rM22)) + (lM23 * rm32); - result.Row1.Z = ((lM21 * rM13) + (lM22 * rM23)) + (lM23 * rM33); - result.Row1.W = ((lM21 * rM14) + (lM22 * rM24)) + (lM23 * rM34); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix2x4 Mult(Matrix2x3 left, Matrix3x4 right) - { - Matrix2x4 result; - Mult(ref left, ref right, out result); - return result; - } - - #endregion - - #region Add - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. - public static void Add(ref Matrix2x3 left, ref Matrix2x3 right, out Matrix2x3 result) - { - result.Row0.X = left.Row0.X + right.Row0.X; - result.Row0.Y = left.Row0.Y + right.Row0.Y; - result.Row0.Z = left.Row0.Z + right.Row0.Z; - result.Row1.X = left.Row1.X + right.Row1.X; - result.Row1.Y = left.Row1.Y + right.Row1.Y; - result.Row1.Z = left.Row1.Z + right.Row1.Z; - } - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. 
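Add and Subtract are purely componentwise; a one-liner makes that concrete:

    var a = new Matrix2x3(1, 2, 3, 4, 5, 6);
    var b = new Matrix2x3(10, 20, 30, 40, 50, 60);
    Matrix2x3 sum = a + b;   // rows (11, 22, 33) and (44, 55, 66)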
- public static Matrix2x3 Add(Matrix2x3 left, Matrix2x3 right) - { - Matrix2x3 result; - Add(ref left, ref right, out result); - return result; - } - - #endregion - - #region Subtract - - /// - /// Subtracts two instances. - /// - /// The left operand of the subtraction. - /// The right operand of the subtraction. - /// A new instance that is the result of the subtraction. - public static void Subtract(ref Matrix2x3 left, ref Matrix2x3 right, out Matrix2x3 result) - { - result.Row0.X = left.Row0.X - right.Row0.X; - result.Row0.Y = left.Row0.Y - right.Row0.Y; - result.Row0.Z = left.Row0.Z - right.Row0.Z; - result.Row1.X = left.Row1.X - right.Row1.X; - result.Row1.Y = left.Row1.Y - right.Row1.Y; - result.Row1.Z = left.Row1.Z - right.Row1.Z; - } - - /// - /// Subtracts two instances. - /// - /// The left operand of the subtraction. - /// The right operand of the subtraction. - /// A new instance that is the result of the subtraction. - public static Matrix2x3 Subtract(Matrix2x3 left, Matrix2x3 right) - { - Matrix2x3 result; - Subtract(ref left, ref right, out result); - return result; - } - - #endregion - - #region Transpose - - /// - /// Calculate the transpose of the given matrix. - /// - /// The matrix to transpose. - /// The transpose of the given matrix. - public static void Transpose(ref Matrix2x3 mat, out Matrix3x2 result) - { - result.Row0.X = mat.Row0.X; - result.Row0.Y = mat.Row1.X; - result.Row1.X = mat.Row0.Y; - result.Row1.Y = mat.Row1.Y; - result.Row2.X = mat.Row0.Z; - result.Row2.Y = mat.Row1.Z; - } - - /// - /// Calculate the transpose of the given matrix. - /// - /// The matrix to transpose. - /// The transpose of the given matrix. - public static Matrix3x2 Transpose(Matrix2x3 mat) - { - Matrix3x2 result; - Transpose(ref mat, out result); - return result; - } - - #endregion - - #endregion - - #region Operators - - /// - /// Scalar multiplication. - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2x3 which holds the result of the multiplication - public static Matrix2x3 operator *(float left, Matrix2x3 right) - { - return Mult(right, left); - } - - /// - /// Scalar multiplication. 
- /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2x3 which holds the result of the multiplication - public static Matrix2x3 operator *(Matrix2x3 left, float right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2 which holds the result of the multiplication - public static Matrix2 operator *(Matrix2x3 left, Matrix3x2 right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2x3 which holds the result of the multiplication - public static Matrix2x3 operator *(Matrix2x3 left, Matrix3 right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2x4 which holds the result of the multiplication - public static Matrix2x4 operator *(Matrix2x3 left, Matrix3x4 right) - { - return Mult(left, right); - } - - /// - /// Matrix addition - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2x3 which holds the result of the addition - public static Matrix2x3 operator +(Matrix2x3 left, Matrix2x3 right) - { - return Add(left, right); - } - - /// - /// Matrix subtraction - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2x3 which holds the result of the subtraction - public static Matrix2x3 operator -(Matrix2x3 left, Matrix2x3 right) - { - return Subtract(left, right); - } - - /// - /// Compares two instances for equality. - /// - /// The first instance. - /// The second instance. - /// True, if left equals right; false otherwise. - public static bool operator ==(Matrix2x3 left, Matrix2x3 right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for inequality. - /// - /// The first instance. - /// The second instance. - /// True, if left does not equal right; false otherwise. - public static bool operator !=(Matrix2x3 left, Matrix2x3 right) - { - return !left.Equals(right); - } - - #endregion - - #region Overrides - - #region public override string ToString() - - /// - /// Returns a System.String that represents the current Matrix2x3. - /// - /// The string representation of the matrix. - public override string ToString() - { - return String.Format("{0}\n{1}", Row0, Row1); - } - - #endregion - - #region public override int GetHashCode() - - /// - /// Returns the hashcode for this instance. - /// - /// A System.Int32 containing the unique hashcode for this instance. - public override int GetHashCode() - { - return Row0.GetHashCode() ^ Row1.GetHashCode(); - } - - #endregion - - #region public override bool Equals(object obj) - - /// - /// Indicates whether this instance and a specified object are equal. - /// - /// The object to compare tresult. - /// True if the instances are equal; false otherwise. - public override bool Equals(object obj) - { - if (!(obj is Matrix2x3)) - return false; - - return this.Equals((Matrix2x3)obj); - } - - #endregion - - #endregion - - #endregion - - #region IEquatable Members - - /// - /// Indicates whether the current matrix is equal to another matrix. - /// - /// An matrix to compare with this matrix. - /// true if the current matrix is equal to the matrix parameter; otherwise, false. 
- public bool Equals(Matrix2x3 other) - { - return - Row0 == other.Row0 && - Row1 == other.Row1; - } - - #endregion - } -} diff --git a/OpenTK/Math/Matrix2x3d.cs b/OpenTK/Math/Matrix2x3d.cs deleted file mode 100644 index 30a59eb5..00000000 --- a/OpenTK/Math/Matrix2x3d.cs +++ /dev/null @@ -1,724 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - */ -#endregion - -using System; -using System.Collections.Generic; - -namespace OpenTK -{ - /// - /// Represents a 2x3 matrix. - /// - public struct Matrix2x3d : IEquatable - { - #region Fields - - /// - /// Top row of the matrix. - /// - public Vector3d Row0; - - /// - /// Bottom row of the matrix. - /// - public Vector3d Row1; - - /// - /// The zero matrix. - /// - public static readonly Matrix2x3d Zero = new Matrix2x3d(Vector3d.Zero, Vector3d.Zero); - - #endregion - - #region Constructors - - /// - /// Constructs a new instance. - /// - /// Top row of the matrix. - /// Bottom row of the matrix. - public Matrix2x3d(Vector3d row0, Vector3d row1) - { - Row0 = row0; - Row1 = row1; - } - - /// - /// Constructs a new instance - /// - /// First item of the first row of the matrix. - /// Second item of the first row of the matrix. - /// Third item of the first row of the matrix. - /// First item of the second row of the matrix. - /// Second item of the second row of the matrix. - /// Third item of the second row of the matrix. - public Matrix2x3d( - double m00, double m01, double m02, - double m10, double m11, double m12) - { - Row0 = new Vector3d(m00, m01, m02); - Row1 = new Vector3d(m10, m11, m12); - } - - #endregion - - #region Public Members - - #region Properties - - /// - /// Gets or sets the first column of this matrix. - /// - public Vector2d Column0 - { - get { return new Vector2d(Row0.X, Row1.X); } - set { Row0.X = value.X; Row1.X = value.Y; } - } - - /// - /// Gets or sets the second column of this matrix. - /// - public Vector2d Column1 - { - get { return new Vector2d(Row0.Y, Row1.Y); } - set { Row0.Y = value.X; Row1.Y = value.Y; } - } - - /// - /// Gets or sets the third column of this matrix. - /// - public Vector2d Column2 - { - get { return new Vector2d(Row0.Z, Row1.Z); } - set { Row0.Z = value.X; Row1.Z = value.Y; } - } - - /// - /// Gets or sets the value at row 1, column 1 of this instance. - /// - public double M11 { get { return Row0.X; } set { Row0.X = value; } } - - /// - /// Gets or sets the value at row 1, column 2 of this instance. 
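Before the double-precision variant below, a short sketch of the Matrix2x3 factory methods removed above; both leave the third column at zero, so the results are effectively a 2x2 block plus padding:

    Matrix2x3 rot = Matrix2x3.CreateRotation((float)System.Math.PI / 2f);   // 90 degrees counter-clockwise
    // rot rows are roughly (0, 1, 0) and (-1, 0, 0).
    Matrix2x3 scale = Matrix2x3.CreateScale(2f, 3f);
    // scale rows are (2, 0, 0) and (0, 3, 0).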
- /// - public double M12 { get { return Row0.Y; } set { Row0.Y = value; } } - - /// - /// Gets or sets the value at row 1, column 3 of this instance. - /// - public double M13 { get { return Row0.Z; } set { Row0.Z = value; } } - - /// - /// Gets or sets the value at row 2, column 1 of this instance. - /// - public double M21 { get { return Row1.X; } set { Row1.X = value; } } - - /// - /// Gets or sets the value at row 2, column 2 of this instance. - /// - public double M22 { get { return Row1.Y; } set { Row1.Y = value; } } - - /// - /// Gets or sets the value at row 2, column 3 of this instance. - /// - public double M23 { get { return Row1.Z; } set { Row1.Z = value; } } - - /// - /// Gets or sets the values along the main diagonal of the matrix. - /// - public Vector2d Diagonal - { - get - { - return new Vector2d(Row0.X, Row1.Y); - } - set - { - Row0.X = value.X; - Row1.Y = value.Y; - } - } - - /// - /// Gets the trace of the matrix, the sum of the values along the diagonal. - /// - public double Trace { get { return Row0.X + Row1.Y; } } - - #endregion - - #region Indexers - - /// - /// Gets or sets the value at a specified row and column. - /// - public double this[int rowIndex, int columnIndex] - { - get - { - if (rowIndex == 0) return Row0[columnIndex]; - else if (rowIndex == 1) return Row1[columnIndex]; - throw new IndexOutOfRangeException("You tried to access this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - set - { - if (rowIndex == 0) Row0[columnIndex] = value; - else if (rowIndex == 1) Row1[columnIndex] = value; - else throw new IndexOutOfRangeException("You tried to set this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - } - - #endregion - - #region Instance - #endregion - - #region Static - - #region CreateRotation - - /// - /// Builds a rotation matrix. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix2x3d instance. - public static void CreateRotation(double angle, out Matrix2x3d result) - { - double cos = System.Math.Cos(angle); - double sin = System.Math.Sin(angle); - - result.Row0.X = cos; - result.Row0.Y = sin; - result.Row0.Z = 0; - result.Row1.X = -sin; - result.Row1.Y = cos; - result.Row1.Z = 0; - } - - /// - /// Builds a rotation matrix. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix2x3d instance. - public static Matrix2x3d CreateRotation(double angle) - { - Matrix2x3d result; - CreateRotation(angle, out result); - return result; - } - - #endregion - - #region CreateScale - - /// - /// Creates a scale matrix. - /// - /// Single scale factor for the x, y, and z axes. - /// A scale matrix. - public static void CreateScale(double scale, out Matrix2x3d result) - { - result.Row0.X = scale; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row1.X = 0; - result.Row1.Y = scale; - result.Row1.Z = 0; - } - - /// - /// Creates a scale matrix. - /// - /// Single scale factor for the x and y axes. - /// A scale matrix. - public static Matrix2x3d CreateScale(double scale) - { - Matrix2x3d result; - CreateScale(scale, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factors for the x and y axes. - /// A scale matrix. - public static void CreateScale(Vector2d scale, out Matrix2x3d result) - { - result.Row0.X = scale.X; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row1.X = 0; - result.Row1.Y = scale.Y; - result.Row1.Z = 0; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factors for the x and y axes. - /// A scale matrix. 
- public static Matrix2x3d CreateScale(Vector2d scale) - { - Matrix2x3d result; - CreateScale(scale, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factor for the x axis. - /// Scale factor for the y axis. - /// A scale matrix. - public static void CreateScale(double x, double y, out Matrix2x3d result) - { - result.Row0.X = x; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row1.X = 0; - result.Row1.Y = y; - result.Row1.Z = 0; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factor for the x axis. - /// Scale factor for the y axis. - /// A scale matrix. - public static Matrix2x3d CreateScale(double x, double y) - { - Matrix2x3d result; - CreateScale(x, y, out result); - return result; - } - - #endregion - - #region Multiply Functions - - /// - /// Multiplies and instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static void Mult(ref Matrix2x3d left, double right, out Matrix2x3d result) - { - result.Row0.X = left.Row0.X * right; - result.Row0.Y = left.Row0.Y * right; - result.Row0.Z = left.Row0.Z * right; - result.Row1.X = left.Row1.X * right; - result.Row1.Y = left.Row1.Y * right; - result.Row1.Z = left.Row1.Z * right; - } - - /// - /// Multiplies and instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix2x3d Mult(Matrix2x3d left, double right) - { - Matrix2x3d result; - Mult(ref left, right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static void Mult(ref Matrix2x3d left, ref Matrix3x2 right, out Matrix2d result) - { - double lM11 = left.Row0.X, lM12 = left.Row0.Y, lM13 = left.Row0.Z, - lM21 = left.Row1.X, lM22 = left.Row1.Y, lM23 = left.Row1.Z, - rM11 = right.Row0.X, rM12 = right.Row0.Y, - rM21 = right.Row1.X, rM22 = right.Row1.Y, - rM31 = right.Row2.X, rM32 = right.Row2.Y; - - result.Row0.X = ((lM11 * rM11) + (lM12 * rM21)) + (lM13 * rM31); - result.Row0.Y = ((lM11 * rM12) + (lM12 * rM22)) + (lM13 * rM32); - result.Row1.X = ((lM21 * rM11) + (lM22 * rM21)) + (lM23 * rM31); - result.Row1.Y = ((lM21 * rM12) + (lM22 * rM22)) + (lM23 * rM32); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix2d Mult(Matrix2x3d left, Matrix3x2 right) - { - Matrix2d result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. 
- public static void Mult(ref Matrix2x3d left, ref Matrix3 right, out Matrix2x3d result) - { - double lM11 = left.Row0.X, lM12 = left.Row0.Y, lM13 = left.Row0.Z, - lM21 = left.Row1.X, lM22 = left.Row1.Y, lM23 = left.Row1.Z, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z, - rM31 = right.Row2.X, rm32 = right.Row2.Y, rM33 = right.Row2.Z; - - result.Row0.X = ((lM11 * rM11) + (lM12 * rM21)) + (lM13 * rM31); - result.Row0.Y = ((lM11 * rM12) + (lM12 * rM22)) + (lM13 * rm32); - result.Row0.Z = ((lM11 * rM13) + (lM12 * rM23)) + (lM13 * rM33); - result.Row1.X = ((lM21 * rM11) + (lM22 * rM21)) + (lM23 * rM31); - result.Row1.Y = ((lM21 * rM12) + (lM22 * rM22)) + (lM23 * rm32); - result.Row1.Z = ((lM21 * rM13) + (lM22 * rM23)) + (lM23 * rM33); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix2x3d Mult(Matrix2x3d left, Matrix3 right) - { - Matrix2x3d result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static void Mult(ref Matrix2x3d left, ref Matrix3x4 right, out Matrix2x4d result) - { - double lM11 = left.Row0.X, lM12 = left.Row0.Y, lM13 = left.Row0.Z, - lM21 = left.Row1.X, lM22 = left.Row1.Y, lM23 = left.Row1.Z, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, rM14 = right.Row0.W, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z, rM24 = right.Row1.W, - rM31 = right.Row2.X, rm32 = right.Row2.Y, rM33 = right.Row2.Z, rM34 = right.Row2.W; - - result.Row0.X = ((lM11 * rM11) + (lM12 * rM21)) + (lM13 * rM31); - result.Row0.Y = ((lM11 * rM12) + (lM12 * rM22)) + (lM13 * rm32); - result.Row0.Z = ((lM11 * rM13) + (lM12 * rM23)) + (lM13 * rM33); - result.Row0.W = ((lM11 * rM14) + (lM12 * rM24)) + (lM13 * rM34); - result.Row1.X = ((lM21 * rM11) + (lM22 * rM21)) + (lM23 * rM31); - result.Row1.Y = ((lM21 * rM12) + (lM22 * rM22)) + (lM23 * rm32); - result.Row1.Z = ((lM21 * rM13) + (lM22 * rM23)) + (lM23 * rM33); - result.Row1.W = ((lM21 * rM14) + (lM22 * rM24)) + (lM23 * rM34); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix2x4d Mult(Matrix2x3d left, Matrix3x4 right) - { - Matrix2x4d result; - Mult(ref left, ref right, out result); - return result; - } - - #endregion - - #region Add - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. - public static void Add(ref Matrix2x3d left, ref Matrix2x3d right, out Matrix2x3d result) - { - result.Row0.X = left.Row0.X + right.Row0.X; - result.Row0.Y = left.Row0.Y + right.Row0.Y; - result.Row0.Z = left.Row0.Z + right.Row0.Z; - result.Row1.X = left.Row1.X + right.Row1.X; - result.Row1.Y = left.Row1.Y + right.Row1.Y; - result.Row1.Z = left.Row1.Z + right.Row1.Z; - } - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. 
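One detail worth flagging in the double-precision variant: the matrix-by-matrix overloads above still take single-precision right operands (Matrix3x2, Matrix3, Matrix3x4), so only the left operand and the result are doubles. The componentwise operators, by contrast, are all-double, as in this small sketch:

    var a = new Matrix2x3d(1.5, 0.0, 0.0,
                           0.0, 1.5, 0.0);
    var b = new Matrix2x3d(0.0, 0.0, 2.5,
                           0.0, 0.0, -1.0);
    Matrix2x3d sum = a + b;   // rows (1.5, 0, 2.5) and (0, 1.5, -1)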
- public static Matrix2x3d Add(Matrix2x3d left, Matrix2x3d right) - { - Matrix2x3d result; - Add(ref left, ref right, out result); - return result; - } - - #endregion - - #region Subtract - - /// - /// Subtracts two instances. - /// - /// The left operand of the subtraction. - /// The right operand of the subtraction. - /// A new instance that is the result of the subtraction. - public static void Subtract(ref Matrix2x3d left, ref Matrix2x3d right, out Matrix2x3d result) - { - result.Row0.X = left.Row0.X - right.Row0.X; - result.Row0.Y = left.Row0.Y - right.Row0.Y; - result.Row0.Z = left.Row0.Z - right.Row0.Z; - result.Row1.X = left.Row1.X - right.Row1.X; - result.Row1.Y = left.Row1.Y - right.Row1.Y; - result.Row1.Z = left.Row1.Z - right.Row1.Z; - } - - /// - /// Subtracts two instances. - /// - /// The left operand of the subtraction. - /// The right operand of the subtraction. - /// A new instance that is the result of the subtraction. - public static Matrix2x3d Subtract(Matrix2x3d left, Matrix2x3d right) - { - Matrix2x3d result; - Subtract(ref left, ref right, out result); - return result; - } - - #endregion - - #region Transpose - - /// - /// Calculate the transpose of the given matrix. - /// - /// The matrix to transpose. - /// The transpose of the given matrix. - public static void Transpose(ref Matrix2x3d mat, out Matrix3x2d result) - { - result.Row0.X = mat.Row0.X; - result.Row0.Y = mat.Row1.X; - result.Row1.X = mat.Row0.Y; - result.Row1.Y = mat.Row1.Y; - result.Row2.X = mat.Row0.Z; - result.Row2.Y = mat.Row1.Z; - } - - /// - /// Calculate the transpose of the given matrix. - /// - /// The matrix to transpose. - /// The transpose of the given matrix. - public static Matrix3x2d Transpose(Matrix2x3d mat) - { - Matrix3x2d result; - Transpose(ref mat, out result); - return result; - } - - #endregion - - #endregion - - #region Operators - - /// - /// Scalar multiplication. - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2x3d which holds the result of the multiplication - public static Matrix2x3d operator *(double left, Matrix2x3d right) - { - return Mult(right, left); - } - - /// - /// Scalar multiplication. 
- /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2x3d which holds the result of the multiplication - public static Matrix2x3d operator *(Matrix2x3d left, double right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2d which holds the result of the multiplication - public static Matrix2d operator *(Matrix2x3d left, Matrix3x2 right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2x3d which holds the result of the multiplication - public static Matrix2x3d operator *(Matrix2x3d left, Matrix3 right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2x4d which holds the result of the multiplication - public static Matrix2x4d operator *(Matrix2x3d left, Matrix3x4 right) - { - return Mult(left, right); - } - - /// - /// Matrix addition - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2x3d which holds the result of the addition - public static Matrix2x3d operator +(Matrix2x3d left, Matrix2x3d right) - { - return Add(left, right); - } - - /// - /// Matrix subtraction - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2x3d which holds the result of the subtraction - public static Matrix2x3d operator -(Matrix2x3d left, Matrix2x3d right) - { - return Subtract(left, right); - } - - /// - /// Compares two instances for equality. - /// - /// The first instance. - /// The second instance. - /// True, if left equals right; false otherwise. - public static bool operator ==(Matrix2x3d left, Matrix2x3d right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for inequality. - /// - /// The first instance. - /// The second instance. - /// True, if left does not equal right; false otherwise. - public static bool operator !=(Matrix2x3d left, Matrix2x3d right) - { - return !left.Equals(right); - } - - #endregion - - #region Overrides - - #region public override string ToString() - - /// - /// Returns a System.String that represents the current Matrix2x3d. - /// - /// The string representation of the matrix. - public override string ToString() - { - return String.Format("{0}\n{1}", Row0, Row1); - } - - #endregion - - #region public override int GetHashCode() - - /// - /// Returns the hashcode for this instance. - /// - /// A System.Int32 containing the unique hashcode for this instance. - public override int GetHashCode() - { - return Row0.GetHashCode() ^ Row1.GetHashCode(); - } - - #endregion - - #region public override bool Equals(object obj) - - /// - /// Indicates whether this instance and a specified object are equal. - /// - /// The object to compare tresult. - /// True if the instances are equal; false otherwise. - public override bool Equals(object obj) - { - if (!(obj is Matrix2x3d)) - return false; - - return this.Equals((Matrix2x3d)obj); - } - - #endregion - - #endregion - - #endregion - - #region IEquatable Members - - /// - /// Indicates whether the current matrix is equal to another matrix. - /// - /// An matrix to compare with this matrix. - /// true if the current matrix is equal to the matrix parameter; otherwise, false. 
- public bool Equals(Matrix2x3d other) - { - return - Row0 == other.Row0 && - Row1 == other.Row1; - } - - #endregion - } -} diff --git a/OpenTK/Math/Matrix2x4.cs b/OpenTK/Math/Matrix2x4.cs deleted file mode 100644 index c3afecdb..00000000 --- a/OpenTK/Math/Matrix2x4.cs +++ /dev/null @@ -1,761 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - */ -#endregion - -using System; -using System.Runtime.InteropServices; - -namespace OpenTK -{ - /// - /// Represents a 2x4 matrix. - /// - public struct Matrix2x4 : IEquatable - { - #region Fields - - /// - /// Top row of the matrix. - /// - public Vector4 Row0; - - /// - /// Bottom row of the matrix. - /// - public Vector4 Row1; - - /// - /// The zero matrix. - /// - public static readonly Matrix2x4 Zero = new Matrix2x4(Vector4.Zero, Vector4.Zero); - - #endregion - - #region Constructors - - /// - /// Constructs a new instance. - /// - /// Top row of the matrix. - /// Bottom row of the matrix. - public Matrix2x4(Vector4 row0, Vector4 row1) - { - Row0 = row0; - Row1 = row1; - } - - /// - /// Constructs a new instance - /// - /// First item of the first row of the matrix. - /// Second item of the first row of the matrix. - /// Third item of the first row of the matrix. - /// Fourth item of the first row of the matrix. - /// First item of the second row of the matrix. - /// Second item of the second row of the matrix. - /// Third item of the second row of the matrix. - /// Fourth item of the second row of the matrix. - public Matrix2x4( - float m00, float m01, float m02, float m03, - float m10, float m11, float m12, float m13) - { - Row0 = new Vector4(m00, m01, m02, m03); - Row1 = new Vector4(m10, m11, m12, m13); - } - - #endregion - - #region Public Members - - #region Properties - - /// - /// Gets or sets the first column of the matrix. - /// - public Vector2 Column0 - { - get { return new Vector2(Row0.X, Row1.X); } - set { Row0.X = value.X; Row1.X = value.Y; } - } - - /// - /// Gets or sets the second column of the matrix. - /// - public Vector2 Column1 - { - get { return new Vector2(Row0.Y, Row1.Y); } - set { Row0.Y = value.X; Row1.Y = value.Y; } - } - - /// - /// Gets or sets the third column of the matrix. - /// - public Vector2 Column2 - { - get { return new Vector2(Row0.Z, Row1.Z); } - set { Row0.Z = value.X; Row1.Z = value.Y; } - } - - /// - /// Gets or sets the fourth column of the matrix. 
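A quick numeric check of the double-precision rotation factory removed above, using the Trace property defined alongside it:

    Matrix2x3d rot = Matrix2x3d.CreateRotation(System.Math.PI / 4.0);
    // rot rows are roughly (0.7071, 0.7071, 0) and (-0.7071, 0.7071, 0).
    double trace = rot.Trace;   // ~1.4142, i.e. 2 * cos(pi/4)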
- /// - public Vector2 Column3 - { - get { return new Vector2(Row0.W, Row1.W); } - set { Row0.W = value.X; Row1.W = value.Y; } - } - - /// - /// Gets or sets the value at row 1, column 1 of this instance. - /// - public float M11 { get { return Row0.X; } set { Row0.X = value; } } - - /// - /// Gets or sets the value at row 1, column 2 of this instance. - /// - public float M12 { get { return Row0.Y; } set { Row0.Y = value; } } - - /// - /// Gets or sets the value at row 1, column 3 of this instance. - /// - public float M13 { get { return Row0.Z; } set { Row0.Z = value; } } - - /// - /// Gets or sets the value at row 1, column 4 of this instance. - /// - public float M14 { get { return Row0.W; } set { Row0.W = value; } } - - /// - /// Gets or sets the value at row 2, column 1 of this instance. - /// - public float M21 { get { return Row1.X; } set { Row1.X = value; } } - - /// - /// Gets or sets the value at row 2, column 2 of this instance. - /// - public float M22 { get { return Row1.Y; } set { Row1.Y = value; } } - - /// - /// Gets or sets the value at row 2, column 3 of this instance. - /// - public float M23 { get { return Row1.Z; } set { Row1.Z = value; } } - - /// - /// Gets or sets the value at row 2, column 4 of this instance. - /// - public float M24 { get { return Row1.W; } set { Row1.W = value; } } - - /// - /// Gets or sets the values along the main diagonal of the matrix. - /// - public Vector2 Diagonal - { - get - { - return new Vector2(Row0.X, Row1.Y); - } - set - { - Row0.X = value.X; - Row1.Y = value.Y; - } - } - - /// - /// Gets the trace of the matrix, the sum of the values along the diagonal. - /// - public float Trace { get { return Row0.X + Row1.Y; } } - - #endregion - - #region Indexers - - /// - /// Gets or sets the value at a specified row and column. - /// - public float this[int rowIndex, int columnIndex] - { - get - { - if (rowIndex == 0) return Row0[columnIndex]; - else if (rowIndex == 1) return Row1[columnIndex]; - throw new IndexOutOfRangeException("You tried to access this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - set - { - if (rowIndex == 0) Row0[columnIndex] = value; - else if (rowIndex == 1) Row1[columnIndex] = value; - else throw new IndexOutOfRangeException("You tried to set this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - } - - #endregion - - #region Static - - #region CreateRotation - - /// - /// Builds a rotation matrix. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix2x4 instance. - public static void CreateRotation(float angle, out Matrix2x4 result) - { - float cos = (float)System.Math.Cos(angle); - float sin = (float)System.Math.Sin(angle); - - result.Row0.X = cos; - result.Row0.Y = sin; - result.Row0.Z = 0; - result.Row0.W = 0; - result.Row1.X = -sin; - result.Row1.Y = cos; - result.Row1.Z = 0; - result.Row1.W = 0; - } - - /// - /// Builds a rotation matrix. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix2x3 instance. - public static Matrix2x4 CreateRotation(float angle) - { - Matrix2x4 result; - CreateRotation(angle, out result); - return result; - } - - #endregion - - #region CreateScale - - /// - /// Creates a scale matrix. - /// - /// Single scale factor for the x, y, and z axes. - /// A scale matrix. 
- public static void CreateScale(float scale, out Matrix2x4 result) - { - result.Row0.X = scale; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row0.W = 0; - result.Row1.X = 0; - result.Row1.Y = scale; - result.Row1.Z = 0; - result.Row1.W = 0; - } - - /// - /// Creates a scale matrix. - /// - /// Single scale factor for the x and y axes. - /// A scale matrix. - public static Matrix2x4 CreateScale(float scale) - { - Matrix2x4 result; - CreateScale(scale, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factors for the x and y axes. - /// A scale matrix. - public static void CreateScale(Vector2 scale, out Matrix2x4 result) - { - result.Row0.X = scale.X; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row0.W = 0; - result.Row1.X = 0; - result.Row1.Y = scale.Y; - result.Row1.Z = 0; - result.Row1.W = 0; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factors for the x and y axes. - /// A scale matrix. - public static Matrix2x4 CreateScale(Vector2 scale) - { - Matrix2x4 result; - CreateScale(scale, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factor for the x axis. - /// Scale factor for the y axis. - /// A scale matrix. - public static void CreateScale(float x, float y, out Matrix2x4 result) - { - result.Row0.X = x; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row0.W = 0; - result.Row1.X = 0; - result.Row1.Y = y; - result.Row1.Z = 0; - result.Row1.W = 0; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factor for the x axis. - /// Scale factor for the y axis. - /// A scale matrix. - public static Matrix2x4 CreateScale(float x, float y) - { - Matrix2x4 result; - CreateScale(x, y, out result); - return result; - } - - #endregion - - #region Multiply Functions - - /// - /// Multiplies and instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static void Mult(ref Matrix2x4 left, float right, out Matrix2x4 result) - { - result.Row0.X = left.Row0.X * right; - result.Row0.Y = left.Row0.Y * right; - result.Row0.Z = left.Row0.Z * right; - result.Row0.W = left.Row0.W * right; - result.Row1.X = left.Row1.X * right; - result.Row1.Y = left.Row1.Y * right; - result.Row1.Z = left.Row1.Z * right; - result.Row1.W = left.Row1.W * right; - } - - /// - /// Multiplies and instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix2x4 Mult(Matrix2x4 left, float right) - { - Matrix2x4 result; - Mult(ref left, right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. 
- public static void Mult(ref Matrix2x4 left, ref Matrix4x2 right, out Matrix2 result) - { - float lM11 = left.Row0.X, lM12 = left.Row0.Y, lM13 = left.Row0.Z, lM14 = left.Row0.W, - lM21 = left.Row1.X, lM22 = left.Row1.Y, lM23 = left.Row1.Z, lM24 = left.Row1.W, - rM11 = right.Row0.X, rM12 = right.Row0.Y, - rM21 = right.Row1.X, rM22 = right.Row1.Y, - rM31 = right.Row2.X, rM32 = right.Row2.Y, - rM41 = right.Row3.X, rM42 = right.Row3.Y; - - result.Row0.X = (((lM11 * rM11) + (lM12 * rM21)) + (lM13 * rM31)) + (lM14 * rM41); - result.Row0.Y = (((lM11 * rM12) + (lM12 * rM22)) + (lM13 * rM32)) + (lM14 * rM42); - result.Row1.X = (((lM21 * rM11) + (lM22 * rM21)) + (lM23 * rM31)) + (lM24 * rM41); - result.Row1.Y = (((lM21 * rM12) + (lM22 * rM22)) + (lM23 * rM32)) + (lM24 * rM42); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix2 Mult(Matrix2x4 left, Matrix4x2 right) - { - Matrix2 result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static void Mult(ref Matrix2x4 left, ref Matrix4x3 right, out Matrix2x3 result) - { - float lM11 = left.Row0.X, lM12 = left.Row0.Y, lM13 = left.Row0.Z, lM14 = left.Row0.W, - lM21 = left.Row1.X, lM22 = left.Row1.Y, lM23 = left.Row1.Z, lM24 = left.Row1.W, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z, - rM31 = right.Row2.X, rM32 = right.Row2.Y, rM33 = right.Row2.Z, - rM41 = right.Row3.X, rM42 = right.Row3.Y, rM43 = right.Row3.Z; - - result.Row0.X = (((lM11 * rM11) + (lM12 * rM21)) + (lM13 * rM31)) + (lM14 * rM41); - result.Row0.Y = (((lM11 * rM12) + (lM12 * rM22)) + (lM13 * rM32)) + (lM14 * rM42); - result.Row0.Z = (((lM11 * rM13) + (lM12 * rM23)) + (lM13 * rM33)) + (lM14 * rM43); - result.Row1.X = (((lM21 * rM11) + (lM22 * rM21)) + (lM23 * rM31)) + (lM24 * rM41); - result.Row1.Y = (((lM21 * rM12) + (lM22 * rM22)) + (lM23 * rM32)) + (lM24 * rM42); - result.Row1.Z = (((lM21 * rM13) + (lM22 * rM23)) + (lM23 * rM33)) + (lM24 * rM43); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix2x3 Mult(Matrix2x4 left, Matrix4x3 right) - { - Matrix2x3 result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. 
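As with the 2x3 case, the easiest way to exercise the (2x4)·(4x2) overload without hand-building a Matrix4x2 is to transpose a 2x4 (Transpose is defined a little further down in this hunk):

    var a = new Matrix2x4(1, 0, 2, 0,
                          0, 1, 0, 3);
    Matrix4x2 aT = Matrix2x4.Transpose(a);
    Matrix2 gram = a * aT;   // (2x4)·(4x2) -> 2x2
    // gram rows are (5, 0) and (0, 10): the rows of a happen to be orthogonal here.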
- public static void Mult(ref Matrix2x4 left, ref Matrix4 right, out Matrix2x4 result) - { - float lM11 = left.Row0.X, lM12 = left.Row0.Y, lM13 = left.Row0.Z, lM14 = left.Row0.W, - lM21 = left.Row1.X, lM22 = left.Row1.Y, lM23 = left.Row1.Z, lM24 = left.Row1.W, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, rM14 = right.Row0.W, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z, rM24 = right.Row1.W, - rM31 = right.Row2.X, rM32 = right.Row2.Y, rM33 = right.Row2.Z, rM34 = right.Row2.W, - rM41 = right.Row3.X, rM42 = right.Row3.Y, rM43 = right.Row3.Z, rM44 = right.Row3.W; - - result.Row0.X = (((lM11 * rM11) + (lM12 * rM21)) + (lM13 * rM31)) + (lM14 * rM41); - result.Row0.Y = (((lM11 * rM12) + (lM12 * rM22)) + (lM13 * rM32)) + (lM14 * rM42); - result.Row0.Z = (((lM11 * rM13) + (lM12 * rM23)) + (lM13 * rM33)) + (lM14 * rM43); - result.Row0.W = (((lM11 * rM14) + (lM12 * rM24)) + (lM13 * rM34)) + (lM14 * rM44); - result.Row1.X = (((lM21 * rM11) + (lM22 * rM21)) + (lM23 * rM31)) + (lM24 * rM41); - result.Row1.Y = (((lM21 * rM12) + (lM22 * rM22)) + (lM23 * rM32)) + (lM24 * rM42); - result.Row1.Z = (((lM21 * rM13) + (lM22 * rM23)) + (lM23 * rM33)) + (lM24 * rM43); - result.Row1.W = (((lM21 * rM14) + (lM22 * rM24)) + (lM23 * rM34)) + (lM24 * rM44); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix2x4 Mult(Matrix2x4 left, Matrix4 right) - { - Matrix2x4 result; - Mult(ref left, ref right, out result); - return result; - } - - #endregion - - #region Add - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. - public static void Add(ref Matrix2x4 left, ref Matrix2x4 right, out Matrix2x4 result) - { - result.Row0.X = left.Row0.X + right.Row0.X; - result.Row0.Y = left.Row0.Y + right.Row0.Y; - result.Row0.Z = left.Row0.Z + right.Row0.Z; - result.Row0.W = left.Row0.W + right.Row0.W; - result.Row1.X = left.Row1.X + right.Row1.X; - result.Row1.Y = left.Row1.Y + right.Row1.Y; - result.Row1.Z = left.Row1.Z + right.Row1.Z; - result.Row1.W = left.Row1.W + right.Row1.W; - } - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. - public static Matrix2x4 Add(Matrix2x4 left, Matrix2x4 right) - { - Matrix2x4 result; - Add(ref left, ref right, out result); - return result; - } - - #endregion - - #region Subtract - - /// - /// Subtracts two instances. - /// - /// The left operand of the subtraction. - /// The right operand of the subtraction. - /// A new instance that is the result of the subtraction. - public static void Subtract(ref Matrix2x4 left, ref Matrix2x4 right, out Matrix2x4 result) - { - result.Row0.X = left.Row0.X - right.Row0.X; - result.Row0.Y = left.Row0.Y - right.Row0.Y; - result.Row0.Z = left.Row0.Z - right.Row0.Z; - result.Row0.W = left.Row0.W - right.Row0.W; - result.Row1.X = left.Row1.X - right.Row1.X; - result.Row1.Y = left.Row1.Y - right.Row1.Y; - result.Row1.Z = left.Row1.Z - right.Row1.Z; - result.Row1.W = left.Row1.W - right.Row1.W; - } - - /// - /// Subtracts two instances. - /// - /// The left operand of the subtraction. - /// The right operand of the subtraction. - /// A new instance that is the result of the subtraction. 
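A sanity check for the full (2x4)·(4x4) overload; Matrix4.Identity is assumed here, since it belongs to OpenTK's Matrix4 rather than to this hunk:

    var m = new Matrix2x4(1, 2, 3, 4,
                          5, 6, 7, 8);
    Matrix2x4 same = m * Matrix4.Identity;   // the 4x4 identity on the right leaves m unchanged
    bool unchanged = same == m;              // true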
- public static Matrix2x4 Subtract(Matrix2x4 left, Matrix2x4 right) - { - Matrix2x4 result; - Subtract(ref left, ref right, out result); - return result; - } - - #endregion - - #region Transpose - - /// - /// Calculate the transpose of the given matrix. - /// - /// The matrix to transpose. - /// The transpose of the given matrix. - public static void Transpose(ref Matrix2x4 mat, out Matrix4x2 result) - { - result.Row0.X = mat.Row0.X; - result.Row0.Y = mat.Row1.X; - result.Row1.X = mat.Row0.Y; - result.Row1.Y = mat.Row1.Y; - result.Row2.X = mat.Row0.Z; - result.Row2.Y = mat.Row1.Z; - result.Row3.X = mat.Row0.W; - result.Row3.Y = mat.Row1.W; - } - - /// - /// Calculate the transpose of the given matrix. - /// - /// The matrix to transpose. - /// The transpose of the given matrix. - public static Matrix4x2 Transpose(Matrix2x4 mat) - { - Matrix4x2 result; - Transpose(ref mat, out result); - return result; - } - - #endregion - - #endregion - - #region Operators - - /// - /// Scalar multiplication. - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2x4 which holds the result of the multiplication - public static Matrix2x4 operator *(float left, Matrix2x4 right) - { - return Mult(right, left); - } - - /// - /// Scalar multiplication. - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2x4 which holds the result of the multiplication - public static Matrix2x4 operator *(Matrix2x4 left, float right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2 which holds the result of the multiplication - public static Matrix2 operator *(Matrix2x4 left, Matrix4x2 right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2x3 which holds the result of the multiplication - public static Matrix2x3 operator *(Matrix2x4 left, Matrix4x3 right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2x4 which holds the result of the multiplication - public static Matrix2x4 operator *(Matrix2x4 left, Matrix4 right) - { - return Mult(left, right); - } - - /// - /// Matrix addition - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2 which holds the result of the addition - public static Matrix2x4 operator +(Matrix2x4 left, Matrix2x4 right) - { - return Add(left, right); - } - - /// - /// Matrix subtraction - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2x4 which holds the result of the subtraction - public static Matrix2x4 operator -(Matrix2x4 left, Matrix2x4 right) - { - return Subtract(left, right); - } - - /// - /// Compares two instances for equality. - /// - /// The first instance. - /// The second instance. - /// True, if left equals right; false otherwise. - public static bool operator ==(Matrix2x4 left, Matrix2x4 right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for inequality. - /// - /// The first instance. - /// The second instance. - /// True, if left does not equal right; false otherwise. - public static bool operator !=(Matrix2x4 left, Matrix2x4 right) - { - return !left.Equals(right); - } - - #endregion - - #region Overrides - - #region public override string ToString() - - /// - /// Returns a System.String that represents the current Matrix4. - /// - /// The string representation of the matrix. 
- public override string ToString() - { - return String.Format("{0}\n{1}", Row0, Row1); - } - - #endregion - - #region public override int GetHashCode() - - /// - /// Returns the hashcode for this instance. - /// - /// A System.Int32 containing the unique hashcode for this instance. - public override int GetHashCode() - { - return Row0.GetHashCode() ^ Row1.GetHashCode(); - } - - #endregion - - #region public override bool Equals(object obj) - - /// - /// Indicates whether this instance and a specified object are equal. - /// - /// The object to compare to. - /// True if the instances are equal; false otherwise. - public override bool Equals(object obj) - { - if (!(obj is Matrix2x4)) - return false; - - return this.Equals((Matrix2x4)obj); - } - - #endregion - - #endregion - - #endregion - - #region IEquatable Members - - /// - /// Indicates whether the current matrix is equal to another matrix. - /// - /// An matrix to compare with this matrix. - /// true if the current matrix is equal to the matrix parameter; otherwise, false. - public bool Equals(Matrix2x4 other) - { - return - Row0 == other.Row0 && - Row1 == other.Row1; - } - - #endregion - } -} diff --git a/OpenTK/Math/Matrix2x4d.cs b/OpenTK/Math/Matrix2x4d.cs deleted file mode 100644 index deb2b1cb..00000000 --- a/OpenTK/Math/Matrix2x4d.cs +++ /dev/null @@ -1,761 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - */ -#endregion - -using System; -using System.Runtime.InteropServices; - -namespace OpenTK -{ - /// - /// Represents a 2x4 matrix. - /// - public struct Matrix2x4d : IEquatable - { - #region Fields - - /// - /// Top row of the matrix. - /// - public Vector4d Row0; - - /// - /// Bottom row of the matrix. - /// - public Vector4d Row1; - - /// - /// The zero matrix. - /// - public static readonly Matrix2x4d Zero = new Matrix2x4d(Vector4d.Zero, Vector4d.Zero); - - #endregion - - #region Constructors - - /// - /// Constructs a new instance. - /// - /// Top row of the matrix. - /// Bottom row of the matrix. - public Matrix2x4d(Vector4d row0, Vector4d row1) - { - Row0 = row0; - Row1 = row1; - } - - /// - /// Constructs a new instance - /// - /// First item of the first row of the matrix. - /// Second item of the first row of the matrix. - /// Third item of the first row of the matrix. - /// Fourth item of the first row of the matrix. - /// First item of the second row of the matrix. - /// Second item of the second row of the matrix. 
- /// Third item of the second row of the matrix. - /// Fourth item of the second row of the matrix. - public Matrix2x4d( - double m00, double m01, double m02, double m03, - double m10, double m11, double m12, double m13) - { - Row0 = new Vector4d(m00, m01, m02, m03); - Row1 = new Vector4d(m10, m11, m12, m13); - } - - #endregion - - #region Public Members - - #region Properties - - /// - /// Gets or sets the first column of the matrix. - /// - public Vector2d Column0 - { - get { return new Vector2d(Row0.X, Row1.X); } - set { Row0.X = value.X; Row1.X = value.Y; } - } - - /// - /// Gets or sets the second column of the matrix. - /// - public Vector2d Column1 - { - get { return new Vector2d(Row0.Y, Row1.Y); } - set { Row0.Y = value.X; Row1.Y = value.Y; } - } - - /// - /// Gets or sets the third column of the matrix. - /// - public Vector2d Column2 - { - get { return new Vector2d(Row0.Z, Row1.Z); } - set { Row0.Z = value.X; Row1.Z = value.Y; } - } - - /// - /// Gets or sets the fourth column of the matrix. - /// - public Vector2d Column3 - { - get { return new Vector2d(Row0.W, Row1.W); } - set { Row0.W = value.X; Row1.W = value.Y; } - } - - /// - /// Gets or sets the value at row 1, column 1 of this instance. - /// - public double M11 { get { return Row0.X; } set { Row0.X = value; } } - - /// - /// Gets or sets the value at row 1, column 2 of this instance. - /// - public double M12 { get { return Row0.Y; } set { Row0.Y = value; } } - - /// - /// Gets or sets the value at row 1, column 3 of this instance. - /// - public double M13 { get { return Row0.Z; } set { Row0.Z = value; } } - - /// - /// Gets or sets the value at row 1, column 4 of this instance. - /// - public double M14 { get { return Row0.W; } set { Row0.W = value; } } - - /// - /// Gets or sets the value at row 2, column 1 of this instance. - /// - public double M21 { get { return Row1.X; } set { Row1.X = value; } } - - /// - /// Gets or sets the value at row 2, column 2 of this instance. - /// - public double M22 { get { return Row1.Y; } set { Row1.Y = value; } } - - /// - /// Gets or sets the value at row 2, column 3 of this instance. - /// - public double M23 { get { return Row1.Z; } set { Row1.Z = value; } } - - /// - /// Gets or sets the value at row 2, column 4 of this instance. - /// - public double M24 { get { return Row1.W; } set { Row1.W = value; } } - - /// - /// Gets or sets the values along the main diagonal of the matrix. - /// - public Vector2d Diagonal - { - get - { - return new Vector2d(Row0.X, Row1.Y); - } - set - { - Row0.X = value.X; - Row1.Y = value.Y; - } - } - - /// - /// Gets the trace of the matrix, the sum of the values along the diagonal. - /// - public double Trace { get { return Row0.X + Row1.Y; } } - - #endregion - - #region Indexers - - /// - /// Gets or sets the value at a specified row and column. - /// - public double this[int rowIndex, int columnIndex] - { - get - { - if (rowIndex == 0) return Row0[columnIndex]; - else if (rowIndex == 1) return Row1[columnIndex]; - throw new IndexOutOfRangeException("You tried to access this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - set - { - if (rowIndex == 0) Row0[columnIndex] = value; - else if (rowIndex == 1) Row1[columnIndex] = value; - else throw new IndexOutOfRangeException("You tried to set this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - } - - #endregion - - #region Static - - #region CreateRotation - - /// - /// Builds a rotation matrix. - /// - /// The counter-clockwise angle in radians. 
- /// The resulting Matrix2x4d instance. - public static void CreateRotation(double angle, out Matrix2x4d result) - { - double cos = System.Math.Cos(angle); - double sin = System.Math.Sin(angle); - - result.Row0.X = cos; - result.Row0.Y = sin; - result.Row0.Z = 0; - result.Row0.W = 0; - result.Row1.X = -sin; - result.Row1.Y = cos; - result.Row1.Z = 0; - result.Row1.W = 0; - } - - /// - /// Builds a rotation matrix. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix2x3d instance. - public static Matrix2x4d CreateRotation(double angle) - { - Matrix2x4d result; - CreateRotation(angle, out result); - return result; - } - - #endregion - - #region CreateScale - - /// - /// Creates a scale matrix. - /// - /// Single scale factor for the x, y, and z axes. - /// A scale matrix. - public static void CreateScale(double scale, out Matrix2x4d result) - { - result.Row0.X = scale; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row0.W = 0; - result.Row1.X = 0; - result.Row1.Y = scale; - result.Row1.Z = 0; - result.Row1.W = 0; - } - - /// - /// Creates a scale matrix. - /// - /// Single scale factor for the x and y axes. - /// A scale matrix. - public static Matrix2x4d CreateScale(double scale) - { - Matrix2x4d result; - CreateScale(scale, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factors for the x and y axes. - /// A scale matrix. - public static void CreateScale(Vector2d scale, out Matrix2x4d result) - { - result.Row0.X = scale.X; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row0.W = 0; - result.Row1.X = 0; - result.Row1.Y = scale.Y; - result.Row1.Z = 0; - result.Row1.W = 0; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factors for the x and y axes. - /// A scale matrix. - public static Matrix2x4d CreateScale(Vector2d scale) - { - Matrix2x4d result; - CreateScale(scale, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factor for the x axis. - /// Scale factor for the y axis. - /// A scale matrix. - public static void CreateScale(double x, double y, out Matrix2x4d result) - { - result.Row0.X = x; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row0.W = 0; - result.Row1.X = 0; - result.Row1.Y = y; - result.Row1.Z = 0; - result.Row1.W = 0; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factor for the x axis. - /// Scale factor for the y axis. - /// A scale matrix. - public static Matrix2x4d CreateScale(double x, double y) - { - Matrix2x4d result; - CreateScale(x, y, out result); - return result; - } - - #endregion - - #region Multiply Functions - - /// - /// Multiplies and instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static void Mult(ref Matrix2x4d left, double right, out Matrix2x4d result) - { - result.Row0.X = left.Row0.X * right; - result.Row0.Y = left.Row0.Y * right; - result.Row0.Z = left.Row0.Z * right; - result.Row0.W = left.Row0.W * right; - result.Row1.X = left.Row1.X * right; - result.Row1.Y = left.Row1.Y * right; - result.Row1.Z = left.Row1.Z * right; - result.Row1.W = left.Row1.W * right; - } - - /// - /// Multiplies and instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. 
- public static Matrix2x4d Mult(Matrix2x4d left, double right) - { - Matrix2x4d result; - Mult(ref left, right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static void Mult(ref Matrix2x4d left, ref Matrix4x2 right, out Matrix2d result) - { - double lM11 = left.Row0.X, lM12 = left.Row0.Y, lM13 = left.Row0.Z, lM14 = left.Row0.W, - lM21 = left.Row1.X, lM22 = left.Row1.Y, lM23 = left.Row1.Z, lM24 = left.Row1.W, - rM11 = right.Row0.X, rM12 = right.Row0.Y, - rM21 = right.Row1.X, rM22 = right.Row1.Y, - rM31 = right.Row2.X, rM32 = right.Row2.Y, - rM41 = right.Row3.X, rM42 = right.Row3.Y; - - result.Row0.X = (((lM11 * rM11) + (lM12 * rM21)) + (lM13 * rM31)) + (lM14 * rM41); - result.Row0.Y = (((lM11 * rM12) + (lM12 * rM22)) + (lM13 * rM32)) + (lM14 * rM42); - result.Row1.X = (((lM21 * rM11) + (lM22 * rM21)) + (lM23 * rM31)) + (lM24 * rM41); - result.Row1.Y = (((lM21 * rM12) + (lM22 * rM22)) + (lM23 * rM32)) + (lM24 * rM42); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix2d Mult(Matrix2x4d left, Matrix4x2 right) - { - Matrix2d result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static void Mult(ref Matrix2x4d left, ref Matrix4x3 right, out Matrix2x3d result) - { - double lM11 = left.Row0.X, lM12 = left.Row0.Y, lM13 = left.Row0.Z, lM14 = left.Row0.W, - lM21 = left.Row1.X, lM22 = left.Row1.Y, lM23 = left.Row1.Z, lM24 = left.Row1.W, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z, - rM31 = right.Row2.X, rM32 = right.Row2.Y, rM33 = right.Row2.Z, - rM41 = right.Row3.X, rM42 = right.Row3.Y, rM43 = right.Row3.Z; - - result.Row0.X = (((lM11 * rM11) + (lM12 * rM21)) + (lM13 * rM31)) + (lM14 * rM41); - result.Row0.Y = (((lM11 * rM12) + (lM12 * rM22)) + (lM13 * rM32)) + (lM14 * rM42); - result.Row0.Z = (((lM11 * rM13) + (lM12 * rM23)) + (lM13 * rM33)) + (lM14 * rM43); - result.Row1.X = (((lM21 * rM11) + (lM22 * rM21)) + (lM23 * rM31)) + (lM24 * rM41); - result.Row1.Y = (((lM21 * rM12) + (lM22 * rM22)) + (lM23 * rM32)) + (lM24 * rM42); - result.Row1.Z = (((lM21 * rM13) + (lM22 * rM23)) + (lM23 * rM33)) + (lM24 * rM43); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix2x3d Mult(Matrix2x4d left, Matrix4x3 right) - { - Matrix2x3d result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. 
- public static void Mult(ref Matrix2x4d left, ref Matrix4 right, out Matrix2x4d result) - { - double lM11 = left.Row0.X, lM12 = left.Row0.Y, lM13 = left.Row0.Z, lM14 = left.Row0.W, - lM21 = left.Row1.X, lM22 = left.Row1.Y, lM23 = left.Row1.Z, lM24 = left.Row1.W, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, rM14 = right.Row0.W, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z, rM24 = right.Row1.W, - rM31 = right.Row2.X, rM32 = right.Row2.Y, rM33 = right.Row2.Z, rM34 = right.Row2.W, - rM41 = right.Row3.X, rM42 = right.Row3.Y, rM43 = right.Row3.Z, rM44 = right.Row3.W; - - result.Row0.X = (((lM11 * rM11) + (lM12 * rM21)) + (lM13 * rM31)) + (lM14 * rM41); - result.Row0.Y = (((lM11 * rM12) + (lM12 * rM22)) + (lM13 * rM32)) + (lM14 * rM42); - result.Row0.Z = (((lM11 * rM13) + (lM12 * rM23)) + (lM13 * rM33)) + (lM14 * rM43); - result.Row0.W = (((lM11 * rM14) + (lM12 * rM24)) + (lM13 * rM34)) + (lM14 * rM44); - result.Row1.X = (((lM21 * rM11) + (lM22 * rM21)) + (lM23 * rM31)) + (lM24 * rM41); - result.Row1.Y = (((lM21 * rM12) + (lM22 * rM22)) + (lM23 * rM32)) + (lM24 * rM42); - result.Row1.Z = (((lM21 * rM13) + (lM22 * rM23)) + (lM23 * rM33)) + (lM24 * rM43); - result.Row1.W = (((lM21 * rM14) + (lM22 * rM24)) + (lM23 * rM34)) + (lM24 * rM44); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix2x4d Mult(Matrix2x4d left, Matrix4 right) - { - Matrix2x4d result; - Mult(ref left, ref right, out result); - return result; - } - - #endregion - - #region Add - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. - public static void Add(ref Matrix2x4d left, ref Matrix2x4d right, out Matrix2x4d result) - { - result.Row0.X = left.Row0.X + right.Row0.X; - result.Row0.Y = left.Row0.Y + right.Row0.Y; - result.Row0.Z = left.Row0.Z + right.Row0.Z; - result.Row0.W = left.Row0.W + right.Row0.W; - result.Row1.X = left.Row1.X + right.Row1.X; - result.Row1.Y = left.Row1.Y + right.Row1.Y; - result.Row1.Z = left.Row1.Z + right.Row1.Z; - result.Row1.W = left.Row1.W + right.Row1.W; - } - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. - public static Matrix2x4d Add(Matrix2x4d left, Matrix2x4d right) - { - Matrix2x4d result; - Add(ref left, ref right, out result); - return result; - } - - #endregion - - #region Subtract - - /// - /// Subtracts two instances. - /// - /// The left operand of the subtraction. - /// The right operand of the subtraction. - /// A new instance that is the result of the subtraction. - public static void Subtract(ref Matrix2x4d left, ref Matrix2x4d right, out Matrix2x4d result) - { - result.Row0.X = left.Row0.X - right.Row0.X; - result.Row0.Y = left.Row0.Y - right.Row0.Y; - result.Row0.Z = left.Row0.Z - right.Row0.Z; - result.Row0.W = left.Row0.W - right.Row0.W; - result.Row1.X = left.Row1.X - right.Row1.X; - result.Row1.Y = left.Row1.Y - right.Row1.Y; - result.Row1.Z = left.Row1.Z - right.Row1.Z; - result.Row1.W = left.Row1.W - right.Row1.W; - } - - /// - /// Subtracts two instances. - /// - /// The left operand of the subtraction. - /// The right operand of the subtraction. - /// A new instance that is the result of the subtraction. 
- public static Matrix2x4d Subtract(Matrix2x4d left, Matrix2x4d right) - { - Matrix2x4d result; - Subtract(ref left, ref right, out result); - return result; - } - - #endregion - - #region Transpose - - /// - /// Calculate the transpose of the given matrix. - /// - /// The matrix to transpose. - /// The transpose of the given matrix. - public static void Transpose(ref Matrix2x4d mat, out Matrix4x2d result) - { - result.Row0.X = mat.Row0.X; - result.Row0.Y = mat.Row1.X; - result.Row1.X = mat.Row0.Y; - result.Row1.Y = mat.Row1.Y; - result.Row2.X = mat.Row0.Z; - result.Row2.Y = mat.Row1.Z; - result.Row3.X = mat.Row0.W; - result.Row3.Y = mat.Row1.W; - } - - /// - /// Calculate the transpose of the given matrix. - /// - /// The matrix to transpose. - /// The transpose of the given matrix. - public static Matrix4x2d Transpose(Matrix2x4d mat) - { - Matrix4x2d result; - Transpose(ref mat, out result); - return result; - } - - #endregion - - #endregion - - #region Operators - - /// - /// Scalar multiplication. - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2x4d which holds the result of the multiplication - public static Matrix2x4d operator *(double left, Matrix2x4d right) - { - return Mult(right, left); - } - - /// - /// Scalar multiplication. - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2x4d which holds the result of the multiplication - public static Matrix2x4d operator *(Matrix2x4d left, double right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2d which holds the result of the multiplication - public static Matrix2d operator *(Matrix2x4d left, Matrix4x2 right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2x3d which holds the result of the multiplication - public static Matrix2x3d operator *(Matrix2x4d left, Matrix4x3 right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2x4d which holds the result of the multiplication - public static Matrix2x4d operator *(Matrix2x4d left, Matrix4 right) - { - return Mult(left, right); - } - - /// - /// Matrix addition - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2d which holds the result of the addition - public static Matrix2x4d operator +(Matrix2x4d left, Matrix2x4d right) - { - return Add(left, right); - } - - /// - /// Matrix subtraction - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2x4d which holds the result of the subtraction - public static Matrix2x4d operator -(Matrix2x4d left, Matrix2x4d right) - { - return Subtract(left, right); - } - - /// - /// Compares two instances for equality. - /// - /// The first instance. - /// The second instance. - /// True, if left equals right; false otherwise. - public static bool operator ==(Matrix2x4d left, Matrix2x4d right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for inequality. - /// - /// The first instance. - /// The second instance. - /// True, if left does not equal right; false otherwise. - public static bool operator !=(Matrix2x4d left, Matrix2x4d right) - { - return !left.Equals(right); - } - - #endregion - - #region Overrides - - #region public override string ToString() - - /// - /// Returns a System.String that represents the current Matrix4. 
- /// - /// The string representation of the matrix. - public override string ToString() - { - return String.Format("{0}\n{1}", Row0, Row1); - } - - #endregion - - #region public override int GetHashCode() - - /// - /// Returns the hashcode for this instance. - /// - /// A System.Int32 containing the unique hashcode for this instance. - public override int GetHashCode() - { - return Row0.GetHashCode() ^ Row1.GetHashCode(); - } - - #endregion - - #region public override bool Equals(object obj) - - /// - /// Indicates whether this instance and a specified object are equal. - /// - /// The object to compare to. - /// True if the instances are equal; false otherwise. - public override bool Equals(object obj) - { - if (!(obj is Matrix2x4d)) - return false; - - return this.Equals((Matrix2x4d)obj); - } - - #endregion - - #endregion - - #endregion - - #region IEquatable Members - - /// - /// Indicates whether the current matrix is equal to another matrix. - /// - /// An matrix to compare with this matrix. - /// true if the current matrix is equal to the matrix parameter; otherwise, false. - public bool Equals(Matrix2x4d other) - { - return - Row0 == other.Row0 && - Row1 == other.Row1; - } - - #endregion - } -} diff --git a/OpenTK/Math/Matrix3.cs b/OpenTK/Math/Matrix3.cs deleted file mode 100644 index cd2f3bc7..00000000 --- a/OpenTK/Math/Matrix3.cs +++ /dev/null @@ -1,974 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - */ -#endregion - -using System; -using System.Runtime.InteropServices; - -namespace OpenTK -{ - /// - /// Represents a 3x3 matrix containing 3D rotation and scale. - /// - [Serializable] - [StructLayout(LayoutKind.Sequential)] - public struct Matrix3 : IEquatable - { - #region Fields - - /// - /// First row of the matrix. - /// - public Vector3 Row0; - - /// - /// Second row of the matrix. - /// - public Vector3 Row1; - - /// - /// Third row of the matrix. - /// - public Vector3 Row2; - - /// - /// The identity matrix. - /// - public static readonly Matrix3 Identity = new Matrix3(Vector3.UnitX, Vector3.UnitY, Vector3.UnitZ); - - /// - /// The zero matrix. - /// - public static readonly Matrix3 Zero = new Matrix3(Vector3.Zero, Vector3.Zero, Vector3.Zero); - - #endregion - - #region Constructors - - /// - /// Constructs a new instance. 
- /// - /// Top row of the matrix - /// Second row of the matrix - /// Bottom row of the matrix - public Matrix3(Vector3 row0, Vector3 row1, Vector3 row2) - { - Row0 = row0; - Row1 = row1; - Row2 = row2; - } - - /// - /// Constructs a new instance. - /// - /// First item of the first row of the matrix. - /// Second item of the first row of the matrix. - /// Third item of the first row of the matrix. - /// First item of the second row of the matrix. - /// Second item of the second row of the matrix. - /// Third item of the second row of the matrix. - /// First item of the third row of the matrix. - /// Second item of the third row of the matrix. - /// Third item of the third row of the matrix. - public Matrix3( - float m00, float m01, float m02, - float m10, float m11, float m12, - float m20, float m21, float m22) - { - Row0 = new Vector3(m00, m01, m02); - Row1 = new Vector3(m10, m11, m12); - Row2 = new Vector3(m20, m21, m22); - } - - /// - /// Constructs a new instance. - /// - /// A Matrix4 to take the upper-left 3x3 from. - public Matrix3(Matrix4 matrix) - { - Row0 = matrix.Row0.Xyz; - Row1 = matrix.Row1.Xyz; - Row2 = matrix.Row2.Xyz; - } - - #endregion - - #region Public Members - - #region Properties - - /// - /// Gets the determinant of this matrix. - /// - public float Determinant - { - get - { - float m11 = Row0.X, m12 = Row0.Y, m13 = Row0.Z, - m21 = Row1.X, m22 = Row1.Y, m23 = Row1.Z, - m31 = Row2.X, m32 = Row2.Y, m33 = Row2.Z; - - return m11 * m22 * m33 + m12 * m23 * m31 + m13 * m21 * m32 - - m13 * m22 * m31 - m11 * m23 * m32 - m12 * m21 * m33; - } - } - - /// - /// Gets the first column of this matrix. - /// - public Vector3 Column0 - { - get { return new Vector3(Row0.X, Row1.X, Row2.X); } - } - - /// - /// Gets the second column of this matrix. - /// - public Vector3 Column1 - { - get { return new Vector3(Row0.Y, Row1.Y, Row2.Y); } - } - - /// - /// Gets the third column of this matrix. - /// - public Vector3 Column2 - { - get { return new Vector3(Row0.Z, Row1.Z, Row2.Z); } - } - - /// - /// Gets or sets the value at row 1, column 1 of this instance. - /// - public float M11 { get { return Row0.X; } set { Row0.X = value; } } - - /// - /// Gets or sets the value at row 1, column 2 of this instance. - /// - public float M12 { get { return Row0.Y; } set { Row0.Y = value; } } - - /// - /// Gets or sets the value at row 1, column 3 of this instance. - /// - public float M13 { get { return Row0.Z; } set { Row0.Z = value; } } - - /// - /// Gets or sets the value at row 2, column 1 of this instance. - /// - public float M21 { get { return Row1.X; } set { Row1.X = value; } } - - /// - /// Gets or sets the value at row 2, column 2 of this instance. - /// - public float M22 { get { return Row1.Y; } set { Row1.Y = value; } } - - /// - /// Gets or sets the value at row 2, column 3 of this instance. - /// - public float M23 { get { return Row1.Z; } set { Row1.Z = value; } } - - /// - /// Gets or sets the value at row 3, column 1 of this instance. - /// - public float M31 { get { return Row2.X; } set { Row2.X = value; } } - - /// - /// Gets or sets the value at row 3, column 2 of this instance. - /// - public float M32 { get { return Row2.Y; } set { Row2.Y = value; } } - - /// - /// Gets or sets the value at row 3, column 3 of this instance. - /// - public float M33 { get { return Row2.Z; } set { Row2.Z = value; } } - - /// - /// Gets or sets the values along the main diagonal of the matrix. 
- /// - public Vector3 Diagonal - { - get - { - return new Vector3(Row0.X, Row1.Y, Row2.Z); - } - set - { - Row0.X = value.X; - Row1.Y = value.Y; - Row2.Z = value.Z; - } - } - - /// - /// Gets the trace of the matrix, the sum of the values along the diagonal. - /// - public float Trace { get { return Row0.X + Row1.Y + Row2.Z; } } - - #endregion - - #region Indexers - - /// - /// Gets or sets the value at a specified row and column. - /// - public float this[int rowIndex, int columnIndex] - { - get - { - if (rowIndex == 0) return Row0[columnIndex]; - else if (rowIndex == 1) return Row1[columnIndex]; - else if (rowIndex == 2) return Row2[columnIndex]; - throw new IndexOutOfRangeException("You tried to access this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - set - { - if (rowIndex == 0) Row0[columnIndex] = value; - else if (rowIndex == 1) Row1[columnIndex] = value; - else if (rowIndex == 2) Row2[columnIndex] = value; - else throw new IndexOutOfRangeException("You tried to set this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - } - - #endregion - - #region Instance - - #region public void Invert() - - /// - /// Converts this instance into its inverse. - /// - public void Invert() - { - this = Matrix3.Invert(this); - } - - #endregion - - #region public void Transpose() - - /// - /// Converts this instance into its transpose. - /// - public void Transpose() - { - this = Matrix3.Transpose(this); - } - - #endregion - - /// - /// Returns a normalised copy of this instance. - /// - public Matrix3 Normalized() - { - Matrix3 m = this; - m.Normalize(); - return m; - } - - /// - /// Divides each element in the Matrix by the . - /// - public void Normalize() - { - var determinant = this.Determinant; - Row0 /= determinant; - Row1 /= determinant; - Row2 /= determinant; - } - - /// - /// Returns an inverted copy of this instance. - /// - public Matrix3 Inverted() - { - Matrix3 m = this; - if (m.Determinant != 0) - m.Invert(); - return m; - } - - /// - /// Returns a copy of this Matrix3 without scale. - /// - public Matrix3 ClearScale() - { - Matrix3 m = this; - m.Row0 = m.Row0.Normalized(); - m.Row1 = m.Row1.Normalized(); - m.Row2 = m.Row2.Normalized(); - return m; - } - /// - /// Returns a copy of this Matrix3 without rotation. - /// - public Matrix3 ClearRotation() - { - Matrix3 m = this; - m.Row0 = new Vector3(m.Row0.Length, 0, 0); - m.Row1 = new Vector3(0, m.Row1.Length, 0); - m.Row2 = new Vector3(0, 0, m.Row2.Length); - return m; - } - - /// - /// Returns the scale component of this instance. - /// - public Vector3 ExtractScale() { return new Vector3(Row0.Length, Row1.Length, Row2.Length); } - - /// - /// Returns the rotation component of this instance. Quite slow. - /// - /// Whether the method should row-normalise (i.e. remove scale from) the Matrix. Pass false if you know it's already normalised. 
- public Quaternion ExtractRotation(bool row_normalise = true) - { - var row0 = Row0; - var row1 = Row1; - var row2 = Row2; - - if (row_normalise) - { - row0 = row0.Normalized(); - row1 = row1.Normalized(); - row2 = row2.Normalized(); - } - - // code below adapted from Blender - - Quaternion q = new Quaternion(); - double trace = 0.25 * (row0[0] + row1[1] + row2[2] + 1.0); - - if (trace > 0) - { - double sq = Math.Sqrt(trace); - - q.W = (float)sq; - sq = 1.0 / (4.0 * sq); - q.X = (float)((row1[2] - row2[1]) * sq); - q.Y = (float)((row2[0] - row0[2]) * sq); - q.Z = (float)((row0[1] - row1[0]) * sq); - } - else if (row0[0] > row1[1] && row0[0] > row2[2]) - { - double sq = 2.0 * Math.Sqrt(1.0 + row0[0] - row1[1] - row2[2]); - - q.X = (float)(0.25 * sq); - sq = 1.0 / sq; - q.W = (float)((row2[1] - row1[2]) * sq); - q.Y = (float)((row1[0] + row0[1]) * sq); - q.Z = (float)((row2[0] + row0[2]) * sq); - } - else if (row1[1] > row2[2]) - { - double sq = 2.0 * Math.Sqrt(1.0 + row1[1] - row0[0] - row2[2]); - - q.Y = (float)(0.25 * sq); - sq = 1.0 / sq; - q.W = (float)((row2[0] - row0[2]) * sq); - q.X = (float)((row1[0] + row0[1]) * sq); - q.Z = (float)((row2[1] + row1[2]) * sq); - } - else - { - double sq = 2.0 * Math.Sqrt(1.0 + row2[2] - row0[0] - row1[1]); - - q.Z = (float)(0.25 * sq); - sq = 1.0 / sq; - q.W = (float)((row1[0] - row0[1]) * sq); - q.X = (float)((row2[0] + row0[2]) * sq); - q.Y = (float)((row2[1] + row1[2]) * sq); - } - - q.Normalize(); - return q; - } - - #endregion - - #region Static - - #region CreateFromAxisAngle - - /// - /// Build a rotation matrix from the specified axis/angle rotation. - /// - /// The axis to rotate about. - /// Angle in radians to rotate counter-clockwise (looking in the direction of the given axis). - /// A matrix instance. - public static void CreateFromAxisAngle(Vector3 axis, float angle, out Matrix3 result) - { - //normalize and create a local copy of the vector. - axis.Normalize(); - float axisX = axis.X, axisY = axis.Y, axisZ = axis.Z; - - //calculate angles - float cos = (float)System.Math.Cos(-angle); - float sin = (float)System.Math.Sin(-angle); - float t = 1.0f - cos; - - //do the conversion math once - float tXX = t * axisX * axisX, - tXY = t * axisX * axisY, - tXZ = t * axisX * axisZ, - tYY = t * axisY * axisY, - tYZ = t * axisY * axisZ, - tZZ = t * axisZ * axisZ; - - float sinX = sin * axisX, - sinY = sin * axisY, - sinZ = sin * axisZ; - - result.Row0.X = tXX + cos; - result.Row0.Y = tXY - sinZ; - result.Row0.Z = tXZ + sinY; - result.Row1.X = tXY + sinZ; - result.Row1.Y = tYY + cos; - result.Row1.Z = tYZ - sinX; - result.Row2.X = tXZ - sinY; - result.Row2.Y = tYZ + sinX; - result.Row2.Z = tZZ + cos; - } - - /// - /// Build a rotation matrix from the specified axis/angle rotation. - /// - /// The axis to rotate about. - /// Angle in radians to rotate counter-clockwise (looking in the direction of the given axis). - /// A matrix instance. - public static Matrix3 CreateFromAxisAngle(Vector3 axis, float angle) - { - Matrix3 result; - CreateFromAxisAngle(axis, angle, out result); - return result; - } - - #endregion - - #region CreateFromQuaternion - - /// - /// Build a rotation matrix from the specified quaternion. - /// - /// Quaternion to translate. - /// Matrix result. - public static void CreateFromQuaternion(ref Quaternion q, out Matrix3 result) - { - Vector3 axis; - float angle; - q.ToAxisAngle(out axis, out angle); - CreateFromAxisAngle(axis, angle, out result); - } - - /// - /// Build a rotation matrix from the specified quaternion. 
- /// - /// Quaternion to translate. - /// A matrix instance. - public static Matrix3 CreateFromQuaternion(Quaternion q) - { - Matrix3 result; - CreateFromQuaternion(ref q, out result); - return result; - } - - #endregion - - #region CreateRotation[XYZ] - - /// - /// Builds a rotation matrix for a rotation around the x-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix3 instance. - public static void CreateRotationX(float angle, out Matrix3 result) - { - float cos = (float)System.Math.Cos(angle); - float sin = (float)System.Math.Sin(angle); - - result = Identity; - result.Row1.Y = cos; - result.Row1.Z = sin; - result.Row2.Y = -sin; - result.Row2.Z = cos; - } - - /// - /// Builds a rotation matrix for a rotation around the x-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix3 instance. - public static Matrix3 CreateRotationX(float angle) - { - Matrix3 result; - CreateRotationX(angle, out result); - return result; - } - - /// - /// Builds a rotation matrix for a rotation around the y-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix3 instance. - public static void CreateRotationY(float angle, out Matrix3 result) - { - float cos = (float)System.Math.Cos(angle); - float sin = (float)System.Math.Sin(angle); - - result = Identity; - result.Row0.X = cos; - result.Row0.Z = -sin; - result.Row2.X = sin; - result.Row2.Z = cos; - } - - /// - /// Builds a rotation matrix for a rotation around the y-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix3 instance. - public static Matrix3 CreateRotationY(float angle) - { - Matrix3 result; - CreateRotationY(angle, out result); - return result; - } - - /// - /// Builds a rotation matrix for a rotation around the z-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix3 instance. - public static void CreateRotationZ(float angle, out Matrix3 result) - { - float cos = (float)System.Math.Cos(angle); - float sin = (float)System.Math.Sin(angle); - - result = Identity; - result.Row0.X = cos; - result.Row0.Y = sin; - result.Row1.X = -sin; - result.Row1.Y = cos; - } - - /// - /// Builds a rotation matrix for a rotation around the z-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix3 instance. - public static Matrix3 CreateRotationZ(float angle) - { - Matrix3 result; - CreateRotationZ(angle, out result); - return result; - } - - #endregion - - #region CreateScale - - /// - /// Creates a scale matrix. - /// - /// Single scale factor for the x, y, and z axes. - /// A scale matrix. - public static Matrix3 CreateScale(float scale) - { - Matrix3 result; - CreateScale(scale, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factors for the x, y, and z axes. - /// A scale matrix. - public static Matrix3 CreateScale(Vector3 scale) - { - Matrix3 result; - CreateScale(ref scale, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factor for the x axis. - /// Scale factor for the y axis. - /// Scale factor for the z axis. - /// A scale matrix. - public static Matrix3 CreateScale(float x, float y, float z) - { - Matrix3 result; - CreateScale(x, y, z, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Single scale factor for the x, y, and z axes. - /// A scale matrix. 
- public static void CreateScale(float scale, out Matrix3 result) - { - result = Identity; - result.Row0.X = scale; - result.Row1.Y = scale; - result.Row2.Z = scale; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factors for the x, y, and z axes. - /// A scale matrix. - public static void CreateScale(ref Vector3 scale, out Matrix3 result) - { - result = Identity; - result.Row0.X = scale.X; - result.Row1.Y = scale.Y; - result.Row2.Z = scale.Z; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factor for the x axis. - /// Scale factor for the y axis. - /// Scale factor for the z axis. - /// A scale matrix. - public static void CreateScale(float x, float y, float z, out Matrix3 result) - { - result = Identity; - result.Row0.X = x; - result.Row1.Y = y; - result.Row2.Z = z; - } - - #endregion - - #region Multiply Functions - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication - public static Matrix3 Mult(Matrix3 left, Matrix3 right) - { - Matrix3 result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication - public static void Mult(ref Matrix3 left, ref Matrix3 right, out Matrix3 result) - { - float lM11 = left.Row0.X, lM12 = left.Row0.Y, lM13 = left.Row0.Z, - lM21 = left.Row1.X, lM22 = left.Row1.Y, lM23 = left.Row1.Z, - lM31 = left.Row2.X, lM32 = left.Row2.Y, lM33 = left.Row2.Z, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z, - rM31 = right.Row2.X, rM32 = right.Row2.Y, rM33 = right.Row2.Z; - - result.Row0.X = ((lM11 * rM11) + (lM12 * rM21)) + (lM13 * rM31); - result.Row0.Y = ((lM11 * rM12) + (lM12 * rM22)) + (lM13 * rM32); - result.Row0.Z = ((lM11 * rM13) + (lM12 * rM23)) + (lM13 * rM33); - result.Row1.X = ((lM21 * rM11) + (lM22 * rM21)) + (lM23 * rM31); - result.Row1.Y = ((lM21 * rM12) + (lM22 * rM22)) + (lM23 * rM32); - result.Row1.Z = ((lM21 * rM13) + (lM22 * rM23)) + (lM23 * rM33); - result.Row2.X = ((lM31 * rM11) + (lM32 * rM21)) + (lM33 * rM31); - result.Row2.Y = ((lM31 * rM12) + (lM32 * rM22)) + (lM33 * rM32); - result.Row2.Z = ((lM31 * rM13) + (lM32 * rM23)) + (lM33 * rM33); - } - - #endregion - - #region Invert Functions - - /// - /// Calculate the inverse of the given matrix - /// - /// The matrix to invert - /// The inverse of the given matrix if it has one, or the input if it is singular - /// Thrown if the Matrix3 is singular. 
- public static void Invert(ref Matrix3 mat, out Matrix3 result) - { - int[] colIdx = { 0, 0, 0 }; - int[] rowIdx = { 0, 0, 0 }; - int[] pivotIdx = { -1, -1, -1 }; - - float[,] inverse = {{mat.Row0.X, mat.Row0.Y, mat.Row0.Z}, - {mat.Row1.X, mat.Row1.Y, mat.Row1.Z}, - {mat.Row2.X, mat.Row2.Y, mat.Row2.Z}}; - - int icol = 0; - int irow = 0; - for (int i = 0; i < 3; i++) - { - float maxPivot = 0.0f; - for (int j = 0; j < 3; j++) - { - if (pivotIdx[j] != 0) - { - for (int k = 0; k < 3; ++k) - { - if (pivotIdx[k] == -1) - { - float absVal = System.Math.Abs(inverse[j, k]); - if (absVal > maxPivot) - { - maxPivot = absVal; - irow = j; - icol = k; - } - } - else if (pivotIdx[k] > 0) - { - result = mat; - return; - } - } - } - } - - ++(pivotIdx[icol]); - - if (irow != icol) - { - for (int k = 0; k < 3; ++k) - { - float f = inverse[irow, k]; - inverse[irow, k] = inverse[icol, k]; - inverse[icol, k] = f; - } - } - - rowIdx[i] = irow; - colIdx[i] = icol; - - float pivot = inverse[icol, icol]; - - if (pivot == 0.0f) - { - throw new InvalidOperationException("Matrix is singular and cannot be inverted."); - } - - float oneOverPivot = 1.0f / pivot; - inverse[icol, icol] = 1.0f; - for (int k = 0; k < 3; ++k) - inverse[icol, k] *= oneOverPivot; - - for (int j = 0; j < 3; ++j) - { - if (icol != j) - { - float f = inverse[j, icol]; - inverse[j, icol] = 0.0f; - for (int k = 0; k < 3; ++k) - inverse[j, k] -= inverse[icol, k] * f; - } - } - } - - for (int j = 2; j >= 0; --j) - { - int ir = rowIdx[j]; - int ic = colIdx[j]; - for (int k = 0; k < 3; ++k) - { - float f = inverse[k, ir]; - inverse[k, ir] = inverse[k, ic]; - inverse[k, ic] = f; - } - } - - result.Row0.X = inverse[0, 0]; - result.Row0.Y = inverse[0, 1]; - result.Row0.Z = inverse[0, 2]; - result.Row1.X = inverse[1, 0]; - result.Row1.Y = inverse[1, 1]; - result.Row1.Z = inverse[1, 2]; - result.Row2.X = inverse[2, 0]; - result.Row2.Y = inverse[2, 1]; - result.Row2.Z = inverse[2, 2]; - } - - /// - /// Calculate the inverse of the given matrix - /// - /// The matrix to invert - /// The inverse of the given matrix if it has one, or the input if it is singular - /// Thrown if the Matrix4 is singular. - public static Matrix3 Invert(Matrix3 mat) - { - Matrix3 result; - Invert(ref mat, out result); - return result; - } - - #endregion - - #region Transpose - - /// - /// Calculate the transpose of the given matrix - /// - /// The matrix to transpose - /// The transpose of the given matrix - public static Matrix3 Transpose(Matrix3 mat) - { - return new Matrix3(mat.Column0, mat.Column1, mat.Column2); - } - - /// - /// Calculate the transpose of the given matrix - /// - /// The matrix to transpose - /// The result of the calculation - public static void Transpose(ref Matrix3 mat, out Matrix3 result) - { - result.Row0.X = mat.Row0.X; - result.Row0.Y = mat.Row1.X; - result.Row0.Z = mat.Row2.X; - result.Row1.X = mat.Row0.Y; - result.Row1.Y = mat.Row1.Y; - result.Row1.Z = mat.Row2.Y; - result.Row2.X = mat.Row0.Z; - result.Row2.Y = mat.Row1.Z; - result.Row2.Z = mat.Row2.Z; - } - - #endregion - - #endregion - - #region Operators - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix3d which holds the result of the multiplication - public static Matrix3 operator *(Matrix3 left, Matrix3 right) - { - return Matrix3.Mult(left, right); - } - - /// - /// Compares two instances for equality. - /// - /// The first instance. - /// The second instance. - /// True, if left equals right; false otherwise. 
- public static bool operator ==(Matrix3 left, Matrix3 right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for inequality. - /// - /// The first instance. - /// The second instance. - /// True, if left does not equal right; false otherwise. - public static bool operator !=(Matrix3 left, Matrix3 right) - { - return !left.Equals(right); - } - - #endregion - - #region Overrides - - #region public override string ToString() - - /// - /// Returns a System.String that represents the current Matrix3d. - /// - /// The string representation of the matrix. - public override string ToString() - { - return String.Format("{0}\n{1}\n{2}", Row0, Row1, Row2); - } - - #endregion - - #region public override int GetHashCode() - - /// - /// Returns the hashcode for this instance. - /// - /// A System.Int32 containing the unique hashcode for this instance. - public override int GetHashCode() - { - return Row0.GetHashCode() ^ Row1.GetHashCode() ^ Row2.GetHashCode(); - } - - #endregion - - #region public override bool Equals(object obj) - - /// - /// Indicates whether this instance and a specified object are equal. - /// - /// The object to compare to. - /// True if the instances are equal; false otherwise. - public override bool Equals(object obj) - { - if (!(obj is Matrix3)) - return false; - - return this.Equals((Matrix3)obj); - } - - #endregion - - #endregion - - #endregion - - #region IEquatable Members - - /// Indicates whether the current matrix is equal to another matrix. - /// A matrix to compare with this matrix. - /// true if the current matrix is equal to the matrix parameter; otherwise, false. - public bool Equals(Matrix3 other) - { - return - Row0 == other.Row0 && - Row1 == other.Row1 && - Row2 == other.Row2; - } - - #endregion - } -} \ No newline at end of file diff --git a/OpenTK/Math/Matrix3d.cs b/OpenTK/Math/Matrix3d.cs deleted file mode 100644 index 757c36ca..00000000 --- a/OpenTK/Math/Matrix3d.cs +++ /dev/null @@ -1,965 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - */ -#endregion - -using System; -using System.Runtime.InteropServices; - -namespace OpenTK -{ - /// - /// Represents a 3x3 matrix containing 3D rotation and scale with double-precision components. - /// - [Serializable] - [StructLayout(LayoutKind.Sequential)] - public struct Matrix3d : IEquatable - { - #region Fields - - /// - /// First row of the matrix. - /// - public Vector3d Row0; - - /// - /// Second row of the matrix. 
- /// - public Vector3d Row1; - - /// - /// Third row of the matrix. - /// - public Vector3d Row2; - - /// - /// The identity matrix. - /// - public static Matrix3d Identity = new Matrix3d(Vector3d.UnitX, Vector3d.UnitY, Vector3d.UnitZ); - - #endregion - - #region Constructors - - /// - /// Constructs a new instance. - /// - /// Top row of the matrix - /// Second row of the matrix - /// Bottom row of the matrix - public Matrix3d(Vector3d row0, Vector3d row1, Vector3d row2) - { - Row0 = row0; - Row1 = row1; - Row2 = row2; - } - - /// - /// Constructs a new instance. - /// - /// First item of the first row of the matrix. - /// Second item of the first row of the matrix. - /// Third item of the first row of the matrix. - /// First item of the second row of the matrix. - /// Second item of the second row of the matrix. - /// Third item of the second row of the matrix. - /// First item of the third row of the matrix. - /// Second item of the third row of the matrix. - /// Third item of the third row of the matrix. - public Matrix3d( - double m00, double m01, double m02, - double m10, double m11, double m12, - double m20, double m21, double m22) - { - Row0 = new Vector3d(m00, m01, m02); - Row1 = new Vector3d(m10, m11, m12); - Row2 = new Vector3d(m20, m21, m22); - } - - /// - /// Constructs a new instance. - /// - /// A Matrix4d to take the upper-left 3x3 from. - public Matrix3d(Matrix4d matrix) - { - Row0 = matrix.Row0.Xyz; - Row1 = matrix.Row1.Xyz; - Row2 = matrix.Row2.Xyz; - } - - #endregion - - #region Public Members - - #region Properties - - /// - /// Gets the determinant of this matrix. - /// - public double Determinant - { - get - { - double m11 = Row0.X, m12 = Row0.Y, m13 = Row0.Z, - m21 = Row1.X, m22 = Row1.Y, m23 = Row1.Z, - m31 = Row2.X, m32 = Row2.Y, m33 = Row2.Z; - - return - m11 * m22 * m33 + m12 * m23 * m31 + m13 * m21 * m32 - - m13 * m22 * m31 - m11 * m23 * m32 - m12 * m21 * m33; - } - } - - /// - /// Gets the first column of this matrix. - /// - public Vector3d Column0 - { - get { return new Vector3d(Row0.X, Row1.X, Row2.X); } - } - - /// - /// Gets the second column of this matrix. - /// - public Vector3d Column1 - { - get { return new Vector3d(Row0.Y, Row1.Y, Row2.Y); } - } - - /// - /// Gets the third column of this matrix. - /// - public Vector3d Column2 - { - get { return new Vector3d(Row0.Z, Row1.Z, Row2.Z); } - } - - /// - /// Gets or sets the value at row 1, column 1 of this instance. - /// - public double M11 { get { return Row0.X; } set { Row0.X = value; } } - - /// - /// Gets or sets the value at row 1, column 2 of this instance. - /// - public double M12 { get { return Row0.Y; } set { Row0.Y = value; } } - - /// - /// Gets or sets the value at row 1, column 3 of this instance. - /// - public double M13 { get { return Row0.Z; } set { Row0.Z = value; } } - - /// - /// Gets or sets the value at row 2, column 1 of this instance. - /// - public double M21 { get { return Row1.X; } set { Row1.X = value; } } - - /// - /// Gets or sets the value at row 2, column 2 of this instance. - /// - public double M22 { get { return Row1.Y; } set { Row1.Y = value; } } - - /// - /// Gets or sets the value at row 2, column 3 of this instance. - /// - public double M23 { get { return Row1.Z; } set { Row1.Z = value; } } - - /// - /// Gets or sets the value at row 3, column 1 of this instance. - /// - public double M31 { get { return Row2.X; } set { Row2.X = value; } } - - /// - /// Gets or sets the value at row 3, column 2 of this instance. 
- /// - public double M32 { get { return Row2.Y; } set { Row2.Y = value; } } - - /// - /// Gets or sets the value at row 3, column 3 of this instance. - /// - public double M33 { get { return Row2.Z; } set { Row2.Z = value; } } - - /// - /// Gets or sets the values along the main diagonal of the matrix. - /// - public Vector3d Diagonal - { - get - { - return new Vector3d(Row0.X, Row1.Y, Row2.Z); - } - set - { - Row0.X = value.X; - Row1.Y = value.Y; - Row2.Z = value.Z; - } - } - - /// - /// Gets the trace of the matrix, the sum of the values along the diagonal. - /// - public double Trace { get { return Row0.X + Row1.Y + Row2.Z; } } - - #endregion - - #region Indexers - - /// - /// Gets or sets the value at a specified row and column. - /// - public double this[int rowIndex, int columnIndex] - { - get - { - if (rowIndex == 0) return Row0[columnIndex]; - else if (rowIndex == 1) return Row1[columnIndex]; - else if (rowIndex == 2) return Row2[columnIndex]; - throw new IndexOutOfRangeException("You tried to access this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - set - { - if (rowIndex == 0) Row0[columnIndex] = value; - else if (rowIndex == 1) Row1[columnIndex] = value; - else if (rowIndex == 2) Row2[columnIndex] = value; - else throw new IndexOutOfRangeException("You tried to set this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - } - - #endregion - - #region Instance - - #region public void Invert() - - /// - /// Converts this instance into its inverse. - /// - public void Invert() - { - this = Matrix3d.Invert(this); - } - - #endregion - - #region public void Transpose() - - /// - /// Converts this instance into its transpose. - /// - public void Transpose() - { - this = Matrix3d.Transpose(this); - } - - #endregion - - /// - /// Returns a normalised copy of this instance. - /// - public Matrix3d Normalized() - { - Matrix3d m = this; - m.Normalize(); - return m; - } - - /// - /// Divides each element in the Matrix by the . - /// - public void Normalize() - { - var determinant = this.Determinant; - Row0 /= determinant; - Row1 /= determinant; - Row2 /= determinant; - } - - /// - /// Returns an inverted copy of this instance. - /// - public Matrix3d Inverted() - { - Matrix3d m = this; - if (m.Determinant != 0) - m.Invert(); - return m; - } - - - /// - /// Returns a copy of this Matrix3 without scale. - /// - public Matrix3d ClearScale() - { - Matrix3d m = this; - m.Row0 = m.Row0.Normalized(); - m.Row1 = m.Row1.Normalized(); - m.Row2 = m.Row2.Normalized(); - return m; - } - /// - /// Returns a copy of this Matrix3 without rotation. - /// - public Matrix3d ClearRotation() - { - Matrix3d m = this; - m.Row0 = new Vector3d(m.Row0.Length, 0, 0); - m.Row1 = new Vector3d(0, m.Row1.Length, 0); - m.Row2 = new Vector3d(0, 0, m.Row2.Length); - return m; - } - - /// - /// Returns the scale component of this instance. - /// - public Vector3d ExtractScale() { return new Vector3d(Row0.Length, Row1.Length, Row2.Length); } - - /// - /// Returns the rotation component of this instance. Quite slow. - /// - /// Whether the method should row-normalise (i.e. remove scale from) the Matrix. Pass false if you know it's already normalised. 
- public Quaterniond ExtractRotation(bool row_normalise = true) - { - var row0 = Row0; - var row1 = Row1; - var row2 = Row2; - - if (row_normalise) - { - row0 = row0.Normalized(); - row1 = row1.Normalized(); - row2 = row2.Normalized(); - } - - // code below adapted from Blender - - Quaterniond q = new Quaterniond(); - double trace = 0.25 * (row0[0] + row1[1] + row2[2] + 1.0); - - if (trace > 0) - { - double sq = Math.Sqrt(trace); - - q.W = sq; - sq = 1.0 / (4.0 * sq); - q.X = (row1[2] - row2[1]) * sq; - q.Y = (row2[0] - row0[2]) * sq; - q.Z = (row0[1] - row1[0]) * sq; - } - else if (row0[0] > row1[1] && row0[0] > row2[2]) - { - double sq = 2.0 * Math.Sqrt(1.0 + row0[0] - row1[1] - row2[2]); - - q.X = 0.25 * sq; - sq = 1.0 / sq; - q.W = (row2[1] - row1[2]) * sq; - q.Y = (row1[0] + row0[1]) * sq; - q.Z = (row2[0] + row0[2]) * sq; - } - else if (row1[1] > row2[2]) - { - double sq = 2.0 * Math.Sqrt(1.0 + row1[1] - row0[0] - row2[2]); - - q.Y = 0.25 * sq; - sq = 1.0 / sq; - q.W = (row2[0] - row0[2]) * sq; - q.X = (row1[0] + row0[1]) * sq; - q.Z = (row2[1] + row1[2]) * sq; - } - else - { - double sq = 2.0 * Math.Sqrt(1.0 + row2[2] - row0[0] - row1[1]); - - q.Z = 0.25 * sq; - sq = 1.0 / sq; - q.W = (row1[0] - row0[1]) * sq; - q.X = (row2[0] + row0[2]) * sq; - q.Y = (row2[1] + row1[2]) * sq; - } - - q.Normalize(); - return q; - } - - #endregion - - #region Static - - #region CreateFromAxisAngle - - /// - /// Build a rotation matrix from the specified axis/angle rotation. - /// - /// The axis to rotate about. - /// Angle in radians to rotate counter-clockwise (looking in the direction of the given axis). - /// A matrix instance. - public static void CreateFromAxisAngle(Vector3d axis, double angle, out Matrix3d result) - { - //normalize and create a local copy of the vector. - axis.Normalize(); - double axisX = axis.X, axisY = axis.Y, axisZ = axis.Z; - - //calculate angles - double cos = System.Math.Cos(-angle); - double sin = System.Math.Sin(-angle); - double t = 1.0f - cos; - - //do the conversion math once - double tXX = t * axisX * axisX, - tXY = t * axisX * axisY, - tXZ = t * axisX * axisZ, - tYY = t * axisY * axisY, - tYZ = t * axisY * axisZ, - tZZ = t * axisZ * axisZ; - - double sinX = sin * axisX, - sinY = sin * axisY, - sinZ = sin * axisZ; - - result.Row0.X = tXX + cos; - result.Row0.Y = tXY - sinZ; - result.Row0.Z = tXZ + sinY; - result.Row1.X = tXY + sinZ; - result.Row1.Y = tYY + cos; - result.Row1.Z = tYZ - sinX; - result.Row2.X = tXZ - sinY; - result.Row2.Y = tYZ + sinX; - result.Row2.Z = tZZ + cos; - } - - /// - /// Build a rotation matrix from the specified axis/angle rotation. - /// - /// The axis to rotate about. - /// Angle in radians to rotate counter-clockwise (looking in the direction of the given axis). - /// A matrix instance. - public static Matrix3d CreateFromAxisAngle(Vector3d axis, double angle) - { - Matrix3d result; - CreateFromAxisAngle(axis, angle, out result); - return result; - } - - #endregion - - #region CreateFromQuaternion - - /// - /// Build a rotation matrix from the specified quaternion. - /// - /// Quaternion to translate. - /// Matrix result. - public static void CreateFromQuaternion(ref Quaterniond q, out Matrix3d result) - { - Vector3d axis; - double angle; - q.ToAxisAngle(out axis, out angle); - CreateFromAxisAngle(axis, angle, out result); - } - - /// - /// Build a rotation matrix from the specified quaternion. - /// - /// Quaternion to translate. - /// A matrix instance. 
- public static Matrix3d CreateFromQuaternion(Quaterniond q) - { - Matrix3d result; - CreateFromQuaternion(ref q, out result); - return result; - } - - #endregion - - #region CreateRotation[XYZ] - - /// - /// Builds a rotation matrix for a rotation around the x-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix3d instance. - public static void CreateRotationX(double angle, out Matrix3d result) - { - double cos = System.Math.Cos(angle); - double sin = System.Math.Sin(angle); - - result = Identity; - result.Row1.Y = cos; - result.Row1.Z = sin; - result.Row2.Y = -sin; - result.Row2.Z = cos; - } - - /// - /// Builds a rotation matrix for a rotation around the x-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix3d instance. - public static Matrix3d CreateRotationX(double angle) - { - Matrix3d result; - CreateRotationX(angle, out result); - return result; - } - - /// - /// Builds a rotation matrix for a rotation around the y-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix3d instance. - public static void CreateRotationY(double angle, out Matrix3d result) - { - double cos = System.Math.Cos(angle); - double sin = System.Math.Sin(angle); - - result = Identity; - result.Row0.X = cos; - result.Row0.Z = -sin; - result.Row2.X = sin; - result.Row2.Z = cos; - } - - /// - /// Builds a rotation matrix for a rotation around the y-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix3d instance. - public static Matrix3d CreateRotationY(double angle) - { - Matrix3d result; - CreateRotationY(angle, out result); - return result; - } - - /// - /// Builds a rotation matrix for a rotation around the z-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix3d instance. - public static void CreateRotationZ(double angle, out Matrix3d result) - { - double cos = System.Math.Cos(angle); - double sin = System.Math.Sin(angle); - - result = Identity; - result.Row0.X = cos; - result.Row0.Y = sin; - result.Row1.X = -sin; - result.Row1.Y = cos; - } - - /// - /// Builds a rotation matrix for a rotation around the z-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix3d instance. - public static Matrix3d CreateRotationZ(double angle) - { - Matrix3d result; - CreateRotationZ(angle, out result); - return result; - } - - #endregion - - #region CreateScale - - /// - /// Creates a scale matrix. - /// - /// Single scale factor for the x, y, and z axes. - /// A scale matrix. - public static Matrix3d CreateScale(double scale) - { - Matrix3d result; - CreateScale(scale, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factors for the x, y, and z axes. - /// A scale matrix. - public static Matrix3d CreateScale(Vector3d scale) - { - Matrix3d result; - CreateScale(ref scale, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factor for the x axis. - /// Scale factor for the y axis. - /// Scale factor for the z axis. - /// A scale matrix. - public static Matrix3d CreateScale(double x, double y, double z) - { - Matrix3d result; - CreateScale(x, y, z, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Single scale factor for the x, y, and z axes. - /// A scale matrix. 
- public static void CreateScale(double scale, out Matrix3d result) - { - result = Identity; - result.Row0.X = scale; - result.Row1.Y = scale; - result.Row2.Z = scale; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factors for the x, y, and z axes. - /// A scale matrix. - public static void CreateScale(ref Vector3d scale, out Matrix3d result) - { - result = Identity; - result.Row0.X = scale.X; - result.Row1.Y = scale.Y; - result.Row2.Z = scale.Z; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factor for the x axis. - /// Scale factor for the y axis. - /// Scale factor for the z axis. - /// A scale matrix. - public static void CreateScale(double x, double y, double z, out Matrix3d result) - { - result = Identity; - result.Row0.X = x; - result.Row1.Y = y; - result.Row2.Z = z; - } - - #endregion - - #region Multiply Functions - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication - public static Matrix3d Mult(Matrix3d left, Matrix3d right) - { - Matrix3d result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication - public static void Mult(ref Matrix3d left, ref Matrix3d right, out Matrix3d result) - { - double lM11 = left.Row0.X, lM12 = left.Row0.Y, lM13 = left.Row0.Z, - lM21 = left.Row1.X, lM22 = left.Row1.Y, lM23 = left.Row1.Z, - lM31 = left.Row2.X, lM32 = left.Row2.Y, lM33 = left.Row2.Z, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z, - rM31 = right.Row2.X, rM32 = right.Row2.Y, rM33 = right.Row2.Z; - - result.Row0.X = ((lM11 * rM11) + (lM12 * rM21)) + (lM13 * rM31); - result.Row0.Y = ((lM11 * rM12) + (lM12 * rM22)) + (lM13 * rM32); - result.Row0.Z = ((lM11 * rM13) + (lM12 * rM23)) + (lM13 * rM33); - result.Row1.X = ((lM21 * rM11) + (lM22 * rM21)) + (lM23 * rM31); - result.Row1.Y = ((lM21 * rM12) + (lM22 * rM22)) + (lM23 * rM32); - result.Row1.Z = ((lM21 * rM13) + (lM22 * rM23)) + (lM23 * rM33); - result.Row2.X = ((lM31 * rM11) + (lM32 * rM21)) + (lM33 * rM31); - result.Row2.Y = ((lM31 * rM12) + (lM32 * rM22)) + (lM33 * rM32); - result.Row2.Z = ((lM31 * rM13) + (lM32 * rM23)) + (lM33 * rM33); - } - - #endregion - - #region Invert Functions - - /// - /// Calculate the inverse of the given matrix - /// - /// The matrix to invert - /// The inverse of the given matrix if it has one, or the input if it is singular - /// Thrown if the Matrix3d is singular. 
- public static void Invert(ref Matrix3d mat, out Matrix3d result) - { - int[] colIdx = { 0, 0, 0 }; - int[] rowIdx = { 0, 0, 0 }; - int[] pivotIdx = { -1, -1, -1 }; - - double[,] inverse = {{mat.Row0.X, mat.Row0.Y, mat.Row0.Z}, - {mat.Row1.X, mat.Row1.Y, mat.Row1.Z}, - {mat.Row2.X, mat.Row2.Y, mat.Row2.Z}}; - - int icol = 0; - int irow = 0; - for (int i = 0; i < 3; i++) - { - double maxPivot = 0.0; - for (int j = 0; j < 3; j++) - { - if (pivotIdx[j] != 0) - { - for (int k = 0; k < 3; ++k) - { - if (pivotIdx[k] == -1) - { - double absVal = System.Math.Abs(inverse[j, k]); - if (absVal > maxPivot) - { - maxPivot = absVal; - irow = j; - icol = k; - } - } - else if (pivotIdx[k] > 0) - { - result = mat; - return; - } - } - } - } - - ++(pivotIdx[icol]); - - if (irow != icol) - { - for (int k = 0; k < 3; ++k) - { - double f = inverse[irow, k]; - inverse[irow, k] = inverse[icol, k]; - inverse[icol, k] = f; - } - } - - rowIdx[i] = irow; - colIdx[i] = icol; - - double pivot = inverse[icol, icol]; - - if (pivot == 0.0) - { - throw new InvalidOperationException("Matrix is singular and cannot be inverted."); - } - - double oneOverPivot = 1.0 / pivot; - inverse[icol, icol] = 1.0; - for (int k = 0; k < 3; ++k) - inverse[icol, k] *= oneOverPivot; - - for (int j = 0; j < 3; ++j) - { - if (icol != j) - { - double f = inverse[j, icol]; - inverse[j, icol] = 0.0; - for (int k = 0; k < 3; ++k) - inverse[j, k] -= inverse[icol, k] * f; - } - } - } - - for (int j = 2; j >= 0; --j) - { - int ir = rowIdx[j]; - int ic = colIdx[j]; - for (int k = 0; k < 3; ++k) - { - double f = inverse[k, ir]; - inverse[k, ir] = inverse[k, ic]; - inverse[k, ic] = f; - } - } - - result.Row0.X = inverse[0, 0]; - result.Row0.Y = inverse[0, 1]; - result.Row0.Z = inverse[0, 2]; - result.Row1.X = inverse[1, 0]; - result.Row1.Y = inverse[1, 1]; - result.Row1.Z = inverse[1, 2]; - result.Row2.X = inverse[2, 0]; - result.Row2.Y = inverse[2, 1]; - result.Row2.Z = inverse[2, 2]; - } - - /// - /// Calculate the inverse of the given matrix - /// - /// The matrix to invert - /// The inverse of the given matrix if it has one, or the input if it is singular - /// Thrown if the Matrix4 is singular. - public static Matrix3d Invert(Matrix3d mat) - { - Matrix3d result; - Invert(ref mat, out result); - return result; - } - - #endregion - - #region Transpose - - /// - /// Calculate the transpose of the given matrix - /// - /// The matrix to transpose - /// The transpose of the given matrix - public static Matrix3d Transpose(Matrix3d mat) - { - return new Matrix3d(mat.Column0, mat.Column1, mat.Column2); - } - - /// - /// Calculate the transpose of the given matrix - /// - /// The matrix to transpose - /// The result of the calculation - public static void Transpose(ref Matrix3d mat, out Matrix3d result) - { - result.Row0 = mat.Column0; - result.Row1 = mat.Column1; - result.Row2 = mat.Column2; - } - - #endregion - - #endregion - - #region Operators - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix3d which holds the result of the multiplication - public static Matrix3d operator *(Matrix3d left, Matrix3d right) - { - return Matrix3d.Mult(left, right); - } - - /// - /// Compares two instances for equality. - /// - /// The first instance. - /// The second instance. - /// True, if left equals right; false otherwise. - public static bool operator ==(Matrix3d left, Matrix3d right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for inequality. - /// - /// The first instance. 
- /// The second instance. - /// True, if left does not equal right; false otherwise. - public static bool operator !=(Matrix3d left, Matrix3d right) - { - return !left.Equals(right); - } - - #endregion - - #region Overrides - - #region public override string ToString() - - /// - /// Returns a System.String that represents the current Matrix3d. - /// - /// The string representation of the matrix. - public override string ToString() - { - return String.Format("{0}\n{1}\n{2}", Row0, Row1, Row2); - } - - #endregion - - #region public override int GetHashCode() - - /// - /// Returns the hashcode for this instance. - /// - /// A System.Int32 containing the unique hashcode for this instance. - public override int GetHashCode() - { - return Row0.GetHashCode() ^ Row1.GetHashCode() ^ Row2.GetHashCode(); - } - - #endregion - - #region public override bool Equals(object obj) - - /// - /// Indicates whether this instance and a specified object are equal. - /// - /// The object to compare to. - /// True if the instances are equal; false otherwise. - public override bool Equals(object obj) - { - if (!(obj is Matrix3d)) - return false; - - return this.Equals((Matrix3d)obj); - } - - #endregion - - #endregion - - #endregion - - #region IEquatable Members - - /// Indicates whether the current matrix is equal to another matrix. - /// A matrix to compare with this matrix. - /// true if the current matrix is equal to the matrix parameter; otherwise, false. - public bool Equals(Matrix3d other) - { - return - Row0 == other.Row0 && - Row1 == other.Row1 && - Row2 == other.Row2; - } - - #endregion - } -} diff --git a/OpenTK/Math/Matrix3x2.cs b/OpenTK/Math/Matrix3x2.cs deleted file mode 100644 index 639f093a..00000000 --- a/OpenTK/Math/Matrix3x2.cs +++ /dev/null @@ -1,736 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - */ -#endregion - -using System; -using System.Runtime.InteropServices; - -namespace OpenTK -{ - /// - /// Represents a 3x2 matrix. - /// - public struct Matrix3x2 : IEquatable - { - #region Fields - - /// - /// Top row of the matrix. - /// - public Vector2 Row0; - - /// - /// Second row of the matrix. - /// - public Vector2 Row1; - - /// - /// Bottom row of the matrix. - /// - public Vector2 Row2; - - /// - /// The zero matrix. - /// - public static readonly Matrix3x2 Zero = new Matrix3x2(Vector2.Zero, Vector2.Zero, Vector2.Zero); - - #endregion - - #region Constructors - - /// - /// Constructs a new instance. 
- /// - /// Top row of the matrix. - /// Second row of the matrix. - /// Bottom row of the matrix. - public Matrix3x2(Vector2 row0, Vector2 row1, Vector2 row2) - { - Row0 = row0; - Row1 = row1; - Row2 = row2; - } - - /// - /// Constructs a new instance - /// - /// First item of the first row of the matrix. - /// Second item of the first row of the matrix. - /// First item of the second row of the matrix. - /// Second item of the second row of the matrix. - /// First item of the third row of the matrix. - /// Second item of the third row of the matrix. - public Matrix3x2( - float m00, float m01, - float m10, float m11, - float m20, float m21) - { - Row0 = new Vector2(m00, m01); - Row1 = new Vector2(m10, m11); - Row2 = new Vector2(m20, m21); - } - - #endregion - - #region Public Members - - #region Properties - - /// - /// Gets or sets the first column of this matrix. - /// - public Vector3 Column0 - { - get { return new Vector3(Row0.X, Row1.X, Row2.X); } - set { Row0.X = value.X; Row1.X = value.Y; Row2.X = value.Z; } - } - - /// - /// Gets or sets the second column of this matrix. - /// - public Vector3 Column1 - { - get { return new Vector3(Row0.Y, Row1.Y, Row2.Y); } - set { Row0.Y = value.X; Row1.Y = value.Y; Row2.Y = value.Z; } - } - - /// - /// Gets or sets the value at row 1, column 1 of this instance. - /// - public float M11 { get { return Row0.X; } set { Row0.X = value; } } - - /// - /// Gets or sets the value at row 1, column 2 of this instance. - /// - public float M12 { get { return Row0.Y; } set { Row0.Y = value; } } - - /// - /// Gets or sets the value at row 2, column 1 of this instance. - /// - public float M21 { get { return Row1.X; } set { Row1.X = value; } } - - /// - /// Gets or sets the value at row 2, column 2 of this instance. - /// - public float M22 { get { return Row1.Y; } set { Row1.Y = value; } } - - /// - /// Gets or sets the value at row 3, column 1 of this instance. - /// - public float M31 { get { return Row2.X; } set { Row2.X = value; } } - - /// - /// Gets or sets the value at row 3, column 2 of this instance. - /// - public float M32 { get { return Row2.Y; } set { Row2.Y = value; } } - - /// - /// Gets or sets the values along the main diagonal of the matrix. - /// - public Vector2 Diagonal - { - get - { - return new Vector2(Row0.X, Row1.Y); - } - set - { - Row0.X = value.X; - Row1.Y = value.Y; - } - } - - /// - /// Gets the trace of the matrix, the sum of the values along the diagonal. - /// - public float Trace { get { return Row0.X + Row1.Y; } } - - #endregion - - #region Indexers - - /// - /// Gets or sets the value at a specified row and column. - /// - public float this[int rowIndex, int columnIndex] - { - get - { - if (rowIndex == 0) return Row0[columnIndex]; - else if (rowIndex == 1) return Row1[columnIndex]; - else if (rowIndex == 2) return Row2[columnIndex]; - throw new IndexOutOfRangeException("You tried to access this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - set - { - if (rowIndex == 0) Row0[columnIndex] = value; - else if (rowIndex == 1) Row1[columnIndex] = value; - else if (rowIndex == 2) Row2[columnIndex] = value; - else throw new IndexOutOfRangeException("You tried to set this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - } - - #endregion - - #region Instance - #endregion - - #region Static - - #region CreateRotation - - /// - /// Builds a rotation matrix. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix3x2 instance. 
- public static void CreateRotation(float angle, out Matrix3x2 result) - { - float cos = (float)System.Math.Cos(angle); - float sin = (float)System.Math.Sin(angle); - - result.Row0.X = cos; - result.Row0.Y = sin; - result.Row1.X = -sin; - result.Row1.Y = cos; - result.Row2.X = 0; - result.Row2.Y = 0; - } - - /// - /// Builds a rotation matrix. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix3x2 instance. - public static Matrix3x2 CreateRotation(float angle) - { - Matrix3x2 result; - CreateRotation(angle, out result); - return result; - } - - #endregion - - #region CreateScale - - /// - /// Creates a scale matrix. - /// - /// Single scale factor for the x, y, and z axes. - /// A scale matrix. - public static void CreateScale(float scale, out Matrix3x2 result) - { - result.Row0.X = scale; - result.Row0.Y = 0; - result.Row1.X = 0; - result.Row1.Y = scale; - result.Row2.X = 0; - result.Row2.Y = 0; - } - - /// - /// Creates a scale matrix. - /// - /// Single scale factor for the x and y axes. - /// A scale matrix. - public static Matrix3x2 CreateScale(float scale) - { - Matrix3x2 result; - CreateScale(scale, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factors for the x and y axes. - /// A scale matrix. - public static void CreateScale(Vector2 scale, out Matrix3x2 result) - { - result.Row0.X = scale.X; - result.Row0.Y = 0; - result.Row1.X = 0; - result.Row1.Y = scale.Y; - result.Row2.X = 0; - result.Row2.Y = 0; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factors for the x and y axes. - /// A scale matrix. - public static Matrix3x2 CreateScale(Vector2 scale) - { - Matrix3x2 result; - CreateScale(scale, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factor for the x axis. - /// Scale factor for the y axis. - /// A scale matrix. - public static void CreateScale(float x, float y, out Matrix3x2 result) - { - result.Row0.X = x; - result.Row0.Y = 0; - result.Row1.X = 0; - result.Row1.Y = y; - result.Row2.X = 0; - result.Row2.Y = 0; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factor for the x axis. - /// Scale factor for the y axis. - /// A scale matrix. - public static Matrix3x2 CreateScale(float x, float y) - { - Matrix3x2 result; - CreateScale(x, y, out result); - return result; - } - - #endregion - - #region Multiply Functions - - /// - /// Multiplies and instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static void Mult(ref Matrix3x2 left, float right, out Matrix3x2 result) - { - result.Row0.X = left.Row0.X * right; - result.Row0.Y = left.Row0.Y * right; - result.Row1.X = left.Row1.X * right; - result.Row1.Y = left.Row1.Y * right; - result.Row2.X = left.Row2.X * right; - result.Row2.Y = left.Row2.Y * right; - } - - /// - /// Multiplies and instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix3x2 Mult(Matrix3x2 left, float right) - { - Matrix3x2 result; - Mult(ref left, right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. 
- public static void Mult(ref Matrix3x2 left, ref Matrix2 right, out Matrix3x2 result) - { - float lM11 = left.Row0.X, lM12 = left.Row0.Y, - lM21 = left.Row1.X, lM22 = left.Row1.Y, - lM31 = left.Row2.X, lM32 = left.Row2.Y, - rM11 = right.Row0.X, rM12 = right.Row0.Y, - rM21 = right.Row1.X, rM22 = right.Row1.Y; - - result.Row0.X = (lM11 * rM11) + (lM12 * rM21); - result.Row0.Y = (lM11 * rM12) + (lM12 * rM22); - result.Row1.X = (lM21 * rM11) + (lM22 * rM21); - result.Row1.Y = (lM21 * rM12) + (lM22 * rM22); - result.Row2.X = (lM31 * rM11) + (lM32 * rM21); - result.Row2.Y = (lM31 * rM12) + (lM32 * rM22); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix3x2 Mult(Matrix3x2 left, Matrix2 right) - { - Matrix3x2 result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static void Mult(ref Matrix3x2 left, ref Matrix2x3 right, out Matrix3 result) - { - float lM11 = left.Row0.X, lM12 = left.Row0.Y, - lM21 = left.Row1.X, lM22 = left.Row1.Y, - lM31 = left.Row2.X, lM32 = left.Row2.Y, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z; - - result.Row0.X = (lM11 * rM11) + (lM12 * rM21); - result.Row0.Y = (lM11 * rM12) + (lM12 * rM22); - result.Row0.Z = (lM11 * rM13) + (lM12 * rM23); - result.Row1.X = (lM21 * rM11) + (lM22 * rM21); - result.Row1.Y = (lM21 * rM12) + (lM22 * rM22); - result.Row1.Z = (lM21 * rM13) + (lM22 * rM23); - result.Row2.X = (lM31 * rM11) + (lM32 * rM21); - result.Row2.Y = (lM31 * rM12) + (lM32 * rM22); - result.Row2.Z = (lM31 * rM13) + (lM32 * rM23); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix3 Mult(Matrix3x2 left, Matrix2x3 right) - { - Matrix3 result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static void Mult(ref Matrix3x2 left, ref Matrix2x4 right, out Matrix3x4 result) - { - float lM11 = left.Row0.X, lM12 = left.Row0.Y, - lM21 = left.Row1.X, lM22 = left.Row1.Y, - lM31 = left.Row2.X, lM32 = left.Row2.Y, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, rM14 = right.Row0.W, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z, rM24 = right.Row1.W; - - result.Row0.X = (lM11 * rM11) + (lM12 * rM21); - result.Row0.Y = (lM11 * rM12) + (lM12 * rM22); - result.Row0.Z = (lM11 * rM13) + (lM12 * rM23); - result.Row0.W = (lM11 * rM14) + (lM12 * rM24); - result.Row1.X = (lM21 * rM11) + (lM22 * rM21); - result.Row1.Y = (lM21 * rM12) + (lM22 * rM22); - result.Row1.Z = (lM21 * rM13) + (lM22 * rM23); - result.Row1.W = (lM21 * rM14) + (lM22 * rM24); - result.Row2.X = (lM31 * rM11) + (lM32 * rM21); - result.Row2.Y = (lM31 * rM12) + (lM32 * rM22); - result.Row2.Z = (lM31 * rM13) + (lM32 * rM23); - result.Row2.W = (lM31 * rM14) + (lM32 * rM24); - } - - /// - /// Multiplies two instances. 
- /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix3x4 Mult(Matrix3x2 left, Matrix2x4 right) - { - Matrix3x4 result; - Mult(ref left, ref right, out result); - return result; - } - - #endregion - - #region Add - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. - public static void Add(ref Matrix3x2 left, ref Matrix3x2 right, out Matrix3x2 result) - { - result.Row0.X = left.Row0.X + right.Row0.X; - result.Row0.Y = left.Row0.Y + right.Row0.Y; - result.Row1.X = left.Row1.X + right.Row1.X; - result.Row1.Y = left.Row1.Y + right.Row1.Y; - result.Row2.X = left.Row2.X + right.Row2.X; - result.Row2.Y = left.Row2.Y + right.Row2.Y; - } - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. - public static Matrix3x2 Add(Matrix3x2 left, Matrix3x2 right) - { - Matrix3x2 result; - Add(ref left, ref right, out result); - return result; - } - - #endregion - - #region Subtract - - /// - /// Subtracts two instances. - /// - /// The left operand of the subtraction. - /// The right operand of the subtraction. - /// A new instance that is the result of the subtraction. - public static void Subtract(ref Matrix3x2 left, ref Matrix3x2 right, out Matrix3x2 result) - { - result.Row0.X = left.Row0.X - right.Row0.X; - result.Row0.Y = left.Row0.Y - right.Row0.Y; - result.Row1.X = left.Row1.X - right.Row1.X; - result.Row1.Y = left.Row1.Y - right.Row1.Y; - result.Row2.X = left.Row2.X - right.Row2.X; - result.Row2.Y = left.Row2.Y - right.Row2.Y; - } - - /// - /// Subtracts two instances. - /// - /// The left operand of the subtraction. - /// The right operand of the subtraction. - /// A new instance that is the result of the subtraction. - public static Matrix3x2 Subtract(Matrix3x2 left, Matrix3x2 right) - { - Matrix3x2 result; - Subtract(ref left, ref right, out result); - return result; - } - - #endregion - - #region Transpose - - /// - /// Calculate the transpose of the given matrix. - /// - /// The matrix to transpose. - /// The transpose of the given matrix. - public static void Transpose(ref Matrix3x2 mat, out Matrix2x3 result) - { - result.Row0.X = mat.Row0.X; - result.Row0.Y = mat.Row1.X; - result.Row0.Z = mat.Row2.X; - result.Row1.X = mat.Row0.Y; - result.Row1.Y = mat.Row1.Y; - result.Row1.Z = mat.Row2.Y; - } - - /// - /// Calculate the transpose of the given matrix. - /// - /// The matrix to transpose. - /// The transpose of the given matrix. - public static Matrix2x3 Transpose(Matrix3x2 mat) - { - Matrix2x3 result; - Transpose(ref mat, out result); - return result; - } - - #endregion - - #endregion - - #region Operators - - /// - /// Scalar multiplication. - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix3x2 which holds the result of the multiplication - public static Matrix3x2 operator *(float left, Matrix3x2 right) - { - return Mult(right, left); - } - - /// - /// Scalar multiplication. 
- /// - /// left-hand operand - /// right-hand operand - /// A new Matrix3x2 which holds the result of the multiplication - public static Matrix3x2 operator *(Matrix3x2 left, float right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix3x2 which holds the result of the multiplication - public static Matrix3x2 operator *(Matrix3x2 left, Matrix2 right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix3 which holds the result of the multiplication - public static Matrix3 operator *(Matrix3x2 left, Matrix2x3 right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix3x4 which holds the result of the multiplication - public static Matrix3x4 operator *(Matrix3x2 left, Matrix2x4 right) - { - return Mult(left, right); - } - - /// - /// Matrix addition - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix3x2 which holds the result of the addition - public static Matrix3x2 operator +(Matrix3x2 left, Matrix3x2 right) - { - return Add(left, right); - } - - /// - /// Matrix subtraction - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix3x2 which holds the result of the subtraction - public static Matrix3x2 operator -(Matrix3x2 left, Matrix3x2 right) - { - return Subtract(left, right); - } - - /// - /// Compares two instances for equality. - /// - /// The first instance. - /// The second instance. - /// True, if left equals right; false otherwise. - public static bool operator ==(Matrix3x2 left, Matrix3x2 right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for inequality. - /// - /// The first instance. - /// The second instance. - /// True, if left does not equal right; false otherwise. - public static bool operator !=(Matrix3x2 left, Matrix3x2 right) - { - return !left.Equals(right); - } - - #endregion - - #region Overrides - - #region public override string ToString() - - /// - /// Returns a System.String that represents the current Matrix3d. - /// - /// The string representation of the matrix. - public override string ToString() - { - return String.Format("{0}\n{1}\n{2}", Row0, Row1, Row2); - } - - #endregion - - #region public override int GetHashCode() - - /// - /// Returns the hashcode for this instance. - /// - /// A System.Int32 containing the unique hashcode for this instance. - public override int GetHashCode() - { - return Row0.GetHashCode() ^ Row1.GetHashCode() ^ Row2.GetHashCode(); - } - - #endregion - - #region public override bool Equals(object obj) - - /// - /// Indicates whether this instance and a specified object are equal. - /// - /// The object to compare to. - /// True if the instances are equal; false otherwise. - public override bool Equals(object obj) - { - if (!(obj is Matrix3x2)) - return false; - - return this.Equals((Matrix3x2)obj); - } - - #endregion - - #endregion - - #endregion - - #region IEquatable Members - - /// - /// Indicates whether the current matrix is equal to another matrix. - /// - /// An matrix to compare with this matrix. - /// true if the current matrix is equal to the matrix parameter; otherwise, false. 
- public bool Equals(Matrix3x2 other) - { - return - Row0 == other.Row0 && - Row1 == other.Row1 && - Row2 == other.Row2; - } - - #endregion - } -} diff --git a/OpenTK/Math/Matrix3x2d.cs b/OpenTK/Math/Matrix3x2d.cs deleted file mode 100644 index 8f1804e9..00000000 --- a/OpenTK/Math/Matrix3x2d.cs +++ /dev/null @@ -1,736 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - */ -#endregion - -using System; -using System.Runtime.InteropServices; - -namespace OpenTK -{ - /// - /// Represents a 3x2 matrix. - /// - public struct Matrix3x2d : IEquatable - { - #region Fields - - /// - /// Top row of the matrix. - /// - public Vector2d Row0; - - /// - /// Second row of the matrix. - /// - public Vector2d Row1; - - /// - /// Bottom row of the matrix. - /// - public Vector2d Row2; - - /// - /// The zero matrix. - /// - public static readonly Matrix3x2d Zero = new Matrix3x2d(Vector2d.Zero, Vector2d.Zero, Vector2d.Zero); - - #endregion - - #region Constructors - - /// - /// Constructs a new instance. - /// - /// Top row of the matrix. - /// Second row of the matrix. - /// Bottom row of the matrix. - public Matrix3x2d(Vector2d row0, Vector2d row1, Vector2d row2) - { - Row0 = row0; - Row1 = row1; - Row2 = row2; - } - - /// - /// Constructs a new instance - /// - /// First item of the first row of the matrix. - /// Second item of the first row of the matrix. - /// First item of the second row of the matrix. - /// Second item of the second row of the matrix. - /// First item of the third row of the matrix. - /// Second item of the third row of the matrix. - public Matrix3x2d( - double m00, double m01, - double m10, double m11, - double m20, double m21) - { - Row0 = new Vector2d(m00, m01); - Row1 = new Vector2d(m10, m11); - Row2 = new Vector2d(m20, m21); - } - - #endregion - - #region Public Members - - #region Properties - - /// - /// Gets or sets the first column of this matrix. - /// - public Vector3d Column0 - { - get { return new Vector3d(Row0.X, Row1.X, Row2.X); } - set { Row0.X = value.X; Row1.X = value.Y; Row2.X = value.Z; } - } - - /// - /// Gets or sets the second column of this matrix. - /// - public Vector3d Column1 - { - get { return new Vector3d(Row0.Y, Row1.Y, Row2.Y); } - set { Row0.Y = value.X; Row1.Y = value.Y; Row2.Y = value.Z; } - } - - /// - /// Gets or sets the value at row 1, column 1 of this instance. 
- /// - public double M11 { get { return Row0.X; } set { Row0.X = value; } } - - /// - /// Gets or sets the value at row 1, column 2 of this instance. - /// - public double M12 { get { return Row0.Y; } set { Row0.Y = value; } } - - /// - /// Gets or sets the value at row 2, column 1 of this instance. - /// - public double M21 { get { return Row1.X; } set { Row1.X = value; } } - - /// - /// Gets or sets the value at row 2, column 2 of this instance. - /// - public double M22 { get { return Row1.Y; } set { Row1.Y = value; } } - - /// - /// Gets or sets the value at row 3, column 1 of this instance. - /// - public double M31 { get { return Row2.X; } set { Row2.X = value; } } - - /// - /// Gets or sets the value at row 3, column 2 of this instance. - /// - public double M32 { get { return Row2.Y; } set { Row2.Y = value; } } - - /// - /// Gets or sets the values along the main diagonal of the matrix. - /// - public Vector2d Diagonal - { - get - { - return new Vector2d(Row0.X, Row1.Y); - } - set - { - Row0.X = value.X; - Row1.Y = value.Y; - } - } - - /// - /// Gets the trace of the matrix, the sum of the values along the diagonal. - /// - public double Trace { get { return Row0.X + Row1.Y; } } - - #endregion - - #region Indexers - - /// - /// Gets or sets the value at a specified row and column. - /// - public double this[int rowIndex, int columnIndex] - { - get - { - if (rowIndex == 0) return Row0[columnIndex]; - else if (rowIndex == 1) return Row1[columnIndex]; - else if (rowIndex == 2) return Row2[columnIndex]; - throw new IndexOutOfRangeException("You tried to access this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - set - { - if (rowIndex == 0) Row0[columnIndex] = value; - else if (rowIndex == 1) Row1[columnIndex] = value; - else if (rowIndex == 2) Row2[columnIndex] = value; - else throw new IndexOutOfRangeException("You tried to set this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - } - - #endregion - - #region Instance - #endregion - - #region Static - - #region CreateRotation - - /// - /// Builds a rotation matrix. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix3x2d instance. - public static void CreateRotation(double angle, out Matrix3x2d result) - { - double cos = System.Math.Cos(angle); - double sin = System.Math.Sin(angle); - - result.Row0.X = cos; - result.Row0.Y = sin; - result.Row1.X = -sin; - result.Row1.Y = cos; - result.Row2.X = 0; - result.Row2.Y = 0; - } - - /// - /// Builds a rotation matrix. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix3x2d instance. - public static Matrix3x2d CreateRotation(double angle) - { - Matrix3x2d result; - CreateRotation(angle, out result); - return result; - } - - #endregion - - #region CreateScale - - /// - /// Creates a scale matrix. - /// - /// Single scale factor for the x, y, and z axes. - /// A scale matrix. - public static void CreateScale(double scale, out Matrix3x2d result) - { - result.Row0.X = scale; - result.Row0.Y = 0; - result.Row1.X = 0; - result.Row1.Y = scale; - result.Row2.X = 0; - result.Row2.Y = 0; - } - - /// - /// Creates a scale matrix. - /// - /// Single scale factor for the x and y axes. - /// A scale matrix. - public static Matrix3x2d CreateScale(double scale) - { - Matrix3x2d result; - CreateScale(scale, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factors for the x and y axes. - /// A scale matrix. 
- public static void CreateScale(Vector2d scale, out Matrix3x2d result) - { - result.Row0.X = scale.X; - result.Row0.Y = 0; - result.Row1.X = 0; - result.Row1.Y = scale.Y; - result.Row2.X = 0; - result.Row2.Y = 0; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factors for the x and y axes. - /// A scale matrix. - public static Matrix3x2d CreateScale(Vector2d scale) - { - Matrix3x2d result; - CreateScale(scale, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factor for the x axis. - /// Scale factor for the y axis. - /// A scale matrix. - public static void CreateScale(double x, double y, out Matrix3x2d result) - { - result.Row0.X = x; - result.Row0.Y = 0; - result.Row1.X = 0; - result.Row1.Y = y; - result.Row2.X = 0; - result.Row2.Y = 0; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factor for the x axis. - /// Scale factor for the y axis. - /// A scale matrix. - public static Matrix3x2d CreateScale(double x, double y) - { - Matrix3x2d result; - CreateScale(x, y, out result); - return result; - } - - #endregion - - #region Multiply Functions - - /// - /// Multiplies and instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static void Mult(ref Matrix3x2d left, double right, out Matrix3x2d result) - { - result.Row0.X = left.Row0.X * right; - result.Row0.Y = left.Row0.Y * right; - result.Row1.X = left.Row1.X * right; - result.Row1.Y = left.Row1.Y * right; - result.Row2.X = left.Row2.X * right; - result.Row2.Y = left.Row2.Y * right; - } - - /// - /// Multiplies and instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix3x2d Mult(Matrix3x2d left, double right) - { - Matrix3x2d result; - Mult(ref left, right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static void Mult(ref Matrix3x2d left, ref Matrix2d right, out Matrix3x2d result) - { - double lM11 = left.Row0.X, lM12 = left.Row0.Y, - lM21 = left.Row1.X, lM22 = left.Row1.Y, - lM31 = left.Row2.X, lM32 = left.Row2.Y, - rM11 = right.Row0.X, rM12 = right.Row0.Y, - rM21 = right.Row1.X, rM22 = right.Row1.Y; - - result.Row0.X = (lM11 * rM11) + (lM12 * rM21); - result.Row0.Y = (lM11 * rM12) + (lM12 * rM22); - result.Row1.X = (lM21 * rM11) + (lM22 * rM21); - result.Row1.Y = (lM21 * rM12) + (lM22 * rM22); - result.Row2.X = (lM31 * rM11) + (lM32 * rM21); - result.Row2.Y = (lM31 * rM12) + (lM32 * rM22); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix3x2d Mult(Matrix3x2d left, Matrix2d right) - { - Matrix3x2d result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. 
- public static void Mult(ref Matrix3x2d left, ref Matrix2x3d right, out Matrix3d result) - { - double lM11 = left.Row0.X, lM12 = left.Row0.Y, - lM21 = left.Row1.X, lM22 = left.Row1.Y, - lM31 = left.Row2.X, lM32 = left.Row2.Y, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z; - - result.Row0.X = (lM11 * rM11) + (lM12 * rM21); - result.Row0.Y = (lM11 * rM12) + (lM12 * rM22); - result.Row0.Z = (lM11 * rM13) + (lM12 * rM23); - result.Row1.X = (lM21 * rM11) + (lM22 * rM21); - result.Row1.Y = (lM21 * rM12) + (lM22 * rM22); - result.Row1.Z = (lM21 * rM13) + (lM22 * rM23); - result.Row2.X = (lM31 * rM11) + (lM32 * rM21); - result.Row2.Y = (lM31 * rM12) + (lM32 * rM22); - result.Row2.Z = (lM31 * rM13) + (lM32 * rM23); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix3d Mult(Matrix3x2d left, Matrix2x3d right) - { - Matrix3d result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static void Mult(ref Matrix3x2d left, ref Matrix2x4d right, out Matrix3x4d result) - { - double lM11 = left.Row0.X, lM12 = left.Row0.Y, - lM21 = left.Row1.X, lM22 = left.Row1.Y, - lM31 = left.Row2.X, lM32 = left.Row2.Y, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, rM14 = right.Row0.W, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z, rM24 = right.Row1.W; - - result.Row0.X = (lM11 * rM11) + (lM12 * rM21); - result.Row0.Y = (lM11 * rM12) + (lM12 * rM22); - result.Row0.Z = (lM11 * rM13) + (lM12 * rM23); - result.Row0.W = (lM11 * rM14) + (lM12 * rM24); - result.Row1.X = (lM21 * rM11) + (lM22 * rM21); - result.Row1.Y = (lM21 * rM12) + (lM22 * rM22); - result.Row1.Z = (lM21 * rM13) + (lM22 * rM23); - result.Row1.W = (lM21 * rM14) + (lM22 * rM24); - result.Row2.X = (lM31 * rM11) + (lM32 * rM21); - result.Row2.Y = (lM31 * rM12) + (lM32 * rM22); - result.Row2.Z = (lM31 * rM13) + (lM32 * rM23); - result.Row2.W = (lM31 * rM14) + (lM32 * rM24); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix3x4d Mult(Matrix3x2d left, Matrix2x4d right) - { - Matrix3x4d result; - Mult(ref left, ref right, out result); - return result; - } - - #endregion - - #region Add - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. - public static void Add(ref Matrix3x2d left, ref Matrix3x2d right, out Matrix3x2d result) - { - result.Row0.X = left.Row0.X + right.Row0.X; - result.Row0.Y = left.Row0.Y + right.Row0.Y; - result.Row1.X = left.Row1.X + right.Row1.X; - result.Row1.Y = left.Row1.Y + right.Row1.Y; - result.Row2.X = left.Row2.X + right.Row2.X; - result.Row2.Y = left.Row2.Y + right.Row2.Y; - } - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. 
- public static Matrix3x2d Add(Matrix3x2d left, Matrix3x2d right) - { - Matrix3x2d result; - Add(ref left, ref right, out result); - return result; - } - - #endregion - - #region Subtract - - /// - /// Subtracts two instances. - /// - /// The left operand of the subtraction. - /// The right operand of the subtraction. - /// A new instance that is the result of the subtraction. - public static void Subtract(ref Matrix3x2d left, ref Matrix3x2d right, out Matrix3x2d result) - { - result.Row0.X = left.Row0.X - right.Row0.X; - result.Row0.Y = left.Row0.Y - right.Row0.Y; - result.Row1.X = left.Row1.X - right.Row1.X; - result.Row1.Y = left.Row1.Y - right.Row1.Y; - result.Row2.X = left.Row2.X - right.Row2.X; - result.Row2.Y = left.Row2.Y - right.Row2.Y; - } - - /// - /// Subtracts two instances. - /// - /// The left operand of the subtraction. - /// The right operand of the subtraction. - /// A new instance that is the result of the subtraction. - public static Matrix3x2d Subtract(Matrix3x2d left, Matrix3x2d right) - { - Matrix3x2d result; - Subtract(ref left, ref right, out result); - return result; - } - - #endregion - - #region Transpose - - /// - /// Calculate the transpose of the given matrix. - /// - /// The matrix to transpose. - /// The transpose of the given matrix. - public static void Transpose(ref Matrix3x2d mat, out Matrix2x3d result) - { - result.Row0.X = mat.Row0.X; - result.Row0.Y = mat.Row1.X; - result.Row0.Z = mat.Row2.X; - result.Row1.X = mat.Row0.Y; - result.Row1.Y = mat.Row1.Y; - result.Row1.Z = mat.Row2.Y; - } - - /// - /// Calculate the transpose of the given matrix. - /// - /// The matrix to transpose. - /// The transpose of the given matrix. - public static Matrix2x3d Transpose(Matrix3x2d mat) - { - Matrix2x3d result; - Transpose(ref mat, out result); - return result; - } - - #endregion - - #endregion - - #region Operators - - /// - /// Scalar multiplication. - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix3x2d which holds the result of the multiplication - public static Matrix3x2d operator *(double left, Matrix3x2d right) - { - return Mult(right, left); - } - - /// - /// Scalar multiplication. 
- /// - /// left-hand operand - /// right-hand operand - /// A new Matrix3x2d which holds the result of the multiplication - public static Matrix3x2d operator *(Matrix3x2d left, double right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix3x2d which holds the result of the multiplication - public static Matrix3x2d operator *(Matrix3x2d left, Matrix2d right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix3d which holds the result of the multiplication - public static Matrix3d operator *(Matrix3x2d left, Matrix2x3d right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix3x4 which holds the result of the multiplication - public static Matrix3x4d operator *(Matrix3x2d left, Matrix2x4d right) - { - return Mult(left, right); - } - - /// - /// Matrix addition - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix3x2d which holds the result of the addition - public static Matrix3x2d operator +(Matrix3x2d left, Matrix3x2d right) - { - return Add(left, right); - } - - /// - /// Matrix subtraction - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix3x2d which holds the result of the subtraction - public static Matrix3x2d operator -(Matrix3x2d left, Matrix3x2d right) - { - return Subtract(left, right); - } - - /// - /// Compares two instances for equality. - /// - /// The first instance. - /// The second instance. - /// True, if left equals right; false otherwise. - public static bool operator ==(Matrix3x2d left, Matrix3x2d right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for inequality. - /// - /// The first instance. - /// The second instance. - /// True, if left does not equal right; false otherwise. - public static bool operator !=(Matrix3x2d left, Matrix3x2d right) - { - return !left.Equals(right); - } - - #endregion - - #region Overrides - - #region public override string ToString() - - /// - /// Returns a System.String that represents the current Matrix3d. - /// - /// The string representation of the matrix. - public override string ToString() - { - return String.Format("{0}\n{1}\n{2}", Row0, Row1, Row2); - } - - #endregion - - #region public override int GetHashCode() - - /// - /// Returns the hashcode for this instance. - /// - /// A System.Int32 containing the unique hashcode for this instance. - public override int GetHashCode() - { - return Row0.GetHashCode() ^ Row1.GetHashCode() ^ Row2.GetHashCode(); - } - - #endregion - - #region public override bool Equals(object obj) - - /// - /// Indicates whether this instance and a specified object are equal. - /// - /// The object to compare to. - /// True if the instances are equal; false otherwise. - public override bool Equals(object obj) - { - if (!(obj is Matrix3x2d)) - return false; - - return this.Equals((Matrix3x2d)obj); - } - - #endregion - - #endregion - - #endregion - - #region IEquatable Members - - /// - /// Indicates whether the current matrix is equal to another matrix. - /// - /// An matrix to compare with this matrix. - /// true if the current matrix is equal to the matrix parameter; otherwise, false. 
- public bool Equals(Matrix3x2d other) - { - return - Row0 == other.Row0 && - Row1 == other.Row1 && - Row2 == other.Row2; - } - - #endregion - } -} diff --git a/OpenTK/Math/Matrix3x4.cs b/OpenTK/Math/Matrix3x4.cs deleted file mode 100644 index 7d251d57..00000000 --- a/OpenTK/Math/Matrix3x4.cs +++ /dev/null @@ -1,1000 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - */ -#endregion - -using System; -using System.Runtime.InteropServices; - -namespace OpenTK -{ - /// - /// Represents a 3x4 Matrix - /// - [Serializable] - [StructLayout(LayoutKind.Sequential)] - public struct Matrix3x4 : IEquatable - { - #region Fields - - /// - /// Top row of the matrix - /// - public Vector4 Row0; - - /// - /// 2nd row of the matrix - /// - public Vector4 Row1; - - /// - /// Bottom row of the matrix - /// - public Vector4 Row2; - - /// - /// The zero matrix - /// - public static Matrix3x4 Zero = new Matrix3x4(Vector4.Zero, Vector4.Zero, Vector4.Zero); - - #endregion - - #region Constructors - - /// - /// Constructs a new instance. - /// - /// Top row of the matrix - /// Second row of the matrix - /// Bottom row of the matrix - public Matrix3x4(Vector4 row0, Vector4 row1, Vector4 row2) - { - Row0 = row0; - Row1 = row1; - Row2 = row2; - } - - /// - /// Constructs a new instance. - /// - /// First item of the first row of the matrix. - /// Second item of the first row of the matrix. - /// Third item of the first row of the matrix. - /// Fourth item of the first row of the matrix. - /// First item of the second row of the matrix. - /// Second item of the second row of the matrix. - /// Third item of the second row of the matrix. - /// Fourth item of the second row of the matrix. - /// First item of the third row of the matrix. - /// Second item of the third row of the matrix. - /// Third item of the third row of the matrix. - /// First item of the third row of the matrix. - public Matrix3x4( - float m00, float m01, float m02, float m03, - float m10, float m11, float m12, float m13, - float m20, float m21, float m22, float m23) - { - Row0 = new Vector4(m00, m01, m02, m03); - Row1 = new Vector4(m10, m11, m12, m13); - Row2 = new Vector4(m20, m21, m22, m23); - } - - #endregion - - #region Public Members - - #region Properties - - /// - /// Gets the first column of this matrix. - /// - public Vector3 Column0 - { - get { return new Vector3(Row0.X, Row1.X, Row2.X); } - } - - /// - /// Gets the second column of this matrix. 
- /// - public Vector3 Column1 - { - get { return new Vector3(Row0.Y, Row1.Y, Row2.Y); } - } - - /// - /// Gets the third column of this matrix. - /// - public Vector3 Column2 - { - get { return new Vector3(Row0.Z, Row1.Z, Row2.Z); } - } - - /// - /// Gets the fourth column of this matrix. - /// - public Vector3 Column3 - { - get { return new Vector3(Row0.W, Row1.W, Row2.W); } - } - - /// - /// Gets or sets the value at row 1, column 1 of this instance. - /// - public float M11 { get { return Row0.X; } set { Row0.X = value; } } - - /// - /// Gets or sets the value at row 1, column 2 of this instance. - /// - public float M12 { get { return Row0.Y; } set { Row0.Y = value; } } - - /// - /// Gets or sets the value at row 1, column 3 of this instance. - /// - public float M13 { get { return Row0.Z; } set { Row0.Z = value; } } - - /// - /// Gets or sets the value at row 1, column 4 of this instance. - /// - public float M14 { get { return Row0.W; } set { Row0.W = value; } } - - /// - /// Gets or sets the value at row 2, column 1 of this instance. - /// - public float M21 { get { return Row1.X; } set { Row1.X = value; } } - - /// - /// Gets or sets the value at row 2, column 2 of this instance. - /// - public float M22 { get { return Row1.Y; } set { Row1.Y = value; } } - - /// - /// Gets or sets the value at row 2, column 3 of this instance. - /// - public float M23 { get { return Row1.Z; } set { Row1.Z = value; } } - - /// - /// Gets or sets the value at row 2, column 4 of this instance. - /// - public float M24 { get { return Row1.W; } set { Row1.W = value; } } - - /// - /// Gets or sets the value at row 3, column 1 of this instance. - /// - public float M31 { get { return Row2.X; } set { Row2.X = value; } } - - /// - /// Gets or sets the value at row 3, column 2 of this instance. - /// - public float M32 { get { return Row2.Y; } set { Row2.Y = value; } } - - /// - /// Gets or sets the value at row 3, column 3 of this instance. - /// - public float M33 { get { return Row2.Z; } set { Row2.Z = value; } } - - /// - /// Gets or sets the value at row 3, column 4 of this instance. - /// - public float M34 { get { return Row2.W; } set { Row2.W = value; } } - - /// - /// Gets or sets the values along the main diagonal of the matrix. - /// - public Vector3 Diagonal - { - get - { - return new Vector3(Row0.X, Row1.Y, Row2.Z); - } - set - { - Row0.X = value.X; - Row1.Y = value.Y; - Row2.Z = value.Z; - } - } - - /// - /// Gets the trace of the matrix, the sum of the values along the diagonal. - /// - public float Trace { get { return Row0.X + Row1.Y + Row2.Z; } } - - #endregion - - #region Indexers - - /// - /// Gets or sets the value at a specified row and column. - /// - public float this[int rowIndex, int columnIndex] - { - get - { - if (rowIndex == 0) return Row0[columnIndex]; - else if (rowIndex == 1) return Row1[columnIndex]; - else if (rowIndex == 2) return Row2[columnIndex]; - throw new IndexOutOfRangeException("You tried to access this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - set - { - if (rowIndex == 0) Row0[columnIndex] = value; - else if (rowIndex == 1) Row1[columnIndex] = value; - else if (rowIndex == 2) Row2[columnIndex] = value; - else throw new IndexOutOfRangeException("You tried to set this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - } - - #endregion - - #region Instance - - #region public void Invert() - - /// - /// Converts this instance into its inverse. 
- /// - public void Invert() - { - this = Matrix3x4.Invert(this); - } - - #endregion - - #endregion - - #region Static - - #region CreateFromAxisAngle - - /// - /// Build a rotation matrix from the specified axis/angle rotation. - /// - /// The axis to rotate about. - /// Angle in radians to rotate counter-clockwise (looking in the direction of the given axis). - /// A matrix instance. - public static void CreateFromAxisAngle(Vector3 axis, float angle, out Matrix3x4 result) - { - axis.Normalize(); - float axisX = axis.X, axisY = axis.Y, axisZ = axis.Z; - - float cos = (float)System.Math.Cos(angle); - float sin = (float)System.Math.Sin(angle); - float t = 1.0f - cos; - - float tXX = t * axisX * axisX, - tXY = t * axisX * axisY, - tXZ = t * axisX * axisZ, - tYY = t * axisY * axisY, - tYZ = t * axisY * axisZ, - tZZ = t * axisZ * axisZ; - - float sinX = sin * axisX, - sinY = sin * axisY, - sinZ = sin * axisZ; - - result.Row0.X = tXX + cos; - result.Row0.Y = tXY - sinZ; - result.Row0.Z = tXZ + sinY; - result.Row0.W = 0; - result.Row1.X = tXY + sinZ; - result.Row1.Y = tYY + cos; - result.Row1.Z = tYZ - sinX; - result.Row1.W = 0; - result.Row2.X = tXZ - sinY; - result.Row2.Y = tYZ + sinX; - result.Row2.Z = tZZ + cos; - result.Row2.W = 0; - } - - /// - /// Build a rotation matrix from the specified axis/angle rotation. - /// - /// The axis to rotate about. - /// Angle in radians to rotate counter-clockwise (looking in the direction of the given axis). - /// A matrix instance. - public static Matrix3x4 CreateFromAxisAngle(Vector3 axis, float angle) - { - Matrix3x4 result; - CreateFromAxisAngle(axis, angle, out result); - return result; - } - - #endregion - - #region CreateFromQuaternion - - /// - /// Builds a rotation matrix from a quaternion. - /// - /// The quaternion to rotate by. - /// A matrix instance. - public static void CreateFromQuaternion(ref Quaternion q, out Matrix3x4 result) - { - float x = q.X, y = q.Y, z = q.Z, w = q.W, - tx = 2 * x, ty = 2 * y, tz = 2 * z, - txx = tx * x, tyy = ty * y, tzz = tz * z, - txy = tx * y, txz = tx * z, tyz = ty * z, - txw = tx * w, tyw = ty * w, tzw = tz * w; - - result.Row0.X = 1f - (tyy + tzz); - result.Row0.Y = txy + tzw; - result.Row0.Z = txz - tyw; - result.Row0.W = 0f; - result.Row1.X = txy - tzw; - result.Row1.Y = 1f - (txx + tzz); - result.Row1.Z = tyz + txw; - result.Row1.W = 0f; - result.Row2.X = txz + tyw; - result.Row2.Y = tyz - txw; - result.Row2.Z = 1f - (txx + tyy); - result.Row2.W = 0f; - - /*Vector3 axis; - float angle; - q.ToAxisAngle(out axis, out angle); - CreateFromAxisAngle(axis, angle, out result);*/ - } - - /// - /// Builds a rotation matrix from a quaternion. - /// - /// The quaternion to rotate by. - /// A matrix instance. - public static Matrix3x4 CreateFromQuaternion(Quaternion q) - { - Matrix3x4 result; - CreateFromQuaternion(ref q, out result); - return result; - } - - #endregion - - #region CreateRotation[XYZ] - - /// - /// Builds a rotation matrix for a rotation around the x-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4 instance. 
- public static void CreateRotationX(float angle, out Matrix3x4 result) - { - float cos = (float)System.Math.Cos(angle); - float sin = (float)System.Math.Sin(angle); - - result.Row0.X = 1; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row0.W = 0; - result.Row1.X = 0; - result.Row1.Y = cos; - result.Row1.Z = sin; - result.Row1.W = 0; - result.Row2.X = 0; - result.Row2.Y = -sin; - result.Row2.Z = cos; - result.Row2.W = 0; - } - - /// - /// Builds a rotation matrix for a rotation around the x-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4 instance. - public static Matrix3x4 CreateRotationX(float angle) - { - Matrix3x4 result; - CreateRotationX(angle, out result); - return result; - } - - /// - /// Builds a rotation matrix for a rotation around the y-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4 instance. - public static void CreateRotationY(float angle, out Matrix3x4 result) - { - float cos = (float)System.Math.Cos(angle); - float sin = (float)System.Math.Sin(angle); - - result.Row0.X = cos; - result.Row0.Y = 0; - result.Row0.Z = -sin; - result.Row0.W = 0; - result.Row1.X = 0; - result.Row1.Y = 1; - result.Row1.Z = 0; - result.Row1.W = 0; - result.Row2.X = sin; - result.Row2.Y = 0; - result.Row2.Z = cos; - result.Row2.W = 0; - } - - /// - /// Builds a rotation matrix for a rotation around the y-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4 instance. - public static Matrix3x4 CreateRotationY(float angle) - { - Matrix3x4 result; - CreateRotationY(angle, out result); - return result; - } - - /// - /// Builds a rotation matrix for a rotation around the z-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4 instance. - public static void CreateRotationZ(float angle, out Matrix3x4 result) - { - float cos = (float)System.Math.Cos(angle); - float sin = (float)System.Math.Sin(angle); - - result.Row0.X = cos; - result.Row0.Y = sin; - result.Row0.Z = 0; - result.Row0.W = 0; - result.Row1.X = -sin; - result.Row1.Y = cos; - result.Row1.Z = 0; - result.Row1.W = 0; - result.Row2.X = 0; - result.Row2.Y = 0; - result.Row2.Z = 1; - result.Row2.W = 0; - } - - /// - /// Builds a rotation matrix for a rotation around the z-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4 instance. - public static Matrix3x4 CreateRotationZ(float angle) - { - Matrix3x4 result; - CreateRotationZ(angle, out result); - return result; - } - - #endregion - - #region CreateTranslation - - /// - /// Creates a translation matrix. - /// - /// X translation. - /// Y translation. - /// Z translation. - /// The resulting Matrix4 instance. - public static void CreateTranslation(float x, float y, float z, out Matrix3x4 result) - { - result.Row0.X = 1; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row0.W = x; - result.Row1.X = 0; - result.Row1.Y = 1; - result.Row1.Z = 0; - result.Row1.W = y; - result.Row2.X = 0; - result.Row2.Y = 0; - result.Row2.Z = 1; - result.Row2.W = z; - } - - /// - /// Creates a translation matrix. - /// - /// The translation vector. - /// The resulting Matrix4 instance. 
- public static void CreateTranslation(ref Vector3 vector, out Matrix3x4 result) - { - result.Row0.X = 1; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row0.W = vector.X; - result.Row1.X = 0; - result.Row1.Y = 1; - result.Row1.Z = 0; - result.Row1.W = vector.Y; - result.Row2.X = 0; - result.Row2.Y = 0; - result.Row2.Z = 1; - result.Row2.W = vector.Z; - } - - /// - /// Creates a translation matrix. - /// - /// X translation. - /// Y translation. - /// Z translation. - /// The resulting Matrix4 instance. - public static Matrix3x4 CreateTranslation(float x, float y, float z) - { - Matrix3x4 result; - CreateTranslation(x, y, z, out result); - return result; - } - - /// - /// Creates a translation matrix. - /// - /// The translation vector. - /// The resulting Matrix4 instance. - public static Matrix3x4 CreateTranslation(Vector3 vector) - { - Matrix3x4 result; - CreateTranslation(vector.X, vector.Y, vector.Z, out result); - return result; - } - - #endregion - - #region CreateScale - - /// - /// Build a scaling matrix - /// - /// Single scale factor for x,y and z axes - /// A scaling matrix - public static Matrix3x4 CreateScale(float scale) - { - return CreateScale(scale, scale, scale); - } - - /// - /// Build a scaling matrix - /// - /// Scale factors for x,y and z axes - /// A scaling matrix - public static Matrix3x4 CreateScale(Vector3 scale) - { - return CreateScale(scale.X, scale.Y, scale.Z); - } - - /// - /// Build a scaling matrix - /// - /// Scale factor for x-axis - /// Scale factor for y-axis - /// Scale factor for z-axis - /// A scaling matrix - public static Matrix3x4 CreateScale(float x, float y, float z) - { - Matrix3x4 result; - result.Row0.X = x; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row0.W = 0; - result.Row1.X = 0; - result.Row1.Y = y; - result.Row1.Z = 0; - result.Row1.W = 0; - result.Row2.X = 0; - result.Row2.Y = 0; - result.Row2.Z = z; - result.Row2.W = 0; - return result; - } - - #endregion - - #region Multiply Functions - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication - public static Matrix3 Mult(Matrix3x4 left, Matrix4x3 right) - { - Matrix3 result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. 
- /// A new instance that is the result of the multiplication - public static void Mult(ref Matrix3x4 left, ref Matrix4x3 right, out Matrix3 result) - { - float lM11 = left.Row0.X, lM12 = left.Row0.Y, lM13 = left.Row0.Z, lM14 = left.Row0.W, - lM21 = left.Row1.X, lM22 = left.Row1.Y, lM23 = left.Row1.Z, lM24 = left.Row1.W, - lM31 = left.Row2.X, lM32 = left.Row2.Y, lM33 = left.Row2.Z, lM34 = left.Row2.W, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z, - rM31 = right.Row2.X, rM32 = right.Row2.Y, rM33 = right.Row2.Z, - rM41 = right.Row3.X, rM42 = right.Row3.Y, rM43 = right.Row3.Z; - - result.Row0.X = (lM11 * rM11) + (lM12 * rM21) + (lM13 * rM31) + (lM14 * rM41); - result.Row0.Y = (lM11 * rM12) + (lM12 * rM22) + (lM13 * rM32) + (lM14 * rM42); - result.Row0.Z = (lM11 * rM13) + (lM12 * rM23) + (lM13 * rM33) + (lM14 * rM43); - result.Row1.X = (lM21 * rM11) + (lM22 * rM21) + (lM23 * rM31) + (lM24 * rM41); - result.Row1.Y = (lM21 * rM12) + (lM22 * rM22) + (lM23 * rM32) + (lM24 * rM42); - result.Row1.Z = (lM21 * rM13) + (lM22 * rM23) + (lM23 * rM33) + (lM24 * rM43); - result.Row2.X = (lM31 * rM11) + (lM32 * rM21) + (lM33 * rM31) + (lM34 * rM41); - result.Row2.Y = (lM31 * rM12) + (lM32 * rM22) + (lM33 * rM32) + (lM34 * rM42); - result.Row2.Z = (lM31 * rM13) + (lM32 * rM23) + (lM33 * rM33) + (lM34 * rM43); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication - public static Matrix3x4 Mult(Matrix3x4 left, Matrix3x4 right) - { - Matrix3x4 result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. 
- /// A new instance that is the result of the multiplication - public static void Mult(ref Matrix3x4 left, ref Matrix3x4 right, out Matrix3x4 result) - { - float lM11 = left.Row0.X, lM12 = left.Row0.Y, lM13 = left.Row0.Z, lM14 = left.Row0.W, - lM21 = left.Row1.X, lM22 = left.Row1.Y, lM23 = left.Row1.Z, lM24 = left.Row1.W, - lM31 = left.Row2.X, lM32 = left.Row2.Y, lM33 = left.Row2.Z, lM34 = left.Row2.W, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, rM14 = right.Row0.W, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z, rM24 = right.Row1.W, - rM31 = right.Row2.X, rM32 = right.Row2.Y, rM33 = right.Row2.Z, rM34 = right.Row2.W; - - result.Row0.X = (lM11 * rM11) + (lM12 * rM21) + (lM13 * rM31); - result.Row0.Y = (lM11 * rM12) + (lM12 * rM22) + (lM13 * rM32); - result.Row0.Z = (lM11 * rM13) + (lM12 * rM23) + (lM13 * rM33); - result.Row0.W = (lM11 * rM14) + (lM12 * rM24) + (lM13 * rM34) + lM14; - result.Row1.X = (lM21 * rM11) + (lM22 * rM21) + (lM23 * rM31); - result.Row1.Y = (lM21 * rM12) + (lM22 * rM22) + (lM23 * rM32); - result.Row1.Z = (lM21 * rM13) + (lM22 * rM23) + (lM23 * rM33); - result.Row1.W = (lM21 * rM14) + (lM22 * rM24) + (lM23 * rM34) + lM24; - result.Row2.X = (lM31 * rM11) + (lM32 * rM21) + (lM33 * rM31); - result.Row2.Y = (lM31 * rM12) + (lM32 * rM22) + (lM33 * rM32); - result.Row2.Z = (lM31 * rM13) + (lM32 * rM23) + (lM33 * rM33); - result.Row2.W = (lM31 * rM14) + (lM32 * rM24) + (lM33 * rM34) + lM34; - - /*result.Row0 = (right.Row0 * lM11 + right.Row1 * lM12 + right.Row2 * lM13); - result.Row0.W += lM14; - - result.Row1 = (right.Row0 * lM21 + right.Row1 * lM22 + right.Row2 * lM23); - result.Row1.W += lM24; - - result.Row2 = (right.Row0 * lM31 + right.Row1 * lM32 + right.Row2 * lM33); - result.Row2.W += lM34;*/ - } - - /// - /// Multiplies an instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication - public static Matrix3x4 Mult(Matrix3x4 left, float right) - { - Matrix3x4 result; - Mult(ref left, right, out result); - return result; - } - - /// - /// Multiplies an instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication - public static void Mult(ref Matrix3x4 left, float right, out Matrix3x4 result) - { - result.Row0 = left.Row0 * right; - result.Row1 = left.Row1 * right; - result.Row2 = left.Row2 * right; - } - - #endregion - - #region Add Functions - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. - public static Matrix3x4 Add(Matrix3x4 left, Matrix3x4 right) - { - Matrix3x4 result; - Add(ref left, ref right, out result); - return result; - } - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. - public static void Add(ref Matrix3x4 left, ref Matrix3x4 right, out Matrix3x4 result) - { - result.Row0 = left.Row0 + right.Row0; - result.Row1 = left.Row1 + right.Row1; - result.Row2 = left.Row2 + right.Row2; - } - - #endregion - - #region Subtract Functions - - /// - /// Subtracts one instance from another. - /// - /// The left operand of the subraction. - /// The right operand of the subraction. - /// A new instance that is the result of the subraction. 
- public static Matrix3x4 Subtract(Matrix3x4 left, Matrix3x4 right) - { - Matrix3x4 result; - Subtract(ref left, ref right, out result); - return result; - } - - /// - /// Subtracts one instance from another. - /// - /// The left operand of the subraction. - /// The right operand of the subraction. - /// A new instance that is the result of the subraction. - public static void Subtract(ref Matrix3x4 left, ref Matrix3x4 right, out Matrix3x4 result) - { - result.Row0 = left.Row0 - right.Row0; - result.Row1 = left.Row1 - right.Row1; - result.Row2 = left.Row2 - right.Row2; - } - - #endregion - - #region Invert Functions - - /// - /// Calculate the inverse of the given matrix - /// - /// The matrix to invert - /// The inverse of the given matrix if it has one, or the input if it is singular - /// Thrown if the Matrix4 is singular. - public static Matrix3x4 Invert(Matrix3x4 mat) - { - Matrix3x4 result; - Invert(ref mat, out result); - return result; - } - - /// - /// Calculate the inverse of the given matrix - /// - /// The matrix to invert - /// The inverse of the given matrix if it has one, or the input if it is singular - /// Thrown if the Matrix4 is singular. - public static void Invert(ref Matrix3x4 mat, out Matrix3x4 result) - { - Matrix3 inverseRotation = new Matrix3(mat.Column0, mat.Column1, mat.Column2); - inverseRotation.Row0 /= inverseRotation.Row0.LengthSquared; - inverseRotation.Row1 /= inverseRotation.Row1.LengthSquared; - inverseRotation.Row2 /= inverseRotation.Row2.LengthSquared; - - Vector3 translation = new Vector3(mat.Row0.W, mat.Row1.W, mat.Row2.W); - - result.Row0 = new Vector4(inverseRotation.Row0, -Vector3.Dot(inverseRotation.Row0, translation)); - result.Row1 = new Vector4(inverseRotation.Row1, -Vector3.Dot(inverseRotation.Row1, translation)); - result.Row2 = new Vector4(inverseRotation.Row2, -Vector3.Dot(inverseRotation.Row2, translation)); - } - - #endregion - - #region Transpose - - /// - /// Calculate the transpose of the given matrix - /// - /// The matrix to transpose - /// The transpose of the given matrix - public static Matrix4x3 Transpose(Matrix3x4 mat) - { - return new Matrix4x3(mat.Column0, mat.Column1, mat.Column2, mat.Column3); - } - - /// - /// Calculate the transpose of the given matrix - /// - /// The matrix to transpose - /// The result of the calculation - public static void Transpose(ref Matrix3x4 mat, out Matrix4x3 result) - { - result.Row0 = mat.Column0; - result.Row1 = mat.Column1; - result.Row2 = mat.Column2; - result.Row3 = mat.Column3; - } - - #endregion - - #endregion - - #region Operators - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix3 which holds the result of the multiplication - public static Matrix3 operator *(Matrix3x4 left, Matrix4x3 right) - { - return Matrix3x4.Mult(left, right); - } - - /// - /// Matrix-scalar multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix3x4 which holds the result of the multiplication - public static Matrix3x4 operator *(Matrix3x4 left, Matrix3x4 right) - { - return Matrix3x4.Mult(left, right); - } - - /// - /// Matrix-scalar multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix3x4 which holds the result of the multiplication - public static Matrix3x4 operator *(Matrix3x4 left, float right) - { - return Matrix3x4.Mult(left, right); - } - - /// - /// Matrix addition - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix3x4 which holds the result of the addition 
- public static Matrix3x4 operator +(Matrix3x4 left, Matrix3x4 right) - { - return Matrix3x4.Add(left, right); - } - - /// - /// Matrix subtraction - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix3x4 which holds the result of the subtraction - public static Matrix3x4 operator -(Matrix3x4 left, Matrix3x4 right) - { - return Matrix3x4.Subtract(left, right); - } - - /// - /// Compares two instances for equality. - /// - /// The first instance. - /// The second instance. - /// True, if left equals right; false otherwise. - public static bool operator ==(Matrix3x4 left, Matrix3x4 right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for inequality. - /// - /// The first instance. - /// The second instance. - /// True, if left does not equal right; false otherwise. - public static bool operator !=(Matrix3x4 left, Matrix3x4 right) - { - return !left.Equals(right); - } - - #endregion - - #region Overrides - - #region public override string ToString() - - /// - /// Returns a System.String that represents the current Matrix4. - /// - /// The string representation of the matrix. - public override string ToString() - { - return string.Format("{0}\n{1}\n{2}", Row0, Row1, Row2); - } - - #endregion - - #region public override int GetHashCode() - - /// - /// Returns the hashcode for this instance. - /// - /// A System.Int32 containing the unique hashcode for this instance. - public override int GetHashCode() - { - return Row0.GetHashCode() ^ Row1.GetHashCode() ^ Row2.GetHashCode(); - } - - #endregion - - #region public override bool Equals(object obj) - - /// - /// Indicates whether this instance and a specified object are equal. - /// - /// The object to compare to. - /// True if the instances are equal; false otherwise. - public override bool Equals(object obj) - { - if (!(obj is Matrix3x4)) - return false; - - return this.Equals((Matrix3x4)obj); - } - - #endregion - - #endregion - - #endregion - - #region IEquatable Members - - /// - /// Indicates whether the current matrix is equal to another matrix. - /// - /// An matrix to compare with this matrix. - /// true if the current matrix is equal to the matrix parameter; otherwise, false. - public bool Equals(Matrix3x4 other) - { - return - Row0 == other.Row0 && - Row1 == other.Row1 && - Row2 == other.Row2; - } - - #endregion - } -} diff --git a/OpenTK/Math/Matrix3x4d.cs b/OpenTK/Math/Matrix3x4d.cs deleted file mode 100644 index fa37cfc8..00000000 --- a/OpenTK/Math/Matrix3x4d.cs +++ /dev/null @@ -1,1000 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - */ -#endregion - -using System; -using System.Runtime.InteropServices; - -namespace OpenTK -{ - /// - /// Represents a 3x4 Matrix - /// - [Serializable] - [StructLayout(LayoutKind.Sequential)] - public struct Matrix3x4d : IEquatable - { - #region Fields - - /// - /// Top row of the matrix - /// - public Vector4d Row0; - - /// - /// 2nd row of the matrix - /// - public Vector4d Row1; - - /// - /// Bottom row of the matrix - /// - public Vector4d Row2; - - /// - /// The zero matrix - /// - public static Matrix3x4d Zero = new Matrix3x4d(Vector4d.Zero, Vector4d.Zero, Vector4d.Zero); - - #endregion - - #region Constructors - - /// - /// Constructs a new instance. - /// - /// Top row of the matrix - /// Second row of the matrix - /// Bottom row of the matrix - public Matrix3x4d(Vector4d row0, Vector4d row1, Vector4d row2) - { - Row0 = row0; - Row1 = row1; - Row2 = row2; - } - - /// - /// Constructs a new instance. - /// - /// First item of the first row of the matrix. - /// Second item of the first row of the matrix. - /// Third item of the first row of the matrix. - /// Fourth item of the first row of the matrix. - /// First item of the second row of the matrix. - /// Second item of the second row of the matrix. - /// Third item of the second row of the matrix. - /// Fourth item of the second row of the matrix. - /// First item of the third row of the matrix. - /// Second item of the third row of the matrix. - /// Third item of the third row of the matrix. - /// First item of the third row of the matrix. - public Matrix3x4d( - double m00, double m01, double m02, double m03, - double m10, double m11, double m12, double m13, - double m20, double m21, double m22, double m23) - { - Row0 = new Vector4d(m00, m01, m02, m03); - Row1 = new Vector4d(m10, m11, m12, m13); - Row2 = new Vector4d(m20, m21, m22, m23); - } - - #endregion - - #region Public Members - - #region Properties - - /// - /// Gets the first column of this matrix. - /// - public Vector3d Column0 - { - get { return new Vector3d(Row0.X, Row1.X, Row2.X); } - } - - /// - /// Gets the second column of this matrix. - /// - public Vector3d Column1 - { - get { return new Vector3d(Row0.Y, Row1.Y, Row2.Y); } - } - - /// - /// Gets the third column of this matrix. - /// - public Vector3d Column2 - { - get { return new Vector3d(Row0.Z, Row1.Z, Row2.Z); } - } - - /// - /// Gets the fourth column of this matrix. - /// - public Vector3d Column3 - { - get { return new Vector3d(Row0.W, Row1.W, Row2.W); } - } - - /// - /// Gets or sets the value at row 1, column 1 of this instance. - /// - public double M11 { get { return Row0.X; } set { Row0.X = value; } } - - /// - /// Gets or sets the value at row 1, column 2 of this instance. - /// - public double M12 { get { return Row0.Y; } set { Row0.Y = value; } } - - /// - /// Gets or sets the value at row 1, column 3 of this instance. - /// - public double M13 { get { return Row0.Z; } set { Row0.Z = value; } } - - /// - /// Gets or sets the value at row 1, column 4 of this instance. - /// - public double M14 { get { return Row0.W; } set { Row0.W = value; } } - - /// - /// Gets or sets the value at row 2, column 1 of this instance. 
- /// - public double M21 { get { return Row1.X; } set { Row1.X = value; } } - - /// - /// Gets or sets the value at row 2, column 2 of this instance. - /// - public double M22 { get { return Row1.Y; } set { Row1.Y = value; } } - - /// - /// Gets or sets the value at row 2, column 3 of this instance. - /// - public double M23 { get { return Row1.Z; } set { Row1.Z = value; } } - - /// - /// Gets or sets the value at row 2, column 4 of this instance. - /// - public double M24 { get { return Row1.W; } set { Row1.W = value; } } - - /// - /// Gets or sets the value at row 3, column 1 of this instance. - /// - public double M31 { get { return Row2.X; } set { Row2.X = value; } } - - /// - /// Gets or sets the value at row 3, column 2 of this instance. - /// - public double M32 { get { return Row2.Y; } set { Row2.Y = value; } } - - /// - /// Gets or sets the value at row 3, column 3 of this instance. - /// - public double M33 { get { return Row2.Z; } set { Row2.Z = value; } } - - /// - /// Gets or sets the value at row 3, column 4 of this instance. - /// - public double M34 { get { return Row2.W; } set { Row2.W = value; } } - - /// - /// Gets or sets the values along the main diagonal of the matrix. - /// - public Vector3d Diagonal - { - get - { - return new Vector3d(Row0.X, Row1.Y, Row2.Z); - } - set - { - Row0.X = value.X; - Row1.Y = value.Y; - Row2.Z = value.Z; - } - } - - /// - /// Gets the trace of the matrix, the sum of the values along the diagonal. - /// - public double Trace { get { return Row0.X + Row1.Y + Row2.Z; } } - - #endregion - - #region Indexers - - /// - /// Gets or sets the value at a specified row and column. - /// - public double this[int rowIndex, int columnIndex] - { - get - { - if (rowIndex == 0) return Row0[columnIndex]; - else if (rowIndex == 1) return Row1[columnIndex]; - else if (rowIndex == 2) return Row2[columnIndex]; - throw new IndexOutOfRangeException("You tried to access this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - set - { - if (rowIndex == 0) Row0[columnIndex] = value; - else if (rowIndex == 1) Row1[columnIndex] = value; - else if (rowIndex == 2) Row2[columnIndex] = value; - else throw new IndexOutOfRangeException("You tried to set this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - } - - #endregion - - #region Instance - - #region public void Invert() - - /// - /// Converts this instance into its inverse. - /// - public void Invert() - { - this = Matrix3x4d.Invert(this); - } - - #endregion - - #endregion - - #region Static - - #region CreateFromAxisAngle - - /// - /// Build a rotation matrix from the specified axis/angle rotation. - /// - /// The axis to rotate about. - /// Angle in radians to rotate counter-clockwise (looking in the direction of the given axis). - /// A matrix instance. 
- public static void CreateFromAxisAngle(Vector3d axis, double angle, out Matrix3x4d result) - { - axis.Normalize(); - double axisX = axis.X, axisY = axis.Y, axisZ = axis.Z; - - double cos = (double)System.Math.Cos(angle); - double sin = (double)System.Math.Sin(angle); - double t = 1.0f - cos; - - double tXX = t * axisX * axisX, - tXY = t * axisX * axisY, - tXZ = t * axisX * axisZ, - tYY = t * axisY * axisY, - tYZ = t * axisY * axisZ, - tZZ = t * axisZ * axisZ; - - double sinX = sin * axisX, - sinY = sin * axisY, - sinZ = sin * axisZ; - - result.Row0.X = tXX + cos; - result.Row0.Y = tXY - sinZ; - result.Row0.Z = tXZ + sinY; - result.Row0.W = 0; - result.Row1.X = tXY + sinZ; - result.Row1.Y = tYY + cos; - result.Row1.Z = tYZ - sinX; - result.Row1.W = 0; - result.Row2.X = tXZ - sinY; - result.Row2.Y = tYZ + sinX; - result.Row2.Z = tZZ + cos; - result.Row2.W = 0; - } - - /// - /// Build a rotation matrix from the specified axis/angle rotation. - /// - /// The axis to rotate about. - /// Angle in radians to rotate counter-clockwise (looking in the direction of the given axis). - /// A matrix instance. - public static Matrix3x4d CreateFromAxisAngle(Vector3d axis, double angle) - { - Matrix3x4d result; - CreateFromAxisAngle(axis, angle, out result); - return result; - } - - #endregion - - #region CreateFromQuaternion - - /// - /// Builds a rotation matrix from a quaternion. - /// - /// The quaternion to rotate by. - /// A matrix instance. - public static void CreateFromQuaternion(ref Quaternion q, out Matrix3x4d result) - { - double x = q.X, y = q.Y, z = q.Z, w = q.W, - tx = 2 * x, ty = 2 * y, tz = 2 * z, - txx = tx * x, tyy = ty * y, tzz = tz * z, - txy = tx * y, txz = tx * z, tyz = ty * z, - txw = tx * w, tyw = ty * w, tzw = tz * w; - - result.Row0.X = 1f - (tyy + tzz); - result.Row0.Y = txy + tzw; - result.Row0.Z = txz - tyw; - result.Row0.W = 0f; - result.Row1.X = txy - tzw; - result.Row1.Y = 1f - (txx + tzz); - result.Row1.Z = tyz + txw; - result.Row1.W = 0f; - result.Row2.X = txz + tyw; - result.Row2.Y = tyz - txw; - result.Row2.Z = 1f - (txx + tyy); - result.Row2.W = 0f; - - /*Vector3d axis; - double angle; - q.ToAxisAngle(out axis, out angle); - CreateFromAxisAngle(axis, angle, out result);*/ - } - - /// - /// Builds a rotation matrix from a quaternion. - /// - /// The quaternion to rotate by. - /// A matrix instance. - public static Matrix3x4d CreateFromQuaternion(Quaternion q) - { - Matrix3x4d result; - CreateFromQuaternion(ref q, out result); - return result; - } - - #endregion - - #region CreateRotation[XYZ] - - /// - /// Builds a rotation matrix for a rotation around the x-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4 instance. - public static void CreateRotationX(double angle, out Matrix3x4d result) - { - double cos = (double)System.Math.Cos(angle); - double sin = (double)System.Math.Sin(angle); - - result.Row0.X = 1; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row0.W = 0; - result.Row1.X = 0; - result.Row1.Y = cos; - result.Row1.Z = sin; - result.Row1.W = 0; - result.Row2.X = 0; - result.Row2.Y = -sin; - result.Row2.Z = cos; - result.Row2.W = 0; - } - - /// - /// Builds a rotation matrix for a rotation around the x-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4 instance. - public static Matrix3x4d CreateRotationX(double angle) - { - Matrix3x4d result; - CreateRotationX(angle, out result); - return result; - } - - /// - /// Builds a rotation matrix for a rotation around the y-axis. 
- /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4 instance. - public static void CreateRotationY(double angle, out Matrix3x4d result) - { - double cos = (double)System.Math.Cos(angle); - double sin = (double)System.Math.Sin(angle); - - result.Row0.X = cos; - result.Row0.Y = 0; - result.Row0.Z = -sin; - result.Row0.W = 0; - result.Row1.X = 0; - result.Row1.Y = 1; - result.Row1.Z = 0; - result.Row1.W = 0; - result.Row2.X = sin; - result.Row2.Y = 0; - result.Row2.Z = cos; - result.Row2.W = 0; - } - - /// - /// Builds a rotation matrix for a rotation around the y-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4 instance. - public static Matrix3x4d CreateRotationY(double angle) - { - Matrix3x4d result; - CreateRotationY(angle, out result); - return result; - } - - /// - /// Builds a rotation matrix for a rotation around the z-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4 instance. - public static void CreateRotationZ(double angle, out Matrix3x4d result) - { - double cos = (double)System.Math.Cos(angle); - double sin = (double)System.Math.Sin(angle); - - result.Row0.X = cos; - result.Row0.Y = sin; - result.Row0.Z = 0; - result.Row0.W = 0; - result.Row1.X = -sin; - result.Row1.Y = cos; - result.Row1.Z = 0; - result.Row1.W = 0; - result.Row2.X = 0; - result.Row2.Y = 0; - result.Row2.Z = 1; - result.Row2.W = 0; - } - - /// - /// Builds a rotation matrix for a rotation around the z-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4 instance. - public static Matrix3x4d CreateRotationZ(double angle) - { - Matrix3x4d result; - CreateRotationZ(angle, out result); - return result; - } - - #endregion - - #region CreateTranslation - - /// - /// Creates a translation matrix. - /// - /// X translation. - /// Y translation. - /// Z translation. - /// The resulting Matrix4 instance. - public static void CreateTranslation(double x, double y, double z, out Matrix3x4d result) - { - result.Row0.X = 1; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row0.W = x; - result.Row1.X = 0; - result.Row1.Y = 1; - result.Row1.Z = 0; - result.Row1.W = y; - result.Row2.X = 0; - result.Row2.Y = 0; - result.Row2.Z = 1; - result.Row2.W = z; - } - - /// - /// Creates a translation matrix. - /// - /// The translation vector. - /// The resulting Matrix4 instance. - public static void CreateTranslation(ref Vector3d vector, out Matrix3x4d result) - { - result.Row0.X = 1; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row0.W = vector.X; - result.Row1.X = 0; - result.Row1.Y = 1; - result.Row1.Z = 0; - result.Row1.W = vector.Y; - result.Row2.X = 0; - result.Row2.Y = 0; - result.Row2.Z = 1; - result.Row2.W = vector.Z; - } - - /// - /// Creates a translation matrix. - /// - /// X translation. - /// Y translation. - /// Z translation. - /// The resulting Matrix4 instance. - public static Matrix3x4d CreateTranslation(double x, double y, double z) - { - Matrix3x4d result; - CreateTranslation(x, y, z, out result); - return result; - } - - /// - /// Creates a translation matrix. - /// - /// The translation vector. - /// The resulting Matrix4 instance. 
- public static Matrix3x4d CreateTranslation(Vector3d vector) - { - Matrix3x4d result; - CreateTranslation(vector.X, vector.Y, vector.Z, out result); - return result; - } - - #endregion - - #region CreateScale - - /// - /// Build a scaling matrix - /// - /// Single scale factor for x,y and z axes - /// A scaling matrix - public static Matrix3x4d CreateScale(double scale) - { - return CreateScale(scale, scale, scale); - } - - /// - /// Build a scaling matrix - /// - /// Scale factors for x,y and z axes - /// A scaling matrix - public static Matrix3x4d CreateScale(Vector3d scale) - { - return CreateScale(scale.X, scale.Y, scale.Z); - } - - /// - /// Build a scaling matrix - /// - /// Scale factor for x-axis - /// Scale factor for y-axis - /// Scale factor for z-axis - /// A scaling matrix - public static Matrix3x4d CreateScale(double x, double y, double z) - { - Matrix3x4d result; - result.Row0.X = x; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row0.W = 0; - result.Row1.X = 0; - result.Row1.Y = y; - result.Row1.Z = 0; - result.Row1.W = 0; - result.Row2.X = 0; - result.Row2.Y = 0; - result.Row2.Z = z; - result.Row2.W = 0; - return result; - } - - #endregion - - #region Multiply Functions - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication - public static Matrix3d Mult(Matrix3x4d left, Matrix4x3d right) - { - Matrix3d result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication - public static void Mult(ref Matrix3x4d left, ref Matrix4x3d right, out Matrix3d result) - { - double lM11 = left.Row0.X, lM12 = left.Row0.Y, lM13 = left.Row0.Z, lM14 = left.Row0.W, - lM21 = left.Row1.X, lM22 = left.Row1.Y, lM23 = left.Row1.Z, lM24 = left.Row1.W, - lM31 = left.Row2.X, lM32 = left.Row2.Y, lM33 = left.Row2.Z, lM34 = left.Row2.W, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z, - rM31 = right.Row2.X, rM32 = right.Row2.Y, rM33 = right.Row2.Z, - rM41 = right.Row3.X, rM42 = right.Row3.Y, rM43 = right.Row3.Z; - - result.Row0.X = (lM11 * rM11) + (lM12 * rM21) + (lM13 * rM31) + (lM14 * rM41); - result.Row0.Y = (lM11 * rM12) + (lM12 * rM22) + (lM13 * rM32) + (lM14 * rM42); - result.Row0.Z = (lM11 * rM13) + (lM12 * rM23) + (lM13 * rM33) + (lM14 * rM43); - result.Row1.X = (lM21 * rM11) + (lM22 * rM21) + (lM23 * rM31) + (lM24 * rM41); - result.Row1.Y = (lM21 * rM12) + (lM22 * rM22) + (lM23 * rM32) + (lM24 * rM42); - result.Row1.Z = (lM21 * rM13) + (lM22 * rM23) + (lM23 * rM33) + (lM24 * rM43); - result.Row2.X = (lM31 * rM11) + (lM32 * rM21) + (lM33 * rM31) + (lM34 * rM41); - result.Row2.Y = (lM31 * rM12) + (lM32 * rM22) + (lM33 * rM32) + (lM34 * rM42); - result.Row2.Z = (lM31 * rM13) + (lM32 * rM23) + (lM33 * rM33) + (lM34 * rM43); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication - public static Matrix3x4d Mult(Matrix3x4d left, Matrix3x4d right) - { - Matrix3x4d result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. 
- /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication - public static void Mult(ref Matrix3x4d left, ref Matrix3x4d right, out Matrix3x4d result) - { - double lM11 = left.Row0.X, lM12 = left.Row0.Y, lM13 = left.Row0.Z, lM14 = left.Row0.W, - lM21 = left.Row1.X, lM22 = left.Row1.Y, lM23 = left.Row1.Z, lM24 = left.Row1.W, - lM31 = left.Row2.X, lM32 = left.Row2.Y, lM33 = left.Row2.Z, lM34 = left.Row2.W, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, rM14 = right.Row0.W, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z, rM24 = right.Row1.W, - rM31 = right.Row2.X, rM32 = right.Row2.Y, rM33 = right.Row2.Z, rM34 = right.Row2.W; - - result.Row0.X = (lM11 * rM11) + (lM12 * rM21) + (lM13 * rM31); - result.Row0.Y = (lM11 * rM12) + (lM12 * rM22) + (lM13 * rM32); - result.Row0.Z = (lM11 * rM13) + (lM12 * rM23) + (lM13 * rM33); - result.Row0.W = (lM11 * rM14) + (lM12 * rM24) + (lM13 * rM34) + lM14; - result.Row1.X = (lM21 * rM11) + (lM22 * rM21) + (lM23 * rM31); - result.Row1.Y = (lM21 * rM12) + (lM22 * rM22) + (lM23 * rM32); - result.Row1.Z = (lM21 * rM13) + (lM22 * rM23) + (lM23 * rM33); - result.Row1.W = (lM21 * rM14) + (lM22 * rM24) + (lM23 * rM34) + lM24; - result.Row2.X = (lM31 * rM11) + (lM32 * rM21) + (lM33 * rM31); - result.Row2.Y = (lM31 * rM12) + (lM32 * rM22) + (lM33 * rM32); - result.Row2.Z = (lM31 * rM13) + (lM32 * rM23) + (lM33 * rM33); - result.Row2.W = (lM31 * rM14) + (lM32 * rM24) + (lM33 * rM34) + lM34; - - /*result.Row0 = (right.Row0 * lM11 + right.Row1 * lM12 + right.Row2 * lM13); - result.Row0.W += lM14; - - result.Row1 = (right.Row0 * lM21 + right.Row1 * lM22 + right.Row2 * lM23); - result.Row1.W += lM24; - - result.Row2 = (right.Row0 * lM31 + right.Row1 * lM32 + right.Row2 * lM33); - result.Row2.W += lM34;*/ - } - - /// - /// Multiplies an instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication - public static Matrix3x4d Mult(Matrix3x4d left, double right) - { - Matrix3x4d result; - Mult(ref left, right, out result); - return result; - } - - /// - /// Multiplies an instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication - public static void Mult(ref Matrix3x4d left, double right, out Matrix3x4d result) - { - result.Row0 = left.Row0 * right; - result.Row1 = left.Row1 * right; - result.Row2 = left.Row2 * right; - } - - #endregion - - #region Add Functions - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. - public static Matrix3x4d Add(Matrix3x4d left, Matrix3x4d right) - { - Matrix3x4d result; - Add(ref left, ref right, out result); - return result; - } - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. - public static void Add(ref Matrix3x4d left, ref Matrix3x4d right, out Matrix3x4d result) - { - result.Row0 = left.Row0 + right.Row0; - result.Row1 = left.Row1 + right.Row1; - result.Row2 = left.Row2 + right.Row2; - } - - #endregion - - #region Subtract Functions - - /// - /// Subtracts one instance from another. - /// - /// The left operand of the subraction. 
- /// The right operand of the subraction. - /// A new instance that is the result of the subraction. - public static Matrix3x4d Subtract(Matrix3x4d left, Matrix3x4d right) - { - Matrix3x4d result; - Subtract(ref left, ref right, out result); - return result; - } - - /// - /// Subtracts one instance from another. - /// - /// The left operand of the subraction. - /// The right operand of the subraction. - /// A new instance that is the result of the subraction. - public static void Subtract(ref Matrix3x4d left, ref Matrix3x4d right, out Matrix3x4d result) - { - result.Row0 = left.Row0 - right.Row0; - result.Row1 = left.Row1 - right.Row1; - result.Row2 = left.Row2 - right.Row2; - } - - #endregion - - #region Invert Functions - - /// - /// Calculate the inverse of the given matrix - /// - /// The matrix to invert - /// The inverse of the given matrix if it has one, or the input if it is singular - /// Thrown if the Matrix4 is singular. - public static Matrix3x4d Invert(Matrix3x4d mat) - { - Matrix3x4d result; - Invert(ref mat, out result); - return result; - } - - /// - /// Calculate the inverse of the given matrix - /// - /// The matrix to invert - /// The inverse of the given matrix if it has one, or the input if it is singular - /// Thrown if the Matrix4 is singular. - public static void Invert(ref Matrix3x4d mat, out Matrix3x4d result) - { - Matrix3d inverseRotation = new Matrix3d(mat.Column0, mat.Column1, mat.Column2); - inverseRotation.Row0 /= inverseRotation.Row0.LengthSquared; - inverseRotation.Row1 /= inverseRotation.Row1.LengthSquared; - inverseRotation.Row2 /= inverseRotation.Row2.LengthSquared; - - Vector3d translation = new Vector3d(mat.Row0.W, mat.Row1.W, mat.Row2.W); - - result.Row0 = new Vector4d(inverseRotation.Row0, -Vector3d.Dot(inverseRotation.Row0, translation)); - result.Row1 = new Vector4d(inverseRotation.Row1, -Vector3d.Dot(inverseRotation.Row1, translation)); - result.Row2 = new Vector4d(inverseRotation.Row2, -Vector3d.Dot(inverseRotation.Row2, translation)); - } - - #endregion - - #region Transpose - - /// - /// Calculate the transpose of the given matrix - /// - /// The matrix to transpose - /// The transpose of the given matrix - public static Matrix4x3d Transpose(Matrix3x4d mat) - { - return new Matrix4x3d(mat.Column0, mat.Column1, mat.Column2, mat.Column3); - } - - /// - /// Calculate the transpose of the given matrix - /// - /// The matrix to transpose - /// The result of the calculation - public static void Transpose(ref Matrix3x4d mat, out Matrix4x3d result) - { - result.Row0 = mat.Column0; - result.Row1 = mat.Column1; - result.Row2 = mat.Column2; - result.Row3 = mat.Column3; - } - - #endregion - - #endregion - - #region Operators - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix3d which holds the result of the multiplication - public static Matrix3d operator *(Matrix3x4d left, Matrix4x3d right) - { - return Matrix3x4d.Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix3x4d which holds the result of the multiplication - public static Matrix3x4d operator *(Matrix3x4d left, Matrix3x4d right) - { - return Matrix3x4d.Mult(left, right); - } - - /// - /// Matrix-scalar multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix3x4d which holds the result of the multiplication - public static Matrix3x4d operator *(Matrix3x4d left, double right) - { - return Matrix3x4d.Mult(left, right); - } - - /// - 
/// Matrix addition - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix3x4d which holds the result of the addition - public static Matrix3x4d operator +(Matrix3x4d left, Matrix3x4d right) - { - return Matrix3x4d.Add(left, right); - } - - /// - /// Matrix subtraction - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix3x4d which holds the result of the subtraction - public static Matrix3x4d operator -(Matrix3x4d left, Matrix3x4d right) - { - return Matrix3x4d.Subtract(left, right); - } - - /// - /// Compares two instances for equality. - /// - /// The first instance. - /// The second instance. - /// True, if left equals right; false otherwise. - public static bool operator ==(Matrix3x4d left, Matrix3x4d right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for inequality. - /// - /// The first instance. - /// The second instance. - /// True, if left does not equal right; false otherwise. - public static bool operator !=(Matrix3x4d left, Matrix3x4d right) - { - return !left.Equals(right); - } - - #endregion - - #region Overrides - - #region public override string ToString() - - /// - /// Returns a System.String that represents the current Matrix4. - /// - /// The string representation of the matrix. - public override string ToString() - { - return string.Format("{0}\n{1}\n{2}", Row0, Row1, Row2); - } - - #endregion - - #region public override int GetHashCode() - - /// - /// Returns the hashcode for this instance. - /// - /// A System.Int32 containing the unique hashcode for this instance. - public override int GetHashCode() - { - return Row0.GetHashCode() ^ Row1.GetHashCode() ^ Row2.GetHashCode(); - } - - #endregion - - #region public override bool Equals(object obj) - - /// - /// Indicates whether this instance and a specified object are equal. - /// - /// The object to compare to. - /// True if the instances are equal; false otherwise. - public override bool Equals(object obj) - { - if (!(obj is Matrix3x4d)) - return false; - - return this.Equals((Matrix3x4d)obj); - } - - #endregion - - #endregion - - #endregion - - #region IEquatable Members - - /// - /// Indicates whether the current matrix is equal to another matrix. - /// - /// An matrix to compare with this matrix. - /// true if the current matrix is equal to the matrix parameter; otherwise, false. - public bool Equals(Matrix3x4d other) - { - return - Row0 == other.Row0 && - Row1 == other.Row1 && - Row2 == other.Row2; - } - - #endregion - } -} diff --git a/OpenTK/Math/Matrix4.cs b/OpenTK/Math/Matrix4.cs deleted file mode 100644 index 198bf708..00000000 --- a/OpenTK/Math/Matrix4.cs +++ /dev/null @@ -1,1736 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - */ -#endregion - -using System; -using System.Runtime.InteropServices; - -namespace OpenTK -{ - /// - /// Represents a 4x4 matrix containing 3D rotation, scale, transform, and projection. - /// - /// - [Serializable] - [StructLayout(LayoutKind.Sequential)] - public struct Matrix4 : IEquatable - { - #region Fields - - /// - /// Top row of the matrix. - /// - public Vector4 Row0; - - /// - /// 2nd row of the matrix. - /// - public Vector4 Row1; - - /// - /// 3rd row of the matrix. - /// - public Vector4 Row2; - - /// - /// Bottom row of the matrix. - /// - public Vector4 Row3; - - /// - /// The identity matrix. - /// - public static readonly Matrix4 Identity = new Matrix4(Vector4.UnitX, Vector4.UnitY, Vector4.UnitZ, Vector4.UnitW); - - /// - /// The zero matrix. - /// - public static readonly Matrix4 Zero = new Matrix4(Vector4.Zero, Vector4.Zero, Vector4.Zero, Vector4.Zero); - - #endregion - - #region Constructors - - /// - /// Constructs a new instance. - /// - /// Top row of the matrix. - /// Second row of the matrix. - /// Third row of the matrix. - /// Bottom row of the matrix. - public Matrix4(Vector4 row0, Vector4 row1, Vector4 row2, Vector4 row3) - { - Row0 = row0; - Row1 = row1; - Row2 = row2; - Row3 = row3; - } - - /// - /// Constructs a new instance. - /// - /// First item of the first row of the matrix. - /// Second item of the first row of the matrix. - /// Third item of the first row of the matrix. - /// Fourth item of the first row of the matrix. - /// First item of the second row of the matrix. - /// Second item of the second row of the matrix. - /// Third item of the second row of the matrix. - /// Fourth item of the second row of the matrix. - /// First item of the third row of the matrix. - /// Second item of the third row of the matrix. - /// Third item of the third row of the matrix. - /// First item of the third row of the matrix. - /// Fourth item of the fourth row of the matrix. - /// Second item of the fourth row of the matrix. - /// Third item of the fourth row of the matrix. - /// Fourth item of the fourth row of the matrix. - public Matrix4( - float m00, float m01, float m02, float m03, - float m10, float m11, float m12, float m13, - float m20, float m21, float m22, float m23, - float m30, float m31, float m32, float m33) - { - Row0 = new Vector4(m00, m01, m02, m03); - Row1 = new Vector4(m10, m11, m12, m13); - Row2 = new Vector4(m20, m21, m22, m23); - Row3 = new Vector4(m30, m31, m32, m33); - } - - #endregion - - #region Public Members - - #region Properties - - /// - /// Gets the determinant of this matrix. 
- /// - public float Determinant - { - get - { - float m11 = Row0.X, m12 = Row0.Y, m13 = Row0.Z, m14 = Row0.W, - m21 = Row1.X, m22 = Row1.Y, m23 = Row1.Z, m24 = Row1.W, - m31 = Row2.X, m32 = Row2.Y, m33 = Row2.Z, m34 = Row2.W, - m41 = Row3.X, m42 = Row3.Y, m43 = Row3.Z, m44 = Row3.W; - - return - m11 * m22 * m33 * m44 - m11 * m22 * m34 * m43 + m11 * m23 * m34 * m42 - m11 * m23 * m32 * m44 - + m11 * m24 * m32 * m43 - m11 * m24 * m33 * m42 - m12 * m23 * m34 * m41 + m12 * m23 * m31 * m44 - - m12 * m24 * m31 * m43 + m12 * m24 * m33 * m41 - m12 * m21 * m33 * m44 + m12 * m21 * m34 * m43 - + m13 * m24 * m31 * m42 - m13 * m24 * m32 * m41 + m13 * m21 * m32 * m44 - m13 * m21 * m34 * m42 - + m13 * m22 * m34 * m41 - m13 * m22 * m31 * m44 - m14 * m21 * m32 * m43 + m14 * m21 * m33 * m42 - - m14 * m22 * m33 * m41 + m14 * m22 * m31 * m43 - m14 * m23 * m31 * m42 + m14 * m23 * m32 * m41; - } - } - - /// - /// Gets the first column of this matrix. - /// - public Vector4 Column0 - { - get { return new Vector4(Row0.X, Row1.X, Row2.X, Row3.X); } - set { Row0.X = value.X; Row1.X = value.Y; Row2.X = value.Z; Row3.X = value.W; } - } - - /// - /// Gets the second column of this matrix. - /// - public Vector4 Column1 - { - get { return new Vector4(Row0.Y, Row1.Y, Row2.Y, Row3.Y); } - set { Row0.Y = value.X; Row1.Y = value.Y; Row2.Y = value.Z; Row3.Y = value.W; } - } - - /// - /// Gets the third column of this matrix. - /// - public Vector4 Column2 - { - get { return new Vector4(Row0.Z, Row1.Z, Row2.Z, Row3.Z); } - set { Row0.Z = value.X; Row1.Z = value.Y; Row2.Z = value.Z; Row3.Z = value.W; } - } - - /// - /// Gets the fourth column of this matrix. - /// - public Vector4 Column3 - { - get { return new Vector4(Row0.W, Row1.W, Row2.W, Row3.W); } - set { Row0.W = value.X; Row1.W = value.Y; Row2.W = value.Z; Row3.W = value.W; } - } - - /// - /// Gets or sets the value at row 1, column 1 of this instance. - /// - public float M11 { get { return Row0.X; } set { Row0.X = value; } } - - /// - /// Gets or sets the value at row 1, column 2 of this instance. - /// - public float M12 { get { return Row0.Y; } set { Row0.Y = value; } } - - /// - /// Gets or sets the value at row 1, column 3 of this instance. - /// - public float M13 { get { return Row0.Z; } set { Row0.Z = value; } } - - /// - /// Gets or sets the value at row 1, column 4 of this instance. - /// - public float M14 { get { return Row0.W; } set { Row0.W = value; } } - - /// - /// Gets or sets the value at row 2, column 1 of this instance. - /// - public float M21 { get { return Row1.X; } set { Row1.X = value; } } - - /// - /// Gets or sets the value at row 2, column 2 of this instance. - /// - public float M22 { get { return Row1.Y; } set { Row1.Y = value; } } - - /// - /// Gets or sets the value at row 2, column 3 of this instance. - /// - public float M23 { get { return Row1.Z; } set { Row1.Z = value; } } - - /// - /// Gets or sets the value at row 2, column 4 of this instance. - /// - public float M24 { get { return Row1.W; } set { Row1.W = value; } } - - /// - /// Gets or sets the value at row 3, column 1 of this instance. - /// - public float M31 { get { return Row2.X; } set { Row2.X = value; } } - - /// - /// Gets or sets the value at row 3, column 2 of this instance. - /// - public float M32 { get { return Row2.Y; } set { Row2.Y = value; } } - - /// - /// Gets or sets the value at row 3, column 3 of this instance. 
- /// - public float M33 { get { return Row2.Z; } set { Row2.Z = value; } } - - /// - /// Gets or sets the value at row 3, column 4 of this instance. - /// - public float M34 { get { return Row2.W; } set { Row2.W = value; } } - - /// - /// Gets or sets the value at row 4, column 1 of this instance. - /// - public float M41 { get { return Row3.X; } set { Row3.X = value; } } - - /// - /// Gets or sets the value at row 4, column 2 of this instance. - /// - public float M42 { get { return Row3.Y; } set { Row3.Y = value; } } - - /// - /// Gets or sets the value at row 4, column 3 of this instance. - /// - public float M43 { get { return Row3.Z; } set { Row3.Z = value; } } - - /// - /// Gets or sets the value at row 4, column 4 of this instance. - /// - public float M44 { get { return Row3.W; } set { Row3.W = value; } } - - /// - /// Gets or sets the values along the main diagonal of the matrix. - /// - public Vector4 Diagonal - { - get - { - return new Vector4(Row0.X, Row1.Y, Row2.Z, Row3.W); - } - set - { - Row0.X = value.X; - Row1.Y = value.Y; - Row2.Z = value.Z; - Row3.W = value.W; - } - } - - /// - /// Gets the trace of the matrix, the sum of the values along the diagonal. - /// - public float Trace { get { return Row0.X + Row1.Y + Row2.Z + Row3.W; } } - - #endregion - - #region Indexers - - /// - /// Gets or sets the value at a specified row and column. - /// - public float this[int rowIndex, int columnIndex] - { - get - { - if (rowIndex == 0) return Row0[columnIndex]; - else if (rowIndex == 1) return Row1[columnIndex]; - else if (rowIndex == 2) return Row2[columnIndex]; - else if (rowIndex == 3) return Row3[columnIndex]; - throw new IndexOutOfRangeException("You tried to access this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - set - { - if (rowIndex == 0) Row0[columnIndex] = value; - else if (rowIndex == 1) Row1[columnIndex] = value; - else if (rowIndex == 2) Row2[columnIndex] = value; - else if (rowIndex == 3) Row3[columnIndex] = value; - else throw new IndexOutOfRangeException("You tried to set this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - } - - #endregion - - #region Instance - - #region public void Invert() - - /// - /// Converts this instance into its inverse. - /// - public void Invert() - { - this = Matrix4.Invert(this); - } - - #endregion - - #region public void Transpose() - - /// - /// Converts this instance into its transpose. - /// - public void Transpose() - { - this = Matrix4.Transpose(this); - } - - #endregion - - /// - /// Returns a normalised copy of this instance. - /// - public Matrix4 Normalized() - { - Matrix4 m = this; - m.Normalize(); - return m; - } - - /// - /// Divides each element in the Matrix by the . - /// - public void Normalize() - { - var determinant = this.Determinant; - Row0 /= determinant; - Row1 /= determinant; - Row2 /= determinant; - Row3 /= determinant; - } - - /// - /// Returns an inverted copy of this instance. - /// - public Matrix4 Inverted() - { - Matrix4 m = this; - if (m.Determinant != 0) - m.Invert(); - return m; - } - - /// - /// Returns a copy of this Matrix4 without translation. - /// - public Matrix4 ClearTranslation() - { - Matrix4 m = this; - m.Row3.Xyz = Vector3.Zero; - return m; - } - /// - /// Returns a copy of this Matrix4 without scale. - /// - public Matrix4 ClearScale() - { - Matrix4 m = this; - m.Row0.Xyz = m.Row0.Xyz.Normalized(); - m.Row1.Xyz = m.Row1.Xyz.Normalized(); - m.Row2.Xyz = m.Row2.Xyz.Normalized(); - return m; - } - /// - /// Returns a copy of this Matrix4 without rotation. 
- /// - public Matrix4 ClearRotation() - { - Matrix4 m = this; - m.Row0.Xyz = new Vector3(m.Row0.Xyz.Length, 0, 0); - m.Row1.Xyz = new Vector3(0, m.Row1.Xyz.Length, 0); - m.Row2.Xyz = new Vector3(0, 0, m.Row2.Xyz.Length); - return m; - } - /// - /// Returns a copy of this Matrix4 without projection. - /// - public Matrix4 ClearProjection() - { - Matrix4 m = this; - m.Column3 = Vector4.Zero; - return m; - } - - /// - /// Returns the translation component of this instance. - /// - public Vector3 ExtractTranslation() { return Row3.Xyz; } - - /// - /// Returns the scale component of this instance. - /// - public Vector3 ExtractScale() { return new Vector3(Row0.Xyz.Length, Row1.Xyz.Length, Row2.Xyz.Length); } - - /// - /// Returns the rotation component of this instance. Quite slow. - /// - /// Whether the method should row-normalise (i.e. remove scale from) the Matrix. Pass false if you know it's already normalised. - public Quaternion ExtractRotation(bool row_normalise = true) - { - var row0 = Row0.Xyz; - var row1 = Row1.Xyz; - var row2 = Row2.Xyz; - - if (row_normalise) - { - row0 = row0.Normalized(); - row1 = row1.Normalized(); - row2 = row2.Normalized(); - } - - // code below adapted from Blender - - Quaternion q = new Quaternion(); - double trace = 0.25 * (row0[0] + row1[1] + row2[2] + 1.0); - - if (trace > 1e-4) - { - double sq = Math.Sqrt(trace); - - q.W = (float)sq; - sq = 1.0 / (4.0 * sq); - q.X = (float)((row1[2] - row2[1]) * sq); - q.Y = (float)((row2[0] - row0[2]) * sq); - q.Z = (float)((row0[1] - row1[0]) * sq); - } - else if (row0[0] > row1[1] && row0[0] > row2[2]) - { - double sq = 2.0 * Math.Sqrt(1.0 + row0[0] - row1[1] - row2[2]); - - q.X = (float)(0.25 * sq); - sq = 1.0 / sq; - q.W = (float)((row1[2] - row2[1]) * sq); - q.Y = (float)((row1[0] + row0[1]) * sq); - q.Z = (float)((row2[0] + row0[2]) * sq); - } - else if (row1[1] > row2[2]) - { - double sq = 2.0 * Math.Sqrt(1.0 + row1[1] - row0[0] - row2[2]); - - q.Y = (float)(0.25 * sq); - sq = 1.0 / sq; - q.W = (float)((row2[0] - row0[2]) * sq); - q.X = (float)((row1[0] + row0[1]) * sq); - q.Z = (float)((row2[1] + row1[2]) * sq); - } - else - { - double sq = 2.0 * Math.Sqrt(1.0 + row2[2] - row0[0] - row1[1]); - - q.Z = (float)(0.25 * sq); - sq = 1.0 / sq; - q.W = (float)((row0[1] - row1[0]) * sq); - q.X = (float)((row2[0] + row0[2]) * sq); - q.Y = (float)((row2[1] + row1[2]) * sq); - } - - q.Normalize(); - return q; - } - - /// - /// Returns the projection component of this instance. - /// - public Vector4 ExtractProjection() - { - return Column3; - } - - #endregion - - #region Static - - #region CreateFromAxisAngle - - /// - /// Build a rotation matrix from the specified axis/angle rotation. - /// - /// The axis to rotate about. - /// Angle in radians to rotate counter-clockwise (looking in the direction of the given axis). - /// A matrix instance. - public static void CreateFromAxisAngle(Vector3 axis, float angle, out Matrix4 result) - { - // normalize and create a local copy of the vector. 
- axis.Normalize(); - float axisX = axis.X, axisY = axis.Y, axisZ = axis.Z; - - // calculate angles - float cos = (float)System.Math.Cos(-angle); - float sin = (float)System.Math.Sin(-angle); - float t = 1.0f - cos; - - // do the conversion math once - float tXX = t * axisX * axisX, - tXY = t * axisX * axisY, - tXZ = t * axisX * axisZ, - tYY = t * axisY * axisY, - tYZ = t * axisY * axisZ, - tZZ = t * axisZ * axisZ; - - float sinX = sin * axisX, - sinY = sin * axisY, - sinZ = sin * axisZ; - - result.Row0.X = tXX + cos; - result.Row0.Y = tXY - sinZ; - result.Row0.Z = tXZ + sinY; - result.Row0.W = 0; - result.Row1.X = tXY + sinZ; - result.Row1.Y = tYY + cos; - result.Row1.Z = tYZ - sinX; - result.Row1.W = 0; - result.Row2.X = tXZ - sinY; - result.Row2.Y = tYZ + sinX; - result.Row2.Z = tZZ + cos; - result.Row2.W = 0; - result.Row3 = Vector4.UnitW; - } - - /// - /// Build a rotation matrix from the specified axis/angle rotation. - /// - /// The axis to rotate about. - /// Angle in radians to rotate counter-clockwise (looking in the direction of the given axis). - /// A matrix instance. - public static Matrix4 CreateFromAxisAngle(Vector3 axis, float angle) - { - Matrix4 result; - CreateFromAxisAngle(axis, angle, out result); - return result; - } - - #endregion - - #region CreateFromQuaternion - - /// - /// Builds a rotation matrix from a quaternion. - /// - /// The quaternion to rotate by. - /// A matrix instance. - public static void CreateFromQuaternion(ref Quaternion q, out Matrix4 result) - { - Vector3 axis; - float angle; - q.ToAxisAngle(out axis, out angle); - CreateFromAxisAngle(axis, angle, out result); - } - - /// - /// Builds a rotation matrix from a quaternion. - /// - /// The quaternion to rotate by. - /// A matrix instance. - public static Matrix4 CreateFromQuaternion(Quaternion q) - { - Matrix4 result; - CreateFromQuaternion(ref q, out result); - return result; - } - - #endregion - - #region CreateRotation[XYZ] - - /// - /// Builds a rotation matrix for a rotation around the x-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4 instance. - public static void CreateRotationX(float angle, out Matrix4 result) - { - float cos = (float)System.Math.Cos(angle); - float sin = (float)System.Math.Sin(angle); - - result = Identity; - result.Row1.Y = cos; - result.Row1.Z = sin; - result.Row2.Y = -sin; - result.Row2.Z = cos; - } - - /// - /// Builds a rotation matrix for a rotation around the x-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4 instance. - public static Matrix4 CreateRotationX(float angle) - { - Matrix4 result; - CreateRotationX(angle, out result); - return result; - } - - /// - /// Builds a rotation matrix for a rotation around the y-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4 instance. - public static void CreateRotationY(float angle, out Matrix4 result) - { - float cos = (float)System.Math.Cos(angle); - float sin = (float)System.Math.Sin(angle); - - result = Identity; - result.Row0.X = cos; - result.Row0.Z = -sin; - result.Row2.X = sin; - result.Row2.Z = cos; - } - - /// - /// Builds a rotation matrix for a rotation around the y-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4 instance. - public static Matrix4 CreateRotationY(float angle) - { - Matrix4 result; - CreateRotationY(angle, out result); - return result; - } - - /// - /// Builds a rotation matrix for a rotation around the z-axis. 
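A standalone sketch of the axis/angle construction used by CreateFromAxisAngle above, reduced to the 3x3 rotation block and written against plain arrays (the names here are illustrative and not part of the library):

    using System;

    static class AxisAngleDemo
    {
        // Same terms as CreateFromAxisAngle above: the t-scaled outer product of the
        // axis, cos on the diagonal, and sin-scaled axis components off the diagonal.
        static float[,] RotationFromAxisAngle(float ax, float ay, float az, float angle)
        {
            float len = MathF.Sqrt(ax * ax + ay * ay + az * az);
            ax /= len; ay /= len; az /= len;

            float cos = MathF.Cos(-angle), sin = MathF.Sin(-angle), t = 1f - cos;

            return new float[,]
            {
                { t * ax * ax + cos,      t * ax * ay - sin * az, t * ax * az + sin * ay },
                { t * ax * ay + sin * az, t * ay * ay + cos,      t * ay * az - sin * ax },
                { t * ax * az - sin * ay, t * ay * az + sin * ax, t * az * az + cos      },
            };
        }

        static void Main()
        {
            // 90 degrees counter-clockwise about +Z, applied to the row vector (1, 0, 0).
            var r = RotationFromAxisAngle(0, 0, 1, MathF.PI / 2);
            float x = 1, y = 0, z = 0;
            Console.WriteLine((x * r[0, 0] + y * r[1, 0] + z * r[2, 0],
                               x * r[0, 1] + y * r[1, 1] + z * r[2, 1],
                               x * r[0, 2] + y * r[1, 2] + z * r[2, 2])); // roughly (0, 1, 0)
        }
    }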
- /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4 instance. - public static void CreateRotationZ(float angle, out Matrix4 result) - { - float cos = (float)System.Math.Cos(angle); - float sin = (float)System.Math.Sin(angle); - - result = Identity; - result.Row0.X = cos; - result.Row0.Y = sin; - result.Row1.X = -sin; - result.Row1.Y = cos; - } - - /// - /// Builds a rotation matrix for a rotation around the z-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4 instance. - public static Matrix4 CreateRotationZ(float angle) - { - Matrix4 result; - CreateRotationZ(angle, out result); - return result; - } - - #endregion - - #region CreateTranslation - - /// - /// Creates a translation matrix. - /// - /// X translation. - /// Y translation. - /// Z translation. - /// The resulting Matrix4 instance. - public static void CreateTranslation(float x, float y, float z, out Matrix4 result) - { - result = Identity; - result.Row3.X = x; - result.Row3.Y = y; - result.Row3.Z = z; - } - - /// - /// Creates a translation matrix. - /// - /// The translation vector. - /// The resulting Matrix4 instance. - public static void CreateTranslation(ref Vector3 vector, out Matrix4 result) - { - result = Identity; - result.Row3.X = vector.X; - result.Row3.Y = vector.Y; - result.Row3.Z = vector.Z; - } - - /// - /// Creates a translation matrix. - /// - /// X translation. - /// Y translation. - /// Z translation. - /// The resulting Matrix4 instance. - public static Matrix4 CreateTranslation(float x, float y, float z) - { - Matrix4 result; - CreateTranslation(x, y, z, out result); - return result; - } - - /// - /// Creates a translation matrix. - /// - /// The translation vector. - /// The resulting Matrix4 instance. - public static Matrix4 CreateTranslation(Vector3 vector) - { - Matrix4 result; - CreateTranslation(vector.X, vector.Y, vector.Z, out result); - return result; - } - - #endregion - - #region CreateScale - - /// - /// Creates a scale matrix. - /// - /// Single scale factor for the x, y, and z axes. - /// A scale matrix. - public static Matrix4 CreateScale(float scale) - { - Matrix4 result; - CreateScale(scale, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factors for the x, y, and z axes. - /// A scale matrix. - public static Matrix4 CreateScale(Vector3 scale) - { - Matrix4 result; - CreateScale(ref scale, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factor for the x axis. - /// Scale factor for the y axis. - /// Scale factor for the z axis. - /// A scale matrix. - public static Matrix4 CreateScale(float x, float y, float z) - { - Matrix4 result; - CreateScale(x, y, z, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Single scale factor for the x, y, and z axes. - /// A scale matrix. - public static void CreateScale(float scale, out Matrix4 result) - { - result = Identity; - result.Row0.X = scale; - result.Row1.Y = scale; - result.Row2.Z = scale; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factors for the x, y, and z axes. - /// A scale matrix. - public static void CreateScale(ref Vector3 scale, out Matrix4 result) - { - result = Identity; - result.Row0.X = scale.X; - result.Row1.Y = scale.Y; - result.Row2.Z = scale.Z; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factor for the x axis. - /// Scale factor for the y axis. - /// Scale factor for the z axis. - /// A scale matrix. 
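Because these factories target a row-vector convention (a point p is transformed as p * M), composition order matters: a scale multiplied in after a translation scales the translation as well. A short illustration, again using System.Numerics.Matrix4x4 only as a stand-in with the same convention:

    using System;
    using System.Numerics;

    static class ComposeOrderDemo
    {
        static void Main()
        {
            Matrix4x4 t = Matrix4x4.CreateTranslation(10, 0, 0);
            Matrix4x4 s = Matrix4x4.CreateScale(2);

            // Translate first, then scale: the translation is scaled too.
            Console.WriteLine((t * s).Translation); // <20, 0, 0>

            // Scale first, then translate: the translation is untouched.
            Console.WriteLine((s * t).Translation); // <10, 0, 0>
        }
    }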
- public static void CreateScale(float x, float y, float z, out Matrix4 result) - { - result = Identity; - result.Row0.X = x; - result.Row1.Y = y; - result.Row2.Z = z; - } - - #endregion - - #region CreateOrthographic - - /// - /// Creates an orthographic projection matrix. - /// - /// The width of the projection volume. - /// The height of the projection volume. - /// The near edge of the projection volume. - /// The far edge of the projection volume. - /// The resulting Matrix4 instance. - public static void CreateOrthographic(float width, float height, float zNear, float zFar, out Matrix4 result) - { - CreateOrthographicOffCenter(-width / 2, width / 2, -height / 2, height / 2, zNear, zFar, out result); - } - - /// - /// Creates an orthographic projection matrix. - /// - /// The width of the projection volume. - /// The height of the projection volume. - /// The near edge of the projection volume. - /// The far edge of the projection volume. - /// The resulting Matrix4 instance. - public static Matrix4 CreateOrthographic(float width, float height, float zNear, float zFar) - { - Matrix4 result; - CreateOrthographicOffCenter(-width / 2, width / 2, -height / 2, height / 2, zNear, zFar, out result); - return result; - } - - #endregion - - #region CreateOrthographicOffCenter - - /// - /// Creates an orthographic projection matrix. - /// - /// The left edge of the projection volume. - /// The right edge of the projection volume. - /// The bottom edge of the projection volume. - /// The top edge of the projection volume. - /// The near edge of the projection volume. - /// The far edge of the projection volume. - /// The resulting Matrix4 instance. - public static void CreateOrthographicOffCenter(float left, float right, float bottom, float top, float zNear, float zFar, out Matrix4 result) - { - result = Identity; - - float invRL = 1.0f / (right - left); - float invTB = 1.0f / (top - bottom); - float invFN = 1.0f / (zFar - zNear); - - result.Row0.X = 2 * invRL; - result.Row1.Y = 2 * invTB; - result.Row2.Z = -2 * invFN; - - result.Row3.X = -(right + left) * invRL; - result.Row3.Y = -(top + bottom) * invTB; - result.Row3.Z = -(zFar + zNear) * invFN; - } - - /// - /// Creates an orthographic projection matrix. - /// - /// The left edge of the projection volume. - /// The right edge of the projection volume. - /// The bottom edge of the projection volume. - /// The top edge of the projection volume. - /// The near edge of the projection volume. - /// The far edge of the projection volume. - /// The resulting Matrix4 instance. - public static Matrix4 CreateOrthographicOffCenter(float left, float right, float bottom, float top, float zNear, float zFar) - { - Matrix4 result; - CreateOrthographicOffCenter(left, right, bottom, top, zNear, zFar, out result); - return result; - } - - #endregion - - #region CreatePerspectiveFieldOfView - - /// - /// Creates a perspective projection matrix. 
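A quick way to sanity-check the off-center orthographic formulas above is to push a corner of the view volume through the matrix and confirm it lands on the matching NDC corner. A self-contained sketch (names are illustrative only):

    using System;

    static class OrthoDemo
    {
        static void Main()
        {
            float left = -4, right = 4, bottom = -3, top = 3, zNear = 0.1f, zFar = 100f;

            float invRL = 1f / (right - left);
            float invTB = 1f / (top - bottom);
            float invFN = 1f / (zFar - zNear);

            // Diagonal and fourth row, exactly as in CreateOrthographicOffCenter above.
            float m00 = 2 * invRL, m11 = 2 * invTB, m22 = -2 * invFN;
            float m30 = -(right + left) * invRL;
            float m31 = -(top + bottom) * invTB;
            float m32 = -(zFar + zNear) * invFN;

            // Row-vector transform of the lower-left corner on the near plane
            // (left, bottom, -zNear, 1); it should map to the NDC corner (-1, -1, -1).
            float x = left, y = bottom, z = -zNear;
            Console.WriteLine((x * m00 + m30, y * m11 + m31, z * m22 + m32));
        }
    }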
- /// - /// Angle of the field of view in the y direction (in radians) - /// Aspect ratio of the view (width / height) - /// Distance to the near clip plane - /// Distance to the far clip plane - /// A projection matrix that transforms camera space to raster space - /// - /// Thrown under the following conditions: - /// - /// fovy is zero, less than zero or larger than Math.PI - /// aspect is negative or zero - /// zNear is negative or zero - /// zFar is negative or zero - /// zNear is larger than zFar - /// - /// - public static void CreatePerspectiveFieldOfView(float fovy, float aspect, float zNear, float zFar, out Matrix4 result) - { - if (fovy <= 0 || fovy > Math.PI) - throw new ArgumentOutOfRangeException("fovy"); - if (aspect <= 0) - throw new ArgumentOutOfRangeException("aspect"); - if (zNear <= 0) - throw new ArgumentOutOfRangeException("zNear"); - if (zFar <= 0) - throw new ArgumentOutOfRangeException("zFar"); - - float yMax = zNear * (float)System.Math.Tan(0.5f * fovy); - float yMin = -yMax; - float xMin = yMin * aspect; - float xMax = yMax * aspect; - - CreatePerspectiveOffCenter(xMin, xMax, yMin, yMax, zNear, zFar, out result); - } - - /// - /// Creates a perspective projection matrix. - /// - /// Angle of the field of view in the y direction (in radians) - /// Aspect ratio of the view (width / height) - /// Distance to the near clip plane - /// Distance to the far clip plane - /// A projection matrix that transforms camera space to raster space - /// - /// Thrown under the following conditions: - /// - /// fovy is zero, less than zero or larger than Math.PI - /// aspect is negative or zero - /// zNear is negative or zero - /// zFar is negative or zero - /// zNear is larger than zFar - /// - /// - public static Matrix4 CreatePerspectiveFieldOfView(float fovy, float aspect, float zNear, float zFar) - { - Matrix4 result; - CreatePerspectiveFieldOfView(fovy, aspect, zNear, zFar, out result); - return result; - } - - #endregion - - #region CreatePerspectiveOffCenter - - /// - /// Creates an perspective projection matrix. 
- /// - /// Left edge of the view frustum - /// Right edge of the view frustum - /// Bottom edge of the view frustum - /// Top edge of the view frustum - /// Distance to the near clip plane - /// Distance to the far clip plane - /// A projection matrix that transforms camera space to raster space - /// - /// Thrown under the following conditions: - /// - /// zNear is negative or zero - /// zFar is negative or zero - /// zNear is larger than zFar - /// - /// - public static void CreatePerspectiveOffCenter(float left, float right, float bottom, float top, float zNear, float zFar, out Matrix4 result) - { - if (zNear <= 0) - throw new ArgumentOutOfRangeException("zNear"); - if (zFar <= 0) - throw new ArgumentOutOfRangeException("zFar"); - if (zNear >= zFar) - throw new ArgumentOutOfRangeException("zNear"); - - float x = (2.0f * zNear) / (right - left); - float y = (2.0f * zNear) / (top - bottom); - float a = (right + left) / (right - left); - float b = (top + bottom) / (top - bottom); - float c = -(zFar + zNear) / (zFar - zNear); - float d = -(2.0f * zFar * zNear) / (zFar - zNear); - - result.Row0.X = x; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row0.W = 0; - result.Row1.X = 0; - result.Row1.Y = y; - result.Row1.Z = 0; - result.Row1.W = 0; - result.Row2.X = a; - result.Row2.Y = b; - result.Row2.Z = c; - result.Row2.W = -1; - result.Row3.X = 0; - result.Row3.Y = 0; - result.Row3.Z = d; - result.Row3.W = 0; - } - - /// - /// Creates an perspective projection matrix. - /// - /// Left edge of the view frustum - /// Right edge of the view frustum - /// Bottom edge of the view frustum - /// Top edge of the view frustum - /// Distance to the near clip plane - /// Distance to the far clip plane - /// A projection matrix that transforms camera space to raster space - /// - /// Thrown under the following conditions: - /// - /// zNear is negative or zero - /// zFar is negative or zero - /// zNear is larger than zFar - /// - /// - public static Matrix4 CreatePerspectiveOffCenter(float left, float right, float bottom, float top, float zNear, float zFar) - { - Matrix4 result; - CreatePerspectiveOffCenter(left, right, bottom, top, zNear, zFar, out result); - return result; - } - - #endregion - - #region Obsolete Functions - - #region Translation Functions - - /// - /// Builds a translation matrix. - /// - /// The translation vector. - /// A new Matrix4 instance. 
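CreatePerspectiveFieldOfView above only derives the frustum bounds from the vertical field of view and then defers to CreatePerspectiveOffCenter; the depth behaviour is easiest to see by checking that a point on the near plane divides out to NDC z = -1. A self-contained sketch (illustrative names only):

    using System;

    static class PerspectiveDemo
    {
        static void Main()
        {
            float fovy = MathF.PI / 3, aspect = 16f / 9f, zNear = 0.1f, zFar = 100f;

            // Frustum bounds from the field of view, as in CreatePerspectiveFieldOfView above.
            float yMax = zNear * MathF.Tan(0.5f * fovy);
            float yMin = -yMax, xMin = yMin * aspect, xMax = yMax * aspect;

            // Depth-related entries of the off-center matrix.
            float c = -(zFar + zNear) / (zFar - zNear);
            float d = -(2f * zFar * zNear) / (zFar - zNear);

            // Row vector (0, 0, -zNear, 1): clip z = -zNear * c + d, clip w = zNear
            // (from the -1 stored in Row2.W), so NDC z comes out as -1.
            float clipZ = -zNear * c + d;
            float clipW = zNear;
            Console.WriteLine(clipZ / clipW);            // -1
            Console.WriteLine((xMin, xMax, yMin, yMax)); // the bounds handed to the off-center overload
        }
    }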
- [Obsolete("Use CreateTranslation instead.")] - public static Matrix4 Translation(Vector3 trans) - { - return CreateTranslation(trans); - } - - /// - /// Build a translation matrix with the given translation - /// - /// X translation - /// Y translation - /// Z translation - /// A Translation matrix - [Obsolete("Use CreateTranslation instead.")] - public static Matrix4 Translation(float x, float y, float z) - { - return CreateTranslation(x, y, z); - } - - #endregion - - #region Rotation Functions - - /// - /// Build a rotation matrix that rotates about the x-axis - /// - /// angle in radians to rotate counter-clockwise around the x-axis - /// A rotation matrix - [Obsolete("Use CreateRotationX instead.")] - public static Matrix4 RotateX(float angle) - { - return CreateRotationX(angle); - } - - /// - /// Build a rotation matrix that rotates about the y-axis - /// - /// angle in radians to rotate counter-clockwise around the y-axis - /// A rotation matrix - [Obsolete("Use CreateRotationY instead.")] - public static Matrix4 RotateY(float angle) - { - return CreateRotationY(angle); - } - - /// - /// Build a rotation matrix that rotates about the z-axis - /// - /// angle in radians to rotate counter-clockwise around the z-axis - /// A rotation matrix - [Obsolete("Use CreateRotationZ instead.")] - public static Matrix4 RotateZ(float angle) - { - return CreateRotationZ(angle); - } - - /// - /// Build a rotation matrix to rotate about the given axis - /// - /// the axis to rotate about - /// angle in radians to rotate counter-clockwise (looking in the direction of the given axis) - /// A rotation matrix - [Obsolete("Use CreateFromAxisAngle instead.")] - public static Matrix4 Rotate(Vector3 axis, float angle) - { - return CreateFromAxisAngle(axis, angle); - } - - /// - /// Build a rotation matrix from a quaternion - /// - /// the quaternion - /// A rotation matrix - [Obsolete("Use CreateRotation instead.")] - public static Matrix4 Rotate(Quaternion q) - { - return CreateFromQuaternion(q); - } - - #endregion - - #region Scale Functions - - /// - /// Build a scaling matrix - /// - /// Single scale factor for x,y and z axes - /// A scaling matrix - [Obsolete("Use CreateScale instead.")] - public static Matrix4 Scale(float scale) - { - return Scale(scale, scale, scale); - } - - /// - /// Build a scaling matrix - /// - /// Scale factors for x,y and z axes - /// A scaling matrix - [Obsolete("Use CreateScale instead.")] - public static Matrix4 Scale(Vector3 scale) - { - return Scale(scale.X, scale.Y, scale.Z); - } - - /// - /// Build a scaling matrix - /// - /// Scale factor for x-axis - /// Scale factor for y-axis - /// Scale factor for z-axis - /// A scaling matrix - [Obsolete("Use CreateScale instead.")] - public static Matrix4 Scale(float x, float y, float z) - { - Matrix4 result; - result.Row0 = Vector4.UnitX * x; - result.Row1 = Vector4.UnitY * y; - result.Row2 = Vector4.UnitZ * z; - result.Row3 = Vector4.UnitW; - return result; - } - - #endregion - - #region Camera Helper Functions - - /// - /// Build a projection matrix - /// - /// Left edge of the view frustum - /// Right edge of the view frustum - /// Bottom edge of the view frustum - /// Top edge of the view frustum - /// Distance to the near clip plane - /// Distance to the far clip plane - /// A projection matrix that transforms camera space to raster space - [Obsolete("Use CreatePerspectiveOffCenter instead.")] - public static Matrix4 Frustum(float left, float right, float bottom, float top, float near, float far) - { - float invRL = 1.0f / 
(right - left); - float invTB = 1.0f / (top - bottom); - float invFN = 1.0f / (far - near); - return new Matrix4(new Vector4(2.0f * near * invRL, 0.0f, 0.0f, 0.0f), - new Vector4(0.0f, 2.0f * near * invTB, 0.0f, 0.0f), - new Vector4((right + left) * invRL, (top + bottom) * invTB, -(far + near) * invFN, -1.0f), - new Vector4(0.0f, 0.0f, -2.0f * far * near * invFN, 0.0f)); - } - - /// - /// Build a projection matrix - /// - /// Angle of the field of view in the y direction (in radians) - /// Aspect ratio of the view (width / height) - /// Distance to the near clip plane - /// Distance to the far clip plane - /// A projection matrix that transforms camera space to raster space - [Obsolete("Use CreatePerspectiveFieldOfView instead.")] - public static Matrix4 Perspective(float fovy, float aspect, float near, float far) - { - float yMax = near * (float)System.Math.Tan(0.5f * fovy); - float yMin = -yMax; - float xMin = yMin * aspect; - float xMax = yMax * aspect; - - return Frustum(xMin, xMax, yMin, yMax, near, far); - } - - #endregion - - #endregion - - #region Camera Helper Functions - - /// - /// Build a world space to camera space matrix - /// - /// Eye (camera) position in world space - /// Target position in world space - /// Up vector in world space (should not be parallel to the camera direction, that is target - eye) - /// A Matrix4 that transforms world space to camera space - public static Matrix4 LookAt(Vector3 eye, Vector3 target, Vector3 up) - { - Vector3 z = Vector3.Normalize(eye - target); - Vector3 x = Vector3.Normalize(Vector3.Cross(up, z)); - Vector3 y = Vector3.Normalize(Vector3.Cross(z, x)); - - Matrix4 result; - - result.Row0.X = x.X; - result.Row0.Y = y.X; - result.Row0.Z = z.X; - result.Row0.W = 0; - result.Row1.X = x.Y; - result.Row1.Y = y.Y; - result.Row1.Z = z.Y; - result.Row1.W = 0; - result.Row2.X = x.Z; - result.Row2.Y = y.Z; - result.Row2.Z = z.Z; - result.Row2.W = 0; - result.Row3.X = -((x.X * eye.X) + (x.Y * eye.Y) + (x.Z * eye.Z)); - result.Row3.Y = -((y.X * eye.X) + (y.Y * eye.Y) + (y.Z * eye.Z)); - result.Row3.Z = -((z.X * eye.X) + (z.Y * eye.Y) + (z.Z * eye.Z)); - result.Row3.W = 1; - - return result; - } - - /// - /// Build a world space to camera space matrix - /// - /// Eye (camera) position in world space - /// Eye (camera) position in world space - /// Eye (camera) position in world space - /// Target position in world space - /// Target position in world space - /// Target position in world space - /// Up vector in world space (should not be parallel to the camera direction, that is target - eye) - /// Up vector in world space (should not be parallel to the camera direction, that is target - eye) - /// Up vector in world space (should not be parallel to the camera direction, that is target - eye) - /// A Matrix4 that transforms world space to camera space - public static Matrix4 LookAt(float eyeX, float eyeY, float eyeZ, float targetX, float targetY, float targetZ, float upX, float upY, float upZ) - { - return LookAt(new Vector3(eyeX, eyeY, eyeZ), new Vector3(targetX, targetY, targetZ), new Vector3(upX, upY, upZ)); - } - - #endregion - - #region Add Functions - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. - public static Matrix4 Add(Matrix4 left, Matrix4 right) - { - Matrix4 result; - Add(ref left, ref right, out result); - return result; - } - - /// - /// Adds two instances. - /// - /// The left operand of the addition. 
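LookAt above builds a right-handed basis from eye, target and up, stores it column-wise in the upper 3x3, and writes the negated dot products with the eye position into the fourth row; the net effect is that a world-space point maps to its coordinates relative to the camera frame. A small sketch of that mapping, with System.Numerics.Vector3 standing in for the vector type used here:

    using System;
    using System.Numerics;

    static class LookAtDemo
    {
        static void Main()
        {
            Vector3 eye = new Vector3(1, 2, 3), target = new Vector3(1, 2, 0), up = Vector3.UnitY;

            // Same basis construction as LookAt above: +z points from the target back to the eye.
            Vector3 z = Vector3.Normalize(eye - target);
            Vector3 x = Vector3.Normalize(Vector3.Cross(up, z));
            Vector3 y = Vector3.Normalize(Vector3.Cross(z, x));

            // p * LookAt is equivalent to projecting (p - eye) onto the camera axes.
            Vector3 View(Vector3 p) => new Vector3(
                Vector3.Dot(p - eye, x), Vector3.Dot(p - eye, y), Vector3.Dot(p - eye, z));

            Console.WriteLine(View(eye));    // <0, 0, 0>
            Console.WriteLine(View(target)); // <0, 0, -3>: straight ahead, 3 units away
        }
    }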
- /// The right operand of the addition. - /// A new instance that is the result of the addition. - public static void Add(ref Matrix4 left, ref Matrix4 right, out Matrix4 result) - { - result.Row0 = left.Row0 + right.Row0; - result.Row1 = left.Row1 + right.Row1; - result.Row2 = left.Row2 + right.Row2; - result.Row3 = left.Row3 + right.Row3; - } - - #endregion - - #region Subtract Functions - - /// - /// Subtracts one instance from another. - /// - /// The left operand of the subraction. - /// The right operand of the subraction. - /// A new instance that is the result of the subraction. - public static Matrix4 Subtract(Matrix4 left, Matrix4 right) - { - Matrix4 result; - Subtract(ref left, ref right, out result); - return result; - } - - /// - /// Subtracts one instance from another. - /// - /// The left operand of the subraction. - /// The right operand of the subraction. - /// A new instance that is the result of the subraction. - public static void Subtract(ref Matrix4 left, ref Matrix4 right, out Matrix4 result) - { - result.Row0 = left.Row0 - right.Row0; - result.Row1 = left.Row1 - right.Row1; - result.Row2 = left.Row2 - right.Row2; - result.Row3 = left.Row3 - right.Row3; - } - - #endregion - - #region Multiply Functions - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix4 Mult(Matrix4 left, Matrix4 right) - { - Matrix4 result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. 
- public static void Mult(ref Matrix4 left, ref Matrix4 right, out Matrix4 result) - { - float lM11 = left.Row0.X, lM12 = left.Row0.Y, lM13 = left.Row0.Z, lM14 = left.Row0.W, - lM21 = left.Row1.X, lM22 = left.Row1.Y, lM23 = left.Row1.Z, lM24 = left.Row1.W, - lM31 = left.Row2.X, lM32 = left.Row2.Y, lM33 = left.Row2.Z, lM34 = left.Row2.W, - lM41 = left.Row3.X, lM42 = left.Row3.Y, lM43 = left.Row3.Z, lM44 = left.Row3.W, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, rM14 = right.Row0.W, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z, rM24 = right.Row1.W, - rM31 = right.Row2.X, rM32 = right.Row2.Y, rM33 = right.Row2.Z, rM34 = right.Row2.W, - rM41 = right.Row3.X, rM42 = right.Row3.Y, rM43 = right.Row3.Z, rM44 = right.Row3.W; - - result.Row0.X = (((lM11 * rM11) + (lM12 * rM21)) + (lM13 * rM31)) + (lM14 * rM41); - result.Row0.Y = (((lM11 * rM12) + (lM12 * rM22)) + (lM13 * rM32)) + (lM14 * rM42); - result.Row0.Z = (((lM11 * rM13) + (lM12 * rM23)) + (lM13 * rM33)) + (lM14 * rM43); - result.Row0.W = (((lM11 * rM14) + (lM12 * rM24)) + (lM13 * rM34)) + (lM14 * rM44); - result.Row1.X = (((lM21 * rM11) + (lM22 * rM21)) + (lM23 * rM31)) + (lM24 * rM41); - result.Row1.Y = (((lM21 * rM12) + (lM22 * rM22)) + (lM23 * rM32)) + (lM24 * rM42); - result.Row1.Z = (((lM21 * rM13) + (lM22 * rM23)) + (lM23 * rM33)) + (lM24 * rM43); - result.Row1.W = (((lM21 * rM14) + (lM22 * rM24)) + (lM23 * rM34)) + (lM24 * rM44); - result.Row2.X = (((lM31 * rM11) + (lM32 * rM21)) + (lM33 * rM31)) + (lM34 * rM41); - result.Row2.Y = (((lM31 * rM12) + (lM32 * rM22)) + (lM33 * rM32)) + (lM34 * rM42); - result.Row2.Z = (((lM31 * rM13) + (lM32 * rM23)) + (lM33 * rM33)) + (lM34 * rM43); - result.Row2.W = (((lM31 * rM14) + (lM32 * rM24)) + (lM33 * rM34)) + (lM34 * rM44); - result.Row3.X = (((lM41 * rM11) + (lM42 * rM21)) + (lM43 * rM31)) + (lM44 * rM41); - result.Row3.Y = (((lM41 * rM12) + (lM42 * rM22)) + (lM43 * rM32)) + (lM44 * rM42); - result.Row3.Z = (((lM41 * rM13) + (lM42 * rM23)) + (lM43 * rM33)) + (lM44 * rM43); - result.Row3.W = (((lM41 * rM14) + (lM42 * rM24)) + (lM43 * rM34)) + (lM44 * rM44); - } - - /// - /// Multiplies an instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication - public static Matrix4 Mult(Matrix4 left, float right) - { - Matrix4 result; - Mult(ref left, right, out result); - return result; - } - - /// - /// Multiplies an instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication - public static void Mult(ref Matrix4 left, float right, out Matrix4 result) - { - result.Row0 = left.Row0 * right; - result.Row1 = left.Row1 * right; - result.Row2 = left.Row2 * right; - result.Row3 = left.Row3 * right; - } - - #endregion - - #region Invert Functions - - /// - /// Calculate the inverse of the given matrix - /// - /// The matrix to invert - /// The inverse of the given matrix if it has one, or the input if it is singular - /// Thrown if the Matrix4 is singular. 
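The fully unrolled Mult above is equivalent to the usual triple loop, result[i, j] = sum over k of left[i, k] * right[k, j]; a compact reference version (illustrative only) can be handy when checking it:

    using System;

    static class MultiplyDemo
    {
        static float[,] Mult(float[,] a, float[,] b)
        {
            var r = new float[4, 4];
            for (int i = 0; i < 4; i++)
                for (int j = 0; j < 4; j++)
                    for (int k = 0; k < 4; k++)
                        r[i, j] += a[i, k] * b[k, j];
            return r;
        }

        static void Main()
        {
            var identity = new float[4, 4];
            for (int i = 0; i < 4; i++) identity[i, i] = 1;

            var m = new float[4, 4] { { 1, 2, 3, 4 }, { 5, 6, 7, 8 }, { 9, 10, 11, 12 }, { 13, 14, 15, 16 } };
            Console.WriteLine(Mult(m, identity)[2, 3]); // 12: multiplying by identity changes nothing
        }
    }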
- public static void Invert(ref Matrix4 mat, out Matrix4 result) - { - int[] colIdx = { 0, 0, 0, 0 }; - int[] rowIdx = { 0, 0, 0, 0 }; - int[] pivotIdx = { -1, -1, -1, -1 }; - - // convert the matrix to an array for easy looping - float[,] inverse = {{mat.Row0.X, mat.Row0.Y, mat.Row0.Z, mat.Row0.W}, - {mat.Row1.X, mat.Row1.Y, mat.Row1.Z, mat.Row1.W}, - {mat.Row2.X, mat.Row2.Y, mat.Row2.Z, mat.Row2.W}, - {mat.Row3.X, mat.Row3.Y, mat.Row3.Z, mat.Row3.W} }; - int icol = 0; - int irow = 0; - for (int i = 0; i < 4; i++) - { - // Find the largest pivot value - float maxPivot = 0.0f; - for (int j = 0; j < 4; j++) - { - if (pivotIdx[j] != 0) - { - for (int k = 0; k < 4; ++k) - { - if (pivotIdx[k] == -1) - { - float absVal = System.Math.Abs(inverse[j, k]); - if (absVal > maxPivot) - { - maxPivot = absVal; - irow = j; - icol = k; - } - } - else if (pivotIdx[k] > 0) - { - result = mat; - return; - } - } - } - } - - ++(pivotIdx[icol]); - - // Swap rows over so pivot is on diagonal - if (irow != icol) - { - for (int k = 0; k < 4; ++k) - { - float f = inverse[irow, k]; - inverse[irow, k] = inverse[icol, k]; - inverse[icol, k] = f; - } - } - - rowIdx[i] = irow; - colIdx[i] = icol; - - float pivot = inverse[icol, icol]; - // check for singular matrix - if (pivot == 0.0f) - { - throw new InvalidOperationException("Matrix is singular and cannot be inverted."); - } - - // Scale row so it has a unit diagonal - float oneOverPivot = 1.0f / pivot; - inverse[icol, icol] = 1.0f; - for (int k = 0; k < 4; ++k) - inverse[icol, k] *= oneOverPivot; - - // Do elimination of non-diagonal elements - for (int j = 0; j < 4; ++j) - { - // check this isn't on the diagonal - if (icol != j) - { - float f = inverse[j, icol]; - inverse[j, icol] = 0.0f; - for (int k = 0; k < 4; ++k) - inverse[j, k] -= inverse[icol, k] * f; - } - } - } - - for (int j = 3; j >= 0; --j) - { - int ir = rowIdx[j]; - int ic = colIdx[j]; - for (int k = 0; k < 4; ++k) - { - float f = inverse[k, ir]; - inverse[k, ir] = inverse[k, ic]; - inverse[k, ic] = f; - } - } - - result.Row0.X = inverse[0, 0]; - result.Row0.Y = inverse[0, 1]; - result.Row0.Z = inverse[0, 2]; - result.Row0.W = inverse[0, 3]; - result.Row1.X = inverse[1, 0]; - result.Row1.Y = inverse[1, 1]; - result.Row1.Z = inverse[1, 2]; - result.Row1.W = inverse[1, 3]; - result.Row2.X = inverse[2, 0]; - result.Row2.Y = inverse[2, 1]; - result.Row2.Z = inverse[2, 2]; - result.Row2.W = inverse[2, 3]; - result.Row3.X = inverse[3, 0]; - result.Row3.Y = inverse[3, 1]; - result.Row3.Z = inverse[3, 2]; - result.Row3.W = inverse[3, 3]; - } - - /// - /// Calculate the inverse of the given matrix - /// - /// The matrix to invert - /// The inverse of the given matrix if it has one, or the input if it is singular - /// Thrown if the Matrix4 is singular. 
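The Gauss-Jordan elimination above is easiest to sanity-check at the call site by multiplying a matrix with its inverse. A short usage-level sketch; System.Numerics.Matrix4x4 is used only as a stand-in here (its inverse routine is implemented differently, but the invert-and-verify pattern is the same):

    using System;
    using System.Numerics;

    static class InvertDemo
    {
        static void Main()
        {
            // Scale by 2, then translate: p * m = 2p + (3, 4, 5).
            Matrix4x4 m = Matrix4x4.CreateScale(2) * Matrix4x4.CreateTranslation(3, 4, 5);

            // Invert returns false for a singular matrix (the code above throws instead).
            if (Matrix4x4.Invert(m, out Matrix4x4 inverse))
            {
                Console.WriteLine(inverse.Translation); // roughly <-1.5, -2, -2.5>
                Console.WriteLine((m * inverse).M11);   // roughly 1 (the product is near-identity)
            }
        }
    }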
- public static Matrix4 Invert(Matrix4 mat) - { - Matrix4 result; - Invert(ref mat, out result); - return result; - } - - #endregion - - #region Transpose - - /// - /// Calculate the transpose of the given matrix - /// - /// The matrix to transpose - /// The transpose of the given matrix - public static Matrix4 Transpose(Matrix4 mat) - { - return new Matrix4(mat.Column0, mat.Column1, mat.Column2, mat.Column3); - } - - - /// - /// Calculate the transpose of the given matrix - /// - /// The matrix to transpose - /// The result of the calculation - public static void Transpose(ref Matrix4 mat, out Matrix4 result) - { - result.Row0 = mat.Column0; - result.Row1 = mat.Column1; - result.Row2 = mat.Column2; - result.Row3 = mat.Column3; - } - - #endregion - - #endregion - - #region Operators - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4 which holds the result of the multiplication - public static Matrix4 operator *(Matrix4 left, Matrix4 right) - { - return Matrix4.Mult(left, right); - } - - /// - /// Matrix-scalar multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4 which holds the result of the multiplication - public static Matrix4 operator *(Matrix4 left, float right) - { - return Matrix4.Mult(left, right); - } - - /// - /// Matrix addition - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4 which holds the result of the addition - public static Matrix4 operator +(Matrix4 left, Matrix4 right) - { - return Matrix4.Add(left, right); - } - - /// - /// Matrix subtraction - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4 which holds the result of the subtraction - public static Matrix4 operator -(Matrix4 left, Matrix4 right) - { - return Matrix4.Subtract(left, right); - } - - /// - /// Compares two instances for equality. - /// - /// The first instance. - /// The second instance. - /// True, if left equals right; false otherwise. - public static bool operator ==(Matrix4 left, Matrix4 right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for inequality. - /// - /// The first instance. - /// The second instance. - /// True, if left does not equal right; false otherwise. - public static bool operator !=(Matrix4 left, Matrix4 right) - { - return !left.Equals(right); - } - - #endregion - - #region Overrides - - #region public override string ToString() - - /// - /// Returns a System.String that represents the current Matrix4. - /// - /// The string representation of the matrix. - public override string ToString() - { - return String.Format("{0}\n{1}\n{2}\n{3}", Row0, Row1, Row2, Row3); - } - - #endregion - - #region public override int GetHashCode() - - /// - /// Returns the hashcode for this instance. - /// - /// A System.Int32 containing the unique hashcode for this instance. - public override int GetHashCode() - { - return Row0.GetHashCode() ^ Row1.GetHashCode() ^ Row2.GetHashCode() ^ Row3.GetHashCode(); - } - - #endregion - - #region public override bool Equals(object obj) - - /// - /// Indicates whether this instance and a specified object are equal. - /// - /// The object to compare tresult. - /// True if the instances are equal; false otherwise. - public override bool Equals(object obj) - { - if (!(obj is Matrix4)) - return false; - - return this.Equals((Matrix4)obj); - } - - #endregion - - #endregion - - #endregion - - #region IEquatable Members - - /// Indicates whether the current matrix is equal to another matrix. 
- /// An matrix to compare with this matrix. - /// true if the current matrix is equal to the matrix parameter; otherwise, false. - public bool Equals(Matrix4 other) - { - return - Row0 == other.Row0 && - Row1 == other.Row1 && - Row2 == other.Row2 && - Row3 == other.Row3; - } - - #endregion - } -} diff --git a/OpenTK/Math/Matrix4d.cs b/OpenTK/Math/Matrix4d.cs deleted file mode 100644 index 785a6a12..00000000 --- a/OpenTK/Math/Matrix4d.cs +++ /dev/null @@ -1,1686 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - */ -#endregion - -using System; -using System.Runtime.InteropServices; - -namespace OpenTK -{ - /// - /// Represents a 4x4 matrix containing 3D rotation, scale, transform, and projection with double-precision components. - /// - /// - [Serializable] - [StructLayout(LayoutKind.Sequential)] - public struct Matrix4d : IEquatable - { - #region Fields - - /// - /// Top row of the matrix - /// - public Vector4d Row0; - /// - /// 2nd row of the matrix - /// - public Vector4d Row1; - /// - /// 3rd row of the matrix - /// - public Vector4d Row2; - /// - /// Bottom row of the matrix - /// - public Vector4d Row3; - - /// - /// The identity matrix - /// - public static Matrix4d Identity = new Matrix4d(Vector4d .UnitX, Vector4d .UnitY, Vector4d .UnitZ, Vector4d .UnitW); - - #endregion - - #region Constructors - - /// - /// Constructs a new instance. - /// - /// Top row of the matrix - /// Second row of the matrix - /// Third row of the matrix - /// Bottom row of the matrix - public Matrix4d(Vector4d row0, Vector4d row1, Vector4d row2, Vector4d row3) - { - Row0 = row0; - Row1 = row1; - Row2 = row2; - Row3 = row3; - } - - /// - /// Constructs a new instance. - /// - /// First item of the first row. - /// Second item of the first row. - /// Third item of the first row. - /// Fourth item of the first row. - /// First item of the second row. - /// Second item of the second row. - /// Third item of the second row. - /// Fourth item of the second row. - /// First item of the third row. - /// Second item of the third row. - /// Third item of the third row. - /// First item of the third row. - /// Fourth item of the fourth row. - /// Second item of the fourth row. - /// Third item of the fourth row. - /// Fourth item of the fourth row. 
- public Matrix4d( - double m00, double m01, double m02, double m03, - double m10, double m11, double m12, double m13, - double m20, double m21, double m22, double m23, - double m30, double m31, double m32, double m33) - { - Row0 = new Vector4d(m00, m01, m02, m03); - Row1 = new Vector4d(m10, m11, m12, m13); - Row2 = new Vector4d(m20, m21, m22, m23); - Row3 = new Vector4d(m30, m31, m32, m33); - } - - #endregion - - #region Public Members - - #region Properties - - /// - /// The determinant of this matrix - /// - public double Determinant - { - get - { - return - Row0.X * Row1.Y * Row2.Z * Row3.W - Row0.X * Row1.Y * Row2.W * Row3.Z + Row0.X * Row1.Z * Row2.W * Row3.Y - Row0.X * Row1.Z * Row2.Y * Row3.W - + Row0.X * Row1.W * Row2.Y * Row3.Z - Row0.X * Row1.W * Row2.Z * Row3.Y - Row0.Y * Row1.Z * Row2.W * Row3.X + Row0.Y * Row1.Z * Row2.X * Row3.W - - Row0.Y * Row1.W * Row2.X * Row3.Z + Row0.Y * Row1.W * Row2.Z * Row3.X - Row0.Y * Row1.X * Row2.Z * Row3.W + Row0.Y * Row1.X * Row2.W * Row3.Z - + Row0.Z * Row1.W * Row2.X * Row3.Y - Row0.Z * Row1.W * Row2.Y * Row3.X + Row0.Z * Row1.X * Row2.Y * Row3.W - Row0.Z * Row1.X * Row2.W * Row3.Y - + Row0.Z * Row1.Y * Row2.W * Row3.X - Row0.Z * Row1.Y * Row2.X * Row3.W - Row0.W * Row1.X * Row2.Y * Row3.Z + Row0.W * Row1.X * Row2.Z * Row3.Y - - Row0.W * Row1.Y * Row2.Z * Row3.X + Row0.W * Row1.Y * Row2.X * Row3.Z - Row0.W * Row1.Z * Row2.X * Row3.Y + Row0.W * Row1.Z * Row2.Y * Row3.X; - } - } - - /// - /// The first column of this matrix - /// - public Vector4d Column0 - { - get { return new Vector4d (Row0.X, Row1.X, Row2.X, Row3.X); } - set { Row0.X = value.X; Row1.X = value.Y; Row2.X = value.Z; Row3.X = value.W; } - } - - /// - /// The second column of this matrix - /// - public Vector4d Column1 - { - get { return new Vector4d (Row0.Y, Row1.Y, Row2.Y, Row3.Y); } - set { Row0.Y = value.X; Row1.Y = value.Y; Row2.Y = value.Z; Row3.Y = value.W; } - } - - /// - /// The third column of this matrix - /// - public Vector4d Column2 - { - get { return new Vector4d (Row0.Z, Row1.Z, Row2.Z, Row3.Z); } - set { Row0.Z = value.X; Row1.Z = value.Y; Row2.Z = value.Z; Row3.Z = value.W; } - } - - /// - /// The fourth column of this matrix - /// - public Vector4d Column3 - { - get { return new Vector4d (Row0.W, Row1.W, Row2.W, Row3.W); } - set { Row0.W = value.X; Row1.W = value.Y; Row2.W = value.Z; Row3.W = value.W; } - } - - /// - /// Gets or sets the value at row 1, column 1 of this instance. - /// - public double M11 { get { return Row0.X; } set { Row0.X = value; } } - - /// - /// Gets or sets the value at row 1, column 2 of this instance. - /// - public double M12 { get { return Row0.Y; } set { Row0.Y = value; } } - - /// - /// Gets or sets the value at row 1, column 3 of this instance. - /// - public double M13 { get { return Row0.Z; } set { Row0.Z = value; } } - - /// - /// Gets or sets the value at row 1, column 4 of this instance. - /// - public double M14 { get { return Row0.W; } set { Row0.W = value; } } - - /// - /// Gets or sets the value at row 2, column 1 of this instance. - /// - public double M21 { get { return Row1.X; } set { Row1.X = value; } } - - /// - /// Gets or sets the value at row 2, column 2 of this instance. - /// - public double M22 { get { return Row1.Y; } set { Row1.Y = value; } } - - /// - /// Gets or sets the value at row 2, column 3 of this instance. - /// - public double M23 { get { return Row1.Z; } set { Row1.Z = value; } } - - /// - /// Gets or sets the value at row 2, column 4 of this instance. 
- /// - public double M24 { get { return Row1.W; } set { Row1.W = value; } } - - /// - /// Gets or sets the value at row 3, column 1 of this instance. - /// - public double M31 { get { return Row2.X; } set { Row2.X = value; } } - - /// - /// Gets or sets the value at row 3, column 2 of this instance. - /// - public double M32 { get { return Row2.Y; } set { Row2.Y = value; } } - - /// - /// Gets or sets the value at row 3, column 3 of this instance. - /// - public double M33 { get { return Row2.Z; } set { Row2.Z = value; } } - - /// - /// Gets or sets the value at row 3, column 4 of this instance. - /// - public double M34 { get { return Row2.W; } set { Row2.W = value; } } - - /// - /// Gets or sets the value at row 4, column 1 of this instance. - /// - public double M41 { get { return Row3.X; } set { Row3.X = value; } } - - /// - /// Gets or sets the value at row 4, column 2 of this instance. - /// - public double M42 { get { return Row3.Y; } set { Row3.Y = value; } } - - /// - /// Gets or sets the value at row 4, column 3 of this instance. - /// - public double M43 { get { return Row3.Z; } set { Row3.Z = value; } } - - /// - /// Gets or sets the value at row 4, column 4 of this instance. - /// - public double M44 { get { return Row3.W; } set { Row3.W = value; } } - - /// - /// Gets or sets the values along the main diagonal of the matrix. - /// - public Vector4d Diagonal - { - get - { - return new Vector4d(Row0.X, Row1.Y, Row2.Z, Row3.W); - } - set - { - Row0.X = value.X; - Row1.Y = value.Y; - Row2.Z = value.Z; - Row3.W = value.W; - } - } - - /// - /// Gets the trace of the matrix, the sum of the values along the diagonal. - /// - public double Trace { get { return Row0.X + Row1.Y + Row2.Z + Row3.W; } } - - #endregion - - #region Indexers - - /// - /// Gets or sets the value at a specified row and column. - /// - public double this[int rowIndex, int columnIndex] - { - get - { - if (rowIndex == 0) return Row0[columnIndex]; - else if (rowIndex == 1) return Row1[columnIndex]; - else if (rowIndex == 2) return Row2[columnIndex]; - else if (rowIndex == 3) return Row3[columnIndex]; - throw new IndexOutOfRangeException("You tried to access this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - set - { - if (rowIndex == 0) Row0[columnIndex] = value; - else if (rowIndex == 1) Row1[columnIndex] = value; - else if (rowIndex == 2) Row2[columnIndex] = value; - else if (rowIndex == 3) Row3[columnIndex] = value; - else throw new IndexOutOfRangeException("You tried to set this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - } - - #endregion - - #region Instance - - #region public void Invert() - - /// - /// Converts this instance into its inverse. - /// - public void Invert() - { - this = Matrix4d.Invert(this); - } - - #endregion - - #region public void Transpose() - - /// - /// Converts this instance into its transpose. - /// - public void Transpose() - { - this = Matrix4d.Transpose(this); - } - - #endregion - - /// - /// Returns a normalised copy of this instance. - /// - public Matrix4d Normalized() - { - Matrix4d m = this; - m.Normalize(); - return m; - } - - /// - /// Divides each element in the Matrix by the . - /// - public void Normalize() - { - var determinant = this.Determinant; - Row0 /= determinant; - Row1 /= determinant; - Row2 /= determinant; - Row3 /= determinant; - } - - /// - /// Returns an inverted copy of this instance. 
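The closed-form Determinant above (which Normalize divides every row by) is the 4x4 Laplace expansion written out in full; a generic recursive version makes the structure easier to see. A self-contained sketch with illustrative names:

    using System;

    static class DeterminantDemo
    {
        // Laplace expansion along the first row.
        static double Det(double[,] m)
        {
            int n = m.GetLength(0);
            if (n == 1) return m[0, 0];

            double det = 0;
            for (int col = 0; col < n; col++)
            {
                // Minor: drop row 0 and the current column.
                var minor = new double[n - 1, n - 1];
                for (int i = 1; i < n; i++)
                    for (int j = 0, k = 0; j < n; j++)
                        if (j != col) minor[i - 1, k++] = m[i, j];

                det += (col % 2 == 0 ? 1 : -1) * m[0, col] * Det(minor);
            }
            return det;
        }

        static void Main()
        {
            // Diagonal matrix: the determinant is the product of the diagonal (2 * 3 * 4 * 5 = 120).
            var d = new double[4, 4];
            d[0, 0] = 2; d[1, 1] = 3; d[2, 2] = 4; d[3, 3] = 5;
            Console.WriteLine(Det(d)); // 120
        }
    }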
- /// - public Matrix4d Inverted() - { - Matrix4d m = this; - if (m.Determinant != 0) - m.Invert(); - return m; - } - - /// - /// Returns a copy of this Matrix4d without translation. - /// - public Matrix4d ClearTranslation() - { - Matrix4d m = this; - m.Row3.Xyz = Vector3d.Zero; - return m; - } - /// - /// Returns a copy of this Matrix4d without scale. - /// - public Matrix4d ClearScale() - { - Matrix4d m = this; - m.Row0.Xyz = m.Row0.Xyz.Normalized(); - m.Row1.Xyz = m.Row1.Xyz.Normalized(); - m.Row2.Xyz = m.Row2.Xyz.Normalized(); - return m; - } - /// - /// Returns a copy of this Matrix4d without rotation. - /// - public Matrix4d ClearRotation() - { - Matrix4d m = this; - m.Row0.Xyz = new Vector3d(m.Row0.Xyz.Length, 0, 0); - m.Row1.Xyz = new Vector3d(0, m.Row1.Xyz.Length, 0); - m.Row2.Xyz = new Vector3d(0, 0, m.Row2.Xyz.Length); - return m; - } - /// - /// Returns a copy of this Matrix4d without projection. - /// - public Matrix4d ClearProjection() - { - Matrix4d m = this; - m.Column3 = Vector4d.Zero; - return m; - } - - /// - /// Returns the translation component of this instance. - /// - public Vector3d ExtractTranslation() { return Row3.Xyz; } - - /// - /// Returns the scale component of this instance. - /// - public Vector3d ExtractScale() { return new Vector3d(Row0.Xyz.Length, Row1.Xyz.Length, Row2.Xyz.Length); } - - /// - /// Returns the rotation component of this instance. Quite slow. - /// - /// Whether the method should row-normalise (i.e. remove scale from) the Matrix. Pass false if you know it's already normalised. - public Quaterniond ExtractRotation(bool row_normalise = true) - { - var row0 = Row0.Xyz; - var row1 = Row1.Xyz; - var row2 = Row2.Xyz; - - if (row_normalise) - { - row0 = row0.Normalized(); - row1 = row1.Normalized(); - row2 = row2.Normalized(); - } - - // code below adapted from Blender - - Quaterniond q = new Quaterniond(); - double trace = 0.25 * (row0[0] + row1[1] + row2[2] + 1.0); - - if (trace > 0) - { - double sq = Math.Sqrt(trace); - - q.W = sq; - sq = 1.0 / (4.0 * sq); - q.X = (row1[2] - row2[1]) * sq; - q.Y = (row2[0] - row0[2]) * sq; - q.Z = (row0[1] - row1[0]) * sq; - } - else if (row0[0] > row1[1] && row0[0] > row2[2]) - { - double sq = 2.0 * Math.Sqrt(1.0 + row0[0] - row1[1] - row2[2]); - - q.X = 0.25 * sq; - sq = 1.0 / sq; - q.W = (row2[1] - row1[2]) * sq; - q.Y = (row1[0] + row0[1]) * sq; - q.Z = (row2[0] + row0[2]) * sq; - } - else if (row1[1] > row2[2]) - { - double sq = 2.0 * Math.Sqrt(1.0 + row1[1] - row0[0] - row2[2]); - - q.Y = 0.25 * sq; - sq = 1.0 / sq; - q.W = (row2[0] - row0[2]) * sq; - q.X = (row1[0] + row0[1]) * sq; - q.Z = (row2[1] + row1[2]) * sq; - } - else - { - double sq = 2.0 * Math.Sqrt(1.0 + row2[2] - row0[0] - row1[1]); - - q.Z = 0.25 * sq; - sq = 1.0 / sq; - q.W = (row1[0] - row0[1]) * sq; - q.X = (row2[0] + row0[2]) * sq; - q.Y = (row2[1] + row1[2]) * sq; - } - - q.Normalize(); - return q; - } - - /// - /// Returns the projection component of this instance. - /// - public Vector4d ExtractProjection() - { - return Column3; - } - - - #endregion - - #region Static - - #region CreateFromAxisAngle - - /// - /// Build a rotation matrix from the specified axis/angle rotation. - /// - /// The axis to rotate about. - /// Angle in radians to rotate counter-clockwise (looking in the direction of the given axis). - /// A matrix instance. - public static void CreateFromAxisAngle(Vector3d axis, double angle, out Matrix4d result) - { - // normalize and create a local copy of the vector. 
- axis.Normalize(); - double axisX = axis.X, axisY = axis.Y, axisZ = axis.Z; - - // calculate angles - double cos = System.Math.Cos(-angle); - double sin = System.Math.Sin(-angle); - double t = 1.0f - cos; - - // do the conversion math once - double tXX = t * axisX * axisX, - tXY = t * axisX * axisY, - tXZ = t * axisX * axisZ, - tYY = t * axisY * axisY, - tYZ = t * axisY * axisZ, - tZZ = t * axisZ * axisZ; - - double sinX = sin * axisX, - sinY = sin * axisY, - sinZ = sin * axisZ; - - result.Row0.X = tXX + cos; - result.Row0.Y = tXY - sinZ; - result.Row0.Z = tXZ + sinY; - result.Row0.W = 0; - result.Row1.X = tXY + sinZ; - result.Row1.Y = tYY + cos; - result.Row1.Z = tYZ - sinX; - result.Row1.W = 0; - result.Row2.X = tXZ - sinY; - result.Row2.Y = tYZ + sinX; - result.Row2.Z = tZZ + cos; - result.Row2.W = 0; - result.Row3 = Vector4d.UnitW; - } - - /// - /// Build a rotation matrix from the specified axis/angle rotation. - /// - /// The axis to rotate about. - /// Angle in radians to rotate counter-clockwise (looking in the direction of the given axis). - /// A matrix instance. - public static Matrix4d CreateFromAxisAngle(Vector3d axis, double angle) - { - Matrix4d result; - CreateFromAxisAngle(axis, angle, out result); - return result; - } - - #endregion - - #region CreateRotation[XYZ] - - /// - /// Builds a rotation matrix for a rotation around the x-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4d instance. - public static void CreateRotationX(double angle, out Matrix4d result) - { - double cos = System.Math.Cos(angle); - double sin = System.Math.Sin(angle); - - result.Row0 = Vector4d.UnitX; - result.Row1 = new Vector4d(0, cos, sin, 0); - result.Row2 = new Vector4d(0, -sin, cos, 0); - result.Row3 = Vector4d.UnitW; - } - - /// - /// Builds a rotation matrix for a rotation around the x-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4d instance. - public static Matrix4d CreateRotationX(double angle) - { - Matrix4d result; - CreateRotationX(angle, out result); - return result; - } - - /// - /// Builds a rotation matrix for a rotation around the y-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4d instance. - public static void CreateRotationY(double angle, out Matrix4d result) - { - double cos = System.Math.Cos(angle); - double sin = System.Math.Sin(angle); - - result.Row0 = new Vector4d(cos, 0, -sin, 0); - result.Row1 = Vector4d.UnitY; - result.Row2 = new Vector4d(sin, 0, cos, 0); - result.Row3 = Vector4d.UnitW; - } - - /// - /// Builds a rotation matrix for a rotation around the y-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4d instance. - public static Matrix4d CreateRotationY(double angle) - { - Matrix4d result; - CreateRotationY(angle, out result); - return result; - } - - /// - /// Builds a rotation matrix for a rotation around the z-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4d instance. - public static void CreateRotationZ(double angle, out Matrix4d result) - { - double cos = System.Math.Cos(angle); - double sin = System.Math.Sin(angle); - - result.Row0 = new Vector4d(cos, sin, 0, 0); - result.Row1 = new Vector4d(-sin, cos, 0, 0); - result.Row2 = Vector4d.UnitZ; - result.Row3 = Vector4d.UnitW; - } - - /// - /// Builds a rotation matrix for a rotation around the z-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4d instance. 
- public static Matrix4d CreateRotationZ(double angle) - { - Matrix4d result; - CreateRotationZ(angle, out result); - return result; - } - - #endregion - - #region CreateTranslation - - /// - /// Creates a translation matrix. - /// - /// X translation. - /// Y translation. - /// Z translation. - /// The resulting Matrix4d instance. - public static void CreateTranslation(double x, double y, double z, out Matrix4d result) - { - result = Identity; - result.Row3 = new Vector4d(x, y, z, 1); - } - - /// - /// Creates a translation matrix. - /// - /// The translation vector. - /// The resulting Matrix4d instance. - public static void CreateTranslation(ref Vector3d vector, out Matrix4d result) - { - result = Identity; - result.Row3 = new Vector4d(vector.X, vector.Y, vector.Z, 1); - } - - /// - /// Creates a translation matrix. - /// - /// X translation. - /// Y translation. - /// Z translation. - /// The resulting Matrix4d instance. - public static Matrix4d CreateTranslation(double x, double y, double z) - { - Matrix4d result; - CreateTranslation(x, y, z, out result); - return result; - } - - /// - /// Creates a translation matrix. - /// - /// The translation vector. - /// The resulting Matrix4d instance. - public static Matrix4d CreateTranslation(Vector3d vector) - { - Matrix4d result; - CreateTranslation(vector.X, vector.Y, vector.Z, out result); - return result; - } - - #endregion - - #region CreateOrthographic - - /// - /// Creates an orthographic projection matrix. - /// - /// The width of the projection volume. - /// The height of the projection volume. - /// The near edge of the projection volume. - /// The far edge of the projection volume. - /// The resulting Matrix4d instance. - public static void CreateOrthographic(double width, double height, double zNear, double zFar, out Matrix4d result) - { - CreateOrthographicOffCenter(-width / 2, width / 2, -height / 2, height / 2, zNear, zFar, out result); - } - - /// - /// Creates an orthographic projection matrix. - /// - /// The width of the projection volume. - /// The height of the projection volume. - /// The near edge of the projection volume. - /// The far edge of the projection volume. - /// The resulting Matrix4d instance. - public static Matrix4d CreateOrthographic(double width, double height, double zNear, double zFar) - { - Matrix4d result; - CreateOrthographicOffCenter(-width / 2, width / 2, -height / 2, height / 2, zNear, zFar, out result); - return result; - } - - #endregion - - #region CreateOrthographicOffCenter - - /// - /// Creates an orthographic projection matrix. - /// - /// The left edge of the projection volume. - /// The right edge of the projection volume. - /// The bottom edge of the projection volume. - /// The top edge of the projection volume. - /// The near edge of the projection volume. - /// The far edge of the projection volume. - /// The resulting Matrix4d instance. - public static void CreateOrthographicOffCenter(double left, double right, double bottom, double top, double zNear, double zFar, out Matrix4d result) - { - result = new Matrix4d(); - - double invRL = 1 / (right - left); - double invTB = 1 / (top - bottom); - double invFN = 1 / (zFar - zNear); - - result.M11 = 2 * invRL; - result.M22 = 2 * invTB; - result.M33 = -2 * invFN; - - result.M41 = -(right + left) * invRL; - result.M42 = -(top + bottom) * invTB; - result.M43 = -(zFar + zNear) * invFN; - result.M44 = 1; - } - - /// - /// Creates an orthographic projection matrix. - /// - /// The left edge of the projection volume. 
- /// The right edge of the projection volume. - /// The bottom edge of the projection volume. - /// The top edge of the projection volume. - /// The near edge of the projection volume. - /// The far edge of the projection volume. - /// The resulting Matrix4d instance. - public static Matrix4d CreateOrthographicOffCenter(double left, double right, double bottom, double top, double zNear, double zFar) - { - Matrix4d result; - CreateOrthographicOffCenter(left, right, bottom, top, zNear, zFar, out result); - return result; - } - - #endregion - - #region CreatePerspectiveFieldOfView - - /// - /// Creates a perspective projection matrix. - /// - /// Angle of the field of view in the y direction (in radians) - /// Aspect ratio of the view (width / height) - /// Distance to the near clip plane - /// Distance to the far clip plane - /// A projection matrix that transforms camera space to raster space - /// - /// Thrown under the following conditions: - /// - /// fovy is zero, less than zero or larger than Math.PI - /// aspect is negative or zero - /// zNear is negative or zero - /// zFar is negative or zero - /// zNear is larger than zFar - /// - /// - public static void CreatePerspectiveFieldOfView(double fovy, double aspect, double zNear, double zFar, out Matrix4d result) - { - if (fovy <= 0 || fovy > Math.PI) - throw new ArgumentOutOfRangeException("fovy"); - if (aspect <= 0) - throw new ArgumentOutOfRangeException("aspect"); - if (zNear <= 0) - throw new ArgumentOutOfRangeException("zNear"); - if (zFar <= 0) - throw new ArgumentOutOfRangeException("zFar"); - - double yMax = zNear * System.Math.Tan(0.5 * fovy); - double yMin = -yMax; - double xMin = yMin * aspect; - double xMax = yMax * aspect; - - CreatePerspectiveOffCenter(xMin, xMax, yMin, yMax, zNear, zFar, out result); - } - - /// - /// Creates a perspective projection matrix. - /// - /// Angle of the field of view in the y direction (in radians) - /// Aspect ratio of the view (width / height) - /// Distance to the near clip plane - /// Distance to the far clip plane - /// A projection matrix that transforms camera space to raster space - /// - /// Thrown under the following conditions: - /// - /// fovy is zero, less than zero or larger than Math.PI - /// aspect is negative or zero - /// zNear is negative or zero - /// zFar is negative or zero - /// zNear is larger than zFar - /// - /// - public static Matrix4d CreatePerspectiveFieldOfView(double fovy, double aspect, double zNear, double zFar) - { - Matrix4d result; - CreatePerspectiveFieldOfView(fovy, aspect, zNear, zFar, out result); - return result; - } - - #endregion - - #region CreatePerspectiveOffCenter - - /// - /// Creates an perspective projection matrix. 
- /// - /// Left edge of the view frustum - /// Right edge of the view frustum - /// Bottom edge of the view frustum - /// Top edge of the view frustum - /// Distance to the near clip plane - /// Distance to the far clip plane - /// A projection matrix that transforms camera space to raster space - /// - /// Thrown under the following conditions: - /// - /// zNear is negative or zero - /// zFar is negative or zero - /// zNear is larger than zFar - /// - /// - public static void CreatePerspectiveOffCenter(double left, double right, double bottom, double top, double zNear, double zFar, out Matrix4d result) - { - if (zNear <= 0) - throw new ArgumentOutOfRangeException("zNear"); - if (zFar <= 0) - throw new ArgumentOutOfRangeException("zFar"); - if (zNear >= zFar) - throw new ArgumentOutOfRangeException("zNear"); - - double x = (2.0 * zNear) / (right - left); - double y = (2.0 * zNear) / (top - bottom); - double a = (right + left) / (right - left); - double b = (top + bottom) / (top - bottom); - double c = -(zFar + zNear) / (zFar - zNear); - double d = -(2.0 * zFar * zNear) / (zFar - zNear); - - result = new Matrix4d(x, 0, 0, 0, - 0, y, 0, 0, - a, b, c, -1, - 0, 0, d, 0); - } - - /// - /// Creates an perspective projection matrix. - /// - /// Left edge of the view frustum - /// Right edge of the view frustum - /// Bottom edge of the view frustum - /// Top edge of the view frustum - /// Distance to the near clip plane - /// Distance to the far clip plane - /// A projection matrix that transforms camera space to raster space - /// - /// Thrown under the following conditions: - /// - /// zNear is negative or zero - /// zFar is negative or zero - /// zNear is larger than zFar - /// - /// - public static Matrix4d CreatePerspectiveOffCenter(double left, double right, double bottom, double top, double zNear, double zFar) - { - Matrix4d result; - CreatePerspectiveOffCenter(left, right, bottom, top, zNear, zFar, out result); - return result; - } - - #endregion - - #region CreateFromQuaternion - /// - /// Build a rotation matrix from the specified quaternion. - /// - /// Quaternion to translate. - /// Matrix result. - public static void CreateFromQuaternion(ref Quaterniond q, out Matrix4d result) - { - Vector3d axis; - double angle; - q.ToAxisAngle(out axis, out angle); - CreateFromAxisAngle(axis, angle, out result); - } - - /// - /// Builds a rotation matrix from a quaternion. - /// - /// The quaternion to rotate by. - /// A matrix instance. - public static Matrix4d CreateFromQuaternion(Quaterniond q) - { - Matrix4d result; - CreateFromQuaternion(ref q, out result); - return result; - } - - - /// - /// Build a rotation matrix from the specified quaternion. - /// - /// Quaternion to translate. - /// Matrix result. - [Obsolete("Use double-precision overload instead")] - public static void CreateFromQuaternion(ref Quaternion q,ref Matrix4 m) - { - m = Matrix4.Identity; - - float X = q.X; - float Y = q.Y; - float Z = q.Z; - float W = q.W; - - float xx = X * X; - float xy = X * Y; - float xz = X * Z; - float xw = X * W; - float yy = Y * Y; - float yz = Y * Z; - float yw = Y * W; - float zz = Z * Z; - float zw = Z * W; - - m.M11 = 1 - 2 * (yy + zz); - m.M21 = 2 * (xy - zw); - m.M31 = 2 * (xz + yw); - m.M12 = 2 * (xy + zw); - m.M22 = 1 - 2 * (xx + zz); - m.M32 = 2 * (yz - xw); - m.M13 = 2 * (xz - yw); - m.M23 = 2 * (yz + xw); - m.M33 = 1 - 2 * (xx + yy); - } - - /// - /// Build a rotation matrix from the specified quaternion. - /// - /// Quaternion to translate. - /// A matrix instance. 
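The quaternion-to-matrix expansion above (the xx/xy/.../zw terms) can be exercised on its own: for a unit quaternion describing a 90 degree rotation about +Z, the first row of the resulting matrix should send the row vector (1, 0, 0) to roughly (0, 1, 0). A standalone sketch of the same 3x3 block, with illustrative names:

    using System;

    static class QuaternionToMatrixDemo
    {
        // Same terms as the quaternion overload above, reduced to the 3x3 block.
        static float[,] FromQuaternion(float x, float y, float z, float w)
        {
            float xx = x * x, xy = x * y, xz = x * z, xw = x * w;
            float yy = y * y, yz = y * z, yw = y * w;
            float zz = z * z, zw = z * w;

            return new float[,]
            {
                { 1 - 2 * (yy + zz), 2 * (xy + zw),     2 * (xz - yw)     },
                { 2 * (xy - zw),     1 - 2 * (xx + zz), 2 * (yz + xw)     },
                { 2 * (xz + yw),     2 * (yz - xw),     1 - 2 * (xx + yy) },
            };
        }

        static void Main()
        {
            // Unit quaternion for a 90 degree rotation about +Z.
            float s = MathF.Sin(MathF.PI / 4), c = MathF.Cos(MathF.PI / 4);
            var m = FromQuaternion(0, 0, s, c);

            Console.WriteLine((m[0, 0], m[0, 1], m[0, 2])); // roughly (0, 1, 0)
        }
    }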
- [Obsolete("Use double-precision overload instead")] - public static Matrix4 CreateFromQuaternion(ref Quaternion q) - { - Matrix4 result = Matrix4.Identity; - - float X = q.X; - float Y = q.Y; - float Z = q.Z; - float W = q.W; - - float xx = X * X; - float xy = X * Y; - float xz = X * Z; - float xw = X * W; - float yy = Y * Y; - float yz = Y * Z; - float yw = Y * W; - float zz = Z * Z; - float zw = Z * W; - - result.M11 = 1 - 2 * (yy + zz); - result.M21 = 2 * (xy - zw); - result.M31 = 2 * (xz + yw); - result.M12 = 2 * (xy + zw); - result.M22 = 1 - 2 * (xx + zz); - result.M32 = 2 * (yz - xw); - result.M13 = 2 * (xz - yw); - result.M23 = 2 * (yz + xw); - result.M33 = 1 - 2 * (xx + yy); - return result; - } - - #endregion - - #region Obsolete Functions - - #region Translation Functions - - /// - /// Build a translation matrix with the given translation - /// - /// The vector to translate along - /// A Translation matrix - [Obsolete("Use CreateTranslation instead.")] - public static Matrix4d Translation(Vector3d trans) - { - return Translation(trans.X, trans.Y, trans.Z); - } - - /// - /// Build a translation matrix with the given translation - /// - /// X translation - /// Y translation - /// Z translation - /// A Translation matrix - [Obsolete("Use CreateTranslation instead.")] - public static Matrix4d Translation(double x, double y, double z) - { - Matrix4d result = Identity; - result.Row3 = new Vector4d(x, y, z, 1.0); - return result; - } - - #endregion - - #endregion - - #region Scale Functions - - /// - /// Build a scaling matrix - /// - /// Single scale factor for x,y and z axes - /// A scaling matrix - public static Matrix4d Scale(double scale) - { - return Scale(scale, scale, scale); - } - - /// - /// Build a scaling matrix - /// - /// Scale factors for x,y and z axes - /// A scaling matrix - public static Matrix4d Scale(Vector3d scale) - { - return Scale(scale.X, scale.Y, scale.Z); - } - - /// - /// Build a scaling matrix - /// - /// Scale factor for x-axis - /// Scale factor for y-axis - /// Scale factor for z-axis - /// A scaling matrix - public static Matrix4d Scale(double x, double y, double z) - { - Matrix4d result; - result.Row0 = Vector4d .UnitX * x; - result.Row1 = Vector4d .UnitY * y; - result.Row2 = Vector4d .UnitZ * z; - result.Row3 = Vector4d .UnitW; - return result; - } - - #endregion - - #region Rotation Functions - - /// - /// Build a rotation matrix that rotates about the x-axis - /// - /// angle in radians to rotate counter-clockwise around the x-axis - /// A rotation matrix - public static Matrix4d RotateX(double angle) - { - double cos = System.Math.Cos(angle); - double sin = System.Math.Sin(angle); - - Matrix4d result; - result.Row0 = Vector4d .UnitX; - result.Row1 = new Vector4d (0.0, cos, sin, 0.0); - result.Row2 = new Vector4d (0.0, -sin, cos, 0.0); - result.Row3 = Vector4d .UnitW; - return result; - } - - /// - /// Build a rotation matrix that rotates about the y-axis - /// - /// angle in radians to rotate counter-clockwise around the y-axis - /// A rotation matrix - public static Matrix4d RotateY(double angle) - { - double cos = System.Math.Cos(angle); - double sin = System.Math.Sin(angle); - - Matrix4d result; - result.Row0 = new Vector4d (cos, 0.0, -sin, 0.0); - result.Row1 = Vector4d .UnitY; - result.Row2 = new Vector4d (sin, 0.0, cos, 0.0); - result.Row3 = Vector4d .UnitW; - return result; - } - - /// - /// Build a rotation matrix that rotates about the z-axis - /// - /// angle in radians to rotate counter-clockwise around the z-axis - /// A rotation 
matrix - public static Matrix4d RotateZ(double angle) - { - double cos = System.Math.Cos(angle); - double sin = System.Math.Sin(angle); - - Matrix4d result; - result.Row0 = new Vector4d (cos, sin, 0.0, 0.0); - result.Row1 = new Vector4d (-sin, cos, 0.0, 0.0); - result.Row2 = Vector4d .UnitZ; - result.Row3 = Vector4d .UnitW; - return result; - } - - /// - /// Build a rotation matrix to rotate about the given axis - /// - /// the axis to rotate about - /// angle in radians to rotate counter-clockwise (looking in the direction of the given axis) - /// A rotation matrix - public static Matrix4d Rotate(Vector3d axis, double angle) - { - double cos = System.Math.Cos(-angle); - double sin = System.Math.Sin(-angle); - double t = 1.0 - cos; - - axis.Normalize(); - - Matrix4d result; - result.Row0 = new Vector4d (t * axis.X * axis.X + cos, t * axis.X * axis.Y - sin * axis.Z, t * axis.X * axis.Z + sin * axis.Y, 0.0); - result.Row1 = new Vector4d (t * axis.X * axis.Y + sin * axis.Z, t * axis.Y * axis.Y + cos, t * axis.Y * axis.Z - sin * axis.X, 0.0); - result.Row2 = new Vector4d (t * axis.X * axis.Z - sin * axis.Y, t * axis.Y * axis.Z + sin * axis.X, t * axis.Z * axis.Z + cos, 0.0); - result.Row3 = Vector4d .UnitW; - return result; - } - - /// - /// Build a rotation matrix from a quaternion - /// - /// the quaternion - /// A rotation matrix - public static Matrix4d Rotate(Quaterniond q) - { - Vector3d axis; - double angle; - q.ToAxisAngle(out axis, out angle); - return Rotate(axis, angle); - } - - #endregion - - #region Camera Helper Functions - - /// - /// Build a world space to camera space matrix - /// - /// Eye (camera) position in world space - /// Target position in world space - /// Up vector in world space (should not be parallel to the camera direction, that is target - eye) - /// A Matrix that transforms world space to camera space - public static Matrix4d LookAt(Vector3d eye, Vector3d target, Vector3d up) - { - Vector3d z = Vector3d.Normalize(eye - target); - Vector3d x = Vector3d.Normalize(Vector3d.Cross(up, z)); - Vector3d y = Vector3d.Normalize(Vector3d.Cross(z, x)); - - Matrix4d rot = new Matrix4d(new Vector4d (x.X, y.X, z.X, 0.0), - new Vector4d (x.Y, y.Y, z.Y, 0.0), - new Vector4d (x.Z, y.Z, z.Z, 0.0), - Vector4d .UnitW); - - Matrix4d trans = Matrix4d.CreateTranslation(-eye); - - return trans * rot; - } - - /// - /// Build a world space to camera space matrix - /// - /// Eye (camera) position in world space - /// Eye (camera) position in world space - /// Eye (camera) position in world space - /// Target position in world space - /// Target position in world space - /// Target position in world space - /// Up vector in world space (should not be parallel to the camera direction, that is target - eye) - /// Up vector in world space (should not be parallel to the camera direction, that is target - eye) - /// Up vector in world space (should not be parallel to the camera direction, that is target - eye) - /// A Matrix4 that transforms world space to camera space - public static Matrix4d LookAt(double eyeX, double eyeY, double eyeZ, double targetX, double targetY, double targetZ, double upX, double upY, double upZ) - { - return LookAt(new Vector3d(eyeX, eyeY, eyeZ), new Vector3d(targetX, targetY, targetZ), new Vector3d(upX, upY, upZ)); - } - - /// - /// Build a projection matrix - /// - /// Left edge of the view frustum - /// Right edge of the view frustum - /// Bottom edge of the view frustum - /// Top edge of the view frustum - /// Distance to the near clip plane - /// Distance to the 
far clip plane
- /// A projection matrix that transforms camera space to raster space
- public static Matrix4d Frustum(double left, double right, double bottom, double top, double near, double far)
- {
- double invRL = 1.0 / (right - left);
- double invTB = 1.0 / (top - bottom);
- double invFN = 1.0 / (far - near);
- return new Matrix4d(new Vector4d (2.0 * near * invRL, 0.0, 0.0, 0.0),
- new Vector4d (0.0, 2.0 * near * invTB, 0.0, 0.0),
- new Vector4d ((right + left) * invRL, (top + bottom) * invTB, -(far + near) * invFN, -1.0),
- new Vector4d (0.0, 0.0, -2.0 * far * near * invFN, 0.0));
- }
-
- ///
- /// Build a projection matrix
- ///
- /// Angle of the field of view in the y direction (in radians)
- /// Aspect ratio of the view (width / height)
- /// Distance to the near clip plane
- /// Distance to the far clip plane
- /// A projection matrix that transforms camera space to raster space
- public static Matrix4d Perspective(double fovy, double aspect, double near, double far)
- {
- double yMax = near * System.Math.Tan(0.5f * fovy);
- double yMin = -yMax;
- double xMin = yMin * aspect;
- double xMax = yMax * aspect;
-
- return Frustum(xMin, xMax, yMin, yMax, near, far);
- }
-
- #endregion
-
- #region Add Functions
-
- ///
- /// Adds two instances.
- ///
- /// The left operand of the addition.
- /// The right operand of the addition.
- /// A new instance that is the result of the addition.
- public static Matrix4d Add(Matrix4d left, Matrix4d right)
- {
- Matrix4d result;
- Add(ref left, ref right, out result);
- return result;
- }
-
- ///
- /// Adds two instances.
- ///
- /// The left operand of the addition.
- /// The right operand of the addition.
- /// A new instance that is the result of the addition.
- public static void Add(ref Matrix4d left, ref Matrix4d right, out Matrix4d result)
- {
- result.Row0 = left.Row0 + right.Row0;
- result.Row1 = left.Row1 + right.Row1;
- result.Row2 = left.Row2 + right.Row2;
- result.Row3 = left.Row3 + right.Row3;
- }
-
- #endregion
-
- #region Subtract Functions
-
- ///
- /// Subtracts one instance from another.
- ///
- /// The left operand of the subraction.
- /// The right operand of the subraction.
- /// A new instance that is the result of the subraction.
- public static Matrix4d Subtract(Matrix4d left, Matrix4d right)
- {
- Matrix4d result;
- Subtract(ref left, ref right, out result);
- return result;
- }
-
- ///
- /// Subtracts one instance from another.
- ///
- /// The left operand of the subraction.
- /// The right operand of the subraction.
- /// A new instance that is the result of the subraction.
- public static void Subtract(ref Matrix4d left, ref Matrix4d right, out Matrix4d result)
- {
- result.Row0 = left.Row0 - right.Row0;
- result.Row1 = left.Row1 - right.Row1;
- result.Row2 = left.Row2 - right.Row2;
- result.Row3 = left.Row3 - right.Row3;
- }
-
- #endregion
-
- #region Multiply Functions
-
- ///
- /// Multiplies two instances.
- ///
- /// The left operand of the multiplication.
- /// The right operand of the multiplication.
- /// A new instance that is the result of the multiplication
- public static Matrix4d Mult(Matrix4d left, Matrix4d right)
- {
- Matrix4d result;
- Mult(ref left, ref right, out result);
- return result;
- }
-
- ///
- /// Multiplies two instances.
- ///
- /// The left operand of the multiplication.
- /// The right operand of the multiplication.
- /// A new instance that is the result of the multiplication
- public static void Mult(ref Matrix4d left, ref Matrix4d right, out Matrix4d result)
- {
- double lM11 = left.Row0.X, lM12 = left.Row0.Y, lM13 = left.Row0.Z, lM14 = left.Row0.W,
- lM21 = left.Row1.X, lM22 = left.Row1.Y, lM23 = left.Row1.Z, lM24 = left.Row1.W,
- lM31 = left.Row2.X, lM32 = left.Row2.Y, lM33 = left.Row2.Z, lM34 = left.Row2.W,
- lM41 = left.Row3.X, lM42 = left.Row3.Y, lM43 = left.Row3.Z, lM44 = left.Row3.W,
- rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, rM14 = right.Row0.W,
- rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z, rM24 = right.Row1.W,
- rM31 = right.Row2.X, rM32 = right.Row2.Y, rM33 = right.Row2.Z, rM34 = right.Row2.W,
- rM41 = right.Row3.X, rM42 = right.Row3.Y, rM43 = right.Row3.Z, rM44 = right.Row3.W;
-
- result.Row0.X = (((lM11 * rM11) + (lM12 * rM21)) + (lM13 * rM31)) + (lM14 * rM41);
- result.Row0.Y = (((lM11 * rM12) + (lM12 * rM22)) + (lM13 * rM32)) + (lM14 * rM42);
- result.Row0.Z = (((lM11 * rM13) + (lM12 * rM23)) + (lM13 * rM33)) + (lM14 * rM43);
- result.Row0.W = (((lM11 * rM14) + (lM12 * rM24)) + (lM13 * rM34)) + (lM14 * rM44);
- result.Row1.X = (((lM21 * rM11) + (lM22 * rM21)) + (lM23 * rM31)) + (lM24 * rM41);
- result.Row1.Y = (((lM21 * rM12) + (lM22 * rM22)) + (lM23 * rM32)) + (lM24 * rM42);
- result.Row1.Z = (((lM21 * rM13) + (lM22 * rM23)) + (lM23 * rM33)) + (lM24 * rM43);
- result.Row1.W = (((lM21 * rM14) + (lM22 * rM24)) + (lM23 * rM34)) + (lM24 * rM44);
- result.Row2.X = (((lM31 * rM11) + (lM32 * rM21)) + (lM33 * rM31)) + (lM34 * rM41);
- result.Row2.Y = (((lM31 * rM12) + (lM32 * rM22)) + (lM33 * rM32)) + (lM34 * rM42);
- result.Row2.Z = (((lM31 * rM13) + (lM32 * rM23)) + (lM33 * rM33)) + (lM34 * rM43);
- result.Row2.W = (((lM31 * rM14) + (lM32 * rM24)) + (lM33 * rM34)) + (lM34 * rM44);
- result.Row3.X = (((lM41 * rM11) + (lM42 * rM21)) + (lM43 * rM31)) + (lM44 * rM41);
- result.Row3.Y = (((lM41 * rM12) + (lM42 * rM22)) + (lM43 * rM32)) + (lM44 * rM42);
- result.Row3.Z = (((lM41 * rM13) + (lM42 * rM23)) + (lM43 * rM33)) + (lM44 * rM43);
- result.Row3.W = (((lM41 * rM14) + (lM42 * rM24)) + (lM43 * rM34)) + (lM44 * rM44);
- }
-
- ///
- /// Multiplies an instance by a scalar.
- ///
- /// The left operand of the multiplication.
- /// The right operand of the multiplication.
- /// A new instance that is the result of the multiplication
- public static Matrix4d Mult(Matrix4d left, double right)
- {
- Matrix4d result;
- Mult(ref left, right, out result);
- return result;
- }
-
- ///
- /// Multiplies an instance by a scalar.
- ///
- /// The left operand of the multiplication.
- /// The right operand of the multiplication.
- /// A new instance that is the result of the multiplication
- public static void Mult(ref Matrix4d left, double right, out Matrix4d result)
- {
- result.Row0 = left.Row0 * right;
- result.Row1 = left.Row1 * right;
- result.Row2 = left.Row2 * right;
- result.Row3 = left.Row3 * right;
- }
-
- #endregion
-
- #region Invert Functions
-
- ///
- /// Calculate the inverse of the given matrix
- ///
- /// The matrix to invert
- /// The inverse of the given matrix if it has one, or the input if it is singular
- /// Thrown if the Matrix4d is singular.
- public static Matrix4d Invert(Matrix4d mat)
- {
- int[] colIdx = { 0, 0, 0, 0 };
- int[] rowIdx = { 0, 0, 0, 0 };
- int[] pivotIdx = { -1, -1, -1, -1 };
-
- // convert the matrix to an array for easy looping
- double[,] inverse = {{mat.Row0.X, mat.Row0.Y, mat.Row0.Z, mat.Row0.W},
- {mat.Row1.X, mat.Row1.Y, mat.Row1.Z, mat.Row1.W},
- {mat.Row2.X, mat.Row2.Y, mat.Row2.Z, mat.Row2.W},
- {mat.Row3.X, mat.Row3.Y, mat.Row3.Z, mat.Row3.W} };
- int icol = 0;
- int irow = 0;
- for (int i = 0; i < 4; i++)
- {
- // Find the largest pivot value
- double maxPivot = 0.0;
- for (int j = 0; j < 4; j++)
- {
- if (pivotIdx[j] != 0)
- {
- for (int k = 0; k < 4; ++k)
- {
- if (pivotIdx[k] == -1)
- {
- double absVal = System.Math.Abs(inverse[j, k]);
- if (absVal > maxPivot)
- {
- maxPivot = absVal;
- irow = j;
- icol = k;
- }
- }
- else if (pivotIdx[k] > 0)
- {
- return mat;
- }
- }
- }
- }
-
- ++(pivotIdx[icol]);
-
- // Swap rows over so pivot is on diagonal
- if (irow != icol)
- {
- for (int k = 0; k < 4; ++k)
- {
- double f = inverse[irow, k];
- inverse[irow, k] = inverse[icol, k];
- inverse[icol, k] = f;
- }
- }
-
- rowIdx[i] = irow;
- colIdx[i] = icol;
-
- double pivot = inverse[icol, icol];
- // check for singular matrix
- if (pivot == 0.0)
- {
- throw new InvalidOperationException("Matrix is singular and cannot be inverted.");
- //return mat;
- }
-
- // Scale row so it has a unit diagonal
- double oneOverPivot = 1.0 / pivot;
- inverse[icol, icol] = 1.0;
- for (int k = 0; k < 4; ++k)
- inverse[icol, k] *= oneOverPivot;
-
- // Do elimination of non-diagonal elements
- for (int j = 0; j < 4; ++j)
- {
- // check this isn't on the diagonal
- if (icol != j)
- {
- double f = inverse[j, icol];
- inverse[j, icol] = 0.0;
- for (int k = 0; k < 4; ++k)
- inverse[j, k] -= inverse[icol, k] * f;
- }
- }
- }
-
- for (int j = 3; j >= 0; --j)
- {
- int ir = rowIdx[j];
- int ic = colIdx[j];
- for (int k = 0; k < 4; ++k)
- {
- double f = inverse[k, ir];
- inverse[k, ir] = inverse[k, ic];
- inverse[k, ic] = f;
- }
- }
-
- mat.Row0 = new Vector4d (inverse[0, 0], inverse[0, 1], inverse[0, 2], inverse[0, 3]);
- mat.Row1 = new Vector4d (inverse[1, 0], inverse[1, 1], inverse[1, 2], inverse[1, 3]);
- mat.Row2 = new Vector4d (inverse[2, 0], inverse[2, 1], inverse[2, 2], inverse[2, 3]);
- mat.Row3 = new Vector4d (inverse[3, 0], inverse[3, 1], inverse[3, 2], inverse[3, 3]);
- return mat;
- }
-
- #endregion
-
- #region Transpose
-
- ///
- /// Calculate the transpose of the given matrix
- ///
- /// The matrix to transpose
- /// The transpose of the given matrix
- public static Matrix4d Transpose(Matrix4d mat)
- {
- return new Matrix4d(mat.Column0, mat.Column1, mat.Column2, mat.Column3);
- }
-
-
- ///
- /// Calculate the transpose of the given matrix
- ///
- /// The matrix to transpose
- /// The result of the calculation
- public static void Transpose(ref Matrix4d mat, out Matrix4d result)
- {
- result.Row0 = mat.Column0;
- result.Row1 = mat.Column1;
- result.Row2 = mat.Column2;
- result.Row3 = mat.Column3;
- }
-
- #endregion
-
- #endregion
-
- #region Operators
-
- ///
- /// Matrix multiplication
- ///
- /// left-hand operand
- /// right-hand operand
- /// A new Matrix4d which holds the result of the multiplication
- public static Matrix4d operator *(Matrix4d left, Matrix4d right)
- {
- return Matrix4d.Mult(left, right);
- }
-
- ///
- /// Matrix-scalar multiplication
- ///
- /// left-hand operand
- /// right-hand operand
- /// A new Matrix4d which holds the result of the multiplication
- public static Matrix4d operator *(Matrix4d
left, float right) - { - return Matrix4d.Mult(left, right); - } - - /// - /// Matrix addition - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4d which holds the result of the addition - public static Matrix4d operator +(Matrix4d left, Matrix4d right) - { - return Matrix4d.Add(left, right); - } - - /// - /// Matrix subtraction - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4d which holds the result of the subtraction - public static Matrix4d operator -(Matrix4d left, Matrix4d right) - { - return Matrix4d.Subtract(left, right); - } - - /// - /// Compares two instances for equality. - /// - /// The first instance. - /// The second instance. - /// True, if left equals right; false otherwise. - public static bool operator ==(Matrix4d left, Matrix4d right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for inequality. - /// - /// The first instance. - /// The second instance. - /// True, if left does not equal right; false otherwise. - public static bool operator !=(Matrix4d left, Matrix4d right) - { - return !left.Equals(right); - } - - #endregion - - #region Overrides - - #region public override string ToString() - - /// - /// Returns a System.String that represents the current Matrix44. - /// - /// - public override string ToString() - { - return String.Format("{0}\n{1}\n{2}\n{3}", Row0, Row1, Row2, Row3); - } - - #endregion - - #region public override int GetHashCode() - - /// - /// Returns the hashcode for this instance. - /// - /// A System.Int32 containing the unique hashcode for this instance. - public override int GetHashCode() - { - return Row0.GetHashCode() ^ Row1.GetHashCode() ^ Row2.GetHashCode() ^ Row3.GetHashCode(); - } - - #endregion - - #region public override bool Equals(object obj) - - /// - /// Indicates whether this instance and a specified object are equal. - /// - /// The object to compare to. - /// True if the instances are equal; false otherwise. - public override bool Equals(object obj) - { - if (!(obj is Matrix4d)) - return false; - - return this.Equals((Matrix4d)obj); - } - - #endregion - - #endregion - - #endregion - - #region IEquatable Members - - /// Indicates whether the current matrix is equal to another matrix. - /// A matrix to compare with this matrix. - /// true if the current matrix is equal to the matrix parameter; otherwise, false. - public bool Equals(Matrix4d other) - { - return - Row0 == other.Row0 && - Row1 == other.Row1 && - Row2 == other.Row2 && - Row3 == other.Row3; - } - - #endregion - } -} \ No newline at end of file diff --git a/OpenTK/Math/Matrix4x2.cs b/OpenTK/Math/Matrix4x2.cs deleted file mode 100644 index d548efda..00000000 --- a/OpenTK/Math/Matrix4x2.cs +++ /dev/null @@ -1,786 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - */ -#endregion - -using System; -using System.Runtime.InteropServices; - -namespace OpenTK -{ - /// - /// Represents a 4x2 matrix. - /// - public struct Matrix4x2 : IEquatable - { - #region Fields - - /// - /// Top row of the matrix. - /// - public Vector2 Row0; - - /// - /// Second row of the matrix. - /// - public Vector2 Row1; - - /// - /// Third row of the matrix. - /// - public Vector2 Row2; - - /// - /// Bottom row of the matrix. - /// - public Vector2 Row3; - - /// - /// The zero matrix. - /// - public static readonly Matrix4x2 Zero = new Matrix4x2(Vector2.Zero, Vector2.Zero, Vector2.Zero, Vector2.Zero); - - #endregion - - #region Constructors - - /// - /// Constructs a new instance. - /// - /// Top row of the matrix. - /// Second row of the matrix. - /// Third row of the matrix. - /// Bottom row of the matrix. - public Matrix4x2(Vector2 row0, Vector2 row1, Vector2 row2, Vector2 row3) - { - Row0 = row0; - Row1 = row1; - Row2 = row2; - Row3 = row3; - } - - - /// - /// Constructs a new instance - /// - /// First item of the first row of the matrix. - /// Second item of the first row of the matrix. - /// First item of the second row of the matrix. - /// Second item of the second row of the matrix. - /// First item of the third row of the matrix. - /// Second item of the third row of the matrix. - /// First item of the fourth row of the matrix. - /// Second item of the fourth row of the matrix. - public Matrix4x2( - float m00, float m01, - float m10, float m11, - float m20, float m21, - float m30, float m31) - { - Row0 = new Vector2(m00, m01); - Row1 = new Vector2(m10, m11); - Row2 = new Vector2(m20, m21); - Row3 = new Vector2(m30, m31); - } - - #endregion - - #region Public Members - - #region Properties - - /// - /// Gets or sets the first column of this matrix. - /// - public Vector4 Column0 - { - get { return new Vector4(Row0.X, Row1.X, Row2.X, Row3.X); } - set { Row0.X = value.X; Row1.X = value.Y; Row2.X = value.Z; Row3.X = value.W; } - } - - /// - /// Gets or sets the second column of this matrix. - /// - public Vector4 Column1 - { - get { return new Vector4(Row0.Y, Row1.Y, Row2.Y, Row3.X); } - set { Row0.Y = value.X; Row1.Y = value.Y; Row2.Y = value.Z; Row3.Y = value.W; } - } - - /// - /// Gets or sets the value at row 1, column 1 of this instance. - /// - public float M11 { get { return Row0.X; } set { Row0.X = value; } } - - /// - /// Gets or sets the value at row 1, column 2 of this instance. - /// - public float M12 { get { return Row0.Y; } set { Row0.Y = value; } } - - /// - /// Gets or sets the value at row 2, column 1 of this instance. - /// - public float M21 { get { return Row1.X; } set { Row1.X = value; } } - - /// - /// Gets or sets the value at row 2, column 2 of this instance. - /// - public float M22 { get { return Row1.Y; } set { Row1.Y = value; } } - - /// - /// Gets or sets the value at row 3, column 1 of this instance. - /// - public float M31 { get { return Row2.X; } set { Row2.X = value; } } - - /// - /// Gets or sets the value at row 3, column 2 of this instance. 
- /// - public float M32 { get { return Row2.Y; } set { Row2.Y = value; } } - - /// - /// Gets or sets the value at row 4, column 1 of this instance. - /// - public float M41 { get { return Row3.X; } set { Row3.X = value; } } - - /// - /// Gets or sets the value at row 4, column 2 of this instance. - /// - public float M42 { get { return Row3.Y; } set { Row3.Y = value; } } - - /// - /// Gets or sets the values along the main diagonal of the matrix. - /// - public Vector2 Diagonal - { - get - { - return new Vector2(Row0.X, Row1.Y); - } - set - { - Row0.X = value.X; - Row1.Y = value.Y; - } - } - - /// - /// Gets the trace of the matrix, the sum of the values along the diagonal. - /// - public float Trace { get { return Row0.X + Row1.Y; } } - - #endregion - - #region Indexers - - /// - /// Gets or sets the value at a specified row and column. - /// - public float this[int rowIndex, int columnIndex] - { - get - { - if (rowIndex == 0) return Row0[columnIndex]; - else if (rowIndex == 1) return Row1[columnIndex]; - else if (rowIndex == 2) return Row2[columnIndex]; - else if (rowIndex == 3) return Row3[columnIndex]; - throw new IndexOutOfRangeException("You tried to access this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - set - { - if (rowIndex == 0) Row0[columnIndex] = value; - else if (rowIndex == 1) Row1[columnIndex] = value; - else if (rowIndex == 2) Row2[columnIndex] = value; - else if (rowIndex == 3) Row3[columnIndex] = value; - else throw new IndexOutOfRangeException("You tried to set this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - } - - #endregion - - #region Static - - #region CreateRotation - - /// - /// Builds a rotation matrix. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix3x2 instance. - public static void CreateRotation(float angle, out Matrix4x2 result) - { - float cos = (float)System.Math.Cos(angle); - float sin = (float)System.Math.Sin(angle); - - result.Row0.X = cos; - result.Row0.Y = sin; - result.Row1.X = -sin; - result.Row1.Y = cos; - result.Row2.X = 0; - result.Row2.Y = 0; - result.Row3.X = 0; - result.Row3.Y = 0; - } - - /// - /// Builds a rotation matrix. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix3x2 instance. - public static Matrix4x2 CreateRotation(float angle) - { - Matrix4x2 result; - CreateRotation(angle, out result); - return result; - } - - #endregion - - #region CreateScale - - /// - /// Creates a scale matrix. - /// - /// Single scale factor for the x, y, and z axes. - /// A scale matrix. - public static void CreateScale(float scale, out Matrix4x2 result) - { - result.Row0.X = scale; - result.Row0.Y = 0; - result.Row1.X = 0; - result.Row1.Y = scale; - result.Row2.X = 0; - result.Row2.Y = 0; - result.Row3.X = 0; - result.Row3.Y = 0; - } - - /// - /// Creates a scale matrix. - /// - /// Single scale factor for the x and y axes. - /// A scale matrix. - public static Matrix4x2 CreateScale(float scale) - { - Matrix4x2 result; - CreateScale(scale, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factors for the x and y axes. - /// A scale matrix. - public static void CreateScale(Vector2 scale, out Matrix4x2 result) - { - result.Row0.X = scale.X; - result.Row0.Y = 0; - result.Row1.X = 0; - result.Row1.Y = scale.Y; - result.Row2.X = 0; - result.Row2.Y = 0; - result.Row3.X = 0; - result.Row3.Y = 0; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factors for the x and y axes. - /// A scale matrix. 
- public static Matrix4x2 CreateScale(Vector2 scale) - { - Matrix4x2 result; - CreateScale(scale, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factor for the x axis. - /// Scale factor for the y axis. - /// A scale matrix. - public static void CreateScale(float x, float y, out Matrix4x2 result) - { - result.Row0.X = x; - result.Row0.Y = 0; - result.Row1.X = 0; - result.Row1.Y = y; - result.Row2.X = 0; - result.Row2.Y = 0; - result.Row3.X = 0; - result.Row3.Y = 0; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factor for the x axis. - /// Scale factor for the y axis. - /// A scale matrix. - public static Matrix4x2 CreateScale(float x, float y) - { - Matrix4x2 result; - CreateScale(x, y, out result); - return result; - } - - #endregion - - #region Multiply Functions - - /// - /// Multiplies and instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static void Mult(ref Matrix4x2 left, float right, out Matrix4x2 result) - { - result.Row0.X = left.Row0.X * right; - result.Row0.Y = left.Row0.Y * right; - result.Row1.X = left.Row1.X * right; - result.Row1.Y = left.Row1.Y * right; - result.Row2.X = left.Row2.X * right; - result.Row2.Y = left.Row2.Y * right; - result.Row3.X = left.Row3.X * right; - result.Row3.Y = left.Row3.Y * right; - } - - /// - /// Multiplies and instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix4x2 Mult(Matrix4x2 left, float right) - { - Matrix4x2 result; - Mult(ref left, right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static void Mult(ref Matrix4x2 left, ref Matrix2 right, out Matrix4x2 result) - { - float lM11 = left.Row0.X, lM12 = left.Row0.Y, - lM21 = left.Row1.X, lM22 = left.Row1.Y, - lM31 = left.Row2.X, lM32 = left.Row2.Y, - lM41 = left.Row3.X, lM42 = left.Row3.Y, - rM11 = right.Row0.X, rM12 = right.Row0.Y, - rM21 = right.Row1.X, rM22 = right.Row1.Y; - - result.Row0.X = (lM11 * rM11) + (lM12 * rM21); - result.Row0.Y = (lM11 * rM12) + (lM12 * rM22); - result.Row1.X = (lM21 * rM11) + (lM22 * rM21); - result.Row1.Y = (lM21 * rM12) + (lM22 * rM22); - result.Row2.X = (lM31 * rM11) + (lM32 * rM21); - result.Row2.Y = (lM31 * rM12) + (lM32 * rM22); - result.Row3.X = (lM41 * rM11) + (lM42 * rM21); - result.Row3.Y = (lM41 * rM12) + (lM42 * rM22); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix4x2 Mult(Matrix4x2 left, Matrix2 right) - { - Matrix4x2 result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. 
- public static void Mult(ref Matrix4x2 left, ref Matrix2x3 right, out Matrix4x3 result) - { - float lM11 = left.Row0.X, lM12 = left.Row0.Y, - lM21 = left.Row1.X, lM22 = left.Row1.Y, - lM31 = left.Row2.X, lM32 = left.Row2.Y, - lM41 = left.Row3.X, lM42 = left.Row3.Y, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z; - - result.Row0.X = (lM11 * rM11) + (lM12 * rM21); - result.Row0.Y = (lM11 * rM12) + (lM12 * rM22); - result.Row0.Z = (lM11 * rM13) + (lM12 * rM23); - result.Row1.X = (lM21 * rM11) + (lM22 * rM21); - result.Row1.Y = (lM21 * rM12) + (lM22 * rM22); - result.Row1.Z = (lM21 * rM13) + (lM22 * rM23); - result.Row2.X = (lM31 * rM11) + (lM32 * rM21); - result.Row2.Y = (lM31 * rM12) + (lM32 * rM22); - result.Row2.Z = (lM31 * rM13) + (lM32 * rM23); - result.Row3.X = (lM41 * rM11) + (lM42 * rM21); - result.Row3.Y = (lM41 * rM12) + (lM42 * rM22); - result.Row3.Z = (lM41 * rM13) + (lM42 * rM23); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix4x3 Mult(Matrix4x2 left, Matrix2x3 right) - { - Matrix4x3 result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static void Mult(ref Matrix4x2 left, ref Matrix2x4 right, out Matrix4 result) - { - float lM11 = left.Row0.X, lM12 = left.Row0.Y, - lM21 = left.Row1.X, lM22 = left.Row1.Y, - lM31 = left.Row2.X, lM32 = left.Row2.Y, - lM41 = left.Row3.X, lM42 = left.Row3.Y, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, rM14 = right.Row0.W, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z, rM24 = right.Row1.W; - - result.Row0.X = (lM11 * rM11) + (lM12 * rM21); - result.Row0.Y = (lM11 * rM12) + (lM12 * rM22); - result.Row0.Z = (lM11 * rM13) + (lM12 * rM23); - result.Row0.W = (lM11 * rM14) + (lM12 * rM24); - result.Row1.X = (lM21 * rM11) + (lM22 * rM21); - result.Row1.Y = (lM21 * rM12) + (lM22 * rM22); - result.Row1.Z = (lM21 * rM13) + (lM22 * rM23); - result.Row1.W = (lM21 * rM14) + (lM22 * rM24); - result.Row2.X = (lM31 * rM11) + (lM32 * rM21); - result.Row2.Y = (lM31 * rM12) + (lM32 * rM22); - result.Row2.Z = (lM31 * rM13) + (lM32 * rM23); - result.Row2.W = (lM31 * rM14) + (lM32 * rM24); - result.Row3.X = (lM41 * rM11) + (lM42 * rM21); - result.Row3.Y = (lM41 * rM12) + (lM42 * rM22); - result.Row3.Z = (lM41 * rM13) + (lM42 * rM23); - result.Row3.W = (lM41 * rM14) + (lM42 * rM24); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix4 Mult(Matrix4x2 left, Matrix2x4 right) - { - Matrix4 result; - Mult(ref left, ref right, out result); - return result; - } - - #endregion - - #region Add - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. 
- public static void Add(ref Matrix4x2 left, ref Matrix4x2 right, out Matrix4x2 result) - { - result.Row0.X = left.Row0.X + right.Row0.X; - result.Row0.Y = left.Row0.Y + right.Row0.Y; - result.Row1.X = left.Row1.X + right.Row1.X; - result.Row1.Y = left.Row1.Y + right.Row1.Y; - result.Row2.X = left.Row2.X + right.Row2.X; - result.Row2.Y = left.Row2.Y + right.Row2.Y; - result.Row3.X = left.Row3.X + right.Row3.X; - result.Row3.Y = left.Row3.Y + right.Row3.Y; - } - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. - public static Matrix4x2 Add(Matrix4x2 left, Matrix4x2 right) - { - Matrix4x2 result; - Add(ref left, ref right, out result); - return result; - } - - #endregion - - #region Subtract - - /// - /// Subtracts two instances. - /// - /// The left operand of the subtraction. - /// The right operand of the subtraction. - /// A new instance that is the result of the subtraction. - public static void Subtract(ref Matrix4x2 left, ref Matrix4x2 right, out Matrix4x2 result) - { - result.Row0.X = left.Row0.X - right.Row0.X; - result.Row0.Y = left.Row0.Y - right.Row0.Y; - result.Row1.X = left.Row1.X - right.Row1.X; - result.Row1.Y = left.Row1.Y - right.Row1.Y; - result.Row2.X = left.Row2.X - right.Row2.X; - result.Row2.Y = left.Row2.Y - right.Row2.Y; - result.Row3.X = left.Row3.X - right.Row3.X; - result.Row3.Y = left.Row3.Y - right.Row3.Y; - } - - /// - /// Subtracts two instances. - /// - /// The left operand of the subtraction. - /// The right operand of the subtraction. - /// A new instance that is the result of the subtraction. - public static Matrix4x2 Subtract(Matrix4x2 left, Matrix4x2 right) - { - Matrix4x2 result; - Subtract(ref left, ref right, out result); - return result; - } - - #endregion - - #region Transpose - - /// - /// Calculate the transpose of the given matrix. - /// - /// The matrix to transpose. - /// The transpose of the given matrix. - public static void Transpose(ref Matrix4x2 mat, out Matrix2x4 result) - { - result.Row0.X = mat.Row0.X; - result.Row0.Y = mat.Row1.X; - result.Row0.Z = mat.Row2.X; - result.Row0.W = mat.Row3.X; - result.Row1.X = mat.Row0.Y; - result.Row1.Y = mat.Row1.Y; - result.Row1.Z = mat.Row2.Y; - result.Row1.W = mat.Row3.Y; - } - - /// - /// Calculate the transpose of the given matrix. - /// - /// The matrix to transpose. - /// The transpose of the given matrix. - public static Matrix2x4 Transpose(Matrix4x2 mat) - { - Matrix2x4 result; - Transpose(ref mat, out result); - return result; - } - - #endregion - - #endregion - - #region Operators - - /// - /// Scalar multiplication. - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4x2 which holds the result of the multiplication - public static Matrix4x2 operator *(float left, Matrix4x2 right) - { - return Mult(right, left); - } - - /// - /// Scalar multiplication. 
- /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4x2 which holds the result of the multiplication - public static Matrix4x2 operator *(Matrix4x2 left, float right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2 which holds the result of the multiplication - public static Matrix4x2 operator *(Matrix4x2 left, Matrix2 right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4x3 which holds the result of the multiplication - public static Matrix4x3 operator *(Matrix4x2 left, Matrix2x3 right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4 which holds the result of the multiplication - public static Matrix4 operator *(Matrix4x2 left, Matrix2x4 right) - { - return Mult(left, right); - } - - /// - /// Matrix addition - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4x2 which holds the result of the addition - public static Matrix4x2 operator +(Matrix4x2 left, Matrix4x2 right) - { - return Add(left, right); - } - - /// - /// Matrix subtraction - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4x2 which holds the result of the subtraction - public static Matrix4x2 operator -(Matrix4x2 left, Matrix4x2 right) - { - return Subtract(left, right); - } - - /// - /// Compares two instances for equality. - /// - /// The first instance. - /// The second instance. - /// True, if left equals right; false otherwise. - public static bool operator ==(Matrix4x2 left, Matrix4x2 right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for inequality. - /// - /// The first instance. - /// The second instance. - /// True, if left does not equal right; false otherwise. - public static bool operator !=(Matrix4x2 left, Matrix4x2 right) - { - return !left.Equals(right); - } - - #endregion - - #region Overrides - - #region public override string ToString() - - /// - /// Returns a System.String that represents the current Matrix3d. - /// - /// The string representation of the matrix. - public override string ToString() - { - return String.Format("{0}\n{1}\n{2}\n{3}", Row0, Row1, Row2, Row3); - } - - #endregion - - #region public override int GetHashCode() - - /// - /// Returns the hashcode for this instance. - /// - /// A System.Int32 containing the unique hashcode for this instance. - public override int GetHashCode() - { - return Row0.GetHashCode() ^ Row1.GetHashCode() ^ Row2.GetHashCode() ^ Row3.GetHashCode(); - } - - #endregion - - #region public override bool Equals(object obj) - - /// - /// Indicates whether this instance and a specified object are equal. - /// - /// The object to compare to. - /// True if the instances are equal; false otherwise. - public override bool Equals(object obj) - { - if (!(obj is Matrix4x2)) - return false; - - return this.Equals((Matrix4x2)obj); - } - - #endregion - - #endregion - - #endregion - - #region IEquatable Members - - /// - /// Indicates whether the current matrix is equal to another matrix. - /// - /// An matrix to compare with this matrix. - /// true if the current matrix is equal to the matrix parameter; otherwise, false. 
- public bool Equals(Matrix4x2 other) - { - return - Row0 == other.Row0 && - Row1 == other.Row1 && - Row2 == other.Row2 && - Row3 == other.Row3; - } - - #endregion - } -} diff --git a/OpenTK/Math/Matrix4x2d.cs b/OpenTK/Math/Matrix4x2d.cs deleted file mode 100644 index bf494a19..00000000 --- a/OpenTK/Math/Matrix4x2d.cs +++ /dev/null @@ -1,786 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - */ -#endregion - -using System; -using System.Runtime.InteropServices; - -namespace OpenTK -{ - /// - /// Represents a 4x2 matrix. - /// - public struct Matrix4x2d : IEquatable - { - #region Fields - - /// - /// Top row of the matrix. - /// - public Vector2d Row0; - - /// - /// Second row of the matrix. - /// - public Vector2d Row1; - - /// - /// Third row of the matrix. - /// - public Vector2d Row2; - - /// - /// Bottom row of the matrix. - /// - public Vector2d Row3; - - /// - /// The zero matrix. - /// - public static readonly Matrix4x2d Zero = new Matrix4x2d(Vector2d.Zero, Vector2d.Zero, Vector2d.Zero, Vector2d.Zero); - - #endregion - - #region Constructors - - /// - /// Constructs a new instance. - /// - /// Top row of the matrix. - /// Second row of the matrix. - /// Third row of the matrix. - /// Bottom row of the matrix. - public Matrix4x2d(Vector2d row0, Vector2d row1, Vector2d row2, Vector2d row3) - { - Row0 = row0; - Row1 = row1; - Row2 = row2; - Row3 = row3; - } - - - /// - /// Constructs a new instance - /// - /// First item of the first row of the matrix. - /// Second item of the first row of the matrix. - /// First item of the second row of the matrix. - /// Second item of the second row of the matrix. - /// First item of the third row of the matrix. - /// Second item of the third row of the matrix. - /// First item of the fourth row of the matrix. - /// Second item of the fourth row of the matrix. - public Matrix4x2d( - double m00, double m01, - double m10, double m11, - double m20, double m21, - double m30, double m31) - { - Row0 = new Vector2d(m00, m01); - Row1 = new Vector2d(m10, m11); - Row2 = new Vector2d(m20, m21); - Row3 = new Vector2d(m30, m31); - } - - #endregion - - #region Public Members - - #region Properties - - /// - /// Gets or sets the first column of this matrix. - /// - public Vector4d Column0 - { - get { return new Vector4d(Row0.X, Row1.X, Row2.X, Row3.X); } - set { Row0.X = value.X; Row1.X = value.Y; Row2.X = value.Z; Row3.X = value.W; } - } - - /// - /// Gets or sets the second column of this matrix. 
- /// - public Vector4d Column1 - { - get { return new Vector4d(Row0.Y, Row1.Y, Row2.Y, Row3.X); } - set { Row0.Y = value.X; Row1.Y = value.Y; Row2.Y = value.Z; Row3.Y = value.W; } - } - - /// - /// Gets or sets the value at row 1, column 1 of this instance. - /// - public double M11 { get { return Row0.X; } set { Row0.X = value; } } - - /// - /// Gets or sets the value at row 1, column 2 of this instance. - /// - public double M12 { get { return Row0.Y; } set { Row0.Y = value; } } - - /// - /// Gets or sets the value at row 2, column 1 of this instance. - /// - public double M21 { get { return Row1.X; } set { Row1.X = value; } } - - /// - /// Gets or sets the value at row 2, column 2 of this instance. - /// - public double M22 { get { return Row1.Y; } set { Row1.Y = value; } } - - /// - /// Gets or sets the value at row 3, column 1 of this instance. - /// - public double M31 { get { return Row2.X; } set { Row2.X = value; } } - - /// - /// Gets or sets the value at row 3, column 2 of this instance. - /// - public double M32 { get { return Row2.Y; } set { Row2.Y = value; } } - - /// - /// Gets or sets the value at row 4, column 1 of this instance. - /// - public double M41 { get { return Row3.X; } set { Row3.X = value; } } - - /// - /// Gets or sets the value at row 4, column 2 of this instance. - /// - public double M42 { get { return Row3.Y; } set { Row3.Y = value; } } - - /// - /// Gets or sets the values along the main diagonal of the matrix. - /// - public Vector2d Diagonal - { - get - { - return new Vector2d(Row0.X, Row1.Y); - } - set - { - Row0.X = value.X; - Row1.Y = value.Y; - } - } - - /// - /// Gets the trace of the matrix, the sum of the values along the diagonal. - /// - public double Trace { get { return Row0.X + Row1.Y; } } - - #endregion - - #region Indexers - - /// - /// Gets or sets the value at a specified row and column. - /// - public double this[int rowIndex, int columnIndex] - { - get - { - if (rowIndex == 0) return Row0[columnIndex]; - else if (rowIndex == 1) return Row1[columnIndex]; - else if (rowIndex == 2) return Row2[columnIndex]; - else if (rowIndex == 3) return Row3[columnIndex]; - throw new IndexOutOfRangeException("You tried to access this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - set - { - if (rowIndex == 0) Row0[columnIndex] = value; - else if (rowIndex == 1) Row1[columnIndex] = value; - else if (rowIndex == 2) Row2[columnIndex] = value; - else if (rowIndex == 3) Row3[columnIndex] = value; - else throw new IndexOutOfRangeException("You tried to set this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - } - - #endregion - - #region Static - - #region CreateRotation - - /// - /// Builds a rotation matrix. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix3x2 instance. - public static void CreateRotation(double angle, out Matrix4x2d result) - { - double cos = (double)System.Math.Cos(angle); - double sin = (double)System.Math.Sin(angle); - - result.Row0.X = cos; - result.Row0.Y = sin; - result.Row1.X = -sin; - result.Row1.Y = cos; - result.Row2.X = 0; - result.Row2.Y = 0; - result.Row3.X = 0; - result.Row3.Y = 0; - } - - /// - /// Builds a rotation matrix. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix3x2 instance. - public static Matrix4x2d CreateRotation(double angle) - { - Matrix4x2d result; - CreateRotation(angle, out result); - return result; - } - - #endregion - - #region CreateScale - - /// - /// Creates a scale matrix. 
- /// - /// Single scale factor for the x, y, and z axes. - /// A scale matrix. - public static void CreateScale(double scale, out Matrix4x2d result) - { - result.Row0.X = scale; - result.Row0.Y = 0; - result.Row1.X = 0; - result.Row1.Y = scale; - result.Row2.X = 0; - result.Row2.Y = 0; - result.Row3.X = 0; - result.Row3.Y = 0; - } - - /// - /// Creates a scale matrix. - /// - /// Single scale factor for the x and y axes. - /// A scale matrix. - public static Matrix4x2d CreateScale(double scale) - { - Matrix4x2d result; - CreateScale(scale, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factors for the x and y axes. - /// A scale matrix. - public static void CreateScale(Vector2d scale, out Matrix4x2d result) - { - result.Row0.X = scale.X; - result.Row0.Y = 0; - result.Row1.X = 0; - result.Row1.Y = scale.Y; - result.Row2.X = 0; - result.Row2.Y = 0; - result.Row3.X = 0; - result.Row3.Y = 0; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factors for the x and y axes. - /// A scale matrix. - public static Matrix4x2d CreateScale(Vector2d scale) - { - Matrix4x2d result; - CreateScale(scale, out result); - return result; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factor for the x axis. - /// Scale factor for the y axis. - /// A scale matrix. - public static void CreateScale(double x, double y, out Matrix4x2d result) - { - result.Row0.X = x; - result.Row0.Y = 0; - result.Row1.X = 0; - result.Row1.Y = y; - result.Row2.X = 0; - result.Row2.Y = 0; - result.Row3.X = 0; - result.Row3.Y = 0; - } - - /// - /// Creates a scale matrix. - /// - /// Scale factor for the x axis. - /// Scale factor for the y axis. - /// A scale matrix. - public static Matrix4x2d CreateScale(double x, double y) - { - Matrix4x2d result; - CreateScale(x, y, out result); - return result; - } - - #endregion - - #region Multiply Functions - - /// - /// Multiplies and instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static void Mult(ref Matrix4x2d left, double right, out Matrix4x2d result) - { - result.Row0.X = left.Row0.X * right; - result.Row0.Y = left.Row0.Y * right; - result.Row1.X = left.Row1.X * right; - result.Row1.Y = left.Row1.Y * right; - result.Row2.X = left.Row2.X * right; - result.Row2.Y = left.Row2.Y * right; - result.Row3.X = left.Row3.X * right; - result.Row3.Y = left.Row3.Y * right; - } - - /// - /// Multiplies and instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix4x2d Mult(Matrix4x2d left, double right) - { - Matrix4x2d result; - Mult(ref left, right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. 
- public static void Mult(ref Matrix4x2d left, ref Matrix2d right, out Matrix4x2d result) - { - double lM11 = left.Row0.X, lM12 = left.Row0.Y, - lM21 = left.Row1.X, lM22 = left.Row1.Y, - lM31 = left.Row2.X, lM32 = left.Row2.Y, - lM41 = left.Row3.X, lM42 = left.Row3.Y, - rM11 = right.Row0.X, rM12 = right.Row0.Y, - rM21 = right.Row1.X, rM22 = right.Row1.Y; - - result.Row0.X = (lM11 * rM11) + (lM12 * rM21); - result.Row0.Y = (lM11 * rM12) + (lM12 * rM22); - result.Row1.X = (lM21 * rM11) + (lM22 * rM21); - result.Row1.Y = (lM21 * rM12) + (lM22 * rM22); - result.Row2.X = (lM31 * rM11) + (lM32 * rM21); - result.Row2.Y = (lM31 * rM12) + (lM32 * rM22); - result.Row3.X = (lM41 * rM11) + (lM42 * rM21); - result.Row3.Y = (lM41 * rM12) + (lM42 * rM22); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix4x2d Mult(Matrix4x2d left, Matrix2d right) - { - Matrix4x2d result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static void Mult(ref Matrix4x2d left, ref Matrix2x3d right, out Matrix4x3d result) - { - double lM11 = left.Row0.X, lM12 = left.Row0.Y, - lM21 = left.Row1.X, lM22 = left.Row1.Y, - lM31 = left.Row2.X, lM32 = left.Row2.Y, - lM41 = left.Row3.X, lM42 = left.Row3.Y, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z; - - result.Row0.X = (lM11 * rM11) + (lM12 * rM21); - result.Row0.Y = (lM11 * rM12) + (lM12 * rM22); - result.Row0.Z = (lM11 * rM13) + (lM12 * rM23); - result.Row1.X = (lM21 * rM11) + (lM22 * rM21); - result.Row1.Y = (lM21 * rM12) + (lM22 * rM22); - result.Row1.Z = (lM21 * rM13) + (lM22 * rM23); - result.Row2.X = (lM31 * rM11) + (lM32 * rM21); - result.Row2.Y = (lM31 * rM12) + (lM32 * rM22); - result.Row2.Z = (lM31 * rM13) + (lM32 * rM23); - result.Row3.X = (lM41 * rM11) + (lM42 * rM21); - result.Row3.Y = (lM41 * rM12) + (lM42 * rM22); - result.Row3.Z = (lM41 * rM13) + (lM42 * rM23); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix4x3d Mult(Matrix4x2d left, Matrix2x3d right) - { - Matrix4x3d result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. 
- public static void Mult(ref Matrix4x2d left, ref Matrix2x4d right, out Matrix4d result) - { - double lM11 = left.Row0.X, lM12 = left.Row0.Y, - lM21 = left.Row1.X, lM22 = left.Row1.Y, - lM31 = left.Row2.X, lM32 = left.Row2.Y, - lM41 = left.Row3.X, lM42 = left.Row3.Y, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, rM14 = right.Row0.W, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z, rM24 = right.Row1.W; - - result.Row0.X = (lM11 * rM11) + (lM12 * rM21); - result.Row0.Y = (lM11 * rM12) + (lM12 * rM22); - result.Row0.Z = (lM11 * rM13) + (lM12 * rM23); - result.Row0.W = (lM11 * rM14) + (lM12 * rM24); - result.Row1.X = (lM21 * rM11) + (lM22 * rM21); - result.Row1.Y = (lM21 * rM12) + (lM22 * rM22); - result.Row1.Z = (lM21 * rM13) + (lM22 * rM23); - result.Row1.W = (lM21 * rM14) + (lM22 * rM24); - result.Row2.X = (lM31 * rM11) + (lM32 * rM21); - result.Row2.Y = (lM31 * rM12) + (lM32 * rM22); - result.Row2.Z = (lM31 * rM13) + (lM32 * rM23); - result.Row2.W = (lM31 * rM14) + (lM32 * rM24); - result.Row3.X = (lM41 * rM11) + (lM42 * rM21); - result.Row3.Y = (lM41 * rM12) + (lM42 * rM22); - result.Row3.Z = (lM41 * rM13) + (lM42 * rM23); - result.Row3.W = (lM41 * rM14) + (lM42 * rM24); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication. - public static Matrix4d Mult(Matrix4x2d left, Matrix2x4d right) - { - Matrix4d result; - Mult(ref left, ref right, out result); - return result; - } - - #endregion - - #region Add - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. - public static void Add(ref Matrix4x2d left, ref Matrix4x2d right, out Matrix4x2d result) - { - result.Row0.X = left.Row0.X + right.Row0.X; - result.Row0.Y = left.Row0.Y + right.Row0.Y; - result.Row1.X = left.Row1.X + right.Row1.X; - result.Row1.Y = left.Row1.Y + right.Row1.Y; - result.Row2.X = left.Row2.X + right.Row2.X; - result.Row2.Y = left.Row2.Y + right.Row2.Y; - result.Row3.X = left.Row3.X + right.Row3.X; - result.Row3.Y = left.Row3.Y + right.Row3.Y; - } - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. - public static Matrix4x2d Add(Matrix4x2d left, Matrix4x2d right) - { - Matrix4x2d result; - Add(ref left, ref right, out result); - return result; - } - - #endregion - - #region Subtract - - /// - /// Subtracts two instances. - /// - /// The left operand of the subtraction. - /// The right operand of the subtraction. - /// A new instance that is the result of the subtraction. - public static void Subtract(ref Matrix4x2d left, ref Matrix4x2d right, out Matrix4x2d result) - { - result.Row0.X = left.Row0.X - right.Row0.X; - result.Row0.Y = left.Row0.Y - right.Row0.Y; - result.Row1.X = left.Row1.X - right.Row1.X; - result.Row1.Y = left.Row1.Y - right.Row1.Y; - result.Row2.X = left.Row2.X - right.Row2.X; - result.Row2.Y = left.Row2.Y - right.Row2.Y; - result.Row3.X = left.Row3.X - right.Row3.X; - result.Row3.Y = left.Row3.Y - right.Row3.Y; - } - - /// - /// Subtracts two instances. - /// - /// The left operand of the subtraction. - /// The right operand of the subtraction. - /// A new instance that is the result of the subtraction. 
- public static Matrix4x2d Subtract(Matrix4x2d left, Matrix4x2d right) - { - Matrix4x2d result; - Subtract(ref left, ref right, out result); - return result; - } - - #endregion - - #region Transpose - - /// - /// Calculate the transpose of the given matrix. - /// - /// The matrix to transpose. - /// The transpose of the given matrix. - public static void Transpose(ref Matrix4x2d mat, out Matrix2x4d result) - { - result.Row0.X = mat.Row0.X; - result.Row0.Y = mat.Row1.X; - result.Row0.Z = mat.Row2.X; - result.Row0.W = mat.Row3.X; - result.Row1.X = mat.Row0.Y; - result.Row1.Y = mat.Row1.Y; - result.Row1.Z = mat.Row2.Y; - result.Row1.W = mat.Row3.Y; - } - - /// - /// Calculate the transpose of the given matrix. - /// - /// The matrix to transpose. - /// The transpose of the given matrix. - public static Matrix2x4d Transpose(Matrix4x2d mat) - { - Matrix2x4d result; - Transpose(ref mat, out result); - return result; - } - - #endregion - - #endregion - - #region Operators - - /// - /// Scalar multiplication. - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4x2d which holds the result of the multiplication - public static Matrix4x2d operator *(double left, Matrix4x2d right) - { - return Mult(right, left); - } - - /// - /// Scalar multiplication. - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4x2d which holds the result of the multiplication - public static Matrix4x2d operator *(Matrix4x2d left, double right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix2d which holds the result of the multiplication - public static Matrix4x2d operator *(Matrix4x2d left, Matrix2d right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4x3d which holds the result of the multiplication - public static Matrix4x3d operator *(Matrix4x2d left, Matrix2x3d right) - { - return Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4d which holds the result of the multiplication - public static Matrix4d operator *(Matrix4x2d left, Matrix2x4d right) - { - return Mult(left, right); - } - - /// - /// Matrix addition - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4x2d which holds the result of the addition - public static Matrix4x2d operator +(Matrix4x2d left, Matrix4x2d right) - { - return Add(left, right); - } - - /// - /// Matrix subtraction - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4x2d which holds the result of the subtraction - public static Matrix4x2d operator -(Matrix4x2d left, Matrix4x2d right) - { - return Subtract(left, right); - } - - /// - /// Compares two instances for equality. - /// - /// The first instance. - /// The second instance. - /// True, if left equals right; false otherwise. - public static bool operator ==(Matrix4x2d left, Matrix4x2d right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for inequality. - /// - /// The first instance. - /// The second instance. - /// True, if left does not equal right; false otherwise. - public static bool operator !=(Matrix4x2d left, Matrix4x2d right) - { - return !left.Equals(right); - } - - #endregion - - #region Overrides - - #region public override string ToString() - - /// - /// Returns a System.String that represents the current Matrix3d. 
- /// - /// The string representation of the matrix. - public override string ToString() - { - return String.Format("{0}\n{1}\n{2}\n{3}", Row0, Row1, Row2, Row3); - } - - #endregion - - #region public override int GetHashCode() - - /// - /// Returns the hashcode for this instance. - /// - /// A System.Int32 containing the unique hashcode for this instance. - public override int GetHashCode() - { - return Row0.GetHashCode() ^ Row1.GetHashCode() ^ Row2.GetHashCode() ^ Row3.GetHashCode(); - } - - #endregion - - #region public override bool Equals(object obj) - - /// - /// Indicates whether this instance and a specified object are equal. - /// - /// The object to compare to. - /// True if the instances are equal; false otherwise. - public override bool Equals(object obj) - { - if (!(obj is Matrix4x2d)) - return false; - - return this.Equals((Matrix4x2d)obj); - } - - #endregion - - #endregion - - #endregion - - #region IEquatable Members - - /// - /// Indicates whether the current matrix is equal to another matrix. - /// - /// An matrix to compare with this matrix. - /// true if the current matrix is equal to the matrix parameter; otherwise, false. - public bool Equals(Matrix4x2d other) - { - return - Row0 == other.Row0 && - Row1 == other.Row1 && - Row2 == other.Row2 && - Row3 == other.Row3; - } - - #endregion - } -} diff --git a/OpenTK/Math/Matrix4x3.cs b/OpenTK/Math/Matrix4x3.cs deleted file mode 100644 index e7fd6788..00000000 --- a/OpenTK/Math/Matrix4x3.cs +++ /dev/null @@ -1,1007 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - */ -#endregion - -using System; -using System.Runtime.InteropServices; - -namespace OpenTK -{ - /// - /// Represents a 3x4 matrix. - /// - [Serializable] - [StructLayout(LayoutKind.Sequential)] - public struct Matrix4x3 : IEquatable - { - #region Fields - - /// - /// Top row of the matrix - /// - public Vector3 Row0; - - /// - /// 2nd row of the matrix - /// - public Vector3 Row1; - - /// - /// 3rd row of the matrix - /// - public Vector3 Row2; - - /// - /// Bottom row of the matrix - /// - public Vector3 Row3; - - /// - /// The zero matrix - /// - public static readonly Matrix4x3 Zero = new Matrix4x3(Vector3.Zero, Vector3.Zero, Vector3.Zero, Vector3.Zero); - - #endregion - - #region Constructors - - /// - /// Constructs a new instance. 
- /// - /// Top row of the matrix - /// Second row of the matrix - /// Third row of the matrix - /// Bottom row of the matrix - public Matrix4x3(Vector3 row0, Vector3 row1, Vector3 row2, Vector3 row3) - { - Row0 = row0; - Row1 = row1; - Row2 = row2; - Row3 = row3; - } - - /// - /// Constructs a new instance. - /// - /// First item of the first row of the matrix. - /// Second item of the first row of the matrix. - /// Third item of the first row of the matrix. - /// First item of the second row of the matrix. - /// Second item of the second row of the matrix. - /// Third item of the second row of the matrix. - /// First item of the third row of the matrix. - /// Second item of the third row of the matrix. - /// Third item of the third row of the matrix. - /// First item of the fourth row of the matrix. - /// Second item of the fourth row of the matrix. - /// Third item of the fourth row of the matrix. - public Matrix4x3( - float m00, float m01, float m02, - float m10, float m11, float m12, - float m20, float m21, float m22, - float m30, float m31, float m32) - { - Row0 = new Vector3(m00, m01, m02); - Row1 = new Vector3(m10, m11, m12); - Row2 = new Vector3(m20, m21, m22); - Row3 = new Vector3(m30, m31, m32); - } - - #endregion - - #region Public Members - - #region Properties - - /// - /// Gets the first column of this matrix. - /// - public Vector4 Column0 - { - get { return new Vector4(Row0.X, Row1.X, Row2.X, Row3.X); } - } - - /// - /// Gets the second column of this matrix. - /// - public Vector4 Column1 - { - get { return new Vector4(Row0.Y, Row1.Y, Row2.Y, Row3.Y); } - } - - /// - /// Gets the third column of this matrix. - /// - public Vector4 Column2 - { - get { return new Vector4(Row0.Z, Row1.Z, Row2.Z, Row3.Z); } - } - - /// - /// Gets or sets the value at row 1, column 1 of this instance. - /// - public float M11 { get { return Row0.X; } set { Row0.X = value; } } - - /// - /// Gets or sets the value at row 1, column 2 of this instance. - /// - public float M12 { get { return Row0.Y; } set { Row0.Y = value; } } - - /// - /// Gets or sets the value at row 1, column 3 of this instance. - /// - public float M13 { get { return Row0.Z; } set { Row0.Z = value; } } - - /// - /// Gets or sets the value at row 2, column 1 of this instance. - /// - public float M21 { get { return Row1.X; } set { Row1.X = value; } } - - /// - /// Gets or sets the value at row 2, column 2 of this instance. - /// - public float M22 { get { return Row1.Y; } set { Row1.Y = value; } } - - /// - /// Gets or sets the value at row 2, column 3 of this instance. - /// - public float M23 { get { return Row1.Z; } set { Row1.Z = value; } } - - /// - /// Gets or sets the value at row 3, column 1 of this instance. - /// - public float M31 { get { return Row2.X; } set { Row2.X = value; } } - - /// - /// Gets or sets the value at row 3, column 2 of this instance. - /// - public float M32 { get { return Row2.Y; } set { Row2.Y = value; } } - - /// - /// Gets or sets the value at row 3, column 3 of this instance. - /// - public float M33 { get { return Row2.Z; } set { Row2.Z = value; } } - - /// - /// Gets or sets the value at row 4, column 1 of this instance. - /// - public float M41 { get { return Row3.X; } set { Row3.X = value; } } - - /// - /// Gets or sets the value at row 4, column 2 of this instance. - /// - public float M42 { get { return Row3.Y; } set { Row3.Y = value; } } - - /// - /// Gets or sets the value at row 4, column 3 of this instance. 
- /// - public float M43 { get { return Row3.Z; } set { Row3.Z = value; } } - - /// - /// Gets or sets the values along the main diagonal of the matrix. - /// - public Vector3 Diagonal - { - get - { - return new Vector3(Row0.X, Row1.Y, Row2.Z); - } - set - { - Row0.X = value.X; - Row1.Y = value.Y; - Row2.Z = value.Z; - } - } - - /// - /// Gets the trace of the matrix, the sum of the values along the diagonal. - /// - public float Trace { get { return Row0.X + Row1.Y + Row2.Z; } } - - #endregion - - #region Indexers - - /// - /// Gets or sets the value at a specified row and column. - /// - public float this[int rowIndex, int columnIndex] - { - get - { - if (rowIndex == 0) return Row0[columnIndex]; - else if (rowIndex == 1) return Row1[columnIndex]; - else if (rowIndex == 2) return Row2[columnIndex]; - else if (rowIndex == 3) return Row3[columnIndex]; - throw new IndexOutOfRangeException("You tried to access this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - set - { - if (rowIndex == 0) Row0[columnIndex] = value; - else if (rowIndex == 1) Row1[columnIndex] = value; - else if (rowIndex == 2) Row2[columnIndex] = value; - else if (rowIndex == 3) Row3[columnIndex] = value; - else throw new IndexOutOfRangeException("You tried to set this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - } - - #endregion - - #region Instance - - #region public void Invert() - - /// - /// Converts this instance into it's inverse by inverting the upper-left 3x3 and replacing Row3. - /// - public void Invert() - { - this = Matrix4x3.Invert(this); - } - - #endregion - - #endregion - - #region Static - - #region CreateFromAxisAngle - - /// - /// Build a rotation matrix from the specified axis/angle rotation. - /// - /// The axis to rotate about. - /// Angle in radians to rotate counter-clockwise (looking in the direction of the given axis). - /// A matrix instance. - public static void CreateFromAxisAngle(Vector3 axis, float angle, out Matrix4x3 result) - { - axis.Normalize(); - float axisX = axis.X, axisY = axis.Y, axisZ = axis.Z; - - float cos = (float)System.Math.Cos(-angle); - float sin = (float)System.Math.Sin(-angle); - float t = 1.0f - cos; - - float tXX = t * axisX * axisX, - tXY = t * axisX * axisY, - tXZ = t * axisX * axisZ, - tYY = t * axisY * axisY, - tYZ = t * axisY * axisZ, - tZZ = t * axisZ * axisZ; - - float sinX = sin * axisX, - sinY = sin * axisY, - sinZ = sin * axisZ; - - result.Row0.X = tXX + cos; - result.Row0.Y = tXY - sinZ; - result.Row0.Z = tXZ + sinY; - result.Row1.X = tXY + sinZ; - result.Row1.Y = tYY + cos; - result.Row1.Z = tYZ - sinX; - result.Row2.X = tXZ - sinY; - result.Row2.Y = tYZ + sinX; - result.Row2.Z = tZZ + cos; - result.Row3.X = 0; - result.Row3.Y = 0; - result.Row3.Z = 0; - } - - /// - /// Build a rotation matrix from the specified axis/angle rotation. - /// - /// The axis to rotate about. - /// Angle in radians to rotate counter-clockwise (looking in the direction of the given axis). - /// A matrix instance. - public static Matrix4x3 CreateFromAxisAngle(Vector3 axis, float angle) - { - Matrix4x3 result; - CreateFromAxisAngle(axis, angle, out result); - return result; - } - - #endregion - - #region CreateFromQuaternion - - /// - /// Builds a rotation matrix from a quaternion. - /// - /// The quaternion to rotate by. - /// A matrix instance. 
- public static void CreateFromQuaternion(ref Quaternion q, out Matrix4x3 result) - { - float x = q.X, y = q.Y, z = q.Z, w = q.W, - tx = 2 * x, ty = 2 * y, tz = 2 * z, - txx = tx * x, tyy = ty * y, tzz = tz * z, - txy = tx * y, txz = tx * z, tyz = ty * z, - twx = w * tx, twy = w * ty, twz = w * tz; - - result.Row0.X = 1f - tyy - tzz; - result.Row0.Y = txy - twz; - result.Row0.Z = txz + twy; - result.Row1.X = txy + twz; - result.Row1.Y = 1f - txx - tzz; - result.Row1.Z = tyz - twx; - result.Row2.X = txz - twy; - result.Row2.Y = tyz + twx; - result.Row2.Z = 1f - txx - tyy; - result.Row3.X = 0; - result.Row3.Y = 0; - result.Row3.Z = 0; - - /*Vector3 axis; - float angle; - q.ToAxisAngle(out axis, out angle); - CreateFromAxisAngle(axis, angle, out result);*/ - } - - /// - /// Builds a rotation matrix from a quaternion. - /// - /// The quaternion to rotate by. - /// A matrix instance. - public static Matrix4x3 CreateFromQuaternion(Quaternion q) - { - Matrix4x3 result; - CreateFromQuaternion(ref q, out result); - return result; - } - - #endregion - - #region CreateRotation[XYZ] - - /// - /// Builds a rotation matrix for a rotation around the x-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4 instance. - public static void CreateRotationX(float angle, out Matrix4x3 result) - { - float cos = (float)System.Math.Cos(angle); - float sin = (float)System.Math.Sin(angle); - - result.Row0.X = 1; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row1.X = 0; - result.Row1.Y = cos; - result.Row1.Z = sin; - result.Row2.X = 0; - result.Row2.Y = -sin; - result.Row2.Z = cos; - result.Row3.X = 0; - result.Row3.Y = 0; - result.Row3.Z = 0; - } - - /// - /// Builds a rotation matrix for a rotation around the x-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4 instance. - public static Matrix4x3 CreateRotationX(float angle) - { - Matrix4x3 result; - CreateRotationX(angle, out result); - return result; - } - - /// - /// Builds a rotation matrix for a rotation around the y-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4 instance. - public static void CreateRotationY(float angle, out Matrix4x3 result) - { - float cos = (float)System.Math.Cos(angle); - float sin = (float)System.Math.Sin(angle); - - result.Row0.X = cos; - result.Row0.Y = 0; - result.Row0.Z = -sin; - result.Row1.X = 0; - result.Row1.Y = 1; - result.Row1.Z = 0; - result.Row2.X = sin; - result.Row2.Y = 0; - result.Row2.Z = cos; - result.Row3.X = 0; - result.Row3.Y = 0; - result.Row3.Z = 0; - } - - /// - /// Builds a rotation matrix for a rotation around the y-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4 instance. - public static Matrix4x3 CreateRotationY(float angle) - { - Matrix4x3 result; - CreateRotationY(angle, out result); - return result; - } - - /// - /// Builds a rotation matrix for a rotation around the z-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4 instance. - public static void CreateRotationZ(float angle, out Matrix4x3 result) - { - float cos = (float)System.Math.Cos(angle); - float sin = (float)System.Math.Sin(angle); - - result.Row0.X = cos; - result.Row0.Y = sin; - result.Row0.Z = 0; - result.Row1.X = -sin; - result.Row1.Y = cos; - result.Row1.Z = 0; - result.Row2.X = 0; - result.Row2.Y = 0; - result.Row2.Z = 1; - result.Row3.X = 0; - result.Row3.Y = 0; - result.Row3.Z = 0; - } - - /// - /// Builds a rotation matrix for a rotation around the z-axis. 
- /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4 instance. - public static Matrix4x3 CreateRotationZ(float angle) - { - Matrix4x3 result; - CreateRotationZ(angle, out result); - return result; - } - - #endregion - - #region CreateTranslation - - /// - /// Creates a translation matrix. - /// - /// X translation. - /// Y translation. - /// Z translation. - /// The resulting Matrix4 instance. - public static void CreateTranslation(float x, float y, float z, out Matrix4x3 result) - { - result.Row0.X = 1; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row1.X = 0; - result.Row1.Y = 1; - result.Row1.Z = 0; - result.Row2.X = 0; - result.Row2.Y = 0; - result.Row2.Z = 1; - result.Row3.X = x; - result.Row3.Y = y; - result.Row3.Z = z; - } - - /// - /// Creates a translation matrix. - /// - /// The translation vector. - /// The resulting Matrix4 instance. - public static void CreateTranslation(ref Vector3 vector, out Matrix4x3 result) - { - result.Row0.X = 1; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row1.X = 0; - result.Row1.Y = 1; - result.Row1.Z = 0; - result.Row2.X = 0; - result.Row2.Y = 0; - result.Row2.Z = 1; - result.Row3.X = vector.X; - result.Row3.Y = vector.Y; - result.Row3.Z = vector.Z; - } - - /// - /// Creates a translation matrix. - /// - /// X translation. - /// Y translation. - /// Z translation. - /// The resulting Matrix4 instance. - public static Matrix4x3 CreateTranslation(float x, float y, float z) - { - Matrix4x3 result; - CreateTranslation(x, y, z, out result); - return result; - } - - /// - /// Creates a translation matrix. - /// - /// The translation vector. - /// The resulting Matrix4 instance. - public static Matrix4x3 CreateTranslation(Vector3 vector) - { - Matrix4x3 result; - CreateTranslation(vector.X, vector.Y, vector.Z, out result); - return result; - } - - #endregion - - #region CreateScale - - /// - /// Build a scaling matrix - /// - /// Single scale factor for x,y and z axes - /// A scaling matrix - public static Matrix4x3 CreateScale(float scale) - { - return CreateScale(scale, scale, scale); - } - - /// - /// Build a scaling matrix - /// - /// Scale factors for x,y and z axes - /// A scaling matrix - public static Matrix4x3 CreateScale(Vector3 scale) - { - return CreateScale(scale.X, scale.Y, scale.Z); - } - - /// - /// Build a scaling matrix - /// - /// Scale factor for x-axis - /// Scale factor for y-axis - /// Scale factor for z-axis - /// A scaling matrix - public static Matrix4x3 CreateScale(float x, float y, float z) - { - Matrix4x3 result; - result.Row0.X = x; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row1.X = 0; - result.Row1.Y = y; - result.Row1.Z = 0; - result.Row2.X = 0; - result.Row2.Y = 0; - result.Row2.Z = z; - result.Row3.X = 0; - result.Row3.Y = 0; - result.Row3.Z = 0; - return result; - } - - #endregion - - #region Multiply Functions - - /// - /// This isn't quite a multiply, but the result may be useful in some situations. - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication - public static Matrix4 Mult(Matrix4x3 left, Matrix3x4 right) - { - Matrix4 result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// This isn't quite a multiply, but the result may be useful in some situations. - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. 
- /// A new instance that is the result of the multiplication - public static void Mult(ref Matrix4x3 left, ref Matrix3x4 right, out Matrix4 result) - { - float lM11 = left.Row0.X, lM12 = left.Row0.Y, lM13 = left.Row0.Z, - lM21 = left.Row1.X, lM22 = left.Row1.Y, lM23 = left.Row1.Z, - lM31 = left.Row2.X, lM32 = left.Row2.Y, lM33 = left.Row2.Z, - lM41 = left.Row3.X, lM42 = left.Row3.Y, lM43 = left.Row3.Z, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, rM14 = right.Row0.W, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z, rM24 = right.Row1.W, - rM31 = right.Row2.X, rM32 = right.Row2.Y, rM33 = right.Row2.Z, rM34 = right.Row2.W; - - result.Row0.X = (lM11 * rM11) + (lM12 * rM21) + (lM13 * rM31); - result.Row0.Y = (lM11 * rM12) + (lM12 * rM22) + (lM13 * rM32); - result.Row0.Z = (lM11 * rM13) + (lM12 * rM23) + (lM13 * rM33); - result.Row0.W = (lM11 * rM14) + (lM12 * rM24) + (lM13 * rM34); - result.Row1.X = (lM21 * rM11) + (lM22 * rM21) + (lM23 * rM31); - result.Row1.Y = (lM21 * rM12) + (lM22 * rM22) + (lM23 * rM32); - result.Row1.Z = (lM21 * rM13) + (lM22 * rM23) + (lM23 * rM33); - result.Row1.W = (lM21 * rM14) + (lM22 * rM24) + (lM23 * rM34); - result.Row2.X = (lM31 * rM11) + (lM32 * rM21) + (lM33 * rM31); - result.Row2.Y = (lM31 * rM12) + (lM32 * rM22) + (lM33 * rM32); - result.Row2.Z = (lM31 * rM13) + (lM32 * rM23) + (lM33 * rM33); - result.Row2.W = (lM31 * rM14) + (lM32 * rM24) + (lM33 * rM34); - result.Row3.X = (lM41 * rM11) + (lM42 * rM21) + (lM43 * rM31); - result.Row3.Y = (lM41 * rM12) + (lM42 * rM22) + (lM43 * rM32); - result.Row3.Z = (lM41 * rM13) + (lM42 * rM23) + (lM43 * rM33); - result.Row3.W = (lM41 * rM14) + (lM42 * rM24) + (lM43 * rM34); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication - public static Matrix4x3 Mult(Matrix4x3 left, Matrix4x3 right) - { - Matrix4x3 result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// This isn't quite a multiply, but the result may be useful in some situations. - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. 
- /// A new instance that is the result of the multiplication - public static void Mult(ref Matrix4x3 left, ref Matrix4x3 right, out Matrix4x3 result) - { - float lM11 = left.Row0.X, lM12 = left.Row0.Y, lM13 = left.Row0.Z, - lM21 = left.Row1.X, lM22 = left.Row1.Y, lM23 = left.Row1.Z, - lM31 = left.Row2.X, lM32 = left.Row2.Y, lM33 = left.Row2.Z, - lM41 = left.Row3.X, lM42 = left.Row3.Y, lM43 = left.Row3.Z, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z, - rM31 = right.Row2.X, rM32 = right.Row2.Y, rM33 = right.Row2.Z, - rM41 = right.Row3.X, rM42 = right.Row3.Y, rM43 = right.Row3.Z; - - result.Row0.X = (lM11 * rM11) + (lM12 * rM21) + (lM13 * rM31) + rM41; - result.Row0.Y = (lM11 * rM12) + (lM12 * rM22) + (lM13 * rM32) + rM42; - result.Row0.Z = (lM11 * rM13) + (lM12 * rM23) + (lM13 * rM33) + rM43; - result.Row1.X = (lM21 * rM11) + (lM22 * rM21) + (lM23 * rM31) + rM41; - result.Row1.Y = (lM21 * rM12) + (lM22 * rM22) + (lM23 * rM32) + rM42; - result.Row1.Z = (lM21 * rM13) + (lM22 * rM23) + (lM23 * rM33) + rM43; - result.Row2.X = (lM31 * rM11) + (lM32 * rM21) + (lM33 * rM31) + rM41; - result.Row2.Y = (lM31 * rM12) + (lM32 * rM22) + (lM33 * rM32) + rM42; - result.Row2.Z = (lM31 * rM13) + (lM32 * rM23) + (lM33 * rM33) + rM43; - result.Row3.X = (lM41 * rM11) + (lM42 * rM21) + (lM43 * rM31) + rM41; - result.Row3.Y = (lM41 * rM12) + (lM42 * rM22) + (lM43 * rM32) + rM42; - result.Row3.Z = (lM41 * rM13) + (lM42 * rM23) + (lM43 * rM33) + rM43; - } - - /// - /// Multiplies an instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication - public static Matrix4x3 Mult(Matrix4x3 left, float right) - { - Matrix4x3 result; - Mult(ref left, right, out result); - return result; - } - - /// - /// Multiplies an instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication - public static void Mult(ref Matrix4x3 left, float right, out Matrix4x3 result) - { - result.Row0 = left.Row0 * right; - result.Row1 = left.Row1 * right; - result.Row2 = left.Row2 * right; - result.Row3 = left.Row3 * right; - } - - #endregion - - #region Add Functions - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. - public static Matrix4x3 Add(Matrix4x3 left, Matrix4x3 right) - { - Matrix4x3 result; - Add(ref left, ref right, out result); - return result; - } - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. - public static void Add(ref Matrix4x3 left, ref Matrix4x3 right, out Matrix4x3 result) - { - result.Row0 = left.Row0 + right.Row0; - result.Row1 = left.Row1 + right.Row1; - result.Row2 = left.Row2 + right.Row2; - result.Row3 = left.Row3 + right.Row3; - } - - #endregion - - #region Subtract Functions - - /// - /// Subtracts one instance from another. - /// - /// The left operand of the subraction. - /// The right operand of the subraction. - /// A new instance that is the result of the subraction. 
- public static Matrix4x3 Subtract(Matrix4x3 left, Matrix4x3 right) - { - Matrix4x3 result; - Subtract(ref left, ref right, out result); - return result; - } - - /// - /// Subtracts one instance from another. - /// - /// The left operand of the subraction. - /// The right operand of the subraction. - /// A new instance that is the result of the subraction. - public static void Subtract(ref Matrix4x3 left, ref Matrix4x3 right, out Matrix4x3 result) - { - result.Row0 = left.Row0 - right.Row0; - result.Row1 = left.Row1 - right.Row1; - result.Row2 = left.Row2 - right.Row2; - result.Row3 = left.Row3 - right.Row3; - } - - #endregion - - #region Invert Functions - - /// - /// Calculate the inverse of the given matrix - /// - /// The matrix to invert - /// The inverse of the given matrix if it has one, or the input if it is singular - /// Thrown if the Matrix4 is singular. - public static Matrix4x3 Invert(Matrix4x3 mat) - { - Matrix4x3 result; - Invert(ref mat, out result); - return result; - } - - /// - /// Calculate the inverse of the given matrix - /// - /// The matrix to invert - /// The inverse of the given matrix if it has one, or the input if it is singular - /// Thrown if the Matrix4 is singular. - public static void Invert(ref Matrix4x3 mat, out Matrix4x3 result) - { - Matrix3 inverseRotation = new Matrix3(mat.Column0.Xyz, mat.Column1.Xyz, mat.Column2.Xyz); - inverseRotation.Row0 /= inverseRotation.Row0.LengthSquared; - inverseRotation.Row1 /= inverseRotation.Row1.LengthSquared; - inverseRotation.Row2 /= inverseRotation.Row2.LengthSquared; - - Vector3 translation = mat.Row3; - - result.Row0 = inverseRotation.Row0; - result.Row1 = inverseRotation.Row1; - result.Row2 = inverseRotation.Row2; - result.Row3 = new Vector3(-Vector3.Dot(inverseRotation.Row0, translation), -Vector3.Dot(inverseRotation.Row1, translation), -Vector3.Dot(inverseRotation.Row2, translation)); - } - - #endregion - - #region Transpose - /// - /// Calculate the transpose of the given matrix - /// - /// The matrix to transpose - /// The transpose of the given matrix - public static Matrix3x4 Transpose(Matrix4x3 mat) - { - return new Matrix3x4(mat.Column0, mat.Column1, mat.Column2); - } - - /// - /// Calculate the transpose of the given matrix - /// - /// The matrix to transpose - /// The result of the calculation - public static void Transpose(ref Matrix4x3 mat, out Matrix3x4 result) - { - result.Row0 = mat.Column0; - result.Row1 = mat.Column1; - result.Row2 = mat.Column2; - } - - #endregion - - #endregion - - #region Operators - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4 which holds the result of the multiplication - public static Matrix4 operator *(Matrix4x3 left, Matrix3x4 right) - { - return Matrix4x3.Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4x3 which holds the result of the multiplication - public static Matrix4x3 operator *(Matrix4x3 left, Matrix4x3 right) - { - return Matrix4x3.Mult(left, right); - } - - /// - /// Matrix-scalar multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4x3 which holds the result of the multiplication - public static Matrix4x3 operator *(Matrix4x3 left, float right) - { - return Matrix4x3.Mult(left, right); - } - - /// - /// Matrix addition - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4x3 which holds the result of the addition - public static Matrix4x3 operator +(Matrix4x3 
left, Matrix4x3 right) - { - return Matrix4x3.Add(left, right); - } - - /// - /// Matrix subtraction - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4x3 which holds the result of the subtraction - public static Matrix4x3 operator -(Matrix4x3 left, Matrix4x3 right) - { - return Matrix4x3.Subtract(left, right); - } - - /// - /// Compares two instances for equality. - /// - /// The first instance. - /// The second instance. - /// True, if left equals right; false otherwise. - public static bool operator ==(Matrix4x3 left, Matrix4x3 right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for inequality. - /// - /// The first instance. - /// The second instance. - /// True, if left does not equal right; false otherwise. - public static bool operator !=(Matrix4x3 left, Matrix4x3 right) - { - return !left.Equals(right); - } - - #endregion - - #region Overrides - - #region public override string ToString() - - /// - /// Returns a System.String that represents the current Matrix4x3. - /// - /// The string representation of the matrix. - public override string ToString() - { - return string.Format("{0}\n{1}\n{2}", Row0, Row1, Row2); - } - - #endregion - - #region public override int GetHashCode() - - /// - /// Returns the hashcode for this instance. - /// - /// A System.Int32 containing the unique hashcode for this instance. - public override int GetHashCode() - { - return Row0.GetHashCode() ^ Row1.GetHashCode() ^ Row2.GetHashCode(); - } - - #endregion - - #region public override bool Equals(object obj) - - /// - /// Indicates whether this instance and a specified object are equal. - /// - /// The object to compare tresult. - /// True if the instances are equal; false otherwise. - public override bool Equals(object obj) - { - if (!(obj is Matrix4x3)) - return false; - - return this.Equals((Matrix4x3)obj); - } - - #endregion - - #endregion - - #endregion - - #region IEquatable Members - - /// Indicates whether the current matrix is equal to another matrix. - /// An matrix to compare with this matrix. - /// true if the current matrix is equal to the matrix parameter; otherwise, false. - public bool Equals(Matrix4x3 other) - { - return - Row0 == other.Row0 && - Row1 == other.Row1 && - Row2 == other.Row2 && - Row3 == other.Row3; - } - - #endregion - } -} diff --git a/OpenTK/Math/Matrix4x3d.cs b/OpenTK/Math/Matrix4x3d.cs deleted file mode 100644 index 908477fd..00000000 --- a/OpenTK/Math/Matrix4x3d.cs +++ /dev/null @@ -1,1007 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - */ -#endregion - -using System; -using System.Runtime.InteropServices; - -namespace OpenTK -{ - /// - /// Represents a 3x4 matrix. - /// - [Serializable] - [StructLayout(LayoutKind.Sequential)] - public struct Matrix4x3d : IEquatable - { - #region Fields - - /// - /// Top row of the matrix - /// - public Vector3d Row0; - - /// - /// 2nd row of the matrix - /// - public Vector3d Row1; - - /// - /// 3rd row of the matrix - /// - public Vector3d Row2; - - /// - /// Bottom row of the matrix - /// - public Vector3d Row3; - - /// - /// The zero matrix - /// - public static Matrix4x3d Zero = new Matrix4x3d(Vector3d.Zero, Vector3d.Zero, Vector3d.Zero, Vector3d.Zero); - - #endregion - - #region Constructors - - /// - /// Constructs a new instance. - /// - /// Top row of the matrix - /// Second row of the matrix - /// Third row of the matrix - /// Bottom row of the matrix - public Matrix4x3d(Vector3d row0, Vector3d row1, Vector3d row2, Vector3d row3) - { - Row0 = row0; - Row1 = row1; - Row2 = row2; - Row3 = row3; - } - - /// - /// Constructs a new instance. - /// - /// First item of the first row of the matrix. - /// Second item of the first row of the matrix. - /// Third item of the first row of the matrix. - /// First item of the second row of the matrix. - /// Second item of the second row of the matrix. - /// Third item of the second row of the matrix. - /// First item of the third row of the matrix. - /// Second item of the third row of the matrix. - /// Third item of the third row of the matrix. - /// First item of the fourth row of the matrix. - /// Second item of the fourth row of the matrix. - /// Third item of the fourth row of the matrix. - public Matrix4x3d( - double m00, double m01, double m02, - double m10, double m11, double m12, - double m20, double m21, double m22, - double m30, double m31, double m32) - { - Row0 = new Vector3d(m00, m01, m02); - Row1 = new Vector3d(m10, m11, m12); - Row2 = new Vector3d(m20, m21, m22); - Row3 = new Vector3d(m30, m31, m32); - } - - #endregion - - #region Public Members - - #region Properties - - /// - /// Gets the first column of this matrix. - /// - public Vector4d Column0 - { - get { return new Vector4d(Row0.X, Row1.X, Row2.X, Row3.X); } - } - - /// - /// Gets the second column of this matrix. - /// - public Vector4d Column1 - { - get { return new Vector4d(Row0.Y, Row1.Y, Row2.Y, Row3.Y); } - } - - /// - /// Gets the third column of this matrix. - /// - public Vector4d Column2 - { - get { return new Vector4d(Row0.Z, Row1.Z, Row2.Z, Row3.Z); } - } - - /// - /// Gets or sets the value at row 1, column 1 of this instance. - /// - public double M11 { get { return Row0.X; } set { Row0.X = value; } } - - /// - /// Gets or sets the value at row 1, column 2 of this instance. - /// - public double M12 { get { return Row0.Y; } set { Row0.Y = value; } } - - /// - /// Gets or sets the value at row 1, column 3 of this instance. - /// - public double M13 { get { return Row0.Z; } set { Row0.Z = value; } } - - /// - /// Gets or sets the value at row 2, column 1 of this instance. - /// - public double M21 { get { return Row1.X; } set { Row1.X = value; } } - - /// - /// Gets or sets the value at row 2, column 2 of this instance. 
- /// - public double M22 { get { return Row1.Y; } set { Row1.Y = value; } } - - /// - /// Gets or sets the value at row 2, column 3 of this instance. - /// - public double M23 { get { return Row1.Z; } set { Row1.Z = value; } } - - /// - /// Gets or sets the value at row 3, column 1 of this instance. - /// - public double M31 { get { return Row2.X; } set { Row2.X = value; } } - - /// - /// Gets or sets the value at row 3, column 2 of this instance. - /// - public double M32 { get { return Row2.Y; } set { Row2.Y = value; } } - - /// - /// Gets or sets the value at row 3, column 3 of this instance. - /// - public double M33 { get { return Row2.Z; } set { Row2.Z = value; } } - - /// - /// Gets or sets the value at row 4, column 1 of this instance. - /// - public double M41 { get { return Row3.X; } set { Row3.X = value; } } - - /// - /// Gets or sets the value at row 4, column 2 of this instance. - /// - public double M42 { get { return Row3.Y; } set { Row3.Y = value; } } - - /// - /// Gets or sets the value at row 4, column 3 of this instance. - /// - public double M43 { get { return Row3.Z; } set { Row3.Z = value; } } - - /// - /// Gets or sets the values along the main diagonal of the matrix. - /// - public Vector3d Diagonal - { - get - { - return new Vector3d(Row0.X, Row1.Y, Row2.Z); - } - set - { - Row0.X = value.X; - Row1.Y = value.Y; - Row2.Z = value.Z; - } - } - - /// - /// Gets the trace of the matrix, the sum of the values along the diagonal. - /// - public double Trace { get { return Row0.X + Row1.Y + Row2.Z; } } - - #endregion - - #region Indexers - - /// - /// Gets or sets the value at a specified row and column. - /// - public double this[int rowIndex, int columnIndex] - { - get - { - if (rowIndex == 0) return Row0[columnIndex]; - else if (rowIndex == 1) return Row1[columnIndex]; - else if (rowIndex == 2) return Row2[columnIndex]; - else if (rowIndex == 3) return Row3[columnIndex]; - throw new IndexOutOfRangeException("You tried to access this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - set - { - if (rowIndex == 0) Row0[columnIndex] = value; - else if (rowIndex == 1) Row1[columnIndex] = value; - else if (rowIndex == 2) Row2[columnIndex] = value; - else if (rowIndex == 3) Row3[columnIndex] = value; - else throw new IndexOutOfRangeException("You tried to set this matrix at: (" + rowIndex + ", " + columnIndex + ")"); - } - } - - #endregion - - #region Instance - - #region public void Invert() - - /// - /// Converts this instance into its inverse. - /// - public void Invert() - { - this = Matrix4x3d.Invert(this); - } - - #endregion - - #endregion - - #region Static - - #region CreateFromAxisAngle - - /// - /// Build a rotation matrix from the specified axis/angle rotation. - /// - /// The axis to rotate about. - /// Angle in radians to rotate counter-clockwise (looking in the direction of the given axis). - /// A matrix instance. 
- public static void CreateFromAxisAngle(Vector3d axis, double angle, out Matrix4x3d result) - { - axis.Normalize(); - double axisX = axis.X, axisY = axis.Y, axisZ = axis.Z; - - double cos = (double)System.Math.Cos(-angle); - double sin = (double)System.Math.Sin(-angle); - double t = 1.0f - cos; - - double tXX = t * axisX * axisX, - tXY = t * axisX * axisY, - tXZ = t * axisX * axisZ, - tYY = t * axisY * axisY, - tYZ = t * axisY * axisZ, - tZZ = t * axisZ * axisZ; - - double sinX = sin * axisX, - sinY = sin * axisY, - sinZ = sin * axisZ; - - result.Row0.X = tXX + cos; - result.Row0.Y = tXY - sinZ; - result.Row0.Z = tXZ + sinY; - result.Row1.X = tXY + sinZ; - result.Row1.Y = tYY + cos; - result.Row1.Z = tYZ - sinX; - result.Row2.X = tXZ - sinY; - result.Row2.Y = tYZ + sinX; - result.Row2.Z = tZZ + cos; - result.Row3.X = 0; - result.Row3.Y = 0; - result.Row3.Z = 0; - } - - /// - /// Build a rotation matrix from the specified axis/angle rotation. - /// - /// The axis to rotate about. - /// Angle in radians to rotate counter-clockwise (looking in the direction of the given axis). - /// A matrix instance. - public static Matrix4x3d CreateFromAxisAngle(Vector3d axis, double angle) - { - Matrix4x3d result; - CreateFromAxisAngle(axis, angle, out result); - return result; - } - - #endregion - - #region CreateFromQuaternion - - /// - /// Builds a rotation matrix from a quaternion. - /// - /// The quaternion to rotate by. - /// A matrix instance. - public static void CreateFromQuaternion(ref Quaternion q, out Matrix4x3d result) - { - double x = q.X, y = q.Y, z = q.Z, w = q.W, - tx = 2 * x, ty = 2 * y, tz = 2 * z, - txx = tx * x, tyy = ty * y, tzz = tz * z, - txy = tx * y, txz = tx * z, tyz = ty * z, - twx = w * tx, twy = w * ty, twz = w * tz; - - result.Row0.X = 1f - tyy - tzz; - result.Row0.Y = txy - twz; - result.Row0.Z = txz + twy; - result.Row1.X = txy + twz; - result.Row1.Y = 1f - txx - tzz; - result.Row1.Z = tyz - twx; - result.Row2.X = txz - twy; - result.Row2.Y = tyz + twx; - result.Row2.Z = 1f - txx - tyy; - result.Row3.X = 0; - result.Row3.Y = 0; - result.Row3.Z = 0; - - /*Vector3d axis; - double angle; - q.ToAxisAngle(out axis, out angle); - CreateFromAxisAngle(axis, angle, out result);*/ - } - - /// - /// Builds a rotation matrix from a quaternion. - /// - /// The quaternion to rotate by. - /// A matrix instance. - public static Matrix4x3d CreateFromQuaternion(Quaternion q) - { - Matrix4x3d result; - CreateFromQuaternion(ref q, out result); - return result; - } - - #endregion - - #region CreateRotation[XYZ] - - /// - /// Builds a rotation matrix for a rotation around the x-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4dinstance. - public static void CreateRotationX(double angle, out Matrix4x3d result) - { - double cos = (double)System.Math.Cos(angle); - double sin = (double)System.Math.Sin(angle); - - result.Row0.X = 1; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row1.X = 0; - result.Row1.Y = cos; - result.Row1.Z = sin; - result.Row2.X = 0; - result.Row2.Y = -sin; - result.Row2.Z = cos; - result.Row3.X = 0; - result.Row3.Y = 0; - result.Row3.Z = 0; - } - - /// - /// Builds a rotation matrix for a rotation around the x-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4dinstance. - public static Matrix4x3d CreateRotationX(double angle) - { - Matrix4x3d result; - CreateRotationX(angle, out result); - return result; - } - - /// - /// Builds a rotation matrix for a rotation around the y-axis. 
- /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4dinstance. - public static void CreateRotationY(double angle, out Matrix4x3d result) - { - double cos = (double)System.Math.Cos(angle); - double sin = (double)System.Math.Sin(angle); - - result.Row0.X = cos; - result.Row0.Y = 0; - result.Row0.Z = -sin; - result.Row1.X = 0; - result.Row1.Y = 1; - result.Row1.Z = 0; - result.Row2.X = sin; - result.Row2.Y = 0; - result.Row2.Z = cos; - result.Row3.X = 0; - result.Row3.Y = 0; - result.Row3.Z = 0; - } - - /// - /// Builds a rotation matrix for a rotation around the y-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4dinstance. - public static Matrix4x3d CreateRotationY(double angle) - { - Matrix4x3d result; - CreateRotationY(angle, out result); - return result; - } - - /// - /// Builds a rotation matrix for a rotation around the z-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4dinstance. - public static void CreateRotationZ(double angle, out Matrix4x3d result) - { - double cos = (double)System.Math.Cos(angle); - double sin = (double)System.Math.Sin(angle); - - result.Row0.X = cos; - result.Row0.Y = sin; - result.Row0.Z = 0; - result.Row1.X = -sin; - result.Row1.Y = cos; - result.Row1.Z = 0; - result.Row2.X = 0; - result.Row2.Y = 0; - result.Row2.Z = 1; - result.Row3.X = 0; - result.Row3.Y = 0; - result.Row3.Z = 0; - } - - /// - /// Builds a rotation matrix for a rotation around the z-axis. - /// - /// The counter-clockwise angle in radians. - /// The resulting Matrix4dinstance. - public static Matrix4x3d CreateRotationZ(double angle) - { - Matrix4x3d result; - CreateRotationZ(angle, out result); - return result; - } - - #endregion - - #region CreateTranslation - - /// - /// Creates a translation matrix. - /// - /// X translation. - /// Y translation. - /// Z translation. - /// The resulting Matrix4dinstance. - public static void CreateTranslation(double x, double y, double z, out Matrix4x3d result) - { - result.Row0.X = 1; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row1.X = 0; - result.Row1.Y = 1; - result.Row1.Z = 0; - result.Row2.X = 0; - result.Row2.Y = 0; - result.Row2.Z = 1; - result.Row3.X = x; - result.Row3.Y = y; - result.Row3.Z = z; - } - - /// - /// Creates a translation matrix. - /// - /// The translation vector. - /// The resulting Matrix4dinstance. - public static void CreateTranslation(ref Vector3d vector, out Matrix4x3d result) - { - result.Row0.X = 1; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row1.X = 0; - result.Row1.Y = 1; - result.Row1.Z = 0; - result.Row2.X = 0; - result.Row2.Y = 0; - result.Row2.Z = 1; - result.Row3.X = vector.X; - result.Row3.Y = vector.Y; - result.Row3.Z = vector.Z; - } - - /// - /// Creates a translation matrix. - /// - /// X translation. - /// Y translation. - /// Z translation. - /// The resulting Matrix4dinstance. - public static Matrix4x3d CreateTranslation(double x, double y, double z) - { - Matrix4x3d result; - CreateTranslation(x, y, z, out result); - return result; - } - - /// - /// Creates a translation matrix. - /// - /// The translation vector. - /// The resulting Matrix4dinstance. 
- public static Matrix4x3d CreateTranslation(Vector3d vector) - { - Matrix4x3d result; - CreateTranslation(vector.X, vector.Y, vector.Z, out result); - return result; - } - - #endregion - - #region CreateScale - - /// - /// Build a scaling matrix - /// - /// Single scale factor for x,y and z axes - /// A scaling matrix - public static Matrix4x3d CreateScale(double scale) - { - return CreateScale(scale, scale, scale); - } - - /// - /// Build a scaling matrix - /// - /// Scale factors for x,y and z axes - /// A scaling matrix - public static Matrix4x3d CreateScale(Vector3d scale) - { - return CreateScale(scale.X, scale.Y, scale.Z); - } - - /// - /// Build a scaling matrix - /// - /// Scale factor for x-axis - /// Scale factor for y-axis - /// Scale factor for z-axis - /// A scaling matrix - public static Matrix4x3d CreateScale(double x, double y, double z) - { - Matrix4x3d result; - result.Row0.X = x; - result.Row0.Y = 0; - result.Row0.Z = 0; - result.Row1.X = 0; - result.Row1.Y = y; - result.Row1.Z = 0; - result.Row2.X = 0; - result.Row2.Y = 0; - result.Row2.Z = z; - result.Row3.X = 0; - result.Row3.Y = 0; - result.Row3.Z = 0; - return result; - } - - #endregion - - #region Multiply Functions - - /// - /// This isn't quite a multiply, but the result may be useful in some situations. - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication - public static Matrix4d Mult(Matrix4x3d left, Matrix3x4d right) - { - Matrix4d result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// This isn't quite a multiply, but the result may be useful in some situations. - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. 
- /// A new instance that is the result of the multiplication - public static void Mult(ref Matrix4x3d left, ref Matrix3x4d right, out Matrix4d result) - { - double lM11 = left.Row0.X, lM12 = left.Row0.Y, lM13 = left.Row0.Z, - lM21 = left.Row1.X, lM22 = left.Row1.Y, lM23 = left.Row1.Z, - lM31 = left.Row2.X, lM32 = left.Row2.Y, lM33 = left.Row2.Z, - lM41 = left.Row3.X, lM42 = left.Row3.Y, lM43 = left.Row3.Z, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, rM14 = right.Row0.W, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z, rM24 = right.Row1.W, - rM31 = right.Row2.X, rM32 = right.Row2.Y, rM33 = right.Row2.Z, rM34 = right.Row2.W; - - result.Row0.X = (lM11 * rM11) + (lM12 * rM21) + (lM13 * rM31); - result.Row0.Y = (lM11 * rM12) + (lM12 * rM22) + (lM13 * rM32); - result.Row0.Z = (lM11 * rM13) + (lM12 * rM23) + (lM13 * rM33); - result.Row0.W = (lM11 * rM14) + (lM12 * rM24) + (lM13 * rM34); - result.Row1.X = (lM21 * rM11) + (lM22 * rM21) + (lM23 * rM31); - result.Row1.Y = (lM21 * rM12) + (lM22 * rM22) + (lM23 * rM32); - result.Row1.Z = (lM21 * rM13) + (lM22 * rM23) + (lM23 * rM33); - result.Row1.W = (lM21 * rM14) + (lM22 * rM24) + (lM23 * rM34); - result.Row2.X = (lM31 * rM11) + (lM32 * rM21) + (lM33 * rM31); - result.Row2.Y = (lM31 * rM12) + (lM32 * rM22) + (lM33 * rM32); - result.Row2.Z = (lM31 * rM13) + (lM32 * rM23) + (lM33 * rM33); - result.Row2.W = (lM31 * rM14) + (lM32 * rM24) + (lM33 * rM34); - result.Row3.X = (lM41 * rM11) + (lM42 * rM21) + (lM43 * rM31); - result.Row3.Y = (lM41 * rM12) + (lM42 * rM22) + (lM43 * rM32); - result.Row3.Z = (lM41 * rM13) + (lM42 * rM23) + (lM43 * rM33); - result.Row3.W = (lM41 * rM14) + (lM42 * rM24) + (lM43 * rM34); - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication - public static Matrix4x3d Mult(Matrix4x3d left, Matrix4x3d right) - { - Matrix4x3d result; - Mult(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. 
- /// A new instance that is the result of the multiplication - public static void Mult(ref Matrix4x3d left, ref Matrix4x3d right, out Matrix4x3d result) - { - double lM11 = left.Row0.X, lM12 = left.Row0.Y, lM13 = left.Row0.Z, - lM21 = left.Row1.X, lM22 = left.Row1.Y, lM23 = left.Row1.Z, - lM31 = left.Row2.X, lM32 = left.Row2.Y, lM33 = left.Row2.Z, - lM41 = left.Row3.X, lM42 = left.Row3.Y, lM43 = left.Row3.Z, - rM11 = right.Row0.X, rM12 = right.Row0.Y, rM13 = right.Row0.Z, - rM21 = right.Row1.X, rM22 = right.Row1.Y, rM23 = right.Row1.Z, - rM31 = right.Row2.X, rM32 = right.Row2.Y, rM33 = right.Row2.Z, - rM41 = right.Row3.X, rM42 = right.Row3.Y, rM43 = right.Row3.Z; - - result.Row0.X = (lM11 * rM11) + (lM12 * rM21) + (lM13 * rM31) + rM41; - result.Row0.Y = (lM11 * rM12) + (lM12 * rM22) + (lM13 * rM32) + rM42; - result.Row0.Z = (lM11 * rM13) + (lM12 * rM23) + (lM13 * rM33) + rM43; - result.Row1.X = (lM21 * rM11) + (lM22 * rM21) + (lM23 * rM31) + rM41; - result.Row1.Y = (lM21 * rM12) + (lM22 * rM22) + (lM23 * rM32) + rM42; - result.Row1.Z = (lM21 * rM13) + (lM22 * rM23) + (lM23 * rM33) + rM43; - result.Row2.X = (lM31 * rM11) + (lM32 * rM21) + (lM33 * rM31) + rM41; - result.Row2.Y = (lM31 * rM12) + (lM32 * rM22) + (lM33 * rM32) + rM42; - result.Row2.Z = (lM31 * rM13) + (lM32 * rM23) + (lM33 * rM33) + rM43; - result.Row3.X = (lM41 * rM11) + (lM42 * rM21) + (lM43 * rM31) + rM41; - result.Row3.Y = (lM41 * rM12) + (lM42 * rM22) + (lM43 * rM32) + rM42; - result.Row3.Z = (lM41 * rM13) + (lM42 * rM23) + (lM43 * rM33) + rM43; - } - - /// - /// Multiplies an instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication - public static Matrix4x3d Mult(Matrix4x3d left, double right) - { - Matrix4x3d result; - Mult(ref left, right, out result); - return result; - } - - /// - /// Multiplies an instance by a scalar. - /// - /// The left operand of the multiplication. - /// The right operand of the multiplication. - /// A new instance that is the result of the multiplication - public static void Mult(ref Matrix4x3d left, double right, out Matrix4x3d result) - { - result.Row0 = left.Row0 * right; - result.Row1 = left.Row1 * right; - result.Row2 = left.Row2 * right; - result.Row3 = left.Row3 * right; - } - - #endregion - - #region Add Functions - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. - public static Matrix4x3d Add(Matrix4x3d left, Matrix4x3d right) - { - Matrix4x3d result; - Add(ref left, ref right, out result); - return result; - } - - /// - /// Adds two instances. - /// - /// The left operand of the addition. - /// The right operand of the addition. - /// A new instance that is the result of the addition. - public static void Add(ref Matrix4x3d left, ref Matrix4x3d right, out Matrix4x3d result) - { - result.Row0 = left.Row0 + right.Row0; - result.Row1 = left.Row1 + right.Row1; - result.Row2 = left.Row2 + right.Row2; - result.Row3 = left.Row3 + right.Row3; - } - - #endregion - - #region Subtract Functions - - /// - /// Subtracts one instance from another. - /// - /// The left operand of the subraction. - /// The right operand of the subraction. - /// A new instance that is the result of the subraction. 
- public static Matrix4x3d Subtract(Matrix4x3d left, Matrix4x3d right) - { - Matrix4x3d result; - Subtract(ref left, ref right, out result); - return result; - } - - /// - /// Subtracts one instance from another. - /// - /// The left operand of the subraction. - /// The right operand of the subraction. - /// A new instance that is the result of the subraction. - public static void Subtract(ref Matrix4x3d left, ref Matrix4x3d right, out Matrix4x3d result) - { - result.Row0 = left.Row0 - right.Row0; - result.Row1 = left.Row1 - right.Row1; - result.Row2 = left.Row2 - right.Row2; - result.Row3 = left.Row3 - right.Row3; - } - - #endregion - - #region Invert Functions - - /// - /// Calculate the inverse of the given matrix - /// - /// The matrix to invert - /// The inverse of the given matrix if it has one, or the input if it is singular - /// Thrown if the Matrix4 is singular. - public static Matrix4x3d Invert(Matrix4x3d mat) - { - Matrix4x3d result; - Invert(ref mat, out result); - return result; - } - - /// - /// Calculate the inverse of the given matrix - /// - /// The matrix to invert - /// The inverse of the given matrix if it has one, or the input if it is singular - /// Thrown if the Matrix4 is singular. - public static void Invert(ref Matrix4x3d mat, out Matrix4x3d result) - { - Matrix3d inverseRotation = new Matrix3d(mat.Column0.Xyz, mat.Column1.Xyz, mat.Column2.Xyz); - inverseRotation.Row0 /= inverseRotation.Row0.LengthSquared; - inverseRotation.Row1 /= inverseRotation.Row1.LengthSquared; - inverseRotation.Row2 /= inverseRotation.Row2.LengthSquared; - - Vector3d translation = mat.Row3; - - result.Row0 = inverseRotation.Row0; - result.Row1 = inverseRotation.Row1; - result.Row2 = inverseRotation.Row2; - result.Row3 = new Vector3d(-Vector3d.Dot(inverseRotation.Row0, translation), -Vector3d.Dot(inverseRotation.Row1, translation), -Vector3d.Dot(inverseRotation.Row2, translation)); - } - - #endregion - - #region Transpose - - /// - /// Calculate the transpose of the given matrix - /// - /// The matrix to transpose - /// The transpose of the given matrix - public static Matrix3x4d Transpose(Matrix4x3d mat) - { - return new Matrix3x4d(mat.Column0, mat.Column1, mat.Column2); - } - - /// - /// Calculate the transpose of the given matrix - /// - /// The matrix to transpose - /// The result of the calculation - public static void Transpose(ref Matrix4x3d mat, out Matrix3x4d result) - { - result.Row0 = mat.Column0; - result.Row1 = mat.Column1; - result.Row2 = mat.Column2; - } - - #endregion - - #endregion - - #region Operators - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4d which holds the result of the multiplication - public static Matrix4d operator *(Matrix4x3d left, Matrix3x4d right) - { - return Matrix4x3d.Mult(left, right); - } - - /// - /// Matrix multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4x3d which holds the result of the multiplication - public static Matrix4x3d operator *(Matrix4x3d left, Matrix4x3d right) - { - return Matrix4x3d.Mult(left, right); - } - - /// - /// Matrix-scalar multiplication - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4x3d which holds the result of the multiplication - public static Matrix4x3d operator *(Matrix4x3d left, double right) - { - return Matrix4x3d.Mult(left, right); - } - - /// - /// Matrix addition - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4x3d which holds the result of the addition - 
public static Matrix4x3d operator +(Matrix4x3d left, Matrix4x3d right) - { - return Matrix4x3d.Add(left, right); - } - - /// - /// Matrix subtraction - /// - /// left-hand operand - /// right-hand operand - /// A new Matrix4x3d which holds the result of the subtraction - public static Matrix4x3d operator -(Matrix4x3d left, Matrix4x3d right) - { - return Matrix4x3d.Subtract(left, right); - } - - /// - /// Compares two instances for equality. - /// - /// The first instance. - /// The second instance. - /// True, if left equals right; false otherwise. - public static bool operator ==(Matrix4x3d left, Matrix4x3d right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for inequality. - /// - /// The first instance. - /// The second instance. - /// True, if left does not equal right; false otherwise. - public static bool operator !=(Matrix4x3d left, Matrix4x3d right) - { - return !left.Equals(right); - } - - #endregion - - #region Overrides - - #region public override string ToString() - - /// - /// Returns a System.String that represents the current Matrix4x3d. - /// - /// The string representation of the matrix. - public override string ToString() - { - return string.Format("{0}\n{1}\n{2}", Row0, Row1, Row2); - } - - #endregion - - #region public override int GetHashCode() - - /// - /// Returns the hashcode for this instance. - /// - /// A System.Int32 containing the unique hashcode for this instance. - public override int GetHashCode() - { - return Row0.GetHashCode() ^ Row1.GetHashCode() ^ Row2.GetHashCode(); - } - - #endregion - - #region public override bool Equals(object obj) - - /// - /// Indicates whether this instance and a specified object are equal. - /// - /// The object to compare tresult. - /// True if the instances are equal; false otherwise. - public override bool Equals(object obj) - { - if (!(obj is Matrix4x3d)) - return false; - - return this.Equals((Matrix4x3d)obj); - } - - #endregion - - #endregion - - #endregion - - #region IEquatable Members - - /// Indicates whether the current matrix is equal to another matrix. - /// An matrix to compare with this matrix. - /// true if the current matrix is equal to the matrix parameter; otherwise, false. - public bool Equals(Matrix4x3d other) - { - return - Row0 == other.Row0 && - Row1 == other.Row1 && - Row2 == other.Row2 && - Row3 == other.Row3; - } - - #endregion - } -} diff --git a/OpenTK/Math/Point.cs b/OpenTK/Math/Point.cs deleted file mode 100644 index 67613933..00000000 --- a/OpenTK/Math/Point.cs +++ /dev/null @@ -1,235 +0,0 @@ -#region License - // - // The Open Toolkit Library License - // - // Copyright (c) 2006 - 2009 the Open Toolkit library. - // - // Permission is hereby granted, free of charge, to any person obtaining a copy - // of this software and associated documentation files (the "Software"), to deal - // in the Software without restriction, including without limitation the rights to - // use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of - // the Software, and to permit persons to whom the Software is furnished to do - // so, subject to the following conditions: - // - // The above copyright notice and this permission notice shall be included in all - // copies or substantial portions of the Software. - // - // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES - // OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND - // NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT - // HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, - // WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR - // OTHER DEALINGS IN THE SOFTWARE. - // - #endregion - -using System; -using System.Collections.Generic; -using System.Text; - -namespace OpenTK -{ -#if NO_SYSDRAWING - /// - /// Defines a point on a two-dimensional plane. - /// - public struct Point : IEquatable - { - #region Fields - - int x, y; - - #endregion - - #region Constructors - - /// - /// Constructs a new Point instance. - /// - /// The X coordinate of this instance. - /// The Y coordinate of this instance. - public Point(int x, int y) - : this() - { - X = x; - Y = y; - } - - #endregion - - #region Public Members - - /// - /// Gets a that indicates whether this instance is empty or zero. - /// - public bool IsEmpty { get { return X == 0 && Y == 0; } } - - /// - /// Gets or sets the X coordinate of this instance. - /// - public int X { get { return x; } set { x = value; } } - - /// - /// Gets or sets the Y coordinate of this instance. - /// - public int Y { get { return y; } set { y = value; } } - - /// - /// Returns the Point (0, 0). - /// - public static readonly Point Zero = new Point(); - - /// - /// Returns the Point (0, 0). - /// - public static readonly Point Empty = new Point(); - - /// - /// Translates the specified Point by the specified Size. - /// - /// - /// The instance to translate. - /// - /// - /// The instance to translate point with. - /// - /// - /// A new instance translated by size. - /// - public static Point operator +(Point point, Size size) - { - return new Point(point.X + size.Width, point.Y + size.Height); - } - - /// - /// Translates the specified Point by the negative of the specified Size. - /// - /// - /// The instance to translate. - /// - /// - /// The instance to translate point with. - /// - /// - /// A new instance translated by size. - /// - public static Point operator -(Point point, Size size) - { - return new Point(point.X - size.Width, point.Y - size.Height); - } - - /// - /// Compares two instances for equality. - /// - /// The first instance. - /// The second instance. - /// True, if left is equal to right; false otherwise. - public static bool operator ==(Point left, Point right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for inequality. - /// - /// The first instance. - /// The second instance. - /// True, if left is not equal to right; false otherwise. - public static bool operator !=(Point left, Point right) - { - return !left.Equals(right); - } - - /// - /// Converts an OpenTK.Point instance to a System.Drawing.Point. - /// - /// - /// The instance to convert. - /// - /// - /// A instance equivalent to point. - /// - public static implicit operator System.Drawing.Point(Point point) - { - return new System.Drawing.Point(point.X, point.Y); - } - - /// - /// Converts a System.Drawing.Point instance to an OpenTK.Point. - /// - /// - /// The instance to convert. - /// - /// - /// A instance equivalent to point. - /// - public static implicit operator Point(System.Drawing.Point point) - { - return new Point(point.X, point.Y); - } - - /// - /// Converts an OpenTK.Point instance to a System.Drawing.PointF. - /// - /// - /// The instance to convert. - /// - /// - /// A instance equivalent to point. 
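// A minimal usage sketch, not part of the patch itself: the removed Point struct (only compiled
// under NO_SYSDRAWING) converts implicitly to and from System.Drawing.Point, as declared above.
// Assumes this repository's OpenTK fork and a reference to System.Drawing.
using System;

static class PointConversionSketch
{
    static void Run()
    {
        var p = new OpenTK.Point(3, 4);

        System.Drawing.Point sp = p;         // implicit OpenTK -> System.Drawing conversion
        OpenTK.Point back = sp;              // and back again

        Console.WriteLine(p == back);        // True: component-wise equality
        Console.WriteLine(p.IsEmpty);        // False: (3, 4) is not the zero point
    }
}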
- /// - public static implicit operator System.Drawing.PointF(Point point) - { - return new System.Drawing.PointF(point.X, point.Y); - } - - /// - /// Indicates whether this instance is equal to the specified object. - /// - /// The object instance to compare to. - /// True, if both instances are equal; false otherwise. - public override bool Equals(object obj) - { - if (obj is Point) - return Equals((Point)obj); - - return false; - } - - /// - /// Returns the hash code for this instance. - /// - /// A that represents the hash code for this instance./> - public override int GetHashCode() - { - return X.GetHashCode() ^ Y.GetHashCode(); - } - - /// - /// Returns a that describes this instance. - /// - /// A that describes this instance. - public override string ToString() - { - return String.Format("{{{0}, {1}}}", X, Y); - } - - #endregion - - #region IEquatable Members - - /// - /// Indicates whether this instance is equal to the specified Point. - /// - /// The instance to compare to. - /// True, if both instances are equal; false otherwise. - public bool Equals(Point other) - { - return X == other.X && Y == other.Y; - } - - #endregion - } -#endif -} diff --git a/OpenTK/Math/Quaternion.cs b/OpenTK/Math/Quaternion.cs deleted file mode 100644 index 922a4f79..00000000 --- a/OpenTK/Math/Quaternion.cs +++ /dev/null @@ -1,804 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - */ -#endregion - -using System; -using System.Runtime.InteropServices; -using System.ComponentModel; -using System.Xml.Serialization; - -namespace OpenTK -{ - /// - /// Represents a Quaternion. 
- /// - [Serializable] - [StructLayout(LayoutKind.Sequential)] - public struct Quaternion : IEquatable - { - #region Fields - - Vector3 xyz; - float w; - - #endregion - - #region Constructors - - /// - /// Construct a new Quaternion from vector and w components - /// - /// The vector part - /// The w part - public Quaternion(Vector3 v, float w) - { - this.xyz = v; - this.w = w; - } - - /// - /// Construct a new Quaternion - /// - /// The x component - /// The y component - /// The z component - /// The w component - public Quaternion(float x, float y, float z, float w) - : this(new Vector3(x, y, z), w) - { } - - #endregion - - #region Public Members - - #region Properties - - #pragma warning disable 3005 // Identifier differing only in case is not CLS-compliant, compiler bug in Mono 3.4.0 - - /// - /// Gets or sets an OpenTK.Vector3 with the X, Y and Z components of this instance. - /// - [Obsolete("Use Xyz property instead.")] - [EditorBrowsable(EditorBrowsableState.Never)] - [XmlIgnore] - [CLSCompliant(false)] - public Vector3 XYZ { get { return Xyz; } set { Xyz = value; } } - - /// - /// Gets or sets an OpenTK.Vector3 with the X, Y and Z components of this instance. - /// - [CLSCompliant(false)] - public Vector3 Xyz { get { return xyz; } set { xyz = value; } } - - #pragma warning restore 3005 - - /// - /// Gets or sets the X component of this instance. - /// - [XmlIgnore] - public float X { get { return xyz.X; } set { xyz.X = value; } } - - /// - /// Gets or sets the Y component of this instance. - /// - [XmlIgnore] - public float Y { get { return xyz.Y; } set { xyz.Y = value; } } - - /// - /// Gets or sets the Z component of this instance. - /// - [XmlIgnore] - public float Z { get { return xyz.Z; } set { xyz.Z = value; } } - - /// - /// Gets or sets the W component of this instance. - /// - public float W { get { return w; } set { w = value; } } - - #endregion - - #region Instance - - #region ToAxisAngle - - /// - /// Convert the current quaternion to axis angle representation - /// - /// The resultant axis - /// The resultant angle - public void ToAxisAngle(out Vector3 axis, out float angle) - { - Vector4 result = ToAxisAngle(); - axis = result.Xyz; - angle = result.W; - } - - /// - /// Convert this instance to an axis-angle representation. - /// - /// A Vector4 that is the axis-angle representation of this quaternion. - public Vector4 ToAxisAngle() - { - Quaternion q = this; - if (Math.Abs(q.W) > 1.0f) - q.Normalize(); - - Vector4 result = new Vector4(); - - result.W = 2.0f * (float)System.Math.Acos(q.W); // angle - float den = (float)System.Math.Sqrt(1.0 - q.W * q.W); - if (den > 0.0001f) - { - result.Xyz = q.Xyz / den; - } - else - { - // This occurs when the angle is zero. - // Not a problem: just set an arbitrary normalized axis. - result.Xyz = Vector3.UnitX; - } - - return result; - } - - #endregion - - #region public float Length - - /// - /// Gets the length (magnitude) of the quaternion. - /// - /// - public float Length - { - get - { - return (float)System.Math.Sqrt(W * W + Xyz.LengthSquared); - } - } - - #endregion - - #region public float LengthSquared - - /// - /// Gets the square of the quaternion length (magnitude). - /// - public float LengthSquared - { - get - { - return W * W + Xyz.LengthSquared; - } - } - - #endregion - - /// - /// Returns a copy of the Quaternion scaled to unit length. - /// - public Quaternion Normalized() - { - Quaternion q = this; - q.Normalize(); - return q; - } - - /// - /// Reverses the rotation angle of this Quaterniond. 
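// A minimal usage sketch, not part of the patch itself: the ToAxisAngle removed above recovers
// the axis and angle from the half-angle form (sin(a/2) * axis, cos(a/2)). Assumes this
// repository's OpenTK Quaternion/Vector3 types.
using System;
using OpenTK;

static class ToAxisAngleSketch
{
    static void Run()
    {
        float angle = (float)(Math.PI / 2.0);                 // 90 degrees
        Vector3 axis = new Vector3(0, 0, 1);                  // rotate about Z

        // Build the quaternion directly from the half-angle form.
        var q = new Quaternion(axis * (float)Math.Sin(angle / 2), (float)Math.Cos(angle / 2));

        Vector3 outAxis;
        float outAngle;
        q.ToAxisAngle(out outAxis, out outAngle);
        Console.WriteLine("{0} {1}", outAxis, outAngle);      // ~(0, 0, 1) and ~1.5708 (pi/2)
    }
}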
- /// - public void Invert() - { - W = -W; - } - - /// - /// Returns a copy of this Quaterniond with its rotation angle reversed. - /// - public Quaternion Inverted() - { - var q = this; - q.Invert(); - return q; - } - - #region public void Normalize() - - /// - /// Scales the Quaternion to unit length. - /// - public void Normalize() - { - float scale = 1.0f / this.Length; - Xyz *= scale; - W *= scale; - } - - #endregion - - #region public void Conjugate() - - /// - /// Inverts the Vector3 component of this Quaternion. - /// - public void Conjugate() - { - Xyz = -Xyz; - } - - #endregion - - #endregion - - #region Static - - #region Fields - - /// - /// Defines the identity quaternion. - /// - public static readonly Quaternion Identity = new Quaternion(0, 0, 0, 1); - - #endregion - - #region Add - - /// - /// Add two quaternions - /// - /// The first operand - /// The second operand - /// The result of the addition - public static Quaternion Add(Quaternion left, Quaternion right) - { - return new Quaternion( - left.Xyz + right.Xyz, - left.W + right.W); - } - - /// - /// Add two quaternions - /// - /// The first operand - /// The second operand - /// The result of the addition - public static void Add(ref Quaternion left, ref Quaternion right, out Quaternion result) - { - result = new Quaternion( - left.Xyz + right.Xyz, - left.W + right.W); - } - - #endregion - - #region Sub - - /// - /// Subtracts two instances. - /// - /// The left instance. - /// The right instance. - /// The result of the operation. - public static Quaternion Sub(Quaternion left, Quaternion right) - { - return new Quaternion( - left.Xyz - right.Xyz, - left.W - right.W); - } - - /// - /// Subtracts two instances. - /// - /// The left instance. - /// The right instance. - /// The result of the operation. - public static void Sub(ref Quaternion left, ref Quaternion right, out Quaternion result) - { - result = new Quaternion( - left.Xyz - right.Xyz, - left.W - right.W); - } - - #endregion - - #region Mult - - /// - /// Multiplies two instances. - /// - /// The first instance. - /// The second instance. - /// A new instance containing the result of the calculation. - [Obsolete("Use Multiply instead.")] - public static Quaternion Mult(Quaternion left, Quaternion right) - { - return new Quaternion( - right.W * left.Xyz + left.W * right.Xyz + Vector3.Cross(left.Xyz, right.Xyz), - left.W * right.W - Vector3.Dot(left.Xyz, right.Xyz)); - } - - /// - /// Multiplies two instances. - /// - /// The first instance. - /// The second instance. - /// A new instance containing the result of the calculation. - [Obsolete("Use Multiply instead.")] - public static void Mult(ref Quaternion left, ref Quaternion right, out Quaternion result) - { - result = new Quaternion( - right.W * left.Xyz + left.W * right.Xyz + Vector3.Cross(left.Xyz, right.Xyz), - left.W * right.W - Vector3.Dot(left.Xyz, right.Xyz)); - } - - /// - /// Multiplies two instances. - /// - /// The first instance. - /// The second instance. - /// A new instance containing the result of the calculation. - public static Quaternion Multiply(Quaternion left, Quaternion right) - { - Quaternion result; - Multiply(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The first instance. - /// The second instance. - /// A new instance containing the result of the calculation. 
- public static void Multiply(ref Quaternion left, ref Quaternion right, out Quaternion result) - { - result = new Quaternion( - right.W * left.Xyz + left.W * right.Xyz + Vector3.Cross(left.Xyz, right.Xyz), - left.W * right.W - Vector3.Dot(left.Xyz, right.Xyz)); - } - - /// - /// Multiplies an instance by a scalar. - /// - /// The instance. - /// The scalar. - /// A new instance containing the result of the calculation. - public static void Multiply(ref Quaternion quaternion, float scale, out Quaternion result) - { - result = new Quaternion(quaternion.X * scale, quaternion.Y * scale, quaternion.Z * scale, quaternion.W * scale); - } - - /// - /// Multiplies an instance by a scalar. - /// - /// The instance. - /// The scalar. - /// A new instance containing the result of the calculation. - public static Quaternion Multiply(Quaternion quaternion, float scale) - { - return new Quaternion(quaternion.X * scale, quaternion.Y * scale, quaternion.Z * scale, quaternion.W * scale); - } - - #endregion - - #region Conjugate - - /// - /// Get the conjugate of the given quaternion - /// - /// The quaternion - /// The conjugate of the given quaternion - public static Quaternion Conjugate(Quaternion q) - { - return new Quaternion(-q.Xyz, q.W); - } - - /// - /// Get the conjugate of the given quaternion - /// - /// The quaternion - /// The conjugate of the given quaternion - public static void Conjugate(ref Quaternion q, out Quaternion result) - { - result = new Quaternion(-q.Xyz, q.W); - } - - #endregion - - #region Invert - - /// - /// Get the inverse of the given quaternion - /// - /// The quaternion to invert - /// The inverse of the given quaternion - public static Quaternion Invert(Quaternion q) - { - Quaternion result; - Invert(ref q, out result); - return result; - } - - /// - /// Get the inverse of the given quaternion - /// - /// The quaternion to invert - /// The inverse of the given quaternion - public static void Invert(ref Quaternion q, out Quaternion result) - { - float lengthSq = q.LengthSquared; - if (lengthSq != 0.0) - { - float i = 1.0f / lengthSq; - result = new Quaternion(q.Xyz * -i, q.W * i); - } - else - { - result = q; - } - } - - #endregion - - #region Normalize - - /// - /// Scale the given quaternion to unit length - /// - /// The quaternion to normalize - /// The normalized quaternion - public static Quaternion Normalize(Quaternion q) - { - Quaternion result; - Normalize(ref q, out result); - return result; - } - - /// - /// Scale the given quaternion to unit length - /// - /// The quaternion to normalize - /// The normalized quaternion - public static void Normalize(ref Quaternion q, out Quaternion result) - { - float scale = 1.0f / q.Length; - result = new Quaternion(q.Xyz * scale, q.W * scale); - } - - #endregion - - #region FromAxisAngle - - /// - /// Build a quaternion from the given axis and angle - /// - /// The axis to rotate about - /// The rotation angle in radians - /// The equivalent quaternion - public static Quaternion FromAxisAngle(Vector3 axis, float angle) - { - if (axis.LengthSquared == 0.0f) - return Identity; - - Quaternion result = Identity; - - angle *= 0.5f; - axis.Normalize(); - result.Xyz = axis * (float)System.Math.Sin(angle); - result.W = (float)System.Math.Cos(angle); - - return Normalize(result); - } - - #endregion - - #region FromMatrix - - /// - /// Builds a quaternion from the given rotation matrix - /// - /// A rotation matrix - /// The equivalent quaternion - public static Quaternion FromMatrix(Matrix3 matrix) - { - Quaternion result; - 
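// A minimal usage sketch, not part of the patch itself: composing rotations with the
// FromAxisAngle/Multiply removed above. Two 45-degree rotations about the same axis should
// equal one 90-degree rotation. Assumes this repository's OpenTK Quaternion/Vector3 types.
using System;
using OpenTK;

static class FromAxisAngleSketch
{
    static void Run()
    {
        Vector3 axis = new Vector3(0, 1, 0);
        Quaternion half = Quaternion.FromAxisAngle(axis, (float)(Math.PI / 4));
        Quaternion full = Quaternion.FromAxisAngle(axis, (float)(Math.PI / 2));

        // Quaternion multiplication applies "half" twice; compare against the direct rotation.
        Quaternion composed = Quaternion.Multiply(half, half);
        Console.WriteLine(composed);                          // V: (0, ~0.7071, 0), W: ~0.7071
        Console.WriteLine(full);                              // same, up to rounding
    }
}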
FromMatrix(ref matrix, out result); - return result; - } - - /// - /// Builds a quaternion from the given rotation matrix - /// - /// A rotation matrix - /// The equivalent quaternion - public static void FromMatrix(ref Matrix3 matrix, out Quaternion result) - { - float trace = matrix.Trace; - - if (trace > 0) - { - float s = (float)Math.Sqrt(trace + 1) * 2; - float invS = 1f / s; - - result.w = s * 0.25f; - result.xyz.X = (matrix.Row2.Y - matrix.Row1.Z) * invS; - result.xyz.Y = (matrix.Row0.Z - matrix.Row2.X) * invS; - result.xyz.Z = (matrix.Row1.X - matrix.Row0.Y) * invS; - } - else - { - float m00 = matrix.Row0.X, m11 = matrix.Row1.Y, m22 = matrix.Row2.Z; - - if (m00 > m11 && m00 > m22) - { - float s = (float)Math.Sqrt(1 + m00 - m11 - m22) * 2; - float invS = 1f / s; - - result.w = (matrix.Row2.Y - matrix.Row1.Z) * invS; - result.xyz.X = s * 0.25f; - result.xyz.Y = (matrix.Row0.Y + matrix.Row1.X) * invS; - result.xyz.Z = (matrix.Row0.Z + matrix.Row2.X) * invS; - } - else if (m11 > m22) - { - float s = (float)Math.Sqrt(1 + m11 - m00 - m22) * 2; - float invS = 1f / s; - - result.w = (matrix.Row0.Z - matrix.Row2.X) * invS; - result.xyz.X = (matrix.Row0.Y + matrix.Row1.X) * invS; - result.xyz.Y = s * 0.25f; - result.xyz.Z = (matrix.Row1.Z + matrix.Row2.Y) * invS; - } - else - { - float s = (float)Math.Sqrt(1 + m22 - m00 - m11) * 2; - float invS = 1f / s; - - result.w = (matrix.Row1.X - matrix.Row0.Y) * invS; - result.xyz.X = (matrix.Row0.Z + matrix.Row2.X) * invS; - result.xyz.Y = (matrix.Row1.Z + matrix.Row2.Y) * invS; - result.xyz.Z = s * 0.25f; - } - } - } - - #endregion - - #region Slerp - - /// - /// Do Spherical linear interpolation between two quaternions - /// - /// The first quaternion - /// The second quaternion - /// The blend factor - /// A smooth blend between the given quaternions - public static Quaternion Slerp(Quaternion q1, Quaternion q2, float blend) - { - // if either input is zero, return the other. - if (q1.LengthSquared == 0.0f) - { - if (q2.LengthSquared == 0.0f) - { - return Identity; - } - return q2; - } - else if (q2.LengthSquared == 0.0f) - { - return q1; - } - - - float cosHalfAngle = q1.W * q2.W + Vector3.Dot(q1.Xyz, q2.Xyz); - - if (cosHalfAngle >= 1.0f || cosHalfAngle <= -1.0f) - { - // angle = 0.0f, so just return one input. - return q1; - } - else if (cosHalfAngle < 0.0f) - { - q2.Xyz = -q2.Xyz; - q2.W = -q2.W; - cosHalfAngle = -cosHalfAngle; - } - - float blendA; - float blendB; - if (cosHalfAngle < 0.99f) - { - // do proper slerp for big angles - float halfAngle = (float)System.Math.Acos(cosHalfAngle); - float sinHalfAngle = (float)System.Math.Sin(halfAngle); - float oneOverSinHalfAngle = 1.0f / sinHalfAngle; - blendA = (float)System.Math.Sin(halfAngle * (1.0f - blend)) * oneOverSinHalfAngle; - blendB = (float)System.Math.Sin(halfAngle * blend) * oneOverSinHalfAngle; - } - else - { - // do lerp if angle is really small. - blendA = 1.0f - blend; - blendB = blend; - } - - Quaternion result = new Quaternion(blendA * q1.Xyz + blendB * q2.Xyz, blendA * q1.W + blendB * q2.W); - if (result.LengthSquared > 0.0f) - return Normalize(result); - else - return Identity; - } - - #endregion - - #endregion - - #region Operators - - /// - /// Adds two instances. - /// - /// The first instance. - /// The second instance. - /// The result of the calculation. - public static Quaternion operator +(Quaternion left, Quaternion right) - { - left.Xyz += right.Xyz; - left.W += right.W; - return left; - } - - /// - /// Subtracts two instances. - /// - /// The first instance. 
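// A minimal usage sketch, not part of the patch itself: the Slerp removed above interpolates on
// the unit sphere, so the halfway blend between the identity and a 90-degree rotation about Z
// is the 45-degree rotation. Assumes this repository's OpenTK Quaternion/Vector3 types.
using System;
using OpenTK;

static class SlerpSketch
{
    static void Run()
    {
        Quaternion q0 = Quaternion.Identity;
        Quaternion q1 = Quaternion.FromAxisAngle(new Vector3(0, 0, 1), (float)(Math.PI / 2));

        Quaternion mid = Quaternion.Slerp(q0, q1, 0.5f);
        Quaternion expected = Quaternion.FromAxisAngle(new Vector3(0, 0, 1), (float)(Math.PI / 4));

        Console.WriteLine(mid);                               // V: (0, 0, ~0.3827), W: ~0.9239
        Console.WriteLine(expected);                          // same, up to rounding
    }
}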
- /// The second instance. - /// The result of the calculation. - public static Quaternion operator -(Quaternion left, Quaternion right) - { - left.Xyz -= right.Xyz; - left.W -= right.W; - return left; - } - - /// - /// Multiplies two instances. - /// - /// The first instance. - /// The second instance. - /// The result of the calculation. - public static Quaternion operator *(Quaternion left, Quaternion right) - { - Multiply(ref left, ref right, out left); - return left; - } - - /// - /// Multiplies an instance by a scalar. - /// - /// The instance. - /// The scalar. - /// A new instance containing the result of the calculation. - public static Quaternion operator *(Quaternion quaternion, float scale) - { - Multiply(ref quaternion, scale, out quaternion); - return quaternion; - } - - /// - /// Multiplies an instance by a scalar. - /// - /// The instance. - /// The scalar. - /// A new instance containing the result of the calculation. - public static Quaternion operator *(float scale, Quaternion quaternion) - { - return new Quaternion(quaternion.X * scale, quaternion.Y * scale, quaternion.Z * scale, quaternion.W * scale); - } - - /// - /// Compares two instances for equality. - /// - /// The first instance. - /// The second instance. - /// True, if left equals right; false otherwise. - public static bool operator ==(Quaternion left, Quaternion right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for inequality. - /// - /// The first instance. - /// The second instance. - /// True, if left does not equal right; false otherwise. - public static bool operator !=(Quaternion left, Quaternion right) - { - return !left.Equals(right); - } - - #endregion - - #region Overrides - - #region public override string ToString() - - /// - /// Returns a System.String that represents the current Quaternion. - /// - /// - public override string ToString() - { - return String.Format("V: {0}, W: {1}", Xyz, W); - } - - #endregion - - #region public override bool Equals (object o) - - /// - /// Compares this object instance to another object for equality. - /// - /// The other object to be used in the comparison. - /// True if both objects are Quaternions of equal value. Otherwise it returns false. - public override bool Equals(object other) - { - if (other is Quaternion == false) return false; - return this == (Quaternion)other; - } - - #endregion - - #region public override int GetHashCode () - - /// - /// Provides the hash code for this object. - /// - /// A hash code formed from the bitwise XOR of this objects members. - public override int GetHashCode() - { - return Xyz.GetHashCode() ^ W.GetHashCode(); - } - - #endregion - - #endregion - - #endregion - - #region IEquatable Members - - /// - /// Compares this Quaternion instance to another Quaternion for equality. - /// - /// The other Quaternion to be used in the comparison. - /// True if both instances are equal; false otherwise. - public bool Equals(Quaternion other) - { - return Xyz == other.Xyz && W == other.W; - } - - #endregion - } -} diff --git a/OpenTK/Math/Quaterniond.cs b/OpenTK/Math/Quaterniond.cs deleted file mode 100644 index 792dddde..00000000 --- a/OpenTK/Math/Quaterniond.cs +++ /dev/null @@ -1,1428 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. 
- -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -*/ -#endregion - -using System; -using System.Runtime.InteropServices; -using System.ComponentModel; -using System.Xml.Serialization; - -namespace OpenTK -{ - /// - /// Represents a double-precision Quaternion. - /// - [Serializable] - [StructLayout(LayoutKind.Sequential)] - public struct Quaterniond : IEquatable - { - #region Fields - - Vector3d xyz; - double w; - - #endregion - - #region Constructors - - /// - /// Construct a new Quaterniond from vector and w components - /// - /// The vector part - /// The w part - public Quaterniond(Vector3d v, double w) - { - this.xyz = v; - this.w = w; - } - - /// - /// Construct a new Quaterniond - /// - /// The x component - /// The y component - /// The z component - /// The w component - public Quaterniond(double x, double y, double z, double w) - : this(new Vector3d(x, y, z), w) - { } - - #endregion - - #region Public Members - - #region Properties - - #pragma warning disable 3005 // Identifier differing only in case is not CLS-compliant, compiler bug in Mono 3.4.0 - - /// - /// Gets or sets an OpenTK.Vector3d with the X, Y and Z components of this instance. - /// - [Obsolete("Use Xyz property instead.")] - [CLSCompliant(false)] - [EditorBrowsable(EditorBrowsableState.Never)] - [XmlIgnore] - public Vector3d XYZ { get { return Xyz; } set { Xyz = value; } } - - /// - /// Gets or sets an OpenTK.Vector3d with the X, Y and Z components of this instance. - /// - public Vector3d Xyz { get { return xyz; } set { xyz = value; } } - - #pragma warning restore 3005 - - /// - /// Gets or sets the X component of this instance. - /// - [XmlIgnore] - public double X { get { return xyz.X; } set { xyz.X = value; } } - - /// - /// Gets or sets the Y component of this instance. - /// - [XmlIgnore] - public double Y { get { return xyz.Y; } set { xyz.Y = value; } } - - /// - /// Gets or sets the Z component of this instance. - /// - [XmlIgnore] - public double Z { get { return xyz.Z; } set { xyz.Z = value; } } - - /// - /// Gets or sets the W component of this instance. - /// - public double W { get { return w; } set { w = value; } } - - #endregion - - #region Instance - - #region ToAxisAngle - - /// - /// Convert the current quaternion to axis angle representation - /// - /// The resultant axis - /// The resultant angle - public void ToAxisAngle(out Vector3d axis, out double angle) - { - Vector4d result = ToAxisAngle(); - axis = result.Xyz; - angle = result.W; - } - - /// - /// Convert this instance to an axis-angle representation. 
- /// - /// A Vector4 that is the axis-angle representation of this quaternion. - public Vector4d ToAxisAngle() - { - Quaterniond q = this; - if (Math.Abs(q.W) > 1.0f) - q.Normalize(); - - Vector4d result = new Vector4d(); - - result.W = 2.0f * (float)System.Math.Acos(q.W); // angle - float den = (float)System.Math.Sqrt(1.0 - q.W * q.W); - if (den > 0.0001f) - { - result.Xyz = q.Xyz / den; - } - else - { - // This occurs when the angle is zero. - // Not a problem: just set an arbitrary normalized axis. - result.Xyz = Vector3d.UnitX; - } - - return result; - } - - #endregion - - #region public double Length - - /// - /// Gets the length (magnitude) of the Quaterniond. - /// - /// - public double Length - { - get - { - return (double)System.Math.Sqrt(W * W + Xyz.LengthSquared); - } - } - - #endregion - - #region public double LengthSquared - - /// - /// Gets the square of the Quaterniond length (magnitude). - /// - public double LengthSquared - { - get - { - return W * W + Xyz.LengthSquared; - } - } - - #endregion - - /// - /// Returns a copy of the Quaterniond scaled to unit length. - /// - public Quaterniond Normalized() - { - Quaterniond q = this; - q.Normalize(); - return q; - } - - /// - /// Reverses the rotation angle of this Quaterniond. - /// - public void Invert() - { - W = -W; - } - - /// - /// Returns a copy of this Quaterniond with its rotation angle reversed. - /// - public Quaterniond Inverted() - { - var q = this; - q.Invert(); - return q; - } - - #region public void Normalize() - - /// - /// Scales the Quaterniond to unit length. - /// - public void Normalize() - { - double scale = 1.0f / this.Length; - Xyz *= scale; - W *= scale; - } - - #endregion - - #region public void Conjugate() - - /// - /// Inverts the Vector3d component of this Quaterniond. - /// - public void Conjugate() - { - Xyz = -Xyz; - } - - #endregion - - #endregion - - #region Static - - #region Fields - - /// - /// Defines the identity quaternion. - /// - public readonly static Quaterniond Identity = new Quaterniond(0, 0, 0, 1); - - #endregion - - #region Add - - /// - /// Add two quaternions - /// - /// The first operand - /// The second operand - /// The result of the addition - public static Quaterniond Add(Quaterniond left, Quaterniond right) - { - return new Quaterniond( - left.Xyz + right.Xyz, - left.W + right.W); - } - - /// - /// Add two quaternions - /// - /// The first operand - /// The second operand - /// The result of the addition - public static void Add(ref Quaterniond left, ref Quaterniond right, out Quaterniond result) - { - result = new Quaterniond( - left.Xyz + right.Xyz, - left.W + right.W); - } - - #endregion - - #region Sub - - /// - /// Subtracts two instances. - /// - /// The left instance. - /// The right instance. - /// The result of the operation. - public static Quaterniond Sub(Quaterniond left, Quaterniond right) - { - return new Quaterniond( - left.Xyz - right.Xyz, - left.W - right.W); - } - - /// - /// Subtracts two instances. - /// - /// The left instance. - /// The right instance. - /// The result of the operation. - public static void Sub(ref Quaterniond left, ref Quaterniond right, out Quaterniond result) - { - result = new Quaterniond( - left.Xyz - right.Xyz, - left.W - right.W); - } - - #endregion - - #region Mult - - /// - /// Multiplies two instances. - /// - /// The first instance. - /// The second instance. - /// A new instance containing the result of the calculation. 
- [Obsolete("Use Multiply instead.")] - public static Quaterniond Mult(Quaterniond left, Quaterniond right) - { - return new Quaterniond( - right.W * left.Xyz + left.W * right.Xyz + Vector3d.Cross(left.Xyz, right.Xyz), - left.W * right.W - Vector3d.Dot(left.Xyz, right.Xyz)); - } - - /// - /// Multiplies two instances. - /// - /// The first instance. - /// The second instance. - /// A new instance containing the result of the calculation. - [Obsolete("Use Multiply instead.")] - public static void Mult(ref Quaterniond left, ref Quaterniond right, out Quaterniond result) - { - result = new Quaterniond( - right.W * left.Xyz + left.W * right.Xyz + Vector3d.Cross(left.Xyz, right.Xyz), - left.W * right.W - Vector3d.Dot(left.Xyz, right.Xyz)); - } - - /// - /// Multiplies two instances. - /// - /// The first instance. - /// The second instance. - /// A new instance containing the result of the calculation. - public static Quaterniond Multiply(Quaterniond left, Quaterniond right) - { - Quaterniond result; - Multiply(ref left, ref right, out result); - return result; - } - - /// - /// Multiplies two instances. - /// - /// The first instance. - /// The second instance. - /// A new instance containing the result of the calculation. - public static void Multiply(ref Quaterniond left, ref Quaterniond right, out Quaterniond result) - { - result = new Quaterniond( - right.W * left.Xyz + left.W * right.Xyz + Vector3d.Cross(left.Xyz, right.Xyz), - left.W * right.W - Vector3d.Dot(left.Xyz, right.Xyz)); - } - - /// - /// Multiplies an instance by a scalar. - /// - /// The instance. - /// The scalar. - /// A new instance containing the result of the calculation. - public static void Multiply(ref Quaterniond quaternion, double scale, out Quaterniond result) - { - result = new Quaterniond(quaternion.X * scale, quaternion.Y * scale, quaternion.Z * scale, quaternion.W * scale); - } - - /// - /// Multiplies an instance by a scalar. - /// - /// The instance. - /// The scalar. - /// A new instance containing the result of the calculation. 
- public static Quaterniond Multiply(Quaterniond quaternion, double scale) - { - return new Quaterniond(quaternion.X * scale, quaternion.Y * scale, quaternion.Z * scale, quaternion.W * scale); - } - - #endregion - - #region Conjugate - - /// - /// Get the conjugate of the given Quaterniond - /// - /// The Quaterniond - /// The conjugate of the given Quaterniond - public static Quaterniond Conjugate(Quaterniond q) - { - return new Quaterniond(-q.Xyz, q.W); - } - - /// - /// Get the conjugate of the given Quaterniond - /// - /// The Quaterniond - /// The conjugate of the given Quaterniond - public static void Conjugate(ref Quaterniond q, out Quaterniond result) - { - result = new Quaterniond(-q.Xyz, q.W); - } - - #endregion - - #region Invert - - /// - /// Get the inverse of the given Quaterniond - /// - /// The Quaterniond to invert - /// The inverse of the given Quaterniond - public static Quaterniond Invert(Quaterniond q) - { - Quaterniond result; - Invert(ref q, out result); - return result; - } - - /// - /// Get the inverse of the given Quaterniond - /// - /// The Quaterniond to invert - /// The inverse of the given Quaterniond - public static void Invert(ref Quaterniond q, out Quaterniond result) - { - double lengthSq = q.LengthSquared; - if (lengthSq != 0.0) - { - double i = 1.0f / lengthSq; - result = new Quaterniond(q.Xyz * -i, q.W * i); - } - else - { - result = q; - } - } - - #endregion - - #region Normalize - - /// - /// Scale the given Quaterniond to unit length - /// - /// The Quaterniond to normalize - /// The normalized Quaterniond - public static Quaterniond Normalize(Quaterniond q) - { - Quaterniond result; - Normalize(ref q, out result); - return result; - } - - /// - /// Scale the given Quaterniond to unit length - /// - /// The Quaterniond to normalize - /// The normalized Quaterniond - public static void Normalize(ref Quaterniond q, out Quaterniond result) - { - double scale = 1.0f / q.Length; - result = new Quaterniond(q.Xyz * scale, q.W * scale); - } - - #endregion - - #region FromAxisAngle - - /// - /// Build a Quaterniond from the given axis and angle - /// - /// The axis to rotate about - /// The rotation angle in radians - /// - public static Quaterniond FromAxisAngle(Vector3d axis, double angle) - { - if (axis.LengthSquared == 0.0f) - return Identity; - - Quaterniond result = Identity; - - angle *= 0.5f; - axis.Normalize(); - result.Xyz = axis * (double)System.Math.Sin(angle); - result.W = (double)System.Math.Cos(angle); - - return Normalize(result); - } - - #endregion - - #region FromMatrix - - /// - /// Builds a quaternion from the given rotation matrix - /// - /// A rotation matrix - /// The equivalent quaternion - public static Quaterniond FromMatrix(Matrix3d matrix) - { - Quaterniond result; - FromMatrix(ref matrix, out result); - return result; - } - - /// - /// Builds a quaternion from the given rotation matrix - /// - /// A rotation matrix - /// The equivalent quaternion - public static void FromMatrix(ref Matrix3d matrix, out Quaterniond result) - { - double trace = matrix.Trace; - - if (trace > 0) - { - double s = Math.Sqrt(trace + 1) * 2; - double invS = 1.0 / s; - - result.w = s * 0.25; - result.xyz.X = (matrix.Row2.Y - matrix.Row1.Z) * invS; - result.xyz.Y = (matrix.Row0.Z - matrix.Row2.X) * invS; - result.xyz.Z = (matrix.Row1.X - matrix.Row0.Y) * invS; - } - else - { - double m00 = matrix.Row0.X, m11 = matrix.Row1.Y, m22 = matrix.Row2.Z; - - if (m00 > m11 && m00 > m22) - { - double s = Math.Sqrt(1 + m00 - m11 - m22) * 2; - double invS = 1.0 / s; - - 
result.w = (matrix.Row2.Y - matrix.Row1.Z) * invS; - result.xyz.X = s * 0.25; - result.xyz.Y = (matrix.Row0.Y + matrix.Row1.X) * invS; - result.xyz.Z = (matrix.Row0.Z + matrix.Row2.X) * invS; - } - else if (m11 > m22) - { - double s = Math.Sqrt(1 + m11 - m00 - m22) * 2; - double invS = 1.0 / s; - - result.w = (matrix.Row0.Z - matrix.Row2.X) * invS; - result.xyz.X = (matrix.Row0.Y + matrix.Row1.X) * invS; - result.xyz.Y = s * 0.25; - result.xyz.Z = (matrix.Row1.Z + matrix.Row2.Y) * invS; - } - else - { - double s = Math.Sqrt(1 + m22 - m00 - m11) * 2; - double invS = 1.0 / s; - - result.w = (matrix.Row1.X - matrix.Row0.Y) * invS; - result.xyz.X = (matrix.Row0.Z + matrix.Row2.X) * invS; - result.xyz.Y = (matrix.Row1.Z + matrix.Row2.Y) * invS; - result.xyz.Z = s * 0.25; - } - } - } - - #endregion - - #region Slerp - - /// - /// Do Spherical linear interpolation between two quaternions - /// - /// The first Quaterniond - /// The second Quaterniond - /// The blend factor - /// A smooth blend between the given quaternions - public static Quaterniond Slerp(Quaterniond q1, Quaterniond q2, double blend) - { - // if either input is zero, return the other. - if (q1.LengthSquared == 0.0f) - { - if (q2.LengthSquared == 0.0f) - { - return Identity; - } - return q2; - } - else if (q2.LengthSquared == 0.0f) - { - return q1; - } - - - double cosHalfAngle = q1.W * q2.W + Vector3d.Dot(q1.Xyz, q2.Xyz); - - if (cosHalfAngle >= 1.0f || cosHalfAngle <= -1.0f) - { - // angle = 0.0f, so just return one input. - return q1; - } - else if (cosHalfAngle < 0.0f) - { - q2.Xyz = -q2.Xyz; - q2.W = -q2.W; - cosHalfAngle = -cosHalfAngle; - } - - double blendA; - double blendB; - if (cosHalfAngle < 0.99f) - { - // do proper slerp for big angles - double halfAngle = (double)System.Math.Acos(cosHalfAngle); - double sinHalfAngle = (double)System.Math.Sin(halfAngle); - double oneOverSinHalfAngle = 1.0f / sinHalfAngle; - blendA = (double)System.Math.Sin(halfAngle * (1.0f - blend)) * oneOverSinHalfAngle; - blendB = (double)System.Math.Sin(halfAngle * blend) * oneOverSinHalfAngle; - } - else - { - // do lerp if angle is really small. - blendA = 1.0f - blend; - blendB = blend; - } - - Quaterniond result = new Quaterniond(blendA * q1.Xyz + blendB * q2.Xyz, blendA * q1.W + blendB * q2.W); - if (result.LengthSquared > 0.0f) - return Normalize(result); - else - return Identity; - } - - #endregion - - #endregion - - #region Operators - - /// - /// Adds two instances. - /// - /// The first instance. - /// The second instance. - /// The result of the calculation. - public static Quaterniond operator +(Quaterniond left, Quaterniond right) - { - left.Xyz += right.Xyz; - left.W += right.W; - return left; - } - - /// - /// Subtracts two instances. - /// - /// The first instance. - /// The second instance. - /// The result of the calculation. - public static Quaterniond operator -(Quaterniond left, Quaterniond right) - { - left.Xyz -= right.Xyz; - left.W -= right.W; - return left; - } - - /// - /// Multiplies two instances. - /// - /// The first instance. - /// The second instance. - /// The result of the calculation. - public static Quaterniond operator *(Quaterniond left, Quaterniond right) - { - Multiply(ref left, ref right, out left); - return left; - } - - /// - /// Multiplies an instance by a scalar. - /// - /// The instance. - /// The scalar. - /// A new instance containing the result of the calculation. 
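// A minimal usage sketch, not part of the patch itself: the double-precision Quaterniond API
// removed above (FromAxisAngle, Invert, Multiply). Multiplying a unit rotation by its inverse
// should give the identity quaternion up to rounding. Assumes this repository's OpenTK
// Quaterniond/Vector3d types.
using System;
using OpenTK;

static class QuaterniondInvertSketch
{
    static void Run()
    {
        Quaterniond q = Quaterniond.FromAxisAngle(new Vector3d(1, 0, 0), Math.PI / 3);
        Quaterniond qInv = Quaterniond.Invert(q);

        Console.WriteLine(Quaterniond.Multiply(q, qInv));     // V: (0, 0, 0), W: 1
    }
}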
- public static Quaterniond operator *(Quaterniond quaternion, double scale) - { - Multiply(ref quaternion, scale, out quaternion); - return quaternion; - } - - /// - /// Multiplies an instance by a scalar. - /// - /// The instance. - /// The scalar. - /// A new instance containing the result of the calculation. - public static Quaterniond operator *(double scale, Quaterniond quaternion) - { - return new Quaterniond(quaternion.X * scale, quaternion.Y * scale, quaternion.Z * scale, quaternion.W * scale); - } - - /// - /// Compares two instances for equality. - /// - /// The first instance. - /// The second instance. - /// True, if left equals right; false otherwise. - public static bool operator ==(Quaterniond left, Quaterniond right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for inequality. - /// - /// The first instance. - /// The second instance. - /// True, if left does not equal right; false otherwise. - public static bool operator !=(Quaterniond left, Quaterniond right) - { - return !left.Equals(right); - } - - #endregion - - #region Overrides - - #region public override string ToString() - - /// - /// Returns a System.String that represents the current Quaterniond. - /// - /// - public override string ToString() - { - return String.Format("V: {0}, W: {1}", Xyz, W); - } - - #endregion - - #region public override bool Equals (object o) - - /// - /// Compares this object instance to another object for equality. - /// - /// The other object to be used in the comparison. - /// True if both objects are Quaternions of equal value. Otherwise it returns false. - public override bool Equals(object other) - { - if (other is Quaterniond == false) return false; - return this == (Quaterniond)other; - } - - #endregion - - #region public override int GetHashCode () - - /// - /// Provides the hash code for this object. - /// - /// A hash code formed from the bitwise XOR of this objects members. - public override int GetHashCode() - { - return Xyz.GetHashCode() ^ W.GetHashCode(); - } - - #endregion - - #endregion - - #endregion - -#if false - - #region Fields - - /// The W component of the Quaterniond. - public double W; - - /// The X component of the Quaterniond. - public double X; - - /// The Y component of the Quaterniond. - public double Y; - - /// The Z component of the Quaterniond. - public double Z; - - #endregion - - #region Constructors - - /// Constructs left Quaterniond that is left copy of the given Quaterniond. - /// The Quaterniond to copy. - public Quaterniond(ref Quaterniond Quaterniond) : this(Quaterniond.W, Quaterniond.X, Quaterniond.Y, Quaterniond.Z) { } - - /// Constructs left Quaterniond from the given components. - /// The W component for the Quaterniond. - /// A Vector representing the X, Y, and Z componets for the quaterion. - public Quaterniond(double w, ref Vector3d vector3d) : this(w, vector3d.X, vector3d.Y, vector3d.Z) { } - - /// Constructs left Quaterniond from the given axis and angle. - /// The axis for the Quaterniond. - /// The angle for the quaternione. - public Quaterniond(ref Vector3d axis, double angle) - { - double halfAngle = Functions.DTOR * angle / 2; - - this.W = System.Math.Cos(halfAngle); - - double sin = System.Math.Sin(halfAngle); - Vector3d axisNormalized; - Vector3d.Normalize(ref axis, out axisNormalized); - this.X = axisNormalized.X * sin; - this.Y = axisNormalized.Y * sin; - this.Z = axisNormalized.Z * sin; - } - - /// Constructs left Quaterniond from the given components. - /// The W component for the Quaterniond. 
- /// The X component for the Quaterniond. - /// The Y component for the Quaterniond. - /// The Z component for the Quaterniond. - public Quaterniond(double w, double x, double y, double z) - { - this.W = w; - this.X = x; - this.Y = y; - this.Z = z; - } - - /// Constructs left Quaterniond from the given array of double-precision floating-point numbers. - /// The array of doubles for the components of the Quaterniond. - public Quaterniond(double[] doubleArray) - { - if (doubleArray == null || doubleArray.GetLength(0) < 4) throw new MissingFieldException(); - - this.W = doubleArray[0]; - this.X = doubleArray[1]; - this.Y = doubleArray[2]; - this.Z = doubleArray[3]; - } - - /// Constructs left Quaterniond from the given matrix. Only contains rotation information. - /// The matrix for the components of the Quaterniond. - public Quaterniond(ref Matrix4d matrix) - { - double scale = System.Math.Pow(matrix.Determinant, 1.0d/3.0d); - - W = System.Math.Sqrt(System.Math.Max(0, scale + matrix[0, 0] + matrix[1, 1] + matrix[2, 2])) / 2; - X = System.Math.Sqrt(System.Math.Max(0, scale + matrix[0, 0] - matrix[1, 1] - matrix[2, 2])) / 2; - Y = System.Math.Sqrt(System.Math.Max(0, scale - matrix[0, 0] + matrix[1, 1] - matrix[2, 2])) / 2; - Z = System.Math.Sqrt(System.Math.Max(0, scale - matrix[0, 0] - matrix[1, 1] + matrix[2, 2])) / 2; - if( matrix[2,1] - matrix[1,2] < 0 ) X = -X; - if( matrix[0,2] - matrix[2,0] < 0 ) Y = -Y; - if( matrix[1,0] - matrix[0,1] < 0 ) Z = -Z; - } - - public Quaterniond(ref Matrix3d matrix) - { - double scale = System.Math.Pow(matrix.Determinant, 1.0d / 3.0d); - - W = System.Math.Sqrt(System.Math.Max(0, scale + matrix[0, 0] + matrix[1, 1] + matrix[2, 2])) / 2; - X = System.Math.Sqrt(System.Math.Max(0, scale + matrix[0, 0] - matrix[1, 1] - matrix[2, 2])) / 2; - Y = System.Math.Sqrt(System.Math.Max(0, scale - matrix[0, 0] + matrix[1, 1] - matrix[2, 2])) / 2; - Z = System.Math.Sqrt(System.Math.Max(0, scale - matrix[0, 0] - matrix[1, 1] + matrix[2, 2])) / 2; - if (matrix[2, 1] - matrix[1, 2] < 0) X = -X; - if (matrix[0, 2] - matrix[2, 0] < 0) Y = -Y; - if (matrix[1, 0] - matrix[0, 1] < 0) Z = -Z; - } - - #endregion - - #region Arithmetic Operators - - public void Add(ref Quaterniond Quaterniond) - { - W = W + Quaterniond.W; - X = X + Quaterniond.X; - Y = Y + Quaterniond.Y; - Z = Z + Quaterniond.Z; - } - public void Add(ref Quaterniond Quaterniond, out Quaterniond result) - { - result.W = W + Quaterniond.W; - result.X = X + Quaterniond.X; - result.Y = Y + Quaterniond.Y; - result.Z = Z + Quaterniond.Z; - } - public static void Add(ref Quaterniond left, ref Quaterniond right, out Quaterniond result) - { - result.W = left.W + right.W; - result.X = left.X + right.X; - result.Y = left.Y + right.Y; - result.Z = left.Z + right.Z; - } - - public void Subtract(ref Quaterniond Quaterniond) - { - W = W - Quaterniond.W; - X = X - Quaterniond.X; - Y = Y - Quaterniond.Y; - Z = Z - Quaterniond.Z; - } - public void Subtract(ref Quaterniond Quaterniond, out Quaterniond result) - { - result.W = W - Quaterniond.W; - result.X = X - Quaterniond.X; - result.Y = Y - Quaterniond.Y; - result.Z = Z - Quaterniond.Z; - } - public static void Subtract(ref Quaterniond left, ref Quaterniond right, out Quaterniond result) - { - result.W = left.W - right.W; - result.X = left.X - right.X; - result.Y = left.Y - right.Y; - result.Z = left.Z - right.Z; - } - - public void Multiply(ref Quaterniond Quaterniond) - { - double w = W * Quaterniond.W - X * Quaterniond.X - Y * Quaterniond.Y - Z * Quaterniond.Z; - double x = W * 
Quaterniond.X + X * Quaterniond.W + Y * Quaterniond.Z - Z * Quaterniond.Y; - double y = W * Quaterniond.Y + Y * Quaterniond.W + Z * Quaterniond.X - X * Quaterniond.Z; - Z = W * Quaterniond.Z + Z * Quaterniond.W + X * Quaterniond.Y - Y * Quaterniond.X; - W = w; - X = x; - Y = y; - } - public void Multiply(ref Quaterniond Quaterniond, out Quaterniond result) - { - result.W = W * Quaterniond.W - X * Quaterniond.X - Y * Quaterniond.Y - Z * Quaterniond.Z; - result.X = W * Quaterniond.X + X * Quaterniond.W + Y * Quaterniond.Z - Z * Quaterniond.Y; - result.Y = W * Quaterniond.Y + Y * Quaterniond.W + Z * Quaterniond.X - X * Quaterniond.Z; - result.Z = W * Quaterniond.Z + Z * Quaterniond.W + X * Quaterniond.Y - Y * Quaterniond.X; - } - public static void Multiply(ref Quaterniond left, ref Quaterniond right, out Quaterniond result) - { - result.W = left.W * right.W - left.X * right.X - left.Y * right.Y - left.Z * right.Z; - result.X = left.W * right.X + left.X * right.W + left.Y * right.Z - left.Z * right.Y; - result.Y = left.W * right.Y + left.Y * right.W + left.Z * right.X - left.X * right.Z; - result.Z = left.W * right.Z + left.Z * right.W + left.X * right.Y - left.Y * right.X; - } - - public void Multiply(double scalar) - { - W = W * scalar; - X = X * scalar; - Y = Y * scalar; - Z = Z * scalar; - } - public void Multiply(double scalar, out Quaterniond result) - { - result.W = W * scalar; - result.X = X * scalar; - result.Y = Y * scalar; - result.Z = Z * scalar; - } - public static void Multiply(ref Quaterniond Quaterniond, double scalar, out Quaterniond result) - { - result.W = Quaterniond.W * scalar; - result.X = Quaterniond.X * scalar; - result.Y = Quaterniond.Y * scalar; - result.Z = Quaterniond.Z * scalar; - } - - public void Divide(double scalar) - { - if (scalar == 0) throw new DivideByZeroException(); - W = W / scalar; - X = X / scalar; - Y = Y / scalar; - Z = Z / scalar; - } - public void Divide(double scalar, out Quaterniond result) - { - if (scalar == 0) throw new DivideByZeroException(); - result.W = W / scalar; - result.X = X / scalar; - result.Y = Y / scalar; - result.Z = Z / scalar; - } - public static void Divide(ref Quaterniond Quaterniond, double scalar, out Quaterniond result) - { - if (scalar == 0) throw new DivideByZeroException(); - result.W = Quaterniond.W / scalar; - result.X = Quaterniond.X / scalar; - result.Y = Quaterniond.Y / scalar; - result.Z = Quaterniond.Z / scalar; - } - - #endregion - - #region Functions - - public double Modulus - { - get - { - return System.Math.Sqrt(W * W + X * X + Y * Y + Z * Z); - } - } - public double ModulusSquared - { - get - { - return W * W + X * X + Y * Y + Z * Z; - } - } - - public static double DotProduct(Quaterniond left, Quaterniond right) - { - return left.W * right.W + left.X * right.X + left.Y * right.Y + left.Z * right.Z; - } - - public void Normalize() - { - double modulus = System.Math.Sqrt(W * W + X * X + Y * Y + Z * Z); - if (modulus == 0) throw new DivideByZeroException(); - W = W / modulus; - X = X / modulus; - Y = Y / modulus; - Z = Z / modulus; - } - public void Normalize( out Quaterniond result ) - { - double modulus = System.Math.Sqrt(W * W + X * X + Y * Y + Z * Z); - if (modulus == 0) throw new DivideByZeroException(); - result.W = W / modulus; - result.X = X / modulus; - result.Y = Y / modulus; - result.Z = Z / modulus; - } - public static void Normalize(ref Quaterniond Quaterniond, out Quaterniond result) - { - double modulus = System.Math.Sqrt(Quaterniond.W * Quaterniond.W + Quaterniond.X * Quaterniond.X + 
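// A worked check, not part of the patch itself: the legacy block above (compiled out under
// "#if false") expands the same Hamilton product that the active Multiply expresses in Xyz/W
// form. For i * j = k, with left = (x=1) and right = (y=1), the component formulas give
//   w = 0*0 - 1*0 - 0*1 - 0*0 = 0
//   x = 0*0 + 1*0 + 0*0 - 0*1 = 0
//   y = 0*1 + 0*0 + 0*0 - 1*0 = 0
//   z = 0*0 + 0*0 + 1*1 - 0*0 = 1
// The sketch below reproduces that result with the active API; assumes this repository's
// OpenTK Quaterniond type and its (x, y, z, w) constructor.
using System;
using OpenTK;

static class HamiltonProductSketch
{
    static void Run()
    {
        var i = new Quaterniond(1, 0, 0, 0);                  // (x, y, z, w)
        var j = new Quaterniond(0, 1, 0, 0);

        Console.WriteLine(Quaterniond.Multiply(i, j));        // V: (0, 0, 1), W: 0  (= k)
    }
}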
Quaterniond.Y * Quaterniond.Y + Quaterniond.Z * Quaterniond.Z); - if (modulus == 0) throw new DivideByZeroException(); - result.W = Quaterniond.W / modulus; - result.X = Quaterniond.X / modulus; - result.Y = Quaterniond.Y / modulus; - result.Z = Quaterniond.Z / modulus; - } - - public void Conjugate() - { - X = -X; - Y = -Y; - Z = -Z; - } - public void Conjugate( out Quaterniond result ) - { - result.W = W; - result.X = -X; - result.Y = -Y; - result.Z = -Z; - } - public static void Conjugate(ref Quaterniond Quaterniond, out Quaterniond result) - { - result.W = Quaterniond.W; - result.X = -Quaterniond.X; - result.Y = -Quaterniond.Y; - result.Z = -Quaterniond.Z; - } - - public void Inverse() - { - double modulusSquared = W * W + X * X + Y * Y + Z * Z; - if (modulusSquared <= 0) throw new InvalidOperationException(); - double inverseModulusSquared = 1.0 / modulusSquared; - W = W * inverseModulusSquared; - X = X * -inverseModulusSquared; - Y = Y * -inverseModulusSquared; - Z = Z * -inverseModulusSquared; - } - public void Inverse( out Quaterniond result ) - { - double modulusSquared = W * W + X * X + Y * Y + Z * Z; - if (modulusSquared <= 0) throw new InvalidOperationException(); - double inverseModulusSquared = 1.0 / modulusSquared; - result.W = W * inverseModulusSquared; - result.X = X * -inverseModulusSquared; - result.Y = Y * -inverseModulusSquared; - result.Z = Z * -inverseModulusSquared; - } - public static void Inverse(ref Quaterniond Quaterniond, out Quaterniond result) - { - double modulusSquared = Quaterniond.W * Quaterniond.W + Quaterniond.X * Quaterniond.X + Quaterniond.Y * Quaterniond.Y + Quaterniond.Z * Quaterniond.Z; - if (modulusSquared <= 0) throw new InvalidOperationException(); - double inverseModulusSquared = 1.0 / modulusSquared; - result.W = Quaterniond.W * inverseModulusSquared; - result.X = Quaterniond.X * -inverseModulusSquared; - result.Y = Quaterniond.Y * -inverseModulusSquared; - result.Z = Quaterniond.Z * -inverseModulusSquared; - } - - public void Log() - { - if (System.Math.Abs(W) < 1.0) - { - double angle = System.Math.Acos(W); - double sin = System.Math.Sin(angle); - - if (System.Math.Abs(sin) >= 0) - { - double coefficient = angle / sin; - X = X * coefficient; - Y = Y * coefficient; - Z = Z * coefficient; - } - } - else - { - X = 0; - Y = 0; - Z = 0; - } - - W = 0; - } - public void Log( out Quaterniond result ) - { - if (System.Math.Abs(W) < 1.0) - { - double angle = System.Math.Acos(W); - double sin = System.Math.Sin(angle); - - if (System.Math.Abs(sin) >= 0) - { - double coefficient = angle / sin; - result.X = X * coefficient; - result.Y = Y * coefficient; - result.Z = Z * coefficient; - } - else - { - result.X = X; - result.Y = Y; - result.Z = Z; - } - } - else - { - result.X = 0; - result.Y = 0; - result.Z = 0; - } - - result.W = 0; - } - public static void Log(ref Quaterniond Quaterniond, out Quaterniond result) - { - if (System.Math.Abs(Quaterniond.W) < 1.0) - { - double angle = System.Math.Acos(Quaterniond.W); - double sin = System.Math.Sin(angle); - - if (System.Math.Abs(sin) >= 0) - { - double coefficient = angle / sin; - result.X = Quaterniond.X * coefficient; - result.Y = Quaterniond.Y * coefficient; - result.Z = Quaterniond.Z * coefficient; - } - else - { - result.X = Quaterniond.X; - result.Y = Quaterniond.Y; - result.Z = Quaterniond.Z; - } - } - else - { - result.X = 0; - result.Y = 0; - result.Z = 0; - } - - result.W = 0; - } - - public void Exp() - { - double angle = System.Math.Sqrt(X * X + Y * Y + Z * Z); - double sin = 
System.Math.Sin(angle); - - if (System.Math.Abs(sin) > 0) - { - double coefficient = angle / sin; - W = 0; - X = X * coefficient; - Y = Y * coefficient; - Z = Z * coefficient; - } - else - { - W = 0; - } - } - public void Exp(out Quaterniond result) - { - double angle = System.Math.Sqrt(X * X + Y * Y + Z * Z); - double sin = System.Math.Sin(angle); - - if (System.Math.Abs(sin) > 0) - { - double coefficient = angle / sin; - result.W = 0; - result.X = X * coefficient; - result.Y = Y * coefficient; - result.Z = Z * coefficient; - } - else - { - result.W = 0; - result.X = X; - result.Y = Y; - result.Z = Z; - } - } - public static void Exp(ref Quaterniond Quaterniond, out Quaterniond result) - { - double angle = System.Math.Sqrt(Quaterniond.X * Quaterniond.X + Quaterniond.Y * Quaterniond.Y + Quaterniond.Z * Quaterniond.Z); - double sin = System.Math.Sin(angle); - - if (System.Math.Abs(sin) > 0) - { - double coefficient = angle / sin; - result.W = 0; - result.X = Quaterniond.X * coefficient; - result.Y = Quaterniond.Y * coefficient; - result.Z = Quaterniond.Z * coefficient; - } - else - { - result.W = 0; - result.X = Quaterniond.X; - result.Y = Quaterniond.Y; - result.Z = Quaterniond.Z; - } - } - - /// Returns left matrix for this Quaterniond. - public void Matrix4d(out Matrix4d result) - { - // TODO Expand - result = new Matrix4d(ref this); - } - - public void GetAxisAndAngle(out Vector3d axis, out double angle) - { - Quaterniond Quaterniond; - Normalize(out Quaterniond); - double cos = Quaterniond.W; - angle = System.Math.Acos(cos) * 2 * Functions.RTOD; - double sin = System.Math.Sqrt( 1.0d - cos * cos ); - if ( System.Math.Abs( sin ) < 0.0001 ) sin = 1; - axis = new Vector3d(X / sin, Y / sin, Z / sin); - } - - public static void Slerp(ref Quaterniond start, ref Quaterniond end, double blend, out Quaterniond result) - { - if (start.W == 0 && start.X == 0 && start.Y == 0 && start.Z == 0) - { - if (end.W == 0 && end.X == 0 && end.Y == 0 && end.Z == 0) - { - result.W = 1; - result.X = 0; - result.Y = 0; - result.Z = 0; - } - else - { - result = end; - } - } - else if (end.W == 0 && end.X == 0 && end.Y == 0 && end.Z == 0) - { - result = start; - } - - Vector3d startVector = new Vector3d(start.X, start.Y, start.Z); - Vector3d endVector = new Vector3d(end.X, end.Y, end.Z); - double cosHalfAngle = start.W * end.W + Vector3d.Dot(startVector, endVector); - - if (cosHalfAngle >= 1.0f || cosHalfAngle <= -1.0f) - { - // angle = 0.0f, so just return one input. - result = start; - } - else if (cosHalfAngle < 0.0f) - { - end.W = -end.W; - end.X = -end.X; - end.Y = -end.Y; - end.Z = -end.Z; - cosHalfAngle = -cosHalfAngle; - } - - double blendA; - double blendB; - if (cosHalfAngle < 0.99f) - { - // do proper slerp for big angles - double halfAngle = (double)System.Math.Acos(cosHalfAngle); - double sinHalfAngle = (double)System.Math.Sin(halfAngle); - double oneOverSinHalfAngle = 1.0f / sinHalfAngle; - blendA = (double)System.Math.Sin(halfAngle * (1.0f - blend)) * oneOverSinHalfAngle; - blendB = (double)System.Math.Sin(halfAngle * blend) * oneOverSinHalfAngle; - } - else - { - // do lerp if angle is really small. 
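            // [annotation, not in the original OpenTK source] When cosHalfAngle is close to 1 the two
            // quaternions are nearly parallel, so sin(halfAngle) tends toward 0 and the proper slerp weights
            //     blendA = sin((1 - blend) * halfAngle) / sin(halfAngle)
            //     blendB = sin(blend * halfAngle) / sin(halfAngle)
            // become numerically unstable; the plain linear blend used in this branch, followed by the
            // Normalize() call further below, is an adequate approximation in that near-parallel case.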
- blendA = 1.0f - blend; - blendB = blend; - } - - result.W = blendA * start.W + blendB * end.W; - result.X = blendA * start.X + blendB * end.X; - result.Y = blendA * start.Y + blendB * end.Y; - result.Z = blendA * start.Z + blendB * end.Z; - - if (result.W != 0 || result.X != 0 || result.Y != 0 || result.Z != 0) - { - result.Normalize(); - } - else - { - result.W = 1; - result.X = 0; - result.Y = 0; - result.Z = 0; - } - } - - #endregion - - #region HashCode - - /// Returns the hash code for this instance. - /// A 32-bit signed integer that is the hash code for this instance. - public override int GetHashCode() - { - base.GetHashCode(); - return W.GetHashCode() ^ X.GetHashCode() ^ Y.GetHashCode() ^ Z.GetHashCode(); - } - - #endregion - - #region String and Parse - - /// Returns the fully qualified type name of this instance. - /// A System.String containing left fully qualified type name. - public override string ToString() - { - return string.Format("({0}, {1}, {2}, {3})", W, X, Y, Z); - } - - /// Parses left string, converting it to left Quaterniond. - /// The string to parse. - /// The Quaterniond represented by the string. - public static void Parse(string str, out Quaterniond result) - { - Match match = new Regex(@"\((?.*),(?.*),(?.*),(?.*)\)", RegexOptions.None).Match(str); - if (!match.Success) throw new Exception("Parse failed!"); - - result.W = double.Parse(match.Result("${w}")); - result.X = double.Parse(match.Result("${x}")); - result.Y = double.Parse(match.Result("${y}")); - result.Z = double.Parse(match.Result("${z}")); - } - - #endregion - - #region Constants - - /// A quaterion with all zero components. - public static readonly Quaterniond Zero = new Quaterniond(0, 0, 0, 0); - - /// A quaterion representing an identity. - public static readonly Quaterniond Identity = new Quaterniond(1, 0, 0, 0); - - /// A quaterion representing the W axis. - public static readonly Quaterniond WAxis = new Quaterniond(1, 0, 0, 0); - - /// A quaterion representing the X axis. - public static readonly Quaterniond XAxis = new Quaterniond(0, 1, 0, 0); - - /// A quaterion representing the Y axis. - public static readonly Quaterniond YAxis = new Quaterniond(0, 0, 1, 0); - - /// A quaterion representing the Z axis. - public static readonly Quaterniond ZAxis = new Quaterniond(0, 0, 0, 1); - - #endregion - -#endif - - #region IEquatable Members - - /// - /// Compares this Quaterniond instance to another Quaterniond for equality. - /// - /// The other Quaterniond to be used in the comparison. - /// True if both instances are equal; false otherwise. - public bool Equals(Quaterniond other) - { - return Xyz == other.Xyz && W == other.W; - } - - #endregion - } -} \ No newline at end of file diff --git a/OpenTK/Math/Rectangle.cs b/OpenTK/Math/Rectangle.cs deleted file mode 100644 index 0b3a28dd..00000000 --- a/OpenTK/Math/Rectangle.cs +++ /dev/null @@ -1,323 +0,0 @@ -#region License - // - // The Open Toolkit Library License - // - // Copyright (c) 2006 - 2009 the Open Toolkit library. 
- // - // Permission is hereby granted, free of charge, to any person obtaining a copy - // of this software and associated documentation files (the "Software"), to deal - // in the Software without restriction, including without limitation the rights to - // use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of - // the Software, and to permit persons to whom the Software is furnished to do - // so, subject to the following conditions: - // - // The above copyright notice and this permission notice shall be included in all - // copies or substantial portions of the Software. - // - // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, - // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES - // OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND - // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT - // HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, - // WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR - // OTHER DEALINGS IN THE SOFTWARE. - // - #endregion - -using System; -using System.Collections.Generic; -using System.Text; - -namespace OpenTK -{ -#if NO_SYSDRAWING - /// - /// Represents a rectangular region on a two-dimensional plane. - /// - public struct Rectangle : IEquatable - { - #region Fields - - Point location; - Size size; - - #endregion - - #region Constructors - - /// - /// Constructs a new Rectangle instance. - /// - /// The top-left corner of the Rectangle. - /// The width and height of the Rectangle. - public Rectangle(Point location, Size size) - : this() - { - Location = location; - Size = size; - } - - /// - /// Constructs a new Rectangle instance. - /// - /// The x coordinate of the Rectangle. - /// The y coordinate of the Rectangle. - /// The width coordinate of the Rectangle. - /// The height coordinate of the Rectangle. - public Rectangle(int x, int y, int width, int height) - : this(new Point(x, y), new Size(width, height)) - { } - - #endregion - - #region Public Members - - /// - /// Gets or sets the x coordinate of the Rectangle. - /// - public int X - { - get { return Location.X; } - set { Location = new Point (value, Y); } - } - - /// - /// Gets or sets the y coordinate of the Rectangle. - /// - public int Y - { - get { return Location.Y; } - set { Location = new Point (X, value); } - } - - /// - /// Gets or sets the width of the Rectangle. - /// - public int Width - { - get { return Size.Width; } - set { Size = new Size (value, Height); } - } - - /// - /// Gets or sets the height of the Rectangle. - /// - public int Height - { - get { return Size.Height; } - set { Size = new Size(Width, value); } - } - - /// - /// Gets or sets a representing the x and y coordinates - /// of the Rectangle. - /// - public Point Location - { - get { return location; } - set { location = value; } - } - - /// - /// Gets or sets a representing the width and height - /// of the Rectangle. - /// - public Size Size - { - get { return size; } - set { size = value; } - } - - /// - /// Gets the y coordinate of the top edge of this Rectangle. - /// - public int Top { get { return Y; } } - - /// - /// Gets the x coordinate of the right edge of this Rectangle. - /// - public int Right { get { return X + Width; } } - - /// - /// Gets the y coordinate of the bottom edge of this Rectangle. - /// - public int Bottom { get { return Y + Height; } } - - /// - /// Gets the x coordinate of the left edge of this Rectangle. 
- /// - public int Left { get { return X; } } - - /// - /// Gets a that indicates whether this - /// Rectangle is equal to the empty Rectangle. - /// - public bool IsEmpty - { - get { return Location.IsEmpty && Size.IsEmpty; } - } - - /// - /// Defines the empty Rectangle. - /// - public static readonly Rectangle Zero = new Rectangle(); - - /// - /// Defines the empty Rectangle. - /// - public static readonly Rectangle Empty = new Rectangle(); - - /// - /// Constructs a new instance with the specified edges. - /// - /// The left edge of the Rectangle. - /// The top edge of the Rectangle. - /// The right edge of the Rectangle. - /// The bottom edge of the Rectangle. - /// A new Rectangle instance with the specified edges. - public static Rectangle FromLTRB(int left, int top, int right, int bottom) - { - return new Rectangle(new Point(left, top), new Size(right - left, bottom - top)); - } - - /// - /// Tests whether this instance contains the specified Point. - /// - /// The to test. - /// True if this instance contains point; false otherwise. - /// The left and top edges are inclusive. The right and bottom edges - /// are exclusive. - public bool Contains(Point point) - { - return point.X >= Left && point.X < Right && - point.Y >= Top && point.Y < Bottom; - } - - /// - /// Tests whether this instance contains the specified Rectangle. - /// - /// The to test. - /// True if this instance contains rect; false otherwise. - /// The left and top edges are inclusive. The right and bottom edges - /// are exclusive. - public bool Contains(Rectangle rect) - { - return Contains(rect.Location) && Contains(rect.Location + rect.Size); - } - - /// - /// Compares two instances for equality. - /// - /// The first instance. - /// The second instance. - /// True, if left is equal to right; false otherwise. - public static bool operator ==(Rectangle left, Rectangle right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for inequality. - /// - /// The first instance. - /// The second instance. - /// True, if left is not equal to right; false otherwise. - public static bool operator !=(Rectangle left, Rectangle right) - { - return !left.Equals(right); - } - - /// - /// Converts an OpenTK.Rectangle instance to a System.Drawing.Rectangle. - /// - /// - /// The instance to convert. - /// - /// - /// A instance equivalent to rect. - /// - public static implicit operator System.Drawing.Rectangle(Rectangle rect) - { - return new System.Drawing.Rectangle(rect.Location, rect.Size); - } - - /// - /// Converts a System.Drawing.Rectangle instance to an OpenTK.Rectangle. - /// - /// - /// The instance to convert. - /// - /// - /// A instance equivalent to point. - /// - public static implicit operator Rectangle(System.Drawing.Rectangle rect) - { - return new Rectangle(rect.Location, rect.Size); - } - - /// - /// Converts an OpenTK.Rectangle instance to a System.Drawing.RectangleF. - /// - /// - /// The instance to convert. - /// - /// - /// A instance equivalent to rect. - /// - public static implicit operator System.Drawing.RectangleF(Rectangle rect) - { - return new System.Drawing.RectangleF(rect.Location, rect.Size); - } - - /// - /// Indicates whether this instance is equal to the specified object. - /// - /// The object instance to compare to. - /// True, if both instances are equal; false otherwise. - public override bool Equals(object obj) - { - if (obj is Rectangle) - return Equals((Rectangle)obj); - - return false; - } - - /// - /// Returns the hash code for this instance. 
- /// - /// A that represents the hash code for this instance./> - public override int GetHashCode() - { - return Location.GetHashCode() & Size.GetHashCode(); - } - - /// - /// Returns a that describes this instance. - /// - /// A that describes this instance. - public override string ToString() - { - return String.Format("{{{0}-{1}}}", Location, Location + Size); - } - - - #endregion - - #region IEquatable Members - - /// - /// Indicates whether this instance is equal to the specified Rectangle. - /// - /// The instance to compare to. - /// True, if both instances are equal; false otherwise. - public bool Equals(Rectangle other) - { - return Location.Equals(other.Location) && - Size.Equals(other.Size); - } - - #endregion - } -#endif -} diff --git a/OpenTK/Math/Size.cs b/OpenTK/Math/Size.cs deleted file mode 100644 index baadb4bc..00000000 --- a/OpenTK/Math/Size.cs +++ /dev/null @@ -1,222 +0,0 @@ -#region License -// -// The Open Toolkit Library License -// -// Copyright (c) 2006 - 2009 the Open Toolkit library. -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights to -// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -// the Software, and to permit persons to whom the Software is furnished to do -// so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all -// copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -// OTHER DEALINGS IN THE SOFTWARE. -// -#endregion - -using System; -using System.Collections.Generic; -using System.Text; - -namespace OpenTK -{ -#if NO_SYSDRAWING - /// - /// Stores the width and height of a rectangle. - /// - public struct Size : IEquatable - { - #region Fields - - int width, height; - - #endregion - - #region Constructors - - /// - /// Constructs a new Size instance. - /// - /// The width of this instance. - /// The height of this instance. - public Size(int width, int height) - : this() - { - Width = width; - Height = height; - } - - #endregion - - #region Public Members - - /// - /// Gets or sets the width of this instance. - /// - public int Width - { - get { return width; } - set - { - if (width < 0) - throw new ArgumentOutOfRangeException(); - width = value; - } - } - - /// - /// Gets or sets the height of this instance. - /// - public int Height - { - get { return height; } - set - { - if (height < 0) - throw new ArgumentOutOfRangeException(); - height = value; - } - } - - /// - /// Gets a that indicates whether this instance is empty or zero. - /// - public bool IsEmpty - { - get { return Width == 0 && Height == 0; } - } - - /// - /// Returns a Size instance equal to (0, 0). - /// - public static readonly Size Empty = new Size(); - - /// - /// Returns a Size instance equal to (0, 0). - /// - public static readonly Size Zero = new Size(); - - /// - /// Compares two instances for equality. 
- /// - /// The first instance. - /// The second instance. - /// True, if left is equal to right; false otherwise. - public static bool operator ==(Size left, Size right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for inequality. - /// - /// The first instance. - /// The second instance. - /// True, if left is not equal to right; false otherwise. - public static bool operator !=(Size left, Size right) - { - return !left.Equals(right); - } - - /// - /// Converts an OpenTK.Size instance to a System.Drawing.Size. - /// - /// - /// The instance to convert. - /// - /// - /// A instance equivalent to size. - /// - public static implicit operator System.Drawing.Size(Size size) -{ - return new System.Drawing.Size(size.Width, size.Height); - } - - /// - /// Converts a System.Drawing.Size instance to an OpenTK.Size. - /// - /// - /// The instance to convert. - /// - /// - /// A instance equivalent to size. - /// - public static implicit operator Size(System.Drawing.Size size) - { - return new Size(size.Width, size.Height); - } - - /// - /// Converts an OpenTK.Point instance to a System.Drawing.SizeF. - /// - /// - /// The instance to convert. - /// - /// - /// A instance equivalent to size. - /// - public static implicit operator System.Drawing.SizeF(Size size) - { - return new System.Drawing.SizeF(size.Width, size.Height); - } - - /// - /// Indicates whether this instance is equal to the specified object. - /// - /// The object instance to compare to. - /// True, if both instances are equal; false otherwise. - public override bool Equals(object obj) - { - if (obj is Size) - return Equals((Size)obj); - - return false; - } - - /// - /// Returns the hash code for this instance. - /// - /// A that represents the hash code for this instance./> - public override int GetHashCode() - { - return Width.GetHashCode() ^ Height.GetHashCode(); - } - - /// - /// Returns a that describes this instance. - /// - /// A that describes this instance. - public override string ToString() - { - return String.Format("{{{0}, {1}}}", Width, Height); - } - - #endregion - - #region IEquatable Members - - /// - /// Indicates whether this instance is equal to the specified Size. - /// - /// The instance to compare to. - /// True, if both instances are equal; false otherwise. - public bool Equals(Size other) - { - return Width == other.Width && Height == other.Height; - } - - #endregion - } -#endif -} diff --git a/OpenTK/Math/Vector2.cs b/OpenTK/Math/Vector2.cs deleted file mode 100644 index 4d1064a6..00000000 --- a/OpenTK/Math/Vector2.cs +++ /dev/null @@ -1,1180 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - */ -#endregion - -using System; -using System.Runtime.InteropServices; -using System.Xml.Serialization; - -namespace OpenTK -{ - /// Represents a 2D vector using two single-precision floating-point numbers. - /// - /// The Vector2 structure is suitable for interoperation with unmanaged code requiring two consecutive floats. - /// - [Serializable] - [StructLayout(LayoutKind.Sequential)] - public struct Vector2 : IEquatable - { - #region Fields - - /// - /// The X component of the Vector2. - /// - public float X; - - /// - /// The Y component of the Vector2. - /// - public float Y; - - #endregion - - #region Constructors - - /// - /// Constructs a new instance. - /// - /// The value that will initialize this instance. - public Vector2(float value) - { - X = value; - Y = value; - } - - /// - /// Constructs a new Vector2. - /// - /// The x coordinate of the net Vector2. - /// The y coordinate of the net Vector2. - public Vector2(float x, float y) - { - X = x; - Y = y; - } - - /// - /// Constructs a new Vector2 from the given Vector2. - /// - /// The Vector2 to copy components from. - [Obsolete] - public Vector2(Vector2 v) - { - X = v.X; - Y = v.Y; - } - - /// - /// Constructs a new Vector2 from the given Vector3. - /// - /// The Vector3 to copy components from. Z is discarded. - [Obsolete] - public Vector2(Vector3 v) - { - X = v.X; - Y = v.Y; - } - - /// - /// Constructs a new Vector2 from the given Vector4. - /// - /// The Vector4 to copy components from. Z and W are discarded. - [Obsolete] - public Vector2(Vector4 v) - { - X = v.X; - Y = v.Y; - } - - #endregion - - #region Public Members - - /// - /// Gets or sets the value at the index of the Vector. - /// - public float this[int index] { - get{ - if(index == 0) return X; - else if(index == 1) return Y; - throw new IndexOutOfRangeException("You tried to access this vector at index: " + index); - } set{ - if(index == 0) X = value; - else if(index == 1) Y = value; - else throw new IndexOutOfRangeException("You tried to set this vector at index: " + index); - } - } - - #region Instance - - #region public void Add() - - /// Add the Vector passed as parameter to this instance. - /// Right operand. This parameter is only read from. - [CLSCompliant(false)] - [Obsolete("Use static Add() method instead.")] - public void Add(Vector2 right) - { - this.X += right.X; - this.Y += right.Y; - } - - /// Add the Vector passed as parameter to this instance. - /// Right operand. This parameter is only read from. - [CLSCompliant(false)] - [Obsolete("Use static Add() method instead.")] - public void Add(ref Vector2 right) - { - this.X += right.X; - this.Y += right.Y; - } - - #endregion public void Add() - - #region public void Sub() - - /// Subtract the Vector passed as parameter from this instance. - /// Right operand. This parameter is only read from. - [CLSCompliant(false)] - [Obsolete("Use static Subtract() method instead.")] - public void Sub(Vector2 right) - { - this.X -= right.X; - this.Y -= right.Y; - } - - /// Subtract the Vector passed as parameter from this instance. - /// Right operand. This parameter is only read from. 
- [CLSCompliant(false)] - [Obsolete("Use static Subtract() method instead.")] - public void Sub(ref Vector2 right) - { - this.X -= right.X; - this.Y -= right.Y; - } - - #endregion public void Sub() - - #region public void Mult() - - /// Multiply this instance by a scalar. - /// Scalar operand. - [Obsolete("Use static Multiply() method instead.")] - public void Mult(float f) - { - this.X *= f; - this.Y *= f; - } - - #endregion public void Mult() - - #region public void Div() - - /// Divide this instance by a scalar. - /// Scalar operand. - [Obsolete("Use static Divide() method instead.")] - public void Div(float f) - { - float mult = 1.0f / f; - this.X *= mult; - this.Y *= mult; - } - - #endregion public void Div() - - #region public float Length - - /// - /// Gets the length (magnitude) of the vector. - /// - /// - /// - public float Length - { - get - { - return (float)System.Math.Sqrt(X * X + Y * Y); - } - } - - #endregion - - #region public float LengthFast - - /// - /// Gets an approximation of the vector length (magnitude). - /// - /// - /// This property uses an approximation of the square root function to calculate vector magnitude, with - /// an upper error bound of 0.001. - /// - /// - /// - public float LengthFast - { - get - { - return 1.0f / MathHelper.InverseSqrtFast(X * X + Y * Y); - } - } - - #endregion - - #region public float LengthSquared - - /// - /// Gets the square of the vector length (magnitude). - /// - /// - /// This property avoids the costly square root operation required by the Length property. This makes it more suitable - /// for comparisons. - /// - /// - /// - public float LengthSquared - { - get - { - return X * X + Y * Y; - } - } - - #endregion - - #region public Vector2 PerpendicularRight - - /// - /// Gets the perpendicular vector on the right side of this vector. - /// - public Vector2 PerpendicularRight - { - get - { - return new Vector2(Y, -X); - } - } - - #endregion - - #region public Vector2 PerpendicularLeft - - /// - /// Gets the perpendicular vector on the left side of this vector. - /// - public Vector2 PerpendicularLeft - { - get - { - return new Vector2(-Y, X); - } - } - - #endregion - - /// - /// Returns a copy of the Vector2 scaled to unit length. - /// - /// - public Vector2 Normalized() - { - Vector2 v = this; - v.Normalize(); - return v; - } - #region public void Normalize() - - /// - /// Scales the Vector2 to unit length. - /// - public void Normalize() - { - float scale = 1.0f / this.Length; - X *= scale; - Y *= scale; - } - - #endregion - - #region public void NormalizeFast() - - /// - /// Scales the Vector2 to approximately unit length. - /// - public void NormalizeFast() - { - float scale = MathHelper.InverseSqrtFast(X * X + Y * Y); - X *= scale; - Y *= scale; - } - - #endregion - - #region public void Scale() - - /// - /// Scales the current Vector2 by the given amounts. - /// - /// The scale of the X component. - /// The scale of the Y component. - [Obsolete("Use static Multiply() method instead.")] - public void Scale(float sx, float sy) - { - this.X = X * sx; - this.Y = Y * sy; - } - - /// Scales this instance by the given parameter. - /// The scaling of the individual components. - [CLSCompliant(false)] - [Obsolete("Use static Multiply() method instead.")] - public void Scale(Vector2 scale) - { - this.X *= scale.X; - this.Y *= scale.Y; - } - - /// Scales this instance by the given parameter. - /// The scaling of the individual components. 
- [CLSCompliant(false)] - [Obsolete("Use static Multiply() method instead.")] - public void Scale(ref Vector2 scale) - { - this.X *= scale.X; - this.Y *= scale.Y; - } - - #endregion public void Scale() - - #endregion - - #region Static - - #region Fields - - /// - /// Defines a unit-length Vector2 that points towards the X-axis. - /// - public static readonly Vector2 UnitX = new Vector2(1, 0); - - /// - /// Defines a unit-length Vector2 that points towards the Y-axis. - /// - public static readonly Vector2 UnitY = new Vector2(0, 1); - - /// - /// Defines a zero-length Vector2. - /// - public static readonly Vector2 Zero = new Vector2(0, 0); - - /// - /// Defines an instance with all components set to 1. - /// - public static readonly Vector2 One = new Vector2(1, 1); - - /// - /// Defines the size of the Vector2 struct in bytes. - /// - public static readonly int SizeInBytes = Marshal.SizeOf(new Vector2()); - - #endregion - - #region Obsolete - - #region Sub - - /// - /// Subtract one Vector from another - /// - /// First operand - /// Second operand - /// Result of subtraction - [Obsolete("Use static Subtract() method instead.")] - public static Vector2 Sub(Vector2 a, Vector2 b) - { - a.X -= b.X; - a.Y -= b.Y; - return a; - } - - /// - /// Subtract one Vector from another - /// - /// First operand - /// Second operand - /// Result of subtraction - [Obsolete("Use static Subtract() method instead.")] - public static void Sub(ref Vector2 a, ref Vector2 b, out Vector2 result) - { - result.X = a.X - b.X; - result.Y = a.Y - b.Y; - } - - #endregion - - #region Mult - - /// - /// Multiply a vector and a scalar - /// - /// Vector operand - /// Scalar operand - /// Result of the multiplication - [Obsolete("Use static Multiply() method instead.")] - public static Vector2 Mult(Vector2 a, float f) - { - a.X *= f; - a.Y *= f; - return a; - } - - /// - /// Multiply a vector and a scalar - /// - /// Vector operand - /// Scalar operand - /// Result of the multiplication - [Obsolete("Use static Multiply() method instead.")] - public static void Mult(ref Vector2 a, float f, out Vector2 result) - { - result.X = a.X * f; - result.Y = a.Y * f; - } - - #endregion - - #region Div - - /// - /// Divide a vector by a scalar - /// - /// Vector operand - /// Scalar operand - /// Result of the division - [Obsolete("Use static Divide() method instead.")] - public static Vector2 Div(Vector2 a, float f) - { - float mult = 1.0f / f; - a.X *= mult; - a.Y *= mult; - return a; - } - - /// - /// Divide a vector by a scalar - /// - /// Vector operand - /// Scalar operand - /// Result of the division - [Obsolete("Use static Divide() method instead.")] - public static void Div(ref Vector2 a, float f, out Vector2 result) - { - float mult = 1.0f / f; - result.X = a.X * mult; - result.Y = a.Y * mult; - } - - #endregion - - #endregion - - #region Add - - /// - /// Adds two vectors. - /// - /// Left operand. - /// Right operand. - /// Result of operation. - public static Vector2 Add(Vector2 a, Vector2 b) - { - Add(ref a, ref b, out a); - return a; - } - - /// - /// Adds two vectors. - /// - /// Left operand. - /// Right operand. - /// Result of operation. 
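        // [annotation, not in the original OpenTK source] The ref/out overload below writes its result
        // through the out parameter and avoids copying the Vector2 struct; the by-value Add(a, b) overload
        // above simply forwards to it.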
- public static void Add(ref Vector2 a, ref Vector2 b, out Vector2 result) - { - result = new Vector2(a.X + b.X, a.Y + b.Y); - } - - #endregion - - #region Subtract - - /// - /// Subtract one Vector from another - /// - /// First operand - /// Second operand - /// Result of subtraction - public static Vector2 Subtract(Vector2 a, Vector2 b) - { - Subtract(ref a, ref b, out a); - return a; - } - - /// - /// Subtract one Vector from another - /// - /// First operand - /// Second operand - /// Result of subtraction - public static void Subtract(ref Vector2 a, ref Vector2 b, out Vector2 result) - { - result = new Vector2(a.X - b.X, a.Y - b.Y); - } - - #endregion - - #region Multiply - - /// - /// Multiplies a vector by a scalar. - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static Vector2 Multiply(Vector2 vector, float scale) - { - Multiply(ref vector, scale, out vector); - return vector; - } - - /// - /// Multiplies a vector by a scalar. - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static void Multiply(ref Vector2 vector, float scale, out Vector2 result) - { - result = new Vector2(vector.X * scale, vector.Y * scale); - } - - /// - /// Multiplies a vector by the components a vector (scale). - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static Vector2 Multiply(Vector2 vector, Vector2 scale) - { - Multiply(ref vector, ref scale, out vector); - return vector; - } - - /// - /// Multiplies a vector by the components of a vector (scale). - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static void Multiply(ref Vector2 vector, ref Vector2 scale, out Vector2 result) - { - result = new Vector2(vector.X * scale.X, vector.Y * scale.Y); - } - - #endregion - - #region Divide - - /// - /// Divides a vector by a scalar. - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static Vector2 Divide(Vector2 vector, float scale) - { - Divide(ref vector, scale, out vector); - return vector; - } - - /// - /// Divides a vector by a scalar. - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static void Divide(ref Vector2 vector, float scale, out Vector2 result) - { - Multiply(ref vector, 1 / scale, out result); - } - - /// - /// Divides a vector by the components of a vector (scale). - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static Vector2 Divide(Vector2 vector, Vector2 scale) - { - Divide(ref vector, ref scale, out vector); - return vector; - } - - /// - /// Divide a vector by the components of a vector (scale). - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static void Divide(ref Vector2 vector, ref Vector2 scale, out Vector2 result) - { - result = new Vector2(vector.X / scale.X, vector.Y / scale.Y); - } - - #endregion - - #region ComponentMin - - /// - /// Calculate the component-wise minimum of two vectors - /// - /// First operand - /// Second operand - /// The component-wise minimum - public static Vector2 ComponentMin(Vector2 a, Vector2 b) - { - a.X = a.X < b.X ? a.X : b.X; - a.Y = a.Y < b.Y ? a.Y : b.Y; - return a; - } - - /// - /// Calculate the component-wise minimum of two vectors - /// - /// First operand - /// Second operand - /// The component-wise minimum - public static void ComponentMin(ref Vector2 a, ref Vector2 b, out Vector2 result) - { - result.X = a.X < b.X ? 
a.X : b.X; - result.Y = a.Y < b.Y ? a.Y : b.Y; - } - - #endregion - - #region ComponentMax - - /// - /// Calculate the component-wise maximum of two vectors - /// - /// First operand - /// Second operand - /// The component-wise maximum - public static Vector2 ComponentMax(Vector2 a, Vector2 b) - { - a.X = a.X > b.X ? a.X : b.X; - a.Y = a.Y > b.Y ? a.Y : b.Y; - return a; - } - - /// - /// Calculate the component-wise maximum of two vectors - /// - /// First operand - /// Second operand - /// The component-wise maximum - public static void ComponentMax(ref Vector2 a, ref Vector2 b, out Vector2 result) - { - result.X = a.X > b.X ? a.X : b.X; - result.Y = a.Y > b.Y ? a.Y : b.Y; - } - - #endregion - - #region Min - - /// - /// Returns the Vector3 with the minimum magnitude - /// - /// Left operand - /// Right operand - /// The minimum Vector3 - public static Vector2 Min(Vector2 left, Vector2 right) - { - return left.LengthSquared < right.LengthSquared ? left : right; - } - - #endregion - - #region Max - - /// - /// Returns the Vector3 with the minimum magnitude - /// - /// Left operand - /// Right operand - /// The minimum Vector3 - public static Vector2 Max(Vector2 left, Vector2 right) - { - return left.LengthSquared >= right.LengthSquared ? left : right; - } - - #endregion - - #region Clamp - - /// - /// Clamp a vector to the given minimum and maximum vectors - /// - /// Input vector - /// Minimum vector - /// Maximum vector - /// The clamped vector - public static Vector2 Clamp(Vector2 vec, Vector2 min, Vector2 max) - { - vec.X = vec.X < min.X ? min.X : vec.X > max.X ? max.X : vec.X; - vec.Y = vec.Y < min.Y ? min.Y : vec.Y > max.Y ? max.Y : vec.Y; - return vec; - } - - /// - /// Clamp a vector to the given minimum and maximum vectors - /// - /// Input vector - /// Minimum vector - /// Maximum vector - /// The clamped vector - public static void Clamp(ref Vector2 vec, ref Vector2 min, ref Vector2 max, out Vector2 result) - { - result.X = vec.X < min.X ? min.X : vec.X > max.X ? max.X : vec.X; - result.Y = vec.Y < min.Y ? min.Y : vec.Y > max.Y ? 
max.Y : vec.Y; - } - - #endregion - - #region Normalize - - /// - /// Scale a vector to unit length - /// - /// The input vector - /// The normalized vector - public static Vector2 Normalize(Vector2 vec) - { - float scale = 1.0f / vec.Length; - vec.X *= scale; - vec.Y *= scale; - return vec; - } - - /// - /// Scale a vector to unit length - /// - /// The input vector - /// The normalized vector - public static void Normalize(ref Vector2 vec, out Vector2 result) - { - float scale = 1.0f / vec.Length; - result.X = vec.X * scale; - result.Y = vec.Y * scale; - } - - #endregion - - #region NormalizeFast - - /// - /// Scale a vector to approximately unit length - /// - /// The input vector - /// The normalized vector - public static Vector2 NormalizeFast(Vector2 vec) - { - float scale = MathHelper.InverseSqrtFast(vec.X * vec.X + vec.Y * vec.Y); - vec.X *= scale; - vec.Y *= scale; - return vec; - } - - /// - /// Scale a vector to approximately unit length - /// - /// The input vector - /// The normalized vector - public static void NormalizeFast(ref Vector2 vec, out Vector2 result) - { - float scale = MathHelper.InverseSqrtFast(vec.X * vec.X + vec.Y * vec.Y); - result.X = vec.X * scale; - result.Y = vec.Y * scale; - } - - #endregion - - #region Dot - - /// - /// Calculate the dot (scalar) product of two vectors - /// - /// First operand - /// Second operand - /// The dot product of the two inputs - public static float Dot(Vector2 left, Vector2 right) - { - return left.X * right.X + left.Y * right.Y; - } - - /// - /// Calculate the dot (scalar) product of two vectors - /// - /// First operand - /// Second operand - /// The dot product of the two inputs - public static void Dot(ref Vector2 left, ref Vector2 right, out float result) - { - result = left.X * right.X + left.Y * right.Y; - } - - #endregion - - #region PerpDot - - /// - /// Calculate the perpendicular dot (scalar) product of two vectors - /// - /// First operand - /// Second operand - /// The perpendicular dot product of the two inputs - public static float PerpDot(Vector2 left, Vector2 right) - { - return left.X * right.Y - left.Y * right.X; - } - - /// - /// Calculate the perpendicular dot (scalar) product of two vectors - /// - /// First operand - /// Second operand - /// The perpendicular dot product of the two inputs - public static void PerpDot(ref Vector2 left, ref Vector2 right, out float result) - { - result = left.X * right.Y - left.Y * right.X; - } - - #endregion - - #region Lerp - - /// - /// Returns a new Vector that is the linear blend of the 2 given Vectors - /// - /// First input vector - /// Second input vector - /// The blend factor. a when blend=0, b when blend=1. - /// a when blend=0, b when blend=1, and a linear combination otherwise - public static Vector2 Lerp(Vector2 a, Vector2 b, float blend) - { - a.X = blend * (b.X - a.X) + a.X; - a.Y = blend * (b.Y - a.Y) + a.Y; - return a; - } - - /// - /// Returns a new Vector that is the linear blend of the 2 given Vectors - /// - /// First input vector - /// Second input vector - /// The blend factor. a when blend=0, b when blend=1. 
- /// a when blend=0, b when blend=1, and a linear combination otherwise - public static void Lerp(ref Vector2 a, ref Vector2 b, float blend, out Vector2 result) - { - result.X = blend * (b.X - a.X) + a.X; - result.Y = blend * (b.Y - a.Y) + a.Y; - } - - #endregion - - #region Barycentric - - /// - /// Interpolate 3 Vectors using Barycentric coordinates - /// - /// First input Vector - /// Second input Vector - /// Third input Vector - /// First Barycentric Coordinate - /// Second Barycentric Coordinate - /// a when u=v=0, b when u=1,v=0, c when u=0,v=1, and a linear combination of a,b,c otherwise - public static Vector2 BaryCentric(Vector2 a, Vector2 b, Vector2 c, float u, float v) - { - return a + u * (b - a) + v * (c - a); - } - - /// Interpolate 3 Vectors using Barycentric coordinates - /// First input Vector. - /// Second input Vector. - /// Third input Vector. - /// First Barycentric Coordinate. - /// Second Barycentric Coordinate. - /// Output Vector. a when u=v=0, b when u=1,v=0, c when u=0,v=1, and a linear combination of a,b,c otherwise - public static void BaryCentric(ref Vector2 a, ref Vector2 b, ref Vector2 c, float u, float v, out Vector2 result) - { - result = a; // copy - - Vector2 temp = b; // copy - Subtract(ref temp, ref a, out temp); - Multiply(ref temp, u, out temp); - Add(ref result, ref temp, out result); - - temp = c; // copy - Subtract(ref temp, ref a, out temp); - Multiply(ref temp, v, out temp); - Add(ref result, ref temp, out result); - } - - #endregion - - #region Transform - - /// - /// Transforms a vector by a quaternion rotation. - /// - /// The vector to transform. - /// The quaternion to rotate the vector by. - /// The result of the operation. - public static Vector2 Transform(Vector2 vec, Quaternion quat) - { - Vector2 result; - Transform(ref vec, ref quat, out result); - return result; - } - - /// - /// Transforms a vector by a quaternion rotation. - /// - /// The vector to transform. - /// The quaternion to rotate the vector by. - /// The result of the operation. - public static void Transform(ref Vector2 vec, ref Quaternion quat, out Vector2 result) - { - Quaternion v = new Quaternion(vec.X, vec.Y, 0, 0), i, t; - Quaternion.Invert(ref quat, out i); - Quaternion.Multiply(ref quat, ref v, out t); - Quaternion.Multiply(ref t, ref i, out v); - - result = new Vector2(v.X, v.Y); - } - - #endregion - - #endregion - - #region Swizzle - - /// - /// Gets or sets an OpenTK.Vector2 with the Y and X components of this instance. - /// - [XmlIgnore] - public Vector2 Yx { get { return new Vector2(Y, X); } set { Y = value.X; X = value.Y; } } - - #endregion - - #region Operators - - /// - /// Adds the specified instances. - /// - /// Left operand. - /// Right operand. - /// Result of addition. - public static Vector2 operator +(Vector2 left, Vector2 right) - { - left.X += right.X; - left.Y += right.Y; - return left; - } - - /// - /// Subtracts the specified instances. - /// - /// Left operand. - /// Right operand. - /// Result of subtraction. - public static Vector2 operator -(Vector2 left, Vector2 right) - { - left.X -= right.X; - left.Y -= right.Y; - return left; - } - - /// - /// Negates the specified instance. - /// - /// Operand. - /// Result of negation. - public static Vector2 operator -(Vector2 vec) - { - vec.X = -vec.X; - vec.Y = -vec.Y; - return vec; - } - - /// - /// Multiplies the specified instance by a scalar. - /// - /// Left operand. - /// Right operand. - /// Result of multiplication. 
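        // [annotation, not in the original OpenTK source] Vector2 is a value type, so the 'vec' parameter of
        // the operators below is a local copy: the in-place *= and /= mutations affect only that copy, which
        // is then returned, leaving the caller's arguments unchanged.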
- public static Vector2 operator *(Vector2 vec, float scale) - { - vec.X *= scale; - vec.Y *= scale; - return vec; - } - - /// - /// Multiplies the specified instance by a scalar. - /// - /// Left operand. - /// Right operand. - /// Result of multiplication. - public static Vector2 operator *(float scale, Vector2 vec) - { - vec.X *= scale; - vec.Y *= scale; - return vec; - } - - /// - /// Component-wise multiplication between the specified instance by a scale vector. - /// - /// Left operand. - /// Right operand. - /// Result of multiplication. - public static Vector2 operator *(Vector2 vec, Vector2 scale) - { - vec.X *= scale.X; - vec.Y *= scale.Y; - return vec; - } - - /// - /// Divides the specified instance by a scalar. - /// - /// Left operand - /// Right operand - /// Result of the division. - public static Vector2 operator /(Vector2 vec, float scale) - { - float mult = 1.0f / scale; - vec.X *= mult; - vec.Y *= mult; - return vec; - } - - /// - /// Compares the specified instances for equality. - /// - /// Left operand. - /// Right operand. - /// True if both instances are equal; false otherwise. - public static bool operator ==(Vector2 left, Vector2 right) - { - return left.Equals(right); - } - - /// - /// Compares the specified instances for inequality. - /// - /// Left operand. - /// Right operand. - /// True if both instances are not equal; false otherwise. - public static bool operator !=(Vector2 left, Vector2 right) - { - return !left.Equals(right); - } - - #endregion - - #region Overrides - - #region public override string ToString() - - private static string listSeparator = System.Globalization.CultureInfo.CurrentCulture.TextInfo.ListSeparator; - /// - /// Returns a System.String that represents the current Vector2. - /// - /// - public override string ToString() - { - return String.Format("({0}{2} {1})", X, Y, listSeparator); - } - - #endregion - - #region public override int GetHashCode() - - /// - /// Returns the hashcode for this instance. - /// - /// A System.Int32 containing the unique hashcode for this instance. - public override int GetHashCode() - { - return X.GetHashCode() ^ Y.GetHashCode(); - } - - #endregion - - #region public override bool Equals(object obj) - - /// - /// Indicates whether this instance and a specified object are equal. - /// - /// The object to compare to. - /// True if the instances are equal; false otherwise. - public override bool Equals(object obj) - { - if (!(obj is Vector2)) - return false; - - return this.Equals((Vector2)obj); - } - - #endregion - - #endregion - - #endregion - - #region IEquatable Members - - /// Indicates whether the current vector is equal to another vector. - /// A vector to compare with this vector. - /// true if the current vector is equal to the vector parameter; otherwise, false. - public bool Equals(Vector2 other) - { - return - X == other.X && - Y == other.Y; - } - - #endregion - } -} diff --git a/OpenTK/Math/Vector2d.cs b/OpenTK/Math/Vector2d.cs deleted file mode 100644 index 8c3846cf..00000000 --- a/OpenTK/Math/Vector2d.cs +++ /dev/null @@ -1,1058 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. 
- -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -*/ -#endregion - -using System; -using System.Runtime.InteropServices; -using System.Xml.Serialization; - -namespace OpenTK -{ - /// Represents a 2D vector using two double-precision floating-point numbers. - [Serializable] - [StructLayout(LayoutKind.Sequential)] - public struct Vector2d : IEquatable - { - #region Fields - - /// The X coordinate of this instance. - public double X; - - /// The Y coordinate of this instance. - public double Y; - - /// - /// Defines a unit-length Vector2d that points towards the X-axis. - /// - public static readonly Vector2d UnitX = new Vector2d(1, 0); - - /// - /// Defines a unit-length Vector2d that points towards the Y-axis. - /// - public static readonly Vector2d UnitY = new Vector2d(0, 1); - - /// - /// Defines a zero-length Vector2d. - /// - public static readonly Vector2d Zero = new Vector2d(0, 0); - - /// - /// Defines an instance with all components set to 1. - /// - public static readonly Vector2d One = new Vector2d(1, 1); - - /// - /// Defines the size of the Vector2d struct in bytes. - /// - public static readonly int SizeInBytes = Marshal.SizeOf(new Vector2d()); - - #endregion - - #region Constructors - - /// - /// Constructs a new instance. - /// - /// The value that will initialize this instance. - public Vector2d(double value) - { - X = value; - Y = value; - } - - /// Constructs left vector with the given coordinates. - /// The X coordinate. - /// The Y coordinate. - public Vector2d(double x, double y) - { - this.X = x; - this.Y = y; - } - - #endregion - - #region Public Members - - /// - /// Gets or sets the value at the index of the Vector. - /// - public double this[int index] { - get{ - if(index == 0) return X; - else if(index == 1) return Y; - throw new IndexOutOfRangeException("You tried to access this vector at index: " + index); - } set{ - if(index == 0) X = value; - else if(index == 1) Y = value; - else throw new IndexOutOfRangeException("You tried to set this vector at index: " + index); - } - } - - #region Instance - - #region public void Add() - - /// Add the Vector passed as parameter to this instance. - /// Right operand. This parameter is only read from. - [CLSCompliant(false)] - [Obsolete("Use static Add() method instead.")] - public void Add(Vector2d right) - { - this.X += right.X; - this.Y += right.Y; - } - - /// Add the Vector passed as parameter to this instance. - /// Right operand. This parameter is only read from. 
- [CLSCompliant(false)] - [Obsolete("Use static Add() method instead.")] - public void Add(ref Vector2d right) - { - this.X += right.X; - this.Y += right.Y; - } - - #endregion public void Add() - - #region public void Sub() - - /// Subtract the Vector passed as parameter from this instance. - /// Right operand. This parameter is only read from. - [CLSCompliant(false)] - [Obsolete("Use static Subtract() method instead.")] - public void Sub(Vector2d right) - { - this.X -= right.X; - this.Y -= right.Y; - } - - /// Subtract the Vector passed as parameter from this instance. - /// Right operand. This parameter is only read from. - [CLSCompliant(false)] - [Obsolete("Use static Subtract() method instead.")] - public void Sub(ref Vector2d right) - { - this.X -= right.X; - this.Y -= right.Y; - } - - #endregion public void Sub() - - #region public void Mult() - - /// Multiply this instance by a scalar. - /// Scalar operand. - [Obsolete("Use static Multiply() method instead.")] - public void Mult(double f) - { - this.X *= f; - this.Y *= f; - } - - #endregion public void Mult() - - #region public void Div() - - /// Divide this instance by a scalar. - /// Scalar operand. - [Obsolete("Use static Divide() method instead.")] - public void Div(double f) - { - double mult = 1.0 / f; - this.X *= mult; - this.Y *= mult; - } - - #endregion public void Div() - - #region public double Length - - /// - /// Gets the length (magnitude) of the vector. - /// - /// - public double Length - { - get - { - return System.Math.Sqrt(X * X + Y * Y); - } - } - - #endregion - - #region public double LengthSquared - - /// - /// Gets the square of the vector length (magnitude). - /// - /// - /// This property avoids the costly square root operation required by the Length property. This makes it more suitable - /// for comparisons. - /// - /// - public double LengthSquared - { - get - { - return X * X + Y * Y; - } - } - - #endregion - - #region public Vector2d PerpendicularRight - - /// - /// Gets the perpendicular vector on the right side of this vector. - /// - public Vector2d PerpendicularRight - { - get - { - return new Vector2d(Y, -X); - } - } - - #endregion - - #region public Vector2d PerpendicularLeft - - /// - /// Gets the perpendicular vector on the left side of this vector. - /// - public Vector2d PerpendicularLeft - { - get - { - return new Vector2d(-Y, X); - } - } - - #endregion - - /// - /// Returns a copy of the Vector2d scaled to unit length. - /// - /// - public Vector2d Normalized() - { - Vector2d v = this; - v.Normalize(); - return v; - } - - #region public void Normalize() - - /// - /// Scales the Vector2 to unit length. - /// - public void Normalize() - { - double scale = 1.0 / Length; - X *= scale; - Y *= scale; - } - - #endregion - - #region public void Scale() - - /// - /// Scales the current Vector2 by the given amounts. - /// - /// The scale of the X component. - /// The scale of the Y component. - [Obsolete("Use static Multiply() method instead.")] - public void Scale(double sx, double sy) - { - X *= sx; - Y *= sy; - } - - /// Scales this instance by the given parameter. - /// The scaling of the individual components. - [CLSCompliant(false)] - [Obsolete("Use static Multiply() method instead.")] - public void Scale(Vector2d scale) - { - this.X *= scale.X; - this.Y *= scale.Y; - } - - /// Scales this instance by the given parameter. - /// The scaling of the individual components. 
- [CLSCompliant(false)] - [Obsolete("Use static Multiply() method instead.")] - public void Scale(ref Vector2d scale) - { - this.X *= scale.X; - this.Y *= scale.Y; - } - - #endregion public void Scale() - - #endregion - - #region Static - - #region Obsolete - - #region Sub - - /// - /// Subtract one Vector from another - /// - /// First operand - /// Second operand - /// Result of subtraction - [Obsolete("Use static Subtract() method instead.")] - public static Vector2d Sub(Vector2d a, Vector2d b) - { - a.X -= b.X; - a.Y -= b.Y; - return a; - } - - /// - /// Subtract one Vector from another - /// - /// First operand - /// Second operand - /// Result of subtraction - [Obsolete("Use static Subtract() method instead.")] - public static void Sub(ref Vector2d a, ref Vector2d b, out Vector2d result) - { - result.X = a.X - b.X; - result.Y = a.Y - b.Y; - } - - #endregion - - #region Mult - - /// - /// Multiply a vector and a scalar - /// - /// Vector operand - /// Scalar operand - /// Result of the multiplication - [Obsolete("Use static Multiply() method instead.")] - public static Vector2d Mult(Vector2d a, double d) - { - a.X *= d; - a.Y *= d; - return a; - } - - /// - /// Multiply a vector and a scalar - /// - /// Vector operand - /// Scalar operand - /// Result of the multiplication - [Obsolete("Use static Multiply() method instead.")] - public static void Mult(ref Vector2d a, double d, out Vector2d result) - { - result.X = a.X * d; - result.Y = a.Y * d; - } - - #endregion - - #region Div - - /// - /// Divide a vector by a scalar - /// - /// Vector operand - /// Scalar operand - /// Result of the division - [Obsolete("Use static Divide() method instead.")] - public static Vector2d Div(Vector2d a, double d) - { - double mult = 1.0 / d; - a.X *= mult; - a.Y *= mult; - return a; - } - - /// - /// Divide a vector by a scalar - /// - /// Vector operand - /// Scalar operand - /// Result of the division - [Obsolete("Use static Divide() method instead.")] - public static void Div(ref Vector2d a, double d, out Vector2d result) - { - double mult = 1.0 / d; - result.X = a.X * mult; - result.Y = a.Y * mult; - } - - #endregion - - #endregion - - #region Add - - /// - /// Adds two vectors. - /// - /// Left operand. - /// Right operand. - /// Result of operation. - public static Vector2d Add(Vector2d a, Vector2d b) - { - Add(ref a, ref b, out a); - return a; - } - - /// - /// Adds two vectors. - /// - /// Left operand. - /// Right operand. - /// Result of operation. - public static void Add(ref Vector2d a, ref Vector2d b, out Vector2d result) - { - result = new Vector2d(a.X + b.X, a.Y + b.Y); - } - - #endregion - - #region Subtract - - /// - /// Subtract one Vector from another - /// - /// First operand - /// Second operand - /// Result of subtraction - public static Vector2d Subtract(Vector2d a, Vector2d b) - { - Subtract(ref a, ref b, out a); - return a; - } - - /// - /// Subtract one Vector from another - /// - /// First operand - /// Second operand - /// Result of subtraction - public static void Subtract(ref Vector2d a, ref Vector2d b, out Vector2d result) - { - result = new Vector2d(a.X - b.X, a.Y - b.Y); - } - - #endregion - - #region Multiply - - /// - /// Multiplies a vector by a scalar. - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static Vector2d Multiply(Vector2d vector, double scale) - { - Multiply(ref vector, scale, out vector); - return vector; - } - - /// - /// Multiplies a vector by a scalar. - /// - /// Left operand. - /// Right operand. 
- /// Result of the operation. - public static void Multiply(ref Vector2d vector, double scale, out Vector2d result) - { - result = new Vector2d(vector.X * scale, vector.Y * scale); - } - - /// - /// Multiplies a vector by the components a vector (scale). - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static Vector2d Multiply(Vector2d vector, Vector2d scale) - { - Multiply(ref vector, ref scale, out vector); - return vector; - } - - /// - /// Multiplies a vector by the components of a vector (scale). - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static void Multiply(ref Vector2d vector, ref Vector2d scale, out Vector2d result) - { - result = new Vector2d(vector.X * scale.X, vector.Y * scale.Y); - } - - #endregion - - #region Divide - - /// - /// Divides a vector by a scalar. - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static Vector2d Divide(Vector2d vector, double scale) - { - Divide(ref vector, scale, out vector); - return vector; - } - - /// - /// Divides a vector by a scalar. - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static void Divide(ref Vector2d vector, double scale, out Vector2d result) - { - Multiply(ref vector, 1 / scale, out result); - } - - /// - /// Divides a vector by the components of a vector (scale). - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static Vector2d Divide(Vector2d vector, Vector2d scale) - { - Divide(ref vector, ref scale, out vector); - return vector; - } - - /// - /// Divide a vector by the components of a vector (scale). - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static void Divide(ref Vector2d vector, ref Vector2d scale, out Vector2d result) - { - result = new Vector2d(vector.X / scale.X, vector.Y / scale.Y); - } - - #endregion - - #region Min - - /// - /// Calculate the component-wise minimum of two vectors - /// - /// First operand - /// Second operand - /// The component-wise minimum - public static Vector2d Min(Vector2d a, Vector2d b) - { - a.X = a.X < b.X ? a.X : b.X; - a.Y = a.Y < b.Y ? a.Y : b.Y; - return a; - } - - /// - /// Calculate the component-wise minimum of two vectors - /// - /// First operand - /// Second operand - /// The component-wise minimum - public static void Min(ref Vector2d a, ref Vector2d b, out Vector2d result) - { - result.X = a.X < b.X ? a.X : b.X; - result.Y = a.Y < b.Y ? a.Y : b.Y; - } - - #endregion - - #region Max - - /// - /// Calculate the component-wise maximum of two vectors - /// - /// First operand - /// Second operand - /// The component-wise maximum - public static Vector2d Max(Vector2d a, Vector2d b) - { - a.X = a.X > b.X ? a.X : b.X; - a.Y = a.Y > b.Y ? a.Y : b.Y; - return a; - } - - /// - /// Calculate the component-wise maximum of two vectors - /// - /// First operand - /// Second operand - /// The component-wise maximum - public static void Max(ref Vector2d a, ref Vector2d b, out Vector2d result) - { - result.X = a.X > b.X ? a.X : b.X; - result.Y = a.Y > b.Y ? a.Y : b.Y; - } - - #endregion - - #region Clamp - - /// - /// Clamp a vector to the given minimum and maximum vectors - /// - /// Input vector - /// Minimum vector - /// Maximum vector - /// The clamped vector - public static Vector2d Clamp(Vector2d vec, Vector2d min, Vector2d max) - { - vec.X = vec.X < min.X ? min.X : vec.X > max.X ? max.X : vec.X; - vec.Y = vec.Y < min.Y ? min.Y : vec.Y > max.Y ? 
max.Y : vec.Y; - return vec; - } - - /// - /// Clamp a vector to the given minimum and maximum vectors - /// - /// Input vector - /// Minimum vector - /// Maximum vector - /// The clamped vector - public static void Clamp(ref Vector2d vec, ref Vector2d min, ref Vector2d max, out Vector2d result) - { - result.X = vec.X < min.X ? min.X : vec.X > max.X ? max.X : vec.X; - result.Y = vec.Y < min.Y ? min.Y : vec.Y > max.Y ? max.Y : vec.Y; - } - - #endregion - - #region Normalize - - /// - /// Scale a vector to unit length - /// - /// The input vector - /// The normalized vector - public static Vector2d Normalize(Vector2d vec) - { - double scale = 1.0 / vec.Length; - vec.X *= scale; - vec.Y *= scale; - return vec; - } - - /// - /// Scale a vector to unit length - /// - /// The input vector - /// The normalized vector - public static void Normalize(ref Vector2d vec, out Vector2d result) - { - double scale = 1.0 / vec.Length; - result.X = vec.X * scale; - result.Y = vec.Y * scale; - } - - #endregion - - #region NormalizeFast - - /// - /// Scale a vector to approximately unit length - /// - /// The input vector - /// The normalized vector - public static Vector2d NormalizeFast(Vector2d vec) - { - double scale = MathHelper.InverseSqrtFast(vec.X * vec.X + vec.Y * vec.Y); - vec.X *= scale; - vec.Y *= scale; - return vec; - } - - /// - /// Scale a vector to approximately unit length - /// - /// The input vector - /// The normalized vector - public static void NormalizeFast(ref Vector2d vec, out Vector2d result) - { - double scale = MathHelper.InverseSqrtFast(vec.X * vec.X + vec.Y * vec.Y); - result.X = vec.X * scale; - result.Y = vec.Y * scale; - } - - #endregion - - #region Dot - - /// - /// Calculate the dot (scalar) product of two vectors - /// - /// First operand - /// Second operand - /// The dot product of the two inputs - public static double Dot(Vector2d left, Vector2d right) - { - return left.X * right.X + left.Y * right.Y; - } - - /// - /// Calculate the dot (scalar) product of two vectors - /// - /// First operand - /// Second operand - /// The dot product of the two inputs - public static void Dot(ref Vector2d left, ref Vector2d right, out double result) - { - result = left.X * right.X + left.Y * right.Y; - } - - #endregion - - #region Lerp - - /// - /// Returns a new Vector that is the linear blend of the 2 given Vectors - /// - /// First input vector - /// Second input vector - /// The blend factor. a when blend=0, b when blend=1. - /// a when blend=0, b when blend=1, and a linear combination otherwise - public static Vector2d Lerp(Vector2d a, Vector2d b, double blend) - { - a.X = blend * (b.X - a.X) + a.X; - a.Y = blend * (b.Y - a.Y) + a.Y; - return a; - } - - /// - /// Returns a new Vector that is the linear blend of the 2 given Vectors - /// - /// First input vector - /// Second input vector - /// The blend factor. a when blend=0, b when blend=1. 
- /// a when blend=0, b when blend=1, and a linear combination otherwise - public static void Lerp(ref Vector2d a, ref Vector2d b, double blend, out Vector2d result) - { - result.X = blend * (b.X - a.X) + a.X; - result.Y = blend * (b.Y - a.Y) + a.Y; - } - - #endregion - - #region Barycentric - - /// - /// Interpolate 3 Vectors using Barycentric coordinates - /// - /// First input Vector - /// Second input Vector - /// Third input Vector - /// First Barycentric Coordinate - /// Second Barycentric Coordinate - /// a when u=v=0, b when u=1,v=0, c when u=0,v=1, and a linear combination of a,b,c otherwise - public static Vector2d BaryCentric(Vector2d a, Vector2d b, Vector2d c, double u, double v) - { - return a + u * (b - a) + v * (c - a); - } - - /// Interpolate 3 Vectors using Barycentric coordinates - /// First input Vector. - /// Second input Vector. - /// Third input Vector. - /// First Barycentric Coordinate. - /// Second Barycentric Coordinate. - /// Output Vector. a when u=v=0, b when u=1,v=0, c when u=0,v=1, and a linear combination of a,b,c otherwise - public static void BaryCentric(ref Vector2d a, ref Vector2d b, ref Vector2d c, double u, double v, out Vector2d result) - { - result = a; // copy - - Vector2d temp = b; // copy - Subtract(ref temp, ref a, out temp); - Multiply(ref temp, u, out temp); - Add(ref result, ref temp, out result); - - temp = c; // copy - Subtract(ref temp, ref a, out temp); - Multiply(ref temp, v, out temp); - Add(ref result, ref temp, out result); - } - - #endregion - - #region Transform - - /// - /// Transforms a vector by a quaternion rotation. - /// - /// The vector to transform. - /// The quaternion to rotate the vector by. - /// The result of the operation. - public static Vector2d Transform(Vector2d vec, Quaterniond quat) - { - Vector2d result; - Transform(ref vec, ref quat, out result); - return result; - } - - /// - /// Transforms a vector by a quaternion rotation. - /// - /// The vector to transform. - /// The quaternion to rotate the vector by. - /// The result of the operation. - public static void Transform(ref Vector2d vec, ref Quaterniond quat, out Vector2d result) - { - Quaterniond v = new Quaterniond(vec.X, vec.Y, 0, 0), i, t; - Quaterniond.Invert(ref quat, out i); - Quaterniond.Multiply(ref quat, ref v, out t); - Quaterniond.Multiply(ref t, ref i, out v); - - result = new Vector2d(v.X, v.Y); - } - - #endregion - - #endregion - - #region Swizzle - - /// - /// Gets or sets an OpenTK.Vector2d with the Y and X components of this instance. - /// - [XmlIgnore] - public Vector2d Yx { get { return new Vector2d(Y, X); } set { Y = value.X; X = value.Y; } } - - #endregion - - #region Operators - - /// - /// Adds two instances. - /// - /// The left instance. - /// The right instance. - /// The result of the operation. - public static Vector2d operator +(Vector2d left, Vector2d right) - { - left.X += right.X; - left.Y += right.Y; - return left; - } - - /// - /// Subtracts two instances. - /// - /// The left instance. - /// The right instance. - /// The result of the operation. - public static Vector2d operator -(Vector2d left, Vector2d right) - { - left.X -= right.X; - left.Y -= right.Y; - return left; - } - - /// - /// Negates an instance. - /// - /// The instance. - /// The result of the operation. - public static Vector2d operator -(Vector2d vec) - { - vec.X = -vec.X; - vec.Y = -vec.Y; - return vec; - } - - /// - /// Multiplies an instance by a scalar. - /// - /// The instance. - /// The scalar. - /// The result of the operation. 
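// Illustrative sketch only: the BaryCentric helpers above reduce to a + u * (b - a) + v * (c - a),
// so u = v = 0 returns a, u = 1 with v = 0 returns b, and u = 0 with v = 1 returns c. The values
// below are made up for the example and assume the Vector2d type defined in this file.
Vector2d a = new Vector2d(0, 0), b = new Vector2d(1, 0), c = new Vector2d(0, 1);
Vector2d p = Vector2d.BaryCentric(a, b, c, 0.25, 0.5);   // (0.25, 0.5)
Vector2d atB = Vector2d.BaryCentric(a, b, c, 1.0, 0.0);  // equals b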
- public static Vector2d operator *(Vector2d vec, double f) - { - vec.X *= f; - vec.Y *= f; - return vec; - } - - /// - /// Multiply an instance by a scalar. - /// - /// The scalar. - /// The instance. - /// The result of the operation. - public static Vector2d operator *(double f, Vector2d vec) - { - vec.X *= f; - vec.Y *= f; - return vec; - } - - /// - /// Component-wise multiplication between the specified instance by a scale vector. - /// - /// Left operand. - /// Right operand. - /// Result of multiplication. - public static Vector2d operator *(Vector2d vec, Vector2d scale) - { - vec.X *= scale.X; - vec.Y *= scale.Y; - return vec; - } - - /// - /// Divides an instance by a scalar. - /// - /// The instance. - /// The scalar. - /// The result of the operation. - public static Vector2d operator /(Vector2d vec, double f) - { - double mult = 1.0 / f; - vec.X *= mult; - vec.Y *= mult; - return vec; - } - - /// - /// Compares two instances for equality. - /// - /// The left instance. - /// The right instance. - /// True, if both instances are equal; false otherwise. - public static bool operator ==(Vector2d left, Vector2d right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for ienquality. - /// - /// The left instance. - /// The right instance. - /// True, if the instances are not equal; false otherwise. - public static bool operator !=(Vector2d left, Vector2d right) - { - return !left.Equals(right); - } - - /// Converts OpenTK.Vector2 to OpenTK.Vector2d. - /// The Vector2 to convert. - /// The resulting Vector2d. - public static explicit operator Vector2d(Vector2 v2) - { - return new Vector2d(v2.X, v2.Y); - } - - /// Converts OpenTK.Vector2d to OpenTK.Vector2. - /// The Vector2d to convert. - /// The resulting Vector2. - public static explicit operator Vector2(Vector2d v2d) - { - return new Vector2((float)v2d.X, (float)v2d.Y); - } - - #endregion - - #region Overrides - - #region public override string ToString() - - private static string listSeparator = System.Globalization.CultureInfo.CurrentCulture.TextInfo.ListSeparator; - /// - /// Returns a System.String that represents the current instance. - /// - /// - public override string ToString() - { - return String.Format("({0}{2} {1})", X, Y, listSeparator); - } - - #endregion - - #region public override int GetHashCode() - - /// - /// Returns the hashcode for this instance. - /// - /// A System.Int32 containing the unique hashcode for this instance. - public override int GetHashCode() - { - return X.GetHashCode() ^ Y.GetHashCode(); - } - - #endregion - - #region public override bool Equals(object obj) - - /// - /// Indicates whether this instance and a specified object are equal. - /// - /// The object to compare to. - /// True if the instances are equal; false otherwise. - public override bool Equals(object obj) - { - if (!(obj is Vector2d)) - return false; - - return this.Equals((Vector2d)obj); - } - - #endregion - - #endregion - - #endregion - - #region IEquatable Members - - /// Indicates whether the current vector is equal to another vector. - /// A vector to compare with this vector. - /// true if the current vector is equal to the vector parameter; otherwise, false. 
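// Side note as a sketch: because GetHashCode() above XORs the component hashes and XOR is
// commutative, mirrored vectors collide in hash-based containers even though Equals() and
// the == operator still tell them apart. Example values are illustrative only.
Vector2d v1 = new Vector2d(1.0, 2.0);
Vector2d v2 = new Vector2d(2.0, 1.0);
bool sameHash = v1.GetHashCode() == v2.GetHashCode(); // true - same hash bucket
bool sameValue = v1 == v2;                            // false - still distinct values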
- public bool Equals(Vector2d other) - { - return - X == other.X && - Y == other.Y; - } - - #endregion - } -} \ No newline at end of file diff --git a/OpenTK/Math/Vector2h.cs b/OpenTK/Math/Vector2h.cs deleted file mode 100644 index 85ec2805..00000000 --- a/OpenTK/Math/Vector2h.cs +++ /dev/null @@ -1,370 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - */ -#endregion - -using System; -using System.IO; -using System.Runtime.InteropServices; -using System.Runtime.Serialization; -using System.Xml.Serialization; - -namespace OpenTK -{ - - /// 2-component Vector of the Half type. Occupies 4 Byte total. - [Serializable, StructLayout(LayoutKind.Sequential)] - public struct Vector2h : ISerializable, IEquatable - { - #region Fields - - /// The X component of the Half2. - public Half X; - - /// The Y component of the Half2. - public Half Y; - - #endregion - - #region Constructors - - /// - /// Constructs a new instance. - /// - /// The value that will initialize this instance. - public Vector2h(Half value) - { - X = value; - Y = value; - } - - /// - /// Constructs a new instance. - /// - /// The value that will initialize this instance. - public Vector2h(Single value) - { - X = new Half(value); - Y = new Half(value); - } - - /// - /// The new Half2 instance will avoid conversion and copy directly from the Half parameters. - /// - /// An Half instance of a 16-bit half-precision floating-point number. - /// An Half instance of a 16-bit half-precision floating-point number. - public Vector2h(Half x, Half y) - { - X = x; - Y = y; - } - - /// - /// The new Half2 instance will convert the 2 parameters into 16-bit half-precision floating-point. - /// - /// 32-bit single-precision floating-point number. - /// 32-bit single-precision floating-point number. - public Vector2h(Single x, Single y) - { - X = new Half(x); - Y = new Half(y); - } - - /// - /// The new Half2 instance will convert the 2 parameters into 16-bit half-precision floating-point. - /// - /// 32-bit single-precision floating-point number. - /// 32-bit single-precision floating-point number. - /// Enable checks that will throw if the conversion result is not meaningful. - public Vector2h(Single x, Single y, bool throwOnError) - { - X = new Half(x, throwOnError); - Y = new Half(y, throwOnError); - } - - /// - /// The new Half2 instance will convert the Vector2 into 16-bit half-precision floating-point. 
- /// - /// OpenTK.Vector2 - [CLSCompliant(false)] - public Vector2h(Vector2 v) - { - X = new Half(v.X); - Y = new Half(v.Y); - } - - /// - /// The new Half2 instance will convert the Vector2 into 16-bit half-precision floating-point. - /// - /// OpenTK.Vector2 - /// Enable checks that will throw if the conversion result is not meaningful. - [CLSCompliant(false)] - public Vector2h(Vector2 v, bool throwOnError) - { - X = new Half(v.X, throwOnError); - Y = new Half(v.Y, throwOnError); - } - - /// - /// The new Half2 instance will convert the Vector2 into 16-bit half-precision floating-point. - /// This is the fastest constructor. - /// - /// OpenTK.Vector2 - public Vector2h(ref Vector2 v) - { - X = new Half(v.X); - Y = new Half(v.Y); - } - - /// - /// The new Half2 instance will convert the Vector2 into 16-bit half-precision floating-point. - /// - /// OpenTK.Vector2 - /// Enable checks that will throw if the conversion result is not meaningful. - public Vector2h(ref Vector2 v, bool throwOnError) - { - X = new Half(v.X, throwOnError); - Y = new Half(v.Y, throwOnError); - } - - /// - /// The new Half2 instance will convert the Vector2d into 16-bit half-precision floating-point. - /// - /// OpenTK.Vector2d - [CLSCompliant(false)] - public Vector2h(Vector2d v) - { - X = new Half(v.X); - Y = new Half(v.Y); - } - - /// - /// The new Half2 instance will convert the Vector2d into 16-bit half-precision floating-point. - /// - /// OpenTK.Vector2d - /// Enable checks that will throw if the conversion result is not meaningful. - [CLSCompliant(false)] - public Vector2h(Vector2d v, bool throwOnError) - { - X = new Half(v.X, throwOnError); - Y = new Half(v.Y, throwOnError); - } - - /// - /// The new Half2 instance will convert the Vector2d into 16-bit half-precision floating-point. - /// This is the faster constructor. - /// - /// OpenTK.Vector2d - [CLSCompliant(false)] - public Vector2h(ref Vector2d v) - { - X = new Half(v.X); - Y = new Half(v.Y); - } - - /// - /// The new Half2 instance will convert the Vector2d into 16-bit half-precision floating-point. - /// - /// OpenTK.Vector2d - /// Enable checks that will throw if the conversion result is not meaningful. - [CLSCompliant(false)] - public Vector2h(ref Vector2d v, bool throwOnError) - { - X = new Half(v.X, throwOnError); - Y = new Half(v.Y, throwOnError); - } - - #endregion Constructors - - #region Swizzle - - /// - /// Gets or sets an OpenTK.Vector2h with the Y and X components of this instance. - /// - [XmlIgnore] - public Vector2h Yx { get { return new Vector2h(Y, X); } set { Y = value.X; X = value.Y; } } - - #endregion - - #region Half -> Single - - /// - /// Returns this Half2 instance's contents as Vector2. - /// - /// OpenTK.Vector2 - public Vector2 ToVector2() - { - return new Vector2(X, Y); - } - - /// - /// Returns this Half2 instance's contents as Vector2d. - /// - public Vector2d ToVector2d() - { - return new Vector2d(X, Y); - } - - #endregion Half -> Single - - #region Conversions - - /// Converts OpenTK.Vector2 to OpenTK.Half2. - /// The Vector2 to convert. - /// The resulting Half vector. - public static explicit operator Vector2h(Vector2 v) - { - return new Vector2h(v); - } - - /// Converts OpenTK.Vector2d to OpenTK.Half2. - /// The Vector2d to convert. - /// The resulting Half vector. - public static explicit operator Vector2h(Vector2d v) - { - return new Vector2h(v); - } - - /// Converts OpenTK.Half2 to OpenTK.Vector2. - /// The Half2 to convert. - /// The resulting Vector2. 
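// Sketch of a typical round trip, assuming the Vector2, Vector2h and Half types from this
// library: the conversion to two 16-bit halves (4 bytes total) is lossy, so the result is
// close to, but generally not equal to, the original single-precision input.
Vector2 original = new Vector2(0.1f, 12345.678f);
Vector2h packed = new Vector2h(original);   // float -> half conversion
Vector2 restored = packed.ToVector2();      // half -> float, with rounding error
byte[] bytes = Vector2h.GetBytes(packed);   // bytes.Length == Vector2h.SizeInBytes == 4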
- public static explicit operator Vector2(Vector2h h) - { - return new Vector2(h.X, h.Y); - } - - /// Converts OpenTK.Half2 to OpenTK.Vector2d. - /// The Half2 to convert. - /// The resulting Vector2d. - public static explicit operator Vector2d(Vector2h h) - { - return new Vector2d(h.X, h.Y); - } - - #endregion Conversions - - #region Constants - - /// The size in bytes for an instance of the Half2 struct is 4. - public static readonly int SizeInBytes = 4; - - #endregion Constants - - #region ISerializable - - /// Constructor used by ISerializable to deserialize the object. - /// - /// - public Vector2h(SerializationInfo info, StreamingContext context) - { - this.X = (Half)info.GetValue("X", typeof(Half)); - this.Y = (Half)info.GetValue("Y", typeof(Half)); - } - - /// Used by ISerialize to serialize the object. - /// - /// - public void GetObjectData(SerializationInfo info, StreamingContext context) - { - info.AddValue("X", this.X); - info.AddValue("Y", this.Y); - } - - #endregion ISerializable - - #region Binary dump - - /// Updates the X and Y components of this instance by reading from a Stream. - /// A BinaryReader instance associated with an open Stream. - public void FromBinaryStream(BinaryReader bin) - { - X.FromBinaryStream(bin); - Y.FromBinaryStream(bin); - } - - /// Writes the X and Y components of this instance into a Stream. - /// A BinaryWriter instance associated with an open Stream. - public void ToBinaryStream(BinaryWriter bin) - { - X.ToBinaryStream(bin); - Y.ToBinaryStream(bin); - } - - #endregion Binary dump - - #region IEquatable Members - - /// Returns a value indicating whether this instance is equal to a specified OpenTK.Half2 vector. - /// OpenTK.Half2 to compare to this instance.. - /// True, if other is equal to this instance; false otherwise. - public bool Equals(Vector2h other) - { - return (this.X.Equals(other.X) && this.Y.Equals(other.Y)); - } - - #endregion - - #region ToString() - - private static string listSeparator = System.Globalization.CultureInfo.CurrentCulture.TextInfo.ListSeparator; - /// Returns a string that contains this Half2's numbers in human-legible form. - public override string ToString() - { - return String.Format("({0}{2} {1})", X, Y, listSeparator); - } - - #endregion ToString() - - #region BitConverter - - /// Returns the Half2 as an array of bytes. - /// The Half2 to convert. - /// The input as byte array. - public static byte[] GetBytes(Vector2h h) - { - byte[] result = new byte[SizeInBytes]; - - byte[] temp = Half.GetBytes(h.X); - result[0] = temp[0]; - result[1] = temp[1]; - temp = Half.GetBytes(h.Y); - result[2] = temp[0]; - result[3] = temp[1]; - - return result; - } - - /// Converts an array of bytes into Half2. - /// A Half2 in it's byte[] representation. - /// The starting position within value. - /// A new Half2 instance. - public static Vector2h FromBytes(byte[] value, int startIndex) - { - Vector2h h2 = new Vector2h(); - h2.X = Half.FromBytes(value, startIndex); - h2.Y = Half.FromBytes(value, startIndex + 2); - return h2; - } - - #endregion BitConverter - } -} \ No newline at end of file diff --git a/OpenTK/Math/Vector3.cs b/OpenTK/Math/Vector3.cs deleted file mode 100644 index 8a096653..00000000 --- a/OpenTK/Math/Vector3.cs +++ /dev/null @@ -1,1494 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. 
- -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - */ -#endregion - -using System; -using System.Runtime.InteropServices; -using System.Xml.Serialization; -namespace OpenTK -{ - /// - /// Represents a 3D vector using three single-precision floating-point numbers. - /// - /// - /// The Vector3 structure is suitable for interoperation with unmanaged code requiring three consecutive floats. - /// - [Serializable] - [StructLayout(LayoutKind.Sequential)] - public struct Vector3 : IEquatable - { - #region Fields - - /// - /// The X component of the Vector3. - /// - public float X; - - /// - /// The Y component of the Vector3. - /// - public float Y; - - /// - /// The Z component of the Vector3. - /// - public float Z; - - #endregion - - #region Constructors - - /// - /// Constructs a new instance. - /// - /// The value that will initialize this instance. - public Vector3(float value) - { - X = value; - Y = value; - Z = value; - } - - /// - /// Constructs a new Vector3. - /// - /// The x component of the Vector3. - /// The y component of the Vector3. - /// The z component of the Vector3. - public Vector3(float x, float y, float z) - { - X = x; - Y = y; - Z = z; - } - - /// - /// Constructs a new Vector3 from the given Vector2. - /// - /// The Vector2 to copy components from. - public Vector3(Vector2 v) - { - X = v.X; - Y = v.Y; - Z = 0.0f; - } - - /// - /// Constructs a new Vector3 from the given Vector3. - /// - /// The Vector3 to copy components from. - public Vector3(Vector3 v) - { - X = v.X; - Y = v.Y; - Z = v.Z; - } - - /// - /// Constructs a new Vector3 from the given Vector4. - /// - /// The Vector4 to copy components from. - public Vector3(Vector4 v) - { - X = v.X; - Y = v.Y; - Z = v.Z; - } - - #endregion - - #region Public Members - - - /// - /// Gets or sets the value at the index of the Vector. - /// - public float this[int index] { - get{ - if(index == 0) return X; - else if(index == 1) return Y; - else if(index == 2) return Z; - throw new IndexOutOfRangeException("You tried to access this vector at index: " + index); - } set{ - if(index == 0) X = value; - else if(index == 1) Y = value; - else if(index == 2) Z = value; - else throw new IndexOutOfRangeException("You tried to set this vector at index: " + index); - } - } - - #region Instance - - #region public void Add() - - /// Add the Vector passed as parameter to this instance. - /// Right operand. This parameter is only read from. 
- [CLSCompliant(false)] - [Obsolete("Use static Add() method instead.")] - public void Add(Vector3 right) - { - this.X += right.X; - this.Y += right.Y; - this.Z += right.Z; - } - - /// Add the Vector passed as parameter to this instance. - /// Right operand. This parameter is only read from. - [CLSCompliant(false)] - [Obsolete("Use static Add() method instead.")] - public void Add(ref Vector3 right) - { - this.X += right.X; - this.Y += right.Y; - this.Z += right.Z; - } - - #endregion public void Add() - - #region public void Sub() - - /// Subtract the Vector passed as parameter from this instance. - /// Right operand. This parameter is only read from. - [CLSCompliant(false)] - [Obsolete("Use static Subtract() method instead.")] - public void Sub(Vector3 right) - { - this.X -= right.X; - this.Y -= right.Y; - this.Z -= right.Z; - } - - /// Subtract the Vector passed as parameter from this instance. - /// Right operand. This parameter is only read from. - [CLSCompliant(false)] - [Obsolete("Use static Subtract() method instead.")] - public void Sub(ref Vector3 right) - { - this.X -= right.X; - this.Y -= right.Y; - this.Z -= right.Z; - } - - #endregion public void Sub() - - #region public void Mult() - - /// Multiply this instance by a scalar. - /// Scalar operand. - [Obsolete("Use static Multiply() method instead.")] - public void Mult(float f) - { - this.X *= f; - this.Y *= f; - this.Z *= f; - } - - #endregion public void Mult() - - #region public void Div() - - /// Divide this instance by a scalar. - /// Scalar operand. - [Obsolete("Use static Divide() method instead.")] - public void Div(float f) - { - float mult = 1.0f / f; - this.X *= mult; - this.Y *= mult; - this.Z *= mult; - } - - #endregion public void Div() - - #region public float Length - - /// - /// Gets the length (magnitude) of the vector. - /// - /// - /// - public float Length - { - get - { - return (float)System.Math.Sqrt(X * X + Y * Y + Z * Z); - } - } - - #endregion - - #region public float LengthFast - - /// - /// Gets an approximation of the vector length (magnitude). - /// - /// - /// This property uses an approximation of the square root function to calculate vector magnitude, with - /// an upper error bound of 0.001. - /// - /// - /// - public float LengthFast - { - get - { - return 1.0f / MathHelper.InverseSqrtFast(X * X + Y * Y + Z * Z); - } - } - - #endregion - - #region public float LengthSquared - - /// - /// Gets the square of the vector length (magnitude). - /// - /// - /// This property avoids the costly square root operation required by the Length property. This makes it more suitable - /// for comparisons. - /// - /// - /// - public float LengthSquared - { - get - { - return X * X + Y * Y + Z * Z; - } - } - - #endregion - - /// - /// Returns a copy of the Vector3 scaled to unit length. - /// - public Vector3 Normalized() - { - Vector3 v = this; - v.Normalize(); - return v; - } - - #region public void Normalize() - - /// - /// Scales the Vector3 to unit length. - /// - public void Normalize() - { - float scale = 1.0f / this.Length; - X *= scale; - Y *= scale; - Z *= scale; - } - - #endregion - - #region public void NormalizeFast() - - /// - /// Scales the Vector3 to approximately unit length. - /// - public void NormalizeFast() - { - float scale = MathHelper.InverseSqrtFast(X * X + Y * Y + Z * Z); - X *= scale; - Y *= scale; - Z *= scale; - } - - #endregion - - #region public void Scale() - - /// - /// Scales the current Vector3 by the given amounts. - /// - /// The scale of the X component. 
- /// The scale of the Y component. - /// The scale of the Z component. - [Obsolete("Use static Multiply() method instead.")] - public void Scale(float sx, float sy, float sz) - { - this.X = X * sx; - this.Y = Y * sy; - this.Z = Z * sz; - } - - /// Scales this instance by the given parameter. - /// The scaling of the individual components. - [CLSCompliant(false)] - [Obsolete("Use static Multiply() method instead.")] - public void Scale(Vector3 scale) - { - this.X *= scale.X; - this.Y *= scale.Y; - this.Z *= scale.Z; - } - - /// Scales this instance by the given parameter. - /// The scaling of the individual components. - [CLSCompliant(false)] - [Obsolete("Use static Multiply() method instead.")] - public void Scale(ref Vector3 scale) - { - this.X *= scale.X; - this.Y *= scale.Y; - this.Z *= scale.Z; - } - - #endregion public void Scale() - - #endregion - - #region Static - - #region Fields - - /// - /// Defines a unit-length Vector3 that points towards the X-axis. - /// - public static readonly Vector3 UnitX = new Vector3(1, 0, 0); - - /// - /// Defines a unit-length Vector3 that points towards the Y-axis. - /// - public static readonly Vector3 UnitY = new Vector3(0, 1, 0); - - /// - /// /// Defines a unit-length Vector3 that points towards the Z-axis. - /// - public static readonly Vector3 UnitZ = new Vector3(0, 0, 1); - - /// - /// Defines a zero-length Vector3. - /// - public static readonly Vector3 Zero = new Vector3(0, 0, 0); - - /// - /// Defines an instance with all components set to 1. - /// - public static readonly Vector3 One = new Vector3(1, 1, 1); - - /// - /// Defines the size of the Vector3 struct in bytes. - /// - public static readonly int SizeInBytes = Marshal.SizeOf(new Vector3()); - - #endregion - - #region Obsolete - - #region Sub - - /// - /// Subtract one Vector from another - /// - /// First operand - /// Second operand - /// Result of subtraction - [Obsolete("Use static Subtract() method instead.")] - public static Vector3 Sub(Vector3 a, Vector3 b) - { - a.X -= b.X; - a.Y -= b.Y; - a.Z -= b.Z; - return a; - } - - /// - /// Subtract one Vector from another - /// - /// First operand - /// Second operand - /// Result of subtraction - [Obsolete("Use static Subtract() method instead.")] - public static void Sub(ref Vector3 a, ref Vector3 b, out Vector3 result) - { - result.X = a.X - b.X; - result.Y = a.Y - b.Y; - result.Z = a.Z - b.Z; - } - - #endregion - - #region Mult - - /// - /// Multiply a vector and a scalar - /// - /// Vector operand - /// Scalar operand - /// Result of the multiplication - [Obsolete("Use static Multiply() method instead.")] - public static Vector3 Mult(Vector3 a, float f) - { - a.X *= f; - a.Y *= f; - a.Z *= f; - return a; - } - - /// - /// Multiply a vector and a scalar - /// - /// Vector operand - /// Scalar operand - /// Result of the multiplication - [Obsolete("Use static Multiply() method instead.")] - public static void Mult(ref Vector3 a, float f, out Vector3 result) - { - result.X = a.X * f; - result.Y = a.Y * f; - result.Z = a.Z * f; - } - - #endregion - - #region Div - - /// - /// Divide a vector by a scalar - /// - /// Vector operand - /// Scalar operand - /// Result of the division - [Obsolete("Use static Divide() method instead.")] - public static Vector3 Div(Vector3 a, float f) - { - float mult = 1.0f / f; - a.X *= mult; - a.Y *= mult; - a.Z *= mult; - return a; - } - - /// - /// Divide a vector by a scalar - /// - /// Vector operand - /// Scalar operand - /// Result of the division - [Obsolete("Use static Divide() method 
instead.")] - public static void Div(ref Vector3 a, float f, out Vector3 result) - { - float mult = 1.0f / f; - result.X = a.X * mult; - result.Y = a.Y * mult; - result.Z = a.Z * mult; - } - - #endregion - - #endregion - - #region Add - - /// - /// Adds two vectors. - /// - /// Left operand. - /// Right operand. - /// Result of operation. - public static Vector3 Add(Vector3 a, Vector3 b) - { - Add(ref a, ref b, out a); - return a; - } - - /// - /// Adds two vectors. - /// - /// Left operand. - /// Right operand. - /// Result of operation. - public static void Add(ref Vector3 a, ref Vector3 b, out Vector3 result) - { - result = new Vector3(a.X + b.X, a.Y + b.Y, a.Z + b.Z); - } - - #endregion - - #region Subtract - - /// - /// Subtract one Vector from another - /// - /// First operand - /// Second operand - /// Result of subtraction - public static Vector3 Subtract(Vector3 a, Vector3 b) - { - Subtract(ref a, ref b, out a); - return a; - } - - /// - /// Subtract one Vector from another - /// - /// First operand - /// Second operand - /// Result of subtraction - public static void Subtract(ref Vector3 a, ref Vector3 b, out Vector3 result) - { - result = new Vector3(a.X - b.X, a.Y - b.Y, a.Z - b.Z); - } - - #endregion - - #region Multiply - - /// - /// Multiplies a vector by a scalar. - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static Vector3 Multiply(Vector3 vector, float scale) - { - Multiply(ref vector, scale, out vector); - return vector; - } - - /// - /// Multiplies a vector by a scalar. - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static void Multiply(ref Vector3 vector, float scale, out Vector3 result) - { - result = new Vector3(vector.X * scale, vector.Y * scale, vector.Z * scale); - } - - /// - /// Multiplies a vector by the components a vector (scale). - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static Vector3 Multiply(Vector3 vector, Vector3 scale) - { - Multiply(ref vector, ref scale, out vector); - return vector; - } - - /// - /// Multiplies a vector by the components of a vector (scale). - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static void Multiply(ref Vector3 vector, ref Vector3 scale, out Vector3 result) - { - result = new Vector3(vector.X * scale.X, vector.Y * scale.Y, vector.Z * scale.Z); - } - - #endregion - - #region Divide - - /// - /// Divides a vector by a scalar. - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static Vector3 Divide(Vector3 vector, float scale) - { - Divide(ref vector, scale, out vector); - return vector; - } - - /// - /// Divides a vector by a scalar. - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static void Divide(ref Vector3 vector, float scale, out Vector3 result) - { - Multiply(ref vector, 1 / scale, out result); - } - - /// - /// Divides a vector by the components of a vector (scale). - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static Vector3 Divide(Vector3 vector, Vector3 scale) - { - Divide(ref vector, ref scale, out vector); - return vector; - } - - /// - /// Divide a vector by the components of a vector (scale). - /// - /// Left operand. - /// Right operand. - /// Result of the operation. 
- public static void Divide(ref Vector3 vector, ref Vector3 scale, out Vector3 result) - { - result = new Vector3(vector.X / scale.X, vector.Y / scale.Y, vector.Z / scale.Z); - } - - #endregion - - #region ComponentMin - - /// - /// Calculate the component-wise minimum of two vectors - /// - /// First operand - /// Second operand - /// The component-wise minimum - public static Vector3 ComponentMin(Vector3 a, Vector3 b) - { - a.X = a.X < b.X ? a.X : b.X; - a.Y = a.Y < b.Y ? a.Y : b.Y; - a.Z = a.Z < b.Z ? a.Z : b.Z; - return a; - } - - /// - /// Calculate the component-wise minimum of two vectors - /// - /// First operand - /// Second operand - /// The component-wise minimum - public static void ComponentMin(ref Vector3 a, ref Vector3 b, out Vector3 result) - { - result.X = a.X < b.X ? a.X : b.X; - result.Y = a.Y < b.Y ? a.Y : b.Y; - result.Z = a.Z < b.Z ? a.Z : b.Z; - } - - #endregion - - #region ComponentMax - - /// - /// Calculate the component-wise maximum of two vectors - /// - /// First operand - /// Second operand - /// The component-wise maximum - public static Vector3 ComponentMax(Vector3 a, Vector3 b) - { - a.X = a.X > b.X ? a.X : b.X; - a.Y = a.Y > b.Y ? a.Y : b.Y; - a.Z = a.Z > b.Z ? a.Z : b.Z; - return a; - } - - /// - /// Calculate the component-wise maximum of two vectors - /// - /// First operand - /// Second operand - /// The component-wise maximum - public static void ComponentMax(ref Vector3 a, ref Vector3 b, out Vector3 result) - { - result.X = a.X > b.X ? a.X : b.X; - result.Y = a.Y > b.Y ? a.Y : b.Y; - result.Z = a.Z > b.Z ? a.Z : b.Z; - } - - #endregion - - #region Min - - /// - /// Returns the Vector3 with the minimum magnitude - /// - /// Left operand - /// Right operand - /// The minimum Vector3 - public static Vector3 Min(Vector3 left, Vector3 right) - { - return left.LengthSquared < right.LengthSquared ? left : right; - } - - #endregion - - #region Max - - /// - /// Returns the Vector3 with the minimum magnitude - /// - /// Left operand - /// Right operand - /// The minimum Vector3 - public static Vector3 Max(Vector3 left, Vector3 right) - { - return left.LengthSquared >= right.LengthSquared ? left : right; - } - - #endregion - - #region Clamp - - /// - /// Clamp a vector to the given minimum and maximum vectors - /// - /// Input vector - /// Minimum vector - /// Maximum vector - /// The clamped vector - public static Vector3 Clamp(Vector3 vec, Vector3 min, Vector3 max) - { - vec.X = vec.X < min.X ? min.X : vec.X > max.X ? max.X : vec.X; - vec.Y = vec.Y < min.Y ? min.Y : vec.Y > max.Y ? max.Y : vec.Y; - vec.Z = vec.Z < min.Z ? min.Z : vec.Z > max.Z ? max.Z : vec.Z; - return vec; - } - - /// - /// Clamp a vector to the given minimum and maximum vectors - /// - /// Input vector - /// Minimum vector - /// Maximum vector - /// The clamped vector - public static void Clamp(ref Vector3 vec, ref Vector3 min, ref Vector3 max, out Vector3 result) - { - result.X = vec.X < min.X ? min.X : vec.X > max.X ? max.X : vec.X; - result.Y = vec.Y < min.Y ? min.Y : vec.Y > max.Y ? max.Y : vec.Y; - result.Z = vec.Z < min.Z ? min.Z : vec.Z > max.Z ? 
max.Z : vec.Z; - } - - #endregion - - #region Normalize - - /// - /// Scale a vector to unit length - /// - /// The input vector - /// The normalized vector - public static Vector3 Normalize(Vector3 vec) - { - float scale = 1.0f / vec.Length; - vec.X *= scale; - vec.Y *= scale; - vec.Z *= scale; - return vec; - } - - /// - /// Scale a vector to unit length - /// - /// The input vector - /// The normalized vector - public static void Normalize(ref Vector3 vec, out Vector3 result) - { - float scale = 1.0f / vec.Length; - result.X = vec.X * scale; - result.Y = vec.Y * scale; - result.Z = vec.Z * scale; - } - - #endregion - - #region NormalizeFast - - /// - /// Scale a vector to approximately unit length - /// - /// The input vector - /// The normalized vector - public static Vector3 NormalizeFast(Vector3 vec) - { - float scale = MathHelper.InverseSqrtFast(vec.X * vec.X + vec.Y * vec.Y + vec.Z * vec.Z); - vec.X *= scale; - vec.Y *= scale; - vec.Z *= scale; - return vec; - } - - /// - /// Scale a vector to approximately unit length - /// - /// The input vector - /// The normalized vector - public static void NormalizeFast(ref Vector3 vec, out Vector3 result) - { - float scale = MathHelper.InverseSqrtFast(vec.X * vec.X + vec.Y * vec.Y + vec.Z * vec.Z); - result.X = vec.X * scale; - result.Y = vec.Y * scale; - result.Z = vec.Z * scale; - } - - #endregion - - #region Dot - - /// - /// Calculate the dot (scalar) product of two vectors - /// - /// First operand - /// Second operand - /// The dot product of the two inputs - public static float Dot(Vector3 left, Vector3 right) - { - return left.X * right.X + left.Y * right.Y + left.Z * right.Z; - } - - /// - /// Calculate the dot (scalar) product of two vectors - /// - /// First operand - /// Second operand - /// The dot product of the two inputs - public static void Dot(ref Vector3 left, ref Vector3 right, out float result) - { - result = left.X * right.X + left.Y * right.Y + left.Z * right.Z; - } - - #endregion - - #region Cross - - /// - /// Caclulate the cross (vector) product of two vectors - /// - /// First operand - /// Second operand - /// The cross product of the two inputs - public static Vector3 Cross(Vector3 left, Vector3 right) - { - Vector3 result; - Cross(ref left, ref right, out result); - return result; - } - - /// - /// Caclulate the cross (vector) product of two vectors - /// - /// First operand - /// Second operand - /// The cross product of the two inputs - /// The cross product of the two inputs - public static void Cross(ref Vector3 left, ref Vector3 right, out Vector3 result) - { - result = new Vector3(left.Y * right.Z - left.Z * right.Y, - left.Z * right.X - left.X * right.Z, - left.X * right.Y - left.Y * right.X); - } - - #endregion - - #region Lerp - - /// - /// Returns a new Vector that is the linear blend of the 2 given Vectors - /// - /// First input vector - /// Second input vector - /// The blend factor. a when blend=0, b when blend=1. - /// a when blend=0, b when blend=1, and a linear combination otherwise - public static Vector3 Lerp(Vector3 a, Vector3 b, float blend) - { - a.X = blend * (b.X - a.X) + a.X; - a.Y = blend * (b.Y - a.Y) + a.Y; - a.Z = blend * (b.Z - a.Z) + a.Z; - return a; - } - - /// - /// Returns a new Vector that is the linear blend of the 2 given Vectors - /// - /// First input vector - /// Second input vector - /// The blend factor. a when blend=0, b when blend=1. 
- /// a when blend=0, b when blend=1, and a linear combination otherwise - public static void Lerp(ref Vector3 a, ref Vector3 b, float blend, out Vector3 result) - { - result.X = blend * (b.X - a.X) + a.X; - result.Y = blend * (b.Y - a.Y) + a.Y; - result.Z = blend * (b.Z - a.Z) + a.Z; - } - - #endregion - - #region Barycentric - - /// - /// Interpolate 3 Vectors using Barycentric coordinates - /// - /// First input Vector - /// Second input Vector - /// Third input Vector - /// First Barycentric Coordinate - /// Second Barycentric Coordinate - /// a when u=v=0, b when u=1,v=0, c when u=0,v=1, and a linear combination of a,b,c otherwise - public static Vector3 BaryCentric(Vector3 a, Vector3 b, Vector3 c, float u, float v) - { - return a + u * (b - a) + v * (c - a); - } - - /// Interpolate 3 Vectors using Barycentric coordinates - /// First input Vector. - /// Second input Vector. - /// Third input Vector. - /// First Barycentric Coordinate. - /// Second Barycentric Coordinate. - /// Output Vector. a when u=v=0, b when u=1,v=0, c when u=0,v=1, and a linear combination of a,b,c otherwise - public static void BaryCentric(ref Vector3 a, ref Vector3 b, ref Vector3 c, float u, float v, out Vector3 result) - { - result = a; // copy - - Vector3 temp = b; // copy - Subtract(ref temp, ref a, out temp); - Multiply(ref temp, u, out temp); - Add(ref result, ref temp, out result); - - temp = c; // copy - Subtract(ref temp, ref a, out temp); - Multiply(ref temp, v, out temp); - Add(ref result, ref temp, out result); - } - - #endregion - - #region Transform - - /// Transform a direction vector by the given Matrix - /// Assumes the matrix has a bottom row of (0,0,0,1), that is the translation part is ignored. - /// - /// The vector to transform - /// The desired transformation - /// The transformed vector - public static Vector3 TransformVector(Vector3 vec, Matrix4 mat) - { - Vector3 v; - v.X = Vector3.Dot(vec, new Vector3(mat.Column0)); - v.Y = Vector3.Dot(vec, new Vector3(mat.Column1)); - v.Z = Vector3.Dot(vec, new Vector3(mat.Column2)); - return v; - } - - /// Transform a direction vector by the given Matrix - /// Assumes the matrix has a bottom row of (0,0,0,1), that is the translation part is ignored. 
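// Sketch: Lerp above computes a + blend * (b - a) per component, so blend = 0 returns the
// first vector, blend = 1 the second, and blend = 0.5 the midpoint. The endpoints below are
// made up for the example.
Vector3 from = new Vector3(0f, 0f, 0f);
Vector3 to = new Vector3(10f, 20f, 30f);
Vector3 midpoint = Vector3.Lerp(from, to, 0.5f);   // (5, 10, 15)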
- /// - /// The vector to transform - /// The desired transformation - /// The transformed vector - public static void TransformVector(ref Vector3 vec, ref Matrix4 mat, out Vector3 result) - { - result.X = vec.X * mat.Row0.X + - vec.Y * mat.Row1.X + - vec.Z * mat.Row2.X; - - result.Y = vec.X * mat.Row0.Y + - vec.Y * mat.Row1.Y + - vec.Z * mat.Row2.Y; - - result.Z = vec.X * mat.Row0.Z + - vec.Y * mat.Row1.Z + - vec.Z * mat.Row2.Z; - } - - /// Transform a Normal by the given Matrix - /// - /// This calculates the inverse of the given matrix, use TransformNormalInverse if you - /// already have the inverse to avoid this extra calculation - /// - /// The normal to transform - /// The desired transformation - /// The transformed normal - public static Vector3 TransformNormal(Vector3 norm, Matrix4 mat) - { - mat.Invert(); - return TransformNormalInverse(norm, mat); - } - - /// Transform a Normal by the given Matrix - /// - /// This calculates the inverse of the given matrix, use TransformNormalInverse if you - /// already have the inverse to avoid this extra calculation - /// - /// The normal to transform - /// The desired transformation - /// The transformed normal - public static void TransformNormal(ref Vector3 norm, ref Matrix4 mat, out Vector3 result) - { - Matrix4 Inverse = Matrix4.Invert(mat); - Vector3.TransformNormalInverse(ref norm, ref Inverse, out result); - } - - /// Transform a Normal by the (transpose of the) given Matrix - /// - /// This version doesn't calculate the inverse matrix. - /// Use this version if you already have the inverse of the desired transform to hand - /// - /// The normal to transform - /// The inverse of the desired transformation - /// The transformed normal - public static Vector3 TransformNormalInverse(Vector3 norm, Matrix4 invMat) - { - Vector3 n; - n.X = Vector3.Dot(norm, new Vector3(invMat.Row0)); - n.Y = Vector3.Dot(norm, new Vector3(invMat.Row1)); - n.Z = Vector3.Dot(norm, new Vector3(invMat.Row2)); - return n; - } - - /// Transform a Normal by the (transpose of the) given Matrix - /// - /// This version doesn't calculate the inverse matrix. 
- /// Use this version if you already have the inverse of the desired transform to hand - /// - /// The normal to transform - /// The inverse of the desired transformation - /// The transformed normal - public static void TransformNormalInverse(ref Vector3 norm, ref Matrix4 invMat, out Vector3 result) - { - result.X = norm.X * invMat.Row0.X + - norm.Y * invMat.Row0.Y + - norm.Z * invMat.Row0.Z; - - result.Y = norm.X * invMat.Row1.X + - norm.Y * invMat.Row1.Y + - norm.Z * invMat.Row1.Z; - - result.Z = norm.X * invMat.Row2.X + - norm.Y * invMat.Row2.Y + - norm.Z * invMat.Row2.Z; - } - - /// Transform a Position by the given Matrix - /// The position to transform - /// The desired transformation - /// The transformed position - public static Vector3 TransformPosition(Vector3 pos, Matrix4 mat) - { - Vector3 p; - p.X = Vector3.Dot(pos, new Vector3(mat.Column0)) + mat.Row3.X; - p.Y = Vector3.Dot(pos, new Vector3(mat.Column1)) + mat.Row3.Y; - p.Z = Vector3.Dot(pos, new Vector3(mat.Column2)) + mat.Row3.Z; - return p; - } - - /// Transform a Position by the given Matrix - /// The position to transform - /// The desired transformation - /// The transformed position - public static void TransformPosition(ref Vector3 pos, ref Matrix4 mat, out Vector3 result) - { - result.X = pos.X * mat.Row0.X + - pos.Y * mat.Row1.X + - pos.Z * mat.Row2.X + - mat.Row3.X; - - result.Y = pos.X * mat.Row0.Y + - pos.Y * mat.Row1.Y + - pos.Z * mat.Row2.Y + - mat.Row3.Y; - - result.Z = pos.X * mat.Row0.Z + - pos.Y * mat.Row1.Z + - pos.Z * mat.Row2.Z + - mat.Row3.Z; - } - - /// Transform a Vector by the given Matrix - /// The vector to transform - /// The desired transformation - /// The transformed vector - public static Vector3 Transform(Vector3 vec, Matrix4 mat) - { - Vector3 result; - Transform(ref vec, ref mat, out result); - return result; - } - - /// Transform a Vector by the given Matrix - /// The vector to transform - /// The desired transformation - /// The transformed vector - public static void Transform(ref Vector3 vec, ref Matrix4 mat, out Vector3 result) - { - Vector4 v4 = new Vector4(vec.X, vec.Y, vec.Z, 1.0f); - Vector4.Transform(ref v4, ref mat, out v4); - result = v4.Xyz; - } - - /// - /// Transforms a vector by a quaternion rotation. - /// - /// The vector to transform. - /// The quaternion to rotate the vector by. - /// The result of the operation. - public static Vector3 Transform(Vector3 vec, Quaternion quat) - { - Vector3 result; - Transform(ref vec, ref quat, out result); - return result; - } - - /// - /// Transforms a vector by a quaternion rotation. - /// - /// The vector to transform. - /// The quaternion to rotate the vector by. - /// The result of the operation. 
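// Sketch of the direction/position split above, assuming the Matrix4.CreateTranslation factory
// that OpenTK provides elsewhere in this math library: TransformVector ignores the translation
// row, while TransformPosition applies it.
Matrix4 move = Matrix4.CreateTranslation(5f, 0f, 0f);
Vector3 direction = Vector3.TransformVector(Vector3.UnitX, move);   // still (1, 0, 0)
Vector3 position = Vector3.TransformPosition(Vector3.UnitX, move);  // (6, 0, 0)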
- public static void Transform(ref Vector3 vec, ref Quaternion quat, out Vector3 result) - { - // Since vec.W == 0, we can optimize quat * vec * quat^-1 as follows: - // vec + 2.0 * cross(quat.xyz, cross(quat.xyz, vec) + quat.w * vec) - Vector3 xyz = quat.Xyz, temp, temp2; - Vector3.Cross(ref xyz, ref vec, out temp); - Vector3.Multiply(ref vec, quat.W, out temp2); - Vector3.Add(ref temp, ref temp2, out temp); - Vector3.Cross(ref xyz, ref temp, out temp); - Vector3.Multiply(ref temp, 2, out temp); - Vector3.Add(ref vec, ref temp, out result); - } - - /// Transform a Vector3 by the given Matrix, and project the resulting Vector4 back to a Vector3 - /// The vector to transform - /// The desired transformation - /// The transformed vector - public static Vector3 TransformPerspective(Vector3 vec, Matrix4 mat) - { - Vector3 result; - TransformPerspective(ref vec, ref mat, out result); - return result; - } - - /// Transform a Vector3 by the given Matrix, and project the resulting Vector4 back to a Vector3 - /// The vector to transform - /// The desired transformation - /// The transformed vector - public static void TransformPerspective(ref Vector3 vec, ref Matrix4 mat, out Vector3 result) - { - Vector4 v = new Vector4(vec, 1); - Vector4.Transform(ref v, ref mat, out v); - result.X = v.X / v.W; - result.Y = v.Y / v.W; - result.Z = v.Z / v.W; - } - - #endregion - - #region CalculateAngle - - /// - /// Calculates the angle (in radians) between two vectors. - /// - /// The first vector. - /// The second vector. - /// Angle (in radians) between the vectors. - /// Note that the returned angle is never bigger than the constant Pi. - public static float CalculateAngle(Vector3 first, Vector3 second) - { - return (float)System.Math.Acos((Vector3.Dot(first, second)) / (first.Length * second.Length)); - } - - /// Calculates the angle (in radians) between two vectors. - /// The first vector. - /// The second vector. - /// Angle (in radians) between the vectors. - /// Note that the returned angle is never bigger than the constant Pi. - public static void CalculateAngle(ref Vector3 first, ref Vector3 second, out float result) - { - float temp; - Vector3.Dot(ref first, ref second, out temp); - result = (float)System.Math.Acos(temp / (first.Length * second.Length)); - } - - #endregion - - #endregion - - #region Swizzle - - #region 2-component - - /// - /// Gets or sets an OpenTK.Vector2 with the X and Y components of this instance. - /// - [XmlIgnore] - public Vector2 Xy { get { return new Vector2(X, Y); } set { X = value.X; Y = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2 with the X and Z components of this instance. - /// - [XmlIgnore] - public Vector2 Xz { get { return new Vector2(X, Z); } set { X = value.X; Z = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2 with the Y and X components of this instance. - /// - [XmlIgnore] - public Vector2 Yx { get { return new Vector2(Y, X); } set { Y = value.X; X = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2 with the Y and Z components of this instance. - /// - [XmlIgnore] - public Vector2 Yz { get { return new Vector2(Y, Z); } set { Y = value.X; Z = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2 with the Z and X components of this instance. - /// - [XmlIgnore] - public Vector2 Zx { get { return new Vector2(Z, X); } set { Z = value.X; X = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2 with the Z and Y components of this instance. 
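// Sketch of the quaternion rotation above (vec + 2 * cross(q.xyz, cross(q.xyz, vec) + q.w * vec)),
// assuming the Quaternion.FromAxisAngle factory and MathHelper.PiOver2 constant that OpenTK
// provides: a 90-degree turn about Z takes UnitX to approximately UnitY.
Quaternion quarterTurn = Quaternion.FromAxisAngle(Vector3.UnitZ, MathHelper.PiOver2);
Vector3 rotated = Vector3.Transform(Vector3.UnitX, quarterTurn);   // ~(0, 1, 0), up to rounding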
- /// - [XmlIgnore] - public Vector2 Zy { get { return new Vector2(Z, Y); } set { Z = value.X; Y = value.Y; } } - - #endregion - - #region 3-component - - /// - /// Gets or sets an OpenTK.Vector3 with the X, Z, and Y components of this instance. - /// - [XmlIgnore] - public Vector3 Xzy { get { return new Vector3(X, Z, Y); } set { X = value.X; Z = value.Y; Y = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3 with the Y, X, and Z components of this instance. - /// - [XmlIgnore] - public Vector3 Yxz { get { return new Vector3(Y, X, Z); } set { Y = value.X; X = value.Y; Z = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3 with the Y, Z, and X components of this instance. - /// - [XmlIgnore] - public Vector3 Yzx { get { return new Vector3(Y, Z, X); } set { Y = value.X; Z = value.Y; X = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3 with the Z, X, and Y components of this instance. - /// - [XmlIgnore] - public Vector3 Zxy { get { return new Vector3(Z, X, Y); } set { Z = value.X; X = value.Y; Y = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3 with the Z, Y, and X components of this instance. - /// - [XmlIgnore] - public Vector3 Zyx { get { return new Vector3(Z, Y, X); } set { Z = value.X; Y = value.Y; X = value.Z; } } - - #endregion - - #endregion - - #region Operators - - /// - /// Adds two instances. - /// - /// The first instance. - /// The second instance. - /// The result of the calculation. - public static Vector3 operator +(Vector3 left, Vector3 right) - { - left.X += right.X; - left.Y += right.Y; - left.Z += right.Z; - return left; - } - - /// - /// Subtracts two instances. - /// - /// The first instance. - /// The second instance. - /// The result of the calculation. - public static Vector3 operator -(Vector3 left, Vector3 right) - { - left.X -= right.X; - left.Y -= right.Y; - left.Z -= right.Z; - return left; - } - - /// - /// Negates an instance. - /// - /// The instance. - /// The result of the calculation. - public static Vector3 operator -(Vector3 vec) - { - vec.X = -vec.X; - vec.Y = -vec.Y; - vec.Z = -vec.Z; - return vec; - } - - /// - /// Multiplies an instance by a scalar. - /// - /// The instance. - /// The scalar. - /// The result of the calculation. - public static Vector3 operator *(Vector3 vec, float scale) - { - vec.X *= scale; - vec.Y *= scale; - vec.Z *= scale; - return vec; - } - - /// - /// Multiplies an instance by a scalar. - /// - /// The scalar. - /// The instance. - /// The result of the calculation. - public static Vector3 operator *(float scale, Vector3 vec) - { - vec.X *= scale; - vec.Y *= scale; - vec.Z *= scale; - return vec; - } - - /// - /// Component-wise multiplication between the specified instance by a scale vector. - /// - /// Left operand. - /// Right operand. - /// Result of multiplication. - public static Vector3 operator *(Vector3 vec, Vector3 scale) - { - vec.X *= scale.X; - vec.Y *= scale.Y; - vec.Z *= scale.Z; - return vec; - } - - /// - /// Divides an instance by a scalar. - /// - /// The instance. - /// The scalar. - /// The result of the calculation. - public static Vector3 operator /(Vector3 vec, float scale) - { - float mult = 1.0f / scale; - vec.X *= mult; - vec.Y *= mult; - vec.Z *= mult; - return vec; - } - - /// - /// Compares two instances for equality. - /// - /// The first instance. - /// The second instance. - /// True, if left equals right; false otherwise. 
- public static bool operator ==(Vector3 left, Vector3 right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for inequality. - /// - /// The first instance. - /// The second instance. - /// True, if left does not equa lright; false otherwise. - public static bool operator !=(Vector3 left, Vector3 right) - { - return !left.Equals(right); - } - - #endregion - - #region Overrides - - #region public override string ToString() - - private static string listSeparator = System.Globalization.CultureInfo.CurrentCulture.TextInfo.ListSeparator; - /// - /// Returns a System.String that represents the current Vector3. - /// - /// - public override string ToString() - { - return String.Format("({0}{3} {1}{3} {2})", X, Y, Z, listSeparator); - } - - #endregion - - #region public override int GetHashCode() - - /// - /// Returns the hashcode for this instance. - /// - /// A System.Int32 containing the unique hashcode for this instance. - public override int GetHashCode() - { - return X.GetHashCode() ^ Y.GetHashCode() ^ Z.GetHashCode(); - } - - #endregion - - #region public override bool Equals(object obj) - - /// - /// Indicates whether this instance and a specified object are equal. - /// - /// The object to compare to. - /// True if the instances are equal; false otherwise. - public override bool Equals(object obj) - { - if (!(obj is Vector3)) - return false; - - return this.Equals((Vector3)obj); - } - - #endregion - - #endregion - - #endregion - - #region IEquatable Members - - /// Indicates whether the current vector is equal to another vector. - /// A vector to compare with this vector. - /// true if the current vector is equal to the vector parameter; otherwise, false. - public bool Equals(Vector3 other) - { - return - X == other.X && - Y == other.Y && - Z == other.Z; - } - - #endregion - } -} diff --git a/OpenTK/Math/Vector3d.cs b/OpenTK/Math/Vector3d.cs deleted file mode 100644 index 0b281625..00000000 --- a/OpenTK/Math/Vector3d.cs +++ /dev/null @@ -1,1508 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -*/ -#endregion - -using System; -using System.Runtime.InteropServices; -using System.Xml.Serialization; - -namespace OpenTK -{ - /// - /// Represents a 3D vector using three double-precision floating-point numbers. - /// - [Serializable] - [StructLayout(LayoutKind.Sequential)] - public struct Vector3d : IEquatable - { - #region Fields - - /// - /// The X component of the Vector3. 
- /// - public double X; - - /// - /// The Y component of the Vector3. - /// - public double Y; - - /// - /// The Z component of the Vector3. - /// - public double Z; - - #endregion - - #region Constructors - - /// - /// Constructs a new instance. - /// - /// The value that will initialize this instance. - public Vector3d(double value) - { - X = value; - Y = value; - Z = value; - } - - /// - /// Constructs a new Vector3. - /// - /// The x component of the Vector3. - /// The y component of the Vector3. - /// The z component of the Vector3. - public Vector3d(double x, double y, double z) - { - X = x; - Y = y; - Z = z; - } - - /// - /// Constructs a new instance from the given Vector2d. - /// - /// The Vector2d to copy components from. - public Vector3d(Vector2d v) - { - X = v.X; - Y = v.Y; - Z = 0.0f; - } - - /// - /// Constructs a new instance from the given Vector3d. - /// - /// The Vector3d to copy components from. - public Vector3d(Vector3d v) - { - X = v.X; - Y = v.Y; - Z = v.Z; - } - - /// - /// Constructs a new instance from the given Vector4d. - /// - /// The Vector4d to copy components from. - public Vector3d(Vector4d v) - { - X = v.X; - Y = v.Y; - Z = v.Z; - } - - - #endregion - - #region Public Members - - /// - /// Gets or sets the value at the index of the Vector. - /// - public double this[int index] { - get{ - if(index == 0) return X; - else if(index == 1) return Y; - else if(index == 2) return Z; - throw new IndexOutOfRangeException("You tried to access this vector at index: " + index); - } set{ - if(index == 0) X = value; - else if(index == 1) Y = value; - else if(index == 2) Z = value; - else throw new IndexOutOfRangeException("You tried to set this vector at index: " + index); - } - } - - #region Instance - - #region public void Add() - - /// Add the Vector passed as parameter to this instance. - /// Right operand. This parameter is only read from. - [CLSCompliant(false)] - [Obsolete("Use static Add() method instead.")] - public void Add(Vector3d right) - { - this.X += right.X; - this.Y += right.Y; - this.Z += right.Z; - } - - /// Add the Vector passed as parameter to this instance. - /// Right operand. This parameter is only read from. - [CLSCompliant(false)] - [Obsolete("Use static Add() method instead.")] - public void Add(ref Vector3d right) - { - this.X += right.X; - this.Y += right.Y; - this.Z += right.Z; - } - - #endregion public void Add() - - #region public void Sub() - - /// Subtract the Vector passed as parameter from this instance. - /// Right operand. This parameter is only read from. - [CLSCompliant(false)] - [Obsolete("Use static Subtract() method instead.")] - public void Sub(Vector3d right) - { - this.X -= right.X; - this.Y -= right.Y; - this.Z -= right.Z; - } - - /// Subtract the Vector passed as parameter from this instance. - /// Right operand. This parameter is only read from. - [CLSCompliant(false)] - [Obsolete("Use static Subtract() method instead.")] - public void Sub(ref Vector3d right) - { - this.X -= right.X; - this.Y -= right.Y; - this.Z -= right.Z; - } - - #endregion public void Sub() - - #region public void Mult() - - /// Multiply this instance by a scalar. - /// Scalar operand. - [Obsolete("Use static Multiply() method instead.")] - public void Mult(double f) - { - this.X *= f; - this.Y *= f; - this.Z *= f; - } - - #endregion public void Mult() - - #region public void Div() - - /// Divide this instance by a scalar. - /// Scalar operand. 
- [Obsolete("Use static Divide() method instead.")] - public void Div(double f) - { - double mult = 1.0 / f; - this.X *= mult; - this.Y *= mult; - this.Z *= mult; - } - - #endregion public void Div() - - #region public double Length - - /// - /// Gets the length (magnitude) of the vector. - /// - /// - /// - public double Length - { - get - { - return System.Math.Sqrt(X * X + Y * Y + Z * Z); - } - } - - #endregion - - #region public double LengthFast - - /// - /// Gets an approximation of the vector length (magnitude). - /// - /// - /// This property uses an approximation of the square root function to calculate vector magnitude, with - /// an upper error bound of 0.001. - /// - /// - /// - public double LengthFast - { - get - { - return 1.0 / MathHelper.InverseSqrtFast(X * X + Y * Y + Z * Z); - } - } - - #endregion - - #region public double LengthSquared - - /// - /// Gets the square of the vector length (magnitude). - /// - /// - /// This property avoids the costly square root operation required by the Length property. This makes it more suitable - /// for comparisons. - /// - /// - /// - public double LengthSquared - { - get - { - return X * X + Y * Y + Z * Z; - } - } - - #endregion - - /// - /// Returns a copy of the Vector3d scaled to unit length. - /// - /// - public Vector3d Normalized() - { - Vector3d v = this; - v.Normalize(); - return v; - } - - #region public void Normalize() - - /// - /// Scales the Vector3d to unit length. - /// - public void Normalize() - { - double scale = 1.0 / this.Length; - X *= scale; - Y *= scale; - Z *= scale; - } - - #endregion - - #region public void NormalizeFast() - - /// - /// Scales the Vector3d to approximately unit length. - /// - public void NormalizeFast() - { - double scale = MathHelper.InverseSqrtFast(X * X + Y * Y + Z * Z); - X *= scale; - Y *= scale; - Z *= scale; - } - - #endregion - - #region public void Scale() - - /// - /// Scales the current Vector3d by the given amounts. - /// - /// The scale of the X component. - /// The scale of the Y component. - /// The scale of the Z component. - [Obsolete("Use static Multiply() method instead.")] - public void Scale(double sx, double sy, double sz) - { - this.X = X * sx; - this.Y = Y * sy; - this.Z = Z * sz; - } - - /// Scales this instance by the given parameter. - /// The scaling of the individual components. - [Obsolete("Use static Multiply() method instead.")] - [CLSCompliant(false)] - public void Scale(Vector3d scale) - { - this.X *= scale.X; - this.Y *= scale.Y; - this.Z *= scale.Z; - } - - /// Scales this instance by the given parameter. - /// The scaling of the individual components. - [Obsolete("Use static Multiply() method instead.")] - [CLSCompliant(false)] - public void Scale(ref Vector3d scale) - { - this.X *= scale.X; - this.Y *= scale.Y; - this.Z *= scale.Z; - } - - #endregion public void Scale() - - #endregion - - #region Static - - #region Fields - - /// - /// Defines a unit-length Vector3d that points towards the X-axis. - /// - public static readonly Vector3d UnitX = new Vector3d(1, 0, 0); - - /// - /// Defines a unit-length Vector3d that points towards the Y-axis. - /// - public static readonly Vector3d UnitY = new Vector3d(0, 1, 0); - - /// - /// /// Defines a unit-length Vector3d that points towards the Z-axis. - /// - public static readonly Vector3d UnitZ = new Vector3d(0, 0, 1); - - /// - /// Defines a zero-length Vector3. - /// - public static readonly Vector3d Zero = new Vector3d(0, 0, 0); - - /// - /// Defines an instance with all components set to 1. 
- /// - public static readonly Vector3d One = new Vector3d(1, 1, 1); - - /// - /// Defines the size of the Vector3d struct in bytes. - /// - public static readonly int SizeInBytes = Marshal.SizeOf(new Vector3d()); - - #endregion - - #region Obsolete - - #region Sub - - /// - /// Subtract one Vector from another - /// - /// First operand - /// Second operand - /// Result of subtraction - [Obsolete("Use static Subtract() method instead.")] - public static Vector3d Sub(Vector3d a, Vector3d b) - { - a.X -= b.X; - a.Y -= b.Y; - a.Z -= b.Z; - return a; - } - - /// - /// Subtract one Vector from another - /// - /// First operand - /// Second operand - /// Result of subtraction - [Obsolete("Use static Subtract() method instead.")] - public static void Sub(ref Vector3d a, ref Vector3d b, out Vector3d result) - { - result.X = a.X - b.X; - result.Y = a.Y - b.Y; - result.Z = a.Z - b.Z; - } - - #endregion - - #region Mult - - /// - /// Multiply a vector and a scalar - /// - /// Vector operand - /// Scalar operand - /// Result of the multiplication - [Obsolete("Use static Multiply() method instead.")] - public static Vector3d Mult(Vector3d a, double f) - { - a.X *= f; - a.Y *= f; - a.Z *= f; - return a; - } - - /// - /// Multiply a vector and a scalar - /// - /// Vector operand - /// Scalar operand - /// Result of the multiplication - [Obsolete("Use static Multiply() method instead.")] - public static void Mult(ref Vector3d a, double f, out Vector3d result) - { - result.X = a.X * f; - result.Y = a.Y * f; - result.Z = a.Z * f; - } - - #endregion - - #region Div - - /// - /// Divide a vector by a scalar - /// - /// Vector operand - /// Scalar operand - /// Result of the division - [Obsolete("Use static Divide() method instead.")] - public static Vector3d Div(Vector3d a, double f) - { - double mult = 1.0 / f; - a.X *= mult; - a.Y *= mult; - a.Z *= mult; - return a; - } - - /// - /// Divide a vector by a scalar - /// - /// Vector operand - /// Scalar operand - /// Result of the division - [Obsolete("Use static Divide() method instead.")] - public static void Div(ref Vector3d a, double f, out Vector3d result) - { - double mult = 1.0 / f; - result.X = a.X * mult; - result.Y = a.Y * mult; - result.Z = a.Z * mult; - } - - #endregion - - #endregion - - #region Add - - /// - /// Adds two vectors. - /// - /// Left operand. - /// Right operand. - /// Result of operation. - public static Vector3d Add(Vector3d a, Vector3d b) - { - Add(ref a, ref b, out a); - return a; - } - - /// - /// Adds two vectors. - /// - /// Left operand. - /// Right operand. - /// Result of operation. - public static void Add(ref Vector3d a, ref Vector3d b, out Vector3d result) - { - result = new Vector3d(a.X + b.X, a.Y + b.Y, a.Z + b.Z); - } - - #endregion - - #region Subtract - - /// - /// Subtract one Vector from another - /// - /// First operand - /// Second operand - /// Result of subtraction - public static Vector3d Subtract(Vector3d a, Vector3d b) - { - Subtract(ref a, ref b, out a); - return a; - } - - /// - /// Subtract one Vector from another - /// - /// First operand - /// Second operand - /// Result of subtraction - public static void Subtract(ref Vector3d a, ref Vector3d b, out Vector3d result) - { - result = new Vector3d(a.X - b.X, a.Y - b.Y, a.Z - b.Z); - } - - #endregion - - #region Multiply - - /// - /// Multiplies a vector by a scalar. - /// - /// Left operand. - /// Right operand. - /// Result of the operation. 
- public static Vector3d Multiply(Vector3d vector, double scale) - { - Multiply(ref vector, scale, out vector); - return vector; - } - - /// - /// Multiplies a vector by a scalar. - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static void Multiply(ref Vector3d vector, double scale, out Vector3d result) - { - result = new Vector3d(vector.X * scale, vector.Y * scale, vector.Z * scale); - } - - /// - /// Multiplies a vector by the components a vector (scale). - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static Vector3d Multiply(Vector3d vector, Vector3d scale) - { - Multiply(ref vector, ref scale, out vector); - return vector; - } - - /// - /// Multiplies a vector by the components of a vector (scale). - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static void Multiply(ref Vector3d vector, ref Vector3d scale, out Vector3d result) - { - result = new Vector3d(vector.X * scale.X, vector.Y * scale.Y, vector.Z * scale.Z); - } - - #endregion - - #region Divide - - /// - /// Divides a vector by a scalar. - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static Vector3d Divide(Vector3d vector, double scale) - { - Divide(ref vector, scale, out vector); - return vector; - } - - /// - /// Divides a vector by a scalar. - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static void Divide(ref Vector3d vector, double scale, out Vector3d result) - { - Multiply(ref vector, 1 / scale, out result); - } - - /// - /// Divides a vector by the components of a vector (scale). - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static Vector3d Divide(Vector3d vector, Vector3d scale) - { - Divide(ref vector, ref scale, out vector); - return vector; - } - - /// - /// Divide a vector by the components of a vector (scale). - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static void Divide(ref Vector3d vector, ref Vector3d scale, out Vector3d result) - { - result = new Vector3d(vector.X / scale.X, vector.Y / scale.Y, vector.Z / scale.Z); - } - - #endregion - - #region ComponentMin - - /// - /// Calculate the component-wise minimum of two vectors - /// - /// First operand - /// Second operand - /// The component-wise minimum - public static Vector3d ComponentMin(Vector3d a, Vector3d b) - { - a.X = a.X < b.X ? a.X : b.X; - a.Y = a.Y < b.Y ? a.Y : b.Y; - a.Z = a.Z < b.Z ? a.Z : b.Z; - return a; - } - - /// - /// Calculate the component-wise minimum of two vectors - /// - /// First operand - /// Second operand - /// The component-wise minimum - public static void ComponentMin(ref Vector3d a, ref Vector3d b, out Vector3d result) - { - result.X = a.X < b.X ? a.X : b.X; - result.Y = a.Y < b.Y ? a.Y : b.Y; - result.Z = a.Z < b.Z ? a.Z : b.Z; - } - - #endregion - - #region ComponentMax - - /// - /// Calculate the component-wise maximum of two vectors - /// - /// First operand - /// Second operand - /// The component-wise maximum - public static Vector3d ComponentMax(Vector3d a, Vector3d b) - { - a.X = a.X > b.X ? a.X : b.X; - a.Y = a.Y > b.Y ? a.Y : b.Y; - a.Z = a.Z > b.Z ? a.Z : b.Z; - return a; - } - - /// - /// Calculate the component-wise maximum of two vectors - /// - /// First operand - /// Second operand - /// The component-wise maximum - public static void ComponentMax(ref Vector3d a, ref Vector3d b, out Vector3d result) - { - result.X = a.X > b.X ? 
a.X : b.X; - result.Y = a.Y > b.Y ? a.Y : b.Y; - result.Z = a.Z > b.Z ? a.Z : b.Z; - } - - #endregion - - #region Min - - /// - /// Returns the Vector3d with the minimum magnitude - /// - /// Left operand - /// Right operand - /// The minimum Vector3 - public static Vector3d Min(Vector3d left, Vector3d right) - { - return left.LengthSquared < right.LengthSquared ? left : right; - } - - #endregion - - #region Max - - /// - /// Returns the Vector3d with the minimum magnitude - /// - /// Left operand - /// Right operand - /// The minimum Vector3 - public static Vector3d Max(Vector3d left, Vector3d right) - { - return left.LengthSquared >= right.LengthSquared ? left : right; - } - - #endregion - - #region Clamp - - /// - /// Clamp a vector to the given minimum and maximum vectors - /// - /// Input vector - /// Minimum vector - /// Maximum vector - /// The clamped vector - public static Vector3d Clamp(Vector3d vec, Vector3d min, Vector3d max) - { - vec.X = vec.X < min.X ? min.X : vec.X > max.X ? max.X : vec.X; - vec.Y = vec.Y < min.Y ? min.Y : vec.Y > max.Y ? max.Y : vec.Y; - vec.Z = vec.Z < min.Z ? min.Z : vec.Z > max.Z ? max.Z : vec.Z; - return vec; - } - - /// - /// Clamp a vector to the given minimum and maximum vectors - /// - /// Input vector - /// Minimum vector - /// Maximum vector - /// The clamped vector - public static void Clamp(ref Vector3d vec, ref Vector3d min, ref Vector3d max, out Vector3d result) - { - result.X = vec.X < min.X ? min.X : vec.X > max.X ? max.X : vec.X; - result.Y = vec.Y < min.Y ? min.Y : vec.Y > max.Y ? max.Y : vec.Y; - result.Z = vec.Z < min.Z ? min.Z : vec.Z > max.Z ? max.Z : vec.Z; - } - - #endregion - - #region Normalize - - /// - /// Scale a vector to unit length - /// - /// The input vector - /// The normalized vector - public static Vector3d Normalize(Vector3d vec) - { - double scale = 1.0 / vec.Length; - vec.X *= scale; - vec.Y *= scale; - vec.Z *= scale; - return vec; - } - - /// - /// Scale a vector to unit length - /// - /// The input vector - /// The normalized vector - public static void Normalize(ref Vector3d vec, out Vector3d result) - { - double scale = 1.0 / vec.Length; - result.X = vec.X * scale; - result.Y = vec.Y * scale; - result.Z = vec.Z * scale; - } - - #endregion - - #region NormalizeFast - - /// - /// Scale a vector to approximately unit length - /// - /// The input vector - /// The normalized vector - public static Vector3d NormalizeFast(Vector3d vec) - { - double scale = MathHelper.InverseSqrtFast(vec.X * vec.X + vec.Y * vec.Y + vec.Z * vec.Z); - vec.X *= scale; - vec.Y *= scale; - vec.Z *= scale; - return vec; - } - - /// - /// Scale a vector to approximately unit length - /// - /// The input vector - /// The normalized vector - public static void NormalizeFast(ref Vector3d vec, out Vector3d result) - { - double scale = MathHelper.InverseSqrtFast(vec.X * vec.X + vec.Y * vec.Y + vec.Z * vec.Z); - result.X = vec.X * scale; - result.Y = vec.Y * scale; - result.Z = vec.Z * scale; - } - - #endregion - - #region Dot - - /// - /// Calculate the dot (scalar) product of two vectors - /// - /// First operand - /// Second operand - /// The dot product of the two inputs - public static double Dot(Vector3d left, Vector3d right) - { - return left.X * right.X + left.Y * right.Y + left.Z * right.Z; - } - - /// - /// Calculate the dot (scalar) product of two vectors - /// - /// First operand - /// Second operand - /// The dot product of the two inputs - public static void Dot(ref Vector3d left, ref Vector3d right, out double result) - { - result = 
left.X * right.X + left.Y * right.Y + left.Z * right.Z; - } - - #endregion - - #region Cross - - /// - /// Caclulate the cross (vector) product of two vectors - /// - /// First operand - /// Second operand - /// The cross product of the two inputs - public static Vector3d Cross(Vector3d left, Vector3d right) - { - Vector3d result; - Cross(ref left, ref right, out result); - return result; - } - - /// - /// Caclulate the cross (vector) product of two vectors - /// - /// First operand - /// Second operand - /// The cross product of the two inputs - /// The cross product of the two inputs - public static void Cross(ref Vector3d left, ref Vector3d right, out Vector3d result) - { - result = new Vector3d(left.Y * right.Z - left.Z * right.Y, - left.Z * right.X - left.X * right.Z, - left.X * right.Y - left.Y * right.X); - } - - #endregion - - #region Lerp - - /// - /// Returns a new Vector that is the linear blend of the 2 given Vectors - /// - /// First input vector - /// Second input vector - /// The blend factor. a when blend=0, b when blend=1. - /// a when blend=0, b when blend=1, and a linear combination otherwise - public static Vector3d Lerp(Vector3d a, Vector3d b, double blend) - { - a.X = blend * (b.X - a.X) + a.X; - a.Y = blend * (b.Y - a.Y) + a.Y; - a.Z = blend * (b.Z - a.Z) + a.Z; - return a; - } - - /// - /// Returns a new Vector that is the linear blend of the 2 given Vectors - /// - /// First input vector - /// Second input vector - /// The blend factor. a when blend=0, b when blend=1. - /// a when blend=0, b when blend=1, and a linear combination otherwise - public static void Lerp(ref Vector3d a, ref Vector3d b, double blend, out Vector3d result) - { - result.X = blend * (b.X - a.X) + a.X; - result.Y = blend * (b.Y - a.Y) + a.Y; - result.Z = blend * (b.Z - a.Z) + a.Z; - } - - #endregion - - #region Barycentric - - /// - /// Interpolate 3 Vectors using Barycentric coordinates - /// - /// First input Vector - /// Second input Vector - /// Third input Vector - /// First Barycentric Coordinate - /// Second Barycentric Coordinate - /// a when u=v=0, b when u=1,v=0, c when u=0,v=1, and a linear combination of a,b,c otherwise - public static Vector3d BaryCentric(Vector3d a, Vector3d b, Vector3d c, double u, double v) - { - return a + u * (b - a) + v * (c - a); - } - - /// Interpolate 3 Vectors using Barycentric coordinates - /// First input Vector. - /// Second input Vector. - /// Third input Vector. - /// First Barycentric Coordinate. - /// Second Barycentric Coordinate. - /// Output Vector. a when u=v=0, b when u=1,v=0, c when u=0,v=1, and a linear combination of a,b,c otherwise - public static void BaryCentric(ref Vector3d a, ref Vector3d b, ref Vector3d c, double u, double v, out Vector3d result) - { - result = a; // copy - - Vector3d temp = b; // copy - Subtract(ref temp, ref a, out temp); - Multiply(ref temp, u, out temp); - Add(ref result, ref temp, out result); - - temp = c; // copy - Subtract(ref temp, ref a, out temp); - Multiply(ref temp, v, out temp); - Add(ref result, ref temp, out result); - } - - #endregion - - #region Transform - - /// Transform a direction vector by the given Matrix - /// Assumes the matrix has a bottom row of (0,0,0,1), that is the translation part is ignored. 
- /// - /// The vector to transform - /// The desired transformation - /// The transformed vector - public static Vector3d TransformVector(Vector3d vec, Matrix4d mat) - { - return new Vector3d( - Vector3d.Dot(vec, new Vector3d(mat.Column0)), - Vector3d.Dot(vec, new Vector3d(mat.Column1)), - Vector3d.Dot(vec, new Vector3d(mat.Column2))); - } - - /// Transform a direction vector by the given Matrix - /// Assumes the matrix has a bottom row of (0,0,0,1), that is the translation part is ignored. - /// - /// The vector to transform - /// The desired transformation - /// The transformed vector - public static void TransformVector(ref Vector3d vec, ref Matrix4d mat, out Vector3d result) - { - result.X = vec.X * mat.Row0.X + - vec.Y * mat.Row1.X + - vec.Z * mat.Row2.X; - - result.Y = vec.X * mat.Row0.Y + - vec.Y * mat.Row1.Y + - vec.Z * mat.Row2.Y; - - result.Z = vec.X * mat.Row0.Z + - vec.Y * mat.Row1.Z + - vec.Z * mat.Row2.Z; - } - - /// Transform a Normal by the given Matrix - /// - /// This calculates the inverse of the given matrix, use TransformNormalInverse if you - /// already have the inverse to avoid this extra calculation - /// - /// The normal to transform - /// The desired transformation - /// The transformed normal - public static Vector3d TransformNormal(Vector3d norm, Matrix4d mat) - { - mat.Invert(); - return TransformNormalInverse(norm, mat); - } - - /// Transform a Normal by the given Matrix - /// - /// This calculates the inverse of the given matrix, use TransformNormalInverse if you - /// already have the inverse to avoid this extra calculation - /// - /// The normal to transform - /// The desired transformation - /// The transformed normal - public static void TransformNormal(ref Vector3d norm, ref Matrix4d mat, out Vector3d result) - { - Matrix4d Inverse = Matrix4d.Invert(mat); - Vector3d.TransformNormalInverse(ref norm, ref Inverse, out result); - } - - /// Transform a Normal by the (transpose of the) given Matrix - /// - /// This version doesn't calculate the inverse matrix. - /// Use this version if you already have the inverse of the desired transform to hand - /// - /// The normal to transform - /// The inverse of the desired transformation - /// The transformed normal - public static Vector3d TransformNormalInverse(Vector3d norm, Matrix4d invMat) - { - return new Vector3d( - Vector3d.Dot(norm, new Vector3d(invMat.Row0)), - Vector3d.Dot(norm, new Vector3d(invMat.Row1)), - Vector3d.Dot(norm, new Vector3d(invMat.Row2))); - } - - /// Transform a Normal by the (transpose of the) given Matrix - /// - /// This version doesn't calculate the inverse matrix. 
- /// Use this version if you already have the inverse of the desired transform to hand - /// - /// The normal to transform - /// The inverse of the desired transformation - /// The transformed normal - public static void TransformNormalInverse(ref Vector3d norm, ref Matrix4d invMat, out Vector3d result) - { - result.X = norm.X * invMat.Row0.X + - norm.Y * invMat.Row0.Y + - norm.Z * invMat.Row0.Z; - - result.Y = norm.X * invMat.Row1.X + - norm.Y * invMat.Row1.Y + - norm.Z * invMat.Row1.Z; - - result.Z = norm.X * invMat.Row2.X + - norm.Y * invMat.Row2.Y + - norm.Z * invMat.Row2.Z; - } - - /// Transform a Position by the given Matrix - /// The position to transform - /// The desired transformation - /// The transformed position - public static Vector3d TransformPosition(Vector3d pos, Matrix4d mat) - { - return new Vector3d( - Vector3d.Dot(pos, new Vector3d(mat.Column0)) + mat.Row3.X, - Vector3d.Dot(pos, new Vector3d(mat.Column1)) + mat.Row3.Y, - Vector3d.Dot(pos, new Vector3d(mat.Column2)) + mat.Row3.Z); - } - - /// Transform a Position by the given Matrix - /// The position to transform - /// The desired transformation - /// The transformed position - public static void TransformPosition(ref Vector3d pos, ref Matrix4d mat, out Vector3d result) - { - result.X = pos.X * mat.Row0.X + - pos.Y * mat.Row1.X + - pos.Z * mat.Row2.X + - mat.Row3.X; - - result.Y = pos.X * mat.Row0.Y + - pos.Y * mat.Row1.Y + - pos.Z * mat.Row2.Y + - mat.Row3.Y; - - result.Z = pos.X * mat.Row0.Z + - pos.Y * mat.Row1.Z + - pos.Z * mat.Row2.Z + - mat.Row3.Z; - } - - /// Transform a Vector by the given Matrix - /// The vector to transform - /// The desired transformation - /// The transformed vector - public static Vector3d Transform(Vector3d vec, Matrix4d mat) - { - Vector3d result; - Transform(ref vec, ref mat, out result); - return result; - } - - /// Transform a Vector by the given Matrix - /// The vector to transform - /// The desired transformation - /// The transformed vector - public static void Transform(ref Vector3d vec, ref Matrix4d mat, out Vector3d result) - { - Vector4d v4 = new Vector4d(vec.X, vec.Y, vec.Z, 1.0); - Vector4d.Transform(ref v4, ref mat, out v4); - result = v4.Xyz; - } - - /// - /// Transforms a vector by a quaternion rotation. - /// - /// The vector to transform. - /// The quaternion to rotate the vector by. - /// The result of the operation. - public static Vector3d Transform(Vector3d vec, Quaterniond quat) - { - Vector3d result; - Transform(ref vec, ref quat, out result); - return result; - } - - /// - /// Transforms a vector by a quaternion rotation. - /// - /// The vector to transform. - /// The quaternion to rotate the vector by. - /// The result of the operation. 
- public static void Transform(ref Vector3d vec, ref Quaterniond quat, out Vector3d result) - { - // Since vec.W == 0, we can optimize quat * vec * quat^-1 as follows: - // vec + 2.0 * cross(quat.xyz, cross(quat.xyz, vec) + quat.w * vec) - Vector3d xyz = quat.Xyz, temp, temp2; - Vector3d.Cross(ref xyz, ref vec, out temp); - Vector3d.Multiply(ref vec, quat.W, out temp2); - Vector3d.Add(ref temp, ref temp2, out temp); - Vector3d.Cross(ref xyz, ref temp, out temp); - Vector3d.Multiply(ref temp, 2, out temp); - Vector3d.Add(ref vec, ref temp, out result); - } - - /// - /// Transform a Vector3d by the given Matrix, and project the resulting Vector4 back to a Vector3 - /// - /// The vector to transform - /// The desired transformation - /// The transformed vector - public static Vector3d TransformPerspective(Vector3d vec, Matrix4d mat) - { - Vector3d result; - TransformPerspective(ref vec, ref mat, out result); - return result; - } - - /// Transform a Vector3d by the given Matrix, and project the resulting Vector4d back to a Vector3d - /// The vector to transform - /// The desired transformation - /// The transformed vector - public static void TransformPerspective(ref Vector3d vec, ref Matrix4d mat, out Vector3d result) - { - Vector4d v = new Vector4d(vec, 1); - Vector4d.Transform(ref v, ref mat, out v); - result.X = v.X / v.W; - result.Y = v.Y / v.W; - result.Z = v.Z / v.W; - } - - #endregion - - #region CalculateAngle - - /// - /// Calculates the angle (in radians) between two vectors. - /// - /// The first vector. - /// The second vector. - /// Angle (in radians) between the vectors. - /// Note that the returned angle is never bigger than the constant Pi. - public static double CalculateAngle(Vector3d first, Vector3d second) - { - return System.Math.Acos((Vector3d.Dot(first, second)) / (first.Length * second.Length)); - } - - /// Calculates the angle (in radians) between two vectors. - /// The first vector. - /// The second vector. - /// Angle (in radians) between the vectors. - /// Note that the returned angle is never bigger than the constant Pi. - public static void CalculateAngle(ref Vector3d first, ref Vector3d second, out double result) - { - double temp; - Vector3d.Dot(ref first, ref second, out temp); - result = System.Math.Acos(temp / (first.Length * second.Length)); - } - - #endregion - - #endregion - - #region Swizzle - - #region 2-component - - /// - /// Gets or sets an OpenTK.Vector2d with the X and Y components of this instance. - /// - [XmlIgnore] - public Vector2d Xy { get { return new Vector2d(X, Y); } set { X = value.X; Y = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2d with the X and Z components of this instance. - /// - [XmlIgnore] - public Vector2d Xz { get { return new Vector2d(X, Z); } set { X = value.X; Z = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2d with the Y and X components of this instance. - /// - [XmlIgnore] - public Vector2d Yx { get { return new Vector2d(Y, X); } set { Y = value.X; X = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2d with the Y and Z components of this instance. - /// - [XmlIgnore] - public Vector2d Yz { get { return new Vector2d(Y, Z); } set { Y = value.X; Z = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2d with the Z and X components of this instance. - /// - [XmlIgnore] - public Vector2d Zx { get { return new Vector2d(Z, X); } set { Z = value.X; X = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2d with the Z and Y components of this instance. 
- /// - [XmlIgnore] - public Vector2d Zy { get { return new Vector2d(Z, Y); } set { Z = value.X; Y = value.Y; } } - - #endregion - - #region 3-component - - /// - /// Gets or sets an OpenTK.Vector3d with the X, Z, and Y components of this instance. - /// - [XmlIgnore] - public Vector3d Xzy { get { return new Vector3d(X, Z, Y); } set { X = value.X; Z = value.Y; Y = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3d with the Y, X, and Z components of this instance. - /// - [XmlIgnore] - public Vector3d Yxz { get { return new Vector3d(Y, X, Z); } set { Y = value.X; X = value.Y; Z = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3d with the Y, Z, and X components of this instance. - /// - [XmlIgnore] - public Vector3d Yzx { get { return new Vector3d(Y, Z, X); } set { Y = value.X; Z = value.Y; X = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3d with the Z, X, and Y components of this instance. - /// - [XmlIgnore] - public Vector3d Zxy { get { return new Vector3d(Z, X, Y); } set { Z = value.X; X = value.Y; Y = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3d with the Z, Y, and X components of this instance. - /// - [XmlIgnore] - public Vector3d Zyx { get { return new Vector3d(Z, Y, X); } set { Z = value.X; Y = value.Y; X = value.Z; } } - - #endregion - - #endregion - - #region Operators - - /// - /// Adds two instances. - /// - /// The first instance. - /// The second instance. - /// The result of the calculation. - public static Vector3d operator +(Vector3d left, Vector3d right) - { - left.X += right.X; - left.Y += right.Y; - left.Z += right.Z; - return left; - } - - /// - /// Subtracts two instances. - /// - /// The first instance. - /// The second instance. - /// The result of the calculation. - public static Vector3d operator -(Vector3d left, Vector3d right) - { - left.X -= right.X; - left.Y -= right.Y; - left.Z -= right.Z; - return left; - } - - /// - /// Negates an instance. - /// - /// The instance. - /// The result of the calculation. - public static Vector3d operator -(Vector3d vec) - { - vec.X = -vec.X; - vec.Y = -vec.Y; - vec.Z = -vec.Z; - return vec; - } - - /// - /// Multiplies an instance by a scalar. - /// - /// The instance. - /// The scalar. - /// The result of the calculation. - public static Vector3d operator *(Vector3d vec, double scale) - { - vec.X *= scale; - vec.Y *= scale; - vec.Z *= scale; - return vec; - } - - /// - /// Multiplies an instance by a scalar. - /// - /// The scalar. - /// The instance. - /// The result of the calculation. - public static Vector3d operator *(double scale, Vector3d vec) - { - vec.X *= scale; - vec.Y *= scale; - vec.Z *= scale; - return vec; - } - - /// - /// Component-wise multiplication between the specified instance by a scale vector. - /// - /// Left operand. - /// Right operand. - /// Result of multiplication. - public static Vector3d operator *(Vector3d vec, Vector3d scale) - { - vec.X *= scale.X; - vec.Y *= scale.Y; - vec.Z *= scale.Z; - return vec; - } - - /// - /// Divides an instance by a scalar. - /// - /// The instance. - /// The scalar. - /// The result of the calculation. - public static Vector3d operator /(Vector3d vec, double scale) - { - double mult = 1 / scale; - vec.X *= mult; - vec.Y *= mult; - vec.Z *= mult; - return vec; - } - - /// - /// Compares two instances for equality. - /// - /// The first instance. - /// The second instance. - /// True, if left equals right; false otherwise. 
- public static bool operator ==(Vector3d left, Vector3d right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for inequality. - /// - /// The first instance. - /// The second instance. - /// True, if left does not equa lright; false otherwise. - public static bool operator !=(Vector3d left, Vector3d right) - { - return !left.Equals(right); - } - - /// Converts OpenTK.Vector3 to OpenTK.Vector3d. - /// The Vector3 to convert. - /// The resulting Vector3d. - public static explicit operator Vector3d(Vector3 v3) - { - return new Vector3d(v3.X, v3.Y, v3.Z); - } - - /// Converts OpenTK.Vector3d to OpenTK.Vector3. - /// The Vector3d to convert. - /// The resulting Vector3. - public static explicit operator Vector3(Vector3d v3d) - { - return new Vector3((float)v3d.X, (float)v3d.Y, (float)v3d.Z); - } - - #endregion - - #region Overrides - - #region public override string ToString() - - private static string listSeparator = System.Globalization.CultureInfo.CurrentCulture.TextInfo.ListSeparator; - /// - /// Returns a System.String that represents the current Vector3. - /// - /// - public override string ToString() - { - return String.Format("({0}{3} {1}{3} {2})", X, Y, Z, listSeparator); - } - - #endregion - - #region public override int GetHashCode() - - /// - /// Returns the hashcode for this instance. - /// - /// A System.Int32 containing the unique hashcode for this instance. - public override int GetHashCode() - { - return X.GetHashCode() ^ Y.GetHashCode() ^ Z.GetHashCode(); - } - - #endregion - - #region public override bool Equals(object obj) - - /// - /// Indicates whether this instance and a specified object are equal. - /// - /// The object to compare to. - /// True if the instances are equal; false otherwise. - public override bool Equals(object obj) - { - if (!(obj is Vector3d)) - return false; - - return this.Equals((Vector3d)obj); - } - - #endregion - - #endregion - - #endregion - - #region IEquatable Members - - /// Indicates whether the current vector is equal to another vector. - /// A vector to compare with this vector. - /// true if the current vector is equal to the vector parameter; otherwise, false. - public bool Equals(Vector3d other) - { - return - X == other.X && - Y == other.Y && - Z == other.Z; - } - - #endregion - } -} \ No newline at end of file diff --git a/OpenTK/Math/Vector3h.cs b/OpenTK/Math/Vector3h.cs deleted file mode 100644 index 52945c6d..00000000 --- a/OpenTK/Math/Vector3h.cs +++ /dev/null @@ -1,475 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - */ -#endregion - -using System; -using System.IO; -using System.Runtime.InteropServices; -using System.Runtime.Serialization; -using System.Xml.Serialization; - -namespace OpenTK -{ - /// - /// 3-component Vector of the Half type. Occupies 6 Byte total. - /// - [Serializable, StructLayout(LayoutKind.Sequential)] - public struct Vector3h : ISerializable, IEquatable - { - #region Public Fields - - /// The X component of the Half3. - public Half X; - - /// The Y component of the Half3. - public Half Y; - - /// The Z component of the Half3. - public Half Z; - - #endregion Public Fields - - #region Constructors - - /// - /// Constructs a new instance. - /// - /// The value that will initialize this instance. - public Vector3h(Half value) - { - X = value; - Y = value; - Z = value; - } - - /// - /// Constructs a new instance. - /// - /// The value that will initialize this instance. - public Vector3h(Single value) - { - X = new Half(value); - Y = new Half(value); - Z = new Half(value); - } - - /// - /// The new Half3 instance will avoid conversion and copy directly from the Half parameters. - /// - /// An Half instance of a 16-bit half-precision floating-point number. - /// An Half instance of a 16-bit half-precision floating-point number. - /// An Half instance of a 16-bit half-precision floating-point number. - public Vector3h(Half x, Half y, Half z) - { - this.X = x; - this.Y = y; - this.Z = z; - } - - /// - /// The new Half3 instance will convert the 3 parameters into 16-bit half-precision floating-point. - /// - /// 32-bit single-precision floating-point number. - /// 32-bit single-precision floating-point number. - /// 32-bit single-precision floating-point number. - public Vector3h(Single x, Single y, Single z) - { - X = new Half(x); - Y = new Half(y); - Z = new Half(z); - } - - /// - /// The new Half3 instance will convert the 3 parameters into 16-bit half-precision floating-point. - /// - /// 32-bit single-precision floating-point number. - /// 32-bit single-precision floating-point number. - /// 32-bit single-precision floating-point number. - /// Enable checks that will throw if the conversion result is not meaningful. - public Vector3h(Single x, Single y, Single z, bool throwOnError) - { - X = new Half(x, throwOnError); - Y = new Half(y, throwOnError); - Z = new Half(z, throwOnError); - } - - /// - /// The new Half3 instance will convert the Vector3 into 16-bit half-precision floating-point. - /// - /// OpenTK.Vector3 - [CLSCompliant(false)] - public Vector3h(Vector3 v) - { - X = new Half(v.X); - Y = new Half(v.Y); - Z = new Half(v.Z); - } - - /// - /// The new Half3 instance will convert the Vector3 into 16-bit half-precision floating-point. - /// - /// OpenTK.Vector3 - /// Enable checks that will throw if the conversion result is not meaningful. - [CLSCompliant(false)] - public Vector3h(Vector3 v, bool throwOnError) - { - X = new Half(v.X, throwOnError); - Y = new Half(v.Y, throwOnError); - Z = new Half(v.Z, throwOnError); - } - - /// - /// The new Half3 instance will convert the Vector3 into 16-bit half-precision floating-point. - /// This is the fastest constructor. 
- /// - /// OpenTK.Vector3 - public Vector3h(ref Vector3 v) - { - X = new Half(v.X); - Y = new Half(v.Y); - Z = new Half(v.Z); - } - - /// - /// The new Half3 instance will convert the Vector3 into 16-bit half-precision floating-point. - /// - /// OpenTK.Vector3 - /// Enable checks that will throw if the conversion result is not meaningful. - [CLSCompliant(false)] - public Vector3h(ref Vector3 v, bool throwOnError) - { - X = new Half(v.X, throwOnError); - Y = new Half(v.Y, throwOnError); - Z = new Half(v.Z, throwOnError); - } - - /// - /// The new Half3 instance will convert the Vector3d into 16-bit half-precision floating-point. - /// - /// OpenTK.Vector3d - [CLSCompliant(false)] - public Vector3h(Vector3d v) - { - X = new Half(v.X); - Y = new Half(v.Y); - Z = new Half(v.Z); - } - - /// - /// The new Half3 instance will convert the Vector3d into 16-bit half-precision floating-point. - /// - /// OpenTK.Vector3d - /// Enable checks that will throw if the conversion result is not meaningful. - [CLSCompliant(false)] - public Vector3h(Vector3d v, bool throwOnError) - { - X = new Half(v.X, throwOnError); - Y = new Half(v.Y, throwOnError); - Z = new Half(v.Z, throwOnError); - } - - /// - /// The new Half3 instance will convert the Vector3d into 16-bit half-precision floating-point. - /// This is the faster constructor. - /// - /// OpenTK.Vector3d - [CLSCompliant(false)] - public Vector3h(ref Vector3d v) - { - X = new Half(v.X); - Y = new Half(v.Y); - Z = new Half(v.Z); - } - - /// - /// The new Half3 instance will convert the Vector3d into 16-bit half-precision floating-point. - /// - /// OpenTK.Vector3d - /// Enable checks that will throw if the conversion result is not meaningful. - [CLSCompliant(false)] - public Vector3h(ref Vector3d v, bool throwOnError) - { - X = new Half(v.X, throwOnError); - Y = new Half(v.Y, throwOnError); - Z = new Half(v.Z, throwOnError); - } - - #endregion Constructors - - #region Swizzle - - #region 2-component - - /// - /// Gets or sets an OpenTK.Vector2h with the X and Y components of this instance. - /// - [XmlIgnore] - public Vector2h Xy { get { return new Vector2h(X, Y); } set { X = value.X; Y = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2h with the X and Z components of this instance. - /// - [XmlIgnore] - public Vector2h Xz { get { return new Vector2h(X, Z); } set { X = value.X; Z = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2h with the Y and X components of this instance. - /// - [XmlIgnore] - public Vector2h Yx { get { return new Vector2h(Y, X); } set { Y = value.X; X = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2h with the Y and Z components of this instance. - /// - [XmlIgnore] - public Vector2h Yz { get { return new Vector2h(Y, Z); } set { Y = value.X; Z = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2h with the Z and X components of this instance. - /// - [XmlIgnore] - public Vector2h Zx { get { return new Vector2h(Z, X); } set { Z = value.X; X = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2h with the Z and Y components of this instance. - /// - [XmlIgnore] - public Vector2h Zy { get { return new Vector2h(Z, Y); } set { Z = value.X; Y = value.Y; } } - - #endregion - - #region 3-component - - /// - /// Gets or sets an OpenTK.Vector3h with the X, Z, and Y components of this instance. 
- /// - [XmlIgnore] - public Vector3h Xzy { get { return new Vector3h(X, Z, Y); } set { X = value.X; Z = value.Y; Y = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3h with the Y, X, and Z components of this instance. - /// - [XmlIgnore] - public Vector3h Yxz { get { return new Vector3h(Y, X, Z); } set { Y = value.X; X = value.Y; Z = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3h with the Y, Z, and X components of this instance. - /// - [XmlIgnore] - public Vector3h Yzx { get { return new Vector3h(Y, Z, X); } set { Y = value.X; Z = value.Y; X = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3h with the Z, X, and Y components of this instance. - /// - [XmlIgnore] - public Vector3h Zxy { get { return new Vector3h(Z, X, Y); } set { Z = value.X; X = value.Y; Y = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3h with the Z, Y, and X components of this instance. - /// - [XmlIgnore] - public Vector3h Zyx { get { return new Vector3h(Z, Y, X); } set { Z = value.X; Y = value.Y; X = value.Z; } } - - #endregion - - #endregion - - #region Half -> Single - - /// - /// Returns this Half3 instance's contents as Vector3. - /// - /// OpenTK.Vector3 - public Vector3 ToVector3() - { - return new Vector3(X, Y, Z); - } - - /// - /// Returns this Half3 instance's contents as Vector3d. - /// - public Vector3d ToVector3d() - { - return new Vector3d(X, Y, Z); - } - - #endregion Half -> Single - - #region Conversions - - /// Converts OpenTK.Vector3 to OpenTK.Half3. - /// The Vector3 to convert. - /// The resulting Half vector. - public static explicit operator Vector3h(Vector3 v3f) - { - return new Vector3h(v3f); - } - - /// Converts OpenTK.Vector3d to OpenTK.Half3. - /// The Vector3d to convert. - /// The resulting Half vector. - public static explicit operator Vector3h(Vector3d v3d) - { - return new Vector3h(v3d); - } - - /// Converts OpenTK.Half3 to OpenTK.Vector3. - /// The Half3 to convert. - /// The resulting Vector3. - public static explicit operator Vector3(Vector3h h3) - { - Vector3 result = new Vector3(); - result.X = h3.X.ToSingle(); - result.Y = h3.Y.ToSingle(); - result.Z = h3.Z.ToSingle(); - return result; - } - - /// Converts OpenTK.Half3 to OpenTK.Vector3d. - /// The Half3 to convert. - /// The resulting Vector3d. - public static explicit operator Vector3d(Vector3h h3) - { - Vector3d result = new Vector3d(); - result.X = h3.X.ToSingle(); - result.Y = h3.Y.ToSingle(); - result.Z = h3.Z.ToSingle(); - return result; - } - - #endregion Conversions - - #region Constants - - /// The size in bytes for an instance of the Half3 struct is 6. - public static readonly int SizeInBytes = 6; - - #endregion Constants - - #region ISerializable - - /// Constructor used by ISerializable to deserialize the object. - /// - /// - public Vector3h(SerializationInfo info, StreamingContext context) - { - this.X = (Half)info.GetValue("X", typeof(Half)); - this.Y = (Half)info.GetValue("Y", typeof(Half)); - this.Z = (Half)info.GetValue("Z", typeof(Half)); - } - - /// Used by ISerialize to serialize the object. - /// - /// - public void GetObjectData(SerializationInfo info, StreamingContext context) - { - info.AddValue("X", this.X); - info.AddValue("Y", this.Y); - info.AddValue("Z", this.Z); - } - - #endregion ISerializable - - #region Binary dump - - /// Updates the X,Y and Z components of this instance by reading from a Stream. - /// A BinaryReader instance associated with an open Stream. 
- public void FromBinaryStream(BinaryReader bin) - { - X.FromBinaryStream(bin); - Y.FromBinaryStream(bin); - Z.FromBinaryStream(bin); - } - - /// Writes the X,Y and Z components of this instance into a Stream. - /// A BinaryWriter instance associated with an open Stream. - public void ToBinaryStream(BinaryWriter bin) - { - X.ToBinaryStream(bin); - Y.ToBinaryStream(bin); - Z.ToBinaryStream(bin); - } - - #endregion Binary dump - - #region IEquatable Members - - /// Returns a value indicating whether this instance is equal to a specified OpenTK.Half3 vector. - /// OpenTK.Half3 to compare to this instance.. - /// True, if other is equal to this instance; false otherwise. - public bool Equals(Vector3h other) - { - return (this.X.Equals(other.X) && this.Y.Equals(other.Y) && this.Z.Equals(other.Z)); - } - - #endregion - - #region ToString() - - private static string listSeparator = System.Globalization.CultureInfo.CurrentCulture.TextInfo.ListSeparator; - /// Returns a string that contains this Half3's numbers in human-legible form. - public override string ToString() - { - return String.Format("({0}{3} {1}{3} {2})", X.ToString(), Y.ToString(), Z.ToString(), listSeparator); - } - - #endregion ToString() - - #region BitConverter - - /// Returns the Half3 as an array of bytes. - /// The Half3 to convert. - /// The input as byte array. - public static byte[] GetBytes(Vector3h h) - { - byte[] result = new byte[SizeInBytes]; - - byte[] temp = Half.GetBytes(h.X); - result[0] = temp[0]; - result[1] = temp[1]; - temp = Half.GetBytes(h.Y); - result[2] = temp[0]; - result[3] = temp[1]; - temp = Half.GetBytes(h.Z); - result[4] = temp[0]; - result[5] = temp[1]; - - return result; - } - - /// Converts an array of bytes into Half3. - /// A Half3 in it's byte[] representation. - /// The starting position within value. - /// A new Half3 instance. - public static Vector3h FromBytes(byte[] value, int startIndex) - { - Vector3h h3 = new Vector3h(); - h3.X = Half.FromBytes(value, startIndex); - h3.Y = Half.FromBytes(value, startIndex + 2); - h3.Z = Half.FromBytes(value, startIndex + 4); - return h3; - } - - #endregion BitConverter - } -} diff --git a/OpenTK/Math/Vector4.cs b/OpenTK/Math/Vector4.cs deleted file mode 100644 index 98f374fc..00000000 --- a/OpenTK/Math/Vector4.cs +++ /dev/null @@ -1,1639 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
- */ -#endregion - -using System; -using System.Runtime.InteropServices; -using System.Xml.Serialization; -namespace OpenTK -{ - /// Represents a 4D vector using four single-precision floating-point numbers. - /// - /// The Vector4 structure is suitable for interoperation with unmanaged code requiring four consecutive floats. - /// - [Serializable] - [StructLayout(LayoutKind.Sequential)] - public struct Vector4 : IEquatable - { - #region Fields - - /// - /// The X component of the Vector4. - /// - public float X; - - /// - /// The Y component of the Vector4. - /// - public float Y; - - /// - /// The Z component of the Vector4. - /// - public float Z; - - /// - /// The W component of the Vector4. - /// - public float W; - - /// - /// Defines a unit-length Vector4 that points towards the X-axis. - /// - public static readonly Vector4 UnitX = new Vector4(1, 0, 0, 0); - - /// - /// Defines a unit-length Vector4 that points towards the Y-axis. - /// - public static readonly Vector4 UnitY = new Vector4(0, 1, 0, 0); - - /// - /// Defines a unit-length Vector4 that points towards the Z-axis. - /// - public static readonly Vector4 UnitZ = new Vector4(0, 0, 1, 0); - - /// - /// Defines a unit-length Vector4 that points towards the W-axis. - /// - public static readonly Vector4 UnitW = new Vector4(0, 0, 0, 1); - - /// - /// Defines a zero-length Vector4. - /// - public static readonly Vector4 Zero = new Vector4(0, 0, 0, 0); - - /// - /// Defines an instance with all components set to 1. - /// - public static readonly Vector4 One = new Vector4(1, 1, 1, 1); - - /// - /// Defines the size of the Vector4 struct in bytes. - /// - public static readonly int SizeInBytes = Marshal.SizeOf(new Vector4()); - - #endregion - - #region Constructors - - /// - /// Constructs a new instance. - /// - /// The value that will initialize this instance. - public Vector4(float value) - { - X = value; - Y = value; - Z = value; - W = value; - } - - /// - /// Constructs a new Vector4. - /// - /// The x component of the Vector4. - /// The y component of the Vector4. - /// The z component of the Vector4. - /// The w component of the Vector4. - public Vector4(float x, float y, float z, float w) - { - X = x; - Y = y; - Z = z; - W = w; - } - - /// - /// Constructs a new Vector4 from the given Vector2. - /// - /// The Vector2 to copy components from. - public Vector4(Vector2 v) - { - X = v.X; - Y = v.Y; - Z = 0.0f; - W = 0.0f; - } - - /// - /// Constructs a new Vector4 from the given Vector3. - /// The w component is initialized to 0. - /// - /// The Vector3 to copy components from. - /// - public Vector4(Vector3 v) - { - X = v.X; - Y = v.Y; - Z = v.Z; - W = 0.0f; - } - - /// - /// Constructs a new Vector4 from the specified Vector3 and w component. - /// - /// The Vector3 to copy components from. - /// The w component of the new Vector4. - public Vector4(Vector3 v, float w) - { - X = v.X; - Y = v.Y; - Z = v.Z; - W = w; - } - - /// - /// Constructs a new Vector4 from the given Vector4. - /// - /// The Vector4 to copy components from. - public Vector4(Vector4 v) - { - X = v.X; - Y = v.Y; - Z = v.Z; - W = v.W; - } - - #endregion - - #region Public Members - - /// - /// Gets or sets the value at the index of the Vector. 
- /// - public float this[int index] { - get{ - if(index == 0) return X; - else if(index == 1) return Y; - else if(index == 2) return Z; - else if(index == 3) return W; - throw new IndexOutOfRangeException("You tried to access this vector at index: " + index); - } set{ - if(index == 0) X = value; - else if(index == 1) Y = value; - else if(index == 2) Z = value; - else if(index == 3) W = value; - else throw new IndexOutOfRangeException("You tried to set this vector at index: " + index); - } - } - - #region Instance - - #region public void Add() - - /// Add the Vector passed as parameter to this instance. - /// Right operand. This parameter is only read from. - [CLSCompliant(false)] - [Obsolete("Use static Add() method instead.")] - public void Add(Vector4 right) - { - this.X += right.X; - this.Y += right.Y; - this.Z += right.Z; - this.W += right.W; - } - - /// Add the Vector passed as parameter to this instance. - /// Right operand. This parameter is only read from. - [CLSCompliant(false)] - [Obsolete("Use static Add() method instead.")] - public void Add(ref Vector4 right) - { - this.X += right.X; - this.Y += right.Y; - this.Z += right.Z; - this.W += right.W; - } - - #endregion public void Add() - - #region public void Sub() - - /// Subtract the Vector passed as parameter from this instance. - /// Right operand. This parameter is only read from. - [CLSCompliant(false)] - [Obsolete("Use static Subtract() method instead.")] - public void Sub(Vector4 right) - { - this.X -= right.X; - this.Y -= right.Y; - this.Z -= right.Z; - this.W -= right.W; - } - - /// Subtract the Vector passed as parameter from this instance. - /// Right operand. This parameter is only read from. - [CLSCompliant(false)] - [Obsolete("Use static Subtract() method instead.")] - public void Sub(ref Vector4 right) - { - this.X -= right.X; - this.Y -= right.Y; - this.Z -= right.Z; - this.W -= right.W; - } - - #endregion public void Sub() - - #region public void Mult() - - /// Multiply this instance by a scalar. - /// Scalar operand. - [Obsolete("Use static Multiply() method instead.")] - public void Mult(float f) - { - this.X *= f; - this.Y *= f; - this.Z *= f; - this.W *= f; - } - - #endregion public void Mult() - - #region public void Div() - - /// Divide this instance by a scalar. - /// Scalar operand. - [Obsolete("Use static Divide() method instead.")] - public void Div(float f) - { - float mult = 1.0f / f; - this.X *= mult; - this.Y *= mult; - this.Z *= mult; - this.W *= mult; - } - - #endregion public void Div() - - #region public float Length - - /// - /// Gets the length (magnitude) of the vector. - /// - /// - /// - public float Length - { - get - { - return (float)System.Math.Sqrt(X * X + Y * Y + Z * Z + W * W); - } - } - - #endregion - - #region public float LengthFast - - /// - /// Gets an approximation of the vector length (magnitude). - /// - /// - /// This property uses an approximation of the square root function to calculate vector magnitude, with - /// an upper error bound of 0.001. - /// - /// - /// - public float LengthFast - { - get - { - return 1.0f / MathHelper.InverseSqrtFast(X * X + Y * Y + Z * Z + W * W); - } - } - - #endregion - - #region public float LengthSquared - - /// - /// Gets the square of the vector length (magnitude). - /// - /// - /// This property avoids the costly square root operation required by the Length property. This makes it more suitable - /// for comparisons. 
- /// - /// - /// - public float LengthSquared - { - get - { - return X * X + Y * Y + Z * Z + W * W; - } - } - - #endregion - - /// - /// Returns a copy of the Vector4 scaled to unit length. - /// - public Vector4 Normalized() - { - Vector4 v = this; - v.Normalize(); - return v; - } - - #region public void Normalize() - - /// - /// Scales the Vector4 to unit length. - /// - public void Normalize() - { - float scale = 1.0f / this.Length; - X *= scale; - Y *= scale; - Z *= scale; - W *= scale; - } - - #endregion - - #region public void NormalizeFast() - - /// - /// Scales the Vector4 to approximately unit length. - /// - public void NormalizeFast() - { - float scale = MathHelper.InverseSqrtFast(X * X + Y * Y + Z * Z + W * W); - X *= scale; - Y *= scale; - Z *= scale; - W *= scale; - } - - #endregion - - #region public void Scale() - - /// - /// Scales the current Vector4 by the given amounts. - /// - /// The scale of the X component. - /// The scale of the Y component. - /// The scale of the Z component. - /// The scale of the Z component. - [Obsolete("Use static Multiply() method instead.")] - public void Scale(float sx, float sy, float sz, float sw) - { - this.X = X * sx; - this.Y = Y * sy; - this.Z = Z * sz; - this.W = W * sw; - } - - /// Scales this instance by the given parameter. - /// The scaling of the individual components. - [CLSCompliant(false)] - [Obsolete("Use static Multiply() method instead.")] - public void Scale(Vector4 scale) - { - this.X *= scale.X; - this.Y *= scale.Y; - this.Z *= scale.Z; - this.W *= scale.W; - } - - /// Scales this instance by the given parameter. - /// The scaling of the individual components. - [CLSCompliant(false)] - [Obsolete("Use static Multiply() method instead.")] - public void Scale(ref Vector4 scale) - { - this.X *= scale.X; - this.Y *= scale.Y; - this.Z *= scale.Z; - this.W *= scale.W; - } - - #endregion public void Scale() - - #endregion - - #region Static - - #region Obsolete - - #region Sub - - /// - /// Subtract one Vector from another - /// - /// First operand - /// Second operand - /// Result of subtraction - public static Vector4 Sub(Vector4 a, Vector4 b) - { - a.X -= b.X; - a.Y -= b.Y; - a.Z -= b.Z; - a.W -= b.W; - return a; - } - - /// - /// Subtract one Vector from another - /// - /// First operand - /// Second operand - /// Result of subtraction - public static void Sub(ref Vector4 a, ref Vector4 b, out Vector4 result) - { - result.X = a.X - b.X; - result.Y = a.Y - b.Y; - result.Z = a.Z - b.Z; - result.W = a.W - b.W; - } - - #endregion - - #region Mult - - /// - /// Multiply a vector and a scalar - /// - /// Vector operand - /// Scalar operand - /// Result of the multiplication - public static Vector4 Mult(Vector4 a, float f) - { - a.X *= f; - a.Y *= f; - a.Z *= f; - a.W *= f; - return a; - } - - /// - /// Multiply a vector and a scalar - /// - /// Vector operand - /// Scalar operand - /// Result of the multiplication - public static void Mult(ref Vector4 a, float f, out Vector4 result) - { - result.X = a.X * f; - result.Y = a.Y * f; - result.Z = a.Z * f; - result.W = a.W * f; - } - - #endregion - - #region Div - - /// - /// Divide a vector by a scalar - /// - /// Vector operand - /// Scalar operand - /// Result of the division - public static Vector4 Div(Vector4 a, float f) - { - float mult = 1.0f / f; - a.X *= mult; - a.Y *= mult; - a.Z *= mult; - a.W *= mult; - return a; - } - - /// - /// Divide a vector by a scalar - /// - /// Vector operand - /// Scalar operand - /// Result of the division - public static void Div(ref Vector4 
a, float f, out Vector4 result) - { - float mult = 1.0f / f; - result.X = a.X * mult; - result.Y = a.Y * mult; - result.Z = a.Z * mult; - result.W = a.W * mult; - } - - #endregion - - #endregion - - #region Add - - /// - /// Adds two vectors. - /// - /// Left operand. - /// Right operand. - /// Result of operation. - public static Vector4 Add(Vector4 a, Vector4 b) - { - Add(ref a, ref b, out a); - return a; - } - - /// - /// Adds two vectors. - /// - /// Left operand. - /// Right operand. - /// Result of operation. - public static void Add(ref Vector4 a, ref Vector4 b, out Vector4 result) - { - result = new Vector4(a.X + b.X, a.Y + b.Y, a.Z + b.Z, a.W + b.W); - } - - #endregion - - #region Subtract - - /// - /// Subtract one Vector from another - /// - /// First operand - /// Second operand - /// Result of subtraction - public static Vector4 Subtract(Vector4 a, Vector4 b) - { - Subtract(ref a, ref b, out a); - return a; - } - - /// - /// Subtract one Vector from another - /// - /// First operand - /// Second operand - /// Result of subtraction - public static void Subtract(ref Vector4 a, ref Vector4 b, out Vector4 result) - { - result = new Vector4(a.X - b.X, a.Y - b.Y, a.Z - b.Z, a.W - b.W); - } - - #endregion - - #region Multiply - - /// - /// Multiplies a vector by a scalar. - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static Vector4 Multiply(Vector4 vector, float scale) - { - Multiply(ref vector, scale, out vector); - return vector; - } - - /// - /// Multiplies a vector by a scalar. - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static void Multiply(ref Vector4 vector, float scale, out Vector4 result) - { - result = new Vector4(vector.X * scale, vector.Y * scale, vector.Z * scale, vector.W * scale); - } - - /// - /// Multiplies a vector by the components a vector (scale). - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static Vector4 Multiply(Vector4 vector, Vector4 scale) - { - Multiply(ref vector, ref scale, out vector); - return vector; - } - - /// - /// Multiplies a vector by the components of a vector (scale). - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static void Multiply(ref Vector4 vector, ref Vector4 scale, out Vector4 result) - { - result = new Vector4(vector.X * scale.X, vector.Y * scale.Y, vector.Z * scale.Z, vector.W * scale.W); - } - - #endregion - - #region Divide - - /// - /// Divides a vector by a scalar. - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static Vector4 Divide(Vector4 vector, float scale) - { - Divide(ref vector, scale, out vector); - return vector; - } - - /// - /// Divides a vector by a scalar. - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static void Divide(ref Vector4 vector, float scale, out Vector4 result) - { - Multiply(ref vector, 1 / scale, out result); - } - - /// - /// Divides a vector by the components of a vector (scale). - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static Vector4 Divide(Vector4 vector, Vector4 scale) - { - Divide(ref vector, ref scale, out vector); - return vector; - } - - /// - /// Divide a vector by the components of a vector (scale). - /// - /// Left operand. - /// Right operand. - /// Result of the operation. 
- public static void Divide(ref Vector4 vector, ref Vector4 scale, out Vector4 result) - { - result = new Vector4(vector.X / scale.X, vector.Y / scale.Y, vector.Z / scale.Z, vector.W / scale.W); - } - - #endregion - - #region Min - - /// - /// Calculate the component-wise minimum of two vectors - /// - /// First operand - /// Second operand - /// The component-wise minimum - public static Vector4 Min(Vector4 a, Vector4 b) - { - a.X = a.X < b.X ? a.X : b.X; - a.Y = a.Y < b.Y ? a.Y : b.Y; - a.Z = a.Z < b.Z ? a.Z : b.Z; - a.W = a.W < b.W ? a.W : b.W; - return a; - } - - /// - /// Calculate the component-wise minimum of two vectors - /// - /// First operand - /// Second operand - /// The component-wise minimum - public static void Min(ref Vector4 a, ref Vector4 b, out Vector4 result) - { - result.X = a.X < b.X ? a.X : b.X; - result.Y = a.Y < b.Y ? a.Y : b.Y; - result.Z = a.Z < b.Z ? a.Z : b.Z; - result.W = a.W < b.W ? a.W : b.W; - } - - #endregion - - #region Max - - /// - /// Calculate the component-wise maximum of two vectors - /// - /// First operand - /// Second operand - /// The component-wise maximum - public static Vector4 Max(Vector4 a, Vector4 b) - { - a.X = a.X > b.X ? a.X : b.X; - a.Y = a.Y > b.Y ? a.Y : b.Y; - a.Z = a.Z > b.Z ? a.Z : b.Z; - a.W = a.W > b.W ? a.W : b.W; - return a; - } - - /// - /// Calculate the component-wise maximum of two vectors - /// - /// First operand - /// Second operand - /// The component-wise maximum - public static void Max(ref Vector4 a, ref Vector4 b, out Vector4 result) - { - result.X = a.X > b.X ? a.X : b.X; - result.Y = a.Y > b.Y ? a.Y : b.Y; - result.Z = a.Z > b.Z ? a.Z : b.Z; - result.W = a.W > b.W ? a.W : b.W; - } - - #endregion - - #region Clamp - - /// - /// Clamp a vector to the given minimum and maximum vectors - /// - /// Input vector - /// Minimum vector - /// Maximum vector - /// The clamped vector - public static Vector4 Clamp(Vector4 vec, Vector4 min, Vector4 max) - { - vec.X = vec.X < min.X ? min.X : vec.X > max.X ? max.X : vec.X; - vec.Y = vec.Y < min.Y ? min.Y : vec.Y > max.Y ? max.Y : vec.Y; - vec.Z = vec.X < min.Z ? min.Z : vec.Z > max.Z ? max.Z : vec.Z; - vec.W = vec.Y < min.W ? min.W : vec.W > max.W ? max.W : vec.W; - return vec; - } - - /// - /// Clamp a vector to the given minimum and maximum vectors - /// - /// Input vector - /// Minimum vector - /// Maximum vector - /// The clamped vector - public static void Clamp(ref Vector4 vec, ref Vector4 min, ref Vector4 max, out Vector4 result) - { - result.X = vec.X < min.X ? min.X : vec.X > max.X ? max.X : vec.X; - result.Y = vec.Y < min.Y ? min.Y : vec.Y > max.Y ? max.Y : vec.Y; - result.Z = vec.X < min.Z ? min.Z : vec.Z > max.Z ? max.Z : vec.Z; - result.W = vec.Y < min.W ? min.W : vec.W > max.W ? 
max.W : vec.W; - } - - #endregion - - #region Normalize - - /// - /// Scale a vector to unit length - /// - /// The input vector - /// The normalized vector - public static Vector4 Normalize(Vector4 vec) - { - float scale = 1.0f / vec.Length; - vec.X *= scale; - vec.Y *= scale; - vec.Z *= scale; - vec.W *= scale; - return vec; - } - - /// - /// Scale a vector to unit length - /// - /// The input vector - /// The normalized vector - public static void Normalize(ref Vector4 vec, out Vector4 result) - { - float scale = 1.0f / vec.Length; - result.X = vec.X * scale; - result.Y = vec.Y * scale; - result.Z = vec.Z * scale; - result.W = vec.W * scale; - } - - #endregion - - #region NormalizeFast - - /// - /// Scale a vector to approximately unit length - /// - /// The input vector - /// The normalized vector - public static Vector4 NormalizeFast(Vector4 vec) - { - float scale = MathHelper.InverseSqrtFast(vec.X * vec.X + vec.Y * vec.Y + vec.Z * vec.Z + vec.W * vec.W); - vec.X *= scale; - vec.Y *= scale; - vec.Z *= scale; - vec.W *= scale; - return vec; - } - - /// - /// Scale a vector to approximately unit length - /// - /// The input vector - /// The normalized vector - public static void NormalizeFast(ref Vector4 vec, out Vector4 result) - { - float scale = MathHelper.InverseSqrtFast(vec.X * vec.X + vec.Y * vec.Y + vec.Z * vec.Z + vec.W * vec.W); - result.X = vec.X * scale; - result.Y = vec.Y * scale; - result.Z = vec.Z * scale; - result.W = vec.W * scale; - } - - #endregion - - #region Dot - - /// - /// Calculate the dot product of two vectors - /// - /// First operand - /// Second operand - /// The dot product of the two inputs - public static float Dot(Vector4 left, Vector4 right) - { - return left.X * right.X + left.Y * right.Y + left.Z * right.Z + left.W * right.W; - } - - /// - /// Calculate the dot product of two vectors - /// - /// First operand - /// Second operand - /// The dot product of the two inputs - public static void Dot(ref Vector4 left, ref Vector4 right, out float result) - { - result = left.X * right.X + left.Y * right.Y + left.Z * right.Z + left.W * right.W; - } - - #endregion - - #region Lerp - - /// - /// Returns a new Vector that is the linear blend of the 2 given Vectors - /// - /// First input vector - /// Second input vector - /// The blend factor. a when blend=0, b when blend=1. - /// a when blend=0, b when blend=1, and a linear combination otherwise - public static Vector4 Lerp(Vector4 a, Vector4 b, float blend) - { - a.X = blend * (b.X - a.X) + a.X; - a.Y = blend * (b.Y - a.Y) + a.Y; - a.Z = blend * (b.Z - a.Z) + a.Z; - a.W = blend * (b.W - a.W) + a.W; - return a; - } - - /// - /// Returns a new Vector that is the linear blend of the 2 given Vectors - /// - /// First input vector - /// Second input vector - /// The blend factor. a when blend=0, b when blend=1. 
- /// a when blend=0, b when blend=1, and a linear combination otherwise - public static void Lerp(ref Vector4 a, ref Vector4 b, float blend, out Vector4 result) - { - result.X = blend * (b.X - a.X) + a.X; - result.Y = blend * (b.Y - a.Y) + a.Y; - result.Z = blend * (b.Z - a.Z) + a.Z; - result.W = blend * (b.W - a.W) + a.W; - } - - #endregion - - #region Barycentric - - /// - /// Interpolate 3 Vectors using Barycentric coordinates - /// - /// First input Vector - /// Second input Vector - /// Third input Vector - /// First Barycentric Coordinate - /// Second Barycentric Coordinate - /// a when u=v=0, b when u=1,v=0, c when u=0,v=1, and a linear combination of a,b,c otherwise - public static Vector4 BaryCentric(Vector4 a, Vector4 b, Vector4 c, float u, float v) - { - return a + u * (b - a) + v * (c - a); - } - - /// Interpolate 3 Vectors using Barycentric coordinates - /// First input Vector. - /// Second input Vector. - /// Third input Vector. - /// First Barycentric Coordinate. - /// Second Barycentric Coordinate. - /// Output Vector. a when u=v=0, b when u=1,v=0, c when u=0,v=1, and a linear combination of a,b,c otherwise - public static void BaryCentric(ref Vector4 a, ref Vector4 b, ref Vector4 c, float u, float v, out Vector4 result) - { - result = a; // copy - - Vector4 temp = b; // copy - Subtract(ref temp, ref a, out temp); - Multiply(ref temp, u, out temp); - Add(ref result, ref temp, out result); - - temp = c; // copy - Subtract(ref temp, ref a, out temp); - Multiply(ref temp, v, out temp); - Add(ref result, ref temp, out result); - } - - #endregion - - #region Transform - - /// Transform a Vector by the given Matrix - /// The vector to transform - /// The desired transformation - /// The transformed vector - public static Vector4 Transform(Vector4 vec, Matrix4 mat) - { - Vector4 result; - Transform(ref vec, ref mat, out result); - return result; - } - - /// Transform a Vector by the given Matrix - /// The vector to transform - /// The desired transformation - /// The transformed vector - public static void Transform(ref Vector4 vec, ref Matrix4 mat, out Vector4 result) - { - result = new Vector4( - vec.X * mat.Row0.X + vec.Y * mat.Row1.X + vec.Z * mat.Row2.X + vec.W * mat.Row3.X, - vec.X * mat.Row0.Y + vec.Y * mat.Row1.Y + vec.Z * mat.Row2.Y + vec.W * mat.Row3.Y, - vec.X * mat.Row0.Z + vec.Y * mat.Row1.Z + vec.Z * mat.Row2.Z + vec.W * mat.Row3.Z, - vec.X * mat.Row0.W + vec.Y * mat.Row1.W + vec.Z * mat.Row2.W + vec.W * mat.Row3.W); - } - - /// - /// Transforms a vector by a quaternion rotation. - /// - /// The vector to transform. - /// The quaternion to rotate the vector by. - /// The result of the operation. - public static Vector4 Transform(Vector4 vec, Quaternion quat) - { - Vector4 result; - Transform(ref vec, ref quat, out result); - return result; - } - - /// - /// Transforms a vector by a quaternion rotation. - /// - /// The vector to transform. - /// The quaternion to rotate the vector by. - /// The result of the operation. - public static void Transform(ref Vector4 vec, ref Quaternion quat, out Vector4 result) - { - Quaternion v = new Quaternion(vec.X, vec.Y, vec.Z, vec.W), i, t; - Quaternion.Invert(ref quat, out i); - Quaternion.Multiply(ref quat, ref v, out t); - Quaternion.Multiply(ref t, ref i, out v); - - result = new Vector4(v.X, v.Y, v.Z, v.W); - } - - #endregion - - #endregion - - #region Swizzle - - #region 2-component - - /// - /// Gets or sets an OpenTK.Vector2 with the X and Y components of this instance. 
- /// - [XmlIgnore] - public Vector2 Xy { get { return new Vector2(X, Y); } set { X = value.X; Y = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2 with the X and Z components of this instance. - /// - [XmlIgnore] - public Vector2 Xz { get { return new Vector2(X, Z); } set { X = value.X; Z = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2 with the X and W components of this instance. - /// - [XmlIgnore] - public Vector2 Xw { get { return new Vector2(X, W); } set { X = value.X; W = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2 with the Y and X components of this instance. - /// - [XmlIgnore] - public Vector2 Yx { get { return new Vector2(Y, X); } set { Y = value.X; X = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2 with the Y and Z components of this instance. - /// - [XmlIgnore] - public Vector2 Yz { get { return new Vector2(Y, Z); } set { Y = value.X; Z = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2 with the Y and W components of this instance. - /// - [XmlIgnore] - public Vector2 Yw { get { return new Vector2(Y, W); } set { Y = value.X; W = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2 with the Z and X components of this instance. - /// - [XmlIgnore] - public Vector2 Zx { get { return new Vector2(Z, X); } set { Z = value.X; X = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2 with the Z and Y components of this instance. - /// - [XmlIgnore] - public Vector2 Zy { get { return new Vector2(Z, Y); } set { Z = value.X; Y = value.Y; } } - - /// - /// Gets an OpenTK.Vector2 with the Z and W components of this instance. - /// - [XmlIgnore] - public Vector2 Zw { get { return new Vector2(Z, W); } set { Z = value.X; W = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2 with the W and X components of this instance. - /// - [XmlIgnore] - public Vector2 Wx { get { return new Vector2(W, X); } set { W = value.X; X = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2 with the W and Y components of this instance. - /// - [XmlIgnore] - public Vector2 Wy { get { return new Vector2(W, Y); } set { W = value.X; Y = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2 with the W and Z components of this instance. - /// - [XmlIgnore] - public Vector2 Wz { get { return new Vector2(W, Z); } set { W = value.X; Z = value.Y; } } - - #endregion - - #region 3-component - - /// - /// Gets or sets an OpenTK.Vector3 with the X, Y, and Z components of this instance. - /// - [XmlIgnore] - public Vector3 Xyz { get { return new Vector3(X, Y, Z); } set { X = value.X; Y = value.Y; Z = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3 with the X, Y, and Z components of this instance. - /// - [XmlIgnore] - public Vector3 Xyw { get { return new Vector3(X, Y, W); } set { X = value.X; Y = value.Y; W = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3 with the X, Z, and Y components of this instance. - /// - [XmlIgnore] - public Vector3 Xzy { get { return new Vector3(X, Z, Y); } set { X = value.X; Z = value.Y; Y = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3 with the X, Z, and W components of this instance. - /// - [XmlIgnore] - public Vector3 Xzw { get { return new Vector3(X, Z, W); } set { X = value.X; Z = value.Y; W = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3 with the X, W, and Y components of this instance. 
- /// - [XmlIgnore] - public Vector3 Xwy { get { return new Vector3(X, W, Y); } set { X = value.X; W = value.Y; Y = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3 with the X, W, and Z components of this instance. - /// - [XmlIgnore] - public Vector3 Xwz { get { return new Vector3(X, W, Z); } set { X = value.X; W = value.Y; Z = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3 with the Y, X, and Z components of this instance. - /// - [XmlIgnore] - public Vector3 Yxz { get { return new Vector3(Y, X, Z); } set { Y = value.X; X = value.Y; Z = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3 with the Y, X, and W components of this instance. - /// - [XmlIgnore] - public Vector3 Yxw { get { return new Vector3(Y, X, W); } set { Y = value.X; X = value.Y; W = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3 with the Y, Z, and X components of this instance. - /// - [XmlIgnore] - public Vector3 Yzx { get { return new Vector3(Y, Z, X); } set { Y = value.X; Z = value.Y; X = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3 with the Y, Z, and W components of this instance. - /// - [XmlIgnore] - public Vector3 Yzw { get { return new Vector3(Y, Z, W); } set { Y = value.X; Z = value.Y; W = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3 with the Y, W, and X components of this instance. - /// - [XmlIgnore] - public Vector3 Ywx { get { return new Vector3(Y, W, X); } set { Y = value.X; W = value.Y; X = value.Z; } } - - /// - /// Gets an OpenTK.Vector3 with the Y, W, and Z components of this instance. - /// - [XmlIgnore] - public Vector3 Ywz { get { return new Vector3(Y, W, Z); } set { Y = value.X; W = value.Y; Z = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3 with the Z, X, and Y components of this instance. - /// - [XmlIgnore] - public Vector3 Zxy { get { return new Vector3(Z, X, Y); } set { Z = value.X; X = value.Y; Y = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3 with the Z, X, and W components of this instance. - /// - [XmlIgnore] - public Vector3 Zxw { get { return new Vector3(Z, X, W); } set { Z = value.X; X = value.Y; W = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3 with the Z, Y, and X components of this instance. - /// - [XmlIgnore] - public Vector3 Zyx { get { return new Vector3(Z, Y, X); } set { Z = value.X; Y = value.Y; X = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3 with the Z, Y, and W components of this instance. - /// - [XmlIgnore] - public Vector3 Zyw { get { return new Vector3(Z, Y, W); } set { Z = value.X; Y = value.Y; W = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3 with the Z, W, and X components of this instance. - /// - [XmlIgnore] - public Vector3 Zwx { get { return new Vector3(Z, W, X); } set { Z = value.X; W = value.Y; X = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3 with the Z, W, and Y components of this instance. - /// - [XmlIgnore] - public Vector3 Zwy { get { return new Vector3(Z, W, Y); } set { Z = value.X; W = value.Y; Y = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3 with the W, X, and Y components of this instance. - /// - [XmlIgnore] - public Vector3 Wxy { get { return new Vector3(W, X, Y); } set { W = value.X; X = value.Y; Y = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3 with the W, X, and Z components of this instance. 
- /// - [XmlIgnore] - public Vector3 Wxz { get { return new Vector3(W, X, Z); } set { W = value.X; X = value.Y; Z = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3 with the W, Y, and X components of this instance. - /// - [XmlIgnore] - public Vector3 Wyx { get { return new Vector3(W, Y, X); } set { W = value.X; Y = value.Y; X = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3 with the W, Y, and Z components of this instance. - /// - [XmlIgnore] - public Vector3 Wyz { get { return new Vector3(W, Y, Z); } set { W = value.X; Y = value.Y; Z = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3 with the W, Z, and X components of this instance. - /// - [XmlIgnore] - public Vector3 Wzx { get { return new Vector3(W, Z, X); } set { W = value.X; Z = value.Y; X = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3 with the W, Z, and Y components of this instance. - /// - [XmlIgnore] - public Vector3 Wzy { get { return new Vector3(W, Z, Y); } set { W = value.X; Z = value.Y; Y = value.Z; } } - - #endregion - - #region 4-component - - /// - /// Gets or sets an OpenTK.Vector4 with the X, Y, W, and Z components of this instance. - /// - [XmlIgnore] - public Vector4 Xywz { get { return new Vector4(X, Y, W, Z); } set { X = value.X; Y = value.Y; W = value.Z; Z = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4 with the X, Z, Y, and W components of this instance. - /// - [XmlIgnore] - public Vector4 Xzyw { get { return new Vector4(X, Z, Y, W); } set { X = value.X; Z = value.Y; Y = value.Z; W = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4 with the X, Z, W, and Y components of this instance. - /// - [XmlIgnore] - public Vector4 Xzwy { get { return new Vector4(X, Z, W, Y); } set { X = value.X; Z = value.Y; W = value.Z; Y = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4 with the X, W, Y, and Z components of this instance. - /// - [XmlIgnore] - public Vector4 Xwyz { get { return new Vector4(X, W, Y, Z); } set { X = value.X; W = value.Y; Y = value.Z; Z = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4 with the X, W, Z, and Y components of this instance. - /// - [XmlIgnore] - public Vector4 Xwzy { get { return new Vector4(X, W, Z, Y); } set { X = value.X; W = value.Y; Z = value.Z; Y = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4 with the Y, X, Z, and W components of this instance. - /// - [XmlIgnore] - public Vector4 Yxzw { get { return new Vector4(Y, X, Z, W); } set { Y = value.X; X = value.Y; Z = value.Z; W = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4 with the Y, X, W, and Z components of this instance. - /// - [XmlIgnore] - public Vector4 Yxwz { get { return new Vector4(Y, X, W, Z); } set { Y = value.X; X = value.Y; W = value.Z; Z = value.W; } } - - /// - /// Gets an OpenTK.Vector4 with the Y, Y, Z, and W components of this instance. - /// - [XmlIgnore] - public Vector4 Yyzw { get { return new Vector4(Y, Y, Z, W); } set { X = value.X; Y = value.Y; Z = value.Z; W = value.W; } } - - /// - /// Gets an OpenTK.Vector4 with the Y, Y, W, and Z components of this instance. - /// - [XmlIgnore] - public Vector4 Yywz { get { return new Vector4(Y, Y, W, Z); } set { X = value.X; Y = value.Y; W = value.Z; Z = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4 with the Y, Z, X, and W components of this instance. 
- /// - [XmlIgnore] - public Vector4 Yzxw { get { return new Vector4(Y, Z, X, W); } set { Y = value.X; Z = value.Y; X = value.Z; W = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4 with the Y, Z, W, and X components of this instance. - /// - [XmlIgnore] - public Vector4 Yzwx { get { return new Vector4(Y, Z, W, X); } set { Y = value.X; Z = value.Y; W = value.Z; X = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4 with the Y, W, X, and Z components of this instance. - /// - [XmlIgnore] - public Vector4 Ywxz { get { return new Vector4(Y, W, X, Z); } set { Y = value.X; W = value.Y; X = value.Z; Z = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4 with the Y, W, Z, and X components of this instance. - /// - [XmlIgnore] - public Vector4 Ywzx { get { return new Vector4(Y, W, Z, X); } set { Y = value.X; W = value.Y; Z = value.Z; X = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4 with the Z, X, Y, and Z components of this instance. - /// - [XmlIgnore] - public Vector4 Zxyw { get { return new Vector4(Z, X, Y, W); } set { Z = value.X; X = value.Y; Y = value.Z; W = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4 with the Z, X, W, and Y components of this instance. - /// - [XmlIgnore] - public Vector4 Zxwy { get { return new Vector4(Z, X, W, Y); } set { Z = value.X; X = value.Y; W = value.Z; Y = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4 with the Z, Y, X, and W components of this instance. - /// - [XmlIgnore] - public Vector4 Zyxw { get { return new Vector4(Z, Y, X, W); } set { Z = value.X; Y = value.Y; X = value.Z; W = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4 with the Z, Y, W, and X components of this instance. - /// - [XmlIgnore] - public Vector4 Zywx { get { return new Vector4(Z, Y, W, X); } set { Z = value.X; Y = value.Y; W = value.Z; X = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4 with the Z, W, X, and Y components of this instance. - /// - [XmlIgnore] - public Vector4 Zwxy { get { return new Vector4(Z, W, X, Y); } set { Z = value.X; W = value.Y; X = value.Z; Y = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4 with the Z, W, Y, and X components of this instance. - /// - [XmlIgnore] - public Vector4 Zwyx { get { return new Vector4(Z, W, Y, X); } set { Z = value.X; W = value.Y; Y = value.Z; X = value.W; } } - - /// - /// Gets an OpenTK.Vector4 with the Z, W, Z, and Y components of this instance. - /// - [XmlIgnore] - public Vector4 Zwzy { get { return new Vector4(Z, W, Z, Y); } set { X = value.X; W = value.Y; Z = value.Z; Y = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4 with the W, X, Y, and Z components of this instance. - /// - [XmlIgnore] - public Vector4 Wxyz { get { return new Vector4(W, X, Y, Z); } set { W = value.X; X = value.Y; Y = value.Z; Z = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4 with the W, X, Z, and Y components of this instance. - /// - [XmlIgnore] - public Vector4 Wxzy { get { return new Vector4(W, X, Z, Y); } set { W = value.X; X = value.Y; Z = value.Z; Y = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4 with the W, Y, X, and Z components of this instance. - /// - [XmlIgnore] - public Vector4 Wyxz { get { return new Vector4(W, Y, X, Z); } set { W = value.X; Y = value.Y; X = value.Z; Z = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4 with the W, Y, Z, and X components of this instance. 
- /// - [XmlIgnore] - public Vector4 Wyzx { get { return new Vector4(W, Y, Z, X); } set { W = value.X; Y = value.Y; Z = value.Z; X = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4 with the W, Z, X, and Y components of this instance. - /// - [XmlIgnore] - public Vector4 Wzxy { get { return new Vector4(W, Z, X, Y); } set { W = value.X; Z = value.Y; X = value.Z; Y = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4 with the W, Z, Y, and X components of this instance. - /// - [XmlIgnore] - public Vector4 Wzyx { get { return new Vector4(W, Z, Y, X); } set { W = value.X; Z = value.Y; Y = value.Z; X = value.W; } } - - /// - /// Gets an OpenTK.Vector4 with the W, Z, Y, and W components of this instance. - /// - [XmlIgnore] - public Vector4 Wzyw { get { return new Vector4(W, Z, Y, W); } set { X = value.X; Z = value.Y; Y = value.Z; W = value.W; } } - - #endregion - - #endregion - - #region Operators - - /// - /// Adds two instances. - /// - /// The first instance. - /// The second instance. - /// The result of the calculation. - public static Vector4 operator +(Vector4 left, Vector4 right) - { - left.X += right.X; - left.Y += right.Y; - left.Z += right.Z; - left.W += right.W; - return left; - } - - /// - /// Subtracts two instances. - /// - /// The first instance. - /// The second instance. - /// The result of the calculation. - public static Vector4 operator -(Vector4 left, Vector4 right) - { - left.X -= right.X; - left.Y -= right.Y; - left.Z -= right.Z; - left.W -= right.W; - return left; - } - - /// - /// Negates an instance. - /// - /// The instance. - /// The result of the calculation. - public static Vector4 operator -(Vector4 vec) - { - vec.X = -vec.X; - vec.Y = -vec.Y; - vec.Z = -vec.Z; - vec.W = -vec.W; - return vec; - } - - /// - /// Multiplies an instance by a scalar. - /// - /// The instance. - /// The scalar. - /// The result of the calculation. - public static Vector4 operator *(Vector4 vec, float scale) - { - vec.X *= scale; - vec.Y *= scale; - vec.Z *= scale; - vec.W *= scale; - return vec; - } - - /// - /// Multiplies an instance by a scalar. - /// - /// The scalar. - /// The instance. - /// The result of the calculation. - public static Vector4 operator *(float scale, Vector4 vec) - { - vec.X *= scale; - vec.Y *= scale; - vec.Z *= scale; - vec.W *= scale; - return vec; - } - - /// - /// Component-wise multiplication between the specified instance by a scale vector. - /// - /// Left operand. - /// Right operand. - /// Result of multiplication. - public static Vector4 operator *(Vector4 vec, Vector4 scale) - { - vec.X *= scale.X; - vec.Y *= scale.Y; - vec.Z *= scale.Z; - vec.W *= scale.W; - return vec; - } - - /// - /// Divides an instance by a scalar. - /// - /// The instance. - /// The scalar. - /// The result of the calculation. - public static Vector4 operator /(Vector4 vec, float scale) - { - float mult = 1.0f / scale; - vec.X *= mult; - vec.Y *= mult; - vec.Z *= mult; - vec.W *= mult; - return vec; - } - - /// - /// Compares two instances for equality. - /// - /// The first instance. - /// The second instance. - /// True, if left equals right; false otherwise. - public static bool operator ==(Vector4 left, Vector4 right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for inequality. - /// - /// The first instance. - /// The second instance. - /// True, if left does not equa lright; false otherwise. 
- public static bool operator !=(Vector4 left, Vector4 right) - { - return !left.Equals(right); - } - - /// - /// Returns a pointer to the first element of the specified instance. - /// - /// The instance. - /// A pointer to the first element of v. - [CLSCompliant(false)] - unsafe public static explicit operator float*(Vector4 v) - { - return &v.X; - } - - /// - /// Returns a pointer to the first element of the specified instance. - /// - /// The instance. - /// A pointer to the first element of v. - public static explicit operator IntPtr(Vector4 v) - { - unsafe - { - return (IntPtr)(&v.X); - } - } - - #endregion - - #region Overrides - - #region public override string ToString() - - private static string listSeparator = System.Globalization.CultureInfo.CurrentCulture.TextInfo.ListSeparator; - /// - /// Returns a System.String that represents the current Vector4. - /// - /// - public override string ToString() - { - return String.Format("({0}{4} {1}{4} {2}{4} {3})", X, Y, Z, W, listSeparator); - } - - #endregion - - #region public override int GetHashCode() - - /// - /// Returns the hashcode for this instance. - /// - /// A System.Int32 containing the unique hashcode for this instance. - public override int GetHashCode() - { - return X.GetHashCode() ^ Y.GetHashCode() ^ Z.GetHashCode() ^ W.GetHashCode(); - } - - #endregion - - #region public override bool Equals(object obj) - - /// - /// Indicates whether this instance and a specified object are equal. - /// - /// The object to compare to. - /// True if the instances are equal; false otherwise. - public override bool Equals(object obj) - { - if (!(obj is Vector4)) - return false; - - return this.Equals((Vector4)obj); - } - - #endregion - - #endregion - - #endregion - - #region IEquatable Members - - /// Indicates whether the current vector is equal to another vector. - /// A vector to compare with this vector. - /// true if the current vector is equal to the vector parameter; otherwise, false. - public bool Equals(Vector4 other) - { - return - X == other.X && - Y == other.Y && - Z == other.Z && - W == other.W; - } - - #endregion - } -} diff --git a/OpenTK/Math/Vector4d.cs b/OpenTK/Math/Vector4d.cs deleted file mode 100644 index b158005e..00000000 --- a/OpenTK/Math/Vector4d.cs +++ /dev/null @@ -1,1658 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
- */ -#endregion - -using System; -using System.Runtime.InteropServices; -using System.Xml.Serialization; - -namespace OpenTK -{ - /// Represents a 4D vector using four double-precision floating-point numbers. - [Serializable] - [StructLayout(LayoutKind.Sequential)] - public struct Vector4d : IEquatable - { - #region Fields - - /// - /// The X component of the Vector4d. - /// - public double X; - - /// - /// The Y component of the Vector4d. - /// - public double Y; - - /// - /// The Z component of the Vector4d. - /// - public double Z; - - /// - /// The W component of the Vector4d. - /// - public double W; - - /// - /// Defines a unit-length Vector4d that points towards the X-axis. - /// - public static readonly Vector4d UnitX = new Vector4d(1, 0, 0, 0); - - /// - /// Defines a unit-length Vector4d that points towards the Y-axis. - /// - public static readonly Vector4d UnitY = new Vector4d(0, 1, 0, 0); - - /// - /// Defines a unit-length Vector4d that points towards the Z-axis. - /// - public static readonly Vector4d UnitZ = new Vector4d(0, 0, 1, 0); - - /// - /// Defines a unit-length Vector4d that points towards the W-axis. - /// - public static readonly Vector4d UnitW = new Vector4d(0, 0, 0, 1); - - /// - /// Defines a zero-length Vector4d. - /// - public static readonly Vector4d Zero = new Vector4d(0, 0, 0, 0); - - /// - /// Defines an instance with all components set to 1. - /// - public static readonly Vector4d One = new Vector4d(1, 1, 1, 1); - - /// - /// Defines the size of the Vector4d struct in bytes. - /// - public static readonly int SizeInBytes = Marshal.SizeOf(new Vector4d()); - - #endregion - - #region Constructors - - /// - /// Constructs a new instance. - /// - /// The value that will initialize this instance. - public Vector4d(double value) - { - X = value; - Y = value; - Z = value; - W = value; - } - - /// - /// Constructs a new Vector4d. - /// - /// The x component of the Vector4d. - /// The y component of the Vector4d. - /// The z component of the Vector4d. - /// The w component of the Vector4d. - public Vector4d(double x, double y, double z, double w) - { - X = x; - Y = y; - Z = z; - W = w; - } - - /// - /// Constructs a new Vector4d from the given Vector2d. - /// - /// The Vector2d to copy components from. - public Vector4d(Vector2d v) - { - X = v.X; - Y = v.Y; - Z = 0.0f; - W = 0.0f; - } - - /// - /// Constructs a new Vector4d from the given Vector3d. - /// The w component is initialized to 0. - /// - /// The Vector3d to copy components from. - /// - public Vector4d(Vector3d v) - { - X = v.X; - Y = v.Y; - Z = v.Z; - W = 0.0f; - } - - /// - /// Constructs a new Vector4d from the specified Vector3d and w component. - /// - /// The Vector3d to copy components from. - /// The w component of the new Vector4. - public Vector4d(Vector3d v, double w) - { - X = v.X; - Y = v.Y; - Z = v.Z; - W = w; - } - - /// - /// Constructs a new Vector4d from the given Vector4d. - /// - /// The Vector4d to copy components from. - public Vector4d(Vector4d v) - { - X = v.X; - Y = v.Y; - Z = v.Z; - W = v.W; - } - - #endregion - - #region Public Members - - /// - /// Gets or sets the value at the index of the Vector. 
- /// - public double this[int index] { - get{ - if(index == 0) return X; - else if(index == 1) return Y; - else if(index == 2) return Z; - else if(index == 3) return W; - throw new IndexOutOfRangeException("You tried to access this vector at index: " + index); - } set{ - if(index == 0) X = value; - else if(index == 1) Y = value; - else if(index == 2) Z = value; - else if(index == 3) W = value; - else throw new IndexOutOfRangeException("You tried to set this vector at index: " + index); - } - } - - #region Instance - - #region public void Add() - - /// Add the Vector passed as parameter to this instance. - /// Right operand. This parameter is only read from. - [CLSCompliant(false)] - [Obsolete("Use static Add() method instead.")] - public void Add(Vector4d right) - { - this.X += right.X; - this.Y += right.Y; - this.Z += right.Z; - this.W += right.W; - } - - /// Add the Vector passed as parameter to this instance. - /// Right operand. This parameter is only read from. - [CLSCompliant(false)] - [Obsolete("Use static Add() method instead.")] - public void Add(ref Vector4d right) - { - this.X += right.X; - this.Y += right.Y; - this.Z += right.Z; - this.W += right.W; - } - - #endregion public void Add() - - #region public void Sub() - - /// Subtract the Vector passed as parameter from this instance. - /// Right operand. This parameter is only read from. - [CLSCompliant(false)] - [Obsolete("Use static Subtract() method instead.")] - public void Sub(Vector4d right) - { - this.X -= right.X; - this.Y -= right.Y; - this.Z -= right.Z; - this.W -= right.W; - } - - /// Subtract the Vector passed as parameter from this instance. - /// Right operand. This parameter is only read from. - [CLSCompliant(false)] - [Obsolete("Use static Subtract() method instead.")] - public void Sub(ref Vector4d right) - { - this.X -= right.X; - this.Y -= right.Y; - this.Z -= right.Z; - this.W -= right.W; - } - - #endregion public void Sub() - - #region public void Mult() - - /// Multiply this instance by a scalar. - /// Scalar operand. - [Obsolete("Use static Multiply() method instead.")] - public void Mult(double f) - { - this.X *= f; - this.Y *= f; - this.Z *= f; - this.W *= f; - } - - #endregion public void Mult() - - #region public void Div() - - /// Divide this instance by a scalar. - /// Scalar operand. - [Obsolete("Use static Divide() method instead.")] - public void Div(double f) - { - double mult = 1.0 / f; - this.X *= mult; - this.Y *= mult; - this.Z *= mult; - this.W *= mult; - } - - #endregion public void Div() - - #region public double Length - - /// - /// Gets the length (magnitude) of the vector. - /// - /// - /// - public double Length - { - get - { - return System.Math.Sqrt(X * X + Y * Y + Z * Z + W * W); - } - } - - #endregion - - #region public double LengthFast - - /// - /// Gets an approximation of the vector length (magnitude). - /// - /// - /// This property uses an approximation of the square root function to calculate vector magnitude, with - /// an upper error bound of 0.001. - /// - /// - /// - public double LengthFast - { - get - { - return 1.0 / MathHelper.InverseSqrtFast(X * X + Y * Y + Z * Z + W * W); - } - } - - #endregion - - #region public double LengthSquared - - /// - /// Gets the square of the vector length (magnitude). - /// - /// - /// This property avoids the costly square root operation required by the Length property. This makes it more suitable - /// for comparisons. 
- /// - /// - public double LengthSquared - { - get - { - return X * X + Y * Y + Z * Z + W * W; - } - } - - #endregion - - /// - /// Returns a copy of the Vector4d scaled to unit length. - /// - public Vector4d Normalized() - { - Vector4d v = this; - v.Normalize(); - return v; - } - - #region public void Normalize() - - /// - /// Scales the Vector4d to unit length. - /// - public void Normalize() - { - double scale = 1.0 / this.Length; - X *= scale; - Y *= scale; - Z *= scale; - W *= scale; - } - - #endregion - - #region public void NormalizeFast() - - /// - /// Scales the Vector4d to approximately unit length. - /// - public void NormalizeFast() - { - double scale = MathHelper.InverseSqrtFast(X * X + Y * Y + Z * Z + W * W); - X *= scale; - Y *= scale; - Z *= scale; - W *= scale; - } - - #endregion - - #region public void Scale() - - /// - /// Scales the current Vector4d by the given amounts. - /// - /// The scale of the X component. - /// The scale of the Y component. - /// The scale of the Z component. - /// The scale of the Z component. - [Obsolete("Use static Multiply() method instead.")] - public void Scale(double sx, double sy, double sz, double sw) - { - this.X = X * sx; - this.Y = Y * sy; - this.Z = Z * sz; - this.W = W * sw; - } - - /// Scales this instance by the given parameter. - /// The scaling of the individual components. - [CLSCompliant(false)] - [Obsolete("Use static Multiply() method instead.")] - public void Scale(Vector4d scale) - { - this.X *= scale.X; - this.Y *= scale.Y; - this.Z *= scale.Z; - this.W *= scale.W; - } - - /// Scales this instance by the given parameter. - /// The scaling of the individual components. - [CLSCompliant(false)] - [Obsolete("Use static Multiply() method instead.")] - public void Scale(ref Vector4d scale) - { - this.X *= scale.X; - this.Y *= scale.Y; - this.Z *= scale.Z; - this.W *= scale.W; - } - - #endregion public void Scale() - - #endregion - - #region Static - - #region Obsolete - - #region Sub - - /// - /// Subtract one Vector from another - /// - /// First operand - /// Second operand - /// Result of subtraction - [Obsolete("Use static Subtract() method instead.")] - public static Vector4d Sub(Vector4d a, Vector4d b) - { - a.X -= b.X; - a.Y -= b.Y; - a.Z -= b.Z; - a.W -= b.W; - return a; - } - - /// - /// Subtract one Vector from another - /// - /// First operand - /// Second operand - /// Result of subtraction - [Obsolete("Use static Subtract() method instead.")] - public static void Sub(ref Vector4d a, ref Vector4d b, out Vector4d result) - { - result.X = a.X - b.X; - result.Y = a.Y - b.Y; - result.Z = a.Z - b.Z; - result.W = a.W - b.W; - } - - #endregion - - #region Mult - - /// - /// Multiply a vector and a scalar - /// - /// Vector operand - /// Scalar operand - /// Result of the multiplication - [Obsolete("Use static Multiply() method instead.")] - public static Vector4d Mult(Vector4d a, double f) - { - a.X *= f; - a.Y *= f; - a.Z *= f; - a.W *= f; - return a; - } - - /// - /// Multiply a vector and a scalar - /// - /// Vector operand - /// Scalar operand - /// Result of the multiplication - [Obsolete("Use static Multiply() method instead.")] - public static void Mult(ref Vector4d a, double f, out Vector4d result) - { - result.X = a.X * f; - result.Y = a.Y * f; - result.Z = a.Z * f; - result.W = a.W * f; - } - - #endregion - - #region Div - - /// - /// Divide a vector by a scalar - /// - /// Vector operand - /// Scalar operand - /// Result of the division - [Obsolete("Use static Divide() method instead.")] - public static 
Vector4d Div(Vector4d a, double f) - { - double mult = 1.0 / f; - a.X *= mult; - a.Y *= mult; - a.Z *= mult; - a.W *= mult; - return a; - } - - /// - /// Divide a vector by a scalar - /// - /// Vector operand - /// Scalar operand - /// Result of the division - [Obsolete("Use static Divide() method instead.")] - public static void Div(ref Vector4d a, double f, out Vector4d result) - { - double mult = 1.0 / f; - result.X = a.X * mult; - result.Y = a.Y * mult; - result.Z = a.Z * mult; - result.W = a.W * mult; - } - - #endregion - - #endregion - - #region Add - - /// - /// Adds two vectors. - /// - /// Left operand. - /// Right operand. - /// Result of operation. - public static Vector4d Add(Vector4d a, Vector4d b) - { - Add(ref a, ref b, out a); - return a; - } - - /// - /// Adds two vectors. - /// - /// Left operand. - /// Right operand. - /// Result of operation. - public static void Add(ref Vector4d a, ref Vector4d b, out Vector4d result) - { - result = new Vector4d(a.X + b.X, a.Y + b.Y, a.Z + b.Z, a.W + b.W); - } - - #endregion - - #region Subtract - - /// - /// Subtract one Vector from another - /// - /// First operand - /// Second operand - /// Result of subtraction - public static Vector4d Subtract(Vector4d a, Vector4d b) - { - Subtract(ref a, ref b, out a); - return a; - } - - /// - /// Subtract one Vector from another - /// - /// First operand - /// Second operand - /// Result of subtraction - public static void Subtract(ref Vector4d a, ref Vector4d b, out Vector4d result) - { - result = new Vector4d(a.X - b.X, a.Y - b.Y, a.Z - b.Z, a.W - b.W); - } - - #endregion - - #region Multiply - - /// - /// Multiplies a vector by a scalar. - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static Vector4d Multiply(Vector4d vector, double scale) - { - Multiply(ref vector, scale, out vector); - return vector; - } - - /// - /// Multiplies a vector by a scalar. - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static void Multiply(ref Vector4d vector, double scale, out Vector4d result) - { - result = new Vector4d(vector.X * scale, vector.Y * scale, vector.Z * scale, vector.W * scale); - } - - /// - /// Multiplies a vector by the components a vector (scale). - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static Vector4d Multiply(Vector4d vector, Vector4d scale) - { - Multiply(ref vector, ref scale, out vector); - return vector; - } - - /// - /// Multiplies a vector by the components of a vector (scale). - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static void Multiply(ref Vector4d vector, ref Vector4d scale, out Vector4d result) - { - result = new Vector4d(vector.X * scale.X, vector.Y * scale.Y, vector.Z * scale.Z, vector.W * scale.W); - } - - #endregion - - #region Divide - - /// - /// Divides a vector by a scalar. - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static Vector4d Divide(Vector4d vector, double scale) - { - Divide(ref vector, scale, out vector); - return vector; - } - - /// - /// Divides a vector by a scalar. - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static void Divide(ref Vector4d vector, double scale, out Vector4d result) - { - Multiply(ref vector, 1 / scale, out result); - } - - /// - /// Divides a vector by the components of a vector (scale). - /// - /// Left operand. - /// Right operand. - /// Result of the operation. 
- public static Vector4d Divide(Vector4d vector, Vector4d scale) - { - Divide(ref vector, ref scale, out vector); - return vector; - } - - /// - /// Divide a vector by the components of a vector (scale). - /// - /// Left operand. - /// Right operand. - /// Result of the operation. - public static void Divide(ref Vector4d vector, ref Vector4d scale, out Vector4d result) - { - result = new Vector4d(vector.X / scale.X, vector.Y / scale.Y, vector.Z / scale.Z, vector.W / scale.W); - } - - #endregion - - #region Min - - /// - /// Calculate the component-wise minimum of two vectors - /// - /// First operand - /// Second operand - /// The component-wise minimum - public static Vector4d Min(Vector4d a, Vector4d b) - { - a.X = a.X < b.X ? a.X : b.X; - a.Y = a.Y < b.Y ? a.Y : b.Y; - a.Z = a.Z < b.Z ? a.Z : b.Z; - a.W = a.W < b.W ? a.W : b.W; - return a; - } - - /// - /// Calculate the component-wise minimum of two vectors - /// - /// First operand - /// Second operand - /// The component-wise minimum - public static void Min(ref Vector4d a, ref Vector4d b, out Vector4d result) - { - result.X = a.X < b.X ? a.X : b.X; - result.Y = a.Y < b.Y ? a.Y : b.Y; - result.Z = a.Z < b.Z ? a.Z : b.Z; - result.W = a.W < b.W ? a.W : b.W; - } - - #endregion - - #region Max - - /// - /// Calculate the component-wise maximum of two vectors - /// - /// First operand - /// Second operand - /// The component-wise maximum - public static Vector4d Max(Vector4d a, Vector4d b) - { - a.X = a.X > b.X ? a.X : b.X; - a.Y = a.Y > b.Y ? a.Y : b.Y; - a.Z = a.Z > b.Z ? a.Z : b.Z; - a.W = a.W > b.W ? a.W : b.W; - return a; - } - - /// - /// Calculate the component-wise maximum of two vectors - /// - /// First operand - /// Second operand - /// The component-wise maximum - public static void Max(ref Vector4d a, ref Vector4d b, out Vector4d result) - { - result.X = a.X > b.X ? a.X : b.X; - result.Y = a.Y > b.Y ? a.Y : b.Y; - result.Z = a.Z > b.Z ? a.Z : b.Z; - result.W = a.W > b.W ? a.W : b.W; - } - - #endregion - - #region Clamp - - /// - /// Clamp a vector to the given minimum and maximum vectors - /// - /// Input vector - /// Minimum vector - /// Maximum vector - /// The clamped vector - public static Vector4d Clamp(Vector4d vec, Vector4d min, Vector4d max) - { - vec.X = vec.X < min.X ? min.X : vec.X > max.X ? max.X : vec.X; - vec.Y = vec.Y < min.Y ? min.Y : vec.Y > max.Y ? max.Y : vec.Y; - vec.Z = vec.X < min.Z ? min.Z : vec.Z > max.Z ? max.Z : vec.Z; - vec.W = vec.Y < min.W ? min.W : vec.W > max.W ? max.W : vec.W; - return vec; - } - - /// - /// Clamp a vector to the given minimum and maximum vectors - /// - /// Input vector - /// Minimum vector - /// Maximum vector - /// The clamped vector - public static void Clamp(ref Vector4d vec, ref Vector4d min, ref Vector4d max, out Vector4d result) - { - result.X = vec.X < min.X ? min.X : vec.X > max.X ? max.X : vec.X; - result.Y = vec.Y < min.Y ? min.Y : vec.Y > max.Y ? max.Y : vec.Y; - result.Z = vec.X < min.Z ? min.Z : vec.Z > max.Z ? max.Z : vec.Z; - result.W = vec.Y < min.W ? min.W : vec.W > max.W ? 
max.W : vec.W; - } - - #endregion - - #region Normalize - - /// - /// Scale a vector to unit length - /// - /// The input vector - /// The normalized vector - public static Vector4d Normalize(Vector4d vec) - { - double scale = 1.0 / vec.Length; - vec.X *= scale; - vec.Y *= scale; - vec.Z *= scale; - vec.W *= scale; - return vec; - } - - /// - /// Scale a vector to unit length - /// - /// The input vector - /// The normalized vector - public static void Normalize(ref Vector4d vec, out Vector4d result) - { - double scale = 1.0 / vec.Length; - result.X = vec.X * scale; - result.Y = vec.Y * scale; - result.Z = vec.Z * scale; - result.W = vec.W * scale; - } - - #endregion - - #region NormalizeFast - - /// - /// Scale a vector to approximately unit length - /// - /// The input vector - /// The normalized vector - public static Vector4d NormalizeFast(Vector4d vec) - { - double scale = MathHelper.InverseSqrtFast(vec.X * vec.X + vec.Y * vec.Y + vec.Z * vec.Z + vec.W * vec.W); - vec.X *= scale; - vec.Y *= scale; - vec.Z *= scale; - vec.W *= scale; - return vec; - } - - /// - /// Scale a vector to approximately unit length - /// - /// The input vector - /// The normalized vector - public static void NormalizeFast(ref Vector4d vec, out Vector4d result) - { - double scale = MathHelper.InverseSqrtFast(vec.X * vec.X + vec.Y * vec.Y + vec.Z * vec.Z + vec.W * vec.W); - result.X = vec.X * scale; - result.Y = vec.Y * scale; - result.Z = vec.Z * scale; - result.W = vec.W * scale; - } - - #endregion - - #region Dot - - /// - /// Calculate the dot product of two vectors - /// - /// First operand - /// Second operand - /// The dot product of the two inputs - public static double Dot(Vector4d left, Vector4d right) - { - return left.X * right.X + left.Y * right.Y + left.Z * right.Z + left.W * right.W; - } - - /// - /// Calculate the dot product of two vectors - /// - /// First operand - /// Second operand - /// The dot product of the two inputs - public static void Dot(ref Vector4d left, ref Vector4d right, out double result) - { - result = left.X * right.X + left.Y * right.Y + left.Z * right.Z + left.W * right.W; - } - - #endregion - - #region Lerp - - /// - /// Returns a new Vector that is the linear blend of the 2 given Vectors - /// - /// First input vector - /// Second input vector - /// The blend factor. a when blend=0, b when blend=1. - /// a when blend=0, b when blend=1, and a linear combination otherwise - public static Vector4d Lerp(Vector4d a, Vector4d b, double blend) - { - a.X = blend * (b.X - a.X) + a.X; - a.Y = blend * (b.Y - a.Y) + a.Y; - a.Z = blend * (b.Z - a.Z) + a.Z; - a.W = blend * (b.W - a.W) + a.W; - return a; - } - - /// - /// Returns a new Vector that is the linear blend of the 2 given Vectors - /// - /// First input vector - /// Second input vector - /// The blend factor. a when blend=0, b when blend=1. 
- /// a when blend=0, b when blend=1, and a linear combination otherwise - public static void Lerp(ref Vector4d a, ref Vector4d b, double blend, out Vector4d result) - { - result.X = blend * (b.X - a.X) + a.X; - result.Y = blend * (b.Y - a.Y) + a.Y; - result.Z = blend * (b.Z - a.Z) + a.Z; - result.W = blend * (b.W - a.W) + a.W; - } - - #endregion - - #region Barycentric - - /// - /// Interpolate 3 Vectors using Barycentric coordinates - /// - /// First input Vector - /// Second input Vector - /// Third input Vector - /// First Barycentric Coordinate - /// Second Barycentric Coordinate - /// a when u=v=0, b when u=1,v=0, c when u=0,v=1, and a linear combination of a,b,c otherwise - public static Vector4d BaryCentric(Vector4d a, Vector4d b, Vector4d c, double u, double v) - { - return a + u * (b - a) + v * (c - a); - } - - /// Interpolate 3 Vectors using Barycentric coordinates - /// First input Vector. - /// Second input Vector. - /// Third input Vector. - /// First Barycentric Coordinate. - /// Second Barycentric Coordinate. - /// Output Vector. a when u=v=0, b when u=1,v=0, c when u=0,v=1, and a linear combination of a,b,c otherwise - public static void BaryCentric(ref Vector4d a, ref Vector4d b, ref Vector4d c, double u, double v, out Vector4d result) - { - result = a; // copy - - Vector4d temp = b; // copy - Subtract(ref temp, ref a, out temp); - Multiply(ref temp, u, out temp); - Add(ref result, ref temp, out result); - - temp = c; // copy - Subtract(ref temp, ref a, out temp); - Multiply(ref temp, v, out temp); - Add(ref result, ref temp, out result); - } - - #endregion - - #region Transform - - /// Transform a Vector by the given Matrix - /// The vector to transform - /// The desired transformation - /// The transformed vector - public static Vector4d Transform(Vector4d vec, Matrix4d mat) - { - Vector4d result; - Transform(ref vec, ref mat, out result); - return result; - } - - /// Transform a Vector by the given Matrix - /// The vector to transform - /// The desired transformation - /// The transformed vector - public static void Transform(ref Vector4d vec, ref Matrix4d mat, out Vector4d result) - { - result = new Vector4d( - vec.X * mat.Row0.X + vec.Y * mat.Row1.X + vec.Z * mat.Row2.X + vec.W * mat.Row3.X, - vec.X * mat.Row0.Y + vec.Y * mat.Row1.Y + vec.Z * mat.Row2.Y + vec.W * mat.Row3.Y, - vec.X * mat.Row0.Z + vec.Y * mat.Row1.Z + vec.Z * mat.Row2.Z + vec.W * mat.Row3.Z, - vec.X * mat.Row0.W + vec.Y * mat.Row1.W + vec.Z * mat.Row2.W + vec.W * mat.Row3.W); - } - - /// - /// Transforms a vector by a quaternion rotation. - /// - /// The vector to transform. - /// The quaternion to rotate the vector by. - /// The result of the operation. - public static Vector4d Transform(Vector4d vec, Quaterniond quat) - { - Vector4d result; - Transform(ref vec, ref quat, out result); - return result; - } - - /// - /// Transforms a vector by a quaternion rotation. - /// - /// The vector to transform. - /// The quaternion to rotate the vector by. - /// The result of the operation. - public static void Transform(ref Vector4d vec, ref Quaterniond quat, out Vector4d result) - { - Quaterniond v = new Quaterniond(vec.X, vec.Y, vec.Z, vec.W), i, t; - Quaterniond.Invert(ref quat, out i); - Quaterniond.Multiply(ref quat, ref v, out t); - Quaterniond.Multiply(ref t, ref i, out v); - - result = new Vector4d(v.X, v.Y, v.Z, v.W); - } - - #endregion - - #endregion - - #region Swizzle - - #region 2-component - - /// - /// Gets or sets an OpenTK.Vector2d with the X and Y components of this instance. 
- /// - [XmlIgnore] - public Vector2d Xy { get { return new Vector2d(X, Y); } set { X = value.X; Y = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2d with the X and Z components of this instance. - /// - [XmlIgnore] - public Vector2d Xz { get { return new Vector2d(X, Z); } set { X = value.X; Z = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2d with the X and W components of this instance. - /// - [XmlIgnore] - public Vector2d Xw { get { return new Vector2d(X, W); } set { X = value.X; W = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2d with the Y and X components of this instance. - /// - [XmlIgnore] - public Vector2d Yx { get { return new Vector2d(Y, X); } set { Y = value.X; X = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2d with the Y and Z components of this instance. - /// - [XmlIgnore] - public Vector2d Yz { get { return new Vector2d(Y, Z); } set { Y = value.X; Z = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2d with the Y and W components of this instance. - /// - [XmlIgnore] - public Vector2d Yw { get { return new Vector2d(Y, W); } set { Y = value.X; W = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2d with the Z and X components of this instance. - /// - [XmlIgnore] - public Vector2d Zx { get { return new Vector2d(Z, X); } set { Z = value.X; X = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2d with the Z and Y components of this instance. - /// - [XmlIgnore] - public Vector2d Zy { get { return new Vector2d(Z, Y); } set { Z = value.X; Y = value.Y; } } - - /// - /// Gets an OpenTK.Vector2d with the Z and W components of this instance. - /// - [XmlIgnore] - public Vector2d Zw { get { return new Vector2d(Z, W); } set { Z = value.X; W = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2d with the W and X components of this instance. - /// - [XmlIgnore] - public Vector2d Wx { get { return new Vector2d(W, X); } set { W = value.X; X = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2d with the W and Y components of this instance. - /// - [XmlIgnore] - public Vector2d Wy { get { return new Vector2d(W, Y); } set { W = value.X; Y = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2d with the W and Z components of this instance. - /// - [XmlIgnore] - public Vector2d Wz { get { return new Vector2d(W, Z); } set { W = value.X; Z = value.Y; } } - - #endregion - - #region 3-component - - /// - /// Gets or sets an OpenTK.Vector3d with the X, Y, and Z components of this instance. - /// - [XmlIgnore] - public Vector3d Xyz { get { return new Vector3d(X, Y, Z); } set { X = value.X; Y = value.Y; Z = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3d with the X, Y, and Z components of this instance. - /// - [XmlIgnore] - public Vector3d Xyw { get { return new Vector3d(X, Y, W); } set { X = value.X; Y = value.Y; W = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3d with the X, Z, and Y components of this instance. - /// - [XmlIgnore] - public Vector3d Xzy { get { return new Vector3d(X, Z, Y); } set { X = value.X; Z = value.Y; Y = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3d with the X, Z, and W components of this instance. - /// - [XmlIgnore] - public Vector3d Xzw { get { return new Vector3d(X, Z, W); } set { X = value.X; Z = value.Y; W = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3d with the X, W, and Y components of this instance. 
- /// - [XmlIgnore] - public Vector3d Xwy { get { return new Vector3d(X, W, Y); } set { X = value.X; W = value.Y; Y = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3d with the X, W, and Z components of this instance. - /// - [XmlIgnore] - public Vector3d Xwz { get { return new Vector3d(X, W, Z); } set { X = value.X; W = value.Y; Z = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3d with the Y, X, and Z components of this instance. - /// - [XmlIgnore] - public Vector3d Yxz { get { return new Vector3d(Y, X, Z); } set { Y = value.X; X = value.Y; Z = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3d with the Y, X, and W components of this instance. - /// - [XmlIgnore] - public Vector3d Yxw { get { return new Vector3d(Y, X, W); } set { Y = value.X; X = value.Y; W = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3d with the Y, Z, and X components of this instance. - /// - [XmlIgnore] - public Vector3d Yzx { get { return new Vector3d(Y, Z, X); } set { Y = value.X; Z = value.Y; X = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3d with the Y, Z, and W components of this instance. - /// - [XmlIgnore] - public Vector3d Yzw { get { return new Vector3d(Y, Z, W); } set { Y = value.X; Z = value.Y; W = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3d with the Y, W, and X components of this instance. - /// - [XmlIgnore] - public Vector3d Ywx { get { return new Vector3d(Y, W, X); } set { Y = value.X; W = value.Y; X = value.Z; } } - - /// - /// Gets an OpenTK.Vector3d with the Y, W, and Z components of this instance. - /// - [XmlIgnore] - public Vector3d Ywz { get { return new Vector3d(Y, W, Z); } set { Y = value.X; W = value.Y; Z = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3d with the Z, X, and Y components of this instance. - /// - [XmlIgnore] - public Vector3d Zxy { get { return new Vector3d(Z, X, Y); } set { Z = value.X; X = value.Y; Y = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3d with the Z, X, and W components of this instance. - /// - [XmlIgnore] - public Vector3d Zxw { get { return new Vector3d(Z, X, W); } set { Z = value.X; X = value.Y; W = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3d with the Z, Y, and X components of this instance. - /// - [XmlIgnore] - public Vector3d Zyx { get { return new Vector3d(Z, Y, X); } set { Z = value.X; Y = value.Y; X = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3d with the Z, Y, and W components of this instance. - /// - [XmlIgnore] - public Vector3d Zyw { get { return new Vector3d(Z, Y, W); } set { Z = value.X; Y = value.Y; W = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3d with the Z, W, and X components of this instance. - /// - [XmlIgnore] - public Vector3d Zwx { get { return new Vector3d(Z, W, X); } set { Z = value.X; W = value.Y; X = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3d with the Z, W, and Y components of this instance. - /// - [XmlIgnore] - public Vector3d Zwy { get { return new Vector3d(Z, W, Y); } set { Z = value.X; W = value.Y; Y = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3d with the W, X, and Y components of this instance. - /// - [XmlIgnore] - public Vector3d Wxy { get { return new Vector3d(W, X, Y); } set { W = value.X; X = value.Y; Y = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3d with the W, X, and Z components of this instance. 
- /// - [XmlIgnore] - public Vector3d Wxz { get { return new Vector3d(W, X, Z); } set { W = value.X; X = value.Y; Z = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3d with the W, Y, and X components of this instance. - /// - [XmlIgnore] - public Vector3d Wyx { get { return new Vector3d(W, Y, X); } set { W = value.X; Y = value.Y; X = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3d with the W, Y, and Z components of this instance. - /// - [XmlIgnore] - public Vector3d Wyz { get { return new Vector3d(W, Y, Z); } set { W = value.X; Y = value.Y; Z = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3d with the W, Z, and X components of this instance. - /// - [XmlIgnore] - public Vector3d Wzx { get { return new Vector3d(W, Z, X); } set { W = value.X; Z = value.Y; X = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3d with the W, Z, and Y components of this instance. - /// - [XmlIgnore] - public Vector3d Wzy { get { return new Vector3d(W, Z, Y); } set { W = value.X; Z = value.Y; Y = value.Z; } } - - #endregion - - #region 4-component - - /// - /// Gets or sets an OpenTK.Vector4d with the X, Y, W, and Z components of this instance. - /// - [XmlIgnore] - public Vector4d Xywz { get { return new Vector4d(X, Y, W, Z); } set { X = value.X; Y = value.Y; W = value.Z; Z = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4d with the X, Z, Y, and W components of this instance. - /// - [XmlIgnore] - public Vector4d Xzyw { get { return new Vector4d(X, Z, Y, W); } set { X = value.X; Z = value.Y; Y = value.Z; W = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4d with the X, Z, W, and Y components of this instance. - /// - [XmlIgnore] - public Vector4d Xzwy { get { return new Vector4d(X, Z, W, Y); } set { X = value.X; Z = value.Y; W = value.Z; Y = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4d with the X, W, Y, and Z components of this instance. - /// - [XmlIgnore] - public Vector4d Xwyz { get { return new Vector4d(X, W, Y, Z); } set { X = value.X; W = value.Y; Y = value.Z; Z = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4d with the X, W, Z, and Y components of this instance. - /// - [XmlIgnore] - public Vector4d Xwzy { get { return new Vector4d(X, W, Z, Y); } set { X = value.X; W = value.Y; Z = value.Z; Y = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4d with the Y, X, Z, and W components of this instance. - /// - [XmlIgnore] - public Vector4d Yxzw { get { return new Vector4d(Y, X, Z, W); } set { Y = value.X; X = value.Y; Z = value.Z; W = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4d with the Y, X, W, and Z components of this instance. - /// - [XmlIgnore] - public Vector4d Yxwz { get { return new Vector4d(Y, X, W, Z); } set { Y = value.X; X = value.Y; W = value.Z; Z = value.W; } } - - /// - /// Gets an OpenTK.Vector4d with the Y, Y, Z, and W components of this instance. - /// - [XmlIgnore] - public Vector4d Yyzw { get { return new Vector4d(Y, Y, Z, W); } set { X = value.X; Y = value.Y; Z = value.Z; W = value.W; } } - - /// - /// Gets an OpenTK.Vector4d with the Y, Y, W, and Z components of this instance. - /// - [XmlIgnore] - public Vector4d Yywz { get { return new Vector4d(Y, Y, W, Z); } set { X = value.X; Y = value.Y; W = value.Z; Z = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4d with the Y, Z, X, and W components of this instance. 
- /// - [XmlIgnore] - public Vector4d Yzxw { get { return new Vector4d(Y, Z, X, W); } set { Y = value.X; Z = value.Y; X = value.Z; W = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4d with the Y, Z, W, and X components of this instance. - /// - [XmlIgnore] - public Vector4d Yzwx { get { return new Vector4d(Y, Z, W, X); } set { Y = value.X; Z = value.Y; W = value.Z; X = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4d with the Y, W, X, and Z components of this instance. - /// - [XmlIgnore] - public Vector4d Ywxz { get { return new Vector4d(Y, W, X, Z); } set { Y = value.X; W = value.Y; X = value.Z; Z = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4d with the Y, W, Z, and X components of this instance. - /// - [XmlIgnore] - public Vector4d Ywzx { get { return new Vector4d(Y, W, Z, X); } set { Y = value.X; W = value.Y; Z = value.Z; X = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4d with the Z, X, Y, and Z components of this instance. - /// - [XmlIgnore] - public Vector4d Zxyw { get { return new Vector4d(Z, X, Y, W); } set { Z = value.X; X = value.Y; Y = value.Z; W = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4d with the Z, X, W, and Y components of this instance. - /// - [XmlIgnore] - public Vector4d Zxwy { get { return new Vector4d(Z, X, W, Y); } set { Z = value.X; X = value.Y; W = value.Z; Y = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4d with the Z, Y, X, and W components of this instance. - /// - [XmlIgnore] - public Vector4d Zyxw { get { return new Vector4d(Z, Y, X, W); } set { Z = value.X; Y = value.Y; X = value.Z; W = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4d with the Z, Y, W, and X components of this instance. - /// - [XmlIgnore] - public Vector4d Zywx { get { return new Vector4d(Z, Y, W, X); } set { Z = value.X; Y = value.Y; W = value.Z; X = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4d with the Z, W, X, and Y components of this instance. - /// - [XmlIgnore] - public Vector4d Zwxy { get { return new Vector4d(Z, W, X, Y); } set { Z = value.X; W = value.Y; X = value.Z; Y = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4d with the Z, W, Y, and X components of this instance. - /// - [XmlIgnore] - public Vector4d Zwyx { get { return new Vector4d(Z, W, Y, X); } set { Z = value.X; W = value.Y; Y = value.Z; X = value.W; } } - - /// - /// Gets an OpenTK.Vector4d with the Z, W, Z, and Y components of this instance. - /// - [XmlIgnore] - public Vector4d Zwzy { get { return new Vector4d(Z, W, Z, Y); } set { X = value.X; W = value.Y; Z = value.Z; Y = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4d with the W, X, Y, and Z components of this instance. - /// - [XmlIgnore] - public Vector4d Wxyz { get { return new Vector4d(W, X, Y, Z); } set { W = value.X; X = value.Y; Y = value.Z; Z = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4d with the W, X, Z, and Y components of this instance. - /// - [XmlIgnore] - public Vector4d Wxzy { get { return new Vector4d(W, X, Z, Y); } set { W = value.X; X = value.Y; Z = value.Z; Y = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4d with the W, Y, X, and Z components of this instance. - /// - [XmlIgnore] - public Vector4d Wyxz { get { return new Vector4d(W, Y, X, Z); } set { W = value.X; Y = value.Y; X = value.Z; Z = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4d with the W, Y, Z, and X components of this instance. 
- /// - [XmlIgnore] - public Vector4d Wyzx { get { return new Vector4d(W, Y, Z, X); } set { W = value.X; Y = value.Y; Z = value.Z; X = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4d with the W, Z, X, and Y components of this instance. - /// - [XmlIgnore] - public Vector4d Wzxy { get { return new Vector4d(W, Z, X, Y); } set { W = value.X; Z = value.Y; X = value.Z; Y = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4d with the W, Z, Y, and X components of this instance. - /// - [XmlIgnore] - public Vector4d Wzyx { get { return new Vector4d(W, Z, Y, X); } set { W = value.X; Z = value.Y; Y = value.Z; X = value.W; } } - - /// - /// Gets an OpenTK.Vector4d with the W, Z, Y, and W components of this instance. - /// - [XmlIgnore] - public Vector4d Wzyw { get { return new Vector4d(W, Z, Y, W); } set { X = value.X; Z = value.Y; Y = value.Z; W = value.W; } } - - #endregion - - #endregion - - #region Operators - - /// - /// Adds two instances. - /// - /// The first instance. - /// The second instance. - /// The result of the calculation. - public static Vector4d operator +(Vector4d left, Vector4d right) - { - left.X += right.X; - left.Y += right.Y; - left.Z += right.Z; - left.W += right.W; - return left; - } - - /// - /// Subtracts two instances. - /// - /// The first instance. - /// The second instance. - /// The result of the calculation. - public static Vector4d operator -(Vector4d left, Vector4d right) - { - left.X -= right.X; - left.Y -= right.Y; - left.Z -= right.Z; - left.W -= right.W; - return left; - } - - /// - /// Negates an instance. - /// - /// The instance. - /// The result of the calculation. - public static Vector4d operator -(Vector4d vec) - { - vec.X = -vec.X; - vec.Y = -vec.Y; - vec.Z = -vec.Z; - vec.W = -vec.W; - return vec; - } - - /// - /// Multiplies an instance by a scalar. - /// - /// The instance. - /// The scalar. - /// The result of the calculation. - public static Vector4d operator *(Vector4d vec, double scale) - { - vec.X *= scale; - vec.Y *= scale; - vec.Z *= scale; - vec.W *= scale; - return vec; - } - - /// - /// Multiplies an instance by a scalar. - /// - /// The scalar. - /// The instance. - /// The result of the calculation. - public static Vector4d operator *(double scale, Vector4d vec) - { - vec.X *= scale; - vec.Y *= scale; - vec.Z *= scale; - vec.W *= scale; - return vec; - } - - /// - /// Component-wise multiplication between the specified instance by a scale vector. - /// - /// Left operand. - /// Right operand. - /// Result of multiplication. - public static Vector4d operator *(Vector4d vec, Vector4d scale) - { - vec.X *= scale.X; - vec.Y *= scale.Y; - vec.Z *= scale.Z; - vec.W *= scale.W; - return vec; - } - - /// - /// Divides an instance by a scalar. - /// - /// The instance. - /// The scalar. - /// The result of the calculation. - public static Vector4d operator /(Vector4d vec, double scale) - { - double mult = 1 / scale; - vec.X *= mult; - vec.Y *= mult; - vec.Z *= mult; - vec.W *= mult; - return vec; - } - - /// - /// Compares two instances for equality. - /// - /// The first instance. - /// The second instance. - /// True, if left equals right; false otherwise. - public static bool operator ==(Vector4d left, Vector4d right) - { - return left.Equals(right); - } - - /// - /// Compares two instances for inequality. - /// - /// The first instance. - /// The second instance. - /// True, if left does not equa lright; false otherwise. 
- public static bool operator !=(Vector4d left, Vector4d right) - { - return !left.Equals(right); - } - - /// - /// Returns a pointer to the first element of the specified instance. - /// - /// The instance. - /// A pointer to the first element of v. - [CLSCompliant(false)] - unsafe public static explicit operator double*(Vector4d v) - { - return &v.X; - } - - /// - /// Returns a pointer to the first element of the specified instance. - /// - /// The instance. - /// A pointer to the first element of v. - public static explicit operator IntPtr(Vector4d v) - { - unsafe - { - return (IntPtr)(&v.X); - } - } - - /// Converts OpenTK.Vector4 to OpenTK.Vector4d. - /// The Vector4 to convert. - /// The resulting Vector4d. - public static explicit operator Vector4d(Vector4 v4) - { - return new Vector4d(v4.X, v4.Y, v4.Z, v4.W); - } - - /// Converts OpenTK.Vector4d to OpenTK.Vector4. - /// The Vector4d to convert. - /// The resulting Vector4. - public static explicit operator Vector4(Vector4d v4d) - { - return new Vector4((float)v4d.X, (float)v4d.Y, (float)v4d.Z, (float)v4d.W); - } - - #endregion - - #region Overrides - - #region public override string ToString() - - private static string listSeparator = System.Globalization.CultureInfo.CurrentCulture.TextInfo.ListSeparator; - /// - /// Returns a System.String that represents the current Vector4d. - /// - /// - public override string ToString() - { - return String.Format("({0}{4} {1}{4} {2}{4} {3})", X, Y, Z, W, listSeparator); - } - - #endregion - - #region public override int GetHashCode() - - /// - /// Returns the hashcode for this instance. - /// - /// A System.Int32 containing the unique hashcode for this instance. - public override int GetHashCode() - { - return X.GetHashCode() ^ Y.GetHashCode() ^ Z.GetHashCode() ^ W.GetHashCode(); - } - - #endregion - - #region public override bool Equals(object obj) - - /// - /// Indicates whether this instance and a specified object are equal. - /// - /// The object to compare to. - /// True if the instances are equal; false otherwise. - public override bool Equals(object obj) - { - if (!(obj is Vector4d)) - return false; - - return this.Equals((Vector4d)obj); - } - - #endregion - - #endregion - - #endregion - - #region IEquatable Members - - /// Indicates whether the current vector is equal to another vector. - /// A vector to compare with this vector. - /// true if the current vector is equal to the vector parameter; otherwise, false. - public bool Equals(Vector4d other) - { - return - X == other.X && - Y == other.Y && - Z == other.Z && - W == other.W; - } - - #endregion - } -} \ No newline at end of file diff --git a/OpenTK/Math/Vector4h.cs b/OpenTK/Math/Vector4h.cs deleted file mode 100644 index d9499f73..00000000 --- a/OpenTK/Math/Vector4h.cs +++ /dev/null @@ -1,819 +0,0 @@ -#region --- License --- -/* -Copyright (c) 2006 - 2008 The Open Toolkit library. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - */ -#endregion - -using System; -using System.IO; -using System.Runtime.InteropServices; -using System.Runtime.Serialization; -using System.Xml.Serialization; - -namespace OpenTK -{ - /// - /// 4-component Vector of the Half type. Occupies 8 Byte total. - /// - [Serializable, StructLayout(LayoutKind.Sequential)] - public struct Vector4h : ISerializable, IEquatable - { - #region Public Fields - - /// The X component of the Half4. - public Half X; - - /// The Y component of the Half4. - public Half Y; - - /// The Z component of the Half4. - public Half Z; - - /// The W component of the Half4. - public Half W; - - #endregion Public Fields - - #region Constructors - - /// - /// Constructs a new instance. - /// - /// The value that will initialize this instance. - public Vector4h(Half value) - { - X = value; - Y = value; - Z = value; - W = value; - } - - /// - /// Constructs a new instance. - /// - /// The value that will initialize this instance. - public Vector4h(Single value) - { - X = new Half(value); - Y = new Half(value); - Z = new Half(value); - W = new Half(value); - } - - /// - /// The new Half4 instance will avoid conversion and copy directly from the Half parameters. - /// - /// An Half instance of a 16-bit half-precision floating-point number. - /// An Half instance of a 16-bit half-precision floating-point number. - /// An Half instance of a 16-bit half-precision floating-point number. - /// An Half instance of a 16-bit half-precision floating-point number. - public Vector4h(Half x, Half y, Half z, Half w) - { - this.X = x; - this.Y = y; - this.Z = z; - this.W = w; - } - - /// - /// The new Half4 instance will convert the 4 parameters into 16-bit half-precision floating-point. - /// - /// 32-bit single-precision floating-point number. - /// 32-bit single-precision floating-point number. - /// 32-bit single-precision floating-point number. - /// 32-bit single-precision floating-point number. - public Vector4h(Single x, Single y, Single z, Single w) - { - X = new Half(x); - Y = new Half(y); - Z = new Half(z); - W = new Half(w); - } - - /// - /// The new Half4 instance will convert the 4 parameters into 16-bit half-precision floating-point. - /// - /// 32-bit single-precision floating-point number. - /// 32-bit single-precision floating-point number. - /// 32-bit single-precision floating-point number. - /// 32-bit single-precision floating-point number. - /// Enable checks that will throw if the conversion result is not meaningful. - public Vector4h(Single x, Single y, Single z, Single w, bool throwOnError) - { - X = new Half(x, throwOnError); - Y = new Half(y, throwOnError); - Z = new Half(z, throwOnError); - W = new Half(w, throwOnError); - } - - /// - /// The new Half4 instance will convert the Vector4 into 16-bit half-precision floating-point. - /// - /// OpenTK.Vector4 - [CLSCompliant(false)] - public Vector4h(Vector4 v) - { - X = new Half(v.X); - Y = new Half(v.Y); - Z = new Half(v.Z); - W = new Half(v.W); - } - - /// - /// The new Half4 instance will convert the Vector4 into 16-bit half-precision floating-point. 
- /// - /// OpenTK.Vector4 - /// Enable checks that will throw if the conversion result is not meaningful. - [CLSCompliant(false)] - public Vector4h(Vector4 v, bool throwOnError) - { - X = new Half(v.X, throwOnError); - Y = new Half(v.Y, throwOnError); - Z = new Half(v.Z, throwOnError); - W = new Half(v.W, throwOnError); - } - - /// - /// The new Half4 instance will convert the Vector4 into 16-bit half-precision floating-point. - /// This is the fastest constructor. - /// - /// OpenTK.Vector4 - public Vector4h(ref Vector4 v) - { - X = new Half(v.X); - Y = new Half(v.Y); - Z = new Half(v.Z); - W = new Half(v.W); - } - - /// - /// The new Half4 instance will convert the Vector4 into 16-bit half-precision floating-point. - /// - /// OpenTK.Vector4 - /// Enable checks that will throw if the conversion result is not meaningful. - public Vector4h(ref Vector4 v, bool throwOnError) - { - X = new Half(v.X, throwOnError); - Y = new Half(v.Y, throwOnError); - Z = new Half(v.Z, throwOnError); - W = new Half(v.W, throwOnError); - } - - /// - /// The new Half4 instance will convert the Vector4d into 16-bit half-precision floating-point. - /// - /// OpenTK.Vector4d - [CLSCompliant(false)] - public Vector4h(Vector4d v) - { - X = new Half(v.X); - Y = new Half(v.Y); - Z = new Half(v.Z); - W = new Half(v.W); - } - - /// - /// The new Half4 instance will convert the Vector4d into 16-bit half-precision floating-point. - /// - /// OpenTK.Vector4d - /// Enable checks that will throw if the conversion result is not meaningful. - [CLSCompliant(false)] - public Vector4h(Vector4d v, bool throwOnError) - { - X = new Half(v.X, throwOnError); - Y = new Half(v.Y, throwOnError); - Z = new Half(v.Z, throwOnError); - W = new Half(v.W, throwOnError); - } - - /// - /// The new Half4 instance will convert the Vector4d into 16-bit half-precision floating-point. - /// This is the faster constructor. - /// - /// OpenTK.Vector4d - [CLSCompliant(false)] - public Vector4h(ref Vector4d v) - { - X = new Half(v.X); - Y = new Half(v.Y); - Z = new Half(v.Z); - W = new Half(v.W); - } - - /// - /// The new Half4 instance will convert the Vector4d into 16-bit half-precision floating-point. - /// - /// OpenTK.Vector4d - /// Enable checks that will throw if the conversion result is not meaningful. - [CLSCompliant(false)] - public Vector4h(ref Vector4d v, bool throwOnError) - { - X = new Half(v.X, throwOnError); - Y = new Half(v.Y, throwOnError); - Z = new Half(v.Z, throwOnError); - W = new Half(v.W, throwOnError); - } - - #endregion Constructors - - #region Swizzle - - #region 2-component - - /// - /// Gets or sets an OpenTK.Vector2h with the X and Y components of this instance. - /// - [XmlIgnore] - public Vector2h Xy { get { return new Vector2h(X, Y); } set { X = value.X; Y = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2h with the X and Z components of this instance. - /// - [XmlIgnore] - public Vector2h Xz { get { return new Vector2h(X, Z); } set { X = value.X; Z = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2h with the X and W components of this instance. - /// - [XmlIgnore] - public Vector2h Xw { get { return new Vector2h(X, W); } set { X = value.X; W = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2h with the Y and X components of this instance. - /// - [XmlIgnore] - public Vector2h Yx { get { return new Vector2h(Y, X); } set { Y = value.X; X = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2h with the Y and Z components of this instance. 
- /// - [XmlIgnore] - public Vector2h Yz { get { return new Vector2h(Y, Z); } set { Y = value.X; Z = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2h with the Y and W components of this instance. - /// - [XmlIgnore] - public Vector2h Yw { get { return new Vector2h(Y, W); } set { Y = value.X; W = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2h with the Z and X components of this instance. - /// - [XmlIgnore] - public Vector2h Zx { get { return new Vector2h(Z, X); } set { Z = value.X; X = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2h with the Z and Y components of this instance. - /// - [XmlIgnore] - public Vector2h Zy { get { return new Vector2h(Z, Y); } set { Z = value.X; Y = value.Y; } } - - /// - /// Gets an OpenTK.Vector2h with the Z and W components of this instance. - /// - [XmlIgnore] - public Vector2h Zw { get { return new Vector2h(Z, W); } set { Z = value.X; W = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2h with the W and X components of this instance. - /// - [XmlIgnore] - public Vector2h Wx { get { return new Vector2h(W, X); } set { W = value.X; X = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2h with the W and Y components of this instance. - /// - [XmlIgnore] - public Vector2h Wy { get { return new Vector2h(W, Y); } set { W = value.X; Y = value.Y; } } - - /// - /// Gets or sets an OpenTK.Vector2h with the W and Z components of this instance. - /// - [XmlIgnore] - public Vector2h Wz { get { return new Vector2h(W, Z); } set { W = value.X; Z = value.Y; } } - - #endregion - - #region 3-component - - /// - /// Gets or sets an OpenTK.Vector3h with the X, Y, and Z components of this instance. - /// - [XmlIgnore] - public Vector3h Xyz { get { return new Vector3h(X, Y, Z); } set { X = value.X; Y = value.Y; Z = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3h with the X, Y, and Z components of this instance. - /// - [XmlIgnore] - public Vector3h Xyw { get { return new Vector3h(X, Y, W); } set { X = value.X; Y = value.Y; W = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3h with the X, Z, and Y components of this instance. - /// - [XmlIgnore] - public Vector3h Xzy { get { return new Vector3h(X, Z, Y); } set { X = value.X; Z = value.Y; Y = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3h with the X, Z, and W components of this instance. - /// - [XmlIgnore] - public Vector3h Xzw { get { return new Vector3h(X, Z, W); } set { X = value.X; Z = value.Y; W = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3h with the X, W, and Y components of this instance. - /// - [XmlIgnore] - public Vector3h Xwy { get { return new Vector3h(X, W, Y); } set { X = value.X; W = value.Y; Y = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3h with the X, W, and Z components of this instance. - /// - [XmlIgnore] - public Vector3h Xwz { get { return new Vector3h(X, W, Z); } set { X = value.X; W = value.Y; Z = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3h with the Y, X, and Z components of this instance. - /// - [XmlIgnore] - public Vector3h Yxz { get { return new Vector3h(Y, X, Z); } set { Y = value.X; X = value.Y; Z = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3h with the Y, X, and W components of this instance. - /// - [XmlIgnore] - public Vector3h Yxw { get { return new Vector3h(Y, X, W); } set { Y = value.X; X = value.Y; W = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3h with the Y, Z, and X components of this instance. 
- /// - [XmlIgnore] - public Vector3h Yzx { get { return new Vector3h(Y, Z, X); } set { Y = value.X; Z = value.Y; X = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3h with the Y, Z, and W components of this instance. - /// - [XmlIgnore] - public Vector3h Yzw { get { return new Vector3h(Y, Z, W); } set { Y = value.X; Z = value.Y; W = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3h with the Y, W, and X components of this instance. - /// - [XmlIgnore] - public Vector3h Ywx { get { return new Vector3h(Y, W, X); } set { Y = value.X; W = value.Y; X = value.Z; } } - - /// - /// Gets an OpenTK.Vector3h with the Y, W, and Z components of this instance. - /// - [XmlIgnore] - public Vector3h Ywz { get { return new Vector3h(Y, W, Z); } set { Y = value.X; W = value.Y; Z = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3h with the Z, X, and Y components of this instance. - /// - [XmlIgnore] - public Vector3h Zxy { get { return new Vector3h(Z, X, Y); } set { Z = value.X; X = value.Y; Y = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3h with the Z, X, and W components of this instance. - /// - [XmlIgnore] - public Vector3h Zxw { get { return new Vector3h(Z, X, W); } set { Z = value.X; X = value.Y; W = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3h with the Z, Y, and X components of this instance. - /// - [XmlIgnore] - public Vector3h Zyx { get { return new Vector3h(Z, Y, X); } set { Z = value.X; Y = value.Y; X = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3h with the Z, Y, and W components of this instance. - /// - [XmlIgnore] - public Vector3h Zyw { get { return new Vector3h(Z, Y, W); } set { Z = value.X; Y = value.Y; W = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3h with the Z, W, and X components of this instance. - /// - [XmlIgnore] - public Vector3h Zwx { get { return new Vector3h(Z, W, X); } set { Z = value.X; W = value.Y; X = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3h with the Z, W, and Y components of this instance. - /// - [XmlIgnore] - public Vector3h Zwy { get { return new Vector3h(Z, W, Y); } set { Z = value.X; W = value.Y; Y = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3h with the W, X, and Y components of this instance. - /// - [XmlIgnore] - public Vector3h Wxy { get { return new Vector3h(W, X, Y); } set { W = value.X; X = value.Y; Y = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3h with the W, X, and Z components of this instance. - /// - [XmlIgnore] - public Vector3h Wxz { get { return new Vector3h(W, X, Z); } set { W = value.X; X = value.Y; Z = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3h with the W, Y, and X components of this instance. - /// - [XmlIgnore] - public Vector3h Wyx { get { return new Vector3h(W, Y, X); } set { W = value.X; Y = value.Y; X = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3h with the W, Y, and Z components of this instance. - /// - [XmlIgnore] - public Vector3h Wyz { get { return new Vector3h(W, Y, Z); } set { W = value.X; Y = value.Y; Z = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3h with the W, Z, and X components of this instance. - /// - [XmlIgnore] - public Vector3h Wzx { get { return new Vector3h(W, Z, X); } set { W = value.X; Z = value.Y; X = value.Z; } } - - /// - /// Gets or sets an OpenTK.Vector3h with the W, Z, and Y components of this instance. 
- /// - [XmlIgnore] - public Vector3h Wzy { get { return new Vector3h(W, Z, Y); } set { W = value.X; Z = value.Y; Y = value.Z; } } - - #endregion - - #region 4-component - - /// - /// Gets or sets an OpenTK.Vector4h with the X, Y, W, and Z components of this instance. - /// - [XmlIgnore] - public Vector4h Xywz { get { return new Vector4h(X, Y, W, Z); } set { X = value.X; Y = value.Y; W = value.Z; Z = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4h with the X, Z, Y, and W components of this instance. - /// - [XmlIgnore] - public Vector4h Xzyw { get { return new Vector4h(X, Z, Y, W); } set { X = value.X; Z = value.Y; Y = value.Z; W = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4h with the X, Z, W, and Y components of this instance. - /// - [XmlIgnore] - public Vector4h Xzwy { get { return new Vector4h(X, Z, W, Y); } set { X = value.X; Z = value.Y; W = value.Z; Y = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4h with the X, W, Y, and Z components of this instance. - /// - [XmlIgnore] - public Vector4h Xwyz { get { return new Vector4h(X, W, Y, Z); } set { X = value.X; W = value.Y; Y = value.Z; Z = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4h with the X, W, Z, and Y components of this instance. - /// - [XmlIgnore] - public Vector4h Xwzy { get { return new Vector4h(X, W, Z, Y); } set { X = value.X; W = value.Y; Z = value.Z; Y = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4h with the Y, X, Z, and W components of this instance. - /// - [XmlIgnore] - public Vector4h Yxzw { get { return new Vector4h(Y, X, Z, W); } set { Y = value.X; X = value.Y; Z = value.Z; W = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4h with the Y, X, W, and Z components of this instance. - /// - [XmlIgnore] - public Vector4h Yxwz { get { return new Vector4h(Y, X, W, Z); } set { Y = value.X; X = value.Y; W = value.Z; Z = value.W; } } - - /// - /// Gets an OpenTK.Vector4h with the Y, Y, Z, and W components of this instance. - /// - [XmlIgnore] - public Vector4h Yyzw { get { return new Vector4h(Y, Y, Z, W); } set { X = value.X; Y = value.Y; Z = value.Z; W = value.W; } } - - /// - /// Gets an OpenTK.Vector4h with the Y, Y, W, and Z components of this instance. - /// - [XmlIgnore] - public Vector4h Yywz { get { return new Vector4h(Y, Y, W, Z); } set { X = value.X; Y = value.Y; W = value.Z; Z = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4h with the Y, Z, X, and W components of this instance. - /// - [XmlIgnore] - public Vector4h Yzxw { get { return new Vector4h(Y, Z, X, W); } set { Y = value.X; Z = value.Y; X = value.Z; W = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4h with the Y, Z, W, and X components of this instance. - /// - [XmlIgnore] - public Vector4h Yzwx { get { return new Vector4h(Y, Z, W, X); } set { Y = value.X; Z = value.Y; W = value.Z; X = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4h with the Y, W, X, and Z components of this instance. - /// - [XmlIgnore] - public Vector4h Ywxz { get { return new Vector4h(Y, W, X, Z); } set { Y = value.X; W = value.Y; X = value.Z; Z = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4h with the Y, W, Z, and X components of this instance. - /// - [XmlIgnore] - public Vector4h Ywzx { get { return new Vector4h(Y, W, Z, X); } set { Y = value.X; W = value.Y; Z = value.Z; X = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4h with the Z, X, Y, and Z components of this instance. 
- /// - [XmlIgnore] - public Vector4h Zxyw { get { return new Vector4h(Z, X, Y, W); } set { Z = value.X; X = value.Y; Y = value.Z; W = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4h with the Z, X, W, and Y components of this instance. - /// - [XmlIgnore] - public Vector4h Zxwy { get { return new Vector4h(Z, X, W, Y); } set { Z = value.X; X = value.Y; W = value.Z; Y = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4h with the Z, Y, X, and W components of this instance. - /// - [XmlIgnore] - public Vector4h Zyxw { get { return new Vector4h(Z, Y, X, W); } set { Z = value.X; Y = value.Y; X = value.Z; W = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4h with the Z, Y, W, and X components of this instance. - /// - [XmlIgnore] - public Vector4h Zywx { get { return new Vector4h(Z, Y, W, X); } set { Z = value.X; Y = value.Y; W = value.Z; X = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4h with the Z, W, X, and Y components of this instance. - /// - [XmlIgnore] - public Vector4h Zwxy { get { return new Vector4h(Z, W, X, Y); } set { Z = value.X; W = value.Y; X = value.Z; Y = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4h with the Z, W, Y, and X components of this instance. - /// - [XmlIgnore] - public Vector4h Zwyx { get { return new Vector4h(Z, W, Y, X); } set { Z = value.X; W = value.Y; Y = value.Z; X = value.W; } } - - /// - /// Gets an OpenTK.Vector4h with the Z, W, Z, and Y components of this instance. - /// - [XmlIgnore] - public Vector4h Zwzy { get { return new Vector4h(Z, W, Z, Y); } set { X = value.X; W = value.Y; Z = value.Z; Y = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4h with the W, X, Y, and Z components of this instance. - /// - [XmlIgnore] - public Vector4h Wxyz { get { return new Vector4h(W, X, Y, Z); } set { W = value.X; X = value.Y; Y = value.Z; Z = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4h with the W, X, Z, and Y components of this instance. - /// - [XmlIgnore] - public Vector4h Wxzy { get { return new Vector4h(W, X, Z, Y); } set { W = value.X; X = value.Y; Z = value.Z; Y = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4h with the W, Y, X, and Z components of this instance. - /// - [XmlIgnore] - public Vector4h Wyxz { get { return new Vector4h(W, Y, X, Z); } set { W = value.X; Y = value.Y; X = value.Z; Z = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4h with the W, Y, Z, and X components of this instance. - /// - [XmlIgnore] - public Vector4h Wyzx { get { return new Vector4h(W, Y, Z, X); } set { W = value.X; Y = value.Y; Z = value.Z; X = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4h with the W, Z, X, and Y components of this instance. - /// - [XmlIgnore] - public Vector4h Wzxy { get { return new Vector4h(W, Z, X, Y); } set { W = value.X; Z = value.Y; X = value.Z; Y = value.W; } } - - /// - /// Gets or sets an OpenTK.Vector4h with the W, Z, Y, and X components of this instance. - /// - [XmlIgnore] - public Vector4h Wzyx { get { return new Vector4h(W, Z, Y, X); } set { W = value.X; Z = value.Y; Y = value.Z; X = value.W; } } - - /// - /// Gets an OpenTK.Vector4h with the W, Z, Y, and W components of this instance. - /// - [XmlIgnore] - public Vector4h Wzyw { get { return new Vector4h(W, Z, Y, W); } set { X = value.X; Z = value.Y; Y = value.Z; W = value.W; } } - - #endregion - - #endregion - - #region Half -> Single - - /// - /// Returns this Half4 instance's contents as Vector4. 
- /// - /// OpenTK.Vector4 - public Vector4 ToVector4() - { - return new Vector4(X, Y, Z, W); - } - - /// - /// Returns this Half4 instance's contents as Vector4d. - /// - public Vector4d ToVector4d() - { - return new Vector4d(X, Y, Z, W); - } - - #endregion Half -> Single - - #region Conversions - - /// Converts OpenTK.Vector4 to OpenTK.Half4. - /// The Vector4 to convert. - /// The resulting Half vector. - public static explicit operator Vector4h(Vector4 v4f) - { - return new Vector4h(v4f); - } - - /// Converts OpenTK.Vector4d to OpenTK.Half4. - /// The Vector4d to convert. - /// The resulting Half vector. - public static explicit operator Vector4h(Vector4d v4d) - { - return new Vector4h(v4d); - } - - /// Converts OpenTK.Half4 to OpenTK.Vector4. - /// The Half4 to convert. - /// The resulting Vector4. - public static explicit operator Vector4(Vector4h h4) - { - Vector4 result = new Vector4(); - result.X = h4.X.ToSingle(); - result.Y = h4.Y.ToSingle(); - result.Z = h4.Z.ToSingle(); - result.W = h4.W.ToSingle(); - return result; - } - - /// Converts OpenTK.Half4 to OpenTK.Vector4d. - /// The Half4 to convert. - /// The resulting Vector4d. - public static explicit operator Vector4d(Vector4h h4) - { - Vector4d result = new Vector4d(); - result.X = h4.X.ToSingle(); - result.Y = h4.Y.ToSingle(); - result.Z = h4.Z.ToSingle(); - result.W = h4.W.ToSingle(); - return result; - } - - #endregion Conversions - - #region Constants - - /// The size in bytes for an instance of the Half4 struct is 8. - public static readonly int SizeInBytes = 8; - - #endregion Constants - - #region ISerializable - - /// Constructor used by ISerializable to deserialize the object. - /// - /// - public Vector4h(SerializationInfo info, StreamingContext context) - { - this.X = (Half)info.GetValue("X", typeof(Half)); - this.Y = (Half)info.GetValue("Y", typeof(Half)); - this.Z = (Half)info.GetValue("Z", typeof(Half)); - this.W = (Half)info.GetValue("W", typeof(Half)); - } - - /// Used by ISerialize to serialize the object. - /// - /// - public void GetObjectData(SerializationInfo info, StreamingContext context) - { - info.AddValue("X", this.X); - info.AddValue("Y", this.Y); - info.AddValue("Z", this.Z); - info.AddValue("W", this.W); - } - - #endregion ISerializable - - #region Binary dump - - /// Updates the X,Y,Z and W components of this instance by reading from a Stream. - /// A BinaryReader instance associated with an open Stream. - public void FromBinaryStream(BinaryReader bin) - { - X.FromBinaryStream(bin); - Y.FromBinaryStream(bin); - Z.FromBinaryStream(bin); - W.FromBinaryStream(bin); - } - - /// Writes the X,Y,Z and W components of this instance into a Stream. - /// A BinaryWriter instance associated with an open Stream. - public void ToBinaryStream(BinaryWriter bin) - { - X.ToBinaryStream(bin); - Y.ToBinaryStream(bin); - Z.ToBinaryStream(bin); - W.ToBinaryStream(bin); - } - - #endregion Binary dump - - #region IEquatable Members - - /// Returns a value indicating whether this instance is equal to a specified OpenTK.Half4 vector. - /// OpenTK.Half4 to compare to this instance.. - /// True, if other is equal to this instance; false otherwise. 
- public bool Equals(Vector4h other) - { - return (this.X.Equals(other.X) && this.Y.Equals(other.Y) && this.Z.Equals(other.Z) && this.W.Equals(other.W)); - } - - #endregion - - #region ToString() - - private static string listSeparator = System.Globalization.CultureInfo.CurrentCulture.TextInfo.ListSeparator; - /// Returns a string that contains this Half4's numbers in human-legible form. - public override string ToString() - { - return String.Format("({0}{4} {1}{4} {2}{4} {3})", X.ToString(), Y.ToString(), Z.ToString(), W.ToString(), listSeparator); - } - - #endregion ToString() - - #region BitConverter - - /// Returns the Half4 as an array of bytes. - /// The Half4 to convert. - /// The input as byte array. - public static byte[] GetBytes(Vector4h h) - { - byte[] result = new byte[SizeInBytes]; - - byte[] temp = Half.GetBytes(h.X); - result[0] = temp[0]; - result[1] = temp[1]; - temp = Half.GetBytes(h.Y); - result[2] = temp[0]; - result[3] = temp[1]; - temp = Half.GetBytes(h.Z); - result[4] = temp[0]; - result[5] = temp[1]; - temp = Half.GetBytes(h.W); - result[6] = temp[0]; - result[7] = temp[1]; - - return result; - } - - /// Converts an array of bytes into Half4. - /// A Half4 in it's byte[] representation. - /// The starting position within value. - /// A new Half4 instance. - public static Vector4h FromBytes(byte[] value, int startIndex) - { - Vector4h h4 = new Vector4h(); - h4.X = Half.FromBytes(value, startIndex); - h4.Y = Half.FromBytes(value, startIndex + 2); - h4.Z = Half.FromBytes(value, startIndex + 4); - h4.W = Half.FromBytes(value, startIndex + 6); - return h4; - } - - #endregion BitConverter - } -} diff --git a/OpenTK/OpenTK.csproj b/OpenTK/OpenTK.csproj deleted file mode 100644 index 3334c66a..00000000 --- a/OpenTK/OpenTK.csproj +++ /dev/null @@ -1,107 +0,0 @@ - - - - - Debug - AnyCPU - {ABB9DB44-14F2-46E0-A4B8-B46C300CA982} - Library - Properties - OpenTK - OpenTK - v4.7.2 - 512 - - - - true - full - false - bin\Debug\ - DEBUG;TRACE - prompt - 4 - true - false - x64 - - - pdbonly - true - bin\Release\ - TRACE - prompt - 4 - true - x64 - false - - - bin\Editor Debug\ - TRACE - true - true - pdbonly - x86 - prompt - MinimumRecommendedRules.ruleset - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/OpenTK/Properties/AssemblyInfo.cs b/OpenTK/Properties/AssemblyInfo.cs deleted file mode 100644 index a9b6bba5..00000000 --- a/OpenTK/Properties/AssemblyInfo.cs +++ /dev/null @@ -1,36 +0,0 @@ -using System.Reflection; -using System.Runtime.CompilerServices; -using System.Runtime.InteropServices; - -// General Information about an assembly is controlled through the following -// set of attributes. Change these attribute values to modify the information -// associated with an assembly. -[assembly: AssemblyTitle("OpenTK")] -[assembly: AssemblyDescription("")] -[assembly: AssemblyConfiguration("")] -[assembly: AssemblyCompany("")] -[assembly: AssemblyProduct("OpenTK")] -[assembly: AssemblyCopyright("Copyright © 2014")] -[assembly: AssemblyTrademark("")] -[assembly: AssemblyCulture("")] - -// Setting ComVisible to false makes the types in this assembly not visible -// to COM components. If you need to access a type in this assembly from -// COM, set the ComVisible attribute to true on that type. 
-[assembly: ComVisible(false)] - -// The following GUID is for the ID of the typelib if this project is exposed to COM -[assembly: Guid("dddb367c-9590-4d8a-8bb1-a32446164876")] - -// Version information for an assembly consists of the following four values: -// -// Major Version -// Minor Version -// Build Number -// Revision -// -// You can specify all the values or you can default the Build and Revision Numbers -// by using the '*' as shown below: -// [assembly: AssemblyVersion("1.0.*")] -[assembly: AssemblyVersion("1.0.0.0")] -[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/RconClient/Properties/AssemblyInfo.cs b/RconClient/Properties/AssemblyInfo.cs index 3031906d..1aa987b1 100644 --- a/RconClient/Properties/AssemblyInfo.cs +++ b/RconClient/Properties/AssemblyInfo.cs @@ -1,16 +1,6 @@ using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; - -// General Information about an assembly is controlled through the following -// set of attributes. Change these attribute values to modify the information -// associated with an assembly. -[assembly: AssemblyTitle("RCON Client")] -[assembly: AssemblyDescription("")] -[assembly: AssemblyConfiguration("")] -[assembly: AssemblyCompany("")] -[assembly: AssemblyProduct("LSLib")] -[assembly: AssemblyCopyright("Copyright © Norbyte 2012-2018")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] @@ -21,16 +11,3 @@ // The following GUID is for the ID of the typelib if this project is exposed to COM [assembly: Guid("ef82c289-53d6-41c8-b5c3-72b37655c7f3")] - -// Version information for an assembly consists of the following four values: -// -// Major Version -// Minor Version -// Build Number -// Revision -// -// You can specify all the values or you can default the Build and Revision Numbers -// by using the '*' as shown below: -// [assembly: AssemblyVersion("1.0.*")] -[assembly: AssemblyVersion("1.0.0.0")] -[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/RconClient/RconClient.csproj b/RconClient/RconClient.csproj index df482ba7..cc0917a7 100644 --- a/RconClient/RconClient.csproj +++ b/RconClient/RconClient.csproj @@ -1,65 +1,17 @@ - - - + - Debug - AnyCPU - {EF82C289-53D6-41C8-B5C3-72B37655C7F3} + net8.0 Exe - Properties LSLib.Rcon - RconClient - v4.7.2 - 512 - - - - true - full - false - bin\Debug\ - DEBUG;TRACE - prompt - 4 - false - x64 - - - pdbonly - true - bin\Release\ - TRACE - prompt - 4 - false + false x64 + RCON Client + LSLib + Copyright © Norbyte 2012-2018 + 1.0.0.0 + 1.0.0.0 - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/StatParser/Properties/AssemblyInfo.cs b/StatParser/Properties/AssemblyInfo.cs index 3e731079..5b733744 100644 --- a/StatParser/Properties/AssemblyInfo.cs +++ b/StatParser/Properties/AssemblyInfo.cs @@ -1,16 +1,6 @@ using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; - -// General Information about an assembly is controlled through the following -// set of attributes. Change these attribute values to modify the information -// associated with an assembly. 
-[assembly: AssemblyTitle("StatParser")] -[assembly: AssemblyDescription("")] -[assembly: AssemblyConfiguration("")] -[assembly: AssemblyCompany("")] -[assembly: AssemblyProduct("StatParser")] -[assembly: AssemblyCopyright("Copyright © 2019")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] @@ -21,16 +11,3 @@ // The following GUID is for the ID of the typelib if this project is exposed to COM [assembly: Guid("94d900d1-ec77-4170-8942-56e3736e44de")] - -// Version information for an assembly consists of the following four values: -// -// Major Version -// Minor Version -// Build Number -// Revision -// -// You can specify all the values or you can default the Build and Revision Numbers -// by using the '*' as shown below: -// [assembly: AssemblyVersion("1.0.*")] -[assembly: AssemblyVersion("1.0.0.0")] -[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/StatParser/StatParser.csproj b/StatParser/StatParser.csproj index 8e6833af..f0bb54f1 100644 --- a/StatParser/StatParser.csproj +++ b/StatParser/StatParser.csproj @@ -1,67 +1,19 @@ - - - + - Debug - AnyCPU - {94D900D1-EC77-4170-8942-56E3736E44DE} + net8.0 Exe - StatParser - StatParser - v4.7.2 - 512 - true - true - - - x64 - true - full - false - bin\Debug\ - DEBUG;TRACE - prompt - 4 - false - - + false x64 - pdbonly - true - bin\Release\ - TRACE - prompt - 4 - false + StatParser + StatParser + Copyright © 2019 + 1.0.0.0 + 1.0.0.0 - - ..\packages\CommandLineArgumentsParser.3.0.20\lib\net452\CommandLineArgumentsParser.dll - - - - - - - - - - - - - - - - - - - + - - {46372C50-4288-4B8E-AF21-C934560600E0} - LSLib - + - \ No newline at end of file diff --git a/StatParser/packages.config b/StatParser/packages.config deleted file mode 100644 index e9cd0fb2..00000000 --- a/StatParser/packages.config +++ /dev/null @@ -1,4 +0,0 @@ - - - - \ No newline at end of file diff --git a/StoryCompiler/DebugInfoSaver.cs b/StoryCompiler/DebugInfoSaver.cs index 39ad1a50..1ece6b7c 100644 --- a/StoryCompiler/DebugInfoSaver.cs +++ b/StoryCompiler/DebugInfoSaver.cs @@ -192,7 +192,7 @@ public void Save(Stream stream, StoryDebugInfo debugInfo) codedStream.Flush(); byte[] proto = ms.ToArray(); - byte flags = BinUtils.MakeCompressionFlags(LSLib.LS.Enums.CompressionMethod.LZ4, LSLib.LS.Enums.CompressionLevel.FastCompression); + byte flags = BinUtils.MakeCompressionFlags(LSLib.LS.Enums.CompressionMethod.LZ4, LSLib.LS.Enums.LSCompressionLevel.FastCompression); byte[] compressed = BinUtils.Compress(proto, flags); stream.Write(compressed, 0, compressed.Length); diff --git a/StoryCompiler/Properties/AssemblyInfo.cs b/StoryCompiler/Properties/AssemblyInfo.cs index 1a14f3f6..6f2ae9c8 100644 --- a/StoryCompiler/Properties/AssemblyInfo.cs +++ b/StoryCompiler/Properties/AssemblyInfo.cs @@ -1,16 +1,6 @@ using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; - -// General Information about an assembly is controlled through the following -// set of attributes. Change these attribute values to modify the information -// associated with an assembly. 
-[assembly: AssemblyTitle("Osiris Story Compiler")] -[assembly: AssemblyDescription("")] -[assembly: AssemblyConfiguration("")] -[assembly: AssemblyCompany("")] -[assembly: AssemblyProduct("LSLib")] -[assembly: AssemblyCopyright("Copyright © Norbyte 2012-2018")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] @@ -21,16 +11,3 @@ // The following GUID is for the ID of the typelib if this project is exposed to COM [assembly: Guid("32f08b9a-f50b-4c2e-ab56-533fed066dde")] - -// Version information for an assembly consists of the following four values: -// -// Major Version -// Minor Version -// Build Number -// Revision -// -// You can specify all the values or you can default the Build and Revision Numbers -// by using the '*' as shown below: -// [assembly: AssemblyVersion("1.0.*")] -[assembly: AssemblyVersion("1.0.0.0")] -[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/StoryCompiler/StoryCompiler.csproj b/StoryCompiler/StoryCompiler.csproj index e525bcc6..6cedeb7d 100644 --- a/StoryCompiler/StoryCompiler.csproj +++ b/StoryCompiler/StoryCompiler.csproj @@ -1,17 +1,8 @@ - - - + - Debug - AnyCPU - {32F08B9A-F50B-4C2E-AB56-533FED066DDE} + net8.0 Exe LSTools.StoryCompiler - StoryCompiler - v4.7.2 - 512 - true - false publish\ true @@ -27,70 +18,14 @@ 1.0.0.%2a false true - - - x64 - true - full - false - bin\Debug\ - DEBUG;TRACE - prompt - 4 - false - - + false x64 - pdbonly - true - bin\Release\ - TRACE - prompt - 4 - false - - ..\packages\CommandLineArgumentsParser.3.0.19\lib\net452\CommandLineArgumentsParser.dll - - - ..\packages\Google.Protobuf.3.6.1\lib\net45\Google.Protobuf.dll - - - ..\packages\Newtonsoft.Json.13.0.1\lib\net45\Newtonsoft.Json.dll - - - - - - - - - - - - - + - - - - - - - - - - - - - - - - {46372c50-4288-4b8e-af21-c934560600e0} - LSLib - + @@ -104,8 +39,23 @@ false - + + + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + + + + + + - $(SolutionDir)\External\protoc\bin\protoc.exe --proto_path=$(ProjectDir) --csharp_out=$(ProjectDir) debuginfo.proto + Osiris Story Compiler + LSLib + Copyright © Norbyte 2012-2018 + 1.0.0.0 + 1.0.0.0 \ No newline at end of file diff --git a/StoryCompiler/packages.config b/StoryCompiler/packages.config deleted file mode 100644 index 623d717c..00000000 --- a/StoryCompiler/packages.config +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - \ No newline at end of file diff --git a/StoryDecompiler/Properties/AssemblyInfo.cs b/StoryDecompiler/Properties/AssemblyInfo.cs index 44a53ac6..a8317ca3 100644 --- a/StoryDecompiler/Properties/AssemblyInfo.cs +++ b/StoryDecompiler/Properties/AssemblyInfo.cs @@ -1,16 +1,6 @@ using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; - -// General Information about an assembly is controlled through the following -// set of attributes. Change these attribute values to modify the information -// associated with an assembly. 
-[assembly: AssemblyTitle("Osiris Story Decompiler")] -[assembly: AssemblyDescription("")] -[assembly: AssemblyConfiguration("")] -[assembly: AssemblyCompany("")] -[assembly: AssemblyProduct("LSLib")] -[assembly: AssemblyCopyright("Copyright © Norbyte 2012-2018")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] @@ -21,16 +11,3 @@ // The following GUID is for the ID of the typelib if this project is exposed to COM [assembly: Guid("e4b4f95e-f027-44d7-ab93-b96ef2e661b6")] - -// Version information for an assembly consists of the following four values: -// -// Major Version -// Minor Version -// Build Number -// Revision -// -// You can specify all the values or you can default the Build and Revision Numbers -// by using the '*' as shown below: -// [assembly: AssemblyVersion("1.0.*")] -[assembly: AssemblyVersion("1.0.0.0")] -[assembly: AssemblyFileVersion("1.0.0.0")] diff --git a/StoryDecompiler/StoryDecompiler.csproj b/StoryDecompiler/StoryDecompiler.csproj index 266960fa..eb7b59c6 100644 --- a/StoryDecompiler/StoryDecompiler.csproj +++ b/StoryDecompiler/StoryDecompiler.csproj @@ -1,70 +1,19 @@ - - - + - Debug - AnyCPU - {E4B4F95E-F027-44D7-AB93-B96EF2E661B6} + net8.0 Exe - StoryDecompiler - StoryDecompiler - v4.7.2 - 512 - true - - - - x64 - true - full - false - bin\Debug\ - DEBUG;TRACE - prompt - 4 - false - - + false x64 - pdbonly - true - bin\Release\ - TRACE - prompt - 4 - false + Osiris Story Decompiler + LSLib + Copyright © Norbyte 2012-2018 + 1.0.0.0 + 1.0.0.0 - - ..\packages\CommandLineArgumentsParser.3.0.19\lib\net452\CommandLineArgumentsParser.dll - - - - - - - - - - - - - - - - - - - - - - + - - {46372c50-4288-4b8e-af21-c934560600e0} - LSLib - + - \ No newline at end of file diff --git a/StoryDecompiler/packages.config b/StoryDecompiler/packages.config deleted file mode 100644 index 312644b4..00000000 --- a/StoryDecompiler/packages.config +++ /dev/null @@ -1,4 +0,0 @@ - - - - \ No newline at end of file diff --git a/TerrainFixup/Program.cs b/TerrainFixup/Program.cs deleted file mode 100644 index 9cbab7c6..00000000 --- a/TerrainFixup/Program.cs +++ /dev/null @@ -1,307 +0,0 @@ -using LSLib.LS; -using OpenTK; -using System; -using System.Collections.Generic; -using System.IO; -using System.Runtime.InteropServices; - -namespace TerrainFixup -{ - [StructLayout(LayoutKind.Sequential)] - public struct TerrainPatchHeader - { - [MarshalAs(UnmanagedType.ByValArray, SizeConst = 8)] - public byte[] Signature; // PVersion - - public UInt32 Version; - } - - public class TerrainPatchLayer - { - public int Index; - public int[] Data; - public byte[] Data2; - } - - public class TerrainPatch - { - public string Path; - public TerrainPatchHeader Header; - public int X; - public int Y; - public int Width; - public int Height; - public float[] Heightmap; - public Vector2d[] Vertices; - public int[] Indices; - public int PatchIndex; - public TerrainPatchLayer[] Layers; - } - - public class Terrain - { - public string Directory; - public string GUID; - public int Width; - public int Height; - public int CellsX; - public int CellsY; - public int PatchesX; - public int PatchesY; - public int NumPatches; - public TerrainPatch[] Patches; - } - - class Program - { - static void Main(string[] args) - { - if (args.Length < 2 || args.Length > 3) - { - Console.WriteLine("Usage: TerrainFixup []"); - return; - } - - Console.WriteLine($"Loading terrains from {args[1]} ..."); - var terrains = new Dictionary(); - LoadTerrainsFromPath(args[0], args[1], terrains); - - var referenceTerrains = new 
Dictionary(); - if (args.Length > 2) - { - Console.WriteLine($"Loading reference terrains from {args[2]} ..."); - LoadTerrainsFromPath(args[0], args[2], referenceTerrains); - } - - Console.WriteLine($"Updating terrain meshes ..."); - foreach (var terrain in terrains) - { - Terrain refTerrain = null; - referenceTerrains.TryGetValue(terrain.Key, out refTerrain); - PatchTerrain(terrain.Value, refTerrain); - } - - Console.WriteLine($"Updating patches ..."); - foreach (var terrain in terrains) - { - SaveTerrainPatches(terrain.Value); - } - } - - private static void PatchTerrain(Terrain terrain, Terrain refTerrain) - { - if (refTerrain != null) - { - if (refTerrain.CellsX == terrain.CellsX && refTerrain.CellsY == terrain.CellsY) - { - Console.WriteLine($"Patching reference data to terrain {terrain.GUID}."); - for (var i = 0; i < terrain.Patches.Length; i++) - { - Console.WriteLine($"Patch {i}: {refTerrain.Patches[i].Indices.Length} inds, {refTerrain.Patches[i].Vertices.Length} verts"); - terrain.Patches[i].Indices = refTerrain.Patches[i].Indices; - terrain.Patches[i].Vertices = refTerrain.Patches[i].Vertices; - } - - return; - } - else - { - Console.WriteLine($"Terrain {terrain.GUID} patch size differs; couldnt apply ref mesh."); - } - } - - Console.WriteLine($"Terrain {terrain.GUID} has no reference data, clearing vertex buffers."); - foreach (var patch in terrain.Patches) - { - patch.Indices = new int[0]; - patch.Vertices = new Vector2d[0]; - } - } - - private static void LoadTerrainsFromPath(string path, string patchDir, Dictionary terrains) - { - foreach (var lsfPath in Directory.GetFiles(path, "*.lsf")) - { - LoadTerrainsFromLSF(lsfPath, patchDir, terrains); - } - } - - private static void LoadTerrainsFromLSF(string path, string patchDir, Dictionary terrains) - { - var loadParams = ResourceLoadParameters.FromGameVersion(LSLib.LS.Enums.Game.DivinityOriginalSin2DE); - var terrainRes = ResourceUtils.LoadResource(path, loadParams); - var tmpls = terrainRes.Regions["Templates"]; - if (tmpls.Children.TryGetValue("GameObjects", out List terrainTemplates)) - { - foreach (var tmpl in terrainTemplates) - { - var terrain = LoadTerrainFromNode(patchDir, tmpl); - terrain.Directory = patchDir; - terrains.Add(terrain.GUID, terrain); - } - } - } - - private static Terrain LoadTerrainFromNode(string dir, Node node) - { - var terrain = new Terrain(); - terrain.GUID = (string)node.Attributes["MapKey"].Value; - terrain.Width = (int)node.Children["Visual"][0].Attributes["Width"].Value; - terrain.Height = (int)node.Children["Visual"][0].Attributes["Height"].Value; - terrain.CellsX = (int)Math.Ceiling(terrain.Width * 0.5f) + 1; - terrain.CellsY = (int)Math.Ceiling(terrain.Height * 0.5f) + 1; - terrain.PatchesX = (int)Math.Ceiling(terrain.Width * 0.015625f); - terrain.PatchesY = (int)Math.Ceiling(terrain.Height * 0.015625f); - terrain.NumPatches = terrain.PatchesX * terrain.PatchesY; - terrain.Patches = new TerrainPatch[terrain.NumPatches]; - - int offsetY = 0; - for (int y = 0; y < terrain.PatchesY; y++) - { - int sizeY = Math.Min(terrain.CellsY - offsetY - 1, 32) + 1; - int offsetX = 0; - for (int x = 0; x < terrain.PatchesX; x++) - { - int sizeX = Math.Min(terrain.CellsX - offsetX - 1, 32) + 1; - - var patchPath = Path.Join(dir, $"{terrain.GUID}_{x}_{y}.patch"); - var patch = new TerrainPatch(); - patch.X = x; - patch.Y = y; - patch.Width = sizeX; - patch.Height = sizeY; - LoadPatch(patch, patchPath); - terrain.Patches[x + y * terrain.PatchesX] = patch; - offsetX += 32; - } - offsetY += 32; - } - - return 
terrain; - } - - private static void SaveTerrainPatches(Terrain terrain) - { - foreach (var patch in terrain.Patches) - { - SavePatch(patch, patch.Path); - } - } - - private static void LoadPatch(TerrainPatch patch, string sourcePath) - { - using (var fs = new FileStream(sourcePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite)) - using (var reader = new BinaryReader(fs)) - { - patch.Path = sourcePath; - patch.Header = BinUtils.ReadStruct(reader); - if (patch.Header.Version != 4) - throw new InvalidFormatException(String.Format("Can only read version 4 terrain patch files; this file is v{0}", patch.Header.Version)); - - patch.Heightmap = new float[patch.Width * patch.Height]; - for (var i = 0; i < patch.Width * patch.Height; i++) - { - patch.Heightmap[i] = reader.ReadSingle(); - } - - int numVertices = reader.ReadInt32(); - patch.Vertices = new Vector2d[numVertices]; - for (var i = 0; i < numVertices; i++) - { - int x = reader.ReadInt32(); - int y = reader.ReadInt32(); - patch.Vertices[i] = new Vector2d(x, y); - } - - int numIndices = reader.ReadInt32(); - patch.Indices = new int[numIndices/4]; - for (var i = 0; i < numIndices / 4; i++) - { - patch.Indices[i] = reader.ReadInt32(); - } - - patch.PatchIndex = reader.ReadInt32(); - int numLayers = reader.ReadInt32(); - - patch.Layers = new TerrainPatchLayer[numLayers]; - for (var i = 0; i < numLayers; i++) - { - var layer = new TerrainPatchLayer(); - layer.Index = reader.ReadInt32(); - if (layer.Index != -1) - { - int layerBytes = reader.ReadInt32(); - layer.Data = new int[layerBytes/4]; - for (int j = 0; j < layerBytes/4; j++) - { - layer.Data[j] = reader.ReadInt32(); - } - int layerBytes2 = reader.ReadInt32(); - layer.Data2 = new byte[layerBytes2]; - for (int j = 0; j < layerBytes2; j++) - { - layer.Data2[j] = reader.ReadByte(); - } - } - - patch.Layers[i] = layer; - } - - if (fs.Position != fs.Length) - { - throw new InvalidDataException("Did not reach EOF?"); - } - } - } - - private static void SavePatch(TerrainPatch patch, string sourcePath) - { - using (var fs = new FileStream(sourcePath, FileMode.Create, FileAccess.Write, FileShare.ReadWrite)) - using (var writer = new BinaryWriter(fs)) - { - BinUtils.WriteStruct(writer, ref patch.Header); - - for (var i = 0; i < patch.Width * patch.Height; i++) - { - writer.Write(patch.Heightmap[i]); - } - - writer.Write((Int32)patch.Vertices.Length); - for (var i = 0; i < patch.Vertices.Length; i++) - { - writer.Write(patch.Vertices[i].X); - writer.Write(patch.Vertices[i].Y); - } - - writer.Write((Int32)patch.Indices.Length * 4); - for (var i = 0; i < patch.Indices.Length; i++) - { - writer.Write(patch.Indices[i]); - } - - writer.Write(patch.PatchIndex); - writer.Write((Int32)patch.Layers.Length); - - for (var i = 0; i < patch.Layers.Length; i++) - { - var layer = patch.Layers[i]; - writer.Write(layer.Index); - if (layer.Index != -1) - { - writer.Write((Int32)layer.Data.Length*4); - for (int j = 0; j < layer.Data.Length; j++) - { - writer.Write(layer.Data[j]); - } - - writer.Write((Int32)layer.Data2.Length); - for (int j = 0; j < layer.Data2.Length; j++) - { - writer.Write(layer.Data2[j]); - } - } - } - } - } - } -} diff --git a/TerrainFixup/TerrainFixup.csproj b/TerrainFixup/TerrainFixup.csproj deleted file mode 100644 index 75c276b7..00000000 --- a/TerrainFixup/TerrainFixup.csproj +++ /dev/null @@ -1,13 +0,0 @@ - - - - Exe - net5.0 - - - - - - - - diff --git a/UnpackGrid/Program.cs b/UnpackGrid/Program.cs deleted file mode 100644 index a4639b68..00000000 --- a/UnpackGrid/Program.cs +++ 
/dev/null @@ -1,69 +0,0 @@ -using LSLib.LS; -using System; -using System.IO; -using System.Runtime.InteropServices; - -namespace UnpackGrid -{ - [StructLayout(LayoutKind.Sequential)] - public struct AIGridHeader - { - public UInt32 Version; - public Int32 Width; - public Int32 Height; - public Single OffsetX; - public Single OffsetY; - public Single OffsetZ; - } - - [StructLayout(LayoutKind.Sequential)] - public struct AIGridCompressionHeader - { - public Int32 UncompressedSize; - public Int32 CompressedSize; - } - - class Program - { - static void Main(string[] args) - { - if (args.Length != 2) - { - Console.WriteLine("Usage: UnpackGrid "); - return; - } - - UnpackAiGrid(args[0], args[1]); - } - - private static void UnpackAiGrid(string sourcePath, string destinationPath) - { - using (var fs = new FileStream(sourcePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite)) - using (var reader = new BinaryReader(fs)) - { - var header = BinUtils.ReadStruct(reader); - if (header.Version != 4) - throw new InvalidFormatException(String.Format("Can only decompress version 4 AI grid files; this file is v{0}", header.Version)); - - var compHeader = BinUtils.ReadStruct(reader); - - if (fs.Length != compHeader.CompressedSize + fs.Position) - throw new InvalidFormatException(String.Format("Invalid AI grid file size; expected {0}, got {1}", compHeader.CompressedSize + fs.Position, fs.Length)); - - var compressedBlob = reader.ReadBytes(compHeader.CompressedSize); - var uncompressed = BinUtils.Decompress(compressedBlob, compHeader.UncompressedSize, 0x21); - var uncompressed2 = BinUtils.Decompress(uncompressed, 16 * header.Width * header.Height, 0x42); - - header.Version = 2; - using (var unpackedFs = new FileStream(destinationPath, FileMode.Create)) - using (var writer = new BinaryWriter(unpackedFs)) - { - BinUtils.WriteStruct(writer, ref header); - writer.Write(uncompressed2); - } - } - - Console.WriteLine($"Wrote resource to: {destinationPath}"); - } - } -} diff --git a/UnpackGrid/UnpackGrid.csproj b/UnpackGrid/UnpackGrid.csproj deleted file mode 100644 index 5a506c33..00000000 --- a/UnpackGrid/UnpackGrid.csproj +++ /dev/null @@ -1,12 +0,0 @@ - - - - Exe - net5.0 - - - - - - - diff --git a/VTexTool/App.config b/VTexTool/App.config new file mode 100644 index 00000000..ecdcf8a5 --- /dev/null +++ b/VTexTool/App.config @@ -0,0 +1,6 @@ + + + + + + diff --git a/VTexTool/Program.cs b/VTexTool/Program.cs new file mode 100644 index 00000000..5ceceac4 --- /dev/null +++ b/VTexTool/Program.cs @@ -0,0 +1,55 @@ +using LSLib.LS; +using LSLib.VirtualTextures; +using System; +using System.IO; +using System.Linq; + +namespace LSTools.StoryDecompiler +{ + class Program + { + static void Main(string[] args) + { + if (args.Length != 2) + { + Console.WriteLine("Usage: VTexTool.exe "); + Environment.Exit(1); + } + + Console.WriteLine($"LSLib Virtual Tile Set Generator (v{Common.MajorVersion}.{Common.MinorVersion}.{Common.PatchVersion})"); + + try + { + var configPath = Path.Combine(args[0], args[1]); + var descriptor = new TileSetDescriptor + { + RootPath = args[0] + }; + descriptor.Load(configPath); + + var builder = new TileSetBuilder(descriptor.Config); + foreach (var texture in descriptor.Textures) + { + var layerPaths = texture.Layers.Select(name => name != null ? 
Path.Combine(descriptor.SourceTexturePath, name) : null).ToList(); + builder.AddTexture(texture.Name, layerPaths); + } + + builder.Build(descriptor.VirtualTexturePath); + } + catch (InvalidDataException e) + { + Console.ForegroundColor = ConsoleColor.Red; + Console.WriteLine(e.Message); + Console.ForegroundColor = ConsoleColor.Gray; + Environment.Exit(1); + } + catch (FileNotFoundException e) + { + Console.ForegroundColor = ConsoleColor.Red; + Console.WriteLine(e.Message); + Console.ForegroundColor = ConsoleColor.Gray; + Environment.Exit(1); + } + } + } +} diff --git a/VTexTool/Properties/AssemblyInfo.cs b/VTexTool/Properties/AssemblyInfo.cs new file mode 100644 index 00000000..bd2d8575 --- /dev/null +++ b/VTexTool/Properties/AssemblyInfo.cs @@ -0,0 +1,13 @@ +using System.Reflection; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; +[assembly: AssemblyTrademark("")] +[assembly: AssemblyCulture("")] + +// Setting ComVisible to false makes the types in this assembly not visible +// to COM components. If you need to access a type in this assembly from +// COM, set the ComVisible attribute to true on that type. +[assembly: ComVisible(false)] + +// The following GUID is for the ID of the typelib if this project is exposed to COM +[assembly: Guid("67e646c2-3c3c-4327-a0b4-40c1db32579f")] diff --git a/VTexTool/VTexTool.csproj b/VTexTool/VTexTool.csproj new file mode 100644 index 00000000..dd14a5d9 --- /dev/null +++ b/VTexTool/VTexTool.csproj @@ -0,0 +1,16 @@ + + + net8.0 + Exe + false + x64 + Osiris Story Decompiler + LSLib + Copyright © Norbyte 2012-2018 + 1.0.0.0 + 1.0.0.0 + + + + + \ No newline at end of file From f3ba0d2c9ab491c6abbd2074c4d69c3d29a43330 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Fri, 8 Dec 2023 23:30:38 +0100 Subject: [PATCH 042/139] Fix path management jank --- ConverterApp/DebugPane.cs | 6 +++--- ConverterApp/VirtualTexturesPane.cs | 2 +- DebuggerFrontend/Program.cs | 2 +- LSLib/LS/Mods/ModResources.cs | 22 +++++++++++----------- LSLib/LS/PackageCommon.cs | 2 +- LSLib/LS/ResourceUtils.cs | 4 ++-- LSLib/VirtualTextures/Build.cs | 4 ++-- LSLib/VirtualTextures/VirtualTexture.cs | 2 +- 8 files changed, 22 insertions(+), 22 deletions(-) diff --git a/ConverterApp/DebugPane.cs b/ConverterApp/DebugPane.cs index a3ff866a..40ce6689 100644 --- a/ConverterApp/DebugPane.cs +++ b/ConverterApp/DebugPane.cs @@ -19,13 +19,13 @@ public DebugPane(ISettingsDataSource settingsDataSource) private DebugDumperTask CreateDumperFromSettings() { - string dumpPath = Path.GetDirectoryName(saveFilePath.Text) + "\\" + Path.GetFileNameWithoutExtension(saveFilePath.Text) + "\\"; + string dumpPath = Path.Join(Path.GetDirectoryName(saveFilePath.Text), Path.GetFileNameWithoutExtension(saveFilePath.Text)); var dumper = new DebugDumperTask { GameVersion = Game, - ExtractionPath = dumpPath + "SaveArchive", - DataDumpPath = dumpPath + "Dumps", + ExtractionPath = Path.Join(dumpPath, "SaveArchive"), + DataDumpPath = Path.Join(dumpPath, "Dumps"), SaveFilePath = saveFilePath.Text, diff --git a/ConverterApp/VirtualTexturesPane.cs b/ConverterApp/VirtualTexturesPane.cs index 0782e3a5..7d6e88ab 100644 --- a/ConverterApp/VirtualTexturesPane.cs +++ b/ConverterApp/VirtualTexturesPane.cs @@ -60,7 +60,7 @@ private void extractTileSetBtn_Click(object sender, EventArgs e) if (tex != null) { - var outputPath = destinationPath.Text + Path.PathSeparator + texture.Name + $"_{layer}.dds"; + var outputPath = Path.Join(destinationPath.Text, texture.Name + $"_{layer}.dds"); tex.SaveDDS(outputPath); } } 
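Note on the pattern applied throughout this commit: the old code built paths by string concatenation, in one case using Path.PathSeparator, which in .NET is the *list* separator (';' on Windows), not a directory separator. The following is a minimal, self-contained sketch — not part of the patch, using a hypothetical output directory and file name — of why those call sites were switched to Path.Join / Path.Combine.

    using System;
    using System.IO;

    class PathJoinDemo
    {
        static void Main()
        {
            var dir = @"C:\Mods\Output";

            // Path.PathSeparator separates entries in PATH-style lists (';' on Windows),
            // so concatenating with it yields "C:\Mods\Output;Texture_0.dds".
            var broken = dir + Path.PathSeparator + "Texture_0.dds";

            // Path.Join always inserts the platform directory separator and never
            // discards the base path.
            var joined = Path.Join(dir, "Texture_0.dds");       // C:\Mods\Output\Texture_0.dds

            // Path.Combine is similar, but restarts from the second argument when it is rooted.
            var combined = Path.Combine(dir, @"D:\Other.dds");  // D:\Other.dds

            Console.WriteLine($"{broken}\n{joined}\n{combined}");
        }
    }

Both Path.Join and Path.Combine also remove the hard-coded '\\' separators, so the tools behave the same on non-Windows hosts under .NET 8.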
diff --git a/DebuggerFrontend/Program.cs b/DebuggerFrontend/Program.cs index 2f2c6f27..2a540860 100644 --- a/DebuggerFrontend/Program.cs +++ b/DebuggerFrontend/Program.cs @@ -13,7 +13,7 @@ class Program static void Main(string[] args) { var currentPath = AppDomain.CurrentDomain.BaseDirectory; - var logFile = new FileStream(currentPath + "\\DAP.log", FileMode.Create); + var logFile = new FileStream(Path.Join(currentPath, "DAP.log"), FileMode.Create); var dap = new DAPStream(); dap.EnableLogging(logFile); var dapHandler = new DAPMessageHandler(dap); diff --git a/LSLib/LS/Mods/ModResources.cs b/LSLib/LS/Mods/ModResources.cs index f4fb4af3..b35aad95 100644 --- a/LSLib/LS/Mods/ModResources.cs +++ b/LSLib/LS/Mods/ModResources.cs @@ -336,7 +336,7 @@ public void DiscoverUserPackages(string gameDataPath) private void DiscoverModGoals(string modName, string modPath) { - var goalPath = modPath + @"\Story\RawFiles\Goals"; + var goalPath = Path.Join(modPath, @"Story\RawFiles\Goals"); if (!Directory.Exists(goalPath)) return; List goalFiles = []; @@ -346,7 +346,7 @@ private void DiscoverModGoals(string modName, string modPath) { var fileInfo = new FilesystemFileInfo { - FilesystemPath = goalPath + "\\" + goalFile, + FilesystemPath = Path.Join(goalPath, goalFile), Name = goalFile }; AddScriptToMod(modName, goalFile, fileInfo); @@ -355,7 +355,7 @@ private void DiscoverModGoals(string modName, string modPath) private void DiscoverModStats(string modName, string modPublicPath) { - var statsPath = modPublicPath + @"\Stats\Generated\Data"; + var statsPath = Path.Join(modPublicPath, @"Stats\Generated\Data"); if (!Directory.Exists(statsPath)) return; List statFiles = []; @@ -365,7 +365,7 @@ private void DiscoverModStats(string modName, string modPublicPath) { var fileInfo = new FilesystemFileInfo { - FilesystemPath = statsPath + "\\" + statFile, + FilesystemPath = Path.Join(statsPath, statFile), Name = statFile }; AddStatToMod(modName, statFile, fileInfo); @@ -374,7 +374,7 @@ private void DiscoverModStats(string modName, string modPublicPath) private void DiscoverModGlobals(string modName, string modPath) { - var globalsPath = modPath + @"\Globals"; + var globalsPath = Path.Join(modPath, "Globals"); if (!Directory.Exists(globalsPath)) return; List globalFiles = []; @@ -384,7 +384,7 @@ private void DiscoverModGlobals(string modName, string modPath) { var fileInfo = new FilesystemFileInfo { - FilesystemPath = globalsPath + "\\" + globalFile, + FilesystemPath = Path.Join(globalsPath, globalFile), Name = globalFile }; AddGlobalsToMod(modName, globalFile, fileInfo); @@ -393,7 +393,7 @@ private void DiscoverModGlobals(string modName, string modPath) private void DiscoverModLevelObjects(string modName, string modPath) { - var levelsPath = modPath + @"\Levels"; + var levelsPath = Path.Join(modPath, "Levels"); if (!Directory.Exists(levelsPath)) return; List levelFiles = []; @@ -404,7 +404,7 @@ private void DiscoverModLevelObjects(string modName, string modPath) { var fileInfo = new FilesystemFileInfo { - FilesystemPath = levelsPath + "\\" + levelFile, + FilesystemPath = Path.Join(levelsPath, levelFile), Name = levelFile }; AddLevelObjectsToMod(modName, levelFile, fileInfo); @@ -420,7 +420,7 @@ public void DiscoverModDirectory(string modName, string modPath, string publicPa { DiscoverModGoals(modName, modPath); - var headerPath = modPath + @"\Story\RawFiles\story_header.div"; + var headerPath = Path.Join(modPath, @"Story\RawFiles\story_header.div"); if (File.Exists(headerPath)) { var fileInfo = new FilesystemFileInfo @@ 
-431,7 +431,7 @@ public void DiscoverModDirectory(string modName, string modPath, string publicPa GetMod(modName).StoryHeaderFile = fileInfo; } - var orphanQueryIgnoresPath = modPath + @"\Story\story_orphanqueries_ignore_local.txt"; + var orphanQueryIgnoresPath = Path.Join(modPath, @"Story\story_orphanqueries_ignore_local.txt"); if (File.Exists(orphanQueryIgnoresPath)) { var fileInfo = new FilesystemFileInfo @@ -442,7 +442,7 @@ public void DiscoverModDirectory(string modName, string modPath, string publicPa GetMod(modName).OrphanQueryIgnoreList = fileInfo; } - var typeCoercionWhitelistPath = modPath + @"\Story\RawFiles\TypeCoercionWhitelist.txt"; + var typeCoercionWhitelistPath = Path.Join(modPath, @"Story\RawFiles\TypeCoercionWhitelist.txt"); if (File.Exists(typeCoercionWhitelistPath)) { var fileInfo = new FilesystemFileInfo diff --git a/LSLib/LS/PackageCommon.cs b/LSLib/LS/PackageCommon.cs index ba701226..17bf4ea8 100644 --- a/LSLib/LS/PackageCommon.cs +++ b/LSLib/LS/PackageCommon.cs @@ -653,7 +653,7 @@ public void UncompressPackage(Package package, string outputPath, Func Date: Mon, 11 Dec 2023 11:47:01 +0100 Subject: [PATCH 043/139] Allow extracting single GTex files --- ConverterApp/VirtualTexturesPane.Designer.cs | 39 +++- ConverterApp/VirtualTexturesPane.cs | 222 ++++++++++--------- 2 files changed, 149 insertions(+), 112 deletions(-) diff --git a/ConverterApp/VirtualTexturesPane.Designer.cs b/ConverterApp/VirtualTexturesPane.Designer.cs index ba424168..5ae59eac 100644 --- a/ConverterApp/VirtualTexturesPane.Designer.cs +++ b/ConverterApp/VirtualTexturesPane.Designer.cs @@ -54,6 +54,8 @@ private void InitializeComponent() label2 = new System.Windows.Forms.Label(); modRootPathDlg = new System.Windows.Forms.FolderBrowserDialog(); tileSetConfigDlg = new System.Windows.Forms.OpenFileDialog(); + gTexNameInput = new System.Windows.Forms.TextBox(); + label6 = new System.Windows.Forms.Label(); groupBox1.SuspendLayout(); groupBox2.SuspendLayout(); SuspendLayout(); @@ -61,6 +63,8 @@ private void InitializeComponent() // groupBox1 // groupBox1.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left | System.Windows.Forms.AnchorStyles.Right; + groupBox1.Controls.Add(gTexNameInput); + groupBox1.Controls.Add(label6); groupBox1.Controls.Add(extractTileSetBtn); groupBox1.Controls.Add(destinationPathBrowseBtn); groupBox1.Controls.Add(gtsBrowseBtn); @@ -72,7 +76,7 @@ private void InitializeComponent() groupBox1.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); groupBox1.Name = "groupBox1"; groupBox1.Padding = new System.Windows.Forms.Padding(4, 5, 4, 5); - groupBox1.Size = new System.Drawing.Size(1167, 212); + groupBox1.Size = new System.Drawing.Size(1167, 296); groupBox1.TabIndex = 66; groupBox1.TabStop = false; groupBox1.Text = "Extract Virtual Textures"; @@ -80,7 +84,7 @@ private void InitializeComponent() // extractTileSetBtn // extractTileSetBtn.Anchor = System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Right; - extractTileSetBtn.Location = new System.Drawing.Point(945, 168); + extractTileSetBtn.Location = new System.Drawing.Point(945, 252); extractTileSetBtn.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); extractTileSetBtn.Name = "extractTileSetBtn"; extractTileSetBtn.Size = new System.Drawing.Size(213, 35); @@ -158,7 +162,7 @@ private void InitializeComponent() // actionProgressLabel // actionProgressLabel.AutoSize = true; - actionProgressLabel.Location = new System.Drawing.Point(88, 478); + actionProgressLabel.Location = 
new System.Drawing.Point(88, 545); actionProgressLabel.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); actionProgressLabel.Name = "actionProgressLabel"; actionProgressLabel.Size = new System.Drawing.Size(0, 20); @@ -167,7 +171,7 @@ private void InitializeComponent() // actionProgress // actionProgress.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left | System.Windows.Forms.AnchorStyles.Right; - actionProgress.Location = new System.Drawing.Point(9, 501); + actionProgress.Location = new System.Drawing.Point(9, 568); actionProgress.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); actionProgress.Name = "actionProgress"; actionProgress.Size = new System.Drawing.Size(1168, 35); @@ -176,7 +180,7 @@ private void InitializeComponent() // label5 // label5.AutoSize = true; - label5.Location = new System.Drawing.Point(5, 477); + label5.Location = new System.Drawing.Point(5, 544); label5.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); label5.Name = "label5"; label5.Size = new System.Drawing.Size(68, 20); @@ -196,7 +200,7 @@ private void InitializeComponent() groupBox2.Controls.Add(label1); groupBox2.Controls.Add(modRootPath); groupBox2.Controls.Add(label2); - groupBox2.Location = new System.Drawing.Point(10, 252); + groupBox2.Location = new System.Drawing.Point(10, 319); groupBox2.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); groupBox2.Name = "groupBox2"; groupBox2.Padding = new System.Windows.Forms.Padding(4, 5, 4, 5); @@ -316,6 +320,25 @@ private void InitializeComponent() // tileSetConfigDlg.Filter = "Virtual Texture Set Configuration (.xml)|*.xml"; // + // gTexNameInput + // + gTexNameInput.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left | System.Windows.Forms.AnchorStyles.Right; + gTexNameInput.Location = new System.Drawing.Point(10, 194); + gTexNameInput.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + gTexNameInput.Name = "gTexNameInput"; + gTexNameInput.Size = new System.Drawing.Size(1148, 27); + gTexNameInput.TabIndex = 63; + // + // label6 + // + label6.AutoSize = true; + label6.Location = new System.Drawing.Point(6, 169); + label6.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); + label6.Name = "label6"; + label6.Size = new System.Drawing.Size(326, 20); + label6.TabIndex = 64; + label6.Text = "GTex Name: (leave empty to extract all textures)"; + // // VirtualTexturesPane // AutoScaleDimensions = new System.Drawing.SizeF(8F, 20F); @@ -327,7 +350,7 @@ private void InitializeComponent() Controls.Add(label5); Margin = new System.Windows.Forms.Padding(3, 4, 3, 4); Name = "VirtualTexturesPane"; - Size = new System.Drawing.Size(1188, 570); + Size = new System.Drawing.Size(1188, 666); groupBox1.ResumeLayout(false); groupBox1.PerformLayout(); groupBox2.ResumeLayout(false); @@ -364,5 +387,7 @@ private void InitializeComponent() private System.Windows.Forms.Button modRootPathBrowseBtn; private System.Windows.Forms.Button tileSetBrowseBtn; private System.Windows.Forms.Button tileSetBuildBtn; + private System.Windows.Forms.TextBox gTexNameInput; + private System.Windows.Forms.Label label6; } } diff --git a/ConverterApp/VirtualTexturesPane.cs b/ConverterApp/VirtualTexturesPane.cs index 7d6e88ab..1c7d64c9 100644 --- a/ConverterApp/VirtualTexturesPane.cs +++ b/ConverterApp/VirtualTexturesPane.cs @@ -4,147 +4,159 @@ using System.Linq; using System.Windows.Forms; -namespace ConverterApp +namespace ConverterApp; + +public partial class VirtualTexturesPane : UserControl { - public partial 
class VirtualTexturesPane : UserControl + public VirtualTexturesPane(ISettingsDataSource settingsDataSource) { - public VirtualTexturesPane(ISettingsDataSource settingsDataSource) - { - InitializeComponent(); + InitializeComponent(); + + gtsPath.DataBindings.Add("Text", settingsDataSource, "Settings.VirtualTextures.GTSPath", true, DataSourceUpdateMode.OnPropertyChanged); + destinationPath.DataBindings.Add("Text", settingsDataSource, "Settings.VirtualTextures.DestinationPath", true, DataSourceUpdateMode.OnPropertyChanged); + } - gtsPath.DataBindings.Add("Text", settingsDataSource, "Settings.VirtualTextures.GTSPath", true, DataSourceUpdateMode.OnPropertyChanged); - destinationPath.DataBindings.Add("Text", settingsDataSource, "Settings.VirtualTextures.DestinationPath", true, DataSourceUpdateMode.OnPropertyChanged); + private void gtpBrowseBtn_Click(object sender, EventArgs e) + { + if (gtsFileDlg.ShowDialog(this) == DialogResult.OK) + { + gtsPath.Text = gtsFileDlg.FileName; } + } - private void gtpBrowseBtn_Click(object sender, EventArgs e) + private void destinationPathBrowseBtn_Click(object sender, EventArgs e) + { + DialogResult result = destinationPathDlg.ShowDialog(this); + if (result == DialogResult.OK) { - if (gtsFileDlg.ShowDialog(this) == DialogResult.OK) - { - gtsPath.Text = gtsFileDlg.FileName; - } + destinationPath.Text = destinationPathDlg.SelectedPath; } + } - private void destinationPathBrowseBtn_Click(object sender, EventArgs e) + private void extractTileSetBtn_Click(object sender, EventArgs e) + { + extractTileSetBtn.Enabled = false; + try { - DialogResult result = destinationPathDlg.ShowDialog(this); - if (result == DialogResult.OK) + var tileSet = new VirtualTileSet(gtsPath.Text); + var textures = tileSet.FourCCMetadata.ExtractTextureMetadata(); + + var texName = gTexNameInput.Text.Trim(); + if (texName.Length > 0) { - destinationPath.Text = destinationPathDlg.SelectedPath; + textures = textures.Where(tex => tex.Name == texName).ToList(); + if (textures.Count == 0) + { + MessageBox.Show($"GTex was not found in this tile set: {texName}", "Extraction Failed", MessageBoxButtons.OK, MessageBoxIcon.Warning); + return; + } } - } - private void extractTileSetBtn_Click(object sender, EventArgs e) - { - extractTileSetBtn.Enabled = false; - try + var i = 0; + foreach (var texture in textures) { - var tileSet = new VirtualTileSet(gtsPath.Text); - var textures = tileSet.FourCCMetadata.ExtractTextureMetadata(); + actionProgressLabel.Text = "GTex: " + texture.Name; + actionProgress.Value = i++ * 100 / textures.Count; + Application.DoEvents(); - var i = 0; - foreach (var texture in textures) + for (var layer = 0; layer < tileSet.TileSetLayers.Length; layer++) { - actionProgressLabel.Text = "GTex: " + texture.Name; - actionProgress.Value = i++ * 100 / textures.Count; - Application.DoEvents(); + BC5Image tex = null; + var level = 0; + do + { + tex = tileSet.ExtractTexture(level, layer, texture); + level++; + } while (tex == null && level < tileSet.TileSetLevels.Length); - for (var layer = 0; layer < tileSet.TileSetLayers.Length; layer++) + if (tex != null) { - BC5Image tex = null; - var level = 0; - do - { - tex = tileSet.ExtractTexture(level, layer, texture); - level++; - } while (tex == null && level < tileSet.TileSetLevels.Length); - - if (tex != null) - { - var outputPath = Path.Join(destinationPath.Text, texture.Name + $"_{layer}.dds"); - tex.SaveDDS(outputPath); - } + var outputPath = Path.Join(destinationPath.Text, texture.Name + $"_{layer}.dds"); + tex.SaveDDS(outputPath); } - - 
tileSet.ReleasePageFiles(); - GC.Collect(); } - MessageBox.Show("Textures extracted successfully."); - } - catch (Exception exc) - { - MessageBox.Show($"Internal error!{Environment.NewLine}{Environment.NewLine}{exc}", "Extraction Failed", MessageBoxButtons.OK, MessageBoxIcon.Error); - } - finally - { - actionProgressLabel.Text = ""; - actionProgress.Value = 0; - extractTileSetBtn.Enabled = true; + tileSet.ReleasePageFiles(); + GC.Collect(); } - } - private void tileSetConfigBrowseBtn_Click(object sender, EventArgs e) + MessageBox.Show("Textures extracted successfully."); + } + catch (Exception exc) { - if (tileSetConfigDlg.ShowDialog(this) == DialogResult.OK) - { - tileSetConfigPath.Text = tileSetConfigDlg.FileName; - } + MessageBox.Show($"Internal error!{Environment.NewLine}{Environment.NewLine}{exc}", "Extraction Failed", MessageBoxButtons.OK, MessageBoxIcon.Error); + } + finally + { + actionProgressLabel.Text = ""; + actionProgress.Value = 0; + extractTileSetBtn.Enabled = true; } + } - private void modRootPathBrowseBtn_Click(object sender, EventArgs e) + private void tileSetConfigBrowseBtn_Click(object sender, EventArgs e) + { + if (tileSetConfigDlg.ShowDialog(this) == DialogResult.OK) { - DialogResult result = modRootPathDlg.ShowDialog(this); - if (result == DialogResult.OK) - { - modRootPath.Text = modRootPathDlg.SelectedPath; - } + tileSetConfigPath.Text = tileSetConfigDlg.FileName; } + } - private void tileSetBuildBtn_Click(object sender, EventArgs ev) + private void modRootPathBrowseBtn_Click(object sender, EventArgs e) + { + DialogResult result = modRootPathDlg.ShowDialog(this); + if (result == DialogResult.OK) { - try - { - var descriptor = new TileSetDescriptor(); - descriptor.RootPath = modRootPath.Text; - descriptor.Load(tileSetConfigPath.Text); - - var builder = new TileSetBuilder(descriptor.Config); - builder.OnStepStarted += (step) => { - actionProgressLabel.Text = step; - Application.DoEvents(); - }; - builder.OnStepProgress += (numerator, denumerator) => { - actionProgress.Maximum = denumerator; - actionProgress.Value = numerator; - Application.DoEvents(); - }; - - builder.OnStepStarted("Adding textures"); - foreach (var texture in descriptor.Textures) - { - var layerPaths = texture.Layers.Select(name => name != null ? 
Path.Combine(descriptor.SourceTexturePath, name) : null).ToList(); - builder.AddTexture(texture.Name, layerPaths); - } + modRootPath.Text = modRootPathDlg.SelectedPath; + } + } - builder.Build(descriptor.VirtualTexturePath); + private void tileSetBuildBtn_Click(object sender, EventArgs ev) + { + try + { + var descriptor = new TileSetDescriptor(); + descriptor.RootPath = modRootPath.Text; + descriptor.Load(tileSetConfigPath.Text); - MessageBox.Show("Tile set build completed."); - } - catch (InvalidDataException e) + var builder = new TileSetBuilder(descriptor.Config); + builder.OnStepStarted += (step) => { - MessageBox.Show($"{e.Message}", "Tile Set Build Failed", MessageBoxButtons.OK, MessageBoxIcon.Error); - } - catch (FileNotFoundException e) + actionProgressLabel.Text = step; + Application.DoEvents(); + }; + builder.OnStepProgress += (numerator, denumerator) => { - MessageBox.Show($"{e.Message}", "Tile Set Build Failed", MessageBoxButtons.OK, MessageBoxIcon.Error); - } - catch (Exception e) + actionProgress.Maximum = denumerator; + actionProgress.Value = numerator; + Application.DoEvents(); + }; + + builder.OnStepStarted("Adding textures"); + foreach (var texture in descriptor.Textures) { - MessageBox.Show($"Internal error!{Environment.NewLine}{Environment.NewLine}{e}", "Tile Set Build Failed", MessageBoxButtons.OK, MessageBoxIcon.Error); + var layerPaths = texture.Layers.Select(name => name != null ? Path.Combine(descriptor.SourceTexturePath, name) : null).ToList(); + builder.AddTexture(texture.Name, layerPaths); } - actionProgressLabel.Text = ""; - actionProgress.Value = 0; + builder.Build(descriptor.VirtualTexturePath); + + MessageBox.Show("Tile set build completed."); } + catch (InvalidDataException e) + { + MessageBox.Show($"{e.Message}", "Tile Set Build Failed", MessageBoxButtons.OK, MessageBoxIcon.Error); + } + catch (FileNotFoundException e) + { + MessageBox.Show($"{e.Message}", "Tile Set Build Failed", MessageBoxButtons.OK, MessageBoxIcon.Error); + } + catch (Exception e) + { + MessageBox.Show($"Internal error!{Environment.NewLine}{Environment.NewLine}{e}", "Tile Set Build Failed", MessageBoxButtons.OK, MessageBoxIcon.Error); + } + + actionProgressLabel.Text = ""; + actionProgress.Value = 0; } } From b46da62084e08149261684617c60ec40826b6c6b Mon Sep 17 00:00:00 2001 From: Norbyte Date: Mon, 11 Dec 2023 11:48:13 +0100 Subject: [PATCH 044/139] Vertex weight serialization fixes --- LSLib/Granny/Model/ColladaImporter.cs | 1642 +++++++++++++------------ LSLib/Granny/Model/Vertex.cs | 1028 ++++++++-------- 2 files changed, 1353 insertions(+), 1317 deletions(-) diff --git a/LSLib/Granny/Model/ColladaImporter.cs b/LSLib/Granny/Model/ColladaImporter.cs index 5c38f59d..9424be80 100644 --- a/LSLib/Granny/Model/ColladaImporter.cs +++ b/LSLib/Granny/Model/ColladaImporter.cs @@ -1,1079 +1,1115 @@ using System; using System.Collections.Generic; +using System.Diagnostics; using System.IO; using System.Linq; using LSLib.Granny.GR2; using LSLib.LS; using OpenTK.Mathematics; -namespace LSLib.Granny.Model +namespace LSLib.Granny.Model; + +internal class ColladaSource { - internal class ColladaSource + public String id; + public Dictionary> FloatParams = new Dictionary>(); + public Dictionary> MatrixParams = new Dictionary>(); + public Dictionary> NameParams = new Dictionary>(); + + public static ColladaSource FromCollada(source src) { - public String id; - public Dictionary> FloatParams = new Dictionary>(); - public Dictionary> MatrixParams = new Dictionary>(); - public Dictionary> NameParams = new 
Dictionary>(); + var source = new ColladaSource + { + id = src.id + }; + + var accessor = src.technique_common.accessor; + // TODO: check src.#ID? - public static ColladaSource FromCollada(source src) + float_array floats = null; + Name_array names = null; + if (src.Item is float_array) { - var source = new ColladaSource - { - id = src.id - }; + floats = src.Item as float_array; + // Workaround for empty arrays being null + floats.Values ??= []; - var accessor = src.technique_common.accessor; - // TODO: check src.#ID? + if ((int)floats.count != floats.Values.Length || floats.count < accessor.stride * accessor.count + accessor.offset) + throw new ParsingException("Float source data size mismatch. Check source and accessor item counts."); + } + else if (src.Item is Name_array) + { + names = src.Item as Name_array; + // Workaround for empty arrays being null + names.Values ??= []; - float_array floats = null; - Name_array names = null; - if (src.Item is float_array) - { - floats = src.Item as float_array; - // Workaround for empty arrays being null - floats.Values ??= []; + if ((int)names.count != names.Values.Length || names.count < accessor.stride * accessor.count + accessor.offset) + throw new ParsingException("Name source data size mismatch. Check source and accessor item counts."); + } + else + throw new ParsingException("Unsupported source data format."); - if ((int)floats.count != floats.Values.Length || floats.count < accessor.stride * accessor.count + accessor.offset) - throw new ParsingException("Float source data size mismatch. Check source and accessor item counts."); - } - else if (src.Item is Name_array) + var paramOffset = 0; + foreach (var param in accessor.param) + { + param.name ??= "default"; + if (param.type == "float" || param.type == "double") { - names = src.Item as Name_array; - // Workaround for empty arrays being null - names.Values ??= []; + var items = new List((int)accessor.count); + var offset = (int)accessor.offset; + for (var i = 0; i < (int)accessor.count; i++) + { + items.Add((float)floats.Values[offset + paramOffset]); + offset += (int)accessor.stride; + } - if ((int)names.count != names.Values.Length || names.count < accessor.stride * accessor.count + accessor.offset) - throw new ParsingException("Name source data size mismatch. 
Check source and accessor item counts."); + source.FloatParams.Add(param.name, items); } - else - throw new ParsingException("Unsupported source data format."); - - var paramOffset = 0; - foreach (var param in accessor.param) + else if (param.type == "float4x4") { - param.name ??= "default"; - if (param.type == "float" || param.type == "double") + var items = new List((int)accessor.count); + var offset = (int)accessor.offset; + for (var i = 0; i < (int)accessor.count; i++) { - var items = new List((int)accessor.count); - var offset = (int)accessor.offset; - for (var i = 0; i < (int)accessor.count; i++) - { - items.Add((float)floats.Values[offset + paramOffset]); - offset += (int)accessor.stride; - } - - source.FloatParams.Add(param.name, items); + var itemOff = offset + paramOffset; + var mat = new Matrix4( + (float)floats.Values[itemOff + 0], (float)floats.Values[itemOff + 1], (float)floats.Values[itemOff + 2], (float)floats.Values[itemOff + 3], + (float)floats.Values[itemOff + 4], (float)floats.Values[itemOff + 5], (float)floats.Values[itemOff + 6], (float)floats.Values[itemOff + 7], + (float)floats.Values[itemOff + 8], (float)floats.Values[itemOff + 9], (float)floats.Values[itemOff + 10], (float)floats.Values[itemOff + 11], + (float)floats.Values[itemOff + 12], (float)floats.Values[itemOff + 13], (float)floats.Values[itemOff + 14], (float)floats.Values[itemOff + 15] + ); + items.Add(mat); + offset += (int)accessor.stride; } - else if (param.type == "float4x4") - { - var items = new List((int)accessor.count); - var offset = (int)accessor.offset; - for (var i = 0; i < (int)accessor.count; i++) - { - var itemOff = offset + paramOffset; - var mat = new Matrix4( - (float)floats.Values[itemOff + 0], (float)floats.Values[itemOff + 1], (float)floats.Values[itemOff + 2], (float)floats.Values[itemOff + 3], - (float)floats.Values[itemOff + 4], (float)floats.Values[itemOff + 5], (float)floats.Values[itemOff + 6], (float)floats.Values[itemOff + 7], - (float)floats.Values[itemOff + 8], (float)floats.Values[itemOff + 9], (float)floats.Values[itemOff + 10], (float)floats.Values[itemOff + 11], - (float)floats.Values[itemOff + 12], (float)floats.Values[itemOff + 13], (float)floats.Values[itemOff + 14], (float)floats.Values[itemOff + 15] - ); - items.Add(mat); - offset += (int)accessor.stride; - } - source.MatrixParams.Add(param.name, items); - } - else if (param.type.ToLower() == "name") + source.MatrixParams.Add(param.name, items); + } + else if (param.type.ToLower() == "name") + { + var items = new List((int)accessor.count); + var offset = (int)accessor.offset; + for (var i = 0; i < (int)accessor.count; i++) { - var items = new List((int)accessor.count); - var offset = (int)accessor.offset; - for (var i = 0; i < (int)accessor.count; i++) - { - items.Add(names.Values[offset + paramOffset]); - offset += (int)accessor.stride; - } - - source.NameParams.Add(param.name, items); + items.Add(names.Values[offset + paramOffset]); + offset += (int)accessor.stride; } - else - throw new ParsingException("Unsupported accessor param type: " + param.type); - paramOffset++; + source.NameParams.Add(param.name, items); } + else + throw new ParsingException("Unsupported accessor param type: " + param.type); - return source; + paramOffset++; } + + return source; } +} - class RootBoneInfo - { - public node Bone; - public List Parents; - }; +class RootBoneInfo +{ + public node Bone; + public List Parents; +}; - public class ColladaImporter - { - [Serialization(Kind = SerializationKind.None)] - public ExporterOptions Options 
= new(); +public class ColladaImporter +{ + [Serialization(Kind = SerializationKind.None)] + public ExporterOptions Options = new(); - private bool ZUp = false; + private bool ZUp = false; - [Serialization(Kind = SerializationKind.None)] - public Dictionary ColladaGeometries; + [Serialization(Kind = SerializationKind.None)] + public Dictionary ColladaGeometries; - [Serialization(Kind = SerializationKind.None)] - public HashSet SkinnedMeshes; + [Serialization(Kind = SerializationKind.None)] + public HashSet SkinnedMeshes; - private ArtToolInfo ImportArtToolInfo(COLLADA collada) + private ArtToolInfo ImportArtToolInfo(COLLADA collada) + { + ZUp = false; + var toolInfo = new ArtToolInfo + { + FromArtToolName = "Unknown", + ArtToolMajorRevision = 1, + ArtToolMinorRevision = 0, + ArtToolPointerSize = Options.Is64Bit ? 64 : 32, + Origin = [0, 0, 0] + }; + toolInfo.SetYUp(); + + if (collada.asset != null) { - ZUp = false; - var toolInfo = new ArtToolInfo + if (collada.asset.unit != null) { - FromArtToolName = "Unknown", - ArtToolMajorRevision = 1, - ArtToolMinorRevision = 0, - ArtToolPointerSize = Options.Is64Bit ? 64 : 32, - Origin = [0, 0, 0] - }; - toolInfo.SetYUp(); + if (collada.asset.unit.name == "meter") + toolInfo.UnitsPerMeter = (float)collada.asset.unit.meter; + else if (collada.asset.unit.name == "centimeter") + toolInfo.UnitsPerMeter = (float)collada.asset.unit.meter * 100; + else + throw new NotImplementedException("Unsupported asset unit type: " + collada.asset.unit.name); + } - if (collada.asset != null) + if (collada.asset.contributor != null && collada.asset.contributor.Length > 0) { - if (collada.asset.unit != null) - { - if (collada.asset.unit.name == "meter") - toolInfo.UnitsPerMeter = (float)collada.asset.unit.meter; - else if (collada.asset.unit.name == "centimeter") - toolInfo.UnitsPerMeter = (float)collada.asset.unit.meter * 100; - else - throw new NotImplementedException("Unsupported asset unit type: " + collada.asset.unit.name); - } - - if (collada.asset.contributor != null && collada.asset.contributor.Length > 0) - { - var contributor = collada.asset.contributor.First(); - if (contributor.authoring_tool != null) - toolInfo.FromArtToolName = contributor.authoring_tool; - } + var contributor = collada.asset.contributor.First(); + if (contributor.authoring_tool != null) + toolInfo.FromArtToolName = contributor.authoring_tool; + } - switch (collada.asset.up_axis) - { - case UpAxisType.X_UP: - throw new Exception("X-up not supported yet!"); + switch (collada.asset.up_axis) + { + case UpAxisType.X_UP: + throw new Exception("X-up not supported yet!"); - case UpAxisType.Y_UP: - toolInfo.SetYUp(); - break; + case UpAxisType.Y_UP: + toolInfo.SetYUp(); + break; - case UpAxisType.Z_UP: - ZUp = true; - toolInfo.SetZUp(); - break; - } + case UpAxisType.Z_UP: + ZUp = true; + toolInfo.SetZUp(); + break; } - - return toolInfo; } - private ExporterInfo ImportExporterInfo(COLLADA collada) + return toolInfo; + } + + private ExporterInfo ImportExporterInfo(COLLADA collada) + { + return new ExporterInfo { - return new ExporterInfo - { - ExporterName = $"LSLib GR2 Exporter v{Common.LibraryVersion()}", - ExporterMajorRevision = Common.MajorVersion, - ExporterMinorRevision = Common.MinorVersion, - ExporterBuildNumber = 0, - ExporterCustomization = Common.PatchVersion - }; - } + ExporterName = $"LSLib GR2 Exporter v{Common.LibraryVersion()}", + ExporterMajorRevision = Common.MajorVersion, + ExporterMinorRevision = Common.MinorVersion, + ExporterBuildNumber = 0, + ExporterCustomization = 
Common.PatchVersion + }; + } - private DivinityModelFlag DetermineSkeletonModelFlagsFromModels(Root root, Skeleton skeleton, DivinityModelFlag meshFlagOverrides) + private DivinityModelFlag DetermineSkeletonModelFlagsFromModels(Root root, Skeleton skeleton, DivinityModelFlag meshFlagOverrides) + { + DivinityModelFlag accumulatedFlags = 0; + foreach (var model in root.Models ?? Enumerable.Empty()) { - DivinityModelFlag accumulatedFlags = 0; - foreach (var model in root.Models ?? Enumerable.Empty()) + if (model.Skeleton == skeleton && model.MeshBindings != null) { - if (model.Skeleton == skeleton && model.MeshBindings != null) + foreach (var meshBinding in model.MeshBindings) { - foreach (var meshBinding in model.MeshBindings) - { - accumulatedFlags |= meshBinding.Mesh?.ExtendedData?.UserMeshProperties?.MeshFlags ?? meshFlagOverrides; - } + accumulatedFlags |= meshBinding.Mesh?.ExtendedData?.UserMeshProperties?.MeshFlags ?? meshFlagOverrides; } } + } - return accumulatedFlags; + return accumulatedFlags; + } + + private void BuildExtendedData(Root root) + { + if (Options.ModelInfoFormat == DivinityModelInfoFormat.None) + { + return; } - private void BuildExtendedData(Root root) + var modelFlagOverrides = Options.ModelType; + + foreach (var mesh in root.Meshes ?? Enumerable.Empty()) { - if (Options.ModelInfoFormat == DivinityModelInfoFormat.None) + DivinityModelFlag modelFlags = modelFlagOverrides; + if (modelFlags == 0 && mesh.ExtendedData != null) { - return; + modelFlags = mesh.ExtendedData.UserMeshProperties.MeshFlags; } - var modelFlagOverrides = Options.ModelType; + mesh.ExtendedData ??= DivinityMeshExtendedData.Make(); + mesh.ExtendedData.UserMeshProperties.MeshFlags = modelFlags; + mesh.ExtendedData.UpdateFromModelInfo(mesh, Options.ModelInfoFormat); + } - foreach (var mesh in root.Meshes ?? Enumerable.Empty()) + foreach (var skeleton in root.Skeletons ?? Enumerable.Empty()) + { + if (Options.ModelInfoFormat == DivinityModelInfoFormat.None || Options.ModelInfoFormat == DivinityModelInfoFormat.LSMv3) { - DivinityModelFlag modelFlags = modelFlagOverrides; - if (modelFlags == 0 && mesh.ExtendedData != null) + foreach (var bone in skeleton.Bones ?? Enumerable.Empty()) { - modelFlags = mesh.ExtendedData.UserMeshProperties.MeshFlags; + bone.ExtendedData = null; } - - mesh.ExtendedData ??= DivinityMeshExtendedData.Make(); - mesh.ExtendedData.UserMeshProperties.MeshFlags = modelFlags; - mesh.ExtendedData.UpdateFromModelInfo(mesh, Options.ModelInfoFormat); } - - foreach (var skeleton in root.Skeletons ?? Enumerable.Empty()) + else { - if (Options.ModelInfoFormat == DivinityModelInfoFormat.None || Options.ModelInfoFormat == DivinityModelInfoFormat.LSMv3) - { - foreach (var bone in skeleton.Bones ?? Enumerable.Empty()) - { - bone.ExtendedData = null; - } - } - else - { - var accumulatedFlags = DetermineSkeletonModelFlagsFromModels(root, skeleton, modelFlagOverrides); + var accumulatedFlags = DetermineSkeletonModelFlagsFromModels(root, skeleton, modelFlagOverrides); - foreach (var bone in skeleton.Bones ?? Enumerable.Empty()) - { - bone.ExtendedData ??= new DivinityBoneExtendedData(); - var userDefinedProperties = UserDefinedPropertiesHelpers.MeshFlagsToUserDefinedProperties(accumulatedFlags); - bone.ExtendedData.UserDefinedProperties = userDefinedProperties; - bone.ExtendedData.IsRigid = (accumulatedFlags.IsRigid()) ? 1 : 0; - } + foreach (var bone in skeleton.Bones ?? 
Enumerable.Empty()) + { + bone.ExtendedData ??= new DivinityBoneExtendedData(); + var userDefinedProperties = UserDefinedPropertiesHelpers.MeshFlagsToUserDefinedProperties(accumulatedFlags); + bone.ExtendedData.UserDefinedProperties = userDefinedProperties; + bone.ExtendedData.IsRigid = (accumulatedFlags.IsRigid()) ? 1 : 0; } } } + } - private void FindRootBones(List parents, node node, List rootBones) + private void FindRootBones(List parents, node node, List rootBones) + { + if (node.type == NodeType.JOINT) { - if (node.type == NodeType.JOINT) + var root = new RootBoneInfo { - var root = new RootBoneInfo - { - Bone = node, - Parents = parents.Select(a => a).ToList() - }; - rootBones.Add(root); - } - else if (node.type == NodeType.NODE) + Bone = node, + Parents = parents.Select(a => a).ToList() + }; + rootBones.Add(root); + } + else if (node.type == NodeType.NODE) + { + if (node.node1 != null) { - if (node.node1 != null) + parents.Add(node); + foreach (var child in node.node1) { - parents.Add(node); - foreach (var child in node.node1) - { - FindRootBones(parents, child, rootBones); - } - parents.RemoveAt(parents.Count - 1); + FindRootBones(parents, child, rootBones); } + parents.RemoveAt(parents.Count - 1); } } + } - public static technique FindExporterExtraData(extra[] extras) + public static technique FindExporterExtraData(extra[] extras) + { + foreach (var extra in extras ?? Enumerable.Empty()) { - foreach (var extra in extras ?? Enumerable.Empty()) + foreach (var technique in extra.technique ?? Enumerable.Empty()) { - foreach (var technique in extra.technique ?? Enumerable.Empty()) + if (technique.profile == "LSTools") { - if (technique.profile == "LSTools") - { - return technique; - } + return technique; } } - - return null; } - private void LoadLSLibProfileMeshType(DivinityMeshExtendedData props, string meshType) - { - var meshProps = props.UserMeshProperties; + return null; + } - switch (meshType) - { - // Compatibility flag, not used anymore - case "Normal": break; - case "Cloth": meshProps.MeshFlags |= DivinityModelFlag.Cloth; props.Cloth = 1; break; - case "Rigid": meshProps.MeshFlags |= DivinityModelFlag.Rigid; props.Rigid = 1; break; - case "MeshProxy": meshProps.MeshFlags |= DivinityModelFlag.MeshProxy | DivinityModelFlag.HasProxyGeometry; props.MeshProxy = 1; break; - case "ProxyGeometry": meshProps.MeshFlags |= DivinityModelFlag.HasProxyGeometry; break; - case "Spring": meshProps.MeshFlags |= DivinityModelFlag.Spring; props.Spring = 1; break; - case "Occluder": meshProps.MeshFlags |= DivinityModelFlag.Occluder; props.Occluder = 1; break; - case "Cloth01": meshProps.ClothFlags |= DivinityClothFlag.Cloth01; break; - case "Cloth02": meshProps.ClothFlags |= DivinityClothFlag.Cloth02; break; - case "Cloth04": meshProps.ClothFlags |= DivinityClothFlag.Cloth04; break; - case "ClothPhysics": meshProps.ClothFlags |= DivinityClothFlag.ClothPhysics; break; - default: - Utils.Warn($"Unrecognized model type in tag: {meshType}"); - break; - } + private void LoadLSLibProfileMeshType(DivinityMeshExtendedData props, string meshType) + { + var meshProps = props.UserMeshProperties; + + switch (meshType) + { + // Compatibility flag, not used anymore + case "Normal": break; + case "Cloth": meshProps.MeshFlags |= DivinityModelFlag.Cloth; props.Cloth = 1; break; + case "Rigid": meshProps.MeshFlags |= DivinityModelFlag.Rigid; props.Rigid = 1; break; + case "MeshProxy": meshProps.MeshFlags |= DivinityModelFlag.MeshProxy | DivinityModelFlag.HasProxyGeometry; props.MeshProxy = 1; break; + case 
"ProxyGeometry": meshProps.MeshFlags |= DivinityModelFlag.HasProxyGeometry; break; + case "Spring": meshProps.MeshFlags |= DivinityModelFlag.Spring; props.Spring = 1; break; + case "Occluder": meshProps.MeshFlags |= DivinityModelFlag.Occluder; props.Occluder = 1; break; + case "Cloth01": meshProps.ClothFlags |= DivinityClothFlag.Cloth01; break; + case "Cloth02": meshProps.ClothFlags |= DivinityClothFlag.Cloth02; break; + case "Cloth04": meshProps.ClothFlags |= DivinityClothFlag.Cloth04; break; + case "ClothPhysics": meshProps.ClothFlags |= DivinityClothFlag.ClothPhysics; break; + default: + Utils.Warn($"Unrecognized model type in tag: {meshType}"); + break; } + } - private void LoadLSLibProfileExportOrder(Mesh mesh, string order) + private void LoadLSLibProfileExportOrder(Mesh mesh, string order) + { + if (Int32.TryParse(order, out int parsedOrder)) { - if (Int32.TryParse(order, out int parsedOrder)) + if (parsedOrder >= 0 && parsedOrder < 100) { - if (parsedOrder >= 0 && parsedOrder < 100) - { - mesh.ExportOrder = parsedOrder; - } + mesh.ExportOrder = parsedOrder; } } + } - private void LoadLSLibProfileLOD(DivinityMeshExtendedData props, string lod) + private void LoadLSLibProfileLOD(DivinityMeshExtendedData props, string lod) + { + if (Int32.TryParse(lod, out int parsedLod)) { - if (Int32.TryParse(lod, out int parsedLod)) + if (parsedLod >= 0 && parsedLod < 100) { - if (parsedLod >= 0 && parsedLod < 100) + props.LOD = parsedLod; + if (parsedLod == 0) { - props.LOD = parsedLod; - if (parsedLod == 0) - { - props.UserMeshProperties.Lod[0] = -1; - } - else - { - props.UserMeshProperties.Lod[0] = parsedLod; - } + props.UserMeshProperties.Lod[0] = -1; + } + else + { + props.UserMeshProperties.Lod[0] = parsedLod; } } } + } - private void LoadLSLibProfileImpostor(DivinityMeshExtendedData props, string impostor) + private void LoadLSLibProfileImpostor(DivinityMeshExtendedData props, string impostor) + { + if (Int32.TryParse(impostor, out int isImpostor)) { - if (Int32.TryParse(impostor, out int isImpostor)) + if (isImpostor == 1) { - if (isImpostor == 1) - { - props.UserMeshProperties.IsImpostor[0] = 1; - } + props.UserMeshProperties.IsImpostor[0] = 1; } } + } - private void LoadLSLibProfileLODDistance(DivinityMeshProperties props, string lodDistance) + private void LoadLSLibProfileLODDistance(DivinityMeshProperties props, string lodDistance) + { + if (Single.TryParse(lodDistance, out float parsedLodDistance)) { - if (Single.TryParse(lodDistance, out float parsedLodDistance)) + if (parsedLodDistance >= 0.0f) { - if (parsedLodDistance >= 0.0f) - { - props.LodDistance[0] = parsedLodDistance; - } + props.LodDistance[0] = parsedLodDistance; } } + } - private void MakeExtendedData(mesh mesh, Mesh loaded) + private void MakeExtendedData(mesh mesh, Mesh loaded) + { + var modelFlagOverrides = Options.ModelType; + + DivinityModelFlag modelFlags = modelFlagOverrides; + if (modelFlags == 0 && loaded.ExtendedData != null) { - var modelFlagOverrides = Options.ModelType; + modelFlags = loaded.ExtendedData.UserMeshProperties.MeshFlags; + } - DivinityModelFlag modelFlags = modelFlagOverrides; - if (modelFlags == 0 && loaded.ExtendedData != null) - { - modelFlags = loaded.ExtendedData.UserMeshProperties.MeshFlags; - } + loaded.ExtendedData = DivinityMeshExtendedData.Make(); + loaded.ExtendedData.UserMeshProperties.MeshFlags = modelFlags; + loaded.ExtendedData.UpdateFromModelInfo(loaded, Options.ModelInfoFormat); + LoadColladaLSLibProfileData(mesh, loaded); + } - loaded.ExtendedData = 
DivinityMeshExtendedData.Make(); - loaded.ExtendedData.UserMeshProperties.MeshFlags = modelFlags; - loaded.ExtendedData.UpdateFromModelInfo(loaded, Options.ModelInfoFormat); - LoadColladaLSLibProfileData(mesh, loaded); - } + private void LoadColladaLSLibProfileData(mesh mesh, Mesh loaded) + { + var technique = FindExporterExtraData(mesh.extra); + if (technique == null || technique.Any == null) return; - private void LoadColladaLSLibProfileData(mesh mesh, Mesh loaded) + var meshProps = loaded.ExtendedData.UserMeshProperties; + + foreach (var setting in technique.Any) { - var technique = FindExporterExtraData(mesh.extra); - if (technique == null || technique.Any == null) return; + switch (setting.LocalName) + { + case "DivModelType": + LoadLSLibProfileMeshType(loaded.ExtendedData, setting.InnerText.Trim()); + break; + + case "IsImpostor": + LoadLSLibProfileImpostor(loaded.ExtendedData, setting.InnerText.Trim()); + break; - var meshProps = loaded.ExtendedData.UserMeshProperties; + case "ExportOrder": + LoadLSLibProfileExportOrder(loaded, setting.InnerText.Trim()); + break; - foreach (var setting in technique.Any) - { - switch (setting.LocalName) - { - case "DivModelType": - LoadLSLibProfileMeshType(loaded.ExtendedData, setting.InnerText.Trim()); - break; - - case "IsImpostor": - LoadLSLibProfileImpostor(loaded.ExtendedData, setting.InnerText.Trim()); - break; - - case "ExportOrder": - LoadLSLibProfileExportOrder(loaded, setting.InnerText.Trim()); - break; - - case "LOD": - LoadLSLibProfileLOD(loaded.ExtendedData, setting.InnerText.Trim()); - break; - - case "LODDistance": - LoadLSLibProfileLODDistance(meshProps, setting.InnerText.Trim()); - break; - - default: - Utils.Warn($"Unrecognized LSLib profile attribute: {setting.LocalName}"); - break; - } + case "LOD": + LoadLSLibProfileLOD(loaded.ExtendedData, setting.InnerText.Trim()); + break; + + case "LODDistance": + LoadLSLibProfileLODDistance(meshProps, setting.InnerText.Trim()); + break; + + default: + Utils.Warn($"Unrecognized LSLib profile attribute: {setting.LocalName}"); + break; } } + } - private void ValidateLSLibProfileMetadataVersion(string ver) + private void ValidateLSLibProfileMetadataVersion(string ver) + { + if (Int32.TryParse(ver, out int version)) { - if (Int32.TryParse(ver, out int version)) + if (version > Common.ColladaMetadataVersion) { - if (version > Common.ColladaMetadataVersion) - { - throw new ParsingException( - $"Collada file is using a newer LSLib metadata format than this LSLib version supports, please upgrade.\r\n" + - $"File version: {version}, exporter version: {Common.ColladaMetadataVersion}"); - } + throw new ParsingException( + $"Collada file is using a newer LSLib metadata format than this LSLib version supports, please upgrade.\r\n" + + $"File version: {version}, exporter version: {Common.ColladaMetadataVersion}"); } } + } - private void LoadColladaLSLibProfileData(COLLADA collada) - { - var technique = FindExporterExtraData(collada.extra); - if (technique == null || technique.Any == null) return; + private void LoadColladaLSLibProfileData(COLLADA collada) + { + var technique = FindExporterExtraData(collada.extra); + if (technique == null || technique.Any == null) return; - foreach (var setting in technique.Any) + foreach (var setting in technique.Any) + { + switch (setting.LocalName) { - switch (setting.LocalName) - { - case "MetadataVersion": - ValidateLSLibProfileMetadataVersion(setting.InnerText.Trim()); - break; - - case "LSLibMajor": - case "LSLibMinor": - case "LSLibPatch": - break; - - default: - 
Utils.Warn($"Unrecognized LSLib root profile attribute: {setting.LocalName}"); - break; - } + case "MetadataVersion": + ValidateLSLibProfileMetadataVersion(setting.InnerText.Trim()); + break; + + case "LSLibMajor": + case "LSLibMinor": + case "LSLibPatch": + break; + + default: + Utils.Warn($"Unrecognized LSLib root profile attribute: {setting.LocalName}"); + break; } } + } + + private Mesh ImportMesh(geometry geom, mesh mesh, VertexDescriptor vertexFormat) + { + var collada = new ColladaMesh(); + bool isSkinned = SkinnedMeshes.Contains(geom.id); + collada.ImportFromCollada(mesh, vertexFormat, isSkinned, Options); - private Mesh ImportMesh(geometry geom, mesh mesh, VertexDescriptor vertexFormat) + var m = new Mesh { - var collada = new ColladaMesh(); - bool isSkinned = SkinnedMeshes.Contains(geom.id); - collada.ImportFromCollada(mesh, vertexFormat, isSkinned, Options); + VertexFormat = collada.InternalVertexType, + Name = "Unnamed", - var m = new Mesh + PrimaryVertexData = new VertexData { - VertexFormat = collada.InternalVertexType, - Name = "Unnamed", + Vertices = collada.ConsolidatedVertices + }, - PrimaryVertexData = new VertexData - { - Vertices = collada.ConsolidatedVertices - }, + PrimaryTopology = new TriTopology + { + Indices = collada.ConsolidatedIndices, + Groups = [ + new TriTopologyGroup + { + MaterialIndex = 0, + TriFirst = 0, + TriCount = collada.TriangleCount + } + ] + }, - PrimaryTopology = new TriTopology - { - Indices = collada.ConsolidatedIndices, - Groups = [ - new TriTopologyGroup - { - MaterialIndex = 0, - TriFirst = 0, - TriCount = collada.TriangleCount - } - ] - }, + MaterialBindings = [new MaterialBinding()], + OriginalToConsolidatedVertexIndexMap = collada.OriginalToConsolidatedVertexIndexMap + }; - MaterialBindings = [new MaterialBinding()], - OriginalToConsolidatedVertexIndexMap = collada.OriginalToConsolidatedVertexIndexMap - }; + if (!Options.StripMetadata) + { + var components = m.VertexFormat.ComponentNames().Select(s => new GrannyString(s)).ToList(); + m.PrimaryVertexData.VertexComponentNames = components; + } + else + { + m.PrimaryVertexData.VertexComponentNames = null; + } - if (!Options.StripMetadata) - { - var components = m.VertexFormat.ComponentNames().Select(s => new GrannyString(s)).ToList(); - m.PrimaryVertexData.VertexComponentNames = components; - } - else - { - m.PrimaryVertexData.VertexComponentNames = null; - } + MakeExtendedData(mesh, m); - MakeExtendedData(mesh, m); + Utils.Info(String.Format("Imported {0} mesh ({1} tri groups, {2} tris)", + (m.VertexFormat.HasBoneWeights ? "skinned" : "rigid"), + m.PrimaryTopology.Groups.Count, + collada.TriangleCount)); - Utils.Info(String.Format("Imported {0} mesh ({1} tri groups, {2} tris)", - (m.VertexFormat.HasBoneWeights ? 
"skinned" : "rigid"), - m.PrimaryTopology.Groups.Count, - collada.TriangleCount)); + return m; + } - return m; + private Mesh ImportMesh(Root root, string name, geometry geom, mesh mesh, VertexDescriptor vertexFormat) + { + var m = ImportMesh(geom, mesh, vertexFormat); + m.Name = name; + root.VertexDatas.Add(m.PrimaryVertexData); + root.TriTopologies.Add(m.PrimaryTopology); + root.Meshes.Add(m); + return m; + } + + private void ImportSkin(Root root, skin skin) + { + if (skin.source1[0] != '#') + throw new ParsingException("Only ID references are supported for skin geometries"); + + if (!ColladaGeometries.TryGetValue(skin.source1[1..], out Mesh mesh)) + throw new ParsingException("Skin references nonexistent mesh: " + skin.source1); + + if (!mesh.VertexFormat.HasBoneWeights) + { + var msg = String.Format("Tried to apply skin to mesh ({0}) with non-skinned vertices", + mesh.Name); + throw new ParsingException(msg); } - private Mesh ImportMesh(Root root, string name, geometry geom, mesh mesh, VertexDescriptor vertexFormat) + var sources = new Dictionary(); + foreach (var source in skin.source) { - var m = ImportMesh(geom, mesh, vertexFormat); - m.Name = name; - root.VertexDatas.Add(m.PrimaryVertexData); - root.TriTopologies.Add(m.PrimaryTopology); - root.Meshes.Add(m); - return m; + var src = ColladaSource.FromCollada(source); + sources.Add(src.id, src); } - private void ImportSkin(Root root, skin skin) + List joints = null; + List invBindMatrices = null; + foreach (var input in skin.joints.input) { - if (skin.source1[0] != '#') - throw new ParsingException("Only ID references are supported for skin geometries"); + if (input.source[0] != '#') + throw new ParsingException("Only ID references are supported for joint input sources"); - if (!ColladaGeometries.TryGetValue(skin.source1[1..], out Mesh mesh)) - throw new ParsingException("Skin references nonexistent mesh: " + skin.source1); + if (!sources.TryGetValue(input.source.Substring(1), out ColladaSource inputSource)) + throw new ParsingException("Joint input source does not exist: " + input.source); - if (!mesh.VertexFormat.HasBoneWeights) + if (input.semantic == "JOINT") { - var msg = String.Format("Tried to apply skin to mesh ({0}) with non-skinned vertices", - mesh.Name); - throw new ParsingException(msg); - } + List jointNames = inputSource.NameParams.Values.SingleOrDefault(); + if (jointNames == null) + throw new ParsingException("Joint input source 'JOINT' must contain array of names."); + + var skeleton = root.Skeletons[0]; + joints = []; + foreach (var name in jointNames) + { + var lookupName = name.Replace("_x0020_", " "); + if (!skeleton.BonesBySID.TryGetValue(lookupName, out Bone bone)) + throw new ParsingException("Joint name list references nonexistent bone: " + lookupName); - var sources = new Dictionary(); - foreach (var source in skin.source) + joints.Add(bone); + } + } + else if (input.semantic == "INV_BIND_MATRIX") { - var src = ColladaSource.FromCollada(source); - sources.Add(src.id, src); + invBindMatrices = inputSource.MatrixParams.Values.SingleOrDefault(); + if (invBindMatrices == null) + throw new ParsingException("Joint input source 'INV_BIND_MATRIX' must contain a single array of matrices."); } - - List joints = null; - List invBindMatrices = null; - foreach (var input in skin.joints.input) + else { - if (input.source[0] != '#') - throw new ParsingException("Only ID references are supported for joint input sources"); + throw new ParsingException("Unsupported joint semantic: " + input.semantic); + } + } - if 
(!sources.TryGetValue(input.source.Substring(1), out ColladaSource inputSource)) - throw new ParsingException("Joint input source does not exist: " + input.source); + if (joints == null) + throw new ParsingException("Required joint input semantic missing: JOINT"); - if (input.semantic == "JOINT") - { - List jointNames = inputSource.NameParams.Values.SingleOrDefault(); - if (jointNames == null) - throw new ParsingException("Joint input source 'JOINT' must contain array of names."); + if (invBindMatrices == null) + throw new ParsingException("Required joint input semantic missing: INV_BIND_MATRIX"); - var skeleton = root.Skeletons[0]; - joints = []; - foreach (var name in jointNames) - { - var lookupName = name.Replace("_x0020_", " "); - if (!skeleton.BonesBySID.TryGetValue(lookupName, out Bone bone)) - throw new ParsingException("Joint name list references nonexistent bone: " + lookupName); + var influenceCounts = ColladaHelpers.StringsToIntegers(skin.vertex_weights.vcount); + var influences = ColladaHelpers.StringsToIntegers(skin.vertex_weights.v); - joints.Add(bone); - } - } - else if (input.semantic == "INV_BIND_MATRIX") - { - invBindMatrices = inputSource.MatrixParams.Values.SingleOrDefault(); - if (invBindMatrices == null) - throw new ParsingException("Joint input source 'INV_BIND_MATRIX' must contain a single array of matrices."); - } - else - { - throw new ParsingException("Unsupported joint semantic: " + input.semantic); - } - } - - if (joints == null) - throw new ParsingException("Required joint input semantic missing: JOINT"); + foreach (var count in influenceCounts) + { + if (count > Vertex.MaxBoneInfluences) + throw new ParsingException($"GR2 only supports at most {Vertex.MaxBoneInfluences} vertex influences"); + } - if (invBindMatrices == null) - throw new ParsingException("Required joint input semantic missing: INV_BIND_MATRIX"); + // TODO + if (influenceCounts.Count != mesh.OriginalToConsolidatedVertexIndexMap.Count) + Utils.Warn(String.Format("Vertex influence count ({0}) differs from vertex count ({1})", influenceCounts.Count, mesh.OriginalToConsolidatedVertexIndexMap.Count)); - var influenceCounts = ColladaHelpers.StringsToIntegers(skin.vertex_weights.vcount); - var influences = ColladaHelpers.StringsToIntegers(skin.vertex_weights.v); + List weights = null; - foreach (var count in influenceCounts) + int jointInputIndex = -1, weightInputIndex = -1; + foreach (var input in skin.vertex_weights.input) + { + if (input.semantic == "JOINT") { - if (count > 4) - throw new ParsingException("GR2 only supports at most 4 vertex influences"); + jointInputIndex = (int)input.offset; } + else if (input.semantic == "WEIGHT") + { + weightInputIndex = (int)input.offset; - // TODO - if (influenceCounts.Count != mesh.OriginalToConsolidatedVertexIndexMap.Count) - Utils.Warn(String.Format("Vertex influence count ({0}) differs from vertex count ({1})", influenceCounts.Count, mesh.OriginalToConsolidatedVertexIndexMap.Count)); + if (input.source[0] != '#') + throw new ParsingException("Only ID references are supported for weight input sources"); - List weights = null; + if (!sources.TryGetValue(input.source[1..], out ColladaSource inputSource)) + throw new ParsingException("Weight input source does not exist: " + input.source); - int jointInputIndex = -1, weightInputIndex = -1; - foreach (var input in skin.vertex_weights.input) - { - if (input.semantic == "JOINT") - { - jointInputIndex = (int)input.offset; - } - else if (input.semantic == "WEIGHT") - { - weightInputIndex = (int)input.offset; + if 
(!inputSource.FloatParams.TryGetValue("WEIGHT", out weights)) + weights = inputSource.FloatParams.Values.SingleOrDefault(); - if (input.source[0] != '#') - throw new ParsingException("Only ID references are supported for weight input sources"); + if (weights == null) + throw new ParsingException("Weight input source " + input.source + " must have WEIGHT float attribute"); + } + else + throw new ParsingException("Unsupported skin input semantic: " + input.semantic); + } - if (!sources.TryGetValue(input.source[1..], out ColladaSource inputSource)) - throw new ParsingException("Weight input source does not exist: " + input.source); + if (jointInputIndex == -1) + throw new ParsingException("Required vertex weight input semantic missing: JOINT"); - if (!inputSource.FloatParams.TryGetValue("WEIGHT", out weights)) - weights = inputSource.FloatParams.Values.SingleOrDefault(); + if (weightInputIndex == -1) + throw new ParsingException("Required vertex weight input semantic missing: WEIGHT"); - if (weights == null) - throw new ParsingException("Weight input source " + input.source + " must have WEIGHT float attribute"); - } - else - throw new ParsingException("Unsupported skin input semantic: " + input.semantic); - } + // Remove bones that are not actually influenced from the binding list + var boundBones = new HashSet(); + int offset = 0; + int stride = skin.vertex_weights.input.Length; + while (offset < influences.Count) + { + var jointIndex = influences[offset + jointInputIndex]; + var weightIndex = influences[offset + weightInputIndex]; + var joint = joints[jointIndex]; + var weight = weights[weightIndex]; + boundBones.Add(joint); - if (jointInputIndex == -1) - throw new ParsingException("Required vertex weight input semantic missing: JOINT"); + offset += stride; + } - if (weightInputIndex == -1) - throw new ParsingException("Required vertex weight input semantic missing: WEIGHT"); + if (boundBones.Count > 127) + throw new ParsingException("D:OS supports at most 127 bound bones per mesh."); - // Remove bones that are not actually influenced from the binding list - var boundBones = new HashSet(); - int offset = 0; - int stride = skin.vertex_weights.input.Length; - while (offset < influences.Count) + mesh.BoneBindings = []; + var boneToIndexMaps = new Dictionary(); + for (var i = 0; i < joints.Count; i++) + { + if (boundBones.Contains(joints[i])) { - var jointIndex = influences[offset + jointInputIndex]; - var weightIndex = influences[offset + weightInputIndex]; - var joint = joints[jointIndex]; - var weight = weights[weightIndex]; - boundBones.Add(joint); - - offset += stride; + // Collada allows one inverse bind matrix for each skin, however Granny + // only has one matrix for one bone, even if said bone is used from multiple meshes. + // Hopefully the Collada ones are all equal ... + var iwt = invBindMatrices[i]; + // iwt.Transpose(); + joints[i].InverseWorldTransform = [ + iwt[0, 0], iwt[1, 0], iwt[2, 0], iwt[3, 0], + iwt[0, 1], iwt[1, 1], iwt[2, 1], iwt[3, 1], + iwt[0, 2], iwt[1, 2], iwt[2, 2], iwt[3, 2], + iwt[0, 3], iwt[1, 3], iwt[2, 3], iwt[3, 3] + ]; + + // Bind all bones that affect vertices to the mesh, so we can reference them + // later from the vertexes BoneIndices. 
+ var binding = new BoneBinding + { + BoneName = joints[i].Name, + // TODO + // Use small bounding box values, as it interferes with object placement + // in D:OS 2 (after the Gift Bag 2 update) + OBBMin = [-0.1f, -0.1f, -0.1f], + OBBMax = [0.1f, 0.1f, 0.1f] + }; + mesh.BoneBindings.Add(binding); + boneToIndexMaps.Add(joints[i], boneToIndexMaps.Count); } + } - if (boundBones.Count > 127) - throw new ParsingException("D:OS supports at most 127 bound bones per mesh."); + Span vertErrors = stackalloc float[Vertex.MaxBoneInfluences]; + Span vertWeights = stackalloc byte[Vertex.MaxBoneInfluences]; - mesh.BoneBindings = []; - var boneToIndexMaps = new Dictionary(); - for (var i = 0; i < joints.Count; i++) + offset = 0; + for (var vertexIndex = 0; vertexIndex < influenceCounts.Count; vertexIndex++) + { + var influenceCount = influenceCounts[vertexIndex]; + float influenceSum = 0.0f; + for (var i = 0; i < influenceCount; i++) { - if (boundBones.Contains(joints[i])) - { - // Collada allows one inverse bind matrix for each skin, however Granny - // only has one matrix for one bone, even if said bone is used from multiple meshes. - // Hopefully the Collada ones are all equal ... - var iwt = invBindMatrices[i]; - // iwt.Transpose(); - joints[i].InverseWorldTransform = [ - iwt[0, 0], iwt[1, 0], iwt[2, 0], iwt[3, 0], - iwt[0, 1], iwt[1, 1], iwt[2, 1], iwt[3, 1], - iwt[0, 2], iwt[1, 2], iwt[2, 2], iwt[3, 2], - iwt[0, 3], iwt[1, 3], iwt[2, 3], iwt[3, 3] - ]; - - // Bind all bones that affect vertices to the mesh, so we can reference them - // later from the vertexes BoneIndices. - var binding = new BoneBinding - { - BoneName = joints[i].Name, - // TODO - // Use small bounding box values, as it interferes with object placement - // in D:OS 2 (after the Gift Bag 2 update) - OBBMin = [-0.1f, -0.1f, -0.1f], - OBBMax = [0.1f, 0.1f, 0.1f] - }; - mesh.BoneBindings.Add(binding); - boneToIndexMaps.Add(joints[i], boneToIndexMaps.Count); - } + var weightIndex = influences[offset + i * stride + weightInputIndex]; + influenceSum += weights[weightIndex]; } - offset = 0; - for (var vertexIndex = 0; vertexIndex < influenceCounts.Count; vertexIndex++) + byte totalEncoded = 0; + for (var i = 0; i < influenceCount; i++) { - var influenceCount = influenceCounts[vertexIndex]; - float influenceSum = 0.0f; + var weightIndex = influences[offset + i * stride + weightInputIndex]; + var weight = weights[weightIndex] / influenceSum * 255.0f; + var encodedWeight = (byte)Math.Round(weight); + totalEncoded += encodedWeight; + vertErrors[i] = Math.Abs(encodedWeight - weight); + vertWeights[i] = encodedWeight; + } + + while (totalEncoded != 0 && totalEncoded < 255) + { + float firstHighest = 0.0f; + int errorIndex = -1; for (var i = 0; i < influenceCount; i++) { - var weightIndex = influences[offset + i * stride + weightInputIndex]; - influenceSum += weights[weightIndex]; + if (vertErrors[i] > firstHighest) + { + firstHighest = vertErrors[i]; + errorIndex = i; + } } - for (var i = 0; i < influenceCount; i++) + var weightIndex = influences[offset + errorIndex * stride + weightInputIndex]; + var weight = weights[weightIndex] / influenceSum * 255.0f; + + vertWeights[errorIndex]++; + vertErrors[errorIndex] = Math.Abs(vertWeights[errorIndex] - weight); + totalEncoded++; + } + + Debug.Assert(totalEncoded == 0 || totalEncoded == 255); + + for (var i = 0; i < influenceCount; i++) + { + // Not all vertices are actually used in triangles, we may have unused verts in the + // source list (though this is rare) which won't show up in the consolidated 
vertex map. + if (mesh.OriginalToConsolidatedVertexIndexMap.TryGetValue(vertexIndex, out List consolidatedIndices)) { var jointIndex = influences[offset + jointInputIndex]; - var weightIndex = influences[offset + weightInputIndex]; var joint = joints[jointIndex]; - var weight = weights[weightIndex] / influenceSum; - // Not all vertices are actually used in triangles, we may have unused verts in the - // source list (though this is rare) which won't show up in the consolidated vertex map. - if (mesh.OriginalToConsolidatedVertexIndexMap.TryGetValue(vertexIndex, out List consolidatedIndices)) + + foreach (var consolidatedIndex in consolidatedIndices) { - foreach (var consolidatedIndex in consolidatedIndices) - { - var vertex = mesh.PrimaryVertexData.Vertices[consolidatedIndex]; - vertex.AddInfluence((byte)boneToIndexMaps[joint], weight); - } + var vertex = mesh.PrimaryVertexData.Vertices[consolidatedIndex]; + vertex.AddInfluence((byte)boneToIndexMaps[joint], vertWeights[i]); } - - offset += stride; } - } - foreach (var vertex in mesh.PrimaryVertexData.Vertices) - { - vertex.FinalizeInfluences(); + offset += stride; } + } - // Warn if we have vertices that are not influenced by any bone - int notInfluenced = 0; - foreach (var vertex in mesh.PrimaryVertexData.Vertices) - { - if (vertex.BoneWeights[0] == 0) notInfluenced++; - } + foreach (var vertex in mesh.PrimaryVertexData.Vertices) + { + vertex.FinalizeInfluences(); + } - if (notInfluenced > 0) - Utils.Warn(String.Format("{0} vertices are not influenced by any bone", notInfluenced)); + // Warn if we have vertices that are not influenced by any bone + int notInfluenced = 0; + foreach (var vertex in mesh.PrimaryVertexData.Vertices) + { + if (vertex.BoneWeights[0] == 0) notInfluenced++; + } - if (skin.bind_shape_matrix != null) - { - var bindShapeFloats = skin.bind_shape_matrix.Trim().Split([' ']).Select(s => Single.Parse(s)).ToArray(); - var bindShapeMat = ColladaHelpers.FloatsToMatrix(bindShapeFloats); - bindShapeMat.Transpose(); + if (notInfluenced > 0) + Utils.Warn(String.Format("{0} vertices are not influenced by any bone", notInfluenced)); - // Deform geometries that were affected by our bind shape matrix - mesh.PrimaryVertexData.Transform(bindShapeMat); - } + if (skin.bind_shape_matrix != null) + { + var bindShapeFloats = skin.bind_shape_matrix.Trim().Split([' ']).Select(s => Single.Parse(s)).ToArray(); + var bindShapeMat = ColladaHelpers.FloatsToMatrix(bindShapeFloats); + bindShapeMat.Transpose(); - if (Options.RecalculateOBBs) - { - UpdateOBBs(root.Skeletons.Single(), mesh); - } + // Deform geometries that were affected by our bind shape matrix + mesh.PrimaryVertexData.Transform(bindShapeMat); } - class OBB + if (Options.RecalculateOBBs) { - public Vector3 Min, Max; - public int NumVerts = 0; + UpdateOBBs(root.Skeletons.Single(), mesh); } + } - private void UpdateOBBs(Skeleton skeleton, Mesh mesh) + class OBB + { + public Vector3 Min, Max; + public int NumVerts = 0; + } + + private void UpdateOBBs(Skeleton skeleton, Mesh mesh) + { + if (mesh.BoneBindings == null || mesh.BoneBindings.Count == 0) return; + + var obbs = new List(mesh.BoneBindings.Count); + for (var i = 0; i < mesh.BoneBindings.Count; i++) { - if (mesh.BoneBindings == null || mesh.BoneBindings.Count == 0) return; - - var obbs = new List(mesh.BoneBindings.Count); - for (var i = 0; i < mesh.BoneBindings.Count; i++) + obbs.Add(new OBB { - obbs.Add(new OBB - { - Min = new Vector3(1000.0f, 1000.0f, 1000.0f), - Max = new Vector3(-1000.0f, -1000.0f, -1000.0f), - }); - } - - 
foreach (var vert in mesh.PrimaryVertexData.Vertices) + Min = new Vector3(1000.0f, 1000.0f, 1000.0f), + Max = new Vector3(-1000.0f, -1000.0f, -1000.0f), + }); + } + + foreach (var vert in mesh.PrimaryVertexData.Vertices) + { + for (var i = 0; i < Vertex.MaxBoneInfluences; i++) { - for (var i = 0; i < 4; i++) + if (vert.BoneWeights[i] > 0) { - if (vert.BoneWeights[i] > 0) - { - var bi = vert.BoneIndices[i]; - var obb = obbs[bi]; - obb.NumVerts++; + var bi = vert.BoneIndices[i]; + var obb = obbs[bi]; + obb.NumVerts++; - var bone = skeleton.GetBoneByName(mesh.BoneBindings[bi].BoneName); - var invWorldTransform = ColladaHelpers.FloatsToMatrix(bone.InverseWorldTransform); - var transformed = Vector3.TransformPosition(vert.Position, invWorldTransform); + var bone = skeleton.GetBoneByName(mesh.BoneBindings[bi].BoneName); + var invWorldTransform = ColladaHelpers.FloatsToMatrix(bone.InverseWorldTransform); + var transformed = Vector3.TransformPosition(vert.Position, invWorldTransform); - obb.Min.X = Math.Min(obb.Min.X, transformed.X); - obb.Min.Y = Math.Min(obb.Min.Y, transformed.Y); - obb.Min.Z = Math.Min(obb.Min.Z, transformed.Z); + obb.Min.X = Math.Min(obb.Min.X, transformed.X); + obb.Min.Y = Math.Min(obb.Min.Y, transformed.Y); + obb.Min.Z = Math.Min(obb.Min.Z, transformed.Z); - obb.Max.X = Math.Max(obb.Max.X, transformed.X); - obb.Max.Y = Math.Max(obb.Max.Y, transformed.Y); - obb.Max.Z = Math.Max(obb.Max.Z, transformed.Z); - } - } - } - - for (var i = 0; i < obbs.Count; i++) - { - var obb = obbs[i]; - if (obb.NumVerts > 0) - { - mesh.BoneBindings[i].OBBMin = [obb.Min.X, obb.Min.Y, obb.Min.Z]; - mesh.BoneBindings[i].OBBMax = [obb.Max.X, obb.Max.Y, obb.Max.Z]; - } - else - { - mesh.BoneBindings[i].OBBMin = [0.0f, 0.0f, 0.0f]; - mesh.BoneBindings[i].OBBMax = [0.0f, 0.0f, 0.0f]; + obb.Max.X = Math.Max(obb.Max.X, transformed.X); + obb.Max.Y = Math.Max(obb.Max.Y, transformed.Y); + obb.Max.Z = Math.Max(obb.Max.Z, transformed.Z); } } } - private void LoadColladaLSLibProfileData(animation anim, TrackGroup loaded) + for (var i = 0; i < obbs.Count; i++) { - var technique = FindExporterExtraData(anim.extra); - if (technique == null || technique.Any == null) return; - - foreach (var setting in technique.Any) + var obb = obbs[i]; + if (obb.NumVerts > 0) { - switch (setting.LocalName) - { - case "SkeletonResourceID": - loaded.ExtendedData = new BG3TrackGroupExtendedData - { - SkeletonResourceID = setting.InnerText.Trim() - }; - break; - - default: - Utils.Warn($"Unrecognized LSLib animation profile attribute: {setting.LocalName}"); - break; - } + mesh.BoneBindings[i].OBBMin = [obb.Min.X, obb.Min.Y, obb.Min.Z]; + mesh.BoneBindings[i].OBBMax = [obb.Max.X, obb.Max.Y, obb.Max.Z]; + } + else + { + mesh.BoneBindings[i].OBBMin = [0.0f, 0.0f, 0.0f]; + mesh.BoneBindings[i].OBBMax = [0.0f, 0.0f, 0.0f]; } } + } - public void ImportAnimations(IEnumerable anims, Root root, Skeleton skeleton) - { - var trackGroup = new TrackGroup - { - Name = (skeleton != null) ? 
skeleton.Name : "Dummy_Root", - TransformTracks = [], - InitialPlacement = new Transform(), - AccumulationFlags = 2, - LoopTranslation = [0, 0, 0] - }; + private void LoadColladaLSLibProfileData(animation anim, TrackGroup loaded) + { + var technique = FindExporterExtraData(anim.extra); + if (technique == null || technique.Any == null) return; - var animation = new Animation + foreach (var setting in technique.Any) + { + switch (setting.LocalName) { - Name = "Default", - TimeStep = 0.016667f, // 60 FPS - Oversampling = 1, - DefaultLoopCount = 1, - Flags = 1, - Duration = .0f, - TrackGroups = [trackGroup] - }; + case "SkeletonResourceID": + loaded.ExtendedData = new BG3TrackGroupExtendedData + { + SkeletonResourceID = setting.InnerText.Trim() + }; + break; - foreach (var colladaTrack in anims) - { - ImportAnimation(colladaTrack, animation, trackGroup, skeleton); + default: + Utils.Warn($"Unrecognized LSLib animation profile attribute: {setting.LocalName}"); + break; } + } + } - if (trackGroup.TransformTracks.Count > 0) - { - // Reorder transform tracks in lexicographic order - // This is needed by Granny; otherwise it'll fail to find animation tracks - trackGroup.TransformTracks.Sort((t1, t2) => String.Compare(t1.Name, t2.Name, StringComparison.Ordinal)); - - root.TrackGroups.Add(trackGroup); - root.Animations.Add(animation); - } + public void ImportAnimations(IEnumerable anims, Root root, Skeleton skeleton) + { + var trackGroup = new TrackGroup + { + Name = (skeleton != null) ? skeleton.Name : "Dummy_Root", + TransformTracks = [], + InitialPlacement = new Transform(), + AccumulationFlags = 2, + LoopTranslation = [0, 0, 0] + }; + + var animation = new Animation + { + Name = "Default", + TimeStep = 0.016667f, // 60 FPS + Oversampling = 1, + DefaultLoopCount = 1, + Flags = 1, + Duration = .0f, + TrackGroups = [trackGroup] + }; + + foreach (var colladaTrack in anims) + { + ImportAnimation(colladaTrack, animation, trackGroup, skeleton); } - public void ImportAnimation(animation colladaAnim, Animation animation, TrackGroup trackGroup, Skeleton skeleton) + if (trackGroup.TransformTracks.Count > 0) { - var childAnims = 0; - foreach (var item in colladaAnim.Items) - { - if (item is animation) - { - ImportAnimation(item as animation, animation, trackGroup, skeleton); - childAnims++; - } - } + // Reorder transform tracks in lexicographic order + // This is needed by Granny; otherwise it'll fail to find animation tracks + trackGroup.TransformTracks.Sort((t1, t2) => String.Compare(t1.Name, t2.Name, StringComparison.Ordinal)); + + root.TrackGroups.Add(trackGroup); + root.Animations.Add(animation); + } + } - var duration = .0f; - if (childAnims < colladaAnim.Items.Length) + public void ImportAnimation(animation colladaAnim, Animation animation, TrackGroup trackGroup, Skeleton skeleton) + { + var childAnims = 0; + foreach (var item in colladaAnim.Items) + { + if (item is animation) { - ColladaAnimation importAnim = new(); - if (importAnim.ImportFromCollada(colladaAnim, skeleton)) - { - duration = Math.Max(duration, importAnim.Duration); - var track = importAnim.MakeTrack(Options.RemoveTrivialAnimationKeys); - trackGroup.TransformTracks.Add(track); - LoadColladaLSLibProfileData(colladaAnim, trackGroup); - } + ImportAnimation(item as animation, animation, trackGroup, skeleton); + childAnims++; } - - animation.Duration = Math.Max(animation.Duration, duration); } - public Root Import(string inputPath) + var duration = .0f; + if (childAnims < colladaAnim.Items.Length) { - COLLADA collada = null; - using (var 
stream = File.OpenRead(inputPath)) + ColladaAnimation importAnim = new(); + if (importAnim.ImportFromCollada(colladaAnim, skeleton)) { - collada = COLLADA.Load(stream); + duration = Math.Max(duration, importAnim.Duration); + var track = importAnim.MakeTrack(Options.RemoveTrivialAnimationKeys); + trackGroup.TransformTracks.Add(track); + LoadColladaLSLibProfileData(colladaAnim, trackGroup); } + } - LoadColladaLSLibProfileData(collada); - - var root = new Root - { - ArtToolInfo = ImportArtToolInfo(collada), - ExporterInfo = Options.StripMetadata ? null : ImportExporterInfo(collada), - - FromFileName = inputPath, - - Skeletons = [], - VertexDatas = [], - TriTopologies = [], - Meshes = [], - Models = [], - TrackGroups = [], - Animations = [] - }; + animation.Duration = Math.Max(animation.Duration, duration); + } - ColladaGeometries = []; - SkinnedMeshes = []; + public Root Import(string inputPath) + { + COLLADA collada = null; + using (var stream = File.OpenRead(inputPath)) + { + collada = COLLADA.Load(stream); + } - var collGeometries = new List(); - var collSkins = new List(); - var collNodes = new List(); - var collAnimations = new List(); - var rootBones = new List(); + LoadColladaLSLibProfileData(collada); - // Import skinning controllers after skeleton and geometry loading has finished, as - // we reference both of them during skin import - foreach (var item in collada.Items) + var root = new Root + { + ArtToolInfo = ImportArtToolInfo(collada), + ExporterInfo = Options.StripMetadata ? null : ImportExporterInfo(collada), + + FromFileName = inputPath, + + Skeletons = [], + VertexDatas = [], + TriTopologies = [], + Meshes = [], + Models = [], + TrackGroups = [], + Animations = [] + }; + + ColladaGeometries = []; + SkinnedMeshes = []; + + var collGeometries = new List(); + var collSkins = new List(); + var collNodes = new List(); + var collAnimations = new List(); + var rootBones = new List(); + + // Import skinning controllers after skeleton and geometry loading has finished, as + // we reference both of them during skin import + foreach (var item in collada.Items) + { + if (item is library_controllers) { - if (item is library_controllers) + var controllers = item as library_controllers; + if (controllers.controller != null) { - var controllers = item as library_controllers; - if (controllers.controller != null) + foreach (var controller in controllers.controller) { - foreach (var controller in controllers.controller) + if (controller.Item is skin) { - if (controller.Item is skin) - { - collSkins.Add(controller.Item as skin); - SkinnedMeshes.Add((controller.Item as skin).source1[1..]); - } - else - { - Utils.Warn(String.Format("Controller {0} is unsupported and will be ignored", controller.Item.GetType().Name)); - } + collSkins.Add(controller.Item as skin); + SkinnedMeshes.Add((controller.Item as skin).source1[1..]); } - } - } - else if (item is library_visual_scenes) - { - var scenes = item as library_visual_scenes; - if (scenes.visual_scene != null) - { - foreach (var scene in scenes.visual_scene) + else { - if (scene.node != null) - { - foreach (var node in scene.node) - { - collNodes.Add(node); - FindRootBones([], node, rootBones); - } - } + Utils.Warn(String.Format("Controller {0} is unsupported and will be ignored", controller.Item.GetType().Name)); } } } - else if (item is library_geometries) + } + else if (item is library_visual_scenes) + { + var scenes = item as library_visual_scenes; + if (scenes.visual_scene != null) { - var geometries = item as library_geometries; - if 
(geometries.geometry != null) + foreach (var scene in scenes.visual_scene) { - foreach (var geometry in geometries.geometry) + if (scene.node != null) { - if (geometry.Item is mesh) - { - collGeometries.Add(geometry); - } - else + foreach (var node in scene.node) { - Utils.Warn(String.Format("Geometry type {0} is unsupported and will be ignored", geometry.Item.GetType().Name)); + collNodes.Add(node); + FindRootBones([], node, rootBones); } } } } - else if (item is library_animations) + } + else if (item is library_geometries) + { + var geometries = item as library_geometries; + if (geometries.geometry != null) { - var animations = item as library_animations; - if (animations.animation != null) + foreach (var geometry in geometries.geometry) { - collAnimations.AddRange(animations.animation); + if (geometry.Item is mesh) + { + collGeometries.Add(geometry); + } + else + { + Utils.Warn(String.Format("Geometry type {0} is unsupported and will be ignored", geometry.Item.GetType().Name)); + } } } - else + } + else if (item is library_animations) + { + var animations = item as library_animations; + if (animations.animation != null) { - Utils.Warn($"Library {item.GetType().Name} is unsupported and will be ignored"); + collAnimations.AddRange(animations.animation); } } - - foreach (var bone in rootBones) + else { - var skeleton = Skeleton.FromCollada(bone.Bone); - var rootTransform = NodeHelpers.GetTransformHierarchy(bone.Parents); - skeleton.TransformRoots(rootTransform.Inverted()); - skeleton.ReorderBones(); - root.Skeletons.Add(skeleton); + Utils.Warn($"Library {item.GetType().Name} is unsupported and will be ignored"); } + } - foreach (var geometry in collGeometries) - { - // Use the override vertex format, if one was specified - Options.VertexFormats.TryGetValue(geometry.name, out VertexDescriptor vertexFormat); - var mesh = ImportMesh(root, geometry.name, geometry, geometry.Item as mesh, vertexFormat); - ColladaGeometries.Add(geometry.id, mesh); - } + foreach (var bone in rootBones) + { + var skeleton = Skeleton.FromCollada(bone.Bone); + var rootTransform = NodeHelpers.GetTransformHierarchy(bone.Parents); + skeleton.TransformRoots(rootTransform.Inverted()); + skeleton.ReorderBones(); + root.Skeletons.Add(skeleton); + } - // Reorder meshes based on their ExportOrder - if (root.Meshes.Any(m => m.ExportOrder > -1)) - { - root.Meshes.Sort((a, b) => a.ExportOrder - b.ExportOrder); - } + foreach (var geometry in collGeometries) + { + // Use the override vertex format, if one was specified + Options.VertexFormats.TryGetValue(geometry.name, out VertexDescriptor vertexFormat); + var mesh = ImportMesh(root, geometry.name, geometry, geometry.Item as mesh, vertexFormat); + ColladaGeometries.Add(geometry.id, mesh); + } - // Import skinning controllers after skeleton and geometry loading has finished, as - // we reference both of them during skin import - if (rootBones.Count > 0) - { - foreach (var skin in collSkins) - { - ImportSkin(root, skin); - } - } + // Reorder meshes based on their ExportOrder + if (root.Meshes.Any(m => m.ExportOrder > -1)) + { + root.Meshes.Sort((a, b) => a.ExportOrder - b.ExportOrder); + } - if (collAnimations.Count > 0) + // Import skinning controllers after skeleton and geometry loading has finished, as + // we reference both of them during skin import + if (rootBones.Count > 0) + { + foreach (var skin in collSkins) { - ImportAnimations(collAnimations, root, root.Skeletons.FirstOrDefault()); + ImportSkin(root, skin); } + } - var rootModel = new Model(); - rootModel.Name = 
"Unnamed"; // TODO - if (root.Skeletons.Count > 0) - { - rootModel.Skeleton = root.Skeletons[0]; - rootModel.Name = rootModel.Skeleton.Bones[0].Name; - } - rootModel.InitialPlacement = new Transform(); - rootModel.MeshBindings = new List(); - foreach (var mesh in root.Meshes) - { - var binding = new MeshBinding(); - binding.Mesh = mesh; - rootModel.MeshBindings.Add(binding); - } + if (collAnimations.Count > 0) + { + ImportAnimations(collAnimations, root, root.Skeletons.FirstOrDefault()); + } - root.Models.Add(rootModel); - // TODO: make this an option! - if (root.Skeletons.Count > 0) - root.Skeletons[0].UpdateWorldTransforms(); - root.ZUp = ZUp; - root.PostLoad(GR2.Header.DefaultTag); + var rootModel = new Model(); + rootModel.Name = "Unnamed"; // TODO + if (root.Skeletons.Count > 0) + { + rootModel.Skeleton = root.Skeletons[0]; + rootModel.Name = rootModel.Skeleton.Bones[0].Name; + } + rootModel.InitialPlacement = new Transform(); + rootModel.MeshBindings = new List(); + foreach (var mesh in root.Meshes) + { + var binding = new MeshBinding(); + binding.Mesh = mesh; + rootModel.MeshBindings.Add(binding); + } - BuildExtendedData(root); + root.Models.Add(rootModel); + // TODO: make this an option! + if (root.Skeletons.Count > 0) + root.Skeletons[0].UpdateWorldTransforms(); + root.ZUp = ZUp; + root.PostLoad(GR2.Header.DefaultTag); - return root; - } + BuildExtendedData(root); + + return root; } } diff --git a/LSLib/Granny/Model/Vertex.cs b/LSLib/Granny/Model/Vertex.cs index 15840380..b7fe53c9 100644 --- a/LSLib/Granny/Model/Vertex.cs +++ b/LSLib/Granny/Model/Vertex.cs @@ -3,618 +3,618 @@ using System; using System.Collections.Generic; -namespace LSLib.Granny.Model +namespace LSLib.Granny.Model; + +public struct BoneWeight : IEquatable { - public struct BoneWeight : IEquatable - { - public byte A, B, C, D; + public byte A, B, C, D; - /// - /// Gets or sets the value at the index of the weight vector. - /// - public byte this[int index] + /// + /// Gets or sets the value at the index of the weight vector. 
+ /// + public byte this[int index] + { + get { - get - { - if (index == 0) return A; - else if (index == 1) return B; - else if (index == 2) return C; - else if (index == 3) return D; - throw new IndexOutOfRangeException("Illegal bone influence index: " + index); - } - set - { - if (index == 0) A = value; - else if (index == 1) B = value; - else if (index == 2) C = value; - else if (index == 3) D = value; - else throw new IndexOutOfRangeException("Illegal bone influence index: " + index); - } + if (index == 0) return A; + else if (index == 1) return B; + else if (index == 2) return C; + else if (index == 3) return D; + throw new IndexOutOfRangeException("Illegal bone influence index: " + index); } - - public bool Equals(BoneWeight w) + set { - return A == w.A - && B == w.B - && C == w.C - && D == w.D; - } - - public override int GetHashCode() - { - return (int)A ^ (int)(B << 8) ^ (int)(C << 16) ^ (int)(D << 24); + if (index == 0) A = value; + else if (index == 1) B = value; + else if (index == 2) C = value; + else if (index == 3) D = value; + else throw new IndexOutOfRangeException("Illegal bone influence index: " + index); } } - public enum PositionType + public bool Equals(BoneWeight w) { - None, - Float3, - Word4 - }; + return A == w.A + && B == w.B + && C == w.C + && D == w.D; + } - public enum NormalType - { - None, - Float3, - Half4, - Byte4, - QTangent - }; - - public enum ColorMapType + public override int GetHashCode() { - None, - Float4, - Byte4 - }; + return (int)A ^ (int)(B << 8) ^ (int)(C << 16) ^ (int)(D << 24); + } +} - public enum TextureCoordinateType - { - None, - Float2, - Half2 - }; +public enum PositionType +{ + None, + Float3, + Word4 +}; - /// - /// Describes the properties (Position, Normal, Tangent, ...) of the vertex format - /// - public class VertexDescriptor +public enum NormalType +{ + None, + Float3, + Half4, + Byte4, + QTangent +}; + +public enum ColorMapType +{ + None, + Float4, + Byte4 +}; + +public enum TextureCoordinateType +{ + None, + Float2, + Half2 +}; + +/// +/// Describes the properties (Position, Normal, Tangent, ...) 
of the vertex format +/// +public class VertexDescriptor +{ + public bool HasBoneWeights = false; + public int NumBoneInfluences = Vertex.MaxBoneInfluences; + public PositionType PositionType = PositionType.None; + public NormalType NormalType = NormalType.None; + public NormalType TangentType = NormalType.None; + public NormalType BinormalType = NormalType.None; + public ColorMapType ColorMapType = ColorMapType.None; + public int ColorMaps = 0; + public TextureCoordinateType TextureCoordinateType = TextureCoordinateType.None; + public int TextureCoordinates = 0; + private Type VertexType; + + public List ComponentNames() { - public bool HasBoneWeights = false; - public int NumBoneInfluences = 4; - public PositionType PositionType = PositionType.None; - public NormalType NormalType = NormalType.None; - public NormalType TangentType = NormalType.None; - public NormalType BinormalType = NormalType.None; - public ColorMapType ColorMapType = ColorMapType.None; - public int ColorMaps = 0; - public TextureCoordinateType TextureCoordinateType = TextureCoordinateType.None; - public int TextureCoordinates = 0; - private Type VertexType; - - public List ComponentNames() + var names = new List(); + if (PositionType != PositionType.None) { - var names = new List(); - if (PositionType != PositionType.None) - { - names.Add("Position"); - } + names.Add("Position"); + } - if (HasBoneWeights) - { - names.Add("BoneWeights"); - names.Add("BoneIndices"); - } + if (HasBoneWeights) + { + names.Add("BoneWeights"); + names.Add("BoneIndices"); + } - if (NormalType != NormalType.None) + if (NormalType != NormalType.None) + { + if (NormalType == NormalType.QTangent) { - if (NormalType == NormalType.QTangent) - { - names.Add("QTangent"); - } - else - { - names.Add("Normal"); - } + names.Add("QTangent"); } - - if (TangentType != NormalType.None - && TangentType != NormalType.QTangent) + else { - names.Add("Tangent"); + names.Add("Normal"); } + } - if (BinormalType != NormalType.None - && BinormalType != NormalType.QTangent) - { - names.Add("Binormal"); - } + if (TangentType != NormalType.None + && TangentType != NormalType.QTangent) + { + names.Add("Tangent"); + } - if (ColorMapType != ColorMapType.None) - { - for (int i = 0; i < ColorMaps; i++) - { - names.Add("DiffuseColor_" + i.ToString()); - } - } + if (BinormalType != NormalType.None + && BinormalType != NormalType.QTangent) + { + names.Add("Binormal"); + } - if (TextureCoordinateType != TextureCoordinateType.None) + if (ColorMapType != ColorMapType.None) + { + for (int i = 0; i < ColorMaps; i++) { - for (int i = 0; i < TextureCoordinates; i++) - { - names.Add("TextureCoordinate_" + i.ToString()); - } + names.Add("DiffuseColor_" + i.ToString()); } - - return names; } - public String Name() + if (TextureCoordinateType != TextureCoordinateType.None) { - string vertexFormat; - vertexFormat = ""; - string attributeCounts = ""; - - switch (PositionType) + for (int i = 0; i < TextureCoordinates; i++) { - case PositionType.None: - break; + names.Add("TextureCoordinate_" + i.ToString()); + } + } - case PositionType.Float3: - vertexFormat += "P"; - attributeCounts += "3"; - break; + return names; + } - case PositionType.Word4: - vertexFormat += "PW"; - attributeCounts += "4"; - break; - } + public String Name() + { + string vertexFormat; + vertexFormat = ""; + string attributeCounts = ""; - if (HasBoneWeights) - { - vertexFormat += "W"; - attributeCounts += NumBoneInfluences.ToString(); - } - - switch (NormalType) - { - case NormalType.None: - break; + switch 
(PositionType) + { + case PositionType.None: + break; + + case PositionType.Float3: + vertexFormat += "P"; + attributeCounts += "3"; + break; + + case PositionType.Word4: + vertexFormat += "PW"; + attributeCounts += "4"; + break; + } - case NormalType.Float3: - vertexFormat += "N"; - attributeCounts += "3"; - break; + if (HasBoneWeights) + { + vertexFormat += "W"; + attributeCounts += NumBoneInfluences.ToString(); + } + + switch (NormalType) + { + case NormalType.None: + break; + + case NormalType.Float3: + vertexFormat += "N"; + attributeCounts += "3"; + break; + + case NormalType.Half4: + vertexFormat += "HN"; + attributeCounts += "4"; + break; + + case NormalType.QTangent: + vertexFormat += "QN"; + attributeCounts += "4"; + break; + } - case NormalType.Half4: - vertexFormat += "HN"; - attributeCounts += "4"; - break; + switch (TangentType) + { + case NormalType.None: + break; + + case NormalType.Float3: + vertexFormat += "G"; + attributeCounts += "3"; + break; + + case NormalType.Half4: + vertexFormat += "HG"; + attributeCounts += "4"; + break; + } - case NormalType.QTangent: - vertexFormat += "QN"; - attributeCounts += "4"; - break; - } + switch (BinormalType) + { + case NormalType.None: + break; + + case NormalType.Float3: + vertexFormat += "B"; + attributeCounts += "3"; + break; + + case NormalType.Half4: + vertexFormat += "HB"; + attributeCounts += "4"; + break; + } - switch (TangentType) + for (var i = 0; i < ColorMaps; i++) + { + switch (ColorMapType) { - case NormalType.None: + case ColorMapType.None: break; - case NormalType.Float3: - vertexFormat += "G"; - attributeCounts += "3"; + case ColorMapType.Float4: + vertexFormat += "D"; + attributeCounts += "4"; break; - case NormalType.Half4: - vertexFormat += "HG"; + case ColorMapType.Byte4: + vertexFormat += "CD"; attributeCounts += "4"; break; } + } - switch (BinormalType) + for (var i = 0; i < TextureCoordinates; i++) + { + switch (TextureCoordinateType) { - case NormalType.None: + case TextureCoordinateType.None: break; - case NormalType.Float3: - vertexFormat += "B"; - attributeCounts += "3"; + case TextureCoordinateType.Float2: + vertexFormat += "T"; + attributeCounts += "2"; break; - case NormalType.Half4: - vertexFormat += "HB"; - attributeCounts += "4"; + case TextureCoordinateType.Half2: + vertexFormat += "HT"; + attributeCounts += "2"; break; } - - for (var i = 0; i < ColorMaps; i++) - { - switch (ColorMapType) - { - case ColorMapType.None: - break; - - case ColorMapType.Float4: - vertexFormat += "D"; - attributeCounts += "4"; - break; - - case ColorMapType.Byte4: - vertexFormat += "CD"; - attributeCounts += "4"; - break; - } - } - - for (var i = 0; i < TextureCoordinates; i++) - { - switch (TextureCoordinateType) - { - case TextureCoordinateType.None: - break; - - case TextureCoordinateType.Float2: - vertexFormat += "T"; - attributeCounts += "2"; - break; - - case TextureCoordinateType.Half2: - vertexFormat += "HT"; - attributeCounts += "2"; - break; - } - } - - return vertexFormat + attributeCounts; } - public Vertex CreateInstance() - { - if (VertexType == null) - { - var typeName = "Vertex_" + Name(); - VertexType = VertexTypeBuilder.CreateVertexSubtype(typeName); - } - - var vert = Activator.CreateInstance(VertexType) as Vertex; - vert.Format = this; - return vert; - } + return vertexFormat + attributeCounts; } - [StructSerialization(TypeSelector = typeof(VertexDefinitionSelector), MixedMarshal = true)] - public class Vertex + public Vertex CreateInstance() { - public const int MaxUVs = 4; - public const int 
MaxColors = 2; - - public VertexDescriptor Format; - public Vector3 Position; - public BoneWeight BoneWeights; - public BoneWeight BoneIndices; - public Vector3 Normal; - public Vector3 Tangent; - public Vector3 Binormal; - public Vector4 Color0; - public Vector4 Color1; - public Vector2 TextureCoordinates0; - public Vector2 TextureCoordinates1; - public Vector2 TextureCoordinates2; - public Vector2 TextureCoordinates3; - public Vector2 TextureCoordinates4; - public Vector2 TextureCoordinates5; - - protected Vertex() { } - - public Vector2 GetUV(int index) + if (VertexType == null) { - return index switch - { - 0 => TextureCoordinates0, - 1 => TextureCoordinates1, - 2 => TextureCoordinates2, - 3 => TextureCoordinates3, - 4 => TextureCoordinates4, - 5 => TextureCoordinates5, - _ => throw new ArgumentException($"At most {MaxUVs} UVs are supported."), - }; + var typeName = "Vertex_" + Name(); + VertexType = VertexTypeBuilder.CreateVertexSubtype(typeName); } - public void SetUV(int index, Vector2 uv) + var vert = Activator.CreateInstance(VertexType) as Vertex; + vert.Format = this; + return vert; + } +} + +[StructSerialization(TypeSelector = typeof(VertexDefinitionSelector), MixedMarshal = true)] +public class Vertex +{ + public const int MaxBoneInfluences = 4; + public const int MaxUVs = 4; + public const int MaxColors = 2; + + public VertexDescriptor Format; + public Vector3 Position; + public BoneWeight BoneWeights; + public BoneWeight BoneIndices; + public Vector3 Normal; + public Vector3 Tangent; + public Vector3 Binormal; + public Vector4 Color0; + public Vector4 Color1; + public Vector2 TextureCoordinates0; + public Vector2 TextureCoordinates1; + public Vector2 TextureCoordinates2; + public Vector2 TextureCoordinates3; + public Vector2 TextureCoordinates4; + public Vector2 TextureCoordinates5; + + protected Vertex() { } + + public Vector2 GetUV(int index) + { + return index switch { - switch (index) - { - case 0: TextureCoordinates0 = uv; break; - case 1: TextureCoordinates1 = uv; break; - case 2: TextureCoordinates2 = uv; break; - case 3: TextureCoordinates3 = uv; break; - case 4: TextureCoordinates4 = uv; break; - case 5: TextureCoordinates5 = uv; break; - default: throw new ArgumentException($"At most {MaxUVs} UVs are supported."); - } - } + 0 => TextureCoordinates0, + 1 => TextureCoordinates1, + 2 => TextureCoordinates2, + 3 => TextureCoordinates3, + 4 => TextureCoordinates4, + 5 => TextureCoordinates5, + _ => throw new ArgumentException($"At most {MaxUVs} UVs are supported."), + }; + } - public Vector4 GetColor(int index) + public void SetUV(int index, Vector2 uv) + { + switch (index) { - return index switch - { - 0 => Color0, - 1 => Color1, - _ => throw new ArgumentException($"At most {MaxColors} color maps are supported."), - }; + case 0: TextureCoordinates0 = uv; break; + case 1: TextureCoordinates1 = uv; break; + case 2: TextureCoordinates2 = uv; break; + case 3: TextureCoordinates3 = uv; break; + case 4: TextureCoordinates4 = uv; break; + case 5: TextureCoordinates5 = uv; break; + default: throw new ArgumentException($"At most {MaxUVs} UVs are supported."); } + } - public void SetColor(int index, Vector4 color) + public Vector4 GetColor(int index) + { + return index switch { - switch (index) - { - case 0: Color0 = color; break; - case 1: Color1 = color; break; - default: throw new ArgumentException($"At most {MaxColors} color maps are supported."); - } - } + 0 => Color0, + 1 => Color1, + _ => throw new ArgumentException($"At most {MaxColors} color maps are supported."), + }; + 
} - public Vertex Clone() + public void SetColor(int index, Vector4 color) + { + switch (index) { - return MemberwiseClone() as Vertex; + case 0: Color0 = color; break; + case 1: Color1 = color; break; + default: throw new ArgumentException($"At most {MaxColors} color maps are supported."); } + } + + public Vertex Clone() + { + return MemberwiseClone() as Vertex; + } - public void AddInfluence(byte boneIndex, float weight) + public void AddInfluence(byte boneIndex, byte weight) + { + // Get the first zero vertex influence and update it with the new one + for (var influence = 0; influence < MaxBoneInfluences; influence++) { - // Get the first zero vertex influence and update it with the new one - for (var influence = 0; influence < 4; influence++) + if (BoneWeights[influence] == 0) { - if (BoneWeights[influence] == 0) - { - // BoneIndices refers to Mesh.BoneBindings[index], not Skeleton.Bones[index] ! - BoneIndices[influence] = boneIndex; - BoneWeights[influence] = (byte)(Math.Round(weight * 255)); - break; - } + // BoneIndices refers to Mesh.BoneBindings[index], not Skeleton.Bones[index] ! + BoneIndices[influence] = boneIndex; + BoneWeights[influence] = weight; + break; } } + } - public void FinalizeInfluences() + public void FinalizeInfluences() + { + for (var influence = 1; influence < MaxBoneInfluences; influence++) { - for (var influence = 1; influence < 4; influence++) + if (BoneWeights[influence] == 0) { - if (BoneWeights[influence] == 0) - { - BoneIndices[influence] = BoneIndices[0]; - } + BoneIndices[influence] = BoneIndices[0]; } } + } - public void Transform(Matrix4 transformation, Matrix4 inverse) - { - Position = Vector3.TransformPosition(Position, transformation); - Normal = Vector3.Normalize(Vector3.TransformNormalInverse(Normal, inverse)); - Tangent = Vector3.Normalize(Vector3.TransformNormalInverse(Tangent, inverse)); - Binormal = Vector3.Normalize(Vector3.TransformNormalInverse(Binormal, inverse)); - } - - public void Serialize(WritableSection section) - { - VertexSerializationHelpers.Serialize(section, this); - } + public void Transform(Matrix4 transformation, Matrix4 inverse) + { + Position = Vector3.TransformPosition(Position, transformation); + Normal = Vector3.Normalize(Vector3.TransformNormalInverse(Normal, inverse)); + Tangent = Vector3.Normalize(Vector3.TransformNormalInverse(Tangent, inverse)); + Binormal = Vector3.Normalize(Vector3.TransformNormalInverse(Binormal, inverse)); + } - public void Unserialize(GR2Reader reader) - { - VertexSerializationHelpers.Unserialize(reader, this); - } + public void Serialize(WritableSection section) + { + VertexSerializationHelpers.Serialize(section, this); } - - public class VertexSerializer : NodeSerializer + public void Unserialize(GR2Reader reader) { - private Dictionary VertexTypeCache = new Dictionary(); + VertexSerializationHelpers.Unserialize(reader, this); + } +} - public VertexDescriptor ConstructDescriptor(MemberDefinition memberDefn, StructDefinition defn, object parent) + +public class VertexSerializer : NodeSerializer +{ + private Dictionary VertexTypeCache = new Dictionary(); + + public VertexDescriptor ConstructDescriptor(MemberDefinition memberDefn, StructDefinition defn, object parent) + { + var desc = new VertexDescriptor(); + + foreach (var member in defn.Members) { - var desc = new VertexDescriptor(); - - foreach (var member in defn.Members) + switch (member.Name) { - switch (member.Name) - { - case "Position": - if (member.Type == MemberType.Real32 && member.ArraySize == 3) - { - desc.PositionType = 
PositionType.Float3; - } - // Game incorrectly uses UInt16 instead of BinormalInt16 sometimes - else if ((member.Type == MemberType.BinormalInt16 || member.Type == MemberType.UInt16) && member.ArraySize == 4) - { - desc.PositionType = PositionType.Word4; - } - else - { - throw new Exception($"Unsupported position format: {member.Type}, {member.ArraySize}"); - } - break; - - case "BoneWeights": - if (member.Type != MemberType.NormalUInt8) - { - throw new Exception("Bone weight must be a NormalUInt8"); - } - - if (member.ArraySize != 2 && member.ArraySize != 4) - { - throw new Exception($"Unsupported bone influence count: {member.ArraySize}"); - } - - desc.HasBoneWeights = true; - desc.NumBoneInfluences = (int)member.ArraySize; - break; - - case "BoneIndices": - if (member.Type != MemberType.UInt8) - { - throw new Exception("Bone index must be an UInt8"); - } - break; - - case "Normal": - if (member.Type == MemberType.Real32 && member.ArraySize == 3) - { - desc.NormalType = NormalType.Float3; - } - else if (member.Type == MemberType.Real16 && member.ArraySize == 4) - { - desc.NormalType = NormalType.Half4; - } - else if (member.Type == MemberType.BinormalInt8 && member.ArraySize == 4) - { - desc.NormalType = NormalType.Byte4; - } - else - { - throw new Exception($"Unsupported normal format: {member.Type}, {member.ArraySize}"); - } - break; - - case "QTangent": - // Game incorrectly uses UInt16 instead of BinormalInt16 sometimes - if ((member.Type == MemberType.BinormalInt16 || member.Type == MemberType.UInt16) && member.ArraySize == 4) - { - desc.NormalType = NormalType.QTangent; - desc.TangentType = NormalType.QTangent; - desc.BinormalType = NormalType.QTangent; - } - else - { - throw new Exception($"Unsupported QTangent format: {member.Type}, {member.ArraySize}"); - } - break; - - case "Tangent": - if (member.Type == MemberType.Real32 && member.ArraySize == 3) - { - desc.TangentType = NormalType.Float3; - } - else if (member.Type == MemberType.Real16 && member.ArraySize == 4) - { - desc.TangentType = NormalType.Half4; - } - else if (member.Type == MemberType.BinormalInt8 && member.ArraySize == 4) - { - desc.TangentType = NormalType.Byte4; - } - else - { - throw new Exception($"Unsupported tangent format: {member.Type}, {member.ArraySize}"); - } - break; - - case "Binormal": - if (member.Type == MemberType.Real32 && member.ArraySize == 3) - { - desc.BinormalType = NormalType.Float3; - } - else if (member.Type == MemberType.Real16 && member.ArraySize == 4) - { - desc.BinormalType = NormalType.Half4; - } - else if (member.Type == MemberType.BinormalInt8 && member.ArraySize == 4) - { - desc.BinormalType = NormalType.Byte4; - } - else - { - throw new Exception($"Unsupported binormal format: {member.Type}, {member.ArraySize}"); - } - break; - - case "DiffuseColor0": - case "DiffuseColor1": - desc.ColorMaps++; - if (member.Type == MemberType.Real32 && member.ArraySize == 4) - { - desc.ColorMapType = ColorMapType.Float4; - } - else if (member.Type == MemberType.NormalUInt8 && member.ArraySize == 4) - { - desc.ColorMapType = ColorMapType.Byte4; - } - //Some Granny2 model formats report their color maps as UInt8 type instead of NormalUInt8, causing it to fail checks. 
- else if (member.Type == MemberType.UInt8 && member.ArraySize == 4) - { - desc.ColorMapType = ColorMapType.Byte4; - } - else - { - throw new Exception($"Unsupported color map type: {member.Type}, {member.ArraySize}"); - } - break; - - case "TextureCoordinates0": - case "TextureCoordinates1": - case "TextureCoordinates2": - case "TextureCoordinates3": - case "TextureCoordinates4": - case "TextureCoordinates5": - desc.TextureCoordinates++; - if (member.Type == MemberType.Real32 && member.ArraySize == 2) - { - desc.TextureCoordinateType = TextureCoordinateType.Float2; - } - else if (member.Type == MemberType.Real16 && member.ArraySize == 2) - { - desc.TextureCoordinateType = TextureCoordinateType.Half2; - } - else - { - throw new Exception($"Unsupported texture coordinate format: {member.Type}, {member.ArraySize}"); - } - break; - - default: - throw new Exception($"Unknown vertex property: {member.Name}"); - } - } + case "Position": + if (member.Type == MemberType.Real32 && member.ArraySize == 3) + { + desc.PositionType = PositionType.Float3; + } + // Game incorrectly uses UInt16 instead of BinormalInt16 sometimes + else if ((member.Type == MemberType.BinormalInt16 || member.Type == MemberType.UInt16) && member.ArraySize == 4) + { + desc.PositionType = PositionType.Word4; + } + else + { + throw new Exception($"Unsupported position format: {member.Type}, {member.ArraySize}"); + } + break; + + case "BoneWeights": + if (member.Type != MemberType.NormalUInt8) + { + throw new Exception("Bone weight must be a NormalUInt8"); + } + + if (member.ArraySize != 2 && member.ArraySize != 4) + { + throw new Exception($"Unsupported bone influence count: {member.ArraySize}"); + } + + desc.HasBoneWeights = true; + desc.NumBoneInfluences = (int)member.ArraySize; + break; + + case "BoneIndices": + if (member.Type != MemberType.UInt8) + { + throw new Exception("Bone index must be an UInt8"); + } + break; + + case "Normal": + if (member.Type == MemberType.Real32 && member.ArraySize == 3) + { + desc.NormalType = NormalType.Float3; + } + else if (member.Type == MemberType.Real16 && member.ArraySize == 4) + { + desc.NormalType = NormalType.Half4; + } + else if (member.Type == MemberType.BinormalInt8 && member.ArraySize == 4) + { + desc.NormalType = NormalType.Byte4; + } + else + { + throw new Exception($"Unsupported normal format: {member.Type}, {member.ArraySize}"); + } + break; + + case "QTangent": + // Game incorrectly uses UInt16 instead of BinormalInt16 sometimes + if ((member.Type == MemberType.BinormalInt16 || member.Type == MemberType.UInt16) && member.ArraySize == 4) + { + desc.NormalType = NormalType.QTangent; + desc.TangentType = NormalType.QTangent; + desc.BinormalType = NormalType.QTangent; + } + else + { + throw new Exception($"Unsupported QTangent format: {member.Type}, {member.ArraySize}"); + } + break; + + case "Tangent": + if (member.Type == MemberType.Real32 && member.ArraySize == 3) + { + desc.TangentType = NormalType.Float3; + } + else if (member.Type == MemberType.Real16 && member.ArraySize == 4) + { + desc.TangentType = NormalType.Half4; + } + else if (member.Type == MemberType.BinormalInt8 && member.ArraySize == 4) + { + desc.TangentType = NormalType.Byte4; + } + else + { + throw new Exception($"Unsupported tangent format: {member.Type}, {member.ArraySize}"); + } + break; + + case "Binormal": + if (member.Type == MemberType.Real32 && member.ArraySize == 3) + { + desc.BinormalType = NormalType.Float3; + } + else if (member.Type == MemberType.Real16 && member.ArraySize == 4) + { + 
desc.BinormalType = NormalType.Half4; + } + else if (member.Type == MemberType.BinormalInt8 && member.ArraySize == 4) + { + desc.BinormalType = NormalType.Byte4; + } + else + { + throw new Exception($"Unsupported binormal format: {member.Type}, {member.ArraySize}"); + } + break; + + case "DiffuseColor0": + case "DiffuseColor1": + desc.ColorMaps++; + if (member.Type == MemberType.Real32 && member.ArraySize == 4) + { + desc.ColorMapType = ColorMapType.Float4; + } + else if (member.Type == MemberType.NormalUInt8 && member.ArraySize == 4) + { + desc.ColorMapType = ColorMapType.Byte4; + } + //Some Granny2 model formats report their color maps as UInt8 type instead of NormalUInt8, causing it to fail checks. + else if (member.Type == MemberType.UInt8 && member.ArraySize == 4) + { + desc.ColorMapType = ColorMapType.Byte4; + } + else + { + throw new Exception($"Unsupported color map type: {member.Type}, {member.ArraySize}"); + } + break; - return desc; + case "TextureCoordinates0": + case "TextureCoordinates1": + case "TextureCoordinates2": + case "TextureCoordinates3": + case "TextureCoordinates4": + case "TextureCoordinates5": + desc.TextureCoordinates++; + if (member.Type == MemberType.Real32 && member.ArraySize == 2) + { + desc.TextureCoordinateType = TextureCoordinateType.Float2; + } + else if (member.Type == MemberType.Real16 && member.ArraySize == 2) + { + desc.TextureCoordinateType = TextureCoordinateType.Half2; + } + else + { + throw new Exception($"Unsupported texture coordinate format: {member.Type}, {member.ArraySize}"); + } + break; + + default: + throw new Exception($"Unknown vertex property: {member.Name}"); + } } - public Vertex ReadVertex(GR2Reader reader, VertexDescriptor descriptor) + return desc; + } + + public Vertex ReadVertex(GR2Reader reader, VertexDescriptor descriptor) + { + var vertex = descriptor.CreateInstance(); + vertex.Unserialize(reader); + return vertex; + } + + public object Read(GR2Reader gr2, StructDefinition definition, MemberDefinition member, uint arraySize, object parent) + { + if (!VertexTypeCache.TryGetValue(parent, out VertexDescriptor descriptor)) { - var vertex = descriptor.CreateInstance(); - vertex.Unserialize(reader); - return vertex; + descriptor = ConstructDescriptor(member, definition, parent); + VertexTypeCache.Add(parent, descriptor); } - public object Read(GR2Reader gr2, StructDefinition definition, MemberDefinition member, uint arraySize, object parent) - { - if (!VertexTypeCache.TryGetValue(parent, out VertexDescriptor descriptor)) - { - descriptor = ConstructDescriptor(member, definition, parent); - VertexTypeCache.Add(parent, descriptor); - } + var vertices = new List((int)arraySize); + for (int i = 0; i < arraySize; i++) + vertices.Add(ReadVertex(gr2, descriptor)); + return vertices; + } - var vertices = new List((int)arraySize); - for (int i = 0; i < arraySize; i++) - vertices.Add(ReadVertex(gr2, descriptor)); - return vertices; - } + public void Write(GR2Writer writer, WritableSection section, MemberDefinition member, object obj) + { + var items = obj as List; - public void Write(GR2Writer writer, WritableSection section, MemberDefinition member, object obj) + if (items.Count > 0) { - var items = obj as List; - - if (items.Count > 0) - { - section.StoreObjectOffset(items[0]); - } + section.StoreObjectOffset(items[0]); + } - for (int i = 0; i < items.Count; i++) - { - items[i].Serialize(section); - } + for (int i = 0; i < items.Count; i++) + { + items[i].Serialize(section); } } } From ce594177ce84dff197acc55cee70af0a4986fa25 Mon Sep 17 
00:00:00 2001 From: Norbyte Date: Mon, 11 Dec 2023 11:57:19 +0100 Subject: [PATCH 045/139] Simplify namespace usage --- ConverterApp/ConverterAppSettings.cs | 601 +++-- ConverterApp/DatabaseDumper.cs | 99 +- ConverterApp/DebugDumper.cs | 481 ++-- ConverterApp/Program.cs | 29 +- ConverterApp/VariableDumper.cs | 205 +- DebuggerFrontend/Breakpoints.cs | 479 ++-- DebuggerFrontend/DAPMessageHandler.cs | 1339 +++++----- DebuggerFrontend/DAPProtocol.cs | 2011 ++++++++------- DebuggerFrontend/DAPStream.cs | 289 ++- DebuggerFrontend/DAPUtils.cs | 285 +-- DebuggerFrontend/DatabaseEnumerator.cs | 135 +- DebuggerFrontend/DbgClient.cs | 553 ++-- DebuggerFrontend/DebugInfoLoader.cs | 343 ++- DebuggerFrontend/DebugInfoSync.cs | 205 +- DebuggerFrontend/EvaluationResults.cs | 231 +- DebuggerFrontend/ExpressionEvaluator.cs | 597 +++-- .../ExpressionParser/ExpressionNodes.cs | 141 +- .../ExpressionParser/ExpressionParser.cs | 177 +- DebuggerFrontend/Program.cs | 37 +- DebuggerFrontend/StackTracePrinter.cs | 425 ++- DebuggerFrontend/ValueFormatter.cs | 315 ++- Divine/CLI/CommandLineActions.cs | 365 ++- Divine/CLI/CommandLineArguments.cs | 671 +++-- Divine/CLI/CommandLineDataProcessor.cs | 119 +- Divine/CLI/CommandLineGR2Processor.cs | 155 +- Divine/CLI/CommandLineLogger.cs | 159 +- Divine/CLI/CommandLinePackageProcessor.cs | 267 +- Divine/Program.cs | 55 +- LSLib/Granny/Collada.cs | 129 +- LSLib/Granny/ColladaAnimation.cs | 241 +- LSLib/Granny/GR2/Format.cs | 2019 ++++++++------- LSLib/Granny/GR2/Helpers.cs | 237 +- LSLib/Granny/GR2/Reader.cs | 1615 ++++++------ LSLib/Granny/GR2/Writer.cs | 1603 ++++++------ LSLib/Granny/GR2Utils.cs | 159 +- LSLib/Granny/Model/Animation.cs | 1083 ++++---- LSLib/Granny/Model/ColladaExporter.cs | 1551 ++++++----- LSLib/Granny/Model/ColladaHelpers.cs | 359 ++- LSLib/Granny/Model/ColladaMesh.cs | 991 ++++--- .../Model/CurveData/AnimationCurveData.cs | 448 ++-- LSLib/Granny/Model/CurveData/D3Constant32f.cs | 39 +- LSLib/Granny/Model/CurveData/D3I1K16uC16u.cs | 77 +- LSLib/Granny/Model/CurveData/D3I1K32fC32f.cs | 75 +- LSLib/Granny/Model/CurveData/D3I1K8uC8u.cs | 77 +- LSLib/Granny/Model/CurveData/D3K16uC16u.cs | 77 +- LSLib/Granny/Model/CurveData/D3K8uC8u.cs | 77 +- LSLib/Granny/Model/CurveData/D4Constant32f.cs | 41 +- LSLib/Granny/Model/CurveData/D4nK16uC15u.cs | 207 +- LSLib/Granny/Model/CurveData/D4nK8uC7u.cs | 205 +- LSLib/Granny/Model/CurveData/D9I1K16uC16u.cs | 83 +- LSLib/Granny/Model/CurveData/D9I1K8uC8u.cs | 85 +- LSLib/Granny/Model/CurveData/D9I3K16uC16u.cs | 85 +- LSLib/Granny/Model/CurveData/D9I3K8uC8u.cs | 85 +- LSLib/Granny/Model/CurveData/DaConstant32f.cs | 53 +- LSLib/Granny/Model/CurveData/DaIdentity.cs | 43 +- LSLib/Granny/Model/CurveData/DaK16uC16u.cs | 127 +- LSLib/Granny/Model/CurveData/DaK32fC32f.cs | 221 +- LSLib/Granny/Model/CurveData/DaK8uC8u.cs | 127 +- .../Granny/Model/CurveData/DaKeyframes32f.cs | 213 +- LSLib/Granny/Model/DivinityMesh.cs | 633 +++-- LSLib/Granny/Model/Exporter.cs | 1237 +++++---- LSLib/Granny/Model/HalfHelpers.cs | 285 +-- LSLib/Granny/Model/Mesh.cs | 1169 +++++---- LSLib/Granny/Model/Metadata.cs | 79 +- LSLib/Granny/Model/Model.cs | 29 +- LSLib/Granny/Model/Root.cs | 219 +- LSLib/Granny/Model/Skeleton.cs | 427 ++-- LSLib/Granny/Model/VertexSerialization.cs | 979 ++++--- LSLib/Granny/Utils.cs | 19 +- LSLib/LS/BinUtils.cs | 679 +++-- LSLib/LS/Common.cs | 63 +- LSLib/LS/Enums/CompressionFlags.cs | 15 +- LSLib/LS/Enums/CompressionLevel.cs | 15 +- LSLib/LS/Enums/CompressionMethod.cs | 15 +- LSLib/LS/Enums/Game.cs | 97 +- 
LSLib/LS/Enums/LSFVersion.cs | 121 +- LSLib/LS/Enums/LogLevel.cs | 23 +- LSLib/LS/Enums/PackageVersion.cs | 23 +- LSLib/LS/Enums/ResourceFormat.cs | 17 +- LSLib/LS/FileManager.cs | 81 +- LSLib/LS/Localization.cs | 463 ++-- LSLib/LS/Matrix.cs | 890 ++++--- LSLib/LS/Mods/ModResources.cs | 765 +++--- LSLib/LS/NodeAttribute.cs | 663 +++-- LSLib/LS/PackageCommon.cs | 1139 ++++----- LSLib/LS/PackageReader.cs | 567 ++-- LSLib/LS/PackageWriter.cs | 757 +++--- LSLib/LS/Resource.cs | 373 ++- LSLib/LS/ResourceUtils.cs | 443 ++-- LSLib/LS/Resources/LSB/LSBReader.cs | 337 ++- LSLib/LS/Resources/LSB/LSBWriter.cs | 313 ++- LSLib/LS/Resources/LSF/LSFCommon.cs | 707 +++-- LSLib/LS/Resources/LSF/LSFReader.cs | 945 ++++--- LSLib/LS/Resources/LSF/LSFWriter.cs | 773 +++--- LSLib/LS/Resources/LSJ/LSJReader.cs | 35 +- .../LS/Resources/LSJ/LSJResourceConverter.cs | 1243 +++++---- LSLib/LS/Resources/LSJ/LSJWriter.cs | 37 +- LSLib/LS/Resources/LSX/LSXReader.cs | 449 ++-- LSLib/LS/Resources/LSX/LSXWriter.cs | 213 +- LSLib/LS/Save/SavegameHelpers.cs | 209 +- LSLib/LS/Save/VariableManager.cs | 613 +++-- LSLib/LS/Stats/Parser/PropertyDefinitions.cs | 99 +- LSLib/LS/Stats/Parser/StatLuaParser.cs | 27 +- LSLib/LS/Stats/Parser/StatNodes.cs | 55 +- LSLib/LS/Stats/Parser/StatParser.cs | 389 ++- LSLib/LS/Stats/Parser/StatPropertyParser.cs | 433 ++-- LSLib/LS/Stats/StatDefinitions.cs | 2247 ++++++++-------- LSLib/LS/Stats/StatFileParser.cs | 660 +++-- LSLib/LS/Stats/StatValueParsers.cs | 991 ++++--- LSLib/LS/Story/Adapter.cs | 259 +- LSLib/LS/Story/Call.cs | 163 +- LSLib/LS/Story/Common.cs | 827 +++--- LSLib/LS/Story/Compiler/CompilationContext.cs | 1077 ++++---- LSLib/LS/Story/Compiler/Compiler.cs | 1895 +++++++------- LSLib/LS/Story/Compiler/DebugInfo.cs | 221 +- LSLib/LS/Story/Compiler/HeaderLoader.cs | 185 +- LSLib/LS/Story/Compiler/IR.cs | 583 +++-- LSLib/LS/Story/Compiler/IRGenerator.cs | 469 ++-- LSLib/LS/Story/Compiler/Preprocessor.cs | 95 +- LSLib/LS/Story/Compiler/StoryEmitter.cs | 2277 ++++++++--------- LSLib/LS/Story/DataNode.cs | 69 +- LSLib/LS/Story/Database.cs | 375 ++- LSLib/LS/Story/DatabaseNode.cs | 31 +- LSLib/LS/Story/DebugExport.cs | 883 ++++--- LSLib/LS/Story/Function.cs | 275 +- LSLib/LS/Story/Goal.cs | 233 +- LSLib/LS/Story/GoalParser/ASTNodes.cs | 355 ++- LSLib/LS/Story/GoalParser/GoalParser.cs | 697 +++-- LSLib/LS/Story/HeaderParser/ASTNodes.cs | 219 +- LSLib/LS/Story/HeaderParser/HeaderParser.cs | 193 +- LSLib/LS/Story/Join.cs | 277 +- LSLib/LS/Story/Node.cs | 185 +- LSLib/LS/Story/Proc.cs | 31 +- LSLib/LS/Story/Query.cs | 75 +- LSLib/LS/Story/Reference.cs | 307 ++- LSLib/LS/Story/Rel.cs | 121 +- LSLib/LS/Story/RelOp.cs | 175 +- LSLib/LS/Story/Rule.cs | 369 ++- LSLib/LS/Story/Story.cs | 845 +++--- LSLib/LS/Story/Value.cs | 755 +++--- LSLib/VirtualTextures/BC5Image.cs | 233 +- LSLib/VirtualTextures/Build.cs | 1735 +++++++------ LSLib/VirtualTextures/Compression.cs | 237 +- LSLib/VirtualTextures/Geometry.cs | 259 +- LSLib/VirtualTextures/PageFile.cs | 117 +- LSLib/VirtualTextures/PageFileBuild.cs | 407 ++- LSLib/VirtualTextures/VirtualTexture.cs | 1053 ++++---- .../VirtualTextures/VirtualTextureFormats.cs | 675 +++-- RconClient/DosPackets.cs | 239 +- RconClient/Encapsulation.cs | 205 +- RconClient/RakNetClient.cs | 259 +- RconClient/RakNetCommon.cs | 91 +- RconClient/RakNetPackets.cs | 473 ++-- RconClient/RakNetSession.cs | 419 ++- RconClient/Rcon.cs | 229 +- RconClient/Utils.cs | 77 +- StatParser/Arguments.cs | 71 +- StatParser/Program.cs | 99 +- StatParser/StatChecker.cs | 233 +- 
StoryCompiler/DebugInfoSaver.cs | 311 ++- StoryCompiler/Log.cs | 313 ++- StoryCompiler/ModCompiler.cs | 727 +++--- StoryCompiler/Program.cs | 197 +- StoryDecompiler/Arguments.cs | 41 +- StoryDecompiler/Program.cs | 223 +- VTexTool/Program.cs | 73 +- 166 files changed, 34803 insertions(+), 34974 deletions(-) diff --git a/ConverterApp/ConverterAppSettings.cs b/ConverterApp/ConverterAppSettings.cs index 3dfb1464..94d8863b 100644 --- a/ConverterApp/ConverterAppSettings.cs +++ b/ConverterApp/ConverterAppSettings.cs @@ -9,404 +9,403 @@ using System.Text; using System.Threading.Tasks; -namespace ConverterApp +namespace ConverterApp; + +public interface ISettingsDataSource +{ + ConverterAppSettings Settings { get; set; } +} + +public class SettingsBase : INotifyPropertyChanged { - public interface ISettingsDataSource + public event PropertyChangedEventHandler PropertyChanged; + + protected virtual void OnPropertyChanged([CallerMemberName] string propertyName = null) { - ConverterAppSettings Settings { get; set; } + var handler = PropertyChanged; + if (handler != null) handler(this, new PropertyChangedEventArgs(propertyName)); } +} - public class SettingsBase : INotifyPropertyChanged - { - public event PropertyChangedEventHandler PropertyChanged; +public class ConverterAppSettings : SettingsBase +{ + private GR2PaneSettings gr2; - protected virtual void OnPropertyChanged([CallerMemberName] string propertyName = null) - { - var handler = PropertyChanged; - if (handler != null) handler(this, new PropertyChangedEventArgs(propertyName)); - } + public GR2PaneSettings GR2 + { + get { return gr2; } + set { gr2 = value; } } - public class ConverterAppSettings : SettingsBase + private PackagePaneSettings pakSettings; + + public PackagePaneSettings PAK { - private GR2PaneSettings gr2; + get { return pakSettings; } + set { pakSettings = value; } + } - public GR2PaneSettings GR2 - { - get { return gr2; } - set { gr2 = value; } - } + private ResourcePaneSettings resourceSettings; - private PackagePaneSettings pakSettings; + public ResourcePaneSettings Resources + { + get { return resourceSettings; } + set { resourceSettings = value; } + } - public PackagePaneSettings PAK - { - get { return pakSettings; } - set { pakSettings = value; } - } + private VirtualTexturesPaneSettings virtualTextureSettings; + public VirtualTexturesPaneSettings VirtualTextures + { + get { return virtualTextureSettings; } + set { virtualTextureSettings = value; } + } - private ResourcePaneSettings resourceSettings; + private OsirisPaneSettings storySettings; - public ResourcePaneSettings Resources - { - get { return resourceSettings; } - set { resourceSettings = value; } - } - - private VirtualTexturesPaneSettings virtualTextureSettings; - public VirtualTexturesPaneSettings VirtualTextures - { - get { return virtualTextureSettings; } - set { virtualTextureSettings = value; } - } + public OsirisPaneSettings Story + { + get { return storySettings; } + set { storySettings = value; } + } - private OsirisPaneSettings storySettings; + private DebugPaneSettings debugSettings; - public OsirisPaneSettings Story - { - get { return storySettings; } - set { storySettings = value; } - } + public DebugPaneSettings Debugging + { + get { return debugSettings; } + set { debugSettings = value; } + } - private DebugPaneSettings debugSettings; + private Game selectedGame = Game.BaldursGate3; - public DebugPaneSettings Debugging - { - get { return debugSettings; } - set { debugSettings = value; } - } + public int SelectedGame + { + get { return 
(int)selectedGame; } + set { selectedGame = (Game)value; OnPropertyChanged(); } + } - private Game selectedGame = Game.BaldursGate3; + private string version = ""; - public int SelectedGame - { - get { return (int)selectedGame; } - set { selectedGame = (Game)value; OnPropertyChanged(); } - } + public string Version + { + get { return version; } + set { version = value; } + } - private string version = ""; + public void SetPropertyChangedEvent(PropertyChangedEventHandler eventHandler) + { + this.PropertyChanged += eventHandler; + GR2.PropertyChanged += eventHandler; + PAK.PropertyChanged += eventHandler; + Resources.PropertyChanged += eventHandler; + Story.PropertyChanged += eventHandler; + } - public string Version - { - get { return version; } - set { version = value; } - } + public ConverterAppSettings() + { + GR2 = new GR2PaneSettings(); + PAK = new PackagePaneSettings(); + Resources = new ResourcePaneSettings(); + VirtualTextures = new VirtualTexturesPaneSettings(); + Story = new OsirisPaneSettings(); + Debugging = new DebugPaneSettings(); + } +} - public void SetPropertyChangedEvent(PropertyChangedEventHandler eventHandler) - { - this.PropertyChanged += eventHandler; - GR2.PropertyChanged += eventHandler; - PAK.PropertyChanged += eventHandler; - Resources.PropertyChanged += eventHandler; - Story.PropertyChanged += eventHandler; - } +public class GR2PaneSettings : SettingsBase +{ + private string inputPath = ""; - public ConverterAppSettings() - { - GR2 = new GR2PaneSettings(); - PAK = new PackagePaneSettings(); - Resources = new ResourcePaneSettings(); - VirtualTextures = new VirtualTexturesPaneSettings(); - Story = new OsirisPaneSettings(); - Debugging = new DebugPaneSettings(); - } + public string InputPath + { + get { return inputPath; } + set { inputPath = value; OnPropertyChanged(); } } - public class GR2PaneSettings : SettingsBase - { - private string inputPath = ""; + private string outputPath = ""; - public string InputPath - { - get { return inputPath; } - set { inputPath = value; OnPropertyChanged(); } - } + public string OutputPath + { + get { return outputPath; } + set { outputPath = value; OnPropertyChanged(); } + } - private string outputPath = ""; + private string batchInputPath = ""; - public string OutputPath - { - get { return outputPath; } - set { outputPath = value; OnPropertyChanged(); } - } + public string BatchInputPath + { + get { return batchInputPath; } + set { batchInputPath = value; OnPropertyChanged(); } + } - private string batchInputPath = ""; + private string batchOutputPath = ""; - public string BatchInputPath - { - get { return batchInputPath; } - set { batchInputPath = value; OnPropertyChanged(); } - } + public string BatchOutputPath + { + get { return batchOutputPath; } + set { batchOutputPath = value; OnPropertyChanged(); } + } - private string batchOutputPath = ""; + private ExportFormat batchInputFormat = ExportFormat.GR2; - public string BatchOutputPath - { - get { return batchOutputPath; } - set { batchOutputPath = value; OnPropertyChanged(); } - } + public int BatchInputFormat + { + get { return (int)batchInputFormat; } + set { batchInputFormat = (ExportFormat)value; OnPropertyChanged(); } + } - private ExportFormat batchInputFormat = ExportFormat.GR2; + private ExportFormat batchOutputFormat = ExportFormat.DAE; - public int BatchInputFormat - { - get { return (int)batchInputFormat; } - set { batchInputFormat = (ExportFormat)value; OnPropertyChanged(); } - } + public int BatchOutputFormat + { + get { return (int)batchOutputFormat; } + set { 
batchOutputFormat = (ExportFormat)value; OnPropertyChanged(); } + } - private ExportFormat batchOutputFormat = ExportFormat.DAE; + private string conformPath; - public int BatchOutputFormat - { - get { return (int)batchOutputFormat; } - set { batchOutputFormat = (ExportFormat)value; OnPropertyChanged(); } - } + public string ConformPath + { + get { return conformPath; } + set { conformPath = value; OnPropertyChanged(); } + } - private string conformPath; +} - public string ConformPath - { - get { return conformPath; } - set { conformPath = value; OnPropertyChanged(); } - } +public class PackagePaneSettings : SettingsBase +{ + private string extractInputPath = ""; + public string ExtractInputPath + { + get { return extractInputPath; } + set { extractInputPath = value; OnPropertyChanged(); } } - public class PackagePaneSettings : SettingsBase - { - private string extractInputPath = ""; + private string extractOutputPath = ""; - public string ExtractInputPath - { - get { return extractInputPath; } - set { extractInputPath = value; OnPropertyChanged(); } - } + public string ExtractOutputPath + { + get { return extractOutputPath; } + set { extractOutputPath = value; OnPropertyChanged(); } + } - private string extractOutputPath = ""; + private string createInputPath = ""; - public string ExtractOutputPath - { - get { return extractOutputPath; } - set { extractOutputPath = value; OnPropertyChanged(); } - } + public string CreateInputPath + { + get { return createInputPath; } + set { createInputPath = value; OnPropertyChanged(); } + } - private string createInputPath = ""; + private string createOutputPath = ""; - public string CreateInputPath - { - get { return createInputPath; } - set { createInputPath = value; OnPropertyChanged(); } - } + public string CreateOutputPath + { + get { return createOutputPath; } + set { createOutputPath = value; OnPropertyChanged(); } + } - private string createOutputPath = ""; + private int createPackageVersion = 0; - public string CreateOutputPath - { - get { return createOutputPath; } - set { createOutputPath = value; OnPropertyChanged(); } - } + public int CreatePackageVersion + { + get { return createPackageVersion; } + set { createPackageVersion = value; OnPropertyChanged(); } + } - private int createPackageVersion = 0; + private int createPackageCompression = 3; - public int CreatePackageVersion - { - get { return createPackageVersion; } - set { createPackageVersion = value; OnPropertyChanged(); } - } + public int CreatePackageCompression + { + get { return createPackageCompression; } + set { createPackageCompression = value; OnPropertyChanged(); } + } - private int createPackageCompression = 3; + //public string BatchInputPath { get; set; } = ""; + //public string BatchOutputPath { get; set; } = ""; +} - public int CreatePackageCompression - { - get { return createPackageCompression; } - set { createPackageCompression = value; OnPropertyChanged(); } - } +public class ResourcePaneSettings : SettingsBase +{ + private string inputPath = ""; - //public string BatchInputPath { get; set; } = ""; - //public string BatchOutputPath { get; set; } = ""; + public string InputPath + { + get { return inputPath; } + set { inputPath = value; OnPropertyChanged(); } } - public class ResourcePaneSettings : SettingsBase - { - private string inputPath = ""; + private string outputPath = ""; - public string InputPath - { - get { return inputPath; } - set { inputPath = value; OnPropertyChanged(); } - } + public string OutputPath + { + get { return outputPath; } + set { outputPath 
= value; OnPropertyChanged(); } + } - private string outputPath = ""; + private string batchInputPath = ""; - public string OutputPath - { - get { return outputPath; } - set { outputPath = value; OnPropertyChanged(); } - } + public string BatchInputPath + { + get { return batchInputPath; } + set { batchInputPath = value; OnPropertyChanged(); } + } - private string batchInputPath = ""; + private string batchOutputPath = ""; - public string BatchInputPath - { - get { return batchInputPath; } - set { batchInputPath = value; OnPropertyChanged(); } - } + public string BatchOutputPath + { + get { return batchOutputPath; } + set { batchOutputPath = value; OnPropertyChanged(); } + } - private string batchOutputPath = ""; + private int batchInputFormat; - public string BatchOutputPath - { - get { return batchOutputPath; } - set { batchOutputPath = value; OnPropertyChanged(); } - } + public int BatchInputFormat + { + get { return batchInputFormat; } + set { batchInputFormat = value; OnPropertyChanged(); } + } - private int batchInputFormat; + private int batchOutputFormat; - public int BatchInputFormat - { - get { return batchInputFormat; } - set { batchInputFormat = value; OnPropertyChanged(); } - } + public int BatchOutputFormat + { + get { return batchOutputFormat; } + set { batchOutputFormat = value; OnPropertyChanged(); } + } +} - private int batchOutputFormat; +public class VirtualTexturesPaneSettings : SettingsBase +{ + private string gtsPath = ""; - public int BatchOutputFormat - { - get { return batchOutputFormat; } - set { batchOutputFormat = value; OnPropertyChanged(); } - } + public string GTSPath + { + get { return gtsPath; } + set { gtsPath = value; OnPropertyChanged(); } } - public class VirtualTexturesPaneSettings : SettingsBase - { - private string gtsPath = ""; + private string destinationPath = ""; - public string GTSPath - { - get { return gtsPath; } - set { gtsPath = value; OnPropertyChanged(); } - } + public string DestinationPath + { + get { return destinationPath; } + set { destinationPath = value; OnPropertyChanged(); } + } +} - private string destinationPath = ""; +public class OsirisPaneSettings : SettingsBase +{ + private string inputPath = ""; - public string DestinationPath - { - get { return destinationPath; } - set { destinationPath = value; OnPropertyChanged(); } - } + public string InputPath + { + get { return inputPath; } + set { inputPath = value; OnPropertyChanged(); } } - public class OsirisPaneSettings : SettingsBase - { - private string inputPath = ""; + private string outputPath = ""; - public string InputPath - { - get { return inputPath; } - set { inputPath = value; OnPropertyChanged(); } - } + public string OutputPath + { + get { return outputPath; } + set { outputPath = value; OnPropertyChanged(); } + } - private string outputPath = ""; + private string filterText = ""; - public string OutputPath - { - get { return outputPath; } - set { outputPath = value; OnPropertyChanged(); } - } + public string FilterText + { + get { return filterText; } + set { filterText = value; OnPropertyChanged(); } + } - private string filterText = ""; + private bool filterMatchCase = false; - public string FilterText - { - get { return filterText; } - set { filterText = value; OnPropertyChanged(); } - } + public bool FilterMatchCase + { + get { return filterMatchCase; } + set { filterMatchCase = value; OnPropertyChanged(); } + } +} - private bool filterMatchCase = false; +public class DebugPaneSettings : SettingsBase +{ + private string savePath = ""; - public bool 
FilterMatchCase - { - get { return filterMatchCase; } - set { filterMatchCase = value; OnPropertyChanged(); } - } + public string SavePath + { + get { return savePath; } + set { savePath = value; OnPropertyChanged(); } } +} - public class DebugPaneSettings : SettingsBase +sealed class PackageVersionConverter : TypeConverter +{ + public override bool CanConvertTo(ITypeDescriptorContext context, Type destinationType) { - private string savePath = ""; - - public string SavePath - { - get { return savePath; } - set { savePath = value; OnPropertyChanged(); } - } + return true; } - sealed class PackageVersionConverter : TypeConverter + public override object ConvertTo(ITypeDescriptorContext context, System.Globalization.CultureInfo culture, object value, Type destinationType) { - public override bool CanConvertTo(ITypeDescriptorContext context, Type destinationType) - { - return true; - } - - public override object ConvertTo(ITypeDescriptorContext context, System.Globalization.CultureInfo culture, object value, Type destinationType) + if(value is PackageVersion version) { - if(value is PackageVersion version) + switch (version) { - switch (version) - { - case PackageVersion.V10: - { - return 2; - } - case PackageVersion.V9: - { - return 3; - } - case PackageVersion.V7: - { - return 4; - } - case PackageVersion.V13: - default: - { - return 0; - } - } + case PackageVersion.V10: + { + return 2; + } + case PackageVersion.V9: + { + return 3; + } + case PackageVersion.V7: + { + return 4; + } + case PackageVersion.V13: + default: + { + return 0; + } } - return 0; } + return 0; } +} - sealed class CompressionConverter : TypeConverter +sealed class CompressionConverter : TypeConverter +{ + public override bool CanConvertTo(ITypeDescriptorContext context, Type destinationType) { - public override bool CanConvertTo(ITypeDescriptorContext context, Type destinationType) - { - return true; - } + return true; + } - public override object ConvertTo(ITypeDescriptorContext context, System.Globalization.CultureInfo culture, object value, Type destinationType) + public override object ConvertTo(ITypeDescriptorContext context, System.Globalization.CultureInfo culture, object value, Type destinationType) + { + if (value is CompressionMethod compression) { - if (value is CompressionMethod compression) + switch (compression) { - switch (compression) - { - case CompressionMethod.Zlib: - { - return 1; - } - case CompressionMethod.None: - { - return 0; - } - case CompressionMethod.LZ4: - default: - { - return 3; - } - } + case CompressionMethod.Zlib: + { + return 1; + } + case CompressionMethod.None: + { + return 0; + } + case CompressionMethod.LZ4: + default: + { + return 3; + } } - return 0; } + return 0; } } diff --git a/ConverterApp/DatabaseDumper.cs b/ConverterApp/DatabaseDumper.cs index 648da830..096ca76e 100644 --- a/ConverterApp/DatabaseDumper.cs +++ b/ConverterApp/DatabaseDumper.cs @@ -4,77 +4,76 @@ using System.Linq; using System.Text; -namespace ConverterApp +namespace ConverterApp; + +class DatabaseDumper : IDisposable { - class DatabaseDumper : IDisposable - { - private StreamWriter Writer; + private StreamWriter Writer; - public bool DumpUnnamedDbs { get; set; } + public bool DumpUnnamedDbs { get; set; } - public DatabaseDumper(Stream outputStream) - { - Writer = new StreamWriter(outputStream, Encoding.UTF8); - DumpUnnamedDbs = false; - } + public DatabaseDumper(Stream outputStream) + { + Writer = new StreamWriter(outputStream, Encoding.UTF8); + DumpUnnamedDbs = false; + } - public void Dispose() - { - 
Writer.Dispose(); - } - - private void DumpFact(Story story, Fact fact) + public void Dispose() + { + Writer.Dispose(); + } + + private void DumpFact(Story story, Fact fact) + { + Writer.Write("("); + for (var i = 0; i < fact.Columns.Count; i++) { - Writer.Write("("); - for (var i = 0; i < fact.Columns.Count; i++) + fact.Columns[i].DebugDump(Writer, story); + if (i + 1 < fact.Columns.Count) { - fact.Columns[i].DebugDump(Writer, story); - if (i + 1 < fact.Columns.Count) - { - Writer.Write(", "); - } + Writer.Write(", "); } - Writer.WriteLine(")"); } + Writer.WriteLine(")"); + } - public void DumpDatabase(Story story, Database database) + public void DumpDatabase(Story story, Database database) + { + if (database.OwnerNode != null) { - if (database.OwnerNode != null) + if (database.OwnerNode.Name.Length > 0) { - if (database.OwnerNode.Name.Length > 0) - { - Writer.Write($"Database '{database.OwnerNode.Name}'"); - } - else - { - Writer.Write($"Database #{database.Index} <{database.OwnerNode.TypeName()}>"); - } + Writer.Write($"Database '{database.OwnerNode.Name}'"); } else { - Writer.Write($"Database #{database.Index}"); + Writer.Write($"Database #{database.Index} <{database.OwnerNode.TypeName()}>"); } + } + else + { + Writer.Write($"Database #{database.Index}"); + } - var types = String.Join(", ", database.Parameters.Types.Select(ty => story.Types[ty].Name)); - Writer.WriteLine($" ({types}):"); + var types = String.Join(", ", database.Parameters.Types.Select(ty => story.Types[ty].Name)); + Writer.WriteLine($" ({types}):"); - foreach (var fact in database.Facts) - { - Writer.Write("\t"); - DumpFact(story, fact); - } + foreach (var fact in database.Facts) + { + Writer.Write("\t"); + DumpFact(story, fact); } + } - public void DumpAll(Story story) + public void DumpAll(Story story) + { + Writer.WriteLine(" === DUMP OF DATABASES === "); + foreach (var db in story.Databases) { - Writer.WriteLine(" === DUMP OF DATABASES === "); - foreach (var db in story.Databases) + if (DumpUnnamedDbs || (db.Value.OwnerNode != null && db.Value.OwnerNode.Name.Length > 0)) { - if (DumpUnnamedDbs || (db.Value.OwnerNode != null && db.Value.OwnerNode.Name.Length > 0)) - { - DumpDatabase(story, db.Value); - Writer.WriteLine(""); - } + DumpDatabase(story, db.Value); + Writer.WriteLine(""); } } } diff --git a/ConverterApp/DebugDumper.cs b/ConverterApp/DebugDumper.cs index 85db14f4..c847c8d9 100644 --- a/ConverterApp/DebugDumper.cs +++ b/ConverterApp/DebugDumper.cs @@ -7,309 +7,308 @@ using System.Linq; using System.Windows.Forms; -namespace ConverterApp +namespace ConverterApp; + +public delegate void DumugDumperReportProgress(int percentage, string statusText); + +public class DebugDumperTask { - public delegate void DumugDumperReportProgress(int percentage, string statusText); + private Package SavePackage; + private Resource SaveMeta; + private Resource SaveGlobals; + private Story SaveStory; + + public Game GameVersion { get; set; } + public string SaveFilePath { get; set; } + public string ExtractionPath { get; set; } + public string DataDumpPath { get; set; } + + // General savegame dumping settings + public bool ExtractAll { get; set; } + public bool ConvertToLsx { get; set; } + public bool DumpModList { get; set; } + + // Behavior variable dumping settings + public bool DumpGlobalVars { get; set; } + public bool DumpCharacterVars { get; set; } + public bool DumpItemVars { get; set; } + public bool IncludeDeletedVars { get; set; } + public bool IncludeLocalScopes { get; set; } + + // Story dump settings + public 
bool DumpStoryDatabases { get; set; } + public bool DumpStoryGoals { get; set; } + public bool IncludeUnnamedDatabases { get; set; } + + public event DumugDumperReportProgress ReportProgress; + + public DebugDumperTask() + { + ExtractAll = true; + ConvertToLsx = true; + DumpModList = true; + + DumpGlobalVars = true; + DumpCharacterVars = true; + DumpItemVars = true; + IncludeDeletedVars = false; + IncludeLocalScopes = false; + // TODO ------------------ RE-IMPORTABLE VARS/DBS FORMAT ---------------------- + + DumpStoryDatabases = true; + DumpStoryGoals = true; + IncludeUnnamedDatabases = false; + } + + private void DoExtractPackage() + { + var packager = new Packager(); + packager.ProgressUpdate = (file, numerator, denominator, fileInfo) => { + ReportProgress(5 + (int)(numerator * 15 / denominator), "Extracting: " + file); + }; + packager.UncompressPackage(SavePackage, ExtractionPath); + } - public class DebugDumperTask + private void DoLsxConversion() { - private Package SavePackage; - private Resource SaveMeta; - private Resource SaveGlobals; - private Story SaveStory; - - public Game GameVersion { get; set; } - public string SaveFilePath { get; set; } - public string ExtractionPath { get; set; } - public string DataDumpPath { get; set; } - - // General savegame dumping settings - public bool ExtractAll { get; set; } - public bool ConvertToLsx { get; set; } - public bool DumpModList { get; set; } - - // Behavior variable dumping settings - public bool DumpGlobalVars { get; set; } - public bool DumpCharacterVars { get; set; } - public bool DumpItemVars { get; set; } - public bool IncludeDeletedVars { get; set; } - public bool IncludeLocalScopes { get; set; } - - // Story dump settings - public bool DumpStoryDatabases { get; set; } - public bool DumpStoryGoals { get; set; } - public bool IncludeUnnamedDatabases { get; set; } - - public event DumugDumperReportProgress ReportProgress; - - public DebugDumperTask() + var conversionParams = ResourceConversionParameters.FromGameVersion(GameVersion); + var loadParams = ResourceLoadParameters.FromGameVersion(GameVersion); + + var lsfList = SavePackage.Files.Where(p => p.Name.EndsWith(".lsf")); + var numProcessed = 0; + foreach (var lsf in lsfList) { - ExtractAll = true; - ConvertToLsx = true; - DumpModList = true; - - DumpGlobalVars = true; - DumpCharacterVars = true; - DumpItemVars = true; - IncludeDeletedVars = false; - IncludeLocalScopes = false; - // TODO ------------------ RE-IMPORTABLE VARS/DBS FORMAT ---------------------- - - DumpStoryDatabases = true; - DumpStoryGoals = true; - IncludeUnnamedDatabases = false; + var lsfPath = Path.Combine(ExtractionPath, lsf.Name); + var lsxPath = Path.Combine(ExtractionPath, lsf.Name.Substring(0, lsf.Name.Length - 4) + ".lsx"); + + ReportProgress(20 + (numProcessed * 30 / lsfList.Count()), "Converting to LSX: " + lsf.Name); + var resource = ResourceUtils.LoadResource(lsfPath, ResourceFormat.LSF, loadParams); + ResourceUtils.SaveResource(resource, lsxPath, ResourceFormat.LSX, conversionParams); + numProcessed++; } + } - private void DoExtractPackage() + private Resource LoadPackagedResource(string path) + { + AbstractFileInfo fileInfo = SavePackage.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == path); + if (fileInfo == null) { - var packager = new Packager(); - packager.ProgressUpdate = (file, numerator, denominator, fileInfo) => { - ReportProgress(5 + (int)(numerator * 15 / denominator), "Extracting: " + file); - }; - packager.UncompressPackage(SavePackage, ExtractionPath); + throw new 
ArgumentException($"Could not locate file in package: '{path}"); } - private void DoLsxConversion() + Resource resource; + Stream rsrcStream = fileInfo.MakeStream(); + try { - var conversionParams = ResourceConversionParameters.FromGameVersion(GameVersion); - var loadParams = ResourceLoadParameters.FromGameVersion(GameVersion); - - var lsfList = SavePackage.Files.Where(p => p.Name.EndsWith(".lsf")); - var numProcessed = 0; - foreach (var lsf in lsfList) + using (var rsrcReader = new LSFReader(rsrcStream)) { - var lsfPath = Path.Combine(ExtractionPath, lsf.Name); - var lsxPath = Path.Combine(ExtractionPath, lsf.Name.Substring(0, lsf.Name.Length - 4) + ".lsx"); - - ReportProgress(20 + (numProcessed * 30 / lsfList.Count()), "Converting to LSX: " + lsf.Name); - var resource = ResourceUtils.LoadResource(lsfPath, ResourceFormat.LSF, loadParams); - ResourceUtils.SaveResource(resource, lsxPath, ResourceFormat.LSX, conversionParams); - numProcessed++; + resource = rsrcReader.Read(); } } - - private Resource LoadPackagedResource(string path) + finally { - AbstractFileInfo fileInfo = SavePackage.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == path); - if (fileInfo == null) - { - throw new ArgumentException($"Could not locate file in package: '{path}"); - } + fileInfo.ReleaseStream(); + } + + return resource; + } - Resource resource; - Stream rsrcStream = fileInfo.MakeStream(); - try + private void DumpMods(string outputPath) + { + using (var outputStream = new FileStream(outputPath, FileMode.Create, FileAccess.Write, FileShare.Read)) + using (var writer = new StreamWriter(outputStream)) + { + var meta = SaveMeta.Regions["MetaData"].Children["MetaData"][0]; + var moduleDescs = meta.Children["ModuleSettings"][0].Children["Mods"][0].Children["ModuleShortDesc"]; + foreach (var modDesc in moduleDescs) { - using (var rsrcReader = new LSFReader(rsrcStream)) + var folder = (string)modDesc.Attributes["Folder"].Value; + var name = (string)modDesc.Attributes["Name"].Value; + PackedVersion version; + if (modDesc.Attributes.ContainsKey("Version64")) { - resource = rsrcReader.Read(); + var versionNum = (Int64)modDesc.Attributes["Version64"].Value; + version = PackedVersion.FromInt64(versionNum); + } + else + { + var versionNum = (Int32)modDesc.Attributes["Version"].Value; + version = PackedVersion.FromInt32(versionNum); } - } - finally - { - fileInfo.ReleaseStream(); - } - return resource; + writer.WriteLine($"{name} (v{version.Major}.{version.Minor}.{version.Revision}.{version.Build}) @ {folder}"); + } } + } - private void DumpMods(string outputPath) + private void DumpVariables(string outputPath, bool globals, bool characters, bool items) + { + using (var outputStream = new FileStream(outputPath, FileMode.Create, FileAccess.Write, FileShare.Read)) { - using (var outputStream = new FileStream(outputPath, FileMode.Create, FileAccess.Write, FileShare.Read)) - using (var writer = new StreamWriter(outputStream)) + var varDumper = new VariableDumper(outputStream); + varDumper.IncludeDeletedVars = IncludeDeletedVars; + varDumper.IncludeLocalScopes = IncludeLocalScopes; + if (varDumper.Load(SaveGlobals)) { - var meta = SaveMeta.Regions["MetaData"].Children["MetaData"][0]; - var moduleDescs = meta.Children["ModuleSettings"][0].Children["Mods"][0].Children["ModuleShortDesc"]; - foreach (var modDesc in moduleDescs) + if (globals) { - var folder = (string)modDesc.Attributes["Folder"].Value; - var name = (string)modDesc.Attributes["Name"].Value; - PackedVersion version; - if 
(modDesc.Attributes.ContainsKey("Version64")) - { - var versionNum = (Int64)modDesc.Attributes["Version64"].Value; - version = PackedVersion.FromInt64(versionNum); - } - else - { - var versionNum = (Int32)modDesc.Attributes["Version"].Value; - version = PackedVersion.FromInt32(versionNum); - } - - writer.WriteLine($"{name} (v{version.Major}.{version.Minor}.{version.Revision}.{version.Build}) @ {folder}"); + varDumper.DumpGlobals(); } - } - } - private void DumpVariables(string outputPath, bool globals, bool characters, bool items) - { - using (var outputStream = new FileStream(outputPath, FileMode.Create, FileAccess.Write, FileShare.Read)) - { - var varDumper = new VariableDumper(outputStream); - varDumper.IncludeDeletedVars = IncludeDeletedVars; - varDumper.IncludeLocalScopes = IncludeLocalScopes; - if (varDumper.Load(SaveGlobals)) + if (characters) + { + varDumper.DumpCharacters(); + } + + if (items) { - if (globals) - { - varDumper.DumpGlobals(); - } - - if (characters) - { - varDumper.DumpCharacters(); - } - - if (items) - { - varDumper.DumpItems(); - } + varDumper.DumpItems(); } } } + } - private void DumpGoals() + private void DumpGoals() + { + ReportProgress(80, "Dumping story ..."); + string debugPath = Path.Combine(DataDumpPath, "GoalsDebug.log"); + using (var debugFile = new FileStream(debugPath, FileMode.Create, FileAccess.Write)) + using (var writer = new StreamWriter(debugFile)) { - ReportProgress(80, "Dumping story ..."); - string debugPath = Path.Combine(DataDumpPath, "GoalsDebug.log"); - using (var debugFile = new FileStream(debugPath, FileMode.Create, FileAccess.Write)) - using (var writer = new StreamWriter(debugFile)) - { - SaveStory.DebugDump(writer); - } + SaveStory.DebugDump(writer); + } - ReportProgress(85, "Dumping story goals ..."); - string goalsPath = Path.Combine(DataDumpPath, "Goals"); - FileManager.TryToCreateDirectory(Path.Combine(goalsPath, "Dummy")); + ReportProgress(85, "Dumping story goals ..."); + string goalsPath = Path.Combine(DataDumpPath, "Goals"); + FileManager.TryToCreateDirectory(Path.Combine(goalsPath, "Dummy")); - string unassignedPath = Path.Combine(goalsPath, "UNASSIGNED_RULES.txt"); - using (var goalFile = new FileStream(unassignedPath, FileMode.Create, FileAccess.Write)) - using (var writer = new StreamWriter(goalFile)) + string unassignedPath = Path.Combine(goalsPath, "UNASSIGNED_RULES.txt"); + using (var goalFile = new FileStream(unassignedPath, FileMode.Create, FileAccess.Write)) + using (var writer = new StreamWriter(goalFile)) + { + var dummyGoal = new Goal(SaveStory) { - var dummyGoal = new Goal(SaveStory) - { - ExitCalls = new List(), - InitCalls = new List(), - ParentGoals = new List(), - SubGoals = new List(), - Name = "UNASSIGNED_RULES", - Index = 0 - }; - dummyGoal.MakeScript(writer, SaveStory); - } + ExitCalls = new List(), + InitCalls = new List(), + ParentGoals = new List(), + SubGoals = new List(), + Name = "UNASSIGNED_RULES", + Index = 0 + }; + dummyGoal.MakeScript(writer, SaveStory); + } - foreach (KeyValuePair goal in SaveStory.Goals) + foreach (KeyValuePair goal in SaveStory.Goals) + { + string filePath = Path.Combine(goalsPath, $"{goal.Value.Name}.txt"); + using (var goalFile = new FileStream(filePath, FileMode.Create, FileAccess.Write)) + using (var writer = new StreamWriter(goalFile)) { - string filePath = Path.Combine(goalsPath, $"{goal.Value.Name}.txt"); - using (var goalFile = new FileStream(filePath, FileMode.Create, FileAccess.Write)) - using (var writer = new StreamWriter(goalFile)) - { - 
goal.Value.MakeScript(writer, SaveStory); - } + goal.Value.MakeScript(writer, SaveStory); } } + } - private void RunTasks() + private void RunTasks() + { + if (ExtractAll) { - if (ExtractAll) - { - DoExtractPackage(); - } + DoExtractPackage(); + } - if (ConvertToLsx) - { - DoLsxConversion(); - } + if (ConvertToLsx) + { + DoLsxConversion(); + } - FileManager.TryToCreateDirectory(Path.Combine(DataDumpPath, "Dummy")); + FileManager.TryToCreateDirectory(Path.Combine(DataDumpPath, "Dummy")); - ReportProgress(50, "Loading meta.lsf ..."); - SaveMeta = LoadPackagedResource("meta.lsf"); + ReportProgress(50, "Loading meta.lsf ..."); + SaveMeta = LoadPackagedResource("meta.lsf"); - ReportProgress(52, "Loading globals.lsf ..."); - SaveGlobals = LoadPackagedResource("globals.lsf"); + ReportProgress(52, "Loading globals.lsf ..."); + SaveGlobals = LoadPackagedResource("globals.lsf"); - ReportProgress(60, "Dumping mod list ..."); - if (DumpModList) - { - var modListPath = Path.Combine(DataDumpPath, "ModList.txt"); - DumpMods(modListPath); - } + ReportProgress(60, "Dumping mod list ..."); + if (DumpModList) + { + var modListPath = Path.Combine(DataDumpPath, "ModList.txt"); + DumpMods(modListPath); + } - ReportProgress(62, "Dumping variables ..."); - if (DumpGlobalVars) - { - var varsPath = Path.Combine(DataDumpPath, "GlobalVars.txt"); - DumpVariables(varsPath, true, false, false); - } + ReportProgress(62, "Dumping variables ..."); + if (DumpGlobalVars) + { + var varsPath = Path.Combine(DataDumpPath, "GlobalVars.txt"); + DumpVariables(varsPath, true, false, false); + } - if (DumpCharacterVars) - { - var varsPath = Path.Combine(DataDumpPath, "CharacterVars.txt"); - DumpVariables(varsPath, false, true, false); - } + if (DumpCharacterVars) + { + var varsPath = Path.Combine(DataDumpPath, "CharacterVars.txt"); + DumpVariables(varsPath, false, true, false); + } - if (DumpItemVars) - { - var varsPath = Path.Combine(DataDumpPath, "ItemVars.txt"); - DumpVariables(varsPath, false, false, true); - } + if (DumpItemVars) + { + var varsPath = Path.Combine(DataDumpPath, "ItemVars.txt"); + DumpVariables(varsPath, false, false, true); + } - ReportProgress(70, "Loading story ..."); - AbstractFileInfo storySave = SavePackage.Files.FirstOrDefault(p => p.Name == "StorySave.bin"); - Stream storyStream; - if (storySave != null) - { - storyStream = storySave.MakeStream(); - } - else - { - LSLib.LS.Node storyNode = SaveGlobals.Regions["Story"].Children["Story"][0]; - storyStream = new MemoryStream(storyNode.Attributes["Story"].Value as byte[]); - } + ReportProgress(70, "Loading story ..."); + AbstractFileInfo storySave = SavePackage.Files.FirstOrDefault(p => p.Name == "StorySave.bin"); + Stream storyStream; + if (storySave != null) + { + storyStream = storySave.MakeStream(); + } + else + { + LSLib.LS.Node storyNode = SaveGlobals.Regions["Story"].Children["Story"][0]; + storyStream = new MemoryStream(storyNode.Attributes["Story"].Value as byte[]); + } - var reader = new StoryReader(); - SaveStory = reader.Read(storyStream); + var reader = new StoryReader(); + SaveStory = reader.Read(storyStream); - if (DumpStoryGoals) - { - DumpGoals(); - } + if (DumpStoryGoals) + { + DumpGoals(); + } - if (DumpStoryDatabases) + if (DumpStoryDatabases) + { + ReportProgress(90, "Dumping databases ..."); + var dbDumpPath = Path.Combine(DataDumpPath, "Databases.txt"); + using (var dbDumpStream = new FileStream(dbDumpPath, FileMode.Create, FileAccess.Write, FileShare.Read)) { - ReportProgress(90, "Dumping databases ..."); - var dbDumpPath = 
Path.Combine(DataDumpPath, "Databases.txt"); - using (var dbDumpStream = new FileStream(dbDumpPath, FileMode.Create, FileAccess.Write, FileShare.Read)) - { - var dbDumper = new DatabaseDumper(dbDumpStream); - dbDumper.DumpUnnamedDbs = IncludeUnnamedDatabases; - dbDumper.DumpAll(SaveStory); - } + var dbDumper = new DatabaseDumper(dbDumpStream); + dbDumper.DumpUnnamedDbs = IncludeUnnamedDatabases; + dbDumper.DumpAll(SaveStory); } - - ReportProgress(100, ""); } - public void Run() + ReportProgress(100, ""); + } + + public void Run() + { + ReportProgress(0, "Reading package ..."); + + using (var packageReader = new PackageReader(SaveFilePath)) { - ReportProgress(0, "Reading package ..."); + SavePackage = packageReader.Read(); - using (var packageReader = new PackageReader(SaveFilePath)) + AbstractFileInfo abstractFileInfo = SavePackage.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == "globals.lsf"); + if (abstractFileInfo == null) { - SavePackage = packageReader.Read(); - - AbstractFileInfo abstractFileInfo = SavePackage.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == "globals.lsf"); - if (abstractFileInfo == null) - { - MessageBox.Show("The specified package is not a valid savegame (globals.lsf not found)", "Load Failed", MessageBoxButtons.OK, MessageBoxIcon.Error); - return; - } + MessageBox.Show("The specified package is not a valid savegame (globals.lsf not found)", "Load Failed", MessageBoxButtons.OK, MessageBoxIcon.Error); + return; + } - RunTasks(); + RunTasks(); - MessageBox.Show($"Savegame dumped to {DataDumpPath}."); - } + MessageBox.Show($"Savegame dumped to {DataDumpPath}."); } } } diff --git a/ConverterApp/Program.cs b/ConverterApp/Program.cs index db9175b5..30100d4f 100644 --- a/ConverterApp/Program.cs +++ b/ConverterApp/Program.cs @@ -3,23 +3,22 @@ using System.Windows.Forms; [assembly: SupportedOSPlatform("windows")] -namespace ConverterApp +namespace ConverterApp; + +static class Program { - static class Program + /// + /// The main entry point for the application. + /// + [STAThread] + static void Main() { - /// - /// The main entry point for the application. 
- /// - [STAThread] - static void Main() - { - System.Globalization.CultureInfo customCulture = (System.Globalization.CultureInfo)System.Threading.Thread.CurrentThread.CurrentCulture.Clone(); - customCulture.NumberFormat.NumberDecimalSeparator = "."; - System.Threading.Thread.CurrentThread.CurrentCulture = customCulture; + System.Globalization.CultureInfo customCulture = (System.Globalization.CultureInfo)System.Threading.Thread.CurrentThread.CurrentCulture.Clone(); + customCulture.NumberFormat.NumberDecimalSeparator = "."; + System.Threading.Thread.CurrentThread.CurrentCulture = customCulture; - Application.EnableVisualStyles(); - Application.SetCompatibleTextRenderingDefault(false); - Application.Run(new MainForm()); - } + Application.EnableVisualStyles(); + Application.SetCompatibleTextRenderingDefault(false); + Application.Run(new MainForm()); } } diff --git a/ConverterApp/VariableDumper.cs b/ConverterApp/VariableDumper.cs index 95810bf9..4c76e513 100644 --- a/ConverterApp/VariableDumper.cs +++ b/ConverterApp/VariableDumper.cs @@ -5,145 +5,144 @@ using System.Linq; using System.Text; -namespace ConverterApp +namespace ConverterApp; + +class VariableDumper : IDisposable { - class VariableDumper : IDisposable - { - private StreamWriter Writer; - private Resource Rsrc; - private OsirisVariableHelper VariablesHelper; + private StreamWriter Writer; + private Resource Rsrc; + private OsirisVariableHelper VariablesHelper; - public bool IncludeDeletedVars { get; set; } - public bool IncludeLocalScopes { get; set; } + public bool IncludeDeletedVars { get; set; } + public bool IncludeLocalScopes { get; set; } - public VariableDumper(Stream outputStream) - { - Writer = new StreamWriter(outputStream, Encoding.UTF8); - IncludeDeletedVars = false; - IncludeLocalScopes = false; - } + public VariableDumper(Stream outputStream) + { + Writer = new StreamWriter(outputStream, Encoding.UTF8); + IncludeDeletedVars = false; + IncludeLocalScopes = false; + } - public void Dispose() - { - Writer.Dispose(); - } + public void Dispose() + { + Writer.Dispose(); + } - private void DumpCharacter(Node characterNode) + private void DumpCharacter(Node characterNode) + { + if (characterNode.Children.TryGetValue("VariableManager", out var varNodes)) { - if (characterNode.Children.TryGetValue("VariableManager", out var varNodes)) - { - var characterVars = new VariableManager(VariablesHelper); - characterVars.Load(varNodes[0]); - - var key = characterNode.Attributes["CurrentTemplate"].Value.ToString(); - if (characterNode.Attributes.ContainsKey("Stats")) - { - key += " (" + (string)characterNode.Attributes["Stats"].Value + ")"; - } - else if (characterNode.Children.ContainsKey("PlayerData")) - { - var playerData = characterNode.Children["PlayerData"][0] - .Children["PlayerCustomData"][0]; - if (playerData.Attributes.TryGetValue("Name", out NodeAttribute name)) - { - key += " (Player " + (string)name.Value + ")"; - } - } + var characterVars = new VariableManager(VariablesHelper); + characterVars.Load(varNodes[0]); - DumpVariables(key, characterVars); + var key = characterNode.Attributes["CurrentTemplate"].Value.ToString(); + if (characterNode.Attributes.ContainsKey("Stats")) + { + key += " (" + (string)characterNode.Attributes["Stats"].Value + ")"; } - } - - private void DumpItem(Node itemNode) - { - if (itemNode.Children.TryGetValue("VariableManager", out var varNodes)) + else if (characterNode.Children.ContainsKey("PlayerData")) { - var itemVars = new VariableManager(VariablesHelper); - itemVars.Load(varNodes[0]); - - var 
key = itemNode.Attributes["CurrentTemplate"].Value.ToString(); - if (itemNode.Attributes.ContainsKey("Stats")) + var playerData = characterNode.Children["PlayerData"][0] + .Children["PlayerCustomData"][0]; + if (playerData.Attributes.TryGetValue("Name", out NodeAttribute name)) { - key += " (" + (string)itemNode.Attributes["Stats"].Value + ")"; + key += " (Player " + (string)name.Value + ")"; } - - DumpVariables(key, itemVars); } - } - private void DumpGlobals(Node globalVarsNode) - { - var vars = new VariableManager(VariablesHelper); - vars.Load(globalVarsNode); - DumpVariables("Globals", vars); + DumpVariables(key, characterVars); } + } - private void DumpVariables(string label, VariableManager variableMgr) + private void DumpItem(Node itemNode) + { + if (itemNode.Children.TryGetValue("VariableManager", out var varNodes)) { - var variables = variableMgr.GetAll(IncludeDeletedVars); + var itemVars = new VariableManager(VariablesHelper); + itemVars.Load(varNodes[0]); - if (!IncludeLocalScopes) + var key = itemNode.Attributes["CurrentTemplate"].Value.ToString(); + if (itemNode.Attributes.ContainsKey("Stats")) { - variables = variables - .Where(kv => !kv.Key.Contains('.')) - .ToDictionary(kv => kv.Key, kv => kv.Value); + key += " (" + (string)itemNode.Attributes["Stats"].Value + ")"; } - if (variables.Count > 0) - { - Writer.WriteLine($"{label}:"); - foreach (var kv in variables) - { - Writer.WriteLine($"\t{kv.Key}: {kv.Value}"); - } + DumpVariables(key, itemVars); + } + } - Writer.WriteLine(""); - } + private void DumpGlobals(Node globalVarsNode) + { + var vars = new VariableManager(VariablesHelper); + vars.Load(globalVarsNode); + DumpVariables("Globals", vars); + } + + private void DumpVariables(string label, VariableManager variableMgr) + { + var variables = variableMgr.GetAll(IncludeDeletedVars); + + if (!IncludeLocalScopes) + { + variables = variables + .Where(kv => !kv.Key.Contains('.')) + .ToDictionary(kv => kv.Key, kv => kv.Value); } - public bool Load(Resource resource) + if (variables.Count > 0) { - Rsrc = resource; - Node osiHelper = resource.Regions["OsirisVariableHelper"]; - if (!osiHelper.Children.ContainsKey("IdentifierTable")) + Writer.WriteLine($"{label}:"); + foreach (var kv in variables) { - return false; + Writer.WriteLine($"\t{kv.Key}: {kv.Value}"); } - VariablesHelper = new OsirisVariableHelper(); - VariablesHelper.Load(osiHelper); - return true; + Writer.WriteLine(""); } + } - public void DumpGlobals() + public bool Load(Resource resource) + { + Rsrc = resource; + Node osiHelper = resource.Regions["OsirisVariableHelper"]; + if (!osiHelper.Children.ContainsKey("IdentifierTable")) { - Node osiHelper = Rsrc.Regions["OsirisVariableHelper"]; - var globalVarsNode = osiHelper.Children["VariableManager"][0]; - - Writer.WriteLine(" === DUMP OF GLOBALS === "); - DumpGlobals(globalVarsNode); + return false; } - public void DumpCharacters() + VariablesHelper = new OsirisVariableHelper(); + VariablesHelper.Load(osiHelper); + return true; + } + + public void DumpGlobals() + { + Node osiHelper = Rsrc.Regions["OsirisVariableHelper"]; + var globalVarsNode = osiHelper.Children["VariableManager"][0]; + + Writer.WriteLine(" === DUMP OF GLOBALS === "); + DumpGlobals(globalVarsNode); + } + + public void DumpCharacters() + { + Writer.WriteLine(); + Writer.WriteLine(" === DUMP OF CHARACTERS === "); + var characters = Rsrc.Regions["Characters"].Children["CharacterFactory"][0].Children["Characters"][0].Children["Character"]; + foreach (var character in characters) { - Writer.WriteLine(); - 
Writer.WriteLine(" === DUMP OF CHARACTERS === "); - var characters = Rsrc.Regions["Characters"].Children["CharacterFactory"][0].Children["Characters"][0].Children["Character"]; - foreach (var character in characters) - { - DumpCharacter(character); - } + DumpCharacter(character); } + } - public void DumpItems() + public void DumpItems() + { + Writer.WriteLine(); + Writer.WriteLine(" === DUMP OF ITEMS === "); + var items = Rsrc.Regions["Items"].Children["ItemFactory"][0].Children["Items"][0].Children["Item"]; + foreach (var item in items) { - Writer.WriteLine(); - Writer.WriteLine(" === DUMP OF ITEMS === "); - var items = Rsrc.Regions["Items"].Children["ItemFactory"][0].Children["Items"][0].Children["Item"]; - foreach (var item in items) - { - DumpItem(item); - } + DumpItem(item); } } } diff --git a/DebuggerFrontend/Breakpoints.cs b/DebuggerFrontend/Breakpoints.cs index cb1e7f9f..21d9cdc0 100644 --- a/DebuggerFrontend/Breakpoints.cs +++ b/DebuggerFrontend/Breakpoints.cs @@ -6,310 +6,309 @@ using System.Text; using System.Threading.Tasks; -namespace LSTools.DebuggerFrontend +namespace LSTools.DebuggerFrontend; + +public enum LineType { - public enum LineType - { - // Line is a single node (i.e. AND, rule, etc.) - NodeLine, - // Line is an action in the rule THEN part - RuleActionLine, - // Line is an action in the goal INIT section - GoalInitActionLine, - // Line is an action in the goal EXIT section - GoalExitActionLine - } + // Line is a single node (i.e. AND, rule, etc.) + NodeLine, + // Line is an action in the rule THEN part + RuleActionLine, + // Line is an action in the goal INIT section + GoalInitActionLine, + // Line is an action in the goal EXIT section + GoalExitActionLine +} - // Node information associated to a line - public class LineDebugInfo - { - // Type of line - public LineType Type; - // Node associated to this line - public NodeDebugInfo Node; - // Goal associated to this line - public GoalDebugInfo Goal; - // Index of action in INIT/EXIT/THEN part - public UInt32 ActionIndex; - // Line number - public UInt32 Line; - } +// Node information associated to a line +public class LineDebugInfo +{ + // Type of line + public LineType Type; + // Node associated to this line + public NodeDebugInfo Node; + // Goal associated to this line + public GoalDebugInfo Goal; + // Index of action in INIT/EXIT/THEN part + public UInt32 ActionIndex; + // Line number + public UInt32 Line; +} - public class GoalLineMap +public class GoalLineMap +{ + public GoalDebugInfo Goal; + // Line number => Node info mappings + public Dictionary LineMap; +} + +public class CodeLocationTranslator +{ + private StoryDebugInfo DebugInfo; + // Goal name => Goal mappings + private Dictionary GoalMap; + + public CodeLocationTranslator(StoryDebugInfo debugInfo) { - public GoalDebugInfo Goal; - // Line number => Node info mappings - public Dictionary LineMap; + DebugInfo = debugInfo; + GoalMap = new Dictionary(); + BuildLineMap(); } - public class CodeLocationTranslator + public LineDebugInfo LocationToNode(String goalName, UInt32 line) { - private StoryDebugInfo DebugInfo; - // Goal name => Goal mappings - private Dictionary GoalMap; - - public CodeLocationTranslator(StoryDebugInfo debugInfo) + GoalLineMap goalMap; + if (!GoalMap.TryGetValue(goalName, out goalMap)) { - DebugInfo = debugInfo; - GoalMap = new Dictionary(); - BuildLineMap(); + return null; } - public LineDebugInfo LocationToNode(String goalName, UInt32 line) + LineDebugInfo lineInfo; + if (!goalMap.LineMap.TryGetValue(line, out lineInfo)) { - 
GoalLineMap goalMap; - if (!GoalMap.TryGetValue(goalName, out goalMap)) - { - return null; - } - - LineDebugInfo lineInfo; - if (!goalMap.LineMap.TryGetValue(line, out lineInfo)) - { - return null; - } - - return lineInfo; + return null; } - private void AddLineMapping(LineType type, GoalDebugInfo goal, NodeDebugInfo node, UInt32 index, UInt32 line) - { - GoalLineMap goalMap; - if (!GoalMap.TryGetValue(goal.Name, out goalMap)) - { - goalMap = new GoalLineMap - { - Goal = goal, - LineMap = new Dictionary() - }; - GoalMap.Add(goal.Name, goalMap); - } + return lineInfo; + } - var mapping = new LineDebugInfo + private void AddLineMapping(LineType type, GoalDebugInfo goal, NodeDebugInfo node, UInt32 index, UInt32 line) + { + GoalLineMap goalMap; + if (!GoalMap.TryGetValue(goal.Name, out goalMap)) + { + goalMap = new GoalLineMap { - Type = type, Goal = goal, - Node = node, - ActionIndex = index, - Line = line + LineMap = new Dictionary() }; - goalMap.LineMap[line] = mapping; + GoalMap.Add(goal.Name, goalMap); } - private void BuildLineMap(GoalDebugInfo goal) + var mapping = new LineDebugInfo { - for (var index = 0; index < goal.InitActions.Count; index++) - { - AddLineMapping(LineType.GoalInitActionLine, goal, null, (UInt32)index, goal.InitActions[index].Line); - } + Type = type, + Goal = goal, + Node = node, + ActionIndex = index, + Line = line + }; + goalMap.LineMap[line] = mapping; + } - for (var index = 0; index < goal.ExitActions.Count; index++) - { - AddLineMapping(LineType.GoalExitActionLine, goal, null, (UInt32)index, goal.ExitActions[index].Line); - } + private void BuildLineMap(GoalDebugInfo goal) + { + for (var index = 0; index < goal.InitActions.Count; index++) + { + AddLineMapping(LineType.GoalInitActionLine, goal, null, (UInt32)index, goal.InitActions[index].Line); } - private void BuildLineMap(NodeDebugInfo node) + for (var index = 0; index < goal.ExitActions.Count; index++) { - if (node.RuleId != 0) - { - var rule = DebugInfo.Rules[node.RuleId]; - var goal = DebugInfo.Goals[rule.GoalId]; - - if (node.Line != 0 - && node.Type != LSLib.LS.Story.Node.Type.Rule) - { - AddLineMapping(LineType.NodeLine, goal, node, 0, (UInt32)node.Line); - } - - if (node.Type == LSLib.LS.Story.Node.Type.Rule) - { - for (var index = 0; index < rule.Actions.Count; index++) - { - AddLineMapping(LineType.RuleActionLine, goal, node, (UInt32)index, rule.Actions[index].Line); - } - } - } + AddLineMapping(LineType.GoalExitActionLine, goal, null, (UInt32)index, goal.ExitActions[index].Line); } + } - private void BuildLineMap() + private void BuildLineMap(NodeDebugInfo node) + { + if (node.RuleId != 0) { - foreach (var goal in DebugInfo.Goals) - { - BuildLineMap(goal.Value); - } + var rule = DebugInfo.Rules[node.RuleId]; + var goal = DebugInfo.Goals[rule.GoalId]; - foreach (var node in DebugInfo.Nodes) + if (node.Line != 0 + && node.Type != LSLib.LS.Story.Node.Type.Rule) { - BuildLineMap(node.Value); + AddLineMapping(LineType.NodeLine, goal, node, 0, (UInt32)node.Line); } - } - } - public class Breakpoint - { - // Unique breakpoint ID on frontend - public UInt32 Id; - // Source code location reference - public DAPSource Source; - // Story goal name - public String GoalName; - // 1-based line number on goal file - public UInt32 Line; - // Line to node mapping (if the line could be mapped to a valid location) - public LineDebugInfo LineInfo; - // Is the node permanently invalidated? - // (ie. 
an unsupported feature was requested when adding the breakpoint, like conditional breaks) - public bool PermanentlyInvalid; - // Was the breakpoint correct and could it be mapped to a node? - // This is updated each time the debug info is reloaded. - public bool Verified; - // Reason for verification error - public String ErrorReason; - - public DAPBreakpoint ToDAP() - { - return new DAPBreakpoint + if (node.Type == LSLib.LS.Story.Node.Type.Rule) { - id = (int)Id, - verified = Verified, - message = ErrorReason, - source = Source, - line = (int)Line - }; + for (var index = 0; index < rule.Actions.Count; index++) + { + AddLineMapping(LineType.RuleActionLine, goal, node, (UInt32)index, rule.Actions[index].Line); + } + } } } - public class BreakpointManager + private void BuildLineMap() { - private DebuggerClient DbgCli; - private CodeLocationTranslator LocationTranslator; - private Dictionary Breakpoints; - private UInt32 NextBreakpointId = 1; - - public BreakpointManager(DebuggerClient client) + foreach (var goal in DebugInfo.Goals) { - DbgCli = client; - Breakpoints = new Dictionary(); + BuildLineMap(goal.Value); } - public List DebugInfoLoaded(StoryDebugInfo debugInfo) + foreach (var node in DebugInfo.Nodes) { - LocationTranslator = new CodeLocationTranslator(debugInfo); - var changes = RevalidateBreakpoints(); - // Sync breakpoint list to backend as the current debugger instance doesn't have - // any of our breakpoints yet - UpdateBreakpointsOnBackend(); - return changes; + BuildLineMap(node.Value); } + } +} - public List DebugInfoUnloaded() +public class Breakpoint +{ + // Unique breakpoint ID on frontend + public UInt32 Id; + // Source code location reference + public DAPSource Source; + // Story goal name + public String GoalName; + // 1-based line number on goal file + public UInt32 Line; + // Line to node mapping (if the line could be mapped to a valid location) + public LineDebugInfo LineInfo; + // Is the node permanently invalidated? + // (ie. an unsupported feature was requested when adding the breakpoint, like conditional breaks) + public bool PermanentlyInvalid; + // Was the breakpoint correct and could it be mapped to a node? + // This is updated each time the debug info is reloaded. 
+ public bool Verified; + // Reason for verification error + public String ErrorReason; + + public DAPBreakpoint ToDAP() + { + return new DAPBreakpoint { - LocationTranslator = null; - var changes = RevalidateBreakpoints(); - return changes; - } + id = (int)Id, + verified = Verified, + message = ErrorReason, + source = Source, + line = (int)Line + }; + } +} - public void ClearGoalBreakpoints(String goalName) - { - Breakpoints = Breakpoints - .Where(kv => kv.Value.GoalName != goalName) - .Select(kv => kv.Value) - .ToDictionary(kv => kv.Id); - } +public class BreakpointManager +{ + private DebuggerClient DbgCli; + private CodeLocationTranslator LocationTranslator; + private Dictionary Breakpoints; + private UInt32 NextBreakpointId = 1; - public Breakpoint AddBreakpoint(DAPSource source, DAPSourceBreakpoint breakpoint) - { - var bp = new Breakpoint - { - Id = NextBreakpointId++, - Source = source, - GoalName = Path.GetFileNameWithoutExtension(source.name), - Line = (UInt32)breakpoint.line, - PermanentlyInvalid = false - }; - Breakpoints.Add(bp.Id, bp); + public BreakpointManager(DebuggerClient client) + { + DbgCli = client; + Breakpoints = new Dictionary(); + } - if (breakpoint.condition != null || breakpoint.hitCondition != null) - { - bp.PermanentlyInvalid = true; - bp.ErrorReason = "Conditional breakpoints are not supported"; - } + public List DebugInfoLoaded(StoryDebugInfo debugInfo) + { + LocationTranslator = new CodeLocationTranslator(debugInfo); + var changes = RevalidateBreakpoints(); + // Sync breakpoint list to backend as the current debugger instance doesn't have + // any of our breakpoints yet + UpdateBreakpointsOnBackend(); + return changes; + } - ValidateBreakpoint(bp); + public List DebugInfoUnloaded() + { + LocationTranslator = null; + var changes = RevalidateBreakpoints(); + return changes; + } - return bp; - } + public void ClearGoalBreakpoints(String goalName) + { + Breakpoints = Breakpoints + .Where(kv => kv.Value.GoalName != goalName) + .Select(kv => kv.Value) + .ToDictionary(kv => kv.Id); + } - /// - /// Transmits the list of active breakpoints to the debugger backend. - /// - public void UpdateBreakpointsOnBackend() + public Breakpoint AddBreakpoint(DAPSource source, DAPSourceBreakpoint breakpoint) + { + var bp = new Breakpoint + { + Id = NextBreakpointId++, + Source = source, + GoalName = Path.GetFileNameWithoutExtension(source.name), + Line = (UInt32)breakpoint.line, + PermanentlyInvalid = false + }; + Breakpoints.Add(bp.Id, bp); + + if (breakpoint.condition != null || breakpoint.hitCondition != null) { - var breakpoints = Breakpoints.Values.Where(bp => bp.Verified).ToList(); - DbgCli.SendSetBreakpoints(breakpoints); + bp.PermanentlyInvalid = true; + bp.ErrorReason = "Conditional breakpoints are not supported"; } - /// - /// Rechecks the code -> node mapping of each breakpoint. - /// This is required after each story reload/recompilation to make sure - /// that we don't use stale node ID-s from the previous compilation. - /// - List RevalidateBreakpoints() - { - var changes = new List(); - foreach (var bp in Breakpoints) - { - bool changed = ValidateBreakpoint(bp.Value); - if (changed) - { - changes.Add(bp.Value); - } - } + ValidateBreakpoint(bp); - return changes; - } + return bp; + } - private bool ValidateBreakpoint(Breakpoint bp) + /// + /// Transmits the list of active breakpoints to the debugger backend. 
+ /// + public void UpdateBreakpointsOnBackend() + { + var breakpoints = Breakpoints.Values.Where(bp => bp.Verified).ToList(); + DbgCli.SendSetBreakpoints(breakpoints); + } + + /// + /// Rechecks the code -> node mapping of each breakpoint. + /// This is required after each story reload/recompilation to make sure + /// that we don't use stale node ID-s from the previous compilation. + /// + List RevalidateBreakpoints() + { + var changes = new List(); + foreach (var bp in Breakpoints) { - if (bp.PermanentlyInvalid) + bool changed = ValidateBreakpoint(bp.Value); + if (changed) { - bp.Verified = false; - // Don't touch the error message here, as it was already updated when the - // PermanentlyInvalid flag was set. - return false; + changes.Add(bp.Value); } + } - var oldVerified = bp.Verified; - var oldReason = bp.ErrorReason; + return changes; + } - bp.LineInfo = LocationToNode(bp.GoalName, bp.Line); + private bool ValidateBreakpoint(Breakpoint bp) + { + if (bp.PermanentlyInvalid) + { + bp.Verified = false; + // Don't touch the error message here, as it was already updated when the + // PermanentlyInvalid flag was set. + return false; + } - if (bp.LineInfo == null) - { - bp.Verified = false; - bp.ErrorReason = $"Could not map {bp.GoalName}:{bp.Line} to a story node"; - } - else - { - bp.Verified = true; - bp.ErrorReason = null; - } + var oldVerified = bp.Verified; + var oldReason = bp.ErrorReason; + + bp.LineInfo = LocationToNode(bp.GoalName, bp.Line); - var changed = (bp.Verified != oldVerified || bp.ErrorReason != oldReason); - return changed; + if (bp.LineInfo == null) + { + bp.Verified = false; + bp.ErrorReason = $"Could not map {bp.GoalName}:{bp.Line} to a story node"; } + else + { + bp.Verified = true; + bp.ErrorReason = null; + } + + var changed = (bp.Verified != oldVerified || bp.ErrorReason != oldReason); + return changed; + } - private LineDebugInfo LocationToNode(String goalName, UInt32 line) + private LineDebugInfo LocationToNode(String goalName, UInt32 line) + { + if (LocationTranslator == null) { - if (LocationTranslator == null) - { - return null; - } - else - { - return LocationTranslator.LocationToNode(goalName, line); - } + return null; + } + else + { + return LocationTranslator.LocationToNode(goalName, line); } } } diff --git a/DebuggerFrontend/DAPMessageHandler.cs b/DebuggerFrontend/DAPMessageHandler.cs index 324d7a0e..7f4fb5fe 100644 --- a/DebuggerFrontend/DAPMessageHandler.cs +++ b/DebuggerFrontend/DAPMessageHandler.cs @@ -9,858 +9,857 @@ using System.Threading; using System.Threading.Tasks; -namespace LSTools.DebuggerFrontend +namespace LSTools.DebuggerFrontend; + +public class DAPMessageHandler { - public class DAPMessageHandler + // DBG protocol version (game/editor backend to debugger frontend communication) + private const UInt32 DBGProtocolVersion = 8; + + // DAP protocol version (VS Code to debugger frontend communication) + private const int DAPProtocolVersion = 1; + + private DAPStream Stream; + private Stream LogStream; + + private StoryDebugInfo DebugInfo; + private String DebugInfoPath; + private DebugInfoSync DebugInfoSync; + private Thread DbgThread; + private AsyncProtobufClient DbgClient; + private DebuggerClient DbgCli; + private ValueFormatter Formatter; + private StackTracePrinter TracePrinter; + private BreakpointManager Breakpoints; + private EvaluationResultManager EvalResults; + private ExpressionEvaluator Evaluator; + private List Stack; + private DAPCustomConfiguration Config; + private bool Stopped; + // Should we send a continue message after 
story synchronization is done? + // This is needed if the sync was triggered by a global breakpoint. + private bool ContinueAfterSync; + // Should we pause on the next instruction? + private bool PauseRequested; + // Are we currently debugging a story? + private bool DebuggingStory; + // Results of last DIV query before breakpoint (if available) + private FunctionDebugInfo LastQueryFunc; + private List LastQueryResults; + // Mod/project UUID we'll send to the debugger instead of the packaged path + public string ModUuid; + + + public DAPMessageHandler(DAPStream stream) { - // DBG protocol version (game/editor backend to debugger frontend communication) - private const UInt32 DBGProtocolVersion = 8; - - // DAP protocol version (VS Code to debugger frontend communication) - private const int DAPProtocolVersion = 1; - - private DAPStream Stream; - private Stream LogStream; - - private StoryDebugInfo DebugInfo; - private String DebugInfoPath; - private DebugInfoSync DebugInfoSync; - private Thread DbgThread; - private AsyncProtobufClient DbgClient; - private DebuggerClient DbgCli; - private ValueFormatter Formatter; - private StackTracePrinter TracePrinter; - private BreakpointManager Breakpoints; - private EvaluationResultManager EvalResults; - private ExpressionEvaluator Evaluator; - private List Stack; - private DAPCustomConfiguration Config; - private bool Stopped; - // Should we send a continue message after story synchronization is done? - // This is needed if the sync was triggered by a global breakpoint. - private bool ContinueAfterSync; - // Should we pause on the next instruction? - private bool PauseRequested; - // Are we currently debugging a story? - private bool DebuggingStory; - // Results of last DIV query before breakpoint (if available) - private FunctionDebugInfo LastQueryFunc; - private List LastQueryResults; - // Mod/project UUID we'll send to the debugger instead of the packaged path - public string ModUuid; - + Stream = stream; + Stream.MessageReceived += this.MessageReceived; + } - public DAPMessageHandler(DAPStream stream) - { - Stream = stream; - Stream.MessageReceived += this.MessageReceived; - } + public void EnableLogging(Stream logStream) + { + LogStream = logStream; + } - public void EnableLogging(Stream logStream) + private void SendBreakpoint(string eventType, Breakpoint bp) + { + var bpMsg = new DAPBreakpointEvent { - LogStream = logStream; - } + reason = eventType, + breakpoint = bp.ToDAP() + }; + Stream.SendEvent("breakpoint", bpMsg); + } - private void SendBreakpoint(string eventType, Breakpoint bp) + public void SendOutput(string category, string output) + { + var outputMsg = new DAPOutputMessage { - var bpMsg = new DAPBreakpointEvent - { - reason = eventType, - breakpoint = bp.ToDAP() - }; - Stream.SendEvent("breakpoint", bpMsg); - } + category = category, + output = output + }; + Stream.SendEvent("output", outputMsg); + } - public void SendOutput(string category, string output) - { - var outputMsg = new DAPOutputMessage - { - category = category, - output = output - }; - Stream.SendEvent("output", outputMsg); - } + private void LogError(String message) + { + SendOutput("stderr", message + "\r\n"); - private void LogError(String message) + if (LogStream != null) { - SendOutput("stderr", message + "\r\n"); - - if (LogStream != null) + using (var writer = new StreamWriter(LogStream, Encoding.UTF8, 0x1000, true)) { - using (var writer = new StreamWriter(LogStream, Encoding.UTF8, 0x1000, true)) - { - writer.WriteLine(message); - Console.WriteLine(message); - 
} + writer.WriteLine(message); + Console.WriteLine(message); } } + } - private void MessageReceived(DAPMessage message) + private void MessageReceived(DAPMessage message) + { + if (message is DAPRequest) { - if (message is DAPRequest) + try { - try - { - HandleRequest(message as DAPRequest); - } - catch (RequestFailedException e) - { - Stream.SendReply(message as DAPRequest, e.Message); - } - catch (Exception e) - { - LogError(e.ToString()); - Stream.SendReply(message as DAPRequest, e.ToString()); - } + HandleRequest(message as DAPRequest); } - else if (message is DAPEvent) + catch (RequestFailedException e) { - HandleEvent(message as DAPEvent); + Stream.SendReply(message as DAPRequest, e.Message); } - else + catch (Exception e) { - throw new InvalidDataException("DAP replies not handled"); + LogError(e.ToString()); + Stream.SendReply(message as DAPRequest, e.ToString()); } } - - private void InitDebugger() + else if (message is DAPEvent) { - var debugPayload = File.ReadAllBytes(DebugInfoPath); - var loader = new DebugInfoLoader(); - DebugInfo = loader.Load(debugPayload); - if (DebugInfo.Version != StoryDebugInfo.CurrentVersion) - { - throw new InvalidDataException($"Story debug info too old (found version {DebugInfo.Version}, we only support {StoryDebugInfo.CurrentVersion}). Please recompile the story."); - } - - Formatter = new ValueFormatter(DebugInfo); - TracePrinter = new StackTracePrinter(DebugInfo, Formatter); - TracePrinter.ModUuid = ModUuid; - if (Config != null) - { - TracePrinter.MergeFrames = !Config.rawFrames; - } - - EvalResults = new EvaluationResultManager(Formatter); - Evaluator = new ExpressionEvaluator(DebugInfo, Stream, DbgCli, Formatter, EvalResults); + HandleEvent(message as DAPEvent); + } + else + { + throw new InvalidDataException("DAP replies not handled"); + } + } - Stack = null; - Stopped = false; - // We're not in debug mode yet. We'll enable debugging when the story is fully synced - DebuggingStory = false; + private void InitDebugger() + { + var debugPayload = File.ReadAllBytes(DebugInfoPath); + var loader = new DebugInfoLoader(); + DebugInfo = loader.Load(debugPayload); + if (DebugInfo.Version != StoryDebugInfo.CurrentVersion) + { + throw new InvalidDataException($"Story debug info too old (found version {DebugInfo.Version}, we only support {StoryDebugInfo.CurrentVersion}). Please recompile the story."); } - private void StartDebugSession() + Formatter = new ValueFormatter(DebugInfo); + TracePrinter = new StackTracePrinter(DebugInfo, Formatter); + TracePrinter.ModUuid = ModUuid; + if (Config != null) { - DebuggingStory = true; + TracePrinter.MergeFrames = !Config.rawFrames; + } - var changedBps = Breakpoints.DebugInfoLoaded(DebugInfo); - // Notify the debugger that the status of breakpoints changed - changedBps.ForEach(bp => SendBreakpoint("changed", bp)); + EvalResults = new EvaluationResultManager(Formatter); + Evaluator = new ExpressionEvaluator(DebugInfo, Stream, DbgCli, Formatter, EvalResults); - SendOutput("console", "Debug session started\r\n"); - } + Stack = null; + Stopped = false; + // We're not in debug mode yet. 
We'll enable debugging when the story is fully synced + DebuggingStory = false; + } - private void OnDebugSessionEnded() - { - if (DebuggingStory) - { - SendOutput("console", "Story unloaded - debug session terminated\r\n"); - } + private void StartDebugSession() + { + DebuggingStory = true; - DebuggingStory = false; - Stopped = false; - DebugInfo = null; - Evaluator = null; - EvalResults = null; - TracePrinter = null; - Formatter = null; + var changedBps = Breakpoints.DebugInfoLoaded(DebugInfo); + // Notify the debugger that the status of breakpoints changed + changedBps.ForEach(bp => SendBreakpoint("changed", bp)); - var changedBps = Breakpoints.DebugInfoUnloaded(); - // Notify the debugger that the status of breakpoints changed - changedBps.ForEach(bp => SendBreakpoint("changed", bp)); - } + SendOutput("console", "Debug session started\r\n"); + } - private void SynchronizeStoryWithBackend(bool continueAfterSync) + private void OnDebugSessionEnded() + { + if (DebuggingStory) { - DebugInfoSync = new DebugInfoSync(DebugInfo); - ContinueAfterSync = continueAfterSync; - DbgCli.SendSyncStory(); + SendOutput("console", "Story unloaded - debug session terminated\r\n"); } - private void OnBackendInfo(BkVersionInfoResponse response) - { - if (response.ProtocolVersion != DBGProtocolVersion) - { - throw new InvalidDataException($"Backend sent unsupported protocol version; got {response.ProtocolVersion}, we only support {DBGProtocolVersion}"); - } + DebuggingStory = false; + Stopped = false; + DebugInfo = null; + Evaluator = null; + EvalResults = null; + TracePrinter = null; + Formatter = null; - if (response.StoryLoaded) - { - InitDebugger(); - } + var changedBps = Breakpoints.DebugInfoUnloaded(); + // Notify the debugger that the status of breakpoints changed + changedBps.ForEach(bp => SendBreakpoint("changed", bp)); + } - if (response.StoryInitialized) - { - SynchronizeStoryWithBackend(false); - } + private void SynchronizeStoryWithBackend(bool continueAfterSync) + { + DebugInfoSync = new DebugInfoSync(DebugInfo); + ContinueAfterSync = continueAfterSync; + DbgCli.SendSyncStory(); + } + + private void OnBackendInfo(BkVersionInfoResponse response) + { + if (response.ProtocolVersion != DBGProtocolVersion) + { + throw new InvalidDataException($"Backend sent unsupported protocol version; got {response.ProtocolVersion}, we only support {DBGProtocolVersion}"); } - private void OnStoryLoaded() + if (response.StoryLoaded) { InitDebugger(); } - private void OnBreakpointTriggered(BkBreakpointTriggered bp) + if (response.StoryInitialized) { - Stack = TracePrinter.BreakpointToStack(bp); - Stopped = true; - PauseRequested = false; + SynchronizeStoryWithBackend(false); + } + } - var stopped = new DAPStoppedEvent - { - reason = "breakpoint", - threadId = 1 - }; - Stream.SendEvent("stopped", stopped); + private void OnStoryLoaded() + { + InitDebugger(); + } - LastQueryFunc = null; - LastQueryResults = null; - if (bp.QueryResults != null) + private void OnBreakpointTriggered(BkBreakpointTriggered bp) + { + Stack = TracePrinter.BreakpointToStack(bp); + Stopped = true; + PauseRequested = false; + + var stopped = new DAPStoppedEvent + { + reason = "breakpoint", + threadId = 1 + }; + Stream.SendEvent("stopped", stopped); + + LastQueryFunc = null; + LastQueryResults = null; + if (bp.QueryResults != null) + { + var node = DebugInfo.Nodes[bp.QueryNodeId]; + + if (node.FunctionName != null) { - var node = DebugInfo.Nodes[bp.QueryNodeId]; + var function = DebugInfo.Functions[node.FunctionName]; + LastQueryFunc = function; 
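                // Editor's note: the loop below (like its older form) surfaces only the "out"
                // parameters of the query that just ran - one result column per formal parameter,
                // each exposed to the IDE as an "@Name" variable. A LINQ equivalent is shown here
                // purely as a hedged sketch, assuming the same debug-info shapes used in this file.
                var outParamSketch = function.Params
                    .Select((param, i) => (param, col: bp.QueryResults.Column[i]))
                    .Where(p => p.param.Out)
                    .Select(p => new DebugVariable
                    {
                        Name = "@" + p.param.Name,
                        Type = p.param.TypeId.ToString(),
                        Value = Formatter.ValueToString(p.col),
                        TypedValue = p.col
                    })
                    .ToList();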
- if (node.FunctionName != null) + LastQueryResults = new List(); + for (var i = 0; i < bp.QueryResults.Column.Count; i++) { - var function = DebugInfo.Functions[node.FunctionName]; - LastQueryFunc = function; - - LastQueryResults = new List(); - for (var i = 0; i < bp.QueryResults.Column.Count; i++) + if (function.Params[i].Out) { - if (function.Params[i].Out) + var col = bp.QueryResults.Column[i]; + var resultVar = new DebugVariable { - var col = bp.QueryResults.Column[i]; - var resultVar = new DebugVariable - { - Name = "@" + function.Params[i].Name, - Type = function.Params[i].TypeId.ToString(), // TODO name - Value = Formatter.ValueToString(col), - TypedValue = col - }; - LastQueryResults.Add(resultVar); - } + Name = "@" + function.Params[i].Name, + Type = function.Params[i].TypeId.ToString(), // TODO name + Value = Formatter.ValueToString(col), + TypedValue = col + }; + LastQueryResults.Add(resultVar); } } } - - if (bp.QuerySucceeded != BkBreakpointTriggered.Types.QueryStatus.NotAQuery) - { - var queryResult = new DAPCustomQueryResultEvent - { - succeeded = (bp.QuerySucceeded == BkBreakpointTriggered.Types.QueryStatus.Succeeded) - }; - Stream.SendEvent("osirisQueryResult", queryResult); - } } - private void OnGlobalBreakpointTriggered(BkGlobalBreakpointTriggered message) + if (bp.QuerySucceeded != BkBreakpointTriggered.Types.QueryStatus.NotAQuery) { - if (message.Reason == BkGlobalBreakpointTriggered.Types.Reason.StoryLoaded) + var queryResult = new DAPCustomQueryResultEvent { - DbgCli.SendSetGlobalBreakpoints(0x80); // TODO const - // Break on next node - SendContinue(DbgContinue.Types.Action.StepInto); - } - else if (message.Reason == BkGlobalBreakpointTriggered.Types.Reason.GameInit) - { - SynchronizeStoryWithBackend(true); - } - else - { - throw new InvalidOperationException($"Global breakpoint type not supported: {message.Reason}"); - } + succeeded = (bp.QuerySucceeded == BkBreakpointTriggered.Types.QueryStatus.Succeeded) + }; + Stream.SendEvent("osirisQueryResult", queryResult); } + } - private void OnStorySyncData(BkSyncStoryData data) + private void OnGlobalBreakpointTriggered(BkGlobalBreakpointTriggered message) + { + if (message.Reason == BkGlobalBreakpointTriggered.Types.Reason.StoryLoaded) { - DebugInfoSync.AddData(data); + DbgCli.SendSetGlobalBreakpoints(0x80); // TODO const + // Break on next node + SendContinue(DbgContinue.Types.Action.StepInto); } - - private void OnStorySyncFinished() + else if (message.Reason == BkGlobalBreakpointTriggered.Types.Reason.GameInit) { - DebugInfoSync.Finish(); - - if (DebugInfoSync.Matches) - { - StartDebugSession(); - } - else - { - OnDebugSessionEnded(); - - SendOutput("stderr", $"Could not start debugging session - debug info does not match loaded story.\r\n"); + SynchronizeStoryWithBackend(true); + } + else + { + throw new InvalidOperationException($"Global breakpoint type not supported: {message.Reason}"); + } + } - var reasons = " " + DebugInfoSync.Reasons.Aggregate((a, b) => a + "\r\n " + b); - SendOutput("console", $"Mismatches:\r\n{reasons}\r\n"); - } - - DebugInfoSync = null; + private void OnStorySyncData(BkSyncStoryData data) + { + DebugInfoSync.AddData(data); + } - if (ContinueAfterSync) - { - if (PauseRequested && DebuggingStory) - { - SendContinue(DbgContinue.Types.Action.StepInto); - } - else - { - SendContinue(DbgContinue.Types.Action.Continue); - } - } - } + private void OnStorySyncFinished() + { + DebugInfoSync.Finish(); - private void OnDebugOutput(BkDebugOutput msg) + if (DebugInfoSync.Matches) { - 
SendOutput("stdout", "DebugBreak: " + msg.Message + "\r\n"); + StartDebugSession(); } - - private void HandleInitializeRequest(DAPRequest request, DAPInitializeRequest init) + else { - var reply = new DAPCapabilities - { - supportsConfigurationDoneRequest = true, - supportsEvaluateForHovers = true - }; - Stream.SendReply(request, reply); + OnDebugSessionEnded(); - var versionInfo = new DAPCustomVersionInfoEvent - { - version = DAPProtocolVersion - }; - Stream.SendEvent("osirisProtocolVersion", versionInfo); + SendOutput("stderr", $"Could not start debugging session - debug info does not match loaded story.\r\n"); + + var reasons = " " + DebugInfoSync.Reasons.Aggregate((a, b) => a + "\r\n " + b); + SendOutput("console", $"Mismatches:\r\n{reasons}\r\n"); } + + DebugInfoSync = null; - private void DebugThreadMain() + if (ContinueAfterSync) { - try + if (PauseRequested && DebuggingStory) { - DbgClient.RunLoop(); + SendContinue(DbgContinue.Types.Action.StepInto); } - catch (Exception e) + else { - LogError(e.ToString()); - Environment.Exit(2); + SendContinue(DbgContinue.Types.Action.Continue); } } + } - private void HandleLaunchRequest(DAPRequest request, DAPLaunchRequest launch) - { - Config = launch.dbgOptions; - ModUuid = launch.modUuid; - - if (!File.Exists(launch.debugInfoPath)) - { - throw new RequestFailedException("Story debug file does not exist: " + launch.debugInfoPath); - } + private void OnDebugOutput(BkDebugOutput msg) + { + SendOutput("stdout", "DebugBreak: " + msg.Message + "\r\n"); + } - DebugInfoPath = launch.debugInfoPath; + private void HandleInitializeRequest(DAPRequest request, DAPInitializeRequest init) + { + var reply = new DAPCapabilities + { + supportsConfigurationDoneRequest = true, + supportsEvaluateForHovers = true + }; + Stream.SendReply(request, reply); - try - { - DbgClient = new AsyncProtobufClient(launch.backendHost, launch.backendPort); - } - catch (SocketException e) - { - throw new RequestFailedException("Could not connect to Osiris backend server: " + e.Message); - } + var versionInfo = new DAPCustomVersionInfoEvent + { + version = DAPProtocolVersion + }; + Stream.SendEvent("osirisProtocolVersion", versionInfo); + } - DbgCli = new DebuggerClient(DbgClient, DebugInfo) - { - OnStoryLoaded = this.OnStoryLoaded, - OnDebugSessionEnded = this.OnDebugSessionEnded, - OnBackendInfo = this.OnBackendInfo, - OnBreakpointTriggered = this.OnBreakpointTriggered, - OnGlobalBreakpointTriggered = this.OnGlobalBreakpointTriggered, - OnStorySyncData = this.OnStorySyncData, - OnStorySyncFinished = this.OnStorySyncFinished, - OnDebugOutput = this.OnDebugOutput - }; - if (LogStream != null) - { - DbgCli.EnableLogging(LogStream); - } - - DbgCli.SendIdentify(DBGProtocolVersion); + private void DebugThreadMain() + { + try + { + DbgClient.RunLoop(); + } + catch (Exception e) + { + LogError(e.ToString()); + Environment.Exit(2); + } + } - DbgThread = new Thread(new ThreadStart(DebugThreadMain)); - DbgThread.Start(); + private void HandleLaunchRequest(DAPRequest request, DAPLaunchRequest launch) + { + Config = launch.dbgOptions; + ModUuid = launch.modUuid; - Breakpoints = new BreakpointManager(DbgCli); + if (!File.Exists(launch.debugInfoPath)) + { + throw new RequestFailedException("Story debug file does not exist: " + launch.debugInfoPath); + } - var reply = new DAPLaunchResponse(); - Stream.SendReply(request, reply); + DebugInfoPath = launch.debugInfoPath; - var initializedEvt = new DAPInitializedEvent(); - Stream.SendEvent("initialized", initializedEvt); + try + { + DbgClient = new 
AsyncProtobufClient(launch.backendHost, launch.backendPort); + } + catch (SocketException e) + { + throw new RequestFailedException("Could not connect to Osiris backend server: " + e.Message); } - private void HandleSetBreakpointsRequest(DAPRequest request, DAPSetBreakpointsRequest breakpoints) + DbgCli = new DebuggerClient(DbgClient, DebugInfo) { - if (Breakpoints != null) - { - var goalName = Path.GetFileNameWithoutExtension(breakpoints.source.name); - Breakpoints.ClearGoalBreakpoints(goalName); + OnStoryLoaded = this.OnStoryLoaded, + OnDebugSessionEnded = this.OnDebugSessionEnded, + OnBackendInfo = this.OnBackendInfo, + OnBreakpointTriggered = this.OnBreakpointTriggered, + OnGlobalBreakpointTriggered = this.OnGlobalBreakpointTriggered, + OnStorySyncData = this.OnStorySyncData, + OnStorySyncFinished = this.OnStorySyncFinished, + OnDebugOutput = this.OnDebugOutput + }; + if (LogStream != null) + { + DbgCli.EnableLogging(LogStream); + } + + DbgCli.SendIdentify(DBGProtocolVersion); - var reply = new DAPSetBreakpointsResponse - { - breakpoints = new List() - }; + DbgThread = new Thread(new ThreadStart(DebugThreadMain)); + DbgThread.Start(); - foreach (var breakpoint in breakpoints.breakpoints) - { - var bp = Breakpoints.AddBreakpoint(breakpoints.source, breakpoint); - reply.breakpoints.Add(bp.ToDAP()); - } + Breakpoints = new BreakpointManager(DbgCli); - Breakpoints.UpdateBreakpointsOnBackend(); + var reply = new DAPLaunchResponse(); + Stream.SendReply(request, reply); - Stream.SendReply(request, reply); - } - else - { - throw new RequestFailedException("Cannot add breakpoint - breakpoint manager not yet initialized"); - } - } + var initializedEvt = new DAPInitializedEvent(); + Stream.SendEvent("initialized", initializedEvt); + } - private void HandleConfigurationDoneRequest(DAPRequest request, DAPEmptyPayload msg) + private void HandleSetBreakpointsRequest(DAPRequest request, DAPSetBreakpointsRequest breakpoints) + { + if (Breakpoints != null) { - Stream.SendReply(request, new DAPEmptyPayload()); - } + var goalName = Path.GetFileNameWithoutExtension(breakpoints.source.name); + Breakpoints.ClearGoalBreakpoints(goalName); - private void HandleThreadsRequest(DAPRequest request, DAPEmptyPayload msg) - { - var reply = new DAPThreadsResponse + var reply = new DAPSetBreakpointsResponse { - threads = new List { - new DAPThread - { - id = 1, - name = "OsirisThread" - } - } + breakpoints = new List() }; - Stream.SendReply(request, reply); - } - private void HandleStackTraceRequest(DAPRequest request, DAPStackFramesRequest msg) - { - if (!Stopped) + foreach (var breakpoint in breakpoints.breakpoints) { - throw new RequestFailedException("Cannot get stack when story is running"); + var bp = Breakpoints.AddBreakpoint(breakpoints.source, breakpoint); + reply.breakpoints.Add(bp.ToDAP()); } - if (msg.threadId != 1) - { - throw new RequestFailedException("Requested stack trace for unknown thread"); - } + Breakpoints.UpdateBreakpointsOnBackend(); + + Stream.SendReply(request, reply); + } + else + { + throw new RequestFailedException("Cannot add breakpoint - breakpoint manager not yet initialized"); + } + } - int startFrame = msg.startFrame == null ? 0 : (int)msg.startFrame; - int levels = (msg.levels == null || msg.levels == 0) ? 
Stack.Count : (int)msg.levels; - int lastFrame = Math.Min(startFrame + levels, Stack.Count); + private void HandleConfigurationDoneRequest(DAPRequest request, DAPEmptyPayload msg) + { + Stream.SendReply(request, new DAPEmptyPayload()); + } - var frames = new List(); - for (var i = startFrame; i < lastFrame; i++) - { - var frame = Stack[i]; - var dapFrame = new DAPStackFrame(); - dapFrame.id = i; - // TODO DAPStackFrameFormat for name formatting - dapFrame.name = frame.Name; - if (frame.File != null) + private void HandleThreadsRequest(DAPRequest request, DAPEmptyPayload msg) + { + var reply = new DAPThreadsResponse + { + threads = new List { + new DAPThread { - dapFrame.source = new DAPSource - { - name = Path.GetFileNameWithoutExtension(frame.File), - path = frame.File - }; - dapFrame.line = frame.Line; - dapFrame.column = 1; + id = 1, + name = "OsirisThread" } - - // TODO presentationHint - frames.Add(dapFrame); } + }; + Stream.SendReply(request, reply); + } - var reply = new DAPStackFramesResponse - { - stackFrames = frames, - totalFrames = Stack.Count - }; - Stream.SendReply(request, reply); + private void HandleStackTraceRequest(DAPRequest request, DAPStackFramesRequest msg) + { + if (!Stopped) + { + throw new RequestFailedException("Cannot get stack when story is running"); } - private void HandleScopesRequest(DAPRequest request, DAPScopesRequest msg) + if (msg.threadId != 1) { - if (!Stopped) - { - throw new RequestFailedException("Cannot get scopes when story is running"); - } + throw new RequestFailedException("Requested stack trace for unknown thread"); + } - if (msg.frameId < 0 || msg.frameId >= Stack.Count) - { - throw new RequestFailedException("Requested scopes for unknown frame"); - } + int startFrame = msg.startFrame == null ? 0 : (int)msg.startFrame; + int levels = (msg.levels == null || msg.levels == 0) ? Stack.Count : (int)msg.levels; + int lastFrame = Math.Min(startFrame + levels, Stack.Count); - var frame = Stack[msg.frameId]; - var stackScope = new DAPScope - { - // TODO DB insert args? - name = "Locals", - variablesReference = msg.frameId + 1, - namedVariables = frame.Variables.Count, - indexedVariables = 0, - expensive = false - }; - - // Send location information for rule-local scopes. - // If the scope location is missing, the value of local variables will be displayed in - // every rule that has variables with the same name. - // This restricts them so they're only displayed in the rule that the stack frame belongs to. 
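            // Editor's illustration (not part of the original patch): for a hypothetical rule whose
            // conditions start on line 10 and whose actions end on line 25 of Quest_ExampleGoal.txt,
            // the restricted scope built below would look like this (all values invented):
            var exampleScope = new DAPScope
            {
                name = "Locals",
                variablesReference = 1,   // frameId 0 + 1
                namedVariables = 3,       // rule-local variable count
                indexedVariables = 0,
                expensive = false,
                source = new DAPSource
                {
                    name = "Quest_ExampleGoal",
                    path = "Quest_ExampleGoal.txt"
                },
                line = 10,                // Rule.ConditionsStartLine
                column = 1,
                endLine = 26,             // Rule.ActionsEndLine + 1
                endColumn = 1
            };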
- if (frame.Rule != null) + var frames = new List(); + for (var i = startFrame; i < lastFrame; i++) + { + var frame = Stack[i]; + var dapFrame = new DAPStackFrame(); + dapFrame.id = i; + // TODO DAPStackFrameFormat for name formatting + dapFrame.name = frame.Name; + if (frame.File != null) { - stackScope.source = new DAPSource + dapFrame.source = new DAPSource { name = Path.GetFileNameWithoutExtension(frame.File), path = frame.File }; - stackScope.line = (int)frame.Rule.ConditionsStartLine; - stackScope.column = 1; - stackScope.endLine = (int)frame.Rule.ActionsEndLine + 1; - stackScope.endColumn = 1; + dapFrame.line = frame.Line; + dapFrame.column = 1; } - var scopes = new List { stackScope }; + // TODO presentationHint + frames.Add(dapFrame); + } - if (msg.frameId == 0 - && LastQueryResults != null - && LastQueryResults.Count > 0) - { - var queryScope = new DAPScope - { - name = LastQueryFunc.Name + " Returns", - variablesReference = ((long)3 << 48), - namedVariables = LastQueryResults.Count, - indexedVariables = 0, - expensive = false, - - source = stackScope.source, - line = stackScope.line, - column = stackScope.column, - endLine = stackScope.endLine, - endColumn = stackScope.endColumn - }; + var reply = new DAPStackFramesResponse + { + stackFrames = frames, + totalFrames = Stack.Count + }; + Stream.SendReply(request, reply); + } - scopes.Add(queryScope); - } + private void HandleScopesRequest(DAPRequest request, DAPScopesRequest msg) + { + if (!Stopped) + { + throw new RequestFailedException("Cannot get scopes when story is running"); + } - var reply = new DAPScopesResponse - { - scopes = scopes - }; - Stream.SendReply(request, reply); + if (msg.frameId < 0 || msg.frameId >= Stack.Count) + { + throw new RequestFailedException("Requested scopes for unknown frame"); } - private List GetStackVariables(DAPVariablesRequest msg, int frameIndex) + var frame = Stack[msg.frameId]; + var stackScope = new DAPScope + { + // TODO DB insert args? + name = "Locals", + variablesReference = msg.frameId + 1, + namedVariables = frame.Variables.Count, + indexedVariables = 0, + expensive = false + }; + + // Send location information for rule-local scopes. + // If the scope location is missing, the value of local variables will be displayed in + // every rule that has variables with the same name. + // This restricts them so they're only displayed in the rule that the stack frame belongs to. + if (frame.Rule != null) { - if (frameIndex < 0 || frameIndex >= Stack.Count) + stackScope.source = new DAPSource { - throw new RequestFailedException($"Requested variables for unknown frame {frameIndex}"); - } + name = Path.GetFileNameWithoutExtension(frame.File), + path = frame.File + }; + stackScope.line = (int)frame.Rule.ConditionsStartLine; + stackScope.column = 1; + stackScope.endLine = (int)frame.Rule.ActionsEndLine + 1; + stackScope.endColumn = 1; + } - var frame = Stack[frameIndex]; - int startIndex = msg.start == null ? 0 : (int)msg.start; - int numVars = (msg.count == null || msg.count == 0) ? 
frame.Variables.Count : (int)msg.count; - int lastIndex = Math.Min(startIndex + numVars, frame.Variables.Count); - // TODO req.filter, format + var scopes = new List { stackScope }; - var variables = new List(); - for (var i = startIndex; i < startIndex + numVars; i++) + if (msg.frameId == 0 + && LastQueryResults != null + && LastQueryResults.Count > 0) + { + var queryScope = new DAPScope { - var variable = frame.Variables[i]; - var dapVar = new DAPVariable - { - name = variable.Name, - value = variable.Value, - type = variable.Type - }; - variables.Add(dapVar); - } + name = LastQueryFunc.Name + " Returns", + variablesReference = ((long)3 << 48), + namedVariables = LastQueryResults.Count, + indexedVariables = 0, + expensive = false, - return variables; + source = stackScope.source, + line = stackScope.line, + column = stackScope.column, + endLine = stackScope.endLine, + endColumn = stackScope.endColumn + }; + + scopes.Add(queryScope); } - private List GetQueryResultVariables(DAPVariablesRequest msg, int frameIndex) + var reply = new DAPScopesResponse { - if (frameIndex != 0) - { - throw new RequestFailedException($"Requested query results for bad frame {frameIndex}"); - } - - int startIndex = msg.start == null ? 0 : (int)msg.start; - int numVars = (msg.count == null || msg.count == 0) ? LastQueryResults.Count : (int)msg.count; - int lastIndex = Math.Min(startIndex + numVars, LastQueryResults.Count); - // TODO req.filter, format - - var variables = new List(); - for (var i = startIndex; i < startIndex + numVars; i++) - { - var variable = LastQueryResults[i]; - var dapVar = new DAPVariable - { - name = variable.Name, - value = variable.Value, - type = variable.Type - }; - variables.Add(dapVar); - } + scopes = scopes + }; + Stream.SendReply(request, reply); + } - return variables; + private List GetStackVariables(DAPVariablesRequest msg, int frameIndex) + { + if (frameIndex < 0 || frameIndex >= Stack.Count) + { + throw new RequestFailedException($"Requested variables for unknown frame {frameIndex}"); } - private void HandleVariablesRequest(DAPRequest request, DAPVariablesRequest msg) + var frame = Stack[frameIndex]; + int startIndex = msg.start == null ? 0 : (int)msg.start; + int numVars = (msg.count == null || msg.count == 0) ? 
frame.Variables.Count : (int)msg.count; + int lastIndex = Math.Min(startIndex + numVars, frame.Variables.Count); + // TODO req.filter, format + + var variables = new List(); + for (var i = startIndex; i < startIndex + numVars; i++) { - if (!Stopped) + var variable = frame.Variables[i]; + var dapVar = new DAPVariable { - throw new RequestFailedException("Cannot get variables when story is running"); - } + name = variable.Name, + value = variable.Value, + type = variable.Type + }; + variables.Add(dapVar); + } - long variableType = (msg.variablesReference >> 48); - List variables; - if (variableType == 0) - { - int frameIndex = (int)msg.variablesReference - 1; - variables = GetStackVariables(msg, frameIndex); - } - else if (variableType == 1 || variableType == 2) - { - variables = EvalResults.GetVariables(msg, msg.variablesReference); - } - else if (variableType == 3) - { - int frameIndex = (int)(msg.variablesReference & 0xffffff); - variables = GetQueryResultVariables(msg, frameIndex); - } - else - { - throw new InvalidOperationException($"Unknown variables reference type: {msg.variablesReference}"); - } + return variables; + } - var reply = new DAPVariablesResponse - { - variables = variables - }; - Stream.SendReply(request, reply); + private List GetQueryResultVariables(DAPVariablesRequest msg, int frameIndex) + { + if (frameIndex != 0) + { + throw new RequestFailedException($"Requested query results for bad frame {frameIndex}"); } + + int startIndex = msg.start == null ? 0 : (int)msg.start; + int numVars = (msg.count == null || msg.count == 0) ? LastQueryResults.Count : (int)msg.count; + int lastIndex = Math.Min(startIndex + numVars, LastQueryResults.Count); + // TODO req.filter, format - private UInt32 GetContinueBreakpointMask() + var variables = new List(); + for (var i = startIndex; i < startIndex + numVars; i++) { - UInt32 breakpoints = 0; - if (Config == null || Config.stopOnFailedQueries) + var variable = LastQueryResults[i]; + var dapVar = new DAPVariable { - breakpoints |= (UInt32)MsgBreakpoint.Types.BreakpointType.FailedQuery; - } + name = variable.Name, + value = variable.Value, + type = variable.Type + }; + variables.Add(dapVar); + } - if (Config != null && Config.stopOnAllFrames) - { - breakpoints |= - // Break on all possible node events - (UInt32)MsgBreakpoint.Types.BreakpointType.Valid - | (UInt32)MsgBreakpoint.Types.BreakpointType.Pushdown - | (UInt32)MsgBreakpoint.Types.BreakpointType.Insert - | (UInt32)MsgBreakpoint.Types.BreakpointType.RuleAction - | (UInt32)MsgBreakpoint.Types.BreakpointType.InitCall - | (UInt32)MsgBreakpoint.Types.BreakpointType.ExitCall - | (UInt32)MsgBreakpoint.Types.BreakpointType.Delete; - } - else - { - breakpoints |= - // Break on Pushdown for rule "AND/NOT AND" nodes - (UInt32)MsgBreakpoint.Types.BreakpointType.Pushdown - // Break on rule THEN part actions - | (UInt32)MsgBreakpoint.Types.BreakpointType.RuleAction - // Break on goal Init/Exit calls - | (UInt32)MsgBreakpoint.Types.BreakpointType.InitCall - | (UInt32)MsgBreakpoint.Types.BreakpointType.ExitCall; - } + return variables; + } - return breakpoints; + private void HandleVariablesRequest(DAPRequest request, DAPVariablesRequest msg) + { + if (!Stopped) + { + throw new RequestFailedException("Cannot get variables when story is running"); } - private UInt32 GetContinueFlags() + long variableType = (msg.variablesReference >> 48); + List variables; + if (variableType == 0) { - UInt32 flags = 0; - if (Config == null || !Config.stopOnAllFrames) - { - flags |= 
(UInt32)DbgContinue.Types.Flags.SkipRulePushdown; - } + int frameIndex = (int)msg.variablesReference - 1; + variables = GetStackVariables(msg, frameIndex); + } + else if (variableType == 1 || variableType == 2) + { + variables = EvalResults.GetVariables(msg, msg.variablesReference); + } + else if (variableType == 3) + { + int frameIndex = (int)(msg.variablesReference & 0xffffff); + variables = GetQueryResultVariables(msg, frameIndex); + } + else + { + throw new InvalidOperationException($"Unknown variables reference type: {msg.variablesReference}"); + } - if (Config == null || !Config.stopOnDbPropagation) - { - flags |= (UInt32)DbgContinue.Types.Flags.SkipDbPropagation; - } + var reply = new DAPVariablesResponse + { + variables = variables + }; + Stream.SendReply(request, reply); + } - return flags; + private UInt32 GetContinueBreakpointMask() + { + UInt32 breakpoints = 0; + if (Config == null || Config.stopOnFailedQueries) + { + breakpoints |= (UInt32)MsgBreakpoint.Types.BreakpointType.FailedQuery; } - private void SendContinue(DbgContinue.Types.Action action) + if (Config != null && Config.stopOnAllFrames) { - DbgCli.SendContinue(action, GetContinueBreakpointMask(), GetContinueFlags()); + breakpoints |= + // Break on all possible node events + (UInt32)MsgBreakpoint.Types.BreakpointType.Valid + | (UInt32)MsgBreakpoint.Types.BreakpointType.Pushdown + | (UInt32)MsgBreakpoint.Types.BreakpointType.Insert + | (UInt32)MsgBreakpoint.Types.BreakpointType.RuleAction + | (UInt32)MsgBreakpoint.Types.BreakpointType.InitCall + | (UInt32)MsgBreakpoint.Types.BreakpointType.ExitCall + | (UInt32)MsgBreakpoint.Types.BreakpointType.Delete; } - - private void HandleContinueRequest(DAPRequest request, DAPContinueRequest msg, DbgContinue.Types.Action action) + else { - if (msg.threadId != 1) - { - throw new RequestFailedException("Requested continue for unknown thread"); - } + breakpoints |= + // Break on Pushdown for rule "AND/NOT AND" nodes + (UInt32)MsgBreakpoint.Types.BreakpointType.Pushdown + // Break on rule THEN part actions + | (UInt32)MsgBreakpoint.Types.BreakpointType.RuleAction + // Break on goal Init/Exit calls + | (UInt32)MsgBreakpoint.Types.BreakpointType.InitCall + | (UInt32)MsgBreakpoint.Types.BreakpointType.ExitCall; + } - if (action == DbgContinue.Types.Action.Pause) - { - if (Stopped) - { - throw new RequestFailedException("Already stopped"); - } + return breakpoints; + } - PauseRequested = true; - } - else - { - if (!Stopped) - { - throw new RequestFailedException("Already running"); - } + private UInt32 GetContinueFlags() + { + UInt32 flags = 0; + if (Config == null || !Config.stopOnAllFrames) + { + flags |= (UInt32)DbgContinue.Types.Flags.SkipRulePushdown; + } - Stopped = false; - } + if (Config == null || !Config.stopOnDbPropagation) + { + flags |= (UInt32)DbgContinue.Types.Flags.SkipDbPropagation; + } - if (DebuggingStory) - { - SendContinue(action); - } + return flags; + } - var reply = new DAPContinueResponse - { - allThreadsContinued = false - }; - Stream.SendReply(request, reply); + private void SendContinue(DbgContinue.Types.Action action) + { + DbgCli.SendContinue(action, GetContinueBreakpointMask(), GetContinueFlags()); + } + + private void HandleContinueRequest(DAPRequest request, DAPContinueRequest msg, DbgContinue.Types.Action action) + { + if (msg.threadId != 1) + { + throw new RequestFailedException("Requested continue for unknown thread"); } - private void HandleEvaluateRequest(DAPRequest request, DAPEvaulateRequest req) + if (action == DbgContinue.Types.Action.Pause) { - 
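        // Editor's sketch (not part of the original patch): when no DAPCustomConfiguration was sent,
        // the GetContinueBreakpointMask/GetContinueFlags helpers above reduce to the constants below.
        UInt32 defaultBreakpointMask =
            (UInt32)MsgBreakpoint.Types.BreakpointType.FailedQuery   // failed IF queries
            | (UInt32)MsgBreakpoint.Types.BreakpointType.Pushdown    // rule AND/NOT AND nodes
            | (UInt32)MsgBreakpoint.Types.BreakpointType.RuleAction  // rule THEN part actions
            | (UInt32)MsgBreakpoint.Types.BreakpointType.InitCall    // goal INIT calls
            | (UInt32)MsgBreakpoint.Types.BreakpointType.ExitCall;   // goal EXIT calls
        UInt32 defaultContinueFlags =
            (UInt32)DbgContinue.Types.Flags.SkipRulePushdown
            | (UInt32)DbgContinue.Types.Flags.SkipDbPropagation;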
if (!Stopped) + if (Stopped) { - throw new RequestFailedException("Can only evaluate expressions when stopped"); + throw new RequestFailedException("Already stopped"); } - var frameIndex = req.frameId ?? 0; - if (frameIndex < 0 || frameIndex >= Stack.Count) + PauseRequested = true; + } + else + { + if (!Stopped) { - throw new RequestFailedException($"Requested evaluate for unknown frame {frameIndex}"); + throw new RequestFailedException("Already running"); } - var frame = Stack[frameIndex]; + Stopped = false; + } - // Only allow functions that have side effects in the debugger console - bool allowMutation = (req.context == "repl"); - Evaluator.Evaluate(request, req.expression, frame, allowMutation); + if (DebuggingStory) + { + SendContinue(action); } - private void HandleDisconnectRequest(DAPRequest request, DAPDisconnectRequest msg) + var reply = new DAPContinueResponse { - var reply = new DAPEmptyPayload(); - Stream.SendReply(request, reply); - // TODO - close session + allThreadsContinued = false + }; + Stream.SendReply(request, reply); + } + + private void HandleEvaluateRequest(DAPRequest request, DAPEvaulateRequest req) + { + if (!Stopped) + { + throw new RequestFailedException("Can only evaluate expressions when stopped"); } - private void HandleRequest(DAPRequest request) + var frameIndex = req.frameId ?? 0; + if (frameIndex < 0 || frameIndex >= Stack.Count) { - switch (request.command) - { - case "initialize": - HandleInitializeRequest(request, request.arguments as DAPInitializeRequest); - break; - - case "launch": - HandleLaunchRequest(request, request.arguments as DAPLaunchRequest); - break; - - case "setBreakpoints": - HandleSetBreakpointsRequest(request, request.arguments as DAPSetBreakpointsRequest); - break; - - case "configurationDone": - HandleConfigurationDoneRequest(request, request.arguments as DAPEmptyPayload); - break; - - case "threads": - HandleThreadsRequest(request, request.arguments as DAPEmptyPayload); - break; - - case "stackTrace": - HandleStackTraceRequest(request, request.arguments as DAPStackFramesRequest); - break; - - case "scopes": - HandleScopesRequest(request, request.arguments as DAPScopesRequest); - break; - - case "variables": - HandleVariablesRequest(request, request.arguments as DAPVariablesRequest); - break; - - case "continue": - HandleContinueRequest(request, request.arguments as DAPContinueRequest, - DbgContinue.Types.Action.Continue); - break; - - case "next": - HandleContinueRequest(request, request.arguments as DAPContinueRequest, - DbgContinue.Types.Action.StepOver); - break; - - case "stepIn": - HandleContinueRequest(request, request.arguments as DAPContinueRequest, - DbgContinue.Types.Action.StepInto); - break; - - case "stepOut": - HandleContinueRequest(request, request.arguments as DAPContinueRequest, - DbgContinue.Types.Action.StepOut); - break; - - case "pause": - HandleContinueRequest(request, request.arguments as DAPContinueRequest, - DbgContinue.Types.Action.Pause); - break; - - case "evaluate": - HandleEvaluateRequest(request, request.arguments as DAPEvaulateRequest); - break; - - case "disconnect": - HandleDisconnectRequest(request, request.arguments as DAPDisconnectRequest); - break; - - default: - throw new InvalidOperationException($"Unsupported DAP request: {request.command}"); - } + throw new RequestFailedException($"Requested evaluate for unknown frame {frameIndex}"); } - private void HandleEvent(DAPEvent evt) + var frame = Stack[frameIndex]; + + // Only allow functions that have side effects in the debugger console + bool 
allowMutation = (req.context == "repl"); + Evaluator.Evaluate(request, req.expression, frame, allowMutation); + } + + private void HandleDisconnectRequest(DAPRequest request, DAPDisconnectRequest msg) + { + var reply = new DAPEmptyPayload(); + Stream.SendReply(request, reply); + // TODO - close session + } + + private void HandleRequest(DAPRequest request) + { + switch (request.command) { - throw new InvalidOperationException($"Unsupported DAP event: {evt.@event}"); + case "initialize": + HandleInitializeRequest(request, request.arguments as DAPInitializeRequest); + break; + + case "launch": + HandleLaunchRequest(request, request.arguments as DAPLaunchRequest); + break; + + case "setBreakpoints": + HandleSetBreakpointsRequest(request, request.arguments as DAPSetBreakpointsRequest); + break; + + case "configurationDone": + HandleConfigurationDoneRequest(request, request.arguments as DAPEmptyPayload); + break; + + case "threads": + HandleThreadsRequest(request, request.arguments as DAPEmptyPayload); + break; + + case "stackTrace": + HandleStackTraceRequest(request, request.arguments as DAPStackFramesRequest); + break; + + case "scopes": + HandleScopesRequest(request, request.arguments as DAPScopesRequest); + break; + + case "variables": + HandleVariablesRequest(request, request.arguments as DAPVariablesRequest); + break; + + case "continue": + HandleContinueRequest(request, request.arguments as DAPContinueRequest, + DbgContinue.Types.Action.Continue); + break; + + case "next": + HandleContinueRequest(request, request.arguments as DAPContinueRequest, + DbgContinue.Types.Action.StepOver); + break; + + case "stepIn": + HandleContinueRequest(request, request.arguments as DAPContinueRequest, + DbgContinue.Types.Action.StepInto); + break; + + case "stepOut": + HandleContinueRequest(request, request.arguments as DAPContinueRequest, + DbgContinue.Types.Action.StepOut); + break; + + case "pause": + HandleContinueRequest(request, request.arguments as DAPContinueRequest, + DbgContinue.Types.Action.Pause); + break; + + case "evaluate": + HandleEvaluateRequest(request, request.arguments as DAPEvaulateRequest); + break; + + case "disconnect": + HandleDisconnectRequest(request, request.arguments as DAPDisconnectRequest); + break; + + default: + throw new InvalidOperationException($"Unsupported DAP request: {request.command}"); } } + + private void HandleEvent(DAPEvent evt) + { + throw new InvalidOperationException($"Unsupported DAP event: {evt.@event}"); + } } diff --git a/DebuggerFrontend/DAPProtocol.cs b/DebuggerFrontend/DAPProtocol.cs index b4dbc764..f94d4d2d 100644 --- a/DebuggerFrontend/DAPProtocol.cs +++ b/DebuggerFrontend/DAPProtocol.cs @@ -1,1010 +1,1009 @@ using System; using System.Collections.Generic; -namespace LSTools.DebuggerFrontend -{ - /** - * Base class of requests, responses, and events. - */ - public class DAPMessage - { - /** - * Sequence number. - */ - public Int32 seq { get; set; } - - /** - * Message type. - * Values: 'request', 'response', 'event', etc. - */ - public String type { get; set; } - } - - /** - * A client or debug adapter initiated request. - */ - public class DAPRequest : DAPMessage - { - /** - * The command to execute. - */ - public String command { get; set; } - - /** - * Object containing arguments for the command. - */ - public IDAPMessagePayload arguments; - } - - /** - * A debug adapter initiated event. - */ - public class DAPEvent : DAPMessage - { - /** - * Type of event. 
- */ - public String @event { get; set; } - - /** - * Event-specific information - */ - public IDAPMessagePayload body; - } - - /** - * Response for a request. - */ - public class DAPResponse : DAPMessage - { - /** - * Sequence number of the corresponding request. - */ - public Int32 request_seq { get; set; } - - /** - * Outcome of the request. - */ - public bool success { get; set; } - - /** - * The command requested. - */ - public String command { get; set; } - - /** - * Contains error message if success == false. - */ - public String message { get; set; } - - /** - * Contains request result if success is true and optional error details if success is false. - */ - public IDAPMessagePayload body; - } - - public interface IDAPMessagePayload { } - - public class DAPInitializeRequest : IDAPMessagePayload - { - /** - * The ID of the (frontend) client using this adapter. - */ - public String clientID { get; set; } - - /** - * The human readable name of the (frontend) client using this adapter. - */ - public String clientName { get; set; } - - /** - * The ID of the debug adapter. - */ - public String adapterID { get; set; } - - /** - * The ISO-639 locale of the (frontend) client using this adapter, e.g. en-US or de-CH. - */ - public String locale { get; set; } - - /** - * If true all line numbers are 1-based (default). - */ - public bool linesStartAt1 { get; set; } - - /** - * If true all column numbers are 1-based (default). - */ - public bool columnsStartAt1 { get; set; } - - /** - * Determines in what format paths are specified. The default is 'path', which is the native format. - * Values: 'path', 'uri', etc. - */ - public String pathFormat { get; set; } - - /** - * Client supports the optional type attribute for variables. - */ - public bool supportsVariableType { get; set; } - - /** - * Client supports the paging of variables. - */ - public bool supportsVariablePaging { get; set; } - - /** - * Client supports the runInTerminal request. - */ - public bool supportsRunInTerminalRequest { get; set; } - } - - public class DAPCapabilities : IDAPMessagePayload - { - /** - * The debug adapter supports the 'configurationDone' request. - */ - public bool supportsConfigurationDoneRequest { get; set; } - - /** - * The debug adapter supports function breakpoints. - */ - // TODO - public bool supportsFunctionBreakpoints { get; set; } - - /** - * The debug adapter supports conditional breakpoints. - */ - // TODO - public bool supportsConditionalBreakpoints { get; set; } - - /** - * The debug adapter supports breakpoints that break execution after a specified number of hits. - */ - // TODO - public bool supportsHitConditionalBreakpoints { get; set; } - - /** - * The debug adapter supports a (side effect free) evaluate request for data hovers. - */ - public bool supportsEvaluateForHovers { get; set; } - - /** - * The debug adapter supports setting a variable to a value. - */ - // TODO - public bool supportsSetVariable { get; set; } - - /** - * The debug adapter supports the 'stepInTargets' request. - */ - // TODO - public bool supportsStepInTargetsRequest { get; set; } - - /** - * The debug adapter supports the 'setExpression' request. - */ - // TODO - public bool supportsSetExpression { get; set; } - } - - /** - * Custom configuration class - */ - public class DAPCustomConfiguration - { - /** - * Requests the debugger to return raw call frames from the backend instead of - * merging and pretty printing the frames. 
- */ - public bool rawFrames { get; set; } - - /** - * Stop on all frames during single-stepping instead of meaningful frames. - */ - public bool stopOnAllFrames { get; set; } - - /** - * Stop inside database propagation calls during single-stepping. - */ - public bool stopOnDbPropagation { get; set; } - - /** - * Stop when a query inside an IF block fails. - */ - public bool stopOnFailedQueries { get; set; } - } - - /** - * Arguments for ‘launch’ request. Additional attributes are implementation specific. - */ - public class DAPLaunchRequest : IDAPMessagePayload - { - /** - * If noDebug is true the launch request should launch the program without enabling debugging. - */ - public bool noDebug { get; set; } - - /** - * Optional data from the previous, restarted session. - * The data is sent as the 'restart' attribute of the 'terminated' event. - * The client should leave the data intact. - */ - public object __restart { get; set; } - - /** - * Location of story debug symbol file - */ - public string debugInfoPath { get; set; } - - /** - * IP address of debugger backend server - */ - public string backendHost { get; set; } - - /** - * Port of debugger backend server - */ - public int backendPort { get; set; } - - /** - * UUID of the mod we're debugging - */ - public string modUuid { get; set; } - - /** - * Additional debugger configuration - */ - public DAPCustomConfiguration dbgOptions { get; set; } - } - - /** - * Response to ‘launch’ request. This is just an acknowledgement, so no body field is required. - */ - public class DAPLaunchResponse : IDAPMessagePayload - { - } - - public class DAPInitializedEvent : IDAPMessagePayload - { - } - - /** - * A Source is a descriptor for source code. It is returned from the debug adapter as part of a - * StackFrame and it is used by clients when specifying breakpoints. - */ - public class DAPSource - { - /** - * The short name of the source. Every source returned from the debug adapter has a name. When sending a source to the debug adapter this name is optional. - */ - public string name { get; set; } - - /** - * The path of the source to be shown in the UI. It is only used to locate and load the content of the source if no sourceReference is specified (or its value is 0). - */ - public string path { get; set; } - - /** - * If sourceReference > 0 the contents of the source must be retrieved through the SourceRequest (even if a path is specified). A sourceReference is only valid for a session, so it must not be used to persist a source. - */ - // public int? sourceReference { get; set; } - - /** - * An optional hint for how to present the source in the UI. A value of 'deemphasize' can be used to indicate that the source is not available or that it is skipped on stepping. - */ - // public string presentationHint { get; set; } - - /** - * The (optional) origin of this source: possible values 'internal module', 'inlined content from source map', etc. - */ - // public string origin { get; set; } - - /** - * An optional list of sources that are related to this source. These may be the source that generated this source. - */ - // public IList sources { get; set; } - - /** - * Optional data that a debug adapter might want to loop through the client. The client should leave the data intact and persist it across sessions. The client should not interpret the data. - */ - // public object adapterData { get; set; } - } - - /** - * Properties of a breakpoint or logpoint passed to the setBreakpoints request. 
- */ - public class DAPSourceBreakpoint - { - /** - * The source line of the breakpoint or logpoint. - */ - public int line { get; set; } - - /** - * An optional source column of the breakpoint. - */ - public int? column { get; set; } - - /** - * An optional expression for conditional breakpoints. - */ - public string condition { get; set; } - - /** - * An optional expression that controls how many hits of the breakpoint are ignored. The backend is expected to interpret the expression as needed. - */ - public string hitCondition { get; set; } - - /** - * If this attribute exists and is non-empty, the backend must not 'break' (stop) but log the message instead. Expressions within {} are interpolated. - */ - public string logMessage { get; set; } - } - - /** - * Sets multiple breakpoints for a single source and clears all previous breakpoints in that source. - * To clear all breakpoint for a source, specify an empty array. - * When a breakpoint is hit, a ‘stopped’ event (with reason ‘breakpoint’) is generated. - */ - public class DAPSetBreakpointsRequest : IDAPMessagePayload - { - /** - * The source location of the breakpoints; either 'source.path' or 'source.reference' must be specified. - */ - public DAPSource source { get; set; } - - /** - * The code locations of the breakpoints. - */ - public IList breakpoints { get; set; } - - /** - * A value of true indicates that the underlying source has been modified which results in new breakpoint locations. - */ - public bool sourceModified { get; set; } - } - - public class DAPBreakpoint - { - /** - * An optional unique identifier for the breakpoint. - */ - public int? id { get; set; } - - /** - * If true breakpoint could be set (but not necessarily at the desired location). - */ - public bool verified { get; set; } - - /** - * An optional message about the state of the breakpoint. This is shown to the user and can be used to explain why a breakpoint could not be verified. - */ - public string message { get; set; } - - /** - * The source where the breakpoint is located. - */ - public DAPSource source { get; set; } - - /** - * The start line of the actual range covered by the breakpoint. - */ - public int? line { get; set; } - - /** - * An optional start column of the actual range covered by the breakpoint. - */ - public int? column { get; set; } - - /** - * An optional end line of the actual range covered by the breakpoint. - */ - public int? endLine { get; set; } - - /** - * An optional end column of the actual range covered by the breakpoint. If no end line is given, then the end column is assumed to be in the start line. - */ - public int? endColumn { get; set; } - } - - /** - * Response to ‘setBreakpoints’ request. - * Returned is information about each breakpoint created by this request. - * This includes the actual code location and whether the breakpoint could be verified. - * The breakpoints returned are in the same order as the elements of the ‘breakpoints’ - * (or the deprecated ‘lines’) array in the arguments. - */ - public class DAPSetBreakpointsResponse : IDAPMessagePayload - { - /** - * Information about the breakpoints. The array elements are in the same order as the elements of the 'breakpoints' (or the deprecated 'lines') array in the arguments. - */ - public IList breakpoints { get; set; } - } - - public class DAPEmptyPayload : IDAPMessagePayload - { - } - - public class DAPThread - { - /** - * Unique identifier for the thread. - */ - public int id { get; set; } - - /** - * A name of the thread. 
- */ - public string name { get; set; } - } - - /** - * Response to ‘threads’ request. - * The request retrieves a list of all threads. - */ - public class DAPThreadsResponse : IDAPMessagePayload - { - /** - * All threads. - */ - public IList threads { get; set; } - } - - /** - * The ‘disconnect’ request is sent from the client to the debug adapter in order to stop debugging. It asks - * the debug adapter to disconnect from the debuggee and to terminate the debug adapter. If the debuggee - * has been started with the ‘launch’ request, the ‘disconnect’ request terminates the debuggee. If the - * ‘attach’ request was used to connect to the debuggee, ‘disconnect’ does not terminate the debuggee. - * This behavior can be controlled with the ‘terminateDebuggee’ (if supported by the debug adapter). - */ - public class DAPDisconnectRequest : IDAPMessagePayload - { - /** - * Indicates whether the debuggee should be terminated when the debugger is disconnected. - * If unspecified, the debug adapter is free to do whatever it thinks is best. - * A client can only rely on this attribute being properly honored if a debug adapter returns true for the 'supportTerminateDebuggee' capability. - */ - public bool? terminateDebuggee { get; set; } - } - - /** - * The event indicates that the execution of the debuggee has stopped due to some condition. - * - * This can be caused by a break point previously set, a stepping action has completed, by - * executing a debugger statement etc. - */ - public class DAPStoppedEvent : IDAPMessagePayload - { - /** - * The reason for the event. - * For backward compatibility this string is shown in the UI if the 'description' attribute is missing (but it must not be translated). - * Values: 'step', 'breakpoint', 'exception', 'pause', 'entry', 'goto', etc. - */ - public String reason { get; set; } - - /** - * The thread which was stopped. - */ - public int threadId { get; set; } - } - - /** - * Provides formatting information for a stack frame. - */ - public class DAPStackFrameFormat - { - /** - * Displays parameters for the stack frame. - */ - public bool? parameters { get; set; } - - /** - * Displays the types of parameters for the stack frame. - */ - public bool? parameterTypes { get; set; } - - /** - * Displays the names of parameters for the stack frame. - */ - public bool? parameterNames { get; set; } - - /** - * Displays the values of parameters for the stack frame. - */ - public bool? parameterValues { get; set; } - - /** - * Displays the line number of the stack frame. - */ - public bool? line { get; set; } - - /** - * Displays the module of the stack frame. - */ - public bool? module { get; set; } - - /** - * Includes all stack frames, including those the debug adapter might otherwise hide. - */ - public bool? includeAll { get; set; } - } - - /** - * The request returns a stacktrace from the current execution state. - */ - public class DAPStackFramesRequest : IDAPMessagePayload - { - /** - * Retrieve the stacktrace for this thread. - */ - public int threadId { get; set; } - - /** - * The index of the first frame to return; if omitted frames start at 0. - */ - public int? startFrame { get; set; } - - /** - * The maximum number of frames to return. If levels is not specified or 0, all frames are returned. - */ - public int? levels { get; set; } - - /** - * Specifies details on how to format the stack frames. - */ - public DAPStackFrameFormat format { get; set; } - } - - public class DAPStackFrame - { - /** - * An identifier for the stack frame. 
It must be unique across all threads. This id can be used to retrieve the scopes of the frame with the 'scopesRequest' or to restart the execution of a stackframe. - */ - public int id { get; set; } - - /** - * The name of the stack frame, typically a method name. - */ - public string name { get; set; } - - /** - * The optional source of the frame. - */ - public DAPSource source { get; set; } - - /** - * The line within the file of the frame. If source is null or doesn't exist, line is 0 and must be ignored. - */ - public int line { get; set; } - - /** - * The column within the line. If source is null or doesn't exist, column is 0 and must be ignored. - */ - public int column { get; set; } - - /** - * An optional end line of the range covered by the stack frame. - */ - //public int? endLine { get; set; } - - /** - * An optional end column of the range covered by the stack frame. - */ - //public int? endColumn { get; set; } - - /** - * An optional hint for how to present this frame in the UI. - * A value of 'label' can be used to indicate that the frame is an artificial frame that is used as a visual - * label or separator. A value of 'subtle' can be used to change the appearance of a frame in a 'subtle' way. - */ - //public string presentationHint { get; set; } - } - - /** - * Response to ‘stackTrace’ request. - */ - public class DAPStackFramesResponse : IDAPMessagePayload - { - /** - * The frames of the stackframe. If the array has length zero, there are no stackframes available. - * This means that there is no location information available. - */ - public IList stackFrames { get; set; } - - /** - * The total number of frames available. - */ - public int? totalFrames { get; set; } - } - - /** - * The request returns the variable scopes for a given stackframe ID. - */ - public class DAPScopesRequest : IDAPMessagePayload - { - /** - * Retrieve the scopes for this stackframe. - */ - public int frameId { get; set; } - } - - /** - * A Scope is a named container for variables. Optionally a scope can map to a - * source or a range within a source. - */ - public class DAPScope - { - /** - * Name of the scope such as 'Arguments', 'Locals'. - */ - public string name { get; set; } - - /** - * The variables of this scope can be retrieved by passing the value of variablesReference to the VariablesRequest. - */ - public long variablesReference { get; set; } - - /** - * The number of named variables in this scope. - * The client can use this optional information to present the variables in a paged UI and fetch them in chunks. - */ - public int? namedVariables { get; set; } - - /** - * The number of indexed variables in this scope. - * The client can use this optional information to present the variables in a paged UI and fetch them in chunks. - */ - public int? indexedVariables { get; set; } - - /** - * If true, the number of variables in this scope is large or expensive to retrieve. - */ - public bool expensive { get; set; } - - /** - * Optional source for this scope. - */ - public DAPSource source { get; set; } - - /** - * Optional start line of the range covered by this scope. - */ - public int? line { get; set; } - - /** - * Optional start column of the range covered by this scope. - */ - public int? column { get; set; } - - /** - * Optional end line of the range covered by this scope. - */ - public int? endLine { get; set; } - - /** - * Optional end column of the range covered by this scope. - */ - public int? endColumn { get; set; } - } - - /** - * Response to ‘scopes’ request. 
- */ - public class DAPScopesResponse : IDAPMessagePayload - { - public IList scopes { get; set; } - } - - /** - * Provides formatting information for a value. - */ - public class DAPValueFormat - { - /** - * Display the value in hex. - */ - public bool hex { get; set; } - } - - /** - * Retrieves all child variables for the given variable reference. - * - * An optional filter can be used to limit the fetched children to either named or indexed children. - */ - public class DAPVariablesRequest : IDAPMessagePayload - { - /** - * The Variable reference. - */ - public long variablesReference { get; set; } - - /** - * Optional filter to limit the child variables to either named or indexed. If ommited, both types are fetched. - */ - public string filter { get; set; } - - /** - * The index of the first variable to return; if omitted children start at 0. - */ - public int? start { get; set; } - - /** - * The number of variables to return. If count is missing or 0, all variables are returned. - */ - public int? count { get; set; } - - /** - * Specifies details on how to format the Variable values. - */ - public DAPValueFormat format { get; set; } - } - - /** - * Optional properties of a variable that can be used to determine how to render the variable in the UI. - */ - public class DAPVariablePresentationHint - { - /** - * The kind of variable. Before introducing additional values, try to use the listed values. - * Values: - * 'property': Indicates that the object is a property. - * 'method': Indicates that the object is a method. - * 'class': Indicates that the object is a class. - * 'data': Indicates that the object is data. - * 'event': Indicates that the object is an event. - * 'baseClass': Indicates that the object is a base class. - * 'innerClass': Indicates that the object is an inner class. - * 'interface': Indicates that the object is an interface. - * 'mostDerivedClass': Indicates that the object is the most derived class. - * 'virtual': Indicates that the object is virtual, that means it is a synthetic object introduced by the adapter for rendering purposes, e.g. an index range for large arrays. - * etc. - */ - public string kind { get; set; } - - /** - * Set of attributes represented as an array of strings. Before introducing additional values, try to use the listed values. - * Values: - * 'static': Indicates that the object is static. - * 'constant': Indicates that the object is a constant. - * 'readOnly': Indicates that the object is read only. - * 'rawString': Indicates that the object is a raw string. - * 'hasObjectId': Indicates that the object can have an Object ID created for it. - * 'canHaveObjectId': Indicates that the object has an Object ID associated with it. - * 'hasSideEffects': Indicates that the evaluation had side effects. - * etc. - */ - public IList attributes { get; set; } - - /** - * Visibility of variable. Before introducing additional values, try to use the listed values. - * Values: 'public', 'private', 'protected', 'internal', 'final', etc. - */ - public string visibility { get; set; } - } - - /** - * A Variable is a name/value pair. - * - * Optionally a variable can have a ‘type’ that is shown if space permits or when hovering over - * the variable’s name. - * - * An optional ‘kind’ is used to render additional properties of the variable, e.g. different icons - * can be used to indicate that a variable is public or private. - * - * If the value is structured (has children), a handle is provided to retrieve the children with the VariablesRequest. 
- * - * If the number of named or indexed children is large, the numbers should be returned via the optional - * ‘namedVariables’ and ‘indexedVariables’ attributes. - */ - public class DAPVariable - { - /** - * The variable's name. - */ - public string name { get; set; } - - /** - * The variable's value. This can be a multi-line text, e.g. for a function the body of a function. - */ - public string value { get; set; } - - /** - * The type of the variable's value. Typically shown in the UI when hovering over the value. - */ - public string type { get; set; } - - /** - * Properties of a variable that can be used to determine how to render the variable in the UI. - */ - // public DAPVariablePresentationHint presentationHint { get; set; } - - /** - * Optional evaluatable name of this variable which can be passed to the 'EvaluateRequest' to fetch the variable's value. - */ - // public string evaluateName { get; set; } - - /** - * If variablesReference is > 0, the variable is structured and its children can be retrieved by passing variablesReference to the VariablesRequest. - */ - public long variablesReference { get; set; } - - /** - * The number of named child variables. - * The client can use this optional information to present the children in a paged UI and fetch them in chunks. - */ - public int? namedVariables { get; set; } - - /** - * The number of indexed child variables. - * The client can use this optional information to present the children in a paged UI and fetch them in chunks. - */ - public int? indexedVariables { get; set; } - } - - /** - * Response to ‘variables’ request. - */ - public class DAPVariablesResponse : IDAPMessagePayload - { - public IList variables { get; set; } - } - - /** - * The request starts the debuggee to run again. - */ - public class DAPContinueRequest : IDAPMessagePayload - { - /** - * Continue execution for the specified thread (if possible). If the backend cannot continue on a single thread - * but will continue on all threads, it should set the 'allThreadsContinued' attribute in the response to true. - */ - public int threadId { get; set; } - } - - /** - * Response to ‘continue’ request. - */ - public class DAPContinueResponse : IDAPMessagePayload - { - /** - * If true, the 'continue' request has ignored the specified thread and continued all threads instead. - * If this attribute is missing a value of 'true' is assumed for backward compatibility. - */ - public bool allThreadsContinued { get; set; } - } - - /** - * The event indicates that the target has produced some output. - */ - public class DAPOutputMessage : IDAPMessagePayload - { - /** - * The output category. If not specified, 'console' is assumed. - * Values: 'console', 'stdout', 'stderr', 'telemetry', etc. - */ - public string category { get; set; } - - /** - * The output to report. - */ - public string output { get; set; } - } - - /** - * The event indicates that debugging of the debuggee has terminated. - * This does not mean that the debuggee itself has exited. - */ - public class DAPTerminatedEvent : IDAPMessagePayload - { - /** - * A debug adapter may set 'restart' to true (or to an arbitrary object) to request that the front end restarts the session. - * The value is not interpreted by the client and passed unmodified as an attribute '__restart' to the 'launch' and 'attach' requests. - */ - // public object restart { get; set; }; - } - - /** - * The event indicates that some information about a breakpoint has changed. 
- */ - public class DAPBreakpointEvent : IDAPMessagePayload - { - /** - * The reason for the event. - * Values: 'changed', 'new', 'removed', etc. - */ - public String reason { get; set; } - - /** - * The breakpoint. - */ - public DAPBreakpoint breakpoint; - } - - /** - * Evaluates the given expression in the context of the top most stack frame. - * The expression has access to any variables and arguments that are in scope. - */ - public class DAPEvaulateRequest : IDAPMessagePayload - { - /** - * The expression to evaluate. - */ - public string expression { get; set; } - - /** - * Evaluate the expression in the scope of this stack frame. If not specified, the expression is evaluated in the global scope. - */ - public int? frameId { get; set; } - - /** - * The context in which the evaluate request is run. - * Values: - * 'watch': evaluate is run in a watch. - * 'repl': evaluate is run from REPL console. - * 'hover': evaluate is run from a data hover. - * etc. - */ - public string context { get; set; } - - /** - * Specifies details on how to format the Evaluate result. - */ - public DAPValueFormat format { get; set; } - } - - /** - * Response to ‘evaluate’ request. - */ - public class DAPEvaluateResponse : IDAPMessagePayload - { - /** - * The result of the evaluate request. - */ - public string result { get; set; } - - /** - * The optional type of the evaluate result. - */ - public string type { get; set; } - - /** - * Properties of a evaluate result that can be used to determine how to render the result in the UI. - */ - public DAPVariablePresentationHint presentationHint { get; set; } - - /** - * If variablesReference is > 0, the evaluate result is structured and its children can be retrieved by passing variablesReference to the VariablesRequest. - */ - public long variablesReference { get; set; } - - /** - * The number of named child variables. - * The client can use this optional information to present the variables in a paged UI and fetch them in chunks. - */ - public int? namedVariables { get; set; } - - /** - * The number of indexed child variables. - * The client can use this optional information to present the variables in a paged UI and fetch them in chunks. - */ - public int? indexedVariables { get; set; } - } - - /** - * The event transmits the output of the last div query - */ - public class DAPCustomVersionInfoEvent : IDAPMessagePayload - { - /** - * DAP protocol version - */ - public int version; - } - - /** - * The event transmits the output of the last div query - */ - public class DAPCustomQueryResultEvent : IDAPMessagePayload - { - /** - * Did the query succeed? - */ - public bool succeeded; - } +namespace LSTools.DebuggerFrontend; + +/** + * Base class of requests, responses, and events. + */ +public class DAPMessage +{ + /** + * Sequence number. + */ + public Int32 seq { get; set; } + + /** + * Message type. + * Values: 'request', 'response', 'event', etc. + */ + public String type { get; set; } +} + +/** + * A client or debug adapter initiated request. + */ +public class DAPRequest : DAPMessage +{ + /** + * The command to execute. + */ + public String command { get; set; } + + /** + * Object containing arguments for the command. + */ + public IDAPMessagePayload arguments; +} + +/** + * A debug adapter initiated event. + */ +public class DAPEvent : DAPMessage +{ + /** + * Type of event. + */ + public String @event { get; set; } + + /** + * Event-specific information + */ + public IDAPMessagePayload body; +} + +/** + * Response for a request. 
+ */ +public class DAPResponse : DAPMessage +{ + /** + * Sequence number of the corresponding request. + */ + public Int32 request_seq { get; set; } + + /** + * Outcome of the request. + */ + public bool success { get; set; } + + /** + * The command requested. + */ + public String command { get; set; } + + /** + * Contains error message if success == false. + */ + public String message { get; set; } + + /** + * Contains request result if success is true and optional error details if success is false. + */ + public IDAPMessagePayload body; +} + +public interface IDAPMessagePayload { } + +public class DAPInitializeRequest : IDAPMessagePayload +{ + /** + * The ID of the (frontend) client using this adapter. + */ + public String clientID { get; set; } + + /** + * The human readable name of the (frontend) client using this adapter. + */ + public String clientName { get; set; } + + /** + * The ID of the debug adapter. + */ + public String adapterID { get; set; } + + /** + * The ISO-639 locale of the (frontend) client using this adapter, e.g. en-US or de-CH. + */ + public String locale { get; set; } + + /** + * If true all line numbers are 1-based (default). + */ + public bool linesStartAt1 { get; set; } + + /** + * If true all column numbers are 1-based (default). + */ + public bool columnsStartAt1 { get; set; } + + /** + * Determines in what format paths are specified. The default is 'path', which is the native format. + * Values: 'path', 'uri', etc. + */ + public String pathFormat { get; set; } + + /** + * Client supports the optional type attribute for variables. + */ + public bool supportsVariableType { get; set; } + + /** + * Client supports the paging of variables. + */ + public bool supportsVariablePaging { get; set; } + + /** + * Client supports the runInTerminal request. + */ + public bool supportsRunInTerminalRequest { get; set; } +} + +public class DAPCapabilities : IDAPMessagePayload +{ + /** + * The debug adapter supports the 'configurationDone' request. + */ + public bool supportsConfigurationDoneRequest { get; set; } + + /** + * The debug adapter supports function breakpoints. + */ + // TODO - public bool supportsFunctionBreakpoints { get; set; } + + /** + * The debug adapter supports conditional breakpoints. + */ + // TODO - public bool supportsConditionalBreakpoints { get; set; } + + /** + * The debug adapter supports breakpoints that break execution after a specified number of hits. + */ + // TODO - public bool supportsHitConditionalBreakpoints { get; set; } + + /** + * The debug adapter supports a (side effect free) evaluate request for data hovers. + */ + public bool supportsEvaluateForHovers { get; set; } + + /** + * The debug adapter supports setting a variable to a value. + */ + // TODO - public bool supportsSetVariable { get; set; } + + /** + * The debug adapter supports the 'stepInTargets' request. + */ + // TODO - public bool supportsStepInTargetsRequest { get; set; } + + /** + * The debug adapter supports the 'setExpression' request. + */ + // TODO - public bool supportsSetExpression { get; set; } +} + +/** + * Custom configuration class + */ +public class DAPCustomConfiguration +{ + /** + * Requests the debugger to return raw call frames from the backend instead of + * merging and pretty printing the frames. + */ + public bool rawFrames { get; set; } + + /** + * Stop on all frames during single-stepping instead of meaningful frames. + */ + public bool stopOnAllFrames { get; set; } + + /** + * Stop inside database propagation calls during single-stepping. 
+ */ + public bool stopOnDbPropagation { get; set; } + + /** + * Stop when a query inside an IF block fails. + */ + public bool stopOnFailedQueries { get; set; } +} + +/** + * Arguments for ‘launch’ request. Additional attributes are implementation specific. + */ +public class DAPLaunchRequest : IDAPMessagePayload +{ + /** + * If noDebug is true the launch request should launch the program without enabling debugging. + */ + public bool noDebug { get; set; } + + /** + * Optional data from the previous, restarted session. + * The data is sent as the 'restart' attribute of the 'terminated' event. + * The client should leave the data intact. + */ + public object __restart { get; set; } + + /** + * Location of story debug symbol file + */ + public string debugInfoPath { get; set; } + + /** + * IP address of debugger backend server + */ + public string backendHost { get; set; } + + /** + * Port of debugger backend server + */ + public int backendPort { get; set; } + + /** + * UUID of the mod we're debugging + */ + public string modUuid { get; set; } + + /** + * Additional debugger configuration + */ + public DAPCustomConfiguration dbgOptions { get; set; } +} + +/** + * Response to ‘launch’ request. This is just an acknowledgement, so no body field is required. + */ +public class DAPLaunchResponse : IDAPMessagePayload +{ +} + +public class DAPInitializedEvent : IDAPMessagePayload +{ +} + +/** + * A Source is a descriptor for source code. It is returned from the debug adapter as part of a + * StackFrame and it is used by clients when specifying breakpoints. + */ +public class DAPSource +{ + /** + * The short name of the source. Every source returned from the debug adapter has a name. When sending a source to the debug adapter this name is optional. + */ + public string name { get; set; } + + /** + * The path of the source to be shown in the UI. It is only used to locate and load the content of the source if no sourceReference is specified (or its value is 0). + */ + public string path { get; set; } + + /** + * If sourceReference > 0 the contents of the source must be retrieved through the SourceRequest (even if a path is specified). A sourceReference is only valid for a session, so it must not be used to persist a source. + */ + // public int? sourceReference { get; set; } + + /** + * An optional hint for how to present the source in the UI. A value of 'deemphasize' can be used to indicate that the source is not available or that it is skipped on stepping. + */ + // public string presentationHint { get; set; } + + /** + * The (optional) origin of this source: possible values 'internal module', 'inlined content from source map', etc. + */ + // public string origin { get; set; } + + /** + * An optional list of sources that are related to this source. These may be the source that generated this source. + */ + // public IList sources { get; set; } + + /** + * Optional data that a debug adapter might want to loop through the client. The client should leave the data intact and persist it across sessions. The client should not interpret the data. + */ + // public object adapterData { get; set; } +} + +/** + * Properties of a breakpoint or logpoint passed to the setBreakpoints request. + */ +public class DAPSourceBreakpoint +{ + /** + * The source line of the breakpoint or logpoint. + */ + public int line { get; set; } + + /** + * An optional source column of the breakpoint. + */ + public int? column { get; set; } + + /** + * An optional expression for conditional breakpoints. 
+ */ + public string condition { get; set; } + + /** + * An optional expression that controls how many hits of the breakpoint are ignored. The backend is expected to interpret the expression as needed. + */ + public string hitCondition { get; set; } + + /** + * If this attribute exists and is non-empty, the backend must not 'break' (stop) but log the message instead. Expressions within {} are interpolated. + */ + public string logMessage { get; set; } +} + +/** + * Sets multiple breakpoints for a single source and clears all previous breakpoints in that source. + * To clear all breakpoint for a source, specify an empty array. + * When a breakpoint is hit, a ‘stopped’ event (with reason ‘breakpoint’) is generated. + */ +public class DAPSetBreakpointsRequest : IDAPMessagePayload +{ + /** + * The source location of the breakpoints; either 'source.path' or 'source.reference' must be specified. + */ + public DAPSource source { get; set; } + + /** + * The code locations of the breakpoints. + */ + public IList breakpoints { get; set; } + + /** + * A value of true indicates that the underlying source has been modified which results in new breakpoint locations. + */ + public bool sourceModified { get; set; } +} + +public class DAPBreakpoint +{ + /** + * An optional unique identifier for the breakpoint. + */ + public int? id { get; set; } + + /** + * If true breakpoint could be set (but not necessarily at the desired location). + */ + public bool verified { get; set; } + + /** + * An optional message about the state of the breakpoint. This is shown to the user and can be used to explain why a breakpoint could not be verified. + */ + public string message { get; set; } + + /** + * The source where the breakpoint is located. + */ + public DAPSource source { get; set; } + + /** + * The start line of the actual range covered by the breakpoint. + */ + public int? line { get; set; } + + /** + * An optional start column of the actual range covered by the breakpoint. + */ + public int? column { get; set; } + + /** + * An optional end line of the actual range covered by the breakpoint. + */ + public int? endLine { get; set; } + + /** + * An optional end column of the actual range covered by the breakpoint. If no end line is given, then the end column is assumed to be in the start line. + */ + public int? endColumn { get; set; } +} + +/** + * Response to ‘setBreakpoints’ request. + * Returned is information about each breakpoint created by this request. + * This includes the actual code location and whether the breakpoint could be verified. + * The breakpoints returned are in the same order as the elements of the ‘breakpoints’ + * (or the deprecated ‘lines’) array in the arguments. + */ +public class DAPSetBreakpointsResponse : IDAPMessagePayload +{ + /** + * Information about the breakpoints. The array elements are in the same order as the elements of the 'breakpoints' (or the deprecated 'lines') array in the arguments. + */ + public IList breakpoints { get; set; } +} + +public class DAPEmptyPayload : IDAPMessagePayload +{ +} + +public class DAPThread +{ + /** + * Unique identifier for the thread. + */ + public int id { get; set; } + + /** + * A name of the thread. + */ + public string name { get; set; } +} + +/** + * Response to ‘threads’ request. + * The request retrieves a list of all threads. + */ +public class DAPThreadsResponse : IDAPMessagePayload +{ + /** + * All threads. 
+ */ + public IList threads { get; set; } +} + +/** + * The ‘disconnect’ request is sent from the client to the debug adapter in order to stop debugging. It asks + * the debug adapter to disconnect from the debuggee and to terminate the debug adapter. If the debuggee + * has been started with the ‘launch’ request, the ‘disconnect’ request terminates the debuggee. If the + * ‘attach’ request was used to connect to the debuggee, ‘disconnect’ does not terminate the debuggee. + * This behavior can be controlled with the ‘terminateDebuggee’ (if supported by the debug adapter). + */ +public class DAPDisconnectRequest : IDAPMessagePayload +{ + /** + * Indicates whether the debuggee should be terminated when the debugger is disconnected. + * If unspecified, the debug adapter is free to do whatever it thinks is best. + * A client can only rely on this attribute being properly honored if a debug adapter returns true for the 'supportTerminateDebuggee' capability. + */ + public bool? terminateDebuggee { get; set; } +} + +/** + * The event indicates that the execution of the debuggee has stopped due to some condition. + * + * This can be caused by a break point previously set, a stepping action has completed, by + * executing a debugger statement etc. + */ +public class DAPStoppedEvent : IDAPMessagePayload +{ + /** + * The reason for the event. + * For backward compatibility this string is shown in the UI if the 'description' attribute is missing (but it must not be translated). + * Values: 'step', 'breakpoint', 'exception', 'pause', 'entry', 'goto', etc. + */ + public String reason { get; set; } + + /** + * The thread which was stopped. + */ + public int threadId { get; set; } +} + +/** + * Provides formatting information for a stack frame. + */ +public class DAPStackFrameFormat +{ + /** + * Displays parameters for the stack frame. + */ + public bool? parameters { get; set; } + + /** + * Displays the types of parameters for the stack frame. + */ + public bool? parameterTypes { get; set; } + + /** + * Displays the names of parameters for the stack frame. + */ + public bool? parameterNames { get; set; } + + /** + * Displays the values of parameters for the stack frame. + */ + public bool? parameterValues { get; set; } + + /** + * Displays the line number of the stack frame. + */ + public bool? line { get; set; } + + /** + * Displays the module of the stack frame. + */ + public bool? module { get; set; } + + /** + * Includes all stack frames, including those the debug adapter might otherwise hide. + */ + public bool? includeAll { get; set; } +} + +/** + * The request returns a stacktrace from the current execution state. + */ +public class DAPStackFramesRequest : IDAPMessagePayload +{ + /** + * Retrieve the stacktrace for this thread. + */ + public int threadId { get; set; } + + /** + * The index of the first frame to return; if omitted frames start at 0. + */ + public int? startFrame { get; set; } + + /** + * The maximum number of frames to return. If levels is not specified or 0, all frames are returned. + */ + public int? levels { get; set; } + + /** + * Specifies details on how to format the stack frames. + */ + public DAPStackFrameFormat format { get; set; } +} + +public class DAPStackFrame +{ + /** + * An identifier for the stack frame. It must be unique across all threads. This id can be used to retrieve the scopes of the frame with the 'scopesRequest' or to restart the execution of a stackframe. + */ + public int id { get; set; } + + /** + * The name of the stack frame, typically a method name. 
+ */ + public string name { get; set; } + + /** + * The optional source of the frame. + */ + public DAPSource source { get; set; } + + /** + * The line within the file of the frame. If source is null or doesn't exist, line is 0 and must be ignored. + */ + public int line { get; set; } + + /** + * The column within the line. If source is null or doesn't exist, column is 0 and must be ignored. + */ + public int column { get; set; } + + /** + * An optional end line of the range covered by the stack frame. + */ + //public int? endLine { get; set; } + + /** + * An optional end column of the range covered by the stack frame. + */ + //public int? endColumn { get; set; } + + /** + * An optional hint for how to present this frame in the UI. + * A value of 'label' can be used to indicate that the frame is an artificial frame that is used as a visual + * label or separator. A value of 'subtle' can be used to change the appearance of a frame in a 'subtle' way. + */ + //public string presentationHint { get; set; } +} + +/** + * Response to ‘stackTrace’ request. + */ +public class DAPStackFramesResponse : IDAPMessagePayload +{ + /** + * The frames of the stackframe. If the array has length zero, there are no stackframes available. + * This means that there is no location information available. + */ + public IList stackFrames { get; set; } + + /** + * The total number of frames available. + */ + public int? totalFrames { get; set; } +} + +/** + * The request returns the variable scopes for a given stackframe ID. + */ +public class DAPScopesRequest : IDAPMessagePayload +{ + /** + * Retrieve the scopes for this stackframe. + */ + public int frameId { get; set; } +} + +/** + * A Scope is a named container for variables. Optionally a scope can map to a + * source or a range within a source. + */ +public class DAPScope +{ + /** + * Name of the scope such as 'Arguments', 'Locals'. + */ + public string name { get; set; } + + /** + * The variables of this scope can be retrieved by passing the value of variablesReference to the VariablesRequest. + */ + public long variablesReference { get; set; } + + /** + * The number of named variables in this scope. + * The client can use this optional information to present the variables in a paged UI and fetch them in chunks. + */ + public int? namedVariables { get; set; } + + /** + * The number of indexed variables in this scope. + * The client can use this optional information to present the variables in a paged UI and fetch them in chunks. + */ + public int? indexedVariables { get; set; } + + /** + * If true, the number of variables in this scope is large or expensive to retrieve. + */ + public bool expensive { get; set; } + + /** + * Optional source for this scope. + */ + public DAPSource source { get; set; } + + /** + * Optional start line of the range covered by this scope. + */ + public int? line { get; set; } + + /** + * Optional start column of the range covered by this scope. + */ + public int? column { get; set; } + + /** + * Optional end line of the range covered by this scope. + */ + public int? endLine { get; set; } + + /** + * Optional end column of the range covered by this scope. + */ + public int? endColumn { get; set; } +} + +/** + * Response to ‘scopes’ request. + */ +public class DAPScopesResponse : IDAPMessagePayload +{ + public IList scopes { get; set; } +} + +/** + * Provides formatting information for a value. + */ +public class DAPValueFormat +{ + /** + * Display the value in hex. 
+ */ + public bool hex { get; set; } +} + +/** + * Retrieves all child variables for the given variable reference. + * + * An optional filter can be used to limit the fetched children to either named or indexed children. + */ +public class DAPVariablesRequest : IDAPMessagePayload +{ + /** + * The Variable reference. + */ + public long variablesReference { get; set; } + + /** + * Optional filter to limit the child variables to either named or indexed. If ommited, both types are fetched. + */ + public string filter { get; set; } + + /** + * The index of the first variable to return; if omitted children start at 0. + */ + public int? start { get; set; } + + /** + * The number of variables to return. If count is missing or 0, all variables are returned. + */ + public int? count { get; set; } + + /** + * Specifies details on how to format the Variable values. + */ + public DAPValueFormat format { get; set; } +} + +/** + * Optional properties of a variable that can be used to determine how to render the variable in the UI. + */ +public class DAPVariablePresentationHint +{ + /** + * The kind of variable. Before introducing additional values, try to use the listed values. + * Values: + * 'property': Indicates that the object is a property. + * 'method': Indicates that the object is a method. + * 'class': Indicates that the object is a class. + * 'data': Indicates that the object is data. + * 'event': Indicates that the object is an event. + * 'baseClass': Indicates that the object is a base class. + * 'innerClass': Indicates that the object is an inner class. + * 'interface': Indicates that the object is an interface. + * 'mostDerivedClass': Indicates that the object is the most derived class. + * 'virtual': Indicates that the object is virtual, that means it is a synthetic object introduced by the adapter for rendering purposes, e.g. an index range for large arrays. + * etc. + */ + public string kind { get; set; } + + /** + * Set of attributes represented as an array of strings. Before introducing additional values, try to use the listed values. + * Values: + * 'static': Indicates that the object is static. + * 'constant': Indicates that the object is a constant. + * 'readOnly': Indicates that the object is read only. + * 'rawString': Indicates that the object is a raw string. + * 'hasObjectId': Indicates that the object can have an Object ID created for it. + * 'canHaveObjectId': Indicates that the object has an Object ID associated with it. + * 'hasSideEffects': Indicates that the evaluation had side effects. + * etc. + */ + public IList attributes { get; set; } + + /** + * Visibility of variable. Before introducing additional values, try to use the listed values. + * Values: 'public', 'private', 'protected', 'internal', 'final', etc. + */ + public string visibility { get; set; } +} + +/** + * A Variable is a name/value pair. + * + * Optionally a variable can have a ‘type’ that is shown if space permits or when hovering over + * the variable’s name. + * + * An optional ‘kind’ is used to render additional properties of the variable, e.g. different icons + * can be used to indicate that a variable is public or private. + * + * If the value is structured (has children), a handle is provided to retrieve the children with the VariablesRequest. + * + * If the number of named or indexed children is large, the numbers should be returned via the optional + * ‘namedVariables’ and ‘indexedVariables’ attributes. + */ +public class DAPVariable +{ + /** + * The variable's name. 
+ */ + public string name { get; set; } + + /** + * The variable's value. This can be a multi-line text, e.g. for a function the body of a function. + */ + public string value { get; set; } + + /** + * The type of the variable's value. Typically shown in the UI when hovering over the value. + */ + public string type { get; set; } + + /** + * Properties of a variable that can be used to determine how to render the variable in the UI. + */ + // public DAPVariablePresentationHint presentationHint { get; set; } + + /** + * Optional evaluatable name of this variable which can be passed to the 'EvaluateRequest' to fetch the variable's value. + */ + // public string evaluateName { get; set; } + + /** + * If variablesReference is > 0, the variable is structured and its children can be retrieved by passing variablesReference to the VariablesRequest. + */ + public long variablesReference { get; set; } + + /** + * The number of named child variables. + * The client can use this optional information to present the children in a paged UI and fetch them in chunks. + */ + public int? namedVariables { get; set; } + + /** + * The number of indexed child variables. + * The client can use this optional information to present the children in a paged UI and fetch them in chunks. + */ + public int? indexedVariables { get; set; } +} + +/** + * Response to ‘variables’ request. + */ +public class DAPVariablesResponse : IDAPMessagePayload +{ + public IList variables { get; set; } +} + +/** + * The request starts the debuggee to run again. + */ +public class DAPContinueRequest : IDAPMessagePayload +{ + /** + * Continue execution for the specified thread (if possible). If the backend cannot continue on a single thread + * but will continue on all threads, it should set the 'allThreadsContinued' attribute in the response to true. + */ + public int threadId { get; set; } +} + +/** + * Response to ‘continue’ request. + */ +public class DAPContinueResponse : IDAPMessagePayload +{ + /** + * If true, the 'continue' request has ignored the specified thread and continued all threads instead. + * If this attribute is missing a value of 'true' is assumed for backward compatibility. + */ + public bool allThreadsContinued { get; set; } +} + +/** + * The event indicates that the target has produced some output. + */ +public class DAPOutputMessage : IDAPMessagePayload +{ + /** + * The output category. If not specified, 'console' is assumed. + * Values: 'console', 'stdout', 'stderr', 'telemetry', etc. + */ + public string category { get; set; } + + /** + * The output to report. + */ + public string output { get; set; } +} + +/** + * The event indicates that debugging of the debuggee has terminated. + * This does not mean that the debuggee itself has exited. + */ +public class DAPTerminatedEvent : IDAPMessagePayload +{ + /** + * A debug adapter may set 'restart' to true (or to an arbitrary object) to request that the front end restarts the session. + * The value is not interpreted by the client and passed unmodified as an attribute '__restart' to the 'launch' and 'attach' requests. + */ + // public object restart { get; set; }; +} + +/** + * The event indicates that some information about a breakpoint has changed. + */ +public class DAPBreakpointEvent : IDAPMessagePayload +{ + /** + * The reason for the event. + * Values: 'changed', 'new', 'removed', etc. + */ + public String reason { get; set; } + + /** + * The breakpoint. 
+ */ + public DAPBreakpoint breakpoint; +} + +/** + * Evaluates the given expression in the context of the top most stack frame. + * The expression has access to any variables and arguments that are in scope. + */ +public class DAPEvaulateRequest : IDAPMessagePayload +{ + /** + * The expression to evaluate. + */ + public string expression { get; set; } + + /** + * Evaluate the expression in the scope of this stack frame. If not specified, the expression is evaluated in the global scope. + */ + public int? frameId { get; set; } + + /** + * The context in which the evaluate request is run. + * Values: + * 'watch': evaluate is run in a watch. + * 'repl': evaluate is run from REPL console. + * 'hover': evaluate is run from a data hover. + * etc. + */ + public string context { get; set; } + + /** + * Specifies details on how to format the Evaluate result. + */ + public DAPValueFormat format { get; set; } +} + +/** + * Response to ‘evaluate’ request. + */ +public class DAPEvaluateResponse : IDAPMessagePayload +{ + /** + * The result of the evaluate request. + */ + public string result { get; set; } + + /** + * The optional type of the evaluate result. + */ + public string type { get; set; } + + /** + * Properties of a evaluate result that can be used to determine how to render the result in the UI. + */ + public DAPVariablePresentationHint presentationHint { get; set; } + + /** + * If variablesReference is > 0, the evaluate result is structured and its children can be retrieved by passing variablesReference to the VariablesRequest. + */ + public long variablesReference { get; set; } + + /** + * The number of named child variables. + * The client can use this optional information to present the variables in a paged UI and fetch them in chunks. + */ + public int? namedVariables { get; set; } + + /** + * The number of indexed child variables. + * The client can use this optional information to present the variables in a paged UI and fetch them in chunks. + */ + public int? indexedVariables { get; set; } +} + +/** + * The event transmits the output of the last div query + */ +public class DAPCustomVersionInfoEvent : IDAPMessagePayload +{ + /** + * DAP protocol version + */ + public int version; +} + +/** + * The event transmits the output of the last div query + */ +public class DAPCustomQueryResultEvent : IDAPMessagePayload +{ + /** + * Did the query succeed? 
+     */
+    public bool succeeded;
 }
diff --git a/DebuggerFrontend/DAPStream.cs b/DebuggerFrontend/DAPStream.cs
index 4ebd4ca9..1842a4e8 100644
--- a/DebuggerFrontend/DAPStream.cs
+++ b/DebuggerFrontend/DAPStream.cs
@@ -6,198 +6,197 @@
 using System.Text.RegularExpressions;
 using System.Threading.Tasks;
-namespace LSTools.DebuggerFrontend
+namespace LSTools.DebuggerFrontend;
+
+public class DAPStream
 {
-    public class DAPStream
+    private Stream Input;
+    //private Stream Output;
+    private StreamReader InputReader;
+    //private StreamWriter OutputWriter;
+    private Stream LogStream;
+    private Int32 OutgoingSeq = 1;
+    private Int32 IncomingSeq = 1;
+
+    public delegate void MessageReceivedDelegate(DAPMessage message);
+    public MessageReceivedDelegate MessageReceived = delegate { };
+
+    public DAPStream()
     {
-        private Stream Input;
-        //private Stream Output;
-        private StreamReader InputReader;
-        //private StreamWriter OutputWriter;
-        private Stream LogStream;
-        private Int32 OutgoingSeq = 1;
-        private Int32 IncomingSeq = 1;
-
-        public delegate void MessageReceivedDelegate(DAPMessage message);
-        public MessageReceivedDelegate MessageReceived = delegate { };
-
-        public DAPStream()
-        {
-            Input = Console.OpenStandardInput();
-            //Output = Console.OpenStandardOutput();
+        Input = Console.OpenStandardInput();
+        //Output = Console.OpenStandardOutput();
-            InputReader = new StreamReader(Input, Encoding.UTF8);
-            //OutputWriter = new StreamWriter(Output, Encoding.UTF8);
-        }
+        InputReader = new StreamReader(Input, Encoding.UTF8);
+        //OutputWriter = new StreamWriter(Output, Encoding.UTF8);
+    }
-        public void EnableLogging(Stream logStream)
+    public void EnableLogging(Stream logStream)
+    {
+        LogStream = logStream;
+    }
+
+    private void ProcessPayload(char[] payload)
+    {
+        if (LogStream != null)
         {
-            LogStream = logStream;
+            using (var writer = new StreamWriter(LogStream, Encoding.UTF8, 0x1000, true))
+            {
+                writer.Write(" DAP >>> ");
+                writer.Write(payload);
+                writer.Write("\r\n");
+            }
         }
-        private void ProcessPayload(char[] payload)
+        DAPMessage message = null;
+        try
+        {
+            message = DAPMessageSerializer.Unserialize(payload);
+        }
+        catch (DAPUnknownMessageException e)
         {
             if (LogStream != null)
             {
                 using (var writer = new StreamWriter(LogStream, Encoding.UTF8, 0x1000, true))
                 {
-                    writer.Write(" DAP >>> ");
-                    writer.Write(payload);
-                    writer.Write("\r\n");
+                    writer.WriteLine(" DAP !!! Could not decode DAP message: " + e.Message);
                 }
             }
-            DAPMessage message = null;
-            try
+            if (e.Type == "request")
             {
-                message = DAPMessageSerializer.Unserialize(payload);
-            }
-            catch (DAPUnknownMessageException e)
-            {
-                if (LogStream != null)
-                {
-                    using (var writer = new StreamWriter(LogStream, Encoding.UTF8, 0x1000, true))
-                    {
-                        writer.WriteLine(" DAP !!! 
Could not decode DAP message: " + e.Message); - } - } - - if (e.Type == "request") - { - SendErrorReply(e.Seq, e.MessageType, e.Message); - } - - IncomingSeq++; - return; - } - - if (message.seq != IncomingSeq) - { - throw new InvalidDataException($"DAP sequence number mismatch; got {message.seq} expected {IncomingSeq}"); + SendErrorReply(e.Seq, e.MessageType, e.Message); } IncomingSeq++; - MessageReceived(message); + return; } - public void RunLoop() + if (message.seq != IncomingSeq) { - var lineRe = new Regex("^([^:]*):\\s*(.*)$", RegexOptions.Compiled); + throw new InvalidDataException($"DAP sequence number mismatch; got {message.seq} expected {IncomingSeq}"); + } + IncomingSeq++; + MessageReceived(message); + } + + public void RunLoop() + { + var lineRe = new Regex("^([^:]*):\\s*(.*)$", RegexOptions.Compiled); + + while (true) + { + Dictionary headers = new Dictionary(); while (true) { - Dictionary headers = new Dictionary(); - while (true) + var line = InputReader.ReadLine(); + if (line == null && InputReader.EndOfStream) { - var line = InputReader.ReadLine(); - if (line == null && InputReader.EndOfStream) - { - return; - } - - if (line.Length == 0) - { - break; - } - - var matches = lineRe.Match(line); - if (!matches.Success) - { - throw new InvalidDataException($"Malformed header line: {line}"); - } - - headers.Add(matches.Groups[1].Value, matches.Groups[2].Value); - if (matches.Groups[1].Value != "Content-Length") - { - throw new InvalidDataException($"{matches.Groups[1].Value}={matches.Groups[2].Value}"); - } + return; } - if (headers.Count == 0) throw new InvalidDataException("Empty headers."); - - var length = Int32.Parse(headers["Content-Length"]); - var payload = new char[length]; - var read = InputReader.Read(payload, 0, length); - if (read != length) + if (line.Length == 0) { - throw new InvalidDataException($"Could not read {length} bytes of payload (got {read})"); + break; } - ProcessPayload(payload); - } - } - - public void Send(DAPMessage message) - { - message.seq = OutgoingSeq++; - var encoded = DAPMessageSerializer.Serialize(message); + var matches = lineRe.Match(line); + if (!matches.Success) + { + throw new InvalidDataException($"Malformed header line: {line}"); + } - if (LogStream != null) - { - using (var writer = new StreamWriter(LogStream, Encoding.UTF8, 0x1000, true)) + headers.Add(matches.Groups[1].Value, matches.Groups[2].Value); + if (matches.Groups[1].Value != "Content-Length") { - writer.Write(" DAP <<< "); - writer.Write(encoded); - writer.Write("\r\n"); + throw new InvalidDataException($"{matches.Groups[1].Value}={matches.Groups[2].Value}"); } } - Console.Write($"Content-Length: {encoded.Length}\r\n\r\n"); - Console.Write(encoded); + if (headers.Count == 0) throw new InvalidDataException("Empty headers."); + + var length = Int32.Parse(headers["Content-Length"]); + var payload = new char[length]; + var read = InputReader.Read(payload, 0, length); + if (read != length) + { + throw new InvalidDataException($"Could not read {length} bytes of payload (got {read})"); + } + + ProcessPayload(payload); } + } + + public void Send(DAPMessage message) + { + message.seq = OutgoingSeq++; + var encoded = DAPMessageSerializer.Serialize(message); - private void SendErrorReply(int requestSeq, string command, string errorText) + if (LogStream != null) { - var reply = new DAPResponse + using (var writer = new StreamWriter(LogStream, Encoding.UTF8, 0x1000, true)) { - type = "response", - request_seq = requestSeq, - success = false, - command = command, - message = errorText 
- }; - - Send(reply); + writer.Write(" DAP <<< "); + writer.Write(encoded); + writer.Write("\r\n"); + } } - public void SendEvent(string command, IDAPMessagePayload body) + Console.Write($"Content-Length: {encoded.Length}\r\n\r\n"); + Console.Write(encoded); + } + + private void SendErrorReply(int requestSeq, string command, string errorText) + { + var reply = new DAPResponse { - var reply = new DAPEvent - { - type = "event", - @event = command, - body = body - }; + type = "response", + request_seq = requestSeq, + success = false, + command = command, + message = errorText + }; + + Send(reply); + } - Send(reply); - } + public void SendEvent(string command, IDAPMessagePayload body) + { + var reply = new DAPEvent + { + type = "event", + @event = command, + body = body + }; + + Send(reply); + } - public void SendReply(DAPRequest request, IDAPMessagePayload response) + public void SendReply(DAPRequest request, IDAPMessagePayload response) + { + var reply = new DAPResponse { - var reply = new DAPResponse - { - type = "response", - request_seq = request.seq, - success = true, - command = request.command, - body = response - }; - - Send(reply); - } + type = "response", + request_seq = request.seq, + success = true, + command = request.command, + body = response + }; + + Send(reply); + } - public void SendReply(DAPRequest request, string errorText) + public void SendReply(DAPRequest request, string errorText) + { + var reply = new DAPResponse { - var reply = new DAPResponse - { - type = "response", - request_seq = request.seq, - success = false, - command = request.command, - message = errorText - }; - - Send(reply); - } + type = "response", + request_seq = request.seq, + success = false, + command = request.command, + message = errorText + }; + + Send(reply); } } diff --git a/DebuggerFrontend/DAPUtils.cs b/DebuggerFrontend/DAPUtils.cs index 092e03c7..ec6d793c 100644 --- a/DebuggerFrontend/DAPUtils.cs +++ b/DebuggerFrontend/DAPUtils.cs @@ -6,196 +6,195 @@ using System.Text; using System.Threading.Tasks; -namespace LSTools.DebuggerFrontend +namespace LSTools.DebuggerFrontend; + +public class DAPJSONTypeLookahead : JsonConverter { - public class DAPJSONTypeLookahead : JsonConverter + public override bool CanConvert(Type objectType) { - public override bool CanConvert(Type objectType) - { - return objectType.Equals(typeof(DAPMessageTypeHint)); - } + return objectType.Equals(typeof(DAPMessageTypeHint)); + } - public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) + public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) + { + var type = new DAPMessageTypeHint(); + int depth = 0; + while (type.type == null || type.message == null || type.seq == 0) { - var type = new DAPMessageTypeHint(); - int depth = 0; - while (type.type == null || type.message == null || type.seq == 0) + while (reader.Read() && reader.TokenType != JsonToken.PropertyName) { - while (reader.Read() && reader.TokenType != JsonToken.PropertyName) - { - if (reader.TokenType == JsonToken.StartObject) depth++; - else if (reader.TokenType == JsonToken.EndObject) depth--; - } + if (reader.TokenType == JsonToken.StartObject) depth++; + else if (reader.TokenType == JsonToken.EndObject) depth--; + } - if (reader.TokenType != JsonToken.PropertyName) - { - throw new InvalidDataException("Could not get property name in DAP payload"); - } + if (reader.TokenType != JsonToken.PropertyName) + { + throw new 
InvalidDataException("Could not get property name in DAP payload"); + } - if (depth == 0) + if (depth == 0) + { + var propertyName = (string)reader.Value; + if (propertyName == "seq") { - var propertyName = (string)reader.Value; - if (propertyName == "seq") + if (!reader.Read() || reader.TokenType != JsonToken.Integer) { - if (!reader.Read() || reader.TokenType != JsonToken.Integer) - { - throw new InvalidDataException("Could not get sequence number in DAP payload"); - } - - type.seq = (int)(Int64)reader.Value; + throw new InvalidDataException("Could not get sequence number in DAP payload"); } - else if (propertyName == "type") + + type.seq = (int)(Int64)reader.Value; + } + else if (propertyName == "type") + { + if (!reader.Read() || reader.TokenType != JsonToken.String) { - if (!reader.Read() || reader.TokenType != JsonToken.String) - { - throw new InvalidDataException("Could not get property value in DAP payload"); - } - - type.type = (string)reader.Value; + throw new InvalidDataException("Could not get property value in DAP payload"); } - else if (propertyName == "command" || propertyName == "event") - { - if (!reader.Read() || reader.TokenType != JsonToken.String) - { - throw new InvalidDataException("Could not get property value in DAP payload"); - } - type.message = (string)reader.Value; + type.type = (string)reader.Value; + } + else if (propertyName == "command" || propertyName == "event") + { + if (!reader.Read() || reader.TokenType != JsonToken.String) + { + throw new InvalidDataException("Could not get property value in DAP payload"); } + + type.message = (string)reader.Value; } } - - return type; } - public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) - { - throw new NotImplementedException(); - } + return type; } - public class DAPMessageTypeHint + public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) { - public String type; - public String message; - public int seq; + throw new NotImplementedException(); } - - public class DAPUnknownMessageException : Exception - { - public String Type { get; } - public String MessageType { get; } - public int Seq { get; } +} - public DAPUnknownMessageException(string type, string messageType, int seq) - : base($"Unknown message type: {type}, {messageType}") - { - Type = type; - MessageType = messageType; - Seq = seq; - } - } +public class DAPMessageTypeHint +{ + public String type; + public String message; + public int seq; +} - public class DAPJSONMessageConverter : JsonConverter +public class DAPUnknownMessageException : Exception +{ + public String Type { get; } + public String MessageType { get; } + public int Seq { get; } + + public DAPUnknownMessageException(string type, string messageType, int seq) + : base($"Unknown message type: {type}, {messageType}") { - private DAPMessageTypeHint TypeHint; + Type = type; + MessageType = messageType; + Seq = seq; + } +} - public DAPJSONMessageConverter(DAPMessageTypeHint typeHint) - { - TypeHint = typeHint; - } +public class DAPJSONMessageConverter : JsonConverter +{ + private DAPMessageTypeHint TypeHint; - public override bool CanConvert(Type objectType) - { - return objectType.Equals(typeof(DAPMessage)) || objectType.Equals(typeof(IDAPMessagePayload)); - } + public DAPJSONMessageConverter(DAPMessageTypeHint typeHint) + { + TypeHint = typeHint; + } + + public override bool CanConvert(Type objectType) + { + return objectType.Equals(typeof(DAPMessage)) || objectType.Equals(typeof(IDAPMessagePayload)); + } - public override 
object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) + public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) + { + if (objectType.Equals(typeof(DAPMessage))) { - if (objectType.Equals(typeof(DAPMessage))) + switch (TypeHint.type) { - switch (TypeHint.type) - { - case "request": return serializer.Deserialize(reader); - case "response": return serializer.Deserialize(reader); - case "event": return serializer.Deserialize(reader); - default: throw new InvalidDataException($"Unknown message type: {TypeHint.type}"); - } + case "request": return serializer.Deserialize(reader); + case "response": return serializer.Deserialize(reader); + case "event": return serializer.Deserialize(reader); + default: throw new InvalidDataException($"Unknown message type: {TypeHint.type}"); } - else + } + else + { + switch (TypeHint.message) { - switch (TypeHint.message) - { - case "initialize": return serializer.Deserialize(reader); - case "launch": return serializer.Deserialize(reader); - case "setBreakpoints": return serializer.Deserialize(reader); - case "configurationDone": return serializer.Deserialize(reader); - case "threads": return serializer.Deserialize(reader); - case "disconnect": return serializer.Deserialize(reader); - case "stackTrace": return serializer.Deserialize(reader); - case "variables": return serializer.Deserialize(reader); - case "scopes": return serializer.Deserialize(reader); - case "continue": return serializer.Deserialize(reader); - case "next": return serializer.Deserialize(reader); - case "stepIn": return serializer.Deserialize(reader); - case "stepOut": return serializer.Deserialize(reader); - case "pause": return serializer.Deserialize(reader); - case "evaluate": return serializer.Deserialize(reader); - default: throw new DAPUnknownMessageException(TypeHint.type, TypeHint.message, TypeHint.seq); - } + case "initialize": return serializer.Deserialize(reader); + case "launch": return serializer.Deserialize(reader); + case "setBreakpoints": return serializer.Deserialize(reader); + case "configurationDone": return serializer.Deserialize(reader); + case "threads": return serializer.Deserialize(reader); + case "disconnect": return serializer.Deserialize(reader); + case "stackTrace": return serializer.Deserialize(reader); + case "variables": return serializer.Deserialize(reader); + case "scopes": return serializer.Deserialize(reader); + case "continue": return serializer.Deserialize(reader); + case "next": return serializer.Deserialize(reader); + case "stepIn": return serializer.Deserialize(reader); + case "stepOut": return serializer.Deserialize(reader); + case "pause": return serializer.Deserialize(reader); + case "evaluate": return serializer.Deserialize(reader); + default: throw new DAPUnknownMessageException(TypeHint.type, TypeHint.message, TypeHint.seq); } } + } - public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) - { - throw new NotImplementedException(); - } + public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) + { + throw new NotImplementedException(); } +} - class DAPMessageSerializer +class DAPMessageSerializer +{ + public static DAPMessage Unserialize(char[] payload) { - public static DAPMessage Unserialize(char[] payload) - { - DAPMessageTypeHint typeHint; - DAPMessage message; + DAPMessageTypeHint typeHint; + DAPMessage message; - var bytes = Encoding.UTF8.GetBytes(payload); - using (var 
ms = new MemoryStream(bytes)) + var bytes = Encoding.UTF8.GetBytes(payload); + using (var ms = new MemoryStream(bytes)) + { + using (var textReader = new StreamReader(ms, Encoding.UTF8, false, 0x100, true)) + using (var jsonReader = new JsonTextReader(textReader)) { - using (var textReader = new StreamReader(ms, Encoding.UTF8, false, 0x100, true)) - using (var jsonReader = new JsonTextReader(textReader)) - { - var hintSerializer = new JsonSerializer(); - hintSerializer.Converters.Add(new DAPJSONTypeLookahead()); - typeHint = hintSerializer.Deserialize(jsonReader); - } + var hintSerializer = new JsonSerializer(); + hintSerializer.Converters.Add(new DAPJSONTypeLookahead()); + typeHint = hintSerializer.Deserialize(jsonReader); + } - using (var textReader = new StreamReader(ms)) - using (var jsonReader = new JsonTextReader(textReader)) - { - ms.Position = 0; - var serializer = new JsonSerializer(); - serializer.Converters.Add(new DAPJSONMessageConverter(typeHint)); - message = serializer.Deserialize(jsonReader); - } + using (var textReader = new StreamReader(ms)) + using (var jsonReader = new JsonTextReader(textReader)) + { + ms.Position = 0; + var serializer = new JsonSerializer(); + serializer.Converters.Add(new DAPJSONMessageConverter(typeHint)); + message = serializer.Deserialize(jsonReader); } - return message; } + return message; + } - public static char[] Serialize(DAPMessage message) + public static char[] Serialize(DAPMessage message) + { + using (var ms = new MemoryStream()) { - using (var ms = new MemoryStream()) + using (var textWriter = new StreamWriter(ms)) + using (var jsonWriter = new JsonTextWriter(textWriter)) { - using (var textWriter = new StreamWriter(ms)) - using (var jsonWriter = new JsonTextWriter(textWriter)) - { - var serializer = new JsonSerializer(); - serializer.Serialize(jsonWriter, message); - } - - var bytes = ms.ToArray(); - return Encoding.UTF8.GetChars(bytes); + var serializer = new JsonSerializer(); + serializer.Serialize(jsonWriter, message); } + + var bytes = ms.ToArray(); + return Encoding.UTF8.GetChars(bytes); } } } diff --git a/DebuggerFrontend/DatabaseEnumerator.cs b/DebuggerFrontend/DatabaseEnumerator.cs index 21da0cb9..ce39ed72 100644 --- a/DebuggerFrontend/DatabaseEnumerator.cs +++ b/DebuggerFrontend/DatabaseEnumerator.cs @@ -5,93 +5,92 @@ using System.Text; using System.Threading.Tasks; -namespace LSTools.DebuggerFrontend +namespace LSTools.DebuggerFrontend; + +public class RequestFailedException : Exception { - public class RequestFailedException : Exception + public RequestFailedException(string message) + : base(message) { - public RequestFailedException(string message) - : base(message) - { - } } +} - class DatabaseEnumerator +class DatabaseEnumerator +{ + private StoryDebugInfo DebugInfo; + DAPStream DAP; + private DebuggerClient DbgClient; + private ValueFormatter Formatter; + private EvaluationResultManager ResultManager; + // Databases that we'll have to send to the debugger after receipt + private Dictionary> PendingDatabaseRequests = new Dictionary>(); + // Database contents that we're receiving from the backend + private Dictionary DatabaseContents = new Dictionary(); + + public DatabaseEnumerator(DebuggerClient dbgClient, DAPStream dap, StoryDebugInfo debugInfo, ValueFormatter formatter, + EvaluationResultManager resultManager) { - private StoryDebugInfo DebugInfo; - DAPStream DAP; - private DebuggerClient DbgClient; - private ValueFormatter Formatter; - private EvaluationResultManager ResultManager; - // Databases that we'll have to 
send to the debugger after receipt - private Dictionary> PendingDatabaseRequests = new Dictionary>(); - // Database contents that we're receiving from the backend - private Dictionary DatabaseContents = new Dictionary(); + DebugInfo = debugInfo; + DAP = dap; + DbgClient = dbgClient; + Formatter = formatter; + ResultManager = resultManager; - public DatabaseEnumerator(DebuggerClient dbgClient, DAPStream dap, StoryDebugInfo debugInfo, ValueFormatter formatter, - EvaluationResultManager resultManager) - { - DebugInfo = debugInfo; - DAP = dap; - DbgClient = dbgClient; - Formatter = formatter; - ResultManager = resultManager; + DbgClient.OnBeginDatabaseContents = this.OnBeginDatabaseContents; + DbgClient.OnDatabaseRow = this.OnDatabaseRow; + DbgClient.OnEndDatabaseContents = this.OnEndDatabaseContents; + } - DbgClient.OnBeginDatabaseContents = this.OnBeginDatabaseContents; - DbgClient.OnDatabaseRow = this.OnDatabaseRow; - DbgClient.OnEndDatabaseContents = this.OnEndDatabaseContents; + public void RequestDatabaseEvaluation(DAPRequest request, UInt32 databaseId) + { + List requests; + if (!PendingDatabaseRequests.TryGetValue(databaseId, out requests)) + { + requests = new List(); + PendingDatabaseRequests[databaseId] = requests; } - public void RequestDatabaseEvaluation(DAPRequest request, UInt32 databaseId) + if (requests.Count == 0) { - List requests; - if (!PendingDatabaseRequests.TryGetValue(databaseId, out requests)) - { - requests = new List(); - PendingDatabaseRequests[databaseId] = requests; - } - - if (requests.Count == 0) - { - var databaseDebugInfo = DebugInfo.Databases[databaseId]; - DatabaseContents[databaseId] = ResultManager.MakeResults(databaseDebugInfo.ParamTypes.Count); - } + var databaseDebugInfo = DebugInfo.Databases[databaseId]; + DatabaseContents[databaseId] = ResultManager.MakeResults(databaseDebugInfo.ParamTypes.Count); + } - requests.Add(request); + requests.Add(request); - DbgClient.SendGetDatabaseContents(databaseId); - } + DbgClient.SendGetDatabaseContents(databaseId); + } - private void OnBeginDatabaseContents(BkBeginDatabaseContents msg) - { - } + private void OnBeginDatabaseContents(BkBeginDatabaseContents msg) + { + } - private void OnDatabaseRow(BkDatabaseRow msg) + private void OnDatabaseRow(BkDatabaseRow msg) + { + var db = DatabaseContents[msg.DatabaseId]; + foreach (var row in msg.Row) { - var db = DatabaseContents[msg.DatabaseId]; - foreach (var row in msg.Row) - { - db.Add(row); - } + db.Add(row); } + } - private void OnEndDatabaseContents(BkEndDatabaseContents msg) - { - var rows = DatabaseContents[msg.DatabaseId]; - var db = DebugInfo.Databases[msg.DatabaseId]; - - var evalResponse = new DAPEvaluateResponse(); - evalResponse.result = $"Database {db.Name} ({rows.Count} rows)"; - evalResponse.namedVariables = 0; - evalResponse.indexedVariables = rows.Count; - evalResponse.variablesReference = rows.VariablesReference; + private void OnEndDatabaseContents(BkEndDatabaseContents msg) + { + var rows = DatabaseContents[msg.DatabaseId]; + var db = DebugInfo.Databases[msg.DatabaseId]; - var requests = PendingDatabaseRequests[msg.DatabaseId]; - foreach (var request in requests) - { - DAP.SendReply(request, evalResponse); - } + var evalResponse = new DAPEvaluateResponse(); + evalResponse.result = $"Database {db.Name} ({rows.Count} rows)"; + evalResponse.namedVariables = 0; + evalResponse.indexedVariables = rows.Count; + evalResponse.variablesReference = rows.VariablesReference; - requests.Clear(); + var requests = PendingDatabaseRequests[msg.DatabaseId]; + foreach 
(var request in requests) + { + DAP.SendReply(request, evalResponse); } + + requests.Clear(); } } diff --git a/DebuggerFrontend/DbgClient.cs b/DebuggerFrontend/DbgClient.cs index 400aed0f..4ca3641d 100644 --- a/DebuggerFrontend/DbgClient.cs +++ b/DebuggerFrontend/DbgClient.cs @@ -8,374 +8,373 @@ using System.Net.Sockets; using System.Text; -namespace LSTools.DebuggerFrontend +namespace LSTools.DebuggerFrontend; + +public class AsyncProtobufClient { - public class AsyncProtobufClient - { - private TcpClient Socket; - private byte[] MessageBuffer; - private int BufferPos; + private TcpClient Socket; + private byte[] MessageBuffer; + private int BufferPos; - public delegate void MessageReceivedDelegate(BackendToDebugger message); - public MessageReceivedDelegate MessageReceived = delegate { }; + public delegate void MessageReceivedDelegate(BackendToDebugger message); + public MessageReceivedDelegate MessageReceived = delegate { }; - public AsyncProtobufClient(string host, int port) - { - MessageBuffer = new byte[0x100000]; - BufferPos = 0; + public AsyncProtobufClient(string host, int port) + { + MessageBuffer = new byte[0x100000]; + BufferPos = 0; - Socket = new TcpClient(); - Socket.Connect(host, port); - } + Socket = new TcpClient(); + Socket.Connect(host, port); + } - public void RunLoop() + public void RunLoop() + { + while (true) { - while (true) + try { - try - { - int received = Socket.Client.Receive(MessageBuffer, BufferPos, MessageBuffer.Length - BufferPos, SocketFlags.Partial); - BufferPos += received; - } - catch (SocketException e) + int received = Socket.Client.Receive(MessageBuffer, BufferPos, MessageBuffer.Length - BufferPos, SocketFlags.Partial); + BufferPos += received; + } + catch (SocketException e) + { + throw e; + } + + while (BufferPos >= 4) + { + Int32 length = MessageBuffer[0] + | (MessageBuffer[1] << 8) + | (MessageBuffer[2] << 16) + | (MessageBuffer[3] << 24); + + if (length >= 0x100000) { - throw e; + throw new InvalidDataException($"Message too long ({length} bytes)"); } - while (BufferPos >= 4) + if (BufferPos >= length) { - Int32 length = MessageBuffer[0] - | (MessageBuffer[1] << 8) - | (MessageBuffer[2] << 16) - | (MessageBuffer[3] << 24); - - if (length >= 0x100000) + using (var stream = new CodedInputStream(MessageBuffer, 4, length - 4)) { - throw new InvalidDataException($"Message too long ({length} bytes)"); + var message = BackendToDebugger.Parser.ParseFrom(stream); + MessageReceived(message); } - if (BufferPos >= length) - { - using (var stream = new CodedInputStream(MessageBuffer, 4, length - 4)) - { - var message = BackendToDebugger.Parser.ParseFrom(stream); - MessageReceived(message); - } - - Array.Copy(MessageBuffer, length, MessageBuffer, 0, BufferPos - length); - BufferPos -= length; - } - else - { - break; - } + Array.Copy(MessageBuffer, length, MessageBuffer, 0, BufferPos - length); + BufferPos -= length; + } + else + { + break; } } } + } - public void Send(DebuggerToBackend message) + public void Send(DebuggerToBackend message) + { + using (var ms = new MemoryStream()) { - using (var ms = new MemoryStream()) - { - message.WriteTo(ms); - - var length = ms.Position + 4; - var lengthBuf = new byte[4]; - lengthBuf[0] = (byte)(length & 0xff); - lengthBuf[1] = (byte)((length >> 8) & 0xff); - lengthBuf[2] = (byte)((length >> 16) & 0xff); - lengthBuf[3] = (byte)((length >> 24) & 0xff); - Socket.Client.Send(lengthBuf); - var payload = ms.ToArray(); - Socket.Client.Send(payload); - } + message.WriteTo(ms); + + var length = ms.Position + 4; + var lengthBuf 
= new byte[4]; + lengthBuf[0] = (byte)(length & 0xff); + lengthBuf[1] = (byte)((length >> 8) & 0xff); + lengthBuf[2] = (byte)((length >> 16) & 0xff); + lengthBuf[3] = (byte)((length >> 24) & 0xff); + Socket.Client.Send(lengthBuf); + var payload = ms.ToArray(); + Socket.Client.Send(payload); } } +} - public class DebuggerClient - { - private AsyncProtobufClient Client; - private StoryDebugInfo DebugInfo; - private Stream LogStream; - private UInt32 OutgoingSeq = 1; - private UInt32 IncomingSeq = 1; +public class DebuggerClient +{ + private AsyncProtobufClient Client; + private StoryDebugInfo DebugInfo; + private Stream LogStream; + private UInt32 OutgoingSeq = 1; + private UInt32 IncomingSeq = 1; - public delegate void BackendInfoDelegate(BkVersionInfoResponse response); - public BackendInfoDelegate OnBackendInfo = delegate { }; + public delegate void BackendInfoDelegate(BkVersionInfoResponse response); + public BackendInfoDelegate OnBackendInfo = delegate { }; - public delegate void StoryLoadedDelegate(); - public StoryLoadedDelegate OnStoryLoaded = delegate { }; + public delegate void StoryLoadedDelegate(); + public StoryLoadedDelegate OnStoryLoaded = delegate { }; - public delegate void DebugSessionEndedDelegate(); - public DebugSessionEndedDelegate OnDebugSessionEnded = delegate { }; + public delegate void DebugSessionEndedDelegate(); + public DebugSessionEndedDelegate OnDebugSessionEnded = delegate { }; - public delegate void BreakpointTriggeredDelegate(BkBreakpointTriggered bp); - public BreakpointTriggeredDelegate OnBreakpointTriggered = delegate { }; + public delegate void BreakpointTriggeredDelegate(BkBreakpointTriggered bp); + public BreakpointTriggeredDelegate OnBreakpointTriggered = delegate { }; - public delegate void GlobalBreakpointTriggeredDelegate(BkGlobalBreakpointTriggered bp); - public GlobalBreakpointTriggeredDelegate OnGlobalBreakpointTriggered = delegate { }; + public delegate void GlobalBreakpointTriggeredDelegate(BkGlobalBreakpointTriggered bp); + public GlobalBreakpointTriggeredDelegate OnGlobalBreakpointTriggered = delegate { }; - public delegate void StorySyncDataDelegate(BkSyncStoryData data); - public StorySyncDataDelegate OnStorySyncData = delegate { }; + public delegate void StorySyncDataDelegate(BkSyncStoryData data); + public StorySyncDataDelegate OnStorySyncData = delegate { }; - public delegate void StorySyncFinishedDelegate(); - public StorySyncFinishedDelegate OnStorySyncFinished = delegate { }; + public delegate void StorySyncFinishedDelegate(); + public StorySyncFinishedDelegate OnStorySyncFinished = delegate { }; - public delegate void DebugOutputDelegate(BkDebugOutput msg); - public DebugOutputDelegate OnDebugOutput = delegate { }; + public delegate void DebugOutputDelegate(BkDebugOutput msg); + public DebugOutputDelegate OnDebugOutput = delegate { }; - public delegate void BeginDatabaseContentsDelegate(BkBeginDatabaseContents msg); - public BeginDatabaseContentsDelegate OnBeginDatabaseContents = delegate { }; + public delegate void BeginDatabaseContentsDelegate(BkBeginDatabaseContents msg); + public BeginDatabaseContentsDelegate OnBeginDatabaseContents = delegate { }; - public delegate void DatabaseRowDelegate(BkDatabaseRow msg); - public DatabaseRowDelegate OnDatabaseRow = delegate { }; + public delegate void DatabaseRowDelegate(BkDatabaseRow msg); + public DatabaseRowDelegate OnDatabaseRow = delegate { }; - public delegate void EndDatabaseContentsDelegate(BkEndDatabaseContents msg); - public EndDatabaseContentsDelegate OnEndDatabaseContents = 
delegate { }; + public delegate void EndDatabaseContentsDelegate(BkEndDatabaseContents msg); + public EndDatabaseContentsDelegate OnEndDatabaseContents = delegate { }; - public delegate void EvaluateRowDelegate(UInt32 seq, BkEvaluateRow msg); - public EvaluateRowDelegate OnEvaluateRow = delegate { }; + public delegate void EvaluateRowDelegate(UInt32 seq, BkEvaluateRow msg); + public EvaluateRowDelegate OnEvaluateRow = delegate { }; - public delegate void EvaluateFinishedDelegate(UInt32 seq, BkEvaluateFinished msg); - public EvaluateFinishedDelegate OnEvaluateFinished = delegate { }; + public delegate void EvaluateFinishedDelegate(UInt32 seq, BkEvaluateFinished msg); + public EvaluateFinishedDelegate OnEvaluateFinished = delegate { }; - public DebuggerClient(AsyncProtobufClient client, StoryDebugInfo debugInfo) - { - Client = client; - Client.MessageReceived = this.MessageReceived; - DebugInfo = debugInfo; - } + public DebuggerClient(AsyncProtobufClient client, StoryDebugInfo debugInfo) + { + Client = client; + Client.MessageReceived = this.MessageReceived; + DebugInfo = debugInfo; + } - public void EnableLogging(Stream logStream) - { - LogStream = logStream; - } + public void EnableLogging(Stream logStream) + { + LogStream = logStream; + } - private void LogMessage(IMessage message) + private void LogMessage(IMessage message) + { + if (LogStream != null) { - if (LogStream != null) + using (var writer = new StreamWriter(LogStream, Encoding.UTF8, 0x1000, true)) { - using (var writer = new StreamWriter(LogStream, Encoding.UTF8, 0x1000, true)) - { - writer.Write(" DBG >>> "); - var settings = new JsonFormatter.Settings(true); - var formatter = new JsonFormatter(settings); - formatter.Format(message, writer); - writer.Write("\r\n"); - } + writer.Write(" DBG >>> "); + var settings = new JsonFormatter.Settings(true); + var formatter = new JsonFormatter(settings); + formatter.Format(message, writer); + writer.Write("\r\n"); } } + } - public UInt32 Send(DebuggerToBackend message) - { - message.SeqNo = OutgoingSeq++; - LogMessage(message); - Client.Send(message); - return message.SeqNo; - } - - public void SendIdentify(UInt32 protocolVersion) - { - var msg = new DebuggerToBackend - { - Identify = new DbgIdentifyRequest - { - ProtocolVersion = protocolVersion - } - }; - Send(msg); - } + public UInt32 Send(DebuggerToBackend message) + { + message.SeqNo = OutgoingSeq++; + LogMessage(message); + Client.Send(message); + return message.SeqNo; + } - private MsgBreakpoint BreakpointToMsg(Breakpoint breakpoint) + public void SendIdentify(UInt32 protocolVersion) + { + var msg = new DebuggerToBackend { - var msgBp = new MsgBreakpoint(); - if (breakpoint.LineInfo.Node != null) - { - msgBp.NodeId = breakpoint.LineInfo.Node.Id; - } - else + Identify = new DbgIdentifyRequest { - msgBp.GoalId = breakpoint.LineInfo.Goal.Id; + ProtocolVersion = protocolVersion } + }; + Send(msg); + } - msgBp.IsInitAction = breakpoint.LineInfo.Type == LineType.GoalInitActionLine; - if (breakpoint.LineInfo.Type == LineType.GoalInitActionLine - || breakpoint.LineInfo.Type == LineType.GoalExitActionLine - || breakpoint.LineInfo.Type == LineType.RuleActionLine) - { - msgBp.ActionIndex = (Int32)breakpoint.LineInfo.ActionIndex; - } - else - { - msgBp.ActionIndex = -1; - } - - msgBp.BreakpointMask = 0x3f; // TODO const - return msgBp; + private MsgBreakpoint BreakpointToMsg(Breakpoint breakpoint) + { + var msgBp = new MsgBreakpoint(); + if (breakpoint.LineInfo.Node != null) + { + msgBp.NodeId = breakpoint.LineInfo.Node.Id; } - - public void 
SendSetBreakpoints(List breakpoints) + else { - var setBps = new DbgSetBreakpoints(); - foreach (var breakpoint in breakpoints) - { - if (breakpoint.Verified) - { - var msgBp = BreakpointToMsg(breakpoint); - setBps.Breakpoint.Add(msgBp); - } - } - - var msg = new DebuggerToBackend - { - SetBreakpoints = setBps - }; - Send(msg); + msgBp.GoalId = breakpoint.LineInfo.Goal.Id; } - public void SendGetDatabaseContents(UInt32 databaseId) + msgBp.IsInitAction = breakpoint.LineInfo.Type == LineType.GoalInitActionLine; + if (breakpoint.LineInfo.Type == LineType.GoalInitActionLine + || breakpoint.LineInfo.Type == LineType.GoalExitActionLine + || breakpoint.LineInfo.Type == LineType.RuleActionLine) { - var msg = new DebuggerToBackend - { - GetDatabaseContents = new DbgGetDatabaseContents - { - DatabaseId = databaseId - } - }; - Send(msg); + msgBp.ActionIndex = (Int32)breakpoint.LineInfo.ActionIndex; } - - public void SendSetGlobalBreakpoints(UInt32 breakpointMask) + else { - var msg = new DebuggerToBackend - { - SetGlobalBreakpoints = new DbgSetGlobalBreakpoints - { - BreakpointMask = breakpointMask - } - }; - Send(msg); + msgBp.ActionIndex = -1; } + + msgBp.BreakpointMask = 0x3f; // TODO const + return msgBp; + } - public void SendContinue(DbgContinue.Types.Action action, UInt32 breakpointMask, UInt32 flags) + public void SendSetBreakpoints(List breakpoints) + { + var setBps = new DbgSetBreakpoints(); + foreach (var breakpoint in breakpoints) { - var msg = new DebuggerToBackend + if (breakpoint.Verified) { - Continue = new DbgContinue - { - Action = action, - BreakpointMask = breakpointMask, - Flags = flags - } - }; - Send(msg); + var msgBp = BreakpointToMsg(breakpoint); + setBps.Breakpoint.Add(msgBp); + } } - public void SendSyncStory() + var msg = new DebuggerToBackend { - var msg = new DebuggerToBackend - { - SyncStory = new DbgSyncStory() - }; - Send(msg); - } + SetBreakpoints = setBps + }; + Send(msg); + } - public UInt32 SendEvaluate(DbgEvaluate.Types.EvalType type, UInt32 nodeId, MsgTuple args) + public void SendGetDatabaseContents(UInt32 databaseId) + { + var msg = new DebuggerToBackend { - var msg = new DebuggerToBackend + GetDatabaseContents = new DbgGetDatabaseContents { - Evaluate = new DbgEvaluate - { - Type = type, - NodeId = nodeId, - Params = args - } - }; - return Send(msg); - } + DatabaseId = databaseId + } + }; + Send(msg); + } - private void BreakpointTriggered(BkBreakpointTriggered message) + public void SendSetGlobalBreakpoints(UInt32 breakpointMask) + { + var msg = new DebuggerToBackend { - OnBreakpointTriggered(message); - } + SetGlobalBreakpoints = new DbgSetGlobalBreakpoints + { + BreakpointMask = breakpointMask + } + }; + Send(msg); + } - private void GlobalBreakpointTriggered(BkGlobalBreakpointTriggered message) + public void SendContinue(DbgContinue.Types.Action action, UInt32 breakpointMask, UInt32 flags) + { + var msg = new DebuggerToBackend { - OnGlobalBreakpointTriggered(message); - } + Continue = new DbgContinue + { + Action = action, + BreakpointMask = breakpointMask, + Flags = flags + } + }; + Send(msg); + } - private void MessageReceived(BackendToDebugger message) + public void SendSyncStory() + { + var msg = new DebuggerToBackend { - LogMessage(message); + SyncStory = new DbgSyncStory() + }; + Send(msg); + } - if (message.SeqNo != IncomingSeq) + public UInt32 SendEvaluate(DbgEvaluate.Types.EvalType type, UInt32 nodeId, MsgTuple args) + { + var msg = new DebuggerToBackend + { + Evaluate = new DbgEvaluate { - throw new InvalidDataException($"DBG sequence number 
mismatch; got {message.SeqNo} expected {IncomingSeq}"); + Type = type, + NodeId = nodeId, + Params = args } + }; + return Send(msg); + } - IncomingSeq++; + private void BreakpointTriggered(BkBreakpointTriggered message) + { + OnBreakpointTriggered(message); + } + private void GlobalBreakpointTriggered(BkGlobalBreakpointTriggered message) + { + OnGlobalBreakpointTriggered(message); + } - switch (message.MsgCase) - { - case BackendToDebugger.MsgOneofCase.VersionInfo: - OnBackendInfo(message.VersionInfo); - break; + private void MessageReceived(BackendToDebugger message) + { + LogMessage(message); - case BackendToDebugger.MsgOneofCase.Results: - break; + if (message.SeqNo != IncomingSeq) + { + throw new InvalidDataException($"DBG sequence number mismatch; got {message.SeqNo} expected {IncomingSeq}"); + } - case BackendToDebugger.MsgOneofCase.StoryLoaded: - OnStoryLoaded(); - break; + IncomingSeq++; - case BackendToDebugger.MsgOneofCase.DebugSessionEnded: - OnDebugSessionEnded(); - break; - case BackendToDebugger.MsgOneofCase.BreakpointTriggered: - BreakpointTriggered(message.BreakpointTriggered); - break; + switch (message.MsgCase) + { + case BackendToDebugger.MsgOneofCase.VersionInfo: + OnBackendInfo(message.VersionInfo); + break; - case BackendToDebugger.MsgOneofCase.GlobalBreakpointTriggered: - GlobalBreakpointTriggered(message.GlobalBreakpointTriggered); - break; + case BackendToDebugger.MsgOneofCase.Results: + break; - case BackendToDebugger.MsgOneofCase.SyncStoryData: - OnStorySyncData(message.SyncStoryData); - break; + case BackendToDebugger.MsgOneofCase.StoryLoaded: + OnStoryLoaded(); + break; - case BackendToDebugger.MsgOneofCase.SyncStoryFinished: - OnStorySyncFinished(); - break; + case BackendToDebugger.MsgOneofCase.DebugSessionEnded: + OnDebugSessionEnded(); + break; - case BackendToDebugger.MsgOneofCase.DebugOutput: - OnDebugOutput(message.DebugOutput); - break; + case BackendToDebugger.MsgOneofCase.BreakpointTriggered: + BreakpointTriggered(message.BreakpointTriggered); + break; - case BackendToDebugger.MsgOneofCase.BeginDatabaseContents: - OnBeginDatabaseContents(message.BeginDatabaseContents); - break; + case BackendToDebugger.MsgOneofCase.GlobalBreakpointTriggered: + GlobalBreakpointTriggered(message.GlobalBreakpointTriggered); + break; - case BackendToDebugger.MsgOneofCase.DatabaseRow: - OnDatabaseRow(message.DatabaseRow); - break; + case BackendToDebugger.MsgOneofCase.SyncStoryData: + OnStorySyncData(message.SyncStoryData); + break; - case BackendToDebugger.MsgOneofCase.EndDatabaseContents: - OnEndDatabaseContents(message.EndDatabaseContents); - break; + case BackendToDebugger.MsgOneofCase.SyncStoryFinished: + OnStorySyncFinished(); + break; - case BackendToDebugger.MsgOneofCase.EvaluateRow: - OnEvaluateRow(message.ReplySeqNo, message.EvaluateRow); - break; + case BackendToDebugger.MsgOneofCase.DebugOutput: + OnDebugOutput(message.DebugOutput); + break; - case BackendToDebugger.MsgOneofCase.EvaluateFinished: - OnEvaluateFinished(message.ReplySeqNo, message.EvaluateFinished); - break; + case BackendToDebugger.MsgOneofCase.BeginDatabaseContents: + OnBeginDatabaseContents(message.BeginDatabaseContents); + break; - default: - throw new InvalidOperationException($"Unknown message from DBG: {message.MsgCase}"); - } + case BackendToDebugger.MsgOneofCase.DatabaseRow: + OnDatabaseRow(message.DatabaseRow); + break; + + case BackendToDebugger.MsgOneofCase.EndDatabaseContents: + OnEndDatabaseContents(message.EndDatabaseContents); + break; + + case 
BackendToDebugger.MsgOneofCase.EvaluateRow: + OnEvaluateRow(message.ReplySeqNo, message.EvaluateRow); + break; + + case BackendToDebugger.MsgOneofCase.EvaluateFinished: + OnEvaluateFinished(message.ReplySeqNo, message.EvaluateFinished); + break; + + default: + throw new InvalidOperationException($"Unknown message from DBG: {message.MsgCase}"); } } } \ No newline at end of file diff --git a/DebuggerFrontend/DebugInfoLoader.cs b/DebuggerFrontend/DebugInfoLoader.cs index 07fc859f..cf2962fc 100644 --- a/DebuggerFrontend/DebugInfoLoader.cs +++ b/DebuggerFrontend/DebugInfoLoader.cs @@ -6,208 +6,207 @@ using System.Text; using System.Collections.Generic; -namespace LSTools.DebuggerFrontend +namespace LSTools.DebuggerFrontend; + +class DebugInfoLoader { - class DebugInfoLoader + private DatabaseDebugInfo FromProtobuf(DatabaseDebugInfoMsg msg) { - private DatabaseDebugInfo FromProtobuf(DatabaseDebugInfoMsg msg) - { - var debugInfo = new DatabaseDebugInfo - { - Id = msg.Id, - Name = msg.Name, - ParamTypes = new List() - }; - foreach (var paramType in msg.ParamTypes) - { - debugInfo.ParamTypes.Add(paramType); - } - - return debugInfo; + var debugInfo = new DatabaseDebugInfo + { + Id = msg.Id, + Name = msg.Name, + ParamTypes = new List() + }; + foreach (var paramType in msg.ParamTypes) + { + debugInfo.ParamTypes.Add(paramType); } - private ActionDebugInfo FromProtobuf(ActionDebugInfoMsg msg) + return debugInfo; + } + + private ActionDebugInfo FromProtobuf(ActionDebugInfoMsg msg) + { + return new ActionDebugInfo { - return new ActionDebugInfo - { - Line = msg.Line - }; + Line = msg.Line + }; + } + + private GoalDebugInfo FromProtobuf(GoalDebugInfoMsg msg) + { + var debugInfo = new GoalDebugInfo + { + Id = msg.Id, + Name = msg.Name, + Path = msg.Path, + InitActions = new List(), + ExitActions = new List() + }; + + foreach (var action in msg.InitActions) + { + debugInfo.InitActions.Add(FromProtobuf(action)); } - private GoalDebugInfo FromProtobuf(GoalDebugInfoMsg msg) + foreach (var action in msg.ExitActions) { - var debugInfo = new GoalDebugInfo - { - Id = msg.Id, - Name = msg.Name, - Path = msg.Path, - InitActions = new List(), - ExitActions = new List() - }; + debugInfo.ExitActions.Add(FromProtobuf(action)); + } + + return debugInfo; + } - foreach (var action in msg.InitActions) - { - debugInfo.InitActions.Add(FromProtobuf(action)); - } + private RuleVariableDebugInfo FromProtobuf(RuleVariableDebugInfoMsg msg) + { + return new RuleVariableDebugInfo + { + Index = msg.Index, + Name = msg.Name, + Type = msg.Type + }; + } - foreach (var action in msg.ExitActions) - { - debugInfo.ExitActions.Add(FromProtobuf(action)); - } + private RuleDebugInfo FromProtobuf(RuleDebugInfoMsg msg) + { + var debugInfo = new RuleDebugInfo + { + Id = msg.Id, + GoalId = msg.GoalId, + Name = msg.Name, + Variables = new List(), + Actions = new List(), + ConditionsStartLine = msg.ConditionsStartLine, + ConditionsEndLine = msg.ConditionsEndLine, + ActionsStartLine = msg.ActionsStartLine, + ActionsEndLine = msg.ActionsEndLine + }; + + foreach (var variableMsg in msg.Variables) + { + var variable = FromProtobuf(variableMsg); + debugInfo.Variables.Add(variable); + } - return debugInfo; + foreach (var action in msg.Actions) + { + debugInfo.Actions.Add(FromProtobuf(action)); } - private RuleVariableDebugInfo FromProtobuf(RuleVariableDebugInfoMsg msg) + return debugInfo; + } + + private NodeDebugInfo FromProtobuf(NodeDebugInfoMsg msg) + { + var debugInfo = new NodeDebugInfo + { + Id = msg.Id, + RuleId = msg.RuleId, + Line = 
(Int32)msg.Line, + ColumnToVariableMaps = new Dictionary(), + DatabaseId = msg.DatabaseId, + Name = msg.Name, + Type = (LSLib.LS.Story.Node.Type)msg.Type, + ParentNodeId = msg.ParentNodeId + }; + + if (msg.FunctionName != "") { - return new RuleVariableDebugInfo - { - Index = msg.Index, - Name = msg.Name, - Type = msg.Type - }; + debugInfo.FunctionName = new FunctionNameAndArity(msg.FunctionName, (int)msg.FunctionArity); } - private RuleDebugInfo FromProtobuf(RuleDebugInfoMsg msg) - { - var debugInfo = new RuleDebugInfo - { - Id = msg.Id, - GoalId = msg.GoalId, - Name = msg.Name, - Variables = new List(), - Actions = new List(), - ConditionsStartLine = msg.ConditionsStartLine, - ConditionsEndLine = msg.ConditionsEndLine, - ActionsStartLine = msg.ActionsStartLine, - ActionsEndLine = msg.ActionsEndLine - }; - - foreach (var variableMsg in msg.Variables) - { - var variable = FromProtobuf(variableMsg); - debugInfo.Variables.Add(variable); - } - - foreach (var action in msg.Actions) - { - debugInfo.Actions.Add(FromProtobuf(action)); - } - - return debugInfo; + foreach (var map in msg.ColumnMaps) + { + debugInfo.ColumnToVariableMaps.Add((Int32)map.Key, (Int32)map.Value); } - private NodeDebugInfo FromProtobuf(NodeDebugInfoMsg msg) - { - var debugInfo = new NodeDebugInfo - { - Id = msg.Id, - RuleId = msg.RuleId, - Line = (Int32)msg.Line, - ColumnToVariableMaps = new Dictionary(), - DatabaseId = msg.DatabaseId, - Name = msg.Name, - Type = (LSLib.LS.Story.Node.Type)msg.Type, - ParentNodeId = msg.ParentNodeId - }; - - if (msg.FunctionName != "") - { - debugInfo.FunctionName = new FunctionNameAndArity(msg.FunctionName, (int)msg.FunctionArity); - } - - foreach (var map in msg.ColumnMaps) - { - debugInfo.ColumnToVariableMaps.Add((Int32)map.Key, (Int32)map.Value); - } - - return debugInfo; + return debugInfo; + } + + private FunctionParamDebugInfo FromProtobuf(FunctionParamDebugInfoMsg msg) + { + return new FunctionParamDebugInfo + { + TypeId = msg.TypeId, + Name = msg.Name, + Out = msg.Out + }; + } + + private FunctionDebugInfo FromProtobuf(FunctionDebugInfoMsg msg) + { + var debugInfo = new FunctionDebugInfo + { + Name = msg.Name, + Params = new List(), + TypeId = msg.TypeId + }; + + foreach (var param in msg.Params) + { + debugInfo.Params.Add(FromProtobuf(param)); } - private FunctionParamDebugInfo FromProtobuf(FunctionParamDebugInfoMsg msg) + return debugInfo; + } + + private StoryDebugInfo FromProtobuf(StoryDebugInfoMsg msg) + { + var debugInfo = new StoryDebugInfo(); + debugInfo.Version = msg.Version; + + foreach (var dbMsg in msg.Databases) { - return new FunctionParamDebugInfo - { - TypeId = msg.TypeId, - Name = msg.Name, - Out = msg.Out - }; + var db = FromProtobuf(dbMsg); + debugInfo.Databases.Add(db.Id, db); } - private FunctionDebugInfo FromProtobuf(FunctionDebugInfoMsg msg) + foreach (var goalMsg in msg.Goals) { - var debugInfo = new FunctionDebugInfo - { - Name = msg.Name, - Params = new List(), - TypeId = msg.TypeId - }; + var goal = FromProtobuf(goalMsg); + debugInfo.Goals.Add(goal.Id, goal); + } - foreach (var param in msg.Params) - { - debugInfo.Params.Add(FromProtobuf(param)); - } + foreach (var ruleMsg in msg.Rules) + { + var rule = FromProtobuf(ruleMsg); + debugInfo.Rules.Add(rule.Id, rule); + } - return debugInfo; + foreach (var nodeMsg in msg.Nodes) + { + var node = FromProtobuf(nodeMsg); + debugInfo.Nodes.Add(node.Id, node); } - private StoryDebugInfo FromProtobuf(StoryDebugInfoMsg msg) - { - var debugInfo = new StoryDebugInfo(); - debugInfo.Version = msg.Version; - - foreach 
(var dbMsg in msg.Databases) - { - var db = FromProtobuf(dbMsg); - debugInfo.Databases.Add(db.Id, db); - } - - foreach (var goalMsg in msg.Goals) - { - var goal = FromProtobuf(goalMsg); - debugInfo.Goals.Add(goal.Id, goal); - } - - foreach (var ruleMsg in msg.Rules) - { - var rule = FromProtobuf(ruleMsg); - debugInfo.Rules.Add(rule.Id, rule); - } - - foreach (var nodeMsg in msg.Nodes) - { - var node = FromProtobuf(nodeMsg); - debugInfo.Nodes.Add(node.Id, node); - } - - foreach (var funcMsg in msg.Functions) - { - var func = FromProtobuf(funcMsg); - debugInfo.Functions.Add(new FunctionNameAndArity(func.Name, func.Params.Count), func); - } - - return debugInfo; + foreach (var funcMsg in msg.Functions) + { + var func = FromProtobuf(funcMsg); + debugInfo.Functions.Add(new FunctionNameAndArity(func.Name, func.Params.Count), func); } - public StoryDebugInfo Load(byte[] msgPayload) - { - UInt32 decompressedSize; - byte[] lengthBuf = new byte[4]; - Array.Copy(msgPayload, msgPayload.Length - 4, lengthBuf, 0, 4); - using (var ms = new MemoryStream(lengthBuf)) - using (var reader = new BinaryReader(ms, Encoding.UTF8, true)) - { - decompressedSize = reader.ReadUInt32(); - } - - var compressed = new byte[msgPayload.Length - 4]; - Array.Copy(msgPayload, 0, compressed, 0, msgPayload.Length - 4); - - byte flags = BinUtils.MakeCompressionFlags(LSLib.LS.Enums.CompressionMethod.LZ4, LSLib.LS.Enums.LSCompressionLevel.FastCompression); - byte[] decompressed = BinUtils.Decompress(compressed, (int)decompressedSize, flags); - var msg = StoryDebugInfoMsg.Parser.ParseFrom(decompressed); - var debugInfo = FromProtobuf(msg); - return debugInfo; + return debugInfo; + } + + public StoryDebugInfo Load(byte[] msgPayload) + { + UInt32 decompressedSize; + byte[] lengthBuf = new byte[4]; + Array.Copy(msgPayload, msgPayload.Length - 4, lengthBuf, 0, 4); + using (var ms = new MemoryStream(lengthBuf)) + using (var reader = new BinaryReader(ms, Encoding.UTF8, true)) + { + decompressedSize = reader.ReadUInt32(); } + + var compressed = new byte[msgPayload.Length - 4]; + Array.Copy(msgPayload, 0, compressed, 0, msgPayload.Length - 4); + + byte flags = BinUtils.MakeCompressionFlags(LSLib.LS.Enums.CompressionMethod.LZ4, LSLib.LS.Enums.LSCompressionLevel.FastCompression); + byte[] decompressed = BinUtils.Decompress(compressed, (int)decompressedSize, flags); + var msg = StoryDebugInfoMsg.Parser.ParseFrom(decompressed); + var debugInfo = FromProtobuf(msg); + return debugInfo; } } diff --git a/DebuggerFrontend/DebugInfoSync.cs b/DebuggerFrontend/DebugInfoSync.cs index 5bac158a..146b548b 100644 --- a/DebuggerFrontend/DebugInfoSync.cs +++ b/DebuggerFrontend/DebugInfoSync.cs @@ -6,153 +6,152 @@ using System.Text; using System.Threading.Tasks; -namespace LSTools.DebuggerFrontend +namespace LSTools.DebuggerFrontend; + +public class DebugInfoSync { - public class DebugInfoSync + private StoryDebugInfo DebugInfo; + private Dictionary Goals = new Dictionary(); + private Dictionary Databases = new Dictionary(); + private Dictionary Nodes = new Dictionary(); + private Dictionary Rules = new Dictionary(); + + public Boolean Matches; + public List Reasons = new List(); + + public DebugInfoSync(StoryDebugInfo debugInfo) { - private StoryDebugInfo DebugInfo; - private Dictionary Goals = new Dictionary(); - private Dictionary Databases = new Dictionary(); - private Dictionary Nodes = new Dictionary(); - private Dictionary Rules = new Dictionary(); + DebugInfo = debugInfo; + } - public Boolean Matches; - public List Reasons = new List(); + public void 
AddData(BkSyncStoryData data) + { + foreach (var goal in data.Goal) + { + Goals.Add(goal.Id, goal); + } - public DebugInfoSync(StoryDebugInfo debugInfo) + foreach (var db in data.Database) { - DebugInfo = debugInfo; + Databases.Add(db.Id, db); } - public void AddData(BkSyncStoryData data) + foreach (var node in data.Node) { - foreach (var goal in data.Goal) - { - Goals.Add(goal.Id, goal); - } + Nodes.Add(node.Id, node); + } - foreach (var db in data.Database) - { - Databases.Add(db.Id, db); - } + foreach (var rule in data.Rule) + { + Rules.Add(rule.NodeId, rule); + } + } - foreach (var node in data.Node) - { - Nodes.Add(node.Id, node); - } + public void Finish() + { + if (Goals.Count != DebugInfo.Goals.Count) + { + Reasons.Add($"Goal count mismatch; local {DebugInfo.Goals.Count}, remote {Goals.Count}"); + } - foreach (var rule in data.Rule) - { - Rules.Add(rule.NodeId, rule); - } + if (Databases.Count != DebugInfo.Databases.Count) + { + Reasons.Add($"Database count mismatch; local {DebugInfo.Databases.Count}, remote {Databases.Count}"); } - public void Finish() + if (Nodes.Count != DebugInfo.Nodes.Count) { - if (Goals.Count != DebugInfo.Goals.Count) - { - Reasons.Add($"Goal count mismatch; local {DebugInfo.Goals.Count}, remote {Goals.Count}"); - } + Reasons.Add($"Node count mismatch; local {DebugInfo.Nodes.Count}, remote {Nodes.Count}"); + } - if (Databases.Count != DebugInfo.Databases.Count) - { - Reasons.Add($"Database count mismatch; local {DebugInfo.Databases.Count}, remote {Databases.Count}"); - } + if (Rules.Count != DebugInfo.Rules.Count) + { + Reasons.Add($"Rule count mismatch; local {DebugInfo.Rules.Count}, remote {Rules.Count}"); + } - if (Nodes.Count != DebugInfo.Nodes.Count) - { - Reasons.Add($"Node count mismatch; local {DebugInfo.Nodes.Count}, remote {Nodes.Count}"); - } + if (Reasons.Count > 0) + { + Matches = false; + return; + } - if (Rules.Count != DebugInfo.Rules.Count) + foreach (var goal in DebugInfo.Goals) + { + var remoteGoal = Goals[goal.Key]; + if (remoteGoal.Name != goal.Value.Name) { - Reasons.Add($"Rule count mismatch; local {DebugInfo.Rules.Count}, remote {Rules.Count}"); + Reasons.Add($"Goal {goal.Key} name mismatch; local {goal.Value.Name}, remote {remoteGoal.Name}"); } - if (Reasons.Count > 0) + if (remoteGoal.InitActions.Count != goal.Value.InitActions.Count) { - Matches = false; - return; + Reasons.Add($"Goal {goal.Key} INIT action count mismatch; local {goal.Value.InitActions.Count}, remote {remoteGoal.InitActions.Count}"); } - foreach (var goal in DebugInfo.Goals) + if (remoteGoal.ExitActions.Count != goal.Value.ExitActions.Count) { - var remoteGoal = Goals[goal.Key]; - if (remoteGoal.Name != goal.Value.Name) - { - Reasons.Add($"Goal {goal.Key} name mismatch; local {goal.Value.Name}, remote {remoteGoal.Name}"); - } - - if (remoteGoal.InitActions.Count != goal.Value.InitActions.Count) - { - Reasons.Add($"Goal {goal.Key} INIT action count mismatch; local {goal.Value.InitActions.Count}, remote {remoteGoal.InitActions.Count}"); - } + Reasons.Add($"Goal {goal.Key} EXIT action count mismatch; local {goal.Value.ExitActions.Count}, remote {remoteGoal.ExitActions.Count}"); + } - if (remoteGoal.ExitActions.Count != goal.Value.ExitActions.Count) - { - Reasons.Add($"Goal {goal.Key} EXIT action count mismatch; local {goal.Value.ExitActions.Count}, remote {remoteGoal.ExitActions.Count}"); - } + // TODO - check INIT/EXIT actions func, arity, goal id + } - // TODO - check INIT/EXIT actions func, arity, goal id + foreach (var db in DebugInfo.Databases) + { + var remoteDb 
= Databases[db.Key]; + if (remoteDb.ArgumentType.Count != db.Value.ParamTypes.Count) + { + Reasons.Add($"DB {db.Key} arity mismatch; local {db.Value.ParamTypes.Count}, remote {remoteDb.ArgumentType.Count}"); } - - foreach (var db in DebugInfo.Databases) + else { - var remoteDb = Databases[db.Key]; - if (remoteDb.ArgumentType.Count != db.Value.ParamTypes.Count) + for (var i = 0; i < db.Value.ParamTypes.Count; i++) { - Reasons.Add($"DB {db.Key} arity mismatch; local {db.Value.ParamTypes.Count}, remote {remoteDb.ArgumentType.Count}"); - } - else - { - for (var i = 0; i < db.Value.ParamTypes.Count; i++) + var localType = db.Value.ParamTypes[i]; + var remoteType = remoteDb.ArgumentType[i]; + if (localType != remoteType) { - var localType = db.Value.ParamTypes[i]; - var remoteType = remoteDb.ArgumentType[i]; - if (localType != remoteType) - { - Reasons.Add($"DB {db.Key} arg {i} mismatch; local {localType}, remote {remoteType}"); - } + Reasons.Add($"DB {db.Key} arg {i} mismatch; local {localType}, remote {remoteType}"); } } } + } - Dictionary ruleIdToIndexMap = new Dictionary(); + Dictionary ruleIdToIndexMap = new Dictionary(); - foreach (var node in DebugInfo.Nodes) + foreach (var node in DebugInfo.Nodes) + { + var remoteNode = Nodes[node.Key]; + if ((Node.Type)remoteNode.Type != node.Value.Type) { - var remoteNode = Nodes[node.Key]; - if ((Node.Type)remoteNode.Type != node.Value.Type) - { - Reasons.Add($"Node {node.Key} type mismatch; local {node.Value.Type}, remote {remoteNode.Type}"); - } - - if (remoteNode.Name != node.Value.Name - && remoteNode.Name != node.Value.Name + "__DEF__") - { - Reasons.Add($"Node {node.Key} name mismatch; local {node.Value.Name}, remote {remoteNode.Name}"); - } + Reasons.Add($"Node {node.Key} type mismatch; local {node.Value.Type}, remote {remoteNode.Type}"); + } - if (node.Value.RuleId != 0) - { - ruleIdToIndexMap[node.Value.RuleId] = node.Key; - } + if (remoteNode.Name != node.Value.Name + && remoteNode.Name != node.Value.Name + "__DEF__") + { + Reasons.Add($"Node {node.Key} name mismatch; local {node.Value.Name}, remote {remoteNode.Name}"); } - foreach (var ruleMapping in ruleIdToIndexMap) + if (node.Value.RuleId != 0) { - var localRule = DebugInfo.Rules[ruleMapping.Key]; - var remoteRule = Rules[ruleMapping.Value]; + ruleIdToIndexMap[node.Value.RuleId] = node.Key; + } + } - if (remoteRule.Actions.Count != localRule.Actions.Count) - { - Reasons.Add($"Rule {ruleMapping.Value} action count mismatch; local {localRule.Actions.Count}, remote {remoteRule.Actions.Count}"); - } + foreach (var ruleMapping in ruleIdToIndexMap) + { + var localRule = DebugInfo.Rules[ruleMapping.Key]; + var remoteRule = Rules[ruleMapping.Value]; - // TODO - check actions func, arity, goal id + if (remoteRule.Actions.Count != localRule.Actions.Count) + { + Reasons.Add($"Rule {ruleMapping.Value} action count mismatch; local {localRule.Actions.Count}, remote {remoteRule.Actions.Count}"); } - Matches = (Reasons.Count == 0); + // TODO - check actions func, arity, goal id } + + Matches = (Reasons.Count == 0); } } diff --git a/DebuggerFrontend/EvaluationResults.cs b/DebuggerFrontend/EvaluationResults.cs index 3eed0441..d676eefb 100644 --- a/DebuggerFrontend/EvaluationResults.cs +++ b/DebuggerFrontend/EvaluationResults.cs @@ -4,151 +4,150 @@ using System.Text; using System.Threading.Tasks; -namespace LSTools.DebuggerFrontend +namespace LSTools.DebuggerFrontend; + +class EvaluationResults { - class EvaluationResults + private ValueFormatter Formatter; + private Int64 VariableIndexPrefix; + private 
Int64 VariableReference; + private Int32 NumColumns; + private List Tuples; + public List ColumnNames; + + public int Count { - private ValueFormatter Formatter; - private Int64 VariableIndexPrefix; - private Int64 VariableReference; - private Int32 NumColumns; - private List Tuples; - public List ColumnNames; - - public int Count - { - get { return Tuples.Count; } - } + get { return Tuples.Count; } + } - public Int64 VariablesReference - { - get { return VariableReference; } - } + public Int64 VariablesReference + { + get { return VariableReference; } + } - public EvaluationResults(ValueFormatter formatter, Int64 variableReference, - Int64 variableIndexPrefix, Int32 numColumns) - { - Formatter = formatter; - VariableReference = variableReference; - VariableIndexPrefix = variableIndexPrefix; - NumColumns = numColumns; - Tuples = new List(); - } + public EvaluationResults(ValueFormatter formatter, Int64 variableReference, + Int64 variableIndexPrefix, Int32 numColumns) + { + Formatter = formatter; + VariableReference = variableReference; + VariableIndexPrefix = variableIndexPrefix; + NumColumns = numColumns; + Tuples = new List(); + } - public void Add(MsgTuple tuple) - { - Tuples.Add(tuple); - } + public void Add(MsgTuple tuple) + { + Tuples.Add(tuple); + } - public List GetRows(DAPVariablesRequest msg) - { - int startIndex = msg.start == null ? 0 : (int)msg.start; - int numVars = (msg.count == null || msg.count == 0) ? Tuples.Count : (int)msg.count; - int lastIndex = Math.Min(startIndex + numVars, Tuples.Count); - // TODO req.filter, format + public List GetRows(DAPVariablesRequest msg) + { + int startIndex = msg.start == null ? 0 : (int)msg.start; + int numVars = (msg.count == null || msg.count == 0) ? Tuples.Count : (int)msg.count; + int lastIndex = Math.Min(startIndex + numVars, Tuples.Count); + // TODO req.filter, format - var variables = new List(); - for (var i = startIndex; i < startIndex + numVars; i++) + var variables = new List(); + for (var i = startIndex; i < startIndex + numVars; i++) + { + var row = Tuples[i]; + var dapVar = new DAPVariable { - var row = Tuples[i]; - var dapVar = new DAPVariable - { - name = i.ToString(), - value = "(" + Formatter.TupleToString(row) + ")", + name = i.ToString(), + value = "(" + Formatter.TupleToString(row) + ")", #pragma warning disable CS0675 // Bitwise-or operator used on a sign-extended operand - variablesReference = VariableIndexPrefix | i, + variablesReference = VariableIndexPrefix | i, #pragma warning restore CS0675 // Bitwise-or operator used on a sign-extended operand - indexedVariables = ColumnNames == null ? NumColumns : 0, - namedVariables = ColumnNames == null ? 0 : NumColumns - }; - variables.Add(dapVar); - } + indexedVariables = ColumnNames == null ? NumColumns : 0, + namedVariables = ColumnNames == null ? 0 : NumColumns + }; + variables.Add(dapVar); + } - return variables; + return variables; + } + + public List GetRow(DAPVariablesRequest msg, int rowIndex) + { + if (rowIndex < 0 || rowIndex >= Tuples.Count) + { + throw new RequestFailedException($"Requested nonexistent row {rowIndex}"); } - public List GetRow(DAPVariablesRequest msg, int rowIndex) + int startIndex = msg.start == null ? 0 : (int)msg.start; + int numVars = (msg.count == null || msg.count == 0) ? 
NumColumns : (int)msg.count; + int lastIndex = Math.Min(startIndex + numVars, NumColumns); + // TODO req.filter, format + + var row = Tuples[rowIndex]; + var variables = new List(); + for (var i = startIndex; i < startIndex + numVars; i++) { - if (rowIndex < 0 || rowIndex >= Tuples.Count) + var dapVar = new DAPVariable { - throw new RequestFailedException($"Requested nonexistent row {rowIndex}"); - } + name = ColumnNames == null ? i.ToString() : ColumnNames[i], + value = Formatter.ValueToString(row.Column[i]) + }; + variables.Add(dapVar); + } - int startIndex = msg.start == null ? 0 : (int)msg.start; - int numVars = (msg.count == null || msg.count == 0) ? NumColumns : (int)msg.count; - int lastIndex = Math.Min(startIndex + numVars, NumColumns); - // TODO req.filter, format + return variables; + } +} - var row = Tuples[rowIndex]; - var variables = new List(); - for (var i = startIndex; i < startIndex + numVars; i++) - { - var dapVar = new DAPVariable - { - name = ColumnNames == null ? i.ToString() : ColumnNames[i], - value = Formatter.ValueToString(row.Column[i]) - }; - variables.Add(dapVar); - } +class EvaluationResultManager +{ + private ValueFormatter Formatter; + private List Results; - return variables; - } + public EvaluationResultManager(ValueFormatter formatter) + { + Formatter = formatter; + Results = new List(); } - class EvaluationResultManager + public EvaluationResults MakeResults(int numColumns) { - private ValueFormatter Formatter; - private List Results; + return MakeResults(numColumns, null); + } - public EvaluationResultManager(ValueFormatter formatter) - { - Formatter = formatter; - Results = new List(); - } + public EvaluationResults MakeResults(int numColumns, List columnNames) + { + var variableRef = ((UInt64)1 << 48) | ((UInt64)Results.Count << 24); + var variableIndexPrefix = ((UInt64)2 << 48) | ((UInt64)Results.Count << 24); + var result = new EvaluationResults(Formatter, (Int64)variableRef, (Int64)variableIndexPrefix, numColumns); + result.ColumnNames = columnNames; + Results.Add(result); + return result; + } - public EvaluationResults MakeResults(int numColumns) + public List GetVariables(DAPVariablesRequest msg, long variablesReference) + { + long variableType = (variablesReference >> 48); + if (variableType == 1) { - return MakeResults(numColumns, null); - } + int resultSetIdx = (int)((variablesReference >> 24) & 0xffffff); + if (resultSetIdx < 0 || resultSetIdx >= Results.Count) + { + throw new InvalidOperationException($"Evaluation result set ID does not exist {resultSetIdx}"); + } - public EvaluationResults MakeResults(int numColumns, List columnNames) - { - var variableRef = ((UInt64)1 << 48) | ((UInt64)Results.Count << 24); - var variableIndexPrefix = ((UInt64)2 << 48) | ((UInt64)Results.Count << 24); - var result = new EvaluationResults(Formatter, (Int64)variableRef, (Int64)variableIndexPrefix, numColumns); - result.ColumnNames = columnNames; - Results.Add(result); - return result; + return Results[resultSetIdx].GetRows(msg); } - - public List GetVariables(DAPVariablesRequest msg, long variablesReference) + else if (variableType == 2) { - long variableType = (variablesReference >> 48); - if (variableType == 1) + int resultSetIdx = (int)((variablesReference >> 24) & 0xffffff); + if (resultSetIdx < 0 || resultSetIdx >= Results.Count) { - int resultSetIdx = (int)((variablesReference >> 24) & 0xffffff); - if (resultSetIdx < 0 || resultSetIdx >= Results.Count) - { - throw new InvalidOperationException($"Evaluation result set ID does not exist {resultSetIdx}"); 
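            // Layout of the 64-bit variablesReference packed by EvaluationResultManager.MakeResults
            // and unpacked below (a sketch inferred from the shifts and masks in this file):
            //   bits 48..63  reference kind: 1 = a whole result set (list of rows),
            //                2 = a row prefix (GetRows ORs the row index into the low bits)
            //   bits 24..47  index of the result set within Results
            //   bits  0..23  row index within that result set (kind 2 only)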
- } - - return Results[resultSetIdx].GetRows(msg); - } - else if (variableType == 2) - { - int resultSetIdx = (int)((variablesReference >> 24) & 0xffffff); - if (resultSetIdx < 0 || resultSetIdx >= Results.Count) - { - throw new InvalidOperationException($"Evaluation result set ID does not exist {resultSetIdx}"); - } - - int rowIndex = (int)(msg.variablesReference & 0xffffff); - return Results[resultSetIdx].GetRow(msg, rowIndex); - } - else - { - throw new InvalidOperationException($"EvaluationResultManager does not support this variable type: {variableType}"); + throw new InvalidOperationException($"Evaluation result set ID does not exist {resultSetIdx}"); } + + int rowIndex = (int)(msg.variablesReference & 0xffffff); + return Results[resultSetIdx].GetRow(msg, rowIndex); + } + else + { + throw new InvalidOperationException($"EvaluationResultManager does not support this variable type: {variableType}"); } } } diff --git a/DebuggerFrontend/ExpressionEvaluator.cs b/DebuggerFrontend/ExpressionEvaluator.cs index 071790ef..38450749 100644 --- a/DebuggerFrontend/ExpressionEvaluator.cs +++ b/DebuggerFrontend/ExpressionEvaluator.cs @@ -9,261 +9,261 @@ using System.Text.RegularExpressions; using System.Threading.Tasks; -namespace LSTools.DebuggerFrontend +namespace LSTools.DebuggerFrontend; + +class PendingExpressionEvaluation { - class PendingExpressionEvaluation - { - public DAPRequest Request; - public EvaluationResults Results; - public NodeDebugInfo Node; - public FunctionDebugInfo Function; - } + public DAPRequest Request; + public EvaluationResults Results; + public NodeDebugInfo Node; + public FunctionDebugInfo Function; +} - class ExpressionEvaluator +class ExpressionEvaluator +{ + private StoryDebugInfo DebugInfo; + private DebuggerClient DbgClient; + private DAPStream DAP; + private Dictionary NameToNodeMap; + public DatabaseEnumerator DatabaseDumper; + private EvaluationResultManager EvalResults; + private Dictionary PendingEvaluations = new Dictionary(); + + public ExpressionEvaluator(StoryDebugInfo debugInfo, DAPStream dap, DebuggerClient dbgClient, ValueFormatter formatter, + EvaluationResultManager results) { - private StoryDebugInfo DebugInfo; - private DebuggerClient DbgClient; - private DAPStream DAP; - private Dictionary NameToNodeMap; - public DatabaseEnumerator DatabaseDumper; - private EvaluationResultManager EvalResults; - private Dictionary PendingEvaluations = new Dictionary(); - - public ExpressionEvaluator(StoryDebugInfo debugInfo, DAPStream dap, DebuggerClient dbgClient, ValueFormatter formatter, - EvaluationResultManager results) - { - DebugInfo = debugInfo; - DbgClient = dbgClient; - DAP = dap; - DatabaseDumper = new DatabaseEnumerator(dbgClient, dap, debugInfo, formatter, results); - EvalResults = results; + DebugInfo = debugInfo; + DbgClient = dbgClient; + DAP = dap; + DatabaseDumper = new DatabaseEnumerator(dbgClient, dap, debugInfo, formatter, results); + EvalResults = results; - DbgClient.OnEvaluateRow = this.OnEvaluateRow; - DbgClient.OnEvaluateFinished = this.OnEvaluateFinished; + DbgClient.OnEvaluateRow = this.OnEvaluateRow; + DbgClient.OnEvaluateFinished = this.OnEvaluateFinished; - MakeFunctionNameMap(); - } + MakeFunctionNameMap(); + } - private void MakeFunctionNameMap() + private void MakeFunctionNameMap() + { + NameToNodeMap = new Dictionary(); + foreach (var node in DebugInfo.Nodes) { - NameToNodeMap = new Dictionary(); - foreach (var node in DebugInfo.Nodes) + if (node.Value.FunctionName != null) { - if (node.Value.FunctionName != null) + NodeDebugInfo 
existingNode; + // Make sure that we don't overwrite user queries with their PROC equivalents + if (NameToNodeMap.TryGetValue(node.Value.FunctionName, out existingNode)) { - NodeDebugInfo existingNode; - // Make sure that we don't overwrite user queries with their PROC equivalents - if (NameToNodeMap.TryGetValue(node.Value.FunctionName, out existingNode)) - { - if (existingNode.Type != Node.Type.UserQuery) - { - NameToNodeMap[node.Value.FunctionName] = node.Value; - } - } - else + if (existingNode.Type != Node.Type.UserQuery) { - NameToNodeMap.Add(node.Value.FunctionName, node.Value); + NameToNodeMap[node.Value.FunctionName] = node.Value; } } + else + { + NameToNodeMap.Add(node.Value.FunctionName, node.Value); + } } } + } - private MsgTypedValue ConstantToTypedValue(ConstantValue c) + private MsgTypedValue ConstantToTypedValue(ConstantValue c) + { + var tv = new MsgTypedValue(); + // TODO - c.TypeName? + switch (c.Type) + { + case IRConstantType.Integer: + tv.TypeId = (UInt32)Value.Type.Integer; + tv.Intval = c.IntegerValue; + break; + + case IRConstantType.Float: + tv.TypeId = (UInt32)Value.Type.Float; + tv.Floatval = c.FloatValue; + break; + + case IRConstantType.String: + tv.TypeId = (UInt32)Value.Type.String; + tv.Stringval = c.StringValue; + break; + + case IRConstantType.Name: + tv.TypeId = (UInt32)Value.Type.GuidString; + tv.Stringval = c.StringValue; + break; + + default: + throw new ArgumentException("Constant has unknown type"); + } + + return tv; + } + + + private MsgTypedValue VariableToTypedValue(LocalVar lvar, CoalescedFrame frame) + { + // TODO - lvar.Type? + if (lvar.Name == "_") { var tv = new MsgTypedValue(); - // TODO - c.TypeName? - switch (c.Type) + tv.TypeId = (UInt32)Value.Type.None; + return tv; + } + else + { + var frameVar = frame.Variables.FirstOrDefault(v => v.Name == lvar.Name); + if (frameVar == null) { - case IRConstantType.Integer: - tv.TypeId = (UInt32)Value.Type.Integer; - tv.Intval = c.IntegerValue; - break; - - case IRConstantType.Float: - tv.TypeId = (UInt32)Value.Type.Float; - tv.Floatval = c.FloatValue; - break; - - case IRConstantType.String: - tv.TypeId = (UInt32)Value.Type.String; - tv.Stringval = c.StringValue; - break; - - case IRConstantType.Name: - tv.TypeId = (UInt32)Value.Type.GuidString; - tv.Stringval = c.StringValue; - break; - - default: - throw new ArgumentException("Constant has unknown type"); + throw new RequestFailedException($"Variable does not exist: \"{lvar.Name}\""); } - return tv; + return frameVar.TypedValue; } + } - private MsgTypedValue VariableToTypedValue(LocalVar lvar, CoalescedFrame frame) + private MsgTuple ParamsToTuple(IEnumerable args, CoalescedFrame frame) + { + var tuple = new MsgTuple(); + foreach (var arg in args) { - // TODO - lvar.Type? 
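// A minimal sketch of the argument-to-tuple conversion that ParamsToTuple and
// VariableToTypedValue perform in the evaluator code around this point, restated over
// simplified stand-in types so it compiles on its own. ArgValue, ConstArg, LocalArg,
// FrameVar and TypedValue are hypothetical; the real code uses RValue, ConstantValue,
// LocalVar, DebugVariable and MsgTypedValue, and throws RequestFailedException.
using System;
using System.Collections.Generic;
using System.Linq;

sealed class TypedValue { public string Text; }
sealed class FrameVar { public string Name; public TypedValue Value; }
abstract class ArgValue { }
sealed class ConstArg : ArgValue { public TypedValue Value; }
sealed class LocalArg : ArgValue { public string Name; }

static class CallTupleSketch
{
    public static List<TypedValue> Build(IEnumerable<ArgValue> args, IList<FrameVar> frameVars)
    {
        var tuple = new List<TypedValue>();
        foreach (var arg in args)
        {
            if (arg is ConstArg c)
            {
                // Constants are converted directly (ConstantToTypedValue in the code above).
                tuple.Add(c.Value);
            }
            else if (arg is LocalArg l)
            {
                if (l.Name == "_")
                {
                    // "_" is the unused/wildcard column; the code above emits a None-typed value for it.
                    tuple.Add(new TypedValue { Text = "(None)" });
                }
                else if (frameVars == null)
                {
                    // Rule-local variables can only be resolved against the selected stack frame.
                    throw new InvalidOperationException("Local variables cannot be referenced without a stack frame");
                }
                else
                {
                    var match = frameVars.FirstOrDefault(v => v.Name == l.Name)
                        ?? throw new InvalidOperationException($"Variable does not exist: \"{l.Name}\"");
                    tuple.Add(match.Value);
                }
            }
        }
        return tuple;
    }
}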
- if (lvar.Name == "_") + if (arg is ConstantValue) { - var tv = new MsgTypedValue(); - tv.TypeId = (UInt32)Value.Type.None; - return tv; + tuple.Column.Add(ConstantToTypedValue(arg as ConstantValue)); } else { - var frameVar = frame.Variables.FirstOrDefault(v => v.Name == lvar.Name); - if (frameVar == null) + if (frame != null) { - throw new RequestFailedException($"Variable does not exist: \"{lvar.Name}\""); + tuple.Column.Add(VariableToTypedValue(arg as LocalVar, frame)); + } + else + { + throw new RequestFailedException("Local variables cannot be referenced without a stack frame"); } - - return frameVar.TypedValue; } } + return tuple; + } - private MsgTuple ParamsToTuple(IEnumerable args, CoalescedFrame frame) + public void EvaluateCall(DAPRequest request, Statement stmt, CoalescedFrame frame, bool allowMutation) + { + NodeDebugInfo node; + var func = new FunctionNameAndArity(stmt.Name, stmt.Params.Count); + if (!NameToNodeMap.TryGetValue(func, out node)) { - var tuple = new MsgTuple(); - foreach (var arg in args) - { - if (arg is ConstantValue) + DAP.SendReply(request, "Name not found: " + func); + return; + } + + var function = DebugInfo.Functions[node.FunctionName]; + var args = ParamsToTuple(stmt.Params, frame); + + DbgEvaluate.Types.EvalType evalType; + switch (node.Type) + { + case Node.Type.Database: + if (stmt.Not) { - tuple.Column.Add(ConstantToTypedValue(arg as ConstantValue)); + evalType = DbgEvaluate.Types.EvalType.Insert; } else { - if (frame != null) - { - tuple.Column.Add(VariableToTypedValue(arg as LocalVar, frame)); - } - else - { - throw new RequestFailedException("Local variables cannot be referenced without a stack frame"); - } + evalType = DbgEvaluate.Types.EvalType.Delete; } - } - - return tuple; - } - - public void EvaluateCall(DAPRequest request, Statement stmt, CoalescedFrame frame, bool allowMutation) - { - NodeDebugInfo node; - var func = new FunctionNameAndArity(stmt.Name, stmt.Params.Count); - if (!NameToNodeMap.TryGetValue(func, out node)) - { - DAP.SendReply(request, "Name not found: " + func); - return; - } + break; - var function = DebugInfo.Functions[node.FunctionName]; - var args = ParamsToTuple(stmt.Params, frame); + case Node.Type.Proc: + if (stmt.Not) + { + throw new RequestFailedException("\"NOT\" statements not supported for PROCs"); + } - DbgEvaluate.Types.EvalType evalType; - switch (node.Type) - { - case Node.Type.Database: - if (stmt.Not) - { - evalType = DbgEvaluate.Types.EvalType.Insert; - } - else - { - evalType = DbgEvaluate.Types.EvalType.Delete; - } - break; + evalType = DbgEvaluate.Types.EvalType.Insert; + break; - case Node.Type.Proc: - if (stmt.Not) - { - throw new RequestFailedException("\"NOT\" statements not supported for PROCs"); - } + case Node.Type.DivQuery: + case Node.Type.InternalQuery: + case Node.Type.UserQuery: + if (stmt.Not) + { + throw new RequestFailedException("\"NOT\" statements not supported for QRYs"); + } - evalType = DbgEvaluate.Types.EvalType.Insert; - break; + evalType = DbgEvaluate.Types.EvalType.IsValid; + break; - case Node.Type.DivQuery: - case Node.Type.InternalQuery: - case Node.Type.UserQuery: - if (stmt.Not) - { - throw new RequestFailedException("\"NOT\" statements not supported for QRYs"); - } + default: + throw new RequestFailedException($"Eval node type not supported: {node.Type}"); + } - evalType = DbgEvaluate.Types.EvalType.IsValid; - break; + if ((evalType != DbgEvaluate.Types.EvalType.IsValid + || node.Type == Node.Type.UserQuery) + && !allowMutation) + { + throw new 
RequestFailedException($"Evaluation could cause game state change"); + } + + UInt32 seq = DbgClient.SendEvaluate(evalType, node.Id, args); - default: - throw new RequestFailedException($"Eval node type not supported: {node.Type}"); - } + var argNames = function.Params.Select(arg => arg.Name).ToList(); + var eval = new PendingExpressionEvaluation + { + Request = request, + Results = EvalResults.MakeResults(function.Params.Count, argNames), + Node = node, + Function = function + }; + PendingEvaluations.Add(seq, eval); + } - if ((evalType != DbgEvaluate.Types.EvalType.IsValid - || node.Type == Node.Type.UserQuery) - && !allowMutation) - { - throw new RequestFailedException($"Evaluation could cause game state change"); - } - - UInt32 seq = DbgClient.SendEvaluate(evalType, node.Id, args); + public void EvaluateName(DAPRequest request, string name, bool allowMutation) + { + if (name == "help") + { + SendUsage(); + return; + } - var argNames = function.Params.Select(arg => arg.Name).ToList(); - var eval = new PendingExpressionEvaluation - { - Request = request, - Results = EvalResults.MakeResults(function.Params.Count, argNames), - Node = node, - Function = function - }; - PendingEvaluations.Add(seq, eval); + // TODO - this is bad for performance! + var db = DebugInfo.Databases.Values.FirstOrDefault(r => r.Name == name); + if (db == null) + { + throw new RequestFailedException($"Database does not exist: \"{name}\""); } - public void EvaluateName(DAPRequest request, string name, bool allowMutation) + DatabaseDumper.RequestDatabaseEvaluation(request, db.Id); + } + + private Statement Parse(string expression) + { + var exprBytes = Encoding.UTF8.GetBytes(expression); + using (var exprStream = new MemoryStream(exprBytes)) { - if (name == "help") - { - SendUsage(); - return; - } + var scanner = new ExpressionScanner(); + scanner.SetSource(exprStream); + var parser = new ExpressionParser(scanner); + bool parsed = parser.Parse(); - // TODO - this is bad for performance! - var db = DebugInfo.Databases.Values.FirstOrDefault(r => r.Name == name); - if (db == null) + if (parsed) { - throw new RequestFailedException($"Database does not exist: \"{name}\""); + return parser.GetStatement(); } - - DatabaseDumper.RequestDatabaseEvaluation(request, db.Id); - } - - private Statement Parse(string expression) - { - var exprBytes = Encoding.UTF8.GetBytes(expression); - using (var exprStream = new MemoryStream(exprBytes)) + else { - var scanner = new ExpressionScanner(); - scanner.SetSource(exprStream); - var parser = new ExpressionParser(scanner); - bool parsed = parser.Parse(); - - if (parsed) - { - return parser.GetStatement(); - } - else - { - return null; - } + return null; } } + } - private void SendUsage() - { - string usageText = $@"Basic Usage: + private void SendUsage() + { + string usageText = $@"Basic Usage: Dump the contents of a database: DB_Database Insert a row into a database (EXPERIMENTAL!): DB_Database(1, 2, 3) Delete a row from a database (EXPERIMENTAL!): NOT DB_Database(4, 5, 6) @@ -278,132 +278,131 @@ Delete a row from a database (EXPERIMENTAL!): NOT DB_Database(4, 5, 6) - You can use local variables from the active rule (_Char, etc.) in the expressions. 
"; - var outputMsg = new DAPOutputMessage - { - category = "console", - output = usageText - }; - DAP.SendEvent("output", outputMsg); - } - - public void Evaluate(DAPRequest request, string expression, CoalescedFrame frame, bool allowMutation) + var outputMsg = new DAPOutputMessage { - var stmt = Parse(expression); - if (stmt == null) - { - DAP.SendReply(request, "Syntax error. Type \"help\" for usage."); - return; - } + category = "console", + output = usageText + }; + DAP.SendEvent("output", outputMsg); + } - if (stmt.Params == null) - { - EvaluateName(request, stmt.Name, allowMutation); - } - else - { - EvaluateCall(request, stmt, frame, allowMutation); - } + public void Evaluate(DAPRequest request, string expression, CoalescedFrame frame, bool allowMutation) + { + var stmt = Parse(expression); + if (stmt == null) + { + DAP.SendReply(request, "Syntax error. Type \"help\" for usage."); + return; } - private void OnEvaluateRow(UInt32 seq, BkEvaluateRow msg) + if (stmt.Params == null) { - var results = PendingEvaluations[seq].Results; - foreach (var row in msg.Row) - { - results.Add(row); - } + EvaluateName(request, stmt.Name, allowMutation); } - - private void OnEvaluateFinished(UInt32 seq, BkEvaluateFinished msg) + else { - var eval = PendingEvaluations[seq]; + EvaluateCall(request, stmt, frame, allowMutation); + } + } - if (msg.ResultCode != StatusCode.Success) - { - DAP.SendReply(eval.Request, $"Evaluation failed: DBG server sent error code: {msg.ResultCode}"); - return; - } + private void OnEvaluateRow(UInt32 seq, BkEvaluateRow msg) + { + var results = PendingEvaluations[seq].Results; + foreach (var row in msg.Row) + { + results.Add(row); + } + } - var funcType = (LSLib.LS.Story.FunctionType)eval.Function.TypeId; - if (eval.Node.Type == Node.Type.UserQuery) - { - funcType = LSLib.LS.Story.FunctionType.UserQuery; - } + private void OnEvaluateFinished(UInt32 seq, BkEvaluateFinished msg) + { + var eval = PendingEvaluations[seq]; - string resultText = ""; - string consoleText = ""; - bool returnResults; - switch (funcType) - { - case LSLib.LS.Story.FunctionType.Event: - consoleText = $"Event {eval.Node.FunctionName} triggered"; - returnResults = false; - break; - - case LSLib.LS.Story.FunctionType.Query: - case LSLib.LS.Story.FunctionType.SysQuery: - case LSLib.LS.Story.FunctionType.UserQuery: - if (msg.QuerySucceeded) - { - consoleText = $"Query {eval.Node.FunctionName} SUCCEEDED"; - } - else - { - consoleText = $"Query {eval.Node.FunctionName} FAILED"; - } + if (msg.ResultCode != StatusCode.Success) + { + DAP.SendReply(eval.Request, $"Evaluation failed: DBG server sent error code: {msg.ResultCode}"); + return; + } - resultText = "Query results"; - returnResults = (funcType != LSLib.LS.Story.FunctionType.UserQuery); - break; + var funcType = (LSLib.LS.Story.FunctionType)eval.Function.TypeId; + if (eval.Node.Type == Node.Type.UserQuery) + { + funcType = LSLib.LS.Story.FunctionType.UserQuery; + } - case LSLib.LS.Story.FunctionType.Proc: - consoleText = $"PROC {eval.Node.FunctionName} called"; - returnResults = false; - break; + string resultText = ""; + string consoleText = ""; + bool returnResults; + switch (funcType) + { + case LSLib.LS.Story.FunctionType.Event: + consoleText = $"Event {eval.Node.FunctionName} triggered"; + returnResults = false; + break; + + case LSLib.LS.Story.FunctionType.Query: + case LSLib.LS.Story.FunctionType.SysQuery: + case LSLib.LS.Story.FunctionType.UserQuery: + if (msg.QuerySucceeded) + { + consoleText = $"Query {eval.Node.FunctionName} SUCCEEDED"; + } + else 
+ { + consoleText = $"Query {eval.Node.FunctionName} FAILED"; + } - case LSLib.LS.Story.FunctionType.SysCall: - case LSLib.LS.Story.FunctionType.Call: - consoleText = $"Built-in function {eval.Node.FunctionName} called"; - returnResults = false; - break; + resultText = "Query results"; + returnResults = (funcType != LSLib.LS.Story.FunctionType.UserQuery); + break; - case LSLib.LS.Story.FunctionType.Database: - consoleText = $"Inserted row into {eval.Node.FunctionName}"; - returnResults = false; - break; + case LSLib.LS.Story.FunctionType.Proc: + consoleText = $"PROC {eval.Node.FunctionName} called"; + returnResults = false; + break; - default: - throw new InvalidOperationException($"Unknown function type: {eval.Function.TypeId}"); - } + case LSLib.LS.Story.FunctionType.SysCall: + case LSLib.LS.Story.FunctionType.Call: + consoleText = $"Built-in function {eval.Node.FunctionName} called"; + returnResults = false; + break; - if (consoleText.Length > 0) - { - var outputMsg = new DAPOutputMessage - { - category = "console", - output = consoleText + "\r\n" - }; - DAP.SendEvent("output", outputMsg); - } + case LSLib.LS.Story.FunctionType.Database: + consoleText = $"Inserted row into {eval.Node.FunctionName}"; + returnResults = false; + break; - if (funcType == LSLib.LS.Story.FunctionType.Database) - { - // For database inserts we'll return the whole database in the response. - DatabaseDumper.RequestDatabaseEvaluation(eval.Request, eval.Node.DatabaseId); - return; - } + default: + throw new InvalidOperationException($"Unknown function type: {eval.Function.TypeId}"); + } - var evalResponse = new DAPEvaluateResponse + if (consoleText.Length > 0) + { + var outputMsg = new DAPOutputMessage { - result = resultText, - namedVariables = 0, - indexedVariables = returnResults ? eval.Results.Count : 0, - variablesReference = returnResults ? eval.Results.VariablesReference : 0 + category = "console", + output = consoleText + "\r\n" }; + DAP.SendEvent("output", outputMsg); + } - DAP.SendReply(eval.Request, evalResponse); - - PendingEvaluations.Remove(seq); + if (funcType == LSLib.LS.Story.FunctionType.Database) + { + // For database inserts we'll return the whole database in the response. + DatabaseDumper.RequestDatabaseEvaluation(eval.Request, eval.Node.DatabaseId); + return; } + + var evalResponse = new DAPEvaluateResponse + { + result = resultText, + namedVariables = 0, + indexedVariables = returnResults ? eval.Results.Count : 0, + variablesReference = returnResults ? eval.Results.VariablesReference : 0 + }; + + DAP.SendReply(eval.Request, evalResponse); + + PendingEvaluations.Remove(seq); } } diff --git a/DebuggerFrontend/ExpressionParser/ExpressionNodes.cs b/DebuggerFrontend/ExpressionParser/ExpressionNodes.cs index 925c935e..0d66ccfa 100644 --- a/DebuggerFrontend/ExpressionParser/ExpressionNodes.cs +++ b/DebuggerFrontend/ExpressionParser/ExpressionNodes.cs @@ -2,81 +2,80 @@ using System; using System.Collections.Generic; -namespace LSLib.DebuggerFrontend.ExpressionParser +namespace LSLib.DebuggerFrontend.ExpressionParser; + +/// +/// Base class for all nodes. +/// (This doesn't do anything meaningful, it is needed only to +/// provide the GPPG parser a semantic value base class.) +/// +public class ExpressionNode { - /// - /// Base class for all nodes. - /// (This doesn't do anything meaningful, it is needed only to - /// provide the GPPG parser a semantic value base class.) - /// - public class ExpressionNode - { - } - - /// - /// Parameter list of an expression. 
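// A minimal sketch, assuming the node classes from ExpressionNodes.cs in this diff
// (Statement, RValue, ConstantValue, LocalVar, IRConstantType) are in scope: an input such as
//   NOT DB_InRegion(_Char, "Cloisterwood", 3)
// would be represented roughly as the tree built below. The database name and parameter
// values are hypothetical, chosen only to show one parameter of each kind.
using System.Collections.Generic;

static class ParsedExpressionSketch
{
    public static Statement Sample() => new Statement
    {
        Name = "DB_InRegion",
        Not = true,
        Params = new List<RValue>
        {
            new LocalVar { Name = "_Char" },
            new ConstantValue { Type = IRConstantType.String, StringValue = "Cloisterwood" },
            new ConstantValue { Type = IRConstantType.Integer, IntegerValue = 3 }
        }
    };
}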
- /// This is discarded during parsing and does not appear in the final tree. - /// - public class StatementParamList : ExpressionNode - { - public List Params = new List(); - } +} - /// - /// An expression. - /// This is either a PROC call, QRY, or a database insert/delete operation. - /// - public class Statement : ExpressionNode - { - // Function name - public String Name; - // Statement negation ("DB_Something(1)" vs. "NOT DB_Something(1)"). - public bool Not; - // List of parameters - public List Params; - } +/// +/// Parameter list of an expression. +/// This is discarded during parsing and does not appear in the final tree. +/// +public class StatementParamList : ExpressionNode +{ + public List Params = new List(); +} + +/// +/// An expression. +/// This is either a PROC call, QRY, or a database insert/delete operation. +/// +public class Statement : ExpressionNode +{ + // Function name + public String Name; + // Statement negation ("DB_Something(1)" vs. "NOT DB_Something(1)"). + public bool Not; + // List of parameters + public List Params; +} - public class RValue : ExpressionNode - { - } +public class RValue : ExpressionNode +{ +} - /// - /// Constant scalar value. - /// - public class ConstantValue : RValue - { - // Type of value, if specified in the code. - // (e.g. "(INT64)123") - public String TypeName; - // Internal type of the constant - public IRConstantType Type; - // Value of this constant if the type is Integer. - public Int64 IntegerValue; - // Value of this constant if the type is Float. - public Single FloatValue; - // Value of this constant if the type is String or Name. - public String StringValue; - } +/// +/// Constant scalar value. +/// +public class ConstantValue : RValue +{ + // Type of value, if specified in the code. + // (e.g. "(INT64)123") + public String TypeName; + // Internal type of the constant + public IRConstantType Type; + // Value of this constant if the type is Integer. + public Int64 IntegerValue; + // Value of this constant if the type is Float. + public Single FloatValue; + // Value of this constant if the type is String or Name. + public String StringValue; +} - /// - /// Rule-local variable name. - /// (Any variable that begins with an underscore) - /// - public class LocalVar : RValue - { - // Type of variable, if specified in the code. - // (e.g. "(ITEMGUID)_Var") - public String Type; - // Name of variable. - public String Name; - } +/// +/// Rule-local variable name. +/// (Any variable that begins with an underscore) +/// +public class LocalVar : RValue +{ + // Type of variable, if specified in the code. + // (e.g. "(ITEMGUID)_Var") + public String Type; + // Name of variable. + public String Name; +} - /// - /// String literal from lexing stage (yytext). - /// This is discarded during parsing and does not appear in the final tree. - /// - public class Literal : ExpressionNode - { - public String Lit; - } +/// +/// String literal from lexing stage (yytext). +/// This is discarded during parsing and does not appear in the final tree. 
+/// +public class Literal : ExpressionNode +{ + public String Lit; } diff --git a/DebuggerFrontend/ExpressionParser/ExpressionParser.cs b/DebuggerFrontend/ExpressionParser/ExpressionParser.cs index 346b8335..8a22cfaa 100644 --- a/DebuggerFrontend/ExpressionParser/ExpressionParser.cs +++ b/DebuggerFrontend/ExpressionParser/ExpressionParser.cs @@ -5,118 +5,117 @@ using System.IO; using System.Text.RegularExpressions; -namespace LSLib.DebuggerFrontend.ExpressionParser +namespace LSLib.DebuggerFrontend.ExpressionParser; + +internal class ParserConstants +{ + public static CultureInfo ParserCulture = new CultureInfo("en-US"); +} + +public abstract class ExpressionScanBase : AbstractScanner { - internal class ParserConstants + protected virtual bool yywrap() { return true; } + + protected Literal MakeLiteral(string lit) => new Literal() { - public static CultureInfo ParserCulture = new CultureInfo("en-US"); - } + Lit = lit + }; - public abstract class ExpressionScanBase : AbstractScanner + protected Literal MakeString(string lit) { - protected virtual bool yywrap() { return true; } + return MakeLiteral(Regex.Unescape(lit.Substring(1, lit.Length - 2))); + } +} - protected Literal MakeLiteral(string lit) => new Literal() - { - Lit = lit - }; +public sealed partial class ExpressionScanner : ExpressionScanBase +{ +} - protected Literal MakeString(string lit) - { - return MakeLiteral(Regex.Unescape(lit.Substring(1, lit.Length - 2))); - } +public partial class ExpressionParser +{ + public ExpressionParser(ExpressionScanner scnr) : base(scnr) + { } - public sealed partial class ExpressionScanner : ExpressionScanBase + public Statement GetStatement() { + return CurrentSemanticValue as Statement; } - public partial class ExpressionParser + private Statement MakeStatement(ExpressionNode name, ExpressionNode paramList, bool not) => new Statement { - public ExpressionParser(ExpressionScanner scnr) : base(scnr) - { - } + Name = (name as Literal).Lit, + Not = not, + Params = (paramList as StatementParamList).Params + }; - public Statement GetStatement() - { - return CurrentSemanticValue as Statement; - } + private Statement MakeStatement(ExpressionNode name, bool not) => new Statement + { + Name = (name as Literal).Lit, + Not = not + }; - private Statement MakeStatement(ExpressionNode name, ExpressionNode paramList, bool not) => new Statement - { - Name = (name as Literal).Lit, - Not = not, - Params = (paramList as StatementParamList).Params - }; + private StatementParamList MakeParamList() => new StatementParamList(); - private Statement MakeStatement(ExpressionNode name, bool not) => new Statement - { - Name = (name as Literal).Lit, - Not = not - }; - - private StatementParamList MakeParamList() => new StatementParamList(); + private StatementParamList MakeParamList(ExpressionNode param) + { + var list = new StatementParamList(); + list.Params.Add(param as RValue); + return list; + } - private StatementParamList MakeParamList(ExpressionNode param) - { - var list = new StatementParamList(); - list.Params.Add(param as RValue); - return list; - } + private StatementParamList MakeParamList(ExpressionNode list, ExpressionNode param) + { + var actionParamList = list as StatementParamList; + actionParamList.Params.Add(param as RValue); + return actionParamList; + } - private StatementParamList MakeParamList(ExpressionNode list, ExpressionNode param) - { - var actionParamList = list as StatementParamList; - actionParamList.Params.Add(param as RValue); - return actionParamList; - } + private LocalVar 
MakeLocalVar(ExpressionNode varName) => new LocalVar() + { + Name = (varName as Literal).Lit + }; - private LocalVar MakeLocalVar(ExpressionNode varName) => new LocalVar() - { - Name = (varName as Literal).Lit - }; + private LocalVar MakeLocalVar(ExpressionNode typeName, ExpressionNode varName) => new LocalVar() + { + Type = (typeName as Literal).Lit, + Name = (varName as Literal).Lit + }; - private LocalVar MakeLocalVar(ExpressionNode typeName, ExpressionNode varName) => new LocalVar() + private ConstantValue MakeTypedConstant(ExpressionNode typeName, ExpressionNode constant) + { + var c = constant as ConstantValue; + return new ConstantValue() { - Type = (typeName as Literal).Lit, - Name = (varName as Literal).Lit + TypeName = (typeName as Literal).Lit, + Type = c.Type, + StringValue = c.StringValue, + FloatValue = c.FloatValue, + IntegerValue = c.IntegerValue, }; + } - private ConstantValue MakeTypedConstant(ExpressionNode typeName, ExpressionNode constant) - { - var c = constant as ConstantValue; - return new ConstantValue() - { - TypeName = (typeName as Literal).Lit, - Type = c.Type, - StringValue = c.StringValue, - FloatValue = c.FloatValue, - IntegerValue = c.IntegerValue, - }; - } - - private ConstantValue MakeConstGuidString(ExpressionNode val) => new ConstantValue() - { - Type = IRConstantType.Name, - StringValue = (val as Literal).Lit - }; + private ConstantValue MakeConstGuidString(ExpressionNode val) => new ConstantValue() + { + Type = IRConstantType.Name, + StringValue = (val as Literal).Lit + }; - private ConstantValue MakeConstString(ExpressionNode val) => new ConstantValue() - { - Type = IRConstantType.String, - StringValue = (val as Literal).Lit - }; + private ConstantValue MakeConstString(ExpressionNode val) => new ConstantValue() + { + Type = IRConstantType.String, + StringValue = (val as Literal).Lit + }; - private ConstantValue MakeConstInteger(ExpressionNode val) => new ConstantValue() - { - Type = IRConstantType.Integer, - IntegerValue = Int64.Parse((val as Literal).Lit, ParserConstants.ParserCulture.NumberFormat) - }; + private ConstantValue MakeConstInteger(ExpressionNode val) => new ConstantValue() + { + Type = IRConstantType.Integer, + IntegerValue = Int64.Parse((val as Literal).Lit, ParserConstants.ParserCulture.NumberFormat) + }; - private ConstantValue MakeConstFloat(ExpressionNode val) => new ConstantValue() - { - Type = IRConstantType.Float, - FloatValue = Single.Parse((val as Literal).Lit, ParserConstants.ParserCulture.NumberFormat) - }; - } + private ConstantValue MakeConstFloat(ExpressionNode val) => new ConstantValue() + { + Type = IRConstantType.Float, + FloatValue = Single.Parse((val as Literal).Lit, ParserConstants.ParserCulture.NumberFormat) + }; } \ No newline at end of file diff --git a/DebuggerFrontend/Program.cs b/DebuggerFrontend/Program.cs index 2a540860..beddcb8c 100644 --- a/DebuggerFrontend/Program.cs +++ b/DebuggerFrontend/Program.cs @@ -6,29 +6,28 @@ using System.Text; using System.Threading.Tasks; -namespace LSTools.DebuggerFrontend +namespace LSTools.DebuggerFrontend; + +class Program { - class Program + static void Main(string[] args) { - static void Main(string[] args) + var currentPath = AppDomain.CurrentDomain.BaseDirectory; + var logFile = new FileStream(Path.Join(currentPath, "DAP.log"), FileMode.Create); + var dap = new DAPStream(); + dap.EnableLogging(logFile); + var dapHandler = new DAPMessageHandler(dap); + dapHandler.EnableLogging(logFile); + try { - var currentPath = AppDomain.CurrentDomain.BaseDirectory; - var logFile = new 
FileStream(Path.Join(currentPath, "DAP.log"), FileMode.Create); - var dap = new DAPStream(); - dap.EnableLogging(logFile); - var dapHandler = new DAPMessageHandler(dap); - dapHandler.EnableLogging(logFile); - try - { - dap.RunLoop(); - } - catch (Exception e) + dap.RunLoop(); + } + catch (Exception e) + { + using (var writer = new StreamWriter(logFile, Encoding.UTF8, 0x1000, true)) { - using (var writer = new StreamWriter(logFile, Encoding.UTF8, 0x1000, true)) - { - writer.Write(e.ToString()); - Console.WriteLine(e.ToString()); - } + writer.Write(e.ToString()); + Console.WriteLine(e.ToString()); } } } diff --git a/DebuggerFrontend/StackTracePrinter.cs b/DebuggerFrontend/StackTracePrinter.cs index 103500cc..4a61076c 100644 --- a/DebuggerFrontend/StackTracePrinter.cs +++ b/DebuggerFrontend/StackTracePrinter.cs @@ -7,291 +7,290 @@ using System.Text.RegularExpressions; using System.Threading.Tasks; -namespace LSTools.DebuggerFrontend +namespace LSTools.DebuggerFrontend; + +public class DebugVariable { - public class DebugVariable - { - public String Name; - public String Type; - public String Value; - public MsgTypedValue TypedValue; - } + public String Name; + public String Type; + public String Value; + public MsgTypedValue TypedValue; +} + +public class CoalescedFrame +{ + public String Name; + public String File; + public int Line; + public MsgFrame Frame; + // List of named variables available in this frame (if any) + public List Variables; + // Arguments that the PROC/QRY was called with + // If the frame is not a call, this will be null. + public MsgTuple CallArguments; + // Rule that this frame belongs to. + // We use this info to restrict the scope of a frame (to source lines) in VS + public RuleDebugInfo Rule; +} - public class CoalescedFrame +public class StackTracePrinter +{ + private StoryDebugInfo DebugInfo; + private ValueFormatter Formatter; + public bool MergeFrames = true; + // Mod/project UUID we'll send to the debugger instead of the packaged path + public string ModUuid; + + public StackTracePrinter(StoryDebugInfo debugInfo, ValueFormatter formatter) { - public String Name; - public String File; - public int Line; - public MsgFrame Frame; - // List of named variables available in this frame (if any) - public List Variables; - // Arguments that the PROC/QRY was called with - // If the frame is not a call, this will be null. - public MsgTuple CallArguments; - // Rule that this frame belongs to. - // We use this info to restrict the scope of a frame (to source lines) in VS - public RuleDebugInfo Rule; + DebugInfo = debugInfo; + Formatter = formatter; } - public class StackTracePrinter + private List TupleToVariables(MsgFrame frame) { - private StoryDebugInfo DebugInfo; - private ValueFormatter Formatter; - public bool MergeFrames = true; - // Mod/project UUID we'll send to the debugger instead of the packaged path - public string ModUuid; + var variables = new List(); + NodeDebugInfo node = null; + RuleDebugInfo rule = null; + if (frame.NodeId != 0) + { + node = DebugInfo.Nodes[frame.NodeId]; + if (node.RuleId != 0) + { + rule = DebugInfo.Rules[node.RuleId]; + } + } - public StackTracePrinter(StoryDebugInfo debugInfo, ValueFormatter formatter) + for (var i = 0; i < frame.Tuple.Column.Count; i++) { - DebugInfo = debugInfo; - Formatter = formatter; + var value = frame.Tuple.Column[i]; + var variable = new DebugVariable + { + Name = Formatter.TupleVariableIndexToName(rule, node, i), + // TODO type name! 
+ Type = value.TypeId.ToString(), + Value = Formatter.ValueToString(value), + TypedValue = value + }; + + variables.Add(variable); } - private List TupleToVariables(MsgFrame frame) + return variables; + } + + private CoalescedFrame MsgFrameToLocal(MsgFrame frame) + { + var outFrame = new CoalescedFrame(); + outFrame.Name = Formatter.GetFrameDebugName(frame); + + if (frame.Type == MsgFrame.Types.FrameType.GoalInitAction + || frame.Type == MsgFrame.Types.FrameType.GoalExitAction) { - var variables = new List(); - NodeDebugInfo node = null; - RuleDebugInfo rule = null; - if (frame.NodeId != 0) + var goal = DebugInfo.Goals[frame.GoalId]; + outFrame.File = goal.Path; + if (frame.Type == MsgFrame.Types.FrameType.GoalInitAction) { - node = DebugInfo.Nodes[frame.NodeId]; - if (node.RuleId != 0) - { - rule = DebugInfo.Rules[node.RuleId]; - } + outFrame.Line = (int)goal.InitActions[(int)frame.ActionIndex].Line; } - - for (var i = 0; i < frame.Tuple.Column.Count; i++) + else { - var value = frame.Tuple.Column[i]; - var variable = new DebugVariable - { - Name = Formatter.TupleVariableIndexToName(rule, node, i), - // TODO type name! - Type = value.TypeId.ToString(), - Value = Formatter.ValueToString(value), - TypedValue = value - }; - - variables.Add(variable); + outFrame.Line = (int)goal.ExitActions[(int)frame.ActionIndex].Line; } - - return variables; } - - private CoalescedFrame MsgFrameToLocal(MsgFrame frame) + else if (frame.NodeId != 0) { - var outFrame = new CoalescedFrame(); - outFrame.Name = Formatter.GetFrameDebugName(frame); - - if (frame.Type == MsgFrame.Types.FrameType.GoalInitAction - || frame.Type == MsgFrame.Types.FrameType.GoalExitAction) + var node = DebugInfo.Nodes[frame.NodeId]; + if (node.RuleId != 0) { - var goal = DebugInfo.Goals[frame.GoalId]; + var rule = DebugInfo.Rules[node.RuleId]; + var goal = DebugInfo.Goals[rule.GoalId]; outFrame.File = goal.Path; - if (frame.Type == MsgFrame.Types.FrameType.GoalInitAction) + + if (frame.Type == MsgFrame.Types.FrameType.Pushdown + && node.Type == Node.Type.Rule) { - outFrame.Line = (int)goal.InitActions[(int)frame.ActionIndex].Line; + outFrame.Line = (int)rule.ActionsStartLine; } - else + else if (frame.Type == MsgFrame.Types.FrameType.RuleAction) { - outFrame.Line = (int)goal.ExitActions[(int)frame.ActionIndex].Line; + outFrame.Line = (int)rule.Actions[(int)frame.ActionIndex].Line; } - } - else if (frame.NodeId != 0) - { - var node = DebugInfo.Nodes[frame.NodeId]; - if (node.RuleId != 0) + else { - var rule = DebugInfo.Rules[node.RuleId]; - var goal = DebugInfo.Goals[rule.GoalId]; - outFrame.File = goal.Path; - - if (frame.Type == MsgFrame.Types.FrameType.Pushdown - && node.Type == Node.Type.Rule) - { - outFrame.Line = (int)rule.ActionsStartLine; - } - else if (frame.Type == MsgFrame.Types.FrameType.RuleAction) - { - outFrame.Line = (int)rule.Actions[(int)frame.ActionIndex].Line; - } - else - { - outFrame.Line = node.Line; - } + outFrame.Line = node.Line; } } + } - outFrame.Variables = TupleToVariables(frame); - outFrame.Frame = frame; + outFrame.Variables = TupleToVariables(frame); + outFrame.Frame = frame; - if (outFrame.File != null - && ModUuid != null) + if (outFrame.File != null + && ModUuid != null) + { + var modRe = new Regex(".*\\.pak:/Mods/.*/Story/RawFiles/Goals/(.*)\\.txt"); + var match = modRe.Match(outFrame.File); + if (match.Success) { - var modRe = new Regex(".*\\.pak:/Mods/.*/Story/RawFiles/Goals/(.*)\\.txt"); - var match = modRe.Match(outFrame.File); - if (match.Success) - { - outFrame.File = "divinity:/" + ModUuid + 
"/" + match.Groups[1].Value + ".divgoal"; - } + outFrame.File = "divinity:/" + ModUuid + "/" + match.Groups[1].Value + ".divgoal"; } - - return outFrame; } - /// - /// Maps node calls to ranges. Each range represents one output frame in the final call stack. - /// - private List> DetermineFrameRanges(List frames) - { - var ranges = new List>(); + return outFrame; + } + + /// + /// Maps node calls to ranges. Each range represents one output frame in the final call stack. + /// + private List> DetermineFrameRanges(List frames) + { + var ranges = new List>(); - List currentFrames = new List(); - foreach (var frame in frames) + List currentFrames = new List(); + foreach (var frame in frames) + { + if (frame.Frame.Type == MsgFrame.Types.FrameType.GoalInitAction + || frame.Frame.Type == MsgFrame.Types.FrameType.GoalExitAction) { - if (frame.Frame.Type == MsgFrame.Types.FrameType.GoalInitAction - || frame.Frame.Type == MsgFrame.Types.FrameType.GoalExitAction) + // Goal INIT/EXIT frames don't have parent frames, so we'll add them as separate frames + if (currentFrames.Count > 0) { - // Goal INIT/EXIT frames don't have parent frames, so we'll add them as separate frames - if (currentFrames.Count > 0) - { - ranges.Add(currentFrames); - currentFrames = new List(); - } - - currentFrames.Add(frame); ranges.Add(currentFrames); currentFrames = new List(); } - else - { - // Embedded PROC/QRY frames start with Insert/Delete frames - if (frame.Frame.Type == MsgFrame.Types.FrameType.Insert - || frame.Frame.Type == MsgFrame.Types.FrameType.Insert) - { - if (currentFrames.Count > 0) - { - ranges.Add(currentFrames); - currentFrames = new List(); - } - } - currentFrames.Add(frame); - - // Rule frames are terminated by RuleAction (THEN part) frames - if (frame.Frame.Type == MsgFrame.Types.FrameType.RuleAction) + currentFrames.Add(frame); + ranges.Add(currentFrames); + currentFrames = new List(); + } + else + { + // Embedded PROC/QRY frames start with Insert/Delete frames + if (frame.Frame.Type == MsgFrame.Types.FrameType.Insert + || frame.Frame.Type == MsgFrame.Types.FrameType.Insert) + { + if (currentFrames.Count > 0) { ranges.Add(currentFrames); currentFrames = new List(); } } - } + currentFrames.Add(frame); - if (currentFrames.Count > 0) - { - ranges.Add(currentFrames); + // Rule frames are terminated by RuleAction (THEN part) frames + if (frame.Frame.Type == MsgFrame.Types.FrameType.RuleAction) + { + ranges.Add(currentFrames); + currentFrames = new List(); + } } - - return ranges; } - /// - /// Merges a node call range into an output stack frame. - /// - private CoalescedFrame MergeFrame(List range) - { - var frame = new CoalescedFrame(); - frame.Frame = range[0].Frame; - foreach (var node in range) - { - // Use last available location/variable data in the range - if (node.Line != 0) - { - frame.File = node.File; - frame.Line = node.Line; - } + if (currentFrames.Count > 0) + { + ranges.Add(currentFrames); + } - if (frame.Rule == null && node.Frame.NodeId != 0) - { - var storyNode = DebugInfo.Nodes[node.Frame.NodeId]; - if (storyNode.RuleId != 0) - { - var rule = DebugInfo.Rules[storyNode.RuleId]; - frame.Rule = rule; - } - } + return ranges; + } - if (node.Frame.Type == MsgFrame.Types.FrameType.Pushdown - || node.Frame.Type == MsgFrame.Types.FrameType.Insert - || node.Frame.Type == MsgFrame.Types.FrameType.Delete) - { - // Rule variable info is only propagated through Pushdown nodes. - // All other nodes either have no variable info at all, or contain - // local tuples used for DB insert/delete/query. 
+ /// + /// Merges a node call range into an output stack frame. + /// + private CoalescedFrame MergeFrame(List range) + { + var frame = new CoalescedFrame(); + frame.Frame = range[0].Frame; - // We'll keep the variables from Insert/Delete nodes if there are - // no better rule candidates, as they show the PROC/DB input tuple. - frame.Variables = node.Variables; - } + foreach (var node in range) + { + // Use last available location/variable data in the range + if (node.Line != 0) + { + frame.File = node.File; + frame.Line = node.Line; + } - if (node.Frame.Type == MsgFrame.Types.FrameType.Insert - || node.Frame.Type == MsgFrame.Types.FrameType.Delete) + if (frame.Rule == null && node.Frame.NodeId != 0) + { + var storyNode = DebugInfo.Nodes[node.Frame.NodeId]; + if (storyNode.RuleId != 0) { - // We'll keep the initial argument list that was passed to the PROC/QRY/DB - // (from the initial Insert/Delete frame) to display in the call frame name - frame.CallArguments = node.Frame.Tuple; + var rule = DebugInfo.Rules[storyNode.RuleId]; + frame.Rule = rule; } } - if (frame.Variables == null) + if (node.Frame.Type == MsgFrame.Types.FrameType.Pushdown + || node.Frame.Type == MsgFrame.Types.FrameType.Insert + || node.Frame.Type == MsgFrame.Types.FrameType.Delete) { - frame.Variables = new List(); - } + // Rule variable info is only propagated through Pushdown nodes. + // All other nodes either have no variable info at all, or contain + // local tuples used for DB insert/delete/query. - frame.Name = Formatter.GetFrameName(frame.Frame, frame.CallArguments); + // We'll keep the variables from Insert/Delete nodes if there are + // no better rule candidates, as they show the PROC/DB input tuple. + frame.Variables = node.Variables; + } - // Special indicator for backward propagation of database inserts/deletes - if (range.Count >= 2 - && (range[0].Frame.Type == MsgFrame.Types.FrameType.Insert - || range[0].Frame.Type == MsgFrame.Types.FrameType.Delete) - && (range[1].Frame.Type == MsgFrame.Types.FrameType.Pushdown - || range[1].Frame.Type == MsgFrame.Types.FrameType.PushdownDelete)) + if (node.Frame.Type == MsgFrame.Types.FrameType.Insert + || node.Frame.Type == MsgFrame.Types.FrameType.Delete) { - var pushdownNode = DebugInfo.Nodes[range[1].Frame.NodeId]; - if (range[0].Frame.NodeId != pushdownNode.ParentNodeId) - { - frame.Name = "(Database Propagation) " + frame.Name; - } + // We'll keep the initial argument list that was passed to the PROC/QRY/DB + // (from the initial Insert/Delete frame) to display in the call frame name + frame.CallArguments = node.Frame.Tuple; } - - return frame; } - private List MergeCallStack(List nodes) + if (frame.Variables == null) { - var frameRanges = DetermineFrameRanges(nodes); - var frames = frameRanges.Select(range => MergeFrame(range)).ToList(); - return frames; + frame.Variables = new List(); } - public List BreakpointToStack(BkBreakpointTriggered message) + frame.Name = Formatter.GetFrameName(frame.Frame, frame.CallArguments); + + // Special indicator for backward propagation of database inserts/deletes + if (range.Count >= 2 + && (range[0].Frame.Type == MsgFrame.Types.FrameType.Insert + || range[0].Frame.Type == MsgFrame.Types.FrameType.Delete) + && (range[1].Frame.Type == MsgFrame.Types.FrameType.Pushdown + || range[1].Frame.Type == MsgFrame.Types.FrameType.PushdownDelete)) { - var rawFrames = message.CallStack.Select(frame => MsgFrameToLocal(frame)).ToList(); - List mergedFrames; - if (MergeFrames) + var pushdownNode = DebugInfo.Nodes[range[1].Frame.NodeId]; + if 
(range[0].Frame.NodeId != pushdownNode.ParentNodeId) { - mergedFrames = MergeCallStack(rawFrames); - } - else - { - mergedFrames = rawFrames; + frame.Name = "(Database Propagation) " + frame.Name; } + } + + return frame; + } - mergedFrames.Reverse(); - return mergedFrames; + private List MergeCallStack(List nodes) + { + var frameRanges = DetermineFrameRanges(nodes); + var frames = frameRanges.Select(range => MergeFrame(range)).ToList(); + return frames; + } + + public List BreakpointToStack(BkBreakpointTriggered message) + { + var rawFrames = message.CallStack.Select(frame => MsgFrameToLocal(frame)).ToList(); + List mergedFrames; + if (MergeFrames) + { + mergedFrames = MergeCallStack(rawFrames); } + else + { + mergedFrames = rawFrames; + } + + mergedFrames.Reverse(); + return mergedFrames; } } diff --git a/DebuggerFrontend/ValueFormatter.cs b/DebuggerFrontend/ValueFormatter.cs index 365724df..09b924e7 100644 --- a/DebuggerFrontend/ValueFormatter.cs +++ b/DebuggerFrontend/ValueFormatter.cs @@ -4,75 +4,32 @@ using System.Collections.Generic; using System.Linq; -namespace LSTools.DebuggerFrontend +namespace LSTools.DebuggerFrontend; + +public class ValueFormatter { - public class ValueFormatter + private StoryDebugInfo DebugInfo; + + public ValueFormatter(StoryDebugInfo debugInfo) { - private StoryDebugInfo DebugInfo; + DebugInfo = debugInfo; + } - public ValueFormatter(StoryDebugInfo debugInfo) + public string TupleToString(MsgFrame frame) + { + string tuple = ""; + var node = DebugInfo.Nodes[frame.NodeId]; + RuleDebugInfo rule = null; + if (node.RuleId != 0) { - DebugInfo = debugInfo; + rule = DebugInfo.Rules[node.RuleId]; } - public string TupleToString(MsgFrame frame) + for (var i = 0; i < frame.Tuple.Column.Count; i++) { - string tuple = ""; - var node = DebugInfo.Nodes[frame.NodeId]; - RuleDebugInfo rule = null; - if (node.RuleId != 0) - { - rule = DebugInfo.Rules[node.RuleId]; - } + var value = frame.Tuple.Column[i]; + string columnName = TupleVariableIndexToName(rule, node, i); - for (var i = 0; i < frame.Tuple.Column.Count; i++) - { - var value = frame.Tuple.Column[i]; - string columnName = TupleVariableIndexToName(rule, node, i); - - string valueStr; - switch ((Value.Type)value.TypeId) - { - case Value.Type.None: - valueStr = "(None)"; - break; - - case Value.Type.Integer: - case Value.Type.Integer64: - valueStr = value.Intval.ToString(); - break; - - case Value.Type.Float: - valueStr = value.Floatval.ToString(); - break; - - case Value.Type.String: - case Value.Type.GuidString: - default: - valueStr = value.Stringval; - break; - } - - if (columnName.Length > 0) - { - tuple += String.Format("{0}={1}, ", columnName, valueStr); - } - else - { - tuple += String.Format("{0}, ", valueStr); - } - } - - return tuple; - } - - public string TupleToString(MsgTuple tuple) - { - return String.Join(", ", tuple.Column.Select(val => ValueToString(val))); - } - - public string ValueToString(MsgTypedValue value) - { string valueStr; switch ((Value.Type)value.TypeId) { @@ -96,142 +53,184 @@ public string ValueToString(MsgTypedValue value) break; } - return valueStr; + if (columnName.Length > 0) + { + tuple += String.Format("{0}={1}, ", columnName, valueStr); + } + else + { + tuple += String.Format("{0}, ", valueStr); + } + } + + return tuple; + } + + public string TupleToString(MsgTuple tuple) + { + return String.Join(", ", tuple.Column.Select(val => ValueToString(val))); + } + + public string ValueToString(MsgTypedValue value) + { + string valueStr; + switch ((Value.Type)value.TypeId) + { + case 
Value.Type.None: + valueStr = "(None)"; + break; + + case Value.Type.Integer: + case Value.Type.Integer64: + valueStr = value.Intval.ToString(); + break; + + case Value.Type.Float: + valueStr = value.Floatval.ToString(); + break; + + case Value.Type.String: + case Value.Type.GuidString: + default: + valueStr = value.Stringval; + break; } - public String TupleVariableIndexToName(RuleDebugInfo rule, NodeDebugInfo node, int index) + return valueStr; + } + + public String TupleVariableIndexToName(RuleDebugInfo rule, NodeDebugInfo node, int index) + { + if (rule == null) { - if (rule == null) - { - return "#" + index.ToString(); - } - else if (node != null) + return "#" + index.ToString(); + } + else if (node != null) + { + if (index < node.ColumnToVariableMaps.Count) { - if (index < node.ColumnToVariableMaps.Count) + var mappedColumnIdx = node.ColumnToVariableMaps[index]; + if (mappedColumnIdx < rule.Variables.Count) { - var mappedColumnIdx = node.ColumnToVariableMaps[index]; - if (mappedColumnIdx < rule.Variables.Count) - { - return rule.Variables[mappedColumnIdx].Name; - } - else - { - return String.Format("(Bad Variable Idx #{0})", index); - } + return rule.Variables[mappedColumnIdx].Name; } else { - return String.Format("(Unknown #{0})", index); + return String.Format("(Bad Variable Idx #{0})", index); } } else { - if (index < rule.Variables.Count) - { - return rule.Variables[index].Name; - } - else - { - return String.Format("(Bad Variable Idx #{0})", index); - } + return String.Format("(Unknown #{0})", index); } } - - public string GetFrameDebugName(MsgFrame frame) + else { - string frameType; - switch (frame.Type) + if (index < rule.Variables.Count) { - case MsgFrame.Types.FrameType.IsValid: frameType = "IsValid"; break; - case MsgFrame.Types.FrameType.Pushdown: frameType = "Pushdown"; break; - case MsgFrame.Types.FrameType.PushdownDelete: frameType = "PushdownDelete"; break; - case MsgFrame.Types.FrameType.Insert: frameType = "Insert"; break; - case MsgFrame.Types.FrameType.Delete: frameType = "Delete"; break; - case MsgFrame.Types.FrameType.RuleAction: frameType = "RuleAction"; break; - case MsgFrame.Types.FrameType.GoalInitAction: frameType = "GoalInitAction"; break; - case MsgFrame.Types.FrameType.GoalExitAction: frameType = "GoalExitAction"; break; - - default: - throw new InvalidOperationException($"Unsupported frame type: {frame.Type}"); + return rule.Variables[index].Name; } + else + { + return String.Format("(Bad Variable Idx #{0})", index); + } + } + } - if (frame.NodeId != 0) + public string GetFrameDebugName(MsgFrame frame) + { + string frameType; + switch (frame.Type) + { + case MsgFrame.Types.FrameType.IsValid: frameType = "IsValid"; break; + case MsgFrame.Types.FrameType.Pushdown: frameType = "Pushdown"; break; + case MsgFrame.Types.FrameType.PushdownDelete: frameType = "PushdownDelete"; break; + case MsgFrame.Types.FrameType.Insert: frameType = "Insert"; break; + case MsgFrame.Types.FrameType.Delete: frameType = "Delete"; break; + case MsgFrame.Types.FrameType.RuleAction: frameType = "RuleAction"; break; + case MsgFrame.Types.FrameType.GoalInitAction: frameType = "GoalInitAction"; break; + case MsgFrame.Types.FrameType.GoalExitAction: frameType = "GoalExitAction"; break; + + default: + throw new InvalidOperationException($"Unsupported frame type: {frame.Type}"); + } + + if (frame.NodeId != 0) + { + string dbName = ""; + var node = DebugInfo.Nodes[frame.NodeId]; + if (node.DatabaseId != 0) { - string dbName = ""; - var node = DebugInfo.Nodes[frame.NodeId]; - if 
(node.DatabaseId != 0) - { - var db = DebugInfo.Databases[node.DatabaseId]; - dbName = db.Name; - } - else if (node.Name != null && node.Name.Length > 0) - { - dbName = node.Name; - } + var db = DebugInfo.Databases[node.DatabaseId]; + dbName = db.Name; + } + else if (node.Name != null && node.Name.Length > 0) + { + dbName = node.Name; + } - if (dbName != "") - { - return $"{frameType} @ {node.Type} (DB {dbName})"; - } - else - { - return $"{frameType} @ {node.Type}"; - } + if (dbName != "") + { + return $"{frameType} @ {node.Type} (DB {dbName})"; } else { - var goal = DebugInfo.Goals[frame.GoalId]; - return $"{frameType} @ {goal.Name}"; + return $"{frameType} @ {node.Type}"; } } + else + { + var goal = DebugInfo.Goals[frame.GoalId]; + return $"{frameType} @ {goal.Name}"; + } + } - public string GetFrameName(MsgFrame frame, MsgTuple arguments) + public string GetFrameName(MsgFrame frame, MsgTuple arguments) + { + switch (frame.Type) { - switch (frame.Type) - { - case MsgFrame.Types.FrameType.GoalInitAction: - { - var goal = DebugInfo.Goals[frame.GoalId].Name; - return goal + " (INIT)"; - } + case MsgFrame.Types.FrameType.GoalInitAction: + { + var goal = DebugInfo.Goals[frame.GoalId].Name; + return goal + " (INIT)"; + } - case MsgFrame.Types.FrameType.GoalExitAction: + case MsgFrame.Types.FrameType.GoalExitAction: + { + var goal = DebugInfo.Goals[frame.GoalId].Name; + return goal + " (EXIT)"; + } + + case MsgFrame.Types.FrameType.Insert: + case MsgFrame.Types.FrameType.Delete: + { + string argumentsFmt = ""; + if (arguments != null) { - var goal = DebugInfo.Goals[frame.GoalId].Name; - return goal + " (EXIT)"; + argumentsFmt = "(" + TupleToString(arguments) + ")"; } - case MsgFrame.Types.FrameType.Insert: - case MsgFrame.Types.FrameType.Delete: + var node = DebugInfo.Nodes[frame.NodeId]; + if (node.Type == Node.Type.Database) { - string argumentsFmt = ""; - if (arguments != null) - { - argumentsFmt = "(" + TupleToString(arguments) + ")"; - } - - var node = DebugInfo.Nodes[frame.NodeId]; - if (node.Type == Node.Type.Database) + var db = DebugInfo.Databases[node.DatabaseId]; + if (frame.Type == MsgFrame.Types.FrameType.Insert) { - var db = DebugInfo.Databases[node.DatabaseId]; - if (frame.Type == MsgFrame.Types.FrameType.Insert) - { - return db.Name + argumentsFmt + " (INSERT)"; - } - else - { - return db.Name + argumentsFmt + " (DELETE)"; - } + return db.Name + argumentsFmt + " (INSERT)"; } else { - return node.Name + argumentsFmt; + return db.Name + argumentsFmt + " (DELETE)"; } } + else + { + return node.Name + argumentsFmt; + } + } - default: - throw new InvalidOperationException($"Unsupported root frame type: {frame.Type}"); - } + default: + throw new InvalidOperationException($"Unsupported root frame type: {frame.Type}"); } } } diff --git a/Divine/CLI/CommandLineActions.cs b/Divine/CLI/CommandLineActions.cs index a2874c10..c68f12b8 100644 --- a/Divine/CLI/CommandLineActions.cs +++ b/Divine/CLI/CommandLineActions.cs @@ -6,251 +6,250 @@ using LSLib.LS; using LSLib.LS.Enums; -namespace Divine.CLI +namespace Divine.CLI; + +internal class CommandLineActions { - internal class CommandLineActions + public static string SourcePath; + public static string DestinationPath; + public static string PackagedFilePath; + public static string ConformPath; + + public static Game Game; + public static LogLevel LogLevel; + public static ResourceFormat InputFormat; + public static ResourceFormat OutputFormat; + public static PackageVersion PackageVersion; + public static int PackagePriority; + public static 
bool LegacyGuids; + public static Dictionary GR2Options; + + // TODO: OSI support + + public static void Run(CommandLineArguments args) { - public static string SourcePath; - public static string DestinationPath; - public static string PackagedFilePath; - public static string ConformPath; - - public static Game Game; - public static LogLevel LogLevel; - public static ResourceFormat InputFormat; - public static ResourceFormat OutputFormat; - public static PackageVersion PackageVersion; - public static int PackagePriority; - public static bool LegacyGuids; - public static Dictionary GR2Options; - - // TODO: OSI support - - public static void Run(CommandLineArguments args) - { - SetUpAndValidate(args); - Process(args); - } + SetUpAndValidate(args); + Process(args); + } - private static void SetUpAndValidate(CommandLineArguments args) + private static void SetUpAndValidate(CommandLineArguments args) + { + string[] batchActions = { - string[] batchActions = - { - "extract-packages", - "convert-models", - "convert-resources" - }; + "extract-packages", + "convert-models", + "convert-resources" + }; - string[] packageActions = - { - "create-package", - "list-package", - "extract-single-file", - "extract-package", - "extract-packages" - }; - - string[] graphicsActions = - { - "convert-model", - "convert-models" - }; + string[] packageActions = + { + "create-package", + "list-package", + "extract-single-file", + "extract-package", + "extract-packages" + }; + + string[] graphicsActions = + { + "convert-model", + "convert-models" + }; - LogLevel = CommandLineArguments.GetLogLevelByString(args.LogLevel); - CommandLineLogger.LogDebug($"Using log level: {LogLevel}"); + LogLevel = CommandLineArguments.GetLogLevelByString(args.LogLevel); + CommandLineLogger.LogDebug($"Using log level: {LogLevel}"); - Game = CommandLineArguments.GetGameByString(args.Game); - CommandLineLogger.LogDebug($"Using game: {Game}"); + Game = CommandLineArguments.GetGameByString(args.Game); + CommandLineLogger.LogDebug($"Using game: {Game}"); - LegacyGuids = args.LegacyGuids; + LegacyGuids = args.LegacyGuids; - if (batchActions.Any(args.Action.Contains)) + if (batchActions.Any(args.Action.Contains)) + { + if (args.InputFormat == null || args.OutputFormat == null) { - if (args.InputFormat == null || args.OutputFormat == null) - { - if (args.InputFormat == null && args.Action != "extract-packages") - { - CommandLineLogger.LogFatal("Cannot perform batch action without --input-format and --output-format arguments", 1); - } - } - - InputFormat = CommandLineArguments.GetResourceFormatByString(args.InputFormat); - CommandLineLogger.LogDebug($"Using input format: {InputFormat}"); - - if (args.Action != "extract-packages") + if (args.InputFormat == null && args.Action != "extract-packages") { - OutputFormat = CommandLineArguments.GetResourceFormatByString(args.OutputFormat); - CommandLineLogger.LogDebug($"Using output format: {OutputFormat}"); + CommandLineLogger.LogFatal("Cannot perform batch action without --input-format and --output-format arguments", 1); } } - if (args.Action == "create-package") - { - PackagePriority = args.PackagePriority; - PackageVersion = Game.PAKVersion(); - CommandLineLogger.LogDebug($"Using package version: {PackageVersion}"); - } + InputFormat = CommandLineArguments.GetResourceFormatByString(args.InputFormat); + CommandLineLogger.LogDebug($"Using input format: {InputFormat}"); - if (graphicsActions.Any(args.Action.Contains)) + if (args.Action != "extract-packages") { - GR2Options = 
CommandLineArguments.GetGR2Options(args.Options); + OutputFormat = CommandLineArguments.GetResourceFormatByString(args.OutputFormat); + CommandLineLogger.LogDebug($"Using output format: {OutputFormat}"); + } + } - if(LogLevel == LogLevel.DEBUG || LogLevel == LogLevel.ALL) - { - CommandLineLogger.LogDebug("Using graphics options:"); + if (args.Action == "create-package") + { + PackagePriority = args.PackagePriority; + PackageVersion = Game.PAKVersion(); + CommandLineLogger.LogDebug($"Using package version: {PackageVersion}"); + } - foreach (KeyValuePair x in GR2Options) - { - CommandLineLogger.LogDebug($" {x.Key} = {x.Value}"); - } + if (graphicsActions.Any(args.Action.Contains)) + { + GR2Options = CommandLineArguments.GetGR2Options(args.Options); - } + if(LogLevel == LogLevel.DEBUG || LogLevel == LogLevel.ALL) + { + CommandLineLogger.LogDebug("Using graphics options:"); - if (GR2Options["conform"]) + foreach (KeyValuePair x in GR2Options) { - ConformPath = TryToValidatePath(args.ConformPath); + CommandLineLogger.LogDebug($" {x.Key} = {x.Value}"); } - } - SourcePath = TryToValidatePath(args.Source); - if (args.Action != "list-package") - { - DestinationPath = TryToValidatePath(args.Destination); } - if (args.Action == "extract-single-file") + + if (GR2Options["conform"]) { - PackagedFilePath = args.PackagedPath; + ConformPath = TryToValidatePath(args.ConformPath); } } - private static void Process(CommandLineArguments args) + SourcePath = TryToValidatePath(args.Source); + if (args.Action != "list-package") + { + DestinationPath = TryToValidatePath(args.Destination); + } + if (args.Action == "extract-single-file") { - Func filter; + PackagedFilePath = args.PackagedPath; + } + } + + private static void Process(CommandLineArguments args) + { + Func filter; - if (args.Expression != null) + if (args.Expression != null) + { + Regex expression = null; + if (args.UseRegex) { - Regex expression = null; - if (args.UseRegex) + try { - try - { - expression = new Regex(args.Expression, RegexOptions.Singleline | RegexOptions.Compiled); - } - catch (ArgumentException) - { - CommandLineLogger.LogFatal($"Cannot parse RegEx expression: {args.Expression}", -1); - } + expression = new Regex(args.Expression, RegexOptions.Singleline | RegexOptions.Compiled); } - else + catch (ArgumentException) { - expression = new Regex("^" + Regex.Escape(args.Expression).Replace(@"\*", ".*").Replace(@"\?", ".") + "$", RegexOptions.Singleline | RegexOptions.Compiled); + CommandLineLogger.LogFatal($"Cannot parse RegEx expression: {args.Expression}", -1); } - - filter = obj => obj.Name.Like(expression); } else { - filter = obj => true; + expression = new Regex("^" + Regex.Escape(args.Expression).Replace(@"\*", ".*").Replace(@"\?", ".") + "$", RegexOptions.Singleline | RegexOptions.Compiled); } - + + filter = obj => obj.Name.Like(expression); + } + else + { + filter = obj => true; + } + switch (args.Action) + { + case "create-package": { - case "create-package": - { - CommandLinePackageProcessor.Create(); - break; - } - - case "extract-package": - { - CommandLinePackageProcessor.Extract(filter); - break; - } - - case "extract-single-file": - { - CommandLinePackageProcessor.ExtractSingleFile(); - break; - } - - case "list-package": - { - CommandLinePackageProcessor.ListFiles(filter); - break; - } - - case "convert-model": - { - CommandLineGR2Processor.UpdateExporterSettings(); - CommandLineGR2Processor.Convert(); - break; - } + CommandLinePackageProcessor.Create(); + break; + } - case "convert-resource": - { - 
CommandLineDataProcessor.Convert(); - break; - } + case "extract-package": + { + CommandLinePackageProcessor.Extract(filter); + break; + } - case "convert-loca": - { - CommandLineDataProcessor.ConvertLoca(); - break; - } + case "extract-single-file": + { + CommandLinePackageProcessor.ExtractSingleFile(); + break; + } - case "extract-packages": - { - CommandLinePackageProcessor.BatchExtract(filter); - break; - } + case "list-package": + { + CommandLinePackageProcessor.ListFiles(filter); + break; + } - case "convert-models": - { - CommandLineGR2Processor.BatchConvert(); - break; - } + case "convert-model": + { + CommandLineGR2Processor.UpdateExporterSettings(); + CommandLineGR2Processor.Convert(); + break; + } - case "convert-resources": - { - CommandLineDataProcessor.BatchConvert(); - break; + case "convert-resource": + { + CommandLineDataProcessor.Convert(); + break; } - default: - { - throw new ArgumentException($"Unhandled action: {args.Action}"); - } + case "convert-loca": + { + CommandLineDataProcessor.ConvertLoca(); + break; } - } - public static string TryToValidatePath(string path) - { - CommandLineLogger.LogDebug($"Using path: {path}"); - - if (string.IsNullOrWhiteSpace(path)) + case "extract-packages": { - CommandLineLogger.LogFatal($"Cannot parse path from input: {path}", 1); + CommandLinePackageProcessor.BatchExtract(filter); + break; } - Uri uri = null; - try + case "convert-models": { - Uri.TryCreate(path, UriKind.RelativeOrAbsolute, out uri); + CommandLineGR2Processor.BatchConvert(); + break; } - catch (InvalidOperationException) + + case "convert-resources": { - CommandLineLogger.LogFatal($"Cannot proceed without absolute path [E1]: {path}", 1); + CommandLineDataProcessor.BatchConvert(); + break; } - if (uri != null && (!Path.IsPathRooted(path) || !uri.IsFile)) + default: { - CommandLineLogger.LogFatal($"Cannot proceed without absolute path [E2]: {path}", 1); + throw new ArgumentException($"Unhandled action: {args.Action}"); } + } + } - // ReSharper disable once AssignNullToNotNullAttribute - path = Path.GetFullPath(path); + public static string TryToValidatePath(string path) + { + CommandLineLogger.LogDebug($"Using path: {path}"); - return path; + if (string.IsNullOrWhiteSpace(path)) + { + CommandLineLogger.LogFatal($"Cannot parse path from input: {path}", 1); + } + + Uri uri = null; + try + { + Uri.TryCreate(path, UriKind.RelativeOrAbsolute, out uri); + } + catch (InvalidOperationException) + { + CommandLineLogger.LogFatal($"Cannot proceed without absolute path [E1]: {path}", 1); } + + if (uri != null && (!Path.IsPathRooted(path) || !uri.IsFile)) + { + CommandLineLogger.LogFatal($"Cannot proceed without absolute path [E2]: {path}", 1); + } + + // ReSharper disable once AssignNullToNotNullAttribute + path = Path.GetFullPath(path); + + return path; } } diff --git a/Divine/CLI/CommandLineArguments.cs b/Divine/CLI/CommandLineArguments.cs index dafdd6ad..dffdee3d 100644 --- a/Divine/CLI/CommandLineArguments.cs +++ b/Divine/CLI/CommandLineArguments.cs @@ -6,389 +6,388 @@ using LSLib.Granny.Model; using LSLib.LS.Enums; -namespace Divine.CLI +namespace Divine.CLI; + +public class CommandLineArguments { - public class CommandLineArguments - { - // @formatter:off - [EnumeratedValueArgument(typeof(string), 'l', "loglevel", - Description = "Set verbosity level of log output", - DefaultValue = "info", - AllowedValues = "off;fatal;error;warn;info;debug;trace;all", - ValueOptional = false, - Optional = true - )] - public string LogLevel; - - // @formatter:off - 
[EnumeratedValueArgument(typeof(string), 'g', "game", - Description = "Set target game when generating output", - DefaultValue = null, - AllowedValues = "dos;dosee;dos2;dos2de;bg3", - ValueOptional = false, - Optional = false - )] - public string Game; - - // @formatter:off - [ValueArgument(typeof(string), 's', "source", - Description = "Set source file path or directory", - DefaultValue = null, - ValueOptional = false, - Optional = false - )] - public string Source; - - // @formatter:off - [ValueArgument(typeof(string), 'd', "destination", - Description = "Set destination file path or directory", - DefaultValue = null, - ValueOptional = true, - Optional = true - )] - public string Destination; - - // @formatter:off - [ValueArgument(typeof(string), 'f', "packaged-path", - Description = "File to extract from package", - DefaultValue = null, - ValueOptional = true, - Optional = true - )] - public string PackagedPath; - - // @formatter:off - [EnumeratedValueArgument(typeof(string), 'i', "input-format", - Description = "Set input format for batch operations", - DefaultValue = null, - AllowedValues = "dae;gr2;lsv;pak;lsj;lsx;lsb;lsf", - ValueOptional = false, - Optional = true - )] - public string InputFormat; - - // @formatter:off - [EnumeratedValueArgument(typeof(string), 'o', "output-format", - Description = "Set output format for batch operations", - DefaultValue = null, - AllowedValues = "dae;gr2;lsv;pak;lsj;lsx;lsb;lsf", - ValueOptional = false, - Optional = true - )] - public string OutputFormat; - - // @formatter:off - [EnumeratedValueArgument(typeof(string), 'a', "action", - Description = "Set action to execute", - DefaultValue = "extract-package", - AllowedValues = "create-package;list-package;extract-single-file;extract-package;extract-packages;convert-model;convert-models;convert-resource;convert-resources;convert-loca", - ValueOptional = false, - Optional = false - )] - public string Action; - - // @formatter:off - [EnumeratedValueArgument(typeof(string), 'c', "compression-method", - Description = "Set compression method", - DefaultValue = "lz4hc", - AllowedValues = "zlib;zlibfast;lz4;lz4hc;none", - ValueOptional = false, - Optional = true - )] - public string CompressionMethod; - - // @formatter:off - [EnumeratedValueArgument(typeof(string), 'e', "gr2-options", - Description = "Set extra options for GR2/DAE conversion", - AllowMultiple = true, - AllowedValues = "export-normals;export-tangents;export-uvs;export-colors;deduplicate-vertices;deduplicate-uvs;recalculate-normals;recalculate-tangents;recalculate-iwt;flip-uvs;ignore-uv-nan;disable-qtangents;y-up-skeletons;force-legacy-version;compact-tris;build-dummy-skeleton;apply-basis-transforms;x-flip-skeletons;x-flip-meshes;conform;conform-copy", - ValueOptional = false, - Optional = true - )] - public string[] Options; + // @formatter:off + [EnumeratedValueArgument(typeof(string), 'l', "loglevel", + Description = "Set verbosity level of log output", + DefaultValue = "info", + AllowedValues = "off;fatal;error;warn;info;debug;trace;all", + ValueOptional = false, + Optional = true + )] + public string LogLevel; + + // @formatter:off + [EnumeratedValueArgument(typeof(string), 'g', "game", + Description = "Set target game when generating output", + DefaultValue = null, + AllowedValues = "dos;dosee;dos2;dos2de;bg3", + ValueOptional = false, + Optional = false + )] + public string Game; + + // @formatter:off + [ValueArgument(typeof(string), 's', "source", + Description = "Set source file path or directory", + DefaultValue = null, + 
ValueOptional = false, + Optional = false + )] + public string Source; + + // @formatter:off + [ValueArgument(typeof(string), 'd', "destination", + Description = "Set destination file path or directory", + DefaultValue = null, + ValueOptional = true, + Optional = true + )] + public string Destination; + + // @formatter:off + [ValueArgument(typeof(string), 'f', "packaged-path", + Description = "File to extract from package", + DefaultValue = null, + ValueOptional = true, + Optional = true + )] + public string PackagedPath; + + // @formatter:off + [EnumeratedValueArgument(typeof(string), 'i', "input-format", + Description = "Set input format for batch operations", + DefaultValue = null, + AllowedValues = "dae;gr2;lsv;pak;lsj;lsx;lsb;lsf", + ValueOptional = false, + Optional = true + )] + public string InputFormat; + + // @formatter:off + [EnumeratedValueArgument(typeof(string), 'o', "output-format", + Description = "Set output format for batch operations", + DefaultValue = null, + AllowedValues = "dae;gr2;lsv;pak;lsj;lsx;lsb;lsf", + ValueOptional = false, + Optional = true + )] + public string OutputFormat; + + // @formatter:off + [EnumeratedValueArgument(typeof(string), 'a', "action", + Description = "Set action to execute", + DefaultValue = "extract-package", + AllowedValues = "create-package;list-package;extract-single-file;extract-package;extract-packages;convert-model;convert-models;convert-resource;convert-resources;convert-loca", + ValueOptional = false, + Optional = false + )] + public string Action; + + // @formatter:off + [EnumeratedValueArgument(typeof(string), 'c', "compression-method", + Description = "Set compression method", + DefaultValue = "lz4hc", + AllowedValues = "zlib;zlibfast;lz4;lz4hc;none", + ValueOptional = false, + Optional = true + )] + public string CompressionMethod; + + // @formatter:off + [EnumeratedValueArgument(typeof(string), 'e', "gr2-options", + Description = "Set extra options for GR2/DAE conversion", + AllowMultiple = true, + AllowedValues = "export-normals;export-tangents;export-uvs;export-colors;deduplicate-vertices;deduplicate-uvs;recalculate-normals;recalculate-tangents;recalculate-iwt;flip-uvs;ignore-uv-nan;disable-qtangents;y-up-skeletons;force-legacy-version;compact-tris;build-dummy-skeleton;apply-basis-transforms;x-flip-skeletons;x-flip-meshes;conform;conform-copy", + ValueOptional = false, + Optional = true + )] + public string[] Options; // @formatter:off [ValueArgument(typeof(string), 'x', "expression", - Description = "Set glob expression for extract and list actions", - DefaultValue = "*", - ValueOptional = false, - Optional = true - )] - public string Expression; - - // @formatter:off - [ValueArgument(typeof(string), "conform-path", - Description = "Set conform to original path", - DefaultValue = null, - ValueOptional = false, - Optional = true - )] - public string ConformPath; - - // @formatter:off - [ValueArgument(typeof(int), "package-priority", - Description = "Set a custom package priority", - DefaultValue = 0, - ValueOptional = true, - Optional = true - )] - public int PackagePriority; - - // @formatter:off - [SwitchArgument("legacy-guids", false, - Description = "Use legacy GUID serialization format when serializing LSX/LSJ files", - Optional = true - )] - public bool LegacyGuids; - - // @formatter:off - [SwitchArgument("use-package-name", false, - Description = "Use package name for destination folder", - Optional = true - )] - public bool UsePackageName; + Description = "Set glob expression for extract and list actions", + 
DefaultValue = "*", + ValueOptional = false, + Optional = true + )] + public string Expression; + + // @formatter:off + [ValueArgument(typeof(string), "conform-path", + Description = "Set conform to original path", + DefaultValue = null, + ValueOptional = false, + Optional = true + )] + public string ConformPath; + + // @formatter:off + [ValueArgument(typeof(int), "package-priority", + Description = "Set a custom package priority", + DefaultValue = 0, + ValueOptional = true, + Optional = true + )] + public int PackagePriority; + + // @formatter:off + [SwitchArgument("legacy-guids", false, + Description = "Use legacy GUID serialization format when serializing LSX/LSJ files", + Optional = true + )] + public bool LegacyGuids; + + // @formatter:off + [SwitchArgument("use-package-name", false, + Description = "Use package name for destination folder", + Optional = true + )] + public bool UsePackageName; // @formatter:off - [SwitchArgument("use-regex", false, - Description = "Use Regular Expressions for expression type", - Optional = true - )] - public bool UseRegex; - - // @formatter:on - public static LogLevel GetLogLevelByString(string logLevel) + [SwitchArgument("use-regex", false, + Description = "Use Regular Expressions for expression type", + Optional = true + )] + public bool UseRegex; + + // @formatter:on + public static LogLevel GetLogLevelByString(string logLevel) + { + switch (logLevel) { - switch (logLevel) + case "off": { - case "off": - { - return LSLib.LS.Enums.LogLevel.OFF; - } - case "fatal": - { - return LSLib.LS.Enums.LogLevel.FATAL; - } - case "error": - { - return LSLib.LS.Enums.LogLevel.ERROR; - } - case "warn": - { - return LSLib.LS.Enums.LogLevel.WARN; - } - case "info": - { - return LSLib.LS.Enums.LogLevel.INFO; - } - case "debug": - { - return LSLib.LS.Enums.LogLevel.DEBUG; - } - case "trace": - { - return LSLib.LS.Enums.LogLevel.TRACE; - } - case "all": - { - return LSLib.LS.Enums.LogLevel.ALL; - } - default: - { - return LSLib.LS.Enums.LogLevel.INFO; - } + return LSLib.LS.Enums.LogLevel.OFF; } - } - - // ReSharper disable once RedundantCaseLabel - public static Game GetGameByString(string game) - { - switch (game) + case "fatal": + { + return LSLib.LS.Enums.LogLevel.FATAL; + } + case "error": + { + return LSLib.LS.Enums.LogLevel.ERROR; + } + case "warn": + { + return LSLib.LS.Enums.LogLevel.WARN; + } + case "info": + { + return LSLib.LS.Enums.LogLevel.INFO; + } + case "debug": + { + return LSLib.LS.Enums.LogLevel.DEBUG; + } + case "trace": + { + return LSLib.LS.Enums.LogLevel.TRACE; + } + case "all": + { + return LSLib.LS.Enums.LogLevel.ALL; + } + default: { - case "bg3": - { - return LSLib.LS.Enums.Game.BaldursGate3; - } - case "dos": - { - return LSLib.LS.Enums.Game.DivinityOriginalSin; - } - case "dosee": - { - return LSLib.LS.Enums.Game.DivinityOriginalSinEE; - } - case "dos2": - { - return LSLib.LS.Enums.Game.DivinityOriginalSin2; - } - case "dos2de": - { - return LSLib.LS.Enums.Game.DivinityOriginalSin2DE; - } - case "unset": - { - return LSLib.LS.Enums.Game.Unset; - } - default: - { - throw new ArgumentException($"Unknown game: \"{game}\""); - } + return LSLib.LS.Enums.LogLevel.INFO; } } + } - public static ExportFormat GetModelFormatByString(string format) + // ReSharper disable once RedundantCaseLabel + public static Game GetGameByString(string game) + { + switch (game) { - switch (format.ToLower()) + case "bg3": + { + return LSLib.LS.Enums.Game.BaldursGate3; + } + case "dos": + { + return LSLib.LS.Enums.Game.DivinityOriginalSin; + } + case "dosee": + { + 
return LSLib.LS.Enums.Game.DivinityOriginalSinEE; + } + case "dos2": + { + return LSLib.LS.Enums.Game.DivinityOriginalSin2; + } + case "dos2de": { - case "gr2": - { - return ExportFormat.GR2; - } - case "dae": - { - return ExportFormat.DAE; - } - default: - { - throw new ArgumentException($"Unknown model format: {format}"); - } + return LSLib.LS.Enums.Game.DivinityOriginalSin2DE; + } + case "unset": + { + return LSLib.LS.Enums.Game.Unset; + } + default: + { + throw new ArgumentException($"Unknown game: \"{game}\""); } } + } - public static ExportFormat GetModelFormatByPath(string path) + public static ExportFormat GetModelFormatByString(string format) + { + switch (format.ToLower()) { - string extension = Path.GetExtension(path); - if (extension != null) + case "gr2": { - return GetModelFormatByString(extension.Substring(1)); + return ExportFormat.GR2; } + case "dae": + { + return ExportFormat.DAE; + } + default: + { + throw new ArgumentException($"Unknown model format: {format}"); + } + } + } - throw new ArgumentException($"Could not determine model format from filename: {path}"); + public static ExportFormat GetModelFormatByPath(string path) + { + string extension = Path.GetExtension(path); + if (extension != null) + { + return GetModelFormatByString(extension.Substring(1)); } - // ReSharper disable once RedundantCaseLabel - public static ResourceFormat GetResourceFormatByString(string resourceFormat) + throw new ArgumentException($"Could not determine model format from filename: {path}"); + } + + // ReSharper disable once RedundantCaseLabel + public static ResourceFormat GetResourceFormatByString(string resourceFormat) + { + switch (resourceFormat) { - switch (resourceFormat) + case "lsb": + { + return ResourceFormat.LSB; + } + case "lsf": + { + return ResourceFormat.LSF; + } + case "lsj": { - case "lsb": - { - return ResourceFormat.LSB; - } - case "lsf": - { - return ResourceFormat.LSF; - } - case "lsj": - { - return ResourceFormat.LSJ; - } - case "lsx": - { - return ResourceFormat.LSX; - } - default: - { - throw new ArgumentException($"Unknown resource format: \"{resourceFormat}\""); - } + return ResourceFormat.LSJ; + } + case "lsx": + { + return ResourceFormat.LSX; + } + default: + { + throw new ArgumentException($"Unknown resource format: \"{resourceFormat}\""); } } + } - public static Dictionary GetCompressionOptions(string compressionOption, PackageVersion packageVersion) - { - CompressionMethod compression; - var fastCompression = true; + public static Dictionary GetCompressionOptions(string compressionOption, PackageVersion packageVersion) + { + CompressionMethod compression; + var fastCompression = true; - switch (compressionOption) + switch (compressionOption) + { + case "zlibfast": { - case "zlibfast": - { - compression = LSLib.LS.Enums.CompressionMethod.Zlib; - break; - } - - case "zlib": - { - compression = LSLib.LS.Enums.CompressionMethod.Zlib; - fastCompression = false; - break; - } - - case "lz4": - { - compression = LSLib.LS.Enums.CompressionMethod.LZ4; - break; - } - - case "lz4hc": - { - compression = LSLib.LS.Enums.CompressionMethod.LZ4; - fastCompression = false; - break; - } - - // ReSharper disable once RedundantCaseLabel - case "none": - default: - { - compression = LSLib.LS.Enums.CompressionMethod.None; - break; - } + compression = LSLib.LS.Enums.CompressionMethod.Zlib; + break; } - // fallback to zlib, if the package version doesn't support lz4 - if (compression == LSLib.LS.Enums.CompressionMethod.LZ4 && packageVersion <= LSLib.LS.Enums.PackageVersion.V9) + 
case "zlib": { compression = LSLib.LS.Enums.CompressionMethod.Zlib; fastCompression = false; + break; } - var compressionOptions = new Dictionary + case "lz4": { - { "Compression", compression }, - { "FastCompression", fastCompression } - }; - - return compressionOptions; - } + compression = LSLib.LS.Enums.CompressionMethod.LZ4; + break; + } - public static Dictionary GetGR2Options(string[] options) - { - var results = new Dictionary + case "lz4hc": { - { "export-normals", true }, - { "export-tangents", true }, - { "export-uvs", true }, - { "export-colors", true }, - { "deduplicate-vertices", true }, - { "deduplicate-uvs", true }, - { "recalculate-normals", false }, - { "recalculate-tangents", false }, - { "recalculate-iwt", false }, - { "flip-uvs", true }, - { "ignore-uv-nan", true }, - { "disable-qtangents", false }, - { "y-up-skeletons", true }, - { "force-legacy-version", false }, - { "compact-tris", true }, - { "build-dummy-skeleton", true }, - { "apply-basis-transforms", true }, - { "x-flip-skeletons", false }, - { "x-flip-meshes", false }, - { "conform", false }, - { "conform-copy", false } - }; - - if (options == null) - { - return results; + compression = LSLib.LS.Enums.CompressionMethod.LZ4; + fastCompression = false; + break; } - foreach (string option in options.Where(option => results.Keys.Contains(option))) + // ReSharper disable once RedundantCaseLabel + case "none": + default: { - results[option] = true; + compression = LSLib.LS.Enums.CompressionMethod.None; + break; } + } + + // fallback to zlib, if the package version doesn't support lz4 + if (compression == LSLib.LS.Enums.CompressionMethod.LZ4 && packageVersion <= LSLib.LS.Enums.PackageVersion.V9) + { + compression = LSLib.LS.Enums.CompressionMethod.Zlib; + fastCompression = false; + } + + var compressionOptions = new Dictionary + { + { "Compression", compression }, + { "FastCompression", fastCompression } + }; + + return compressionOptions; + } + public static Dictionary GetGR2Options(string[] options) + { + var results = new Dictionary + { + { "export-normals", true }, + { "export-tangents", true }, + { "export-uvs", true }, + { "export-colors", true }, + { "deduplicate-vertices", true }, + { "deduplicate-uvs", true }, + { "recalculate-normals", false }, + { "recalculate-tangents", false }, + { "recalculate-iwt", false }, + { "flip-uvs", true }, + { "ignore-uv-nan", true }, + { "disable-qtangents", false }, + { "y-up-skeletons", true }, + { "force-legacy-version", false }, + { "compact-tris", true }, + { "build-dummy-skeleton", true }, + { "apply-basis-transforms", true }, + { "x-flip-skeletons", false }, + { "x-flip-meshes", false }, + { "conform", false }, + { "conform-copy", false } + }; + + if (options == null) + { return results; } + + foreach (string option in options.Where(option => results.Keys.Contains(option))) + { + results[option] = true; + } + + return results; } } diff --git a/Divine/CLI/CommandLineDataProcessor.cs b/Divine/CLI/CommandLineDataProcessor.cs index 62960e65..acf6dd33 100644 --- a/Divine/CLI/CommandLineDataProcessor.cs +++ b/Divine/CLI/CommandLineDataProcessor.cs @@ -2,85 +2,84 @@ using LSLib.LS; using LSLib.LS.Enums; -namespace Divine.CLI +namespace Divine.CLI; + +internal class CommandLineDataProcessor { - internal class CommandLineDataProcessor + public static void Convert() { - public static void Convert() - { - var conversionParams = ResourceConversionParameters.FromGameVersion(CommandLineActions.Game); - var loadParams = ResourceLoadParameters.FromGameVersion(CommandLineActions.Game); - 
loadParams.ByteSwapGuids = !CommandLineActions.LegacyGuids; - ConvertResource(CommandLineActions.SourcePath, CommandLineActions.DestinationPath, loadParams, conversionParams); - } + var conversionParams = ResourceConversionParameters.FromGameVersion(CommandLineActions.Game); + var loadParams = ResourceLoadParameters.FromGameVersion(CommandLineActions.Game); + loadParams.ByteSwapGuids = !CommandLineActions.LegacyGuids; + ConvertResource(CommandLineActions.SourcePath, CommandLineActions.DestinationPath, loadParams, conversionParams); + } - public static void BatchConvert() - { - var conversionParams = ResourceConversionParameters.FromGameVersion(CommandLineActions.Game); - var loadParams = ResourceLoadParameters.FromGameVersion(CommandLineActions.Game); - loadParams.ByteSwapGuids = !CommandLineActions.LegacyGuids; - BatchConvertResource(CommandLineActions.SourcePath, CommandLineActions.DestinationPath, CommandLineActions.InputFormat, CommandLineActions.OutputFormat, loadParams, conversionParams); - } + public static void BatchConvert() + { + var conversionParams = ResourceConversionParameters.FromGameVersion(CommandLineActions.Game); + var loadParams = ResourceLoadParameters.FromGameVersion(CommandLineActions.Game); + loadParams.ByteSwapGuids = !CommandLineActions.LegacyGuids; + BatchConvertResource(CommandLineActions.SourcePath, CommandLineActions.DestinationPath, CommandLineActions.InputFormat, CommandLineActions.OutputFormat, loadParams, conversionParams); + } - private static void ConvertResource(string sourcePath, string destinationPath, - ResourceLoadParameters loadParams, ResourceConversionParameters conversionParams) + private static void ConvertResource(string sourcePath, string destinationPath, + ResourceLoadParameters loadParams, ResourceConversionParameters conversionParams) + { + try { - try - { - ResourceFormat resourceFormat = ResourceUtils.ExtensionToResourceFormat(destinationPath); - CommandLineLogger.LogDebug($"Using destination extension: {resourceFormat}"); + ResourceFormat resourceFormat = ResourceUtils.ExtensionToResourceFormat(destinationPath); + CommandLineLogger.LogDebug($"Using destination extension: {resourceFormat}"); - Resource resource = ResourceUtils.LoadResource(sourcePath, loadParams); + Resource resource = ResourceUtils.LoadResource(sourcePath, loadParams); - ResourceUtils.SaveResource(resource, destinationPath, resourceFormat, conversionParams); + ResourceUtils.SaveResource(resource, destinationPath, resourceFormat, conversionParams); - CommandLineLogger.LogInfo($"Wrote resource to: {destinationPath}"); - } - catch (Exception e) - { - CommandLineLogger.LogFatal($"Failed to convert resource: {e.Message}", 2); - CommandLineLogger.LogTrace($"{e.StackTrace}"); - } + CommandLineLogger.LogInfo($"Wrote resource to: {destinationPath}"); } - - public static void ConvertLoca() + catch (Exception e) { - ConvertLoca(CommandLineActions.SourcePath, CommandLineActions.DestinationPath); + CommandLineLogger.LogFatal($"Failed to convert resource: {e.Message}", 2); + CommandLineLogger.LogTrace($"{e.StackTrace}"); } + } - private static void ConvertLoca(string sourcePath, string destinationPath) + public static void ConvertLoca() + { + ConvertLoca(CommandLineActions.SourcePath, CommandLineActions.DestinationPath); + } + + private static void ConvertLoca(string sourcePath, string destinationPath) + { + try + { + var loca = LocaUtils.Load(sourcePath); + LocaUtils.Save(loca, destinationPath); + CommandLineLogger.LogInfo($"Wrote localization to: {destinationPath}"); + } + catch 
(Exception e) { - try - { - var loca = LocaUtils.Load(sourcePath); - LocaUtils.Save(loca, destinationPath); - CommandLineLogger.LogInfo($"Wrote localization to: {destinationPath}"); - } - catch (Exception e) - { - CommandLineLogger.LogFatal($"Failed to convert localization file: {e.Message}", 2); - CommandLineLogger.LogTrace($"{e.StackTrace}"); - } + CommandLineLogger.LogFatal($"Failed to convert localization file: {e.Message}", 2); + CommandLineLogger.LogTrace($"{e.StackTrace}"); } + } - private static void BatchConvertResource(string sourcePath, string destinationPath, ResourceFormat inputFormat, ResourceFormat outputFormat, - ResourceLoadParameters loadParams, ResourceConversionParameters conversionParams) + private static void BatchConvertResource(string sourcePath, string destinationPath, ResourceFormat inputFormat, ResourceFormat outputFormat, + ResourceLoadParameters loadParams, ResourceConversionParameters conversionParams) + { + try { - try - { - CommandLineLogger.LogDebug($"Using destination extension: {outputFormat}"); + CommandLineLogger.LogDebug($"Using destination extension: {outputFormat}"); - var resourceUtils = new ResourceUtils(); - resourceUtils.ConvertResources(sourcePath, destinationPath, inputFormat, outputFormat, loadParams, conversionParams); + var resourceUtils = new ResourceUtils(); + resourceUtils.ConvertResources(sourcePath, destinationPath, inputFormat, outputFormat, loadParams, conversionParams); - CommandLineLogger.LogInfo($"Wrote resources to: {destinationPath}"); - } - catch (Exception e) - { - CommandLineLogger.LogFatal($"Failed to batch convert resources: {e.Message}", 2); - CommandLineLogger.LogTrace($"{e.StackTrace}"); - } + CommandLineLogger.LogInfo($"Wrote resources to: {destinationPath}"); + } + catch (Exception e) + { + CommandLineLogger.LogFatal($"Failed to batch convert resources: {e.Message}", 2); + CommandLineLogger.LogTrace($"{e.StackTrace}"); } } } diff --git a/Divine/CLI/CommandLineGR2Processor.cs b/Divine/CLI/CommandLineGR2Processor.cs index 740f4726..f1f7d3d9 100644 --- a/Divine/CLI/CommandLineGR2Processor.cs +++ b/Divine/CLI/CommandLineGR2Processor.cs @@ -5,108 +5,107 @@ using LSLib.Granny.Model; using LSLib.LS.Enums; -namespace Divine.CLI +namespace Divine.CLI; + +internal class CommandLineGR2Processor { - internal class CommandLineGR2Processor + private static readonly Dictionary GR2Options = CommandLineActions.GR2Options; + + public static void Convert(string file = "") { - private static readonly Dictionary GR2Options = CommandLineActions.GR2Options; + ConvertResource(file); + } - public static void Convert(string file = "") - { - ConvertResource(file); - } + public static void BatchConvert() + { + BatchConvertResources(CommandLineActions.SourcePath, Program.argv.InputFormat); + } - public static void BatchConvert() + public static ExporterOptions UpdateExporterSettings() + { + var exporterOptions = new ExporterOptions() { - BatchConvertResources(CommandLineActions.SourcePath, Program.argv.InputFormat); - } + InputPath = CommandLineActions.SourcePath, + OutputPath = CommandLineActions.DestinationPath, + InputFormat = Program.argv.InputFormat != null ? CommandLineArguments.GetModelFormatByString(Program.argv.InputFormat) : CommandLineArguments.GetModelFormatByPath(CommandLineActions.SourcePath), + OutputFormat = Program.argv.OutputFormat != null ? 
CommandLineArguments.GetModelFormatByString(Program.argv.OutputFormat) : CommandLineArguments.GetModelFormatByPath(CommandLineActions.DestinationPath), + ExportNormals = GR2Options["export-normals"], + ExportTangents = GR2Options["export-tangents"], + ExportUVs = GR2Options["export-uvs"], + ExportColors = GR2Options["export-colors"], + FlipUVs = GR2Options["flip-uvs"], + RecalculateNormals = GR2Options["recalculate-normals"], + RecalculateTangents = GR2Options["recalculate-tangents"], + RecalculateIWT = GR2Options["recalculate-iwt"], + BuildDummySkeleton = GR2Options["build-dummy-skeleton"], + CompactIndices = GR2Options["compact-tris"], + DeduplicateVertices = GR2Options["deduplicate-vertices"], + DeduplicateUVs = GR2Options["deduplicate-uvs"], + ApplyBasisTransforms = GR2Options["apply-basis-transforms"], + UseObsoleteVersionTag = GR2Options["force-legacy-version"], + ConformGR2Path = GR2Options["conform"] && !string.IsNullOrEmpty(CommandLineActions.ConformPath) ? CommandLineActions.ConformPath : null, + FlipSkeleton = GR2Options["x-flip-skeletons"], + FlipMesh = GR2Options["x-flip-meshes"], + TransformSkeletons = GR2Options["y-up-skeletons"], + IgnoreUVNaN = GR2Options["ignore-uv-nan"], + EnableQTangents = !GR2Options["disable-qtangents"] + }; - public static ExporterOptions UpdateExporterSettings() + if (exporterOptions.ConformGR2Path != null) { - var exporterOptions = new ExporterOptions() + if(GR2Options["conform-copy"]) { - InputPath = CommandLineActions.SourcePath, - OutputPath = CommandLineActions.DestinationPath, - InputFormat = Program.argv.InputFormat != null ? CommandLineArguments.GetModelFormatByString(Program.argv.InputFormat) : CommandLineArguments.GetModelFormatByPath(CommandLineActions.SourcePath), - OutputFormat = Program.argv.OutputFormat != null ? CommandLineArguments.GetModelFormatByString(Program.argv.OutputFormat) : CommandLineArguments.GetModelFormatByPath(CommandLineActions.DestinationPath), - ExportNormals = GR2Options["export-normals"], - ExportTangents = GR2Options["export-tangents"], - ExportUVs = GR2Options["export-uvs"], - ExportColors = GR2Options["export-colors"], - FlipUVs = GR2Options["flip-uvs"], - RecalculateNormals = GR2Options["recalculate-normals"], - RecalculateTangents = GR2Options["recalculate-tangents"], - RecalculateIWT = GR2Options["recalculate-iwt"], - BuildDummySkeleton = GR2Options["build-dummy-skeleton"], - CompactIndices = GR2Options["compact-tris"], - DeduplicateVertices = GR2Options["deduplicate-vertices"], - DeduplicateUVs = GR2Options["deduplicate-uvs"], - ApplyBasisTransforms = GR2Options["apply-basis-transforms"], - UseObsoleteVersionTag = GR2Options["force-legacy-version"], - ConformGR2Path = GR2Options["conform"] && !string.IsNullOrEmpty(CommandLineActions.ConformPath) ? 
CommandLineActions.ConformPath : null, - FlipSkeleton = GR2Options["x-flip-skeletons"], - FlipMesh = GR2Options["x-flip-meshes"], - TransformSkeletons = GR2Options["y-up-skeletons"], - IgnoreUVNaN = GR2Options["ignore-uv-nan"], - EnableQTangents = !GR2Options["disable-qtangents"] - }; - - if (exporterOptions.ConformGR2Path != null) - { - if(GR2Options["conform-copy"]) - { - exporterOptions.ConformSkeletons = false; - exporterOptions.ConformSkeletonsCopy = true; - } + exporterOptions.ConformSkeletons = false; + exporterOptions.ConformSkeletonsCopy = true; } + } - exporterOptions.LoadGameSettings(CommandLineActions.Game); + exporterOptions.LoadGameSettings(CommandLineActions.Game); - return exporterOptions; - } + return exporterOptions; + } - private static void ConvertResource(string file) + private static void ConvertResource(string file) + { + var exporter = new Exporter { - var exporter = new Exporter - { - Options = UpdateExporterSettings() - }; + Options = UpdateExporterSettings() + }; - if (!string.IsNullOrEmpty(file)) - { - exporter.Options.InputPath = file; - } + if (!string.IsNullOrEmpty(file)) + { + exporter.Options.InputPath = file; + } #if !DEBUG - try - { + try + { #endif - exporter.Export(); + exporter.Export(); - CommandLineLogger.LogInfo("Export completed successfully."); + CommandLineLogger.LogInfo("Export completed successfully."); #if !DEBUG + } + catch (Exception e) + { + CommandLineLogger.LogFatal($"Export failed: {e.Message + Environment.NewLine + e.StackTrace}", 2); } - catch (Exception e) - { - CommandLineLogger.LogFatal($"Export failed: {e.Message + Environment.NewLine + e.StackTrace}", 2); - } #endif - } + } - private static void BatchConvertResources(string sourcePath, string inputFormat) - { - string[] files = Directory.GetFiles(sourcePath, $"*.{inputFormat}"); + private static void BatchConvertResources(string sourcePath, string inputFormat) + { + string[] files = Directory.GetFiles(sourcePath, $"*.{inputFormat}"); - if (files.Length == 0) - { - CommandLineLogger.LogFatal($"Batch convert failed: *.{inputFormat} not found in source path", 1); - } + if (files.Length == 0) + { + CommandLineLogger.LogFatal($"Batch convert failed: *.{inputFormat} not found in source path", 1); + } - foreach (string file in files) - { - UpdateExporterSettings(); - Convert(file); - } + foreach (string file in files) + { + UpdateExporterSettings(); + Convert(file); } } } diff --git a/Divine/CLI/CommandLineLogger.cs b/Divine/CLI/CommandLineLogger.cs index 0531f077..a00b0b83 100644 --- a/Divine/CLI/CommandLineLogger.cs +++ b/Divine/CLI/CommandLineLogger.cs @@ -1,123 +1,122 @@ using System; using LSLib.LS.Enums; -namespace Divine.CLI +namespace Divine.CLI; + +internal class CommandLineLogger { - internal class CommandLineLogger + private static readonly LogLevel LogLevelOption = CommandLineActions.LogLevel; + + public static void LogFatal(string message, int errorCode) { - private static readonly LogLevel LogLevelOption = CommandLineActions.LogLevel; + Log(LogLevel.FATAL, message, errorCode); + } - public static void LogFatal(string message, int errorCode) - { - Log(LogLevel.FATAL, message, errorCode); - } + public static void LogError(string message) + { + Log(LogLevel.ERROR, message); + } - public static void LogError(string message) - { - Log(LogLevel.ERROR, message); - } + public static void LogWarn(string message) + { + Log(LogLevel.WARN, message); + } - public static void LogWarn(string message) - { - Log(LogLevel.WARN, message); - } + public static void LogInfo(string message) + { + 
Log(LogLevel.INFO, message); + } - public static void LogInfo(string message) - { - Log(LogLevel.INFO, message); - } + public static void LogDebug(string message) + { + Log(LogLevel.DEBUG, message); + } - public static void LogDebug(string message) - { - Log(LogLevel.DEBUG, message); - } + public static void LogTrace(string message) + { + Log(LogLevel.TRACE, message); + } - public static void LogTrace(string message) - { - Log(LogLevel.TRACE, message); - } + public static void LogAll(string message) + { + Log(LogLevel.ALL, message); + } - public static void LogAll(string message) + private static void Log(LogLevel logLevel, string message, int errorCode = -1) + { + if (LogLevelOption == LogLevel.OFF && logLevel != LogLevel.FATAL) { - Log(LogLevel.ALL, message); + return; } - private static void Log(LogLevel logLevel, string message, int errorCode = -1) + switch (logLevel) { - if (LogLevelOption == LogLevel.OFF && logLevel != LogLevel.FATAL) + case LogLevel.FATAL: { - return; - } + if (LogLevelOption > LogLevel.OFF) + { + Console.WriteLine($"[FATAL] {message}"); + } - switch (logLevel) - { - case LogLevel.FATAL: + if (errorCode == -1) { - if (LogLevelOption > LogLevel.OFF) - { - Console.WriteLine($"[FATAL] {message}"); - } - - if (errorCode == -1) - { - Environment.Exit((int) LogLevel.FATAL); - } - else - { - Environment.Exit((int) LogLevel.FATAL + errorCode); - } - break; + Environment.Exit((int) LogLevel.FATAL); + } + else + { + Environment.Exit((int) LogLevel.FATAL + errorCode); } + break; + } - case LogLevel.ERROR: + case LogLevel.ERROR: + { + if (LogLevelOption < logLevel) { - if (LogLevelOption < logLevel) - { - break; - } - Console.WriteLine($"[ERROR] {message}"); break; } + Console.WriteLine($"[ERROR] {message}"); + break; + } - case LogLevel.WARN: + case LogLevel.WARN: + { + if (LogLevelOption < logLevel) { - if (LogLevelOption < logLevel) - { - break; - } - Console.WriteLine($"[WARN] {message}"); break; } + Console.WriteLine($"[WARN] {message}"); + break; + } - case LogLevel.INFO: + case LogLevel.INFO: + { + if (LogLevelOption < logLevel) { - if (LogLevelOption < logLevel) - { - break; - } - Console.WriteLine($"[INFO] {message}"); break; } + Console.WriteLine($"[INFO] {message}"); + break; + } - case LogLevel.DEBUG: + case LogLevel.DEBUG: + { + if (LogLevelOption < logLevel) { - if (LogLevelOption < logLevel) - { - break; - } - Console.WriteLine($"[DEBUG] {message}"); break; } + Console.WriteLine($"[DEBUG] {message}"); + break; + } - case LogLevel.TRACE: + case LogLevel.TRACE: + { + if (LogLevelOption < logLevel) { - if (LogLevelOption < logLevel) - { - break; - } - Console.WriteLine($"[TRACE] {message}"); break; } + Console.WriteLine($"[TRACE] {message}"); + break; } } } diff --git a/Divine/CLI/CommandLinePackageProcessor.cs b/Divine/CLI/CommandLinePackageProcessor.cs index 3ec4dfdc..b0bafab5 100644 --- a/Divine/CLI/CommandLinePackageProcessor.cs +++ b/Divine/CLI/CommandLinePackageProcessor.cs @@ -5,87 +5,87 @@ using LSLib.LS; using LSLib.LS.Enums; -namespace Divine.CLI +namespace Divine.CLI; + +internal class CommandLinePackageProcessor { - internal class CommandLinePackageProcessor + private static readonly CommandLineArguments Args = Program.argv; + + public static void Create() { - private static readonly CommandLineArguments Args = Program.argv; + CreatePackageResource(); + } - public static void Create() + public static void ListFiles(Func filter = null) + { + if (CommandLineActions.SourcePath == null) { - CreatePackageResource(); + CommandLineLogger.LogFatal("Cannot list 
package without source path", 1); } - - public static void ListFiles(Func filter = null) + else { - if (CommandLineActions.SourcePath == null) - { - CommandLineLogger.LogFatal("Cannot list package without source path", 1); - } - else - { - ListPackageFiles(CommandLineActions.SourcePath, filter); - } + ListPackageFiles(CommandLineActions.SourcePath, filter); } + } - public static void ExtractSingleFile() - { - ExtractSingleFile(CommandLineActions.SourcePath, CommandLineActions.DestinationPath, CommandLineActions.PackagedFilePath); - } + public static void ExtractSingleFile() + { + ExtractSingleFile(CommandLineActions.SourcePath, CommandLineActions.DestinationPath, CommandLineActions.PackagedFilePath); + } - private static void ExtractSingleFile(string packagePath, string destinationPath, string packagedPath) + private static void ExtractSingleFile(string packagePath, string destinationPath, string packagedPath) + { + try { - try + using (var reader = new PackageReader(packagePath)) { - using (var reader = new PackageReader(packagePath)) + Package package = reader.Read(); + // Try to match by full path + AbstractFileInfo file = package.Files.Find(fileInfo => string.Compare(fileInfo.Name, packagedPath, StringComparison.OrdinalIgnoreCase) == 0 && !fileInfo.IsDeletion()); + if (file == null) { - Package package = reader.Read(); - // Try to match by full path - AbstractFileInfo file = package.Files.Find(fileInfo => string.Compare(fileInfo.Name, packagedPath, StringComparison.OrdinalIgnoreCase) == 0 && !fileInfo.IsDeletion()); + // Try to match by filename only + file = package.Files.Find(fileInfo => string.Compare(Path.GetFileName(fileInfo.Name), packagedPath, StringComparison.OrdinalIgnoreCase) == 0); if (file == null) { - // Try to match by filename only - file = package.Files.Find(fileInfo => string.Compare(Path.GetFileName(fileInfo.Name), packagedPath, StringComparison.OrdinalIgnoreCase) == 0); - if (file == null) - { - CommandLineLogger.LogError($"Package doesn't contain file named '{packagedPath}'"); - return; - } + CommandLineLogger.LogError($"Package doesn't contain file named '{packagedPath}'"); + return; } + } - using (var fs = new FileStream(destinationPath, FileMode.Create, FileAccess.Write)) + using (var fs = new FileStream(destinationPath, FileMode.Create, FileAccess.Write)) + { + try { - try - { - Stream stream = file.MakeStream(); - stream.CopyTo(fs); - } - finally - { - file.ReleaseStream(); - } - + Stream stream = file.MakeStream(); + stream.CopyTo(fs); } + finally + { + file.ReleaseStream(); + } + } } - catch (NotAPackageException) - { - CommandLineLogger.LogError("Failed to list package contents because the package is not an Original Sin package or savegame archive"); - } - catch (Exception e) - { - CommandLineLogger.LogFatal($"Failed to list package: {e.Message}", 2); - CommandLineLogger.LogTrace($"{e.StackTrace}"); - } } + catch (NotAPackageException) + { + CommandLineLogger.LogError("Failed to list package contents because the package is not an Original Sin package or savegame archive"); + } + catch (Exception e) + { + CommandLineLogger.LogFatal($"Failed to list package: {e.Message}", 2); + CommandLineLogger.LogTrace($"{e.StackTrace}"); + } + } - private static void ListPackageFiles(string packagePath, Func filter = null) + private static void ListPackageFiles(string packagePath, Func filter = null) + { + try { - try + using (var reader = new PackageReader(packagePath)) { - using (var reader = new PackageReader(packagePath)) - { - Package package = reader.Read(); + Package 
package = reader.Read(); List files = package.Files; @@ -94,114 +94,113 @@ private static void ListPackageFiles(string packagePath, Func filter(obj)); } - foreach (AbstractFileInfo fileInfo in files.OrderBy(obj => obj.Name)) - { - Console.WriteLine($"{fileInfo.Name}\t{fileInfo.Size()}\t{fileInfo.CRC()}"); - } + foreach (AbstractFileInfo fileInfo in files.OrderBy(obj => obj.Name)) + { + Console.WriteLine($"{fileInfo.Name}\t{fileInfo.Size()}\t{fileInfo.CRC()}"); } } - catch (NotAPackageException) - { - CommandLineLogger.LogError("Failed to list package contents because the package is not an Original Sin package or savegame archive"); - } - catch (Exception e) - { - CommandLineLogger.LogFatal($"Failed to list package: {e.Message}", 2); - CommandLineLogger.LogTrace($"{e.StackTrace}"); - } } + catch (NotAPackageException) + { + CommandLineLogger.LogError("Failed to list package contents because the package is not an Original Sin package or savegame archive"); + } + catch (Exception e) + { + CommandLineLogger.LogFatal($"Failed to list package: {e.Message}", 2); + CommandLineLogger.LogTrace($"{e.StackTrace}"); + } + } - public static void Extract(Func filter = null) + public static void Extract(Func filter = null) + { + if (CommandLineActions.SourcePath == null) { - if (CommandLineActions.SourcePath == null) - { - CommandLineLogger.LogFatal("Cannot extract package without source path", 1); - } - else - { - string extractionPath = GetExtractionPath(CommandLineActions.SourcePath, CommandLineActions.DestinationPath); + CommandLineLogger.LogFatal("Cannot extract package without source path", 1); + } + else + { + string extractionPath = GetExtractionPath(CommandLineActions.SourcePath, CommandLineActions.DestinationPath); - CommandLineLogger.LogInfo($"Extracting package: {CommandLineActions.SourcePath}"); + CommandLineLogger.LogInfo($"Extracting package: {CommandLineActions.SourcePath}"); - ExtractPackageResource(CommandLineActions.SourcePath, extractionPath, filter); - } + ExtractPackageResource(CommandLineActions.SourcePath, extractionPath, filter); } + } - public static void BatchExtract(Func filter = null) - { - string[] files = Directory.GetFiles(CommandLineActions.SourcePath, $"*.{Args.InputFormat}"); + public static void BatchExtract(Func filter = null) + { + string[] files = Directory.GetFiles(CommandLineActions.SourcePath, $"*.{Args.InputFormat}"); - foreach (string file in files) - { - string extractionPath = GetExtractionPath(file, CommandLineActions.DestinationPath); + foreach (string file in files) + { + string extractionPath = GetExtractionPath(file, CommandLineActions.DestinationPath); - CommandLineLogger.LogInfo($"Extracting package: {file}"); + CommandLineLogger.LogInfo($"Extracting package: {file}"); - ExtractPackageResource(file, extractionPath, filter); - } + ExtractPackageResource(file, extractionPath, filter); } + } - private static string GetExtractionPath(string sourcePath, string destinationPath) + private static string GetExtractionPath(string sourcePath, string destinationPath) + { + return Args.UsePackageName ? Path.Combine(destinationPath, Path.GetFileNameWithoutExtension(sourcePath) ?? throw new InvalidOperationException()) : CommandLineActions.DestinationPath; + } + + private static void CreatePackageResource(string file = "") + { + if (string.IsNullOrEmpty(file)) { - return Args.UsePackageName ? Path.Combine(destinationPath, Path.GetFileNameWithoutExtension(sourcePath) ?? 
throw new InvalidOperationException()) : CommandLineActions.DestinationPath; + file = CommandLineActions.DestinationPath; + CommandLineLogger.LogDebug($"Using destination path: {file}"); } - private static void CreatePackageResource(string file = "") - { - if (string.IsNullOrEmpty(file)) - { - file = CommandLineActions.DestinationPath; - CommandLineLogger.LogDebug($"Using destination path: {file}"); - } + var options = new PackageCreationOptions(); + options.Version = CommandLineActions.PackageVersion; - var options = new PackageCreationOptions(); - options.Version = CommandLineActions.PackageVersion; + options.Priority = (byte)CommandLineActions.PackagePriority; - options.Priority = (byte)CommandLineActions.PackagePriority; + Dictionary compressionOptions = CommandLineArguments.GetCompressionOptions(Path.GetExtension(file)?.ToLower() == ".lsv" ? "zlib" : Args.CompressionMethod, options.Version); - Dictionary compressionOptions = CommandLineArguments.GetCompressionOptions(Path.GetExtension(file)?.ToLower() == ".lsv" ? "zlib" : Args.CompressionMethod, options.Version); + options.Compression = (CompressionMethod)compressionOptions["Compression"]; + options.FastCompression = (bool)compressionOptions["FastCompression"]; - options.Compression = (CompressionMethod)compressionOptions["Compression"]; - options.FastCompression = (bool)compressionOptions["FastCompression"]; + var fast = options.FastCompression ? "Fast" : "Normal"; + CommandLineLogger.LogDebug($"Using compression method: {options.Compression.ToString()} ({fast})"); - var fast = options.FastCompression ? "Fast" : "Normal"; - CommandLineLogger.LogDebug($"Using compression method: {options.Compression.ToString()} ({fast})"); + var packager = new Packager(); + packager.CreatePackage(file, CommandLineActions.SourcePath, options); - var packager = new Packager(); - packager.CreatePackage(file, CommandLineActions.SourcePath, options); + CommandLineLogger.LogInfo("Package created successfully."); + } - CommandLineLogger.LogInfo("Package created successfully."); + private static void ExtractPackageResource(string file = "", string folder = "", Func filter = null) + { + if (string.IsNullOrEmpty(file)) + { + file = CommandLineActions.SourcePath; + CommandLineLogger.LogDebug($"Using source path: {file}"); } - private static void ExtractPackageResource(string file = "", string folder = "", Func filter = null) + try { - if (string.IsNullOrEmpty(file)) - { - file = CommandLineActions.SourcePath; - CommandLineLogger.LogDebug($"Using source path: {file}"); - } - - try - { - var packager = new Packager(); + var packager = new Packager(); - string extractionPath = GetExtractionPath(folder, CommandLineActions.DestinationPath); + string extractionPath = GetExtractionPath(folder, CommandLineActions.DestinationPath); - CommandLineLogger.LogDebug($"Using extraction path: {extractionPath}"); + CommandLineLogger.LogDebug($"Using extraction path: {extractionPath}"); - packager.UncompressPackage(file, extractionPath, filter); + packager.UncompressPackage(file, extractionPath, filter); - CommandLineLogger.LogInfo($"Extracted package to: {extractionPath}"); - } - catch (NotAPackageException) - { - CommandLineLogger.LogError("Failed to extract package because the package is not an Original Sin package or savegame archive"); - } - catch (Exception e) - { - CommandLineLogger.LogFatal($"Failed to extract package: {e.Message}", 2); - CommandLineLogger.LogTrace($"{e.StackTrace}"); - } + CommandLineLogger.LogInfo($"Extracted package to: {extractionPath}"); + } + catch 
(NotAPackageException) + { + CommandLineLogger.LogError("Failed to extract package because the package is not an Original Sin package or savegame archive"); + } + catch (Exception e) + { + CommandLineLogger.LogFatal($"Failed to extract package: {e.Message}", 2); + CommandLineLogger.LogTrace($"{e.StackTrace}"); } } } diff --git a/Divine/Program.cs b/Divine/Program.cs index 6cd0640c..7c128459 100644 --- a/Divine/Program.cs +++ b/Divine/Program.cs @@ -1,46 +1,45 @@ using System; using Divine.CLI; -namespace Divine +namespace Divine; + +internal class Program { - internal class Program + // ReSharper disable once InconsistentNaming + public static CommandLineArguments argv; + + private static void Main(string[] args) { - // ReSharper disable once InconsistentNaming - public static CommandLineArguments argv; + System.Globalization.CultureInfo customCulture = (System.Globalization.CultureInfo)System.Threading.Thread.CurrentThread.CurrentCulture.Clone(); + customCulture.NumberFormat.NumberDecimalSeparator = "."; + System.Threading.Thread.CurrentThread.CurrentCulture = customCulture; - private static void Main(string[] args) + CommandLineParser.CommandLineParser parser = new CommandLineParser.CommandLineParser { - System.Globalization.CultureInfo customCulture = (System.Globalization.CultureInfo)System.Threading.Thread.CurrentThread.CurrentCulture.Clone(); - customCulture.NumberFormat.NumberDecimalSeparator = "."; - System.Threading.Thread.CurrentThread.CurrentCulture = customCulture; + IgnoreCase = true, + ShowUsageOnEmptyCommandline = true + }; - CommandLineParser.CommandLineParser parser = new CommandLineParser.CommandLineParser - { - IgnoreCase = true, - ShowUsageOnEmptyCommandline = true - }; + argv = new CommandLineArguments(); - argv = new CommandLineArguments(); - - parser.ExtractArgumentAttributes(argv); + parser.ExtractArgumentAttributes(argv); #if !DEBUG - try - { + try + { #endif - parser.ParseCommandLine(args); + parser.ParseCommandLine(args); #if !DEBUG - } - catch (Exception e) - { - Console.WriteLine($"[FATAL] {e.Message}"); - } + } + catch (Exception e) + { + Console.WriteLine($"[FATAL] {e.Message}"); + } #endif - if (parser.ParsingSucceeded) - { - CommandLineActions.Run(argv); - } + if (parser.ParsingSucceeded) + { + CommandLineActions.Run(argv); } } } diff --git a/LSLib/Granny/Collada.cs b/LSLib/Granny/Collada.cs index 14e61362..dccccc04 100644 --- a/LSLib/Granny/Collada.cs +++ b/LSLib/Granny/Collada.cs @@ -3,85 +3,84 @@ using System.Text; using System.Security.Cryptography; -namespace LSLib.Granny +namespace LSLib.Granny; + +class ColladaUtils { - class ColladaUtils + public static sourceTechnique_common MakeAccessor(string type, string[] components, int stride, int elements, string arrayId) { - public static sourceTechnique_common MakeAccessor(string type, string[] components, int stride, int elements, string arrayId) + var sourceTechnique = new sourceTechnique_common(); + var accessor = new accessor(); + var accessorParams = new List(); + + foreach (var component in components) { - var sourceTechnique = new sourceTechnique_common(); - var accessor = new accessor(); - var accessorParams = new List(); + var param = new param(); + if (component.Length > 0) + param.name = component; + param.type = type; + accessorParams.Add(param); + } - foreach (var component in components) - { - var param = new param(); - if (component.Length > 0) - param.name = component; - param.type = type; - accessorParams.Add(param); - } + accessor.param = accessorParams.ToArray(); + accessor.source = "#" 
+ arrayId; + accessor.stride = (ulong)(components.Length * stride); + accessor.offset = 0; + accessor.count = (ulong)(elements / stride); + sourceTechnique.accessor = accessor; + return sourceTechnique; + } - accessor.param = accessorParams.ToArray(); - accessor.source = "#" + arrayId; - accessor.stride = (ulong)(components.Length * stride); - accessor.offset = 0; - accessor.count = (ulong)(elements / stride); - sourceTechnique.accessor = accessor; - return sourceTechnique; + public static source MakeFloatSource(string parentName, string name, string[] components, float[] values, int stride = 1, string type = "float") + { + var posName = parentName + "-" + name + "-array"; + // Create a shortened source name if the length exceeds 64 bytes + if (posName.Length > 64) + { + var hash = MD5.Create().ComputeHash(Encoding.UTF8.GetBytes(parentName)); + parentName = string.Join("", hash.Select(c => ((int)c).ToString("X2"))); } - public static source MakeFloatSource(string parentName, string name, string[] components, float[] values, int stride = 1, string type = "float") + var positions = new float_array { - var posName = parentName + "-" + name + "-array"; - // Create a shortened source name if the length exceeds 64 bytes - if (posName.Length > 64) - { - var hash = MD5.Create().ComputeHash(Encoding.UTF8.GetBytes(parentName)); - parentName = string.Join("", hash.Select(c => ((int)c).ToString("X2"))); - } + id = parentName + "-" + name + "-array", + count = (ulong)values.Length, + Values = values.Select(x => (double)x).ToArray() + }; - var positions = new float_array - { - id = parentName + "-" + name + "-array", - count = (ulong)values.Length, - Values = values.Select(x => (double)x).ToArray() - }; + var source = new source + { + id = parentName + "-" + name, + name = name + }; - var source = new source - { - id = parentName + "-" + name, - name = name - }; + var technique = MakeAccessor(type, components, stride, values.Length / components.Length, positions.id); + source.technique_common = technique; + source.Item = positions; + return source; + } - var technique = MakeAccessor(type, components, stride, values.Length / components.Length, positions.id); - source.technique_common = technique; - source.Item = positions; - return source; - } + public static source MakeNameSource(string parentName, string name, string[] components, string[] values, string type = "name") + { + var varNames = from v in values + select v.Replace(' ', '_'); - public static source MakeNameSource(string parentName, string name, string[] components, string[] values, string type = "name") + var names = new Name_array { - var varNames = from v in values - select v.Replace(' ', '_'); + id = parentName + "-" + name + "-array", + count = (ulong)values.Length, + Values = varNames.ToArray() + }; - var names = new Name_array - { - id = parentName + "-" + name + "-array", - count = (ulong)values.Length, - Values = varNames.ToArray() - }; - - var source = new source - { - id = parentName + "-" + name, - name = name - }; + var source = new source + { + id = parentName + "-" + name, + name = name + }; - var technique = MakeAccessor(type, components, 1, values.Length / components.Length, names.id); - source.technique_common = technique; - source.Item = names; - return source; - } + var technique = MakeAccessor(type, components, 1, values.Length / components.Length, names.id); + source.technique_common = technique; + source.Item = names; + return source; } } diff --git a/LSLib/Granny/ColladaAnimation.cs b/LSLib/Granny/ColladaAnimation.cs 
index 12705cbb..e91f5f37 100644 --- a/LSLib/Granny/ColladaAnimation.cs +++ b/LSLib/Granny/ColladaAnimation.cs @@ -5,169 +5,168 @@ using LSLib.Granny.Model; using OpenTK.Mathematics; -namespace LSLib.Granny +namespace LSLib.Granny; + +public class ColladaAnimation { - public class ColladaAnimation - { - private animation Animation; - private Dictionary Sources; - private List Transforms; - private List Times; - private string BoneName; + private animation Animation; + private Dictionary Sources; + private List Transforms; + private List Times; + private string BoneName; - public Single Duration - { - get { return Times.Last(); } - } + public Single Duration + { + get { return Times.Last(); } + } - private void ImportSources() + private void ImportSources() + { + Sources = []; + foreach (var item in Animation.Items) { - Sources = []; - foreach (var item in Animation.Items) + if (item is source) { - if (item is source) - { - var src = ColladaSource.FromCollada(item as source); - Sources.Add(src.id, src); - } + var src = ColladaSource.FromCollada(item as source); + Sources.Add(src.id, src); } } + } - private void ImportSampler() + private void ImportSampler() + { + sampler sampler = null; + foreach (var item in Animation.Items) { - sampler sampler = null; - foreach (var item in Animation.Items) + if (item is sampler) { - if (item is sampler) - { - sampler = item as sampler; - break; - } + sampler = item as sampler; + break; } + } - if (sampler == null) - throw new ParsingException("Animation " + Animation.id + " has no sampler!"); + if (sampler == null) + throw new ParsingException("Animation " + Animation.id + " has no sampler!"); - ColladaSource inputSource = null, outputSource = null, interpolationSource = null; - foreach (var input in sampler.input) - { - if (input.source[0] != '#') - throw new ParsingException("Only ID references are supported for animation input sources"); + ColladaSource inputSource = null, outputSource = null, interpolationSource = null; + foreach (var input in sampler.input) + { + if (input.source[0] != '#') + throw new ParsingException("Only ID references are supported for animation input sources"); - if (!Sources.TryGetValue(input.source.Substring(1), out ColladaSource source)) - throw new ParsingException("Animation sampler " + input.semantic + " references nonexistent source: " + input.source); + if (!Sources.TryGetValue(input.source.Substring(1), out ColladaSource source)) + throw new ParsingException("Animation sampler " + input.semantic + " references nonexistent source: " + input.source); - switch (input.semantic) - { - case "INPUT": - inputSource = source; - break; + switch (input.semantic) + { + case "INPUT": + inputSource = source; + break; - case "OUTPUT": - outputSource = source; - break; + case "OUTPUT": + outputSource = source; + break; - case "INTERPOLATION": - interpolationSource = source; - break; + case "INTERPOLATION": + interpolationSource = source; + break; - default: - break; - } + default: + break; } + } - if (inputSource == null || outputSource == null || interpolationSource == null) - throw new ParsingException("Animation " + Animation.id + " must have an INPUT, OUTPUT and INTERPOLATION sampler input!"); + if (inputSource == null || outputSource == null || interpolationSource == null) + throw new ParsingException("Animation " + Animation.id + " must have an INPUT, OUTPUT and INTERPOLATION sampler input!"); - if (!inputSource.FloatParams.TryGetValue("TIME", out Times)) - Times = inputSource.FloatParams.Values.SingleOrDefault(); + if 
(!inputSource.FloatParams.TryGetValue("TIME", out Times)) + Times = inputSource.FloatParams.Values.SingleOrDefault(); - if (Times == null) - throw new ParsingException("Animation " + Animation.id + " INPUT must have a TIME parameter!"); + if (Times == null) + throw new ParsingException("Animation " + Animation.id + " INPUT must have a TIME parameter!"); - if (!outputSource.MatrixParams.TryGetValue("TRANSFORM", out Transforms)) - Transforms = outputSource.MatrixParams.Values.SingleOrDefault(); + if (!outputSource.MatrixParams.TryGetValue("TRANSFORM", out Transforms)) + Transforms = outputSource.MatrixParams.Values.SingleOrDefault(); - if (Transforms == null) - throw new ParsingException("Animation " + Animation.id + " OUTPUT must have a TRANSFORM parameter!"); + if (Transforms == null) + throw new ParsingException("Animation " + Animation.id + " OUTPUT must have a TRANSFORM parameter!"); - if (Transforms.Count != Times.Count) - throw new ParsingException("Animation " + Animation.id + " has different time and transform counts!"); + if (Transforms.Count != Times.Count) + throw new ParsingException("Animation " + Animation.id + " has different time and transform counts!"); - for (var i = 0; i < Transforms.Count; i++ ) - { - var m = Transforms[i]; - m.Transpose(); - Transforms[i] = m; - } + for (var i = 0; i < Transforms.Count; i++ ) + { + var m = Transforms[i]; + m.Transpose(); + Transforms[i] = m; } + } - private void ImportChannel(Skeleton skeleton) + private void ImportChannel(Skeleton skeleton) + { + channel channel = null; + foreach (var item in Animation.Items) { - channel channel = null; - foreach (var item in Animation.Items) + if (item is channel) { - if (item is channel) - { - channel = item as channel; - break; - } + channel = item as channel; + break; } + } - if (channel == null) - throw new ParsingException("Animation " + Animation.id + " has no channel!"); - - var parts = channel.target.Split(['/']); - if (parts.Length != 2) - throw new ParsingException("Unsupported channel target format: " + channel.target); - - if (skeleton != null) - { - if (!skeleton.BonesByID.TryGetValue(parts[0], out Bone bone)) - throw new ParsingException("Animation channel references nonexistent bone: " + parts[0]); - - if (bone.TransformSID != parts[1]) - throw new ParsingException("Animation channel references nonexistent transform or transform is not float4x4: " + channel.target); + if (channel == null) + throw new ParsingException("Animation " + Animation.id + " has no channel!"); - BoneName = bone.Name; - } - else - { - BoneName = parts[0]; - } - } + var parts = channel.target.Split(['/']); + if (parts.Length != 2) + throw new ParsingException("Unsupported channel target format: " + channel.target); - public bool ImportFromCollada(animation colladaAnim, Skeleton skeleton) + if (skeleton != null) { - Animation = colladaAnim; - ImportSources(); - ImportSampler(); + if (!skeleton.BonesByID.TryGetValue(parts[0], out Bone bone)) + throw new ParsingException("Animation channel references nonexistent bone: " + parts[0]); - // Avoid importing empty animations - if (Transforms.Count == 0) - return false; + if (bone.TransformSID != parts[1]) + throw new ParsingException("Animation channel references nonexistent transform or transform is not float4x4: " + channel.target); - ImportChannel(skeleton); - return true; + BoneName = bone.Name; } - - public TransformTrack MakeTrack(bool removeTrivialKeys) + else { - var keyframes = KeyframeTrack.FromMatrices(Times, Transforms); + BoneName = parts[0]; + } + } - if 
(removeTrivialKeys) - { - keyframes.RemoveTrivialTranslations(); - keyframes.RemoveTrivialRotations(); - keyframes.RemoveTrivialScales(); - keyframes.RemoveTrivialFrames(); - } + public bool ImportFromCollada(animation colladaAnim, Skeleton skeleton) + { + Animation = colladaAnim; + ImportSources(); + ImportSampler(); - var track = TransformTrack.FromKeyframes(keyframes); - track.Flags = 0; - track.Name = BoneName; + // Avoid importing empty animations + if (Transforms.Count == 0) + return false; - return track; + ImportChannel(skeleton); + return true; + } + + public TransformTrack MakeTrack(bool removeTrivialKeys) + { + var keyframes = KeyframeTrack.FromMatrices(Times, Transforms); + + if (removeTrivialKeys) + { + keyframes.RemoveTrivialTranslations(); + keyframes.RemoveTrivialRotations(); + keyframes.RemoveTrivialScales(); + keyframes.RemoveTrivialFrames(); } + + var track = TransformTrack.FromKeyframes(keyframes); + track.Flags = 0; + track.Name = BoneName; + + return track; } } diff --git a/LSLib/Granny/GR2/Format.cs b/LSLib/Granny/GR2/Format.cs index 4caae727..d4c20cd6 100644 --- a/LSLib/Granny/GR2/Format.cs +++ b/LSLib/Granny/GR2/Format.cs @@ -7,1213 +7,1212 @@ using System.IO; using System.Reflection; -namespace LSLib.Granny.GR2 +namespace LSLib.Granny.GR2; + +public class GrannyString { - public class GrannyString + public String String; + + public GrannyString() { - public String String; + } - public GrannyString() - { - } + public GrannyString(String s) + { + String = s; + } + + public override string ToString() + { + return String; + } +} + +public class Transform +{ + public enum TransformFlags : uint + { + HasTranslation = 0x01, + HasRotation = 0x02, + HasScaleShear = 0x04 + } + + public UInt32 Flags = 0; + public Vector3 Translation = Vector3.Zero; + public Quaternion Rotation = Quaternion.Identity; + public Matrix3 ScaleShear = Matrix3.Identity; - public GrannyString(String s) + public bool HasTranslation + { + get { return ((Flags & (uint)TransformFlags.HasTranslation) != 0); } + } + + public bool HasRotation + { + get { return ((Flags & (uint)TransformFlags.HasRotation) != 0); } + } + + public bool HasScaleShear + { + get { return ((Flags & (uint)TransformFlags.HasScaleShear) != 0); } + } + + public void SetTranslation(Vector3 translation) + { + if (translation.Length > 0.0001f) { - String = s; + Translation = translation; + Flags |= (uint)TransformFlags.HasTranslation; } - - public override string ToString() + else { - return String; + Translation = Vector3.Zero; + Flags &= ~(uint)TransformFlags.HasTranslation; } } - public class Transform + public void SetRotation(Quaternion rotation) { - public enum TransformFlags : uint + if (rotation.Length > 0.0001f + && (Math.Abs(rotation.X) >= 0.001f + || Math.Abs(rotation.Y) >= 0.001f + || Math.Abs(rotation.Z) >= 0.001f)) { - HasTranslation = 0x01, - HasRotation = 0x02, - HasScaleShear = 0x04 + Rotation = rotation; + Flags |= (uint)TransformFlags.HasRotation; } - - public UInt32 Flags = 0; - public Vector3 Translation = Vector3.Zero; - public Quaternion Rotation = Quaternion.Identity; - public Matrix3 ScaleShear = Matrix3.Identity; - - public bool HasTranslation + else { - get { return ((Flags & (uint)TransformFlags.HasTranslation) != 0); } + Rotation = Quaternion.Identity; + Flags &= ~(uint)TransformFlags.HasRotation; } + } - public bool HasRotation + public void SetScale(Vector3 scale) + { + ScaleShear = Matrix3.Identity; + if ((scale - Vector3.One).Length > 0.0001f) { - get { return ((Flags & (uint)TransformFlags.HasRotation) 
!= 0); } + ScaleShear[0, 0] = scale[0]; + ScaleShear[1, 1] = scale[1]; + ScaleShear[2, 2] = scale[2]; + Flags |= (uint)TransformFlags.HasScaleShear; } - - public bool HasScaleShear + else { - get { return ((Flags & (uint)TransformFlags.HasScaleShear) != 0); } + Flags &= ~(uint)TransformFlags.HasScaleShear; } + } - public void SetTranslation(Vector3 translation) + public void SetScaleShear(Matrix3 scaleShear) + { + if ((scaleShear.Diagonal - Vector3.One).Length > 0.0001f) { - if (translation.Length > 0.0001f) - { - Translation = translation; - Flags |= (uint)TransformFlags.HasTranslation; - } - else - { - Translation = Vector3.Zero; - Flags &= ~(uint)TransformFlags.HasTranslation; - } + ScaleShear = scaleShear; + Flags |= (uint)TransformFlags.HasScaleShear; } - - public void SetRotation(Quaternion rotation) + else { - if (rotation.Length > 0.0001f - && (Math.Abs(rotation.X) >= 0.001f - || Math.Abs(rotation.Y) >= 0.001f - || Math.Abs(rotation.Z) >= 0.001f)) - { - Rotation = rotation; - Flags |= (uint)TransformFlags.HasRotation; - } - else - { - Rotation = Quaternion.Identity; - Flags &= ~(uint)TransformFlags.HasRotation; - } + Flags &= ~(uint)TransformFlags.HasScaleShear; } + } - public void SetScale(Vector3 scale) + public static Transform FromMatrix4(Matrix4 mat) + { + var transform = new Transform(); + transform.SetTranslation(mat.ExtractTranslation()); + transform.SetRotation(mat.ExtractRotation()); + transform.SetScale(mat.ExtractScale()); + return transform; + } + + public Matrix4 ToMatrix4Composite() + { + Matrix3 transform3 = Matrix3.CreateFromQuaternion(Rotation); + + if (HasScaleShear) { - ScaleShear = Matrix3.Identity; - if ((scale - Vector3.One).Length > 0.0001f) - { - ScaleShear[0, 0] = scale[0]; - ScaleShear[1, 1] = scale[1]; - ScaleShear[2, 2] = scale[2]; - Flags |= (uint)TransformFlags.HasScaleShear; - } - else - { - Flags &= ~(uint)TransformFlags.HasScaleShear; - } + transform3 = ScaleShear * transform3; } - public void SetScaleShear(Matrix3 scaleShear) + Matrix4 transform = Matrix4.Identity; + transform[0, 0] = transform3[0, 0]; + transform[0, 1] = transform3[0, 1]; + transform[0, 2] = transform3[0, 2]; + transform[1, 0] = transform3[1, 0]; + transform[1, 1] = transform3[1, 1]; + transform[1, 2] = transform3[1, 2]; + transform[2, 0] = transform3[2, 0]; + transform[2, 1] = transform3[2, 1]; + transform[2, 2] = transform3[2, 2]; + + transform[3, 0] = Translation[0]; + transform[3, 1] = Translation[1]; + transform[3, 2] = Translation[2]; + + return transform; + } + + public Matrix4 ToMatrix4() + { + Matrix4 transform = Matrix4.Identity; + if (HasTranslation) { - if ((scaleShear.Diagonal - Vector3.One).Length > 0.0001f) - { - ScaleShear = scaleShear; - Flags |= (uint)TransformFlags.HasScaleShear; - } - else - { - Flags &= ~(uint)TransformFlags.HasScaleShear; - } + transform = Matrix4.CreateTranslation(Translation); } - public static Transform FromMatrix4(Matrix4 mat) + if (HasRotation) { - var transform = new Transform(); - transform.SetTranslation(mat.ExtractTranslation()); - transform.SetRotation(mat.ExtractRotation()); - transform.SetScale(mat.ExtractScale()); - return transform; + transform = Matrix4.CreateFromQuaternion(Rotation) * transform; } - public Matrix4 ToMatrix4Composite() + if (HasScaleShear) { - Matrix3 transform3 = Matrix3.CreateFromQuaternion(Rotation); - - if (HasScaleShear) + Matrix4 scaleShear = Matrix4.Identity; + for (var i = 0; i < 3; i++) { - transform3 = ScaleShear * transform3; + for (var j = 0; j < 3; j++) + scaleShear[i, j] = ScaleShear[i, j]; } - 
Matrix4 transform = Matrix4.Identity; - transform[0, 0] = transform3[0, 0]; - transform[0, 1] = transform3[0, 1]; - transform[0, 2] = transform3[0, 2]; - transform[1, 0] = transform3[1, 0]; - transform[1, 1] = transform3[1, 1]; - transform[1, 2] = transform3[1, 2]; - transform[2, 0] = transform3[2, 0]; - transform[2, 1] = transform3[2, 1]; - transform[2, 2] = transform3[2, 2]; - - transform[3, 0] = Translation[0]; - transform[3, 1] = Translation[1]; - transform[3, 2] = Translation[2]; - - return transform; + transform = scaleShear * transform; } + + return transform; + } - public Matrix4 ToMatrix4() - { - Matrix4 transform = Matrix4.Identity; - if (HasTranslation) - { - transform = Matrix4.CreateTranslation(Translation); - } + public override string ToString() + { + return $"Rotation: ({Rotation.X}, {Rotation.Y}, {Rotation.Z}); Translation: ({Translation.X}, {Translation.Y}, {Translation.Z}); Scale: ({ScaleShear[0, 0]}, {ScaleShear[1, 1]}, {ScaleShear[2, 2]})"; + } +} - if (HasRotation) - { - transform = Matrix4.CreateFromQuaternion(Rotation) * transform; - } +/// +/// All Granny files start with this magic structure that defines endianness, bitness and header format. +/// +public class Magic +{ + /// + /// Magic value used for version 7 little-endian 32-bit Granny files + /// + private static readonly byte[] LittleEndian32Magic = [0x29, 0xDE, 0x6C, 0xC0, 0xBA, 0xA4, 0x53, 0x2B, 0x25, 0xF5, 0xB7, 0xA5, 0xF6, 0x66, 0xE2, 0xEE]; - if (HasScaleShear) - { - Matrix4 scaleShear = Matrix4.Identity; - for (var i = 0; i < 3; i++) - { - for (var j = 0; j < 3; j++) - scaleShear[i, j] = ScaleShear[i, j]; - } + /// + /// Magic value used for version 7 little-endian 32-bit Granny files + /// + private static readonly byte[] LittleEndian32Magic2 = [0x29, 0x75, 0x31, 0x82, 0xBA, 0x02, 0x11, 0x77, 0x25, 0x3A, 0x60, 0x2F, 0xF6, 0x6A, 0x8C, 0x2E]; + + /// + /// Magic value used for version 6 little-endian 32-bit Granny files + /// + private static readonly byte[] LittleEndian32MagicV6 = [0xB8, 0x67, 0xB0, 0xCA, 0xF8, 0x6D, 0xB1, 0x0F, 0x84, 0x72, 0x8C, 0x7E, 0x5E, 0x19, 0x00, 0x1E]; - transform = scaleShear * transform; - } - - return transform; - } + /// + /// Magic value used for version 7 big-endian 32-bit Granny files + /// + private static readonly byte[] BigEndian32Magic = [0x0E, 0x11, 0x95, 0xB5, 0x6A, 0xA5, 0xB5, 0x4B, 0xEB, 0x28, 0x28, 0x50, 0x25, 0x78, 0xB3, 0x04]; - public override string ToString() - { - return $"Rotation: ({Rotation.X}, {Rotation.Y}, {Rotation.Z}); Translation: ({Translation.X}, {Translation.Y}, {Translation.Z}); Scale: ({ScaleShear[0, 0]}, {ScaleShear[1, 1]}, {ScaleShear[2, 2]})"; - } + /// + /// Magic value used for version 7 big-endian 32-bit Granny files + /// + private static readonly byte[] BigEndian32Magic2 = [0x0E, 0x74, 0xA2, 0x0A, 0x6A, 0xEB, 0xEB, 0x64, 0xEB, 0x4E, 0x1E, 0xAB, 0x25, 0x91, 0xDB, 0x8F]; + + /// + /// Magic value used for version 7 little-endian 64-bit Granny files + /// + private static readonly byte[] LittleEndian64Magic = [0xE5, 0x9B, 0x49, 0x5E, 0x6F, 0x63, 0x1F, 0x14, 0x1E, 0x13, 0xEB, 0xA9, 0x90, 0xBE, 0xED, 0xC4]; + + /// + /// Magic value used for version 7 little-endian 64-bit Granny files + /// + private static readonly byte[] LittleEndian64Magic2 = [0xE5, 0x2F, 0x4A, 0xE1, 0x6F, 0xC2, 0x8A, 0xEE, 0x1E, 0xD2, 0xB4, 0x4C, 0x90, 0xD7, 0x55, 0xAF]; + + /// + /// Magic value used for version 7 big-endian 64-bit Granny files + /// + private static readonly byte[] BigEndian64Magic = [0x31, 0x95, 0xD4, 0xE3, 0x20, 0xDC, 0x4F, 0x62, 0xCC, 0x36, 0xD0, 
0x3A, 0xB1, 0x82, 0xFF, 0x89]; + + /// + /// Magic value used for version 7 big-endian 64-bit Granny files + /// + private static readonly byte[] BigEndian64Magic2 = [0x31, 0xC2, 0x4E, 0x7C, 0x20, 0x40, 0xA3, 0x25, 0xCC, 0xE1, 0xC2, 0x7A, 0xB1, 0x32, 0x49, 0xF3]; + + /// + /// Size of magic value structure, in bytes + /// + public const UInt32 MagicSize = 0x20; + + /// + /// Defines endianness and address size + /// + public enum Format + { + LittleEndian32, + BigEndian32, + LittleEndian64, + BigEndian64 + }; + + /// + /// Indicates the 32-bitness of the GR2 file. + /// + public bool Is32Bit + { + get { return format == Format.LittleEndian32 || format == Format.BigEndian32; } } /// - /// All Granny files start with this magic structure that defines endianness, bitness and header format. + /// Indicates the 64-bitness of the GR2 file. /// - public class Magic + public bool Is64Bit { - /// - /// Magic value used for version 7 little-endian 32-bit Granny files - /// - private static readonly byte[] LittleEndian32Magic = [0x29, 0xDE, 0x6C, 0xC0, 0xBA, 0xA4, 0x53, 0x2B, 0x25, 0xF5, 0xB7, 0xA5, 0xF6, 0x66, 0xE2, 0xEE]; - - /// - /// Magic value used for version 7 little-endian 32-bit Granny files - /// - private static readonly byte[] LittleEndian32Magic2 = [0x29, 0x75, 0x31, 0x82, 0xBA, 0x02, 0x11, 0x77, 0x25, 0x3A, 0x60, 0x2F, 0xF6, 0x6A, 0x8C, 0x2E]; - - /// - /// Magic value used for version 6 little-endian 32-bit Granny files - /// - private static readonly byte[] LittleEndian32MagicV6 = [0xB8, 0x67, 0xB0, 0xCA, 0xF8, 0x6D, 0xB1, 0x0F, 0x84, 0x72, 0x8C, 0x7E, 0x5E, 0x19, 0x00, 0x1E]; - - /// - /// Magic value used for version 7 big-endian 32-bit Granny files - /// - private static readonly byte[] BigEndian32Magic = [0x0E, 0x11, 0x95, 0xB5, 0x6A, 0xA5, 0xB5, 0x4B, 0xEB, 0x28, 0x28, 0x50, 0x25, 0x78, 0xB3, 0x04]; - - /// - /// Magic value used for version 7 big-endian 32-bit Granny files - /// - private static readonly byte[] BigEndian32Magic2 = [0x0E, 0x74, 0xA2, 0x0A, 0x6A, 0xEB, 0xEB, 0x64, 0xEB, 0x4E, 0x1E, 0xAB, 0x25, 0x91, 0xDB, 0x8F]; - - /// - /// Magic value used for version 7 little-endian 64-bit Granny files - /// - private static readonly byte[] LittleEndian64Magic = [0xE5, 0x9B, 0x49, 0x5E, 0x6F, 0x63, 0x1F, 0x14, 0x1E, 0x13, 0xEB, 0xA9, 0x90, 0xBE, 0xED, 0xC4]; - - /// - /// Magic value used for version 7 little-endian 64-bit Granny files - /// - private static readonly byte[] LittleEndian64Magic2 = [0xE5, 0x2F, 0x4A, 0xE1, 0x6F, 0xC2, 0x8A, 0xEE, 0x1E, 0xD2, 0xB4, 0x4C, 0x90, 0xD7, 0x55, 0xAF]; - - /// - /// Magic value used for version 7 big-endian 64-bit Granny files - /// - private static readonly byte[] BigEndian64Magic = [0x31, 0x95, 0xD4, 0xE3, 0x20, 0xDC, 0x4F, 0x62, 0xCC, 0x36, 0xD0, 0x3A, 0xB1, 0x82, 0xFF, 0x89]; - - /// - /// Magic value used for version 7 big-endian 64-bit Granny files - /// - private static readonly byte[] BigEndian64Magic2 = [0x31, 0xC2, 0x4E, 0x7C, 0x20, 0x40, 0xA3, 0x25, 0xCC, 0xE1, 0xC2, 0x7A, 0xB1, 0x32, 0x49, 0xF3]; - - /// - /// Size of magic value structure, in bytes - /// - public const UInt32 MagicSize = 0x20; - - /// - /// Defines endianness and address size - /// - public enum Format - { - LittleEndian32, - BigEndian32, - LittleEndian64, - BigEndian64 - }; + get { return format == Format.LittleEndian64 || format == Format.BigEndian64; } + } - /// - /// Indicates the 32-bitness of the GR2 file. 
- /// - public bool Is32Bit - { - get { return format == Format.LittleEndian32 || format == Format.BigEndian32; } - } + /// + /// Indicates the endianness of the GR2 file. + /// + public bool IsLittleEndian + { + get { return format == Format.LittleEndian32 || format == Format.LittleEndian64; } + } - /// - /// Indicates the 64-bitness of the GR2 file. - /// - public bool Is64Bit - { - get { return format == Format.LittleEndian64 || format == Format.BigEndian64; } - } + /// + /// 16-byte long file signature, one of the *Magic constants. + /// + public byte[] signature; + /// + /// Size of file header; offset of the start of section data + /// + public UInt32 headersSize; + /// + /// Header format (0 = uncompressed, 1-2 = Oodle0/1 ?) + /// + public UInt32 headerFormat; + /// + /// Reserved field + /// + public UInt32 reserved1; + /// + /// Reserved field + /// + public UInt32 reserved2; - /// - /// Indicates the endianness of the GR2 file. - /// - public bool IsLittleEndian - { - get { return format == Format.LittleEndian32 || format == Format.LittleEndian64; } - } + /// + /// Endianness and address size of the file (derived from the signature) + /// + public Format format; - /// - /// 16-byte long file signature, one of the *Magic constants. - /// - public byte[] signature; - /// - /// Size of file header; offset of the start of section data - /// - public UInt32 headersSize; - /// - /// Header format (0 = uncompressed, 1-2 = Oodle0/1 ?) - /// - public UInt32 headerFormat; - /// - /// Reserved field - /// - public UInt32 reserved1; - /// - /// Reserved field - /// - public UInt32 reserved2; - - /// - /// Endianness and address size of the file (derived from the signature) - /// - public Format format; - - public static Format FormatFromSignature(byte[] sig) - { - if (sig.SequenceEqual(LittleEndian32Magic) || sig.SequenceEqual(LittleEndian32Magic2) || sig.SequenceEqual(LittleEndian32MagicV6)) - return Format.LittleEndian32; + public static Format FormatFromSignature(byte[] sig) + { + if (sig.SequenceEqual(LittleEndian32Magic) || sig.SequenceEqual(LittleEndian32Magic2) || sig.SequenceEqual(LittleEndian32MagicV6)) + return Format.LittleEndian32; - if (sig.SequenceEqual(BigEndian32Magic) || sig.SequenceEqual(BigEndian32Magic2)) - return Format.BigEndian32; + if (sig.SequenceEqual(BigEndian32Magic) || sig.SequenceEqual(BigEndian32Magic2)) + return Format.BigEndian32; - if (sig.SequenceEqual(LittleEndian64Magic) || sig.SequenceEqual(LittleEndian64Magic2)) - return Format.LittleEndian64; + if (sig.SequenceEqual(LittleEndian64Magic) || sig.SequenceEqual(LittleEndian64Magic2)) + return Format.LittleEndian64; - if (sig.SequenceEqual(BigEndian64Magic) || sig.SequenceEqual(BigEndian64Magic2)) - return Format.BigEndian64; + if (sig.SequenceEqual(BigEndian64Magic) || sig.SequenceEqual(BigEndian64Magic2)) + return Format.BigEndian64; - throw new ParsingException("Incorrect header signature (maybe not a Granny .GR2 file?)"); - } + throw new ParsingException("Incorrect header signature (maybe not a Granny .GR2 file?)"); + } + + public static byte[] SignatureFromFormat(Format format) + { + return format switch + { + Format.LittleEndian32 => LittleEndian32Magic, + Format.LittleEndian64 => LittleEndian64Magic, + Format.BigEndian32 => BigEndian32Magic, + Format.BigEndian64 => BigEndian64Magic, + _ => throw new ArgumentException(), + }; + } + + public void SetFormat(Format format, bool alternateSignature) + { + this.format = format; - public static byte[] SignatureFromFormat(Format format) + if 
(alternateSignature) + { + this.signature = format switch + { + Format.LittleEndian32 => LittleEndian32Magic2, + Format.LittleEndian64 => LittleEndian64Magic2, + Format.BigEndian32 => BigEndian32Magic2, + Format.BigEndian64 => BigEndian64Magic2 + }; + } + else { - return format switch + this.signature = format switch { Format.LittleEndian32 => LittleEndian32Magic, Format.LittleEndian64 => LittleEndian64Magic, Format.BigEndian32 => BigEndian32Magic, - Format.BigEndian64 => BigEndian64Magic, - _ => throw new ArgumentException(), + Format.BigEndian64 => BigEndian64Magic }; } + } +} - public void SetFormat(Format format, bool alternateSignature) - { - this.format = format; +public class Header +{ + /// + /// Default GR2 tag used for serialization (D:OS) + /// + public const UInt32 DefaultTag = 0x80000037; - if (alternateSignature) - { - this.signature = format switch - { - Format.LittleEndian32 => LittleEndian32Magic2, - Format.LittleEndian64 => LittleEndian64Magic2, - Format.BigEndian32 => BigEndian32Magic2, - Format.BigEndian64 => BigEndian64Magic2 - }; - } - else - { - this.signature = format switch - { - Format.LittleEndian32 => LittleEndian32Magic, - Format.LittleEndian64 => LittleEndian64Magic, - Format.BigEndian32 => BigEndian32Magic, - Format.BigEndian64 => BigEndian64Magic - }; - } - } - } + /// + /// D:OS vanilla version tag + /// + public const UInt32 Tag_DOS = 0x80000037; - public class Header - { - /// - /// Default GR2 tag used for serialization (D:OS) - /// - public const UInt32 DefaultTag = 0x80000037; - - /// - /// D:OS vanilla version tag - /// - public const UInt32 Tag_DOS = 0x80000037; - - /// - /// D:OS EE version tag - /// - public const UInt32 Tag_DOSEE = 0x80000039; - - /// - /// D:OS:2 DE LSM version tag - /// - public const UInt32 Tag_DOS2DE = 0xE57F0039; - - /// - /// Granny file format we support for writing (currently only version 7) - /// - public const UInt32 Version = 7; - /// - /// Size of header structure for V6 headers, in bytes - /// - public const UInt32 HeaderSize_V6 = 0x38; - /// - /// Size of header structure for V7 headers, in bytes - /// - public const UInt32 HeaderSize_V7 = 0x48; - /// - /// Number of user-defined tags in the header - /// - public const UInt32 ExtraTagCount = 4; - - /// - /// File format version; should be Header.Version - /// - public UInt32 version; - /// - /// Total size of .GR2 file, including headers - /// - public UInt32 fileSize; - /// - /// CRC-32 hash of the data starting after the header (offset = HeaderSize) to the end of the file (Header.fileSize - HeaderSize bytes) - /// - public UInt32 crc; - /// - /// Offset of the section list relative to the beginning of the file - /// - public UInt32 sectionsOffset; - /// - /// Number of Sections in the .GR2 file - /// - public UInt32 numSections; - /// - /// Reference to the type descriptor of the root element in the hierarchy - /// - public SectionReference rootType; - public SectionReference rootNode; - /// - /// File format version tag - /// - public UInt32 tag; - /// - /// Extra application-defined tags - /// - public UInt32[] extraTags; - /// - /// CRC of string table; seems to be unused? 
- /// - public UInt32 stringTableCrc; - public UInt32 reserved1; - public UInt32 reserved2; - public UInt32 reserved3; - - public UInt32 Size() - { - var headerSize = version switch - { - 6 => HeaderSize_V6, - 7 => HeaderSize_V7, - _ => throw new InvalidDataException("Cannot calculate CRC for unknown header versions."), - }; - return headerSize; - } + /// + /// D:OS EE version tag + /// + public const UInt32 Tag_DOSEE = 0x80000039; - public UInt32 CalculateCRC(Stream stream) - { - var originalPos = stream.Position; - var totalHeaderSize = Size() + Magic.MagicSize; - stream.Seek(totalHeaderSize, SeekOrigin.Begin); - byte[] body = new byte[fileSize - totalHeaderSize]; - stream.Read(body, 0, (int)(fileSize - totalHeaderSize)); - UInt32 crc = Native.Crc32.Compute(body, 0); - stream.Seek(originalPos, SeekOrigin.Begin); - return crc; - } - }; + /// + /// D:OS:2 DE LSM version tag + /// + public const UInt32 Tag_DOS2DE = 0xE57F0039; - public enum SectionType : uint - { - Main = 0, - TrackGroup = 1, - Skeleton = 2, - Mesh = 3, - StructDefinitions = 4, - FirstVertexData = 5, - Invalid = 0xffffffff - }; + /// + /// Granny file format we support for writing (currently only version 7) + /// + public const UInt32 Version = 7; + /// + /// Size of header structure for V6 headers, in bytes + /// + public const UInt32 HeaderSize_V6 = 0x38; + /// + /// Size of header structure for V7 headers, in bytes + /// + public const UInt32 HeaderSize_V7 = 0x48; + /// + /// Number of user-defined tags in the header + /// + public const UInt32 ExtraTagCount = 4; - public class SectionHeader - { - /// - /// Type of compression used; 0 = no compression; 1-2 = Oodle 1/2 compression - /// - public UInt32 compression; - /// - /// Absolute position of the section data in the GR2 file - /// - public UInt32 offsetInFile; - /// - /// Uncompressed size of section data - /// - public UInt32 compressedSize; - /// - /// Compressed size of section data - /// - public UInt32 uncompressedSize; - public UInt32 alignment; - /// - /// Oodle1 compressor stops - /// - public UInt32 first16bit; - public UInt32 first8bit; - /// - /// Absolute position of the relocation data in the GR2 file - /// - public UInt32 relocationsOffset; - /// - /// Number of relocations for this section - /// - public UInt32 numRelocations; - /// - /// Absolute position of the mixed-endianness marshalling data in the GR2 file - /// - public UInt32 mixedMarshallingDataOffset; - /// - /// Number of mixed-marshalling entries for this section - /// - public UInt32 numMixedMarshallingData; - }; + /// + /// File format version; should be Header.Version + /// + public UInt32 version; + /// + /// Total size of .GR2 file, including headers + /// + public UInt32 fileSize; + /// + /// CRC-32 hash of the data starting after the header (offset = HeaderSize) to the end of the file (Header.fileSize - HeaderSize bytes) + /// + public UInt32 crc; + /// + /// Offset of the section list relative to the beginning of the file + /// + public UInt32 sectionsOffset; + /// + /// Number of Sections in the .GR2 file + /// + public UInt32 numSections; + /// + /// Reference to the type descriptor of the root element in the hierarchy + /// + public SectionReference rootType; + public SectionReference rootNode; + /// + /// File format version tag + /// + public UInt32 tag; + /// + /// Extra application-defined tags + /// + public UInt32[] extraTags; + /// + /// CRC of string table; seems to be unused? 
+ /// + public UInt32 stringTableCrc; + public UInt32 reserved1; + public UInt32 reserved2; + public UInt32 reserved3; - public class Section + public UInt32 Size() { - public SectionHeader Header; + var headerSize = version switch + { + 6 => HeaderSize_V6, + 7 => HeaderSize_V7, + _ => throw new InvalidDataException("Cannot calculate CRC for unknown header versions."), + }; + return headerSize; } - public enum MemberType : uint + public UInt32 CalculateCRC(Stream stream) { - None = 0, - Inline = 1, - Reference = 2, - ReferenceToArray = 3, - ArrayOfReferences = 4, - VariantReference = 5, - ReferenceToVariantArray = 7, - String = 8, - Transform = 9, - Real32 = 10, - Int8 = 11, - UInt8 = 12, - BinormalInt8 = 13, - NormalUInt8 = 14, - Int16 = 15, - UInt16 = 16, - BinormalInt16 = 17, - NormalUInt16 = 18, - Int32 = 19, - UInt32 = 20, - Real16 = 21, - EmptyReference = 22, - Max = EmptyReference, - Invalid = 0xffffffff - }; + var originalPos = stream.Position; + var totalHeaderSize = Size() + Magic.MagicSize; + stream.Seek(totalHeaderSize, SeekOrigin.Begin); + byte[] body = new byte[fileSize - totalHeaderSize]; + stream.Read(body, 0, (int)(fileSize - totalHeaderSize)); + UInt32 crc = Native.Crc32.Compute(body, 0); + stream.Seek(originalPos, SeekOrigin.Begin); + return crc; + } +}; + +public enum SectionType : uint +{ + Main = 0, + TrackGroup = 1, + Skeleton = 2, + Mesh = 3, + StructDefinitions = 4, + FirstVertexData = 5, + Invalid = 0xffffffff +}; + +public class SectionHeader +{ + /// + /// Type of compression used; 0 = no compression; 1-2 = Oodle 1/2 compression + /// + public UInt32 compression; + /// + /// Absolute position of the section data in the GR2 file + /// + public UInt32 offsetInFile; + /// + /// Uncompressed size of section data + /// + public UInt32 compressedSize; + /// + /// Compressed size of section data + /// + public UInt32 uncompressedSize; + public UInt32 alignment; + /// + /// Oodle1 compressor stops + /// + public UInt32 first16bit; + public UInt32 first8bit; + /// + /// Absolute position of the relocation data in the GR2 file + /// + public UInt32 relocationsOffset; + /// + /// Number of relocations for this section + /// + public UInt32 numRelocations; + /// + /// Absolute position of the mixed-endianness marshalling data in the GR2 file + /// + public UInt32 mixedMarshallingDataOffset; + /// + /// Number of mixed-marshalling entries for this section + /// + public UInt32 numMixedMarshallingData; +}; + +public class Section +{ + public SectionHeader Header; +} + +public enum MemberType : uint +{ + None = 0, + Inline = 1, + Reference = 2, + ReferenceToArray = 3, + ArrayOfReferences = 4, + VariantReference = 5, + ReferenceToVariantArray = 7, + String = 8, + Transform = 9, + Real32 = 10, + Int8 = 11, + UInt8 = 12, + BinormalInt8 = 13, + NormalUInt8 = 14, + Int16 = 15, + UInt16 = 16, + BinormalInt16 = 17, + NormalUInt16 = 18, + Int32 = 19, + UInt32 = 20, + Real16 = 21, + EmptyReference = 22, + Max = EmptyReference, + Invalid = 0xffffffff +}; + +/// +/// Reference to an absolute address within the GR2 file +/// +public class SectionReference +{ + /// + /// Zero-based index of referenced section (0 .. 
Header.numSections - 1) + /// + public UInt32 Section = (UInt32)SectionType.Invalid; + + /// + /// Offset in bytes from the beginning of the section + /// + public UInt32 Offset = 0; /// - /// Reference to an absolute address within the GR2 file + /// Returns if the reference points to a valid address within the file /// - public class SectionReference + public bool IsValid { - /// - /// Zero-based index of referenced section (0 .. Header.numSections - 1) - /// - public UInt32 Section = (UInt32)SectionType.Invalid; - - /// - /// Offset in bytes from the beginning of the section - /// - public UInt32 Offset = 0; - - /// - /// Returns if the reference points to a valid address within the file - /// - public bool IsValid - { - get { return Section != (UInt32)SectionType.Invalid; } - } + get { return Section != (UInt32)SectionType.Invalid; } + } - public SectionReference() - { - } + public SectionReference() + { + } - public SectionReference(SectionType section, UInt32 offset) - { - Section = (UInt32)section; - Offset = offset; - } + public SectionReference(SectionType section, UInt32 offset) + { + Section = (UInt32)section; + Offset = offset; + } - public override bool Equals(object o) - { - if (o == null) - return false; + public override bool Equals(object o) + { + if (o == null) + return false; - return o is SectionReference reference && reference.Section == Section && reference.Offset == Offset; - } + return o is SectionReference reference && reference.Section == Section && reference.Offset == Offset; + } - public bool Equals(SectionReference reference) - { - return reference != null && reference.Section == Section && reference.Offset == Offset; - } + public bool Equals(SectionReference reference) + { + return reference != null && reference.Section == Section && reference.Offset == Offset; + } - public override int GetHashCode() - { - return (int)Section * 31 + (int)Offset * 31 * 23; - } + public override int GetHashCode() + { + return (int)Section * 31 + (int)Offset * 31 * 23; } +} +/// +/// A reference whose final section and offset is stored in the relocation map +/// +public class RelocatableReference +{ /// - /// A reference whose final section and offset is stored in the relocation map + /// Offset in bytes from the beginning of the section /// - public class RelocatableReference - { - /// - /// Offset in bytes from the beginning of the section - /// - public UInt64 Offset = 0; - - /// - /// Returns if the reference points to a valid address within the file - /// - public bool IsValid - { - get { return Offset != 0; } - } + public UInt64 Offset = 0; - public override bool Equals(object o) - { - if (o == null) - return false; + /// + /// Returns if the reference points to a valid address within the file + /// + public bool IsValid + { + get { return Offset != 0; } + } - return o is RelocatableReference reference && reference.Offset == Offset; - } + public override bool Equals(object o) + { + if (o == null) + return false; - public bool Equals(RelocatableReference reference) - { - return reference != null && reference.Offset == Offset; - } + return o is RelocatableReference reference && reference.Offset == Offset; + } - public override int GetHashCode() - { - return (int)Offset; - } + public bool Equals(RelocatableReference reference) + { + return reference != null && reference.Offset == Offset; } - /// - /// Absolute reference to a structure type definition within the GR2 file - /// - public class StructReference : RelocatableReference + public override int GetHashCode() { - // 
Cached type value for this reference - public StructDefinition Type; + return (int)Offset; + } +} - public StructDefinition Resolve(GR2Reader gr2) - { - Debug.Assert(IsValid); - // Type definitions use a 2-level cache - // First we'll check the reference itself, if it has a cached ref to the resolved type - // If it has, we have nothing to do +/// +/// Absolute reference to a structure type definition within the GR2 file +/// +public class StructReference : RelocatableReference +{ + // Cached type value for this reference + public StructDefinition Type; - // If the struct wasn't resolved yet, try the type definition cache - // When a type definition is read from the GR2 file, it is stored here using its definition address as a key - if (Type == null) - gr2.Types.TryGetValue(this, out Type); + public StructDefinition Resolve(GR2Reader gr2) + { + Debug.Assert(IsValid); + // Type definitions use a 2-level cache + // First we'll check the reference itself, if it has a cached ref to the resolved type + // If it has, we have nothing to do - if (Type == null) - { - // We haven't seen this type before, read its definition from the file and cache it + // If the struct wasn't resolved yet, try the type definition cache + // When a type definition is read from the GR2 file, it is stored here using its definition address as a key + if (Type == null) + gr2.Types.TryGetValue(this, out Type); + + if (Type == null) + { + // We haven't seen this type before, read its definition from the file and cache it #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" ===== Struct definition at {0:X8} ===== ", Offset)); + System.Console.WriteLine(String.Format(" ===== Struct definition at {0:X8} ===== ", Offset)); #endif - var originalPos = gr2.Stream.Position; - gr2.Seek(this); - Type = gr2.ReadStructDefinition(); - gr2.Stream.Seek(originalPos, SeekOrigin.Begin); - gr2.Types[this] = Type; - } - - return Type; + var originalPos = gr2.Stream.Position; + gr2.Seek(this); + Type = gr2.ReadStructDefinition(); + gr2.Stream.Seek(originalPos, SeekOrigin.Begin); + gr2.Types[this] = Type; } - // - // - // TODO: REWORK --- Move Read(reader), Resolve(reader), PreSave(writer), Save(writer) here! - // - // + return Type; } - /// - /// Absolute reference to a null-terminated string value within the GR2 file - /// - public class StringReference : RelocatableReference - { - // Cached string value for this reference - public string Value; + // + // + // TODO: REWORK --- Move Read(reader), Resolve(reader), PreSave(writer), Save(writer) here! 
+ // + // +} - public string Resolve(GR2Reader gr2) - { - Debug.Assert(IsValid); - // Don't use a global string cache here, as string constants are rarely referenced twice, - // unlike struct definitions - if (Value == null) - { - var originalPos = gr2.Stream.Position; - gr2.Seek(this); - Value = gr2.ReadString(); - gr2.Stream.Seek(originalPos, SeekOrigin.Begin); - } +/// +/// Absolute reference to a null-terminated string value within the GR2 file +/// +public class StringReference : RelocatableReference +{ + // Cached string value for this reference + public string Value; - return Value; + public string Resolve(GR2Reader gr2) + { + Debug.Assert(IsValid); + // Don't use a global string cache here, as string constants are rarely referenced twice, + // unlike struct definitions + if (Value == null) + { + var originalPos = gr2.Stream.Position; + gr2.Seek(this); + Value = gr2.ReadString(); + gr2.Stream.Seek(originalPos, SeekOrigin.Begin); } - } - /// - /// Absolute reference to an array of something (either indirect index references or structs) - /// - public class ArrayReference : RelocatableReference - { - /// - /// Number of items in this array - /// - public UInt32 Size; + return Value; } +} +/// +/// Absolute reference to an array of something (either indirect index references or structs) +/// +public class ArrayReference : RelocatableReference +{ /// - /// Absolute reference to an array of references + /// Number of items in this array /// - public class ArrayIndicesReference : ArrayReference - { - // Cached ref list for this reference - public List Items; + public UInt32 Size; +} + +/// +/// Absolute reference to an array of references +/// +public class ArrayIndicesReference : ArrayReference +{ + // Cached ref list for this reference + public List Items; - public List Resolve(GR2Reader gr2) + public List Resolve(GR2Reader gr2) + { + Debug.Assert(IsValid); + if (Items == null) { - Debug.Assert(IsValid); - if (Items == null) - { #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" (Reference list at {0:X8})", Offset)); + System.Console.WriteLine(String.Format(" (Reference list at {0:X8})", Offset)); #endif - var originalPos = gr2.Stream.Position; - gr2.Seek(this); - Items = []; - for (int i = 0; i < Size; i++) - { - Items.Add(gr2.ReadReference()); + var originalPos = gr2.Stream.Position; + gr2.Seek(this); + Items = []; + for (int i = 0; i < Size; i++) + { + Items.Add(gr2.ReadReference()); #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" {0:X8}", r.Offset)); + System.Console.WriteLine(String.Format(" {0:X8}", r.Offset)); #endif - } - gr2.Stream.Seek(originalPos, SeekOrigin.Begin); } - - return Items; + gr2.Stream.Seek(originalPos, SeekOrigin.Begin); } + + return Items; } +} + +public class MemberDefinition +{ + public const UInt32 ExtraTagCount = 3; - public class MemberDefinition + public MemberType Type = MemberType.Invalid; + public string Name; + public string GrannyName; + public StructReference Definition; + public UInt32 ArraySize; + /// + /// Extra application-defined data + /// + public UInt32[] Extra; + public UInt32 Unknown; + + // We need to keep a separate cached flag, as we can cache null fields as well + public bool HasCachedField = false; + public System.Reflection.FieldInfo CachedField; + + public NodeSerializer Serializer; + public VariantTypeSelector TypeSelector; + public SectionSelector SectionSelector; + public SerializationKind SerializationKind = SerializationKind.Builtin; + // Only available when writing a GR2 file! 
+ public StructDefinition WriteDefinition; + public SectionType PreferredSection = SectionType.Invalid; + /// + /// Should we save this member to the data area? + /// + public bool DataArea = false; + /// + /// The Granny type we should save when serializing this field + /// (Mainly used to provide a type definition for user-defined serializers) + /// + public Type Prototype; + /// + /// Minimum GR2 file version this member should be exported to + /// + public UInt32 MinVersion = 0; + /// + /// Maximum GR2 file version this member should be exported to + /// + public UInt32 MaxVersion = 0; + + public bool IsValid { - public const UInt32 ExtraTagCount = 3; - - public MemberType Type = MemberType.Invalid; - public string Name; - public string GrannyName; - public StructReference Definition; - public UInt32 ArraySize; - /// - /// Extra application-defined data - /// - public UInt32[] Extra; - public UInt32 Unknown; - - // We need to keep a separate cached flag, as we can cache null fields as well - public bool HasCachedField = false; - public System.Reflection.FieldInfo CachedField; - - public NodeSerializer Serializer; - public VariantTypeSelector TypeSelector; - public SectionSelector SectionSelector; - public SerializationKind SerializationKind = SerializationKind.Builtin; - // Only available when writing a GR2 file! - public StructDefinition WriteDefinition; - public SectionType PreferredSection = SectionType.Invalid; - /// - /// Should we save this member to the data area? - /// - public bool DataArea = false; - /// - /// The Granny type we should save when serializing this field - /// (Mainly used to provide a type definition for user-defined serializers) - /// - public Type Prototype; - /// - /// Minimum GR2 file version this member should be exported to - /// - public UInt32 MinVersion = 0; - /// - /// Maximum GR2 file version this member should be exported to - /// - public UInt32 MaxVersion = 0; - - public bool IsValid - { - get { return Type != (UInt32)MemberType.None; } - } + get { return Type != (UInt32)MemberType.None; } + } - public bool IsScalar - { - get { return Type > MemberType.ReferenceToVariantArray; } - } + public bool IsScalar + { + get { return Type > MemberType.ReferenceToVariantArray; } + } - public UInt32 Size(GR2Reader gr2) + public UInt32 Size(GR2Reader gr2) + { + return Type switch { - return Type switch - { - MemberType.Inline => Definition.Resolve(gr2).Size(gr2), + MemberType.Inline => Definition.Resolve(gr2).Size(gr2), - MemberType.Int8 => 1, - MemberType.BinormalInt8 => 1, - MemberType.UInt8 => 1, - MemberType.NormalUInt8 => 1, + MemberType.Int8 => 1, + MemberType.BinormalInt8 => 1, + MemberType.UInt8 => 1, + MemberType.NormalUInt8 => 1, - MemberType.Int16 => 2, - MemberType.BinormalInt16 => 2, - MemberType.UInt16 => 2, - MemberType.NormalUInt16 => 2, - MemberType.Real16 => 2, + MemberType.Int16 => 2, + MemberType.BinormalInt16 => 2, + MemberType.UInt16 => 2, + MemberType.NormalUInt16 => 2, + MemberType.Real16 => 2, - MemberType.Reference => gr2.Magic.Is32Bit ? 4u : 8, + MemberType.Reference => gr2.Magic.Is32Bit ? 4u : 8, - MemberType.String => 4, - MemberType.Real32 => 4, - MemberType.Int32 => 4, - MemberType.UInt32 => 4, + MemberType.String => 4, + MemberType.Real32 => 4, + MemberType.Int32 => 4, + MemberType.UInt32 => 4, - MemberType.VariantReference => gr2.Magic.Is32Bit ? 8u : 16, - MemberType.ArrayOfReferences => gr2.Magic.Is32Bit ? 8u : 12, - MemberType.ReferenceToArray => gr2.Magic.Is32Bit ? 
8u : 12, - MemberType.ReferenceToVariantArray => gr2.Magic.Is32Bit ? 12u : 20, + MemberType.VariantReference => gr2.Magic.Is32Bit ? 8u : 16, + MemberType.ArrayOfReferences => gr2.Magic.Is32Bit ? 8u : 12, + MemberType.ReferenceToArray => gr2.Magic.Is32Bit ? 8u : 12, + MemberType.ReferenceToVariantArray => gr2.Magic.Is32Bit ? 12u : 20, - MemberType.Transform => 17 * 4, + MemberType.Transform => 17 * 4, - _ => throw new ParsingException($"Unhandled member type: {Type}") - }; - } + _ => throw new ParsingException($"Unhandled member type: {Type}") + }; + } - public UInt32 MarshallingSize() + public UInt32 MarshallingSize() + { + switch (Type) { - switch (Type) - { - case MemberType.Inline: - case MemberType.Reference: - case MemberType.VariantReference: - case MemberType.EmptyReference: - return 0; - - case MemberType.Int8: - case MemberType.BinormalInt8: - case MemberType.UInt8: - case MemberType.NormalUInt8: - return 1; - - case MemberType.Int16: - case MemberType.BinormalInt16: - case MemberType.UInt16: - case MemberType.NormalUInt16: - case MemberType.Real16: - return 2; - - case MemberType.String: - case MemberType.Transform: - case MemberType.Real32: - case MemberType.Int32: - case MemberType.UInt32: - case MemberType.ReferenceToArray: - case MemberType.ArrayOfReferences: - case MemberType.ReferenceToVariantArray: - return 4; - - default: - throw new ParsingException(String.Format("Unhandled member type: {0}", Type.ToString())); - } + case MemberType.Inline: + case MemberType.Reference: + case MemberType.VariantReference: + case MemberType.EmptyReference: + return 0; + + case MemberType.Int8: + case MemberType.BinormalInt8: + case MemberType.UInt8: + case MemberType.NormalUInt8: + return 1; + + case MemberType.Int16: + case MemberType.BinormalInt16: + case MemberType.UInt16: + case MemberType.NormalUInt16: + case MemberType.Real16: + return 2; + + case MemberType.String: + case MemberType.Transform: + case MemberType.Real32: + case MemberType.Int32: + case MemberType.UInt32: + case MemberType.ReferenceToArray: + case MemberType.ArrayOfReferences: + case MemberType.ReferenceToVariantArray: + return 4; + + default: + throw new ParsingException(String.Format("Unhandled member type: {0}", Type.ToString())); } + } - public bool ShouldSerialize(UInt32 version) - { - return ((MinVersion == 0 || MinVersion <= version) && - (MaxVersion == 0 || MaxVersion >= version)); - } + public bool ShouldSerialize(UInt32 version) + { + return ((MinVersion == 0 || MinVersion <= version) && + (MaxVersion == 0 || MaxVersion >= version)); + } - private void LoadAttributes(FieldInfo info, GR2Writer writer) + private void LoadAttributes(FieldInfo info, GR2Writer writer) + { + var attrs = info.GetCustomAttributes(typeof(SerializationAttribute), true); + if (attrs.Length > 0) { - var attrs = info.GetCustomAttributes(typeof(SerializationAttribute), true); - if (attrs.Length > 0) - { - SerializationAttribute serialization = attrs[0] as SerializationAttribute; + SerializationAttribute serialization = attrs[0] as SerializationAttribute; - if (serialization.Section != SectionType.Invalid) - PreferredSection = serialization.Section; + if (serialization.Section != SectionType.Invalid) + PreferredSection = serialization.Section; - DataArea = serialization.DataArea; + DataArea = serialization.DataArea; - if (serialization.Type != MemberType.Invalid) - Type = serialization.Type; + if (serialization.Type != MemberType.Invalid) + Type = serialization.Type; - if (serialization.TypeSelector != null) - TypeSelector = 
Activator.CreateInstance(serialization.TypeSelector) as VariantTypeSelector; + if (serialization.TypeSelector != null) + TypeSelector = Activator.CreateInstance(serialization.TypeSelector) as VariantTypeSelector; - if (serialization.SectionSelector != null) - SectionSelector = Activator.CreateInstance(serialization.SectionSelector) as SectionSelector; + if (serialization.SectionSelector != null) + SectionSelector = Activator.CreateInstance(serialization.SectionSelector) as SectionSelector; - if (serialization.Serializer != null) - Serializer = Activator.CreateInstance(serialization.Serializer) as NodeSerializer; + if (serialization.Serializer != null) + Serializer = Activator.CreateInstance(serialization.Serializer) as NodeSerializer; - if (writer != null && serialization.Prototype != null) - WriteDefinition = writer.LookupStructDefinition(serialization.Prototype, serialization.Prototype); + if (writer != null && serialization.Prototype != null) + WriteDefinition = writer.LookupStructDefinition(serialization.Prototype, serialization.Prototype); - if (serialization.Name != null) - GrannyName = serialization.Name; + if (serialization.Name != null) + GrannyName = serialization.Name; - Prototype = serialization.Prototype; - SerializationKind = serialization.Kind; - ArraySize = serialization.ArraySize; - MinVersion = serialization.MinVersion; - MaxVersion = serialization.MaxVersion; - } + Prototype = serialization.Prototype; + SerializationKind = serialization.Kind; + ArraySize = serialization.ArraySize; + MinVersion = serialization.MinVersion; + MaxVersion = serialization.MaxVersion; } + } - public System.Reflection.FieldInfo LookupFieldInfo(object instance) - { - if (HasCachedField) - return CachedField; + public System.Reflection.FieldInfo LookupFieldInfo(object instance) + { + if (HasCachedField) + return CachedField; - var field = instance.GetType().GetField(Name); - AssignFieldInfo(field); - return field; - } + var field = instance.GetType().GetField(Name); + AssignFieldInfo(field); + return field; + } - public void AssignFieldInfo(FieldInfo field) - { - Debug.Assert(!HasCachedField); - CachedField = field; - HasCachedField = true; + public void AssignFieldInfo(FieldInfo field) + { + Debug.Assert(!HasCachedField); + CachedField = field; + HasCachedField = true; - if (field != null) - LoadAttributes(field, null); - } + if (field != null) + LoadAttributes(field, null); + } - public static MemberDefinition CreateFromFieldInfo(FieldInfo info, GR2Writer writer) - { - var member = new MemberDefinition(); - var type = info.FieldType; - member.Name = info.Name; - member.GrannyName = info.Name; - member.Extra = [0, 0, 0]; - member.CachedField = info; - member.HasCachedField = true; + public static MemberDefinition CreateFromFieldInfo(FieldInfo info, GR2Writer writer) + { + var member = new MemberDefinition(); + var type = info.FieldType; + member.Name = info.Name; + member.GrannyName = info.Name; + member.Extra = [0, 0, 0]; + member.CachedField = info; + member.HasCachedField = true; - member.LoadAttributes(info, writer); + member.LoadAttributes(info, writer); - if (type.IsArray && member.SerializationKind != SerializationKind.None) - { - if (member.ArraySize == 0) - throw new InvalidOperationException("SerializationAttribute.ArraySize must be set for fixed size arrays"); - type = type.GetElementType(); - } + if (type.IsArray && member.SerializationKind != SerializationKind.None) + { + if (member.ArraySize == 0) + throw new InvalidOperationException("SerializationAttribute.ArraySize must be set 
for fixed size arrays"); + type = type.GetElementType(); + } - if (member.Type == MemberType.Invalid) + if (member.Type == MemberType.Invalid) + { + if (type == typeof(SByte)) + member.Type = MemberType.Int8; + else if (type == typeof(Byte)) + member.Type = MemberType.UInt8; + else if (type == typeof(Int16)) + member.Type = MemberType.Int16; + else if (type == typeof(UInt16)) + member.Type = MemberType.UInt16; + else if (type == typeof(Int32)) + member.Type = MemberType.Int32; + else if (type == typeof(UInt32)) + member.Type = MemberType.UInt32; + else if (type == typeof(OpenTK.Mathematics.Half)) + member.Type = MemberType.Real16; + else if (type == typeof(Single)) + member.Type = MemberType.Real32; + else if (type == typeof(string)) + member.Type = MemberType.String; + else if (type == typeof(Transform)) + member.Type = MemberType.Transform; + else if (type == typeof(object) || type.IsAbstract || type.IsInterface) + member.Type = MemberType.VariantReference; + else if (type.GetInterfaces().Contains(typeof(IList))) + member.Type = MemberType.ReferenceToVariantArray; + else if (type.GetInterfaces().Contains(typeof(System.Collections.IList))) + member.Type = MemberType.ReferenceToArray; // or ArrayOfReferences? + else + member.Type = MemberType.Reference; // or Inline? + } + + if (member.SerializationKind != SerializationKind.None && member.WriteDefinition == null && writer != null) + { + if (member.Type == MemberType.Inline || member.Type == MemberType.Reference) { - if (type == typeof(SByte)) - member.Type = MemberType.Int8; - else if (type == typeof(Byte)) - member.Type = MemberType.UInt8; - else if (type == typeof(Int16)) - member.Type = MemberType.Int16; - else if (type == typeof(UInt16)) - member.Type = MemberType.UInt16; - else if (type == typeof(Int32)) - member.Type = MemberType.Int32; - else if (type == typeof(UInt32)) - member.Type = MemberType.UInt32; - else if (type == typeof(OpenTK.Mathematics.Half)) - member.Type = MemberType.Real16; - else if (type == typeof(Single)) - member.Type = MemberType.Real32; - else if (type == typeof(string)) - member.Type = MemberType.String; - else if (type == typeof(Transform)) - member.Type = MemberType.Transform; - else if (type == typeof(object) || type.IsAbstract || type.IsInterface) - member.Type = MemberType.VariantReference; - else if (type.GetInterfaces().Contains(typeof(IList))) - member.Type = MemberType.ReferenceToVariantArray; - else if (type.GetInterfaces().Contains(typeof(System.Collections.IList))) - member.Type = MemberType.ReferenceToArray; // or ArrayOfReferences? - else - member.Type = MemberType.Reference; // or Inline? 
+ member.WriteDefinition = writer.LookupStructDefinition(type, null); } - - if (member.SerializationKind != SerializationKind.None && member.WriteDefinition == null && writer != null) + else if (member.Type == MemberType.ReferenceToArray || member.Type == MemberType.ArrayOfReferences) { - if (member.Type == MemberType.Inline || member.Type == MemberType.Reference) - { - member.WriteDefinition = writer.LookupStructDefinition(type, null); - } - else if (member.Type == MemberType.ReferenceToArray || member.Type == MemberType.ArrayOfReferences) - { - member.WriteDefinition = writer.LookupStructDefinition(type.GetGenericArguments().Single(), null); - } + member.WriteDefinition = writer.LookupStructDefinition(type.GetGenericArguments().Single(), null); } + } - return member; - } + return member; } +} - public class StructDefinition +public class StructDefinition +{ + public Type Type; + public List Members = new List(); + /// + /// Should we do mixed marshalling on this struct? + /// + public bool MixedMarshal = false; + + public UInt32 Size(GR2Reader gr2) { - public Type Type; - public List Members = new List(); - /// - /// Should we do mixed marshalling on this struct? - /// - public bool MixedMarshal = false; - - public UInt32 Size(GR2Reader gr2) - { - UInt32 size = 0; - foreach (var member in Members) - size += member.Size(gr2); - return size; - } + UInt32 size = 0; + foreach (var member in Members) + size += member.Size(gr2); + return size; + } - public void MapType(object instance) + public void MapType(object instance) + { + if (Type == null) { - if (Type == null) + Type = instance.GetType(); + foreach (var field in Type.GetFields(BindingFlags.Instance | BindingFlags.Public)) { - Type = instance.GetType(); - foreach (var field in Type.GetFields(BindingFlags.Instance | BindingFlags.Public)) + var name = field.Name; + var attrs = field.GetCustomAttributes(typeof(SerializationAttribute), true); + if (attrs.Length > 0) { - var name = field.Name; - var attrs = field.GetCustomAttributes(typeof(SerializationAttribute), true); - if (attrs.Length > 0) + SerializationAttribute serialization = attrs[0] as SerializationAttribute; + if (serialization.Name != null) { - SerializationAttribute serialization = attrs[0] as SerializationAttribute; - if (serialization.Name != null) - { - name = serialization.Name; - } + name = serialization.Name; } + } - foreach (var member in Members) + foreach (var member in Members) + { + if (member.Name == name) { - if (member.Name == name) - { - member.AssignFieldInfo(field); - } + member.AssignFieldInfo(field); } } } - - // If this assertion is triggered it most likely is because multiple C# types - // were assigned to the same Granny type in different places in the class definitions - Debug.Assert(Type == instance.GetType()); } - public void LoadFromType(Type type, GR2Writer writer) - { - Type = type; + // If this assertion is triggered it most likely is because multiple C# types + // were assigned to the same Granny type in different places in the class definitions + Debug.Assert(Type == instance.GetType()); + } - var attrs = type.GetCustomAttributes(typeof(StructSerializationAttribute), true); - if (attrs.Length > 0) - { - StructSerializationAttribute serialization = attrs[0] as StructSerializationAttribute; - MixedMarshal = serialization.MixedMarshal; - } + public void LoadFromType(Type type, GR2Writer writer) + { + Type = type; - foreach (var field in type.GetFields(BindingFlags.Instance | BindingFlags.Public)) - { - var member = 
MemberDefinition.CreateFromFieldInfo(field, writer); - if (member.SerializationKind != SerializationKind.None) - Members.Add(member); - } + var attrs = type.GetCustomAttributes(typeof(StructSerializationAttribute), true); + if (attrs.Length > 0) + { + StructSerializationAttribute serialization = attrs[0] as StructSerializationAttribute; + MixedMarshal = serialization.MixedMarshal; + } + + foreach (var field in type.GetFields(BindingFlags.Instance | BindingFlags.Public)) + { + var member = MemberDefinition.CreateFromFieldInfo(field, writer); + if (member.SerializationKind != SerializationKind.None) + Members.Add(member); } } +} +/// +/// Determines the way a structure field is serialized. +/// +public enum SerializationKind +{ /// - /// Determines the way a structure field is serialized. + /// Don't serialize this field + /// (The parser may still read this field internally, but it will not set the relevant struct field) /// - public enum SerializationKind - { - /// - /// Don't serialize this field - /// (The parser may still read this field internally, but it will not set the relevant struct field) - /// - None, - /// - /// Do serialization via the builtin GR2 parser (this is the default) - /// - Builtin, - /// - /// Serialize raw Granny data via the user-defined serializer class. - /// (This is almost the same as overriding Reader.ReadInstance(); the serializer doesn't create the - /// struct, won't process relocations/references, etc. You're on your own.) - /// - UserRaw, - /// - /// Serialize the struct once per member field via the user-defined serializer class. - /// - For primitive/inline types this is the same as UserRaw. - /// - For [Variant]Reference, the parser will process the relocation automatically - /// - For arrays and RefTo*Arrays, the first relocation is processed automatically, the array itself - /// (and item references for Ref types) should be processed by the user-defined serializer. - /// - UserMember, - /// - /// Serialize the struct once for each array element in the member field via the user-defined serializer class. - /// - For primitive/inline types this is the same as UserRaw. - /// - For [Variant]Reference, the parser will process the relocation automatically and the serializer is called once - /// - For arrays and RefTo*Arrays, all relocations are processed automatically and the serializer is - /// called once for each array element. - /// - UserElement - }; + None, + /// + /// Do serialization via the builtin GR2 parser (this is the default) + /// + Builtin, + /// + /// Serialize raw Granny data via the user-defined serializer class. + /// (This is almost the same as overriding Reader.ReadInstance(); the serializer doesn't create the + /// struct, won't process relocations/references, etc. You're on your own.) + /// + UserRaw, + /// + /// Serialize the struct once per member field via the user-defined serializer class. + /// - For primitive/inline types this is the same as UserRaw. + /// - For [Variant]Reference, the parser will process the relocation automatically + /// - For arrays and RefTo*Arrays, the first relocation is processed automatically, the array itself + /// (and item references for Ref types) should be processed by the user-defined serializer. + /// + UserMember, + /// + /// Serialize the struct once for each array element in the member field via the user-defined serializer class. + /// - For primitive/inline types this is the same as UserRaw. 
+ /// - For [Variant]Reference, the parser will process the relocation automatically and the serializer is called once + /// - For arrays and RefTo*Arrays, all relocations are processed automatically and the serializer is + /// called once for each array element. + /// + UserElement +}; - public interface NodeSerializer - { - object Read(GR2Reader reader, StructDefinition definition, MemberDefinition member, uint arraySize, object parent); - void Write(GR2Writer writer, WritableSection section, MemberDefinition member, object obj); - } +public interface NodeSerializer +{ + object Read(GR2Reader reader, StructDefinition definition, MemberDefinition member, uint arraySize, object parent); + void Write(GR2Writer writer, WritableSection section, MemberDefinition member, object obj); +} - public interface VariantTypeSelector - { - Type SelectType(MemberDefinition member, object node); - Type SelectType(MemberDefinition member, StructDefinition defn, object parent); - } +public interface VariantTypeSelector +{ + Type SelectType(MemberDefinition member, object node); + Type SelectType(MemberDefinition member, StructDefinition defn, object parent); +} - public interface SectionSelector - { - SectionType SelectSection(MemberDefinition member, Type type, object obj); - } +public interface SectionSelector +{ + SectionType SelectSection(MemberDefinition member, Type type, object obj); +} - public interface StructDefinitionSelector - { - StructDefinition CreateStructDefinition(object instance); - } +public interface StructDefinitionSelector +{ + StructDefinition CreateStructDefinition(object instance); +} +/// +/// Tells the Granny serializer about the way we want it to write a field to the .GR2 file. +/// +[AttributeUsage(AttributeTargets.Field)] +public class SerializationAttribute : System.Attribute +{ /// - /// Tells the Granny serializer about the way we want it to write a field to the .GR2 file. + /// Which section should this field be serialized into /// - [AttributeUsage(AttributeTargets.Field)] - public class SerializationAttribute : System.Attribute - { - /// - /// Which section should this field be serialized into - /// - public SectionType Section = SectionType.Invalid; - /// - /// Should we save this member to the data area? - /// - public bool DataArea = false; - /// - /// Override Granny member type - /// - public MemberType Type = MemberType.Invalid; - /// - /// Size of static array - this *must* be set for array (ie. float[]) types! - /// - public UInt32 ArraySize; - /// - /// The Granny type we should save when serializing this field - /// (Mainly used to provide a type definition for user-defined serializers) - /// - public Type Prototype; - /// - /// User-defined section selector class (must implement SectionSelector) - /// - public Type SectionSelector; - /// - /// User-defined type selector class (must implement VariantTypeSelector) - /// - public Type TypeSelector; - /// - /// User-defined serializer class (must implement NodeSerializer) - /// - public Type Serializer; - /// - /// In what way should we serialize this item - /// - public SerializationKind Kind = SerializationKind.Builtin; - /// - /// Member name in the serialized file - /// - public String Name; - /// - /// Minimum GR2 file version this member should be exported to - /// - public UInt32 MinVersion = 0; - /// - /// Maximum GR2 file version this member should be exported to - /// - public UInt32 MaxVersion = 0; - /// - /// Should we do mixed marshalling on this struct? 
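The selector and serializer interfaces above are the extension points that the SerializationAttribute fields below plug into. A hedged sketch of a VariantTypeSelector implementation, assuming a project reference to LSLib; the ExtendedDataV1/V2 classes and the member-count dispatch are purely illustrative, only the interface signatures come from the declarations above.

using System;
using LSLib.Granny.GR2;

// Hypothetical payload types chosen for a variant reference.
public class ExtendedDataV1 { }
public class ExtendedDataV2 { }

public class ExampleTypeSelector : VariantTypeSelector
{
    // Write path: pick the Granny type definition from the live object.
    public Type SelectType(MemberDefinition member, object node)
    {
        return node?.GetType();
    }

    // Read path: pick the C# type to instantiate for the variant reference.
    public Type SelectType(MemberDefinition member, StructDefinition defn, object parent)
    {
        // Illustrative dispatch: decide by how many members the GR2 type definition declares.
        return defn.Members.Count > 4 ? typeof(ExtendedDataV2) : typeof(ExtendedDataV1);
    }
}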
- /// - public bool MixedMarshal = false; - } + public SectionType Section = SectionType.Invalid; + /// + /// Should we save this member to the data area? + /// + public bool DataArea = false; + /// + /// Override Granny member type + /// + public MemberType Type = MemberType.Invalid; + /// + /// Size of static array - this *must* be set for array (ie. float[]) types! + /// + public UInt32 ArraySize; + /// + /// The Granny type we should save when serializing this field + /// (Mainly used to provide a type definition for user-defined serializers) + /// + public Type Prototype; + /// + /// User-defined section selector class (must implement SectionSelector) + /// + public Type SectionSelector; + /// + /// User-defined type selector class (must implement VariantTypeSelector) + /// + public Type TypeSelector; + /// + /// User-defined serializer class (must implement NodeSerializer) + /// + public Type Serializer; + /// + /// In what way should we serialize this item + /// + public SerializationKind Kind = SerializationKind.Builtin; + /// + /// Member name in the serialized file + /// + public String Name; + /// + /// Minimum GR2 file version this member should be exported to + /// + public UInt32 MinVersion = 0; + /// + /// Maximum GR2 file version this member should be exported to + /// + public UInt32 MaxVersion = 0; + /// + /// Should we do mixed marshalling on this struct? + /// + public bool MixedMarshal = false; +} +/// +/// Tells the Granny serializer about the way we want it to write a struct to the .GR2 file. +/// +[AttributeUsage(AttributeTargets.Class)] +public class StructSerializationAttribute : System.Attribute +{ /// - /// Tells the Granny serializer about the way we want it to write a struct to the .GR2 file. + /// Should we do mixed marshalling on this struct? /// - [AttributeUsage(AttributeTargets.Class)] - public class StructSerializationAttribute : System.Attribute - { - /// - /// Should we do mixed marshalling on this struct? 
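A hedged usage sketch for the attribute, assuming a project reference to LSLib; the class and field names here are invented for illustration, and only the attribute members come from the definitions above.

using System;
using LSLib.Granny.GR2;

[StructSerialization(MixedMarshal = true)]
public class ExampleGrannyStruct
{
    // Fixed-size arrays must set ArraySize (see the check in CreateFromFieldInfo above);
    // Type overrides the inferred Granny member type and Name overrides the exported name.
    [Serialization(ArraySize = 3, Type = MemberType.Real32, Name = "OriginOffset")]
    public float[] Origin;

    // Kind = None: the parser may still consume the field from the file, but it is
    // neither assigned on read nor written back on export.
    [Serialization(Kind = SerializationKind.None)]
    public int RuntimeCacheSlot;
}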
- /// - public bool MixedMarshal = false; - - /// - /// User-defined data structure selector class (must implement StructDefinitionSelector) - /// - public Type TypeSelector; - } + public bool MixedMarshal = false; + + /// + /// User-defined data structure selector class (must implement StructDefinitionSelector) + /// + public Type TypeSelector; } diff --git a/LSLib/Granny/GR2/Helpers.cs b/LSLib/Granny/GR2/Helpers.cs index f6999b00..3f2cc86d 100644 --- a/LSLib/Granny/GR2/Helpers.cs +++ b/LSLib/Granny/GR2/Helpers.cs @@ -2,162 +2,161 @@ using System.Collections.Generic; using System.Linq.Expressions; -namespace LSLib.Granny.GR2 +namespace LSLib.Granny.GR2; + +public static class Helpers { - public static class Helpers - { - private static readonly Dictionary CachedConstructors = []; - private static readonly Dictionary CachedArrayConstructors = []; + private static readonly Dictionary CachedConstructors = []; + private static readonly Dictionary CachedArrayConstructors = []; - public delegate object ObjectCtor(); - public delegate object ArrayCtor(int size); + public delegate object ObjectCtor(); + public delegate object ArrayCtor(int size); - public static ObjectCtor GetConstructor(Type type) + public static ObjectCtor GetConstructor(Type type) + { + ObjectCtor ctor; + if (!CachedConstructors.TryGetValue(type, out ctor)) { - ObjectCtor ctor; - if (!CachedConstructors.TryGetValue(type, out ctor)) - { - NewExpression newExp = Expression.New(type); - LambdaExpression lambda = Expression.Lambda(typeof(ObjectCtor), newExp, new ParameterExpression[] { }); - ctor = (ObjectCtor)lambda.Compile(); - CachedConstructors.Add(type, ctor); - } - - return ctor; + NewExpression newExp = Expression.New(type); + LambdaExpression lambda = Expression.Lambda(typeof(ObjectCtor), newExp, new ParameterExpression[] { }); + ctor = (ObjectCtor)lambda.Compile(); + CachedConstructors.Add(type, ctor); } - public static object CreateInstance(Type type) - { - ObjectCtor ctor = GetConstructor(type); - return ctor(); - } + return ctor; + } - public static object CreateArrayInstance(Type type, int size) - { - if (!CachedArrayConstructors.TryGetValue(type, out ArrayCtor ctor)) - { - var typeCtor = type.GetConstructor(new Type[] { typeof(int) }); - var sizeParam = Expression.Parameter(typeof(int), ""); - NewExpression newExp = Expression.New(typeCtor, new Expression[] { sizeParam }); - LambdaExpression lambda = Expression.Lambda(typeof(ArrayCtor), newExp, new ParameterExpression[] { sizeParam }); - ctor = (ArrayCtor)lambda.Compile(); - CachedArrayConstructors.Add(type, ctor); - } - - return ctor(size); - } + public static object CreateInstance(Type type) + { + ObjectCtor ctor = GetConstructor(type); + return ctor(); } - class UInt8ListSerializer : NodeSerializer + public static object CreateArrayInstance(Type type, int size) { - public object Read(GR2Reader gr2, StructDefinition definition, MemberDefinition member, uint arraySize, object parent) + if (!CachedArrayConstructors.TryGetValue(type, out ArrayCtor ctor)) { - var controls = new List((int)arraySize); - for (int i = 0; i < arraySize; i++) - controls.Add(gr2.Reader.ReadByte()); - return controls; + var typeCtor = type.GetConstructor(new Type[] { typeof(int) }); + var sizeParam = Expression.Parameter(typeof(int), ""); + NewExpression newExp = Expression.New(typeCtor, new Expression[] { sizeParam }); + LambdaExpression lambda = Expression.Lambda(typeof(ArrayCtor), newExp, new ParameterExpression[] { sizeParam }); + ctor = (ArrayCtor)lambda.Compile(); + 
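Helpers caches one compiled constructor delegate per type, so repeated CreateInstance calls skip the per-call reflection cost of Activator.CreateInstance. A self-contained sketch of the same expression-tree technique, simplified to reference types with a public parameterless constructor:

using System;
using System.Collections.Generic;
using System.Linq.Expressions;

static class CtorCache
{
    private static readonly Dictionary<Type, Func<object>> Cache = new();

    public static object Create(Type type)
    {
        if (!Cache.TryGetValue(type, out var ctor))
        {
            // new T() wrapped in a lambda and compiled to a delegate exactly once per type.
            var lambda = Expression.Lambda<Func<object>>(Expression.New(type));
            ctor = lambda.Compile();
            Cache.Add(type, ctor);
        }
        return ctor();
    }
}

// Usage: var list = (List<int>)CtorCache.Create(typeof(List<int>));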
CachedArrayConstructors.Add(type, ctor); } - public void Write(GR2Writer writer, WritableSection section, MemberDefinition member, object obj) - { - var items = obj as List; - for (int i = 0; i < items.Count; i++) - section.Writer.Write(items[i]); - } + return ctor(size); + } +} + +class UInt8ListSerializer : NodeSerializer +{ + public object Read(GR2Reader gr2, StructDefinition definition, MemberDefinition member, uint arraySize, object parent) + { + var controls = new List((int)arraySize); + for (int i = 0; i < arraySize; i++) + controls.Add(gr2.Reader.ReadByte()); + return controls; + } + + public void Write(GR2Writer writer, WritableSection section, MemberDefinition member, object obj) + { + var items = obj as List; + for (int i = 0; i < items.Count; i++) + section.Writer.Write(items[i]); } +} - class UInt16ListSerializer : NodeSerializer +class UInt16ListSerializer : NodeSerializer +{ + public object Read(GR2Reader gr2, StructDefinition definition, MemberDefinition member, uint arraySize, object parent) { - public object Read(GR2Reader gr2, StructDefinition definition, MemberDefinition member, uint arraySize, object parent) - { - var controls = new List((int)arraySize); - for (int i = 0; i < arraySize; i++) - controls.Add(gr2.Reader.ReadUInt16()); - return controls; - } + var controls = new List((int)arraySize); + for (int i = 0; i < arraySize; i++) + controls.Add(gr2.Reader.ReadUInt16()); + return controls; + } - public void Write(GR2Writer writer, WritableSection section, MemberDefinition member, object obj) - { - var items = obj as List; - for (int i = 0; i < items.Count; i++) - section.Writer.Write(items[i]); - } + public void Write(GR2Writer writer, WritableSection section, MemberDefinition member, object obj) + { + var items = obj as List; + for (int i = 0; i < items.Count; i++) + section.Writer.Write(items[i]); } +} - class Int16ListSerializer : NodeSerializer +class Int16ListSerializer : NodeSerializer +{ + public object Read(GR2Reader gr2, StructDefinition definition, MemberDefinition member, uint arraySize, object parent) { - public object Read(GR2Reader gr2, StructDefinition definition, MemberDefinition member, uint arraySize, object parent) - { - var controls = new List((int)arraySize); - for (int i = 0; i < arraySize; i++) - controls.Add(gr2.Reader.ReadInt16()); - return controls; - } + var controls = new List((int)arraySize); + for (int i = 0; i < arraySize; i++) + controls.Add(gr2.Reader.ReadInt16()); + return controls; + } - public void Write(GR2Writer writer, WritableSection section, MemberDefinition member, object obj) - { - var items = obj as List; - for (int i = 0; i < items.Count; i++) - section.Writer.Write(items[i]); - } + public void Write(GR2Writer writer, WritableSection section, MemberDefinition member, object obj) + { + var items = obj as List; + for (int i = 0; i < items.Count; i++) + section.Writer.Write(items[i]); } +} - class UInt32ListSerializer : NodeSerializer +class UInt32ListSerializer : NodeSerializer +{ + public object Read(GR2Reader gr2, StructDefinition definition, MemberDefinition member, uint arraySize, object parent) { - public object Read(GR2Reader gr2, StructDefinition definition, MemberDefinition member, uint arraySize, object parent) - { - var controls = new List((int)arraySize); - for (int i = 0; i < arraySize; i++) - controls.Add(gr2.Reader.ReadUInt32()); - return controls; - } + var controls = new List((int)arraySize); + for (int i = 0; i < arraySize; i++) + controls.Add(gr2.Reader.ReadUInt32()); + return controls; + } - public void 
Write(GR2Writer writer, WritableSection section, MemberDefinition member, object obj) - { - var items = obj as List; - for (int i = 0; i < items.Count; i++) - section.Writer.Write(items[i]); - } + public void Write(GR2Writer writer, WritableSection section, MemberDefinition member, object obj) + { + var items = obj as List; + for (int i = 0; i < items.Count; i++) + section.Writer.Write(items[i]); } +} - class Int32ListSerializer : NodeSerializer +class Int32ListSerializer : NodeSerializer +{ + public object Read(GR2Reader gr2, StructDefinition definition, MemberDefinition member, uint arraySize, object parent) { - public object Read(GR2Reader gr2, StructDefinition definition, MemberDefinition member, uint arraySize, object parent) - { - var controls = new List((int)arraySize); - for (int i = 0; i < arraySize; i++) - controls.Add(gr2.Reader.ReadInt32()); - return controls; - } + var controls = new List((int)arraySize); + for (int i = 0; i < arraySize; i++) + controls.Add(gr2.Reader.ReadInt32()); + return controls; + } - public void Write(GR2Writer writer, WritableSection section, MemberDefinition member, object obj) - { - var items = obj as List; - for (int i = 0; i < items.Count; i++) - section.Writer.Write(items[i]); - } + public void Write(GR2Writer writer, WritableSection section, MemberDefinition member, object obj) + { + var items = obj as List; + for (int i = 0; i < items.Count; i++) + section.Writer.Write(items[i]); } +} - class SingleListSerializer : NodeSerializer +class SingleListSerializer : NodeSerializer +{ + public object Read(GR2Reader gr2, StructDefinition definition, MemberDefinition member, uint arraySize, object parent) { - public object Read(GR2Reader gr2, StructDefinition definition, MemberDefinition member, uint arraySize, object parent) - { - var controls = new List((int)arraySize); - for (int i = 0; i < arraySize; i++) - controls.Add(gr2.Reader.ReadSingle()); - return controls; - } + var controls = new List((int)arraySize); + for (int i = 0; i < arraySize; i++) + controls.Add(gr2.Reader.ReadSingle()); + return controls; + } - public void Write(GR2Writer writer, WritableSection section, MemberDefinition member, object obj) - { - var items = obj as List; - for (int i = 0; i < items.Count; i++) - section.Writer.Write(items[i]); - } + public void Write(GR2Writer writer, WritableSection section, MemberDefinition member, object obj) + { + var items = obj as List; + for (int i = 0; i < items.Count; i++) + section.Writer.Write(items[i]); } } diff --git a/LSLib/Granny/GR2/Reader.cs b/LSLib/Granny/GR2/Reader.cs index a1b8076d..ff474ecb 100644 --- a/LSLib/Granny/GR2/Reader.cs +++ b/LSLib/Granny/GR2/Reader.cs @@ -9,1075 +9,1074 @@ using System.Text; using LSLib.Native; -namespace LSLib.Granny.GR2 +namespace LSLib.Granny.GR2; + +public class ParsingException(string message) : Exception(message) { - public class ParsingException(string message) : Exception(message) - { - } +} - public class GR2Reader(Stream stream) - { - internal Stream InputStream = stream; - internal BinaryReader InputReader; - internal Stream Stream; - internal BinaryReader Reader; - internal Magic Magic; - internal Header Header; - internal List
Sections = []; - internal Dictionary Types = []; - private readonly Dictionary CachedStructs = []; +public class GR2Reader(Stream stream) +{ + internal Stream InputStream = stream; + internal BinaryReader InputReader; + internal Stream Stream; + internal BinaryReader Reader; + internal Magic Magic; + internal Header Header; + internal List
Sections = []; + internal Dictionary Types = []; + private readonly Dictionary CachedStructs = []; #if DEBUG_GR2_SERIALIZATION - private HashSet DebugPendingResolve = []; + private HashSet DebugPendingResolve = []; #endif - public UInt32 Tag - { - get { return Header.tag; } - } + public UInt32 Tag + { + get { return Header.tag; } + } - public void Dispose() - { - Stream?.Dispose(); - } + public void Dispose() + { + Stream?.Dispose(); + } - public void Read(object root) + public void Read(object root) + { + using (this.InputReader = new BinaryReader(InputStream)) { - using (this.InputReader = new BinaryReader(InputStream)) - { - Magic = ReadMagic(); + Magic = ReadMagic(); - if (Magic.format != Magic.Format.LittleEndian32 && Magic.format != Magic.Format.LittleEndian64) - throw new ParsingException("Only little-endian GR2 files are supported"); + if (Magic.format != Magic.Format.LittleEndian32 && Magic.format != Magic.Format.LittleEndian64) + throw new ParsingException("Only little-endian GR2 files are supported"); - Header = ReadHeader(); - for (int i = 0; i < Header.numSections; i++) + Header = ReadHeader(); + for (int i = 0; i < Header.numSections; i++) + { + var section = new Section { - var section = new Section - { - Header = ReadSectionHeader() - }; - Sections.Add(section); - } + Header = ReadSectionHeader() + }; + Sections.Add(section); + } - Debug.Assert(InputStream.Position == Magic.headersSize); + Debug.Assert(InputStream.Position == Magic.headersSize); - UncompressStream(); + UncompressStream(); - foreach (var section in Sections) - { - ReadSectionRelocations(section); - } + foreach (var section in Sections) + { + ReadSectionRelocations(section); + } - if (Magic.IsLittleEndian != BitConverter.IsLittleEndian) + if (Magic.IsLittleEndian != BitConverter.IsLittleEndian) + { + // TODO: This should be done before applying relocations? + foreach (var section in Sections) { - // TODO: This should be done before applying relocations? 
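A hedged caller sketch for the reader above, assuming a project reference to LSLib and its LSLib.Granny.Model.Root model class (an assumption; any object whose public fields mirror the GR2 root type definition would do, since Read populates the root by reflection as the hunks further below show).

using System.IO;
using LSLib.Granny.GR2;

class LoadExample
{
    static LSLib.Granny.Model.Root LoadGr2(string path)
    {
        using var stream = File.OpenRead(path);
        var reader = new GR2Reader(stream);
        var root = new LSLib.Granny.Model.Root();
        reader.Read(root);   // throws ParsingException for compressed headers or unsupported versions
        reader.Dispose();    // releases the decompressed in-memory section stream
        return root;
    }
}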
- foreach (var section in Sections) - { - ReadSectionMixedMarshallingRelocations(section); - } + ReadSectionMixedMarshallingRelocations(section); } + } - var rootStruct = new StructReference - { - Offset = Sections[(int)Header.rootType.Section].Header.offsetInFile + Header.rootType.Offset - }; + var rootStruct = new StructReference + { + Offset = Sections[(int)Header.rootType.Section].Header.offsetInFile + Header.rootType.Offset + }; - Seek(Header.rootNode); - ReadStruct(rootStruct.Resolve(this), MemberType.Inline, root, null); - } + Seek(Header.rootNode); + ReadStruct(rootStruct.Resolve(this), MemberType.Inline, root, null); } + } - private Magic ReadMagic() + private Magic ReadMagic() + { + var magic = new Magic { - var magic = new Magic - { - signature = InputReader.ReadBytes(16), - headersSize = InputReader.ReadUInt32(), - headerFormat = InputReader.ReadUInt32(), - reserved1 = InputReader.ReadUInt32(), - reserved2 = InputReader.ReadUInt32() - }; - magic.format = Magic.FormatFromSignature(magic.signature); + signature = InputReader.ReadBytes(16), + headersSize = InputReader.ReadUInt32(), + headerFormat = InputReader.ReadUInt32(), + reserved1 = InputReader.ReadUInt32(), + reserved2 = InputReader.ReadUInt32() + }; + magic.format = Magic.FormatFromSignature(magic.signature); - if (magic.headerFormat != 0) - throw new ParsingException("Compressed GR2 files are not supported"); + if (magic.headerFormat != 0) + throw new ParsingException("Compressed GR2 files are not supported"); - Debug.Assert(magic.reserved1 == 0); - Debug.Assert(magic.reserved2 == 0); + Debug.Assert(magic.reserved1 == 0); + Debug.Assert(magic.reserved2 == 0); #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(" ===== GR2 Magic ===== "); - System.Console.WriteLine(String.Format("Format: {0}", magic.format)); - System.Console.WriteLine(String.Format("Headers size: {0:X8}, format: ", magic.headersSize, magic.headerFormat)); - System.Console.WriteLine(String.Format("Reserved1-2: {0:X8} {1:X8}", magic.reserved1, magic.reserved2)); + System.Console.WriteLine(" ===== GR2 Magic ===== "); + System.Console.WriteLine(String.Format("Format: {0}", magic.format)); + System.Console.WriteLine(String.Format("Headers size: {0:X8}, format: ", magic.headersSize, magic.headerFormat)); + System.Console.WriteLine(String.Format("Reserved1-2: {0:X8} {1:X8}", magic.reserved1, magic.reserved2)); #endif - return magic; - } + return magic; + } - private Header ReadHeader() + private Header ReadHeader() + { + var header = new Header { - var header = new Header - { - version = InputReader.ReadUInt32(), - fileSize = InputReader.ReadUInt32(), - crc = InputReader.ReadUInt32(), - sectionsOffset = InputReader.ReadUInt32(), - numSections = InputReader.ReadUInt32(), - rootType = ReadSectionReferenceUnchecked(), - rootNode = ReadSectionReferenceUnchecked(), - tag = InputReader.ReadUInt32(), - extraTags = new UInt32[Header.ExtraTagCount] - }; - for (int i = 0; i < Header.ExtraTagCount; i++) - header.extraTags[i] = InputReader.ReadUInt32(); - - if (header.version >= 7) - { - header.stringTableCrc = InputReader.ReadUInt32(); - header.reserved1 = InputReader.ReadUInt32(); - header.reserved2 = InputReader.ReadUInt32(); - header.reserved3 = InputReader.ReadUInt32(); - } + version = InputReader.ReadUInt32(), + fileSize = InputReader.ReadUInt32(), + crc = InputReader.ReadUInt32(), + sectionsOffset = InputReader.ReadUInt32(), + numSections = InputReader.ReadUInt32(), + rootType = ReadSectionReferenceUnchecked(), + rootNode = ReadSectionReferenceUnchecked(), + tag = 
InputReader.ReadUInt32(), + extraTags = new UInt32[Header.ExtraTagCount] + }; + for (int i = 0; i < Header.ExtraTagCount; i++) + header.extraTags[i] = InputReader.ReadUInt32(); + + if (header.version >= 7) + { + header.stringTableCrc = InputReader.ReadUInt32(); + header.reserved1 = InputReader.ReadUInt32(); + header.reserved2 = InputReader.ReadUInt32(); + header.reserved3 = InputReader.ReadUInt32(); + } - if (header.version < 6 || header.version > 7) - throw new ParsingException(String.Format("Unsupported GR2 version; file is version {0}, supported versions are 6 and 7", header.version)); + if (header.version < 6 || header.version > 7) + throw new ParsingException(String.Format("Unsupported GR2 version; file is version {0}, supported versions are 6 and 7", header.version)); - // if (header.tag != Header.Tag) - // throw new ParsingException(String.Format("Incorrect header tag; expected {0:X8}, got {1:X8}", Header.Tag, header.tag)); + // if (header.tag != Header.Tag) + // throw new ParsingException(String.Format("Incorrect header tag; expected {0:X8}, got {1:X8}", Header.Tag, header.tag)); - Debug.Assert(header.fileSize <= InputStream.Length); - Debug.Assert(header.CalculateCRC(InputStream) == header.crc); - Debug.Assert(header.sectionsOffset == header.Size()); - Debug.Assert(header.rootType.Section < header.numSections); - // TODO: check rootTypeOffset after serialization - Debug.Assert(header.stringTableCrc == 0); - Debug.Assert(header.reserved1 == 0); - Debug.Assert(header.reserved2 == 0); - Debug.Assert(header.reserved3 == 0); + Debug.Assert(header.fileSize <= InputStream.Length); + Debug.Assert(header.CalculateCRC(InputStream) == header.crc); + Debug.Assert(header.sectionsOffset == header.Size()); + Debug.Assert(header.rootType.Section < header.numSections); + // TODO: check rootTypeOffset after serialization + Debug.Assert(header.stringTableCrc == 0); + Debug.Assert(header.reserved1 == 0); + Debug.Assert(header.reserved2 == 0); + Debug.Assert(header.reserved3 == 0); #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(" ===== GR2 Header ===== "); - System.Console.WriteLine(String.Format("Version {0}, Size {1}, CRC {2:X8}", header.version, header.fileSize, header.crc)); - System.Console.WriteLine(String.Format("Offset of sections: {0}, num sections: {1}", header.sectionsOffset, header.numSections)); - System.Console.WriteLine(String.Format("Root type section {0}, Root type offset {1:X8}", header.rootType.Section, header.rootType.Offset)); - System.Console.WriteLine(String.Format("Root node section {0} {1:X8}", header.rootNode.Section, header.rootNode.Offset)); - System.Console.WriteLine(String.Format("Tag: {0:X8}, Strings CRC: {1:X8}", header.tag, header.stringTableCrc)); - System.Console.WriteLine(String.Format("Extra tags: {0:X8} {1:X8} {2:X8} {3:X8}", header.extraTags[0], header.extraTags[1], header.extraTags[2], header.extraTags[3])); - System.Console.WriteLine(String.Format("Reserved: {0:X8} {1:X8} {2:X8}", new object[] { header.reserved1, header.reserved2, header.reserved3 })); + System.Console.WriteLine(" ===== GR2 Header ===== "); + System.Console.WriteLine(String.Format("Version {0}, Size {1}, CRC {2:X8}", header.version, header.fileSize, header.crc)); + System.Console.WriteLine(String.Format("Offset of sections: {0}, num sections: {1}", header.sectionsOffset, header.numSections)); + System.Console.WriteLine(String.Format("Root type section {0}, Root type offset {1:X8}", header.rootType.Section, header.rootType.Offset)); + System.Console.WriteLine(String.Format("Root node 
section {0} {1:X8}", header.rootNode.Section, header.rootNode.Offset)); + System.Console.WriteLine(String.Format("Tag: {0:X8}, Strings CRC: {1:X8}", header.tag, header.stringTableCrc)); + System.Console.WriteLine(String.Format("Extra tags: {0:X8} {1:X8} {2:X8} {3:X8}", header.extraTags[0], header.extraTags[1], header.extraTags[2], header.extraTags[3])); + System.Console.WriteLine(String.Format("Reserved: {0:X8} {1:X8} {2:X8}", new object[] { header.reserved1, header.reserved2, header.reserved3 })); #endif - return header; - } + return header; + } - private SectionHeader ReadSectionHeader() + private SectionHeader ReadSectionHeader() + { + var header = new SectionHeader { - var header = new SectionHeader - { - compression = InputReader.ReadUInt32(), - offsetInFile = InputReader.ReadUInt32(), - compressedSize = InputReader.ReadUInt32(), - uncompressedSize = InputReader.ReadUInt32(), - alignment = InputReader.ReadUInt32(), - first16bit = InputReader.ReadUInt32(), - first8bit = InputReader.ReadUInt32(), - relocationsOffset = InputReader.ReadUInt32(), - numRelocations = InputReader.ReadUInt32(), - mixedMarshallingDataOffset = InputReader.ReadUInt32(), - numMixedMarshallingData = InputReader.ReadUInt32() - }; - - Debug.Assert(header.offsetInFile <= Header.fileSize); - - if (header.compression != 0) - { - Debug.Assert(header.offsetInFile + header.compressedSize <= Header.fileSize); - } - else - { - Debug.Assert(header.compressedSize == header.uncompressedSize); - Debug.Assert(header.offsetInFile + header.uncompressedSize <= Header.fileSize); - } + compression = InputReader.ReadUInt32(), + offsetInFile = InputReader.ReadUInt32(), + compressedSize = InputReader.ReadUInt32(), + uncompressedSize = InputReader.ReadUInt32(), + alignment = InputReader.ReadUInt32(), + first16bit = InputReader.ReadUInt32(), + first8bit = InputReader.ReadUInt32(), + relocationsOffset = InputReader.ReadUInt32(), + numRelocations = InputReader.ReadUInt32(), + mixedMarshallingDataOffset = InputReader.ReadUInt32(), + numMixedMarshallingData = InputReader.ReadUInt32() + }; + + Debug.Assert(header.offsetInFile <= Header.fileSize); + + if (header.compression != 0) + { + Debug.Assert(header.offsetInFile + header.compressedSize <= Header.fileSize); + } + else + { + Debug.Assert(header.compressedSize == header.uncompressedSize); + Debug.Assert(header.offsetInFile + header.uncompressedSize <= Header.fileSize); + } #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(" ===== Section Header ===== "); - System.Console.WriteLine(String.Format("Compression: {0}", header.compression)); - System.Console.WriteLine(String.Format("Offset {0:X8} Comp/UncompSize {1:X8}/{2:X8}", header.offsetInFile, header.compressedSize, header.uncompressedSize)); - System.Console.WriteLine(String.Format("Alignment {0}", header.alignment)); - System.Console.WriteLine(String.Format("First 16/8bit: {0:X8}/{1:X8}", header.first16bit, header.first8bit)); - System.Console.WriteLine(String.Format("Relocations: {0:X8} count {1}", header.relocationsOffset, header.numRelocations)); - System.Console.WriteLine(String.Format("Marshalling data: {0:X8} count {1}", header.mixedMarshallingDataOffset, header.numMixedMarshallingData)); + System.Console.WriteLine(" ===== Section Header ===== "); + System.Console.WriteLine(String.Format("Compression: {0}", header.compression)); + System.Console.WriteLine(String.Format("Offset {0:X8} Comp/UncompSize {1:X8}/{2:X8}", header.offsetInFile, header.compressedSize, header.uncompressedSize)); + 
System.Console.WriteLine(String.Format("Alignment {0}", header.alignment)); + System.Console.WriteLine(String.Format("First 16/8bit: {0:X8}/{1:X8}", header.first16bit, header.first8bit)); + System.Console.WriteLine(String.Format("Relocations: {0:X8} count {1}", header.relocationsOffset, header.numRelocations)); + System.Console.WriteLine(String.Format("Marshalling data: {0:X8} count {1}", header.mixedMarshallingDataOffset, header.numMixedMarshallingData)); #endif - return header; - } + return header; + } - private void UncompressStream() - { + private void UncompressStream() + { #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" ===== Repacking sections ===== ")); + System.Console.WriteLine(String.Format(" ===== Repacking sections ===== ")); #endif - uint totalSize = 0; - foreach (var section in Sections) - { - totalSize += section.Header.uncompressedSize; - } + uint totalSize = 0; + foreach (var section in Sections) + { + totalSize += section.Header.uncompressedSize; + } - // Copy the whole file, as we'll update its contents because of relocations and marshalling fixups - byte[] uncompressedStream = new byte[totalSize]; - this.Stream = new MemoryStream(uncompressedStream); - this.Reader = new BinaryReader(this.Stream); + // Copy the whole file, as we'll update its contents because of relocations and marshalling fixups + byte[] uncompressedStream = new byte[totalSize]; + this.Stream = new MemoryStream(uncompressedStream); + this.Reader = new BinaryReader(this.Stream); - for (int i = 0; i < Sections.Count; i++) + for (int i = 0; i < Sections.Count; i++) + { + var section = Sections[i]; + var hdr = section.Header; + byte[] sectionContents = new byte[hdr.compressedSize]; + InputStream.Position = hdr.offsetInFile; + InputStream.Read(sectionContents, 0, (int)hdr.compressedSize); + + var originalOffset = hdr.offsetInFile; + hdr.offsetInFile = (uint)Stream.Position; + if (section.Header.compression == 0) + { + Stream.Write(sectionContents, 0, sectionContents.Length); + } + else if (section.Header.uncompressedSize > 0) { - var section = Sections[i]; - var hdr = section.Header; - byte[] sectionContents = new byte[hdr.compressedSize]; - InputStream.Position = hdr.offsetInFile; - InputStream.Read(sectionContents, 0, (int)hdr.compressedSize); - - var originalOffset = hdr.offsetInFile; - hdr.offsetInFile = (uint)Stream.Position; - if (section.Header.compression == 0) + if (hdr.compression == 4) { - Stream.Write(sectionContents, 0, sectionContents.Length); + var uncompressed = Granny2Compressor.Decompress4( + sectionContents, (int)hdr.uncompressedSize); + Stream.Write(uncompressed, 0, uncompressed.Length); } - else if (section.Header.uncompressedSize > 0) + else { - if (hdr.compression == 4) - { - var uncompressed = Granny2Compressor.Decompress4( - sectionContents, (int)hdr.uncompressedSize); - Stream.Write(uncompressed, 0, uncompressed.Length); - } - else - { - var uncompressed = Granny2Compressor.Decompress( - (int)hdr.compression, - sectionContents, (int)hdr.uncompressedSize, - (int)hdr.first16bit, (int)hdr.first8bit, (int)hdr.uncompressedSize); - Stream.Write(uncompressed, 0, uncompressed.Length); - } + var uncompressed = Granny2Compressor.Decompress( + (int)hdr.compression, + sectionContents, (int)hdr.uncompressedSize, + (int)hdr.first16bit, (int)hdr.first8bit, (int)hdr.uncompressedSize); + Stream.Write(uncompressed, 0, uncompressed.Length); } + } #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" {0}: {1:X8} ({2}) --> {3:X8} ({4})", i, originalOffset, 
hdr.compressedSize, hdr.offsetInFile, hdr.uncompressedSize)); + System.Console.WriteLine(String.Format(" {0}: {1:X8} ({2}) --> {3:X8} ({4})", i, originalOffset, hdr.compressedSize, hdr.offsetInFile, hdr.uncompressedSize)); #endif - } } + } - private void ReadSectionRelocationsInternal(Section section, Stream relocationsStream) - { + private void ReadSectionRelocationsInternal(Section section, Stream relocationsStream) + { #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" ===== Relocations for section at {0:X8} ===== ", section.Header.offsetInFile)); + System.Console.WriteLine(String.Format(" ===== Relocations for section at {0:X8} ===== ", section.Header.offsetInFile)); #endif - using var relocationsReader = new BinaryReader(relocationsStream, Encoding.Default, true); - for (int i = 0; i < section.Header.numRelocations; i++) - { - UInt32 offsetInSection = relocationsReader.ReadUInt32(); - Debug.Assert(offsetInSection <= section.Header.uncompressedSize); - var reference = ReadSectionReference(relocationsReader); + using var relocationsReader = new BinaryReader(relocationsStream, Encoding.Default, true); + for (int i = 0; i < section.Header.numRelocations; i++) + { + UInt32 offsetInSection = relocationsReader.ReadUInt32(); + Debug.Assert(offsetInSection <= section.Header.uncompressedSize); + var reference = ReadSectionReference(relocationsReader); - Stream.Position = section.Header.offsetInFile + offsetInSection; - var fixupAddress = Sections[(int)reference.Section].Header.offsetInFile + reference.Offset; - Stream.Write(BitConverter.GetBytes(fixupAddress), 0, 4); + Stream.Position = section.Header.offsetInFile + offsetInSection; + var fixupAddress = Sections[(int)reference.Section].Header.offsetInFile + reference.Offset; + Stream.Write(BitConverter.GetBytes(fixupAddress), 0, 4); #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" LOCAL {0:X8} --> {1}:{2:X8}", offsetInSection, (SectionType)reference.Section, reference.Offset)); - System.Console.WriteLine(String.Format(" GLOBAL {0:X8} --> {1:X8}", - offsetInSection + section.Header.offsetInFile, - reference.Offset + Sections[(int)reference.Section].Header.offsetInFile)); + System.Console.WriteLine(String.Format(" LOCAL {0:X8} --> {1}:{2:X8}", offsetInSection, (SectionType)reference.Section, reference.Offset)); + System.Console.WriteLine(String.Format(" GLOBAL {0:X8} --> {1:X8}", + offsetInSection + section.Header.offsetInFile, + reference.Offset + Sections[(int)reference.Section].Header.offsetInFile)); #endif - } } + } - private void ReadSectionRelocations(Section section) - { - if (section.Header.numRelocations == 0) return; + private void ReadSectionRelocations(Section section) + { + if (section.Header.numRelocations == 0) return; - InputStream.Seek(section.Header.relocationsOffset, SeekOrigin.Begin); - if (section.Header.compression == 4) - { - using var reader = new BinaryReader(InputStream, Encoding.Default, true); - UInt32 compressedSize = reader.ReadUInt32(); - byte[] compressed = reader.ReadBytes((int)compressedSize); - var uncompressed = Granny2Compressor.Decompress4( - compressed, (int)(section.Header.numRelocations * 12)); - using var ms = new MemoryStream(uncompressed); - ReadSectionRelocationsInternal(section, ms); - } - else - { - ReadSectionRelocationsInternal(section, InputStream); - } + InputStream.Seek(section.Header.relocationsOffset, SeekOrigin.Begin); + if (section.Header.compression == 4) + { + using var reader = new BinaryReader(InputStream, Encoding.Default, true); + UInt32 
compressedSize = reader.ReadUInt32(); + byte[] compressed = reader.ReadBytes((int)compressedSize); + var uncompressed = Granny2Compressor.Decompress4( + compressed, (int)(section.Header.numRelocations * 12)); + using var ms = new MemoryStream(uncompressed); + ReadSectionRelocationsInternal(section, ms); } + else + { + ReadSectionRelocationsInternal(section, InputStream); + } + } - private void MixedMarshal(UInt32 count, StructDefinition definition) + private void MixedMarshal(UInt32 count, StructDefinition definition) + { + for (var arrayIdx = 0; arrayIdx < count; arrayIdx++) { - for (var arrayIdx = 0; arrayIdx < count; arrayIdx++) + foreach (var member in definition.Members) { - foreach (var member in definition.Members) + var size = member.Size(this); + if (member.Type == MemberType.Inline) { - var size = member.Size(this); - if (member.Type == MemberType.Inline) - { - MixedMarshal(member.ArraySize == 0 ? 1 : member.ArraySize, member.Definition.Resolve(this)); - } - else if (member.MarshallingSize() > 1) + MixedMarshal(member.ArraySize == 0 ? 1 : member.ArraySize, member.Definition.Resolve(this)); + } + else if (member.MarshallingSize() > 1) + { + var marshalSize = member.MarshallingSize(); + byte[] data = new byte[size]; + Stream.Read(data, 0, (int)size); + for (var j = 0; j < size / marshalSize; j++) { - var marshalSize = member.MarshallingSize(); - byte[] data = new byte[size]; - Stream.Read(data, 0, (int)size); - for (var j = 0; j < size / marshalSize; j++) + // Byte swap for 2, 4, 8-byte values + for (var off = 0; off < marshalSize / 2; off++) { - // Byte swap for 2, 4, 8-byte values - for (var off = 0; off < marshalSize / 2; off++) - { - var tmp = data[j * marshalSize + off]; - data[j * marshalSize + off] = data[j * marshalSize + marshalSize - 1 - off]; - data[j * marshalSize + marshalSize - 1 - off] = tmp; - } + var tmp = data[j * marshalSize + off]; + data[j * marshalSize + off] = data[j * marshalSize + marshalSize - 1 - off]; + data[j * marshalSize + marshalSize - 1 - off] = tmp; } - - Stream.Seek(-size, SeekOrigin.Current); - Stream.Write(data, 0, (int)size); - Stream.Seek(-size, SeekOrigin.Current); } - Stream.Seek(size, SeekOrigin.Current); + Stream.Seek(-size, SeekOrigin.Current); + Stream.Write(data, 0, (int)size); + Stream.Seek(-size, SeekOrigin.Current); } + + Stream.Seek(size, SeekOrigin.Current); } } + } - private void ReadSectionMixedMarshallingRelocationsInternal(Section section, Stream relocationsStream) - { + private void ReadSectionMixedMarshallingRelocationsInternal(Section section, Stream relocationsStream) + { #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" ===== Mixed marshalling relocations for section at {0:X8} ===== ", section.Header.offsetInFile)); + System.Console.WriteLine(String.Format(" ===== Mixed marshalling relocations for section at {0:X8} ===== ", section.Header.offsetInFile)); #endif - using var relocationsReader = new BinaryReader(relocationsStream, Encoding.Default, true); - for (int i = 0; i < section.Header.numMixedMarshallingData; i++) + using var relocationsReader = new BinaryReader(relocationsStream, Encoding.Default, true); + for (int i = 0; i < section.Header.numMixedMarshallingData; i++) + { + UInt32 count = relocationsReader.ReadUInt32(); + UInt32 offsetInSection = relocationsReader.ReadUInt32(); + Debug.Assert(offsetInSection <= section.Header.uncompressedSize); + var type = ReadSectionReference(relocationsReader); + var typeDefn = new StructReference { - UInt32 count = relocationsReader.ReadUInt32(); - UInt32 
offsetInSection = relocationsReader.ReadUInt32(); - Debug.Assert(offsetInSection <= section.Header.uncompressedSize); - var type = ReadSectionReference(relocationsReader); - var typeDefn = new StructReference - { - Offset = Sections[(int)type.Section].Header.offsetInFile + type.Offset - }; + Offset = Sections[(int)type.Section].Header.offsetInFile + type.Offset + }; - Seek(section, offsetInSection); - MixedMarshal(count, typeDefn.Resolve(this)); + Seek(section, offsetInSection); + MixedMarshal(count, typeDefn.Resolve(this)); #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" {0:X8} [{1}] --> {2}:{3:X8}", offsetInSection, count, (SectionType)type.Section, type.Offset)); + System.Console.WriteLine(String.Format(" {0:X8} [{1}] --> {2}:{3:X8}", offsetInSection, count, (SectionType)type.Section, type.Offset)); #endif - } } + } - private void ReadSectionMixedMarshallingRelocations(Section section) - { - if (section.Header.numMixedMarshallingData == 0) return; - - InputStream.Seek(section.Header.mixedMarshallingDataOffset, SeekOrigin.Begin); - if (section.Header.compression == 4) - { - using var reader = new BinaryReader(InputStream, Encoding.Default, true); - UInt32 compressedSize = reader.ReadUInt32(); - byte[] compressed = reader.ReadBytes((int)compressedSize); - var uncompressed = Granny2Compressor.Decompress4( - compressed, (int)(section.Header.numMixedMarshallingData * 16)); - using var ms = new MemoryStream(uncompressed); - ReadSectionMixedMarshallingRelocationsInternal(section, ms); - } - else - { - ReadSectionMixedMarshallingRelocationsInternal(section, InputStream); - } - } + private void ReadSectionMixedMarshallingRelocations(Section section) + { + if (section.Header.numMixedMarshallingData == 0) return; - public SectionReference ReadSectionReferenceUnchecked(BinaryReader reader) + InputStream.Seek(section.Header.mixedMarshallingDataOffset, SeekOrigin.Begin); + if (section.Header.compression == 4) { - return new SectionReference - { - Section = reader.ReadUInt32(), - Offset = reader.ReadUInt32() - }; + using var reader = new BinaryReader(InputStream, Encoding.Default, true); + UInt32 compressedSize = reader.ReadUInt32(); + byte[] compressed = reader.ReadBytes((int)compressedSize); + var uncompressed = Granny2Compressor.Decompress4( + compressed, (int)(section.Header.numMixedMarshallingData * 16)); + using var ms = new MemoryStream(uncompressed); + ReadSectionMixedMarshallingRelocationsInternal(section, ms); } - - public SectionReference ReadSectionReferenceUnchecked() + else { - return ReadSectionReferenceUnchecked(InputReader); + ReadSectionMixedMarshallingRelocationsInternal(section, InputStream); } + } - public SectionReference ReadSectionReference(BinaryReader reader) + public SectionReference ReadSectionReferenceUnchecked(BinaryReader reader) + { + return new SectionReference { - var reference = ReadSectionReferenceUnchecked(reader); - Debug.Assert(reference.Section < Sections.Count); - Debug.Assert(reference.Offset <= Sections[(int)reference.Section].Header.uncompressedSize); - return reference; - } + Section = reader.ReadUInt32(), + Offset = reader.ReadUInt32() + }; + } - public SectionReference ReadSectionReference() - { - return ReadSectionReference(InputReader); - } + public SectionReference ReadSectionReferenceUnchecked() + { + return ReadSectionReferenceUnchecked(InputReader); + } - public RelocatableReference ReadReference() - { - var reference = new RelocatableReference(); - if (Magic.Is32Bit) - reference.Offset = Reader.ReadUInt32(); - else - 
reference.Offset = Reader.ReadUInt64(); - return reference; - } + public SectionReference ReadSectionReference(BinaryReader reader) + { + var reference = ReadSectionReferenceUnchecked(reader); + Debug.Assert(reference.Section < Sections.Count); + Debug.Assert(reference.Offset <= Sections[(int)reference.Section].Header.uncompressedSize); + return reference; + } - public StructReference ReadStructReference() - { - var reference = new StructReference(); - if (Magic.Is32Bit) - reference.Offset = Reader.ReadUInt32(); - else - reference.Offset = Reader.ReadUInt64(); - return reference; - } + public SectionReference ReadSectionReference() + { + return ReadSectionReference(InputReader); + } - public StringReference ReadStringReference() - { - var reference = new StringReference(); - if (Magic.Is32Bit) - reference.Offset = Reader.ReadUInt32(); - else - reference.Offset = Reader.ReadUInt64(); - return reference; - } + public RelocatableReference ReadReference() + { + var reference = new RelocatableReference(); + if (Magic.Is32Bit) + reference.Offset = Reader.ReadUInt32(); + else + reference.Offset = Reader.ReadUInt64(); + return reference; + } - public ArrayReference ReadArrayReference() - { - var reference = new ArrayReference - { - Size = Reader.ReadUInt32() - }; - if (Magic.Is32Bit) - reference.Offset = Reader.ReadUInt32(); - else - reference.Offset = Reader.ReadUInt64(); - return reference; - } + public StructReference ReadStructReference() + { + var reference = new StructReference(); + if (Magic.Is32Bit) + reference.Offset = Reader.ReadUInt32(); + else + reference.Offset = Reader.ReadUInt64(); + return reference; + } + + public StringReference ReadStringReference() + { + var reference = new StringReference(); + if (Magic.Is32Bit) + reference.Offset = Reader.ReadUInt32(); + else + reference.Offset = Reader.ReadUInt64(); + return reference; + } - public ArrayIndicesReference ReadArrayIndicesReference() + public ArrayReference ReadArrayReference() + { + var reference = new ArrayReference { - var reference = new ArrayIndicesReference - { - Size = Reader.ReadUInt32() - }; - if (Magic.Is32Bit) - reference.Offset = Reader.ReadUInt32(); - else - reference.Offset = Reader.ReadUInt64(); - Debug.Assert(!reference.IsValid || reference.Size == 0 || reference.Offset + reference.Size * 4 <= (ulong)Stream.Length); - return reference; - } + Size = Reader.ReadUInt32() + }; + if (Magic.Is32Bit) + reference.Offset = Reader.ReadUInt32(); + else + reference.Offset = Reader.ReadUInt64(); + return reference; + } - public MemberDefinition ReadMemberDefinition() + public ArrayIndicesReference ReadArrayIndicesReference() + { + var reference = new ArrayIndicesReference { + Size = Reader.ReadUInt32() + }; + if (Magic.Is32Bit) + reference.Offset = Reader.ReadUInt32(); + else + reference.Offset = Reader.ReadUInt64(); + Debug.Assert(!reference.IsValid || reference.Size == 0 || reference.Offset + reference.Size * 4 <= (ulong)Stream.Length); + return reference; + } + + public MemberDefinition ReadMemberDefinition() + { #if DEBUG_GR2_SERIALIZATION - var defnOffset = Stream.Position; + var defnOffset = Stream.Position; #endif - var defn = new MemberDefinition(); - int typeId = Reader.ReadInt32(); - if (typeId > (uint)MemberType.Max) - throw new ParsingException(String.Format("Unsupported member type: {0}", typeId)); - - defn.Type = (MemberType)typeId; - var name = ReadStringReference(); - Debug.Assert(!defn.IsValid || name.IsValid); - if (defn.IsValid) - { - defn.Name = name.Resolve(this); - - // Remove "The Divinity Engine" 
prefix from LSM fields - if (defn.Name.StartsWith("The Divinity Engine", StringComparison.Ordinal)) - { - defn.Name = defn.Name[19..]; - } + var defn = new MemberDefinition(); + int typeId = Reader.ReadInt32(); + if (typeId > (uint)MemberType.Max) + throw new ParsingException(String.Format("Unsupported member type: {0}", typeId)); + + defn.Type = (MemberType)typeId; + var name = ReadStringReference(); + Debug.Assert(!defn.IsValid || name.IsValid); + if (defn.IsValid) + { + defn.Name = name.Resolve(this); - defn.GrannyName = defn.Name; + // Remove "The Divinity Engine" prefix from LSM fields + if (defn.Name.StartsWith("The Divinity Engine", StringComparison.Ordinal)) + { + defn.Name = defn.Name[19..]; } - defn.Definition = ReadStructReference(); - defn.ArraySize = Reader.ReadUInt32(); - defn.Extra = new UInt32[MemberDefinition.ExtraTagCount]; - for (var i = 0; i < MemberDefinition.ExtraTagCount; i++) - defn.Extra[i] = Reader.ReadUInt32(); - // TODO 64-bit: ??? - if (Magic.Is32Bit) - defn.Unknown = Reader.ReadUInt32(); - else - defn.Unknown = (UInt32)Reader.ReadUInt64(); - Debug.Assert(!defn.IsValid || defn.Unknown == 0); - - if (defn.Type == MemberType.Inline || defn.Type == MemberType.Reference || defn.Type == MemberType.ArrayOfReferences || - defn.Type == MemberType.ReferenceToArray) - Debug.Assert(defn.Definition.IsValid); + defn.GrannyName = defn.Name; + } + defn.Definition = ReadStructReference(); + defn.ArraySize = Reader.ReadUInt32(); + defn.Extra = new UInt32[MemberDefinition.ExtraTagCount]; + for (var i = 0; i < MemberDefinition.ExtraTagCount; i++) + defn.Extra[i] = Reader.ReadUInt32(); + // TODO 64-bit: ??? + if (Magic.Is32Bit) + defn.Unknown = Reader.ReadUInt32(); + else + defn.Unknown = (UInt32)Reader.ReadUInt64(); + + Debug.Assert(!defn.IsValid || defn.Unknown == 0); + + if (defn.Type == MemberType.Inline || defn.Type == MemberType.Reference || defn.Type == MemberType.ArrayOfReferences || + defn.Type == MemberType.ReferenceToArray) + Debug.Assert(defn.Definition.IsValid); #if DEBUG_GR2_SERIALIZATION - string description; - if (defn.IsValid) - { - if (defn.ArraySize != 0) - description = String.Format(" [{0:X8}] {1}: {2}[{3}]", defnOffset, defn.Name, defn.Type.ToString(), defn.ArraySize); - else - description = String.Format(" [{0:X8}] {1}: {2}", defnOffset, defn.Name, defn.Type.ToString()); + string description; + if (defn.IsValid) + { + if (defn.ArraySize != 0) + description = String.Format(" [{0:X8}] {1}: {2}[{3}]", defnOffset, defn.Name, defn.Type.ToString(), defn.ArraySize); + else + description = String.Format(" [{0:X8}] {1}: {2}", defnOffset, defn.Name, defn.Type.ToString()); - if (defn.Definition.IsValid) + if (defn.Definition.IsValid) + { + if (!DebugPendingResolve.Contains(defn.Definition)) { - if (!DebugPendingResolve.Contains(defn.Definition)) - { - DebugPendingResolve.Add(defn.Definition); - System.Console.WriteLine(String.Format(" ===== Debug resolve for {0:X8} ===== ", defn.Definition.Offset)); - defn.Definition.Resolve(this); - System.Console.WriteLine(String.Format(" ===== End debug resolve for {0:X8} ===== ", defn.Definition.Offset)); - } - description += String.Format(" ", defn.Definition.Offset); + DebugPendingResolve.Add(defn.Definition); + System.Console.WriteLine(String.Format(" ===== Debug resolve for {0:X8} ===== ", defn.Definition.Offset)); + defn.Definition.Resolve(this); + System.Console.WriteLine(String.Format(" ===== End debug resolve for {0:X8} ===== ", defn.Definition.Offset)); } - - if (defn.Extra[0] != 0 || defn.Extra[1] != 0 || defn.Extra[2] != 
0) - description += String.Format(" Extra: {0} {1} {2}", defn.Extra[0], defn.Extra[1], defn.Extra[2]); - } - else - { - description = String.Format(" : {0}", defn.Type.ToString()); + description += String.Format(" ", defn.Definition.Offset); } - System.Console.WriteLine(description); -#endif - return defn; + if (defn.Extra[0] != 0 || defn.Extra[1] != 0 || defn.Extra[2] != 0) + description += String.Format(" Extra: {0} {1} {2}", defn.Extra[0], defn.Extra[1], defn.Extra[2]); } - - public StructDefinition ReadStructDefinition() + else { - var defn = new StructDefinition(); - while (true) - { - var member = ReadMemberDefinition(); - if (member.IsValid) - defn.Members.Add(member); - else - break; - } + description = String.Format(" : {0}", defn.Type.ToString()); + } - return defn; + System.Console.WriteLine(description); +#endif + return defn; + } + + public StructDefinition ReadStructDefinition() + { + var defn = new StructDefinition(); + while (true) + { + var member = ReadMemberDefinition(); + if (member.IsValid) + defn.Members.Add(member); + else + break; } - internal object ReadStruct(StructDefinition definition, MemberType memberType, object node, object parent) + return defn; + } + + internal object ReadStruct(StructDefinition definition, MemberType memberType, object node, object parent) + { + var offset = (UInt32)Stream.Position; + object cachedNode = null; + if (memberType != MemberType.Inline && CachedStructs.TryGetValue(offset, out cachedNode)) { - var offset = (UInt32)Stream.Position; - object cachedNode = null; - if (memberType != MemberType.Inline && CachedStructs.TryGetValue(offset, out cachedNode)) - { #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format("Skipped cached struct {1} at {0:X8}", offset, node.ToString())); + System.Console.WriteLine(String.Format("Skipped cached struct {1} at {0:X8}", offset, node.ToString())); #endif - Stream.Position += definition.Size(this); - return cachedNode; - } + Stream.Position += definition.Size(this); + return cachedNode; + } - // Work around serialization of UserData and ExtendedData fields - // whose structure may differ depending on the game and GR2 version - if (node != null && node.GetType() == typeof(System.Object)) - { - node = null; - } + // Work around serialization of UserData and ExtendedData fields + // whose structure may differ depending on the game and GR2 version + if (node != null && node.GetType() == typeof(System.Object)) + { + node = null; + } - if (node != null) - { - // Don't save inline structs in the cached struct map, as they can occupy the same address as a non-inline struct - // if they're at the beginning of said struct. - // They also cannot be referenced from multiple locations, so caching them is of no use. - if (memberType != MemberType.Inline) - CachedStructs.Add(offset, node); + if (node != null) + { + // Don't save inline structs in the cached struct map, as they can occupy the same address as a non-inline struct + // if they're at the beginning of said struct. + // They also cannot be referenced from multiple locations, so caching them is of no use. + if (memberType != MemberType.Inline) + CachedStructs.Add(offset, node); #if DEBUG_GR2_FORMAT_DIFFERENCES - // Create a struct definition from this instance and check if the GR2 type differs from the local type. - var localDefn = new StructDefinition(); - localDefn.LoadFromType(node.GetType(), null); + // Create a struct definition from this instance and check if the GR2 type differs from the local type. 
+ var localDefn = new StructDefinition(); + localDefn.LoadFromType(node.GetType(), null); - var localMembers = localDefn.Members.Where(m => m.ShouldSerialize(Header.tag)).ToList(); - var defnMembers = definition.Members.Where(m => m.ShouldSerialize(Header.tag)).ToList(); + var localMembers = localDefn.Members.Where(m => m.ShouldSerialize(Header.tag)).ToList(); + var defnMembers = definition.Members.Where(m => m.ShouldSerialize(Header.tag)).ToList(); - if (localMembers.Count != defnMembers.Count) + if (localMembers.Count != defnMembers.Count) + { + Trace.TraceWarning(String.Format("Struct {0} differs: Field count differs ({1} vs {2})", node.GetType().Name, localMembers.Count, defnMembers.Count)); + for (int i = 0; i < defnMembers.Count; i++) { - Trace.TraceWarning(String.Format("Struct {0} differs: Field count differs ({1} vs {2})", node.GetType().Name, localMembers.Count, defnMembers.Count)); - for (int i = 0; i < defnMembers.Count; i++) - { - var member = defnMembers[i]; - Trace.TraceWarning(String.Format("\tField {0}: {1}[{2}]", member.Name, member.Type, member.ArraySize)); - } + var member = defnMembers[i]; + Trace.TraceWarning(String.Format("\tField {0}: {1}[{2}]", member.Name, member.Type, member.ArraySize)); } - else + } + else + { + for (int i = 0; i < localMembers.Count; i++) { - for (int i = 0; i < localMembers.Count; i++) + var member = localMembers[i]; + var local = defnMembers[i]; + if (member.Type != local.Type) { - var member = localMembers[i]; - var local = defnMembers[i]; - if (member.Type != local.Type) - { - Trace.TraceWarning(String.Format( - "Struct {0}: Field {1} type differs ({2} vs {3})", - node.GetType().Name, local.Name, local.Type, member.Type - )); - } - - if (!member.GrannyName.Equals(local.GrannyName)) - { - Trace.TraceWarning(String.Format( - "Struct {0}: Field {1} name differs ({2} vs {3})", - node.GetType().Name, local.Name, local.GrannyName, member.GrannyName - )); - } - - if (member.ArraySize != local.ArraySize) - { - Trace.TraceWarning(String.Format( - "Struct {0}: Field {1} array size differs ({2} vs {3})", - node.GetType().Name, local.Name, local.ArraySize, member.ArraySize - )); - } + Trace.TraceWarning(String.Format( + "Struct {0}: Field {1} type differs ({2} vs {3})", + node.GetType().Name, local.Name, local.Type, member.Type + )); } - } -#endif - definition.MapType(node); - foreach (var member in definition.Members) - { - var field = member.LookupFieldInfo(node); - if (field != null) + if (!member.GrannyName.Equals(local.GrannyName)) { - var value = ReadInstance(member, field.GetValue(node), field.FieldType, node); - field.SetValue(node, value); + Trace.TraceWarning(String.Format( + "Struct {0}: Field {1} name differs ({2} vs {3})", + node.GetType().Name, local.Name, local.GrannyName, member.GrannyName + )); } - else + + if (member.ArraySize != local.ArraySize) { - ReadInstance(member, null, null, node); + Trace.TraceWarning(String.Format( + "Struct {0}: Field {1} array size differs ({2} vs {3})", + node.GetType().Name, local.Name, local.ArraySize, member.ArraySize + )); } } } - else +#endif + + definition.MapType(node); + foreach (var member in definition.Members) { -#if DEBUG_GR2_FORMAT_DIFFERENCES - var defnMembers = definition.Members.Where(m => m.ShouldSerialize(Header.tag)).ToList(); - Trace.TraceWarning("Unnamed struct not defined locally"); - for (int i = 0; i < defnMembers.Count; i++) + var field = member.LookupFieldInfo(node); + if (field != null) { - var member = defnMembers[i]; - Trace.TraceWarning(String.Format("\tField {0}: {1}[{2}]", 
member.Name, member.Type, member.ArraySize)); + var value = ReadInstance(member, field.GetValue(node), field.FieldType, node); + field.SetValue(node, value); } -#endif - - foreach (var member in definition.Members) + else { - ReadInstance(member, null, null, null); + ReadInstance(member, null, null, node); } } + } + else + { +#if DEBUG_GR2_FORMAT_DIFFERENCES + var defnMembers = definition.Members.Where(m => m.ShouldSerialize(Header.tag)).ToList(); + Trace.TraceWarning("Unnamed struct not defined locally"); + for (int i = 0; i < defnMembers.Count; i++) + { + var member = defnMembers[i]; + Trace.TraceWarning(String.Format("\tField {0}: {1}[{2}]", member.Name, member.Type, member.ArraySize)); + } +#endif - return node; + foreach (var member in definition.Members) + { + ReadInstance(member, null, null, null); + } } - internal object ReadInstance(MemberDefinition definition, object node, Type propertyType, object parent) + return node; + } + + internal object ReadInstance(MemberDefinition definition, object node, Type propertyType, object parent) + { + if (definition.SerializationKind == SerializationKind.UserRaw) + return definition.Serializer.Read(this, null, definition, 0, parent); + + if (definition.ArraySize == 0) { - if (definition.SerializationKind == SerializationKind.UserRaw) - return definition.Serializer.Read(this, null, definition, 0, parent); + return ReadElement(definition, node, propertyType, parent); + } - if (definition.ArraySize == 0) + Type elementType = null; + if (propertyType != null) + { + if (definition.SerializationKind == SerializationKind.UserMember) { - return ReadElement(definition, node, propertyType, parent); + // Do unserialization directly on the whole array if per-member serialization was requested. + // This mode is a bit odd, as we resolve StructRef-s for non-arrays, but don't for array types. + StructDefinition defn = null; + if (definition.Definition.IsValid) + defn = definition.Definition.Resolve(this); + return definition.Serializer.Read(this, defn, definition, definition.ArraySize, parent); } - - Type elementType = null; - if (propertyType != null) + else if (propertyType.IsArray) { - if (definition.SerializationKind == SerializationKind.UserMember) - { - // Do unserialization directly on the whole array if per-member serialization was requested. - // This mode is a bit odd, as we resolve StructRef-s for non-arrays, but don't for array types. - StructDefinition defn = null; - if (definition.Definition.IsValid) - defn = definition.Definition.Resolve(this); - return definition.Serializer.Read(this, defn, definition, definition.ArraySize, parent); - } - else if (propertyType.IsArray) - { - // If the property is a native array (ie. SomeType[]), create an array instance and set its values - elementType = propertyType.GetElementType(); + // If the property is a native array (ie. 
SomeType[]), create an array instance and set its values + elementType = propertyType.GetElementType(); - Array objs = Helpers.CreateArrayInstance(propertyType, (int)definition.ArraySize) as Array; - for (int i = 0; i < definition.ArraySize; i++) - { - objs.SetValue(ReadElement(definition, objs.GetValue(i), elementType, parent), i); - } - return objs; - } - else + Array objs = Helpers.CreateArrayInstance(propertyType, (int)definition.ArraySize) as Array; + for (int i = 0; i < definition.ArraySize; i++) { - // For non-native arrays we always assume the property is an IList - node ??= Helpers.CreateInstance(propertyType); - - var items = node as System.Collections.IList; - for (int i = 0; i < definition.ArraySize; i++) - { - items.Add(ReadElement(definition, null, elementType, parent)); - } - - return items; + objs.SetValue(ReadElement(definition, objs.GetValue(i), elementType, parent), i); } + return objs; } else { + // For non-native arrays we always assume the property is an IList + node ??= Helpers.CreateInstance(propertyType); + + var items = node as System.Collections.IList; for (int i = 0; i < definition.ArraySize; i++) - ReadElement(definition, null, null, parent); - return null; + { + items.Add(ReadElement(definition, null, elementType, parent)); + } + + return items; } } - - private object ReadElement(MemberDefinition definition, object node, Type propertyType, object parent) + else { + for (int i = 0; i < definition.ArraySize; i++) + ReadElement(definition, null, null, parent); + return null; + } + } + + private object ReadElement(MemberDefinition definition, object node, Type propertyType, object parent) + { #if DEBUG_GR2_SERIALIZATION - var offsetInFile = Stream.Position; + var offsetInFile = Stream.Position; #endif - var kind = definition.SerializationKind; - Debug.Assert(kind == SerializationKind.Builtin || !definition.IsScalar); - if (node == null && - propertyType != null && - !definition.IsScalar && - (kind == SerializationKind.Builtin || kind == SerializationKind.UserElement) && - // Variant construction is a special case as we don't know the struct defn beforehand - definition.Type != MemberType.VariantReference) - { - node = Helpers.CreateInstance(propertyType); - } + var kind = definition.SerializationKind; + Debug.Assert(kind == SerializationKind.Builtin || !definition.IsScalar); + if (node == null && + propertyType != null && + !definition.IsScalar && + (kind == SerializationKind.Builtin || kind == SerializationKind.UserElement) && + // Variant construction is a special case as we don't know the struct defn beforehand + definition.Type != MemberType.VariantReference) + { + node = Helpers.CreateInstance(propertyType); + } - switch (definition.Type) - { - case MemberType.Inline: + switch (definition.Type) + { + case MemberType.Inline: + Debug.Assert(definition.Definition.IsValid); +#if DEBUG_GR2_SERIALIZATION + System.Console.WriteLine(String.Format(" === Inline Struct {0} === ", definition.Name)); +#endif + if (kind == SerializationKind.UserElement || kind == SerializationKind.UserMember) + node = definition.Serializer.Read(this, definition.Definition.Resolve(this), definition, 0, parent); + else + node = ReadStruct(definition.Definition.Resolve(this), definition.Type, node, parent); +#if DEBUG_GR2_SERIALIZATION + System.Console.WriteLine(" === End Struct === "); +#endif + break; + + case MemberType.Reference: + { Debug.Assert(definition.Definition.IsValid); + var r = ReadReference(); + + if (r.IsValid && parent != null) + { + var originalPos = Stream.Position; + 
Seek(r); #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" === Inline Struct {0} === ", definition.Name)); + System.Console.WriteLine(String.Format(" === Struct <{0}> at {1:X8} === ", definition.Name, Stream.Position)); #endif - if (kind == SerializationKind.UserElement || kind == SerializationKind.UserMember) - node = definition.Serializer.Read(this, definition.Definition.Resolve(this), definition, 0, parent); - else - node = ReadStruct(definition.Definition.Resolve(this), definition.Type, node, parent); + if (kind == SerializationKind.UserElement || kind == SerializationKind.UserMember) + node = definition.Serializer.Read(this, definition.Definition.Resolve(this), definition, 0, parent); + else + node = ReadStruct(definition.Definition.Resolve(this), definition.Type, node, parent); #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(" === End Struct === "); + System.Console.WriteLine(" === End Struct === "); #endif + Stream.Seek(originalPos, SeekOrigin.Begin); + } + else + node = null; break; + } - case MemberType.Reference: + case MemberType.VariantReference: + { + var structRef = ReadStructReference(); + var r = ReadReference(); + + if (r.IsValid && parent != null) { - Debug.Assert(definition.Definition.IsValid); - var r = ReadReference(); + var structDefn = structRef.Resolve(this); + if (definition.TypeSelector != null && definition.Type == MemberType.VariantReference) + propertyType = definition.TypeSelector.SelectType(definition, structDefn, parent); + if (propertyType != null) + node = Helpers.CreateInstance(propertyType); - if (r.IsValid && parent != null) + if (node != null) { var originalPos = Stream.Position; Seek(r); #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" === Struct <{0}> at {1:X8} === ", definition.Name, Stream.Position)); + System.Console.WriteLine(String.Format(" === Variant Struct <{0}> at {1:X8} === ", definition.Name, Stream.Position)); #endif if (kind == SerializationKind.UserElement || kind == SerializationKind.UserMember) - node = definition.Serializer.Read(this, definition.Definition.Resolve(this), definition, 0, parent); + node = definition.Serializer.Read(this, structDefn, definition, 0, parent); else - node = ReadStruct(definition.Definition.Resolve(this), definition.Type, node, parent); + node = ReadStruct(structRef.Resolve(this), definition.Type, node, parent); #if DEBUG_GR2_SERIALIZATION System.Console.WriteLine(" === End Struct === "); #endif Stream.Seek(originalPos, SeekOrigin.Begin); } - else - node = null; - break; } + else + node = null; + break; + } - case MemberType.VariantReference: + case MemberType.ArrayOfReferences: + { + // Serializing as a struct member is nooooot a very good idea here. 
+ Debug.Assert(kind != SerializationKind.UserMember); + Debug.Assert(definition.Definition.IsValid); + var indices = ReadArrayIndicesReference(); +#if DEBUG_GR2_SERIALIZATION + System.Console.WriteLine(String.Format(" Array of references at [{0:X8}]", indices.Offset)); +#endif + + if (Header.version >= 7) { - var structRef = ReadStructReference(); - var r = ReadReference(); + Debug.Assert(indices.IsValid == (indices.Size != 0)); + } - if (r.IsValid && parent != null) - { - var structDefn = structRef.Resolve(this); - if (definition.TypeSelector != null && definition.Type == MemberType.VariantReference) - propertyType = definition.TypeSelector.SelectType(definition, structDefn, parent); - if (propertyType != null) - node = Helpers.CreateInstance(propertyType); + if (indices.IsValid && indices.Size > 0 && node != null && parent != null) + { + var items = node as System.Collections.IList; + var type = items.GetType().GetGenericArguments().Single(); - if (node != null) - { - var originalPos = Stream.Position; - Seek(r); + var refs = indices.Resolve(this); + var originalPos = Stream.Position; + for (int i = 0; i < refs.Count; i++) + { + Seek(refs[i]); #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" === Variant Struct <{0}> at {1:X8} === ", definition.Name, Stream.Position)); + System.Console.WriteLine(String.Format(" === Struct <{0}> at {1:X8} === ", definition.Name, Stream.Position)); #endif - if (kind == SerializationKind.UserElement || kind == SerializationKind.UserMember) - node = definition.Serializer.Read(this, structDefn, definition, 0, parent); - else - node = ReadStruct(structRef.Resolve(this), definition.Type, node, parent); + if (kind == SerializationKind.UserElement) + { + object element = definition.Serializer.Read(this, definition.Definition.Resolve(this), definition, 0, parent); + items.Add(element); + } + else + { + object element = Helpers.CreateInstance(type); + // TODO: Only create a new instance if we don't have a CachedStruct available! + element = ReadStruct(definition.Definition.Resolve(this), definition.Type, element, parent); + items.Add(element); + + } #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(" === End Struct === "); + System.Console.WriteLine(" === End Struct === "); #endif - Stream.Seek(originalPos, SeekOrigin.Begin); - } } - else - node = null; - break; + + Stream.Seek(originalPos, SeekOrigin.Begin); + node = items; } + else + node = null; + break; + } - case MemberType.ArrayOfReferences: + case MemberType.ReferenceToArray: + case MemberType.ReferenceToVariantArray: + { + StructReference structRef; + if (definition.Type == MemberType.ReferenceToVariantArray) + structRef = ReadStructReference(); + else + structRef = definition.Definition; + + var itemsRef = ReadArrayReference(); + + if (Header.version >= 7) { - // Serializing as a struct member is nooooot a very good idea here. 
- Debug.Assert(kind != SerializationKind.UserMember); - Debug.Assert(definition.Definition.IsValid); - var indices = ReadArrayIndicesReference(); -#if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" Array of references at [{0:X8}]", indices.Offset)); -#endif + Debug.Assert(itemsRef.IsValid == (itemsRef.Size != 0)); + } + + if (itemsRef.IsValid && + itemsRef.Size > 0 && + parent != null && + (node != null || kind == SerializationKind.UserMember)) + { + Debug.Assert(structRef.IsValid); + var structType = structRef.Resolve(this); + var originalPos = Stream.Position; + Seek(itemsRef); - if (Header.version >= 7) + if (kind == SerializationKind.UserMember) { - Debug.Assert(indices.IsValid == (indices.Size != 0)); + // For ReferenceTo(Variant)Array, we start serialization after resolving the array ref itself. + node = definition.Serializer.Read(this, structType, definition, itemsRef.Size, parent); } - - if (indices.IsValid && indices.Size > 0 && node != null && parent != null) + else { var items = node as System.Collections.IList; var type = items.GetType().GetGenericArguments().Single(); + if (definition.Type == MemberType.ReferenceToVariantArray && + kind != SerializationKind.UserElement && + definition.TypeSelector != null) + type = definition.TypeSelector.SelectType(definition, structType, parent); - var refs = indices.Resolve(this); - var originalPos = Stream.Position; - for (int i = 0; i < refs.Count; i++) + for (int i = 0; i < itemsRef.Size; i++) { - Seek(refs[i]); #if DEBUG_GR2_SERIALIZATION System.Console.WriteLine(String.Format(" === Struct <{0}> at {1:X8} === ", definition.Name, Stream.Position)); #endif if (kind == SerializationKind.UserElement) { - object element = definition.Serializer.Read(this, definition.Definition.Resolve(this), definition, 0, parent); + object element = definition.Serializer.Read(this, structType, definition, 0, parent); items.Add(element); } else { object element = Helpers.CreateInstance(type); - // TODO: Only create a new instance if we don't have a CachedStruct available! - element = ReadStruct(definition.Definition.Resolve(this), definition.Type, element, parent); + element = ReadStruct(structType, definition.Type, element, parent); items.Add(element); - } #if DEBUG_GR2_SERIALIZATION System.Console.WriteLine(" === End Struct === "); #endif } - - Stream.Seek(originalPos, SeekOrigin.Begin); - node = items; } - else - node = null; - break; - } - - case MemberType.ReferenceToArray: - case MemberType.ReferenceToVariantArray: - { - StructReference structRef; - if (definition.Type == MemberType.ReferenceToVariantArray) - structRef = ReadStructReference(); - else - structRef = definition.Definition; - - var itemsRef = ReadArrayReference(); - - if (Header.version >= 7) - { - Debug.Assert(itemsRef.IsValid == (itemsRef.Size != 0)); - } - - if (itemsRef.IsValid && - itemsRef.Size > 0 && - parent != null && - (node != null || kind == SerializationKind.UserMember)) - { - Debug.Assert(structRef.IsValid); - var structType = structRef.Resolve(this); - var originalPos = Stream.Position; - Seek(itemsRef); - - if (kind == SerializationKind.UserMember) - { - // For ReferenceTo(Variant)Array, we start serialization after resolving the array ref itself. 
- node = definition.Serializer.Read(this, structType, definition, itemsRef.Size, parent); - } - else - { - var items = node as System.Collections.IList; - var type = items.GetType().GetGenericArguments().Single(); - if (definition.Type == MemberType.ReferenceToVariantArray && - kind != SerializationKind.UserElement && - definition.TypeSelector != null) - type = definition.TypeSelector.SelectType(definition, structType, parent); - - for (int i = 0; i < itemsRef.Size; i++) - { -#if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" === Struct <{0}> at {1:X8} === ", definition.Name, Stream.Position)); -#endif - if (kind == SerializationKind.UserElement) - { - object element = definition.Serializer.Read(this, structType, definition, 0, parent); - items.Add(element); - } - else - { - object element = Helpers.CreateInstance(type); - element = ReadStruct(structType, definition.Type, element, parent); - items.Add(element); - } -#if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(" === End Struct === "); -#endif - } - } - Stream.Seek(originalPos, SeekOrigin.Begin); - } - else - node = null; - break; + Stream.Seek(originalPos, SeekOrigin.Begin); } - - case MemberType.String: - var str = ReadStringReference(); - if (str.IsValid) - node = str.Resolve(this); else node = null; break; + } - case MemberType.Transform: - var transform = new Transform(); - transform.Flags = Reader.ReadUInt32(); + case MemberType.String: + var str = ReadStringReference(); + if (str.IsValid) + node = str.Resolve(this); + else + node = null; + break; - for (int i = 0; i < 3; i++) - transform.Translation[i] = Reader.ReadSingle(); + case MemberType.Transform: + var transform = new Transform(); + transform.Flags = Reader.ReadUInt32(); - transform.Rotation.X = Reader.ReadSingle(); - transform.Rotation.Y = Reader.ReadSingle(); - transform.Rotation.Z = Reader.ReadSingle(); - transform.Rotation.W = Reader.ReadSingle(); + for (int i = 0; i < 3; i++) + transform.Translation[i] = Reader.ReadSingle(); - for (int i = 0; i < 3; i++) - { - for (int j = 0; j < 3; j++) - transform.ScaleShear[i, j] = Reader.ReadSingle(); - } + transform.Rotation.X = Reader.ReadSingle(); + transform.Rotation.Y = Reader.ReadSingle(); + transform.Rotation.Z = Reader.ReadSingle(); + transform.Rotation.W = Reader.ReadSingle(); - node = transform; - break; + for (int i = 0; i < 3; i++) + { + for (int j = 0; j < 3; j++) + transform.ScaleShear[i, j] = Reader.ReadSingle(); + } - case MemberType.Real16: - throw new NotImplementedException("TODO"); + node = transform; + break; - case MemberType.Real32: - node = Reader.ReadSingle(); - break; + case MemberType.Real16: + throw new NotImplementedException("TODO"); - case MemberType.Int8: - case MemberType.BinormalInt8: - node = Reader.ReadSByte(); - break; + case MemberType.Real32: + node = Reader.ReadSingle(); + break; - case MemberType.UInt8: - case MemberType.NormalUInt8: - node = Reader.ReadByte(); - break; + case MemberType.Int8: + case MemberType.BinormalInt8: + node = Reader.ReadSByte(); + break; - case MemberType.Int16: - case MemberType.BinormalInt16: - node = Reader.ReadInt16(); - break; + case MemberType.UInt8: + case MemberType.NormalUInt8: + node = Reader.ReadByte(); + break; - case MemberType.UInt16: - case MemberType.NormalUInt16: - node = Reader.ReadUInt16(); - break; + case MemberType.Int16: + case MemberType.BinormalInt16: + node = Reader.ReadInt16(); + break; - case MemberType.Int32: - node = Reader.ReadInt32(); - break; + case MemberType.UInt16: + case MemberType.NormalUInt16: + node = 
Reader.ReadUInt16(); + break; - case MemberType.UInt32: - node = Reader.ReadUInt32(); - break; + case MemberType.Int32: + node = Reader.ReadInt32(); + break; - default: - throw new ParsingException(String.Format("Unhandled member type: {0}", definition.Type.ToString())); - } + case MemberType.UInt32: + node = Reader.ReadUInt32(); + break; + + default: + throw new ParsingException(String.Format("Unhandled member type: {0}", definition.Type.ToString())); + } #if DEBUG_GR2_SERIALIZATION - if (node != null) - System.Console.WriteLine(String.Format(" [{0:X8}] {1}: {2}", offsetInFile, definition.Name, node.ToString())); - else - System.Console.WriteLine(String.Format(" [{0:X8}] {1}: ", offsetInFile, definition.Name)); + if (node != null) + System.Console.WriteLine(String.Format(" [{0:X8}] {1}: {2}", offsetInFile, definition.Name, node.ToString())); + else + System.Console.WriteLine(String.Format(" [{0:X8}] {1}: ", offsetInFile, definition.Name)); #endif - return node; - } + return node; + } - internal string ReadString() + internal string ReadString() + { + // Not terribly efficient, but it'll do for now + var bytes = new List(); + while (true) { - // Not terribly efficient, but it'll do for now - var bytes = new List(); - while (true) - { - byte b = Reader.ReadByte(); - if (b != 0) - bytes.Add(b); - else - break; - } - - return Encoding.UTF8.GetString(bytes.ToArray()); + byte b = Reader.ReadByte(); + if (b != 0) + bytes.Add(b); + else + break; } - internal void Seek(SectionReference reference) - { - Debug.Assert(reference.IsValid); - Seek(reference.Section, reference.Offset); - } + return Encoding.UTF8.GetString(bytes.ToArray()); + } - internal void Seek(RelocatableReference reference) - { - Debug.Assert(reference.IsValid); - Debug.Assert(reference.Offset <= (ulong)Stream.Length); - Stream.Position = (long)reference.Offset; - } + internal void Seek(SectionReference reference) + { + Debug.Assert(reference.IsValid); + Seek(reference.Section, reference.Offset); + } - internal void Seek(UInt32 section, UInt32 offset) - { - Debug.Assert(section < Sections.Count); - Debug.Assert(offset <= Sections[(int)section].Header.uncompressedSize); - Stream.Position = Sections[(int)section].Header.offsetInFile + offset; - } + internal void Seek(RelocatableReference reference) + { + Debug.Assert(reference.IsValid); + Debug.Assert(reference.Offset <= (ulong)Stream.Length); + Stream.Position = (long)reference.Offset; + } - internal void Seek(Section section, UInt32 offset) - { - Debug.Assert(offset <= section.Header.uncompressedSize); - Stream.Position = section.Header.offsetInFile + offset; - } + internal void Seek(UInt32 section, UInt32 offset) + { + Debug.Assert(section < Sections.Count); + Debug.Assert(offset <= Sections[(int)section].Header.uncompressedSize); + Stream.Position = Sections[(int)section].Header.offsetInFile + offset; + } + + internal void Seek(Section section, UInt32 offset) + { + Debug.Assert(offset <= section.Header.uncompressedSize); + Stream.Position = section.Header.offsetInFile + offset; } } diff --git a/LSLib/Granny/GR2/Writer.cs b/LSLib/Granny/GR2/Writer.cs index 60a2cee9..2c1d207d 100644 --- a/LSLib/Granny/GR2/Writer.cs +++ b/LSLib/Granny/GR2/Writer.cs @@ -6,1044 +6,1043 @@ using System.Text; using OpenTK; -namespace LSLib.Granny.GR2 +namespace LSLib.Granny.GR2; + +public class MixedMarshallingData { - public class MixedMarshallingData - { - public object Obj; - public UInt32 Count; - public StructDefinition Type; - } + public object Obj; + public UInt32 Count; + public StructDefinition 
Type; +} - public class WritableSection : Section - { - public SectionType Type; - public MemoryStream MainStream; - public MemoryStream DataStream; +public class WritableSection : Section +{ + public SectionType Type; + public MemoryStream MainStream; + public MemoryStream DataStream; - public BinaryWriter MainWriter; - public BinaryWriter DataWriter; + public BinaryWriter MainWriter; + public BinaryWriter DataWriter; - public BinaryWriter Writer; - public GR2Writer GR2; + public BinaryWriter Writer; + public GR2Writer GR2; - public Dictionary Fixups = []; - // Fixups for the data area that we'll need to update after serialization is finished - public Dictionary DataFixups = []; + public Dictionary Fixups = []; + // Fixups for the data area that we'll need to update after serialization is finished + public Dictionary DataFixups = []; - public List MixedMarshalling = []; + public List MixedMarshalling = []; - public WritableSection(SectionType type, GR2Writer writer) - { - Type = type; - MainStream = new MemoryStream(); - MainWriter = new BinaryWriter(MainStream); + public WritableSection(SectionType type, GR2Writer writer) + { + Type = type; + MainStream = new MemoryStream(); + MainWriter = new BinaryWriter(MainStream); - DataStream = new MemoryStream(); - DataWriter = new BinaryWriter(DataStream); + DataStream = new MemoryStream(); + DataWriter = new BinaryWriter(DataStream); - Writer = MainWriter; - Header = InitHeader(); - GR2 = writer; - } + Writer = MainWriter; + Header = InitHeader(); + GR2 = writer; + } - public void Finish() + public void Finish() + { + var dataOffset = (UInt32)MainStream.Length; + + foreach (var dataFixup in DataFixups) { - var dataOffset = (UInt32)MainStream.Length; + Fixups.Add(dataFixup.Key + dataOffset, dataFixup.Value); + } - foreach (var dataFixup in DataFixups) - { - Fixups.Add(dataFixup.Key + dataOffset, dataFixup.Value); - } + MainWriter.Write(DataStream.ToArray()); + } - MainWriter.Write(DataStream.ToArray()); - } + private SectionHeader InitHeader() + { + return new SectionHeader + { + compression = 0, + offsetInFile = 0, // Set after serialization is finished + compressedSize = 0, // Set after serialization is finished + uncompressedSize = 0, // Set after serialization is finished + alignment = 4, + first16bit = 0, // Set after serialization is finished + first8bit = 0, // Set after serialization is finished + relocationsOffset = 0, // Set after serialization is finished + numRelocations = 0, // Set after serialization is finished + mixedMarshallingDataOffset = 0, // Set after serialization is finished + numMixedMarshallingData = 0 // Set after serialization is finished + }; + } - private SectionHeader InitHeader() + public void AddFixup(object o) + { + if (Writer == MainWriter) { - return new SectionHeader - { - compression = 0, - offsetInFile = 0, // Set after serialization is finished - compressedSize = 0, // Set after serialization is finished - uncompressedSize = 0, // Set after serialization is finished - alignment = 4, - first16bit = 0, // Set after serialization is finished - first8bit = 0, // Set after serialization is finished - relocationsOffset = 0, // Set after serialization is finished - numRelocations = 0, // Set after serialization is finished - mixedMarshallingDataOffset = 0, // Set after serialization is finished - numMixedMarshallingData = 0 // Set after serialization is finished - }; + Fixups.Add((UInt32)MainStream.Position, o); } - - public void AddFixup(object o) + else { - if (Writer == MainWriter) - { - 
Fixups.Add((UInt32)MainStream.Position, o); - } - else - { - DataFixups.Add((UInt32)DataStream.Position, o); - } + DataFixups.Add((UInt32)DataStream.Position, o); } + } - internal void AddMixedMarshalling(object o, UInt32 count, StructDefinition type) + internal void AddMixedMarshalling(object o, UInt32 count, StructDefinition type) + { + var marshal = new MixedMarshallingData { - var marshal = new MixedMarshallingData - { - Obj = o, - Count = count, - Type = type - }; - MixedMarshalling.Add(marshal); - } + Obj = o, + Count = count, + Type = type + }; + MixedMarshalling.Add(marshal); + } - internal void CheckMixedMarshalling(object o, Type type, UInt32 count) + internal void CheckMixedMarshalling(object o, Type type, UInt32 count) + { + if (type.IsClass) { - if (type.IsClass) + var defn = GR2.LookupStructDefinition(type, o); + if (defn.MixedMarshal) { - var defn = GR2.LookupStructDefinition(type, o); - if (defn.MixedMarshal) - { - AddMixedMarshalling(o, count, defn); - } + AddMixedMarshalling(o, count, defn); } } + } - internal void CheckMixedMarshalling(object o, UInt32 count) - { - CheckMixedMarshalling(o, o.GetType(), count); - } + internal void CheckMixedMarshalling(object o, UInt32 count) + { + CheckMixedMarshalling(o, o.GetType(), count); + } - public void WriteReference(object o) + public void WriteReference(object o) + { + if (o != null) { - if (o != null) - { - AddFixup(o); - } - - if (GR2.Magic.Is32Bit) - Writer.Write((UInt32)0); - else - Writer.Write((UInt64)0); + AddFixup(o); } - public void WriteStructReference(StructDefinition defn) - { - if (defn != null) - { - AddFixup(defn); - GR2.Types.TryAdd(defn.Type, defn); - } + if (GR2.Magic.Is32Bit) + Writer.Write((UInt32)0); + else + Writer.Write((UInt64)0); + } - if (GR2.Magic.Is32Bit) - Writer.Write((UInt32)0); - else - Writer.Write((UInt64)0); + public void WriteStructReference(StructDefinition defn) + { + if (defn != null) + { + AddFixup(defn); + GR2.Types.TryAdd(defn.Type, defn); } - public void WriteStringReference(string s) + if (GR2.Magic.Is32Bit) + Writer.Write((UInt32)0); + else + Writer.Write((UInt64)0); + } + + public void WriteStringReference(string s) + { + if (s != null) { - if (s != null) - { - AddFixup(s); + AddFixup(s); - if (!GR2.Strings.Contains(s)) - { - GR2.Strings.Add(s); - GR2.QueueStringWrite(SectionType.Main, s); - } + if (!GR2.Strings.Contains(s)) + { + GR2.Strings.Add(s); + GR2.QueueStringWrite(SectionType.Main, s); } - - if (GR2.Magic.Is32Bit) - Writer.Write((UInt32)0); - else - Writer.Write((UInt64)0); } - public void WriteArrayReference(System.Collections.IList list) - { - if (list != null && list.Count > 0) - { - Writer.Write((UInt32)list.Count); - AddFixup(list); - } - else - { - Writer.Write((UInt32)0); - } + if (GR2.Magic.Is32Bit) + Writer.Write((UInt32)0); + else + Writer.Write((UInt64)0); + } - if (GR2.Magic.Is32Bit) - Writer.Write((UInt32)0); - else - Writer.Write((UInt64)0); + public void WriteArrayReference(System.Collections.IList list) + { + if (list != null && list.Count > 0) + { + Writer.Write((UInt32)list.Count); + AddFixup(list); } - - public void WriteArrayIndicesReference(System.Collections.IList list) + else { - WriteArrayReference(list); + Writer.Write((UInt32)0); } - public void WriteMemberDefinition(MemberDefinition defn) + if (GR2.Magic.Is32Bit) + Writer.Write((UInt32)0); + else + Writer.Write((UInt64)0); + } + + public void WriteArrayIndicesReference(System.Collections.IList list) + { + WriteArrayReference(list); + } + + public void WriteMemberDefinition(MemberDefinition defn) + 
{ + Writer.Write((UInt32)defn.Type); + WriteStringReference(defn.GrannyName); + WriteStructReference(defn.WriteDefinition); + Writer.Write(defn.ArraySize); + for (var i = 0; i < MemberDefinition.ExtraTagCount; i++) + Writer.Write(defn.Extra[i]); + if (GR2.Magic.Is32Bit) + Writer.Write(defn.Unknown); + else + Writer.Write((UInt64)defn.Unknown); + } + + public void WriteStructDefinition(StructDefinition defn) + { + Debug.Assert(Writer == MainWriter); + GR2.ObjectOffsets[defn] = new SectionReference(Type, (UInt32)MainStream.Position); + + var tag = GR2.Header.tag; + foreach (var member in defn.Members) { - Writer.Write((UInt32)defn.Type); - WriteStringReference(defn.GrannyName); - WriteStructReference(defn.WriteDefinition); - Writer.Write(defn.ArraySize); - for (var i = 0; i < MemberDefinition.ExtraTagCount; i++) - Writer.Write(defn.Extra[i]); - if (GR2.Magic.Is32Bit) - Writer.Write(defn.Unknown); - else - Writer.Write((UInt64)defn.Unknown); + if (member.ShouldSerialize(tag)) + { + WriteMemberDefinition(member); + } } - public void WriteStructDefinition(StructDefinition defn) + var end = new MemberDefinition { - Debug.Assert(Writer == MainWriter); - GR2.ObjectOffsets[defn] = new SectionReference(Type, (UInt32)MainStream.Position); + Type = MemberType.None, + Extra = [0, 0, 0] + }; + WriteMemberDefinition(end); + } - var tag = GR2.Header.tag; - foreach (var member in defn.Members) - { - if (member.ShouldSerialize(tag)) - { - WriteMemberDefinition(member); - } - } + internal void WriteStruct(object node, bool allowRecursion = true, bool allowAlign = true) + { + WriteStruct(node.GetType(), node, allowRecursion, allowAlign); + } - var end = new MemberDefinition - { - Type = MemberType.None, - Extra = [0, 0, 0] - }; - WriteMemberDefinition(end); - } + internal void WriteStruct(Type type, object node, bool allowRecursion = true, bool allowAlign = true) + { + WriteStruct(GR2.LookupStructDefinition(type, node), node, allowRecursion, allowAlign); + } - internal void WriteStruct(object node, bool allowRecursion = true, bool allowAlign = true) + public void StoreObjectOffset(object o) + { + if (Writer == MainWriter) { - WriteStruct(node.GetType(), node, allowRecursion, allowAlign); + GR2.ObjectOffsets[o] = new SectionReference(Type, (UInt32)MainStream.Position); } - - internal void WriteStruct(Type type, object node, bool allowRecursion = true, bool allowAlign = true) + else { - WriteStruct(GR2.LookupStructDefinition(type, node), node, allowRecursion, allowAlign); + GR2.DataObjectOffsets[o] = new SectionReference(Type, (UInt32)DataStream.Position); } + } - public void StoreObjectOffset(object o) + internal void AlignWrite() + { + if (Writer == MainWriter) { - if (Writer == MainWriter) + // Align the struct so its size (and the address of the subsequent struct) is a multiple of 4 + while ((MainStream.Position % Header.alignment) != 0) { - GR2.ObjectOffsets[o] = new SectionReference(Type, (UInt32)MainStream.Position); - } - else - { - GR2.DataObjectOffsets[o] = new SectionReference(Type, (UInt32)DataStream.Position); + Writer.Write((Byte)0); } } - - internal void AlignWrite() + else { - if (Writer == MainWriter) + // Align the struct so its size (and the address of the subsequent struct) is a multiple of 4 + while ((DataStream.Position % Header.alignment) != 0) { - // Align the struct so its size (and the address of the subsequent struct) is a multiple of 4 - while ((MainStream.Position % Header.alignment) != 0) - { - Writer.Write((Byte)0); - } - } - else - { - // Align the struct so its size (and the 
address of the subsequent struct) is a multiple of 4 - while ((DataStream.Position % Header.alignment) != 0) - { - Writer.Write((Byte)0); - } + Writer.Write((Byte)0); } } + } - internal void WriteStruct(StructDefinition definition, object node, bool allowRecursion = true, bool allowAlign = true) - { - if (node == null) throw new ArgumentNullException(); + internal void WriteStruct(StructDefinition definition, object node, bool allowRecursion = true, bool allowAlign = true) + { + if (node == null) throw new ArgumentNullException(); - if (allowAlign) - { - AlignWrite(); - } + if (allowAlign) + { + AlignWrite(); + } - StoreObjectOffset(node); + StoreObjectOffset(node); - var tag = GR2.Header.tag; - foreach (var member in definition.Members) + var tag = GR2.Header.tag; + foreach (var member in definition.Members) + { + if (member.ShouldSerialize(tag)) { - if (member.ShouldSerialize(tag)) - { - var value = member.CachedField.GetValue(node); - if (member.SerializationKind == SerializationKind.UserRaw) - member.Serializer.Write(this.GR2, this, member, value); - else - WriteInstance(member, member.CachedField.FieldType, value); - } + var value = member.CachedField.GetValue(node); + if (member.SerializationKind == SerializationKind.UserRaw) + member.Serializer.Write(this.GR2, this, member, value); + else + WriteInstance(member, member.CachedField.FieldType, value); } + } - // When the struct is empty, we need to write a dummy byte to make sure that another - // struct won't have the same address. - if (definition.Members.Count == 0) - { - Writer.Write((Byte)0); - } + // When the struct is empty, we need to write a dummy byte to make sure that another + // struct won't have the same address. + if (definition.Members.Count == 0) + { + Writer.Write((Byte)0); + } - if (Writer == MainWriter && allowRecursion) - { - // We need to write all child structs directly after the parent struct - // (at least this is how granny2.dll does it) - GR2.FlushPendingWrites(); - } + if (Writer == MainWriter && allowRecursion) + { + // We need to write all child structs directly after the parent struct + // (at least this is how granny2.dll does it) + GR2.FlushPendingWrites(); } + } - internal void WriteArray(MemberDefinition arrayDefn, Type elementType, System.Collections.IList list) + internal void WriteArray(MemberDefinition arrayDefn, Type elementType, System.Collections.IList list) + { + bool dataArea = arrayDefn.DataArea || (Writer == DataWriter); + AlignWrite(); + + switch (arrayDefn.Type) { - bool dataArea = arrayDefn.DataArea || (Writer == DataWriter); - AlignWrite(); + case MemberType.ArrayOfReferences: + { + // Serializing as a struct member is nooooot a very good idea here. + // Debug.Assert(kind != SerializationKind.UserMember); - switch (arrayDefn.Type) - { - case MemberType.ArrayOfReferences: + // Reference lists are always written to the data area + var oldWriter = Writer; + Writer = DataWriter; + + StoreObjectOffset(list); + for (int i = 0; i < list.Count; i++) { - // Serializing as a struct member is nooooot a very good idea here. 
- // Debug.Assert(kind != SerializationKind.UserMember); + WriteReference(list[i]); + GR2.QueueStructWrite(Type, dataArea, arrayDefn, elementType, list[i]); + } - // Reference lists are always written to the data area - var oldWriter = Writer; - Writer = DataWriter; + Writer = oldWriter; + break; + } - StoreObjectOffset(list); - for (int i = 0; i < list.Count; i++) - { - WriteReference(list[i]); - GR2.QueueStructWrite(Type, dataArea, arrayDefn, elementType, list[i]); - } + case MemberType.ReferenceToArray: + case MemberType.ReferenceToVariantArray: + { + StoreObjectOffset(list); - Writer = oldWriter; - break; + if (arrayDefn.SerializationKind == SerializationKind.UserMember) + { + arrayDefn.Serializer.Write(this.GR2, this, arrayDefn, list); } - - case MemberType.ReferenceToArray: - case MemberType.ReferenceToVariantArray: + else if (arrayDefn.SerializationKind == SerializationKind.UserElement) { - StoreObjectOffset(list); - - if (arrayDefn.SerializationKind == SerializationKind.UserMember) - { - arrayDefn.Serializer.Write(this.GR2, this, arrayDefn, list); - } - else if (arrayDefn.SerializationKind == SerializationKind.UserElement) + for (int i = 0; i < list.Count; i++) { - for (int i = 0; i < list.Count; i++) - { - StoreObjectOffset(list[i]); - arrayDefn.Serializer.Write(this.GR2, this, arrayDefn, list[i]); - } + StoreObjectOffset(list[i]); + arrayDefn.Serializer.Write(this.GR2, this, arrayDefn, list[i]); } - else + } + else + { + for (int i = 0; i < list.Count; i++) { - for (int i = 0; i < list.Count; i++) - { - WriteStruct(elementType, list[i], false, false); - } + WriteStruct(elementType, list[i], false, false); } + } - GR2.FlushPendingWrites(); + GR2.FlushPendingWrites(); - break; - } + break; + } - default: - throw new ParsingException(String.Format("Unhandled array member type: {0}", arrayDefn.Type.ToString())); - } + default: + throw new ParsingException(String.Format("Unhandled array member type: {0}", arrayDefn.Type.ToString())); } + } - internal void WriteInstance(MemberDefinition definition, Type propertyType, object node) + internal void WriteInstance(MemberDefinition definition, Type propertyType, object node) + { + if (definition.ArraySize == 0) { - if (definition.ArraySize == 0) - { - WriteElement(definition, node); - return; - } + WriteElement(definition, node); + return; + } - if (propertyType.IsArray) + if (propertyType.IsArray) + { + // If the property is a native array (ie. SomeType[]), create an array instance and set its values + Array arr = node as Array; + Debug.Assert(arr.Length == definition.ArraySize); + for (int i = 0; i < definition.ArraySize; i++) { - // If the property is a native array (ie. 
SomeType[]), create an array instance and set its values - Array arr = node as Array; - Debug.Assert(arr.Length == definition.ArraySize); - for (int i = 0; i < definition.ArraySize; i++) - { - WriteElement(definition, arr.GetValue(i)); - } + WriteElement(definition, arr.GetValue(i)); } - else + } + else + { + // For non-native arrays we always assume the property is an IList + var items = node as System.Collections.IList; + foreach (var element in items) { - // For non-native arrays we always assume the property is an IList - var items = node as System.Collections.IList; - foreach (var element in items) - { - WriteElement(definition, element); - } + WriteElement(definition, element); } } + } - private void WriteElement(MemberDefinition definition, object node) - { - var type = definition.CachedField.FieldType; - bool dataArea = definition.DataArea || (Writer == DataWriter); - - switch (definition.Type) - { - case MemberType.Inline: - if (definition.SerializationKind == SerializationKind.UserMember) - definition.Serializer.Write(this.GR2, this, definition, node); - else - WriteStruct(type, node, false); - break; + private void WriteElement(MemberDefinition definition, object node) + { + var type = definition.CachedField.FieldType; + bool dataArea = definition.DataArea || (Writer == DataWriter); - case MemberType.Reference: + switch (definition.Type) + { + case MemberType.Inline: + if (definition.SerializationKind == SerializationKind.UserMember) + definition.Serializer.Write(this.GR2, this, definition, node); + else + WriteStruct(type, node, false); + break; + + case MemberType.Reference: + { + WriteReference(node); + if (node != null) { - WriteReference(node); - if (node != null) - { - GR2.QueueStructWrite(Type, dataArea, definition, type, node); - } - break; + GR2.QueueStructWrite(Type, dataArea, definition, type, node); } + break; + } - case MemberType.VariantReference: + case MemberType.VariantReference: + { + if (node != null) { - if (node != null) + var inferredType = node.GetType(); + if (definition.TypeSelector != null) { - var inferredType = node.GetType(); - if (definition.TypeSelector != null) - { - var variantType = definition.TypeSelector.SelectType(definition, node); - if (variantType != null) - inferredType = variantType; - } - - WriteStructReference(GR2.LookupStructDefinition(inferredType, node)); - WriteReference(node); - - GR2.QueueStructWrite(Type, dataArea, definition, inferredType, node); + var variantType = definition.TypeSelector.SelectType(definition, node); + if (variantType != null) + inferredType = variantType; } - else - { - WriteStructReference(null); - WriteReference(null); - } - break; - } - case MemberType.ArrayOfReferences: - { - // Serializing as a struct member is nooooot a very good idea here. 
- // Debug.Assert(kind != SerializationKind.UserMember); - var list = node as System.Collections.IList; - WriteArrayIndicesReference(list); - - if (list != null && list.Count > 0) - { - GR2.QueueArrayWrite(Type, dataArea, type.GetGenericArguments().Single(), definition, list); - } + WriteStructReference(GR2.LookupStructDefinition(inferredType, node)); + WriteReference(node); - break; + GR2.QueueStructWrite(Type, dataArea, definition, inferredType, node); } - - case MemberType.ReferenceToArray: + else { - var list = node as System.Collections.IList; - WriteArrayIndicesReference(list); - - if (list != null && list.Count > 0) - { - GR2.QueueArrayWrite(Type, dataArea, type.GetGenericArguments().Single(), definition, list); - } - break; + WriteStructReference(null); + WriteReference(null); } + break; + } - case MemberType.ReferenceToVariantArray: - { - var list = node as System.Collections.IList; + case MemberType.ArrayOfReferences: + { + // Serializing as a struct member is nooooot a very good idea here. + // Debug.Assert(kind != SerializationKind.UserMember); + var list = node as System.Collections.IList; + WriteArrayIndicesReference(list); - if (list != null && list.Count > 0) - { - var inferredType = list[0].GetType(); - if (definition.TypeSelector != null) - { - var variantType = definition.TypeSelector.SelectType(definition, node); - if (variantType != null) - inferredType = variantType; - } - - WriteStructReference(GR2.LookupStructDefinition(inferredType, list[0])); - WriteArrayIndicesReference(list); - GR2.QueueArrayWrite(Type, dataArea, inferredType, definition, list); - } - else - { - WriteStructReference(null); - WriteArrayIndicesReference(list); - } - break; + if (list != null && list.Count > 0) + { + GR2.QueueArrayWrite(Type, dataArea, type.GetGenericArguments().Single(), definition, list); } - case MemberType.String: - WriteStringReference(node as string); break; + } - case MemberType.Transform: - var transform = node as Transform; - Writer.Write(transform.Flags); - - for (int i = 0; i < 3; i++) - Writer.Write(transform.Translation[i]); - - Writer.Write(transform.Rotation.X); - Writer.Write(transform.Rotation.Y); - Writer.Write(transform.Rotation.Z); - Writer.Write(transform.Rotation.W); + case MemberType.ReferenceToArray: + { + var list = node as System.Collections.IList; + WriteArrayIndicesReference(list); - for (int i = 0; i < 3; i++) + if (list != null && list.Count > 0) { - for (int j = 0; j < 3; j++) - Writer.Write(transform.ScaleShear[i, j]); + GR2.QueueArrayWrite(Type, dataArea, type.GetGenericArguments().Single(), definition, list); } break; + } - case MemberType.Real16: - Writer.Write((Half)node); - break; - - case MemberType.Real32: - Writer.Write((Single)node); - break; + case MemberType.ReferenceToVariantArray: + { + var list = node as System.Collections.IList; - case MemberType.Int8: - case MemberType.BinormalInt8: - Writer.Write((SByte)node); - break; + if (list != null && list.Count > 0) + { + var inferredType = list[0].GetType(); + if (definition.TypeSelector != null) + { + var variantType = definition.TypeSelector.SelectType(definition, node); + if (variantType != null) + inferredType = variantType; + } - case MemberType.UInt8: - case MemberType.NormalUInt8: - Writer.Write((Byte)node); + WriteStructReference(GR2.LookupStructDefinition(inferredType, list[0])); + WriteArrayIndicesReference(list); + GR2.QueueArrayWrite(Type, dataArea, inferredType, definition, list); + } + else + { + WriteStructReference(null); + WriteArrayIndicesReference(list); + } break; + } - 
case MemberType.Int16: - case MemberType.BinormalInt16: - Writer.Write((Int16)node); - break; + case MemberType.String: + WriteStringReference(node as string); + break; - case MemberType.UInt16: - case MemberType.NormalUInt16: - Writer.Write((UInt16)node); - break; + case MemberType.Transform: + var transform = node as Transform; + Writer.Write(transform.Flags); - case MemberType.Int32: - Writer.Write((Int32)node); - break; + for (int i = 0; i < 3; i++) + Writer.Write(transform.Translation[i]); - case MemberType.UInt32: - Writer.Write((UInt32)node); - break; + Writer.Write(transform.Rotation.X); + Writer.Write(transform.Rotation.Y); + Writer.Write(transform.Rotation.Z); + Writer.Write(transform.Rotation.W); - default: - throw new ParsingException(String.Format("Unhandled member type: {0}", definition.Type.ToString())); - } + for (int i = 0; i < 3; i++) + { + for (int j = 0; j < 3; j++) + Writer.Write(transform.ScaleShear[i, j]); + } + break; + + case MemberType.Real16: + Writer.Write((Half)node); + break; + + case MemberType.Real32: + Writer.Write((Single)node); + break; + + case MemberType.Int8: + case MemberType.BinormalInt8: + Writer.Write((SByte)node); + break; + + case MemberType.UInt8: + case MemberType.NormalUInt8: + Writer.Write((Byte)node); + break; + + case MemberType.Int16: + case MemberType.BinormalInt16: + Writer.Write((Int16)node); + break; + + case MemberType.UInt16: + case MemberType.NormalUInt16: + Writer.Write((UInt16)node); + break; + + case MemberType.Int32: + Writer.Write((Int32)node); + break; + + case MemberType.UInt32: + Writer.Write((UInt32)node); + break; + + default: + throw new ParsingException(String.Format("Unhandled member type: {0}", definition.Type.ToString())); } + } + + internal void WriteString(string s) + { + GR2.DataObjectOffsets[s] = new SectionReference(Type, (UInt32)DataStream.Position); + var bytes = Encoding.UTF8.GetBytes(s); + DataWriter.Write(bytes); + DataWriter.Write((Byte)0); + } - internal void WriteString(string s) + internal void WriteSectionRelocations(WritableSection section) + { + section.Header.numRelocations = (UInt32)section.Fixups.Count; + section.Header.relocationsOffset = (UInt32)MainStream.Position; + + foreach (var fixup in section.Fixups) { - GR2.DataObjectOffsets[s] = new SectionReference(Type, (UInt32)DataStream.Position); - var bytes = Encoding.UTF8.GetBytes(s); - DataWriter.Write(bytes); - DataWriter.Write((Byte)0); + Writer.Write(fixup.Key); + WriteSectionReference(GR2.ObjectOffsets[fixup.Value]); } + } - internal void WriteSectionRelocations(WritableSection section) - { - section.Header.numRelocations = (UInt32)section.Fixups.Count; - section.Header.relocationsOffset = (UInt32)MainStream.Position; + internal void WriteSectionMixedMarshallingRelocations(WritableSection section) + { + section.Header.numMixedMarshallingData = (UInt32)section.MixedMarshalling.Count; + section.Header.mixedMarshallingDataOffset = (UInt32)MainStream.Position; - foreach (var fixup in section.Fixups) - { - Writer.Write(fixup.Key); - WriteSectionReference(GR2.ObjectOffsets[fixup.Value]); - } + foreach (var marshal in section.MixedMarshalling) + { + Writer.Write(marshal.Count); + Writer.Write(GR2.ObjectOffsets[marshal.Obj].Offset); + WriteSectionReference(GR2.ObjectOffsets[marshal.Type]); } + } - internal void WriteSectionMixedMarshallingRelocations(WritableSection section) - { - section.Header.numMixedMarshallingData = (UInt32)section.MixedMarshalling.Count; - section.Header.mixedMarshallingDataOffset = (UInt32)MainStream.Position; + internal void 
WriteSectionReference(SectionReference r) + { + Writer.Write((UInt32)r.Section); + Writer.Write(r.Offset); + } +}; - foreach (var marshal in section.MixedMarshalling) - { - Writer.Write(marshal.Count); - Writer.Write(GR2.ObjectOffsets[marshal.Obj].Offset); - WriteSectionReference(GR2.ObjectOffsets[marshal.Type]); - } - } +public class RelocationArea +{ + public MemoryStream Stream; + public BinaryWriter Writer; + public GR2Writer GR2; - internal void WriteSectionReference(SectionReference r) - { - Writer.Write((UInt32)r.Section); - Writer.Write(r.Offset); - } - }; + public RelocationArea(GR2Writer writer) + { + Stream = new MemoryStream(); + Writer = new BinaryWriter(Stream); + GR2 = writer; + } - public class RelocationArea + internal void WriteSectionRelocations(WritableSection section) { - public MemoryStream Stream; - public BinaryWriter Writer; - public GR2Writer GR2; + section.Header.numRelocations = (UInt32)section.Fixups.Count; + section.Header.relocationsOffset = (UInt32)Stream.Position; - public RelocationArea(GR2Writer writer) + foreach (var fixup in section.Fixups) { - Stream = new MemoryStream(); - Writer = new BinaryWriter(Stream); - GR2 = writer; + Writer.Write(fixup.Key); + WriteSectionReference(GR2.ObjectOffsets[fixup.Value]); } + } - internal void WriteSectionRelocations(WritableSection section) - { - section.Header.numRelocations = (UInt32)section.Fixups.Count; - section.Header.relocationsOffset = (UInt32)Stream.Position; + internal void WriteSectionMixedMarshallingRelocations(WritableSection section) + { + section.Header.numMixedMarshallingData = (UInt32)section.MixedMarshalling.Count; + section.Header.mixedMarshallingDataOffset = (UInt32)Stream.Position; - foreach (var fixup in section.Fixups) - { - Writer.Write(fixup.Key); - WriteSectionReference(GR2.ObjectOffsets[fixup.Value]); - } + foreach (var marshal in section.MixedMarshalling) + { + Writer.Write(marshal.Count); + Writer.Write(GR2.ObjectOffsets[marshal.Obj].Offset); + WriteSectionReference(GR2.ObjectOffsets[marshal.Type]); } + } - internal void WriteSectionMixedMarshallingRelocations(WritableSection section) - { - section.Header.numMixedMarshallingData = (UInt32)section.MixedMarshalling.Count; - section.Header.mixedMarshallingDataOffset = (UInt32)Stream.Position; + internal void WriteSectionReference(SectionReference r) + { + Writer.Write((UInt32)r.Section); + Writer.Write(r.Offset); + } +}; - foreach (var marshal in section.MixedMarshalling) - { - Writer.Write(marshal.Count); - Writer.Write(GR2.ObjectOffsets[marshal.Obj].Offset); - WriteSectionReference(GR2.ObjectOffsets[marshal.Type]); - } - } +public class GR2Writer +{ + struct QueuedSerialization + { + public SectionType section; + public bool dataArea; + public MemberDefinition member; + public Type type; + public object obj; + } - internal void WriteSectionReference(SectionReference r) - { - Writer.Write((UInt32)r.Section); - Writer.Write(r.Offset); - } - }; + struct QueuedArraySerialization + { + public SectionType section; + public bool dataArea; + public Type elementType; + public MemberDefinition member; + public System.Collections.IList list; + } - public class GR2Writer + struct QueuedStringSerialization { - struct QueuedSerialization - { - public SectionType section; - public bool dataArea; - public MemberDefinition member; - public Type type; - public object obj; - } + public SectionType section; + public String str; + } - struct QueuedArraySerialization - { - public SectionType section; - public bool dataArea; - public Type elementType; - public 
MemberDefinition member; - public System.Collections.IList list; - } + internal MemoryStream Stream; + internal BinaryWriter Writer; + internal Magic Magic; + internal Header Header; + internal WritableSection CurrentSection; + internal List Sections = []; + internal Dictionary Types = []; + internal RelocationArea Relocations; - struct QueuedStringSerialization - { - public SectionType section; - public String str; - } + private List StructWrites = []; + private List ArrayWrites = []; + private List StringWrites = []; - internal MemoryStream Stream; - internal BinaryWriter Writer; - internal Magic Magic; - internal Header Header; - internal WritableSection CurrentSection; - internal List Sections = []; - internal Dictionary Types = []; - internal RelocationArea Relocations; + internal Dictionary ObjectOffsets = []; + internal Dictionary DataObjectOffsets = []; + internal HashSet Strings = []; - private List StructWrites = []; - private List ArrayWrites = []; - private List StringWrites = []; + // Version tag that will be written to the GR2 file + public UInt32 VersionTag = Header.DefaultTag; - internal Dictionary ObjectOffsets = []; - internal Dictionary DataObjectOffsets = []; - internal HashSet Strings = []; + // Format of the GR2 file + public Magic.Format Format = Magic.Format.LittleEndian32; - // Version tag that will be written to the GR2 file - public UInt32 VersionTag = Header.DefaultTag; + // Use alternate GR2 magic value? + public bool AlternateMagic = false; - // Format of the GR2 file - public Magic.Format Format = Magic.Format.LittleEndian32; + public GR2Writer() + { + this.Stream = new MemoryStream(); + } - // Use alternate GR2 magic value? - public bool AlternateMagic = false; + public void Dispose() + { + Stream.Dispose(); + } - public GR2Writer() + public void FlushPendingWrites() + { + if (ArrayWrites.Count == 0 && StructWrites.Count == 0 && StringWrites.Count == 0) { - this.Stream = new MemoryStream(); + return; } - public void Dispose() - { - Stream.Dispose(); - } + var arrayWrites = ArrayWrites; + var structWrites = StructWrites; + var stringWrites = StringWrites; + ArrayWrites = []; + StructWrites = []; + StringWrites = []; - public void FlushPendingWrites() + foreach (var write in structWrites) { - if (ArrayWrites.Count == 0 && StructWrites.Count == 0 && StringWrites.Count == 0) + if (!ObjectOffsets.ContainsKey(write.obj)) { - return; - } - - var arrayWrites = ArrayWrites; - var structWrites = StructWrites; - var stringWrites = StringWrites; - ArrayWrites = []; - StructWrites = []; - StringWrites = []; - - foreach (var write in structWrites) - { - if (!ObjectOffsets.ContainsKey(write.obj)) + var section = Sections[(int)write.section]; + var oldWriter = section.Writer; + if (write.dataArea) { - var section = Sections[(int)write.section]; - var oldWriter = section.Writer; - if (write.dataArea) - { - section.Writer = section.DataWriter; - } - - section.WriteStruct(write.type, write.obj); - section.Writer = oldWriter; + section.Writer = section.DataWriter; } + + section.WriteStruct(write.type, write.obj); + section.Writer = oldWriter; } + } - foreach (var write in arrayWrites) + foreach (var write in arrayWrites) + { + if (!ObjectOffsets.ContainsKey(write.list)) { - if (!ObjectOffsets.ContainsKey(write.list)) + var section = Sections[(int)write.section]; + var oldWriter = section.Writer; + if (write.dataArea) { - var section = Sections[(int)write.section]; - var oldWriter = section.Writer; - if (write.dataArea) - { - section.Writer = section.DataWriter; - } - - 
section.WriteArray(write.member, write.elementType, write.list); - section.Writer = oldWriter; + section.Writer = section.DataWriter; } - } - foreach (var write in stringWrites) - { - Sections[(int)write.section].WriteString(write.str); + section.WriteArray(write.member, write.elementType, write.list); + section.Writer = oldWriter; } } - internal void FinalizeOffsets() + foreach (var write in stringWrites) { - foreach (var offset in DataObjectOffsets) - { - offset.Value.Offset += (UInt32)Sections[(int)offset.Value.Section].MainStream.Length; - ObjectOffsets.Add(offset.Key, offset.Value); - } + Sections[(int)write.section].WriteString(write.str); } + } - public byte[] Write(object root, uint numCustomSections = 0) + internal void FinalizeOffsets() + { + foreach (var offset in DataObjectOffsets) { - using (this.Writer = new BinaryWriter(Stream)) - { - this.Magic = InitMagic(); - WriteMagic(Magic); + offset.Value.Offset += (UInt32)Sections[(int)offset.Value.Section].MainStream.Length; + ObjectOffsets.Add(offset.Key, offset.Value); + } + } - this.Header = InitHeader(numCustomSections); - WriteHeader(Header); + public byte[] Write(object root, uint numCustomSections = 0) + { + using (this.Writer = new BinaryWriter(Stream)) + { + this.Magic = InitMagic(); + WriteMagic(Magic); - this.Relocations = new RelocationArea(this); + this.Header = InitHeader(numCustomSections); + WriteHeader(Header); - for (int i = 0; i < Header.numSections; i++) - { - var section = new WritableSection((SectionType)i, this); - WriteSectionHeader(section.Header); - Sections.Add(section); - } + this.Relocations = new RelocationArea(this); - Magic.headersSize = (UInt32)Stream.Position; - - CurrentSection = Sections[(int)SectionType.Main]; - CurrentSection.WriteStruct(root); + for (int i = 0; i < Header.numSections; i++) + { + var section = new WritableSection((SectionType)i, this); + WriteSectionHeader(section.Header); + Sections.Add(section); + } - while (ArrayWrites.Count > 0 || StructWrites.Count > 0 || StringWrites.Count > 0) - { - FlushPendingWrites(); - } + Magic.headersSize = (UInt32)Stream.Position; - foreach (var defn in Types.Values) - { - Sections[(int)SectionType.StructDefinitions].WriteStructDefinition(defn); - } + CurrentSection = Sections[(int)SectionType.Main]; + CurrentSection.WriteStruct(root); - // We need to do this again to flush strings written by WriteMemberDefinition() + while (ArrayWrites.Count > 0 || StructWrites.Count > 0 || StringWrites.Count > 0) + { FlushPendingWrites(); + } - FinalizeOffsets(); + foreach (var defn in Types.Values) + { + Sections[(int)SectionType.StructDefinitions].WriteStructDefinition(defn); + } - foreach (var section in Sections) - { - section.Header.first16bit = (UInt32)section.MainStream.Length; - section.Header.first8bit = (UInt32)section.MainStream.Length; - section.Finish(); - } + // We need to do this again to flush strings written by WriteMemberDefinition() + FlushPendingWrites(); - foreach (var section in Sections) - { - Relocations.WriteSectionRelocations(section); - } + FinalizeOffsets(); - foreach (var section in Sections) - { - Relocations.WriteSectionMixedMarshallingRelocations(section); - } + foreach (var section in Sections) + { + section.Header.first16bit = (UInt32)section.MainStream.Length; + section.Header.first8bit = (UInt32)section.MainStream.Length; + section.Finish(); + } - foreach (var section in Sections) - { - // Pad section size to a multiple of the section alignment - while ((section.MainStream.Position % section.Header.alignment) > 0) - 
section.Writer.Write((Byte)0); - - section.MainStream.Flush(); - section.Header.offsetInFile = (UInt32)Stream.Position; - section.Header.uncompressedSize = (UInt32)section.MainStream.Length; - section.Header.compressedSize = (UInt32)section.MainStream.Length; - Writer.Write(section.MainStream.ToArray()); - } + foreach (var section in Sections) + { + Relocations.WriteSectionRelocations(section); + } - var relocationsOffset = (UInt32)Stream.Position; - Writer.Write(Relocations.Stream.ToArray()); + foreach (var section in Sections) + { + Relocations.WriteSectionMixedMarshallingRelocations(section); + } - foreach (var section in Sections) - { - section.Header.relocationsOffset += relocationsOffset; - section.Header.mixedMarshallingDataOffset += relocationsOffset; - } + foreach (var section in Sections) + { + // Pad section size to a multiple of the section alignment + while ((section.MainStream.Position % section.Header.alignment) > 0) + section.Writer.Write((Byte)0); + + section.MainStream.Flush(); + section.Header.offsetInFile = (UInt32)Stream.Position; + section.Header.uncompressedSize = (UInt32)section.MainStream.Length; + section.Header.compressedSize = (UInt32)section.MainStream.Length; + Writer.Write(section.MainStream.ToArray()); + } - var rootStruct = LookupStructDefinition(root.GetType(), root); - Header.rootType = ObjectOffsets[rootStruct]; - Header.rootNode = new SectionReference(SectionType.Main, 0); - Header.fileSize = (UInt32)Stream.Length; + var relocationsOffset = (UInt32)Stream.Position; + Writer.Write(Relocations.Stream.ToArray()); - Stream.Seek(Magic.MagicSize + Header.Size(), SeekOrigin.Begin); + foreach (var section in Sections) + { + section.Header.relocationsOffset += relocationsOffset; + section.Header.mixedMarshallingDataOffset += relocationsOffset; + } - foreach (var section in Sections) - { - WriteSectionHeader(section.Header); - } + var rootStruct = LookupStructDefinition(root.GetType(), root); + Header.rootType = ObjectOffsets[rootStruct]; + Header.rootNode = new SectionReference(SectionType.Main, 0); + Header.fileSize = (UInt32)Stream.Length; - Header.crc = Header.CalculateCRC(Stream); - Stream.Seek(0, SeekOrigin.Begin); - WriteMagic(Magic); - WriteHeader(Header); + Stream.Seek(Magic.MagicSize + Header.Size(), SeekOrigin.Begin); - return Stream.ToArray(); + foreach (var section in Sections) + { + WriteSectionHeader(section.Header); } + + Header.crc = Header.CalculateCRC(Stream); + Stream.Seek(0, SeekOrigin.Begin); + WriteMagic(Magic); + WriteHeader(Header); + + return Stream.ToArray(); } + } - private Magic InitMagic() - { - var magic = new Magic(); - magic.format = Magic.Format.LittleEndian32; - magic.signature = Magic.SignatureFromFormat(magic.format); + private Magic InitMagic() + { + var magic = new Magic(); + magic.format = Magic.Format.LittleEndian32; + magic.signature = Magic.SignatureFromFormat(magic.format); - magic.headersSize = 0; // Updated after headers are serialized - magic.headerFormat = 0; - magic.reserved1 = 0; - magic.reserved2 = 0; + magic.headersSize = 0; // Updated after headers are serialized + magic.headerFormat = 0; + magic.reserved1 = 0; + magic.reserved2 = 0; - magic.SetFormat(Format, AlternateMagic); - return magic; - } + magic.SetFormat(Format, AlternateMagic); + return magic; + } - private void WriteMagic(Magic magic) - { - Writer.Write(magic.signature); - Writer.Write(magic.headersSize); - Writer.Write(magic.headerFormat); - Writer.Write(magic.reserved1); - Writer.Write(magic.reserved2); - } + private void WriteMagic(Magic magic) + 
{ + Writer.Write(magic.signature); + Writer.Write(magic.headersSize); + Writer.Write(magic.headerFormat); + Writer.Write(magic.reserved1); + Writer.Write(magic.reserved2); + } - private Header InitHeader(uint numCustomSections) + private Header InitHeader(uint numCustomSections) + { + var header = new Header { - var header = new Header - { - version = Header.Version, - fileSize = 0, // Set after serialization is finished - crc = 0, // Set after serialization is finished - rootType = new SectionReference(), // Updated after serialization is finished - rootNode = new SectionReference(), // Updated after serialization is finished - numSections = (UInt32)SectionType.FirstVertexData + numCustomSections, - tag = VersionTag, - extraTags = new UInt32[Header.ExtraTagCount], - stringTableCrc = 0, - reserved1 = 0, - reserved2 = 0, - reserved3 = 0 - }; - - header.sectionsOffset = header.Size(); - - for (int i = 0; i < Header.ExtraTagCount; i++) - header.extraTags[i] = 0; - - return header; - } + version = Header.Version, + fileSize = 0, // Set after serialization is finished + crc = 0, // Set after serialization is finished + rootType = new SectionReference(), // Updated after serialization is finished + rootNode = new SectionReference(), // Updated after serialization is finished + numSections = (UInt32)SectionType.FirstVertexData + numCustomSections, + tag = VersionTag, + extraTags = new UInt32[Header.ExtraTagCount], + stringTableCrc = 0, + reserved1 = 0, + reserved2 = 0, + reserved3 = 0 + }; + + header.sectionsOffset = header.Size(); + + for (int i = 0; i < Header.ExtraTagCount; i++) + header.extraTags[i] = 0; + + return header; + } - private void WriteHeader(Header header) - { - Writer.Write(header.version); - Writer.Write(header.fileSize); - Writer.Write(header.crc); - Writer.Write(header.sectionsOffset); - Writer.Write(header.numSections); - WriteSectionReference(header.rootType); - WriteSectionReference(header.rootNode); - Writer.Write(header.tag); - for (int i = 0; i < Header.ExtraTagCount; i++) - Writer.Write(header.extraTags[i]); - Writer.Write(header.stringTableCrc); - Writer.Write(header.reserved1); - Writer.Write(header.reserved2); - Writer.Write(header.reserved3); - } + private void WriteHeader(Header header) + { + Writer.Write(header.version); + Writer.Write(header.fileSize); + Writer.Write(header.crc); + Writer.Write(header.sectionsOffset); + Writer.Write(header.numSections); + WriteSectionReference(header.rootType); + WriteSectionReference(header.rootNode); + Writer.Write(header.tag); + for (int i = 0; i < Header.ExtraTagCount; i++) + Writer.Write(header.extraTags[i]); + Writer.Write(header.stringTableCrc); + Writer.Write(header.reserved1); + Writer.Write(header.reserved2); + Writer.Write(header.reserved3); + } - private void WriteSectionHeader(SectionHeader header) - { - Writer.Write(header.compression); - Writer.Write(header.offsetInFile); - Writer.Write(header.compressedSize); - Writer.Write(header.uncompressedSize); - Writer.Write(header.alignment); - Writer.Write(header.first16bit); - Writer.Write(header.first8bit); - Writer.Write(header.relocationsOffset); - Writer.Write(header.numRelocations); - Writer.Write(header.mixedMarshallingDataOffset); - Writer.Write(header.numMixedMarshallingData); - } + private void WriteSectionHeader(SectionHeader header) + { + Writer.Write(header.compression); + Writer.Write(header.offsetInFile); + Writer.Write(header.compressedSize); + Writer.Write(header.uncompressedSize); + Writer.Write(header.alignment); + Writer.Write(header.first16bit); + 
Writer.Write(header.first8bit); + Writer.Write(header.relocationsOffset); + Writer.Write(header.numRelocations); + Writer.Write(header.mixedMarshallingDataOffset); + Writer.Write(header.numMixedMarshallingData); + } + + public void WriteSectionReference(SectionReference r) + { + Writer.Write((UInt32)r.Section); + Writer.Write(r.Offset); + } - public void WriteSectionReference(SectionReference r) + internal StructDefinition LookupStructDefinition(Type type, object instance) + { + if (Types.TryGetValue(type, out StructDefinition defn)) { - Writer.Write((UInt32)r.Section); - Writer.Write(r.Offset); + return defn; } - internal StructDefinition LookupStructDefinition(Type type, object instance) + if (type.GetInterfaces().Contains(typeof(System.Collections.IList)) || type.IsArray || type.IsPrimitive) + throw new ArgumentException("Cannot create a struct definition for array or primitive types"); + + var attrs = type.GetCustomAttributes(typeof(StructSerializationAttribute), true); + if (attrs.Length > 0) { - if (Types.TryGetValue(type, out StructDefinition defn)) + StructSerializationAttribute serialization = attrs[0] as StructSerializationAttribute; + if (serialization.TypeSelector != null) { - return defn; + var selector = Activator.CreateInstance(serialization.TypeSelector) as StructDefinitionSelector; + defn = selector.CreateStructDefinition(instance); + Types.Add(type, defn); } + } - if (type.GetInterfaces().Contains(typeof(System.Collections.IList)) || type.IsArray || type.IsPrimitive) - throw new ArgumentException("Cannot create a struct definition for array or primitive types"); - - var attrs = type.GetCustomAttributes(typeof(StructSerializationAttribute), true); - if (attrs.Length > 0) - { - StructSerializationAttribute serialization = attrs[0] as StructSerializationAttribute; - if (serialization.TypeSelector != null) - { - var selector = Activator.CreateInstance(serialization.TypeSelector) as StructDefinitionSelector; - defn = selector.CreateStructDefinition(instance); - Types.Add(type, defn); - } - } + if (defn == null) + { + defn = new StructDefinition(); + Types.Add(type, defn); + defn.LoadFromType(type, this); + } - if (defn == null) - { - defn = new StructDefinition(); - Types.Add(type, defn); - defn.LoadFromType(type, this); - } + return defn; + } - return defn; + internal void QueueStructWrite(SectionType section, bool dataArea, MemberDefinition member, Type type, object obj) + { + QueuedSerialization serialization; + serialization.section = section; + serialization.dataArea = dataArea; + if (member.PreferredSection != SectionType.Invalid) + { + serialization.section = member.PreferredSection; } - internal void QueueStructWrite(SectionType section, bool dataArea, MemberDefinition member, Type type, object obj) + if (member.SectionSelector != null) { - QueuedSerialization serialization; - serialization.section = section; - serialization.dataArea = dataArea; - if (member.PreferredSection != SectionType.Invalid) + var selectedSection = member.SectionSelector.SelectSection(member, type, obj); + if (selectedSection != SectionType.Invalid) { - serialization.section = member.PreferredSection; + serialization.section = selectedSection; } + } - if (member.SectionSelector != null) - { - var selectedSection = member.SectionSelector.SelectSection(member, type, obj); - if (selectedSection != SectionType.Invalid) - { - serialization.section = selectedSection; - } - } + serialization.type = type; + serialization.member = member; + serialization.obj = obj; - serialization.type = type; - 
serialization.member = member; - serialization.obj = obj; + Sections[(int)serialization.section].CheckMixedMarshalling(obj, type, 1); + StructWrites.Add(serialization); + } - Sections[(int)serialization.section].CheckMixedMarshalling(obj, type, 1); - StructWrites.Add(serialization); - } + internal void QueueArrayWrite(SectionType section, bool dataArea, Type elementType, MemberDefinition member, System.Collections.IList list) + { + QueuedArraySerialization serialization; + serialization.section = section; + serialization.dataArea = dataArea; - internal void QueueArrayWrite(SectionType section, bool dataArea, Type elementType, MemberDefinition member, System.Collections.IList list) + if (member.SectionSelector != null) { - QueuedArraySerialization serialization; - serialization.section = section; - serialization.dataArea = dataArea; - - if (member.SectionSelector != null) + var selectedSection = member.SectionSelector.SelectSection(member, elementType, list); + if (selectedSection != SectionType.Invalid) { - var selectedSection = member.SectionSelector.SelectSection(member, elementType, list); - if (selectedSection != SectionType.Invalid) - { - serialization.section = selectedSection; - } + serialization.section = selectedSection; } + } - serialization.elementType = elementType; - serialization.member = member; - serialization.list = list; + serialization.elementType = elementType; + serialization.member = member; + serialization.list = list; - Sections[(int)serialization.section].CheckMixedMarshalling(list[0], elementType, (UInt32)list.Count); - ArrayWrites.Add(serialization); - } + Sections[(int)serialization.section].CheckMixedMarshalling(list[0], elementType, (UInt32)list.Count); + ArrayWrites.Add(serialization); + } - internal void QueueStringWrite(SectionType section, String s) - { - QueuedStringSerialization serialization; - serialization.section = section; - serialization.str = s; - StringWrites.Add(serialization); - } + internal void QueueStringWrite(SectionType section, String s) + { + QueuedStringSerialization serialization; + serialization.section = section; + serialization.str = s; + StringWrites.Add(serialization); } } diff --git a/LSLib/Granny/GR2Utils.cs b/LSLib/Granny/GR2Utils.cs index a38d334a..03c5db71 100644 --- a/LSLib/Granny/GR2Utils.cs +++ b/LSLib/Granny/GR2Utils.cs @@ -6,110 +6,109 @@ using LSLib.Granny.Model; using LSLib.LS; -namespace LSLib.Granny +namespace LSLib.Granny; + +public class GR2Utils { - public class GR2Utils - { - public delegate void ConversionErrorDelegate(string inputPath, string outputPath, Exception exc); + public delegate void ConversionErrorDelegate(string inputPath, string outputPath, Exception exc); - public delegate void ProgressUpdateDelegate(string status, long numerator, long denominator); + public delegate void ProgressUpdateDelegate(string status, long numerator, long denominator); - public ConversionErrorDelegate ConversionError = delegate { }; - public ProgressUpdateDelegate ProgressUpdate = delegate { }; + public ConversionErrorDelegate ConversionError = delegate { }; + public ProgressUpdateDelegate ProgressUpdate = delegate { }; - public static ExportFormat ExtensionToModelFormat(string path) + public static ExportFormat ExtensionToModelFormat(string path) + { + string extension = Path.GetExtension(path)?.ToLower(); + + return extension switch { - string extension = Path.GetExtension(path)?.ToLower(); + ".gr2" or ".lsm" => ExportFormat.GR2, + ".dae" => ExportFormat.DAE, + _ => throw new ArgumentException($"Unrecognized model file 
extension: {extension}"), + }; + } - return extension switch - { - ".gr2" or ".lsm" => ExportFormat.GR2, - ".dae" => ExportFormat.DAE, - _ => throw new ArgumentException($"Unrecognized model file extension: {extension}"), - }; - } + public static Root LoadModel(string inputPath) + { + var options = new ExporterOptions + { + InputFormat = ExtensionToModelFormat(inputPath) + }; + return LoadModel(inputPath, options); + } - public static Root LoadModel(string inputPath) + public static Root LoadModel(string inputPath, ExporterOptions options) + { + switch (options.InputFormat) { - var options = new ExporterOptions + case ExportFormat.GR2: { - InputFormat = ExtensionToModelFormat(inputPath) - }; - return LoadModel(inputPath, options); - } + using var fs = File.Open(inputPath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite); + var root = new Root(); + var gr2 = new GR2Reader(fs); + gr2.Read(root); + root.PostLoad(gr2.Tag); + return root; + } - public static Root LoadModel(string inputPath, ExporterOptions options) - { - switch (options.InputFormat) + case ExportFormat.DAE: { - case ExportFormat.GR2: + var importer = new ColladaImporter { - using var fs = File.Open(inputPath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite); - var root = new Root(); - var gr2 = new GR2Reader(fs); - gr2.Read(root); - root.PostLoad(gr2.Tag); - return root; - } - - case ExportFormat.DAE: - { - var importer = new ColladaImporter - { - Options = options - }; - return importer.Import(inputPath); - } - - default: - throw new ArgumentException("Invalid model format"); + Options = options + }; + return importer.Import(inputPath); } - } - public static void SaveModel(Root model, string outputPath, Exporter exporter) - { - exporter.Options.InputPath = null; - exporter.Options.Input = model; - exporter.Options.OutputPath = outputPath; - exporter.Export(); + default: + throw new ArgumentException("Invalid model format"); } + } - private static List EnumerateFiles(string path, ExportFormat format) - { - if (!path.EndsWith(Path.DirectorySeparatorChar.ToString())) - { - path += Path.DirectorySeparatorChar; - } + public static void SaveModel(Root model, string outputPath, Exporter exporter) + { + exporter.Options.InputPath = null; + exporter.Options.Input = model; + exporter.Options.OutputPath = outputPath; + exporter.Export(); + } - return Directory.EnumerateFiles(path, $"*.{format.ToString().ToLower()}", SearchOption.AllDirectories).ToList(); + private static List EnumerateFiles(string path, ExportFormat format) + { + if (!path.EndsWith(Path.DirectorySeparatorChar.ToString())) + { + path += Path.DirectorySeparatorChar; } - public void ConvertModels(string inputDirectoryPath, string outputDirectoryPath, Exporter exporter) - { - string outputExtension = exporter.Options.OutputFormat.ToString().ToLower(); + return Directory.EnumerateFiles(path, $"*.{format.ToString().ToLower()}", SearchOption.AllDirectories).ToList(); + } - ProgressUpdate("Enumerating files ...", 0, 1); - List inputFilePaths = EnumerateFiles(inputDirectoryPath, exporter.Options.InputFormat); + public void ConvertModels(string inputDirectoryPath, string outputDirectoryPath, Exporter exporter) + { + string outputExtension = exporter.Options.OutputFormat.ToString().ToLower(); - ProgressUpdate("Converting resources ...", 0, 1); - for (var i = 0; i < inputFilePaths.Count; i++) - { - string inputFilePath = inputFilePaths[i]; + ProgressUpdate("Enumerating files ...", 0, 1); + List inputFilePaths = EnumerateFiles(inputDirectoryPath, 
exporter.Options.InputFormat); - string outputFilePath = Path.ChangeExtension(inputFilePath.Replace(inputDirectoryPath, outputDirectoryPath), outputExtension); + ProgressUpdate("Converting resources ...", 0, 1); + for (var i = 0; i < inputFilePaths.Count; i++) + { + string inputFilePath = inputFilePaths[i]; - FileManager.TryToCreateDirectory(outputFilePath); + string outputFilePath = Path.ChangeExtension(inputFilePath.Replace(inputDirectoryPath, outputDirectoryPath), outputExtension); - ProgressUpdate($"Converting: {inputFilePath}", i, inputFilePaths.Count); - try - { - Root model = LoadModel(inputFilePath, exporter.Options); - SaveModel(model, outputFilePath, exporter); - } - catch (Exception exc) - { - ConversionError(inputFilePath, outputFilePath, exc); - } + FileManager.TryToCreateDirectory(outputFilePath); + + ProgressUpdate($"Converting: {inputFilePath}", i, inputFilePaths.Count); + try + { + Root model = LoadModel(inputFilePath, exporter.Options); + SaveModel(model, outputFilePath, exporter); + } + catch (Exception exc) + { + ConversionError(inputFilePath, outputFilePath, exc); } } } diff --git a/LSLib/Granny/Model/Animation.cs b/LSLib/Granny/Model/Animation.cs index ad9f20cf..4c6c2d7d 100644 --- a/LSLib/Granny/Model/Animation.cs +++ b/LSLib/Granny/Model/Animation.cs @@ -5,693 +5,692 @@ using LSLib.Granny.Model.CurveData; using LSLib.Granny.GR2; -namespace LSLib.Granny.Model +namespace LSLib.Granny.Model; + +public static class QuatHelpers { - public static class QuatHelpers + public static Quaternion Product(Quaternion r, Quaternion q) { - public static Quaternion Product(Quaternion r, Quaternion q) - { - return new Quaternion( - r.W * q.W - r.X * q.X - r.Y * q.Y - r.Z * q.Z, - r.W * q.X + r.X * q.W - r.Y * q.Z + r.Z * q.Y, - r.W * q.Y + r.X * q.Z + r.Y * q.W - r.Z * q.X, - r.W * q.Z - r.X * q.Y + r.Y * q.X + r.Z * q.W - ); - } + return new Quaternion( + r.W * q.W - r.X * q.X - r.Y * q.Y - r.Z * q.Z, + r.W * q.X + r.X * q.W - r.Y * q.Z + r.Z * q.Y, + r.W * q.Y + r.X * q.Z + r.Y * q.W - r.Z * q.X, + r.W * q.Z - r.X * q.Y + r.Y * q.X + r.Z * q.W + ); + } - public static float Dot(Quaternion r, Quaternion q) - { - return Vector3.Dot(r.Xyz, q.Xyz) + r.W * q.W; - } + public static float Dot(Quaternion r, Quaternion q) + { + return Vector3.Dot(r.Xyz, q.Xyz) + r.W * q.W; } +} - public class AnimationCurve +public class AnimationCurve +{ + [Serialization(Section = SectionType.Main, TypeSelector = typeof(AnimationCurveDataTypeSelector), Type = MemberType.VariantReference, MinVersion = 0x80000011)] + public AnimationCurveData CurveData; + [Serialization(MaxVersion = 0x80000010)] + public Int32 Degree; + [Serialization(Prototype = typeof(ControlReal32), Kind = SerializationKind.UserMember, Serializer = typeof(SingleListSerializer), MaxVersion = 0x80000010)] + public List Knots; + [Serialization(Prototype = typeof(ControlReal32), Kind = SerializationKind.UserMember, Serializer = typeof(SingleListSerializer), MaxVersion = 0x80000010)] + public List Controls; + + /// + /// Upgrades old animations (GR2 files with header version v6) to the new CurveData format + /// + public void UpgradeToGr7() { - [Serialization(Section = SectionType.Main, TypeSelector = typeof(AnimationCurveDataTypeSelector), Type = MemberType.VariantReference, MinVersion = 0x80000011)] - public AnimationCurveData CurveData; - [Serialization(MaxVersion = 0x80000010)] - public Int32 Degree; - [Serialization(Prototype = typeof(ControlReal32), Kind = SerializationKind.UserMember, Serializer = typeof(SingleListSerializer), 
MaxVersion = 0x80000010)] - public List Knots; - [Serialization(Prototype = typeof(ControlReal32), Kind = SerializationKind.UserMember, Serializer = typeof(SingleListSerializer), MaxVersion = 0x80000010)] - public List Controls; - - /// - /// Upgrades old animations (GR2 files with header version v6) to the new CurveData format - /// - public void UpgradeToGr7() - { - // Skip if we've already upgraded - if (this.CurveData != null) return; + // Skip if we've already upgraded + if (this.CurveData != null) return; - if (this.Degree == 0) - { - // Degree 0 curves are identities in all cases - CurveData = new DaIdentity - { - CurveDataHeader_DaIdentity = new CurveDataHeader - { - Format = (byte)CurveFormat.DaIdentity, - Degree = 0 - } - }; - } - else if (this.Degree == 2) + if (this.Degree == 0) + { + // Degree 0 curves are identities in all cases + CurveData = new DaIdentity { - if (this.Knots == null || this.Controls == null) + CurveDataHeader_DaIdentity = new CurveDataHeader { - throw new InvalidOperationException("Could not upgrade animation curve: knots/controls unavailable"); + Format = (byte)CurveFormat.DaIdentity, + Degree = 0 } - - // Degree 2 curves are stored in K32fC32f (v6 didn't support multiple curve formats) - CurveData = new DaK32fC32f - { - CurveDataHeader_DaK32fC32f = new CurveDataHeader - { - Format = (byte)CurveFormat.DaK32fC32f, - Degree = 2 - }, - Controls = Controls, - Knots = Knots - }; - } - else + }; + } + else if (this.Degree == 2) + { + if (this.Knots == null || this.Controls == null) { - throw new InvalidOperationException("Could not upgrade animation curve: Unsupported curve degree"); + throw new InvalidOperationException("Could not upgrade animation curve: knots/controls unavailable"); } - } - } - public class Keyframe - { - public float Time; - public bool HasTranslation; - public bool HasRotation; - public bool HasScaleShear; - public Vector3 Translation; - public Quaternion Rotation; - public Matrix3 ScaleShear; - - public Transform ToTransform() - { - var transform = new Transform(); - if (HasTranslation) transform.SetTranslation(Translation); - if (HasRotation) transform.SetRotation(Rotation); - if (HasScaleShear) transform.SetScaleShear(ScaleShear); - return transform; + // Degree 2 curves are stored in K32fC32f (v6 didn't support multiple curve formats) + CurveData = new DaK32fC32f + { + CurveDataHeader_DaK32fC32f = new CurveDataHeader + { + Format = (byte)CurveFormat.DaK32fC32f, + Degree = 2 + }, + Controls = Controls, + Knots = Knots + }; } - - public void FromTransform(Transform transform) + else { - Translation = transform.Translation; - Rotation = transform.Rotation; - ScaleShear = transform.ScaleShear; + throw new InvalidOperationException("Could not upgrade animation curve: Unsupported curve degree"); } - }; + } +} - public class KeyframeTrack +public class Keyframe +{ + public float Time; + public bool HasTranslation; + public bool HasRotation; + public bool HasScaleShear; + public Vector3 Translation; + public Quaternion Rotation; + public Matrix3 ScaleShear; + + public Transform ToTransform() { - public SortedList Keyframes = []; + var transform = new Transform(); + if (HasTranslation) transform.SetTranslation(Translation); + if (HasRotation) transform.SetRotation(Rotation); + if (HasScaleShear) transform.SetScaleShear(ScaleShear); + return transform; + } - private static Int32 FindFrame(IList list, T value, IComparer comparer = null) - { - ArgumentNullException.ThrowIfNull(list); + public void FromTransform(Transform transform) + { + Translation 
= transform.Translation; + Rotation = transform.Rotation; + ScaleShear = transform.ScaleShear; + } +}; - comparer ??= Comparer.Default; +public class KeyframeTrack +{ + public SortedList Keyframes = []; - Int32 lower = 0; - Int32 upper = list.Count - 1; + private static Int32 FindFrame(IList list, T value, IComparer comparer = null) + { + ArgumentNullException.ThrowIfNull(list); - while (lower <= upper) - { - Int32 middle = lower + (upper - lower) / 2; - Int32 comparisonResult = comparer.Compare(value, list[middle]); - if (comparisonResult == 0) - return middle; - else if (comparisonResult < 0) - upper = middle - 1; - else - lower = middle + 1; - } + comparer ??= Comparer.Default; - return ~lower; - } + Int32 lower = 0; + Int32 upper = list.Count - 1; - public Keyframe FindFrame(Single time, Single threshold = 0.01f) + while (lower <= upper) { - Int32 lower = FindFrame(Keyframes.Keys, time); - if (lower >= 0) - { - return Keyframes.Values[lower]; - } + Int32 middle = lower + (upper - lower) / 2; + Int32 comparisonResult = comparer.Compare(value, list[middle]); + if (comparisonResult == 0) + return middle; + else if (comparisonResult < 0) + upper = middle - 1; + else + lower = middle + 1; + } - if (-lower <= Keyframes.Count) - { - float frameTime = Keyframes.Keys[-lower - 1]; - if (Math.Abs(frameTime - time) < threshold) - { - return Keyframes.Values[-lower - 1]; - } - } + return ~lower; + } - return null; + public Keyframe FindFrame(Single time, Single threshold = 0.01f) + { + Int32 lower = FindFrame(Keyframes.Keys, time); + if (lower >= 0) + { + return Keyframes.Values[lower]; } - public Keyframe RequireFrame(Single time, Single threshold = 0.01f) + if (-lower <= Keyframes.Count) { - Keyframe frame = FindFrame(time, threshold); - if (frame == null) + float frameTime = Keyframes.Keys[-lower - 1]; + if (Math.Abs(frameTime - time) < threshold) { - frame = new Keyframe(); - frame.Time = time; - Keyframes.Add(time, frame); + return Keyframes.Values[-lower - 1]; } - - return frame; - } - - public void AddTranslation(Single time, Vector3 translation) - { - Keyframe frame = RequireFrame(time); - frame.Translation = translation; - frame.HasTranslation = true; } - public void AddRotation(Single time, Quaternion rotation) - { - Keyframe frame = RequireFrame(time); - frame.Rotation = rotation; - frame.HasRotation = true; - } + return null; + } - public void AddScaleShear(Single time, Matrix3 scaleShear) + public Keyframe RequireFrame(Single time, Single threshold = 0.01f) + { + Keyframe frame = FindFrame(time, threshold); + if (frame == null) { - Keyframe frame = RequireFrame(time); - frame.ScaleShear = scaleShear; - frame.HasScaleShear = true; + frame = new Keyframe(); + frame.Time = time; + Keyframes.Add(time, frame); } - public void MergeAdjacentFrames() - { - int i = 1; - while (i < Keyframes.Count) - { - Keyframe k0 = Keyframes.Values[i - 1], - k1 = Keyframes.Values[i]; - - if (k1.Time - k0.Time < 0.004f) - { - if (k1.HasTranslation && !k0.HasTranslation) - { - k0.HasTranslation = true; - k0.Translation = k1.Translation; - } + return frame; + } - if (k1.HasRotation && !k0.HasRotation) - { - k0.HasRotation = true; - k0.Rotation = k1.Rotation; - } + public void AddTranslation(Single time, Vector3 translation) + { + Keyframe frame = RequireFrame(time); + frame.Translation = translation; + frame.HasTranslation = true; + } - if (k1.HasScaleShear && !k0.HasScaleShear) - { - k0.HasScaleShear = true; - k0.ScaleShear = k1.ScaleShear; - } + public void AddRotation(Single time, Quaternion rotation) + { + 
Keyframe frame = RequireFrame(time); + frame.Rotation = rotation; + frame.HasRotation = true; + } - Keyframes.RemoveAt(i); - } - else - { - i++; - } - } - } + public void AddScaleShear(Single time, Matrix3 scaleShear) + { + Keyframe frame = RequireFrame(time); + frame.ScaleShear = scaleShear; + frame.HasScaleShear = true; + } - public void InterpolateFrames() + public void MergeAdjacentFrames() + { + int i = 1; + while (i < Keyframes.Count) { - for (int i = 1; i < Keyframes.Count; i++) - { - Keyframe k0 = Keyframes.Values[i - 1], - k1 = Keyframes.Values[i]; + Keyframe k0 = Keyframes.Values[i - 1], + k1 = Keyframes.Values[i]; - if (k0.HasTranslation && !k1.HasTranslation) + if (k1.Time - k0.Time < 0.004f) + { + if (k1.HasTranslation && !k0.HasTranslation) { - Keyframe k2 = null; - for (var j = i + 1; j < Keyframes.Count; j++) - { - if (Keyframes.Values[j].HasTranslation) - { - k2 = Keyframes.Values[j]; - break; - } - } - - k1.HasTranslation = true; - if (k2 != null) - { - float alpha = (k1.Time - k0.Time) / (k2.Time - k0.Time); - k1.Translation = Vector3.Lerp(k0.Translation, k2.Translation, alpha); - } - else - { - k1.Translation = k0.Translation; - } + k0.HasTranslation = true; + k0.Translation = k1.Translation; } - if (k0.HasRotation && !k1.HasRotation) + if (k1.HasRotation && !k0.HasRotation) { - Keyframe k2 = null; - for (var j = i + 1; j < Keyframes.Count; j++) - { - if (Keyframes.Values[j].HasRotation) - { - k2 = Keyframes.Values[j]; - break; - } - } - - k1.HasRotation = true; - if (k2 != null) - { - float alpha = (k1.Time - k0.Time) / (k2.Time - k0.Time); - k1.Rotation = Quaternion.Slerp(k0.Rotation, k2.Rotation, alpha); - } - else - { - k1.Rotation = k0.Rotation; - } + k0.HasRotation = true; + k0.Rotation = k1.Rotation; } - if (k0.HasScaleShear && !k1.HasScaleShear) + if (k1.HasScaleShear && !k0.HasScaleShear) { - Keyframe k2 = null; - for (var j = i + 1; j < Keyframes.Count; j++) - { - if (Keyframes.Values[j].HasScaleShear) - { - k2 = Keyframes.Values[j]; - break; - } - } - - k1.HasScaleShear = true; - if (k2 != null) - { - float alpha = (k1.Time - k0.Time) / (k2.Time - k0.Time); - k1.ScaleShear[0, 0] = k0.ScaleShear[0, 0] * (1.0f - alpha) + k2.ScaleShear[0, 0] * alpha; - k1.ScaleShear[1, 1] = k0.ScaleShear[1, 1] * (1.0f - alpha) + k2.ScaleShear[1, 1] * alpha; - k1.ScaleShear[2, 2] = k0.ScaleShear[2, 2] * (1.0f - alpha) + k2.ScaleShear[2, 2] * alpha; - } - else - { - k1.ScaleShear = k0.ScaleShear; - } + k0.HasScaleShear = true; + k0.ScaleShear = k1.ScaleShear; } - } - } - public void RemoveTrivialTranslations() - { - var times = Keyframes.Where(f => f.Value.HasTranslation).Select(f => f.Key).ToList(); - var transforms = Keyframes.Where(f => f.Value.HasTranslation).Select(f => f.Value.Translation).ToList(); - - var i = 1; - while (i < transforms.Count - 1) - { - Vector3 v0 = transforms[i - 1], - v1 = transforms[i], - v2 = transforms[i + 1]; - - Single t0 = times[i - 1], - t1 = times[i], - t2 = times[i + 1]; - - Single alpha = (t1 - t0) / (t2 - t0); - Vector3 v1l = Vector3.Lerp(v0, v2, alpha); - - if ((v1 - v1l).Length < 0.001f) - { - Keyframes[times[i]].HasTranslation = false; - Keyframes[times[i]].Translation = Vector3.Zero; - times.RemoveAt(i); - transforms.RemoveAt(i); - } - else - { - i++; - } + Keyframes.RemoveAt(i); } - - if (transforms.Count == 2 && (transforms[0] - transforms[1]).Length < 0.0001f) + else { - Keyframes[times[1]].HasTranslation = false; - Keyframes[times[1]].Translation = Vector3.Zero; - times.RemoveAt(1); - transforms.RemoveAt(1); + i++; } } + } - public void 
RemoveTrivialRotations() + public void InterpolateFrames() + { + for (int i = 1; i < Keyframes.Count; i++) { - var times = Keyframes.Where(f => f.Value.HasRotation).Select(f => f.Key).ToList(); - var transforms = Keyframes.Where(f => f.Value.HasRotation).Select(f => f.Value.Rotation).ToList(); + Keyframe k0 = Keyframes.Values[i - 1], + k1 = Keyframes.Values[i]; - var keyframesToRemove = 0; - for (int i = 1; i < transforms.Count - 1; i++) + if (k0.HasTranslation && !k1.HasTranslation) { - Quaternion v0 = transforms[i - 1], - v1 = transforms[i], - v2 = transforms[i + 1]; - - Single t0 = times[i - 1], - t1 = times[i], - t2 = times[i + 1]; - - Single alpha = (t1 - t0) / (t2 - t0); - Quaternion v1l = Quaternion.Slerp(v0, v2, alpha); - - if ((v1 - v1l).Length < 0.001f) + Keyframe k2 = null; + for (var j = i + 1; j < Keyframes.Count; j++) { - keyframesToRemove++; + if (Keyframes.Values[j].HasTranslation) + { + k2 = Keyframes.Values[j]; + break; + } } - } - if (keyframesToRemove == transforms.Count - 2 && (transforms[0] - transforms[^1]).Length < 0.0001f) - { - for (int i = 1; i < times.Count; i++) + k1.HasTranslation = true; + if (k2 != null) { - Keyframes[times[i]].HasRotation = false; - Keyframes[times[i]].Rotation = Quaternion.Identity; + float alpha = (k1.Time - k0.Time) / (k2.Time - k0.Time); + k1.Translation = Vector3.Lerp(k0.Translation, k2.Translation, alpha); + } + else + { + k1.Translation = k0.Translation; } - - times.RemoveRange(1, times.Count - 1); - transforms.RemoveRange(1, transforms.Count - 1); } - } - public void RemoveTrivialScales() - { - var times = Keyframes.Where(f => f.Value.HasScaleShear).Select(f => f.Key).ToList(); - var transforms = Keyframes.Where(f => f.Value.HasScaleShear).Select(f => f.Value.ScaleShear).ToList(); - - var i = 2; - while (i < transforms.Count - 1) + if (k0.HasRotation && !k1.HasRotation) { - Matrix3 t0 = transforms[i - 2], - t1 = transforms[i - 1], - t2 = transforms[i]; - - float diff1 = 0.0f, diff2 = 0.0f; - for (var x = 0; x < 3; x++) + Keyframe k2 = null; + for (var j = i + 1; j < Keyframes.Count; j++) { - for (var y = 0; y < 3; y++) + if (Keyframes.Values[j].HasRotation) { - diff1 += Math.Abs(t1[x, y] - t0[x, y]); - diff2 += Math.Abs(t2[x, y] - t1[x, y]); + k2 = Keyframes.Values[j]; + break; } } - if (diff1 < 0.001f && diff2 < 0.001f) + k1.HasRotation = true; + if (k2 != null) { - Keyframes[times[i]].HasScaleShear = false; - Keyframes[times[i]].ScaleShear = Matrix3.Identity; - times.RemoveAt(i); - transforms.RemoveAt(i); + float alpha = (k1.Time - k0.Time) / (k2.Time - k0.Time); + k1.Rotation = Quaternion.Slerp(k0.Rotation, k2.Rotation, alpha); } else { - i++; + k1.Rotation = k0.Rotation; } } - if (transforms.Count == 3) + if (k0.HasScaleShear && !k1.HasScaleShear) { - Matrix3 t0 = transforms[0], - t1 = transforms[1], - t2 = transforms[2]; - float diff = 0.0f; - for (var x = 0; x < 3; x++) + Keyframe k2 = null; + for (var j = i + 1; j < Keyframes.Count; j++) { - for (var y = 0; y < 3; y++) + if (Keyframes.Values[j].HasScaleShear) { - diff += Math.Abs(t0[x, y] - t1[x, y]) + Math.Abs(t0[x, y] - t2[x, y]); + k2 = Keyframes.Values[j]; + break; } } - if (diff < 0.001f) + k1.HasScaleShear = true; + if (k2 != null) { - Keyframes[times[2]].HasScaleShear = false; - Keyframes[times[2]].ScaleShear = Matrix3.Identity; - times.RemoveAt(2); - transforms.RemoveAt(2); - - Keyframes[times[1]].HasScaleShear = false; - Keyframes[times[1]].ScaleShear = Matrix3.Identity; - times.RemoveAt(1); - transforms.RemoveAt(1); + float alpha = (k1.Time - k0.Time) / (k2.Time - 
k0.Time); + k1.ScaleShear[0, 0] = k0.ScaleShear[0, 0] * (1.0f - alpha) + k2.ScaleShear[0, 0] * alpha; + k1.ScaleShear[1, 1] = k0.ScaleShear[1, 1] * (1.0f - alpha) + k2.ScaleShear[1, 1] * alpha; + k1.ScaleShear[2, 2] = k0.ScaleShear[2, 2] * (1.0f - alpha) + k2.ScaleShear[2, 2] * alpha; + } + else + { + k1.ScaleShear = k0.ScaleShear; } } } + } + + public void RemoveTrivialTranslations() + { + var times = Keyframes.Where(f => f.Value.HasTranslation).Select(f => f.Key).ToList(); + var transforms = Keyframes.Where(f => f.Value.HasTranslation).Select(f => f.Value.Translation).ToList(); - public void RemoveTrivialFrames() + var i = 1; + while (i < transforms.Count - 1) { - var newFrames = new SortedList(); - foreach (var kv in Keyframes) + Vector3 v0 = transforms[i - 1], + v1 = transforms[i], + v2 = transforms[i + 1]; + + Single t0 = times[i - 1], + t1 = times[i], + t2 = times[i + 1]; + + Single alpha = (t1 - t0) / (t2 - t0); + Vector3 v1l = Vector3.Lerp(v0, v2, alpha); + + if ((v1 - v1l).Length < 0.001f) { - if (kv.Value.HasTranslation - || kv.Value.HasRotation - || kv.Value.HasScaleShear) - { - newFrames.Add(kv.Key, kv.Value); - } + Keyframes[times[i]].HasTranslation = false; + Keyframes[times[i]].Translation = Vector3.Zero; + times.RemoveAt(i); + transforms.RemoveAt(i); + } + else + { + i++; } + } - Keyframes = newFrames; + if (transforms.Count == 2 && (transforms[0] - transforms[1]).Length < 0.0001f) + { + Keyframes[times[1]].HasTranslation = false; + Keyframes[times[1]].Translation = Vector3.Zero; + times.RemoveAt(1); + transforms.RemoveAt(1); } + } - public void SwapBindPose(Matrix4 oldBindPose, Matrix4 newBindPose) + public void RemoveTrivialRotations() + { + var times = Keyframes.Where(f => f.Value.HasRotation).Select(f => f.Key).ToList(); + var transforms = Keyframes.Where(f => f.Value.HasRotation).Select(f => f.Value.Rotation).ToList(); + + var keyframesToRemove = 0; + for (int i = 1; i < transforms.Count - 1; i++) { - var oldToNewTransform = newBindPose * oldBindPose.Inverted(); - foreach (var keyframe in Keyframes) + Quaternion v0 = transforms[i - 1], + v1 = transforms[i], + v2 = transforms[i + 1]; + + Single t0 = times[i - 1], + t1 = times[i], + t2 = times[i + 1]; + + Single alpha = (t1 - t0) / (t2 - t0); + Quaternion v1l = Quaternion.Slerp(v0, v2, alpha); + + if ((v1 - v1l).Length < 0.001f) { - var newTransform = oldToNewTransform * keyframe.Value.ToTransform().ToMatrix4(); - keyframe.Value.FromTransform(Transform.FromMatrix4(newTransform)); + keyframesToRemove++; } } - public static KeyframeTrack FromMatrices(IList times, IEnumerable transforms) + if (keyframesToRemove == transforms.Count - 2 && (transforms[0] - transforms[^1]).Length < 0.0001f) { - var track = new KeyframeTrack(); - - var translations = transforms.Select(m => m.ExtractTranslation()).ToList(); - var rotations = transforms.Select(m => m.ExtractRotation()).ToList(); - var scales = transforms.Select(m => m.ExtractScale()).ToList(); - - // Quaternion sign fixup - // The same rotation can be represented by both q and -q. However the Slerp path - // will be different; one will go the long away around, the other the short away around. - // Replace quaterions to ensure that Slerp will take the short path. 
- float flip = 1.0f; - for (var i = 0; i < rotations.Count - 1; i++) - { - var r0 = rotations[i]; - var r1 = rotations[i + 1]; - var dot = QuatHelpers.Dot(r0, r1 * flip); - - if (dot < 0.0f) - { - flip = -flip; - } - - rotations[i + 1] *= flip; - } - - for (var i = 0; i < times.Count; i++) + for (int i = 1; i < times.Count; i++) { - track.AddTranslation(times[i], translations[i]); - track.AddRotation(times[i], rotations[i]); - var scaleShear = new Matrix3( - scales[i][0], 0.0f, 0.0f, - 0.0f, scales[i][1], 0.0f, - 0.0f, 0.0f, scales[i][2] - ); - track.AddScaleShear(times[i], scaleShear); + Keyframes[times[i]].HasRotation = false; + Keyframes[times[i]].Rotation = Quaternion.Identity; } - return track; + times.RemoveRange(1, times.Count - 1); + transforms.RemoveRange(1, transforms.Count - 1); } } - public class TransformTrack + public void RemoveTrivialScales() { - public string Name; - [Serialization(MinVersion = 0x80000011)] - public int Flags; - [Serialization(Type = MemberType.Inline)] - public AnimationCurve OrientationCurve; - [Serialization(Type = MemberType.Inline)] - public AnimationCurve PositionCurve; - [Serialization(Type = MemberType.Inline)] - public AnimationCurve ScaleShearCurve; - [Serialization(Kind = SerializationKind.None)] - public Animation ParentAnimation; - - public static TransformTrack FromKeyframes(KeyframeTrack keyframes) + var times = Keyframes.Where(f => f.Value.HasScaleShear).Select(f => f.Key).ToList(); + var transforms = Keyframes.Where(f => f.Value.HasScaleShear).Select(f => f.Value.ScaleShear).ToList(); + + var i = 2; + while (i < transforms.Count - 1) { - var track = new TransformTrack - { - Flags = 0 - }; + Matrix3 t0 = transforms[i - 2], + t1 = transforms[i - 1], + t2 = transforms[i]; - var translateTimes = keyframes.Keyframes.Where(f => f.Value.HasTranslation).Select(f => f.Key).ToList(); - var translations = keyframes.Keyframes.Where(f => f.Value.HasTranslation).Select(f => f.Value.Translation).ToList(); - if (translateTimes.Count == 1) + float diff1 = 0.0f, diff2 = 0.0f; + for (var x = 0; x < 3; x++) { - var posCurve = new D3Constant32f + for (var y = 0; y < 3; y++) { - CurveDataHeader_D3Constant32f = new CurveDataHeader { Format = (int)CurveFormat.D3Constant32f, Degree = 2 }, - Controls = new float[3] { translations[0].X, translations[0].Y, translations[0].Z } - }; - track.PositionCurve = new AnimationCurve { CurveData = posCurve }; - } - else - { - var posCurve = new DaK32fC32f(); - posCurve.CurveDataHeader_DaK32fC32f = new CurveDataHeader { Format = (int)CurveFormat.DaK32fC32f, Degree = 2 }; - posCurve.SetKnots(translateTimes); - posCurve.SetPoints(translations); - track.PositionCurve = new AnimationCurve { CurveData = posCurve }; + diff1 += Math.Abs(t1[x, y] - t0[x, y]); + diff2 += Math.Abs(t2[x, y] - t1[x, y]); + } } - var rotationTimes = keyframes.Keyframes.Where(f => f.Value.HasRotation).Select(f => f.Key).ToList(); - var rotations = keyframes.Keyframes.Where(f => f.Value.HasRotation).Select(f => f.Value.Rotation).ToList(); - if (rotationTimes.Count == 1) + if (diff1 < 0.001f && diff2 < 0.001f) { - var rotCurve = new D4Constant32f - { - CurveDataHeader_D4Constant32f = new CurveDataHeader { Format = (int)CurveFormat.D4Constant32f, Degree = 2 }, - Controls = new float[4] { rotations[0].X, rotations[0].Y, rotations[0].Z, rotations[0].W } - }; - track.OrientationCurve = new AnimationCurve { CurveData = rotCurve }; + Keyframes[times[i]].HasScaleShear = false; + Keyframes[times[i]].ScaleShear = Matrix3.Identity; + times.RemoveAt(i); + 
transforms.RemoveAt(i); } else { - var rotCurve = new DaK32fC32f(); - rotCurve.CurveDataHeader_DaK32fC32f = new CurveDataHeader { Format = (int)CurveFormat.DaK32fC32f, Degree = 2 }; - rotCurve.SetKnots(rotationTimes); - rotCurve.SetQuaternions(rotations); - track.OrientationCurve = new AnimationCurve { CurveData = rotCurve }; + i++; } + } - var scaleTimes = keyframes.Keyframes.Where(f => f.Value.HasScaleShear).Select(f => f.Key).ToList(); - var scales = keyframes.Keyframes.Where(f => f.Value.HasScaleShear).Select(f => f.Value.ScaleShear).ToList(); - if (scaleTimes.Count == 1) + if (transforms.Count == 3) + { + Matrix3 t0 = transforms[0], + t1 = transforms[1], + t2 = transforms[2]; + float diff = 0.0f; + for (var x = 0; x < 3; x++) { - var scaleCurve = new DaConstant32f(); - scaleCurve.CurveDataHeader_DaConstant32f = new CurveDataHeader { Format = (int)CurveFormat.DaConstant32f, Degree = 2 }; - var m = scales[0]; - scaleCurve.Controls = - [ - m[0, 0], m[0, 1], m[0, 2], - m[1, 0], m[1, 1], m[1, 2], - m[2, 0], m[2, 1], m[2, 2] - ]; - track.ScaleShearCurve = new AnimationCurve { CurveData = scaleCurve }; + for (var y = 0; y < 3; y++) + { + diff += Math.Abs(t0[x, y] - t1[x, y]) + Math.Abs(t0[x, y] - t2[x, y]); + } } - else + + if (diff < 0.001f) { - var scaleCurve = new DaK32fC32f(); - scaleCurve.CurveDataHeader_DaK32fC32f = new CurveDataHeader { Format = (int)CurveFormat.DaK32fC32f, Degree = 2 }; - scaleCurve.SetKnots(scaleTimes); - scaleCurve.SetMatrices(scales); - track.ScaleShearCurve = new AnimationCurve { CurveData = scaleCurve }; - } + Keyframes[times[2]].HasScaleShear = false; + Keyframes[times[2]].ScaleShear = Matrix3.Identity; + times.RemoveAt(2); + transforms.RemoveAt(2); - return track; + Keyframes[times[1]].HasScaleShear = false; + Keyframes[times[1]].ScaleShear = Matrix3.Identity; + times.RemoveAt(1); + transforms.RemoveAt(1); + } } + } - public KeyframeTrack ToKeyframes() + public void RemoveTrivialFrames() + { + var newFrames = new SortedList(); + foreach (var kv in Keyframes) { - var track = new KeyframeTrack(); - - OrientationCurve.CurveData.ExportKeyframes(track, AnimationCurveData.ExportType.Rotation); - PositionCurve.CurveData.ExportKeyframes(track, AnimationCurveData.ExportType.Position); - ScaleShearCurve.CurveData.ExportKeyframes(track, AnimationCurveData.ExportType.ScaleShear); - - return track; + if (kv.Value.HasTranslation + || kv.Value.HasRotation + || kv.Value.HasScaleShear) + { + newFrames.Add(kv.Key, kv.Value); + } } - } - public class VectorTrack - { - public string Name; - public UInt32 TrackKey; - public Int32 Dimension; - [Serialization(Type = MemberType.Inline)] - public AnimationCurve ValueCurve; + Keyframes = newFrames; } - public class TransformLODError + public void SwapBindPose(Matrix4 oldBindPose, Matrix4 newBindPose) { - public Single Real32; + var oldToNewTransform = newBindPose * oldBindPose.Inverted(); + foreach (var keyframe in Keyframes) + { + var newTransform = oldToNewTransform * keyframe.Value.ToTransform().ToMatrix4(); + keyframe.Value.FromTransform(Transform.FromMatrix4(newTransform)); + } } - public class TextTrackEntry + public static KeyframeTrack FromMatrices(IList times, IEnumerable transforms) { - public Single TimeStamp; - public string Text; - } + var track = new KeyframeTrack(); + + var translations = transforms.Select(m => m.ExtractTranslation()).ToList(); + var rotations = transforms.Select(m => m.ExtractRotation()).ToList(); + var scales = transforms.Select(m => m.ExtractScale()).ToList(); + + // Quaternion sign fixup + // The 
same rotation can be represented by both q and -q. However the Slerp path + // will be different; one will go the long away around, the other the short away around. + // Replace quaterions to ensure that Slerp will take the short path. + float flip = 1.0f; + for (var i = 0; i < rotations.Count - 1; i++) + { + var r0 = rotations[i]; + var r1 = rotations[i + 1]; + var dot = QuatHelpers.Dot(r0, r1 * flip); + + if (dot < 0.0f) + { + flip = -flip; + } - public class TextTrack - { - public string Name; - public List Entries; - } + rotations[i + 1] *= flip; + } + + for (var i = 0; i < times.Count; i++) + { + track.AddTranslation(times[i], translations[i]); + track.AddRotation(times[i], rotations[i]); + var scaleShear = new Matrix3( + scales[i][0], 0.0f, 0.0f, + 0.0f, scales[i][1], 0.0f, + 0.0f, 0.0f, scales[i][2] + ); + track.AddScaleShear(times[i], scaleShear); + } - public class PeriodicLoop - { - public Single Radius; - public Single dAngle; - public Single dZ; - [Serialization(ArraySize = 3)] - public Single[] BasisX; - [Serialization(ArraySize = 3)] - public Single[] BasisY; - [Serialization(ArraySize = 3)] - public Single[] Axis; + return track; } +} - public class TrackGroup +public class TransformTrack +{ + public string Name; + [Serialization(MinVersion = 0x80000011)] + public int Flags; + [Serialization(Type = MemberType.Inline)] + public AnimationCurve OrientationCurve; + [Serialization(Type = MemberType.Inline)] + public AnimationCurve PositionCurve; + [Serialization(Type = MemberType.Inline)] + public AnimationCurve ScaleShearCurve; + [Serialization(Kind = SerializationKind.None)] + public Animation ParentAnimation; + + public static TransformTrack FromKeyframes(KeyframeTrack keyframes) { - public string Name; - public List VectorTracks; - public List TransformTracks; - public List TransformLODErrors; - public List TextTracks; - public Transform InitialPlacement; - public int AccumulationFlags; - [Serialization(ArraySize = 3)] - public float[] LoopTranslation; - public PeriodicLoop PeriodicLoop; - [Serialization(Type = MemberType.VariantReference)] - public BG3TrackGroupExtendedData ExtendedData; + var track = new TransformTrack + { + Flags = 0 + }; + + var translateTimes = keyframes.Keyframes.Where(f => f.Value.HasTranslation).Select(f => f.Key).ToList(); + var translations = keyframes.Keyframes.Where(f => f.Value.HasTranslation).Select(f => f.Value.Translation).ToList(); + if (translateTimes.Count == 1) + { + var posCurve = new D3Constant32f + { + CurveDataHeader_D3Constant32f = new CurveDataHeader { Format = (int)CurveFormat.D3Constant32f, Degree = 2 }, + Controls = new float[3] { translations[0].X, translations[0].Y, translations[0].Z } + }; + track.PositionCurve = new AnimationCurve { CurveData = posCurve }; + } + else + { + var posCurve = new DaK32fC32f(); + posCurve.CurveDataHeader_DaK32fC32f = new CurveDataHeader { Format = (int)CurveFormat.DaK32fC32f, Degree = 2 }; + posCurve.SetKnots(translateTimes); + posCurve.SetPoints(translations); + track.PositionCurve = new AnimationCurve { CurveData = posCurve }; + } + + var rotationTimes = keyframes.Keyframes.Where(f => f.Value.HasRotation).Select(f => f.Key).ToList(); + var rotations = keyframes.Keyframes.Where(f => f.Value.HasRotation).Select(f => f.Value.Rotation).ToList(); + if (rotationTimes.Count == 1) + { + var rotCurve = new D4Constant32f + { + CurveDataHeader_D4Constant32f = new CurveDataHeader { Format = (int)CurveFormat.D4Constant32f, Degree = 2 }, + Controls = new float[4] { rotations[0].X, rotations[0].Y, 
rotations[0].Z, rotations[0].W } + }; + track.OrientationCurve = new AnimationCurve { CurveData = rotCurve }; + } + else + { + var rotCurve = new DaK32fC32f(); + rotCurve.CurveDataHeader_DaK32fC32f = new CurveDataHeader { Format = (int)CurveFormat.DaK32fC32f, Degree = 2 }; + rotCurve.SetKnots(rotationTimes); + rotCurve.SetQuaternions(rotations); + track.OrientationCurve = new AnimationCurve { CurveData = rotCurve }; + } + + var scaleTimes = keyframes.Keyframes.Where(f => f.Value.HasScaleShear).Select(f => f.Key).ToList(); + var scales = keyframes.Keyframes.Where(f => f.Value.HasScaleShear).Select(f => f.Value.ScaleShear).ToList(); + if (scaleTimes.Count == 1) + { + var scaleCurve = new DaConstant32f(); + scaleCurve.CurveDataHeader_DaConstant32f = new CurveDataHeader { Format = (int)CurveFormat.DaConstant32f, Degree = 2 }; + var m = scales[0]; + scaleCurve.Controls = + [ + m[0, 0], m[0, 1], m[0, 2], + m[1, 0], m[1, 1], m[1, 2], + m[2, 0], m[2, 1], m[2, 2] + ]; + track.ScaleShearCurve = new AnimationCurve { CurveData = scaleCurve }; + } + else + { + var scaleCurve = new DaK32fC32f(); + scaleCurve.CurveDataHeader_DaK32fC32f = new CurveDataHeader { Format = (int)CurveFormat.DaK32fC32f, Degree = 2 }; + scaleCurve.SetKnots(scaleTimes); + scaleCurve.SetMatrices(scales); + track.ScaleShearCurve = new AnimationCurve { CurveData = scaleCurve }; + } + + return track; } - public class Animation + public KeyframeTrack ToKeyframes() { - public string Name; - public float Duration; - public float TimeStep; - [Serialization(MinVersion = 0x80000011)] - public float Oversampling; - [Serialization(Type = MemberType.ArrayOfReferences)] - public List TrackGroups; - [Serialization(MinVersion = 0x80000011)] - public Int32 DefaultLoopCount; - [Serialization(MinVersion = 0x80000011)] - public Int32 Flags; - [Serialization(Type = MemberType.VariantReference, MinVersion = 0x80000011)] - public object ExtendedData; + var track = new KeyframeTrack(); + + OrientationCurve.CurveData.ExportKeyframes(track, AnimationCurveData.ExportType.Rotation); + PositionCurve.CurveData.ExportKeyframes(track, AnimationCurveData.ExportType.Position); + ScaleShearCurve.CurveData.ExportKeyframes(track, AnimationCurveData.ExportType.ScaleShear); + + return track; } } + +public class VectorTrack +{ + public string Name; + public UInt32 TrackKey; + public Int32 Dimension; + [Serialization(Type = MemberType.Inline)] + public AnimationCurve ValueCurve; +} + +public class TransformLODError +{ + public Single Real32; +} + +public class TextTrackEntry +{ + public Single TimeStamp; + public string Text; +} + +public class TextTrack +{ + public string Name; + public List Entries; +} + +public class PeriodicLoop +{ + public Single Radius; + public Single dAngle; + public Single dZ; + [Serialization(ArraySize = 3)] + public Single[] BasisX; + [Serialization(ArraySize = 3)] + public Single[] BasisY; + [Serialization(ArraySize = 3)] + public Single[] Axis; +} + +public class TrackGroup +{ + public string Name; + public List VectorTracks; + public List TransformTracks; + public List TransformLODErrors; + public List TextTracks; + public Transform InitialPlacement; + public int AccumulationFlags; + [Serialization(ArraySize = 3)] + public float[] LoopTranslation; + public PeriodicLoop PeriodicLoop; + [Serialization(Type = MemberType.VariantReference)] + public BG3TrackGroupExtendedData ExtendedData; +} + +public class Animation +{ + public string Name; + public float Duration; + public float TimeStep; + [Serialization(MinVersion = 0x80000011)] + public 
float Oversampling; + [Serialization(Type = MemberType.ArrayOfReferences)] + public List TrackGroups; + [Serialization(MinVersion = 0x80000011)] + public Int32 DefaultLoopCount; + [Serialization(MinVersion = 0x80000011)] + public Int32 Flags; + [Serialization(Type = MemberType.VariantReference, MinVersion = 0x80000011)] + public object ExtendedData; +} diff --git a/LSLib/Granny/Model/ColladaExporter.cs b/LSLib/Granny/Model/ColladaExporter.cs index 21a98420..c5cb3887 100644 --- a/LSLib/Granny/Model/ColladaExporter.cs +++ b/LSLib/Granny/Model/ColladaExporter.cs @@ -8,997 +8,996 @@ using LSLib.LS.Enums; using System.IO; -namespace LSLib.Granny.Model +namespace LSLib.Granny.Model; + +public class ColladaMeshExporter(Mesh mesh, ExporterOptions options) { - public class ColladaMeshExporter(Mesh mesh, ExporterOptions options) + private Mesh ExportedMesh = mesh; + private ExporterOptions Options = options; + private List Sources; + private List Inputs; + private List InputOffsets; + private ulong LastInputOffset = 0; + private XmlDocument Xml = new(); + + private void AddInput(source collSource, string inputSemantic, string localInputSemantic = null, ulong setIndex = 0) { - private Mesh ExportedMesh = mesh; - private ExporterOptions Options = options; - private List Sources; - private List Inputs; - private List InputOffsets; - private ulong LastInputOffset = 0; - private XmlDocument Xml = new(); + if (collSource != null) + { + Sources.Add(collSource); + } - private void AddInput(source collSource, string inputSemantic, string localInputSemantic = null, ulong setIndex = 0) + if (inputSemantic != null) { - if (collSource != null) + var input = new InputLocal { - Sources.Add(collSource); - } + semantic = inputSemantic, + source = "#" + collSource.id + }; + Inputs.Add(input); + } - if (inputSemantic != null) + if (localInputSemantic != null) + { + var vertexInputOff = new InputLocalOffset { - var input = new InputLocal - { - semantic = inputSemantic, - source = "#" + collSource.id - }; - Inputs.Add(input); - } - - if (localInputSemantic != null) + semantic = localInputSemantic, + source = "#" + collSource.id, + offset = LastInputOffset++ + }; + if (localInputSemantic == "TEXCOORD" || localInputSemantic == "COLOR") { - var vertexInputOff = new InputLocalOffset - { - semantic = localInputSemantic, - source = "#" + collSource.id, - offset = LastInputOffset++ - }; - if (localInputSemantic == "TEXCOORD" || localInputSemantic == "COLOR") - { - vertexInputOff.set = setIndex; - } - - InputOffsets.Add(vertexInputOff); + vertexInputOff.set = setIndex; } + + InputOffsets.Add(vertexInputOff); } + } - private void DetermineInputsFromComponentNames(List componentNames) + private void DetermineInputsFromComponentNames(List componentNames) + { + foreach (var component in componentNames) { - foreach (var component in componentNames) + switch (component) { - switch (component) - { - case "Position": - { - var positions = ExportedMesh.PrimaryVertexData.MakeColladaPositions(ExportedMesh.Name); - AddInput(positions, "POSITION", "VERTEX"); - break; - } + case "Position": + { + var positions = ExportedMesh.PrimaryVertexData.MakeColladaPositions(ExportedMesh.Name); + AddInput(positions, "POSITION", "VERTEX"); + break; + } - case "Normal": + case "Normal": + { + if (Options.ExportNormals) { - if (Options.ExportNormals) - { - var normals = ExportedMesh.PrimaryVertexData.MakeColladaNormals(ExportedMesh.Name); - AddInput(normals, "NORMAL"); - } - break; + var normals = 
ExportedMesh.PrimaryVertexData.MakeColladaNormals(ExportedMesh.Name); + AddInput(normals, "NORMAL"); } + break; + } - case "Tangent": + case "Tangent": + { + if (Options.ExportTangents) { - if (Options.ExportTangents) - { - var tangents = ExportedMesh.PrimaryVertexData.MakeColladaTangents(ExportedMesh.Name); - AddInput(tangents, "TEXTANGENT"); - } - break; + var tangents = ExportedMesh.PrimaryVertexData.MakeColladaTangents(ExportedMesh.Name); + AddInput(tangents, "TEXTANGENT"); } + break; + } - case "Binormal": + case "Binormal": + { + if (Options.ExportTangents) { - if (Options.ExportTangents) - { - var binormals = ExportedMesh.PrimaryVertexData.MakeColladaBinormals(ExportedMesh.Name); - AddInput(binormals, "TEXBINORMAL"); - } - break; + var binormals = ExportedMesh.PrimaryVertexData.MakeColladaBinormals(ExportedMesh.Name); + AddInput(binormals, "TEXBINORMAL"); } + break; + } - case "TextureCoordinates0": - case "TextureCoordinates1": - case "TextureCoordinates2": - case "TextureCoordinates3": - case "TextureCoordinates4": - case "TextureCoordinates5": + case "TextureCoordinates0": + case "TextureCoordinates1": + case "TextureCoordinates2": + case "TextureCoordinates3": + case "TextureCoordinates4": + case "TextureCoordinates5": + { + if (Options.ExportUVs) { - if (Options.ExportUVs) - { - int uvIndex = Int32.Parse(component[^1..]); - var uvs = ExportedMesh.PrimaryVertexData.MakeColladaUVs(ExportedMesh.Name, uvIndex, Options.FlipUVs); - AddInput(uvs, null, "TEXCOORD", (ulong)uvIndex); - } - break; + int uvIndex = Int32.Parse(component[^1..]); + var uvs = ExportedMesh.PrimaryVertexData.MakeColladaUVs(ExportedMesh.Name, uvIndex, Options.FlipUVs); + AddInput(uvs, null, "TEXCOORD", (ulong)uvIndex); } + break; + } - // Same as TextureCoordinatesX, but with 1-based indices - case "MaxChannel_1": - case "MaxChannel_2": - case "UVChannel_1": - case "UVChannel_2": - case "map1": + // Same as TextureCoordinatesX, but with 1-based indices + case "MaxChannel_1": + case "MaxChannel_2": + case "UVChannel_1": + case "UVChannel_2": + case "map1": + { + if (Options.ExportUVs) { - if (Options.ExportUVs) - { - int uvIndex = Int32.Parse(component[^1..]) - 1; - var uvs = ExportedMesh.PrimaryVertexData.MakeColladaUVs(ExportedMesh.Name, uvIndex, Options.FlipUVs); - AddInput(uvs, null, "TEXCOORD", (ulong)uvIndex); - } - break; + int uvIndex = Int32.Parse(component[^1..]) - 1; + var uvs = ExportedMesh.PrimaryVertexData.MakeColladaUVs(ExportedMesh.Name, uvIndex, Options.FlipUVs); + AddInput(uvs, null, "TEXCOORD", (ulong)uvIndex); } - - case "BoneWeights": - case "BoneIndices": - // These are handled in ExportSkin() break; + } + + case "BoneWeights": + case "BoneIndices": + // These are handled in ExportSkin() + break; - case "DiffuseColor0": + case "DiffuseColor0": + { + if (Options.ExportColors) { - if (Options.ExportColors) - { - var colors = ExportedMesh.PrimaryVertexData.MakeColladaColors(ExportedMesh.Name, 0); - AddInput(colors, null, "COLOR", 0); - } - break; + var colors = ExportedMesh.PrimaryVertexData.MakeColladaColors(ExportedMesh.Name, 0); + AddInput(colors, null, "COLOR", 0); } + break; + } - default: - throw new NotImplementedException("Vertex component not supported: " + component); - } + default: + throw new NotImplementedException("Vertex component not supported: " + component); } } + } - private void DetermineInputsFromVertex(Vertex vertex) + private void DetermineInputsFromVertex(Vertex vertex) + { + var desc = vertex.Format; + if (desc.PositionType == PositionType.None) { - var desc = 
vertex.Format; - if (desc.PositionType == PositionType.None) - { - throw new NotImplementedException("Cannot import vertices without position"); - } - - // Vertex positions - var positions = ExportedMesh.PrimaryVertexData.MakeColladaPositions(ExportedMesh.Name); - AddInput(positions, "POSITION", "VERTEX"); - - // Normals - if (desc.NormalType != NormalType.None && Options.ExportNormals) - { - var normals = ExportedMesh.PrimaryVertexData.MakeColladaNormals(ExportedMesh.Name); - AddInput(normals, null, "NORMAL"); - } - - // Tangents - if (desc.TangentType != NormalType.None && Options.ExportTangents) - { - var normals = ExportedMesh.PrimaryVertexData.MakeColladaTangents(ExportedMesh.Name); - AddInput(normals, null, "TEXTANGENT"); - } - - // Binormals - if (desc.BinormalType != NormalType.None && Options.ExportTangents) - { - var normals = ExportedMesh.PrimaryVertexData.MakeColladaBinormals(ExportedMesh.Name); - AddInput(normals, null, "TEXBINORMAL"); - } - - // Texture coordinates - if (Options.ExportUVs) - { - for (var uvIndex = 0; uvIndex < desc.TextureCoordinates; uvIndex++) - { - var uvs = ExportedMesh.PrimaryVertexData.MakeColladaUVs(ExportedMesh.Name, uvIndex, Options.FlipUVs); - AddInput(uvs, null, "TEXCOORD", (ulong)uvIndex); - } - } + throw new NotImplementedException("Cannot import vertices without position"); + } - // Vertex colors - if (Options.ExportColors) - { - for (var colorIndex = 0; colorIndex < desc.ColorMaps; colorIndex++) - { - var colors = ExportedMesh.PrimaryVertexData.MakeColladaColors(ExportedMesh.Name, colorIndex); - AddInput(colors, null, "COLOR", (ulong)colorIndex); - } - } + // Vertex positions + var positions = ExportedMesh.PrimaryVertexData.MakeColladaPositions(ExportedMesh.Name); + AddInput(positions, "POSITION", "VERTEX"); - // BoneWeights and BoneIndices are handled in ExportSkin() + // Normals + if (desc.NormalType != NormalType.None && Options.ExportNormals) + { + var normals = ExportedMesh.PrimaryVertexData.MakeColladaNormals(ExportedMesh.Name); + AddInput(normals, null, "NORMAL"); } - private void AddTechniqueProperty(List props, string property, string value) + // Tangents + if (desc.TangentType != NormalType.None && Options.ExportTangents) { - var prop = Xml.CreateElement(property); - prop.InnerText = value; - props.Add(prop); + var normals = ExportedMesh.PrimaryVertexData.MakeColladaTangents(ExportedMesh.Name); + AddInput(normals, null, "TEXTANGENT"); } - private technique ExportLSLibProfile() + // Binormals + if (desc.BinormalType != NormalType.None && Options.ExportTangents) { - var profile = new technique() - { - profile = "LSTools" - }; - - var props = new List(); + var normals = ExportedMesh.PrimaryVertexData.MakeColladaBinormals(ExportedMesh.Name); + AddInput(normals, null, "TEXBINORMAL"); + } - if (ExportedMesh.ExportOrder != -1) + // Texture coordinates + if (Options.ExportUVs) + { + for (var uvIndex = 0; uvIndex < desc.TextureCoordinates; uvIndex++) { - AddTechniqueProperty(props, "ExportOrder", ExportedMesh.ExportOrder.ToString()); + var uvs = ExportedMesh.PrimaryVertexData.MakeColladaUVs(ExportedMesh.Name, uvIndex, Options.FlipUVs); + AddInput(uvs, null, "TEXCOORD", (ulong)uvIndex); } + } - var userProps = ExportedMesh.ExtendedData?.UserMeshProperties; - if (userProps != null) + // Vertex colors + if (Options.ExportColors) + { + for (var colorIndex = 0; colorIndex < desc.ColorMaps; colorIndex++) { - var flags = userProps.MeshFlags; - var clothFlags = userProps.ClothFlags; - - if (flags.IsMeshProxy()) - { - AddTechniqueProperty(props, 
"DivModelType", "MeshProxy"); - } - - if (flags.IsCloth()) - { - AddTechniqueProperty(props, "DivModelType", "Cloth"); - } - - if (flags.HasProxyGeometry()) - { - AddTechniqueProperty(props, "DivModelType", "ProxyGeometry"); - } - - if (flags.IsRigid()) - { - AddTechniqueProperty(props, "DivModelType", "Rigid"); - } - - if (flags.IsSpring()) - { - AddTechniqueProperty(props, "DivModelType", "Spring"); - } - - if (flags.IsOccluder()) - { - AddTechniqueProperty(props, "DivModelType", "Occluder"); - } - - if (clothFlags.HasClothFlag01()) - { - AddTechniqueProperty(props, "DivModelType", "Cloth01"); - } - - if (clothFlags.HasClothFlag02()) - { - AddTechniqueProperty(props, "DivModelType", "Cloth02"); - } - - if (clothFlags.HasClothFlag04()) - { - AddTechniqueProperty(props, "DivModelType", "Cloth04"); - } + var colors = ExportedMesh.PrimaryVertexData.MakeColladaColors(ExportedMesh.Name, colorIndex); + AddInput(colors, null, "COLOR", (ulong)colorIndex); + } + } - if (clothFlags.HasClothPhysics()) - { - AddTechniqueProperty(props, "DivModelType", "ClothPhysics"); - } + // BoneWeights and BoneIndices are handled in ExportSkin() + } - if (userProps.IsImpostor != null && userProps.IsImpostor[0] == 1) - { - AddTechniqueProperty(props, "IsImpostor", "1"); - } + private void AddTechniqueProperty(List props, string property, string value) + { + var prop = Xml.CreateElement(property); + prop.InnerText = value; + props.Add(prop); + } - if (userProps.Lod != null && userProps.Lod[0] != -1) - { - AddTechniqueProperty(props, "LOD", $"{userProps.Lod[0]}"); - } + private technique ExportLSLibProfile() + { + var profile = new technique() + { + profile = "LSTools" + }; - if (userProps.LodDistance != null && userProps.LodDistance[0] > 0 && userProps.LodDistance[0] < 1.0E+30f) - { - AddTechniqueProperty(props, "LODDistance", $"{userProps.LodDistance[0]}"); - } - } + var props = new List(); - profile.Any = props.ToArray(); - return profile; + if (ExportedMesh.ExportOrder != -1) + { + AddTechniqueProperty(props, "ExportOrder", ExportedMesh.ExportOrder.ToString()); } - public mesh Export() + var userProps = ExportedMesh.ExtendedData?.UserMeshProperties; + if (userProps != null) { - // Jank we need to create XMLElements on the fly - Sources = []; - Inputs = []; - InputOffsets = []; - LastInputOffset = 0; + var flags = userProps.MeshFlags; + var clothFlags = userProps.ClothFlags; - var vertexData = ExportedMesh.PrimaryVertexData; - if (vertexData.Vertices != null - && vertexData.Vertices.Count > 0) + if (flags.IsMeshProxy()) { - var vertex = vertexData.Vertices[0]; - DetermineInputsFromVertex(vertex); + AddTechniqueProperty(props, "DivModelType", "MeshProxy"); } - else + + if (flags.IsCloth()) { - var componentNames = ExportedMesh.VertexComponentNames(); - DetermineInputsFromComponentNames(componentNames); + AddTechniqueProperty(props, "DivModelType", "Cloth"); } - // TODO: model transform/inverse transform? 
- var triangles = ExportedMesh.PrimaryTopology.MakeColladaTriangles( - InputOffsets.ToArray(), - vertexData.Deduplicator.Vertices.DeduplicationMap, - vertexData.Deduplicator.Normals.DeduplicationMap, - vertexData.Deduplicator.UVs.Select(uv => uv.DeduplicationMap).ToList(), - vertexData.Deduplicator.Colors.Select(color => color.DeduplicationMap).ToList() - ); - - var colladaMesh = new mesh + if (flags.HasProxyGeometry()) { - vertices = new vertices - { - id = ExportedMesh.Name + "-vertices", - input = Inputs.ToArray() - }, - source = Sources.ToArray(), - Items = [triangles], - extra = - [ - new extra - { - technique = - [ - ExportLSLibProfile() - ] - } - ] - }; - - return colladaMesh; - } - } - - - public class ColladaExporter - { - [Serialization(Kind = SerializationKind.None)] - public ExporterOptions Options = new(); - - private XmlDocument Xml = new(); + AddTechniqueProperty(props, "DivModelType", "ProxyGeometry"); + } - private void ExportMeshBinding(Model model, string skelRef, MeshBinding meshBinding, List geometries, List controllers, List geomNodes) - { - var exporter = new ColladaMeshExporter(meshBinding.Mesh, Options); - var mesh = exporter.Export(); - var geom = new geometry + if (flags.IsRigid()) { - id = meshBinding.Mesh.Name + "-geom", - name = meshBinding.Mesh.Name, - Item = mesh - }; - geometries.Add(geom); + AddTechniqueProperty(props, "DivModelType", "Rigid"); + } - bool hasSkin = skelRef != null && meshBinding.Mesh.IsSkinned(); - skin skin = null; - controller ctrl = null; - if (hasSkin) + if (flags.IsSpring()) { - var boneNames = new Dictionary(); - foreach (var bone in model.Skeleton.Bones) - { - boneNames.Add(bone.Name, bone); - } - - skin = ExportSkin(meshBinding.Mesh, model.Skeleton.Bones, boneNames, geom.id); - ctrl = new controller - { - id = meshBinding.Mesh.Name + "-skin", - name = meshBinding.Mesh.Name + "_Skin", - Item = skin - }; - controllers.Add(ctrl); + AddTechniqueProperty(props, "DivModelType", "Spring"); } - var geomNode = new node + if (flags.IsOccluder()) { - id = geom.name + "-node", - name = geom.name, - type = NodeType.NODE - }; + AddTechniqueProperty(props, "DivModelType", "Occluder"); + } - if (hasSkin) + if (clothFlags.HasClothFlag01()) { - var controllerInstance = new instance_controller - { - url = "#" + ctrl.id, - skeleton = ["#" + skelRef] - }; - geomNode.instance_controller = [controllerInstance]; + AddTechniqueProperty(props, "DivModelType", "Cloth01"); } - else + + if (clothFlags.HasClothFlag02()) { - var geomInstance = new instance_geometry - { - url = "#" + geom.id - }; - geomNode.instance_geometry = [geomInstance]; + AddTechniqueProperty(props, "DivModelType", "Cloth02"); } - geomNodes.Add(geomNode); - } - - private skin ExportSkin(Mesh mesh, List bones, Dictionary nameMaps, string geometryId) - { - var sources = new List(); - var joints = new List(); - var poses = new List(); - - var boundBones = new HashSet(); - var orderedBones = new List(); - foreach (var boneBinding in mesh.BoneBindings) + if (clothFlags.HasClothFlag04()) { - boundBones.Add(boneBinding.BoneName); - orderedBones.Add(nameMaps[boneBinding.BoneName]); + AddTechniqueProperty(props, "DivModelType", "Cloth04"); } - /* - * Append all bones to the end of the bone list, even if they're not influencing the mesh. - * We need this because some tools (eg. Blender) expect all bones to be present, otherwise their - * inverse world transform would reset to identity. 
- */ - foreach (var bone in bones) + if (clothFlags.HasClothPhysics()) { - if (!boundBones.Contains(bone.Name)) - { - orderedBones.Add(bone); - } + AddTechniqueProperty(props, "DivModelType", "ClothPhysics"); } - foreach (var bone in orderedBones) + if (userProps.IsImpostor != null && userProps.IsImpostor[0] == 1) { - boundBones.Add(bone.Name); - joints.Add(bone.Name); - - var invWorldTransform = ColladaHelpers.FloatsToMatrix(bone.InverseWorldTransform); - invWorldTransform.Transpose(); - - poses.AddRange(new float[] { - invWorldTransform.M11, invWorldTransform.M12, invWorldTransform.M13, invWorldTransform.M14, - invWorldTransform.M21, invWorldTransform.M22, invWorldTransform.M23, invWorldTransform.M24, - invWorldTransform.M31, invWorldTransform.M32, invWorldTransform.M33, invWorldTransform.M34, - invWorldTransform.M41, invWorldTransform.M42, invWorldTransform.M43, invWorldTransform.M44 - }); + AddTechniqueProperty(props, "IsImpostor", "1"); } - var jointSource = ColladaUtils.MakeNameSource(mesh.Name, "joints", ["JOINT"], joints.ToArray()); - var poseSource = ColladaUtils.MakeFloatSource(mesh.Name, "poses", ["TRANSFORM"], poses.ToArray(), 16, "float4x4"); - var weightsSource = mesh.PrimaryVertexData.MakeBoneWeights(mesh.Name); - - var vertices = mesh.PrimaryVertexData.Deduplicator.Vertices.Uniques; - var vertexInfluenceCounts = new List(vertices.Count); - var vertexInfluences = new List(vertices.Count); - int weightIdx = 0; - foreach (var vertex in vertices) + if (userProps.Lod != null && userProps.Lod[0] != -1) { - int influences = 0; - var indices = vertex.Indices; - var weights = vertex.Weights; - for (int i = 0; i < 4; i++) - { - if (weights[i] > 0) - { - influences++; - vertexInfluences.Add(indices[i]); - vertexInfluences.Add(weightIdx++); - } - } - - vertexInfluenceCounts.Add(influences); + AddTechniqueProperty(props, "LOD", $"{userProps.Lod[0]}"); } - var jointOffsets = new InputLocalOffset + if (userProps.LodDistance != null && userProps.LodDistance[0] > 0 && userProps.LodDistance[0] < 1.0E+30f) { - semantic = "JOINT", - source = "#" + jointSource.id, - offset = 0 - }; + AddTechniqueProperty(props, "LODDistance", $"{userProps.LodDistance[0]}"); + } + } - var weightOffsets = new InputLocalOffset - { - semantic = "WEIGHT", - source = "#" + weightsSource.id, - offset = 1 - }; + profile.Any = props.ToArray(); + return profile; + } - var vertWeights = new skinVertex_weights - { - count = (ulong)vertices.Count, - input = [jointOffsets, weightOffsets], - v = string.Join(" ", vertexInfluences.Select(x => x.ToString()).ToArray()), - vcount = string.Join(" ", vertexInfluenceCounts.Select(x => x.ToString()).ToArray()) - }; + public mesh Export() + { + // Jank we need to create XMLElements on the fly + Sources = []; + Inputs = []; + InputOffsets = []; + LastInputOffset = 0; + + var vertexData = ExportedMesh.PrimaryVertexData; + if (vertexData.Vertices != null + && vertexData.Vertices.Count > 0) + { + var vertex = vertexData.Vertices[0]; + DetermineInputsFromVertex(vertex); + } + else + { + var componentNames = ExportedMesh.VertexComponentNames(); + DetermineInputsFromComponentNames(componentNames); + } - var skin = new skin - { - source1 = "#" + geometryId, - bind_shape_matrix = "1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1", + // TODO: model transform/inverse transform? 
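// The deduplication maps passed below come from PrimaryVertexData.Deduplicator; presumably they let the
// generated triangle indices reference the deduplicated COLLADA sources gathered above.
// For reference, a minimal hypothetical usage sketch of this exporter, mirroring ExportMeshBinding()
// further below ("sourceMesh" and "opts" are placeholder names, not identifiers from this change):
//     var meshExporter = new ColladaMeshExporter(sourceMesh, opts);
//     var geom = new geometry { id = sourceMesh.Name + "-geom", name = sourceMesh.Name, Item = meshExporter.Export() };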
+ var triangles = ExportedMesh.PrimaryTopology.MakeColladaTriangles( + InputOffsets.ToArray(), + vertexData.Deduplicator.Vertices.DeduplicationMap, + vertexData.Deduplicator.Normals.DeduplicationMap, + vertexData.Deduplicator.UVs.Select(uv => uv.DeduplicationMap).ToList(), + vertexData.Deduplicator.Colors.Select(color => color.DeduplicationMap).ToList() + ); - joints = new skinJoints - { - input = [ - new InputLocal - { - semantic = "JOINT", - source = "#" + jointSource.id - }, - new InputLocal - { - semantic = "INV_BIND_MATRIX", - source = "#" + poseSource.id - } + var colladaMesh = new mesh + { + vertices = new vertices + { + id = ExportedMesh.Name + "-vertices", + input = Inputs.ToArray() + }, + source = Sources.ToArray(), + Items = [triangles], + extra = + [ + new extra + { + technique = + [ + ExportLSLibProfile() ] - }, + } + ] + }; - source = [jointSource, poseSource, weightsSource], - vertex_weights = vertWeights - }; + return colladaMesh; + } +} - return skin; - } - private node ExportBone(Skeleton skeleton, string name, int index, Bone bone) +public class ColladaExporter +{ + [Serialization(Kind = SerializationKind.None)] + public ExporterOptions Options = new(); + + private XmlDocument Xml = new(); + + private void ExportMeshBinding(Model model, string skelRef, MeshBinding meshBinding, List geometries, List controllers, List geomNodes) + { + var exporter = new ColladaMeshExporter(meshBinding.Mesh, Options); + var mesh = exporter.Export(); + var geom = new geometry { - var node = bone.MakeCollada(Xml); - var children = new List(); - for (int i = 0; i < skeleton.Bones.Count; i++) + id = meshBinding.Mesh.Name + "-geom", + name = meshBinding.Mesh.Name, + Item = mesh + }; + geometries.Add(geom); + + bool hasSkin = skelRef != null && meshBinding.Mesh.IsSkinned(); + skin skin = null; + controller ctrl = null; + if (hasSkin) + { + var boneNames = new Dictionary(); + foreach (var bone in model.Skeleton.Bones) { - if (skeleton.Bones[i].ParentIndex == index) - children.Add(ExportBone(skeleton, name, i, skeleton.Bones[i])); + boneNames.Add(bone.Name, bone); } - node.node1 = children.ToArray(); - return node; + skin = ExportSkin(meshBinding.Mesh, model.Skeleton.Bones, boneNames, geom.id); + ctrl = new controller + { + id = meshBinding.Mesh.Name + "-skin", + name = meshBinding.Mesh.Name + "_Skin", + Item = skin + }; + controllers.Add(ctrl); } - public node ExportSkeleton(Skeleton skeleton, string name) + var geomNode = new node { - int rootIndex = -1; + id = geom.name + "-node", + name = geom.name, + type = NodeType.NODE + }; - // Find the root bone and export it - for (var i = 0; i < skeleton.Bones.Count; i++) - { - if (skeleton.Bones[i].IsRoot) - { - if (rootIndex == -1) - { - rootIndex = i; - } - else - { - throw new ParsingException( - "Model has multiple root bones! 
Please use the \"Conform to GR2\" option to " + - "make sure that all bones from the base mesh are included in the export."); - } - } - } - - if (rootIndex == -1) + if (hasSkin) + { + var controllerInstance = new instance_controller { - throw new ParsingException("Model has no root bone!"); - } - - return ExportBone(skeleton, name, rootIndex, skeleton.Bones[rootIndex]); + url = "#" + ctrl.id, + skeleton = ["#" + skelRef] + }; + geomNode.instance_controller = [controllerInstance]; } - - private void ExportModels(Root root, List geometries, List controllers, List geomNodes) + else { - if (root.Models == null) + var geomInstance = new instance_geometry { - return; - } + url = "#" + geom.id + }; + geomNode.instance_geometry = [geomInstance]; + } - foreach(var model in root.Models) - { - string skelRef = null; - if (model.Skeleton != null && !model.Skeleton.IsDummy && model.Skeleton.Bones.Count > 1 && root.Skeletons.Any(s => s.Name == model.Skeleton.Name)) - { - Utils.Info($"Exporting model {model.Name} with skeleton {model.Skeleton.Name}"); - var skeleton = ExportSkeleton(model.Skeleton, model.Name); - geomNodes.Add(skeleton); - skelRef = skeleton.id; - } + geomNodes.Add(geomNode); + } - if (model.MeshBindings != null) - { - foreach (var meshBinding in model.MeshBindings) - { - ExportMeshBinding(model, skelRef, meshBinding, geometries, controllers, geomNodes); - } - } - } - } + private skin ExportSkin(Mesh mesh, List bones, Dictionary nameMaps, string geometryId) + { + var sources = new List(); + var joints = new List(); + var poses = new List(); - private technique ExportAnimationLSLibProfile(BG3TrackGroupExtendedData extData) + var boundBones = new HashSet(); + var orderedBones = new List(); + foreach (var boneBinding in mesh.BoneBindings) { - var profile = new technique() - { - profile = "LSTools" - }; + boundBones.Add(boneBinding.BoneName); + orderedBones.Add(nameMaps[boneBinding.BoneName]); + } - var props = new List(); - if (extData != null && extData.SkeletonResourceID != null && extData.SkeletonResourceID != "") + /* + * Append all bones to the end of the bone list, even if they're not influencing the mesh. + * We need this because some tools (eg. Blender) expect all bones to be present, otherwise their + * inverse world transform would reset to identity. 
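* (Illustrative example with a hypothetical bone name: a "Twist_Helper" bone that no vertex is
* weighted to is still appended to orderedBones below, so its inverse bind matrix is written to
* the INV_BIND_MATRIX source of the skin rather than being dropped and treated as identity on import.)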
+ */ + foreach (var bone in bones) + { + if (!boundBones.Contains(bone.Name)) { - var prop = Xml.CreateElement("SkeletonResourceID"); - prop.InnerText = extData.SkeletonResourceID; - props.Add(prop); + orderedBones.Add(bone); } - - profile.Any = props.ToArray(); - return profile; } - public List ExportKeyframeTrack(TransformTrack transformTrack, BG3TrackGroupExtendedData extData, string name, string target) + foreach (var bone in orderedBones) { - var track = transformTrack.ToKeyframes(); - track.MergeAdjacentFrames(); - track.InterpolateFrames(); + boundBones.Add(bone.Name); + joints.Add(bone.Name); + + var invWorldTransform = ColladaHelpers.FloatsToMatrix(bone.InverseWorldTransform); + invWorldTransform.Transpose(); + + poses.AddRange(new float[] { + invWorldTransform.M11, invWorldTransform.M12, invWorldTransform.M13, invWorldTransform.M14, + invWorldTransform.M21, invWorldTransform.M22, invWorldTransform.M23, invWorldTransform.M24, + invWorldTransform.M31, invWorldTransform.M32, invWorldTransform.M33, invWorldTransform.M34, + invWorldTransform.M41, invWorldTransform.M42, invWorldTransform.M43, invWorldTransform.M44 + }); + } - var anims = new List(); + var jointSource = ColladaUtils.MakeNameSource(mesh.Name, "joints", ["JOINT"], joints.ToArray()); + var poseSource = ColladaUtils.MakeFloatSource(mesh.Name, "poses", ["TRANSFORM"], poses.ToArray(), 16, "float4x4"); + var weightsSource = mesh.PrimaryVertexData.MakeBoneWeights(mesh.Name); - var outputs = new List(track.Keyframes.Count * 16); - foreach (var keyframe in track.Keyframes.Values) + var vertices = mesh.PrimaryVertexData.Deduplicator.Vertices.Uniques; + var vertexInfluenceCounts = new List(vertices.Count); + var vertexInfluences = new List(vertices.Count); + int weightIdx = 0; + foreach (var vertex in vertices) + { + int influences = 0; + var indices = vertex.Indices; + var weights = vertex.Weights; + for (int i = 0; i < 4; i++) { - var transform = keyframe.ToTransform().ToMatrix4(); - transform.Transpose(); - for (int i = 0; i < 4; i++) + if (weights[i] > 0) { - for (int j = 0; j < 4; j++) - outputs.Add(transform[i, j]); + influences++; + vertexInfluences.Add(indices[i]); + vertexInfluences.Add(weightIdx++); } } - var interpolations = new List(track.Keyframes.Count); - for (int i = 0; i < track.Keyframes.Count; i++) - { - interpolations.Add("LINEAR"); - } + vertexInfluenceCounts.Add(influences); + } - var knots = new List(track.Keyframes.Count); - foreach (var keyframe in track.Keyframes) - { - knots.Add(keyframe.Key); - } + var jointOffsets = new InputLocalOffset + { + semantic = "JOINT", + source = "#" + jointSource.id, + offset = 0 + }; - /* - * Fix up animations that have only one keyframe by adding another keyframe at - * the end of the animation. 
- * (This mainly applies to DaIdentity and DnConstant32f) - */ - if (track.Keyframes.Count == 1) - { - knots.Add(transformTrack.ParentAnimation.Duration); - for (int i = 0; i < 16; i++) - outputs.Add(outputs[i]); - interpolations.Add(interpolations[0]); - } + var weightOffsets = new InputLocalOffset + { + semantic = "WEIGHT", + source = "#" + weightsSource.id, + offset = 1 + }; + + var vertWeights = new skinVertex_weights + { + count = (ulong)vertices.Count, + input = [jointOffsets, weightOffsets], + v = string.Join(" ", vertexInfluences.Select(x => x.ToString()).ToArray()), + vcount = string.Join(" ", vertexInfluenceCounts.Select(x => x.ToString()).ToArray()) + }; - var knotsSource = ColladaUtils.MakeFloatSource(name, "inputs", ["TIME"], knots.ToArray()); - var outSource = ColladaUtils.MakeFloatSource(name, "outputs", ["TRANSFORM"], outputs.ToArray(), 16, "float4x4"); - var interpSource = ColladaUtils.MakeNameSource(name, "interpolations", ["INTERPOLATION"], interpolations.ToArray()); + var skin = new skin + { + source1 = "#" + geometryId, + bind_shape_matrix = "1 0 0 0 0 1 0 0 0 0 1 0 0 0 0 1", - var sampler = new sampler + joints = new skinJoints { - id = name + "_sampler", - input = - [ - new InputLocal - { - semantic = "INTERPOLATION", - source = "#" + interpSource.id - }, + input = [ new InputLocal { - semantic = "OUTPUT", - source = "#" + outSource.id + semantic = "JOINT", + source = "#" + jointSource.id }, new InputLocal { - semantic = "INPUT", - source = "#" + knotsSource.id + semantic = "INV_BIND_MATRIX", + source = "#" + poseSource.id } ] - }; + }, - var channel = new channel - { - source = "#" + sampler.id, - target = target - }; + source = [jointSource, poseSource, weightsSource], + vertex_weights = vertWeights + }; - var animation = new animation - { - id = name, - name = name, - Items = [ - knotsSource, - outSource, - interpSource, - sampler, - channel - ], - extra = - [ - new extra - { - technique = - [ - ExportAnimationLSLibProfile(extData) - ] - } - ] - }; - - anims.Add(animation); - return anims; - } + return skin; + } - public List ExportTrack(TransformTrack track, BG3TrackGroupExtendedData extData) + private node ExportBone(Skeleton skeleton, string name, int index, Bone bone) + { + var node = bone.MakeCollada(Xml); + var children = new List(); + for (int i = 0; i < skeleton.Bones.Count; i++) { - var anims = new List(); - var name = track.Name.Replace(' ', '_'); - var boneName = "Bone_" + track.Name.Replace(' ', '_'); - - // Export all tracks in a single transform - anims.AddRange(ExportKeyframeTrack(track, extData, name + "_Transform", boneName + "/Transform")); - - return anims; + if (skeleton.Bones[i].ParentIndex == index) + children.Add(ExportBone(skeleton, name, i, skeleton.Bones[i])); } - public List ExportTracks(TrackGroup trackGroup) - { - var anims = new List(); - foreach (var track in trackGroup.TransformTracks) - { - anims.AddRange(ExportTrack(track, trackGroup.ExtendedData)); - } + node.node1 = children.ToArray(); + return node; + } - return anims; - } + public node ExportSkeleton(Skeleton skeleton, string name) + { + int rootIndex = -1; - public List ExportAnimations(Animation animation) + // Find the root bone and export it + for (var i = 0; i < skeleton.Bones.Count; i++) { - var animations = new List(); - foreach (var trackGroup in animation.TrackGroups) + if (skeleton.Bones[i].IsRoot) { - /* - * We need to propagate animation data as the track exporter may need information from it - * (Duration and TimeStep usually) - */ - foreach (var track in 
trackGroup.TransformTracks) + if (rootIndex == -1) { - track.ParentAnimation = animation; - track.OrientationCurve.CurveData.ParentAnimation = animation; - track.PositionCurve.CurveData.ParentAnimation = animation; - track.ScaleShearCurve.CurveData.ParentAnimation = animation; + rootIndex = i; + } + else + { + throw new ParsingException( + "Model has multiple root bones! Please use the \"Conform to GR2\" option to " + + "make sure that all bones from the base mesh are included in the export."); } - - animations.AddRange(ExportTracks(trackGroup)); } + } - return animations; + if (rootIndex == -1) + { + throw new ParsingException("Model has no root bone!"); } + + return ExportBone(skeleton, name, rootIndex, skeleton.Bones[rootIndex]); + } - private Game DetectGame(Root root) + private void ExportModels(Root root, List geometries, List controllers, List geomNodes) + { + if (root.Models == null) { - if (root.GR2Tag == Header.Tag_DOS) - { - return Game.DivinityOriginalSin; - } + return; + } - if (root.GR2Tag == Header.Tag_DOS2DE) + foreach(var model in root.Models) + { + string skelRef = null; + if (model.Skeleton != null && !model.Skeleton.IsDummy && model.Skeleton.Bones.Count > 1 && root.Skeletons.Any(s => s.Name == model.Skeleton.Name)) { - return Game.DivinityOriginalSin2DE; + Utils.Info($"Exporting model {model.Name} with skeleton {model.Skeleton.Name}"); + var skeleton = ExportSkeleton(model.Skeleton, model.Name); + geomNodes.Add(skeleton); + skelRef = skeleton.id; } - if (root.GR2Tag == Header.Tag_DOSEE) + if (model.MeshBindings != null) { - foreach (var mesh in root.Meshes ?? Enumerable.Empty()) + foreach (var meshBinding in model.MeshBindings) { - if (mesh.ExtendedData != null) - { - if (mesh.ExtendedData.LSMVersion == 0) - { - return Game.DivinityOriginalSinEE; - } - - if (mesh.ExtendedData.LSMVersion == 1) - { - return Game.DivinityOriginalSin2DE; - } - } + ExportMeshBinding(model, skelRef, meshBinding, geometries, controllers, geomNodes); } - - return Game.BaldursGate3; } - - return Game.Unset; } + } - private technique ExportRootLSLibProfile(Root root) + private technique ExportAnimationLSLibProfile(BG3TrackGroupExtendedData extData) + { + var profile = new technique() { - var profile = new technique() - { - profile = "LSTools" - }; + profile = "LSTools" + }; - var props = new List(); - - var prop = Xml.CreateElement("LSLibMajor"); - prop.InnerText = Common.MajorVersion.ToString(); + var props = new List(); + if (extData != null && extData.SkeletonResourceID != null && extData.SkeletonResourceID != "") + { + var prop = Xml.CreateElement("SkeletonResourceID"); + prop.InnerText = extData.SkeletonResourceID; props.Add(prop); + } - prop = Xml.CreateElement("LSLibMinor"); - prop.InnerText = Common.MinorVersion.ToString(); - props.Add(prop); + profile.Any = props.ToArray(); + return profile; + } - prop = Xml.CreateElement("LSLibPatch"); - prop.InnerText = Common.PatchVersion.ToString(); - props.Add(prop); + public List ExportKeyframeTrack(TransformTrack transformTrack, BG3TrackGroupExtendedData extData, string name, string target) + { + var track = transformTrack.ToKeyframes(); + track.MergeAdjacentFrames(); + track.InterpolateFrames(); - prop = Xml.CreateElement("MetadataVersion"); - prop.InnerText = Common.ColladaMetadataVersion.ToString(); - props.Add(prop); + var anims = new List(); - var game = DetectGame(root); - if (game != LS.Enums.Game.Unset) + var outputs = new List(track.Keyframes.Count * 16); + foreach (var keyframe in track.Keyframes.Values) + { + var transform = 
keyframe.ToTransform().ToMatrix4(); + transform.Transpose(); + for (int i = 0; i < 4; i++) { - prop = Xml.CreateElement("Game"); - prop.InnerText = game.ToString(); - props.Add(prop); + for (int j = 0; j < 4; j++) + outputs.Add(transform[i, j]); } + } - profile.Any = props.ToArray(); - return profile; + var interpolations = new List(track.Keyframes.Count); + for (int i = 0; i < track.Keyframes.Count; i++) + { + interpolations.Add("LINEAR"); } - public void Export(Root root, string outputPath) + var knots = new List(track.Keyframes.Count); + foreach (var keyframe in track.Keyframes) { - var contributor = new assetContributor(); - if (root.ArtToolInfo != null) - contributor.authoring_tool = root.ArtToolInfo.FromArtToolName; - else - contributor.authoring_tool = "LSLib COLLADA Exporter v" + Common.LibraryVersion(); + knots.Add(keyframe.Key); + } - var asset = new asset - { - contributor = [contributor], - created = DateTime.Now, - modified = DateTime.Now, - unit = new assetUnit + /* + * Fix up animations that have only one keyframe by adding another keyframe at + * the end of the animation. + * (This mainly applies to DaIdentity and DnConstant32f) + */ + if (track.Keyframes.Count == 1) + { + knots.Add(transformTrack.ParentAnimation.Duration); + for (int i = 0; i < 16; i++) + outputs.Add(outputs[i]); + interpolations.Add(interpolations[0]); + } + + var knotsSource = ColladaUtils.MakeFloatSource(name, "inputs", ["TIME"], knots.ToArray()); + var outSource = ColladaUtils.MakeFloatSource(name, "outputs", ["TRANSFORM"], outputs.ToArray(), 16, "float4x4"); + var interpSource = ColladaUtils.MakeNameSource(name, "interpolations", ["INTERPOLATION"], interpolations.ToArray()); + + var sampler = new sampler + { + id = name + "_sampler", + input = + [ + new InputLocal { - name = "meter" + semantic = "INTERPOLATION", + source = "#" + interpSource.id }, - up_axis = UpAxisType.Y_UP - }; + new InputLocal + { + semantic = "OUTPUT", + source = "#" + outSource.id + }, + new InputLocal + { + semantic = "INPUT", + source = "#" + knotsSource.id + } + ] + }; - // TODO: Handle up vector, etc. properly? - if (root.ArtToolInfo != null) - asset.unit.meter = root.ArtToolInfo.UnitsPerMeter; - else - asset.unit.meter = 1; + var channel = new channel + { + source = "#" + sampler.id, + target = target + }; - var geometries = new List(); - var controllers = new List(); - var geomNodes = new List(); - ExportModels(root, geometries, controllers, geomNodes); + var animation = new animation + { + id = name, + name = name, + Items = [ + knotsSource, + outSource, + interpSource, + sampler, + channel + ], + extra = + [ + new extra + { + technique = + [ + ExportAnimationLSLibProfile(extData) + ] + } + ] + }; - var animations = new List(); - var animationClips = new List(); + anims.Add(animation); + return anims; + } - foreach (var anim in root.Animations ?? 
Enumerable.Empty()) - { - var anims = ExportAnimations(anim); - animations.AddRange(anims); - var clip = new animation_clip - { - id = anim.Name + "_Animation", - name = anim.Name, - start = 0.0, - end = anim.Duration, - endSpecified = true - }; + public List ExportTrack(TransformTrack track, BG3TrackGroupExtendedData extData) + { + var anims = new List(); + var name = track.Name.Replace(' ', '_'); + var boneName = "Bone_" + track.Name.Replace(' ', '_'); - var animInstances = new List(); - foreach (var animChannel in anims) - { - var instance = new InstanceWithExtra - { - url = "#" + animChannel.id - }; - animInstances.Add(instance); - } + // Export all tracks in a single transform + anims.AddRange(ExportKeyframeTrack(track, extData, name + "_Transform", boneName + "/Transform")); - clip.instance_animation = animInstances.ToArray(); - animationClips.Add(clip); - } + return anims; + } - var rootElements = new List(); + public List ExportTracks(TrackGroup trackGroup) + { + var anims = new List(); + foreach (var track in trackGroup.TransformTracks) + { + anims.AddRange(ExportTrack(track, trackGroup.ExtendedData)); + } - if (animations.Count > 0) - { - var animationLib = new library_animations - { - animation = animations.ToArray() - }; - rootElements.Add(animationLib); - } + return anims; + } - if (animationClips.Count > 0) + public List ExportAnimations(Animation animation) + { + var animations = new List(); + foreach (var trackGroup in animation.TrackGroups) + { + /* + * We need to propagate animation data as the track exporter may need information from it + * (Duration and TimeStep usually) + */ + foreach (var track in trackGroup.TransformTracks) { - var animationClipLib = new library_animation_clips - { - animation_clip = animationClips.ToArray() - }; - rootElements.Add(animationClipLib); + track.ParentAnimation = animation; + track.OrientationCurve.CurveData.ParentAnimation = animation; + track.PositionCurve.CurveData.ParentAnimation = animation; + track.ScaleShearCurve.CurveData.ParentAnimation = animation; } - if (geometries.Count > 0) + animations.AddRange(ExportTracks(trackGroup)); + } + + return animations; + } + + private Game DetectGame(Root root) + { + if (root.GR2Tag == Header.Tag_DOS) + { + return Game.DivinityOriginalSin; + } + + if (root.GR2Tag == Header.Tag_DOS2DE) + { + return Game.DivinityOriginalSin2DE; + } + + if (root.GR2Tag == Header.Tag_DOSEE) + { + foreach (var mesh in root.Meshes ?? 
Enumerable.Empty()) { - var geometryLib = new library_geometries + if (mesh.ExtendedData != null) { - geometry = geometries.ToArray() - }; - rootElements.Add(geometryLib); + if (mesh.ExtendedData.LSMVersion == 0) + { + return Game.DivinityOriginalSinEE; + } + + if (mesh.ExtendedData.LSMVersion == 1) + { + return Game.DivinityOriginalSin2DE; + } + } } - if (controllers.Count > 0) + return Game.BaldursGate3; + } + + return Game.Unset; + } + + private technique ExportRootLSLibProfile(Root root) + { + var profile = new technique() + { + profile = "LSTools" + }; + + var props = new List(); + + var prop = Xml.CreateElement("LSLibMajor"); + prop.InnerText = Common.MajorVersion.ToString(); + props.Add(prop); + + prop = Xml.CreateElement("LSLibMinor"); + prop.InnerText = Common.MinorVersion.ToString(); + props.Add(prop); + + prop = Xml.CreateElement("LSLibPatch"); + prop.InnerText = Common.PatchVersion.ToString(); + props.Add(prop); + + prop = Xml.CreateElement("MetadataVersion"); + prop.InnerText = Common.ColladaMetadataVersion.ToString(); + props.Add(prop); + + var game = DetectGame(root); + if (game != LS.Enums.Game.Unset) + { + prop = Xml.CreateElement("Game"); + prop.InnerText = game.ToString(); + props.Add(prop); + } + + profile.Any = props.ToArray(); + return profile; + } + + public void Export(Root root, string outputPath) + { + var contributor = new assetContributor(); + if (root.ArtToolInfo != null) + contributor.authoring_tool = root.ArtToolInfo.FromArtToolName; + else + contributor.authoring_tool = "LSLib COLLADA Exporter v" + Common.LibraryVersion(); + + var asset = new asset + { + contributor = [contributor], + created = DateTime.Now, + modified = DateTime.Now, + unit = new assetUnit + { + name = "meter" + }, + up_axis = UpAxisType.Y_UP + }; + + // TODO: Handle up vector, etc. properly? + if (root.ArtToolInfo != null) + asset.unit.meter = root.ArtToolInfo.UnitsPerMeter; + else + asset.unit.meter = 1; + + var geometries = new List(); + var controllers = new List(); + var geomNodes = new List(); + ExportModels(root, geometries, controllers, geomNodes); + + var animations = new List(); + var animationClips = new List(); + + foreach (var anim in root.Animations ?? 
Enumerable.Empty()) + { + var anims = ExportAnimations(anim); + animations.AddRange(anims); + var clip = new animation_clip + { + id = anim.Name + "_Animation", + name = anim.Name, + start = 0.0, + end = anim.Duration, + endSpecified = true + }; + + var animInstances = new List(); + foreach (var animChannel in anims) { - var controllerLib = new library_controllers + var instance = new InstanceWithExtra { - controller = controllers.ToArray() + url = "#" + animChannel.id }; - rootElements.Add(controllerLib); + animInstances.Add(instance); } - var visualScenes = new library_visual_scenes(); - var visualScene = new visual_scene + clip.instance_animation = animInstances.ToArray(); + animationClips.Add(clip); + } + + var rootElements = new List(); + + if (animations.Count > 0) + { + var animationLib = new library_animations { - id = "DefaultVisualScene", - name = "unnamed", - node = geomNodes.ToArray() + animation = animations.ToArray() }; - visualScenes.visual_scene = [visualScene]; + rootElements.Add(animationLib); + } - var visualSceneInstance = new InstanceWithExtra + if (animationClips.Count > 0) + { + var animationClipLib = new library_animation_clips { - url = "#DefaultVisualScene" + animation_clip = animationClips.ToArray() }; - rootElements.Add(visualScenes); + rootElements.Add(animationClipLib); + } - var scene = new COLLADAScene + if (geometries.Count > 0) + { + var geometryLib = new library_geometries { - instance_visual_scene = visualSceneInstance + geometry = geometries.ToArray() }; + rootElements.Add(geometryLib); + } - var collada = new COLLADA + if (controllers.Count > 0) + { + var controllerLib = new library_controllers { - asset = asset, - scene = scene, - Items = rootElements.ToArray(), - extra = - [ - new extra - { - technique = - [ - ExportRootLSLibProfile(root) - ] - } - ] + controller = controllers.ToArray() }; - - using var stream = File.Open(outputPath, FileMode.Create); - collada.Save(stream); + rootElements.Add(controllerLib); } + + var visualScenes = new library_visual_scenes(); + var visualScene = new visual_scene + { + id = "DefaultVisualScene", + name = "unnamed", + node = geomNodes.ToArray() + }; + visualScenes.visual_scene = [visualScene]; + + var visualSceneInstance = new InstanceWithExtra + { + url = "#DefaultVisualScene" + }; + rootElements.Add(visualScenes); + + var scene = new COLLADAScene + { + instance_visual_scene = visualSceneInstance + }; + + var collada = new COLLADA + { + asset = asset, + scene = scene, + Items = rootElements.ToArray(), + extra = + [ + new extra + { + technique = + [ + ExportRootLSLibProfile(root) + ] + } + ] + }; + + using var stream = File.Open(outputPath, FileMode.Create); + collada.Save(stream); } } diff --git a/LSLib/Granny/Model/ColladaHelpers.cs b/LSLib/Granny/Model/ColladaHelpers.cs index b024793b..c477662c 100644 --- a/LSLib/Granny/Model/ColladaHelpers.cs +++ b/LSLib/Granny/Model/ColladaHelpers.cs @@ -3,228 +3,227 @@ using System; using System.Collections.Generic; -namespace LSLib.Granny.Model +namespace LSLib.Granny.Model; + +static class NodeHelpers { - static class NodeHelpers + public static Matrix4 GetTransformHierarchy(IEnumerable nodes) { - public static Matrix4 GetTransformHierarchy(IEnumerable nodes) + var accum = Matrix4.Identity; + foreach (var node in nodes) { - var accum = Matrix4.Identity; - foreach (var node in nodes) - { - accum = node.GetLocalTransform() * accum; - } - - return accum; + accum = node.GetLocalTransform() * accum; } - public static Matrix4 ToMatrix4(this matrix m) - { - var v = m.Values; - 
return new Matrix4( - (float)v[0], (float)v[1], (float)v[2], (float)v[3], - (float)v[4], (float)v[5], (float)v[6], (float)v[7], - (float)v[8], (float)v[9], (float)v[10], (float)v[11], - (float)v[12], (float)v[13], (float)v[14], (float)v[15] - ); - } + return accum; + } - public static Matrix4 ToMatrix4(this rotate r) - { - var axis = new Vector3((float)r.Values[0], (float)r.Values[1], (float)r.Values[2]); - var rot = Quaternion.FromAxisAngle(axis, (float)r.Values[3]); - return Matrix4.CreateFromQuaternion(rot); - } + public static Matrix4 ToMatrix4(this matrix m) + { + var v = m.Values; + return new Matrix4( + (float)v[0], (float)v[1], (float)v[2], (float)v[3], + (float)v[4], (float)v[5], (float)v[6], (float)v[7], + (float)v[8], (float)v[9], (float)v[10], (float)v[11], + (float)v[12], (float)v[13], (float)v[14], (float)v[15] + ); + } - public static Matrix4 TranslationToMatrix4(this TargetableFloat3 t) - { - Matrix4.CreateTranslation((float)t.Values[0], (float)t.Values[1], (float)t.Values[2], out Matrix4 trans); - return trans; - } + public static Matrix4 ToMatrix4(this rotate r) + { + var axis = new Vector3((float)r.Values[0], (float)r.Values[1], (float)r.Values[2]); + var rot = Quaternion.FromAxisAngle(axis, (float)r.Values[3]); + return Matrix4.CreateFromQuaternion(rot); + } - public static Matrix4 ScaleToMatrix4(this TargetableFloat3 t) - { - Matrix4.CreateScale((float)t.Values[0], (float)t.Values[1], (float)t.Values[2], out Matrix4 scale); - return scale; - } + public static Matrix4 TranslationToMatrix4(this TargetableFloat3 t) + { + Matrix4.CreateTranslation((float)t.Values[0], (float)t.Values[1], (float)t.Values[2], out Matrix4 trans); + return trans; + } - public static Matrix4 GetLocalTransform(this node n) - { - var accum = Matrix4.Identity; + public static Matrix4 ScaleToMatrix4(this TargetableFloat3 t) + { + Matrix4.CreateScale((float)t.Values[0], (float)t.Values[1], (float)t.Values[2], out Matrix4 scale); + return scale; + } - if (n.ItemsElementName != null) + public static Matrix4 GetLocalTransform(this node n) + { + var accum = Matrix4.Identity; + + if (n.ItemsElementName != null) + { + for (var i = 0; i < n.ItemsElementName.Length; i++) { - for (var i = 0; i < n.ItemsElementName.Length; i++) + var name = n.ItemsElementName[i]; + accum = name switch { - var name = n.ItemsElementName[i]; - accum = name switch - { - ItemsChoiceType2.matrix => (n.Items[i] as matrix).ToMatrix4() * Matrix4.Identity, - ItemsChoiceType2.translate => (n.Items[i] as TargetableFloat3).TranslationToMatrix4() * Matrix4.Identity, - ItemsChoiceType2.rotate => (n.Items[i] as rotate).ToMatrix4() * Matrix4.Identity, - ItemsChoiceType2.scale => (n.Items[i] as TargetableFloat3).ScaleToMatrix4() * Matrix4.Identity, - _ => throw new Exception("Unsupported Collada NODE transform: " + name), - }; - } + ItemsChoiceType2.matrix => (n.Items[i] as matrix).ToMatrix4() * Matrix4.Identity, + ItemsChoiceType2.translate => (n.Items[i] as TargetableFloat3).TranslationToMatrix4() * Matrix4.Identity, + ItemsChoiceType2.rotate => (n.Items[i] as rotate).ToMatrix4() * Matrix4.Identity, + ItemsChoiceType2.scale => (n.Items[i] as TargetableFloat3).ScaleToMatrix4() * Matrix4.Identity, + _ => throw new Exception("Unsupported Collada NODE transform: " + name), + }; } - - return accum; } + + return accum; } +} - class ColladaHelpers +class ColladaHelpers +{ + public class TransformMatrix { - public class TransformMatrix - { - public Matrix4 transform; - public string TransformSID; - } + public Matrix4 transform; + public string 
TransformSID; + } - public static void ApplyMatrixTransform(TransformMatrix transformMat, matrix m) - { - var values = m.Values; - var mat = new Matrix4( - (float)values[0], (float)values[1], (float)values[2], (float)values[3], - (float)values[4], (float)values[5], (float)values[6], (float)values[7], - (float)values[8], (float)values[9], (float)values[10], (float)values[11], - (float)values[12], (float)values[13], (float)values[14], (float)values[15] - ); - mat.Transpose(); - transformMat.transform *= mat; - } + public static void ApplyMatrixTransform(TransformMatrix transformMat, matrix m) + { + var values = m.Values; + var mat = new Matrix4( + (float)values[0], (float)values[1], (float)values[2], (float)values[3], + (float)values[4], (float)values[5], (float)values[6], (float)values[7], + (float)values[8], (float)values[9], (float)values[10], (float)values[11], + (float)values[12], (float)values[13], (float)values[14], (float)values[15] + ); + mat.Transpose(); + transformMat.transform *= mat; + } - public static void ApplyTranslation(TransformMatrix transformMat, TargetableFloat3 translation) - { - var translationMat = Matrix4.CreateTranslation( - (float)translation.Values[0], - (float)translation.Values[1], - (float)translation.Values[2] - ); - transformMat.transform = translationMat * transformMat.transform; + public static void ApplyTranslation(TransformMatrix transformMat, TargetableFloat3 translation) + { + var translationMat = Matrix4.CreateTranslation( + (float)translation.Values[0], + (float)translation.Values[1], + (float)translation.Values[2] + ); + transformMat.transform = translationMat * transformMat.transform; - } + } - public static void ApplyRotation(TransformMatrix transformMat, rotate rotation) - { - var axis = new Vector3((float)rotation.Values[0], (float)rotation.Values[1], (float)rotation.Values[2]); - var rotationMat = Matrix4.CreateFromAxisAngle(axis, (float)rotation.Values[3]); - transformMat.transform = rotationMat * transformMat.transform; - } + public static void ApplyRotation(TransformMatrix transformMat, rotate rotation) + { + var axis = new Vector3((float)rotation.Values[0], (float)rotation.Values[1], (float)rotation.Values[2]); + var rotationMat = Matrix4.CreateFromAxisAngle(axis, (float)rotation.Values[3]); + transformMat.transform = rotationMat * transformMat.transform; + } - public static void ApplyScale(TransformMatrix transformMat, TargetableFloat3 scale) + public static void ApplyScale(TransformMatrix transformMat, TargetableFloat3 scale) + { + var scaleMat = Matrix4.CreateScale( + (float)scale.Values[0], + (float)scale.Values[1], + (float)scale.Values[2] + ); + transformMat.transform = scaleMat * transformMat.transform; + } + + public static TransformMatrix TransformFromNode(node node) + { + var transform = new TransformMatrix { - var scaleMat = Matrix4.CreateScale( - (float)scale.Values[0], - (float)scale.Values[1], - (float)scale.Values[2] - ); - transformMat.transform = scaleMat * transformMat.transform; - } + transform = Matrix4.Identity, + TransformSID = null + }; - public static TransformMatrix TransformFromNode(node node) + if (node.ItemsElementName != null) { - var transform = new TransformMatrix + for (int i = 0; i < node.ItemsElementName.Length; i++) { - transform = Matrix4.Identity, - TransformSID = null - }; + var name = node.ItemsElementName[i]; + var item = node.Items[i]; - if (node.ItemsElementName != null) - { - for (int i = 0; i < node.ItemsElementName.Length; i++) + switch (name) { - var name = node.ItemsElementName[i]; - var item = 
node.Items[i]; - - switch (name) - { - case ItemsChoiceType2.translate: - { - var translation = item as TargetableFloat3; - ApplyTranslation(transform, translation); - break; - } - - case ItemsChoiceType2.rotate: - { - var rotation = item as rotate; - ApplyRotation(transform, rotation); - break; - } - - case ItemsChoiceType2.scale: - { - var scale = item as TargetableFloat3; - ApplyScale(transform, scale); - break; - } - - case ItemsChoiceType2.matrix: - { - var mat = item as matrix; - transform.TransformSID = mat.sid; - ApplyMatrixTransform(transform, mat); - break; - } - } + case ItemsChoiceType2.translate: + { + var translation = item as TargetableFloat3; + ApplyTranslation(transform, translation); + break; + } + + case ItemsChoiceType2.rotate: + { + var rotation = item as rotate; + ApplyRotation(transform, rotation); + break; + } + + case ItemsChoiceType2.scale: + { + var scale = item as TargetableFloat3; + ApplyScale(transform, scale); + break; + } + + case ItemsChoiceType2.matrix: + { + var mat = item as matrix; + transform.TransformSID = mat.sid; + ApplyMatrixTransform(transform, mat); + break; + } } } - - return transform; } - public static List StringsToIntegers(String s) + return transform; + } + + public static List StringsToIntegers(String s) + { + var floats = new List(s.Length / 6); + int startingPos = -1; + for (var i = 0; i < s.Length; i++) { - var floats = new List(s.Length / 6); - int startingPos = -1; - for (var i = 0; i < s.Length; i++) + if (s[i] != ' ') { - if (s[i] != ' ') - { - if (startingPos == -1) - startingPos = i; - } - else + if (startingPos == -1) + startingPos = i; + } + else + { + if (startingPos != -1) { - if (startingPos != -1) - { - floats.Add(int.Parse(s[startingPos..i])); - startingPos = -1; - } + floats.Add(int.Parse(s[startingPos..i])); + startingPos = -1; } } + } - if (startingPos != -1) - floats.Add(int.Parse(s[startingPos..])); + if (startingPos != -1) + floats.Add(int.Parse(s[startingPos..])); - return floats; - } + return floats; + } - public static Matrix4 FloatsToMatrix(float[] items) - { - return new Matrix4( - items[0], items[1], items[2], items[3], - items[4], items[5], items[6], items[7], - items[8], items[9], items[10], items[11], - items[12], items[13], items[14], items[15] - ); - } + public static Matrix4 FloatsToMatrix(float[] items) + { + return new Matrix4( + items[0], items[1], items[2], items[3], + items[4], items[5], items[6], items[7], + items[8], items[9], items[10], items[11], + items[12], items[13], items[14], items[15] + ); + } - public static List SourceToPositions(ColladaSource source) + public static List SourceToPositions(ColladaSource source) + { + List x = null, y = null, z = null; + if (!source.FloatParams.TryGetValue("X", out x) || + !source.FloatParams.TryGetValue("Y", out y) || + !source.FloatParams.TryGetValue("Z", out z)) + throw new ParsingException("Position source " + source.id + " must have X, Y, Z float attributes"); + + var positions = new List(x.Count); + for (var i = 0; i < x.Count; i++) { - List x = null, y = null, z = null; - if (!source.FloatParams.TryGetValue("X", out x) || - !source.FloatParams.TryGetValue("Y", out y) || - !source.FloatParams.TryGetValue("Z", out z)) - throw new ParsingException("Position source " + source.id + " must have X, Y, Z float attributes"); - - var positions = new List(x.Count); - for (var i = 0; i < x.Count; i++) - { - positions.Add(new Vector3(x[i], y[i], z[i])); - } - - return positions; + positions.Add(new Vector3(x[i], y[i], z[i])); } + + return positions; } } diff 
--git a/LSLib/Granny/Model/ColladaMesh.cs b/LSLib/Granny/Model/ColladaMesh.cs index 387f2258..30f5692b 100644 --- a/LSLib/Granny/Model/ColladaMesh.cs +++ b/LSLib/Granny/Model/ColladaMesh.cs @@ -4,640 +4,639 @@ using OpenTK.Mathematics; using LSLib.Granny.GR2; -namespace LSLib.Granny.Model +namespace LSLib.Granny.Model; + +public class ColladaMesh { - public class ColladaMesh + private mesh Mesh; + private Dictionary Sources; + private InputLocalOffset[] Inputs; + private List Vertices; + private List Normals; + private List Tangents; + private List Binormals; + private List> UVs; + private List> Colors; + private List Indices; + + private int InputOffsetCount = 0; + private int VertexInputIndex = -1; + private int NormalsInputIndex = -1; + private int TangentsInputIndex = -1; + private int BinormalsInputIndex = -1; + private List UVInputIndices = []; + private List ColorInputIndices = []; + private VertexDescriptor InputVertexType; + private VertexDescriptor OutputVertexType; + private bool HasNormals = false; + private bool HasTangents = false; + + public int TriangleCount; + public List ConsolidatedVertices; + public List ConsolidatedIndices; + public Dictionary> OriginalToConsolidatedVertexIndexMap; + private ExporterOptions Options; + + public VertexDescriptor InternalVertexType { - private mesh Mesh; - private Dictionary Sources; - private InputLocalOffset[] Inputs; - private List Vertices; - private List Normals; - private List Tangents; - private List Binormals; - private List> UVs; - private List> Colors; - private List Indices; - - private int InputOffsetCount = 0; - private int VertexInputIndex = -1; - private int NormalsInputIndex = -1; - private int TangentsInputIndex = -1; - private int BinormalsInputIndex = -1; - private List UVInputIndices = []; - private List ColorInputIndices = []; - private VertexDescriptor InputVertexType; - private VertexDescriptor OutputVertexType; - private bool HasNormals = false; - private bool HasTangents = false; - - public int TriangleCount; - public List ConsolidatedVertices; - public List ConsolidatedIndices; - public Dictionary> OriginalToConsolidatedVertexIndexMap; - private ExporterOptions Options; - - public VertexDescriptor InternalVertexType - { - get { return OutputVertexType; } - } + get { return OutputVertexType; } + } - private class VertexIndexComparer : IEqualityComparer + private class VertexIndexComparer : IEqualityComparer + { + public bool Equals(int[] x, int[] y) { - public bool Equals(int[] x, int[] y) + if (x.Length != y.Length) { - if (x.Length != y.Length) - { - return false; - } - for (int i = 0; i < x.Length; i++) - { - if (x[i] != y[i]) - { - return false; - } - } - return true; + return false; } - - public int GetHashCode(int[] obj) + for (int i = 0; i < x.Length; i++) { - int result = 17; - for (int i = 0; i < obj.Length; i++) + if (x[i] != y[i]) { - unchecked - { - result = result * 23 + obj[i]; - } + return false; } - return result; } + return true; } - void computeTangents() + public int GetHashCode(int[] obj) { - // Check if the vertex format has at least one UV set - if (ConsolidatedVertices.Count > 0) + int result = 17; + for (int i = 0; i < obj.Length; i++) { - var v = ConsolidatedVertices[0]; - if (v.Format.TextureCoordinates == 0) + unchecked { - throw new InvalidOperationException("At least one UV set is required to recompute tangents"); + result = result * 23 + obj[i]; } } + return result; + } + } - foreach (var v in ConsolidatedVertices) + void computeTangents() + { + // Check if the vertex format has at 
least one UV set + if (ConsolidatedVertices.Count > 0) + { + var v = ConsolidatedVertices[0]; + if (v.Format.TextureCoordinates == 0) { - v.Tangent = Vector3.Zero; - v.Binormal = Vector3.Zero; + throw new InvalidOperationException("At least one UV set is required to recompute tangents"); } + } - for (int i = 0; i < TriangleCount; i++) - { - var i1 = ConsolidatedIndices[i * 3 + 0]; - var i2 = ConsolidatedIndices[i * 3 + 1]; - var i3 = ConsolidatedIndices[i * 3 + 2]; + foreach (var v in ConsolidatedVertices) + { + v.Tangent = Vector3.Zero; + v.Binormal = Vector3.Zero; + } - var vert1 = ConsolidatedVertices[i1]; - var vert2 = ConsolidatedVertices[i2]; - var vert3 = ConsolidatedVertices[i3]; + for (int i = 0; i < TriangleCount; i++) + { + var i1 = ConsolidatedIndices[i * 3 + 0]; + var i2 = ConsolidatedIndices[i * 3 + 1]; + var i3 = ConsolidatedIndices[i * 3 + 2]; - var v1 = vert1.Position; - var v2 = vert2.Position; - var v3 = vert3.Position; + var vert1 = ConsolidatedVertices[i1]; + var vert2 = ConsolidatedVertices[i2]; + var vert3 = ConsolidatedVertices[i3]; - var w1 = vert1.TextureCoordinates0; - var w2 = vert2.TextureCoordinates0; - var w3 = vert3.TextureCoordinates0; + var v1 = vert1.Position; + var v2 = vert2.Position; + var v3 = vert3.Position; - float x1 = v2.X - v1.X; - float x2 = v3.X - v1.X; - float y1 = v2.Y - v1.Y; - float y2 = v3.Y - v1.Y; - float z1 = v2.Z - v1.Z; - float z2 = v3.Z - v1.Z; + var w1 = vert1.TextureCoordinates0; + var w2 = vert2.TextureCoordinates0; + var w3 = vert3.TextureCoordinates0; - float s1 = w2.X - w1.X; - float s2 = w3.X - w1.X; - float t1 = w2.Y - w1.Y; - float t2 = w3.Y - w1.Y; + float x1 = v2.X - v1.X; + float x2 = v3.X - v1.X; + float y1 = v2.Y - v1.Y; + float y2 = v3.Y - v1.Y; + float z1 = v2.Z - v1.Z; + float z2 = v3.Z - v1.Z; - float r = 1.0F / (s1 * t2 - s2 * t1); + float s1 = w2.X - w1.X; + float s2 = w3.X - w1.X; + float t1 = w2.Y - w1.Y; + float t2 = w3.Y - w1.Y; - if ((Single.IsNaN(r) || Single.IsInfinity(r)) && !Options.IgnoreUVNaN) - { - throw new Exception($"Couldn't calculate tangents; the mesh most likely contains non-manifold geometry.{Environment.NewLine}" - + $"UV1: {w1}{Environment.NewLine}UV2: {w2}{Environment.NewLine}UV3: {w3}"); - } + float r = 1.0F / (s1 * t2 - s2 * t1); - var sdir = new Vector3( - (t2 * x1 - t1 * x2) * r, - (t2 * y1 - t1 * y2) * r, - (t2 * z1 - t1 * z2) * r - ); - var tdir = new Vector3( - (s1 * x2 - s2 * x1) * r, - (s1 * y2 - s2 * y1) * r, - (s1 * z2 - s2 * z1) * r - ); - - vert1.Tangent += sdir; - vert2.Tangent += sdir; - vert3.Tangent += sdir; - - vert1.Binormal += tdir; - vert2.Binormal += tdir; - vert3.Binormal += tdir; + if ((Single.IsNaN(r) || Single.IsInfinity(r)) && !Options.IgnoreUVNaN) + { + throw new Exception($"Couldn't calculate tangents; the mesh most likely contains non-manifold geometry.{Environment.NewLine}" + + $"UV1: {w1}{Environment.NewLine}UV2: {w2}{Environment.NewLine}UV3: {w3}"); } - foreach (var v in ConsolidatedVertices) - { - var n = v.Normal; - var t = v.Tangent; - var b = v.Binormal; + var sdir = new Vector3( + (t2 * x1 - t1 * x2) * r, + (t2 * y1 - t1 * y2) * r, + (t2 * z1 - t1 * z2) * r + ); + var tdir = new Vector3( + (s1 * x2 - s2 * x1) * r, + (s1 * y2 - s2 * y1) * r, + (s1 * z2 - s2 * z1) * r + ); + + vert1.Tangent += sdir; + vert2.Tangent += sdir; + vert3.Tangent += sdir; + + vert1.Binormal += tdir; + vert2.Binormal += tdir; + vert3.Binormal += tdir; + } - // Gram-Schmidt orthogonalize - var tangent = (t - n * Vector3.Dot(n, t)).Normalized(); + foreach (var v in 
ConsolidatedVertices) + { + var n = v.Normal; + var t = v.Tangent; + var b = v.Binormal; - // Calculate handedness - var w = (Vector3.Dot(Vector3.Cross(n, t), b) < 0.0F) ? 1.0F : -1.0F; - var binormal = (Vector3.Cross(n, t) * w).Normalized(); + // Gram-Schmidt orthogonalize + var tangent = (t - n * Vector3.Dot(n, t)).Normalized(); - v.Tangent = tangent; - v.Binormal = binormal; - } - } + // Calculate handedness + var w = (Vector3.Dot(Vector3.Cross(n, t), b) < 0.0F) ? 1.0F : -1.0F; + var binormal = (Vector3.Cross(n, t) * w).Normalized(); - private Vector3 triangleNormalFromVertex(int[] indices, int vertexIndex) - { - // This assumes that A->B->C is a counter-clockwise ordering - var a = Vertices[indices[vertexIndex]].Position; - var b = Vertices[indices[(vertexIndex + 1) % 3]].Position; - var c = Vertices[indices[(vertexIndex + 2) % 3]].Position; - - var N = Vector3.Cross(b - a, c - a); - float sin_alpha = N.Length / ((b - a).Length * (c - a).Length); - return N.Normalized() * (float)Math.Asin(sin_alpha); + v.Tangent = tangent; + v.Binormal = binormal; } + } - private int VertexIndexCount() - { - return Indices.Count / InputOffsetCount; - } + private Vector3 triangleNormalFromVertex(int[] indices, int vertexIndex) + { + // This assumes that A->B->C is a counter-clockwise ordering + var a = Vertices[indices[vertexIndex]].Position; + var b = Vertices[indices[(vertexIndex + 1) % 3]].Position; + var c = Vertices[indices[(vertexIndex + 2) % 3]].Position; + + var N = Vector3.Cross(b - a, c - a); + float sin_alpha = N.Length / ((b - a).Length * (c - a).Length); + return N.Normalized() * (float)Math.Asin(sin_alpha); + } - private int VertexIndex(int index) - { - return Indices[index * InputOffsetCount + VertexInputIndex]; - } + private int VertexIndexCount() + { + return Indices.Count / InputOffsetCount; + } + + private int VertexIndex(int index) + { + return Indices[index * InputOffsetCount + VertexInputIndex]; + } - private void computeNormals() + private void computeNormals() + { + for (var vertexIdx = 0; vertexIdx < Vertices.Count; vertexIdx++) { - for (var vertexIdx = 0; vertexIdx < Vertices.Count; vertexIdx++) + Vector3 N = new(0, 0, 0); + var numIndices = VertexIndexCount(); + for (int triVertIdx = 0; triVertIdx < numIndices; triVertIdx++) { - Vector3 N = new(0, 0, 0); - var numIndices = VertexIndexCount(); - for (int triVertIdx = 0; triVertIdx < numIndices; triVertIdx++) + if (VertexIndex(triVertIdx) == vertexIdx) { - if (VertexIndex(triVertIdx) == vertexIdx) - { - int baseIdx = ((int)(triVertIdx / 3)) * 3; - var indices = new int[] { - VertexIndex(baseIdx + 0), - VertexIndex(baseIdx + 1), - VertexIndex(baseIdx + 2) - }; - N += triangleNormalFromVertex(indices, triVertIdx - baseIdx); - } + int baseIdx = ((int)(triVertIdx / 3)) * 3; + var indices = new int[] { + VertexIndex(baseIdx + 0), + VertexIndex(baseIdx + 1), + VertexIndex(baseIdx + 2) + }; + N += triangleNormalFromVertex(indices, triVertIdx - baseIdx); } - - N.Normalize(); - Vertices[vertexIdx].Normal = N; } + + N.Normalize(); + Vertices[vertexIdx].Normal = N; } + } - private void ImportFaces() + private void ImportFaces() + { + foreach (var item in Mesh.Items) { - foreach (var item in Mesh.Items) + if (item is triangles) { - if (item is triangles) - { - var tris = item as triangles; - TriangleCount = (int)tris.count; - Inputs = tris.input; - Indices = ColladaHelpers.StringsToIntegers(tris.p); - } - else if (item is polylist) - { - var plist = item as polylist; - TriangleCount = (int)plist.count; - Inputs = plist.input; - Indices = 
ColladaHelpers.StringsToIntegers(plist.p); - var vertexCounts = ColladaHelpers.StringsToIntegers(plist.vcount); - foreach (var count in vertexCounts) - { - if (count != 3) - throw new ParsingException("Non-triangle found in COLLADA polylist. Make sure that all geometries are triangulated."); - } - } - else if (item is lines) + var tris = item as triangles; + TriangleCount = (int)tris.count; + Inputs = tris.input; + Indices = ColladaHelpers.StringsToIntegers(tris.p); + } + else if (item is polylist) + { + var plist = item as polylist; + TriangleCount = (int)plist.count; + Inputs = plist.input; + Indices = ColladaHelpers.StringsToIntegers(plist.p); + var vertexCounts = ColladaHelpers.StringsToIntegers(plist.vcount); + foreach (var count in vertexCounts) { - throw new ParsingException("Lines found in input geometry. Make sure that all geometries are triangulated."); + if (count != 3) + throw new ParsingException("Non-triangle found in COLLADA polylist. Make sure that all geometries are triangulated."); } } + else if (item is lines) + { + throw new ParsingException("Lines found in input geometry. Make sure that all geometries are triangulated."); + } + } - if (Indices == null || Inputs == null) - throw new ParsingException("No valid triangle source found, expected or "); + if (Indices == null || Inputs == null) + throw new ParsingException("No valid triangle source found, expected or "); - InputOffsetCount = 0; - foreach (var input in Inputs) + InputOffsetCount = 0; + foreach (var input in Inputs) + { + if ((int)input.offset >= InputOffsetCount) { - if ((int)input.offset >= InputOffsetCount) - { - InputOffsetCount = (int)input.offset + 1; - } + InputOffsetCount = (int)input.offset + 1; } - - if (Indices.Count % (InputOffsetCount * 3) != 0 || Indices.Count / InputOffsetCount / 3 != TriangleCount) - throw new ParsingException("Triangle input stride / vertex count mismatch."); } - private ColladaSource FindSource(string id) + if (Indices.Count % (InputOffsetCount * 3) != 0 || Indices.Count / InputOffsetCount / 3 != TriangleCount) + throw new ParsingException("Triangle input stride / vertex count mismatch."); + } + + private ColladaSource FindSource(string id) + { + if (id.Length == 0 || id[0] != '#') + throw new ParsingException("Only ID references are supported for input sources: " + id); + + if (!Sources.TryGetValue(id.Substring(1), out ColladaSource inputSource)) + throw new ParsingException("Input source does not exist: " + id); + + return inputSource; + } + + private void ImportVertices() + { + var vertexSemantics = new Dictionary>(); + foreach (var input in Mesh.vertices.input) { - if (id.Length == 0 || id[0] != '#') - throw new ParsingException("Only ID references are supported for input sources: " + id); + ColladaSource inputSource = FindSource(input.source); + var vertices = ColladaHelpers.SourceToPositions(inputSource); + vertexSemantics.Add(input.semantic, vertices); + } + + List vertexPositions = null; + List perVertexNormals = null; + List perVertexTangents = null; + List perVertexBinormals = null; - if (!Sources.TryGetValue(id.Substring(1), out ColladaSource inputSource)) - throw new ParsingException("Input source does not exist: " + id); + vertexSemantics.TryGetValue("POSITION", out vertexPositions); + vertexSemantics.TryGetValue("NORMAL", out perVertexNormals); + if (!vertexSemantics.TryGetValue("TANGENT", out perVertexTangents)) + { + vertexSemantics.TryGetValue("TEXTANGENT", out perVertexTangents); + } - return inputSource; + if (!vertexSemantics.TryGetValue("BINORMAL", out 
perVertexBinormals)) + { + vertexSemantics.TryGetValue("TEXBINORMAL", out perVertexBinormals); } - private void ImportVertices() + foreach (var input in Inputs) { - var vertexSemantics = new Dictionary>(); - foreach (var input in Mesh.vertices.input) + if (input.semantic == "VERTEX") { - ColladaSource inputSource = FindSource(input.source); - var vertices = ColladaHelpers.SourceToPositions(inputSource); - vertexSemantics.Add(input.semantic, vertices); + VertexInputIndex = (int)input.offset; } - - List vertexPositions = null; - List perVertexNormals = null; - List perVertexTangents = null; - List perVertexBinormals = null; - - vertexSemantics.TryGetValue("POSITION", out vertexPositions); - vertexSemantics.TryGetValue("NORMAL", out perVertexNormals); - if (!vertexSemantics.TryGetValue("TANGENT", out perVertexTangents)) + else if (input.semantic == "NORMAL") { - vertexSemantics.TryGetValue("TEXTANGENT", out perVertexTangents); + var normalsSource = FindSource(input.source); + Normals = ColladaHelpers.SourceToPositions(normalsSource); + NormalsInputIndex = (int)input.offset; } - - if (!vertexSemantics.TryGetValue("BINORMAL", out perVertexBinormals)) + else if (input.semantic == "TANGENT" || input.semantic == "TEXTANGENT") { - vertexSemantics.TryGetValue("TEXBINORMAL", out perVertexBinormals); + var tangentsSource = FindSource(input.source); + Tangents = ColladaHelpers.SourceToPositions(tangentsSource); + TangentsInputIndex = (int)input.offset; } - - foreach (var input in Inputs) + else if (input.semantic == "BINORMAL" || input.semantic == "TEXBINORMAL") { - if (input.semantic == "VERTEX") - { - VertexInputIndex = (int)input.offset; - } - else if (input.semantic == "NORMAL") - { - var normalsSource = FindSource(input.source); - Normals = ColladaHelpers.SourceToPositions(normalsSource); - NormalsInputIndex = (int)input.offset; - } - else if (input.semantic == "TANGENT" || input.semantic == "TEXTANGENT") - { - var tangentsSource = FindSource(input.source); - Tangents = ColladaHelpers.SourceToPositions(tangentsSource); - TangentsInputIndex = (int)input.offset; - } - else if (input.semantic == "BINORMAL" || input.semantic == "TEXBINORMAL") - { - var binormalsSource = FindSource(input.source); - Binormals = ColladaHelpers.SourceToPositions(binormalsSource); - BinormalsInputIndex = (int)input.offset; - } + var binormalsSource = FindSource(input.source); + Binormals = ColladaHelpers.SourceToPositions(binormalsSource); + BinormalsInputIndex = (int)input.offset; } + } - if (VertexInputIndex == -1) - throw new ParsingException("Required triangle input semantic missing: VERTEX"); - - Vertices = new List(vertexPositions.Count); - for (var vert = 0; vert < vertexPositions.Count; vert++) - { - var vertex = OutputVertexType.CreateInstance(); - vertex.Position = vertexPositions[vert]; + if (VertexInputIndex == -1) + throw new ParsingException("Required triangle input semantic missing: VERTEX"); - if (perVertexNormals != null) - { - vertex.Normal = perVertexNormals[vert]; - } + Vertices = new List(vertexPositions.Count); + for (var vert = 0; vert < vertexPositions.Count; vert++) + { + var vertex = OutputVertexType.CreateInstance(); + vertex.Position = vertexPositions[vert]; - if (perVertexTangents != null) - { - vertex.Tangent = perVertexTangents[vert]; - } + if (perVertexNormals != null) + { + vertex.Normal = perVertexNormals[vert]; + } - if (perVertexBinormals != null) - { - vertex.Binormal = perVertexBinormals[vert]; - } + if (perVertexTangents != null) + { + vertex.Tangent = perVertexTangents[vert]; + } - 
Vertices.Add(vertex); + if (perVertexBinormals != null) + { + vertex.Binormal = perVertexBinormals[vert]; } - HasNormals = perVertexNormals != null || NormalsInputIndex != -1; - HasTangents = (perVertexTangents != null || TangentsInputIndex != -1) - && (perVertexBinormals != null || BinormalsInputIndex != -1); + Vertices.Add(vertex); } - private void ImportColors() + HasNormals = perVertexNormals != null || NormalsInputIndex != -1; + HasTangents = (perVertexTangents != null || TangentsInputIndex != -1) + && (perVertexBinormals != null || BinormalsInputIndex != -1); + } + + private void ImportColors() + { + ColorInputIndices.Clear(); + Colors = []; + foreach (var input in Inputs) { - ColorInputIndices.Clear(); - Colors = []; - foreach (var input in Inputs) + if (input.semantic == "COLOR") { - if (input.semantic == "COLOR") - { - ColorInputIndices.Add((int)input.offset); + ColorInputIndices.Add((int)input.offset); - if (input.source[0] != '#') - throw new ParsingException("Only ID references are supported for color input sources"); + if (input.source[0] != '#') + throw new ParsingException("Only ID references are supported for color input sources"); - ColladaSource inputSource = null; - if (!Sources.TryGetValue(input.source.Substring(1), out inputSource)) - throw new ParsingException("Color input source does not exist: " + input.source); + ColladaSource inputSource = null; + if (!Sources.TryGetValue(input.source.Substring(1), out inputSource)) + throw new ParsingException("Color input source does not exist: " + input.source); - List r = null, g = null, b = null; - if (!inputSource.FloatParams.TryGetValue("R", out r) || - !inputSource.FloatParams.TryGetValue("G", out g) || - !inputSource.FloatParams.TryGetValue("B", out b)) + List r = null, g = null, b = null; + if (!inputSource.FloatParams.TryGetValue("R", out r) || + !inputSource.FloatParams.TryGetValue("G", out g) || + !inputSource.FloatParams.TryGetValue("B", out b)) + { + if (!inputSource.FloatParams.TryGetValue("X", out r) || + !inputSource.FloatParams.TryGetValue("Y", out g) || + !inputSource.FloatParams.TryGetValue("Z", out b)) { - if (!inputSource.FloatParams.TryGetValue("X", out r) || - !inputSource.FloatParams.TryGetValue("Y", out g) || - !inputSource.FloatParams.TryGetValue("Z", out b)) - { - throw new ParsingException("Color input source " + input.source + " must have R, G, B float attributes"); - } + throw new ParsingException("Color input source " + input.source + " must have R, G, B float attributes"); } + } - var colors = new List(); - Colors.Add(colors); - for (var i = 0; i < r.Count; i++) - { - colors.Add(new Vector4(r[i], g[i], b[i], 1.0f)); - } + var colors = new List(); + Colors.Add(colors); + for (var i = 0; i < r.Count; i++) + { + colors.Add(new Vector4(r[i], g[i], b[i], 1.0f)); } } } + } - private void ImportUVs() + private void ImportUVs() + { + bool flip = Options.FlipUVs; + UVInputIndices.Clear(); + UVs = []; + foreach (var input in Inputs) { - bool flip = Options.FlipUVs; - UVInputIndices.Clear(); - UVs = []; - foreach (var input in Inputs) + if (input.semantic == "TEXCOORD") { - if (input.semantic == "TEXCOORD") - { - UVInputIndices.Add((int)input.offset); + UVInputIndices.Add((int)input.offset); - if (input.source[0] != '#') - throw new ParsingException("Only ID references are supported for UV input sources"); + if (input.source[0] != '#') + throw new ParsingException("Only ID references are supported for UV input sources"); - ColladaSource inputSource = null; - if (!Sources.TryGetValue(input.source[1..], out 
inputSource)) - throw new ParsingException("UV input source does not exist: " + input.source); + ColladaSource inputSource = null; + if (!Sources.TryGetValue(input.source[1..], out inputSource)) + throw new ParsingException("UV input source does not exist: " + input.source); - List s = null, t = null; - if (!inputSource.FloatParams.TryGetValue("S", out s) || - !inputSource.FloatParams.TryGetValue("T", out t)) - throw new ParsingException("UV input source " + input.source + " must have S, T float attributes"); + List s = null, t = null; + if (!inputSource.FloatParams.TryGetValue("S", out s) || + !inputSource.FloatParams.TryGetValue("T", out t)) + throw new ParsingException("UV input source " + input.source + " must have S, T float attributes"); - var uvs = new List(); - UVs.Add(uvs); - for (var i = 0; i < s.Count; i++) - { - if (flip) t[i] = 1.0f - t[i]; - uvs.Add(new Vector2(s[i], t[i])); - } + var uvs = new List(); + UVs.Add(uvs); + for (var i = 0; i < s.Count; i++) + { + if (flip) t[i] = 1.0f - t[i]; + uvs.Add(new Vector2(s[i], t[i])); } } } + } - private void ImportSources() + private void ImportSources() + { + Sources = []; + foreach (var source in Mesh.source) { - Sources = []; - foreach (var source in Mesh.source) - { - var src = ColladaSource.FromCollada(source); - Sources.Add(src.id, src); - } + var src = ColladaSource.FromCollada(source); + Sources.Add(src.id, src); } + } - private VertexDescriptor FindVertexFormat(bool isSkinned) + private VertexDescriptor FindVertexFormat(bool isSkinned) + { + var desc = new VertexDescriptor { - var desc = new VertexDescriptor - { - PositionType = PositionType.Float3 - }; - if (isSkinned) - { - desc.HasBoneWeights = true; - } + PositionType = PositionType.Float3 + }; + if (isSkinned) + { + desc.HasBoneWeights = true; + } - foreach (var input in Mesh.vertices.input) + foreach (var input in Mesh.vertices.input) + { + switch (input.semantic) { - switch (input.semantic) - { - case "NORMAL": desc.NormalType = NormalType.Float3; break; - case "TANGENT": - case "TEXTANGENT": - desc.TangentType = NormalType.Float3; break; - case "BINORMAL": - case "TEXBINORMAL": - desc.BinormalType = NormalType.Float3; break; - } + case "NORMAL": desc.NormalType = NormalType.Float3; break; + case "TANGENT": + case "TEXTANGENT": + desc.TangentType = NormalType.Float3; break; + case "BINORMAL": + case "TEXBINORMAL": + desc.BinormalType = NormalType.Float3; break; } + } - foreach (var input in Inputs) + foreach (var input in Inputs) + { + switch (input.semantic) { - switch (input.semantic) - { - case "NORMAL": desc.NormalType = NormalType.Float3; break; - case "TANGENT": - case "TEXTANGENT": - desc.TangentType = NormalType.Float3; break; - case "BINORMAL": - case "TEXBINORMAL": - desc.BinormalType = NormalType.Float3; break; - case "TEXCOORD": - desc.TextureCoordinateType = TextureCoordinateType.Float2; - desc.TextureCoordinates++; - break; - case "COLOR": - desc.ColorMapType = ColorMapType.Float4; - desc.ColorMaps++; - break; - } + case "NORMAL": desc.NormalType = NormalType.Float3; break; + case "TANGENT": + case "TEXTANGENT": + desc.TangentType = NormalType.Float3; break; + case "BINORMAL": + case "TEXBINORMAL": + desc.BinormalType = NormalType.Float3; break; + case "TEXCOORD": + desc.TextureCoordinateType = TextureCoordinateType.Float2; + desc.TextureCoordinates++; + break; + case "COLOR": + desc.ColorMapType = ColorMapType.Float4; + desc.ColorMaps++; + break; } - - return desc; } - public void ImportFromCollada(mesh mesh, VertexDescriptor vertexFormat, bool isSkinned, 
ExporterOptions options) - { - Options = options; - Mesh = mesh; - ImportSources(); - ImportFaces(); + return desc; + } - vertexFormat ??= FindVertexFormat(isSkinned); + public void ImportFromCollada(mesh mesh, VertexDescriptor vertexFormat, bool isSkinned, ExporterOptions options) + { + Options = options; + Mesh = mesh; + ImportSources(); + ImportFaces(); - InputVertexType = vertexFormat; - OutputVertexType = new VertexDescriptor - { - HasBoneWeights = InputVertexType.HasBoneWeights, - NumBoneInfluences = InputVertexType.NumBoneInfluences, - PositionType = InputVertexType.PositionType, - NormalType = InputVertexType.NormalType, - TangentType = InputVertexType.TangentType, - BinormalType = InputVertexType.BinormalType, - ColorMapType = InputVertexType.ColorMapType, - ColorMaps = InputVertexType.ColorMaps, - TextureCoordinateType = InputVertexType.TextureCoordinateType, - TextureCoordinates = InputVertexType.TextureCoordinates - }; - - ImportVertices(); - - // TODO: This should be done before deduplication! - // TODO: Move this to somewhere else ... ? - if (!HasNormals || Options.RecalculateNormals) - { - if (!HasNormals) - Utils.Info(String.Format("Channel 'NORMAL' not found, will rebuild vertex normals after import.")); + vertexFormat ??= FindVertexFormat(isSkinned); - HasNormals = true; - OutputVertexType.NormalType = NormalType.Float3; - computeNormals(); - } + InputVertexType = vertexFormat; + OutputVertexType = new VertexDescriptor + { + HasBoneWeights = InputVertexType.HasBoneWeights, + NumBoneInfluences = InputVertexType.NumBoneInfluences, + PositionType = InputVertexType.PositionType, + NormalType = InputVertexType.NormalType, + TangentType = InputVertexType.TangentType, + BinormalType = InputVertexType.BinormalType, + ColorMapType = InputVertexType.ColorMapType, + ColorMaps = InputVertexType.ColorMaps, + TextureCoordinateType = InputVertexType.TextureCoordinateType, + TextureCoordinates = InputVertexType.TextureCoordinates + }; + + ImportVertices(); + + // TODO: This should be done before deduplication! + // TODO: Move this to somewhere else ... ? 
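+        // Rebuilds per-vertex normals when the source mesh has none (or recalculation is forced):
+        // computeNormals() sums, for every triangle corner that references a vertex, the triangle's
+        // unit face normal scaled by an estimate of the corner angle (triangleNormalFromVertex
+        // returns the normalized cross product times asin of the angle's sine), then normalizes
+        // the accumulated vector before assigning it to the vertex.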
+ if (!HasNormals || Options.RecalculateNormals) + { + if (!HasNormals) + Utils.Info(String.Format("Channel 'NORMAL' not found, will rebuild vertex normals after import.")); - ImportColors(); - ImportUVs(); + HasNormals = true; + OutputVertexType.NormalType = NormalType.Float3; + computeNormals(); + } + + ImportColors(); + ImportUVs(); - if (UVInputIndices.Count > 0 || ColorInputIndices.Count > 0 - || NormalsInputIndex != -1 || TangentsInputIndex != -1 || BinormalsInputIndex != -1) + if (UVInputIndices.Count > 0 || ColorInputIndices.Count > 0 + || NormalsInputIndex != -1 || TangentsInputIndex != -1 || BinormalsInputIndex != -1) + { + var outVertexIndices = new Dictionary(new VertexIndexComparer()); + ConsolidatedIndices = new List(TriangleCount * 3); + ConsolidatedVertices = new List(Vertices.Count); + OriginalToConsolidatedVertexIndexMap = []; + for (var vert = 0; vert < TriangleCount * 3; vert++) { - var outVertexIndices = new Dictionary(new VertexIndexComparer()); - ConsolidatedIndices = new List(TriangleCount * 3); - ConsolidatedVertices = new List(Vertices.Count); - OriginalToConsolidatedVertexIndexMap = []; - for (var vert = 0; vert < TriangleCount * 3; vert++) + var index = new int[InputOffsetCount]; + for (var i = 0; i < InputOffsetCount; i++) + { + index[i] = Indices[vert * InputOffsetCount + i]; + } + + if (!outVertexIndices.TryGetValue(index, out int consolidatedIndex)) { - var index = new int[InputOffsetCount]; - for (var i = 0; i < InputOffsetCount; i++) + var vertexIndex = index[VertexInputIndex]; + consolidatedIndex = ConsolidatedVertices.Count; + Vertex vertex = Vertices[vertexIndex].Clone(); + if (NormalsInputIndex != -1) { - index[i] = Indices[vert * InputOffsetCount + i]; + vertex.Normal = Normals[index[NormalsInputIndex]]; } + if (TangentsInputIndex != -1) + { + vertex.Tangent = Tangents[index[TangentsInputIndex]]; + } + if (BinormalsInputIndex != -1) + { + vertex.Binormal = Binormals[index[BinormalsInputIndex]]; + } + for (int uv = 0; uv < UVInputIndices.Count; uv++) + { + vertex.SetUV(uv, UVs[uv][index[UVInputIndices[uv]]]); + } + for (int color = 0; color < ColorInputIndices.Count; color++) + { + vertex.SetColor(color, Colors[color][index[ColorInputIndices[color]]]); + } + outVertexIndices.Add(index, consolidatedIndex); + ConsolidatedVertices.Add(vertex); - if (!outVertexIndices.TryGetValue(index, out int consolidatedIndex)) + if (!OriginalToConsolidatedVertexIndexMap.TryGetValue(vertexIndex, out List mappedIndices)) { - var vertexIndex = index[VertexInputIndex]; - consolidatedIndex = ConsolidatedVertices.Count; - Vertex vertex = Vertices[vertexIndex].Clone(); - if (NormalsInputIndex != -1) - { - vertex.Normal = Normals[index[NormalsInputIndex]]; - } - if (TangentsInputIndex != -1) - { - vertex.Tangent = Tangents[index[TangentsInputIndex]]; - } - if (BinormalsInputIndex != -1) - { - vertex.Binormal = Binormals[index[BinormalsInputIndex]]; - } - for (int uv = 0; uv < UVInputIndices.Count; uv++) - { - vertex.SetUV(uv, UVs[uv][index[UVInputIndices[uv]]]); - } - for (int color = 0; color < ColorInputIndices.Count; color++) - { - vertex.SetColor(color, Colors[color][index[ColorInputIndices[color]]]); - } - outVertexIndices.Add(index, consolidatedIndex); - ConsolidatedVertices.Add(vertex); - - if (!OriginalToConsolidatedVertexIndexMap.TryGetValue(vertexIndex, out List mappedIndices)) - { - mappedIndices = []; - OriginalToConsolidatedVertexIndexMap.Add(vertexIndex, mappedIndices); - } - - mappedIndices.Add(consolidatedIndex); + mappedIndices = []; + 
OriginalToConsolidatedVertexIndexMap.Add(vertexIndex, mappedIndices); } - ConsolidatedIndices.Add(consolidatedIndex); + mappedIndices.Add(consolidatedIndex); } - Utils.Info(String.Format("Merged {0} vertices into {1} output vertices", Vertices.Count, ConsolidatedVertices.Count)); + ConsolidatedIndices.Add(consolidatedIndex); } - else - { - Utils.Info(String.Format("Mesh has no separate normals, colors or UV map, vertex consolidation step skipped.")); - ConsolidatedVertices = Vertices; + Utils.Info(String.Format("Merged {0} vertices into {1} output vertices", Vertices.Count, ConsolidatedVertices.Count)); + } + else + { + Utils.Info(String.Format("Mesh has no separate normals, colors or UV map, vertex consolidation step skipped.")); - ConsolidatedIndices = new List(TriangleCount * 3); - for (var vert = 0; vert < TriangleCount * 3; vert++) - ConsolidatedIndices.Add(VertexIndex(vert)); + ConsolidatedVertices = Vertices; - OriginalToConsolidatedVertexIndexMap = []; - for (var i = 0; i < Vertices.Count; i++) - OriginalToConsolidatedVertexIndexMap.Add(i, [i]); - } + ConsolidatedIndices = new List(TriangleCount * 3); + for (var vert = 0; vert < TriangleCount * 3; vert++) + ConsolidatedIndices.Add(VertexIndex(vert)); - if ((InputVertexType.TangentType == NormalType.None - || InputVertexType.BinormalType == NormalType.None) - && ((!HasTangents && UVs.Count > 0) || Options.RecalculateTangents)) - { - if (!HasTangents) - Utils.Info(String.Format("Channel 'TANGENT'/'BINROMAL' not found, will rebuild vertex tangents after import.")); + OriginalToConsolidatedVertexIndexMap = []; + for (var i = 0; i < Vertices.Count; i++) + OriginalToConsolidatedVertexIndexMap.Add(i, [i]); + } - OutputVertexType.TangentType = NormalType.Float3; - OutputVertexType.BinormalType = NormalType.Float3; - HasTangents = true; - computeTangents(); - } + if ((InputVertexType.TangentType == NormalType.None + || InputVertexType.BinormalType == NormalType.None) + && ((!HasTangents && UVs.Count > 0) || Options.RecalculateTangents)) + { + if (!HasTangents) + Utils.Info(String.Format("Channel 'TANGENT'/'BINROMAL' not found, will rebuild vertex tangents after import.")); - // Use optimized tangent, texture map and color map format when exporting for D:OS 2 - if ((Options.ModelInfoFormat == DivinityModelInfoFormat.LSMv0 - || Options.ModelInfoFormat == DivinityModelInfoFormat.LSMv1 - || Options.ModelInfoFormat == DivinityModelInfoFormat.LSMv3) - && Options.EnableQTangents - && HasNormals - && HasTangents) - { - OutputVertexType.NormalType = NormalType.QTangent; - OutputVertexType.TangentType = NormalType.QTangent; - OutputVertexType.BinormalType = NormalType.QTangent; + OutputVertexType.TangentType = NormalType.Float3; + OutputVertexType.BinormalType = NormalType.Float3; + HasTangents = true; + computeTangents(); + } - if (OutputVertexType.TextureCoordinateType == TextureCoordinateType.Float2) - { - OutputVertexType.TextureCoordinateType = TextureCoordinateType.Half2; - } + // Use optimized tangent, texture map and color map format when exporting for D:OS 2 + if ((Options.ModelInfoFormat == DivinityModelInfoFormat.LSMv0 + || Options.ModelInfoFormat == DivinityModelInfoFormat.LSMv1 + || Options.ModelInfoFormat == DivinityModelInfoFormat.LSMv3) + && Options.EnableQTangents + && HasNormals + && HasTangents) + { + OutputVertexType.NormalType = NormalType.QTangent; + OutputVertexType.TangentType = NormalType.QTangent; + OutputVertexType.BinormalType = NormalType.QTangent; - if (OutputVertexType.ColorMapType == ColorMapType.Float4) - { - 
OutputVertexType.ColorMapType = ColorMapType.Byte4; - } + if (OutputVertexType.TextureCoordinateType == TextureCoordinateType.Float2) + { + OutputVertexType.TextureCoordinateType = TextureCoordinateType.Half2; + } + + if (OutputVertexType.ColorMapType == ColorMapType.Float4) + { + OutputVertexType.ColorMapType = ColorMapType.Byte4; } } } diff --git a/LSLib/Granny/Model/CurveData/AnimationCurveData.cs b/LSLib/Granny/Model/CurveData/AnimationCurveData.cs index 29782f07..efed85e2 100644 --- a/LSLib/Granny/Model/CurveData/AnimationCurveData.cs +++ b/LSLib/Granny/Model/CurveData/AnimationCurveData.cs @@ -3,285 +3,283 @@ using OpenTK.Mathematics; using LSLib.Granny.GR2; -namespace LSLib.Granny.Model.CurveData +namespace LSLib.Granny.Model.CurveData; +public class CurveRegistry { - public class CurveRegistry + private static Dictionary TypeToFormatMap; + private static Dictionary NameToTypeMap; + + private static void Register(Type type, CurveFormat format) { - private static Dictionary TypeToFormatMap; - private static Dictionary NameToTypeMap; + TypeToFormatMap.Add(type, format); + NameToTypeMap.Add(type.Name, type); + } - private static void Register(Type type, CurveFormat format) + private static void Init() + { + if (TypeToFormatMap != null) { - TypeToFormatMap.Add(type, format); - NameToTypeMap.Add(type.Name, type); + return; } - private static void Init() - { - if (TypeToFormatMap != null) - { - return; - } - - TypeToFormatMap = []; - NameToTypeMap = []; - - Register(typeof(DaKeyframes32f), CurveFormat.DaKeyframes32f); - Register(typeof(DaK32fC32f), CurveFormat.DaK32fC32f); - Register(typeof(DaIdentity), CurveFormat.DaIdentity); - Register(typeof(DaConstant32f), CurveFormat.DaConstant32f); - Register(typeof(D3Constant32f), CurveFormat.D3Constant32f); - Register(typeof(D4Constant32f), CurveFormat.D4Constant32f); - Register(typeof(DaK16uC16u), CurveFormat.DaK16uC16u); - Register(typeof(DaK8uC8u), CurveFormat.DaK8uC8u); - Register(typeof(D4nK16uC15u), CurveFormat.D4nK16uC15u); - Register(typeof(D4nK8uC7u), CurveFormat.D4nK8uC7u); - Register(typeof(D3K16uC16u), CurveFormat.D3K16uC16u); - Register(typeof(D3K8uC8u), CurveFormat.D3K8uC8u); - Register(typeof(D9I1K16uC16u), CurveFormat.D9I1K16uC16u); - Register(typeof(D9I3K16uC16u), CurveFormat.D9I3K16uC16u); - Register(typeof(D9I1K8uC8u), CurveFormat.D9I1K8uC8u); - Register(typeof(D9I3K8uC8u), CurveFormat.D9I3K8uC8u); - Register(typeof(D3I1K32fC32f), CurveFormat.D3I1K32fC32f); - Register(typeof(D3I1K16uC16u), CurveFormat.D3I1K16uC16u); - Register(typeof(D3I1K8uC8u), CurveFormat.D3I1K8uC8u); - } + TypeToFormatMap = []; + NameToTypeMap = []; + + Register(typeof(DaKeyframes32f), CurveFormat.DaKeyframes32f); + Register(typeof(DaK32fC32f), CurveFormat.DaK32fC32f); + Register(typeof(DaIdentity), CurveFormat.DaIdentity); + Register(typeof(DaConstant32f), CurveFormat.DaConstant32f); + Register(typeof(D3Constant32f), CurveFormat.D3Constant32f); + Register(typeof(D4Constant32f), CurveFormat.D4Constant32f); + Register(typeof(DaK16uC16u), CurveFormat.DaK16uC16u); + Register(typeof(DaK8uC8u), CurveFormat.DaK8uC8u); + Register(typeof(D4nK16uC15u), CurveFormat.D4nK16uC15u); + Register(typeof(D4nK8uC7u), CurveFormat.D4nK8uC7u); + Register(typeof(D3K16uC16u), CurveFormat.D3K16uC16u); + Register(typeof(D3K8uC8u), CurveFormat.D3K8uC8u); + Register(typeof(D9I1K16uC16u), CurveFormat.D9I1K16uC16u); + Register(typeof(D9I3K16uC16u), CurveFormat.D9I3K16uC16u); + Register(typeof(D9I1K8uC8u), CurveFormat.D9I1K8uC8u); + Register(typeof(D9I3K8uC8u), CurveFormat.D9I3K8uC8u); + 
Register(typeof(D3I1K32fC32f), CurveFormat.D3I1K32fC32f); + Register(typeof(D3I1K16uC16u), CurveFormat.D3I1K16uC16u); + Register(typeof(D3I1K8uC8u), CurveFormat.D3I1K8uC8u); + } - public static Dictionary GetAllTypes() - { - Init(); + public static Dictionary GetAllTypes() + { + Init(); - return NameToTypeMap; - } + return NameToTypeMap; + } - public static Type Resolve(String name) - { - Init(); + public static Type Resolve(String name) + { + Init(); - if (!NameToTypeMap.TryGetValue(name, out Type type)) - throw new ParsingException("Unsupported curve type: " + name); + if (!NameToTypeMap.TryGetValue(name, out Type type)) + throw new ParsingException("Unsupported curve type: " + name); - return type; - } + return type; } +} - public enum CurveFormat - { - // Types: - // Da: (animation) 3x3 matrix - // D[1-4]: 1 - 4 component vector - // I[1/3]: 1/3 values for the main diagonal, others are zero - // n: Normalized quaternion - // Constant: Constant vector/matrix - // K[n][nothing/u/f]: n-bit value for knots; u = unsigned; f = floating point - // C[n][nothing/u/f]: n-bit value for controls; u = unsigned; f = floating point - DaKeyframes32f = 0, - DaK32fC32f = 1, - DaIdentity = 2, - DaConstant32f = 3, - D3Constant32f = 4, - D4Constant32f = 5, - DaK16uC16u = 6, - DaK8uC8u = 7, - D4nK16uC15u = 8, - D4nK8uC7u = 9, - D3K16uC16u = 10, - D3K8uC8u = 11, - D9I1K16uC16u = 12, - D9I3K16uC16u = 13, - D9I1K8uC8u = 14, - D9I3K8uC8u = 15, - D3I1K32fC32f = 16, - D3I1K16uC16u = 17, - D3I1K8uC8u = 18 - } +public enum CurveFormat +{ + // Types: + // Da: (animation) 3x3 matrix + // D[1-4]: 1 - 4 component vector + // I[1/3]: 1/3 values for the main diagonal, others are zero + // n: Normalized quaternion + // Constant: Constant vector/matrix + // K[n][nothing/u/f]: n-bit value for knots; u = unsigned; f = floating point + // C[n][nothing/u/f]: n-bit value for controls; u = unsigned; f = floating point + DaKeyframes32f = 0, + DaK32fC32f = 1, + DaIdentity = 2, + DaConstant32f = 3, + D3Constant32f = 4, + D4Constant32f = 5, + DaK16uC16u = 6, + DaK8uC8u = 7, + D4nK16uC15u = 8, + D4nK8uC7u = 9, + D3K16uC16u = 10, + D3K8uC8u = 11, + D9I1K16uC16u = 12, + D9I3K16uC16u = 13, + D9I1K8uC8u = 14, + D9I3K8uC8u = 15, + D3I1K32fC32f = 16, + D3I1K16uC16u = 17, + D3I1K8uC8u = 18 +} - public class CurveDataHeader - { - public byte Format; - public byte Degree; +public class CurveDataHeader +{ + public byte Format; + public byte Degree; - public bool IsFloat() + public bool IsFloat() + { + switch ((CurveFormat)Format) { - switch ((CurveFormat)Format) - { - case CurveFormat.DaKeyframes32f: - case CurveFormat.DaK32fC32f: - case CurveFormat.DaIdentity: - case CurveFormat.DaConstant32f: - case CurveFormat.D3Constant32f: - case CurveFormat.D4Constant32f: - return true; - - default: - return false; - } + case CurveFormat.DaKeyframes32f: + case CurveFormat.DaK32fC32f: + case CurveFormat.DaIdentity: + case CurveFormat.DaConstant32f: + case CurveFormat.D3Constant32f: + case CurveFormat.D4Constant32f: + return true; + + default: + return false; } + } - public int BytesPerKnot() + public int BytesPerKnot() + { + switch ((CurveFormat)Format) { - switch ((CurveFormat)Format) - { - case CurveFormat.DaKeyframes32f: - case CurveFormat.DaK32fC32f: - case CurveFormat.D3I1K32fC32f: - return 4; - - case CurveFormat.DaIdentity: - case CurveFormat.DaConstant32f: - case CurveFormat.D3Constant32f: - case CurveFormat.D4Constant32f: - throw new ParsingException("Should not serialize knots/controls here"); - - case CurveFormat.DaK16uC16u: - case 
CurveFormat.D4nK16uC15u: - case CurveFormat.D3K16uC16u: - case CurveFormat.D9I1K16uC16u: - case CurveFormat.D9I3K16uC16u: - case CurveFormat.D3I1K16uC16u: - return 2; - - case CurveFormat.DaK8uC8u: - case CurveFormat.D4nK8uC7u: - case CurveFormat.D3K8uC8u: - case CurveFormat.D9I1K8uC8u: - case CurveFormat.D9I3K8uC8u: - case CurveFormat.D3I1K8uC8u: - return 1; - - default: - throw new ParsingException("Unsupported curve data format"); - } + case CurveFormat.DaKeyframes32f: + case CurveFormat.DaK32fC32f: + case CurveFormat.D3I1K32fC32f: + return 4; + + case CurveFormat.DaIdentity: + case CurveFormat.DaConstant32f: + case CurveFormat.D3Constant32f: + case CurveFormat.D4Constant32f: + throw new ParsingException("Should not serialize knots/controls here"); + + case CurveFormat.DaK16uC16u: + case CurveFormat.D4nK16uC15u: + case CurveFormat.D3K16uC16u: + case CurveFormat.D9I1K16uC16u: + case CurveFormat.D9I3K16uC16u: + case CurveFormat.D3I1K16uC16u: + return 2; + + case CurveFormat.DaK8uC8u: + case CurveFormat.D4nK8uC7u: + case CurveFormat.D3K8uC8u: + case CurveFormat.D9I1K8uC8u: + case CurveFormat.D9I3K8uC8u: + case CurveFormat.D3I1K8uC8u: + return 1; + + default: + throw new ParsingException("Unsupported curve data format"); } } +} - class ControlUInt8 - { - public Byte UInt8 = 0; - } +class ControlUInt8 +{ + public Byte UInt8 = 0; +} - class ControlUInt16 - { - public UInt16 UInt16 = 0; - } +class ControlUInt16 +{ + public UInt16 UInt16 = 0; +} + +class ControlReal32 +{ + public Single Real32 = 0; +} - class ControlReal32 +class AnimationCurveDataTypeSelector : VariantTypeSelector +{ + public Type SelectType(MemberDefinition member, object node) { - public Single Real32 = 0; + return null; } - class AnimationCurveDataTypeSelector : VariantTypeSelector + public Type SelectType(MemberDefinition member, StructDefinition defn, object parent) { - public Type SelectType(MemberDefinition member, object node) - { - return null; - } + var fieldName = defn.Members[0].Name; + if (fieldName[..16] != "CurveDataHeader_") + throw new ParsingException("Unrecognized curve data header type: " + fieldName); - public Type SelectType(MemberDefinition member, StructDefinition defn, object parent) - { - var fieldName = defn.Members[0].Name; - if (fieldName[..16] != "CurveDataHeader_") - throw new ParsingException("Unrecognized curve data header type: " + fieldName); - - var curveType = fieldName[16..]; - return CurveRegistry.Resolve(curveType); - } + var curveType = fieldName[16..]; + return CurveRegistry.Resolve(curveType); } +} - [StructSerialization(MixedMarshal = true)] - public abstract class AnimationCurveData +[StructSerialization(MixedMarshal = true)] +public abstract class AnimationCurveData +{ + public enum ExportType { - public enum ExportType - { - Position, - Rotation, - ScaleShear - }; + Position, + Rotation, + ScaleShear + }; - [Serialization(Kind = SerializationKind.None)] - public Animation ParentAnimation; + [Serialization(Kind = SerializationKind.None)] + public Animation ParentAnimation; - protected float ConvertOneOverKnotScaleTrunc(UInt16 oneOverKnotScaleTrunc) - { - UInt32[] i = [(UInt32)oneOverKnotScaleTrunc << 16]; - float[] f = new float[1]; - Buffer.BlockCopy(i, 0, f, 0, i.Length * 4); - return f[0]; - } + protected float ConvertOneOverKnotScaleTrunc(UInt16 oneOverKnotScaleTrunc) + { + UInt32[] i = [(UInt32)oneOverKnotScaleTrunc << 16]; + float[] f = new float[1]; + Buffer.BlockCopy(i, 0, f, 0, i.Length * 4); + return f[0]; + } - public float Duration() - { - return GetKnots()[NumKnots() - 
1]; - } + public float Duration() + { + return GetKnots()[NumKnots() - 1]; + } - public abstract int NumKnots(); - public abstract List GetKnots(); + public abstract int NumKnots(); + public abstract List GetKnots(); - public virtual List GetPoints() - { - throw new ParsingException("Curve does not contain position data"); - } + public virtual List GetPoints() + { + throw new ParsingException("Curve does not contain position data"); + } - public virtual List GetMatrices() - { - throw new ParsingException("Curve does not contain rotation data"); - } + public virtual List GetMatrices() + { + throw new ParsingException("Curve does not contain rotation data"); + } - public virtual List GetQuaternions() + public virtual List GetQuaternions() + { + var matrices = GetMatrices(); + List quats = new(matrices.Count); + foreach (var matrix in matrices) { - var matrices = GetMatrices(); - List quats = new(matrices.Count); - foreach (var matrix in matrices) + // Check that the matrix is orthogonal + for (var i = 0; i < 3; i++) { - // Check that the matrix is orthogonal - for (var i = 0; i < 3; i++) + for (var j = 0; j < i; j++) { - for (var j = 0; j < i; j++) - { - if (matrix[i, j] != matrix[j, i]) - throw new ParsingException("Cannot convert into quaternion: Transformation matrix is not orthogonal!"); - } + if (matrix[i, j] != matrix[j, i]) + throw new ParsingException("Cannot convert into quaternion: Transformation matrix is not orthogonal!"); } - - // Check that the matrix is special orthogonal - // det(matrix) = 1 - if (Math.Abs(matrix.Determinant - 1) > 0.001) - throw new ParsingException("Cannot convert into quaternion: Transformation matrix is not special orthogonal!"); - - quats.Add(matrix.ExtractRotation()); } - return quats; + // Check that the matrix is special orthogonal + // det(matrix) = 1 + if (Math.Abs(matrix.Determinant - 1) > 0.001) + throw new ParsingException("Cannot convert into quaternion: Transformation matrix is not special orthogonal!"); + + quats.Add(matrix.ExtractRotation()); } + + return quats; + } - public void ExportKeyframes(KeyframeTrack track, ExportType type) + public void ExportKeyframes(KeyframeTrack track, ExportType type) + { + var numKnots = NumKnots(); + var knots = GetKnots(); + if (type == ExportType.Position) { - var numKnots = NumKnots(); - var knots = GetKnots(); - if (type == ExportType.Position) + var positions = GetPoints(); + for (var i = 0; i < numKnots; i++) { - var positions = GetPoints(); - for (var i = 0; i < numKnots; i++) - { - track.AddTranslation(knots[i], positions[i]); - } + track.AddTranslation(knots[i], positions[i]); } - else if (type == ExportType.Rotation) + } + else if (type == ExportType.Rotation) + { + var quats = GetQuaternions(); + for (var i = 0; i < numKnots; i++) { - var quats = GetQuaternions(); - for (var i = 0; i < numKnots; i++) - { - track.AddRotation(knots[i], quats[i]); - } + track.AddRotation(knots[i], quats[i]); } - else if (type == ExportType.ScaleShear) + } + else if (type == ExportType.ScaleShear) + { + var mats = GetMatrices(); + for (var i = 0; i < numKnots; i++) { - var mats = GetMatrices(); - for (var i = 0; i < numKnots; i++) - { - track.AddScaleShear(knots[i], mats[i]); - } + track.AddScaleShear(knots[i], mats[i]); } } } diff --git a/LSLib/Granny/Model/CurveData/D3Constant32f.cs b/LSLib/Granny/Model/CurveData/D3Constant32f.cs index adbc18cb..005d4fad 100644 --- a/LSLib/Granny/Model/CurveData/D3Constant32f.cs +++ b/LSLib/Granny/Model/CurveData/D3Constant32f.cs @@ -3,29 +3,28 @@ using OpenTK.Mathematics; using 
LSLib.Granny.GR2; -namespace LSLib.Granny.Model.CurveData +namespace LSLib.Granny.Model.CurveData; + +public class D3Constant32f : AnimationCurveData { - public class D3Constant32f : AnimationCurveData - { - [Serialization(Type = MemberType.Inline)] - public CurveDataHeader CurveDataHeader_D3Constant32f; - public Int16 Padding; - [Serialization(ArraySize = 3)] - public float[] Controls; + [Serialization(Type = MemberType.Inline)] + public CurveDataHeader CurveDataHeader_D3Constant32f; + public Int16 Padding; + [Serialization(ArraySize = 3)] + public float[] Controls; - public override int NumKnots() - { - return 1; - } + public override int NumKnots() + { + return 1; + } - public override List GetKnots() - { - return [0.0f]; - } + public override List GetKnots() + { + return [0.0f]; + } - public override List GetPoints() - { - return [new Vector3(Controls[0], Controls[1], Controls[2])]; - } + public override List GetPoints() + { + return [new Vector3(Controls[0], Controls[1], Controls[2])]; } } diff --git a/LSLib/Granny/Model/CurveData/D3I1K16uC16u.cs b/LSLib/Granny/Model/CurveData/D3I1K16uC16u.cs index dcc0783a..582365b6 100644 --- a/LSLib/Granny/Model/CurveData/D3I1K16uC16u.cs +++ b/LSLib/Granny/Model/CurveData/D3I1K16uC16u.cs @@ -3,51 +3,50 @@ using OpenTK.Mathematics; using LSLib.Granny.GR2; -namespace LSLib.Granny.Model.CurveData +namespace LSLib.Granny.Model.CurveData; + +public class D3I1K16uC16u : AnimationCurveData { - public class D3I1K16uC16u : AnimationCurveData - { - [Serialization(Type = MemberType.Inline)] - public CurveDataHeader CurveDataHeader_D3I1K16uC16u; - public UInt16 OneOverKnotScaleTrunc; - [Serialization(ArraySize = 3)] - public float[] ControlScales; - [Serialization(ArraySize = 3)] - public float[] ControlOffsets; - [Serialization(Prototype = typeof(ControlUInt16), Kind = SerializationKind.UserMember, Serializer = typeof(UInt16ListSerializer))] - public List KnotsControls; + [Serialization(Type = MemberType.Inline)] + public CurveDataHeader CurveDataHeader_D3I1K16uC16u; + public UInt16 OneOverKnotScaleTrunc; + [Serialization(ArraySize = 3)] + public float[] ControlScales; + [Serialization(ArraySize = 3)] + public float[] ControlOffsets; + [Serialization(Prototype = typeof(ControlUInt16), Kind = SerializationKind.UserMember, Serializer = typeof(UInt16ListSerializer))] + public List KnotsControls; - public override int NumKnots() - { - return KnotsControls.Count / 2; - } + public override int NumKnots() + { + return KnotsControls.Count / 2; + } - public override List GetKnots() - { - var scale = ConvertOneOverKnotScaleTrunc(OneOverKnotScaleTrunc); - var numKnots = NumKnots(); - var knots = new List(numKnots); - for (var i = 0; i < numKnots; i++) - knots.Add((float)KnotsControls[i] / scale); + public override List GetKnots() + { + var scale = ConvertOneOverKnotScaleTrunc(OneOverKnotScaleTrunc); + var numKnots = NumKnots(); + var knots = new List(numKnots); + for (var i = 0; i < numKnots; i++) + knots.Add((float)KnotsControls[i] / scale); - return knots; - } + return knots; + } - public override List GetPoints() + public override List GetPoints() + { + var numKnots = NumKnots(); + var knots = new List(numKnots); + for (var i = 0; i < numKnots; i++) { - var numKnots = NumKnots(); - var knots = new List(numKnots); - for (var i = 0; i < numKnots; i++) - { - var vec = new Vector3( - (float)KnotsControls[numKnots + i] * ControlScales[0] + ControlOffsets[0], - (float)KnotsControls[numKnots + i] * ControlScales[1] + ControlOffsets[1], - (float)KnotsControls[numKnots + i] * 
ControlScales[2] + ControlOffsets[2] - ); - knots.Add(vec); - } - - return knots; + var vec = new Vector3( + (float)KnotsControls[numKnots + i] * ControlScales[0] + ControlOffsets[0], + (float)KnotsControls[numKnots + i] * ControlScales[1] + ControlOffsets[1], + (float)KnotsControls[numKnots + i] * ControlScales[2] + ControlOffsets[2] + ); + knots.Add(vec); } + + return knots; } } diff --git a/LSLib/Granny/Model/CurveData/D3I1K32fC32f.cs b/LSLib/Granny/Model/CurveData/D3I1K32fC32f.cs index 40856eb9..c6f8d09f 100644 --- a/LSLib/Granny/Model/CurveData/D3I1K32fC32f.cs +++ b/LSLib/Granny/Model/CurveData/D3I1K32fC32f.cs @@ -3,50 +3,49 @@ using OpenTK.Mathematics; using LSLib.Granny.GR2; -namespace LSLib.Granny.Model.CurveData +namespace LSLib.Granny.Model.CurveData; + +public class D3I1K32fC32f : AnimationCurveData { - public class D3I1K32fC32f : AnimationCurveData - { - [Serialization(Type = MemberType.Inline)] - public CurveDataHeader CurveDataHeader_D3I1K32fC32f; - public UInt16 Padding; - [Serialization(ArraySize = 3)] - public float[] ControlScales; - [Serialization(ArraySize = 3)] - public float[] ControlOffsets; - [Serialization(Prototype = typeof(ControlReal32), Kind = SerializationKind.UserMember, Serializer = typeof(SingleListSerializer))] - public List KnotsControls; + [Serialization(Type = MemberType.Inline)] + public CurveDataHeader CurveDataHeader_D3I1K32fC32f; + public UInt16 Padding; + [Serialization(ArraySize = 3)] + public float[] ControlScales; + [Serialization(ArraySize = 3)] + public float[] ControlOffsets; + [Serialization(Prototype = typeof(ControlReal32), Kind = SerializationKind.UserMember, Serializer = typeof(SingleListSerializer))] + public List KnotsControls; - public override int NumKnots() - { - return KnotsControls.Count / 2; - } + public override int NumKnots() + { + return KnotsControls.Count / 2; + } - public override List GetKnots() - { - var numKnots = NumKnots(); - var knots = new List(numKnots); - for (var i = 0; i < numKnots; i++) - knots.Add(KnotsControls[i]); + public override List GetKnots() + { + var numKnots = NumKnots(); + var knots = new List(numKnots); + for (var i = 0; i < numKnots; i++) + knots.Add(KnotsControls[i]); - return knots; - } + return knots; + } - public override List GetPoints() + public override List GetPoints() + { + var numKnots = NumKnots(); + var knots = new List(numKnots); + for (var i = 0; i < numKnots; i++) { - var numKnots = NumKnots(); - var knots = new List(numKnots); - for (var i = 0; i < numKnots; i++) - { - var vec = new Vector3( - KnotsControls[numKnots + i] * ControlScales[0] + ControlOffsets[0], - KnotsControls[numKnots + i] * ControlScales[1] + ControlOffsets[1], - KnotsControls[numKnots + i] * ControlScales[2] + ControlOffsets[2] - ); - knots.Add(vec); - } - - return knots; + var vec = new Vector3( + KnotsControls[numKnots + i] * ControlScales[0] + ControlOffsets[0], + KnotsControls[numKnots + i] * ControlScales[1] + ControlOffsets[1], + KnotsControls[numKnots + i] * ControlScales[2] + ControlOffsets[2] + ); + knots.Add(vec); } + + return knots; } } diff --git a/LSLib/Granny/Model/CurveData/D3I1K8uC8u.cs b/LSLib/Granny/Model/CurveData/D3I1K8uC8u.cs index 115d79f0..e9abb44d 100644 --- a/LSLib/Granny/Model/CurveData/D3I1K8uC8u.cs +++ b/LSLib/Granny/Model/CurveData/D3I1K8uC8u.cs @@ -3,51 +3,50 @@ using OpenTK.Mathematics; using LSLib.Granny.GR2; -namespace LSLib.Granny.Model.CurveData +namespace LSLib.Granny.Model.CurveData; + +public class D3I1K8uC8u : AnimationCurveData { - public class D3I1K8uC8u : 
AnimationCurveData - { - [Serialization(Type = MemberType.Inline)] - public CurveDataHeader CurveDataHeader_D3I1K8uC8u; - public UInt16 OneOverKnotScaleTrunc; - [Serialization(ArraySize = 3)] - public float[] ControlScales; - [Serialization(ArraySize = 3)] - public float[] ControlOffsets; - [Serialization(Prototype = typeof(ControlUInt8), Kind = SerializationKind.UserMember, Serializer = typeof(UInt8ListSerializer))] - public List KnotsControls; + [Serialization(Type = MemberType.Inline)] + public CurveDataHeader CurveDataHeader_D3I1K8uC8u; + public UInt16 OneOverKnotScaleTrunc; + [Serialization(ArraySize = 3)] + public float[] ControlScales; + [Serialization(ArraySize = 3)] + public float[] ControlOffsets; + [Serialization(Prototype = typeof(ControlUInt8), Kind = SerializationKind.UserMember, Serializer = typeof(UInt8ListSerializer))] + public List KnotsControls; - public override int NumKnots() - { - return KnotsControls.Count / 2; - } + public override int NumKnots() + { + return KnotsControls.Count / 2; + } - public override List GetKnots() - { - var scale = ConvertOneOverKnotScaleTrunc(OneOverKnotScaleTrunc); - var numKnots = NumKnots(); - var knots = new List(numKnots); - for (var i = 0; i < numKnots; i++) - knots.Add((float)KnotsControls[i] / scale); + public override List GetKnots() + { + var scale = ConvertOneOverKnotScaleTrunc(OneOverKnotScaleTrunc); + var numKnots = NumKnots(); + var knots = new List(numKnots); + for (var i = 0; i < numKnots; i++) + knots.Add((float)KnotsControls[i] / scale); - return knots; - } + return knots; + } - public override List GetPoints() + public override List GetPoints() + { + var numKnots = NumKnots(); + var knots = new List(numKnots); + for (var i = 0; i < numKnots; i++) { - var numKnots = NumKnots(); - var knots = new List(numKnots); - for (var i = 0; i < numKnots; i++) - { - var vec = new Vector3( - (float)KnotsControls[numKnots + i] * ControlScales[0] + ControlOffsets[0], - (float)KnotsControls[numKnots + i] * ControlScales[1] + ControlOffsets[1], - (float)KnotsControls[numKnots + i] * ControlScales[2] + ControlOffsets[2] - ); - knots.Add(vec); - } - - return knots; + var vec = new Vector3( + (float)KnotsControls[numKnots + i] * ControlScales[0] + ControlOffsets[0], + (float)KnotsControls[numKnots + i] * ControlScales[1] + ControlOffsets[1], + (float)KnotsControls[numKnots + i] * ControlScales[2] + ControlOffsets[2] + ); + knots.Add(vec); } + + return knots; } } diff --git a/LSLib/Granny/Model/CurveData/D3K16uC16u.cs b/LSLib/Granny/Model/CurveData/D3K16uC16u.cs index 6a74d99e..d8e86537 100644 --- a/LSLib/Granny/Model/CurveData/D3K16uC16u.cs +++ b/LSLib/Granny/Model/CurveData/D3K16uC16u.cs @@ -3,51 +3,50 @@ using OpenTK.Mathematics; using LSLib.Granny.GR2; -namespace LSLib.Granny.Model.CurveData +namespace LSLib.Granny.Model.CurveData; + +public class D3K16uC16u : AnimationCurveData { - public class D3K16uC16u : AnimationCurveData - { - [Serialization(Type = MemberType.Inline)] - public CurveDataHeader CurveDataHeader_D3K16uC16u; - public UInt16 OneOverKnotScaleTrunc; - [Serialization(ArraySize = 3)] - public float[] ControlScales; - [Serialization(ArraySize = 3)] - public float[] ControlOffsets; - [Serialization(Prototype = typeof(ControlUInt16), Kind = SerializationKind.UserMember, Serializer = typeof(UInt16ListSerializer))] - public List KnotsControls; + [Serialization(Type = MemberType.Inline)] + public CurveDataHeader CurveDataHeader_D3K16uC16u; + public UInt16 OneOverKnotScaleTrunc; + [Serialization(ArraySize = 3)] + public float[] 
ControlScales; + [Serialization(ArraySize = 3)] + public float[] ControlOffsets; + [Serialization(Prototype = typeof(ControlUInt16), Kind = SerializationKind.UserMember, Serializer = typeof(UInt16ListSerializer))] + public List KnotsControls; - public override int NumKnots() - { - return KnotsControls.Count / 4; - } + public override int NumKnots() + { + return KnotsControls.Count / 4; + } - public override List GetKnots() - { - var scale = ConvertOneOverKnotScaleTrunc(OneOverKnotScaleTrunc); - var numKnots = NumKnots(); - var knots = new List(numKnots); - for (var i = 0; i < numKnots; i++) - knots.Add((float)KnotsControls[i] / scale); + public override List GetKnots() + { + var scale = ConvertOneOverKnotScaleTrunc(OneOverKnotScaleTrunc); + var numKnots = NumKnots(); + var knots = new List(numKnots); + for (var i = 0; i < numKnots; i++) + knots.Add((float)KnotsControls[i] / scale); - return knots; - } + return knots; + } - public override List GetPoints() + public override List GetPoints() + { + var numKnots = NumKnots(); + var knots = new List(numKnots); + for (var i = 0; i < numKnots; i++) { - var numKnots = NumKnots(); - var knots = new List(numKnots); - for (var i = 0; i < numKnots; i++) - { - var vec = new Vector3( - (float)KnotsControls[numKnots + i * 3 + 0] * ControlScales[0] + ControlOffsets[0], - (float)KnotsControls[numKnots + i * 3 + 1] * ControlScales[1] + ControlOffsets[1], - (float)KnotsControls[numKnots + i * 3 + 2] * ControlScales[2] + ControlOffsets[2] - ); - knots.Add(vec); - } - - return knots; + var vec = new Vector3( + (float)KnotsControls[numKnots + i * 3 + 0] * ControlScales[0] + ControlOffsets[0], + (float)KnotsControls[numKnots + i * 3 + 1] * ControlScales[1] + ControlOffsets[1], + (float)KnotsControls[numKnots + i * 3 + 2] * ControlScales[2] + ControlOffsets[2] + ); + knots.Add(vec); } + + return knots; } } diff --git a/LSLib/Granny/Model/CurveData/D3K8uC8u.cs b/LSLib/Granny/Model/CurveData/D3K8uC8u.cs index d562324b..0f4c933f 100644 --- a/LSLib/Granny/Model/CurveData/D3K8uC8u.cs +++ b/LSLib/Granny/Model/CurveData/D3K8uC8u.cs @@ -3,51 +3,50 @@ using OpenTK.Mathematics; using LSLib.Granny.GR2; -namespace LSLib.Granny.Model.CurveData +namespace LSLib.Granny.Model.CurveData; + +public class D3K8uC8u : AnimationCurveData { - public class D3K8uC8u : AnimationCurveData - { - [Serialization(Type = MemberType.Inline)] - public CurveDataHeader CurveDataHeader_D3K8uC8u; - public UInt16 OneOverKnotScaleTrunc; - [Serialization(ArraySize = 3)] - public float[] ControlScales; - [Serialization(ArraySize = 3)] - public float[] ControlOffsets; - [Serialization(Prototype = typeof(ControlUInt8), Kind = SerializationKind.UserMember, Serializer = typeof(UInt8ListSerializer))] - public List KnotsControls; + [Serialization(Type = MemberType.Inline)] + public CurveDataHeader CurveDataHeader_D3K8uC8u; + public UInt16 OneOverKnotScaleTrunc; + [Serialization(ArraySize = 3)] + public float[] ControlScales; + [Serialization(ArraySize = 3)] + public float[] ControlOffsets; + [Serialization(Prototype = typeof(ControlUInt8), Kind = SerializationKind.UserMember, Serializer = typeof(UInt8ListSerializer))] + public List KnotsControls; - public override int NumKnots() - { - return KnotsControls.Count / 4; - } + public override int NumKnots() + { + return KnotsControls.Count / 4; + } - public override List GetKnots() - { - var scale = ConvertOneOverKnotScaleTrunc(OneOverKnotScaleTrunc); - var numKnots = NumKnots(); - var knots = new List(numKnots); - for (var i = 0; i < numKnots; i++) - 
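// Illustrative sketch of the D3K* quantized layout above: each sample contributes one
// knot plus three controls (hence Count / 4), and each control is de-quantized with its
// own axis scale and offset. ConvertOneOverKnotScaleTrunc is assumed to yield the knot
// divisor, as used by the classes above; this helper is illustrative only.
static Vector3 DecodeD3KPoint(IReadOnlyList<ushort> knotsControls, int i,
                              float[] scales, float[] offsets)
{
    var numKnots = knotsControls.Count / 4;   // knots first, then 3 controls per knot
    return new Vector3(
        (float)knotsControls[numKnots + i * 3 + 0] * scales[0] + offsets[0],
        (float)knotsControls[numKnots + i * 3 + 1] * scales[1] + offsets[1],
        (float)knotsControls[numKnots + i * 3 + 2] * scales[2] + offsets[2]);
}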
knots.Add((float)KnotsControls[i] / scale); + public override List GetKnots() + { + var scale = ConvertOneOverKnotScaleTrunc(OneOverKnotScaleTrunc); + var numKnots = NumKnots(); + var knots = new List(numKnots); + for (var i = 0; i < numKnots; i++) + knots.Add((float)KnotsControls[i] / scale); - return knots; - } + return knots; + } - public override List GetPoints() + public override List GetPoints() + { + var numKnots = NumKnots(); + var knots = new List(numKnots); + for (var i = 0; i < numKnots; i++) { - var numKnots = NumKnots(); - var knots = new List(numKnots); - for (var i = 0; i < numKnots; i++) - { - var vec = new Vector3( - (float)KnotsControls[numKnots + i * 3 + 0] * ControlScales[0] + ControlOffsets[0], - (float)KnotsControls[numKnots + i * 3 + 1] * ControlScales[1] + ControlOffsets[1], - (float)KnotsControls[numKnots + i * 3 + 2] * ControlScales[2] + ControlOffsets[2] - ); - knots.Add(vec); - } - - return knots; + var vec = new Vector3( + (float)KnotsControls[numKnots + i * 3 + 0] * ControlScales[0] + ControlOffsets[0], + (float)KnotsControls[numKnots + i * 3 + 1] * ControlScales[1] + ControlOffsets[1], + (float)KnotsControls[numKnots + i * 3 + 2] * ControlScales[2] + ControlOffsets[2] + ); + knots.Add(vec); } + + return knots; } } diff --git a/LSLib/Granny/Model/CurveData/D4Constant32f.cs b/LSLib/Granny/Model/CurveData/D4Constant32f.cs index df13f86f..5fb949ae 100644 --- a/LSLib/Granny/Model/CurveData/D4Constant32f.cs +++ b/LSLib/Granny/Model/CurveData/D4Constant32f.cs @@ -3,30 +3,29 @@ using OpenTK.Mathematics; using LSLib.Granny.GR2; -namespace LSLib.Granny.Model.CurveData +namespace LSLib.Granny.Model.CurveData; + +public class D4Constant32f : AnimationCurveData { - public class D4Constant32f : AnimationCurveData - { - [Serialization(Type = MemberType.Inline)] - public CurveDataHeader CurveDataHeader_D4Constant32f; - public Int16 Padding; - [Serialization(ArraySize = 4)] - public float[] Controls; + [Serialization(Type = MemberType.Inline)] + public CurveDataHeader CurveDataHeader_D4Constant32f; + public Int16 Padding; + [Serialization(ArraySize = 4)] + public float[] Controls; - public override int NumKnots() - { - return 1; - } + public override int NumKnots() + { + return 1; + } - public override List GetKnots() - { - return [0.0f]; - } + public override List GetKnots() + { + return [0.0f]; + } - // TODO: GetMatrices - public override List GetQuaternions() - { - return [new Quaternion(Controls[0], Controls[1], Controls[2], Controls[3])]; - } + // TODO: GetMatrices + public override List GetQuaternions() + { + return [new Quaternion(Controls[0], Controls[1], Controls[2], Controls[3])]; } } diff --git a/LSLib/Granny/Model/CurveData/D4nK16uC15u.cs b/LSLib/Granny/Model/CurveData/D4nK16uC15u.cs index 8cd7774f..1b41e41a 100644 --- a/LSLib/Granny/Model/CurveData/D4nK16uC15u.cs +++ b/LSLib/Granny/Model/CurveData/D4nK16uC15u.cs @@ -3,117 +3,116 @@ using OpenTK.Mathematics; using LSLib.Granny.GR2; -namespace LSLib.Granny.Model.CurveData +namespace LSLib.Granny.Model.CurveData; + +public class D4nK16uC15u : AnimationCurveData { - public class D4nK16uC15u : AnimationCurveData + private readonly static float[] ScaleTable = [ + 1.4142135f, 0.70710677f, 0.35355338f, 0.35355338f, + 0.35355338f, 0.17677669f, 0.17677669f, 0.17677669f, + -1.4142135f, -0.70710677f, -0.35355338f, -0.35355338f, + -0.35355338f, -0.17677669f, -0.17677669f, -0.17677669f + ]; + + private readonly static float[] OffsetTable = [ + -0.70710677f, -0.35355338f, -0.53033006f, -0.17677669f, + 0.17677669f, -0.17677669f, 
-0.088388346f, 0.0f, + 0.70710677f, 0.35355338f, 0.53033006f, 0.17677669f, + -0.17677669f, 0.17677669f, 0.088388346f, -0.0f + ]; + + [Serialization(Type = MemberType.Inline)] + public CurveDataHeader CurveDataHeader_D4nK16uC15u; + public UInt16 ScaleOffsetTableEntries; + public Single OneOverKnotScale; + [Serialization(Prototype = typeof(ControlUInt16), Kind = SerializationKind.UserMember, Serializer = typeof(UInt16ListSerializer))] + public List KnotsControls; + + + public override int NumKnots() { - private readonly static float[] ScaleTable = [ - 1.4142135f, 0.70710677f, 0.35355338f, 0.35355338f, - 0.35355338f, 0.17677669f, 0.17677669f, 0.17677669f, - -1.4142135f, -0.70710677f, -0.35355338f, -0.35355338f, - -0.35355338f, -0.17677669f, -0.17677669f, -0.17677669f - ]; - - private readonly static float[] OffsetTable = [ - -0.70710677f, -0.35355338f, -0.53033006f, -0.17677669f, - 0.17677669f, -0.17677669f, -0.088388346f, 0.0f, - 0.70710677f, 0.35355338f, 0.53033006f, 0.17677669f, - -0.17677669f, 0.17677669f, 0.088388346f, -0.0f - ]; - - [Serialization(Type = MemberType.Inline)] - public CurveDataHeader CurveDataHeader_D4nK16uC15u; - public UInt16 ScaleOffsetTableEntries; - public Single OneOverKnotScale; - [Serialization(Prototype = typeof(ControlUInt16), Kind = SerializationKind.UserMember, Serializer = typeof(UInt16ListSerializer))] - public List KnotsControls; - - - public override int NumKnots() - { - return KnotsControls.Count / 4; - } + return KnotsControls.Count / 4; + } - public override List GetKnots() - { - var numKnots = NumKnots(); - var knots = new List(numKnots); - for (var i = 0; i < numKnots; i++) - knots.Add((float)KnotsControls[i] / OneOverKnotScale); + public override List GetKnots() + { + var numKnots = NumKnots(); + var knots = new List(numKnots); + for (var i = 0; i < numKnots; i++) + knots.Add((float)KnotsControls[i] / OneOverKnotScale); - return knots; - } + return knots; + } - private Quaternion QuatFromControl(UInt16 a, UInt16 b, UInt16 c, float[] scales, float[] offsets) - { - // Control data format: - // ----- A ----- ----- B ----- ----- C ----- - // | 1 2 ... 15 | 1 2 ... 15 | 1 2 ... 15 | - // G DA S1 DB S2 DC - // - // G: Sign flag; the last word is negative if G == 1 - // S1, S2: Swizzle value (S1 << 1) | S2, determines the order of X, Y, Z, W components. - // DA, DB, DC: Data values for 3 of 4 components - - // The swizzle value for each component is calculated using an addition over 4, - // using the formula: S(n+1) = (S(n) + 1) & 3 - var swizzle1 = ((b & 0x8000) >> 14) | (c >> 15); - var swizzle2 = (swizzle1 + 1) & 3; - var swizzle3 = (swizzle2 + 1) & 3; - var swizzle4 = (swizzle3 + 1) & 3; - - var dataA = (a & 0x7fff) * scales[swizzle2] + offsets[swizzle2]; - var dataB = (b & 0x7fff) * scales[swizzle3] + offsets[swizzle3]; - var dataC = (c & 0x7fff) * scales[swizzle4] + offsets[swizzle4]; - - var dataD = (float)Math.Sqrt(1 - (dataA * dataA + dataB * dataB + dataC * dataC)); - if ((a & 0x8000) != 0) - dataD = -dataD; - - var f = new float[4]; - f[swizzle2] = dataA; - f[swizzle3] = dataB; - f[swizzle4] = dataC; - f[swizzle1] = dataD; - - return new Quaternion(f[0], f[1], f[2], f[3]); - } + private Quaternion QuatFromControl(UInt16 a, UInt16 b, UInt16 c, float[] scales, float[] offsets) + { + // Control data format: + // ----- A ----- ----- B ----- ----- C ----- + // | 1 2 ... 15 | 1 2 ... 15 | 1 2 ... 
15 | + // G DA S1 DB S2 DC + // + // G: Sign flag; the last word is negative if G == 1 + // S1, S2: Swizzle value (S1 << 1) | S2, determines the order of X, Y, Z, W components. + // DA, DB, DC: Data values for 3 of 4 components + + // The swizzle value for each component is calculated using an addition over 4, + // using the formula: S(n+1) = (S(n) + 1) & 3 + var swizzle1 = ((b & 0x8000) >> 14) | (c >> 15); + var swizzle2 = (swizzle1 + 1) & 3; + var swizzle3 = (swizzle2 + 1) & 3; + var swizzle4 = (swizzle3 + 1) & 3; + + var dataA = (a & 0x7fff) * scales[swizzle2] + offsets[swizzle2]; + var dataB = (b & 0x7fff) * scales[swizzle3] + offsets[swizzle3]; + var dataC = (c & 0x7fff) * scales[swizzle4] + offsets[swizzle4]; + + var dataD = (float)Math.Sqrt(1 - (dataA * dataA + dataB * dataB + dataC * dataC)); + if ((a & 0x8000) != 0) + dataD = -dataD; + + var f = new float[4]; + f[swizzle2] = dataA; + f[swizzle3] = dataB; + f[swizzle4] = dataC; + f[swizzle1] = dataD; + + return new Quaternion(f[0], f[1], f[2], f[3]); + } - public override List GetQuaternions() + public override List GetQuaternions() + { + // ScaleOffsetTableEntries is a bitmask containing the indexes of 4 scale table and offset table entries. + // Format: + // | 1 ... 4 5 ... 8 | 1 ... 4 5 ... 8 | + // Entry 4 Entry 3 | Entry 2 Entry 1 + var selector = ScaleOffsetTableEntries; + var scaleTable = new float[] { + ScaleTable[(selector >> 0) & 0x0F] * 0.000030518509f, + ScaleTable[(selector >> 4) & 0x0F] * 0.000030518509f, + ScaleTable[(selector >> 8) & 0x0F] * 0.000030518509f, + ScaleTable[(selector >> 12) & 0x0F] * 0.000030518509f + }; + + var offsetTable = new float[] { + OffsetTable[(selector >> 0) & 0x0F], + OffsetTable[(selector >> 4) & 0x0F], + OffsetTable[(selector >> 8) & 0x0F], + OffsetTable[(selector >> 12) & 0x0F] + }; + + var numKnots = NumKnots(); + var quats = new List(numKnots); + for (var i = 0; i < numKnots; i++) { - // ScaleOffsetTableEntries is a bitmask containing the indexes of 4 scale table and offset table entries. - // Format: - // | 1 ... 4 5 ... 8 | 1 ... 4 5 ... 
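// Compact restatement of the bit layout decoded by QuatFromControl above, for one
// (a, b, c) control triple of the 16-bit variant; purely illustrative. The omitted
// quaternion component is the one indexed by the swizzle, and it is recovered after
// de-quantization from the unit-length constraint: d = +/- sqrt(1 - (da*da + db*db + dc*dc)).
static (int Sign, int Swizzle, int Da, int Db, int Dc) UnpackControlWords(ushort a, ushort b, ushort c)
{
    var sign    = (a & 0x8000) >> 15;                 // G: negate the reconstructed component
    var swizzle = ((b & 0x8000) >> 14) | (c >> 15);   // (S1 << 1) | S2: index of the omitted component
    return (sign, swizzle, a & 0x7fff, b & 0x7fff, c & 0x7fff);
}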
8 | - // Entry 4 Entry 3 | Entry 2 Entry 1 - var selector = ScaleOffsetTableEntries; - var scaleTable = new float[] { - ScaleTable[(selector >> 0) & 0x0F] * 0.000030518509f, - ScaleTable[(selector >> 4) & 0x0F] * 0.000030518509f, - ScaleTable[(selector >> 8) & 0x0F] * 0.000030518509f, - ScaleTable[(selector >> 12) & 0x0F] * 0.000030518509f - }; - - var offsetTable = new float[] { - OffsetTable[(selector >> 0) & 0x0F], - OffsetTable[(selector >> 4) & 0x0F], - OffsetTable[(selector >> 8) & 0x0F], - OffsetTable[(selector >> 12) & 0x0F] - }; - - var numKnots = NumKnots(); - var quats = new List(numKnots); - for (var i = 0; i < numKnots; i++) - { - var quat = QuatFromControl( - KnotsControls[numKnots + i * 3 + 0], - KnotsControls[numKnots + i * 3 + 1], - KnotsControls[numKnots + i * 3 + 2], - scaleTable, offsetTable - ); - quats.Add(quat); - } - - return quats; + var quat = QuatFromControl( + KnotsControls[numKnots + i * 3 + 0], + KnotsControls[numKnots + i * 3 + 1], + KnotsControls[numKnots + i * 3 + 2], + scaleTable, offsetTable + ); + quats.Add(quat); } + + return quats; } } diff --git a/LSLib/Granny/Model/CurveData/D4nK8uC7u.cs b/LSLib/Granny/Model/CurveData/D4nK8uC7u.cs index 2a58537d..47b91590 100644 --- a/LSLib/Granny/Model/CurveData/D4nK8uC7u.cs +++ b/LSLib/Granny/Model/CurveData/D4nK8uC7u.cs @@ -3,116 +3,115 @@ using OpenTK.Mathematics; using LSLib.Granny.GR2; -namespace LSLib.Granny.Model.CurveData +namespace LSLib.Granny.Model.CurveData; + +public class D4nK8uC7u : AnimationCurveData { - public class D4nK8uC7u : AnimationCurveData + private static readonly float[] ScaleTable = [ + 1.4142135f, 0.70710677f, 0.35355338f, 0.35355338f, + 0.35355338f, 0.17677669f, 0.17677669f, 0.17677669f, + -1.4142135f, -0.70710677f, -0.35355338f, -0.35355338f, + -0.35355338f, -0.17677669f, -0.17677669f, -0.17677669f + ]; + + private static readonly float[] OffsetTable = [ + -0.70710677f, -0.35355338f, -0.53033006f, -0.17677669f, + 0.17677669f, -0.17677669f, -0.088388346f, 0.0f, + 0.70710677f, 0.35355338f, 0.53033006f, 0.17677669f, + -0.17677669f, 0.17677669f, 0.088388346f, -0.0f + ]; + + [Serialization(Type = MemberType.Inline)] + public CurveDataHeader CurveDataHeader_D4nK8uC7u; + public UInt16 ScaleOffsetTableEntries; + public Single OneOverKnotScale; + [Serialization(Prototype = typeof(ControlUInt8), Kind = SerializationKind.UserMember, Serializer = typeof(UInt8ListSerializer))] + public List KnotsControls; + + public override int NumKnots() { - private static readonly float[] ScaleTable = [ - 1.4142135f, 0.70710677f, 0.35355338f, 0.35355338f, - 0.35355338f, 0.17677669f, 0.17677669f, 0.17677669f, - -1.4142135f, -0.70710677f, -0.35355338f, -0.35355338f, - -0.35355338f, -0.17677669f, -0.17677669f, -0.17677669f - ]; - - private static readonly float[] OffsetTable = [ - -0.70710677f, -0.35355338f, -0.53033006f, -0.17677669f, - 0.17677669f, -0.17677669f, -0.088388346f, 0.0f, - 0.70710677f, 0.35355338f, 0.53033006f, 0.17677669f, - -0.17677669f, 0.17677669f, 0.088388346f, -0.0f - ]; - - [Serialization(Type = MemberType.Inline)] - public CurveDataHeader CurveDataHeader_D4nK8uC7u; - public UInt16 ScaleOffsetTableEntries; - public Single OneOverKnotScale; - [Serialization(Prototype = typeof(ControlUInt8), Kind = SerializationKind.UserMember, Serializer = typeof(UInt8ListSerializer))] - public List KnotsControls; - - public override int NumKnots() - { - return KnotsControls.Count / 4; - } + return KnotsControls.Count / 4; + } - public override List GetKnots() - { - var numKnots = NumKnots(); - var knots = new 
List(numKnots); - for (var i = 0; i < numKnots; i++) - knots.Add((float)KnotsControls[i] / OneOverKnotScale); + public override List GetKnots() + { + var numKnots = NumKnots(); + var knots = new List(numKnots); + for (var i = 0; i < numKnots; i++) + knots.Add((float)KnotsControls[i] / OneOverKnotScale); - return knots; - } + return knots; + } - private Quaternion QuatFromControl(Byte a, Byte b, Byte c, float[] scales, float[] offsets) - { - // Control data format: - // ----- A ----- ----- B ----- ----- C ----- - // | 1 2 ... 7 | 1 2 ... 7 | 1 2 ... 7 | - // G DA S1 DB S2 DC - // - // G: Sign flag; the last word is negative if G == 1 - // S1, S2: Swizzle value (S1 << 1) | S2, determines the order of X, Y, Z, W components. - // DA, DB, DC: Data values for 3 of 4 components - - // The swizzle value for each component is calculated using an addition over 4, - // using the formula: S(n+1) = (S(n) + 1) & 3 - var swizzle1 = ((b & 0x80) >> 6) | ((c & 0x80) >> 7); - var swizzle2 = (swizzle1 + 1) & 3; - var swizzle3 = (swizzle2 + 1) & 3; - var swizzle4 = (swizzle3 + 1) & 3; - - var dataA = (a & 0x7f) * scales[swizzle2] + offsets[swizzle2]; - var dataB = (b & 0x7f) * scales[swizzle3] + offsets[swizzle3]; - var dataC = (c & 0x7f) * scales[swizzle4] + offsets[swizzle4]; - - var dataD = (float)Math.Sqrt(1 - (dataA * dataA + dataB * dataB + dataC * dataC)); - if ((a & 0x80) != 0) - dataD = -dataD; - - var f = new float[4]; - f[swizzle2] = dataA; - f[swizzle3] = dataB; - f[swizzle4] = dataC; - f[swizzle1] = dataD; - - return new Quaternion(f[0], f[1], f[2], f[3]); - } + private Quaternion QuatFromControl(Byte a, Byte b, Byte c, float[] scales, float[] offsets) + { + // Control data format: + // ----- A ----- ----- B ----- ----- C ----- + // | 1 2 ... 7 | 1 2 ... 7 | 1 2 ... 7 | + // G DA S1 DB S2 DC + // + // G: Sign flag; the last word is negative if G == 1 + // S1, S2: Swizzle value (S1 << 1) | S2, determines the order of X, Y, Z, W components. + // DA, DB, DC: Data values for 3 of 4 components + + // The swizzle value for each component is calculated using an addition over 4, + // using the formula: S(n+1) = (S(n) + 1) & 3 + var swizzle1 = ((b & 0x80) >> 6) | ((c & 0x80) >> 7); + var swizzle2 = (swizzle1 + 1) & 3; + var swizzle3 = (swizzle2 + 1) & 3; + var swizzle4 = (swizzle3 + 1) & 3; + + var dataA = (a & 0x7f) * scales[swizzle2] + offsets[swizzle2]; + var dataB = (b & 0x7f) * scales[swizzle3] + offsets[swizzle3]; + var dataC = (c & 0x7f) * scales[swizzle4] + offsets[swizzle4]; + + var dataD = (float)Math.Sqrt(1 - (dataA * dataA + dataB * dataB + dataC * dataC)); + if ((a & 0x80) != 0) + dataD = -dataD; + + var f = new float[4]; + f[swizzle2] = dataA; + f[swizzle3] = dataB; + f[swizzle4] = dataC; + f[swizzle1] = dataD; + + return new Quaternion(f[0], f[1], f[2], f[3]); + } - public override List GetQuaternions() + public override List GetQuaternions() + { + // ScaleOffsetTableEntries is a bitmask containing the indexes of 4 scale table and offset table entries. + // Format: + // | 1 ... 4 5 ... 8 | 1 ... 4 5 ... 
8 | + // Entry 4 Entry 3 | Entry 2 Entry 1 + var selector = ScaleOffsetTableEntries; + var scaleTable = new float[] { + ScaleTable[(selector >> 0) & 0x0F] * 0.0078740157f, + ScaleTable[(selector >> 4) & 0x0F] * 0.0078740157f, + ScaleTable[(selector >> 8) & 0x0F] * 0.0078740157f, + ScaleTable[(selector >> 12) & 0x0F] * 0.0078740157f + }; + + var offsetTable = new float[] { + OffsetTable[(selector >> 0) & 0x0F], + OffsetTable[(selector >> 4) & 0x0F], + OffsetTable[(selector >> 8) & 0x0F], + OffsetTable[(selector >> 12) & 0x0F] + }; + + var numKnots = NumKnots(); + var quats = new List(numKnots); + for (var i = 0; i < numKnots; i++) { - // ScaleOffsetTableEntries is a bitmask containing the indexes of 4 scale table and offset table entries. - // Format: - // | 1 ... 4 5 ... 8 | 1 ... 4 5 ... 8 | - // Entry 4 Entry 3 | Entry 2 Entry 1 - var selector = ScaleOffsetTableEntries; - var scaleTable = new float[] { - ScaleTable[(selector >> 0) & 0x0F] * 0.0078740157f, - ScaleTable[(selector >> 4) & 0x0F] * 0.0078740157f, - ScaleTable[(selector >> 8) & 0x0F] * 0.0078740157f, - ScaleTable[(selector >> 12) & 0x0F] * 0.0078740157f - }; - - var offsetTable = new float[] { - OffsetTable[(selector >> 0) & 0x0F], - OffsetTable[(selector >> 4) & 0x0F], - OffsetTable[(selector >> 8) & 0x0F], - OffsetTable[(selector >> 12) & 0x0F] - }; - - var numKnots = NumKnots(); - var quats = new List(numKnots); - for (var i = 0; i < numKnots; i++) - { - var quat = QuatFromControl( - KnotsControls[numKnots + i * 3 + 0], - KnotsControls[numKnots + i * 3 + 1], - KnotsControls[numKnots + i * 3 + 2], - scaleTable, offsetTable - ); - quats.Add(quat); - } - - return quats; + var quat = QuatFromControl( + KnotsControls[numKnots + i * 3 + 0], + KnotsControls[numKnots + i * 3 + 1], + KnotsControls[numKnots + i * 3 + 2], + scaleTable, offsetTable + ); + quats.Add(quat); } + + return quats; } } diff --git a/LSLib/Granny/Model/CurveData/D9I1K16uC16u.cs b/LSLib/Granny/Model/CurveData/D9I1K16uC16u.cs index e700b6bb..63ad5fc5 100644 --- a/LSLib/Granny/Model/CurveData/D9I1K16uC16u.cs +++ b/LSLib/Granny/Model/CurveData/D9I1K16uC16u.cs @@ -3,55 +3,54 @@ using OpenTK.Mathematics; using LSLib.Granny.GR2; -namespace LSLib.Granny.Model.CurveData +namespace LSLib.Granny.Model.CurveData; + +public class D9I1K16uC16u : AnimationCurveData { - public class D9I1K16uC16u : AnimationCurveData + [Serialization(Type = MemberType.Inline)] + public CurveDataHeader CurveDataHeader_D9I1K16uC16u; + public UInt16 OneOverKnotScaleTrunc; + public float ControlScale; + public float ControlOffset; + [Serialization(Prototype = typeof(ControlUInt16), Kind = SerializationKind.UserMember, Serializer = typeof(UInt16ListSerializer))] + public List KnotsControls; + + public override int NumKnots() { - [Serialization(Type = MemberType.Inline)] - public CurveDataHeader CurveDataHeader_D9I1K16uC16u; - public UInt16 OneOverKnotScaleTrunc; - public float ControlScale; - public float ControlOffset; - [Serialization(Prototype = typeof(ControlUInt16), Kind = SerializationKind.UserMember, Serializer = typeof(UInt16ListSerializer))] - public List KnotsControls; - - public override int NumKnots() - { - return KnotsControls.Count / 2; - } + return KnotsControls.Count / 2; + } - public override List GetKnots() - { - var scale = ConvertOneOverKnotScaleTrunc(OneOverKnotScaleTrunc); - var numKnots = NumKnots(); - var knots = new List(numKnots); - for (var i = 0; i < numKnots; i++) - knots.Add((float)KnotsControls[i] / scale); + public override List GetKnots() + { + var scale = 
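// Illustrative sketch of the selector decode shared by D4nK16uC15u and D4nK8uC7u above:
// ScaleOffsetTableEntries packs four 4-bit indices into the 16-entry ScaleTable /
// OffsetTable, and the selected scale is further multiplied by the payload step
// (0.0078740157 ~ 1/127 for the 7-bit controls here, 0.000030518509 ~ 1/32767 for the
// 15-bit variant). Helper name and signature are illustrative only.
static void UnpackScaleOffsetSelector(ushort selector, float[] scaleTable, float[] offsetTable,
                                      float payloadStep, float[] scales, float[] offsets)
{
    for (var component = 0; component < 4; component++)
    {
        var index = (selector >> (component * 4)) & 0x0F;  // 4 bits per table entry
        scales[component]  = scaleTable[index] * payloadStep;
        offsets[component] = offsetTable[index];
    }
}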
ConvertOneOverKnotScaleTrunc(OneOverKnotScaleTrunc); + var numKnots = NumKnots(); + var knots = new List(numKnots); + for (var i = 0; i < numKnots; i++) + knots.Add((float)KnotsControls[i] / scale); - return knots; - } + return knots; + } - public override List GetQuaternions() - { - throw new InvalidOperationException("D9I1K16uC16u is not a rotation curve!"); - } + public override List GetQuaternions() + { + throw new InvalidOperationException("D9I1K16uC16u is not a rotation curve!"); + } - public override List GetMatrices() + public override List GetMatrices() + { + var numKnots = NumKnots(); + var knots = new List(numKnots); + for (var i = 0; i < numKnots; i++) { - var numKnots = NumKnots(); - var knots = new List(numKnots); - for (var i = 0; i < numKnots; i++) - { - var scale = (float)KnotsControls[numKnots + i] * ControlScale + ControlOffset; - var mat = new Matrix3( - scale, 0, 0, - 0, scale, 0, - 0, 0, scale - ); - knots.Add(mat); - } - - return knots; + var scale = (float)KnotsControls[numKnots + i] * ControlScale + ControlOffset; + var mat = new Matrix3( + scale, 0, 0, + 0, scale, 0, + 0, 0, scale + ); + knots.Add(mat); } + + return knots; } } diff --git a/LSLib/Granny/Model/CurveData/D9I1K8uC8u.cs b/LSLib/Granny/Model/CurveData/D9I1K8uC8u.cs index bb9ed368..bff9f53d 100644 --- a/LSLib/Granny/Model/CurveData/D9I1K8uC8u.cs +++ b/LSLib/Granny/Model/CurveData/D9I1K8uC8u.cs @@ -3,56 +3,55 @@ using OpenTK.Mathematics; using LSLib.Granny.GR2; -namespace LSLib.Granny.Model.CurveData +namespace LSLib.Granny.Model.CurveData; + +public class D9I1K8uC8u : AnimationCurveData { - public class D9I1K8uC8u : AnimationCurveData + [Serialization(Type = MemberType.Inline)] + public CurveDataHeader CurveDataHeader_D9I1K8uC8u; + public UInt16 OneOverKnotScaleTrunc; + public float ControlScale; + public float ControlOffset; + [Serialization(Prototype = typeof(ControlUInt8), Kind = SerializationKind.UserMember, Serializer = typeof(UInt8ListSerializer))] + public List KnotsControls; + + public override int NumKnots() { - [Serialization(Type = MemberType.Inline)] - public CurveDataHeader CurveDataHeader_D9I1K8uC8u; - public UInt16 OneOverKnotScaleTrunc; - public float ControlScale; - public float ControlOffset; - [Serialization(Prototype = typeof(ControlUInt8), Kind = SerializationKind.UserMember, Serializer = typeof(UInt8ListSerializer))] - public List KnotsControls; - - public override int NumKnots() - { - return KnotsControls.Count / 2; - } + return KnotsControls.Count / 2; + } - public override List GetKnots() - { - var scale = ConvertOneOverKnotScaleTrunc(OneOverKnotScaleTrunc); - var numKnots = NumKnots(); - var knots = new List(numKnots); - for (var i = 0; i < numKnots; i++) - knots.Add((float)KnotsControls[i] / scale); + public override List GetKnots() + { + var scale = ConvertOneOverKnotScaleTrunc(OneOverKnotScaleTrunc); + var numKnots = NumKnots(); + var knots = new List(numKnots); + for (var i = 0; i < numKnots; i++) + knots.Add((float)KnotsControls[i] / scale); - return knots; - } + return knots; + } - public override List GetQuaternions() - { - throw new InvalidOperationException("D9I1K8uC8u is not a rotation curve!"); - } + public override List GetQuaternions() + { + throw new InvalidOperationException("D9I1K8uC8u is not a rotation curve!"); + } - public override List GetMatrices() + public override List GetMatrices() + { + var numKnots = NumKnots(); + var knots = new List(numKnots); + for (var i = 0; i < numKnots; i++) { - var numKnots = NumKnots(); - var knots = new List(numKnots); - for 
(var i = 0; i < numKnots; i++) - { - // TODO: Not sure if correct? - var scale = (float)KnotsControls[numKnots + i] * ControlScale + ControlOffset; - var mat = new Matrix3( - scale, 0, 0, - 0, scale, 0, - 0, 0, scale - ); - knots.Add(mat); - } - - return knots; + // TODO: Not sure if correct? + var scale = (float)KnotsControls[numKnots + i] * ControlScale + ControlOffset; + var mat = new Matrix3( + scale, 0, 0, + 0, scale, 0, + 0, 0, scale + ); + knots.Add(mat); } + + return knots; } } diff --git a/LSLib/Granny/Model/CurveData/D9I3K16uC16u.cs b/LSLib/Granny/Model/CurveData/D9I3K16uC16u.cs index b51b8bd2..fa776453 100644 --- a/LSLib/Granny/Model/CurveData/D9I3K16uC16u.cs +++ b/LSLib/Granny/Model/CurveData/D9I3K16uC16u.cs @@ -3,56 +3,55 @@ using OpenTK.Mathematics; using LSLib.Granny.GR2; -namespace LSLib.Granny.Model.CurveData +namespace LSLib.Granny.Model.CurveData; + +public class D9I3K16uC16u : AnimationCurveData { - public class D9I3K16uC16u : AnimationCurveData + [Serialization(Type = MemberType.Inline)] + public CurveDataHeader CurveDataHeader_D9I3K16uC16u; + public UInt16 OneOverKnotScaleTrunc; + [Serialization(ArraySize = 3)] + public float[] ControlScales; + [Serialization(ArraySize = 3)] + public float[] ControlOffsets; + [Serialization(Prototype = typeof(ControlUInt16), Kind = SerializationKind.UserMember, Serializer = typeof(UInt16ListSerializer))] + public List KnotsControls; + + public override int NumKnots() { - [Serialization(Type = MemberType.Inline)] - public CurveDataHeader CurveDataHeader_D9I3K16uC16u; - public UInt16 OneOverKnotScaleTrunc; - [Serialization(ArraySize = 3)] - public float[] ControlScales; - [Serialization(ArraySize = 3)] - public float[] ControlOffsets; - [Serialization(Prototype = typeof(ControlUInt16), Kind = SerializationKind.UserMember, Serializer = typeof(UInt16ListSerializer))] - public List KnotsControls; - - public override int NumKnots() - { - return KnotsControls.Count / 4; - } + return KnotsControls.Count / 4; + } - public override List GetKnots() - { - var scale = ConvertOneOverKnotScaleTrunc(OneOverKnotScaleTrunc); - var numKnots = NumKnots(); - var knots = new List(numKnots); - for (var i = 0; i < numKnots; i++) - knots.Add((float)KnotsControls[i] / scale); + public override List GetKnots() + { + var scale = ConvertOneOverKnotScaleTrunc(OneOverKnotScaleTrunc); + var numKnots = NumKnots(); + var knots = new List(numKnots); + for (var i = 0; i < numKnots; i++) + knots.Add((float)KnotsControls[i] / scale); - return knots; - } + return knots; + } - public override List GetQuaternions() - { - throw new InvalidOperationException("D9I3K16uC16u is not a rotation curve!"); - } + public override List GetQuaternions() + { + throw new InvalidOperationException("D9I3K16uC16u is not a rotation curve!"); + } - public override List GetMatrices() + public override List GetMatrices() + { + var numKnots = NumKnots(); + var knots = new List(numKnots); + for (var i = 0; i < numKnots; i++) { - var numKnots = NumKnots(); - var knots = new List(numKnots); - for (var i = 0; i < numKnots; i++) - { - var mat = new Matrix3( - (float)KnotsControls[numKnots + i * 3 + 0] * ControlScales[0] + ControlOffsets[0], 0, 0, - 0, (float)KnotsControls[numKnots + i * 3 + 1] * ControlScales[1] + ControlOffsets[1], 0, - 0, 0, (float)KnotsControls[numKnots + i * 3 + 2] * ControlScales[2] + ControlOffsets[2] - ); - knots.Add(mat); - } - - return knots; + var mat = new Matrix3( + (float)KnotsControls[numKnots + i * 3 + 0] * ControlScales[0] + ControlOffsets[0], 0, 0, + 0, 
(float)KnotsControls[numKnots + i * 3 + 1] * ControlScales[1] + ControlOffsets[1], 0, + 0, 0, (float)KnotsControls[numKnots + i * 3 + 2] * ControlScales[2] + ControlOffsets[2] + ); + knots.Add(mat); } + + return knots; } } diff --git a/LSLib/Granny/Model/CurveData/D9I3K8uC8u.cs b/LSLib/Granny/Model/CurveData/D9I3K8uC8u.cs index 9b2fa0ff..22aeb973 100644 --- a/LSLib/Granny/Model/CurveData/D9I3K8uC8u.cs +++ b/LSLib/Granny/Model/CurveData/D9I3K8uC8u.cs @@ -3,56 +3,55 @@ using OpenTK.Mathematics; using LSLib.Granny.GR2; -namespace LSLib.Granny.Model.CurveData +namespace LSLib.Granny.Model.CurveData; + +public class D9I3K8uC8u : AnimationCurveData { - public class D9I3K8uC8u : AnimationCurveData + [Serialization(Type = MemberType.Inline)] + public CurveDataHeader CurveDataHeader_D9I3K8uC8u; + public UInt16 OneOverKnotScaleTrunc; + [Serialization(ArraySize = 3)] + public float[] ControlScales; + [Serialization(ArraySize = 3)] + public float[] ControlOffsets; + [Serialization(Prototype = typeof(ControlUInt8), Kind = SerializationKind.UserMember, Serializer = typeof(UInt8ListSerializer))] + public List KnotsControls; + + public override int NumKnots() { - [Serialization(Type = MemberType.Inline)] - public CurveDataHeader CurveDataHeader_D9I3K8uC8u; - public UInt16 OneOverKnotScaleTrunc; - [Serialization(ArraySize = 3)] - public float[] ControlScales; - [Serialization(ArraySize = 3)] - public float[] ControlOffsets; - [Serialization(Prototype = typeof(ControlUInt8), Kind = SerializationKind.UserMember, Serializer = typeof(UInt8ListSerializer))] - public List KnotsControls; - - public override int NumKnots() - { - return KnotsControls.Count / 4; - } + return KnotsControls.Count / 4; + } - public override List GetKnots() - { - var scale = ConvertOneOverKnotScaleTrunc(OneOverKnotScaleTrunc); - var numKnots = NumKnots(); - var knots = new List(numKnots); - for (var i = 0; i < numKnots; i++) - knots.Add((float)KnotsControls[i] / scale); + public override List GetKnots() + { + var scale = ConvertOneOverKnotScaleTrunc(OneOverKnotScaleTrunc); + var numKnots = NumKnots(); + var knots = new List(numKnots); + for (var i = 0; i < numKnots; i++) + knots.Add((float)KnotsControls[i] / scale); - return knots; - } + return knots; + } - public override List GetQuaternions() - { - throw new InvalidOperationException("D9I3K8uC8u is not a rotation curve!"); - } + public override List GetQuaternions() + { + throw new InvalidOperationException("D9I3K8uC8u is not a rotation curve!"); + } - public override List GetMatrices() + public override List GetMatrices() + { + var numKnots = NumKnots(); + var knots = new List(numKnots); + for (var i = 0; i < numKnots; i++) { - var numKnots = NumKnots(); - var knots = new List(numKnots); - for (var i = 0; i < numKnots; i++) - { - var mat = new Matrix3( - (float)KnotsControls[numKnots + i * 3 + 0] * ControlScales[0] + ControlOffsets[0], 0, 0, - 0, (float)KnotsControls[numKnots + i * 3 + 1] * ControlScales[1] + ControlOffsets[1], 0, - 0, 0, (float)KnotsControls[numKnots + i * 3 + 2] * ControlScales[2] + ControlOffsets[2] - ); - knots.Add(mat); - } - - return knots; + var mat = new Matrix3( + (float)KnotsControls[numKnots + i * 3 + 0] * ControlScales[0] + ControlOffsets[0], 0, 0, + 0, (float)KnotsControls[numKnots + i * 3 + 1] * ControlScales[1] + ControlOffsets[1], 0, + 0, 0, (float)KnotsControls[numKnots + i * 3 + 2] * ControlScales[2] + ControlOffsets[2] + ); + knots.Add(mat); } + + return knots; } } diff --git a/LSLib/Granny/Model/CurveData/DaConstant32f.cs 
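// The D9* curves above all decode to diagonal scale matrices: the I1 variants store one
// quantized control per knot (uniform scale on the diagonal), the I3 variants store
// three (one per axis). Minimal sketch of the two decodes, assuming
// OpenTK.Mathematics.Matrix3 as in the surrounding files.
static Matrix3 DecodeUniformScale(ushort control, float controlScale, float controlOffset)
{
    var s = (float)control * controlScale + controlOffset;
    return new Matrix3(
        s, 0, 0,
        0, s, 0,
        0, 0, s);
}

static Matrix3 DecodePerAxisScale(ushort cx, ushort cy, ushort cz, float[] scales, float[] offsets)
{
    return new Matrix3(
        (float)cx * scales[0] + offsets[0], 0, 0,
        0, (float)cy * scales[1] + offsets[1], 0,
        0, 0, (float)cz * scales[2] + offsets[2]);
}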
b/LSLib/Granny/Model/CurveData/DaConstant32f.cs index 53d8b4f2..991b954c 100644 --- a/LSLib/Granny/Model/CurveData/DaConstant32f.cs +++ b/LSLib/Granny/Model/CurveData/DaConstant32f.cs @@ -4,37 +4,36 @@ using LSLib.Granny.GR2; using System.Diagnostics; -namespace LSLib.Granny.Model.CurveData +namespace LSLib.Granny.Model.CurveData; + +public class DaConstant32f : AnimationCurveData { - public class DaConstant32f : AnimationCurveData - { - [Serialization(Type = MemberType.Inline)] - public CurveDataHeader CurveDataHeader_DaConstant32f; - public Int16 Padding; - [Serialization(Prototype = typeof(ControlReal32), Kind = SerializationKind.UserMember, Serializer = typeof(SingleListSerializer))] - public List Controls; + [Serialization(Type = MemberType.Inline)] + public CurveDataHeader CurveDataHeader_DaConstant32f; + public Int16 Padding; + [Serialization(Prototype = typeof(ControlReal32), Kind = SerializationKind.UserMember, Serializer = typeof(SingleListSerializer))] + public List Controls; - public override int NumKnots() - { - return 1; - } + public override int NumKnots() + { + return 1; + } - public override List GetKnots() - { - return [0.0f]; - } + public override List GetKnots() + { + return [0.0f]; + } - public override List GetMatrices() - { - Debug.Assert(Controls.Count == 9); - var m = Controls; - Matrix3 mat = new( - m[0], m[1], m[2], - m[3], m[4], m[5], - m[6], m[7], m[8] - ); + public override List GetMatrices() + { + Debug.Assert(Controls.Count == 9); + var m = Controls; + Matrix3 mat = new( + m[0], m[1], m[2], + m[3], m[4], m[5], + m[6], m[7], m[8] + ); - return [mat]; - } + return [mat]; } } diff --git a/LSLib/Granny/Model/CurveData/DaIdentity.cs b/LSLib/Granny/Model/CurveData/DaIdentity.cs index 0179b378..65170715 100644 --- a/LSLib/Granny/Model/CurveData/DaIdentity.cs +++ b/LSLib/Granny/Model/CurveData/DaIdentity.cs @@ -3,32 +3,31 @@ using OpenTK.Mathematics; using LSLib.Granny.GR2; -namespace LSLib.Granny.Model.CurveData +namespace LSLib.Granny.Model.CurveData; + +public class DaIdentity : AnimationCurveData { - public class DaIdentity : AnimationCurveData - { - [Serialization(Type = MemberType.Inline)] - public CurveDataHeader CurveDataHeader_DaIdentity; - public Int16 Dimension; + [Serialization(Type = MemberType.Inline)] + public CurveDataHeader CurveDataHeader_DaIdentity; + public Int16 Dimension; - public override int NumKnots() - { - return 1; - } + public override int NumKnots() + { + return 1; + } - public override List GetKnots() - { - return [0.0f]; - } + public override List GetKnots() + { + return [0.0f]; + } - public override List GetPoints() - { - return [new Vector3(0.0f, 0.0f, 0.0f)]; - } + public override List GetPoints() + { + return [new Vector3(0.0f, 0.0f, 0.0f)]; + } - public override List GetMatrices() - { - return [Matrix3.Identity]; - } + public override List GetMatrices() + { + return [Matrix3.Identity]; } } diff --git a/LSLib/Granny/Model/CurveData/DaK16uC16u.cs b/LSLib/Granny/Model/CurveData/DaK16uC16u.cs index cfbac448..d4048cbc 100644 --- a/LSLib/Granny/Model/CurveData/DaK16uC16u.cs +++ b/LSLib/Granny/Model/CurveData/DaK16uC16u.cs @@ -4,80 +4,79 @@ using System.Diagnostics; using LSLib.Granny.GR2; -namespace LSLib.Granny.Model.CurveData +namespace LSLib.Granny.Model.CurveData; + +public class DaK16uC16u : AnimationCurveData { - public class DaK16uC16u : AnimationCurveData - { - [Serialization(Type = MemberType.Inline)] - public CurveDataHeader CurveDataHeader_DaK16uC16u; - public UInt16 OneOverKnotScaleTrunc; - [Serialization(Prototype = 
typeof(ControlReal32), Kind = SerializationKind.UserMember, Serializer = typeof(SingleListSerializer))] - public List ControlScaleOffsets; - [Serialization(Prototype = typeof(ControlUInt16), Kind = SerializationKind.UserMember, Serializer = typeof(UInt16ListSerializer))] - public List KnotsControls; + [Serialization(Type = MemberType.Inline)] + public CurveDataHeader CurveDataHeader_DaK16uC16u; + public UInt16 OneOverKnotScaleTrunc; + [Serialization(Prototype = typeof(ControlReal32), Kind = SerializationKind.UserMember, Serializer = typeof(SingleListSerializer))] + public List ControlScaleOffsets; + [Serialization(Prototype = typeof(ControlUInt16), Kind = SerializationKind.UserMember, Serializer = typeof(UInt16ListSerializer))] + public List KnotsControls; - public int Components() - { - return ControlScaleOffsets.Count / 2; - } + public int Components() + { + return ControlScaleOffsets.Count / 2; + } - public override int NumKnots() - { - return KnotsControls.Count / (Components() + 1); - } + public override int NumKnots() + { + return KnotsControls.Count / (Components() + 1); + } - public override List GetKnots() - { - var scale = ConvertOneOverKnotScaleTrunc(OneOverKnotScaleTrunc); - var numKnots = NumKnots(); - var knots = new List(numKnots); - for (var i = 0; i < numKnots; i++) - knots.Add((float)KnotsControls[i] / scale); + public override List GetKnots() + { + var scale = ConvertOneOverKnotScaleTrunc(OneOverKnotScaleTrunc); + var numKnots = NumKnots(); + var knots = new List(numKnots); + for (var i = 0; i < numKnots; i++) + knots.Add((float)KnotsControls[i] / scale); - return knots; - } + return knots; + } - public override List GetMatrices() + public override List GetMatrices() + { + Debug.Assert(Components() == 9); + var numKnots = NumKnots(); + var knots = new List(numKnots); + for (var i = 0; i < numKnots; i++) { - Debug.Assert(Components() == 9); - var numKnots = NumKnots(); - var knots = new List(numKnots); - for (var i = 0; i < numKnots; i++) - { - var mat = new Matrix3( - (float)KnotsControls[numKnots + i * 9 + 0] * ControlScaleOffsets[0] + ControlScaleOffsets[9 + 0], - (float)KnotsControls[numKnots + i * 9 + 1] * ControlScaleOffsets[1] + ControlScaleOffsets[9 + 1], - (float)KnotsControls[numKnots + i * 9 + 2] * ControlScaleOffsets[2] + ControlScaleOffsets[9 + 2], - (float)KnotsControls[numKnots + i * 9 + 3] * ControlScaleOffsets[3] + ControlScaleOffsets[9 + 3], - (float)KnotsControls[numKnots + i * 9 + 4] * ControlScaleOffsets[4] + ControlScaleOffsets[9 + 4], - (float)KnotsControls[numKnots + i * 9 + 5] * ControlScaleOffsets[5] + ControlScaleOffsets[9 + 5], - (float)KnotsControls[numKnots + i * 9 + 6] * ControlScaleOffsets[6] + ControlScaleOffsets[9 + 6], - (float)KnotsControls[numKnots + i * 9 + 7] * ControlScaleOffsets[7] + ControlScaleOffsets[9 + 7], - (float)KnotsControls[numKnots + i * 9 + 8] * ControlScaleOffsets[8] + ControlScaleOffsets[9 + 8] - ); - knots.Add(mat); - } - - return knots; + var mat = new Matrix3( + (float)KnotsControls[numKnots + i * 9 + 0] * ControlScaleOffsets[0] + ControlScaleOffsets[9 + 0], + (float)KnotsControls[numKnots + i * 9 + 1] * ControlScaleOffsets[1] + ControlScaleOffsets[9 + 1], + (float)KnotsControls[numKnots + i * 9 + 2] * ControlScaleOffsets[2] + ControlScaleOffsets[9 + 2], + (float)KnotsControls[numKnots + i * 9 + 3] * ControlScaleOffsets[3] + ControlScaleOffsets[9 + 3], + (float)KnotsControls[numKnots + i * 9 + 4] * ControlScaleOffsets[4] + ControlScaleOffsets[9 + 4], + (float)KnotsControls[numKnots + i * 9 + 5] * 
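// Illustrative sketch of the DaK16uC16u / DaK8uC8u layout above: ControlScaleOffsets
// holds the per-component scales first and the per-component offsets second (so
// Components() = Count / 2), while KnotsControls stores one knot plus Components()
// quantized values per sample (so NumKnots() = Count / (Components() + 1)). The 16-bit
// variant is shown; the 8-bit one is analogous.
static float DecodeDaKComponent(IReadOnlyList<ushort> knotsControls,
                                IReadOnlyList<float> controlScaleOffsets,
                                int knotIndex, int component)
{
    var components = controlScaleOffsets.Count / 2;          // scales, then offsets
    var numKnots = knotsControls.Count / (components + 1);   // knots come first
    var raw = knotsControls[numKnots + knotIndex * components + component];
    return (float)raw * controlScaleOffsets[component]               // per-component scale
                 + controlScaleOffsets[components + component];      // per-component offset
}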
ControlScaleOffsets[5] + ControlScaleOffsets[9 + 5], + (float)KnotsControls[numKnots + i * 9 + 6] * ControlScaleOffsets[6] + ControlScaleOffsets[9 + 6], + (float)KnotsControls[numKnots + i * 9 + 7] * ControlScaleOffsets[7] + ControlScaleOffsets[9 + 7], + (float)KnotsControls[numKnots + i * 9 + 8] * ControlScaleOffsets[8] + ControlScaleOffsets[9 + 8] + ); + knots.Add(mat); } - public override List GetQuaternions() - { - Debug.Assert(Components() == 4); - var numKnots = NumKnots(); - var quats = new List(numKnots); - for (var i = 0; i < numKnots; i++) - { - var quat = new Quaternion( - (float)KnotsControls[numKnots + i * 4 + 0] * ControlScaleOffsets[0] + ControlScaleOffsets[4 + 0], - (float)KnotsControls[numKnots + i * 4 + 1] * ControlScaleOffsets[1] + ControlScaleOffsets[4 + 1], - (float)KnotsControls[numKnots + i * 4 + 2] * ControlScaleOffsets[2] + ControlScaleOffsets[4 + 2], - (float)KnotsControls[numKnots + i * 4 + 3] * ControlScaleOffsets[3] + ControlScaleOffsets[4 + 3] - ); - quats.Add(quat); - } + return knots; + } - return quats; + public override List GetQuaternions() + { + Debug.Assert(Components() == 4); + var numKnots = NumKnots(); + var quats = new List(numKnots); + for (var i = 0; i < numKnots; i++) + { + var quat = new Quaternion( + (float)KnotsControls[numKnots + i * 4 + 0] * ControlScaleOffsets[0] + ControlScaleOffsets[4 + 0], + (float)KnotsControls[numKnots + i * 4 + 1] * ControlScaleOffsets[1] + ControlScaleOffsets[4 + 1], + (float)KnotsControls[numKnots + i * 4 + 2] * ControlScaleOffsets[2] + ControlScaleOffsets[4 + 2], + (float)KnotsControls[numKnots + i * 4 + 3] * ControlScaleOffsets[3] + ControlScaleOffsets[4 + 3] + ); + quats.Add(quat); } + + return quats; } } diff --git a/LSLib/Granny/Model/CurveData/DaK32fC32f.cs b/LSLib/Granny/Model/CurveData/DaK32fC32f.cs index c12d4829..c615bcfc 100644 --- a/LSLib/Granny/Model/CurveData/DaK32fC32f.cs +++ b/LSLib/Granny/Model/CurveData/DaK32fC32f.cs @@ -4,134 +4,133 @@ using OpenTK.Mathematics; using LSLib.Granny.GR2; -namespace LSLib.Granny.Model.CurveData +namespace LSLib.Granny.Model.CurveData; + +public class DaK32fC32f : AnimationCurveData { - public class DaK32fC32f : AnimationCurveData + [Serialization(Type = MemberType.Inline)] + public CurveDataHeader CurveDataHeader_DaK32fC32f; + public Int16 Padding; + [Serialization(Prototype = typeof(ControlReal32), Kind = SerializationKind.UserMember, Serializer = typeof(SingleListSerializer))] + public List Knots; + [Serialization(Prototype = typeof(ControlReal32), Kind = SerializationKind.UserMember, Serializer = typeof(SingleListSerializer))] + public List Controls; + + public ExportType CurveType() { - [Serialization(Type = MemberType.Inline)] - public CurveDataHeader CurveDataHeader_DaK32fC32f; - public Int16 Padding; - [Serialization(Prototype = typeof(ControlReal32), Kind = SerializationKind.UserMember, Serializer = typeof(SingleListSerializer))] - public List Knots; - [Serialization(Prototype = typeof(ControlReal32), Kind = SerializationKind.UserMember, Serializer = typeof(SingleListSerializer))] - public List Controls; - - public ExportType CurveType() - { - if (Knots.Count * 3 == Controls.Count) - return ExportType.Position; - else if (Knots.Count * 4 == Controls.Count) - return ExportType.Rotation; - else if (Knots.Count * 9 == Controls.Count) - return ExportType.ScaleShear; - else - throw new NotSupportedException("Unsupported DaK32fC32f control data size"); - } + if (Knots.Count * 3 == Controls.Count) + return ExportType.Position; + else if (Knots.Count * 4 == 
Controls.Count) + return ExportType.Rotation; + else if (Knots.Count * 9 == Controls.Count) + return ExportType.ScaleShear; + else + throw new NotSupportedException("Unsupported DaK32fC32f control data size"); + } - public override int NumKnots() - { - return Knots.Count; - } + public override int NumKnots() + { + return Knots.Count; + } - public override List GetKnots() - { - var numKnots = NumKnots(); - var knots = new List(numKnots); - for (var i = 0; i < numKnots; i++) - knots.Add(Knots[i]); + public override List GetKnots() + { + var numKnots = NumKnots(); + var knots = new List(numKnots); + for (var i = 0; i < numKnots; i++) + knots.Add(Knots[i]); - return knots; - } + return knots; + } - public void SetKnots(List knots) - { - Knots = new List(knots); - } + public void SetKnots(List knots) + { + Knots = new List(knots); + } - public override List GetPoints() - { - if (CurveType() != ExportType.Position) - throw new InvalidOperationException("DaK32fC32f: This curve is not a position curve!"); - - var numKnots = NumKnots(); - var positions = new List(numKnots); - for (var i = 0; i < numKnots; i++) - { - var vec = new Vector3( - Controls[i * 3 + 0], - Controls[i * 3 + 1], - Controls[i * 3 + 2] - ); - positions.Add(vec); - } - - return positions; - } + public override List GetPoints() + { + if (CurveType() != ExportType.Position) + throw new InvalidOperationException("DaK32fC32f: This curve is not a position curve!"); - public void SetPoints(List points) + var numKnots = NumKnots(); + var positions = new List(numKnots); + for (var i = 0; i < numKnots; i++) { - Controls = points.SelectMany(p => new float[] { p.X, p.Y, p.Z }).ToList(); + var vec = new Vector3( + Controls[i * 3 + 0], + Controls[i * 3 + 1], + Controls[i * 3 + 2] + ); + positions.Add(vec); } - public override List GetMatrices() - { - if (CurveType() != ExportType.ScaleShear) - throw new InvalidOperationException("DaK32fC32f: This curve is not a scale/shear curve!"); - - var numKnots = NumKnots(); - var scaleShear = new List(numKnots); - for (var i = 0; i < numKnots; i++) - { - var mat = new Matrix3( - Controls[i * 9 + 0], - Controls[i * 9 + 1], - Controls[i * 9 + 2], - Controls[i * 9 + 3], - Controls[i * 9 + 4], - Controls[i * 9 + 5], - Controls[i * 9 + 6], - Controls[i * 9 + 7], - Controls[i * 9 + 8] - ); - scaleShear.Add(mat); - } - - return scaleShear; - } + return positions; + } - public void SetMatrices(List matrices) - { - Controls = matrices.SelectMany(m => new float[] { - m[0, 0], m[0, 1], m[0, 2], - m[1, 0], m[1, 1], m[1, 2], - m[2, 0], m[2, 1], m[2, 2] - }).ToList(); - } + public void SetPoints(List points) + { + Controls = points.SelectMany(p => new float[] { p.X, p.Y, p.Z }).ToList(); + } + + public override List GetMatrices() + { + if (CurveType() != ExportType.ScaleShear) + throw new InvalidOperationException("DaK32fC32f: This curve is not a scale/shear curve!"); - public override List GetQuaternions() + var numKnots = NumKnots(); + var scaleShear = new List(numKnots); + for (var i = 0; i < numKnots; i++) { - if (CurveType() != ExportType.Rotation) - throw new InvalidOperationException("DaK32fC32f: This curve is not a rotation curve!"); - - var numKnots = NumKnots(); - var rotations = new List(numKnots); - for (var i = 0; i < numKnots; i++) - { - var quat = new Quaternion( - Controls[i * 4 + 0], - Controls[i * 4 + 1], - Controls[i * 4 + 2], - Controls[i * 4 + 3] - ); - rotations.Add(quat); - } - - return rotations; + var mat = new Matrix3( + Controls[i * 9 + 0], + Controls[i * 9 + 1], + Controls[i * 9 + 2], + 
Controls[i * 9 + 3], + Controls[i * 9 + 4], + Controls[i * 9 + 5], + Controls[i * 9 + 6], + Controls[i * 9 + 7], + Controls[i * 9 + 8] + ); + scaleShear.Add(mat); } - public void SetQuaternions(List quats) + return scaleShear; + } + + public void SetMatrices(List matrices) + { + Controls = matrices.SelectMany(m => new float[] { + m[0, 0], m[0, 1], m[0, 2], + m[1, 0], m[1, 1], m[1, 2], + m[2, 0], m[2, 1], m[2, 2] + }).ToList(); + } + + public override List GetQuaternions() + { + if (CurveType() != ExportType.Rotation) + throw new InvalidOperationException("DaK32fC32f: This curve is not a rotation curve!"); + + var numKnots = NumKnots(); + var rotations = new List(numKnots); + for (var i = 0; i < numKnots; i++) { - Controls = quats.SelectMany(q => new float[] { q.X, q.Y, q.Z, q.W }).ToList(); + var quat = new Quaternion( + Controls[i * 4 + 0], + Controls[i * 4 + 1], + Controls[i * 4 + 2], + Controls[i * 4 + 3] + ); + rotations.Add(quat); } + + return rotations; + } + + public void SetQuaternions(List quats) + { + Controls = quats.SelectMany(q => new float[] { q.X, q.Y, q.Z, q.W }).ToList(); } } diff --git a/LSLib/Granny/Model/CurveData/DaK8uC8u.cs b/LSLib/Granny/Model/CurveData/DaK8uC8u.cs index fb730f70..0f78619d 100644 --- a/LSLib/Granny/Model/CurveData/DaK8uC8u.cs +++ b/LSLib/Granny/Model/CurveData/DaK8uC8u.cs @@ -4,80 +4,79 @@ using System.Diagnostics; using LSLib.Granny.GR2; -namespace LSLib.Granny.Model.CurveData +namespace LSLib.Granny.Model.CurveData; + +public class DaK8uC8u : AnimationCurveData { - public class DaK8uC8u : AnimationCurveData - { - [Serialization(Type = MemberType.Inline)] - public CurveDataHeader CurveDataHeader_DaK8uC8u; - public UInt16 OneOverKnotScaleTrunc; - [Serialization(Prototype = typeof(ControlReal32), Kind = SerializationKind.UserMember, Serializer = typeof(SingleListSerializer))] - public List ControlScaleOffsets; - [Serialization(Prototype = typeof(ControlUInt8), Kind = SerializationKind.UserMember, Serializer = typeof(UInt8ListSerializer))] - public List KnotsControls; + [Serialization(Type = MemberType.Inline)] + public CurveDataHeader CurveDataHeader_DaK8uC8u; + public UInt16 OneOverKnotScaleTrunc; + [Serialization(Prototype = typeof(ControlReal32), Kind = SerializationKind.UserMember, Serializer = typeof(SingleListSerializer))] + public List ControlScaleOffsets; + [Serialization(Prototype = typeof(ControlUInt8), Kind = SerializationKind.UserMember, Serializer = typeof(UInt8ListSerializer))] + public List KnotsControls; - public int Components() - { - return ControlScaleOffsets.Count / 2; - } + public int Components() + { + return ControlScaleOffsets.Count / 2; + } - public override int NumKnots() - { - return KnotsControls.Count / (Components() + 1); - } + public override int NumKnots() + { + return KnotsControls.Count / (Components() + 1); + } - public override List GetKnots() - { - var scale = ConvertOneOverKnotScaleTrunc(OneOverKnotScaleTrunc); - var numKnots = NumKnots(); - var knots = new List(numKnots); - for (var i = 0; i < numKnots; i++) - knots.Add((float)KnotsControls[i] / scale); + public override List GetKnots() + { + var scale = ConvertOneOverKnotScaleTrunc(OneOverKnotScaleTrunc); + var numKnots = NumKnots(); + var knots = new List(numKnots); + for (var i = 0; i < numKnots; i++) + knots.Add((float)KnotsControls[i] / scale); - return knots; - } + return knots; + } - public override List GetMatrices() + public override List GetMatrices() + { + Debug.Assert(Components() == 9); + var numKnots = NumKnots(); + var knots = new List(numKnots); + 
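// Usage sketch for the uncompressed DaK32fC32f curve defined above: the exporter fills
// Knots plus a flat Controls list through the Set* helpers, and CurveType() follows from
// the controls-per-knot ratio (3 = position, 4 = rotation, 9 = scale/shear). Header
// setup is omitted; illustrative only.
var positionCurve = new DaK32fC32f();
positionCurve.SetKnots(new List<float> { 0.0f, 1.0f });
positionCurve.SetPoints(new List<Vector3>
{
    new Vector3(0.0f, 0.0f, 0.0f),
    new Vector3(1.0f, 2.0f, 3.0f)
});
// Controls now holds 6 floats, i.e. 3 per knot, so:
//   positionCurve.CurveType() == ExportType.Position
//   positionCurve.GetPoints() round-trips the two vectors above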
for (var i = 0; i < numKnots; i++) { - Debug.Assert(Components() == 9); - var numKnots = NumKnots(); - var knots = new List(numKnots); - for (var i = 0; i < numKnots; i++) - { - var mat = new Matrix3( - (float)KnotsControls[numKnots + i * 9 + 0] * ControlScaleOffsets[0] + ControlScaleOffsets[9 + 0], - (float)KnotsControls[numKnots + i * 9 + 1] * ControlScaleOffsets[1] + ControlScaleOffsets[9 + 1], - (float)KnotsControls[numKnots + i * 9 + 2] * ControlScaleOffsets[2] + ControlScaleOffsets[9 + 2], - (float)KnotsControls[numKnots + i * 9 + 3] * ControlScaleOffsets[3] + ControlScaleOffsets[9 + 3], - (float)KnotsControls[numKnots + i * 9 + 4] * ControlScaleOffsets[4] + ControlScaleOffsets[9 + 4], - (float)KnotsControls[numKnots + i * 9 + 5] * ControlScaleOffsets[5] + ControlScaleOffsets[9 + 5], - (float)KnotsControls[numKnots + i * 9 + 6] * ControlScaleOffsets[6] + ControlScaleOffsets[9 + 6], - (float)KnotsControls[numKnots + i * 9 + 7] * ControlScaleOffsets[7] + ControlScaleOffsets[9 + 7], - (float)KnotsControls[numKnots + i * 9 + 8] * ControlScaleOffsets[8] + ControlScaleOffsets[9 + 8] - ); - knots.Add(mat); - } - - return knots; + var mat = new Matrix3( + (float)KnotsControls[numKnots + i * 9 + 0] * ControlScaleOffsets[0] + ControlScaleOffsets[9 + 0], + (float)KnotsControls[numKnots + i * 9 + 1] * ControlScaleOffsets[1] + ControlScaleOffsets[9 + 1], + (float)KnotsControls[numKnots + i * 9 + 2] * ControlScaleOffsets[2] + ControlScaleOffsets[9 + 2], + (float)KnotsControls[numKnots + i * 9 + 3] * ControlScaleOffsets[3] + ControlScaleOffsets[9 + 3], + (float)KnotsControls[numKnots + i * 9 + 4] * ControlScaleOffsets[4] + ControlScaleOffsets[9 + 4], + (float)KnotsControls[numKnots + i * 9 + 5] * ControlScaleOffsets[5] + ControlScaleOffsets[9 + 5], + (float)KnotsControls[numKnots + i * 9 + 6] * ControlScaleOffsets[6] + ControlScaleOffsets[9 + 6], + (float)KnotsControls[numKnots + i * 9 + 7] * ControlScaleOffsets[7] + ControlScaleOffsets[9 + 7], + (float)KnotsControls[numKnots + i * 9 + 8] * ControlScaleOffsets[8] + ControlScaleOffsets[9 + 8] + ); + knots.Add(mat); } - public override List GetQuaternions() - { - Debug.Assert(Components() == 4); - var numKnots = NumKnots(); - var quats = new List(numKnots); - for (var i = 0; i < numKnots; i++) - { - var quat = new Quaternion( - (float)KnotsControls[numKnots + i * 4 + 0] * ControlScaleOffsets[0] + ControlScaleOffsets[4 + 0], - (float)KnotsControls[numKnots + i * 4 + 1] * ControlScaleOffsets[1] + ControlScaleOffsets[4 + 1], - (float)KnotsControls[numKnots + i * 4 + 2] * ControlScaleOffsets[2] + ControlScaleOffsets[4 + 2], - (float)KnotsControls[numKnots + i * 4 + 3] * ControlScaleOffsets[3] + ControlScaleOffsets[4 + 3] - ); - quats.Add(quat); - } + return knots; + } - return quats; + public override List GetQuaternions() + { + Debug.Assert(Components() == 4); + var numKnots = NumKnots(); + var quats = new List(numKnots); + for (var i = 0; i < numKnots; i++) + { + var quat = new Quaternion( + (float)KnotsControls[numKnots + i * 4 + 0] * ControlScaleOffsets[0] + ControlScaleOffsets[4 + 0], + (float)KnotsControls[numKnots + i * 4 + 1] * ControlScaleOffsets[1] + ControlScaleOffsets[4 + 1], + (float)KnotsControls[numKnots + i * 4 + 2] * ControlScaleOffsets[2] + ControlScaleOffsets[4 + 2], + (float)KnotsControls[numKnots + i * 4 + 3] * ControlScaleOffsets[3] + ControlScaleOffsets[4 + 3] + ); + quats.Add(quat); } + + return quats; } } diff --git a/LSLib/Granny/Model/CurveData/DaKeyframes32f.cs b/LSLib/Granny/Model/CurveData/DaKeyframes32f.cs index 
6b065dd2..0195a179 100644 --- a/LSLib/Granny/Model/CurveData/DaKeyframes32f.cs +++ b/LSLib/Granny/Model/CurveData/DaKeyframes32f.cs @@ -4,131 +4,130 @@ using OpenTK.Mathematics; using LSLib.Granny.GR2; -namespace LSLib.Granny.Model.CurveData +namespace LSLib.Granny.Model.CurveData; + +public class DaKeyframes32f : AnimationCurveData { - public class DaKeyframes32f : AnimationCurveData + [Serialization(Type = MemberType.Inline)] + public CurveDataHeader CurveDataHeader_DaKeyframes32f; + public Int16 Dimension; + [Serialization(Prototype = typeof(ControlReal32), Kind = SerializationKind.UserMember, Serializer = typeof(SingleListSerializer))] + public List Controls; + + public ExportType CurveType() { - [Serialization(Type = MemberType.Inline)] - public CurveDataHeader CurveDataHeader_DaKeyframes32f; - public Int16 Dimension; - [Serialization(Prototype = typeof(ControlReal32), Kind = SerializationKind.UserMember, Serializer = typeof(SingleListSerializer))] - public List Controls; + if (Dimension == 3) + return ExportType.Position; + else if (Dimension == 4) + return ExportType.Rotation; + else if (Dimension == 9) + return ExportType.ScaleShear; + else + throw new NotSupportedException("Unsupported DaKeyframes32f dimension number"); + } - public ExportType CurveType() - { - if (Dimension == 3) - return ExportType.Position; - else if (Dimension == 4) - return ExportType.Rotation; - else if (Dimension == 9) - return ExportType.ScaleShear; - else - throw new NotSupportedException("Unsupported DaKeyframes32f dimension number"); - } + public override int NumKnots() + { + return Controls.Count / Dimension; + } - public override int NumKnots() - { - return Controls.Count / Dimension; - } + public override List GetKnots() + { + var knots = new List(NumKnots()); + for (var i = 0; i < NumKnots(); i++) + knots.Add((float)i); - public override List GetKnots() - { - var knots = new List(NumKnots()); - for (var i = 0; i < NumKnots(); i++) - knots.Add((float)i); + return knots; + } - return knots; - } + public void SetKnots(List knots) + { + throw new NotSupportedException("Knots are fixed for DaKeyframes32f curves"); + } - public void SetKnots(List knots) - { - throw new NotSupportedException("Knots are fixed for DaKeyframes32f curves"); - } + public override List GetPoints() + { + if (CurveType() != ExportType.Position) + throw new InvalidOperationException("DaKeyframes32f: This curve is not a position curve!"); - public override List GetPoints() + var numKnots = NumKnots(); + var positions = new List(numKnots); + for (var i = 0; i < numKnots; i++) { - if (CurveType() != ExportType.Position) - throw new InvalidOperationException("DaKeyframes32f: This curve is not a position curve!"); - - var numKnots = NumKnots(); - var positions = new List(numKnots); - for (var i = 0; i < numKnots; i++) - { - var vec = new Vector3( - Controls[i * 3 + 0], - Controls[i * 3 + 1], - Controls[i * 3 + 2] - ); - positions.Add(vec); - } - - return positions; + var vec = new Vector3( + Controls[i * 3 + 0], + Controls[i * 3 + 1], + Controls[i * 3 + 2] + ); + positions.Add(vec); } - public void SetPoints(List points) - { - Controls = points.SelectMany(p => new float[] { p.X, p.Y, p.Z }).ToList(); - } + return positions; + } - public override List GetMatrices() - { - if (CurveType() != ExportType.ScaleShear) - throw new InvalidOperationException("DaKeyframes32f: This curve is not a scale/shear curve!"); - - var numKnots = NumKnots(); - var scaleShear = new List(numKnots); - for (var i = 0; i < numKnots; i++) - { - var mat = new 
Matrix3( - Controls[i * 9 + 0], - Controls[i * 9 + 1], - Controls[i * 9 + 2], - Controls[i * 9 + 3], - Controls[i * 9 + 4], - Controls[i * 9 + 5], - Controls[i * 9 + 6], - Controls[i * 9 + 7], - Controls[i * 9 + 8] - ); - scaleShear.Add(mat); - } - - return scaleShear; - } + public void SetPoints(List points) + { + Controls = points.SelectMany(p => new float[] { p.X, p.Y, p.Z }).ToList(); + } - public void SetMatrices(List matrices) - { - Controls = matrices.SelectMany(m => new float[] { - m[0, 0], m[0, 1], m[0, 2], - m[1, 0], m[1, 1], m[1, 2], - m[2, 0], m[2, 1], m[2, 2] - }).ToList(); - } + public override List GetMatrices() + { + if (CurveType() != ExportType.ScaleShear) + throw new InvalidOperationException("DaKeyframes32f: This curve is not a scale/shear curve!"); - public override List GetQuaternions() + var numKnots = NumKnots(); + var scaleShear = new List(numKnots); + for (var i = 0; i < numKnots; i++) { - if (CurveType() != ExportType.Rotation) - throw new InvalidOperationException("DaKeyframes32f: This curve is not a rotation curve!"); - - var numKnots = NumKnots(); - var rotations = new List(numKnots); - for (var i = 0; i < numKnots; i++) - { - var quat = new Quaternion( - Controls[i * 4 + 0], - Controls[i * 4 + 1], - Controls[i * 4 + 2], - Controls[i * 4 + 3] - ); - rotations.Add(quat); - } - - return rotations; + var mat = new Matrix3( + Controls[i * 9 + 0], + Controls[i * 9 + 1], + Controls[i * 9 + 2], + Controls[i * 9 + 3], + Controls[i * 9 + 4], + Controls[i * 9 + 5], + Controls[i * 9 + 6], + Controls[i * 9 + 7], + Controls[i * 9 + 8] + ); + scaleShear.Add(mat); } - public void SetQuaternions(List quats) + return scaleShear; + } + + public void SetMatrices(List matrices) + { + Controls = matrices.SelectMany(m => new float[] { + m[0, 0], m[0, 1], m[0, 2], + m[1, 0], m[1, 1], m[1, 2], + m[2, 0], m[2, 1], m[2, 2] + }).ToList(); + } + + public override List GetQuaternions() + { + if (CurveType() != ExportType.Rotation) + throw new InvalidOperationException("DaKeyframes32f: This curve is not a rotation curve!"); + + var numKnots = NumKnots(); + var rotations = new List(numKnots); + for (var i = 0; i < numKnots; i++) { - Controls = quats.SelectMany(q => new float[] { q.X, q.Y, q.Z, q.W }).ToList(); + var quat = new Quaternion( + Controls[i * 4 + 0], + Controls[i * 4 + 1], + Controls[i * 4 + 2], + Controls[i * 4 + 3] + ); + rotations.Add(quat); } + + return rotations; + } + + public void SetQuaternions(List quats) + { + Controls = quats.SelectMany(q => new float[] { q.X, q.Y, q.Z, q.W }).ToList(); } } diff --git a/LSLib/Granny/Model/DivinityMesh.cs b/LSLib/Granny/Model/DivinityMesh.cs index 2aa64c10..8ffa3590 100644 --- a/LSLib/Granny/Model/DivinityMesh.cs +++ b/LSLib/Granny/Model/DivinityMesh.cs @@ -2,403 +2,402 @@ using System; using System.Collections.Generic; -namespace LSLib.Granny.Model +namespace LSLib.Granny.Model; + +[Flags] +public enum DivinityModelFlag { - [Flags] - public enum DivinityModelFlag - { - MeshProxy = 0x01, - Cloth = 0x02, - HasProxyGeometry = 0x04, - HasColor = 0x08, - Skinned = 0x10, - Rigid = 0x20, - Spring = 0x40, - Occluder = 0x80 - }; - - - [Flags] - public enum DivinityClothFlag + MeshProxy = 0x01, + Cloth = 0x02, + HasProxyGeometry = 0x04, + HasColor = 0x08, + Skinned = 0x10, + Rigid = 0x20, + Spring = 0x40, + Occluder = 0x80 +}; + + +[Flags] +public enum DivinityClothFlag +{ + // Unknown flags, possibly related to nudity filters + Cloth01 = 0x01, + Cloth02 = 0x02, + Cloth04 = 0x04, + ClothPhysics = 0x100 +}; + +public static class 
DivinityModelFlagMethods +{ + public static bool IsMeshProxy(this DivinityModelFlag flag) { - // Unknown flags, possibly related to nudity filters - Cloth01 = 0x01, - Cloth02 = 0x02, - Cloth04 = 0x04, - ClothPhysics = 0x100 - }; - - public static class DivinityModelFlagMethods + return (flag & DivinityModelFlag.MeshProxy) == DivinityModelFlag.MeshProxy; + } + + public static bool IsCloth(this DivinityModelFlag flag) { - public static bool IsMeshProxy(this DivinityModelFlag flag) - { - return (flag & DivinityModelFlag.MeshProxy) == DivinityModelFlag.MeshProxy; - } + return (flag & DivinityModelFlag.Cloth) == DivinityModelFlag.Cloth; + } - public static bool IsCloth(this DivinityModelFlag flag) - { - return (flag & DivinityModelFlag.Cloth) == DivinityModelFlag.Cloth; - } + public static bool HasProxyGeometry(this DivinityModelFlag flag) + { + return (flag & DivinityModelFlag.HasProxyGeometry) == DivinityModelFlag.HasProxyGeometry; + } - public static bool HasProxyGeometry(this DivinityModelFlag flag) - { - return (flag & DivinityModelFlag.HasProxyGeometry) == DivinityModelFlag.HasProxyGeometry; - } + public static bool IsRigid(this DivinityModelFlag flag) + { + return (flag & DivinityModelFlag.Rigid) == DivinityModelFlag.Rigid; + } - public static bool IsRigid(this DivinityModelFlag flag) - { - return (flag & DivinityModelFlag.Rigid) == DivinityModelFlag.Rigid; - } + public static bool IsSpring(this DivinityModelFlag flag) + { + return (flag & DivinityModelFlag.Spring) == DivinityModelFlag.Spring; + } - public static bool IsSpring(this DivinityModelFlag flag) - { - return (flag & DivinityModelFlag.Spring) == DivinityModelFlag.Spring; - } + public static bool IsOccluder(this DivinityModelFlag flag) + { + return (flag & DivinityModelFlag.Occluder) == DivinityModelFlag.Occluder; + } - public static bool IsOccluder(this DivinityModelFlag flag) - { - return (flag & DivinityModelFlag.Occluder) == DivinityModelFlag.Occluder; - } + public static bool HasClothFlag01(this DivinityClothFlag flag) + { + return (flag & DivinityClothFlag.Cloth01) == DivinityClothFlag.Cloth01; + } - public static bool HasClothFlag01(this DivinityClothFlag flag) - { - return (flag & DivinityClothFlag.Cloth01) == DivinityClothFlag.Cloth01; - } + public static bool HasClothFlag02(this DivinityClothFlag flag) + { + return (flag & DivinityClothFlag.Cloth02) == DivinityClothFlag.Cloth02; + } - public static bool HasClothFlag02(this DivinityClothFlag flag) - { - return (flag & DivinityClothFlag.Cloth02) == DivinityClothFlag.Cloth02; - } + public static bool HasClothFlag04(this DivinityClothFlag flag) + { + return (flag & DivinityClothFlag.Cloth04) == DivinityClothFlag.Cloth04; + } - public static bool HasClothFlag04(this DivinityClothFlag flag) - { - return (flag & DivinityClothFlag.Cloth04) == DivinityClothFlag.Cloth04; - } + public static bool HasClothPhysics(this DivinityClothFlag flag) + { + return (flag & DivinityClothFlag.ClothPhysics) == DivinityClothFlag.ClothPhysics; + } +} - public static bool HasClothPhysics(this DivinityClothFlag flag) +public enum DivinityVertexUsage +{ + None = 0, + Position = 1, + TexCoord = 2, + QTangent = 3, + Normal = 3, // The same value is reused for QTangents + Tangent = 4, + Binormal = 5, + BoneWeights = 6, + BoneIndices = 7, + Color = 8 +}; + +public enum DivinityVertexAttributeFormat +{ + Real32 = 0, + UInt32 = 1, + Int32 = 2, + Real16 = 3, + NormalUInt16 = 4, + UInt16 = 5, + BinormalInt16 = 6, + Int16 = 7, + NormalUInt8 = 8, + UInt8 = 9, + BinormalInt8 = 10, + Int8 = 11 +}; + +public 
class DivinityFormatDesc +{ + [Serialization(ArraySize = 1)] + public SByte[] Stream; + [Serialization(ArraySize = 1)] + public Byte[] Usage; + [Serialization(ArraySize = 1)] + public Byte[] UsageIndex; + [Serialization(ArraySize = 1)] + public Byte[] RefType; + [Serialization(ArraySize = 1)] + public Byte[] Format; + [Serialization(ArraySize = 1)] + public Byte[] Size; + + private static DivinityFormatDesc Make(DivinityVertexUsage usage, DivinityVertexAttributeFormat format, Byte size, Byte usageIndex = 0) + { + return new DivinityFormatDesc { - return (flag & DivinityClothFlag.ClothPhysics) == DivinityClothFlag.ClothPhysics; - } + Stream = [0], + Usage = [(byte)usage], + UsageIndex = [usageIndex], + RefType = [0], + Format = [(byte)format], + Size = [size] + }; } - public enum DivinityVertexUsage - { - None = 0, - Position = 1, - TexCoord = 2, - QTangent = 3, - Normal = 3, // The same value is reused for QTangents - Tangent = 4, - Binormal = 5, - BoneWeights = 6, - BoneIndices = 7, - Color = 8 - }; - - public enum DivinityVertexAttributeFormat - { - Real32 = 0, - UInt32 = 1, - Int32 = 2, - Real16 = 3, - NormalUInt16 = 4, - UInt16 = 5, - BinormalInt16 = 6, - Int16 = 7, - NormalUInt8 = 8, - UInt8 = 9, - BinormalInt8 = 10, - Int8 = 11 - }; - - public class DivinityFormatDesc + public static List FromVertexFormat(VertexDescriptor format) { - [Serialization(ArraySize = 1)] - public SByte[] Stream; - [Serialization(ArraySize = 1)] - public Byte[] Usage; - [Serialization(ArraySize = 1)] - public Byte[] UsageIndex; - [Serialization(ArraySize = 1)] - public Byte[] RefType; - [Serialization(ArraySize = 1)] - public Byte[] Format; - [Serialization(ArraySize = 1)] - public Byte[] Size; - - private static DivinityFormatDesc Make(DivinityVertexUsage usage, DivinityVertexAttributeFormat format, Byte size, Byte usageIndex = 0) + var formats = new List(); + if (format.PositionType != PositionType.None) { - return new DivinityFormatDesc - { - Stream = [0], - Usage = [(byte)usage], - UsageIndex = [usageIndex], - RefType = [0], - Format = [(byte)format], - Size = [size] - }; + formats.Add(Make(DivinityVertexUsage.Position, DivinityVertexAttributeFormat.Real32, 3)); } - public static List FromVertexFormat(VertexDescriptor format) + if (format.HasBoneWeights) { - var formats = new List(); - if (format.PositionType != PositionType.None) - { - formats.Add(Make(DivinityVertexUsage.Position, DivinityVertexAttributeFormat.Real32, 3)); - } + formats.Add(Make(DivinityVertexUsage.BoneWeights, DivinityVertexAttributeFormat.NormalUInt8, (byte)format.NumBoneInfluences)); + formats.Add(Make(DivinityVertexUsage.BoneIndices, DivinityVertexAttributeFormat.UInt8, (byte)format.NumBoneInfluences)); + } - if (format.HasBoneWeights) + if (format.NormalType != NormalType.None) + { + if (format.NormalType == NormalType.QTangent) { - formats.Add(Make(DivinityVertexUsage.BoneWeights, DivinityVertexAttributeFormat.NormalUInt8, (byte)format.NumBoneInfluences)); - formats.Add(Make(DivinityVertexUsage.BoneIndices, DivinityVertexAttributeFormat.UInt8, (byte)format.NumBoneInfluences)); + formats.Add(Make(DivinityVertexUsage.QTangent, DivinityVertexAttributeFormat.BinormalInt16, 4)); } - - if (format.NormalType != NormalType.None) + else if (format.NormalType == NormalType.Float3) { - if (format.NormalType == NormalType.QTangent) - { - formats.Add(Make(DivinityVertexUsage.QTangent, DivinityVertexAttributeFormat.BinormalInt16, 4)); - } - else if (format.NormalType == NormalType.Float3) + formats.Add(Make(DivinityVertexUsage.Normal, 
DivinityVertexAttributeFormat.Real32, 3)); + if (format.TangentType == NormalType.Float3) { - formats.Add(Make(DivinityVertexUsage.Normal, DivinityVertexAttributeFormat.Real32, 3)); - if (format.TangentType == NormalType.Float3) - { - formats.Add(Make(DivinityVertexUsage.Tangent, DivinityVertexAttributeFormat.Real32, 3)); - } - if (format.BinormalType == NormalType.Float3) - { - formats.Add(Make(DivinityVertexUsage.Binormal, DivinityVertexAttributeFormat.Real32, 3)); - } + formats.Add(Make(DivinityVertexUsage.Tangent, DivinityVertexAttributeFormat.Real32, 3)); } - else + if (format.BinormalType == NormalType.Float3) { - throw new InvalidOperationException($"Normal format not supported in LSM: {format.NormalType}"); + formats.Add(Make(DivinityVertexUsage.Binormal, DivinityVertexAttributeFormat.Real32, 3)); } } + else + { + throw new InvalidOperationException($"Normal format not supported in LSM: {format.NormalType}"); + } + } - if (format.ColorMapType != ColorMapType.None) + if (format.ColorMapType != ColorMapType.None) + { + if (format.ColorMapType == ColorMapType.Byte4) { - if (format.ColorMapType == ColorMapType.Byte4) - { - for (int i = 0; i < format.ColorMaps; i++) - { - formats.Add(Make(DivinityVertexUsage.Color, DivinityVertexAttributeFormat.NormalUInt8, 4, (byte)i)); - } - } - else if (format.ColorMapType == ColorMapType.Float4) + for (int i = 0; i < format.ColorMaps; i++) { - for (int i = 0; i < format.ColorMaps; i++) - { - formats.Add(Make(DivinityVertexUsage.Color, DivinityVertexAttributeFormat.Real32, 4, (byte)i)); - } + formats.Add(Make(DivinityVertexUsage.Color, DivinityVertexAttributeFormat.NormalUInt8, 4, (byte)i)); } - else + } + else if (format.ColorMapType == ColorMapType.Float4) + { + for (int i = 0; i < format.ColorMaps; i++) { - throw new InvalidOperationException($"Color format not supported in LSM: {format.ColorMapType}"); + formats.Add(Make(DivinityVertexUsage.Color, DivinityVertexAttributeFormat.Real32, 4, (byte)i)); } } + else + { + throw new InvalidOperationException($"Color format not supported in LSM: {format.ColorMapType}"); + } + } - if (format.TextureCoordinateType != TextureCoordinateType.None) + if (format.TextureCoordinateType != TextureCoordinateType.None) + { + if (format.TextureCoordinateType == TextureCoordinateType.Half2) { - if (format.TextureCoordinateType == TextureCoordinateType.Half2) - { - for (int i = 0; i < format.TextureCoordinates; i++) - { - formats.Add(Make(DivinityVertexUsage.TexCoord, DivinityVertexAttributeFormat.Real16, 2, (byte)i)); - } - } - else if (format.TextureCoordinateType == TextureCoordinateType.Float2) + for (int i = 0; i < format.TextureCoordinates; i++) { - for (int i = 0; i < format.TextureCoordinates; i++) - { - formats.Add(Make(DivinityVertexUsage.TexCoord, DivinityVertexAttributeFormat.Real32, 2, (byte)i)); - } + formats.Add(Make(DivinityVertexUsage.TexCoord, DivinityVertexAttributeFormat.Real16, 2, (byte)i)); } - else + } + else if (format.TextureCoordinateType == TextureCoordinateType.Float2) + { + for (int i = 0; i < format.TextureCoordinates; i++) { - throw new InvalidOperationException($"UV format not supported in LSM: {format.TextureCoordinateType}"); + formats.Add(Make(DivinityVertexUsage.TexCoord, DivinityVertexAttributeFormat.Real32, 2, (byte)i)); } } - - return formats; + else + { + throw new InvalidOperationException($"UV format not supported in LSM: {format.TextureCoordinateType}"); + } } + + return formats; } +} - public class DivinityMeshProperties +public class DivinityMeshProperties +{ + 
[Serialization(ArraySize = 4)] + public UInt32[] Flags; + [Serialization(ArraySize = 1)] + public Int32[] Lod; + public List FormatDescs; + [Serialization(Type = MemberType.VariantReference)] + public object ExtendedData; + [Serialization(ArraySize = 1)] + public float[] LodDistance; + [Serialization(ArraySize = 1)] + public Int32[] IsImpostor; + + public DivinityModelFlag MeshFlags { - [Serialization(ArraySize = 4)] - public UInt32[] Flags; - [Serialization(ArraySize = 1)] - public Int32[] Lod; - public List FormatDescs; - [Serialization(Type = MemberType.VariantReference)] - public object ExtendedData; - [Serialization(ArraySize = 1)] - public float[] LodDistance; - [Serialization(ArraySize = 1)] - public Int32[] IsImpostor; - - public DivinityModelFlag MeshFlags - { - get { return (DivinityModelFlag)Flags[0]; } - set { Flags[0] = (UInt32)value; } - } + get { return (DivinityModelFlag)Flags[0]; } + set { Flags[0] = (UInt32)value; } + } - public DivinityClothFlag ClothFlags - { - get { return (DivinityClothFlag)Flags[2]; } - set { Flags[2] = (UInt32)value; } - } + public DivinityClothFlag ClothFlags + { + get { return (DivinityClothFlag)Flags[2]; } + set { Flags[2] = (UInt32)value; } } +} - public class DivinityMeshExtendedData +public class DivinityMeshExtendedData +{ + const Int32 CurrentLSMVersion = 3; + + public Int32 MeshProxy; + public Int32 Rigid; + public Int32 Cloth; + public Int32 Spring; + public Int32 Occluder; + public Int32 LOD; + public string UserDefinedProperties; + public DivinityMeshProperties UserMeshProperties; + public Int32 LSMVersion; + + public static DivinityMeshExtendedData Make() { - const Int32 CurrentLSMVersion = 3; - - public Int32 MeshProxy; - public Int32 Rigid; - public Int32 Cloth; - public Int32 Spring; - public Int32 Occluder; - public Int32 LOD; - public string UserDefinedProperties; - public DivinityMeshProperties UserMeshProperties; - public Int32 LSMVersion; - - public static DivinityMeshExtendedData Make() + return new DivinityMeshExtendedData { - return new DivinityMeshExtendedData + Rigid = 0, + Cloth = 0, + Spring = 0, + Occluder = 0, + LOD = 0, + UserDefinedProperties = "", + UserMeshProperties = new DivinityMeshProperties { - Rigid = 0, - Cloth = 0, - Spring = 0, - Occluder = 0, - LOD = 0, - UserDefinedProperties = "", - UserMeshProperties = new DivinityMeshProperties - { - Flags = [0, 0, 0, 0], - Lod = [-1], - FormatDescs = null, - ExtendedData = null, - LodDistance = [3.40282347E+38f], - IsImpostor = [0] - }, - LSMVersion = CurrentLSMVersion - }; + Flags = [0, 0, 0, 0], + Lod = [-1], + FormatDescs = null, + ExtendedData = null, + LodDistance = [3.40282347E+38f], + IsImpostor = [0] + }, + LSMVersion = CurrentLSMVersion + }; + } + + + public void UpdateFromModelInfo(Mesh mesh, DivinityModelInfoFormat format) + { + DivinityModelFlag meshFlags = 0; + if (UserMeshProperties != null) + { + meshFlags = UserMeshProperties.MeshFlags; } + if (mesh.VertexFormat.HasBoneWeights) + { + meshFlags |= DivinityModelFlag.Skinned; + } - public void UpdateFromModelInfo(Mesh mesh, DivinityModelInfoFormat format) + if (mesh.VertexFormat.ColorMaps > 0) { - DivinityModelFlag meshFlags = 0; - if (UserMeshProperties != null) - { - meshFlags = UserMeshProperties.MeshFlags; - } + meshFlags |= DivinityModelFlag.HasColor; + } + else + { + meshFlags &= ~DivinityModelFlag.Cloth; + } - if (mesh.VertexFormat.HasBoneWeights) - { - meshFlags |= DivinityModelFlag.Skinned; - } + if (format == DivinityModelInfoFormat.UserDefinedProperties) + { + LSMVersion = 0; + 
UserMeshProperties = null; + UserDefinedProperties = + UserDefinedPropertiesHelpers.MeshFlagsToUserDefinedProperties(meshFlags); + } + else + { + UserMeshProperties.MeshFlags = meshFlags; - if (mesh.VertexFormat.ColorMaps > 0) + if (format == DivinityModelInfoFormat.LSMv3) { - meshFlags |= DivinityModelFlag.HasColor; + LSMVersion = 3; + UserMeshProperties.FormatDescs = DivinityFormatDesc.FromVertexFormat(mesh.VertexFormat); } - else + else if (format == DivinityModelInfoFormat.LSMv1) { - meshFlags &= ~DivinityModelFlag.Cloth; - } - - if (format == DivinityModelInfoFormat.UserDefinedProperties) - { - LSMVersion = 0; - UserMeshProperties = null; - UserDefinedProperties = - UserDefinedPropertiesHelpers.MeshFlagsToUserDefinedProperties(meshFlags); + LSMVersion = 1; + UserMeshProperties.FormatDescs = DivinityFormatDesc.FromVertexFormat(mesh.VertexFormat); } else { - UserMeshProperties.MeshFlags = meshFlags; - - if (format == DivinityModelInfoFormat.LSMv3) - { - LSMVersion = 3; - UserMeshProperties.FormatDescs = DivinityFormatDesc.FromVertexFormat(mesh.VertexFormat); - } - else if (format == DivinityModelInfoFormat.LSMv1) - { - LSMVersion = 1; - UserMeshProperties.FormatDescs = DivinityFormatDesc.FromVertexFormat(mesh.VertexFormat); - } - else - { - LSMVersion = 0; - UserMeshProperties.FormatDescs = []; - } + LSMVersion = 0; + UserMeshProperties.FormatDescs = []; } } } +} - public class BG3TrackGroupExtendedData - { - public string SkeletonResourceID; - } +public class BG3TrackGroupExtendedData +{ + public string SkeletonResourceID; +} + +public static class UserDefinedPropertiesHelpers +{ + // The GR2 loader checks for this exact string, including spaces. + public const string UserDefinedProperties_Rigid = "Rigid = true"; + // The GR2 loader checks for this exact string. + public const string UserDefinedProperties_Cloth = "Cloth=true"; + public const string UserDefinedProperties_MeshProxy = "MeshProxy=true"; - public static class UserDefinedPropertiesHelpers + public static string MeshFlagsToUserDefinedProperties(DivinityModelFlag meshFlags) { - // The GR2 loader checks for this exact string, including spaces. - public const string UserDefinedProperties_Rigid = "Rigid = true"; - // The GR2 loader checks for this exact string. - public const string UserDefinedProperties_Cloth = "Cloth=true"; - public const string UserDefinedProperties_MeshProxy = "MeshProxy=true"; + List properties = new(); + if (meshFlags.IsRigid()) + { + properties.Add(UserDefinedProperties_Rigid); + } - public static string MeshFlagsToUserDefinedProperties(DivinityModelFlag meshFlags) + if (meshFlags.IsCloth()) { - List properties = new(); - if (meshFlags.IsRigid()) - { - properties.Add(UserDefinedProperties_Rigid); - } + properties.Add(UserDefinedProperties_Cloth); + } - if (meshFlags.IsCloth()) - { - properties.Add(UserDefinedProperties_Cloth); - } + if (meshFlags.IsMeshProxy()) + { + properties.Add(UserDefinedProperties_MeshProxy); + } - if (meshFlags.IsMeshProxy()) - { - properties.Add(UserDefinedProperties_MeshProxy); - } + return String.Join("\n", properties); + } - return String.Join("\n", properties); + public static DivinityModelFlag UserDefinedPropertiesToMeshType(string userDefinedProperties) + { + // The D:OS 2 editor uses the ExtendedData attribute to determine whether a model can be + // bound to a character. + // The "Rigid = true" user defined property is checked for rigid bodies (e.g. weapons), the "Cloth=true" + // user defined property is checked for clothes. 
+ DivinityModelFlag flags = 0; + if (userDefinedProperties.Contains("Rigid")) + { + flags |= DivinityModelFlag.Rigid; } - public static DivinityModelFlag UserDefinedPropertiesToMeshType(string userDefinedProperties) + if (userDefinedProperties.Contains("Cloth")) { - // The D:OS 2 editor uses the ExtendedData attribute to determine whether a model can be - // bound to a character. - // The "Rigid = true" user defined property is checked for rigid bodies (e.g. weapons), the "Cloth=true" - // user defined property is checked for clothes. - DivinityModelFlag flags = 0; - if (userDefinedProperties.Contains("Rigid")) - { - flags |= DivinityModelFlag.Rigid; - } - - if (userDefinedProperties.Contains("Cloth")) - { - flags |= DivinityModelFlag.Cloth; - } - - if (userDefinedProperties.Contains("MeshProxy")) - { - flags |= DivinityModelFlag.MeshProxy | DivinityModelFlag.HasProxyGeometry; - } + flags |= DivinityModelFlag.Cloth; + } - return flags; + if (userDefinedProperties.Contains("MeshProxy")) + { + flags |= DivinityModelFlag.MeshProxy | DivinityModelFlag.HasProxyGeometry; } + + return flags; } } diff --git a/LSLib/Granny/Model/Exporter.cs b/LSLib/Granny/Model/Exporter.cs index 3418a879..bcebc546 100644 --- a/LSLib/Granny/Model/Exporter.cs +++ b/LSLib/Granny/Model/Exporter.cs @@ -6,808 +6,807 @@ using LSLib.LS; using LSLib.LS.Enums; -namespace LSLib.Granny.Model +namespace LSLib.Granny.Model; + +public class ExportException(string message) : Exception(message) { - public class ExportException(string message) : Exception(message) - { - } +} - public enum ExportFormat - { - GR2, - DAE - }; +public enum ExportFormat +{ + GR2, + DAE +}; - public enum DivinityModelInfoFormat - { - // No ExtendedInfo on bones and meshes - None, - // User the UserDefinedProperties string to add properties - UserDefinedProperties, - // Use LSM UserMeshProperties - LSMv0, - // Use LSM UserMeshProperties and FormatDescs - LSMv1, - // Use BG3 extended LSM UserMeshProperties and FormatDescs - LSMv3 - }; - - public class ExporterOptions +public enum DivinityModelInfoFormat +{ + // No ExtendedInfo on bones and meshes + None, + // User the UserDefinedProperties string to add properties + UserDefinedProperties, + // Use LSM UserMeshProperties + LSMv0, + // Use LSM UserMeshProperties and FormatDescs + LSMv1, + // Use BG3 extended LSM UserMeshProperties and FormatDescs + LSMv3 +}; + +public class ExporterOptions +{ + public string InputPath; + public Root Input; + public ExportFormat InputFormat; + public string OutputPath; + public ExportFormat OutputFormat; + + // Export 64-bit GR2 + public bool Is64Bit = false; + // Use alternate GR2 signature when saving + // (This is the signature D:OS EE and D:OS 2 uses, but GR2 tools + // don't recognize it as legitimate.) + public bool AlternateSignature = false; + // GR2 run-time tag that that'll appear in the output file + // If the GR2 tag doesn't match, the game will convert the GR2 to the latest tag, + // which is a slow process. The advantage of a mismatched tag is that we don't + // have to 1:1 match the GR2 structs for that version, as it won't just + // memcpy the struct from the GR2 file directly. 
+ public UInt32 VersionTag = GR2.Header.DefaultTag; + // Export vertex normals to DAE/GR2 file + public bool ExportNormals = true; + // Export tangents/binormals to DAE/GR2 file + public bool ExportTangents = true; + // Export UV-s to DAE/GR2 file + public bool ExportUVs = true; + // Export vertex colors to DAE/GR2 file + public bool ExportColors = true; + // Flip the V coord of UV-s (GR2 stores them in flipped format) + public bool FlipUVs = true; + // Recalculate normals, even if they're available in the source mesh + // (They'll be recalculated automatically if unavailable) + public bool RecalculateNormals = false; + // Recalculate tangents/binormals, even if they're available in the source mesh + // (They'll be recalculated automatically if unavailable) + public bool RecalculateTangents = false; + // Recalculate bone inverse world transforms + public bool RecalculateIWT = false; + // Create a dummy skeleton if none exists in the mesh + // Some games will crash if they encounter a mesh without a skeleton + public bool BuildDummySkeleton = false; + // Save 16-bit vertex indices, if possible + public bool CompactIndices = true; + public bool DeduplicateVertices = true; // TODO: Add Collada conforming vert. handling as well + public bool DeduplicateUVs = true; // TODO: UNHANDLED + public bool ApplyBasisTransforms = true; + // Use an obsolete version tag to prevent Granny from memory mapping the structs + public bool UseObsoleteVersionTag = false; + public string ConformGR2Path; + public bool ConformSkeletons = true; + public bool ConformSkeletonsCopy = false; + public bool ConformAnimations = true; + public bool ConformMeshBoneBindings = true; + public bool ConformModels = true; + public Dictionary VertexFormats = []; + // Extended model info format to use when exporting to D:OS + public DivinityModelInfoFormat ModelInfoFormat = DivinityModelInfoFormat.None; + // Model flags to use when exporting + public DivinityModelFlag ModelType = 0; + // Remove unused metadata from the GR2 file + public bool StripMetadata = true; + // Flip mesh on X axis + public bool FlipMesh = false; + // Flip skeleton on X axis + public bool FlipSkeleton = false; + // Apply Y-up transforms on skeletons? + public bool TransformSkeletons = true; + // Ignore cases where we couldn't calculate tangents from UVs because of non-manifold geometry + public bool IgnoreUVNaN = false; + // Remove animation keys that are a linear interpolation of the preceding and following keys + // Disabled by default, as D:OS doesn't support sparse knot values in anim curves. + public bool RemoveTrivialAnimationKeys = false; + // Recalculate mesh bone binding OBBs + public bool RecalculateOBBs = false; + // Allow encoding tangents/binormals as QTangents + // See: Spherical Skinning with Dual-Quaternions and QTangents, Crytek R&D + public bool EnableQTangents = true; + + public List DisabledAnimations = []; + public List DisabledModels = []; + public List DisabledSkeletons = []; + + public void LoadGameSettings(Game game) { - public string InputPath; - public Root Input; - public ExportFormat InputFormat; - public string OutputPath; - public ExportFormat OutputFormat; - - // Export 64-bit GR2 - public bool Is64Bit = false; - // Use alternate GR2 signature when saving - // (This is the signature D:OS EE and D:OS 2 uses, but GR2 tools - // don't recognize it as legitimate.) 
- public bool AlternateSignature = false; - // GR2 run-time tag that that'll appear in the output file - // If the GR2 tag doesn't match, the game will convert the GR2 to the latest tag, - // which is a slow process. The advantage of a mismatched tag is that we don't - // have to 1:1 match the GR2 structs for that version, as it won't just - // memcpy the struct from the GR2 file directly. - public UInt32 VersionTag = GR2.Header.DefaultTag; - // Export vertex normals to DAE/GR2 file - public bool ExportNormals = true; - // Export tangents/binormals to DAE/GR2 file - public bool ExportTangents = true; - // Export UV-s to DAE/GR2 file - public bool ExportUVs = true; - // Export vertex colors to DAE/GR2 file - public bool ExportColors = true; - // Flip the V coord of UV-s (GR2 stores them in flipped format) - public bool FlipUVs = true; - // Recalculate normals, even if they're available in the source mesh - // (They'll be recalculated automatically if unavailable) - public bool RecalculateNormals = false; - // Recalculate tangents/binormals, even if they're available in the source mesh - // (They'll be recalculated automatically if unavailable) - public bool RecalculateTangents = false; - // Recalculate bone inverse world transforms - public bool RecalculateIWT = false; - // Create a dummy skeleton if none exists in the mesh - // Some games will crash if they encounter a mesh without a skeleton - public bool BuildDummySkeleton = false; - // Save 16-bit vertex indices, if possible - public bool CompactIndices = true; - public bool DeduplicateVertices = true; // TODO: Add Collada conforming vert. handling as well - public bool DeduplicateUVs = true; // TODO: UNHANDLED - public bool ApplyBasisTransforms = true; - // Use an obsolete version tag to prevent Granny from memory mapping the structs - public bool UseObsoleteVersionTag = false; - public string ConformGR2Path; - public bool ConformSkeletons = true; - public bool ConformSkeletonsCopy = false; - public bool ConformAnimations = true; - public bool ConformMeshBoneBindings = true; - public bool ConformModels = true; - public Dictionary VertexFormats = []; - // Extended model info format to use when exporting to D:OS - public DivinityModelInfoFormat ModelInfoFormat = DivinityModelInfoFormat.None; - // Model flags to use when exporting - public DivinityModelFlag ModelType = 0; - // Remove unused metadata from the GR2 file - public bool StripMetadata = true; - // Flip mesh on X axis - public bool FlipMesh = false; - // Flip skeleton on X axis - public bool FlipSkeleton = false; - // Apply Y-up transforms on skeletons? - public bool TransformSkeletons = true; - // Ignore cases where we couldn't calculate tangents from UVs because of non-manifold geometry - public bool IgnoreUVNaN = false; - // Remove animation keys that are a linear interpolation of the preceding and following keys - // Disabled by default, as D:OS doesn't support sparse knot values in anim curves. 
- public bool RemoveTrivialAnimationKeys = false; - // Recalculate mesh bone binding OBBs - public bool RecalculateOBBs = false; - // Allow encoding tangents/binormals as QTangents - // See: Spherical Skinning with Dual-Quaternions and QTangents, Crytek R&D - public bool EnableQTangents = true; - - public List DisabledAnimations = []; - public List DisabledModels = []; - public List DisabledSkeletons = []; - - public void LoadGameSettings(Game game) - { - switch (game) - { - case Game.DivinityOriginalSin: - Is64Bit = false; - AlternateSignature = false; - VersionTag = Header.Tag_DOS; - ModelInfoFormat = DivinityModelInfoFormat.None; - break; - case Game.DivinityOriginalSinEE: - Is64Bit = true; - AlternateSignature = true; - VersionTag = Header.Tag_DOSEE; - ModelInfoFormat = DivinityModelInfoFormat.UserDefinedProperties; - break; - case Game.DivinityOriginalSin2: - Is64Bit = true; - AlternateSignature = true; - VersionTag = Header.Tag_DOSEE; - ModelInfoFormat = DivinityModelInfoFormat.LSMv1; - break; - case Game.BaldursGate3: - Is64Bit = true; - AlternateSignature = false; - VersionTag = Header.Tag_DOSEE; - ModelInfoFormat = DivinityModelInfoFormat.LSMv3; - break; - case Game.DivinityOriginalSin2DE: - default: - Is64Bit = true; - AlternateSignature = true; - VersionTag = Header.Tag_DOS2DE; - ModelInfoFormat = DivinityModelInfoFormat.LSMv1; - break; - } + switch (game) + { + case Game.DivinityOriginalSin: + Is64Bit = false; + AlternateSignature = false; + VersionTag = Header.Tag_DOS; + ModelInfoFormat = DivinityModelInfoFormat.None; + break; + case Game.DivinityOriginalSinEE: + Is64Bit = true; + AlternateSignature = true; + VersionTag = Header.Tag_DOSEE; + ModelInfoFormat = DivinityModelInfoFormat.UserDefinedProperties; + break; + case Game.DivinityOriginalSin2: + Is64Bit = true; + AlternateSignature = true; + VersionTag = Header.Tag_DOSEE; + ModelInfoFormat = DivinityModelInfoFormat.LSMv1; + break; + case Game.BaldursGate3: + Is64Bit = true; + AlternateSignature = false; + VersionTag = Header.Tag_DOSEE; + ModelInfoFormat = DivinityModelInfoFormat.LSMv3; + break; + case Game.DivinityOriginalSin2DE: + default: + Is64Bit = true; + AlternateSignature = true; + VersionTag = Header.Tag_DOS2DE; + ModelInfoFormat = DivinityModelInfoFormat.LSMv1; + break; } } +} - public class Exporter - { - public ExporterOptions Options = new ExporterOptions(); - private Root Root; +public class Exporter +{ + public ExporterOptions Options = new ExporterOptions(); + private Root Root; - private Root LoadGR2(string inPath) - { - var root = new Root(); - FileStream fs = File.Open(inPath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite); - var gr2 = new GR2Reader(fs); - gr2.Read(root); - root.PostLoad(gr2.Tag); - fs.Close(); - fs.Dispose(); - return root; - } + private Root LoadGR2(string inPath) + { + var root = new Root(); + FileStream fs = File.Open(inPath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite); + var gr2 = new GR2Reader(fs); + gr2.Read(root); + root.PostLoad(gr2.Tag); + fs.Close(); + fs.Dispose(); + return root; + } - private Root LoadDAE(string inPath) + private Root LoadDAE(string inPath) + { + var importer = new ColladaImporter { - var importer = new ColladaImporter - { - Options = Options - }; - return importer.Import(inPath); - } + Options = Options + }; + return importer.Import(inPath); + } - private Root Load(string inPath, ExportFormat format) + private Root Load(string inPath, ExportFormat format) + { + switch (format) { - switch (format) - { - case ExportFormat.GR2: - return 
LoadGR2(inPath); + case ExportFormat.GR2: + return LoadGR2(inPath); - case ExportFormat.DAE: - return LoadDAE(inPath); + case ExportFormat.DAE: + return LoadDAE(inPath); - default: - throw new NotImplementedException("Unsupported input format"); - } + default: + throw new NotImplementedException("Unsupported input format"); } + } - private void SaveGR2(string outPath, Root root) + private void SaveGR2(string outPath, Root root) + { + root.PreSave(); + var writer = new GR2Writer { - root.PreSave(); - var writer = new GR2Writer - { - Format = Options.Is64Bit ? Magic.Format.LittleEndian64 : Magic.Format.LittleEndian32, - AlternateMagic = Options.AlternateSignature, - VersionTag = Options.VersionTag - }; - - if (Options.UseObsoleteVersionTag) - { - // Use an obsolete version tag to prevent Granny from memory mapping the structs - writer.VersionTag -= 1; - } + Format = Options.Is64Bit ? Magic.Format.LittleEndian64 : Magic.Format.LittleEndian32, + AlternateMagic = Options.AlternateSignature, + VersionTag = Options.VersionTag + }; - var body = writer.Write(root, (root.Meshes != null) ? (uint)root.Meshes.Count : 0); - writer.Dispose(); - - FileStream f = File.Open(outPath, FileMode.Create, System.IO.FileAccess.Write, FileShare.None); - f.Write(body, 0, body.Length); - f.Close(); - f.Dispose(); - } - - private void SaveDAE(Root root, ExporterOptions options) + if (Options.UseObsoleteVersionTag) { - var exporter = new ColladaExporter - { - Options = options - }; - exporter.Export(root, options.OutputPath); + // Use an obsolete version tag to prevent Granny from memory mapping the structs + writer.VersionTag -= 1; } - private void Save(Root root, ExporterOptions options) - { - switch (options.OutputFormat) - { - case ExportFormat.GR2: - FileManager.TryToCreateDirectory(options.OutputPath); - SaveGR2(options.OutputPath, root); - break; - - case ExportFormat.DAE: - SaveDAE(root, options); - break; + var body = writer.Write(root, (root.Meshes != null) ? (uint)root.Meshes.Count : 0); + writer.Dispose(); - default: - throw new NotImplementedException("Unsupported output format"); - } - } + FileStream f = File.Open(outPath, FileMode.Create, System.IO.FileAccess.Write, FileShare.None); + f.Write(body, 0, body.Length); + f.Close(); + f.Dispose(); + } - private void GenerateDummySkeleton(Root root) + private void SaveDAE(Root root, ExporterOptions options) + { + var exporter = new ColladaExporter { - foreach (var model in root.Models) - { - if (model.Skeleton == null) - { - Utils.Info($"Generating dummy skeleton for model '{model.Name}'"); - var bone = new Bone - { - Name = model.Name, - ParentIndex = -1, - Transform = new Transform() - }; - - var skeleton = new Skeleton - { - Name = model.Name, - LODType = 1, - IsDummy = true, - Bones = [bone] - }; - root.Skeletons.Add(skeleton); + Options = options + }; + exporter.Export(root, options.OutputPath); + } - // TODO: Transform / IWT is not always identity on dummy bones! 
- skeleton.UpdateWorldTransforms(); - model.Skeleton = skeleton; + private void Save(Root root, ExporterOptions options) + { + switch (options.OutputFormat) + { + case ExportFormat.GR2: + FileManager.TryToCreateDirectory(options.OutputPath); + SaveGR2(options.OutputPath, root); + break; - foreach (var mesh in model.MeshBindings) - { - if (mesh.Mesh.BoneBindings != null && mesh.Mesh.BoneBindings.Count > 0) - { - throw new ParsingException("Failed to generate dummy skeleton: Mesh already has bone bindings."); - } + case ExportFormat.DAE: + SaveDAE(root, options); + break; - var binding = new BoneBinding - { - BoneName = bone.Name, - // TODO: Calculate bounding box! - // Use small bounding box values, as it interferes with object placement - // in D:OS 2 (after the Gift Bag 2 update) - OBBMin = [-0.1f, -0.1f, -0.1f], - OBBMax = [0.1f, 0.1f, 0.1f] - }; - mesh.Mesh.BoneBindings = [binding]; - } - } - } + default: + throw new NotImplementedException("Unsupported output format"); } + } - private void ConformAnimationBindPoses(Skeleton skeleton, Skeleton conformToSkeleton) + private void GenerateDummySkeleton(Root root) + { + foreach (var model in root.Models) { - if (Root.TrackGroups == null) return; - - foreach (var trackGroup in Root.TrackGroups) + if (model.Skeleton == null) { - for (var i = 0; i < trackGroup.TransformTracks.Count; i++) + Utils.Info($"Generating dummy skeleton for model '{model.Name}'"); + var bone = new Bone { - var track = trackGroup.TransformTracks[i]; - var bone = skeleton.GetBoneByName(track.Name); - //Dummy_Foot -> Dummy_Foot_01 - bone ??= skeleton.GetBoneByName(track.Name + "_01"); + Name = model.Name, + ParentIndex = -1, + Transform = new Transform() + }; - if (bone == null) + var skeleton = new Skeleton + { + Name = model.Name, + LODType = 1, + IsDummy = true, + Bones = [bone] + }; + root.Skeletons.Add(skeleton); + + // TODO: Transform / IWT is not always identity on dummy bones! + skeleton.UpdateWorldTransforms(); + model.Skeleton = skeleton; + + foreach (var mesh in model.MeshBindings) + { + if (mesh.Mesh.BoneBindings != null && mesh.Mesh.BoneBindings.Count > 0) { - throw new ExportException($"Animation track references bone '{track.Name}' that cannot be found in the skeleton '{skeleton.Name}'."); + throw new ParsingException("Failed to generate dummy skeleton: Mesh already has bone bindings."); } - var conformingBone = conformToSkeleton.GetBoneByName(bone.Name); - if (conformingBone == null) + var binding = new BoneBinding { - throw new ExportException($"Animation track references bone '{bone.Name}' that cannot be found in the conforming skeleton '{conformToSkeleton.Name}'."); - } - - var keyframes = track.ToKeyframes(); - keyframes.SwapBindPose(bone.OriginalTransform, conformingBone.Transform.ToMatrix4()); - var newTrack = TransformTrack.FromKeyframes(keyframes); - newTrack.Flags = track.Flags; - newTrack.Name = track.Name; - newTrack.ParentAnimation = track.ParentAnimation; - trackGroup.TransformTracks[i] = newTrack; + BoneName = bone.Name, + // TODO: Calculate bounding box! 
+ // Use small bounding box values, as it interferes with object placement + // in D:OS 2 (after the Gift Bag 2 update) + OBBMin = [-0.1f, -0.1f, -0.1f], + OBBMax = [0.1f, 0.1f, 0.1f] + }; + mesh.Mesh.BoneBindings = [binding]; } } } + } - private void ConformSkeleton(Skeleton skeleton, Skeleton conformToSkeleton) - { - skeleton.LODType = conformToSkeleton.LODType; + private void ConformAnimationBindPoses(Skeleton skeleton, Skeleton conformToSkeleton) + { + if (Root.TrackGroups == null) return; - // TODO: Tolerate missing bones? - foreach (var conformBone in conformToSkeleton.Bones) + foreach (var trackGroup in Root.TrackGroups) + { + for (var i = 0; i < trackGroup.TransformTracks.Count; i++) { - Bone inputBone = null; - foreach (var bone in skeleton.Bones) - { - if (bone.Name == conformBone.Name) - { - inputBone = bone; - break; - } - } + var track = trackGroup.TransformTracks[i]; + var bone = skeleton.GetBoneByName(track.Name); + //Dummy_Foot -> Dummy_Foot_01 + bone ??= skeleton.GetBoneByName(track.Name + "_01"); - if (inputBone == null) + if (bone == null) { - throw new ExportException($"No matching bone found for conforming bone '{conformBone.Name}' in skeleton '{skeleton.Name}'."); + throw new ExportException($"Animation track references bone '{track.Name}' that cannot be found in the skeleton '{skeleton.Name}'."); } - // Bones must have the same parent. We check this in two steps: - // 1) Either both of them are root bones (no parent index) or none of them are. - if (conformBone.IsRoot != inputBone.IsRoot) + var conformingBone = conformToSkeleton.GetBoneByName(bone.Name); + if (conformingBone == null) { - throw new ExportException($"Cannot map non-root bones to root bone '{conformBone.Name}' for skeleton '{skeleton.Name}'."); + throw new ExportException($"Animation track references bone '{bone.Name}' that cannot be found in the conforming skeleton '{conformToSkeleton.Name}'."); } - // 2) The name of their parent bones is the same (index may differ!) - if (conformBone.ParentIndex != -1) - { - var conformParent = conformToSkeleton.Bones[conformBone.ParentIndex]; - var inputParent = skeleton.Bones[inputBone.ParentIndex]; - if (conformParent.Name != inputParent.Name) - { - throw new ExportException($"Conforming parent ({conformParent.Name}) for bone '{conformBone.Name}' " + - $"differs from input parent ({inputParent.Name}) for skeleton '{skeleton.Name}'."); - } - } + var keyframes = track.ToKeyframes(); + keyframes.SwapBindPose(bone.OriginalTransform, conformingBone.Transform.ToMatrix4()); + var newTrack = TransformTrack.FromKeyframes(keyframes); + newTrack.Flags = track.Flags; + newTrack.Name = track.Name; + newTrack.ParentAnimation = track.ParentAnimation; + trackGroup.TransformTracks[i] = newTrack; + } + } + } + private void ConformSkeleton(Skeleton skeleton, Skeleton conformToSkeleton) + { + skeleton.LODType = conformToSkeleton.LODType; - // The bones match, copy relevant parameters from the conforming skeleton to the input. - inputBone.InverseWorldTransform = conformBone.InverseWorldTransform; - inputBone.LODError = conformBone.LODError; - inputBone.Transform = conformBone.Transform; + // TODO: Tolerate missing bones? 
+ foreach (var conformBone in conformToSkeleton.Bones) + { + Bone inputBone = null; + foreach (var bone in skeleton.Bones) + { + if (bone.Name == conformBone.Name) + { + inputBone = bone; + break; + } } - if (Options.ConformAnimations) + if (inputBone == null) { - ConformAnimationBindPoses(skeleton, conformToSkeleton); + throw new ExportException($"No matching bone found for conforming bone '{conformBone.Name}' in skeleton '{skeleton.Name}'."); } - } - private void ConformSkeletonAnimations(Skeleton skeleton) - { - if (Root.TrackGroups == null) return; - - foreach (var trackGroup in Root.TrackGroups) + // Bones must have the same parent. We check this in two steps: + // 1) Either both of them are root bones (no parent index) or none of them are. + if (conformBone.IsRoot != inputBone.IsRoot) { - foreach (var track in trackGroup.TransformTracks) - { - //Dummy_Foot -> Dummy_Foot_01 - var bone = skeleton.GetBoneByName(track.Name) ?? skeleton.GetBoneByName(track.Name + "_01"); - - if (bone == null) - { - throw new ExportException($"Animation track references bone '{track.Name}' that cannot be found in the skeleton '{skeleton.Name}'."); - } - } + throw new ExportException($"Cannot map non-root bones to root bone '{conformBone.Name}' for skeleton '{skeleton.Name}'."); } - } - private void ConformSkeletons(IEnumerable skeletons) - { - // We don't have any skeletons in this mesh, nothing to conform. - if (Root.Skeletons == null || Root.Skeletons.Count == 0) + // 2) The name of their parent bones is the same (index may differ!) + if (conformBone.ParentIndex != -1) { - // If we're exporting animations without a skeleton, copy the source skeleton - // and check if all animation tracks are referencing existing bones. - if (Root.Animations != null && Root.Animations.Count > 0) + var conformParent = conformToSkeleton.Bones[conformBone.ParentIndex]; + var inputParent = skeleton.Bones[inputBone.ParentIndex]; + if (conformParent.Name != inputParent.Name) { - Root.Skeletons = skeletons.ToList(); - if (Root.Skeletons.Count != 1) - { - throw new ExportException($"Skeleton source file should contain exactly one skeleton. Skeleton Count: '{Root.Skeletons.Count}'."); - } + throw new ExportException($"Conforming parent ({conformParent.Name}) for bone '{conformBone.Name}' " + + $"differs from input parent ({inputParent.Name}) for skeleton '{skeleton.Name}'."); + } + } - var skeleton = Root.Skeletons.First(); - // Generate a dummy model if there isn't one, otherwise we won't - // be able to bind the animations to anything - Root.Models ??= [ - new Model - { - InitialPlacement = new Transform(), - Name = skeleton.Name, - Skeleton = skeleton - } - ]; + // The bones match, copy relevant parameters from the conforming skeleton to the input. 
+ inputBone.InverseWorldTransform = conformBone.InverseWorldTransform; + inputBone.LODError = conformBone.LODError; + inputBone.Transform = conformBone.Transform; + } - ConformSkeletonAnimations(skeleton); - } + if (Options.ConformAnimations) + { + ConformAnimationBindPoses(skeleton, conformToSkeleton); + } + } - return; - } + private void ConformSkeletonAnimations(Skeleton skeleton) + { + if (Root.TrackGroups == null) return; - foreach (var skeleton in Root.Skeletons) + foreach (var trackGroup in Root.TrackGroups) + { + foreach (var track in trackGroup.TransformTracks) { - // Check if there is a matching skeleton in the source file - Skeleton conformingSkel = null; - foreach (var skel in skeletons) - { - if (skel.Name == skeleton.Name) - { - conformingSkel = skel; - break; - } - } - - // Allow name mismatches if there is only 1 skeleton in each file - if (conformingSkel == null && skeletons.Count() == 1 && Root.Skeletons.Count == 1) - { - conformingSkel = skeletons.First(); - } + //Dummy_Foot -> Dummy_Foot_01 + var bone = skeleton.GetBoneByName(track.Name) ?? skeleton.GetBoneByName(track.Name + "_01"); - if (conformingSkel == null) + if (bone == null) { - throw new ExportException($"No matching skeleton found in source file for skeleton '{skeleton.Name}'."); + throw new ExportException($"Animation track references bone '{track.Name}' that cannot be found in the skeleton '{skeleton.Name}'."); } - - ConformSkeleton(skeleton, conformingSkel); } } + } - private void ConformMeshBoneBindings(Mesh mesh, Mesh conformToMesh) + private void ConformSkeletons(IEnumerable skeletons) + { + // We don't have any skeletons in this mesh, nothing to conform. + if (Root.Skeletons == null || Root.Skeletons.Count == 0) { - mesh.BoneBindings ??= []; - - foreach (var conformBone in conformToMesh.BoneBindings) + // If we're exporting animations without a skeleton, copy the source skeleton + // and check if all animation tracks are referencing existing bones. + if (Root.Animations != null && Root.Animations.Count > 0) { - BoneBinding inputBone = null; - foreach (var bone in mesh.BoneBindings) + Root.Skeletons = skeletons.ToList(); + if (Root.Skeletons.Count != 1) { - if (bone.BoneName == conformBone.BoneName) - { - inputBone = bone; - break; - } + throw new ExportException($"Skeleton source file should contain exactly one skeleton. Skeleton Count: '{Root.Skeletons.Count}'."); } - if (inputBone == null) - { - // Create a new "dummy" binding if it does not exist in the new mesh - inputBone = new BoneBinding + var skeleton = Root.Skeletons.First(); + + // Generate a dummy model if there isn't one, otherwise we won't + // be able to bind the animations to anything + Root.Models ??= [ + new Model { - BoneName = conformBone.BoneName - }; - mesh.BoneBindings.Add(inputBone); - } + InitialPlacement = new Transform(), + Name = skeleton.Name, + Skeleton = skeleton + } + ]; - // The bones match, copy relevant parameters from the conforming binding to the input. 
- inputBone.OBBMin = conformBone.OBBMin; - inputBone.OBBMax = conformBone.OBBMax; + ConformSkeletonAnimations(skeleton); } + + return; } - private void ConformMeshBoneBindings(IEnumerable meshes) + foreach (var skeleton in Root.Skeletons) { - if (Root.Meshes == null) + // Check if there is a matching skeleton in the source file + Skeleton conformingSkel = null; + foreach (var skel in skeletons) { - return; - } - - foreach (var mesh in Root.Meshes) - { - Mesh conformingMesh = null; - foreach (var mesh2 in meshes) + if (skel.Name == skeleton.Name) { - if (mesh.Name == mesh2.Name) - { - conformingMesh = mesh2; - break; - } + conformingSkel = skel; + break; } + } - if (conformingMesh == null) - { - throw new ExportException($"No matching mesh found in source file for mesh '{mesh.Name}'."); - } + // Allow name mismatches if there is only 1 skeleton in each file + if (conformingSkel == null && skeletons.Count() == 1 && Root.Skeletons.Count == 1) + { + conformingSkel = skeletons.First(); + } - ConformMeshBoneBindings(mesh, conformingMesh); + if (conformingSkel == null) + { + throw new ExportException($"No matching skeleton found in source file for skeleton '{skeleton.Name}'."); } + + ConformSkeleton(skeleton, conformingSkel); } + } - private Mesh GenerateDummyMesh(MeshBinding meshBinding) - { - var vertexData = new VertexData(); - vertexData.VertexComponentNames = meshBinding.Mesh.PrimaryVertexData.VertexComponentNames - .Select(name => new GrannyString(name.String)).ToList(); - vertexData.Vertices = []; - var dummyVertex = meshBinding.Mesh.VertexFormat.CreateInstance(); - vertexData.Vertices.Add(dummyVertex); - Root.VertexDatas.Add(vertexData); + private void ConformMeshBoneBindings(Mesh mesh, Mesh conformToMesh) + { + mesh.BoneBindings ??= []; - var topology = new TriTopology + foreach (var conformBone in conformToMesh.BoneBindings) + { + BoneBinding inputBone = null; + foreach (var bone in mesh.BoneBindings) { - Groups = [ - new TriTopologyGroup - { - MaterialIndex = 0, - TriCount = 0, - TriFirst = 0 - } - ], - Indices = [] - }; - Root.TriTopologies.Add(topology); + if (bone.BoneName == conformBone.BoneName) + { + inputBone = bone; + break; + } + } - var mesh = new Mesh + if (inputBone == null) { - Name = meshBinding.Mesh.Name, - VertexFormat = meshBinding.Mesh.VertexFormat, - PrimaryTopology = topology, - PrimaryVertexData = vertexData - }; - if (meshBinding.Mesh.BoneBindings != null) - { - mesh.BoneBindings = []; - ConformMeshBoneBindings(mesh, meshBinding.Mesh); + // Create a new "dummy" binding if it does not exist in the new mesh + inputBone = new BoneBinding + { + BoneName = conformBone.BoneName + }; + mesh.BoneBindings.Add(inputBone); } - return mesh; + // The bones match, copy relevant parameters from the conforming binding to the input. 
+ inputBone.OBBMin = conformBone.OBBMin; + inputBone.OBBMax = conformBone.OBBMax; } + } - private Model MakeDummyModel(Model original) + private void ConformMeshBoneBindings(IEnumerable meshes) + { + if (Root.Meshes == null) { - var newModel = new Model - { - InitialPlacement = original.InitialPlacement, - Name = original.Name - }; + return; + } - if (original.Skeleton != null) + foreach (var mesh in Root.Meshes) + { + Mesh conformingMesh = null; + foreach (var mesh2 in meshes) { - var skeleton = Root.Skeletons.Where(skel => skel.Name == original.Skeleton.Name).FirstOrDefault(); - if (skeleton == null) + if (mesh.Name == mesh2.Name) { - throw new ExportException($"Model '{original.Name}' references skeleton '{original.Skeleton.Name}' that does not exist in the source file."); + conformingMesh = mesh2; + break; } - - newModel.Skeleton = skeleton; } - if (original.MeshBindings != null) + if (conformingMesh == null) { - newModel.MeshBindings = []; - foreach (var meshBinding in original.MeshBindings) - { - // Try to bind the original mesh, if it exists in the source file. - // If it doesn't, generate a dummy mesh with 0 vertices - var mesh = Root.Meshes.Where(m => m.Name == meshBinding.Mesh.Name).FirstOrDefault(); - if (mesh == null) - { - mesh = GenerateDummyMesh(meshBinding); - Root.Meshes.Add(mesh); - } + throw new ExportException($"No matching mesh found in source file for mesh '{mesh.Name}'."); + } - var binding = new MeshBinding - { - Mesh = mesh - }; - newModel.MeshBindings.Add(binding); + ConformMeshBoneBindings(mesh, conformingMesh); + } + } + + private Mesh GenerateDummyMesh(MeshBinding meshBinding) + { + var vertexData = new VertexData(); + vertexData.VertexComponentNames = meshBinding.Mesh.PrimaryVertexData.VertexComponentNames + .Select(name => new GrannyString(name.String)).ToList(); + vertexData.Vertices = []; + var dummyVertex = meshBinding.Mesh.VertexFormat.CreateInstance(); + vertexData.Vertices.Add(dummyVertex); + Root.VertexDatas.Add(vertexData); + + var topology = new TriTopology + { + Groups = [ + new TriTopologyGroup + { + MaterialIndex = 0, + TriCount = 0, + TriFirst = 0 } - } + ], + Indices = [] + }; + Root.TriTopologies.Add(topology); - Root.Models.Add(newModel); - return newModel; + var mesh = new Mesh + { + Name = meshBinding.Mesh.Name, + VertexFormat = meshBinding.Mesh.VertexFormat, + PrimaryTopology = topology, + PrimaryVertexData = vertexData + }; + if (meshBinding.Mesh.BoneBindings != null) + { + mesh.BoneBindings = []; + ConformMeshBoneBindings(mesh, meshBinding.Mesh); } - private void ConformModels(IEnumerable models) + return mesh; + } + + private Model MakeDummyModel(Model original) + { + var newModel = new Model + { + InitialPlacement = original.InitialPlacement, + Name = original.Name + }; + + if (original.Skeleton != null) { - if (Root.Models == null || Root.Models.Count == 0) + var skeleton = Root.Skeletons.Where(skel => skel.Name == original.Skeleton.Name).FirstOrDefault(); + if (skeleton == null) { - return; + throw new ExportException($"Model '{original.Name}' references skeleton '{original.Skeleton.Name}' that does not exist in the source file."); } - // Rebuild the model list to match the order used in the original GR2 - // If a model is missing, generate a dummy model & mesh. 
- var originalModels = Root.Models; - Root.Models = []; + newModel.Skeleton = skeleton; + } - foreach (var model in models) + if (original.MeshBindings != null) + { + newModel.MeshBindings = []; + foreach (var meshBinding in original.MeshBindings) { - Model newModel = null; - foreach (var model2 in originalModels) + // Try to bind the original mesh, if it exists in the source file. + // If it doesn't, generate a dummy mesh with 0 vertices + var mesh = Root.Meshes.Where(m => m.Name == meshBinding.Mesh.Name).FirstOrDefault(); + if (mesh == null) { - if (model.Name == model2.Name) - { - newModel = model2; - break; - } + mesh = GenerateDummyMesh(meshBinding); + Root.Meshes.Add(mesh); } - if (newModel == null) + var binding = new MeshBinding { - newModel = MakeDummyModel(model); - Root.Models.Add(newModel); - } - else - { - newModel.InitialPlacement = model.InitialPlacement; - } + Mesh = mesh + }; + newModel.MeshBindings.Add(binding); } - - // If the new GR2 contains models that are not in the original GR2, - // append them to the end of the model list - Root.Models.AddRange(originalModels.Where(m => !Root.Models.Contains(m))); } - private void Conform(string inPath) + Root.Models.Add(newModel); + return newModel; + } + + private void ConformModels(IEnumerable models) + { + if (Root.Models == null || Root.Models.Count == 0) { - var conformRoot = LoadGR2(inPath); + return; + } - if (Options.ConformSkeletonsCopy) - { - Root.Skeletons = conformRoot.Skeletons; - if (Root.Models != null) - { - foreach (var model in Root.Models) - { - model.Skeleton = Root.Skeletons.First(); - } - } - else - { - Root.Models = conformRoot.Models; - } - } - else if (Options.ConformSkeletons) + // Rebuild the model list to match the order used in the original GR2 + // If a model is missing, generate a dummy model & mesh. + var originalModels = Root.Models; + Root.Models = []; + + foreach (var model in models) + { + Model newModel = null; + foreach (var model2 in originalModels) { - if (conformRoot.Skeletons == null || conformRoot.Skeletons.Count == 0) + if (model.Name == model2.Name) { - throw new ExportException("Source file contains no skeletons."); + newModel = model2; + break; } - - ConformSkeletons(conformRoot.Skeletons); } - if (Options.ConformModels && conformRoot.Models != null) + if (newModel == null) { - ConformModels(conformRoot.Models); + newModel = MakeDummyModel(model); + Root.Models.Add(newModel); } - - if (Options.ConformMeshBoneBindings && conformRoot.Meshes != null) + else { - ConformMeshBoneBindings(conformRoot.Meshes); + newModel.InitialPlacement = model.InitialPlacement; } } - public void Export() + // If the new GR2 contains models that are not in the original GR2, + // append them to the end of the model list + Root.Models.AddRange(originalModels.Where(m => !Root.Models.Contains(m))); + } + + private void Conform(string inPath) + { + var conformRoot = LoadGR2(inPath); + + if (Options.ConformSkeletonsCopy) { - if (Options.InputPath != null) + Root.Skeletons = conformRoot.Skeletons; + if (Root.Models != null) { - Root = Load(Options.InputPath, Options.InputFormat); - } - else - { - if (Options.Input == null) + foreach (var model in Root.Models) { - throw new ExportException("No input model specified. 
Either the InputPath or the Input option must be specified."); + model.Skeleton = Root.Skeletons.First(); } - - Root = Options.Input; } - - if (Options.DisabledAnimations.Count > 0) + else { - Root.Animations = Root.Animations.Where(a => !Options.DisabledAnimations.Contains(a.Name)).ToList(); + Root.Models = conformRoot.Models; } - - if (Options.DisabledModels.Count > 0) + } + else if (Options.ConformSkeletons) + { + if (conformRoot.Skeletons == null || conformRoot.Skeletons.Count == 0) { - Root.Models = Root.Models.Where(a => !Options.DisabledModels.Contains(a.Name)).ToList(); + throw new ExportException("Source file contains no skeletons."); } - if (Options.DisabledSkeletons.Count > 0) - { - Root.Skeletons = Root.Skeletons.Where(a => !Options.DisabledSkeletons.Contains(a.Name)).ToList(); - } + ConformSkeletons(conformRoot.Skeletons); + } - if (Options.DeduplicateVertices) - { - if (Root.VertexDatas != null) - { - foreach (var vertexData in Root.VertexDatas) - { - vertexData.Deduplicate(); - } - } - } + if (Options.ConformModels && conformRoot.Models != null) + { + ConformModels(conformRoot.Models); + } + + if (Options.ConformMeshBoneBindings && conformRoot.Meshes != null) + { + ConformMeshBoneBindings(conformRoot.Meshes); + } + } - if (Options.ApplyBasisTransforms) + public void Export() + { + if (Options.InputPath != null) + { + Root = Load(Options.InputPath, Options.InputFormat); + } + else + { + if (Options.Input == null) { - Root.ConvertToYUp(Options.TransformSkeletons); + throw new ExportException("No input model specified. Either the InputPath or the Input option must be specified."); } - if (Options.RecalculateIWT && Root.Skeletons != null) + Root = Options.Input; + } + + if (Options.DisabledAnimations.Count > 0) + { + Root.Animations = Root.Animations.Where(a => !Options.DisabledAnimations.Contains(a.Name)).ToList(); + } + + if (Options.DisabledModels.Count > 0) + { + Root.Models = Root.Models.Where(a => !Options.DisabledModels.Contains(a.Name)).ToList(); + } + + if (Options.DisabledSkeletons.Count > 0) + { + Root.Skeletons = Root.Skeletons.Where(a => !Options.DisabledSkeletons.Contains(a.Name)).ToList(); + } + + if (Options.DeduplicateVertices) + { + if (Root.VertexDatas != null) { - foreach (var skeleton in Root.Skeletons) + foreach (var vertexData in Root.VertexDatas) { - skeleton.UpdateWorldTransforms(); + vertexData.Deduplicate(); } } + } - // TODO: DeduplicateUVs + if (Options.ApplyBasisTransforms) + { + Root.ConvertToYUp(Options.TransformSkeletons); + } - if (Options.ConformGR2Path != null) + if (Options.RecalculateIWT && Root.Skeletons != null) + { + foreach (var skeleton in Root.Skeletons) { - try - { - Conform(Options.ConformGR2Path); - } - catch (ExportException e) - { - throw new ExportException("Failed to conform skeleton:\n" + e.Message); - } + skeleton.UpdateWorldTransforms(); } + } + + // TODO: DeduplicateUVs - if (Options.BuildDummySkeleton && Root.Models != null) + if (Options.ConformGR2Path != null) + { + try { - GenerateDummySkeleton(Root); + Conform(Options.ConformGR2Path); } - - if (Options.FlipMesh || Options.FlipSkeleton) + catch (ExportException e) { - Root.Flip(Options.FlipMesh, Options.FlipSkeleton); + throw new ExportException("Failed to conform skeleton:\n" + e.Message); } + } + + if (Options.BuildDummySkeleton && Root.Models != null) + { + GenerateDummySkeleton(Root); + } + + if (Options.FlipMesh || Options.FlipSkeleton) + { + Root.Flip(Options.FlipMesh, Options.FlipSkeleton); + } - // This option should be handled after everything else, as it 
converts Indices - // into Indices16 and breaks every other operation that manipulates tri topologies. - if (Options.OutputFormat == ExportFormat.GR2 && Options.CompactIndices) + // This option should be handled after everything else, as it converts Indices + // into Indices16 and breaks every other operation that manipulates tri topologies. + if (Options.OutputFormat == ExportFormat.GR2 && Options.CompactIndices) + { + if (Root.TriTopologies != null) { - if (Root.TriTopologies != null) + foreach (var topology in Root.TriTopologies) { - foreach (var topology in Root.TriTopologies) + if (topology.Indices != null) { - if (topology.Indices != null) + // Make sure that we don't have indices over 32767. If we do, + // int16 won't be big enough to hold the index, so we won't convert. + bool hasHighIndex = false; + foreach (var index in topology.Indices) { - // Make sure that we don't have indices over 32767. If we do, - // int16 won't be big enough to hold the index, so we won't convert. - bool hasHighIndex = false; - foreach (var index in topology.Indices) + if (index > 0xffff) { - if (index > 0xffff) - { - hasHighIndex = true; - break; - } + hasHighIndex = true; + break; } + } - if (!hasHighIndex) + if (!hasHighIndex) + { + topology.Indices16 = new List(topology.Indices.Count); + foreach (var index in topology.Indices) { - topology.Indices16 = new List(topology.Indices.Count); - foreach (var index in topology.Indices) - { - topology.Indices16.Add((ushort)index); - } - - topology.Indices = null; + topology.Indices16.Add((ushort)index); } + + topology.Indices = null; } } } } - - Save(Root, Options); } + + Save(Root, Options); } } diff --git a/LSLib/Granny/Model/HalfHelpers.cs b/LSLib/Granny/Model/HalfHelpers.cs index 603b0630..81d7ef24 100644 --- a/LSLib/Granny/Model/HalfHelpers.cs +++ b/LSLib/Granny/Model/HalfHelpers.cs @@ -1,168 +1,167 @@ -namespace LSLib.Granny.Model +namespace LSLib.Granny.Model; + +/// +/// Helper class for Half conversions and some low level operations. +/// This class is internally used in the Half class. +/// +/// +/// References: +/// - Fast Half Float Conversions, Jeroen van der Zijp, link: http://www.fox-toolkit.org/ftp/fasthalffloatconversion.pdf +/// +internal static class HalfHelpers { - /// - /// Helper class for Half conversions and some low level operations. - /// This class is internally used in the Half class. - /// - /// - /// References: - /// - Fast Half Float Conversions, Jeroen van der Zijp, link: http://www.fox-toolkit.org/ftp/fasthalffloatconversion.pdf - /// - internal static class HalfHelpers + private static uint[] mantissaTable = GenerateMantissaTable(); + private static uint[] exponentTable = GenerateExponentTable(); + private static ushort[] offsetTable = GenerateOffsetTable(); + private static ushort[] baseTable = GenerateBaseTable(); + private static sbyte[] shiftTable = GenerateShiftTable(); + + // Transforms the subnormal representation to a normalized one. + private static uint ConvertMantissa(int i) { - private static uint[] mantissaTable = GenerateMantissaTable(); - private static uint[] exponentTable = GenerateExponentTable(); - private static ushort[] offsetTable = GenerateOffsetTable(); - private static ushort[] baseTable = GenerateBaseTable(); - private static sbyte[] shiftTable = GenerateShiftTable(); + uint m = (uint)(i << 13); // Zero pad mantissa bits + uint e = 0; // Zero exponent - // Transforms the subnormal representation to a normalized one. 
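The table builders below implement the fast table-driven conversion from the referenced paper; ConvertMantissa in particular renormalizes a subnormal half mantissa into a full float mantissa/exponent pair. As a cross-check, here is a table-free half→float decoder that performs the same field handling directly; it is a sketch for reference, not code from this patch (it assumes a runtime that provides `BitConverter.Int32BitsToSingle`):

```csharp
using System;

static class HalfSketch
{
    // Direct half (1/5/10) -> float (1/8/23) bit conversion, equivalent in result
    // to the table-driven HalfToSingle.
    public static float HalfBitsToFloat(ushort h)
    {
        uint sign = (uint)(h >> 15) & 0x1;
        uint exp  = (uint)(h >> 10) & 0x1F;
        uint man  = (uint)(h & 0x3FF);

        uint bits;
        if (exp == 0x1F)
        {
            bits = (sign << 31) | 0x7F800000u | (man << 13);          // Inf / NaN
        }
        else if (exp != 0)
        {
            bits = (sign << 31) | ((exp + 112) << 23) | (man << 13);  // normal: rebias 15 -> 127
        }
        else if (man == 0)
        {
            bits = sign << 31;                                        // signed zero
        }
        else
        {
            // Subnormal half: shift the mantissa up until its leading 1 reaches bit 10,
            // decrementing the exponent per shift -- the same job ConvertMantissa does.
            uint e = 113;                                             // 0x38800000 >> 23, the bias adjustment used there
            do { man <<= 1; e--; } while ((man & 0x400) == 0);
            bits = (sign << 31) | (e << 23) | ((man & 0x3FF) << 13);
        }

        return BitConverter.Int32BitsToSingle((int)bits);
    }
}
```

For example, `HalfBitsToFloat(0x3C00)` yields 1.0f and `HalfBitsToFloat(0x7C00)` yields `float.PositiveInfinity`.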
- private static uint ConvertMantissa(int i) + // While not normalized + while ((m & 0x00800000) == 0) { - uint m = (uint)(i << 13); // Zero pad mantissa bits - uint e = 0; // Zero exponent - - // While not normalized - while ((m & 0x00800000) == 0) - { - e -= 0x00800000; // Decrement exponent (1<<23) - m <<= 1; // Shift mantissa - } - m &= unchecked((uint)~0x00800000); // Clear leading 1 bit - e += 0x38800000; // Adjust bias ((127-14)<<23) - return m | e; // Return combined number + e -= 0x00800000; // Decrement exponent (1<<23) + m <<= 1; // Shift mantissa } + m &= unchecked((uint)~0x00800000); // Clear leading 1 bit + e += 0x38800000; // Adjust bias ((127-14)<<23) + return m | e; // Return combined number + } - private static uint[] GenerateMantissaTable() + private static uint[] GenerateMantissaTable() + { + uint[] mantissaTable = new uint[2048]; + mantissaTable[0] = 0; + for (int i = 1; i < 1024; i++) { - uint[] mantissaTable = new uint[2048]; - mantissaTable[0] = 0; - for (int i = 1; i < 1024; i++) - { - mantissaTable[i] = ConvertMantissa(i); - } - for (int i = 1024; i < 2048; i++) - { - mantissaTable[i] = (uint)(0x38000000 + ((i - 1024) << 13)); - } - - return mantissaTable; + mantissaTable[i] = ConvertMantissa(i); } - private static uint[] GenerateExponentTable() + for (int i = 1024; i < 2048; i++) { - uint[] exponentTable = new uint[64]; - exponentTable[0] = 0; - for (int i = 1; i < 31; i++) - { - exponentTable[i] = (uint)(i << 23); - } - exponentTable[31] = 0x47800000; - exponentTable[32] = 0x80000000; - for (int i = 33; i < 63; i++) - { - exponentTable[i] = (uint)(0x80000000 + ((i - 32) << 23)); - } - exponentTable[63] = 0xc7800000; + mantissaTable[i] = (uint)(0x38000000 + ((i - 1024) << 13)); + } - return exponentTable; + return mantissaTable; + } + private static uint[] GenerateExponentTable() + { + uint[] exponentTable = new uint[64]; + exponentTable[0] = 0; + for (int i = 1; i < 31; i++) + { + exponentTable[i] = (uint)(i << 23); } - private static ushort[] GenerateOffsetTable() + exponentTable[31] = 0x47800000; + exponentTable[32] = 0x80000000; + for (int i = 33; i < 63; i++) { - ushort[] offsetTable = new ushort[64]; - offsetTable[0] = 0; - for (int i = 1; i < 32; i++) - { - offsetTable[i] = 1024; - } - offsetTable[32] = 0; - for (int i = 33; i < 64; i++) - { - offsetTable[i] = 1024; - } + exponentTable[i] = (uint)(0x80000000 + ((i - 32) << 23)); + } + exponentTable[63] = 0xc7800000; - return offsetTable; + return exponentTable; + } + private static ushort[] GenerateOffsetTable() + { + ushort[] offsetTable = new ushort[64]; + offsetTable[0] = 0; + for (int i = 1; i < 32; i++) + { + offsetTable[i] = 1024; } - private static ushort[] GenerateBaseTable() + offsetTable[32] = 0; + for (int i = 33; i < 64; i++) { - ushort[] baseTable = new ushort[512]; - for (int i = 0; i < 256; ++i) - { - sbyte e = (sbyte)(127 - i); - if (e > 24) - { // Very small numbers map to zero - baseTable[i | 0x000] = 0x0000; - baseTable[i | 0x100] = 0x8000; - } - else if (e > 14) - { // Small numbers map to denorms - baseTable[i | 0x000] = (ushort)(0x0400 >> (18 + e)); - baseTable[i | 0x100] = (ushort)((0x0400 >> (18 + e)) | 0x8000); - } - else if (e >= -15) - { // Normal numbers just lose precision - baseTable[i | 0x000] = (ushort)((15 - e) << 10); - baseTable[i | 0x100] = (ushort)(((15 - e) << 10) | 0x8000); - } - else if (e > -128) - { // Large numbers map to Infinity - baseTable[i | 0x000] = 0x7c00; - baseTable[i | 0x100] = 0xfc00; - } - else - { // Infinity and NaN's stay Infinity and NaN's - 
baseTable[i | 0x000] = 0x7c00; - baseTable[i | 0x100] = 0xfc00; - } - } - - return baseTable; + offsetTable[i] = 1024; } - private static sbyte[] GenerateShiftTable() + + return offsetTable; + } + private static ushort[] GenerateBaseTable() + { + ushort[] baseTable = new ushort[512]; + for (int i = 0; i < 256; ++i) { - sbyte[] shiftTable = new sbyte[512]; - for (int i = 0; i < 256; ++i) - { - sbyte e = (sbyte)(127 - i); - if (e > 24) - { // Very small numbers map to zero - shiftTable[i | 0x000] = 24; - shiftTable[i | 0x100] = 24; - } - else if (e > 14) - { // Small numbers map to denorms - shiftTable[i | 0x000] = (sbyte)(e - 1); - shiftTable[i | 0x100] = (sbyte)(e - 1); - } - else if (e >= -15) - { // Normal numbers just lose precision - shiftTable[i | 0x000] = 13; - shiftTable[i | 0x100] = 13; - } - else if (e > -128) - { // Large numbers map to Infinity - shiftTable[i | 0x000] = 24; - shiftTable[i | 0x100] = 24; - } - else - { // Infinity and NaN's stay Infinity and NaN's - shiftTable[i | 0x000] = 13; - shiftTable[i | 0x100] = 13; - } + sbyte e = (sbyte)(127 - i); + if (e > 24) + { // Very small numbers map to zero + baseTable[i | 0x000] = 0x0000; + baseTable[i | 0x100] = 0x8000; + } + else if (e > 14) + { // Small numbers map to denorms + baseTable[i | 0x000] = (ushort)(0x0400 >> (18 + e)); + baseTable[i | 0x100] = (ushort)((0x0400 >> (18 + e)) | 0x8000); + } + else if (e >= -15) + { // Normal numbers just lose precision + baseTable[i | 0x000] = (ushort)((15 - e) << 10); + baseTable[i | 0x100] = (ushort)(((15 - e) << 10) | 0x8000); + } + else if (e > -128) + { // Large numbers map to Infinity + baseTable[i | 0x000] = 0x7c00; + baseTable[i | 0x100] = 0xfc00; + } + else + { // Infinity and NaN's stay Infinity and NaN's + baseTable[i | 0x000] = 0x7c00; + baseTable[i | 0x100] = 0xfc00; } - - return shiftTable; } - public static unsafe float HalfToSingle(ushort half) + return baseTable; + } + private static sbyte[] GenerateShiftTable() + { + sbyte[] shiftTable = new sbyte[512]; + for (int i = 0; i < 256; ++i) { - uint result = mantissaTable[offsetTable[half >> 10] + (half & 0x3ff)] + exponentTable[half >> 10]; - return *((float*)&result); + sbyte e = (sbyte)(127 - i); + if (e > 24) + { // Very small numbers map to zero + shiftTable[i | 0x000] = 24; + shiftTable[i | 0x100] = 24; + } + else if (e > 14) + { // Small numbers map to denorms + shiftTable[i | 0x000] = (sbyte)(e - 1); + shiftTable[i | 0x100] = (sbyte)(e - 1); + } + else if (e >= -15) + { // Normal numbers just lose precision + shiftTable[i | 0x000] = 13; + shiftTable[i | 0x100] = 13; + } + else if (e > -128) + { // Large numbers map to Infinity + shiftTable[i | 0x000] = 24; + shiftTable[i | 0x100] = 24; + } + else + { // Infinity and NaN's stay Infinity and NaN's + shiftTable[i | 0x000] = 13; + shiftTable[i | 0x100] = 13; + } } - public static unsafe ushort SingleToHalf(float single) - { - uint value = *((uint*)&single); - return (ushort)(baseTable[(value >> 23) & 0x1ff] + ((value & 0x007fffff) >> shiftTable[value >> 23])); - } + return shiftTable; + } + + public static unsafe float HalfToSingle(ushort half) + { + uint result = mantissaTable[offsetTable[half >> 10] + (half & 0x3ff)] + exponentTable[half >> 10]; + return *((float*)&result); + } + public static unsafe ushort SingleToHalf(float single) + { + uint value = *((uint*)&single); + + return (ushort)(baseTable[(value >> 23) & 0x1ff] + ((value & 0x007fffff) >> shiftTable[value >> 23])); } } diff --git a/LSLib/Granny/Model/Mesh.cs b/LSLib/Granny/Model/Mesh.cs index 
790afc91..89fb09a9 100644 --- a/LSLib/Granny/Model/Mesh.cs +++ b/LSLib/Granny/Model/Mesh.cs @@ -5,745 +5,744 @@ using OpenTK.Mathematics; using LSLib.Granny.GR2; -namespace LSLib.Granny.Model +namespace LSLib.Granny.Model; + +public class Deduplicator(IEqualityComparer comparer) { - public class Deduplicator(IEqualityComparer comparer) - { - private readonly IEqualityComparer Comparer = comparer; - public Dictionary DeduplicationMap = []; - public List Uniques = []; + private readonly IEqualityComparer Comparer = comparer; + public Dictionary DeduplicationMap = []; + public List Uniques = []; - public void MakeIdentityMapping(IEnumerable items) + public void MakeIdentityMapping(IEnumerable items) + { + var i = 0; + foreach (var item in items) { - var i = 0; - foreach (var item in items) - { - Uniques.Add(item); - DeduplicationMap.Add(i, i); - i++; - } + Uniques.Add(item); + DeduplicationMap.Add(i, i); + i++; } + } - public void Deduplicate(IEnumerable items) + public void Deduplicate(IEnumerable items) + { + var uniqueItems = new Dictionary(Comparer); + var i = 0; + foreach (var item in items) { - var uniqueItems = new Dictionary(Comparer); - var i = 0; - foreach (var item in items) + if (!uniqueItems.TryGetValue(item, out int mappedIndex)) { - if (!uniqueItems.TryGetValue(item, out int mappedIndex)) - { - mappedIndex = uniqueItems.Count; - uniqueItems.Add(item, mappedIndex); - Uniques.Add(item); - } - - DeduplicationMap.Add(i, mappedIndex); - i++; + mappedIndex = uniqueItems.Count; + uniqueItems.Add(item, mappedIndex); + Uniques.Add(item); } + + DeduplicationMap.Add(i, mappedIndex); + i++; } } - - class GenericEqualityComparer : IEqualityComparer where T : IEquatable - { - public bool Equals(T a, T b) - { - return a.Equals(b); - } +} - public int GetHashCode(T v) - { - return v.GetHashCode(); - } +class GenericEqualityComparer : IEqualityComparer where T : IEquatable +{ + public bool Equals(T a, T b) + { + return a.Equals(b); } - public struct SkinnedVertex : IEquatable + public int GetHashCode(T v) { - public Vector3 Position; - public BoneWeight Indices; - public BoneWeight Weights; + return v.GetHashCode(); + } +} - public bool Equals(SkinnedVertex w) - { - return Position.Equals(w.Position) - && Indices.Equals(w.Indices) - && Weights.Equals(w.Weights); - } +public struct SkinnedVertex : IEquatable +{ + public Vector3 Position; + public BoneWeight Indices; + public BoneWeight Weights; - public override int GetHashCode() - { - return Position.GetHashCode() ^ Indices.GetHashCode() ^ Weights.GetHashCode(); - } + public bool Equals(SkinnedVertex w) + { + return Position.Equals(w.Position) + && Indices.Equals(w.Indices) + && Weights.Equals(w.Weights); } - - public class VertexDeduplicator + + public override int GetHashCode() { - public Deduplicator Vertices = new(new GenericEqualityComparer()); - public Deduplicator Normals = new(new GenericEqualityComparer()); - public List> UVs = []; - public List> Colors = []; + return Position.GetHashCode() ^ Indices.GetHashCode() ^ Weights.GetHashCode(); + } +} - public void MakeIdentityMapping(List vertices) - { - if (vertices.Count == 0) return; +public class VertexDeduplicator +{ + public Deduplicator Vertices = new(new GenericEqualityComparer()); + public Deduplicator Normals = new(new GenericEqualityComparer()); + public List> UVs = []; + public List> Colors = []; - var format = vertices[0].Format; + public void MakeIdentityMapping(List vertices) + { + if (vertices.Count == 0) return; - Vertices.MakeIdentityMapping(vertices.Select(v => new 
SkinnedVertex { - Position = v.Position, - Indices = v.BoneIndices, - Weights = v.BoneWeights - })); + var format = vertices[0].Format; - if (format.NormalType != NormalType.None - || format.TangentType != NormalType.None - || format.BinormalType != NormalType.None) - { - Normals.MakeIdentityMapping(vertices.Select(v => new Matrix3(v.Normal, v.Tangent, v.Binormal))); - } + Vertices.MakeIdentityMapping(vertices.Select(v => new SkinnedVertex { + Position = v.Position, + Indices = v.BoneIndices, + Weights = v.BoneWeights + })); - var numUvs = format.TextureCoordinates; - for (var uv = 0; uv < numUvs; uv++) - { - var uvDedup = new Deduplicator(new GenericEqualityComparer()); - uvDedup.MakeIdentityMapping(vertices.Select(v => v.GetUV(uv))); - UVs.Add(uvDedup); - } + if (format.NormalType != NormalType.None + || format.TangentType != NormalType.None + || format.BinormalType != NormalType.None) + { + Normals.MakeIdentityMapping(vertices.Select(v => new Matrix3(v.Normal, v.Tangent, v.Binormal))); + } - var numColors = format.ColorMaps; - for (var color = 0; color < numColors; color++) - { - var colorDedup = new Deduplicator(new GenericEqualityComparer()); - colorDedup.MakeIdentityMapping(vertices.Select(v => v.GetColor(color))); - Colors.Add(colorDedup); - } + var numUvs = format.TextureCoordinates; + for (var uv = 0; uv < numUvs; uv++) + { + var uvDedup = new Deduplicator(new GenericEqualityComparer()); + uvDedup.MakeIdentityMapping(vertices.Select(v => v.GetUV(uv))); + UVs.Add(uvDedup); } - public void Deduplicate(List vertices) + var numColors = format.ColorMaps; + for (var color = 0; color < numColors; color++) { - if (vertices.Count == 0) return; + var colorDedup = new Deduplicator(new GenericEqualityComparer()); + colorDedup.MakeIdentityMapping(vertices.Select(v => v.GetColor(color))); + Colors.Add(colorDedup); + } + } - var format = vertices[0].Format; + public void Deduplicate(List vertices) + { + if (vertices.Count == 0) return; - Vertices.Deduplicate(vertices.Select(v => new SkinnedVertex - { - Position = v.Position, - Indices = v.BoneIndices, - Weights = v.BoneWeights - })); - - if (format.NormalType != NormalType.None - || format.TangentType != NormalType.None - || format.BinormalType != NormalType.None) - { - Normals.Deduplicate(vertices.Select(v => new Matrix3(v.Normal, v.Tangent, v.Binormal))); - } + var format = vertices[0].Format; - var numUvs = format.TextureCoordinates; - for (var uv = 0; uv < numUvs; uv++) - { - var uvDedup = new Deduplicator(new GenericEqualityComparer()); - uvDedup.Deduplicate(vertices.Select(v => v.GetUV(uv))); - UVs.Add(uvDedup); - } + Vertices.Deduplicate(vertices.Select(v => new SkinnedVertex + { + Position = v.Position, + Indices = v.BoneIndices, + Weights = v.BoneWeights + })); + + if (format.NormalType != NormalType.None + || format.TangentType != NormalType.None + || format.BinormalType != NormalType.None) + { + Normals.Deduplicate(vertices.Select(v => new Matrix3(v.Normal, v.Tangent, v.Binormal))); + } - var numColors = format.ColorMaps; - for (var color = 0; color < numColors; color++) - { - var colorDedup = new Deduplicator(new GenericEqualityComparer()); - colorDedup.Deduplicate(vertices.Select(v => v.GetColor(color))); - Colors.Add(colorDedup); - } + var numUvs = format.TextureCoordinates; + for (var uv = 0; uv < numUvs; uv++) + { + var uvDedup = new Deduplicator(new GenericEqualityComparer()); + uvDedup.Deduplicate(vertices.Select(v => v.GetUV(uv))); + UVs.Add(uvDedup); } - } - public class VertexAnnotationSet - { - public string Name; - 
[Serialization(Type = MemberType.ReferenceToVariantArray)] - public List VertexAnnotations; - public Int32 IndicesMapFromVertexToAnnotation; - public List VertexAnnotationIndices; + var numColors = format.ColorMaps; + for (var color = 0; color < numColors; color++) + { + var colorDedup = new Deduplicator(new GenericEqualityComparer()); + colorDedup.Deduplicate(vertices.Select(v => v.GetColor(color))); + Colors.Add(colorDedup); + } } +} + +public class VertexAnnotationSet +{ + public string Name; + [Serialization(Type = MemberType.ReferenceToVariantArray)] + public List VertexAnnotations; + public Int32 IndicesMapFromVertexToAnnotation; + public List VertexAnnotationIndices; +} - public class VertexDataSectionSelector : SectionSelector +public class VertexDataSectionSelector : SectionSelector +{ + public SectionType SelectSection(MemberDefinition member, Type type, object obj) { - public SectionType SelectSection(MemberDefinition member, Type type, object obj) + if (obj is VertexData data) { - if (obj is VertexData data) - { - return data.SerializationSection; - } - else - { - return SectionType.Invalid; - } + return data.SerializationSection; + } + else + { + return SectionType.Invalid; } } +} - public class VertexData +public class VertexData +{ + [Serialization(Type = MemberType.ReferenceToVariantArray, MixedMarshal = true, + TypeSelector = typeof(VertexSerializer), Serializer = typeof(VertexSerializer), + Kind = SerializationKind.UserMember)] + public List Vertices; + public List VertexComponentNames; + public List VertexAnnotationSets; + [Serialization(Kind = SerializationKind.None)] + public VertexDeduplicator Deduplicator; + + [Serialization(Kind = SerializationKind.None)] + public SectionType SerializationSection = SectionType.Invalid; + + public void PostLoad() { - [Serialization(Type = MemberType.ReferenceToVariantArray, MixedMarshal = true, - TypeSelector = typeof(VertexSerializer), Serializer = typeof(VertexSerializer), - Kind = SerializationKind.UserMember)] - public List Vertices; - public List VertexComponentNames; - public List VertexAnnotationSets; - [Serialization(Kind = SerializationKind.None)] - public VertexDeduplicator Deduplicator; - - [Serialization(Kind = SerializationKind.None)] - public SectionType SerializationSection = SectionType.Invalid; - - public void PostLoad() - { - // Fix missing vertex component names - if (VertexComponentNames == null) + // Fix missing vertex component names + if (VertexComponentNames == null) + { + VertexComponentNames = []; + if (Vertices.Count > 0) { - VertexComponentNames = []; - if (Vertices.Count > 0) + var components = Vertices[0].Format.ComponentNames(); + foreach (var name in components) { - var components = Vertices[0].Format.ComponentNames(); - foreach (var name in components) - { - VertexComponentNames.Add(new GrannyString(name)); - } + VertexComponentNames.Add(new GrannyString(name)); } } } + } - public void Deduplicate() + public void Deduplicate() + { + Deduplicator = new VertexDeduplicator(); + Deduplicator.Deduplicate(Vertices); + } + + private void EnsureDeduplicationMap() + { + // Makes sure that we have an original -> duplicate vertex index map to work with. + // If we don't, it creates an identity mapping between the original and the Collada vertices. + // To deduplicate GR2 vertex data, Deduplicate() should be called before any Collada export call. 
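In other words, a Deduplicator tracks two things for the Collada writers: Uniques, where each distinct item is stored once, and DeduplicationMap, which maps every original vertex index to that item's position in Uniques. A standalone sketch of that contract (the `DedupSketch` helper and its tuple return are illustrative, not the LSLib API):

```csharp
using System.Collections.Generic;

static class DedupSketch
{
    // Returns each distinct item once, plus a map from original index -> unique index.
    public static (List<T> Uniques, Dictionary<int, int> Map) Deduplicate<T>(
        IEnumerable<T> items, IEqualityComparer<T> comparer)
    {
        var uniques = new List<T>();
        var map = new Dictionary<int, int>();
        var seen = new Dictionary<T, int>(comparer);

        int i = 0;
        foreach (var item in items)
        {
            if (!seen.TryGetValue(item, out int uniqueIndex))
            {
                uniqueIndex = uniques.Count;       // first time we see this value
                seen.Add(item, uniqueIndex);
                uniques.Add(item);
            }
            map.Add(i++, uniqueIndex);             // every original index gets a mapping
        }
        return (uniques, map);
    }
}
```

With positions [A, B, A] this yields Uniques = [A, B] and Map = {0→0, 1→1, 2→0}; the MakeCollada* methods emit the unique values as source data, and MakeColladaTriangles then remaps triangle indices through the map.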
+ if (Deduplicator == null) { Deduplicator = new VertexDeduplicator(); - Deduplicator.Deduplicate(Vertices); + Deduplicator.MakeIdentityMapping(Vertices); } + } - private void EnsureDeduplicationMap() - { - // Makes sure that we have an original -> duplicate vertex index map to work with. - // If we don't, it creates an identity mapping between the original and the Collada vertices. - // To deduplicate GR2 vertex data, Deduplicate() should be called before any Collada export call. - if (Deduplicator == null) - { - Deduplicator = new VertexDeduplicator(); - Deduplicator.MakeIdentityMapping(Vertices); - } - } + public source MakeColladaPositions(string name) + { + EnsureDeduplicationMap(); - public source MakeColladaPositions(string name) + int index = 0; + var positions = new float[Deduplicator.Vertices.Uniques.Count * 3]; + foreach (var vertex in Deduplicator.Vertices.Uniques) { - EnsureDeduplicationMap(); + var pos = vertex.Position; + positions[index++] = pos[0]; + positions[index++] = pos[1]; + positions[index++] = pos[2]; + } - int index = 0; - var positions = new float[Deduplicator.Vertices.Uniques.Count * 3]; - foreach (var vertex in Deduplicator.Vertices.Uniques) - { - var pos = vertex.Position; - positions[index++] = pos[0]; - positions[index++] = pos[1]; - positions[index++] = pos[2]; - } + return ColladaUtils.MakeFloatSource(name, "positions", ["X", "Y", "Z"], positions); + } - return ColladaUtils.MakeFloatSource(name, "positions", ["X", "Y", "Z"], positions); - } + public source MakeColladaNormals(string name) + { + EnsureDeduplicationMap(); - public source MakeColladaNormals(string name) + int index = 0; + var normals = new float[Deduplicator.Normals.Uniques.Count * 3]; + foreach (var ntb in Deduplicator.Normals.Uniques) { - EnsureDeduplicationMap(); + var normal = ntb.Row0; + normals[index++] = normal[0]; + normals[index++] = normal[1]; + normals[index++] = normal[2]; + } - int index = 0; - var normals = new float[Deduplicator.Normals.Uniques.Count * 3]; - foreach (var ntb in Deduplicator.Normals.Uniques) - { - var normal = ntb.Row0; - normals[index++] = normal[0]; - normals[index++] = normal[1]; - normals[index++] = normal[2]; - } + return ColladaUtils.MakeFloatSource(name, "normals", ["X", "Y", "Z"], normals); + } - return ColladaUtils.MakeFloatSource(name, "normals", ["X", "Y", "Z"], normals); - } + public source MakeColladaTangents(string name) + { + EnsureDeduplicationMap(); - public source MakeColladaTangents(string name) + int index = 0; + var tangents = new float[Deduplicator.Normals.Uniques.Count * 3]; + foreach (var ntb in Deduplicator.Normals.Uniques) { - EnsureDeduplicationMap(); + var tangent = ntb.Row1; + tangents[index++] = tangent[0]; + tangents[index++] = tangent[1]; + tangents[index++] = tangent[2]; + } - int index = 0; - var tangents = new float[Deduplicator.Normals.Uniques.Count * 3]; - foreach (var ntb in Deduplicator.Normals.Uniques) - { - var tangent = ntb.Row1; - tangents[index++] = tangent[0]; - tangents[index++] = tangent[1]; - tangents[index++] = tangent[2]; - } + return ColladaUtils.MakeFloatSource(name, "tangents", ["X", "Y", "Z"], tangents); + } - return ColladaUtils.MakeFloatSource(name, "tangents", ["X", "Y", "Z"], tangents); - } + public source MakeColladaBinormals(string name) + { + EnsureDeduplicationMap(); - public source MakeColladaBinormals(string name) + int index = 0; + var binormals = new float[Deduplicator.Normals.Uniques.Count * 3]; + foreach (var ntb in Deduplicator.Normals.Uniques) { - EnsureDeduplicationMap(); + var binormal = 
ntb.Row2; + binormals[index++] = binormal[0]; + binormals[index++] = binormal[1]; + binormals[index++] = binormal[2]; + } - int index = 0; - var binormals = new float[Deduplicator.Normals.Uniques.Count * 3]; - foreach (var ntb in Deduplicator.Normals.Uniques) - { - var binormal = ntb.Row2; - binormals[index++] = binormal[0]; - binormals[index++] = binormal[1]; - binormals[index++] = binormal[2]; - } + return ColladaUtils.MakeFloatSource(name, "binormals", ["X", "Y", "Z"], binormals); + } - return ColladaUtils.MakeFloatSource(name, "binormals", ["X", "Y", "Z"], binormals); - } + public source MakeColladaUVs(string name, int uvIndex, bool flip) + { + EnsureDeduplicationMap(); - public source MakeColladaUVs(string name, int uvIndex, bool flip) + int index = 0; + var uvs = new float[Deduplicator.UVs[uvIndex].Uniques.Count * 2]; + foreach (var uv in Deduplicator.UVs[uvIndex].Uniques) { - EnsureDeduplicationMap(); + uvs[index++] = uv[0]; + if (flip) + uvs[index++] = 1.0f - uv[1]; + else + uvs[index++] = uv[1]; + } - int index = 0; - var uvs = new float[Deduplicator.UVs[uvIndex].Uniques.Count * 2]; - foreach (var uv in Deduplicator.UVs[uvIndex].Uniques) - { - uvs[index++] = uv[0]; - if (flip) - uvs[index++] = 1.0f - uv[1]; - else - uvs[index++] = uv[1]; - } + return ColladaUtils.MakeFloatSource(name, "uvs" + uvIndex.ToString(), ["S", "T"], uvs); + } - return ColladaUtils.MakeFloatSource(name, "uvs" + uvIndex.ToString(), ["S", "T"], uvs); - } + public source MakeColladaColors(string name, int setIndex) + { + EnsureDeduplicationMap(); - public source MakeColladaColors(string name, int setIndex) + int index = 0; + var colors = new float[Deduplicator.Colors[setIndex].Uniques.Count * 3]; + foreach (var color in Deduplicator.Colors[setIndex].Uniques) { - EnsureDeduplicationMap(); + colors[index++] = color[0]; + colors[index++] = color[1]; + colors[index++] = color[2]; + } - int index = 0; - var colors = new float[Deduplicator.Colors[setIndex].Uniques.Count * 3]; - foreach (var color in Deduplicator.Colors[setIndex].Uniques) - { - colors[index++] = color[0]; - colors[index++] = color[1]; - colors[index++] = color[2]; - } + return ColladaUtils.MakeFloatSource(name, "colors" + setIndex.ToString(), ["R", "G", "B"], colors); + } - return ColladaUtils.MakeFloatSource(name, "colors" + setIndex.ToString(), ["R", "G", "B"], colors); - } + public source MakeBoneWeights(string name) + { + EnsureDeduplicationMap(); - public source MakeBoneWeights(string name) + var weights = new List(Deduplicator.Vertices.Uniques.Count); + foreach (var vertex in Deduplicator.Vertices.Uniques) { - EnsureDeduplicationMap(); - - var weights = new List(Deduplicator.Vertices.Uniques.Count); - foreach (var vertex in Deduplicator.Vertices.Uniques) + var boneWeights = vertex.Weights; + for (int i = 0; i < 4; i++) { - var boneWeights = vertex.Weights; - for (int i = 0; i < 4; i++) - { - if (boneWeights[i] > 0) - weights.Add(boneWeights[i] / 255.0f); - } + if (boneWeights[i] > 0) + weights.Add(boneWeights[i] / 255.0f); } - - return ColladaUtils.MakeFloatSource(name, "weights", ["WEIGHT"], weights.ToArray()); } - public void Transform(Matrix4 transformation) - { - var inverse = transformation.Inverted(); + return ColladaUtils.MakeFloatSource(name, "weights", ["WEIGHT"], weights.ToArray()); + } - foreach (var vertex in Vertices) - { - vertex.Transform(transformation, inverse); - } + public void Transform(Matrix4 transformation) + { + var inverse = transformation.Inverted(); + + foreach (var vertex in Vertices) + { + 
vertex.Transform(transformation, inverse); } + } - public void Flip() + public void Flip() + { + foreach (var vertex in Vertices) { - foreach (var vertex in Vertices) - { - vertex.Position.X *= -1; - vertex.Normal.X *= -1; - vertex.Tangent.X *= -1; - vertex.Binormal.X *= -1; - } + vertex.Position.X *= -1; + vertex.Normal.X *= -1; + vertex.Tangent.X *= -1; + vertex.Binormal.X *= -1; + } - if (Deduplicator == null) return; + if (Deduplicator == null) return; - for (var i = 0; i < Deduplicator.Vertices.Uniques.Count; i++) - { - var vert = Deduplicator.Vertices.Uniques[i]; - vert.Position.X *= -1; - Deduplicator.Vertices.Uniques[i] = vert; - } + for (var i = 0; i < Deduplicator.Vertices.Uniques.Count; i++) + { + var vert = Deduplicator.Vertices.Uniques[i]; + vert.Position.X *= -1; + Deduplicator.Vertices.Uniques[i] = vert; + } - for (var i = 0; i < Deduplicator.Normals.Uniques.Count; i++) - { - var normal = Deduplicator.Normals.Uniques[i]; - normal.Row0.X *= -1; // vertex.Normal.X - normal.Row1.X *= -1; // vertex.Tangent.X - normal.Row2.X *= -1; // vertex.Binormal.X - Deduplicator.Normals.Uniques[i] = normal; - } + for (var i = 0; i < Deduplicator.Normals.Uniques.Count; i++) + { + var normal = Deduplicator.Normals.Uniques[i]; + normal.Row0.X *= -1; // vertex.Normal.X + normal.Row1.X *= -1; // vertex.Tangent.X + normal.Row2.X *= -1; // vertex.Binormal.X + Deduplicator.Normals.Uniques[i] = normal; } } +} - public class TriTopologyGroup - { - public int MaterialIndex; - public int TriFirst; - public int TriCount; - } +public class TriTopologyGroup +{ + public int MaterialIndex; + public int TriFirst; + public int TriCount; +} - public class TriIndex - { - public Int32 Int32; - } +public class TriIndex +{ + public Int32 Int32; +} - public class TriIndex16 - { - public Int16 Int16; - } +public class TriIndex16 +{ + public Int16 Int16; +} - public class TriAnnotationSet - { - public string Name; - [Serialization(Type = MemberType.ReferenceToVariantArray)] - public object TriAnnotations; - public Int32 IndicesMapFromTriToAnnotation; - [Serialization(Prototype = typeof(TriIndex), Kind = SerializationKind.UserMember, Serializer = typeof(Int32ListSerializer))] - public List TriAnnotationIndices; - } +public class TriAnnotationSet +{ + public string Name; + [Serialization(Type = MemberType.ReferenceToVariantArray)] + public object TriAnnotations; + public Int32 IndicesMapFromTriToAnnotation; + [Serialization(Prototype = typeof(TriIndex), Kind = SerializationKind.UserMember, Serializer = typeof(Int32ListSerializer))] + public List TriAnnotationIndices; +} - public class TriTopologySectionSelector : SectionSelector +public class TriTopologySectionSelector : SectionSelector +{ + public SectionType SelectSection(MemberDefinition member, Type type, object obj) { - public SectionType SelectSection(MemberDefinition member, Type type, object obj) + if (obj is TriTopology) { - if (obj is TriTopology) - { - return ((TriTopology)obj).SerializationSection; - } - else - { - return SectionType.Invalid; - } + return ((TriTopology)obj).SerializationSection; + } + else + { + return SectionType.Invalid; + } +} } - } - public class TriTopology +public class TriTopology +{ + public List Groups; + [Serialization(Prototype = typeof(TriIndex), Kind = SerializationKind.UserMember, Serializer = typeof(Int32ListSerializer))] + public List Indices; + [Serialization(Prototype = typeof(TriIndex16), Kind = SerializationKind.UserMember, Serializer = typeof(UInt16ListSerializer))] + public List Indices16; + [Serialization(Prototype = 
typeof(TriIndex), Kind = SerializationKind.UserMember, Serializer = typeof(Int32ListSerializer))] + public List VertexToVertexMap; + [Serialization(Prototype = typeof(TriIndex), Kind = SerializationKind.UserMember, Serializer = typeof(Int32ListSerializer))] + public List VertexToTriangleMap; + [Serialization(Prototype = typeof(TriIndex), Kind = SerializationKind.UserMember, Serializer = typeof(Int32ListSerializer))] + public List SideToNeighborMap; + [Serialization(Prototype = typeof(TriIndex), Kind = SerializationKind.UserMember, Serializer = typeof(Int32ListSerializer), MinVersion = 0x80000038)] + public List PolygonIndexStarts; + [Serialization(Prototype = typeof(TriIndex), Kind = SerializationKind.UserMember, Serializer = typeof(Int32ListSerializer), MinVersion = 0x80000038)] + public List PolygonIndices; + [Serialization(Prototype = typeof(TriIndex), Kind = SerializationKind.UserMember, Serializer = typeof(Int32ListSerializer))] + public List BonesForTriangle; + [Serialization(Prototype = typeof(TriIndex), Kind = SerializationKind.UserMember, Serializer = typeof(Int32ListSerializer))] + public List TriangleToBoneIndices; + public List TriAnnotationSets; + + [Serialization(Kind = SerializationKind.None)] + public SectionType SerializationSection = SectionType.Invalid; + + public void ChangeWindingOrder() { - public List Groups; - [Serialization(Prototype = typeof(TriIndex), Kind = SerializationKind.UserMember, Serializer = typeof(Int32ListSerializer))] - public List Indices; - [Serialization(Prototype = typeof(TriIndex16), Kind = SerializationKind.UserMember, Serializer = typeof(UInt16ListSerializer))] - public List Indices16; - [Serialization(Prototype = typeof(TriIndex), Kind = SerializationKind.UserMember, Serializer = typeof(Int32ListSerializer))] - public List VertexToVertexMap; - [Serialization(Prototype = typeof(TriIndex), Kind = SerializationKind.UserMember, Serializer = typeof(Int32ListSerializer))] - public List VertexToTriangleMap; - [Serialization(Prototype = typeof(TriIndex), Kind = SerializationKind.UserMember, Serializer = typeof(Int32ListSerializer))] - public List SideToNeighborMap; - [Serialization(Prototype = typeof(TriIndex), Kind = SerializationKind.UserMember, Serializer = typeof(Int32ListSerializer), MinVersion = 0x80000038)] - public List PolygonIndexStarts; - [Serialization(Prototype = typeof(TriIndex), Kind = SerializationKind.UserMember, Serializer = typeof(Int32ListSerializer), MinVersion = 0x80000038)] - public List PolygonIndices; - [Serialization(Prototype = typeof(TriIndex), Kind = SerializationKind.UserMember, Serializer = typeof(Int32ListSerializer))] - public List BonesForTriangle; - [Serialization(Prototype = typeof(TriIndex), Kind = SerializationKind.UserMember, Serializer = typeof(Int32ListSerializer))] - public List TriangleToBoneIndices; - public List TriAnnotationSets; - - [Serialization(Kind = SerializationKind.None)] - public SectionType SerializationSection = SectionType.Invalid; - - public void ChangeWindingOrder() - { - if (Indices != null) + if (Indices != null) + { + var tris = Indices.Count / 3; + for (var i = 0; i < tris; i++) { - var tris = Indices.Count / 3; - for (var i = 0; i < tris; i++) - { - var v1 = Indices[i * 3 + 1]; - Indices[i * 3 + 1] = Indices[i * 3 + 2]; - Indices[i * 3 + 2] = v1; - } + var v1 = Indices[i * 3 + 1]; + Indices[i * 3 + 1] = Indices[i * 3 + 2]; + Indices[i * 3 + 2] = v1; } + } - if (Indices16 != null) + if (Indices16 != null) + { + var tris = Indices16.Count / 3; + for (var i = 0; i < tris; i++) { - var tris 
= Indices16.Count / 3; - for (var i = 0; i < tris; i++) - { - var v1 = Indices16[i * 3 + 1]; - Indices16[i * 3 + 1] = Indices16[i * 3 + 2]; - Indices16[i * 3 + 2] = v1; - } + var v1 = Indices16[i * 3 + 1]; + Indices16[i * 3 + 1] = Indices16[i * 3 + 2]; + Indices16[i * 3 + 2] = v1; } } + } - public void PostLoad() + public void PostLoad() + { + // Convert 16-bit vertex indices to 32-bit indices + // (for convenience, so we won't have to handle both Indices and Indices16 in all code paths) + if (Indices16 != null) { - // Convert 16-bit vertex indices to 32-bit indices - // (for convenience, so we won't have to handle both Indices and Indices16 in all code paths) - if (Indices16 != null) + Indices = new List(Indices16.Count); + foreach (var index in Indices16) { - Indices = new List(Indices16.Count); - foreach (var index in Indices16) - { - Indices.Add(index); - } - - Indices16 = null; + Indices.Add(index); } + + Indices16 = null; } + } - public triangles MakeColladaTriangles(InputLocalOffset[] inputs, - Dictionary positionMaps, - Dictionary normalMaps, - List> uvMaps, - List> colorMaps) - { - int numTris = (from grp in Groups - select grp.TriCount).Sum(); + public triangles MakeColladaTriangles(InputLocalOffset[] inputs, + Dictionary positionMaps, + Dictionary normalMaps, + List> uvMaps, + List> colorMaps) + { + int numTris = (from grp in Groups + select grp.TriCount).Sum(); - var tris = new triangles - { - count = (ulong)numTris, - input = inputs - }; + var tris = new triangles + { + count = (ulong)numTris, + input = inputs + }; - List> inputMaps = []; - int uvIndex = 0, colorIndex = 0; - for (int i = 0; i < inputs.Length; i++) - { - var input = inputs[i]; - switch (input.semantic) - { - case "VERTEX": inputMaps.Add(positionMaps); break; - case "NORMAL": - case "TANGENT": - case "BINORMAL": - case "TEXTANGENT": - case "TEXBINORMAL": - inputMaps.Add(normalMaps); break; - case "TEXCOORD": inputMaps.Add(uvMaps[uvIndex]); uvIndex++; break; - case "COLOR": inputMaps.Add(colorMaps[colorIndex]); colorIndex++; break; - default: throw new InvalidOperationException("No input maps available for semantic " + input.semantic); - } + List> inputMaps = []; + int uvIndex = 0, colorIndex = 0; + for (int i = 0; i < inputs.Length; i++) + { + var input = inputs[i]; + switch (input.semantic) + { + case "VERTEX": inputMaps.Add(positionMaps); break; + case "NORMAL": + case "TANGENT": + case "BINORMAL": + case "TEXTANGENT": + case "TEXBINORMAL": + inputMaps.Add(normalMaps); break; + case "TEXCOORD": inputMaps.Add(uvMaps[uvIndex]); uvIndex++; break; + case "COLOR": inputMaps.Add(colorMaps[colorIndex]); colorIndex++; break; + default: throw new InvalidOperationException("No input maps available for semantic " + input.semantic); } + } - var indicesBuilder = new StringBuilder(); - foreach (var group in Groups) + var indicesBuilder = new StringBuilder(); + foreach (var group in Groups) + { + var indices = Indices; + for (int index = group.TriFirst; index < group.TriFirst + group.TriCount; index++) { - var indices = Indices; - for (int index = group.TriFirst; index < group.TriFirst + group.TriCount; index++) + int firstIdx = index * 3; + for (int vertIndex = 0; vertIndex < 3; vertIndex++) { - int firstIdx = index * 3; - for (int vertIndex = 0; vertIndex < 3; vertIndex++) + for (int i = 0; i < inputs.Length; i++) { - for (int i = 0; i < inputs.Length; i++) - { - indicesBuilder.Append(inputMaps[i][indices[firstIdx + vertIndex]]); - indicesBuilder.Append(" "); - } + indicesBuilder.Append(inputMaps[i][indices[firstIdx + 
vertIndex]]); + indicesBuilder.Append(" "); } } } - - tris.p = indicesBuilder.ToString(); - return tris; } - } - public class BoneBinding - { - public string BoneName; - [Serialization(ArraySize = 3)] - public float[] OBBMin; - [Serialization(ArraySize = 3)] - public float[] OBBMax; - [Serialization(Prototype = typeof(TriIndex), Kind = SerializationKind.UserMember, Serializer = typeof(Int32ListSerializer))] - public List TriangleIndices; + tris.p = indicesBuilder.ToString(); + return tris; } +} - public class MaterialReference - { - public string Usage; - public Material Map; - } +public class BoneBinding +{ + public string BoneName; + [Serialization(ArraySize = 3)] + public float[] OBBMin; + [Serialization(ArraySize = 3)] + public float[] OBBMax; + [Serialization(Prototype = typeof(TriIndex), Kind = SerializationKind.UserMember, Serializer = typeof(Int32ListSerializer))] + public List TriangleIndices; +} - public class TextureLayout - { - public Int32 BytesPerPixel; - [Serialization(ArraySize = 4)] - public Int32[] ShiftForComponent; - [Serialization(ArraySize = 4)] - public Int32[] BitsForComponent; - } +public class MaterialReference +{ + public string Usage; + public Material Map; +} - public class PixelByte - { - public Byte UInt8; - } +public class TextureLayout +{ + public Int32 BytesPerPixel; + [Serialization(ArraySize = 4)] + public Int32[] ShiftForComponent; + [Serialization(ArraySize = 4)] + public Int32[] BitsForComponent; +} - public class TextureMipLevel - { - public Int32 Stride; - public List PixelBytes; - } +public class PixelByte +{ + public Byte UInt8; +} - public class TextureImage - { - public List MIPLevels; - } +public class TextureMipLevel +{ + public Int32 Stride; + public List PixelBytes; +} - public class Texture - { - public string FromFileName; - public Int32 TextureType; - public Int32 Width; - public Int32 Height; - public Int32 Encoding; - public Int32 SubFormat; - [Serialization(Type = MemberType.Inline)] - public TextureLayout Layout; - public List Images; - public object ExtendedData; - } +public class TextureImage +{ + public List MIPLevels; +} - public class Material - { - public string Name; - public List Maps; - public Texture Texture; - public object ExtendedData; - } +public class Texture +{ + public string FromFileName; + public Int32 TextureType; + public Int32 Width; + public Int32 Height; + public Int32 Encoding; + public Int32 SubFormat; + [Serialization(Type = MemberType.Inline)] + public TextureLayout Layout; + public List Images; + public object ExtendedData; +} - public class MaterialBinding - { - public Material Material; - } +public class Material +{ + public string Name; + public List Maps; + public Texture Texture; + public object ExtendedData; +} - public class MorphTarget +public class MaterialBinding +{ + public Material Material; +} + +public class MorphTarget +{ + public string ScalarName; + [Serialization(SectionSelector = typeof(VertexDataSectionSelector))] + public VertexData VertexData; + public Int32 DataIsDeltas; +} + +public class Mesh +{ + public string Name; + [Serialization(SectionSelector = typeof(VertexDataSectionSelector))] + public VertexData PrimaryVertexData; + public List MorphTargets; + [Serialization(SectionSelector = typeof(TriTopologySectionSelector))] + public TriTopology PrimaryTopology; + [Serialization(DataArea = true)] + public List MaterialBindings; + public List BoneBindings; + [Serialization(Type = MemberType.VariantReference)] + public DivinityMeshExtendedData ExtendedData; + + [Serialization(Kind = 
SerializationKind.None)] + public Dictionary> OriginalToConsolidatedVertexIndexMap; + + [Serialization(Kind = SerializationKind.None)] + public VertexDescriptor VertexFormat; + + [Serialization(Kind = SerializationKind.None)] + public int ExportOrder = -1; + + public void PostLoad() { - public string ScalarName; - [Serialization(SectionSelector = typeof(VertexDataSectionSelector))] - public VertexData VertexData; - public Int32 DataIsDeltas; + if (PrimaryVertexData.Vertices.Count > 0) + { + VertexFormat = PrimaryVertexData.Vertices[0].Format; + } + + if (ExtendedData != null && ExtendedData.UserMeshProperties.MeshFlags == 0) + { + ExtendedData.UserMeshProperties.MeshFlags = AutodetectMeshFlags(); + } } - - public class Mesh + public DivinityModelFlag AutodetectMeshFlags() { - public string Name; - [Serialization(SectionSelector = typeof(VertexDataSectionSelector))] - public VertexData PrimaryVertexData; - public List MorphTargets; - [Serialization(SectionSelector = typeof(TriTopologySectionSelector))] - public TriTopology PrimaryTopology; - [Serialization(DataArea = true)] - public List MaterialBindings; - public List BoneBindings; - [Serialization(Type = MemberType.VariantReference)] - public DivinityMeshExtendedData ExtendedData; - - [Serialization(Kind = SerializationKind.None)] - public Dictionary> OriginalToConsolidatedVertexIndexMap; - - [Serialization(Kind = SerializationKind.None)] - public VertexDescriptor VertexFormat; - - [Serialization(Kind = SerializationKind.None)] - public int ExportOrder = -1; - - public void PostLoad() - { - if (PrimaryVertexData.Vertices.Count > 0) - { - VertexFormat = PrimaryVertexData.Vertices[0].Format; - } + DivinityModelFlag flags = 0; - if (ExtendedData != null && ExtendedData.UserMeshProperties.MeshFlags == 0) - { - ExtendedData.UserMeshProperties.MeshFlags = AutodetectMeshFlags(); - } - } - public DivinityModelFlag AutodetectMeshFlags() + if (ExtendedData != null + && ExtendedData.UserMeshProperties != null + && ExtendedData.UserMeshProperties.MeshFlags != 0) { - DivinityModelFlag flags = 0; + return ExtendedData.UserMeshProperties.MeshFlags; + } - if (ExtendedData != null - && ExtendedData.UserMeshProperties != null - && ExtendedData.UserMeshProperties.MeshFlags != 0) + if (ExtendedData != null + && ExtendedData.UserDefinedProperties != null) + { + flags = UserDefinedPropertiesHelpers.UserDefinedPropertiesToMeshType(ExtendedData.UserDefinedProperties); + } + else + { + // Only mark model as cloth if it has colored vertices + if (VertexFormat.ColorMaps > 0) { - return ExtendedData.UserMeshProperties.MeshFlags; + flags |= DivinityModelFlag.Cloth; } - if (ExtendedData != null - && ExtendedData.UserDefinedProperties != null) + if (!VertexFormat.HasBoneWeights) { - flags = UserDefinedPropertiesHelpers.UserDefinedPropertiesToMeshType(ExtendedData.UserDefinedProperties); + flags |= DivinityModelFlag.Rigid; } - else - { - // Only mark model as cloth if it has colored vertices - if (VertexFormat.ColorMaps > 0) - { - flags |= DivinityModelFlag.Cloth; - } + } - if (!VertexFormat.HasBoneWeights) - { - flags |= DivinityModelFlag.Rigid; - } - } + return flags; + } - return flags; + public List VertexComponentNames() + { + if (PrimaryVertexData.VertexComponentNames != null + && PrimaryVertexData.VertexComponentNames.Count > 0 + && PrimaryVertexData.VertexComponentNames[0].String != "") + { + return PrimaryVertexData.VertexComponentNames.Select(s => s.String).ToList(); } - - public List VertexComponentNames() + else if (PrimaryVertexData.Vertices != null + && 
PrimaryVertexData.Vertices.Count > 0) { - if (PrimaryVertexData.VertexComponentNames != null - && PrimaryVertexData.VertexComponentNames.Count > 0 - && PrimaryVertexData.VertexComponentNames[0].String != "") - { - return PrimaryVertexData.VertexComponentNames.Select(s => s.String).ToList(); - } - else if (PrimaryVertexData.Vertices != null - && PrimaryVertexData.Vertices.Count > 0) - { - return PrimaryVertexData.Vertices[0].Format.ComponentNames(); - } - else - { - throw new ParsingException("Unable to determine mesh component list: No vertices and vertex component names available."); - } + return PrimaryVertexData.Vertices[0].Format.ComponentNames(); } - - public bool IsSkinned() + else { - // Check if we have both the BoneWeights and BoneIndices vertex components. - bool hasWeights = false, hasIndices = false; + throw new ParsingException("Unable to determine mesh component list: No vertices and vertex component names available."); + } + } - // If we have vertices, check the vertex prototype, as VertexComponentNames is unreliable. - if (PrimaryVertexData.Vertices.Count > 0) - { - var desc = PrimaryVertexData.Vertices[0].Format; - hasWeights = hasIndices = desc.HasBoneWeights; - } - else + public bool IsSkinned() + { + // Check if we have both the BoneWeights and BoneIndices vertex components. + bool hasWeights = false, hasIndices = false; + + // If we have vertices, check the vertex prototype, as VertexComponentNames is unreliable. + if (PrimaryVertexData.Vertices.Count > 0) + { + var desc = PrimaryVertexData.Vertices[0].Format; + hasWeights = hasIndices = desc.HasBoneWeights; + } + else + { + // Otherwise try to figure out the components from VertexComponentNames + foreach (var component in PrimaryVertexData.VertexComponentNames) { - // Otherwise try to figure out the components from VertexComponentNames - foreach (var component in PrimaryVertexData.VertexComponentNames) - { - if (component.String == "BoneWeights") - hasWeights = true; - else if (component.String == "BoneIndices") - hasIndices = true; - } + if (component.String == "BoneWeights") + hasWeights = true; + else if (component.String == "BoneIndices") + hasIndices = true; } - - return hasWeights && hasIndices; } + + return hasWeights && hasIndices; } } diff --git a/LSLib/Granny/Model/Metadata.cs b/LSLib/Granny/Model/Metadata.cs index 93bf54b6..345152af 100644 --- a/LSLib/Granny/Model/Metadata.cs +++ b/LSLib/Granny/Model/Metadata.cs @@ -1,50 +1,49 @@ using System; using LSLib.Granny.GR2; -namespace LSLib.Granny.Model -{ - public class ArtToolInfo - { - public String FromArtToolName; - public Int32 ArtToolMajorRevision; - public Int32 ArtToolMinorRevision; - [Serialization(MinVersion = 0x80000011)] - public Int32 ArtToolPointerSize; - public Single UnitsPerMeter; - [Serialization(ArraySize = 3)] - public Single[] Origin; - [Serialization(ArraySize = 3)] - public Single[] RightVector; - [Serialization(ArraySize = 3)] - public Single[] UpVector; - [Serialization(ArraySize = 3)] - public Single[] BackVector; - [Serialization(Type = MemberType.VariantReference, MinVersion = 0x80000011)] - public object ExtendedData; +namespace LSLib.Granny.Model; - public void SetYUp() - { - RightVector = new float[] { 1, 0, 0 }; - UpVector = new float[] { 0, 1, 0 }; - BackVector = new float[] { 0, 0, -1 }; - } +public class ArtToolInfo +{ + public String FromArtToolName; + public Int32 ArtToolMajorRevision; + public Int32 ArtToolMinorRevision; + [Serialization(MinVersion = 0x80000011)] + public Int32 ArtToolPointerSize; + public Single 
UnitsPerMeter; + [Serialization(ArraySize = 3)] + public Single[] Origin; + [Serialization(ArraySize = 3)] + public Single[] RightVector; + [Serialization(ArraySize = 3)] + public Single[] UpVector; + [Serialization(ArraySize = 3)] + public Single[] BackVector; + [Serialization(Type = MemberType.VariantReference, MinVersion = 0x80000011)] + public object ExtendedData; - public void SetZUp() - { - RightVector = new float[] { 1, 0, 0 }; - UpVector = new float[] { 0, 0, 1 }; - BackVector = new float[] { 0, 1, 0 }; - } + public void SetYUp() + { + RightVector = new float[] { 1, 0, 0 }; + UpVector = new float[] { 0, 1, 0 }; + BackVector = new float[] { 0, 0, -1 }; } - public class ExporterInfo + public void SetZUp() { - public String ExporterName; - public Int32 ExporterMajorRevision; - public Int32 ExporterMinorRevision; - public Int32 ExporterCustomization; - public Int32 ExporterBuildNumber; - [Serialization(Type = MemberType.VariantReference, MinVersion = 0x80000011)] - public object ExtendedData; + RightVector = new float[] { 1, 0, 0 }; + UpVector = new float[] { 0, 0, 1 }; + BackVector = new float[] { 0, 1, 0 }; } } + +public class ExporterInfo +{ + public String ExporterName; + public Int32 ExporterMajorRevision; + public Int32 ExporterMinorRevision; + public Int32 ExporterCustomization; + public Int32 ExporterBuildNumber; + [Serialization(Type = MemberType.VariantReference, MinVersion = 0x80000011)] + public object ExtendedData; +} diff --git a/LSLib/Granny/Model/Model.cs b/LSLib/Granny/Model/Model.cs index 8a4880eb..f3578f60 100644 --- a/LSLib/Granny/Model/Model.cs +++ b/LSLib/Granny/Model/Model.cs @@ -1,21 +1,20 @@ using System.Collections.Generic; using LSLib.Granny.GR2; -namespace LSLib.Granny.Model +namespace LSLib.Granny.Model; + +public class MeshBinding { - public class MeshBinding - { - public Mesh Mesh; - } + public Mesh Mesh; +} - public class Model - { - public string Name; - public Skeleton Skeleton; - public Transform InitialPlacement; - [Serialization(DataArea = true)] - public List MeshBindings; - [Serialization(Type = MemberType.VariantReference, MinVersion = 0x80000027)] - public object ExtendedData; - } +public class Model +{ + public string Name; + public Skeleton Skeleton; + public Transform InitialPlacement; + [Serialization(DataArea = true)] + public List MeshBindings; + [Serialization(Type = MemberType.VariantReference, MinVersion = 0x80000027)] + public object ExtendedData; } diff --git a/LSLib/Granny/Model/Root.cs b/LSLib/Granny/Model/Root.cs index e8c44a03..c54e819a 100644 --- a/LSLib/Granny/Model/Root.cs +++ b/LSLib/Granny/Model/Root.cs @@ -4,157 +4,156 @@ using LSLib.Granny.GR2; using OpenTK.Mathematics; -namespace LSLib.Granny.Model +namespace LSLib.Granny.Model; + +public class Root { - public class Root + public ArtToolInfo ArtToolInfo; + public ExporterInfo ExporterInfo; + public string FromFileName; + [Serialization(Type = MemberType.ArrayOfReferences)] + public List Textures; + [Serialization(Type = MemberType.ArrayOfReferences)] + public List Materials; + [Serialization(Section = SectionType.Skeleton, Type = MemberType.ArrayOfReferences)] + public List Skeletons; + [Serialization(Type = MemberType.ArrayOfReferences, SectionSelector = typeof(VertexDataSectionSelector))] + public List VertexDatas; + [Serialization(Type = MemberType.ArrayOfReferences, SectionSelector = typeof(TriTopologySectionSelector))] + public List TriTopologies; + [Serialization(Section = SectionType.Mesh, Type = MemberType.ArrayOfReferences)] + public List Meshes; + 
[Serialization(Type = MemberType.ArrayOfReferences)] + public List Models; + [Serialization(Section = SectionType.TrackGroup, Type = MemberType.ArrayOfReferences)] + public List TrackGroups; + [Serialization(Type = MemberType.ArrayOfReferences)] + public List Animations; + [Serialization(Type = MemberType.VariantReference)] + public object ExtendedData; + + [Serialization(Kind = SerializationKind.None)] + public bool ZUp = false; + [Serialization(Kind = SerializationKind.None)] + public UInt32 GR2Tag; + + + public void TransformVertices(Matrix4 transformation) { - public ArtToolInfo ArtToolInfo; - public ExporterInfo ExporterInfo; - public string FromFileName; - [Serialization(Type = MemberType.ArrayOfReferences)] - public List Textures; - [Serialization(Type = MemberType.ArrayOfReferences)] - public List Materials; - [Serialization(Section = SectionType.Skeleton, Type = MemberType.ArrayOfReferences)] - public List Skeletons; - [Serialization(Type = MemberType.ArrayOfReferences, SectionSelector = typeof(VertexDataSectionSelector))] - public List VertexDatas; - [Serialization(Type = MemberType.ArrayOfReferences, SectionSelector = typeof(TriTopologySectionSelector))] - public List TriTopologies; - [Serialization(Section = SectionType.Mesh, Type = MemberType.ArrayOfReferences)] - public List Meshes; - [Serialization(Type = MemberType.ArrayOfReferences)] - public List Models; - [Serialization(Section = SectionType.TrackGroup, Type = MemberType.ArrayOfReferences)] - public List TrackGroups; - [Serialization(Type = MemberType.ArrayOfReferences)] - public List Animations; - [Serialization(Type = MemberType.VariantReference)] - public object ExtendedData; - - [Serialization(Kind = SerializationKind.None)] - public bool ZUp = false; - [Serialization(Kind = SerializationKind.None)] - public UInt32 GR2Tag; - - - public void TransformVertices(Matrix4 transformation) + if (VertexDatas != null) { - if (VertexDatas != null) + foreach (var vertexData in VertexDatas) { - foreach (var vertexData in VertexDatas) - { - vertexData.Transform(transformation); - } + vertexData.Transform(transformation); } } + } - public void TransformSkeletons(Matrix4 transformation) + public void TransformSkeletons(Matrix4 transformation) + { + if (Skeletons != null) { - if (Skeletons != null) + foreach (var skeleton in Skeletons) { - foreach (var skeleton in Skeletons) - { - skeleton.TransformRoots(transformation); - } + skeleton.TransformRoots(transformation); } } + } - public void ConvertToYUp(bool transformSkeletons) - { - if (!ZUp) return; + public void ConvertToYUp(bool transformSkeletons) + { + if (!ZUp) return; - var transform = Matrix4.CreateRotationX((float)(-0.5 * Math.PI)); - TransformVertices(transform); - if (transformSkeletons) - { - TransformSkeletons(transform); - } + var transform = Matrix4.CreateRotationX((float)(-0.5 * Math.PI)); + TransformVertices(transform); + if (transformSkeletons) + { + TransformSkeletons(transform); + } - ArtToolInfo?.SetYUp(); + ArtToolInfo?.SetYUp(); - ZUp = false; - } + ZUp = false; + } - public void Flip(bool flipMesh, bool flipSkeleton) + public void Flip(bool flipMesh, bool flipSkeleton) + { + if (flipMesh && VertexDatas != null) { - if (flipMesh && VertexDatas != null) + foreach (var vertexData in VertexDatas) { - foreach (var vertexData in VertexDatas) - { - vertexData.Flip(); - } + vertexData.Flip(); } + } - if (flipSkeleton && Skeletons != null) + if (flipSkeleton && Skeletons != null) + { + foreach (var skeleton in Skeletons) { - foreach (var skeleton in Skeletons) - { - 
skeleton.Flip(); - } + skeleton.Flip(); } + } - if (flipMesh && TriTopologies != null) + if (flipMesh && TriTopologies != null) + { + foreach (var topology in TriTopologies) { - foreach (var topology in TriTopologies) - { - topology.ChangeWindingOrder(); - } + topology.ChangeWindingOrder(); } } + } - public void PostLoad(UInt32 tag) - { - GR2Tag = tag; + public void PostLoad(UInt32 tag) + { + GR2Tag = tag; - if (tag == Header.Tag_DOS2DE) - { - Flip(true, true); - } + if (tag == Header.Tag_DOS2DE) + { + Flip(true, true); + } - foreach (var vertexData in VertexDatas ?? Enumerable.Empty()) - { - vertexData.PostLoad(); - } + foreach (var vertexData in VertexDatas ?? Enumerable.Empty()) + { + vertexData.PostLoad(); + } - foreach (var triTopology in TriTopologies ?? Enumerable.Empty()) - { - triTopology.PostLoad(); - } + foreach (var triTopology in TriTopologies ?? Enumerable.Empty()) + { + triTopology.PostLoad(); + } - Meshes?.ForEach(m => m.PostLoad()); + Meshes?.ForEach(m => m.PostLoad()); - var modelIndex = 0; - foreach (var model in Models ?? Enumerable.Empty()) + var modelIndex = 0; + foreach (var model in Models ?? Enumerable.Empty()) + { + foreach (var binding in model.MeshBindings ?? Enumerable.Empty()) { - foreach (var binding in model.MeshBindings ?? Enumerable.Empty()) - { - binding.Mesh.ExportOrder = modelIndex++; - } + binding.Mesh.ExportOrder = modelIndex++; } + } - foreach (var skeleton in Skeletons ?? Enumerable.Empty()) - { - skeleton.PostLoad(this); - } + foreach (var skeleton in Skeletons ?? Enumerable.Empty()) + { + skeleton.PostLoad(this); + } - // Upgrade legacy animation formats - foreach (var group in TrackGroups ?? Enumerable.Empty()) + // Upgrade legacy animation formats + foreach (var group in TrackGroups ?? Enumerable.Empty()) + { + if (group.TransformTracks != null) { - if (group.TransformTracks != null) + foreach (var track in group.TransformTracks) { - foreach (var track in group.TransformTracks) - { - track.OrientationCurve.UpgradeToGr7(); - track.PositionCurve.UpgradeToGr7(); - track.ScaleShearCurve.UpgradeToGr7(); - } + track.OrientationCurve.UpgradeToGr7(); + track.PositionCurve.UpgradeToGr7(); + track.ScaleShearCurve.UpgradeToGr7(); } } } + } - public void PreSave() - { - } + public void PreSave() + { } } diff --git a/LSLib/Granny/Model/Skeleton.cs b/LSLib/Granny/Model/Skeleton.cs index 0ad614f4..1b7de565 100644 --- a/LSLib/Granny/Model/Skeleton.cs +++ b/LSLib/Granny/Model/Skeleton.cs @@ -5,275 +5,274 @@ using LSLib.Granny.GR2; using System.Xml; -namespace LSLib.Granny.Model +namespace LSLib.Granny.Model; + +public class DivinityBoneExtendedData { - public class DivinityBoneExtendedData - { - public String UserDefinedProperties; - public Int32 IsRigid; - } + public String UserDefinedProperties; + public Int32 IsRigid; +} - public class Bone +public class Bone +{ + public string Name; + public int ParentIndex; + public Transform Transform; + [Serialization(ArraySize = 16)] + public float[] InverseWorldTransform; + public float LODError; + [Serialization(Type = MemberType.VariantReference)] + public DivinityBoneExtendedData ExtendedData; + + [Serialization(Kind = SerializationKind.None)] + public string TransformSID; + [Serialization(Kind = SerializationKind.None)] + public Matrix4 OriginalTransform; + [Serialization(Kind = SerializationKind.None)] + public Matrix4 WorldTransform; + [Serialization(Kind = SerializationKind.None)] + public int ExportIndex = -1; + + public bool IsRoot { get { return ParentIndex == -1; } } + + public void UpdateWorldTransforms(List 
bones) { - public string Name; - public int ParentIndex; - public Transform Transform; - [Serialization(ArraySize = 16)] - public float[] InverseWorldTransform; - public float LODError; - [Serialization(Type = MemberType.VariantReference)] - public DivinityBoneExtendedData ExtendedData; - - [Serialization(Kind = SerializationKind.None)] - public string TransformSID; - [Serialization(Kind = SerializationKind.None)] - public Matrix4 OriginalTransform; - [Serialization(Kind = SerializationKind.None)] - public Matrix4 WorldTransform; - [Serialization(Kind = SerializationKind.None)] - public int ExportIndex = -1; - - public bool IsRoot { get { return ParentIndex == -1; } } - - public void UpdateWorldTransforms(List bones) + var localTransform = Transform.ToMatrix4Composite(); + if (IsRoot) { - var localTransform = Transform.ToMatrix4Composite(); - if (IsRoot) - { - WorldTransform = localTransform; - } - else - { - var parentBone = bones[ParentIndex]; - WorldTransform = localTransform * parentBone.WorldTransform; - } - - var iwt = WorldTransform.Inverted(); - InverseWorldTransform = [ - iwt[0, 0], iwt[0, 1], iwt[0, 2], iwt[0, 3], - iwt[1, 0], iwt[1, 1], iwt[1, 2], iwt[1, 3], - iwt[2, 0], iwt[2, 1], iwt[2, 2], iwt[2, 3], - iwt[3, 0], iwt[3, 1], iwt[3, 2], iwt[3, 3] - ]; + WorldTransform = localTransform; + } + else + { + var parentBone = bones[ParentIndex]; + WorldTransform = localTransform * parentBone.WorldTransform; } - private void ImportLSLibProfile(node node) + var iwt = WorldTransform.Inverted(); + InverseWorldTransform = [ + iwt[0, 0], iwt[0, 1], iwt[0, 2], iwt[0, 3], + iwt[1, 0], iwt[1, 1], iwt[1, 2], iwt[1, 3], + iwt[2, 0], iwt[2, 1], iwt[2, 2], iwt[2, 3], + iwt[3, 0], iwt[3, 1], iwt[3, 2], iwt[3, 3] + ]; + } + + private void ImportLSLibProfile(node node) + { + var extraData = ColladaImporter.FindExporterExtraData(node.extra); + if (extraData != null && extraData.Any != null) { - var extraData = ColladaImporter.FindExporterExtraData(node.extra); - if (extraData != null && extraData.Any != null) + foreach (var setting in extraData.Any) { - foreach (var setting in extraData.Any) + switch (setting.LocalName) { - switch (setting.LocalName) - { - case "BoneIndex": - ExportIndex = Int32.Parse(setting.InnerText.Trim()); - break; - - default: - Utils.Warn($"Unrecognized LSLib bone attribute: {setting.LocalName}"); - break; - } + case "BoneIndex": + ExportIndex = Int32.Parse(setting.InnerText.Trim()); + break; + + default: + Utils.Warn($"Unrecognized LSLib bone attribute: {setting.LocalName}"); + break; } } } + } - public static Bone FromCollada(node bone, int parentIndex, List bones, Dictionary boneSIDs, Dictionary boneIDs) + public static Bone FromCollada(node bone, int parentIndex, List bones, Dictionary boneSIDs, Dictionary boneIDs) + { + var transMat = ColladaHelpers.TransformFromNode(bone); + var myIndex = bones.Count; + var colladaBone = new Bone { - var transMat = ColladaHelpers.TransformFromNode(bone); - var myIndex = bones.Count; - var colladaBone = new Bone - { - TransformSID = transMat.TransformSID, - ParentIndex = parentIndex, - Name = bone.name, - LODError = 0, // TODO - OriginalTransform = transMat.transform, - Transform = Transform.FromMatrix4(transMat.transform) - }; - - if (bone.id != null) - { - boneIDs.Add(bone.id, colladaBone); - } + TransformSID = transMat.TransformSID, + ParentIndex = parentIndex, + Name = bone.name, + LODError = 0, // TODO + OriginalTransform = transMat.transform, + Transform = Transform.FromMatrix4(transMat.transform) + }; + + if (bone.id != null) + { + 
boneIDs.Add(bone.id, colladaBone); + } - bones.Add(colladaBone); - boneSIDs.Add(bone.sid, colladaBone); + bones.Add(colladaBone); + boneSIDs.Add(bone.sid, colladaBone); - colladaBone.UpdateWorldTransforms(bones); - colladaBone.ImportLSLibProfile(bone); + colladaBone.UpdateWorldTransforms(bones); + colladaBone.ImportLSLibProfile(bone); - if (bone.node1 != null) + if (bone.node1 != null) + { + foreach (var node in bone.node1) { - foreach (var node in bone.node1) + if (node.type == NodeType.JOINT) { - if (node.type == NodeType.JOINT) - { - FromCollada(node, myIndex, bones, boneSIDs, boneIDs); - } + FromCollada(node, myIndex, bones, boneSIDs, boneIDs); } } - - return colladaBone; } - private technique ExportLSLibProfile(XmlDocument Xml) + + return colladaBone; + } + private technique ExportLSLibProfile(XmlDocument Xml) + { + var profile = new technique() { - var profile = new technique() - { - profile = "LSTools" - }; - - var props = new List(); - var prop = Xml.CreateElement("BoneIndex"); - prop.InnerText = ExportIndex.ToString(); - props.Add(prop); - profile.Any = props.ToArray(); - return profile; - } + profile = "LSTools" + }; + + var props = new List(); + var prop = Xml.CreateElement("BoneIndex"); + prop.InnerText = ExportIndex.ToString(); + props.Add(prop); + profile.Any = props.ToArray(); + return profile; + } + + public node MakeCollada(XmlDocument Xml) + { + var mat = Transform.ToMatrix4(); + mat.Transpose(); - public node MakeCollada(XmlDocument Xml) + return new node { - var mat = Transform.ToMatrix4(); - mat.Transpose(); + id = "Bone_" + Name.Replace(' ', '_'), + name = Name, // .Replace(' ', '_'); + sid = Name.Replace(' ', '_'), + type = NodeType.JOINT, - return new node - { - id = "Bone_" + Name.Replace(' ', '_'), - name = Name, // .Replace(' ', '_'); - sid = Name.Replace(' ', '_'), - type = NodeType.JOINT, - - Items = [ - new matrix - { - sid = "Transform", - Values = [ - mat[0, 0], mat[0, 1], mat[0, 2], mat[0, 3], - mat[1, 0], mat[1, 1], mat[1, 2], mat[1, 3], - mat[2, 0], mat[2, 1], mat[2, 2], mat[2, 3], - mat[3, 0], mat[3, 1], mat[3, 2], mat[3, 3] - ] - } - ], - ItemsElementName = [ItemsChoiceType2.matrix], - - extra = - [ - new extra - { - technique = - [ - ExportLSLibProfile(Xml) - ] - } - ] - }; - } + Items = [ + new matrix + { + sid = "Transform", + Values = [ + mat[0, 0], mat[0, 1], mat[0, 2], mat[0, 3], + mat[1, 0], mat[1, 1], mat[1, 2], mat[1, 3], + mat[2, 0], mat[2, 1], mat[2, 2], mat[2, 3], + mat[3, 0], mat[3, 1], mat[3, 2], mat[3, 3] + ] + } + ], + ItemsElementName = [ItemsChoiceType2.matrix], + + extra = + [ + new extra + { + technique = + [ + ExportLSLibProfile(Xml) + ] + } + ] + }; } +} - public class Skeleton - { - public string Name; - public List Bones; - public int LODType; - [Serialization(Type = MemberType.VariantReference, MinVersion = 0x80000027)] - public object ExtendedData; +public class Skeleton +{ + public string Name; + public List Bones; + public int LODType; + [Serialization(Type = MemberType.VariantReference, MinVersion = 0x80000027)] + public object ExtendedData; - [Serialization(Kind = SerializationKind.None)] - public Dictionary BonesBySID; + [Serialization(Kind = SerializationKind.None)] + public Dictionary BonesBySID; - [Serialization(Kind = SerializationKind.None)] - public Dictionary BonesByID; + [Serialization(Kind = SerializationKind.None)] + public Dictionary BonesByID; - [Serialization(Kind = SerializationKind.None)] - public bool IsDummy = false; + [Serialization(Kind = SerializationKind.None)] + public bool IsDummy = false; - public 
static Skeleton FromCollada(node root) + public static Skeleton FromCollada(node root) + { + var skeleton = new Skeleton { - var skeleton = new Skeleton - { - Bones = [], - LODType = 1, - Name = root.name, - BonesBySID = [], - BonesByID = [] - }; - Bone.FromCollada(root, -1, skeleton.Bones, skeleton.BonesBySID, skeleton.BonesByID); - return skeleton; - } + Bones = [], + LODType = 1, + Name = root.name, + BonesBySID = [], + BonesByID = [] + }; + Bone.FromCollada(root, -1, skeleton.Bones, skeleton.BonesBySID, skeleton.BonesByID); + return skeleton; + } - public Bone GetBoneByName(string name) - { - return Bones.FirstOrDefault(b => b.Name == name); - } + public Bone GetBoneByName(string name) + { + return Bones.FirstOrDefault(b => b.Name == name); + } - public void TransformRoots(Matrix4 transform) + public void TransformRoots(Matrix4 transform) + { + foreach (var bone in Bones) { - foreach (var bone in Bones) + if (bone.IsRoot) { - if (bone.IsRoot) - { - var boneTransform = bone.Transform.ToMatrix4() * transform; - bone.Transform = GR2.Transform.FromMatrix4(boneTransform); - } + var boneTransform = bone.Transform.ToMatrix4() * transform; + bone.Transform = GR2.Transform.FromMatrix4(boneTransform); } - - UpdateWorldTransforms(); } - public void Flip() - { - foreach (var bone in Bones) if (bone.IsRoot) - { - bone.Transform.SetScale(new Vector3(-1, 1, 1)); - } + UpdateWorldTransforms(); + } - UpdateWorldTransforms(); + public void Flip() + { + foreach (var bone in Bones) if (bone.IsRoot) + { + bone.Transform.SetScale(new Vector3(-1, 1, 1)); } - public void UpdateWorldTransforms() + UpdateWorldTransforms(); + } + + public void UpdateWorldTransforms() + { + foreach (var bone in Bones) { - foreach (var bone in Bones) - { - bone.UpdateWorldTransforms(Bones); - } + bone.UpdateWorldTransforms(Bones); } + } - public void ReorderBones() + public void ReorderBones() + { + // Reorder bones based on their ExportOrder + if (Bones.Any(m => m.ExportIndex > -1)) { - // Reorder bones based on their ExportOrder - if (Bones.Any(m => m.ExportIndex > -1)) - { - var newBones = Bones.ToList(); - newBones.Sort((a, b) => a.ExportIndex - b.ExportIndex); + var newBones = Bones.ToList(); + newBones.Sort((a, b) => a.ExportIndex - b.ExportIndex); - // Fix up parent indices - foreach (var bone in newBones) + // Fix up parent indices + foreach (var bone in newBones) + { + if (bone.ParentIndex != -1) { - if (bone.ParentIndex != -1) - { - var parent = Bones[bone.ParentIndex]; - bone.ParentIndex = newBones.IndexOf(parent); - } + var parent = Bones[bone.ParentIndex]; + bone.ParentIndex = newBones.IndexOf(parent); } - - Bones = newBones; } + + Bones = newBones; } + } - public void PostLoad(Root root) + public void PostLoad(Root root) + { + var hasSkinnedMeshes = root.Models.Any((model) => model.Skeleton == this); + if (!hasSkinnedMeshes || Bones.Count == 1) { - var hasSkinnedMeshes = root.Models.Any((model) => model.Skeleton == this); - if (!hasSkinnedMeshes || Bones.Count == 1) - { - IsDummy = true; - Utils.Info(String.Format("Skeleton '{0}' marked as dummy", this.Name)); - } + IsDummy = true; + Utils.Info(String.Format("Skeleton '{0}' marked as dummy", this.Name)); + } - for (var i = 0; i < Bones.Count; i++) - { - Bones[i].ExportIndex = i; - } + for (var i = 0; i < Bones.Count; i++) + { + Bones[i].ExportIndex = i; } } } diff --git a/LSLib/Granny/Model/VertexSerialization.cs b/LSLib/Granny/Model/VertexSerialization.cs index 543b6d23..8f118345 100644 --- a/LSLib/Granny/Model/VertexSerialization.cs +++ 
b/LSLib/Granny/Model/VertexSerialization.cs @@ -5,603 +5,602 @@ using System.Reflection; using System.Reflection.Emit; -namespace LSLib.Granny.Model +namespace LSLib.Granny.Model; + +public static class VertexSerializationHelpers { - public static class VertexSerializationHelpers + public static Vector2 ReadVector2(GR2Reader reader) { - public static Vector2 ReadVector2(GR2Reader reader) - { - Vector2 v; - v.X = reader.Reader.ReadSingle(); - v.Y = reader.Reader.ReadSingle(); - return v; - } + Vector2 v; + v.X = reader.Reader.ReadSingle(); + v.Y = reader.Reader.ReadSingle(); + return v; + } - public static Vector2 ReadHalfVector2(GR2Reader reader) - { - Vector2 v; - v.X = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); - v.Y = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); - return v; - } + public static Vector2 ReadHalfVector2(GR2Reader reader) + { + Vector2 v; + v.X = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); + v.Y = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); + return v; + } - public static Vector3 ReadVector3(GR2Reader reader) - { - Vector3 v; - v.X = reader.Reader.ReadSingle(); - v.Y = reader.Reader.ReadSingle(); - v.Z = reader.Reader.ReadSingle(); - return v; - } + public static Vector3 ReadVector3(GR2Reader reader) + { + Vector3 v; + v.X = reader.Reader.ReadSingle(); + v.Y = reader.Reader.ReadSingle(); + v.Z = reader.Reader.ReadSingle(); + return v; + } - public static Vector3 ReadHalfVector3(GR2Reader reader) - { - Vector3 v; - v.X = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); - v.Y = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); - v.Z = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); - return v; - } + public static Vector3 ReadHalfVector3(GR2Reader reader) + { + Vector3 v; + v.X = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); + v.Y = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); + v.Z = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); + return v; + } - public static Vector3 ReadHalfVector4As3(GR2Reader reader) - { - Vector3 v; - v.X = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); - v.Y = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); - v.Z = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); - reader.Reader.ReadUInt16(); - return v; - } + public static Vector3 ReadHalfVector4As3(GR2Reader reader) + { + Vector3 v; + v.X = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); + v.Y = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); + v.Z = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); + reader.Reader.ReadUInt16(); + return v; + } - public static Quaternion ReadBinormalShortVector4(GR2Reader reader) + public static Quaternion ReadBinormalShortVector4(GR2Reader reader) + { + return new Quaternion { - return new Quaternion - { - X = reader.Reader.ReadInt16() / 32767.0f, - Y = reader.Reader.ReadInt16() / 32767.0f, - Z = reader.Reader.ReadInt16() / 32767.0f, - W = reader.Reader.ReadInt16() / 32767.0f - }; - } + X = reader.Reader.ReadInt16() / 32767.0f, + Y = reader.Reader.ReadInt16() / 32767.0f, + Z = reader.Reader.ReadInt16() / 32767.0f, + W = reader.Reader.ReadInt16() / 32767.0f + }; + } - public static Vector4 ReadVector4(GR2Reader reader) - { - Vector4 v; - v.X = reader.Reader.ReadSingle(); - v.Y = reader.Reader.ReadSingle(); - v.Z = reader.Reader.ReadSingle(); - v.W = reader.Reader.ReadSingle(); - return v; - } + public static Vector4 ReadVector4(GR2Reader reader) + { + Vector4 v; + v.X = reader.Reader.ReadSingle(); + v.Y = reader.Reader.ReadSingle(); + v.Z = 
reader.Reader.ReadSingle(); + v.W = reader.Reader.ReadSingle(); + return v; + } - public static Vector4 ReadHalfVector4(GR2Reader reader) - { - Vector4 v; - v.X = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); - v.Y = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); - v.Z = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); - v.W = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); - return v; - } + public static Vector4 ReadHalfVector4(GR2Reader reader) + { + Vector4 v; + v.X = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); + v.Y = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); + v.Z = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); + v.W = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); + return v; + } - public static Vector4 ReadNormalByteVector4(GR2Reader reader) - { - Vector4 v; - v.X = reader.Reader.ReadByte() / 255.0f; - v.Y = reader.Reader.ReadByte() / 255.0f; - v.Z = reader.Reader.ReadByte() / 255.0f; - v.W = reader.Reader.ReadByte() / 255.0f; - return v; - } + public static Vector4 ReadNormalByteVector4(GR2Reader reader) + { + Vector4 v; + v.X = reader.Reader.ReadByte() / 255.0f; + v.Y = reader.Reader.ReadByte() / 255.0f; + v.Z = reader.Reader.ReadByte() / 255.0f; + v.W = reader.Reader.ReadByte() / 255.0f; + return v; + } - public static Vector3 ReadNormalSWordVector4As3(GR2Reader reader) - { - Vector3 v; - v.X = reader.Reader.ReadInt16() / 32767.0f; - v.Y = reader.Reader.ReadInt16() / 32767.0f; - v.Z = reader.Reader.ReadInt16() / 32767.0f; - reader.Reader.ReadInt16(); // Unused word - return v; - } + public static Vector3 ReadNormalSWordVector4As3(GR2Reader reader) + { + Vector3 v; + v.X = reader.Reader.ReadInt16() / 32767.0f; + v.Y = reader.Reader.ReadInt16() / 32767.0f; + v.Z = reader.Reader.ReadInt16() / 32767.0f; + reader.Reader.ReadInt16(); // Unused word + return v; + } - public static Vector3 ReadNormalSByteVector4As3(GR2Reader reader) + public static Vector3 ReadNormalSByteVector4As3(GR2Reader reader) + { + Vector3 v; + v.X = reader.Reader.ReadSByte() / 127.0f; + v.Y = reader.Reader.ReadSByte() / 127.0f; + v.Z = reader.Reader.ReadSByte() / 127.0f; + reader.Reader.ReadSByte(); // Unused byte + return v; + } + + public static Matrix3 ReadQTangent(GR2Reader reader) + { + Quaternion qTangent = ReadBinormalShortVector4(reader); + return QTangentToMatrix(qTangent); + } + + private static Matrix3 Orthonormalize(Matrix3 m) + { + Vector3 x = new Vector3(m.M11, m.M21, m.M31).Normalized(); + Vector3 y = Vector3.Cross(new Vector3(m.M13, m.M23, m.M33), x).Normalized(); + Vector3 z = Vector3.Cross(x, y); + return new Matrix3( + x.X, y.X, z.X, + x.Y, y.Y, z.Y, + x.Z, y.Z, z.Z + ); + } + + private static Quaternion MatrixToQTangent(Matrix3 mm, bool reflect) + { + var m = Orthonormalize(mm); + + var quat = Quaternion.FromMatrix(m); + quat.Normalize(); + + quat.Conjugate(); + if (quat.W < 0.0f) { - Vector3 v; - v.X = reader.Reader.ReadSByte() / 127.0f; - v.Y = reader.Reader.ReadSByte() / 127.0f; - v.Z = reader.Reader.ReadSByte() / 127.0f; - reader.Reader.ReadSByte(); // Unused byte - return v; + quat.Conjugate(); + quat.Invert(); } - public static Matrix3 ReadQTangent(GR2Reader reader) + // Make sure we don't end up with 0 as w component + const float threshold16bit = 1.0f / 32767.0f; + if (Math.Abs(quat.W) < threshold16bit) { - Quaternion qTangent = ReadBinormalShortVector4(reader); - return QTangentToMatrix(qTangent); + var bias16bit = (float)Math.Sqrt(1.0f - (threshold16bit * threshold16bit)); + quat *= bias16bit; + quat.W = threshold16bit; } 
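// Editorial aside, not part of the patch: a minimal sketch of how the QTangent helpers in this file fit together.
// The whole tangent frame is packed into one quaternion and quantized to four Int16s (WriteBinormalShortVector4);
// because q and -q describe the same rotation, the sign of W is left free to carry the reflection bit, which is
// why W is biased away from zero before quantization above. The snippet assumes the two private helpers were
// exposed for testing; the variable names are hypothetical.
//
//     var frame = new Matrix3(Vector3.UnitX, Vector3.UnitY, Vector3.UnitZ); // orthonormal tangent frame
//     var q = MatrixToQTangent(frame, false);  // no reflection requested, so q.W stays positive
//     var restored = QTangentToMatrix(q);      // Row2 is rebuilt from Cross(Row0, Row1), its sign taken from q.W
//     // For the identity frame this round-trips exactly; real vertex data additionally picks up the Int16
//     // quantization error introduced by Write/ReadBinormalShortVector4.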
- - private static Matrix3 Orthonormalize(Matrix3 m) + + // Encode reflection into quaternion's W element by making sign of W negative + // if Y axis needs to be flipped, positive otherwise + if (reflect) { - Vector3 x = new Vector3(m.M11, m.M21, m.M31).Normalized(); - Vector3 y = Vector3.Cross(new Vector3(m.M13, m.M23, m.M33), x).Normalized(); - Vector3 z = Vector3.Cross(x, y); - return new Matrix3( - x.X, y.X, z.X, - x.Y, y.Y, z.Y, - x.Z, y.Z, z.Z - ); + quat.Conjugate(); + quat.Invert(); } - private static Quaternion MatrixToQTangent(Matrix3 mm, bool reflect) - { - var m = Orthonormalize(mm); + return quat; + } - var quat = Quaternion.FromMatrix(m); - quat.Normalize(); + private static Matrix3 QTangentToMatrix(Quaternion q) + { + Matrix3 m = new Matrix3( + 1.0f - 2.0f * (q.Y * q.Y + q.Z * q.Z), 2 * (q.X * q.Y + q.W * q.Z), 2 * (q.X * q.Z - q.W * q.Y), + 2.0f * (q.X * q.Y - q.W * q.Z), 1 - 2 * (q.X * q.X + q.Z * q.Z), 2 * (q.Y * q.Z + q.W * q.X), + 0.0f, 0.0f, 0.0f + ); + + m.Row2 = Vector3.Cross(m.Row0, m.Row1) * ((q.W < 0.0f) ? -1.0f : 1.0f); + return m; + } - quat.Conjugate(); - if (quat.W < 0.0f) - { - quat.Conjugate(); - quat.Invert(); - } + public static BoneWeight ReadInfluences2(GR2Reader reader) + { + BoneWeight v; + v.A = reader.Reader.ReadByte(); + v.B = reader.Reader.ReadByte(); + v.C = 0; + v.D = 0; + return v; + } - // Make sure we don't end up with 0 as w component - const float threshold16bit = 1.0f / 32767.0f; - if (Math.Abs(quat.W) < threshold16bit) - { - var bias16bit = (float)Math.Sqrt(1.0f - (threshold16bit * threshold16bit)); - quat *= bias16bit; - quat.W = threshold16bit; - } - - // Encode reflection into quaternion's W element by making sign of W negative - // if Y axis needs to be flipped, positive otherwise - if (reflect) - { - quat.Conjugate(); - quat.Invert(); - } + public static BoneWeight ReadInfluences(GR2Reader reader) + { + BoneWeight v; + v.A = reader.Reader.ReadByte(); + v.B = reader.Reader.ReadByte(); + v.C = reader.Reader.ReadByte(); + v.D = reader.Reader.ReadByte(); + return v; + } - return quat; - } + public static void WriteVector2(WritableSection section, Vector2 v) + { + section.Writer.Write(v.X); + section.Writer.Write(v.Y); + } - private static Matrix3 QTangentToMatrix(Quaternion q) - { - Matrix3 m = new Matrix3( - 1.0f - 2.0f * (q.Y * q.Y + q.Z * q.Z), 2 * (q.X * q.Y + q.W * q.Z), 2 * (q.X * q.Z - q.W * q.Y), - 2.0f * (q.X * q.Y - q.W * q.Z), 1 - 2 * (q.X * q.X + q.Z * q.Z), 2 * (q.Y * q.Z + q.W * q.X), - 0.0f, 0.0f, 0.0f - ); - - m.Row2 = Vector3.Cross(m.Row0, m.Row1) * ((q.W < 0.0f) ? 
-1.0f : 1.0f); - return m; - } + public static void WriteHalfVector2(WritableSection section, Vector2 v) + { + section.Writer.Write(HalfHelpers.SingleToHalf(v.X)); + section.Writer.Write(HalfHelpers.SingleToHalf(v.Y)); + } - public static BoneWeight ReadInfluences2(GR2Reader reader) - { - BoneWeight v; - v.A = reader.Reader.ReadByte(); - v.B = reader.Reader.ReadByte(); - v.C = 0; - v.D = 0; - return v; - } + public static void WriteVector3(WritableSection section, Vector3 v) + { + section.Writer.Write(v.X); + section.Writer.Write(v.Y); + section.Writer.Write(v.Z); + } - public static BoneWeight ReadInfluences(GR2Reader reader) - { - BoneWeight v; - v.A = reader.Reader.ReadByte(); - v.B = reader.Reader.ReadByte(); - v.C = reader.Reader.ReadByte(); - v.D = reader.Reader.ReadByte(); - return v; - } + public static void WriteHalfVector3(WritableSection section, Vector3 v) + { + section.Writer.Write(HalfHelpers.SingleToHalf(v.X)); + section.Writer.Write(HalfHelpers.SingleToHalf(v.Y)); + section.Writer.Write(HalfHelpers.SingleToHalf(v.Z)); + } - public static void WriteVector2(WritableSection section, Vector2 v) - { - section.Writer.Write(v.X); - section.Writer.Write(v.Y); - } + public static void WriteHalfVector3As4(WritableSection section, Vector3 v) + { + section.Writer.Write(HalfHelpers.SingleToHalf(v.X)); + section.Writer.Write(HalfHelpers.SingleToHalf(v.Y)); + section.Writer.Write(HalfHelpers.SingleToHalf(v.Z)); + section.Writer.Write((ushort)0); + } - public static void WriteHalfVector2(WritableSection section, Vector2 v) - { - section.Writer.Write(HalfHelpers.SingleToHalf(v.X)); - section.Writer.Write(HalfHelpers.SingleToHalf(v.Y)); - } + public static void WriteBinormalShortVector4(WritableSection section, Quaternion v) + { + section.Writer.Write((Int16)(v.X * 32767.0f)); + section.Writer.Write((Int16)(v.Y * 32767.0f)); + section.Writer.Write((Int16)(v.Z * 32767.0f)); + section.Writer.Write((Int16)(v.W * 32767.0f)); + } - public static void WriteVector3(WritableSection section, Vector3 v) - { - section.Writer.Write(v.X); - section.Writer.Write(v.Y); - section.Writer.Write(v.Z); - } + public static void WriteVector4(WritableSection section, Vector4 v) + { + section.Writer.Write(v.X); + section.Writer.Write(v.Y); + section.Writer.Write(v.Z); + section.Writer.Write(v.W); + } - public static void WriteHalfVector3(WritableSection section, Vector3 v) - { - section.Writer.Write(HalfHelpers.SingleToHalf(v.X)); - section.Writer.Write(HalfHelpers.SingleToHalf(v.Y)); - section.Writer.Write(HalfHelpers.SingleToHalf(v.Z)); - } + public static void WriteHalfVector4(WritableSection section, Vector4 v) + { + section.Writer.Write(HalfHelpers.SingleToHalf(v.X)); + section.Writer.Write(HalfHelpers.SingleToHalf(v.Y)); + section.Writer.Write(HalfHelpers.SingleToHalf(v.Z)); + section.Writer.Write(HalfHelpers.SingleToHalf(v.W)); + } - public static void WriteHalfVector3As4(WritableSection section, Vector3 v) - { - section.Writer.Write(HalfHelpers.SingleToHalf(v.X)); - section.Writer.Write(HalfHelpers.SingleToHalf(v.Y)); - section.Writer.Write(HalfHelpers.SingleToHalf(v.Z)); - section.Writer.Write((ushort)0); - } + public static void WriteNormalByteVector4(WritableSection section, Vector4 v) + { + section.Writer.Write((byte)(v.X * 255)); + section.Writer.Write((byte)(v.Y * 255)); + section.Writer.Write((byte)(v.Z * 255)); + section.Writer.Write((byte)(v.W * 255)); + } + public static void WriteNormalSWordVector3As4(WritableSection section, Vector3 v) + { + section.Writer.Write((Int16)(v.X * 32767)); + 
section.Writer.Write((Int16)(v.Y * 32767)); + section.Writer.Write((Int16)(v.Z * 32767)); + section.Writer.Write(0); + } + public static void WriteNormalSByteVector3As4(WritableSection section, Vector3 v) + { + section.Writer.Write((sbyte)(v.X * 127)); + section.Writer.Write((sbyte)(v.Y * 127)); + section.Writer.Write((sbyte)(v.Z * 127)); + section.Writer.Write(0); + } - public static void WriteBinormalShortVector4(WritableSection section, Quaternion v) - { - section.Writer.Write((Int16)(v.X * 32767.0f)); - section.Writer.Write((Int16)(v.Y * 32767.0f)); - section.Writer.Write((Int16)(v.Z * 32767.0f)); - section.Writer.Write((Int16)(v.W * 32767.0f)); - } + public static void WriteInfluences2(WritableSection section, BoneWeight v) + { + section.Writer.Write(v.A); + section.Writer.Write(v.B); + } - public static void WriteVector4(WritableSection section, Vector4 v) - { - section.Writer.Write(v.X); - section.Writer.Write(v.Y); - section.Writer.Write(v.Z); - section.Writer.Write(v.W); - } + public static void WriteInfluences(WritableSection section, BoneWeight v) + { + section.Writer.Write(v.A); + section.Writer.Write(v.B); + section.Writer.Write(v.C); + section.Writer.Write(v.D); + } - public static void WriteHalfVector4(WritableSection section, Vector4 v) - { - section.Writer.Write(HalfHelpers.SingleToHalf(v.X)); - section.Writer.Write(HalfHelpers.SingleToHalf(v.Y)); - section.Writer.Write(HalfHelpers.SingleToHalf(v.Z)); - section.Writer.Write(HalfHelpers.SingleToHalf(v.W)); - } + public static void WriteQTangent(WritableSection section, Vector3 normal, Vector3 tangent, Vector3 binormal) + { + var n2 = Vector3.Cross(tangent, binormal).Normalized(); + var reflection = Vector3.Dot(normal, n2); + Matrix3 normals = new Matrix3(tangent, binormal, n2); + var qTangent = MatrixToQTangent(normals, reflection < 0.0f); + WriteBinormalShortVector4(section, qTangent); + } - public static void WriteNormalByteVector4(WritableSection section, Vector4 v) - { - section.Writer.Write((byte)(v.X * 255)); - section.Writer.Write((byte)(v.Y * 255)); - section.Writer.Write((byte)(v.Z * 255)); - section.Writer.Write((byte)(v.W * 255)); - } - public static void WriteNormalSWordVector3As4(WritableSection section, Vector3 v) + public static void Serialize(WritableSection section, Vertex v) + { + var d = v.Format; + + switch (d.PositionType) { - section.Writer.Write((Int16)(v.X * 32767)); - section.Writer.Write((Int16)(v.Y * 32767)); - section.Writer.Write((Int16)(v.Z * 32767)); - section.Writer.Write(0); + case PositionType.None: break; + case PositionType.Float3: WriteVector3(section, v.Position); break; + case PositionType.Word4: WriteNormalSWordVector3As4(section, v.Position); break; } - public static void WriteNormalSByteVector3As4(WritableSection section, Vector3 v) + + if (d.HasBoneWeights) { - section.Writer.Write((sbyte)(v.X * 127)); - section.Writer.Write((sbyte)(v.Y * 127)); - section.Writer.Write((sbyte)(v.Z * 127)); - section.Writer.Write(0); + if (d.NumBoneInfluences == 2) + { + WriteInfluences2(section, v.BoneWeights); + WriteInfluences2(section, v.BoneIndices); + } + else + { + WriteInfluences(section, v.BoneWeights); + WriteInfluences(section, v.BoneIndices); + } } - public static void WriteInfluences2(WritableSection section, BoneWeight v) + switch (d.NormalType) { - section.Writer.Write(v.A); - section.Writer.Write(v.B); + case NormalType.None: break; + case NormalType.Float3: WriteVector3(section, v.Normal); break; + case NormalType.Half4: WriteHalfVector3As4(section, v.Normal); break; + case 
NormalType.Byte4: WriteNormalSByteVector3As4(section, v.Normal); break; + case NormalType.QTangent: WriteQTangent(section, v.Normal, v.Tangent, v.Binormal); break; } - public static void WriteInfluences(WritableSection section, BoneWeight v) + switch (d.TangentType) { - section.Writer.Write(v.A); - section.Writer.Write(v.B); - section.Writer.Write(v.C); - section.Writer.Write(v.D); + case NormalType.None: break; + case NormalType.Float3: WriteVector3(section, v.Tangent); break; + case NormalType.Half4: WriteHalfVector3As4(section, v.Tangent); break; + case NormalType.Byte4: WriteNormalSByteVector3As4(section, v.Tangent); break; + case NormalType.QTangent: break; // Tangent saved into QTangent } - public static void WriteQTangent(WritableSection section, Vector3 normal, Vector3 tangent, Vector3 binormal) + switch (d.BinormalType) { - var n2 = Vector3.Cross(tangent, binormal).Normalized(); - var reflection = Vector3.Dot(normal, n2); - Matrix3 normals = new Matrix3(tangent, binormal, n2); - var qTangent = MatrixToQTangent(normals, reflection < 0.0f); - WriteBinormalShortVector4(section, qTangent); + case NormalType.None: break; + case NormalType.Float3: WriteVector3(section, v.Binormal); break; + case NormalType.Half4: WriteHalfVector3As4(section, v.Binormal); break; + case NormalType.Byte4: WriteNormalSByteVector3As4(section, v.Binormal); break; + case NormalType.QTangent: break; // Binormal saved into QTangent } - public static void Serialize(WritableSection section, Vertex v) + if (d.ColorMaps > 0) { - var d = v.Format; - - switch (d.PositionType) + for (var i = 0; i < d.ColorMaps; i++) { - case PositionType.None: break; - case PositionType.Float3: WriteVector3(section, v.Position); break; - case PositionType.Word4: WriteNormalSWordVector3As4(section, v.Position); break; - } - - if (d.HasBoneWeights) - { - if (d.NumBoneInfluences == 2) - { - WriteInfluences2(section, v.BoneWeights); - WriteInfluences2(section, v.BoneIndices); - } - else + var color = v.GetColor(i); + switch (d.ColorMapType) { - WriteInfluences(section, v.BoneWeights); - WriteInfluences(section, v.BoneIndices); + case ColorMapType.Float4: WriteVector4(section, color); break; + case ColorMapType.Byte4: WriteNormalByteVector4(section, color); break; + default: throw new Exception($"Cannot unserialize color map: Unsupported format {d.ColorMapType}"); } } + } - switch (d.NormalType) + if (d.TextureCoordinates > 0) + { + for (var i = 0; i < d.TextureCoordinates; i++) { - case NormalType.None: break; - case NormalType.Float3: WriteVector3(section, v.Normal); break; - case NormalType.Half4: WriteHalfVector3As4(section, v.Normal); break; - case NormalType.Byte4: WriteNormalSByteVector3As4(section, v.Normal); break; - case NormalType.QTangent: WriteQTangent(section, v.Normal, v.Tangent, v.Binormal); break; + var uv = v.GetUV(i); + switch (d.TextureCoordinateType) + { + case TextureCoordinateType.Float2: WriteVector2(section, uv); break; + case TextureCoordinateType.Half2: WriteHalfVector2(section, uv); break; + default: throw new Exception($"Cannot serialize UV map: Unsupported format {d.TextureCoordinateType}"); + } } + } + } - switch (d.TangentType) - { - case NormalType.None: break; - case NormalType.Float3: WriteVector3(section, v.Tangent); break; - case NormalType.Half4: WriteHalfVector3As4(section, v.Tangent); break; - case NormalType.Byte4: WriteNormalSByteVector3As4(section, v.Tangent); break; - case NormalType.QTangent: break; // Tangent saved into QTangent - } + public static void Unserialize(GR2Reader reader, Vertex v) + 
{ + var d = v.Format; - switch (d.BinormalType) - { - case NormalType.None: break; - case NormalType.Float3: WriteVector3(section, v.Binormal); break; - case NormalType.Half4: WriteHalfVector3As4(section, v.Binormal); break; - case NormalType.Byte4: WriteNormalSByteVector3As4(section, v.Binormal); break; - case NormalType.QTangent: break; // Binormal saved into QTangent - } + switch (d.PositionType) + { + case PositionType.None: break; + case PositionType.Float3: v.Position = ReadVector3(reader); break; + case PositionType.Word4: v.Position = ReadNormalSWordVector4As3(reader); break; + } - if (d.ColorMaps > 0) + if (d.HasBoneWeights) + { + if (d.NumBoneInfluences == 2) { - for (var i = 0; i < d.ColorMaps; i++) - { - var color = v.GetColor(i); - switch (d.ColorMapType) - { - case ColorMapType.Float4: WriteVector4(section, color); break; - case ColorMapType.Byte4: WriteNormalByteVector4(section, color); break; - default: throw new Exception($"Cannot unserialize color map: Unsupported format {d.ColorMapType}"); - } - } + v.BoneWeights = ReadInfluences2(reader); + v.BoneIndices = ReadInfluences2(reader); } - - if (d.TextureCoordinates > 0) + else { - for (var i = 0; i < d.TextureCoordinates; i++) - { - var uv = v.GetUV(i); - switch (d.TextureCoordinateType) - { - case TextureCoordinateType.Float2: WriteVector2(section, uv); break; - case TextureCoordinateType.Half2: WriteHalfVector2(section, uv); break; - default: throw new Exception($"Cannot serialize UV map: Unsupported format {d.TextureCoordinateType}"); - } - } + v.BoneWeights = ReadInfluences(reader); + v.BoneIndices = ReadInfluences(reader); } } - public static void Unserialize(GR2Reader reader, Vertex v) + switch (d.NormalType) { - var d = v.Format; - - switch (d.PositionType) - { - case PositionType.None: break; - case PositionType.Float3: v.Position = ReadVector3(reader); break; - case PositionType.Word4: v.Position = ReadNormalSWordVector4As3(reader); break; - } - - if (d.HasBoneWeights) - { - if (d.NumBoneInfluences == 2) + case NormalType.None: break; + case NormalType.Float3: v.Normal = ReadVector3(reader); break; + case NormalType.Half4: v.Normal = ReadHalfVector4As3(reader); break; + case NormalType.Byte4: v.Normal = ReadNormalSByteVector4As3(reader); break; + case NormalType.QTangent: { - v.BoneWeights = ReadInfluences2(reader); - v.BoneIndices = ReadInfluences2(reader); + var qTangent = ReadQTangent(reader); + v.Normal = qTangent.Row2; + v.Tangent = qTangent.Row1; + v.Binormal = qTangent.Row0; + break; } - else - { - v.BoneWeights = ReadInfluences(reader); - v.BoneIndices = ReadInfluences(reader); - } - } - - switch (d.NormalType) - { - case NormalType.None: break; - case NormalType.Float3: v.Normal = ReadVector3(reader); break; - case NormalType.Half4: v.Normal = ReadHalfVector4As3(reader); break; - case NormalType.Byte4: v.Normal = ReadNormalSByteVector4As3(reader); break; - case NormalType.QTangent: - { - var qTangent = ReadQTangent(reader); - v.Normal = qTangent.Row2; - v.Tangent = qTangent.Row1; - v.Binormal = qTangent.Row0; - break; - } - } + } - switch (d.TangentType) - { - case NormalType.None: break; - case NormalType.Float3: v.Tangent = ReadVector3(reader); break; - case NormalType.Half4: v.Tangent = ReadHalfVector4As3(reader); break; - case NormalType.Byte4: v.Tangent = ReadNormalSByteVector4As3(reader); break; - case NormalType.QTangent: break; // Tangent read from QTangent - } + switch (d.TangentType) + { + case NormalType.None: break; + case NormalType.Float3: v.Tangent = ReadVector3(reader); break; + case 
NormalType.Half4: v.Tangent = ReadHalfVector4As3(reader); break; + case NormalType.Byte4: v.Tangent = ReadNormalSByteVector4As3(reader); break; + case NormalType.QTangent: break; // Tangent read from QTangent + } - switch (d.BinormalType) - { - case NormalType.None: break; - case NormalType.Float3: v.Binormal = ReadVector3(reader); break; - case NormalType.Half4: v.Binormal = ReadHalfVector4As3(reader); break; - case NormalType.Byte4: v.Binormal = ReadNormalSByteVector4As3(reader); break; - case NormalType.QTangent: break; // Binormal read from QTangent - } + switch (d.BinormalType) + { + case NormalType.None: break; + case NormalType.Float3: v.Binormal = ReadVector3(reader); break; + case NormalType.Half4: v.Binormal = ReadHalfVector4As3(reader); break; + case NormalType.Byte4: v.Binormal = ReadNormalSByteVector4As3(reader); break; + case NormalType.QTangent: break; // Binormal read from QTangent + } - if (d.ColorMaps > 0) + if (d.ColorMaps > 0) + { + for (var i = 0; i < d.ColorMaps; i++) { - for (var i = 0; i < d.ColorMaps; i++) + var color = d.ColorMapType switch { - var color = d.ColorMapType switch - { - ColorMapType.Float4 => ReadVector4(reader), - ColorMapType.Byte4 => ReadNormalByteVector4(reader), - _ => throw new Exception($"Cannot unserialize color map: Unsupported format {d.ColorMapType}"), - }; - v.SetColor(i, color); - } + ColorMapType.Float4 => ReadVector4(reader), + ColorMapType.Byte4 => ReadNormalByteVector4(reader), + _ => throw new Exception($"Cannot unserialize color map: Unsupported format {d.ColorMapType}"), + }; + v.SetColor(i, color); } + } - if (d.TextureCoordinates > 0) + if (d.TextureCoordinates > 0) + { + for (var i = 0; i < d.TextureCoordinates; i++) { - for (var i = 0; i < d.TextureCoordinates; i++) + var uv = d.TextureCoordinateType switch { - var uv = d.TextureCoordinateType switch - { - TextureCoordinateType.Float2 => ReadVector2(reader), - TextureCoordinateType.Half2 => ReadHalfVector2(reader), - _ => throw new Exception($"Cannot unserialize UV map: Unsupported format {d.TextureCoordinateType}"), - }; - v.SetUV(i, uv); - } + TextureCoordinateType.Float2 => ReadVector2(reader), + TextureCoordinateType.Half2 => ReadHalfVector2(reader), + _ => throw new Exception($"Cannot unserialize UV map: Unsupported format {d.TextureCoordinateType}"), + }; + v.SetUV(i, uv); } } } +} - public static class VertexTypeBuilder - { - private static ModuleBuilder ModBuilder; +public static class VertexTypeBuilder +{ + private static ModuleBuilder ModBuilder; - private static ModuleBuilder GetModuleBuilder() + private static ModuleBuilder GetModuleBuilder() + { + if (ModBuilder != null) { - if (ModBuilder != null) - { - return ModBuilder; - } - - var an = new AssemblyName("VertexFactoryAssembly"); - var assemblyBuilder = AssemblyBuilder.DefineDynamicAssembly(an, AssemblyBuilderAccess.Run); - var moduleBuilder = assemblyBuilder.DefineDynamicModule("VertexFactoryClasses"); - ModBuilder = moduleBuilder; return ModBuilder; } - public static Type CreateVertexSubtype(string className) - { - var cls = GetModuleBuilder().GetType(className); - if (cls != null) - { - return cls; - } - - TypeBuilder tb = GetModuleBuilder().DefineType(className, - TypeAttributes.Public | - TypeAttributes.Class | - TypeAttributes.AutoClass | - TypeAttributes.AnsiClass | - TypeAttributes.BeforeFieldInit | - TypeAttributes.AutoLayout, - null); - ConstructorBuilder constructor = tb.DefineDefaultConstructor( - MethodAttributes.Public | - MethodAttributes.SpecialName | - MethodAttributes.RTSpecialName); - - 
tb.SetParent(typeof(Vertex)); - - return tb.CreateType(); - } + var an = new AssemblyName("VertexFactoryAssembly"); + var assemblyBuilder = AssemblyBuilder.DefineDynamicAssembly(an, AssemblyBuilderAccess.Run); + var moduleBuilder = assemblyBuilder.DefineDynamicModule("VertexFactoryClasses"); + ModBuilder = moduleBuilder; + return ModBuilder; } - class VertexDefinitionSelector : StructDefinitionSelector + public static Type CreateVertexSubtype(string className) { - private void AddMember(StructDefinition defn, String name, MemberType type, UInt32 arraySize) + var cls = GetModuleBuilder().GetType(className); + if (cls != null) { - var member = new MemberDefinition - { - Type = type, - Name = name, - GrannyName = name, - Definition = null, - ArraySize = arraySize, - Extra = [0, 0, 0], - Unknown = 0 - }; - defn.Members.Add(member); + return cls; } - public StructDefinition CreateStructDefinition(object instance) + TypeBuilder tb = GetModuleBuilder().DefineType(className, + TypeAttributes.Public | + TypeAttributes.Class | + TypeAttributes.AutoClass | + TypeAttributes.AnsiClass | + TypeAttributes.BeforeFieldInit | + TypeAttributes.AutoLayout, + null); + ConstructorBuilder constructor = tb.DefineDefaultConstructor( + MethodAttributes.Public | + MethodAttributes.SpecialName | + MethodAttributes.RTSpecialName); + + tb.SetParent(typeof(Vertex)); + + return tb.CreateType(); + } +} + +class VertexDefinitionSelector : StructDefinitionSelector +{ + private void AddMember(StructDefinition defn, String name, MemberType type, UInt32 arraySize) + { + var member = new MemberDefinition + { + Type = type, + Name = name, + GrannyName = name, + Definition = null, + ArraySize = arraySize, + Extra = [0, 0, 0], + Unknown = 0 + }; + defn.Members.Add(member); + } + + public StructDefinition CreateStructDefinition(object instance) + { + var desc = (instance as Vertex).Format; + var defn = new StructDefinition { - var desc = (instance as Vertex).Format; - var defn = new StructDefinition - { - Members = [], - MixedMarshal = true, - Type = typeof(Vertex) - }; + Members = [], + MixedMarshal = true, + Type = typeof(Vertex) + }; - switch (desc.PositionType) - { - case PositionType.None: break; - case PositionType.Float3: AddMember(defn, "Position", MemberType.Real32, 3); break; - case PositionType.Word4: AddMember(defn, "Position", MemberType.BinormalInt16, 4); break; - } + switch (desc.PositionType) + { + case PositionType.None: break; + case PositionType.Float3: AddMember(defn, "Position", MemberType.Real32, 3); break; + case PositionType.Word4: AddMember(defn, "Position", MemberType.BinormalInt16, 4); break; + } - if (desc.HasBoneWeights) - { - AddMember(defn, "BoneWeights", MemberType.NormalUInt8, (UInt32)desc.NumBoneInfluences); - AddMember(defn, "BoneIndices", MemberType.UInt8, (UInt32)desc.NumBoneInfluences); - } + if (desc.HasBoneWeights) + { + AddMember(defn, "BoneWeights", MemberType.NormalUInt8, (UInt32)desc.NumBoneInfluences); + AddMember(defn, "BoneIndices", MemberType.UInt8, (UInt32)desc.NumBoneInfluences); + } - switch (desc.NormalType) - { - case NormalType.None: break; - case NormalType.Float3: AddMember(defn, "Normal", MemberType.Real32, 3); break; - case NormalType.Half4: AddMember(defn, "Normal", MemberType.Real16, 4); break; - case NormalType.Byte4: AddMember(defn, "Normal", MemberType.BinormalInt8, 4); break; - case NormalType.QTangent: AddMember(defn, "QTangent", MemberType.BinormalInt16, 4); break; - } + switch (desc.NormalType) + { + case NormalType.None: break; + case NormalType.Float3: 
AddMember(defn, "Normal", MemberType.Real32, 3); break; + case NormalType.Half4: AddMember(defn, "Normal", MemberType.Real16, 4); break; + case NormalType.Byte4: AddMember(defn, "Normal", MemberType.BinormalInt8, 4); break; + case NormalType.QTangent: AddMember(defn, "QTangent", MemberType.BinormalInt16, 4); break; + } - switch (desc.TangentType) - { - case NormalType.None: break; - case NormalType.Float3: AddMember(defn, "Tangent", MemberType.Real32, 3); break; - case NormalType.Half4: AddMember(defn, "Tangent", MemberType.Real16, 4); break; - case NormalType.Byte4: AddMember(defn, "Tangent", MemberType.BinormalInt8, 4); break; - case NormalType.QTangent: break; // Tangent saved into QTangent - } + switch (desc.TangentType) + { + case NormalType.None: break; + case NormalType.Float3: AddMember(defn, "Tangent", MemberType.Real32, 3); break; + case NormalType.Half4: AddMember(defn, "Tangent", MemberType.Real16, 4); break; + case NormalType.Byte4: AddMember(defn, "Tangent", MemberType.BinormalInt8, 4); break; + case NormalType.QTangent: break; // Tangent saved into QTangent + } - switch (desc.BinormalType) - { - case NormalType.None: break; - case NormalType.Float3: AddMember(defn, "Binormal", MemberType.Real32, 3); break; - case NormalType.Half4: AddMember(defn, "Binormal", MemberType.Real16, 4); break; - case NormalType.Byte4: AddMember(defn, "Binormal", MemberType.BinormalInt8, 4); break; - case NormalType.QTangent: break; // Binormal saved into QTangent - } + switch (desc.BinormalType) + { + case NormalType.None: break; + case NormalType.Float3: AddMember(defn, "Binormal", MemberType.Real32, 3); break; + case NormalType.Half4: AddMember(defn, "Binormal", MemberType.Real16, 4); break; + case NormalType.Byte4: AddMember(defn, "Binormal", MemberType.BinormalInt8, 4); break; + case NormalType.QTangent: break; // Binormal saved into QTangent + } - for (int i = 0; i < desc.ColorMaps; i++) + for (int i = 0; i < desc.ColorMaps; i++) + { + switch (desc.ColorMapType) { - switch (desc.ColorMapType) - { - case ColorMapType.Float4: AddMember(defn, "DiffuseColor" + i.ToString(), MemberType.Real32, 4); break; - case ColorMapType.Byte4: AddMember(defn, "DiffuseColor" + i.ToString(), MemberType.NormalUInt8, 4); break; - } + case ColorMapType.Float4: AddMember(defn, "DiffuseColor" + i.ToString(), MemberType.Real32, 4); break; + case ColorMapType.Byte4: AddMember(defn, "DiffuseColor" + i.ToString(), MemberType.NormalUInt8, 4); break; } + } - for (int i = 0; i < desc.TextureCoordinates; i++) + for (int i = 0; i < desc.TextureCoordinates; i++) + { + switch (desc.TextureCoordinateType) { - switch (desc.TextureCoordinateType) - { - case TextureCoordinateType.Float2: AddMember(defn, "TextureCoordinates" + i.ToString(), MemberType.Real32, 2); break; - case TextureCoordinateType.Half2: AddMember(defn, "TextureCoordinates" + i.ToString(), MemberType.Real16, 2); break; - } + case TextureCoordinateType.Float2: AddMember(defn, "TextureCoordinates" + i.ToString(), MemberType.Real32, 2); break; + case TextureCoordinateType.Half2: AddMember(defn, "TextureCoordinates" + i.ToString(), MemberType.Real16, 2); break; } - - return defn; } + + return defn; } } diff --git a/LSLib/Granny/Utils.cs b/LSLib/Granny/Utils.cs index 1c6c54e7..f6d42d7e 100644 --- a/LSLib/Granny/Utils.cs +++ b/LSLib/Granny/Utils.cs @@ -1,15 +1,14 @@ -namespace LSLib.Granny +namespace LSLib.Granny; + +abstract class Utils { - abstract class Utils + public static void Warn(string message) { - public static void Warn(string message) - { - 
System.Console.WriteLine("WARNING: " + message); - } + System.Console.WriteLine("WARNING: " + message); + } - public static void Info(string message) - { - System.Console.WriteLine(message); - } + public static void Info(string message) + { + System.Console.WriteLine(message); } } diff --git a/LSLib/LS/BinUtils.cs b/LSLib/LS/BinUtils.cs index ceb30d6e..da01fec6 100644 --- a/LSLib/LS/BinUtils.cs +++ b/LSLib/LS/BinUtils.cs @@ -5,407 +5,406 @@ using LSLib.LS.Enums; using System.IO.Compression; -namespace LSLib.LS +namespace LSLib.LS; + +public static class BinUtils { - public static class BinUtils + public static T ReadStruct(BinaryReader reader) { - public static T ReadStruct(BinaryReader reader) - { - T outStruct; - int count = Marshal.SizeOf(typeof(T)); - byte[] readBuffer = reader.ReadBytes(count); - GCHandle handle = GCHandle.Alloc(readBuffer, GCHandleType.Pinned); - outStruct = (T)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(T)); - handle.Free(); - return outStruct; - } + T outStruct; + int count = Marshal.SizeOf(typeof(T)); + byte[] readBuffer = reader.ReadBytes(count); + GCHandle handle = GCHandle.Alloc(readBuffer, GCHandleType.Pinned); + outStruct = (T)Marshal.PtrToStructure(handle.AddrOfPinnedObject(), typeof(T)); + handle.Free(); + return outStruct; + } - public static void ReadStructs(BinaryReader reader, T[] elements) + public static void ReadStructs(BinaryReader reader, T[] elements) + { + int elementSize = Marshal.SizeOf(typeof(T)); + int bytes = elementSize * elements.Length; + byte[] readBuffer = reader.ReadBytes(bytes); + GCHandle handle = GCHandle.Alloc(readBuffer, GCHandleType.Pinned); + var addr = handle.AddrOfPinnedObject(); + for (var i = 0; i < elements.Length; i++) { - int elementSize = Marshal.SizeOf(typeof(T)); - int bytes = elementSize * elements.Length; - byte[] readBuffer = reader.ReadBytes(bytes); - GCHandle handle = GCHandle.Alloc(readBuffer, GCHandleType.Pinned); - var addr = handle.AddrOfPinnedObject(); - for (var i = 0; i < elements.Length; i++) - { - var elementAddr = new IntPtr(addr.ToInt64() + elementSize * i); - elements[i] = Marshal.PtrToStructure(elementAddr); - } - handle.Free(); + var elementAddr = new IntPtr(addr.ToInt64() + elementSize * i); + elements[i] = Marshal.PtrToStructure(elementAddr); } + handle.Free(); + } - public static void WriteStruct(BinaryWriter writer, ref T inStruct) - { - int count = Marshal.SizeOf(typeof(T)); - byte[] writeBuffer = new byte[count]; - GCHandle handle = GCHandle.Alloc(writeBuffer, GCHandleType.Pinned); - Marshal.StructureToPtr(inStruct, handle.AddrOfPinnedObject(), true); - handle.Free(); - writer.Write(writeBuffer); - } + public static void WriteStruct(BinaryWriter writer, ref T inStruct) + { + int count = Marshal.SizeOf(typeof(T)); + byte[] writeBuffer = new byte[count]; + GCHandle handle = GCHandle.Alloc(writeBuffer, GCHandleType.Pinned); + Marshal.StructureToPtr(inStruct, handle.AddrOfPinnedObject(), true); + handle.Free(); + writer.Write(writeBuffer); + } - public static void WriteStructs(BinaryWriter writer, T[] elements) + public static void WriteStructs(BinaryWriter writer, T[] elements) + { + int elementSize = Marshal.SizeOf(typeof(T)); + int bytes = elementSize * elements.Length; + byte[] writeBuffer = new byte[bytes]; + GCHandle handle = GCHandle.Alloc(writeBuffer, GCHandleType.Pinned); + var addr = handle.AddrOfPinnedObject(); + for (var i = 0; i < elements.Length; i++) { - int elementSize = Marshal.SizeOf(typeof(T)); - int bytes = elementSize * elements.Length; - byte[] writeBuffer = new 
byte[bytes]; - GCHandle handle = GCHandle.Alloc(writeBuffer, GCHandleType.Pinned); - var addr = handle.AddrOfPinnedObject(); - for (var i = 0; i < elements.Length; i++) - { - var elementAddr = new IntPtr(addr.ToInt64() + elementSize * i); - Marshal.StructureToPtr(elements[i], elementAddr, true); - } - handle.Free(); - writer.Write(writeBuffer); + var elementAddr = new IntPtr(addr.ToInt64() + elementSize * i); + Marshal.StructureToPtr(elements[i], elementAddr, true); } + handle.Free(); + writer.Write(writeBuffer); + } - public static NodeAttribute ReadAttribute(NodeAttribute.DataType type, BinaryReader reader) + public static NodeAttribute ReadAttribute(NodeAttribute.DataType type, BinaryReader reader) + { + var attr = new NodeAttribute(type); + switch (type) { - var attr = new NodeAttribute(type); - switch (type) - { - case NodeAttribute.DataType.DT_None: + case NodeAttribute.DataType.DT_None: + break; + + case NodeAttribute.DataType.DT_Byte: + attr.Value = reader.ReadByte(); + break; + + case NodeAttribute.DataType.DT_Short: + attr.Value = reader.ReadInt16(); + break; + + case NodeAttribute.DataType.DT_UShort: + attr.Value = reader.ReadUInt16(); + break; + + case NodeAttribute.DataType.DT_Int: + attr.Value = reader.ReadInt32(); + break; + + case NodeAttribute.DataType.DT_UInt: + attr.Value = reader.ReadUInt32(); + break; + + case NodeAttribute.DataType.DT_Float: + attr.Value = reader.ReadSingle(); + break; + + case NodeAttribute.DataType.DT_Double: + attr.Value = reader.ReadDouble(); + break; + + case NodeAttribute.DataType.DT_IVec2: + case NodeAttribute.DataType.DT_IVec3: + case NodeAttribute.DataType.DT_IVec4: + { + int columns = attr.GetColumns(); + var vec = new int[columns]; + for (int i = 0; i < columns; i++) + vec[i] = reader.ReadInt32(); + attr.Value = vec; break; - - case NodeAttribute.DataType.DT_Byte: - attr.Value = reader.ReadByte(); + } + + case NodeAttribute.DataType.DT_Vec2: + case NodeAttribute.DataType.DT_Vec3: + case NodeAttribute.DataType.DT_Vec4: + { + int columns = attr.GetColumns(); + var vec = new float[columns]; + for (int i = 0; i < columns; i++) + vec[i] = reader.ReadSingle(); + attr.Value = vec; break; - - case NodeAttribute.DataType.DT_Short: - attr.Value = reader.ReadInt16(); - break; - - case NodeAttribute.DataType.DT_UShort: - attr.Value = reader.ReadUInt16(); - break; - - case NodeAttribute.DataType.DT_Int: - attr.Value = reader.ReadInt32(); - break; - - case NodeAttribute.DataType.DT_UInt: - attr.Value = reader.ReadUInt32(); - break; - - case NodeAttribute.DataType.DT_Float: - attr.Value = reader.ReadSingle(); - break; - - case NodeAttribute.DataType.DT_Double: - attr.Value = reader.ReadDouble(); - break; - - case NodeAttribute.DataType.DT_IVec2: - case NodeAttribute.DataType.DT_IVec3: - case NodeAttribute.DataType.DT_IVec4: + } + + case NodeAttribute.DataType.DT_Mat2: + case NodeAttribute.DataType.DT_Mat3: + case NodeAttribute.DataType.DT_Mat3x4: + case NodeAttribute.DataType.DT_Mat4x3: + case NodeAttribute.DataType.DT_Mat4: + { + int columns = attr.GetColumns(); + int rows = attr.GetRows(); + var mat = new Matrix(rows, columns); + attr.Value = mat; + + for (int col = 0; col < columns; col++) { - int columns = attr.GetColumns(); - var vec = new int[columns]; - for (int i = 0; i < columns; i++) - vec[i] = reader.ReadInt32(); - attr.Value = vec; - break; - } - - case NodeAttribute.DataType.DT_Vec2: - case NodeAttribute.DataType.DT_Vec3: - case NodeAttribute.DataType.DT_Vec4: - { - int columns = attr.GetColumns(); - var vec = new float[columns]; - for (int i 
= 0; i < columns; i++) - vec[i] = reader.ReadSingle(); - attr.Value = vec; - break; - } - - case NodeAttribute.DataType.DT_Mat2: - case NodeAttribute.DataType.DT_Mat3: - case NodeAttribute.DataType.DT_Mat3x4: - case NodeAttribute.DataType.DT_Mat4x3: - case NodeAttribute.DataType.DT_Mat4: - { - int columns = attr.GetColumns(); - int rows = attr.GetRows(); - var mat = new Matrix(rows, columns); - attr.Value = mat; - - for (int col = 0; col < columns; col++) + for (int row = 0; row < rows; row++) { - for (int row = 0; row < rows; row++) - { - mat[row, col] = reader.ReadSingle(); - } + mat[row, col] = reader.ReadSingle(); } - break; } - - case NodeAttribute.DataType.DT_Bool: - attr.Value = reader.ReadByte() != 0; break; + } - case NodeAttribute.DataType.DT_ULongLong: - attr.Value = reader.ReadUInt64(); - break; + case NodeAttribute.DataType.DT_Bool: + attr.Value = reader.ReadByte() != 0; + break; - case NodeAttribute.DataType.DT_Long: - case NodeAttribute.DataType.DT_Int64: - attr.Value = reader.ReadInt64(); - break; + case NodeAttribute.DataType.DT_ULongLong: + attr.Value = reader.ReadUInt64(); + break; - case NodeAttribute.DataType.DT_Int8: - attr.Value = reader.ReadSByte(); - break; + case NodeAttribute.DataType.DT_Long: + case NodeAttribute.DataType.DT_Int64: + attr.Value = reader.ReadInt64(); + break; - case NodeAttribute.DataType.DT_UUID: - attr.Value = new Guid(reader.ReadBytes(16)); - break; + case NodeAttribute.DataType.DT_Int8: + attr.Value = reader.ReadSByte(); + break; - default: - // Strings are serialized differently for each file format and should be - // handled by the format-specific ReadAttribute() - throw new InvalidFormatException(String.Format("ReadAttribute() not implemented for type {0}", type)); - } + case NodeAttribute.DataType.DT_UUID: + attr.Value = new Guid(reader.ReadBytes(16)); + break; - return attr; + default: + // Strings are serialized differently for each file format and should be + // handled by the format-specific ReadAttribute() + throw new InvalidFormatException(String.Format("ReadAttribute() not implemented for type {0}", type)); } - public static void WriteAttribute(BinaryWriter writer, NodeAttribute attr) - { - switch (attr.Type) - { - case NodeAttribute.DataType.DT_None: - break; - - case NodeAttribute.DataType.DT_Byte: - writer.Write((Byte)attr.Value); - break; - - case NodeAttribute.DataType.DT_Short: - writer.Write((Int16)attr.Value); - break; - - case NodeAttribute.DataType.DT_UShort: - writer.Write((UInt16)attr.Value); - break; - - case NodeAttribute.DataType.DT_Int: - writer.Write((Int32)attr.Value); - break; - - case NodeAttribute.DataType.DT_UInt: - writer.Write((UInt32)attr.Value); - break; - - case NodeAttribute.DataType.DT_Float: - writer.Write((float)attr.Value); - break; - - case NodeAttribute.DataType.DT_Double: - writer.Write((Double)attr.Value); - break; - - case NodeAttribute.DataType.DT_IVec2: - case NodeAttribute.DataType.DT_IVec3: - case NodeAttribute.DataType.DT_IVec4: - foreach (var item in (int[])attr.Value) - { - writer.Write(item); - } - break; - - case NodeAttribute.DataType.DT_Vec2: - case NodeAttribute.DataType.DT_Vec3: - case NodeAttribute.DataType.DT_Vec4: - foreach (var item in (float[])attr.Value) - { - writer.Write(item); - } - break; + return attr; + } - case NodeAttribute.DataType.DT_Mat2: - case NodeAttribute.DataType.DT_Mat3: - case NodeAttribute.DataType.DT_Mat3x4: - case NodeAttribute.DataType.DT_Mat4x3: - case NodeAttribute.DataType.DT_Mat4: + public static void WriteAttribute(BinaryWriter writer, NodeAttribute 
attr) + { + switch (attr.Type) + { + case NodeAttribute.DataType.DT_None: + break; + + case NodeAttribute.DataType.DT_Byte: + writer.Write((Byte)attr.Value); + break; + + case NodeAttribute.DataType.DT_Short: + writer.Write((Int16)attr.Value); + break; + + case NodeAttribute.DataType.DT_UShort: + writer.Write((UInt16)attr.Value); + break; + + case NodeAttribute.DataType.DT_Int: + writer.Write((Int32)attr.Value); + break; + + case NodeAttribute.DataType.DT_UInt: + writer.Write((UInt32)attr.Value); + break; + + case NodeAttribute.DataType.DT_Float: + writer.Write((float)attr.Value); + break; + + case NodeAttribute.DataType.DT_Double: + writer.Write((Double)attr.Value); + break; + + case NodeAttribute.DataType.DT_IVec2: + case NodeAttribute.DataType.DT_IVec3: + case NodeAttribute.DataType.DT_IVec4: + foreach (var item in (int[])attr.Value) + { + writer.Write(item); + } + break; + + case NodeAttribute.DataType.DT_Vec2: + case NodeAttribute.DataType.DT_Vec3: + case NodeAttribute.DataType.DT_Vec4: + foreach (var item in (float[])attr.Value) + { + writer.Write(item); + } + break; + + case NodeAttribute.DataType.DT_Mat2: + case NodeAttribute.DataType.DT_Mat3: + case NodeAttribute.DataType.DT_Mat3x4: + case NodeAttribute.DataType.DT_Mat4x3: + case NodeAttribute.DataType.DT_Mat4: + { + var mat = (Matrix)attr.Value; + for (int col = 0; col < mat.cols; col++) { - var mat = (Matrix)attr.Value; - for (int col = 0; col < mat.cols; col++) + for (int row = 0; row < mat.rows; row++) { - for (int row = 0; row < mat.rows; row++) - { - writer.Write((float)mat[row, col]); - } + writer.Write((float)mat[row, col]); } - break; } - - case NodeAttribute.DataType.DT_Bool: - writer.Write((Byte)((Boolean)attr.Value ? 1 : 0)); break; + } - case NodeAttribute.DataType.DT_ULongLong: - writer.Write((UInt64)attr.Value); - break; + case NodeAttribute.DataType.DT_Bool: + writer.Write((Byte)((Boolean)attr.Value ? 
1 : 0)); + break; - case NodeAttribute.DataType.DT_Long: - case NodeAttribute.DataType.DT_Int64: - writer.Write((Int64)attr.Value); - break; + case NodeAttribute.DataType.DT_ULongLong: + writer.Write((UInt64)attr.Value); + break; - case NodeAttribute.DataType.DT_Int8: - writer.Write((SByte)attr.Value); - break; + case NodeAttribute.DataType.DT_Long: + case NodeAttribute.DataType.DT_Int64: + writer.Write((Int64)attr.Value); + break; - case NodeAttribute.DataType.DT_UUID: - writer.Write(((Guid)attr.Value).ToByteArray()); - break; + case NodeAttribute.DataType.DT_Int8: + writer.Write((SByte)attr.Value); + break; + + case NodeAttribute.DataType.DT_UUID: + writer.Write(((Guid)attr.Value).ToByteArray()); + break; - default: - throw new InvalidFormatException(String.Format("WriteAttribute() not implemented for type {0}", attr.Type)); - } + default: + throw new InvalidFormatException(String.Format("WriteAttribute() not implemented for type {0}", attr.Type)); } + } - public static CompressionMethod CompressionFlagsToMethod(byte flags) + public static CompressionMethod CompressionFlagsToMethod(byte flags) + { + return (flags & 0x0f) switch { - return (flags & 0x0f) switch - { - (int)CompressionMethod.None => CompressionMethod.None, - (int)CompressionMethod.Zlib => CompressionMethod.Zlib, - (int)CompressionMethod.LZ4 => CompressionMethod.LZ4, - _ => throw new ArgumentException("Invalid compression method") - }; - } + (int)CompressionMethod.None => CompressionMethod.None, + (int)CompressionMethod.Zlib => CompressionMethod.Zlib, + (int)CompressionMethod.LZ4 => CompressionMethod.LZ4, + _ => throw new ArgumentException("Invalid compression method") + }; + } - public static LSCompressionLevel CompressionFlagsToLevel(byte flags) + public static LSCompressionLevel CompressionFlagsToLevel(byte flags) + { + return (flags & 0xf0) switch { - return (flags & 0xf0) switch - { - (int)CompressionFlags.FastCompress => LSCompressionLevel.FastCompression, - (int)CompressionFlags.DefaultCompress => LSCompressionLevel.DefaultCompression, - (int)CompressionFlags.MaxCompressionLevel => LSCompressionLevel.MaxCompression, - _ => throw new ArgumentException("Invalid compression flags") - }; - } + (int)CompressionFlags.FastCompress => LSCompressionLevel.FastCompression, + (int)CompressionFlags.DefaultCompress => LSCompressionLevel.DefaultCompression, + (int)CompressionFlags.MaxCompressionLevel => LSCompressionLevel.MaxCompression, + _ => throw new ArgumentException("Invalid compression flags") + }; + } - public static byte MakeCompressionFlags(CompressionMethod method, LSCompressionLevel level) + public static byte MakeCompressionFlags(CompressionMethod method, LSCompressionLevel level) + { + if (method == CompressionMethod.None) { - if (method == CompressionMethod.None) - { - return 0; - } - - byte flags = 0; - if (method == CompressionMethod.Zlib) - flags = 0x1; - else if (method == CompressionMethod.LZ4) - flags = 0x2; - - if (level == LSCompressionLevel.FastCompression) - flags |= 0x10; - else if (level == LSCompressionLevel.DefaultCompression) - flags |= 0x20; - else if (level == LSCompressionLevel.MaxCompression) - flags |= 0x40; - - return flags; + return 0; } - public static byte[] Decompress(byte[] compressed, int decompressedSize, byte compressionFlags, bool chunked = false) - { - switch (CompressionFlagsToMethod(compressionFlags)) - { - case CompressionMethod.None: - return compressed; + byte flags = 0; + if (method == CompressionMethod.Zlib) + flags = 0x1; + else if (method == CompressionMethod.LZ4) + flags = 0x2; 
- case CompressionMethod.Zlib: + if (level == LSCompressionLevel.FastCompression) + flags |= 0x10; + else if (level == LSCompressionLevel.DefaultCompression) + flags |= 0x20; + else if (level == LSCompressionLevel.MaxCompression) + flags |= 0x40; + + return flags; + } + + public static byte[] Decompress(byte[] compressed, int decompressedSize, byte compressionFlags, bool chunked = false) + { + switch (CompressionFlagsToMethod(compressionFlags)) + { + case CompressionMethod.None: + return compressed; + + case CompressionMethod.Zlib: + { + using (var compressedStream = new MemoryStream(compressed)) + using (var decompressedStream = new MemoryStream()) + using (var stream = new ZLibStream(compressedStream, CompressionMode.Decompress)) { - using (var compressedStream = new MemoryStream(compressed)) - using (var decompressedStream = new MemoryStream()) - using (var stream = new ZLibStream(compressedStream, CompressionMode.Decompress)) + byte[] buf = new byte[0x10000]; + int length = 0; + while ((length = stream.Read(buf, 0, buf.Length)) > 0) { - byte[] buf = new byte[0x10000]; - int length = 0; - while ((length = stream.Read(buf, 0, buf.Length)) > 0) - { - decompressedStream.Write(buf, 0, length); - } - - return decompressedStream.ToArray(); + decompressedStream.Write(buf, 0, length); } - } - case CompressionMethod.LZ4: - if (chunked) - { - var decompressed = Native.LZ4FrameCompressor.Decompress(compressed); - return decompressed; + return decompressedStream.ToArray(); } - else - { - var decompressed = new byte[decompressedSize]; - LZ4Codec.Decode(compressed, 0, compressed.Length, decompressed, 0, decompressedSize, true); - return decompressed; - } - - default: - { - var msg = String.Format("No decompressor found for this format: {0}", compressionFlags); - throw new InvalidDataException(msg); - } - } + } + + case CompressionMethod.LZ4: + if (chunked) + { + var decompressed = Native.LZ4FrameCompressor.Decompress(compressed); + return decompressed; + } + else + { + var decompressed = new byte[decompressedSize]; + LZ4Codec.Decode(compressed, 0, compressed.Length, decompressed, 0, decompressedSize, true); + return decompressed; + } + + default: + { + var msg = String.Format("No decompressor found for this format: {0}", compressionFlags); + throw new InvalidDataException(msg); + } } + } - public static byte[] Compress(byte[] uncompressed, byte compressionFlags) + public static byte[] Compress(byte[] uncompressed, byte compressionFlags) + { + return Compress(uncompressed, (CompressionMethod)(compressionFlags & 0x0F), CompressionFlagsToLevel(compressionFlags)); + } + + public static byte[] Compress(byte[] uncompressed, CompressionMethod method, LSCompressionLevel compressionLevel, bool chunked = false) + { + return method switch { - return Compress(uncompressed, (CompressionMethod)(compressionFlags & 0x0F), CompressionFlagsToLevel(compressionFlags)); - } + CompressionMethod.None => uncompressed, + CompressionMethod.Zlib => CompressZlib(uncompressed, compressionLevel), + CompressionMethod.LZ4 => CompressLZ4(uncompressed, compressionLevel, chunked), + _ => throw new ArgumentException("Invalid compression method specified") + }; + } - public static byte[] Compress(byte[] uncompressed, CompressionMethod method, LSCompressionLevel compressionLevel, bool chunked = false) + public static byte[] CompressZlib(byte[] uncompressed, LSCompressionLevel compressionLevel) + { + var level = compressionLevel switch { - return method switch - { - CompressionMethod.None => uncompressed, - CompressionMethod.Zlib => 
CompressZlib(uncompressed, compressionLevel), - CompressionMethod.LZ4 => CompressLZ4(uncompressed, compressionLevel, chunked), - _ => throw new ArgumentException("Invalid compression method specified") - }; - } + LSCompressionLevel.FastCompression => CompressionLevel.Fastest, + LSCompressionLevel.DefaultCompression => CompressionLevel.Optimal, + LSCompressionLevel.MaxCompression => CompressionLevel.SmallestSize, + _ => throw new ArgumentException() + }; + + using var outputStream = new MemoryStream(); + using var compressor = new ZLibStream(outputStream, level); + + compressor.Write(uncompressed, 0, uncompressed.Length); + compressor.Flush(); + return outputStream.ToArray(); + } - public static byte[] CompressZlib(byte[] uncompressed, LSCompressionLevel compressionLevel) + public static byte[] CompressLZ4(byte[] uncompressed, LSCompressionLevel compressionLevel, bool chunked = false) + { + if (chunked) { - var level = compressionLevel switch - { - LSCompressionLevel.FastCompression => CompressionLevel.Fastest, - LSCompressionLevel.DefaultCompression => CompressionLevel.Optimal, - LSCompressionLevel.MaxCompression => CompressionLevel.SmallestSize, - _ => throw new ArgumentException() - }; - - using var outputStream = new MemoryStream(); - using var compressor = new ZLibStream(outputStream, level); - - compressor.Write(uncompressed, 0, uncompressed.Length); - compressor.Flush(); - return outputStream.ToArray(); + return Native.LZ4FrameCompressor.Compress(uncompressed); } - - public static byte[] CompressLZ4(byte[] uncompressed, LSCompressionLevel compressionLevel, bool chunked = false) + else if (compressionLevel == LSCompressionLevel.FastCompression) + { + return LZ4Codec.Encode(uncompressed, 0, uncompressed.Length); + } + else { - if (chunked) - { - return Native.LZ4FrameCompressor.Compress(uncompressed); - } - else if (compressionLevel == LSCompressionLevel.FastCompression) - { - return LZ4Codec.Encode(uncompressed, 0, uncompressed.Length); - } - else - { - return LZ4Codec.EncodeHC(uncompressed, 0, uncompressed.Length); - } + return LZ4Codec.EncodeHC(uncompressed, 0, uncompressed.Length); } } } diff --git a/LSLib/LS/Common.cs b/LSLib/LS/Common.cs index a3c59c04..5f6ccc25 100644 --- a/LSLib/LS/Common.cs +++ b/LSLib/LS/Common.cs @@ -1,47 +1,46 @@ using System; using System.Text.RegularExpressions; -namespace LSLib.LS +namespace LSLib.LS; + +public static class Common { - public static class Common - { - public const int MajorVersion = 1; + public const int MajorVersion = 1; - public const int MinorVersion = 19; + public const int MinorVersion = 19; - public const int PatchVersion = 0; + public const int PatchVersion = 0; - // Version of LSTools profile data in generated DAE files + // Version of LSTools profile data in generated DAE files public const int ColladaMetadataVersion = 3; /// /// Returns the version number of the LSLib library /// public static string LibraryVersion() - { - return String.Format("{0}.{1}.{2}", MajorVersion, MinorVersion, PatchVersion); - } - - /// - /// Compares the string against a given pattern. - /// - /// The string - /// The pattern to match, where "*" means any sequence of characters, and "?" means any single character - /// true if the string matches the given pattern; otherwise false. - public static bool Like(this string str, string pattern) - { - return new Regex("^" + Regex.Escape(pattern).Replace(@"\*", ".*").Replace(@"\?", ".") + "$", RegexOptions.Singleline).IsMatch(str); - } - - /// - /// Compares the string against a given pattern. 
- /// - /// The string - /// The pattern to match as a RegEx object - /// true if the string matches the given pattern; otherwise false. - public static bool Like(this string str, Regex pattern) - { - return pattern.IsMatch(str); - } + { + return String.Format("{0}.{1}.{2}", MajorVersion, MinorVersion, PatchVersion); + } + + /// + /// Compares the string against a given pattern. + /// + /// The string + /// The pattern to match, where "*" means any sequence of characters, and "?" means any single character + /// true if the string matches the given pattern; otherwise false. + public static bool Like(this string str, string pattern) + { + return new Regex("^" + Regex.Escape(pattern).Replace(@"\*", ".*").Replace(@"\?", ".") + "$", RegexOptions.Singleline).IsMatch(str); + } + + /// + /// Compares the string against a given pattern. + /// + /// The string + /// The pattern to match as a RegEx object + /// true if the string matches the given pattern; otherwise false. + public static bool Like(this string str, Regex pattern) + { + return pattern.IsMatch(str); } } diff --git a/LSLib/LS/Enums/CompressionFlags.cs b/LSLib/LS/Enums/CompressionFlags.cs index 72fb7a19..131976e0 100644 --- a/LSLib/LS/Enums/CompressionFlags.cs +++ b/LSLib/LS/Enums/CompressionFlags.cs @@ -1,9 +1,8 @@ -namespace LSLib.LS.Enums +namespace LSLib.LS.Enums; + +public enum CompressionFlags { - public enum CompressionFlags - { - FastCompress = 0x10, - DefaultCompress = 0x20, - MaxCompressionLevel = 0x40 - }; -} \ No newline at end of file + FastCompress = 0x10, + DefaultCompress = 0x20, + MaxCompressionLevel = 0x40 +}; \ No newline at end of file diff --git a/LSLib/LS/Enums/CompressionLevel.cs b/LSLib/LS/Enums/CompressionLevel.cs index 0119e48a..669ad4c8 100644 --- a/LSLib/LS/Enums/CompressionLevel.cs +++ b/LSLib/LS/Enums/CompressionLevel.cs @@ -1,9 +1,8 @@ -namespace LSLib.LS.Enums +namespace LSLib.LS.Enums; + +public enum LSCompressionLevel { - public enum LSCompressionLevel - { - FastCompression, - DefaultCompression, - MaxCompression - }; -} \ No newline at end of file + FastCompression, + DefaultCompression, + MaxCompression +}; \ No newline at end of file diff --git a/LSLib/LS/Enums/CompressionMethod.cs b/LSLib/LS/Enums/CompressionMethod.cs index 3d6f36c6..84ef27de 100644 --- a/LSLib/LS/Enums/CompressionMethod.cs +++ b/LSLib/LS/Enums/CompressionMethod.cs @@ -1,9 +1,8 @@ -namespace LSLib.LS.Enums +namespace LSLib.LS.Enums; + +public enum CompressionMethod { - public enum CompressionMethod - { - None = 0, - Zlib = 1, - LZ4 = 2 - }; -} \ No newline at end of file + None = 0, + Zlib = 1, + LZ4 = 2 +}; \ No newline at end of file diff --git a/LSLib/LS/Enums/Game.cs b/LSLib/LS/Enums/Game.cs index ac6922b9..ae65b037 100644 --- a/LSLib/LS/Enums/Game.cs +++ b/LSLib/LS/Enums/Game.cs @@ -1,65 +1,64 @@ -namespace LSLib.LS.Enums +namespace LSLib.LS.Enums; + +public enum Game { - public enum Game - { - DivinityOriginalSin = 0, - DivinityOriginalSinEE = 1, - DivinityOriginalSin2 = 2, - DivinityOriginalSin2DE = 3, - BaldursGate3 = 4, - Unset = 5 - }; + DivinityOriginalSin = 0, + DivinityOriginalSinEE = 1, + DivinityOriginalSin2 = 2, + DivinityOriginalSin2DE = 3, + BaldursGate3 = 4, + Unset = 5 +}; - public static class GameEnumExtensions +public static class GameEnumExtensions +{ + public static bool IsFW3(this Game game) { - public static bool IsFW3(this Game game) - { - return game != Game.DivinityOriginalSin - && game != Game.DivinityOriginalSinEE; - } + return game != Game.DivinityOriginalSin + && game != Game.DivinityOriginalSinEE; 
+ } - public static PackageVersion PAKVersion(this Game game) + public static PackageVersion PAKVersion(this Game game) + { + switch (game) { - switch (game) - { - case Game.DivinityOriginalSin: return PackageVersion.V7; - case Game.DivinityOriginalSinEE: return PackageVersion.V9; - case Game.DivinityOriginalSin2: return PackageVersion.V10; - case Game.DivinityOriginalSin2DE: return PackageVersion.V13; - case Game.BaldursGate3: return PackageVersion.V18; - default: return PackageVersion.V18; - } + case Game.DivinityOriginalSin: return PackageVersion.V7; + case Game.DivinityOriginalSinEE: return PackageVersion.V9; + case Game.DivinityOriginalSin2: return PackageVersion.V10; + case Game.DivinityOriginalSin2DE: return PackageVersion.V13; + case Game.BaldursGate3: return PackageVersion.V18; + default: return PackageVersion.V18; } + } - public static LSFVersion LSFVersion(this Game game) + public static LSFVersion LSFVersion(this Game game) + { + switch (game) { - switch (game) - { - case Game.DivinityOriginalSin: return Enums.LSFVersion.VerChunkedCompress; - case Game.DivinityOriginalSinEE: return Enums.LSFVersion.VerChunkedCompress; - case Game.DivinityOriginalSin2: return Enums.LSFVersion.VerExtendedNodes; - case Game.DivinityOriginalSin2DE: return Enums.LSFVersion.VerExtendedNodes; - case Game.BaldursGate3: return Enums.LSFVersion.VerBG3AdditionalBlob; - default: return Enums.LSFVersion.VerBG3AdditionalBlob; - } + case Game.DivinityOriginalSin: return Enums.LSFVersion.VerChunkedCompress; + case Game.DivinityOriginalSinEE: return Enums.LSFVersion.VerChunkedCompress; + case Game.DivinityOriginalSin2: return Enums.LSFVersion.VerExtendedNodes; + case Game.DivinityOriginalSin2DE: return Enums.LSFVersion.VerExtendedNodes; + case Game.BaldursGate3: return Enums.LSFVersion.VerBG3AdditionalBlob; + default: return Enums.LSFVersion.VerBG3AdditionalBlob; } + } - public static LSXVersion LSXVersion(this Game game) + public static LSXVersion LSXVersion(this Game game) + { + switch (game) { - switch (game) - { - case Game.DivinityOriginalSin: - case Game.DivinityOriginalSinEE: - case Game.DivinityOriginalSin2: - case Game.DivinityOriginalSin2DE: - return Enums.LSXVersion.V3; + case Game.DivinityOriginalSin: + case Game.DivinityOriginalSinEE: + case Game.DivinityOriginalSin2: + case Game.DivinityOriginalSin2DE: + return Enums.LSXVersion.V3; - case Game.BaldursGate3: - return Enums.LSXVersion.V4; + case Game.BaldursGate3: + return Enums.LSXVersion.V4; - default: - return Enums.LSXVersion.V4; - } + default: + return Enums.LSXVersion.V4; } } } diff --git a/LSLib/LS/Enums/LSFVersion.cs b/LSLib/LS/Enums/LSFVersion.cs index 8e0aac22..790e35ab 100644 --- a/LSLib/LS/Enums/LSFVersion.cs +++ b/LSLib/LS/Enums/LSFVersion.cs @@ -1,63 +1,62 @@ -namespace LSLib.LS.Enums +namespace LSLib.LS.Enums; + +public enum LSFVersion +{ + /// + /// Initial version of the LSF format + /// + VerInitial = 0x01, + + /// + /// LSF version that added chunked compression for substreams + /// + VerChunkedCompress = 0x02, + + /// + /// LSF version that extended the node descriptors + /// + VerExtendedNodes = 0x03, + + /// + /// BG3 version, no changes found so far apart from version numbering + /// + VerBG3 = 0x04, + + /// + /// BG3 version with updated header metadata + /// + VerBG3ExtendedHeader = 0x05, + + /// + /// BG3 version with unknown additions + /// + VerBG3AdditionalBlob = 0x06, + + /// + /// BG3 Patch 3 version with unknown additions + /// + VerBG3Patch3 = 0x07, + + /// + /// Latest input version supported by this library + /// + 
MaxReadVersion = 0x07, + + /// + /// Latest output version supported by this library + /// + MaxWriteVersion = 0x06 +} + +public enum LSXVersion { - public enum LSFVersion - { - /// - /// Initial version of the LSF format - /// - VerInitial = 0x01, - - /// - /// LSF version that added chunked compression for substreams - /// - VerChunkedCompress = 0x02, - - /// - /// LSF version that extended the node descriptors - /// - VerExtendedNodes = 0x03, - - /// - /// BG3 version, no changes found so far apart from version numbering - /// - VerBG3 = 0x04, - - /// - /// BG3 version with updated header metadata - /// - VerBG3ExtendedHeader = 0x05, - - /// - /// BG3 version with unknown additions - /// - VerBG3AdditionalBlob = 0x06, - - /// - /// BG3 Patch 3 version with unknown additions - /// - VerBG3Patch3 = 0x07, - - /// - /// Latest input version supported by this library - /// - MaxReadVersion = 0x07, - - /// - /// Latest output version supported by this library - /// - MaxWriteVersion = 0x06 - } - - public enum LSXVersion - { - /// - /// Version used in D:OS 2 (DE) - /// - V3 = 3, - /// - /// Version used in BG3 - /// Replaces type IDs with type names - /// - V4 = 4 - } + /// + /// Version used in D:OS 2 (DE) + /// + V3 = 3, + /// + /// Version used in BG3 + /// Replaces type IDs with type names + /// + V4 = 4 } \ No newline at end of file diff --git a/LSLib/LS/Enums/LogLevel.cs b/LSLib/LS/Enums/LogLevel.cs index 28f8a4d8..40fb931e 100644 --- a/LSLib/LS/Enums/LogLevel.cs +++ b/LSLib/LS/Enums/LogLevel.cs @@ -1,15 +1,14 @@ // ReSharper disable InconsistentNaming -namespace LSLib.LS.Enums +namespace LSLib.LS.Enums; + +public enum LogLevel { - public enum LogLevel - { - OFF = 0, - FATAL = 100, - ERROR = 200, - WARN = 300, - INFO = 400, - DEBUG = 500, - TRACE = 600, - ALL = 1000 - } + OFF = 0, + FATAL = 100, + ERROR = 200, + WARN = 300, + INFO = 400, + DEBUG = 500, + TRACE = 600, + ALL = 1000 } diff --git a/LSLib/LS/Enums/PackageVersion.cs b/LSLib/LS/Enums/PackageVersion.cs index 190d4015..35613c7d 100644 --- a/LSLib/LS/Enums/PackageVersion.cs +++ b/LSLib/LS/Enums/PackageVersion.cs @@ -1,13 +1,12 @@ -namespace LSLib.LS.Enums +namespace LSLib.LS.Enums; + +public enum PackageVersion { - public enum PackageVersion - { - V7 = 7, // D:OS 1 - V9 = 9, // D:OS 1 EE - V10 = 10, // D:OS 2 - V13 = 13, // D:OS 2 DE - V15 = 15, // BG3 EA - V16 = 16, // BG3 EA Patch4 - V18 = 18 // BG3 Release - }; -} + V7 = 7, // D:OS 1 + V9 = 9, // D:OS 1 EE + V10 = 10, // D:OS 2 + V13 = 13, // D:OS 2 DE + V15 = 15, // BG3 EA + V16 = 16, // BG3 EA Patch4 + V18 = 18 // BG3 Release +}; diff --git a/LSLib/LS/Enums/ResourceFormat.cs b/LSLib/LS/Enums/ResourceFormat.cs index 20d25d6b..4ffce834 100644 --- a/LSLib/LS/Enums/ResourceFormat.cs +++ b/LSLib/LS/Enums/ResourceFormat.cs @@ -1,10 +1,9 @@ -namespace LSLib.LS.Enums +namespace LSLib.LS.Enums; + +public enum ResourceFormat { - public enum ResourceFormat - { - LSX, - LSB, - LSF, - LSJ - }; -} \ No newline at end of file + LSX, + LSB, + LSF, + LSJ +}; \ No newline at end of file diff --git a/LSLib/LS/FileManager.cs b/LSLib/LS/FileManager.cs index 478a2692..f8172811 100644 --- a/LSLib/LS/FileManager.cs +++ b/LSLib/LS/FileManager.cs @@ -1,50 +1,49 @@ using System; using System.IO; -namespace LSLib.LS +namespace LSLib.LS; + +public class FileManager { - public class FileManager + public static void TryToCreateDirectory(string path) { - public static void TryToCreateDirectory(string path) + string outputPath = path; + + if (string.IsNullOrWhiteSpace(outputPath)) + { + throw new 
ArgumentNullException(nameof(path), "Cannot create directory without path"); + } + + // throw exception if path is relative + Uri uri; + try + { + Uri.TryCreate(outputPath, UriKind.RelativeOrAbsolute, out uri); + } + catch (InvalidOperationException) + { + throw new ArgumentException("Cannot create directory without absolute path", nameof(path)); + } + + if (!Path.IsPathRooted(outputPath) || !uri.IsFile) + { + throw new ArgumentException("Cannot create directory without absolute path", nameof(path)); + } + + // validate path + outputPath = Path.GetFullPath(path); + + outputPath = Path.GetDirectoryName(outputPath); + + if (outputPath == null) + { + throw new NullReferenceException("Cannot create directory without non-null output path"); + } + + // if the directory does not exist, create the directory + if (!Directory.Exists(outputPath)) { - string outputPath = path; - - if (string.IsNullOrWhiteSpace(outputPath)) - { - throw new ArgumentNullException(nameof(path), "Cannot create directory without path"); - } - - // throw exception if path is relative - Uri uri; - try - { - Uri.TryCreate(outputPath, UriKind.RelativeOrAbsolute, out uri); - } - catch (InvalidOperationException) - { - throw new ArgumentException("Cannot create directory without absolute path", nameof(path)); - } - - if (!Path.IsPathRooted(outputPath) || !uri.IsFile) - { - throw new ArgumentException("Cannot create directory without absolute path", nameof(path)); - } - - // validate path - outputPath = Path.GetFullPath(path); - - outputPath = Path.GetDirectoryName(outputPath); - - if (outputPath == null) - { - throw new NullReferenceException("Cannot create directory without non-null output path"); - } - - // if the directory does not exist, create the directory - if (!Directory.Exists(outputPath)) - { - Directory.CreateDirectory(outputPath); - } + Directory.CreateDirectory(outputPath); } } } diff --git a/LSLib/LS/Localization.cs b/LSLib/LS/Localization.cs index a2a2a550..0d359709 100644 --- a/LSLib/LS/Localization.cs +++ b/LSLib/LS/Localization.cs @@ -5,313 +5,312 @@ using System.Collections.Generic; using System.Xml; -namespace LSLib.LS +namespace LSLib.LS; + +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct LocaHeader { - [StructLayout(LayoutKind.Sequential, Pack = 1)] - internal struct LocaHeader - { - public static UInt32 DefaultSignature = 0x41434f4c; // 'LOCA' + public static UInt32 DefaultSignature = 0x41434f4c; // 'LOCA' - public UInt32 Signature; - public UInt32 NumEntries; - public UInt32 TextsOffset; - } + public UInt32 Signature; + public UInt32 NumEntries; + public UInt32 TextsOffset; +} - [StructLayout(LayoutKind.Sequential, Pack = 1)] - internal struct LocaEntry - { - [MarshalAs(UnmanagedType.ByValArray, SizeConst = 64)] - public byte[] Key; +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct LocaEntry +{ + [MarshalAs(UnmanagedType.ByValArray, SizeConst = 64)] + public byte[] Key; - public UInt16 Version; - public UInt32 Length; + public UInt16 Version; + public UInt32 Length; - public string KeyString + public string KeyString + { + get { - get - { - int nameLen; - for (nameLen = 0; nameLen < Key.Length && Key[nameLen] != 0; nameLen++) ; - return Encoding.UTF8.GetString(Key, 0, nameLen); - } + int nameLen; + for (nameLen = 0; nameLen < Key.Length && Key[nameLen] != 0; nameLen++) ; + return Encoding.UTF8.GetString(Key, 0, nameLen); + } - set - { - var bytes = Encoding.UTF8.GetBytes(value); - Key = new byte[64]; - Array.Clear(Key, 0, Key.Length); - Array.Copy(bytes, Key, bytes.Length); 
- } + set + { + var bytes = Encoding.UTF8.GetBytes(value); + Key = new byte[64]; + Array.Clear(Key, 0, Key.Length); + Array.Copy(bytes, Key, bytes.Length); } } +} - public class LocalizedText - { - public string Key; - public UInt16 Version; - public string Text; - } +public class LocalizedText +{ + public string Key; + public UInt16 Version; + public string Text; +} - public class LocaResource - { - public List Entries; - } +public class LocaResource +{ + public List Entries; +} - public class LocaReader(Stream stream) : IDisposable +public class LocaReader(Stream stream) : IDisposable +{ + private readonly Stream Stream = stream; + + public void Dispose() { - private readonly Stream Stream = stream; + Stream.Dispose(); + } - public void Dispose() + public LocaResource Read() + { + using var reader = new BinaryReader(Stream); + var loca = new LocaResource { - Stream.Dispose(); - } + Entries = [] + }; + var header = BinUtils.ReadStruct(reader); - public LocaResource Read() + if (header.Signature != (ulong)LocaHeader.DefaultSignature) { - using var reader = new BinaryReader(Stream); - var loca = new LocaResource - { - Entries = [] - }; - var header = BinUtils.ReadStruct(reader); - - if (header.Signature != (ulong)LocaHeader.DefaultSignature) - { - throw new InvalidDataException("Incorrect signature in localization file"); - } + throw new InvalidDataException("Incorrect signature in localization file"); + } - var entries = new LocaEntry[header.NumEntries]; - BinUtils.ReadStructs(reader, entries); + var entries = new LocaEntry[header.NumEntries]; + BinUtils.ReadStructs(reader, entries); - Stream.Position = header.TextsOffset; - foreach (var entry in entries) + Stream.Position = header.TextsOffset; + foreach (var entry in entries) + { + var text = Encoding.UTF8.GetString(reader.ReadBytes((int)entry.Length - 1)); + loca.Entries.Add(new LocalizedText { - var text = Encoding.UTF8.GetString(reader.ReadBytes((int)entry.Length - 1)); - loca.Entries.Add(new LocalizedText - { - Key = entry.KeyString, - Version = entry.Version, - Text = text - }); - reader.ReadByte(); - } - - return loca; + Key = entry.KeyString, + Version = entry.Version, + Text = text + }); + reader.ReadByte(); } + + return loca; } +} - public class LocaWriter(Stream stream) - { - private readonly Stream stream = stream; +public class LocaWriter(Stream stream) +{ + private readonly Stream stream = stream; - public void Write(LocaResource res) + public void Write(LocaResource res) + { + using var writer = new BinaryWriter(stream); + var header = new LocaHeader { - using var writer = new BinaryWriter(stream); - var header = new LocaHeader + Signature = LocaHeader.DefaultSignature, + NumEntries = (uint)res.Entries.Count, + TextsOffset = (uint)(Marshal.SizeOf(typeof(LocaHeader)) + Marshal.SizeOf(typeof(LocaEntry)) * res.Entries.Count) + }; + BinUtils.WriteStruct(writer, ref header); + + var entries = new LocaEntry[header.NumEntries]; + for (var i = 0; i < entries.Length; i++) + { + var entry = res.Entries[i]; + entries[i] = new LocaEntry { - Signature = LocaHeader.DefaultSignature, - NumEntries = (uint)res.Entries.Count, - TextsOffset = (uint)(Marshal.SizeOf(typeof(LocaHeader)) + Marshal.SizeOf(typeof(LocaEntry)) * res.Entries.Count) + KeyString = entry.Key, + Version = entry.Version, + Length = (uint)Encoding.UTF8.GetByteCount(entry.Text) + 1 }; - BinUtils.WriteStruct(writer, ref header); - - var entries = new LocaEntry[header.NumEntries]; - for (var i = 0; i < entries.Length; i++) - { - var entry = res.Entries[i]; - entries[i] = new 
LocaEntry - { - KeyString = entry.Key, - Version = entry.Version, - Length = (uint)Encoding.UTF8.GetByteCount(entry.Text) + 1 - }; - } + } - BinUtils.WriteStructs(writer, entries); + BinUtils.WriteStructs(writer, entries); - foreach (var entry in res.Entries) - { - var bin = Encoding.UTF8.GetBytes(entry.Text); - writer.Write(bin); - writer.Write((Byte)0); - } + foreach (var entry in res.Entries) + { + var bin = Encoding.UTF8.GetBytes(entry.Text); + writer.Write(bin); + writer.Write((Byte)0); } } - public class LocaXmlReader(Stream stream) : IDisposable +} +public class LocaXmlReader(Stream stream) : IDisposable +{ + private readonly Stream stream = stream; + private XmlReader reader; + private LocaResource resource; + + public void Dispose() { - private readonly Stream stream = stream; - private XmlReader reader; - private LocaResource resource; + stream.Dispose(); + } - public void Dispose() + private void ReadElement() + { + switch (reader.Name) { - stream.Dispose(); - } + case "contentList": + // Root element + break; - private void ReadElement() - { - switch (reader.Name) - { - case "contentList": - // Root element - break; + case "content": + var key = reader["contentuid"]; + var version = reader["version"] != null ? UInt16.Parse(reader["version"]) : (UInt16)1; + var text = reader.ReadString(); - case "content": - var key = reader["contentuid"]; - var version = reader["version"] != null ? UInt16.Parse(reader["version"]) : (UInt16)1; - var text = reader.ReadString(); - - resource.Entries.Add(new LocalizedText - { - Key = key, - Version = version, - Text = text - }); - break; + resource.Entries.Add(new LocalizedText + { + Key = key, + Version = version, + Text = text + }); + break; - default: - throw new InvalidFormatException(String.Format("Unknown element encountered: {0}", reader.Name)); - } + default: + throw new InvalidFormatException(String.Format("Unknown element encountered: {0}", reader.Name)); } + } - public LocaResource Read() + public LocaResource Read() + { + resource = new LocaResource { - resource = new LocaResource - { - Entries = [] - }; + Entries = [] + }; - using (this.reader = XmlReader.Create(stream)) + using (this.reader = XmlReader.Create(stream)) + { + while (reader.Read()) { - while (reader.Read()) + if (reader.NodeType == XmlNodeType.Element) { - if (reader.NodeType == XmlNodeType.Element) - { - ReadElement(); - } + ReadElement(); } } - - return resource; } + + return resource; } +} - public class LocaXmlWriter(Stream stream) - { - private readonly Stream stream = stream; +public class LocaXmlWriter(Stream stream) +{ + private readonly Stream stream = stream; - public void Write(LocaResource res) + public void Write(LocaResource res) + { + var settings = new XmlWriterSettings { - var settings = new XmlWriterSettings - { - Indent = true, - IndentChars = "\t" - }; - - using var writer = XmlWriter.Create(stream, settings); - writer.WriteStartElement("contentList"); + Indent = true, + IndentChars = "\t" + }; - foreach (var entry in res.Entries) - { - writer.WriteStartElement("content"); - writer.WriteAttributeString("contentuid", entry.Key); - writer.WriteAttributeString("version", entry.Version.ToString()); - writer.WriteString(entry.Text); - writer.WriteEndElement(); - } + using var writer = XmlWriter.Create(stream, settings); + writer.WriteStartElement("contentList"); + foreach (var entry in res.Entries) + { + writer.WriteStartElement("content"); + writer.WriteAttributeString("contentuid", entry.Key); + writer.WriteAttributeString("version", 
entry.Version.ToString()); + writer.WriteString(entry.Text); writer.WriteEndElement(); - writer.Flush(); } + + writer.WriteEndElement(); + writer.Flush(); } +} - public enum LocaFormat - { - Loca, - Xml - }; +public enum LocaFormat +{ + Loca, + Xml +}; - public class LocaUtils +public class LocaUtils +{ + public static LocaFormat ExtensionToFileFormat(string path) { - public static LocaFormat ExtensionToFileFormat(string path) + var extension = Path.GetExtension(path).ToLower(); + + return extension switch { - var extension = Path.GetExtension(path).ToLower(); + ".loca" => LocaFormat.Loca, + ".xml" => LocaFormat.Xml, + _ => throw new ArgumentException("Unrecognized file extension: " + extension), + }; + } - return extension switch - { - ".loca" => LocaFormat.Loca, - ".xml" => LocaFormat.Xml, - _ => throw new ArgumentException("Unrecognized file extension: " + extension), - }; - } + public static LocaResource Load(string inputPath) + { + return Load(inputPath, ExtensionToFileFormat(inputPath)); + } - public static LocaResource Load(string inputPath) - { - return Load(inputPath, ExtensionToFileFormat(inputPath)); - } + public static LocaResource Load(string inputPath, LocaFormat format) + { + using var stream = File.Open(inputPath, FileMode.Open, FileAccess.Read, FileShare.Read); + return Load(stream, format); + } - public static LocaResource Load(string inputPath, LocaFormat format) + public static LocaResource Load(Stream stream, LocaFormat format) + { + switch (format) { - using var stream = File.Open(inputPath, FileMode.Open, FileAccess.Read, FileShare.Read); - return Load(stream, format); - } + case LocaFormat.Loca: + { + using var reader = new LocaReader(stream); + return reader.Read(); + } - public static LocaResource Load(Stream stream, LocaFormat format) - { - switch (format) - { - case LocaFormat.Loca: - { - using var reader = new LocaReader(stream); - return reader.Read(); - } - - case LocaFormat.Xml: - { - using var reader = new LocaXmlReader(stream); - return reader.Read(); - } - - default: - throw new ArgumentException("Invalid loca format"); - } - } + case LocaFormat.Xml: + { + using var reader = new LocaXmlReader(stream); + return reader.Read(); + } - public static void Save(LocaResource resource, string outputPath) - { - Save(resource, outputPath, ExtensionToFileFormat(outputPath)); + default: + throw new ArgumentException("Invalid loca format"); } + } + + public static void Save(LocaResource resource, string outputPath) + { + Save(resource, outputPath, ExtensionToFileFormat(outputPath)); + } - public static void Save(LocaResource resource, string outputPath, LocaFormat format) + public static void Save(LocaResource resource, string outputPath, LocaFormat format) + { + FileManager.TryToCreateDirectory(outputPath); + + using var file = File.Open(outputPath, FileMode.Create, FileAccess.Write); + switch (format) { - FileManager.TryToCreateDirectory(outputPath); + case LocaFormat.Loca: + { + var writer = new LocaWriter(file); + writer.Write(resource); + break; + } - using var file = File.Open(outputPath, FileMode.Create, FileAccess.Write); - switch (format) - { - case LocaFormat.Loca: - { - var writer = new LocaWriter(file); - writer.Write(resource); - break; - } - - case LocaFormat.Xml: - { - var writer = new LocaXmlWriter(file); - writer.Write(resource); - break; - } - - default: - throw new ArgumentException("Invalid loca format"); - } + case LocaFormat.Xml: + { + var writer = new LocaXmlWriter(file); + writer.Write(resource); + break; + } + + default: + throw new 
ArgumentException("Invalid loca format"); } } } diff --git a/LSLib/LS/Matrix.cs b/LSLib/LS/Matrix.cs index aac6d74b..fbcea8ac 100644 --- a/LSLib/LS/Matrix.cs +++ b/LSLib/LS/Matrix.cs @@ -36,559 +36,557 @@ OTHER DEALINGS IN THE SOFTWARE. using System; using System.Text.RegularExpressions; -namespace LSLib.LS -{ +namespace LSLib.LS; - public class Matrix - { - public int rows; - public int cols; - public double[,] mat; - public Matrix L; - public Matrix U; - private int[] pi; - private double detOfP = 1; +public class Matrix +{ + public int rows; + public int cols; + public double[,] mat; - public Matrix(int iRows, int iCols) // Matrix Class constructor - { - rows = iRows; - cols = iCols; - mat = new double[rows, cols]; - } + public Matrix L; + public Matrix U; + private int[] pi; + private double detOfP = 1; - public Boolean IsSquare() - { - return (rows == cols); - } + public Matrix(int iRows, int iCols) // Matrix Class constructor + { + rows = iRows; + cols = iCols; + mat = new double[rows, cols]; + } - public double this[int iRow, int iCol] // Access this matrix as a 2D array - { - get { return mat[iRow, iCol]; } - set { mat[iRow, iCol] = value; } - } + public Boolean IsSquare() + { + return (rows == cols); + } - public Matrix GetCol(int k) - { - Matrix m = new Matrix(rows, 1); - for (int i = 0; i < rows; i++) m[i, 0] = mat[i, k]; - return m; - } + public double this[int iRow, int iCol] // Access this matrix as a 2D array + { + get { return mat[iRow, iCol]; } + set { mat[iRow, iCol] = value; } + } - public void SetCol(Matrix v, int k) - { - for (int i = 0; i < rows; i++) mat[i, k] = v[i, 0]; - } + public Matrix GetCol(int k) + { + Matrix m = new Matrix(rows, 1); + for (int i = 0; i < rows; i++) m[i, 0] = mat[i, k]; + return m; + } - public void MakeLU() // Function for LU decomposition - { - if (!IsSquare()) throw new MException("The matrix is not square!"); - L = IdentityMatrix(rows, cols); - U = Duplicate(); + public void SetCol(Matrix v, int k) + { + for (int i = 0; i < rows; i++) mat[i, k] = v[i, 0]; + } + + public void MakeLU() // Function for LU decomposition + { + if (!IsSquare()) throw new MException("The matrix is not square!"); + L = IdentityMatrix(rows, cols); + U = Duplicate(); - pi = new int[rows]; - for (int i = 0; i < rows; i++) pi[i] = i; + pi = new int[rows]; + for (int i = 0; i < rows; i++) pi[i] = i; - double p = 0; - double pom2; - int k0 = 0; - int pom1 = 0; + double p = 0; + double pom2; + int k0 = 0; + int pom1 = 0; - for (int k = 0; k < cols - 1; k++) + for (int k = 0; k < cols - 1; k++) + { + p = 0; + for (int i = k; i < rows; i++) // find the row with the biggest pivot { - p = 0; - for (int i = k; i < rows; i++) // find the row with the biggest pivot + if (Math.Abs(U[i, k]) > p) { - if (Math.Abs(U[i, k]) > p) - { - p = Math.Abs(U[i, k]); - k0 = i; - } + p = Math.Abs(U[i, k]); + k0 = i; } - if (p == 0) // samé nuly ve sloupci - throw new MException("The matrix is singular!"); + } + if (p == 0) // samé nuly ve sloupci + throw new MException("The matrix is singular!"); - pom1 = pi[k]; pi[k] = pi[k0]; pi[k0] = pom1; // switch two rows in permutation matrix + pom1 = pi[k]; pi[k] = pi[k0]; pi[k0] = pom1; // switch two rows in permutation matrix - for (int i = 0; i < k; i++) - { - pom2 = L[k, i]; L[k, i] = L[k0, i]; L[k0, i] = pom2; - } + for (int i = 0; i < k; i++) + { + pom2 = L[k, i]; L[k, i] = L[k0, i]; L[k0, i] = pom2; + } - if (k != k0) detOfP *= -1; + if (k != k0) detOfP *= -1; - for (int i = 0; i < cols; i++) // Switch rows in U - { - pom2 = U[k, i]; U[k, i] = 
U[k0, i]; U[k0, i] = pom2; - } + for (int i = 0; i < cols; i++) // Switch rows in U + { + pom2 = U[k, i]; U[k, i] = U[k0, i]; U[k0, i] = pom2; + } - for (int i = k + 1; i < rows; i++) - { - L[i, k] = U[i, k] / U[k, k]; - for (int j = k; j < cols; j++) - U[i, j] = U[i, j] - L[i, k] * U[k, j]; - } + for (int i = k + 1; i < rows; i++) + { + L[i, k] = U[i, k] / U[k, k]; + for (int j = k; j < cols; j++) + U[i, j] = U[i, j] - L[i, k] * U[k, j]; } } + } - public Matrix SolveWith(Matrix v) // Function solves Ax = v in confirmity with solution vector "v" - { - if (rows != cols) throw new MException("The matrix is not square!"); - if (rows != v.rows) throw new MException("Wrong number of results in solution vector!"); - if (L == null) MakeLU(); + public Matrix SolveWith(Matrix v) // Function solves Ax = v in confirmity with solution vector "v" + { + if (rows != cols) throw new MException("The matrix is not square!"); + if (rows != v.rows) throw new MException("Wrong number of results in solution vector!"); + if (L == null) MakeLU(); - Matrix b = new Matrix(rows, 1); - for (int i = 0; i < rows; i++) b[i, 0] = v[pi[i], 0]; // switch two items in "v" due to permutation matrix + Matrix b = new Matrix(rows, 1); + for (int i = 0; i < rows; i++) b[i, 0] = v[pi[i], 0]; // switch two items in "v" due to permutation matrix - Matrix z = SubsForth(L, b); - Matrix x = SubsBack(U, z); + Matrix z = SubsForth(L, b); + Matrix x = SubsBack(U, z); - return x; - } + return x; + } - public Matrix Invert() // Function returns the inverted matrix - { - if (L == null) MakeLU(); + public Matrix Invert() // Function returns the inverted matrix + { + if (L == null) MakeLU(); - Matrix inv = new Matrix(rows, cols); + Matrix inv = new Matrix(rows, cols); - for (int i = 0; i < rows; i++) - { - Matrix Ei = Matrix.ZeroMatrix(rows, 1); - Ei[i, 0] = 1; - Matrix col = SolveWith(Ei); - inv.SetCol(col, i); - } - return inv; + for (int i = 0; i < rows; i++) + { + Matrix Ei = Matrix.ZeroMatrix(rows, 1); + Ei[i, 0] = 1; + Matrix col = SolveWith(Ei); + inv.SetCol(col, i); } + return inv; + } - public double Det() // Function for determinant - { - if (L == null) MakeLU(); - double det = detOfP; - for (int i = 0; i < rows; i++) det *= U[i, i]; - return det; - } + public double Det() // Function for determinant + { + if (L == null) MakeLU(); + double det = detOfP; + for (int i = 0; i < rows; i++) det *= U[i, i]; + return det; + } - public Matrix GetP() // Function returns permutation matrix "P" due to permutation vector "pi" - { - if (L == null) MakeLU(); + public Matrix GetP() // Function returns permutation matrix "P" due to permutation vector "pi" + { + if (L == null) MakeLU(); - Matrix matrix = ZeroMatrix(rows, cols); - for (int i = 0; i < rows; i++) matrix[pi[i], i] = 1; - return matrix; - } + Matrix matrix = ZeroMatrix(rows, cols); + for (int i = 0; i < rows; i++) matrix[pi[i], i] = 1; + return matrix; + } + + public Matrix Duplicate() // Function returns the copy of this matrix + { + Matrix matrix = new Matrix(rows, cols); + for (int i = 0; i < rows; i++) + for (int j = 0; j < cols; j++) + matrix[i, j] = mat[i, j]; + return matrix; + } + + public static Matrix SubsForth(Matrix A, Matrix b) // Function solves Ax = b for A as a lower triangular matrix + { + if (A.L == null) A.MakeLU(); + int n = A.rows; + Matrix x = new Matrix(n, 1); - public Matrix Duplicate() // Function returns the copy of this matrix + for (int i = 0; i < n; i++) { - Matrix matrix = new Matrix(rows, cols); - for (int i = 0; i < rows; i++) - for (int j = 0; j < cols; 
j++) - matrix[i, j] = mat[i, j]; - return matrix; + x[i, 0] = b[i, 0]; + for (int j = 0; j < i; j++) x[i, 0] -= A[i, j] * x[j, 0]; + x[i, 0] = x[i, 0] / A[i, i]; } + return x; + } - public static Matrix SubsForth(Matrix A, Matrix b) // Function solves Ax = b for A as a lower triangular matrix - { - if (A.L == null) A.MakeLU(); - int n = A.rows; - Matrix x = new Matrix(n, 1); + public static Matrix SubsBack(Matrix A, Matrix b) // Function solves Ax = b for A as an upper triangular matrix + { + if (A.L == null) A.MakeLU(); + int n = A.rows; + Matrix x = new Matrix(n, 1); - for (int i = 0; i < n; i++) - { - x[i, 0] = b[i, 0]; - for (int j = 0; j < i; j++) x[i, 0] -= A[i, j] * x[j, 0]; - x[i, 0] = x[i, 0] / A[i, i]; - } - return x; + for (int i = n - 1; i > -1; i--) + { + x[i, 0] = b[i, 0]; + for (int j = n - 1; j > i; j--) x[i, 0] -= A[i, j] * x[j, 0]; + x[i, 0] = x[i, 0] / A[i, i]; } + return x; + } - public static Matrix SubsBack(Matrix A, Matrix b) // Function solves Ax = b for A as an upper triangular matrix - { - if (A.L == null) A.MakeLU(); - int n = A.rows; - Matrix x = new Matrix(n, 1); + public static Matrix ZeroMatrix(int iRows, int iCols) // Function generates the zero matrix + { + Matrix matrix = new Matrix(iRows, iCols); + for (int i = 0; i < iRows; i++) + for (int j = 0; j < iCols; j++) + matrix[i, j] = 0; + return matrix; + } + + public static Matrix IdentityMatrix(int iRows, int iCols) // Function generates the identity matrix + { + Matrix matrix = ZeroMatrix(iRows, iCols); + for (int i = 0; i < Math.Min(iRows, iCols); i++) + matrix[i, i] = 1; + return matrix; + } + + public static Matrix RandomMatrix(int iRows, int iCols, int dispersion) // Function generates the random matrix + { + Random random = new Random(); + Matrix matrix = new Matrix(iRows, iCols); + for (int i = 0; i < iRows; i++) + for (int j = 0; j < iCols; j++) + matrix[i, j] = random.Next(-dispersion, dispersion); + return matrix; + } - for (int i = n - 1; i > -1; i--) + public static Matrix Parse(string ps) // Function parses the matrix from string + { + string s = NormalizeMatrixString(ps); + string[] rows = Regex.Split(s, "\r\n"); + string[] nums = rows[0].Split(' '); + Matrix matrix = new Matrix(rows.Length, nums.Length); + try + { + for (int i = 0; i < rows.Length; i++) { - x[i, 0] = b[i, 0]; - for (int j = n - 1; j > i; j--) x[i, 0] -= A[i, j] * x[j, 0]; - x[i, 0] = x[i, 0] / A[i, i]; + nums = rows[i].Split(' '); + for (int j = 0; j < nums.Length; j++) matrix[i, j] = double.Parse(nums[j]); } - return x; } + catch (FormatException) { throw new MException("Wrong input format!"); } + return matrix; + } - public static Matrix ZeroMatrix(int iRows, int iCols) // Function generates the zero matrix + public override string ToString() // Function returns matrix as a string + { + string s = ""; + for (int i = 0; i < rows; i++) { - Matrix matrix = new Matrix(iRows, iCols); - for (int i = 0; i < iRows; i++) - for (int j = 0; j < iCols; j++) - matrix[i, j] = 0; - return matrix; + for (int j = 0; j < cols; j++) s += String.Format("{0,5:0.00}", mat[i, j]) + " "; + s += "\r\n"; } + return s; + } - public static Matrix IdentityMatrix(int iRows, int iCols) // Function generates the identity matrix - { - Matrix matrix = ZeroMatrix(iRows, iCols); - for (int i = 0; i < Math.Min(iRows, iCols); i++) - matrix[i, i] = 1; - return matrix; - } + public static Matrix Transpose(Matrix m) // Matrix transpose, for any rectangular matrix + { + Matrix t = new Matrix(m.cols, m.rows); + for (int i = 0; i < m.rows; i++) + for (int j = 0; j < 
m.cols; j++) + t[j, i] = m[i, j]; + return t; + } + + public static Matrix Power(Matrix m, int pow) // Power matrix to exponent + { + if (pow == 0) return IdentityMatrix(m.rows, m.cols); + if (pow == 1) return m.Duplicate(); + if (pow == -1) return m.Invert(); - public static Matrix RandomMatrix(int iRows, int iCols, int dispersion) // Function generates the random matrix + Matrix x; + if (pow < 0) { x = m.Invert(); pow *= -1; } + else x = m.Duplicate(); + + Matrix ret = IdentityMatrix(m.rows, m.cols); + while (pow != 0) { - Random random = new Random(); - Matrix matrix = new Matrix(iRows, iCols); - for (int i = 0; i < iRows; i++) - for (int j = 0; j < iCols; j++) - matrix[i, j] = random.Next(-dispersion, dispersion); - return matrix; + if ((pow & 1) == 1) ret *= x; + x *= x; + pow >>= 1; } + return ret; + } - public static Matrix Parse(string ps) // Function parses the matrix from string - { - string s = NormalizeMatrixString(ps); - string[] rows = Regex.Split(s, "\r\n"); - string[] nums = rows[0].Split(' '); - Matrix matrix = new Matrix(rows.Length, nums.Length); - try + private static void SafeAplusBintoC(Matrix A, int xa, int ya, Matrix B, int xb, int yb, Matrix C, int size) + { + for (int i = 0; i < size; i++) // rows + for (int j = 0; j < size; j++) // cols { - for (int i = 0; i < rows.Length; i++) - { - nums = rows[i].Split(' '); - for (int j = 0; j < nums.Length; j++) matrix[i, j] = double.Parse(nums[j]); - } + C[i, j] = 0; + if (xa + j < A.cols && ya + i < A.rows) C[i, j] += A[ya + i, xa + j]; + if (xb + j < B.cols && yb + i < B.rows) C[i, j] += B[yb + i, xb + j]; } - catch (FormatException) { throw new MException("Wrong input format!"); } - return matrix; - } + } - public override string ToString() // Function returns matrix as a string - { - string s = ""; - for (int i = 0; i < rows; i++) + private static void SafeAminusBintoC(Matrix A, int xa, int ya, Matrix B, int xb, int yb, Matrix C, int size) + { + for (int i = 0; i < size; i++) // rows + for (int j = 0; j < size; j++) // cols { - for (int j = 0; j < cols; j++) s += String.Format("{0,5:0.00}", mat[i, j]) + " "; - s += "\r\n"; + C[i, j] = 0; + if (xa + j < A.cols && ya + i < A.rows) C[i, j] += A[ya + i, xa + j]; + if (xb + j < B.cols && yb + i < B.rows) C[i, j] -= B[yb + i, xb + j]; } - return s; - } + } - public static Matrix Transpose(Matrix m) // Matrix transpose, for any rectangular matrix - { - Matrix t = new Matrix(m.cols, m.rows); - for (int i = 0; i < m.rows; i++) - for (int j = 0; j < m.cols; j++) - t[j, i] = m[i, j]; - return t; - } + private static void SafeACopytoC(Matrix A, int xa, int ya, Matrix C, int size) + { + for (int i = 0; i < size; i++) // rows + for (int j = 0; j < size; j++) // cols + { + C[i, j] = 0; + if (xa + j < A.cols && ya + i < A.rows) C[i, j] += A[ya + i, xa + j]; + } + } - public static Matrix Power(Matrix m, int pow) // Power matrix to exponent - { - if (pow == 0) return IdentityMatrix(m.rows, m.cols); - if (pow == 1) return m.Duplicate(); - if (pow == -1) return m.Invert(); + private static void AplusBintoC(Matrix A, int xa, int ya, Matrix B, int xb, int yb, Matrix C, int size) + { + for (int i = 0; i < size; i++) // rows + for (int j = 0; j < size; j++) C[i, j] = A[ya + i, xa + j] + B[yb + i, xb + j]; + } - Matrix x; - if (pow < 0) { x = m.Invert(); pow *= -1; } - else x = m.Duplicate(); + private static void AminusBintoC(Matrix A, int xa, int ya, Matrix B, int xb, int yb, Matrix C, int size) + { + for (int i = 0; i < size; i++) // rows + for (int j = 0; j < size; j++) C[i, j] = A[ya + i, 
xa + j] - B[yb + i, xb + j]; + } - Matrix ret = IdentityMatrix(m.rows, m.cols); - while (pow != 0) - { - if ((pow & 1) == 1) ret *= x; - x *= x; - pow >>= 1; - } - return ret; - } + private static void ACopytoC(Matrix A, int xa, int ya, Matrix C, int size) + { + for (int i = 0; i < size; i++) // rows + for (int j = 0; j < size; j++) C[i, j] = A[ya + i, xa + j]; + } - private static void SafeAplusBintoC(Matrix A, int xa, int ya, Matrix B, int xb, int yb, Matrix C, int size) - { - for (int i = 0; i < size; i++) // rows - for (int j = 0; j < size; j++) // cols - { - C[i, j] = 0; - if (xa + j < A.cols && ya + i < A.rows) C[i, j] += A[ya + i, xa + j]; - if (xb + j < B.cols && yb + i < B.rows) C[i, j] += B[yb + i, xb + j]; - } - } + private static Matrix StrassenMultiply(Matrix A, Matrix B) // Smart matrix multiplication + { + if (A.cols != B.rows) throw new MException("Wrong dimension of matrix!"); - private static void SafeAminusBintoC(Matrix A, int xa, int ya, Matrix B, int xb, int yb, Matrix C, int size) - { - for (int i = 0; i < size; i++) // rows - for (int j = 0; j < size; j++) // cols - { - C[i, j] = 0; - if (xa + j < A.cols && ya + i < A.rows) C[i, j] += A[ya + i, xa + j]; - if (xb + j < B.cols && yb + i < B.rows) C[i, j] -= B[yb + i, xb + j]; - } - } + Matrix R; - private static void SafeACopytoC(Matrix A, int xa, int ya, Matrix C, int size) - { - for (int i = 0; i < size; i++) // rows - for (int j = 0; j < size; j++) // cols - { - C[i, j] = 0; - if (xa + j < A.cols && ya + i < A.rows) C[i, j] += A[ya + i, xa + j]; - } - } + int msize = Math.Max(Math.Max(A.rows, A.cols), Math.Max(B.rows, B.cols)); - private static void AplusBintoC(Matrix A, int xa, int ya, Matrix B, int xb, int yb, Matrix C, int size) + if (msize < 32) { - for (int i = 0; i < size; i++) // rows - for (int j = 0; j < size; j++) C[i, j] = A[ya + i, xa + j] + B[yb + i, xb + j]; + R = ZeroMatrix(A.rows, B.cols); + for (int i = 0; i < R.rows; i++) + for (int j = 0; j < R.cols; j++) + for (int k = 0; k < A.cols; k++) + R[i, j] += A[i, k] * B[k, j]; + return R; } - private static void AminusBintoC(Matrix A, int xa, int ya, Matrix B, int xb, int yb, Matrix C, int size) - { - for (int i = 0; i < size; i++) // rows - for (int j = 0; j < size; j++) C[i, j] = A[ya + i, xa + j] - B[yb + i, xb + j]; - } + int size = 1; int n = 0; + while (msize > size) { size *= 2; n++; }; + int h = size / 2; + - private static void ACopytoC(Matrix A, int xa, int ya, Matrix C, int size) + Matrix[,] mField = new Matrix[n, 9]; + + /* + * 8x8, 8x8, 8x8, ... + * 4x4, 4x4, 4x4, ... + * 2x2, 2x2, 2x2, ... + * . . . 
+ */ + + int z; + for (int i = 0; i < n - 4; i++) // rows { - for (int i = 0; i < size; i++) // rows - for (int j = 0; j < size; j++) C[i, j] = A[ya + i, xa + j]; + z = (int)Math.Pow(2, n - i - 1); + for (int j = 0; j < 9; j++) mField[i, j] = new Matrix(z, z); } - private static Matrix StrassenMultiply(Matrix A, Matrix B) // Smart matrix multiplication - { - if (A.cols != B.rows) throw new MException("Wrong dimension of matrix!"); + SafeAplusBintoC(A, 0, 0, A, h, h, mField[0, 0], h); + SafeAplusBintoC(B, 0, 0, B, h, h, mField[0, 1], h); + StrassenMultiplyRun(mField[0, 0], mField[0, 1], mField[0, 1 + 1], 1, mField); // (A11 + A22) * (B11 + B22); - Matrix R; + SafeAplusBintoC(A, 0, h, A, h, h, mField[0, 0], h); + SafeACopytoC(B, 0, 0, mField[0, 1], h); + StrassenMultiplyRun(mField[0, 0], mField[0, 1], mField[0, 1 + 2], 1, mField); // (A21 + A22) * B11; - int msize = Math.Max(Math.Max(A.rows, A.cols), Math.Max(B.rows, B.cols)); + SafeACopytoC(A, 0, 0, mField[0, 0], h); + SafeAminusBintoC(B, h, 0, B, h, h, mField[0, 1], h); + StrassenMultiplyRun(mField[0, 0], mField[0, 1], mField[0, 1 + 3], 1, mField); //A11 * (B12 - B22); - if (msize < 32) - { - R = ZeroMatrix(A.rows, B.cols); - for (int i = 0; i < R.rows; i++) - for (int j = 0; j < R.cols; j++) - for (int k = 0; k < A.cols; k++) - R[i, j] += A[i, k] * B[k, j]; - return R; - } + SafeACopytoC(A, h, h, mField[0, 0], h); + SafeAminusBintoC(B, 0, h, B, 0, 0, mField[0, 1], h); + StrassenMultiplyRun(mField[0, 0], mField[0, 1], mField[0, 1 + 4], 1, mField); //A22 * (B21 - B11); - int size = 1; int n = 0; - while (msize > size) { size *= 2; n++; }; - int h = size / 2; + SafeAplusBintoC(A, 0, 0, A, h, 0, mField[0, 0], h); + SafeACopytoC(B, h, h, mField[0, 1], h); + StrassenMultiplyRun(mField[0, 0], mField[0, 1], mField[0, 1 + 5], 1, mField); //(A11 + A12) * B22; + SafeAminusBintoC(A, 0, h, A, 0, 0, mField[0, 0], h); + SafeAplusBintoC(B, 0, 0, B, h, 0, mField[0, 1], h); + StrassenMultiplyRun(mField[0, 0], mField[0, 1], mField[0, 1 + 6], 1, mField); //(A21 - A11) * (B11 + B12); - Matrix[,] mField = new Matrix[n, 9]; + SafeAminusBintoC(A, h, 0, A, h, h, mField[0, 0], h); + SafeAplusBintoC(B, 0, h, B, h, h, mField[0, 1], h); + StrassenMultiplyRun(mField[0, 0], mField[0, 1], mField[0, 1 + 7], 1, mField); // (A12 - A22) * (B21 + B22); - /* - * 8x8, 8x8, 8x8, ... - * 4x4, 4x4, 4x4, ... - * 2x2, 2x2, 2x2, ... - * . . . 
- */ + R = new Matrix(A.rows, B.cols); // result - int z; - for (int i = 0; i < n - 4; i++) // rows - { - z = (int)Math.Pow(2, n - i - 1); - for (int j = 0; j < 9; j++) mField[i, j] = new Matrix(z, z); - } + /// C11 + for (int i = 0; i < Math.Min(h, R.rows); i++) // rows + for (int j = 0; j < Math.Min(h, R.cols); j++) // cols + R[i, j] = mField[0, 1 + 1][i, j] + mField[0, 1 + 4][i, j] - mField[0, 1 + 5][i, j] + mField[0, 1 + 7][i, j]; - SafeAplusBintoC(A, 0, 0, A, h, h, mField[0, 0], h); - SafeAplusBintoC(B, 0, 0, B, h, h, mField[0, 1], h); - StrassenMultiplyRun(mField[0, 0], mField[0, 1], mField[0, 1 + 1], 1, mField); // (A11 + A22) * (B11 + B22); + /// C12 + for (int i = 0; i < Math.Min(h, R.rows); i++) // rows + for (int j = h; j < Math.Min(2 * h, R.cols); j++) // cols + R[i, j] = mField[0, 1 + 3][i, j - h] + mField[0, 1 + 5][i, j - h]; - SafeAplusBintoC(A, 0, h, A, h, h, mField[0, 0], h); - SafeACopytoC(B, 0, 0, mField[0, 1], h); - StrassenMultiplyRun(mField[0, 0], mField[0, 1], mField[0, 1 + 2], 1, mField); // (A21 + A22) * B11; + /// C21 + for (int i = h; i < Math.Min(2 * h, R.rows); i++) // rows + for (int j = 0; j < Math.Min(h, R.cols); j++) // cols + R[i, j] = mField[0, 1 + 2][i - h, j] + mField[0, 1 + 4][i - h, j]; - SafeACopytoC(A, 0, 0, mField[0, 0], h); - SafeAminusBintoC(B, h, 0, B, h, h, mField[0, 1], h); - StrassenMultiplyRun(mField[0, 0], mField[0, 1], mField[0, 1 + 3], 1, mField); //A11 * (B12 - B22); + /// C22 + for (int i = h; i < Math.Min(2 * h, R.rows); i++) // rows + for (int j = h; j < Math.Min(2 * h, R.cols); j++) // cols + R[i, j] = mField[0, 1 + 1][i - h, j - h] - mField[0, 1 + 2][i - h, j - h] + mField[0, 1 + 3][i - h, j - h] + mField[0, 1 + 6][i - h, j - h]; - SafeACopytoC(A, h, h, mField[0, 0], h); - SafeAminusBintoC(B, 0, h, B, 0, 0, mField[0, 1], h); - StrassenMultiplyRun(mField[0, 0], mField[0, 1], mField[0, 1 + 4], 1, mField); //A22 * (B21 - B11); + return R; + } - SafeAplusBintoC(A, 0, 0, A, h, 0, mField[0, 0], h); - SafeACopytoC(B, h, h, mField[0, 1], h); - StrassenMultiplyRun(mField[0, 0], mField[0, 1], mField[0, 1 + 5], 1, mField); //(A11 + A12) * B22; + // function for square matrix 2^N x 2^N - SafeAminusBintoC(A, 0, h, A, 0, 0, mField[0, 0], h); - SafeAplusBintoC(B, 0, 0, B, h, 0, mField[0, 1], h); - StrassenMultiplyRun(mField[0, 0], mField[0, 1], mField[0, 1 + 6], 1, mField); //(A21 - A11) * (B11 + B12); + private static void StrassenMultiplyRun(Matrix A, Matrix B, Matrix C, int l, Matrix[,] f) // A * B into C, level of recursion, matrix field + { + int size = A.rows; + int h = size / 2; - SafeAminusBintoC(A, h, 0, A, h, h, mField[0, 0], h); - SafeAplusBintoC(B, 0, h, B, h, h, mField[0, 1], h); - StrassenMultiplyRun(mField[0, 0], mField[0, 1], mField[0, 1 + 7], 1, mField); // (A12 - A22) * (B21 + B22); + if (size < 32) + { + for (int i = 0; i < C.rows; i++) + for (int j = 0; j < C.cols; j++) + { + C[i, j] = 0; + for (int k = 0; k < A.cols; k++) C[i, j] += A[i, k] * B[k, j]; + } + return; + } - R = new Matrix(A.rows, B.cols); // result + AplusBintoC(A, 0, 0, A, h, h, f[l, 0], h); + AplusBintoC(B, 0, 0, B, h, h, f[l, 1], h); + StrassenMultiplyRun(f[l, 0], f[l, 1], f[l, 1 + 1], l + 1, f); // (A11 + A22) * (B11 + B22); - /// C11 - for (int i = 0; i < Math.Min(h, R.rows); i++) // rows - for (int j = 0; j < Math.Min(h, R.cols); j++) // cols - R[i, j] = mField[0, 1 + 1][i, j] + mField[0, 1 + 4][i, j] - mField[0, 1 + 5][i, j] + mField[0, 1 + 7][i, j]; + AplusBintoC(A, 0, h, A, h, h, f[l, 0], h); + ACopytoC(B, 0, 0, f[l, 1], h); + StrassenMultiplyRun(f[l, 
0], f[l, 1], f[l, 1 + 2], l + 1, f); // (A21 + A22) * B11; - /// C12 - for (int i = 0; i < Math.Min(h, R.rows); i++) // rows - for (int j = h; j < Math.Min(2 * h, R.cols); j++) // cols - R[i, j] = mField[0, 1 + 3][i, j - h] + mField[0, 1 + 5][i, j - h]; + ACopytoC(A, 0, 0, f[l, 0], h); + AminusBintoC(B, h, 0, B, h, h, f[l, 1], h); + StrassenMultiplyRun(f[l, 0], f[l, 1], f[l, 1 + 3], l + 1, f); //A11 * (B12 - B22); - /// C21 - for (int i = h; i < Math.Min(2 * h, R.rows); i++) // rows - for (int j = 0; j < Math.Min(h, R.cols); j++) // cols - R[i, j] = mField[0, 1 + 2][i - h, j] + mField[0, 1 + 4][i - h, j]; + ACopytoC(A, h, h, f[l, 0], h); + AminusBintoC(B, 0, h, B, 0, 0, f[l, 1], h); + StrassenMultiplyRun(f[l, 0], f[l, 1], f[l, 1 + 4], l + 1, f); //A22 * (B21 - B11); - /// C22 - for (int i = h; i < Math.Min(2 * h, R.rows); i++) // rows - for (int j = h; j < Math.Min(2 * h, R.cols); j++) // cols - R[i, j] = mField[0, 1 + 1][i - h, j - h] - mField[0, 1 + 2][i - h, j - h] + mField[0, 1 + 3][i - h, j - h] + mField[0, 1 + 6][i - h, j - h]; + AplusBintoC(A, 0, 0, A, h, 0, f[l, 0], h); + ACopytoC(B, h, h, f[l, 1], h); + StrassenMultiplyRun(f[l, 0], f[l, 1], f[l, 1 + 5], l + 1, f); //(A11 + A12) * B22; - return R; - } + AminusBintoC(A, 0, h, A, 0, 0, f[l, 0], h); + AplusBintoC(B, 0, 0, B, h, 0, f[l, 1], h); + StrassenMultiplyRun(f[l, 0], f[l, 1], f[l, 1 + 6], l + 1, f); //(A21 - A11) * (B11 + B12); - // function for square matrix 2^N x 2^N + AminusBintoC(A, h, 0, A, h, h, f[l, 0], h); + AplusBintoC(B, 0, h, B, h, h, f[l, 1], h); + StrassenMultiplyRun(f[l, 0], f[l, 1], f[l, 1 + 7], l + 1, f); // (A12 - A22) * (B21 + B22); - private static void StrassenMultiplyRun(Matrix A, Matrix B, Matrix C, int l, Matrix[,] f) // A * B into C, level of recursion, matrix field - { - int size = A.rows; - int h = size / 2; + /// C11 + for (int i = 0; i < h; i++) // rows + for (int j = 0; j < h; j++) // cols + C[i, j] = f[l, 1 + 1][i, j] + f[l, 1 + 4][i, j] - f[l, 1 + 5][i, j] + f[l, 1 + 7][i, j]; - if (size < 32) - { - for (int i = 0; i < C.rows; i++) - for (int j = 0; j < C.cols; j++) - { - C[i, j] = 0; - for (int k = 0; k < A.cols; k++) C[i, j] += A[i, k] * B[k, j]; - } - return; - } + /// C12 + for (int i = 0; i < h; i++) // rows + for (int j = h; j < size; j++) // cols + C[i, j] = f[l, 1 + 3][i, j - h] + f[l, 1 + 5][i, j - h]; - AplusBintoC(A, 0, 0, A, h, h, f[l, 0], h); - AplusBintoC(B, 0, 0, B, h, h, f[l, 1], h); - StrassenMultiplyRun(f[l, 0], f[l, 1], f[l, 1 + 1], l + 1, f); // (A11 + A22) * (B11 + B22); - - AplusBintoC(A, 0, h, A, h, h, f[l, 0], h); - ACopytoC(B, 0, 0, f[l, 1], h); - StrassenMultiplyRun(f[l, 0], f[l, 1], f[l, 1 + 2], l + 1, f); // (A21 + A22) * B11; - - ACopytoC(A, 0, 0, f[l, 0], h); - AminusBintoC(B, h, 0, B, h, h, f[l, 1], h); - StrassenMultiplyRun(f[l, 0], f[l, 1], f[l, 1 + 3], l + 1, f); //A11 * (B12 - B22); - - ACopytoC(A, h, h, f[l, 0], h); - AminusBintoC(B, 0, h, B, 0, 0, f[l, 1], h); - StrassenMultiplyRun(f[l, 0], f[l, 1], f[l, 1 + 4], l + 1, f); //A22 * (B21 - B11); - - AplusBintoC(A, 0, 0, A, h, 0, f[l, 0], h); - ACopytoC(B, h, h, f[l, 1], h); - StrassenMultiplyRun(f[l, 0], f[l, 1], f[l, 1 + 5], l + 1, f); //(A11 + A12) * B22; - - AminusBintoC(A, 0, h, A, 0, 0, f[l, 0], h); - AplusBintoC(B, 0, 0, B, h, 0, f[l, 1], h); - StrassenMultiplyRun(f[l, 0], f[l, 1], f[l, 1 + 6], l + 1, f); //(A21 - A11) * (B11 + B12); - - AminusBintoC(A, h, 0, A, h, h, f[l, 0], h); - AplusBintoC(B, 0, h, B, h, h, f[l, 1], h); - StrassenMultiplyRun(f[l, 0], f[l, 1], f[l, 1 + 7], l + 1, f); // (A12 - A22) * 
(B21 + B22); - - /// C11 - for (int i = 0; i < h; i++) // rows - for (int j = 0; j < h; j++) // cols - C[i, j] = f[l, 1 + 1][i, j] + f[l, 1 + 4][i, j] - f[l, 1 + 5][i, j] + f[l, 1 + 7][i, j]; - - /// C12 - for (int i = 0; i < h; i++) // rows - for (int j = h; j < size; j++) // cols - C[i, j] = f[l, 1 + 3][i, j - h] + f[l, 1 + 5][i, j - h]; - - /// C21 - for (int i = h; i < size; i++) // rows - for (int j = 0; j < h; j++) // cols - C[i, j] = f[l, 1 + 2][i - h, j] + f[l, 1 + 4][i - h, j]; - - /// C22 - for (int i = h; i < size; i++) // rows - for (int j = h; j < size; j++) // cols - C[i, j] = f[l, 1 + 1][i - h, j - h] - f[l, 1 + 2][i - h, j - h] + f[l, 1 + 3][i - h, j - h] + f[l, 1 + 6][i - h, j - h]; - } + /// C21 + for (int i = h; i < size; i++) // rows + for (int j = 0; j < h; j++) // cols + C[i, j] = f[l, 1 + 2][i - h, j] + f[l, 1 + 4][i - h, j]; - public static Matrix StupidMultiply(Matrix m1, Matrix m2) // Stupid matrix multiplication - { - if (m1.cols != m2.rows) throw new MException("Wrong dimensions of matrix!"); - - Matrix result = ZeroMatrix(m1.rows, m2.cols); - for (int i = 0; i < result.rows; i++) - for (int j = 0; j < result.cols; j++) - for (int k = 0; k < m1.cols; k++) - result[i, j] += m1[i, k] * m2[k, j]; - return result; - } - private static Matrix Multiply(double n, Matrix m) // Multiplication by constant n - { - Matrix r = new Matrix(m.rows, m.cols); - for (int i = 0; i < m.rows; i++) - for (int j = 0; j < m.cols; j++) - r[i, j] = m[i, j] * n; - return r; - } - private static Matrix Add(Matrix m1, Matrix m2) // Sčítání matic - { - if (m1.rows != m2.rows || m1.cols != m2.cols) throw new MException("Matrices must have the same dimensions!"); - Matrix r = new Matrix(m1.rows, m1.cols); - for (int i = 0; i < r.rows; i++) - for (int j = 0; j < r.cols; j++) - r[i, j] = m1[i, j] + m2[i, j]; - return r; - } + /// C22 + for (int i = h; i < size; i++) // rows + for (int j = h; j < size; j++) // cols + C[i, j] = f[l, 1 + 1][i - h, j - h] - f[l, 1 + 2][i - h, j - h] + f[l, 1 + 3][i - h, j - h] + f[l, 1 + 6][i - h, j - h]; + } - public static string NormalizeMatrixString(string matStr) // From Andy - thank you! :) - { - // Remove any multiple spaces - while (matStr.IndexOf(" ") != -1) - matStr = matStr.Replace(" ", " "); - - // Remove any spaces before or after newlines - matStr = matStr.Replace(" \r\n", "\r\n"); - matStr = matStr.Replace("\r\n ", "\r\n"); - - // If the data ends in a newline, remove the trailing newline. 
- // Make it easier by first replacing \r\n’s with |’s then - // restore the |’s with \r\n’s - matStr = matStr.Replace("\r\n", "|"); - while (matStr.LastIndexOf("|") == (matStr.Length - 1)) - matStr = matStr.Substring(0, matStr.Length - 1); - - matStr = matStr.Replace("|", "\r\n"); - return matStr.Trim(); - } + public static Matrix StupidMultiply(Matrix m1, Matrix m2) // Stupid matrix multiplication + { + if (m1.cols != m2.rows) throw new MException("Wrong dimensions of matrix!"); + + Matrix result = ZeroMatrix(m1.rows, m2.cols); + for (int i = 0; i < result.rows; i++) + for (int j = 0; j < result.cols; j++) + for (int k = 0; k < m1.cols; k++) + result[i, j] += m1[i, k] * m2[k, j]; + return result; + } + private static Matrix Multiply(double n, Matrix m) // Multiplication by constant n + { + Matrix r = new Matrix(m.rows, m.cols); + for (int i = 0; i < m.rows; i++) + for (int j = 0; j < m.cols; j++) + r[i, j] = m[i, j] * n; + return r; + } + private static Matrix Add(Matrix m1, Matrix m2) // Sčítání matic + { + if (m1.rows != m2.rows || m1.cols != m2.cols) throw new MException("Matrices must have the same dimensions!"); + Matrix r = new Matrix(m1.rows, m1.cols); + for (int i = 0; i < r.rows; i++) + for (int j = 0; j < r.cols; j++) + r[i, j] = m1[i, j] + m2[i, j]; + return r; + } - // O P E R A T O R S + public static string NormalizeMatrixString(string matStr) // From Andy - thank you! :) + { + // Remove any multiple spaces + while (matStr.IndexOf(" ") != -1) + matStr = matStr.Replace(" ", " "); + + // Remove any spaces before or after newlines + matStr = matStr.Replace(" \r\n", "\r\n"); + matStr = matStr.Replace("\r\n ", "\r\n"); + + // If the data ends in a newline, remove the trailing newline. + // Make it easier by first replacing \r\n’s with |’s then + // restore the |’s with \r\n’s + matStr = matStr.Replace("\r\n", "|"); + while (matStr.LastIndexOf("|") == (matStr.Length - 1)) + matStr = matStr.Substring(0, matStr.Length - 1); + + matStr = matStr.Replace("|", "\r\n"); + return matStr.Trim(); + } - public static Matrix operator -(Matrix m) - { return Matrix.Multiply(-1, m); } + // O P E R A T O R S - public static Matrix operator +(Matrix m1, Matrix m2) - { return Matrix.Add(m1, m2); } + public static Matrix operator -(Matrix m) + { return Matrix.Multiply(-1, m); } - public static Matrix operator -(Matrix m1, Matrix m2) - { return Matrix.Add(m1, -m2); } + public static Matrix operator +(Matrix m1, Matrix m2) + { return Matrix.Add(m1, m2); } - public static Matrix operator *(Matrix m1, Matrix m2) - { return Matrix.StrassenMultiply(m1, m2); } + public static Matrix operator -(Matrix m1, Matrix m2) + { return Matrix.Add(m1, -m2); } - public static Matrix operator *(double n, Matrix m) - { return Matrix.Multiply(n, m); } - } + public static Matrix operator *(Matrix m1, Matrix m2) + { return Matrix.StrassenMultiply(m1, m2); } - // The class for exceptions + public static Matrix operator *(double n, Matrix m) + { return Matrix.Multiply(n, m); } +} - public class MException : Exception - { - public MException(string Message) - : base(Message) - { } - } +// The class for exceptions +public class MException : Exception +{ + public MException(string Message) + : base(Message) + { } } \ No newline at end of file diff --git a/LSLib/LS/Mods/ModResources.cs b/LSLib/LS/Mods/ModResources.cs index b35aad95..6eff5156 100644 --- a/LSLib/LS/Mods/ModResources.cs +++ b/LSLib/LS/Mods/ModResources.cs @@ -5,536 +5,535 @@ using System.Linq; using System.Text.RegularExpressions; -namespace LSLib.LS +namespace 
LSLib.LS; + +public class ModInfo(string name) +{ + public string Name = name; + public AbstractFileInfo Meta; + public Dictionary Scripts = []; + public Dictionary Stats = []; + public Dictionary Globals = []; + public Dictionary LevelObjects = []; + public AbstractFileInfo OrphanQueryIgnoreList; + public AbstractFileInfo StoryHeaderFile; + public AbstractFileInfo TypeCoercionWhitelistFile; + public AbstractFileInfo ModifiersFile; + public AbstractFileInfo ValueListsFile; + public AbstractFileInfo ActionResourcesFile; + public AbstractFileInfo ActionResourceGroupsFile; + public List TagFiles = []; +} + +public class ModResources : IDisposable { - public class ModInfo(string name) + public Dictionary Mods = []; + public List LoadedPackages = []; + + public void Dispose() { - public string Name = name; - public AbstractFileInfo Meta; - public Dictionary Scripts = []; - public Dictionary Stats = []; - public Dictionary Globals = []; - public Dictionary LevelObjects = []; - public AbstractFileInfo OrphanQueryIgnoreList; - public AbstractFileInfo StoryHeaderFile; - public AbstractFileInfo TypeCoercionWhitelistFile; - public AbstractFileInfo ModifiersFile; - public AbstractFileInfo ValueListsFile; - public AbstractFileInfo ActionResourcesFile; - public AbstractFileInfo ActionResourceGroupsFile; - public List TagFiles = []; + LoadedPackages.ForEach(p => p.Dispose()); + LoadedPackages.Clear(); } +} - public class ModResources : IDisposable +public partial class ModPathVisitor +{ + private static readonly Regex metaRe = MetaRegex(); + private static readonly Regex scriptRe = ScriptRegex(); + private static readonly Regex statRe = StatRegex(); + private static readonly Regex staticLsxRe = StaticLsxRegex(); + private static readonly Regex statStructureRe = StatStructureRegex(); + private static readonly Regex orphanQueryIgnoresRe = OrphanQueryIgnoresRegex(); + private static readonly Regex storyDefinitionsRe = StoryDefinitionsRegex(); + private static readonly Regex typeCoercionWhitelistRe = TypeCoercionWhitelistRegex(); + private static readonly Regex globalsRe = GlobalsRegex(); + private static readonly Regex levelObjectsRe = LevelObjectsRegex(); + // Pattern for excluding subsequent parts of a multi-part archive + public static readonly Regex archivePartRe = ArchivePartRegex(); + + public readonly ModResources Resources; + + public bool CollectStoryGoals = false; + public bool CollectStats = false; + public bool CollectGlobals = false; + public bool CollectLevels = false; + public bool CollectGuidResources = false; + public bool LoadPackages = true; + public TargetGame Game = TargetGame.DOS2; + + public ModPathVisitor(ModResources resources) { - public Dictionary Mods = []; - public List LoadedPackages = []; - - public void Dispose() - { - LoadedPackages.ForEach(p => p.Dispose()); - LoadedPackages.Clear(); - } + Resources = resources; } - public partial class ModPathVisitor + private static void EnumerateFiles(List paths, string rootPath, string currentPath, string pattern) { - private static readonly Regex metaRe = MetaRegex(); - private static readonly Regex scriptRe = ScriptRegex(); - private static readonly Regex statRe = StatRegex(); - private static readonly Regex staticLsxRe = StaticLsxRegex(); - private static readonly Regex statStructureRe = StatStructureRegex(); - private static readonly Regex orphanQueryIgnoresRe = OrphanQueryIgnoresRegex(); - private static readonly Regex storyDefinitionsRe = StoryDefinitionsRegex(); - private static readonly Regex typeCoercionWhitelistRe = 
TypeCoercionWhitelistRegex(); - private static readonly Regex globalsRe = GlobalsRegex(); - private static readonly Regex levelObjectsRe = LevelObjectsRegex(); - // Pattern for excluding subsequent parts of a multi-part archive - public static readonly Regex archivePartRe = ArchivePartRegex(); - - public readonly ModResources Resources; - - public bool CollectStoryGoals = false; - public bool CollectStats = false; - public bool CollectGlobals = false; - public bool CollectLevels = false; - public bool CollectGuidResources = false; - public bool LoadPackages = true; - public TargetGame Game = TargetGame.DOS2; - - public ModPathVisitor(ModResources resources) - { - Resources = resources; - } - - private static void EnumerateFiles(List paths, string rootPath, string currentPath, string pattern) + foreach (string filePath in Directory.GetFiles(currentPath, pattern)) { - foreach (string filePath in Directory.GetFiles(currentPath, pattern)) + var relativePath = filePath[rootPath.Length..]; + if (relativePath[0] == '/' || relativePath[0] == '\\') { - var relativePath = filePath[rootPath.Length..]; - if (relativePath[0] == '/' || relativePath[0] == '\\') - { - relativePath = relativePath[1..]; - } - - paths.Add(relativePath); + relativePath = relativePath[1..]; } - foreach (string directoryPath in Directory.GetDirectories(currentPath)) - { - EnumerateFiles(paths, rootPath, directoryPath, pattern); - } + paths.Add(relativePath); } - private ModInfo GetMod(string modName) + foreach (string directoryPath in Directory.GetDirectories(currentPath)) { - if (!Resources.Mods.TryGetValue(modName, out ModInfo mod)) - { - mod = new ModInfo(modName); - Resources.Mods[modName] = mod; - } - - return mod; + EnumerateFiles(paths, rootPath, directoryPath, pattern); } + } - private void AddMetadataToMod(string modName, AbstractFileInfo file) + private ModInfo GetMod(string modName) + { + if (!Resources.Mods.TryGetValue(modName, out ModInfo mod)) { - GetMod(modName).Meta = file; + mod = new ModInfo(modName); + Resources.Mods[modName] = mod; } - private void AddStatToMod(string modName, string path, AbstractFileInfo file) - { - GetMod(modName).Stats[path] = file; - } + return mod; + } - private void AddScriptToMod(string modName, string scriptName, AbstractFileInfo file) - { - GetMod(modName).Scripts[scriptName] = file; - } + private void AddMetadataToMod(string modName, AbstractFileInfo file) + { + GetMod(modName).Meta = file; + } - private void AddGlobalsToMod(string modName, string path, AbstractFileInfo file) - { - GetMod(modName).Globals[path] = file; - } + private void AddStatToMod(string modName, string path, AbstractFileInfo file) + { + GetMod(modName).Stats[path] = file; + } + + private void AddScriptToMod(string modName, string scriptName, AbstractFileInfo file) + { + GetMod(modName).Scripts[scriptName] = file; + } - private void AddLevelObjectsToMod(string modName, string path, AbstractFileInfo file) + private void AddGlobalsToMod(string modName, string path, AbstractFileInfo file) + { + GetMod(modName).Globals[path] = file; + } + + private void AddLevelObjectsToMod(string modName, string path, AbstractFileInfo file) + { + GetMod(modName).LevelObjects[path] = file; + } + + private void DiscoverPackagedFile(AbstractFileInfo file) + { + if (file.IsDeletion()) return; + + if (file.Name.EndsWith("meta.lsx", StringComparison.Ordinal)) { - GetMod(modName).LevelObjects[path] = file; + var match = metaRe.Match(file.Name); + if (match != null && match.Success) + { + AddMetadataToMod(match.Groups[1].Value, file); + } 
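To make the lookup concrete: MetaRegex matches paths of the form Mods/<ModName>/meta.lsx and captures the mod folder name in group 1, which then keys the ModInfo entry. A minimal sketch mirroring the call above, using an invented package path:

    // "MyMod" is a hypothetical mod folder name, not taken from any real package:
    var m = metaRe.Match("Mods/MyMod/meta.lsx");
    if (m.Success)
    {
        AddMetadataToMod(m.Groups[1].Value, file);   // m.Groups[1].Value == "MyMod"
    }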
} - private void DiscoverPackagedFile(AbstractFileInfo file) + if (CollectStoryGoals) { - if (file.IsDeletion()) return; - - if (file.Name.EndsWith("meta.lsx", StringComparison.Ordinal)) + if (file.Name.EndsWith(".txt", StringComparison.Ordinal) && file.Name.Contains("/Story/RawFiles/Goals")) { - var match = metaRe.Match(file.Name); + var match = scriptRe.Match(file.Name); if (match != null && match.Success) { - AddMetadataToMod(match.Groups[1].Value, file); + AddScriptToMod(match.Groups[1].Value, match.Groups[2].Value, file); } } - if (CollectStoryGoals) + if (file.Name.EndsWith("/Story/story_orphanqueries_ignore_local.txt", StringComparison.Ordinal)) { - if (file.Name.EndsWith(".txt", StringComparison.Ordinal) && file.Name.Contains("/Story/RawFiles/Goals")) + var match = orphanQueryIgnoresRe.Match(file.Name); + if (match != null && match.Success) { - var match = scriptRe.Match(file.Name); - if (match != null && match.Success) - { - AddScriptToMod(match.Groups[1].Value, match.Groups[2].Value, file); - } + GetMod(match.Groups[1].Value).OrphanQueryIgnoreList = file; } + } - if (file.Name.EndsWith("/Story/story_orphanqueries_ignore_local.txt", StringComparison.Ordinal)) + if (file.Name.EndsWith("/Story/RawFiles/story_header.div", StringComparison.Ordinal)) + { + var match = storyDefinitionsRe.Match(file.Name); + if (match != null && match.Success) { - var match = orphanQueryIgnoresRe.Match(file.Name); - if (match != null && match.Success) - { - GetMod(match.Groups[1].Value).OrphanQueryIgnoreList = file; - } + GetMod(match.Groups[1].Value).StoryHeaderFile = file; } + } - if (file.Name.EndsWith("/Story/RawFiles/story_header.div", StringComparison.Ordinal)) + if (file.Name.EndsWith("/Story/RawFiles/TypeCoercionWhitelist.txt", StringComparison.Ordinal)) + { + var match = typeCoercionWhitelistRe.Match(file.Name); + if (match != null && match.Success) { - var match = storyDefinitionsRe.Match(file.Name); - if (match != null && match.Success) - { - GetMod(match.Groups[1].Value).StoryHeaderFile = file; - } + GetMod(match.Groups[1].Value).TypeCoercionWhitelistFile = file; } + } + } - if (file.Name.EndsWith("/Story/RawFiles/TypeCoercionWhitelist.txt", StringComparison.Ordinal)) + if (CollectStats) + { + if (file.Name.EndsWith(".txt", StringComparison.Ordinal)) + { + if (file.Name.Contains("/Stats/Generated/Data")) { - var match = typeCoercionWhitelistRe.Match(file.Name); + var match = statRe.Match(file.Name); if (match != null && match.Success) { - GetMod(match.Groups[1].Value).TypeCoercionWhitelistFile = file; + AddStatToMod(match.Groups[1].Value, match.Groups[2].Value, file); } } - } - - if (CollectStats) - { - if (file.Name.EndsWith(".txt", StringComparison.Ordinal)) + else if (file.Name.Contains("/Stats/Generated/Structure")) { - if (file.Name.Contains("/Stats/Generated/Data")) + var match = statStructureRe.Match(file.Name); + if (match != null && match.Success) { - var match = statRe.Match(file.Name); - if (match != null && match.Success) + if (file.Name.EndsWith("Modifiers.txt")) { - AddStatToMod(match.Groups[1].Value, match.Groups[2].Value, file); + GetMod(match.Groups[1].Value).ModifiersFile = file; } - } - else if (file.Name.Contains("/Stats/Generated/Structure")) - { - var match = statStructureRe.Match(file.Name); - if (match != null && match.Success) + else if (file.Name.EndsWith("ValueLists.txt")) { - if (file.Name.EndsWith("Modifiers.txt")) - { - GetMod(match.Groups[1].Value).ModifiersFile = file; - } - else if (file.Name.EndsWith("ValueLists.txt")) - { - 
GetMod(match.Groups[1].Value).ValueListsFile = file; - } + GetMod(match.Groups[1].Value).ValueListsFile = file; } } } } + } - if (CollectGuidResources) + if (CollectGuidResources) + { + if (file.Name.EndsWith(".lsx", StringComparison.Ordinal)) { - if (file.Name.EndsWith(".lsx", StringComparison.Ordinal)) + var match = staticLsxRe.Match(file.Name); + if (match != null && match.Success) { - var match = staticLsxRe.Match(file.Name); - if (match != null && match.Success) + if (match.Groups[2].Value == "ActionResourceDefinitions/ActionResourceDefinitions.lsx") { - if (match.Groups[2].Value == "ActionResourceDefinitions/ActionResourceDefinitions.lsx") - { - GetMod(match.Groups[1].Value).ActionResourcesFile = file; - } - else if (match.Groups[2].Value == "ActionResourceGroupDefinitions/ActionResourceGroupDefinitions.lsx") - { - GetMod(match.Groups[1].Value).ActionResourceGroupsFile = file; - } - else if (match.Groups[2].Value.StartsWith("Tags/")) - { - GetMod(match.Groups[1].Value).TagFiles.Add(file); - } + GetMod(match.Groups[1].Value).ActionResourcesFile = file; + } + else if (match.Groups[2].Value == "ActionResourceGroupDefinitions/ActionResourceGroupDefinitions.lsx") + { + GetMod(match.Groups[1].Value).ActionResourceGroupsFile = file; + } + else if (match.Groups[2].Value.StartsWith("Tags/")) + { + GetMod(match.Groups[1].Value).TagFiles.Add(file); } } } + } - if (CollectGlobals) + if (CollectGlobals) + { + if (file.Name.EndsWith(".lsf", StringComparison.Ordinal) && file.Name.Contains("/Globals/")) { - if (file.Name.EndsWith(".lsf", StringComparison.Ordinal) && file.Name.Contains("/Globals/")) + var match = globalsRe.Match(file.Name); + if (match != null && match.Success) { - var match = globalsRe.Match(file.Name); - if (match != null && match.Success) - { - AddGlobalsToMod(match.Groups[1].Value, match.Groups[0].Value, file); - } + AddGlobalsToMod(match.Groups[1].Value, match.Groups[0].Value, file); } } + } - if (CollectLevels) + if (CollectLevels) + { + if (file.Name.EndsWith(".lsf", StringComparison.Ordinal) && file.Name.Contains("/Levels/")) { - if (file.Name.EndsWith(".lsf", StringComparison.Ordinal) && file.Name.Contains("/Levels/")) + var match = levelObjectsRe.Match(file.Name); + if (match != null && match.Success) { - var match = levelObjectsRe.Match(file.Name); - if (match != null && match.Success) - { - AddLevelObjectsToMod(match.Groups[1].Value, match.Groups[0].Value, file); - } + AddLevelObjectsToMod(match.Groups[1].Value, match.Groups[0].Value, file); } } } + } + + public void DiscoverPackage(string packagePath) + { + var reader = new PackageReader(packagePath); + Resources.LoadedPackages.Add(reader); + var package = reader.Read(); - public void DiscoverPackage(string packagePath) + foreach (var file in package.Files) { - var reader = new PackageReader(packagePath); - Resources.LoadedPackages.Add(reader); - var package = reader.Read(); + DiscoverPackagedFile(file); + } + } - foreach (var file in package.Files) + public void DiscoverBuiltinPackages(string gameDataPath) + { + // List of packages we won't ever load + // These packages don't contain any mod resources, but have a large + // file table that makes loading unneccessarily slow. 
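One detail of the built-in package scan that follows is worth spelling out: after the blacklist declared just below filters out engine-only archives, and archivePartRe skips the numbered parts of multi-part archives, each remaining .pak is opened, its Metadata.Priority is read, and the list is sorted by ascending priority before discovery runs. Because the per-mod collections are filled with plain dictionary assignment, higher-priority packages are discovered last and overwrite earlier entries. The sort delegate further down is equivalent to this lambda form (a restatement for clarity, not the code as committed):

    packagePriorities.Sort((a, b) => a.Item2.CompareTo(b.Item2));   // ascending Priority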
+ HashSet packageBlacklist = + [ + "Assets.pak", + "Effects.pak", + "Engine.pak", + "EngineShaders.pak", + "Game.pak", + "GamePlatform.pak", + "Gustav_NavCloud.pak", + "Gustav_Textures.pak", + "Gustav_Video.pak", + "Icons.pak", + "LowTex.pak", + "Materials.pak", + "Minimaps.pak", + "Models.pak", + "PsoCache.pak", + "SharedSoundBanks.pak", + "SharedSounds.pak", + "Textures.pak", + "VirtualTextures.pak" + ]; + + // Collect priority value from headers + var packagePriorities = new List>(); + + foreach (var path in Directory.GetFiles(gameDataPath, "*.pak")) + { + var baseName = Path.GetFileName(path); + if (!packageBlacklist.Contains(baseName) + // Don't load 2nd, 3rd, ... parts of a multi-part archive + && !archivePartRe.IsMatch(baseName)) { - DiscoverPackagedFile(file); + var reader = new PackageReader(path, true); + var package = reader.Read(); + packagePriorities.Add(new Tuple(path, package.Metadata.Priority)); } } - public void DiscoverBuiltinPackages(string gameDataPath) - { - // List of packages we won't ever load - // These packages don't contain any mod resources, but have a large - // file table that makes loading unneccessarily slow. - HashSet packageBlacklist = - [ - "Assets.pak", - "Effects.pak", - "Engine.pak", - "EngineShaders.pak", - "Game.pak", - "GamePlatform.pak", - "Gustav_NavCloud.pak", - "Gustav_Textures.pak", - "Gustav_Video.pak", - "Icons.pak", - "LowTex.pak", - "Materials.pak", - "Minimaps.pak", - "Models.pak", - "PsoCache.pak", - "SharedSoundBanks.pak", - "SharedSounds.pak", - "Textures.pak", - "VirtualTextures.pak" - ]; - - // Collect priority value from headers - var packagePriorities = new List>(); - - foreach (var path in Directory.GetFiles(gameDataPath, "*.pak")) + packagePriorities.Sort( + delegate (Tuple a, Tuple b) { - var baseName = Path.GetFileName(path); - if (!packageBlacklist.Contains(baseName) - // Don't load 2nd, 3rd, ... parts of a multi-part archive - && !archivePartRe.IsMatch(baseName)) - { - var reader = new PackageReader(path, true); - var package = reader.Read(); - packagePriorities.Add(new Tuple(path, package.Metadata.Priority)); - } + return a.Item2.CompareTo(b.Item2); } + ); - packagePriorities.Sort( - delegate (Tuple a, Tuple b) - { - return a.Item2.CompareTo(b.Item2); - } - ); + // Load non-patch packages first + foreach (var package in packagePriorities) + { + DiscoverPackage(package.Item1); + } + } - // Load non-patch packages first - foreach (var package in packagePriorities) + public void DiscoverUserPackages(string gameDataPath) + { + foreach (var packagePath in Directory.GetFiles(gameDataPath, "*.pak")) + { + // Don't load 2nd, 3rd, ... parts of a multi-part archive + if (!archivePartRe.IsMatch(packagePath)) { - DiscoverPackage(package.Item1); + DiscoverPackage(packagePath); } } + } + + private void DiscoverModGoals(string modName, string modPath) + { + var goalPath = Path.Join(modPath, @"Story\RawFiles\Goals"); + if (!Directory.Exists(goalPath)) return; - public void DiscoverUserPackages(string gameDataPath) + List goalFiles = []; + EnumerateFiles(goalFiles, goalPath, goalPath, "*.txt"); + + foreach (var goalFile in goalFiles) { - foreach (var packagePath in Directory.GetFiles(gameDataPath, "*.pak")) + var fileInfo = new FilesystemFileInfo { - // Don't load 2nd, 3rd, ... 
parts of a multi-part archive - if (!archivePartRe.IsMatch(packagePath)) - { - DiscoverPackage(packagePath); - } - } + FilesystemPath = Path.Join(goalPath, goalFile), + Name = goalFile + }; + AddScriptToMod(modName, goalFile, fileInfo); } + } - private void DiscoverModGoals(string modName, string modPath) - { - var goalPath = Path.Join(modPath, @"Story\RawFiles\Goals"); - if (!Directory.Exists(goalPath)) return; + private void DiscoverModStats(string modName, string modPublicPath) + { + var statsPath = Path.Join(modPublicPath, @"Stats\Generated\Data"); + if (!Directory.Exists(statsPath)) return; - List goalFiles = []; - EnumerateFiles(goalFiles, goalPath, goalPath, "*.txt"); + List statFiles = []; + EnumerateFiles(statFiles, statsPath, statsPath, "*.txt"); - foreach (var goalFile in goalFiles) + foreach (var statFile in statFiles) + { + var fileInfo = new FilesystemFileInfo { - var fileInfo = new FilesystemFileInfo - { - FilesystemPath = Path.Join(goalPath, goalFile), - Name = goalFile - }; - AddScriptToMod(modName, goalFile, fileInfo); - } + FilesystemPath = Path.Join(statsPath, statFile), + Name = statFile + }; + AddStatToMod(modName, statFile, fileInfo); } + } - private void DiscoverModStats(string modName, string modPublicPath) - { - var statsPath = Path.Join(modPublicPath, @"Stats\Generated\Data"); - if (!Directory.Exists(statsPath)) return; + private void DiscoverModGlobals(string modName, string modPath) + { + var globalsPath = Path.Join(modPath, "Globals"); + if (!Directory.Exists(globalsPath)) return; - List statFiles = []; - EnumerateFiles(statFiles, statsPath, statsPath, "*.txt"); + List globalFiles = []; + EnumerateFiles(globalFiles, globalsPath, globalsPath, "*.lsf"); - foreach (var statFile in statFiles) + foreach (var globalFile in globalFiles) + { + var fileInfo = new FilesystemFileInfo { - var fileInfo = new FilesystemFileInfo - { - FilesystemPath = Path.Join(statsPath, statFile), - Name = statFile - }; - AddStatToMod(modName, statFile, fileInfo); - } + FilesystemPath = Path.Join(globalsPath, globalFile), + Name = globalFile + }; + AddGlobalsToMod(modName, globalFile, fileInfo); } + } - private void DiscoverModGlobals(string modName, string modPath) - { - var globalsPath = Path.Join(modPath, "Globals"); - if (!Directory.Exists(globalsPath)) return; + private void DiscoverModLevelObjects(string modName, string modPath) + { + var levelsPath = Path.Join(modPath, "Levels"); + if (!Directory.Exists(levelsPath)) return; - List globalFiles = []; - EnumerateFiles(globalFiles, globalsPath, globalsPath, "*.lsf"); + List levelFiles = []; + EnumerateFiles(levelFiles, levelsPath, levelsPath, "*.lsf"); - foreach (var globalFile in globalFiles) + var levelObjectsRe = LevelObjectsLocalRegex(); + foreach (var levelFile in levelFiles) + { + var fileInfo = new FilesystemFileInfo { - var fileInfo = new FilesystemFileInfo - { - FilesystemPath = Path.Join(globalsPath, globalFile), - Name = globalFile - }; - AddGlobalsToMod(modName, globalFile, fileInfo); - } + FilesystemPath = Path.Join(levelsPath, levelFile), + Name = levelFile + }; + AddLevelObjectsToMod(modName, levelFile, fileInfo); } + } - private void DiscoverModLevelObjects(string modName, string modPath) - { - var levelsPath = Path.Join(modPath, "Levels"); - if (!Directory.Exists(levelsPath)) return; + public void DiscoverModDirectory(string modName, string modPath, string publicPath) + { + // Trigger mod entry creation even if there are no resources + GetMod(modName); - List levelFiles = []; - EnumerateFiles(levelFiles, levelsPath, 
levelsPath, "*.lsf"); + if (CollectStoryGoals) + { + DiscoverModGoals(modName, modPath); - var levelObjectsRe = LevelObjectsLocalRegex(); - foreach (var levelFile in levelFiles) + var headerPath = Path.Join(modPath, @"Story\RawFiles\story_header.div"); + if (File.Exists(headerPath)) { var fileInfo = new FilesystemFileInfo { - FilesystemPath = Path.Join(levelsPath, levelFile), - Name = levelFile + FilesystemPath = headerPath, + Name = headerPath }; - AddLevelObjectsToMod(modName, levelFile, fileInfo); + GetMod(modName).StoryHeaderFile = fileInfo; } - } - public void DiscoverModDirectory(string modName, string modPath, string publicPath) - { - // Trigger mod entry creation even if there are no resources - GetMod(modName); - - if (CollectStoryGoals) + var orphanQueryIgnoresPath = Path.Join(modPath, @"Story\story_orphanqueries_ignore_local.txt"); + if (File.Exists(orphanQueryIgnoresPath)) { - DiscoverModGoals(modName, modPath); - - var headerPath = Path.Join(modPath, @"Story\RawFiles\story_header.div"); - if (File.Exists(headerPath)) - { - var fileInfo = new FilesystemFileInfo - { - FilesystemPath = headerPath, - Name = headerPath - }; - GetMod(modName).StoryHeaderFile = fileInfo; - } - - var orphanQueryIgnoresPath = Path.Join(modPath, @"Story\story_orphanqueries_ignore_local.txt"); - if (File.Exists(orphanQueryIgnoresPath)) - { - var fileInfo = new FilesystemFileInfo - { - FilesystemPath = orphanQueryIgnoresPath, - Name = orphanQueryIgnoresPath - }; - GetMod(modName).OrphanQueryIgnoreList = fileInfo; - } - - var typeCoercionWhitelistPath = Path.Join(modPath, @"Story\RawFiles\TypeCoercionWhitelist.txt"); - if (File.Exists(typeCoercionWhitelistPath)) + var fileInfo = new FilesystemFileInfo { - var fileInfo = new FilesystemFileInfo - { - FilesystemPath = typeCoercionWhitelistPath, - Name = typeCoercionWhitelistPath - }; - GetMod(modName).TypeCoercionWhitelistFile = fileInfo; - } + FilesystemPath = orphanQueryIgnoresPath, + Name = orphanQueryIgnoresPath + }; + GetMod(modName).OrphanQueryIgnoreList = fileInfo; } - if (CollectStats) + var typeCoercionWhitelistPath = Path.Join(modPath, @"Story\RawFiles\TypeCoercionWhitelist.txt"); + if (File.Exists(typeCoercionWhitelistPath)) { - DiscoverModStats(modName, publicPath); + var fileInfo = new FilesystemFileInfo + { + FilesystemPath = typeCoercionWhitelistPath, + Name = typeCoercionWhitelistPath + }; + GetMod(modName).TypeCoercionWhitelistFile = fileInfo; } + } - if (CollectGlobals) - { - DiscoverModGlobals(modName, modPath); - } + if (CollectStats) + { + DiscoverModStats(modName, publicPath); + } - if (CollectLevels) - { - DiscoverModLevelObjects(modName, modPath); - } + if (CollectGlobals) + { + DiscoverModGlobals(modName, modPath); } - public void DiscoverMods(string gameDataPath) + if (CollectLevels) { - var modsPath = Path.Combine(gameDataPath, "Mods"); - var publicPath = Path.Combine(gameDataPath, "Public"); + DiscoverModLevelObjects(modName, modPath); + } + } - if (Directory.Exists(modsPath)) - { - var modPaths = Directory.GetDirectories(modsPath); + public void DiscoverMods(string gameDataPath) + { + var modsPath = Path.Combine(gameDataPath, "Mods"); + var publicPath = Path.Combine(gameDataPath, "Public"); + + if (Directory.Exists(modsPath)) + { + var modPaths = Directory.GetDirectories(modsPath); - foreach (var modPath in modPaths) + foreach (var modPath in modPaths) + { + if (File.Exists(Path.Combine(modPath, "meta.lsx"))) { - if (File.Exists(Path.Combine(modPath, "meta.lsx"))) - { - var modName = Path.GetFileNameWithoutExtension(modPath); - 
var modPublicPath = Path.Combine(publicPath, Path.GetFileName(modPath)); - DiscoverModDirectory(modName, modPath, modPublicPath); - } + var modName = Path.GetFileNameWithoutExtension(modPath); + var modPublicPath = Path.Combine(publicPath, Path.GetFileName(modPath)); + DiscoverModDirectory(modName, modPath, modPublicPath); } } } + } - public void Discover(String gameDataPath) + public void Discover(String gameDataPath) + { + if (LoadPackages) { - if (LoadPackages) - { - DiscoverBuiltinPackages(gameDataPath); - } - - DiscoverMods(gameDataPath); + DiscoverBuiltinPackages(gameDataPath); } - [GeneratedRegex("^Mods/([^/]+)/meta\\.lsx$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] - private static partial Regex MetaRegex(); + DiscoverMods(gameDataPath); + } - [GeneratedRegex("^Mods/([^/]+)/Story/RawFiles/Goals/(.*\\.txt)$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] - private static partial Regex ScriptRegex(); + [GeneratedRegex("^Mods/([^/]+)/meta\\.lsx$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] + private static partial Regex MetaRegex(); - [GeneratedRegex("^Public/([^/]+)/Stats/Generated/Data/(.*\\.txt)$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] - private static partial Regex StatRegex(); + [GeneratedRegex("^Mods/([^/]+)/Story/RawFiles/Goals/(.*\\.txt)$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] + private static partial Regex ScriptRegex(); - [GeneratedRegex("^Public/([^/]+)/(.*\\.lsx)$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] - private static partial Regex StaticLsxRegex(); + [GeneratedRegex("^Public/([^/]+)/Stats/Generated/Data/(.*\\.txt)$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] + private static partial Regex StatRegex(); - [GeneratedRegex("^Public/([^/]+)/Stats/Generated/Structure/(.*\\.txt)$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] - private static partial Regex StatStructureRegex(); + [GeneratedRegex("^Public/([^/]+)/(.*\\.lsx)$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] + private static partial Regex StaticLsxRegex(); - [GeneratedRegex("^Mods/([^/]+)/Story/story_orphanqueries_ignore_local\\.txt$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] - private static partial Regex OrphanQueryIgnoresRegex(); + [GeneratedRegex("^Public/([^/]+)/Stats/Generated/Structure/(.*\\.txt)$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] + private static partial Regex StatStructureRegex(); - [GeneratedRegex("^Mods/([^/]+)/Story/RawFiles/story_header\\.div$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] - private static partial Regex StoryDefinitionsRegex(); + [GeneratedRegex("^Mods/([^/]+)/Story/story_orphanqueries_ignore_local\\.txt$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] + private static partial Regex OrphanQueryIgnoresRegex(); - [GeneratedRegex("^Mods/([^/]+)/Story/RawFiles/TypeCoercionWhitelist\\.txt$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] - private static partial Regex TypeCoercionWhitelistRegex(); + [GeneratedRegex("^Mods/([^/]+)/Story/RawFiles/story_header\\.div$", RegexOptions.IgnoreCase | RegexOptions.Compiled | 
RegexOptions.CultureInvariant)] + private static partial Regex StoryDefinitionsRegex(); - [GeneratedRegex("^Mods/([^/]+)/Globals/.*/.*/.*\\.lsf$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] - private static partial Regex GlobalsRegex(); + [GeneratedRegex("^Mods/([^/]+)/Story/RawFiles/TypeCoercionWhitelist\\.txt$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] + private static partial Regex TypeCoercionWhitelistRegex(); - [GeneratedRegex("^Mods/([^/]+)/Levels/.*/(Characters|Items|Triggers)/.*\\.lsf$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] - private static partial Regex LevelObjectsRegex(); + [GeneratedRegex("^Mods/([^/]+)/Globals/.*/.*/.*\\.lsf$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] + private static partial Regex GlobalsRegex(); - [GeneratedRegex("^(.*)_[0-9]+\\.pak$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] - private static partial Regex ArchivePartRegex(); + [GeneratedRegex("^Mods/([^/]+)/Levels/.*/(Characters|Items|Triggers)/.*\\.lsf$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] + private static partial Regex LevelObjectsRegex(); - [GeneratedRegex("^(Characters|Items|Triggers)/.*\\.lsf$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant)] - private static partial Regex LevelObjectsLocalRegex(); - } + [GeneratedRegex("^(.*)_[0-9]+\\.pak$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] + private static partial Regex ArchivePartRegex(); + + [GeneratedRegex("^(Characters|Items|Triggers)/.*\\.lsf$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant)] + private static partial Regex LevelObjectsLocalRegex(); } diff --git a/LSLib/LS/NodeAttribute.cs b/LSLib/LS/NodeAttribute.cs index 801e2b14..57789919 100644 --- a/LSLib/LS/NodeAttribute.cs +++ b/LSLib/LS/NodeAttribute.cs @@ -2,395 +2,394 @@ using System.Collections.Generic; using System.Linq; -namespace LSLib.LS +namespace LSLib.LS; + +public class TranslatedString { - public class TranslatedString + public UInt16 Version = 0; + public string Value; + public string Handle; + + public override string ToString() { - public UInt16 Version = 0; - public string Value; - public string Handle; + return Value; + } +} + +public class TranslatedFSStringArgument +{ + public string Key; + public TranslatedFSString String; + public string Value; +} - public override string ToString() +public class TranslatedFSString : TranslatedString +{ + public List Arguments; +} + +public class NodeSerializationSettings +{ + public bool DefaultByteSwapGuids = true; + public bool ByteSwapGuids = true; + + public void InitFromMeta(string meta) + { + if (meta.Length == 0) { - return Value; + // No metadata available, use defaults + ByteSwapGuids = DefaultByteSwapGuids; + } + else + { + var tags = meta.Split(','); + ByteSwapGuids = tags.Contains("bswap_guids"); } } - public class TranslatedFSStringArgument + public string BuildMeta() { - public string Key; - public TranslatedFSString String; - public string Value; + List tags = [ "v1" ]; + if (ByteSwapGuids) + { + tags.Add("bswap_guids"); + } + + return String.Join(",", tags); } +} - public class TranslatedFSString : TranslatedString +public class NodeAttribute(NodeAttribute.DataType type) +{ + public enum DataType { - public List Arguments; + DT_None = 0, + DT_Byte = 1, + DT_Short = 2, + DT_UShort = 3, + DT_Int = 4, + DT_UInt = 5, + 
DT_Float = 6, + DT_Double = 7, + DT_IVec2 = 8, + DT_IVec3 = 9, + DT_IVec4 = 10, + DT_Vec2 = 11, + DT_Vec3 = 12, + DT_Vec4 = 13, + DT_Mat2 = 14, + DT_Mat3 = 15, + DT_Mat3x4 = 16, + DT_Mat4x3 = 17, + DT_Mat4 = 18, + DT_Bool = 19, + DT_String = 20, + DT_Path = 21, + DT_FixedString = 22, + DT_LSString = 23, + DT_ULongLong = 24, + DT_ScratchBuffer = 25, + // Seems to be unused? + DT_Long = 26, + DT_Int8 = 27, + DT_TranslatedString = 28, + DT_WString = 29, + DT_LSWString = 30, + DT_UUID = 31, + DT_Int64 = 32, + DT_TranslatedFSString = 33, + // Last supported datatype, always keep this one at the end + DT_Max = DT_TranslatedFSString + }; + + private readonly DataType type = type; + private object value; + + public DataType Type + { + get { return type; } } - public class NodeSerializationSettings + public object Value { - public bool DefaultByteSwapGuids = true; - public bool ByteSwapGuids = true; + get { return value; } + set { this.value = value; } + } - public void InitFromMeta(string meta) - { - if (meta.Length == 0) - { - // No metadata available, use defaults - ByteSwapGuids = DefaultByteSwapGuids; - } - else - { - var tags = meta.Split(','); - ByteSwapGuids = tags.Contains("bswap_guids"); - } - } + public override string ToString() + { + throw new NotImplementedException("ToString() is not safe to use anymore, AsString(settings) instead"); + } - public string BuildMeta() + public static Guid ByteSwapGuid(Guid g) + { + var bytes = g.ToByteArray(); + for (var i = 8; i < 16; i += 2) { - List tags = [ "v1" ]; - if (ByteSwapGuids) - { - tags.Add("bswap_guids"); - } - - return String.Join(",", tags); + (bytes[i + 1], bytes[i]) = (bytes[i], bytes[i + 1]); } + + return new Guid(bytes); } - public class NodeAttribute(NodeAttribute.DataType type) + public string AsString(NodeSerializationSettings settings) { - public enum DataType - { - DT_None = 0, - DT_Byte = 1, - DT_Short = 2, - DT_UShort = 3, - DT_Int = 4, - DT_UInt = 5, - DT_Float = 6, - DT_Double = 7, - DT_IVec2 = 8, - DT_IVec3 = 9, - DT_IVec4 = 10, - DT_Vec2 = 11, - DT_Vec3 = 12, - DT_Vec4 = 13, - DT_Mat2 = 14, - DT_Mat3 = 15, - DT_Mat3x4 = 16, - DT_Mat4x3 = 17, - DT_Mat4 = 18, - DT_Bool = 19, - DT_String = 20, - DT_Path = 21, - DT_FixedString = 22, - DT_LSString = 23, - DT_ULongLong = 24, - DT_ScratchBuffer = 25, - // Seems to be unused? 
- DT_Long = 26, - DT_Int8 = 27, - DT_TranslatedString = 28, - DT_WString = 29, - DT_LSWString = 30, - DT_UUID = 31, - DT_Int64 = 32, - DT_TranslatedFSString = 33, - // Last supported datatype, always keep this one at the end - DT_Max = DT_TranslatedFSString - }; - - private readonly DataType type = type; - private object value; - - public DataType Type + switch (type) { - get { return type; } - } + case DataType.DT_ScratchBuffer: + // ScratchBuffer is a special case, as its stored as byte[] and ToString() doesn't really do what we want + return Convert.ToBase64String((byte[])value); + + case DataType.DT_IVec2: + case DataType.DT_IVec3: + case DataType.DT_IVec4: + return String.Join(" ", new List((int[])value).ConvertAll(i => i.ToString()).ToArray()); + + case DataType.DT_Vec2: + case DataType.DT_Vec3: + case DataType.DT_Vec4: + return String.Join(" ", new List((float[])value).ConvertAll(i => i.ToString()).ToArray()); + + case DataType.DT_UUID: + if (settings.ByteSwapGuids) + { + return ByteSwapGuid((Guid)value).ToString(); + } + else + { + return value.ToString(); + } - public object Value - { - get { return value; } - set { this.value = value; } + default: + return value.ToString(); } + } - public override string ToString() + public int GetRows() + { + switch (this.type) { - throw new NotImplementedException("ToString() is not safe to use anymore, AsString(settings) instead"); + case DataType.DT_IVec2: + case DataType.DT_IVec3: + case DataType.DT_IVec4: + case DataType.DT_Vec2: + case DataType.DT_Vec3: + case DataType.DT_Vec4: + return 1; + + case DataType.DT_Mat2: + return 2; + + case DataType.DT_Mat3: + case DataType.DT_Mat3x4: + return 3; + + case DataType.DT_Mat4x3: + case DataType.DT_Mat4: + return 4; + + default: + throw new NotSupportedException("Data type does not have rows"); } + } - public static Guid ByteSwapGuid(Guid g) + public int GetColumns() + { + switch (this.type) { - var bytes = g.ToByteArray(); - for (var i = 8; i < 16; i += 2) - { - (bytes[i + 1], bytes[i]) = (bytes[i], bytes[i + 1]); - } - - return new Guid(bytes); + case DataType.DT_IVec2: + case DataType.DT_Vec2: + case DataType.DT_Mat2: + return 2; + + case DataType.DT_IVec3: + case DataType.DT_Vec3: + case DataType.DT_Mat3: + case DataType.DT_Mat4x3: + return 3; + + case DataType.DT_IVec4: + case DataType.DT_Vec4: + case DataType.DT_Mat3x4: + case DataType.DT_Mat4: + return 4; + + default: + throw new NotSupportedException("Data type does not have columns"); } + } - public string AsString(NodeSerializationSettings settings) - { - switch (type) - { - case DataType.DT_ScratchBuffer: - // ScratchBuffer is a special case, as its stored as byte[] and ToString() doesn't really do what we want - return Convert.ToBase64String((byte[])value); - - case DataType.DT_IVec2: - case DataType.DT_IVec3: - case DataType.DT_IVec4: - return String.Join(" ", new List((int[])value).ConvertAll(i => i.ToString()).ToArray()); - - case DataType.DT_Vec2: - case DataType.DT_Vec3: - case DataType.DT_Vec4: - return String.Join(" ", new List((float[])value).ConvertAll(i => i.ToString()).ToArray()); - - case DataType.DT_UUID: - if (settings.ByteSwapGuids) - { - return ByteSwapGuid((Guid)value).ToString(); - } - else - { - return value.ToString(); - } - - default: - return value.ToString(); - } - } + public bool IsNumeric() + { + return this.type == DataType.DT_Byte + || this.type == DataType.DT_Short + || this.type == DataType.DT_Short + || this.type == DataType.DT_Int + || this.type == DataType.DT_UInt + || this.type == DataType.DT_Float + || 
this.type == DataType.DT_Double + || this.type == DataType.DT_ULongLong + || this.type == DataType.DT_Long + || this.type == DataType.DT_Int8; + } - public int GetRows() + public void FromString(string str, NodeSerializationSettings settings) + { + if (IsNumeric()) { - switch (this.type) + // Workaround: Some XML files use empty strings, instead of "0" for zero values. + if (str == "") { - case DataType.DT_IVec2: - case DataType.DT_IVec3: - case DataType.DT_IVec4: - case DataType.DT_Vec2: - case DataType.DT_Vec3: - case DataType.DT_Vec4: - return 1; - - case DataType.DT_Mat2: - return 2; - - case DataType.DT_Mat3: - case DataType.DT_Mat3x4: - return 3; - - case DataType.DT_Mat4x3: - case DataType.DT_Mat4: - return 4; - - default: - throw new NotSupportedException("Data type does not have rows"); + str = "0"; } - } - - public int GetColumns() - { - switch (this.type) + // Handle hexadecimal integers in XML files + else if (str.Length > 2 && str[..2] == "0x") { - case DataType.DT_IVec2: - case DataType.DT_Vec2: - case DataType.DT_Mat2: - return 2; - - case DataType.DT_IVec3: - case DataType.DT_Vec3: - case DataType.DT_Mat3: - case DataType.DT_Mat4x3: - return 3; - - case DataType.DT_IVec4: - case DataType.DT_Vec4: - case DataType.DT_Mat3x4: - case DataType.DT_Mat4: - return 4; - - default: - throw new NotSupportedException("Data type does not have columns"); + str = Convert.ToUInt64(str[2..], 16).ToString(); } } - public bool IsNumeric() + switch (this.type) { - return this.type == DataType.DT_Byte - || this.type == DataType.DT_Short - || this.type == DataType.DT_Short - || this.type == DataType.DT_Int - || this.type == DataType.DT_UInt - || this.type == DataType.DT_Float - || this.type == DataType.DT_Double - || this.type == DataType.DT_ULongLong - || this.type == DataType.DT_Long - || this.type == DataType.DT_Int8; - } + case DataType.DT_None: + // This is a null type, cannot have a value + break; - public void FromString(string str, NodeSerializationSettings settings) - { - if (IsNumeric()) - { - // Workaround: Some XML files use empty strings, instead of "0" for zero values. 
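The numeric pre-processing in FromString is easiest to see with concrete inputs (the attribute type and both literals below are invented for the example): an empty string is coerced to "0", and a 0x-prefixed literal is rewritten to decimal before the per-type Convert call runs.

    // Minimal sketch of the workaround described above:
    var settings = new NodeSerializationSettings();
    var attr = new NodeAttribute(NodeAttribute.DataType.DT_Int);
    attr.FromString("", settings);       // treated as "0"  -> attr.Value == 0
    attr.FromString("0x1A", settings);   // rewritten to "26" via Convert.ToUInt64("1A", 16) -> attr.Value == 26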
- if (str == "") - { - str = "0"; - } - // Handle hexadecimal integers in XML files - else if (str.Length > 2 && str[..2] == "0x") - { - str = Convert.ToUInt64(str[2..], 16).ToString(); - } - } + case DataType.DT_Byte: + value = Convert.ToByte(str); + break; - switch (this.type) - { - case DataType.DT_None: - // This is a null type, cannot have a value - break; + case DataType.DT_Short: + value = Convert.ToInt16(str); + break; - case DataType.DT_Byte: - value = Convert.ToByte(str); - break; + case DataType.DT_UShort: + value = Convert.ToUInt16(str); + break; - case DataType.DT_Short: - value = Convert.ToInt16(str); - break; + case DataType.DT_Int: + value = Convert.ToInt32(str); + break; - case DataType.DT_UShort: - value = Convert.ToUInt16(str); - break; + case DataType.DT_UInt: + value = Convert.ToUInt32(str); + break; - case DataType.DT_Int: - value = Convert.ToInt32(str); - break; + case DataType.DT_Float: + value = Convert.ToSingle(str); + break; - case DataType.DT_UInt: - value = Convert.ToUInt32(str); - break; + case DataType.DT_Double: + value = Convert.ToDouble(str); + break; - case DataType.DT_Float: - value = Convert.ToSingle(str); - break; - - case DataType.DT_Double: - value = Convert.ToDouble(str); - break; - - case DataType.DT_IVec2: - case DataType.DT_IVec3: - case DataType.DT_IVec4: - { - string[] nums = str.Split(' '); - int length = GetColumns(); - if (length != nums.Length) - throw new FormatException(String.Format("A vector of length {0} was expected, got {1}", length, nums.Length)); - - int[] vec = new int[length]; - for (int i = 0; i < length; i++) - vec[i] = int.Parse(nums[i]); - - value = vec; - break; - } - - case DataType.DT_Vec2: - case DataType.DT_Vec3: - case DataType.DT_Vec4: - { - string[] nums = str.Split(' '); - int length = GetColumns(); - if (length != nums.Length) - throw new FormatException(String.Format("A vector of length {0} was expected, got {1}", length, nums.Length)); - - float[] vec = new float[length]; - for (int i = 0; i < length; i++) - vec[i] = float.Parse(nums[i]); - - value = vec; - break; - } - - case DataType.DT_Mat2: - case DataType.DT_Mat3: - case DataType.DT_Mat3x4: - case DataType.DT_Mat4x3: - case DataType.DT_Mat4: - var mat = Matrix.Parse(str); - if (mat.cols != GetColumns() || mat.rows != GetRows()) - throw new FormatException("Invalid column/row count for matrix"); - value = mat; - break; - - case DataType.DT_Bool: - if (str == "0") value = false; - else if (str == "1") value = true; - else value = Convert.ToBoolean(str); - break; - - case DataType.DT_String: - case DataType.DT_Path: - case DataType.DT_FixedString: - case DataType.DT_LSString: - case DataType.DT_WString: - case DataType.DT_LSWString: - value = str; - break; - - case DataType.DT_TranslatedString: - // We'll only set the value part of the translated string, not the TranslatedStringKey / Handle part - // That can be changed separately via attribute.Value.Handle - value ??= new TranslatedString(); - - ((TranslatedString)value).Value = str; - break; + case DataType.DT_IVec2: + case DataType.DT_IVec3: + case DataType.DT_IVec4: + { + string[] nums = str.Split(' '); + int length = GetColumns(); + if (length != nums.Length) + throw new FormatException(String.Format("A vector of length {0} was expected, got {1}", length, nums.Length)); - case DataType.DT_TranslatedFSString: - // We'll only set the value part of the translated string, not the TranslatedStringKey / Handle part - // That can be changed separately via attribute.Value.Handle - value ??= new 
TranslatedFSString(); + int[] vec = new int[length]; + for (int i = 0; i < length; i++) + vec[i] = int.Parse(nums[i]); - ((TranslatedFSString)value).Value = str; - break; - - case DataType.DT_ULongLong: - value = Convert.ToUInt64(str); + value = vec; break; + } - case DataType.DT_ScratchBuffer: - value = Convert.FromBase64String(str); - break; + case DataType.DT_Vec2: + case DataType.DT_Vec3: + case DataType.DT_Vec4: + { + string[] nums = str.Split(' '); + int length = GetColumns(); + if (length != nums.Length) + throw new FormatException(String.Format("A vector of length {0} was expected, got {1}", length, nums.Length)); - case DataType.DT_Long: - case DataType.DT_Int64: - value = Convert.ToInt64(str); - break; + float[] vec = new float[length]; + for (int i = 0; i < length; i++) + vec[i] = float.Parse(nums[i]); - case DataType.DT_Int8: - value = Convert.ToSByte(str); + value = vec; break; + } - case DataType.DT_UUID: - if (settings.ByteSwapGuids) - { - value = ByteSwapGuid(new Guid(str)); - } - else - { - value = new Guid(str); - } - break; + case DataType.DT_Mat2: + case DataType.DT_Mat3: + case DataType.DT_Mat3x4: + case DataType.DT_Mat4x3: + case DataType.DT_Mat4: + var mat = Matrix.Parse(str); + if (mat.cols != GetColumns() || mat.rows != GetRows()) + throw new FormatException("Invalid column/row count for matrix"); + value = mat; + break; + + case DataType.DT_Bool: + if (str == "0") value = false; + else if (str == "1") value = true; + else value = Convert.ToBoolean(str); + break; + + case DataType.DT_String: + case DataType.DT_Path: + case DataType.DT_FixedString: + case DataType.DT_LSString: + case DataType.DT_WString: + case DataType.DT_LSWString: + value = str; + break; + + case DataType.DT_TranslatedString: + // We'll only set the value part of the translated string, not the TranslatedStringKey / Handle part + // That can be changed separately via attribute.Value.Handle + value ??= new TranslatedString(); + + ((TranslatedString)value).Value = str; + break; + + case DataType.DT_TranslatedFSString: + // We'll only set the value part of the translated string, not the TranslatedStringKey / Handle part + // That can be changed separately via attribute.Value.Handle + value ??= new TranslatedFSString(); + + ((TranslatedFSString)value).Value = str; + break; + + case DataType.DT_ULongLong: + value = Convert.ToUInt64(str); + break; + + case DataType.DT_ScratchBuffer: + value = Convert.FromBase64String(str); + break; + + case DataType.DT_Long: + case DataType.DT_Int64: + value = Convert.ToInt64(str); + break; + + case DataType.DT_Int8: + value = Convert.ToSByte(str); + break; + + case DataType.DT_UUID: + if (settings.ByteSwapGuids) + { + value = ByteSwapGuid(new Guid(str)); + } + else + { + value = new Guid(str); + } + break; - default: - // This should not happen! - throw new NotImplementedException(String.Format("FromString() not implemented for type {0}", this.type)); - } + default: + // This should not happen! 
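Taken together, FromString and AsString are meant to round-trip the space-separated vector formats; a minimal usage sketch (the DT_Vec3 choice and the literal are illustrative only):

    var settings = new NodeSerializationSettings();
    var attr = new NodeAttribute(NodeAttribute.DataType.DT_Vec3);
    attr.FromString("1 2.5 3", settings);    // split on ' ', parsed into a float[3] (GetColumns() == 3)
    var text = attr.AsString(settings);      // joined back with spaces, e.g. "1 2.5 3"

Exact textual round-tripping depends on the current culture, since float.Parse and ToString are used here without an explicit CultureInfo.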
+ throw new NotImplementedException(String.Format("FromString() not implemented for type {0}", this.type)); } } } diff --git a/LSLib/LS/PackageCommon.cs b/LSLib/LS/PackageCommon.cs index 17bf4ea8..7a7ba5cd 100644 --- a/LSLib/LS/PackageCommon.cs +++ b/LSLib/LS/PackageCommon.cs @@ -7,696 +7,696 @@ using LSLib.LS.Enums; using LSLib.Native; -namespace LSLib.LS -{ - [StructLayout(LayoutKind.Sequential, Pack = 1)] - internal struct LSPKHeader7 - { - public UInt32 Version; - public UInt32 DataOffset; - public UInt32 NumParts; - public UInt32 FileListSize; - public Byte LittleEndian; - public UInt32 NumFiles; - } - - [StructLayout(LayoutKind.Sequential, Pack = 1)] - internal struct FileEntry7 - { - [MarshalAs(UnmanagedType.ByValArray, SizeConst = 256)] - public byte[] Name; - - public UInt32 OffsetInFile; - public UInt32 SizeOnDisk; - public UInt32 UncompressedSize; - public UInt32 ArchivePart; - } - - [StructLayout(LayoutKind.Sequential, Pack = 1)] - internal struct LSPKHeader10 - { - public UInt32 Version; - public UInt32 DataOffset; - public UInt32 FileListSize; - public UInt16 NumParts; - public Byte Flags; - public Byte Priority; - public UInt32 NumFiles; - } +namespace LSLib.LS; - [StructLayout(LayoutKind.Sequential, Pack = 1)] - internal struct LSPKHeader13 - { - public UInt32 Version; - public UInt32 FileListOffset; - public UInt32 FileListSize; - public UInt16 NumParts; - public Byte Flags; - public Byte Priority; - - [MarshalAs(UnmanagedType.ByValArray, SizeConst = 16)] - public byte[] Md5; - } +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct LSPKHeader7 +{ + public UInt32 Version; + public UInt32 DataOffset; + public UInt32 NumParts; + public UInt32 FileListSize; + public Byte LittleEndian; + public UInt32 NumFiles; +} - [StructLayout(LayoutKind.Sequential, Pack = 1)] - internal struct LSPKHeader15 - { - public UInt32 Version; - public UInt64 FileListOffset; - public UInt32 FileListSize; - public Byte Flags; - public Byte Priority; - - [MarshalAs(UnmanagedType.ByValArray, SizeConst = 16)] - public byte[] Md5; - } +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct FileEntry7 +{ + [MarshalAs(UnmanagedType.ByValArray, SizeConst = 256)] + public byte[] Name; - [StructLayout(LayoutKind.Sequential, Pack = 1)] - internal struct LSPKHeader16 - { - public UInt32 Version; - public UInt64 FileListOffset; - public UInt32 FileListSize; - public Byte Flags; - public Byte Priority; + public UInt32 OffsetInFile; + public UInt32 SizeOnDisk; + public UInt32 UncompressedSize; + public UInt32 ArchivePart; +} - [MarshalAs(UnmanagedType.ByValArray, SizeConst = 16)] - public byte[] Md5; +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct LSPKHeader10 +{ + public UInt32 Version; + public UInt32 DataOffset; + public UInt32 FileListSize; + public UInt16 NumParts; + public Byte Flags; + public Byte Priority; + public UInt32 NumFiles; +} - public UInt16 NumParts; - } +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct LSPKHeader13 +{ + public UInt32 Version; + public UInt32 FileListOffset; + public UInt32 FileListSize; + public UInt16 NumParts; + public Byte Flags; + public Byte Priority; + + [MarshalAs(UnmanagedType.ByValArray, SizeConst = 16)] + public byte[] Md5; +} - [Flags] - public enum PackageFlags - { - /// - /// Allow memory-mapped access to the files in this archive. 
- /// - AllowMemoryMapping = 0x02, - /// - /// All files are compressed into a single LZ4 stream - /// - Solid = 0x04, - /// - /// Archive contents should be preloaded on game startup. - /// - Preload = 0x08 - }; - - public class PackageMetadata - { - /// - /// Package flags bitmask. Allowed values are in the PackageFlags enumeration. - /// - public PackageFlags Flags = 0; - /// - /// Load priority. Packages with higher priority are loaded later (i.e. they override earlier packages). - /// - public Byte Priority = 0; - } +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct LSPKHeader15 +{ + public UInt32 Version; + public UInt64 FileListOffset; + public UInt32 FileListSize; + public Byte Flags; + public Byte Priority; + + [MarshalAs(UnmanagedType.ByValArray, SizeConst = 16)] + public byte[] Md5; +} - [StructLayout(LayoutKind.Sequential, Pack = 1)] - internal struct FileEntry13 - { - [MarshalAs(UnmanagedType.ByValArray, SizeConst = 256)] - public byte[] Name; - - public UInt32 OffsetInFile; - public UInt32 SizeOnDisk; - public UInt32 UncompressedSize; - public UInt32 ArchivePart; - public UInt32 Flags; - public UInt32 Crc; - } +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct LSPKHeader16 +{ + public UInt32 Version; + public UInt64 FileListOffset; + public UInt32 FileListSize; + public Byte Flags; + public Byte Priority; - [StructLayout(LayoutKind.Sequential, Pack = 1)] - internal struct FileEntry15 - { - [MarshalAs(UnmanagedType.ByValArray, SizeConst = 256)] - public byte[] Name; - - public UInt64 OffsetInFile; - public UInt64 SizeOnDisk; - public UInt64 UncompressedSize; - public UInt32 ArchivePart; - public UInt32 Flags; - public UInt32 Crc; - public UInt32 Unknown2; - } + [MarshalAs(UnmanagedType.ByValArray, SizeConst = 16)] + public byte[] Md5; - [StructLayout(LayoutKind.Sequential, Pack = 1)] - internal struct FileEntry18 - { - [MarshalAs(UnmanagedType.ByValArray, SizeConst = 256)] - public byte[] Name; - - public UInt32 OffsetInFile1; - public UInt16 OffsetInFile2; - public Byte ArchivePart; - public Byte Flags; - public UInt32 SizeOnDisk; - public UInt32 UncompressedSize; - } + public UInt16 NumParts; +} - public abstract class AbstractFileInfo - { - public String Name; +[Flags] +public enum PackageFlags +{ + /// + /// Allow memory-mapped access to the files in this archive. + /// + AllowMemoryMapping = 0x02, + /// + /// All files are compressed into a single LZ4 stream + /// + Solid = 0x04, + /// + /// Archive contents should be preloaded on game startup. + /// + Preload = 0x08 +}; + +public class PackageMetadata +{ + /// + /// Package flags bitmask. Allowed values are in the PackageFlags enumeration. + /// + public PackageFlags Flags = 0; + /// + /// Load priority. Packages with higher priority are loaded later (i.e. they override earlier packages). 
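// PackageFlags is a [Flags] bitmask, so a package can carry several of these
// options at once, and Priority decides which archive wins when two packages
// provide the same file (a higher priority loads later and overrides). A short
// illustration of how the metadata above is combined and queried; the concrete
// values here are made up for the example:
internal static class PackageMetadataExample
{
    public static PackageMetadata MakeExample()
    {
        return new PackageMetadata
        {
            Flags = PackageFlags.AllowMemoryMapping | PackageFlags.Preload,
            Priority = 30   // overrides packages with a lower priority
        };
    }

    public static bool IsSolid(PackageMetadata metadata)
    {
        // Test an individual flag by masking, the same way the reader code
        // checks PackageFlags.Solid further below in this patch.
        return (metadata.Flags & PackageFlags.Solid) == PackageFlags.Solid;
    }
}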
+ /// + public Byte Priority = 0; +} - public abstract UInt64 Size(); - public abstract UInt32 CRC(); - public abstract Stream MakeStream(); - public abstract void ReleaseStream(); - public abstract bool IsDeletion(); - } +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct FileEntry13 +{ + [MarshalAs(UnmanagedType.ByValArray, SizeConst = 256)] + public byte[] Name; + + public UInt32 OffsetInFile; + public UInt32 SizeOnDisk; + public UInt32 UncompressedSize; + public UInt32 ArchivePart; + public UInt32 Flags; + public UInt32 Crc; +} +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct FileEntry15 +{ + [MarshalAs(UnmanagedType.ByValArray, SizeConst = 256)] + public byte[] Name; + + public UInt64 OffsetInFile; + public UInt64 SizeOnDisk; + public UInt64 UncompressedSize; + public UInt32 ArchivePart; + public UInt32 Flags; + public UInt32 Crc; + public UInt32 Unknown2; +} - public class UncompressedPackagedFileStream : Stream - { - private readonly Stream PackageStream; - private readonly PackagedFileInfo FileInfo; +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct FileEntry18 +{ + [MarshalAs(UnmanagedType.ByValArray, SizeConst = 256)] + public byte[] Name; + + public UInt32 OffsetInFile1; + public UInt16 OffsetInFile2; + public Byte ArchivePart; + public Byte Flags; + public UInt32 SizeOnDisk; + public UInt32 UncompressedSize; +} - public UncompressedPackagedFileStream(Stream packageStream, PackagedFileInfo fileInfo) - { - PackageStream = packageStream; - FileInfo = fileInfo; - PackageStream.Seek((long)fileInfo.OffsetInFile, SeekOrigin.Begin); - - if ((CompressionMethod)(FileInfo.Flags & 0x0F) != CompressionMethod.None) - { - throw new ArgumentException("We only support uncompressed files!"); - } - } +public abstract class AbstractFileInfo +{ + public String Name; - public override bool CanRead { get { return true; } } - public override bool CanSeek { get { return false; } } + public abstract UInt64 Size(); + public abstract UInt32 CRC(); + public abstract Stream MakeStream(); + public abstract void ReleaseStream(); + public abstract bool IsDeletion(); +} - public override int Read(byte[] buffer, int offset, int count) - { - if (PackageStream.Position < (long)FileInfo.OffsetInFile - || PackageStream.Position > (long)FileInfo.OffsetInFile + (long)FileInfo.SizeOnDisk) - { - throw new Exception("Stream at unexpected position while reading packaged file?"); - } - long readable = (long)FileInfo.SizeOnDisk - Position; - int bytesToRead = (readable < count) ? 
(int)readable : count; - return PackageStream.Read(buffer, offset, bytesToRead); - } +public class UncompressedPackagedFileStream : Stream +{ + private readonly Stream PackageStream; + private readonly PackagedFileInfo FileInfo; - public override long Seek(long offset, SeekOrigin origin) + public UncompressedPackagedFileStream(Stream packageStream, PackagedFileInfo fileInfo) + { + PackageStream = packageStream; + FileInfo = fileInfo; + PackageStream.Seek((long)fileInfo.OffsetInFile, SeekOrigin.Begin); + + if ((CompressionMethod)(FileInfo.Flags & 0x0F) != CompressionMethod.None) { - throw new NotSupportedException(); + throw new ArgumentException("We only support uncompressed files!"); } + } + public override bool CanRead { get { return true; } } + public override bool CanSeek { get { return false; } } - public override long Position + public override int Read(byte[] buffer, int offset, int count) + { + if (PackageStream.Position < (long)FileInfo.OffsetInFile + || PackageStream.Position > (long)FileInfo.OffsetInFile + (long)FileInfo.SizeOnDisk) { - get { return PackageStream.Position - (long)FileInfo.OffsetInFile; } - set { throw new NotSupportedException(); } + throw new Exception("Stream at unexpected position while reading packaged file?"); } - public override bool CanTimeout { get { return PackageStream.CanTimeout; } } - public override bool CanWrite { get { return false; } } - public override long Length { get { return (long)FileInfo.SizeOnDisk; } } - public override void SetLength(long value) { throw new NotSupportedException(); } - public override void Write(byte[] buffer, int offset, int count) { throw new NotSupportedException(); } - public override void Flush() { } + long readable = (long)FileInfo.SizeOnDisk - Position; + int bytesToRead = (readable < count) ? (int)readable : count; + return PackageStream.Read(buffer, offset, bytesToRead); } - public class PackagedFileInfo : AbstractFileInfo, IDisposable + public override long Seek(long offset, SeekOrigin origin) { - public UInt32 ArchivePart; - public UInt32 Crc; - public UInt32 Flags; - public UInt64 OffsetInFile; - public Stream PackageStream; - public UInt64 SizeOnDisk; - public UInt64 UncompressedSize; - public bool Solid; - public UInt32 SolidOffset; - public Stream SolidStream; - private Stream _uncompressedStream; - - public void Dispose() - { - ReleaseStream(); - } - - public override UInt64 Size() => (Flags & 0x0F) == 0 ? 
SizeOnDisk : UncompressedSize; - - public override UInt32 CRC() => Crc; - - public override Stream MakeStream() - { - if (IsDeletion()) - { - throw new InvalidOperationException("Cannot open file stream for a deleted file"); - } - - if (_uncompressedStream != null) - { - return _uncompressedStream; - } + throw new NotSupportedException(); + } - if ((CompressionMethod)(Flags & 0x0F) == CompressionMethod.None && !Solid) - { - // Use direct stream read for non-compressed files - _uncompressedStream = new UncompressedPackagedFileStream(PackageStream, this); - return _uncompressedStream; - } - if (SizeOnDisk > 0x7fffffff) - { - throw new InvalidDataException($"File '{Name}' is over 2GB ({SizeOnDisk} bytes), which is not supported yet!"); - } + public override long Position + { + get { return PackageStream.Position - (long)FileInfo.OffsetInFile; } + set { throw new NotSupportedException(); } + } - var compressed = new byte[SizeOnDisk]; + public override bool CanTimeout { get { return PackageStream.CanTimeout; } } + public override bool CanWrite { get { return false; } } + public override long Length { get { return (long)FileInfo.SizeOnDisk; } } + public override void SetLength(long value) { throw new NotSupportedException(); } + public override void Write(byte[] buffer, int offset, int count) { throw new NotSupportedException(); } + public override void Flush() { } +} - PackageStream.Seek((long)OffsetInFile, SeekOrigin.Begin); - int readSize = PackageStream.Read(compressed, 0, (int)SizeOnDisk); - if (readSize != (long)SizeOnDisk) - { - string msg = $"Failed to read {SizeOnDisk} bytes from archive (only got {readSize})"; - throw new InvalidDataException(msg); - } +public class PackagedFileInfo : AbstractFileInfo, IDisposable +{ + public UInt32 ArchivePart; + public UInt32 Crc; + public UInt32 Flags; + public UInt64 OffsetInFile; + public Stream PackageStream; + public UInt64 SizeOnDisk; + public UInt64 UncompressedSize; + public bool Solid; + public UInt32 SolidOffset; + public Stream SolidStream; + private Stream _uncompressedStream; + + public void Dispose() + { + ReleaseStream(); + } - if (Crc != 0) - { - UInt32 computedCrc = Crc32.Compute(compressed, 0); - if (computedCrc != Crc) - { - string msg = $"CRC check failed on file '{Name}', archive is possibly corrupted. Expected {Crc,8:X}, got {computedCrc,8:X}"; - throw new InvalidDataException(msg); - } - } + public override UInt64 Size() => (Flags & 0x0F) == 0 ? 
SizeOnDisk : UncompressedSize; - if (Solid) - { - SolidStream.Seek(SolidOffset, SeekOrigin.Begin); - byte[] uncompressed = new byte[UncompressedSize]; - SolidStream.Read(uncompressed, 0, (int)UncompressedSize); - _uncompressedStream = new MemoryStream(uncompressed); - } - else - { - byte[] uncompressed = BinUtils.Decompress(compressed, (int)Size(), (byte)Flags); - _uncompressedStream = new MemoryStream(uncompressed); - } + public override UInt32 CRC() => Crc; - return _uncompressedStream; + public override Stream MakeStream() + { + if (IsDeletion()) + { + throw new InvalidOperationException("Cannot open file stream for a deleted file"); } - public override void ReleaseStream() + if (_uncompressedStream != null) { - if (_uncompressedStream == null) - { - return; - } - - _uncompressedStream.Dispose(); - _uncompressedStream = null; + return _uncompressedStream; } - internal static PackagedFileInfo CreateFromEntry(FileEntry13 entry, Stream dataStream) + if ((CompressionMethod)(Flags & 0x0F) == CompressionMethod.None && !Solid) { - var info = new PackagedFileInfo - { - PackageStream = dataStream, - OffsetInFile = entry.OffsetInFile, - SizeOnDisk = entry.SizeOnDisk, - UncompressedSize = entry.UncompressedSize, - ArchivePart = entry.ArchivePart, - Flags = entry.Flags, - Crc = entry.Crc, - Solid = false - }; - - int nameLen; - for (nameLen = 0; nameLen < entry.Name.Length && entry.Name[nameLen] != 0; nameLen++) - { - } - info.Name = Encoding.UTF8.GetString(entry.Name, 0, nameLen); - - uint compressionMethod = entry.Flags & 0x0F; - if (compressionMethod > 2 || (entry.Flags & ~0x7F) != 0) - { - string msg = $"File '{info.Name}' has unsupported flags: {entry.Flags}"; - throw new InvalidDataException(msg); - } - - return info; + // Use direct stream read for non-compressed files + _uncompressedStream = new UncompressedPackagedFileStream(PackageStream, this); + return _uncompressedStream; } - internal static PackagedFileInfo CreateFromEntry(FileEntry15 entry, Stream dataStream) + if (SizeOnDisk > 0x7fffffff) { - var info = new PackagedFileInfo - { - PackageStream = dataStream, - OffsetInFile = entry.OffsetInFile, - SizeOnDisk = entry.SizeOnDisk, - UncompressedSize = entry.UncompressedSize, - ArchivePart = entry.ArchivePart, - Flags = entry.Flags, - Crc = entry.Crc, - Solid = false - }; - - int nameLen; - for (nameLen = 0; nameLen < entry.Name.Length && entry.Name[nameLen] != 0; nameLen++) - { - } - info.Name = Encoding.UTF8.GetString(entry.Name, 0, nameLen); + throw new InvalidDataException($"File '{Name}' is over 2GB ({SizeOnDisk} bytes), which is not supported yet!"); + } - uint compressionMethod = entry.Flags & 0x0F; - if (compressionMethod > 2 || (entry.Flags & ~0x7F) != 0) - { - string msg = $"File '{info.Name}' has unsupported flags: {entry.Flags}"; - throw new InvalidDataException(msg); - } + var compressed = new byte[SizeOnDisk]; - return info; + PackageStream.Seek((long)OffsetInFile, SeekOrigin.Begin); + int readSize = PackageStream.Read(compressed, 0, (int)SizeOnDisk); + if (readSize != (long)SizeOnDisk) + { + string msg = $"Failed to read {SizeOnDisk} bytes from archive (only got {readSize})"; + throw new InvalidDataException(msg); } - internal static PackagedFileInfo CreateFromEntry(FileEntry18 entry, Stream dataStream) + if (Crc != 0) { - var info = new PackagedFileInfo - { - PackageStream = dataStream, - OffsetInFile = entry.OffsetInFile1 | ((ulong)entry.OffsetInFile2 << 32), - SizeOnDisk = entry.SizeOnDisk, - UncompressedSize = entry.UncompressedSize, - ArchivePart = entry.ArchivePart, - 
Flags = entry.Flags, - Crc = 0, - Solid = false - }; - - int nameLen; - for (nameLen = 0; nameLen < entry.Name.Length && entry.Name[nameLen] != 0; nameLen++) - { - } - info.Name = Encoding.UTF8.GetString(entry.Name, 0, nameLen); - - uint compressionMethod = (uint)entry.Flags & 0x0F; - if (compressionMethod > 2 || ((uint)entry.Flags & ~0x7F) != 0) + UInt32 computedCrc = Crc32.Compute(compressed, 0); + if (computedCrc != Crc) { - string msg = $"File '{info.Name}' has unsupported flags: {entry.Flags}"; + string msg = $"CRC check failed on file '{Name}', archive is possibly corrupted. Expected {Crc,8:X}, got {computedCrc,8:X}"; throw new InvalidDataException(msg); } - - return info; } - internal static PackagedFileInfo CreateSolidFromEntry(FileEntry13 entry, Stream dataStream, uint solidOffset, Stream solidStream) + if (Solid) { - var info = CreateFromEntry(entry, dataStream); - info.Solid = true; - info.SolidOffset = solidOffset; - info.SolidStream = solidStream; - return info; + SolidStream.Seek(SolidOffset, SeekOrigin.Begin); + byte[] uncompressed = new byte[UncompressedSize]; + SolidStream.Read(uncompressed, 0, (int)UncompressedSize); + _uncompressedStream = new MemoryStream(uncompressed); } - - internal static PackagedFileInfo CreateFromEntry(FileEntry7 entry, Stream dataStream) + else { - var info = new PackagedFileInfo - { - PackageStream = dataStream - }; + byte[] uncompressed = BinUtils.Decompress(compressed, (int)Size(), (byte)Flags); + _uncompressedStream = new MemoryStream(uncompressed); + } - int nameLen; - for (nameLen = 0; nameLen < entry.Name.Length && entry.Name[nameLen] != 0; nameLen++) - { - } - info.Name = Encoding.UTF8.GetString(entry.Name, 0, nameLen); + return _uncompressedStream; + } - info.OffsetInFile = entry.OffsetInFile; - info.SizeOnDisk = entry.SizeOnDisk; - info.UncompressedSize = entry.UncompressedSize; - info.ArchivePart = entry.ArchivePart; - info.Crc = 0; + public override void ReleaseStream() + { + if (_uncompressedStream == null) + { + return; + } - info.Flags = entry.UncompressedSize > 0 ? BinUtils.MakeCompressionFlags(CompressionMethod.Zlib, LSCompressionLevel.DefaultCompression) : (uint) 0; + _uncompressedStream.Dispose(); + _uncompressedStream = null; + } - return info; + internal static PackagedFileInfo CreateFromEntry(FileEntry13 entry, Stream dataStream) + { + var info = new PackagedFileInfo + { + PackageStream = dataStream, + OffsetInFile = entry.OffsetInFile, + SizeOnDisk = entry.SizeOnDisk, + UncompressedSize = entry.UncompressedSize, + ArchivePart = entry.ArchivePart, + Flags = entry.Flags, + Crc = entry.Crc, + Solid = false + }; + + int nameLen; + for (nameLen = 0; nameLen < entry.Name.Length && entry.Name[nameLen] != 0; nameLen++) + { } + info.Name = Encoding.UTF8.GetString(entry.Name, 0, nameLen); - internal FileEntry7 MakeEntryV7() + uint compressionMethod = entry.Flags & 0x0F; + if (compressionMethod > 2 || (entry.Flags & ~0x7F) != 0) { - var entry = new FileEntry7 - { - Name = new byte[256] - }; - byte[] encodedName = Encoding.UTF8.GetBytes(Name.Replace('\\', '/')); - Array.Copy(encodedName, entry.Name, encodedName.Length); - - entry.OffsetInFile = (uint)OffsetInFile; - entry.SizeOnDisk = (uint)SizeOnDisk; - entry.UncompressedSize = (Flags & 0x0F) == 0 ? 
0 : (uint)UncompressedSize; - entry.ArchivePart = ArchivePart; - return entry; + string msg = $"File '{info.Name}' has unsupported flags: {entry.Flags}"; + throw new InvalidDataException(msg); } - internal FileEntry13 MakeEntryV13() + return info; + } + + internal static PackagedFileInfo CreateFromEntry(FileEntry15 entry, Stream dataStream) + { + var info = new PackagedFileInfo + { + PackageStream = dataStream, + OffsetInFile = entry.OffsetInFile, + SizeOnDisk = entry.SizeOnDisk, + UncompressedSize = entry.UncompressedSize, + ArchivePart = entry.ArchivePart, + Flags = entry.Flags, + Crc = entry.Crc, + Solid = false + }; + + int nameLen; + for (nameLen = 0; nameLen < entry.Name.Length && entry.Name[nameLen] != 0; nameLen++) { - var entry = new FileEntry13 - { - Name = new byte[256] - }; - byte[] encodedName = Encoding.UTF8.GetBytes(Name.Replace('\\', '/')); - Array.Copy(encodedName, entry.Name, encodedName.Length); - - entry.OffsetInFile = (uint)OffsetInFile; - entry.SizeOnDisk = (uint)SizeOnDisk; - entry.UncompressedSize = (Flags & 0x0F) == 0 ? 0 : (uint)UncompressedSize; - entry.ArchivePart = ArchivePart; - entry.Flags = Flags; - entry.Crc = Crc; - return entry; } + info.Name = Encoding.UTF8.GetString(entry.Name, 0, nameLen); - internal FileEntry15 MakeEntryV15() + uint compressionMethod = entry.Flags & 0x0F; + if (compressionMethod > 2 || (entry.Flags & ~0x7F) != 0) { - var entry = new FileEntry15 - { - Name = new byte[256], - OffsetInFile = OffsetInFile, - SizeOnDisk = SizeOnDisk, - UncompressedSize = (Flags & 0x0F) == 0 ? 0 : UncompressedSize, - Flags = Flags, - Crc = Crc, - ArchivePart = ArchivePart, - Unknown2 = 0 - }; - byte[] encodedName = Encoding.UTF8.GetBytes(Name.Replace('\\', '/')); - Array.Copy(encodedName, entry.Name, encodedName.Length); - - return entry; + string msg = $"File '{info.Name}' has unsupported flags: {entry.Flags}"; + throw new InvalidDataException(msg); } - internal FileEntry18 MakeEntryV18() + return info; + } + + internal static PackagedFileInfo CreateFromEntry(FileEntry18 entry, Stream dataStream) + { + var info = new PackagedFileInfo + { + PackageStream = dataStream, + OffsetInFile = entry.OffsetInFile1 | ((ulong)entry.OffsetInFile2 << 32), + SizeOnDisk = entry.SizeOnDisk, + UncompressedSize = entry.UncompressedSize, + ArchivePart = entry.ArchivePart, + Flags = entry.Flags, + Crc = 0, + Solid = false + }; + + int nameLen; + for (nameLen = 0; nameLen < entry.Name.Length && entry.Name[nameLen] != 0; nameLen++) { - var entry = new FileEntry18 - { - Name = new byte[256], - OffsetInFile1 = (uint)(OffsetInFile & 0xffffffff), - OffsetInFile2 = (ushort)((OffsetInFile >> 32) & 0xffff), - SizeOnDisk = (uint)SizeOnDisk, - UncompressedSize = (Flags & 0x0F) == 0 ? 
0 : (uint)UncompressedSize, - Flags = (byte)Flags, - ArchivePart = (byte)ArchivePart - }; - byte[] encodedName = Encoding.UTF8.GetBytes(Name.Replace('\\', '/')); - Array.Copy(encodedName, entry.Name, encodedName.Length); - - return entry; } + info.Name = Encoding.UTF8.GetString(entry.Name, 0, nameLen); - public override bool IsDeletion() + uint compressionMethod = (uint)entry.Flags & 0x0F; + if (compressionMethod > 2 || ((uint)entry.Flags & ~0x7F) != 0) { - return OffsetInFile == 0xdeadbeefdeadbeef; + string msg = $"File '{info.Name}' has unsupported flags: {entry.Flags}"; + throw new InvalidDataException(msg); } + + return info; } - public class FilesystemFileInfo : AbstractFileInfo, IDisposable + internal static PackagedFileInfo CreateSolidFromEntry(FileEntry13 entry, Stream dataStream, uint solidOffset, Stream solidStream) { - public long CachedSize; - public string FilesystemPath; - private FileStream _stream; + var info = CreateFromEntry(entry, dataStream); + info.Solid = true; + info.SolidOffset = solidOffset; + info.SolidStream = solidStream; + return info; + } - public void Dispose() + internal static PackagedFileInfo CreateFromEntry(FileEntry7 entry, Stream dataStream) + { + var info = new PackagedFileInfo + { + PackageStream = dataStream + }; + + int nameLen; + for (nameLen = 0; nameLen < entry.Name.Length && entry.Name[nameLen] != 0; nameLen++) { - ReleaseStream(); } + info.Name = Encoding.UTF8.GetString(entry.Name, 0, nameLen); - public override UInt64 Size() => (UInt64) CachedSize; + info.OffsetInFile = entry.OffsetInFile; + info.SizeOnDisk = entry.SizeOnDisk; + info.UncompressedSize = entry.UncompressedSize; + info.ArchivePart = entry.ArchivePart; + info.Crc = 0; - public override UInt32 CRC() => throw new NotImplementedException("!"); + info.Flags = entry.UncompressedSize > 0 ? BinUtils.MakeCompressionFlags(CompressionMethod.Zlib, LSCompressionLevel.DefaultCompression) : (uint) 0; - public override Stream MakeStream() => _stream ??= File.Open(FilesystemPath, FileMode.Open, FileAccess.Read, FileShare.Read); + return info; + } - public override void ReleaseStream() + internal FileEntry7 MakeEntryV7() + { + var entry = new FileEntry7 { - _stream?.Dispose(); - _stream = null; - } + Name = new byte[256] + }; + byte[] encodedName = Encoding.UTF8.GetBytes(Name.Replace('\\', '/')); + Array.Copy(encodedName, entry.Name, encodedName.Length); + + entry.OffsetInFile = (uint)OffsetInFile; + entry.SizeOnDisk = (uint)SizeOnDisk; + entry.UncompressedSize = (Flags & 0x0F) == 0 ? 0 : (uint)UncompressedSize; + entry.ArchivePart = ArchivePart; + return entry; + } - public static FilesystemFileInfo CreateFromEntry(string filesystemPath, string name) + internal FileEntry13 MakeEntryV13() + { + var entry = new FileEntry13 { - var info = new FilesystemFileInfo - { - Name = name, - FilesystemPath = filesystemPath - }; + Name = new byte[256] + }; + byte[] encodedName = Encoding.UTF8.GetBytes(Name.Replace('\\', '/')); + Array.Copy(encodedName, entry.Name, encodedName.Length); + + entry.OffsetInFile = (uint)OffsetInFile; + entry.SizeOnDisk = (uint)SizeOnDisk; + entry.UncompressedSize = (Flags & 0x0F) == 0 ? 
0 : (uint)UncompressedSize; + entry.ArchivePart = ArchivePart; + entry.Flags = Flags; + entry.Crc = Crc; + return entry; + } - var fsInfo = new FileInfo(filesystemPath); - info.CachedSize = fsInfo.Length; - return info; - } + internal FileEntry15 MakeEntryV15() + { + var entry = new FileEntry15 + { + Name = new byte[256], + OffsetInFile = OffsetInFile, + SizeOnDisk = SizeOnDisk, + UncompressedSize = (Flags & 0x0F) == 0 ? 0 : UncompressedSize, + Flags = Flags, + Crc = Crc, + ArchivePart = ArchivePart, + Unknown2 = 0 + }; + byte[] encodedName = Encoding.UTF8.GetBytes(Name.Replace('\\', '/')); + Array.Copy(encodedName, entry.Name, encodedName.Length); + + return entry; + } - public override bool IsDeletion() + internal FileEntry18 MakeEntryV18() + { + var entry = new FileEntry18 { - return false; - } + Name = new byte[256], + OffsetInFile1 = (uint)(OffsetInFile & 0xffffffff), + OffsetInFile2 = (ushort)((OffsetInFile >> 32) & 0xffff), + SizeOnDisk = (uint)SizeOnDisk, + UncompressedSize = (Flags & 0x0F) == 0 ? 0 : (uint)UncompressedSize, + Flags = (byte)Flags, + ArchivePart = (byte)ArchivePart + }; + byte[] encodedName = Encoding.UTF8.GetBytes(Name.Replace('\\', '/')); + Array.Copy(encodedName, entry.Name, encodedName.Length); + + return entry; } - public class StreamFileInfo : AbstractFileInfo + public override bool IsDeletion() { - public Stream Stream; + return OffsetInFile == 0xdeadbeefdeadbeef; + } +} - public override UInt64 Size() => (UInt64) Stream.Length; +public class FilesystemFileInfo : AbstractFileInfo, IDisposable +{ + public long CachedSize; + public string FilesystemPath; + private FileStream _stream; - public override UInt32 CRC() => throw new NotImplementedException("!"); + public void Dispose() + { + ReleaseStream(); + } - public override Stream MakeStream() => Stream; + public override UInt64 Size() => (UInt64) CachedSize; - public override void ReleaseStream() - { - } + public override UInt32 CRC() => throw new NotImplementedException("!"); - public static StreamFileInfo CreateFromStream(Stream stream, string name) - { - var info = new StreamFileInfo - { - Name = name, - Stream = stream - }; - return info; - } + public override Stream MakeStream() => _stream ??= File.Open(FilesystemPath, FileMode.Open, FileAccess.Read, FileShare.Read); - public override bool IsDeletion() + public override void ReleaseStream() + { + _stream?.Dispose(); + _stream = null; + } + + public static FilesystemFileInfo CreateFromEntry(string filesystemPath, string name) + { + var info = new FilesystemFileInfo { - return false; - } + Name = name, + FilesystemPath = filesystemPath + }; + + var fsInfo = new FileInfo(filesystemPath); + info.CachedSize = fsInfo.Length; + return info; } - public class Package + public override bool IsDeletion() { - public const PackageVersion CurrentVersion = PackageVersion.V18; + return false; + } +} + +public class StreamFileInfo : AbstractFileInfo +{ + public Stream Stream; + + public override UInt64 Size() => (UInt64) Stream.Length; - public readonly static byte[] Signature = [ 0x4C, 0x53, 0x50, 0x4B ]; + public override UInt32 CRC() => throw new NotImplementedException("!"); - public PackageMetadata Metadata = new(); - public List Files = []; - public PackageVersion Version; + public override Stream MakeStream() => Stream; - public static string MakePartFilename(string path, int part) + public override void ReleaseStream() + { + } + + public static StreamFileInfo CreateFromStream(Stream stream, string name) + { + var info = new StreamFileInfo { - string dirName = 
Path.GetDirectoryName(path); - string baseName = Path.GetFileNameWithoutExtension(path); - string extension = Path.GetExtension(path); - return $"{dirName}/{baseName}_{part}{extension}"; - } + Name = name, + Stream = stream + }; + return info; } - public class PackageCreationOptions + public override bool IsDeletion() { - public PackageVersion Version = PackageVersion.V16; - public CompressionMethod Compression = CompressionMethod.None; - public bool FastCompression = true; - public PackageFlags Flags = 0; - public byte Priority = 0; + return false; } +} + +public class Package +{ + public const PackageVersion CurrentVersion = PackageVersion.V18; + + public readonly static byte[] Signature = [ 0x4C, 0x53, 0x50, 0x4B ]; - public class Packager + public PackageMetadata Metadata = new(); + public List Files = []; + public PackageVersion Version; + + public static string MakePartFilename(string path, int part) { - public delegate void ProgressUpdateDelegate(string status, long numerator, long denominator, AbstractFileInfo file); + string dirName = Path.GetDirectoryName(path); + string baseName = Path.GetFileNameWithoutExtension(path); + string extension = Path.GetExtension(path); + return $"{dirName}/{baseName}_{part}{extension}"; + } +} - public ProgressUpdateDelegate ProgressUpdate = delegate { }; +public class PackageCreationOptions +{ + public PackageVersion Version = PackageVersion.V16; + public CompressionMethod Compression = CompressionMethod.None; + public bool FastCompression = true; + public PackageFlags Flags = 0; + public byte Priority = 0; +} - private void WriteProgressUpdate(AbstractFileInfo file, long numerator, long denominator) - { - ProgressUpdate(file.Name, numerator, denominator, file); - } +public class Packager +{ + public delegate void ProgressUpdateDelegate(string status, long numerator, long denominator, AbstractFileInfo file); - public void UncompressPackage(Package package, string outputPath, Func filter = null) + public ProgressUpdateDelegate ProgressUpdate = delegate { }; + + private void WriteProgressUpdate(AbstractFileInfo file, long numerator, long denominator) + { + ProgressUpdate(file.Name, numerator, denominator, file); + } + + public void UncompressPackage(Package package, string outputPath, Func filter = null) + { + if (outputPath.Length > 0 && !outputPath.EndsWith(Path.DirectorySeparatorChar.ToString(), StringComparison.InvariantCultureIgnoreCase)) { - if (outputPath.Length > 0 && !outputPath.EndsWith(Path.DirectorySeparatorChar.ToString(), StringComparison.InvariantCultureIgnoreCase)) - { - outputPath += Path.DirectorySeparatorChar; - } + outputPath += Path.DirectorySeparatorChar; + } - List files = package.Files; + List files = package.Files; - if (filter != null) - { - files = files.FindAll(obj => filter(obj)); - } + if (filter != null) + { + files = files.FindAll(obj => filter(obj)); + } - long totalSize = files.Sum(p => (long)p.Size()); - long currentSize = 0; + long totalSize = files.Sum(p => (long)p.Size()); + long currentSize = 0; - var buffer = new byte[32768]; - foreach (AbstractFileInfo file in files) - { - ProgressUpdate(file.Name, currentSize, totalSize, file); - currentSize += (long)file.Size(); + var buffer = new byte[32768]; + foreach (AbstractFileInfo file in files) + { + ProgressUpdate(file.Name, currentSize, totalSize, file); + currentSize += (long)file.Size(); - if (file.IsDeletion()) continue; + if (file.IsDeletion()) continue; - string outPath = Path.Join(outputPath, file.Name); + string outPath = Path.Join(outputPath, file.Name); - 
FileManager.TryToCreateDirectory(outPath); + FileManager.TryToCreateDirectory(outPath); - Stream inStream = file.MakeStream(); + Stream inStream = file.MakeStream(); - try - { - using var inReader = new BinaryReader(inStream); - using var outFile = File.Open(outPath, FileMode.Create, FileAccess.Write); - int read; - while ((read = inReader.Read(buffer, 0, buffer.Length)) > 0) - { - outFile.Write(buffer, 0, read); - } - } - finally + try + { + using var inReader = new BinaryReader(inStream); + using var outFile = File.Open(outPath, FileMode.Create, FileAccess.Write); + int read; + while ((read = inReader.Read(buffer, 0, buffer.Length)) > 0) { - file.ReleaseStream(); + outFile.Write(buffer, 0, read); } } + finally + { + file.ReleaseStream(); + } } + } - public void UncompressPackage(string packagePath, string outputPath, Func filter = null) - { - ProgressUpdate("Reading package headers ...", 0, 1, null); - using var reader = new PackageReader(packagePath); - Package package = reader.Read(); - UncompressPackage(package, outputPath, filter); - } + public void UncompressPackage(string packagePath, string outputPath, Func filter = null) + { + ProgressUpdate("Reading package headers ...", 0, 1, null); + using var reader = new PackageReader(packagePath); + Package package = reader.Read(); + UncompressPackage(package, outputPath, filter); + } - private static Package CreatePackageFromPath(string path) - { - var package = new Package(); + private static Package CreatePackageFromPath(string path) + { + var package = new Package(); - if (!path.EndsWith(Path.DirectorySeparatorChar.ToString(), StringComparison.InvariantCultureIgnoreCase)) - { - path += Path.DirectorySeparatorChar; - } + if (!path.EndsWith(Path.DirectorySeparatorChar.ToString(), StringComparison.InvariantCultureIgnoreCase)) + { + path += Path.DirectorySeparatorChar; + } - Dictionary files = Directory.EnumerateFiles(path, "*.*", SearchOption.AllDirectories) - .ToDictionary(k => k.Replace(path, string.Empty), v => v); + Dictionary files = Directory.EnumerateFiles(path, "*.*", SearchOption.AllDirectories) + .ToDictionary(k => k.Replace(path, string.Empty), v => v); - foreach (KeyValuePair file in files) + foreach (KeyValuePair file in files) { FilesystemFileInfo fileInfo = FilesystemFileInfo.CreateFromEntry(file.Value, file.Key); package.Files.Add(fileInfo); @@ -704,24 +704,23 @@ private static Package CreatePackageFromPath(string path) } return package; - } - - public void CreatePackage(string packagePath, string inputPath, PackageCreationOptions options) - { - FileManager.TryToCreateDirectory(packagePath); - - ProgressUpdate("Enumerating files ...", 0, 1, null); - Package package = CreatePackageFromPath(inputPath); - package.Metadata.Flags = options.Flags; - package.Metadata.Priority = options.Priority; + } - ProgressUpdate("Creating archive ...", 0, 1, null); - using var writer = new PackageWriter(package, packagePath); - writer.WriteProgress += WriteProgressUpdate; - writer.Version = options.Version; - writer.Compression = options.Compression; - writer.LSCompressionLevel = options.FastCompression ? 
LSCompressionLevel.FastCompression : LSCompressionLevel.DefaultCompression; - writer.Write(); - } + public void CreatePackage(string packagePath, string inputPath, PackageCreationOptions options) + { + FileManager.TryToCreateDirectory(packagePath); + + ProgressUpdate("Enumerating files ...", 0, 1, null); + Package package = CreatePackageFromPath(inputPath); + package.Metadata.Flags = options.Flags; + package.Metadata.Priority = options.Priority; + + ProgressUpdate("Creating archive ...", 0, 1, null); + using var writer = new PackageWriter(package, packagePath); + writer.WriteProgress += WriteProgressUpdate; + writer.Version = options.Version; + writer.Compression = options.Compression; + writer.LSCompressionLevel = options.FastCompression ? LSCompressionLevel.FastCompression : LSCompressionLevel.DefaultCompression; + writer.Write(); } } diff --git a/LSLib/LS/PackageReader.cs b/LSLib/LS/PackageReader.cs index 2d235663..67b30d8b 100644 --- a/LSLib/LS/PackageReader.cs +++ b/LSLib/LS/PackageReader.cs @@ -6,386 +6,385 @@ using LZ4; using LSLib.LS.Enums; -namespace LSLib.LS +namespace LSLib.LS; + +public class NotAPackageException : Exception { - public class NotAPackageException : Exception + public NotAPackageException() { - public NotAPackageException() - { - } + } - public NotAPackageException(string message) : base(message) - { - } + public NotAPackageException(string message) : base(message) + { + } - public NotAPackageException(string message, Exception innerException) : base(message, innerException) + public NotAPackageException(string message, Exception innerException) : base(message, innerException) + { + } +} + +public class PackageReader(string path, bool metadataOnly = false) : IDisposable +{ + private Stream[] _streams; + + public void Dispose() + { + foreach (Stream stream in _streams ?? []) { + stream?.Dispose(); } } - public class PackageReader(string path, bool metadataOnly = false) : IDisposable + private void OpenStreams(FileStream mainStream, int numParts) { - private Stream[] _streams; + // Open a stream for each file chunk + _streams = new Stream[numParts]; + _streams[0] = mainStream; - public void Dispose() + for (var part = 1; part < numParts; part++) { - foreach (Stream stream in _streams ?? 
[]) - { - stream?.Dispose(); - } + string partPath = Package.MakePartFilename(path, part); + _streams[part] = File.Open(partPath, FileMode.Open, FileAccess.Read, FileShare.Read); } + } - private void OpenStreams(FileStream mainStream, int numParts) - { - // Open a stream for each file chunk - _streams = new Stream[numParts]; - _streams[0] = mainStream; + private Package ReadPackageV7(FileStream mainStream, BinaryReader reader) + { + var package = new Package(); + mainStream.Seek(0, SeekOrigin.Begin); + var header = BinUtils.ReadStruct(reader); + + package.Metadata.Flags = 0; + package.Metadata.Priority = 0; + package.Version = PackageVersion.V7; + + if (metadataOnly) return package; - for (var part = 1; part < numParts; part++) + OpenStreams(mainStream, (int) header.NumParts); + for (uint i = 0; i < header.NumFiles; i++) + { + var entry = BinUtils.ReadStruct(reader); + if (entry.ArchivePart == 0) { - string partPath = Package.MakePartFilename(path, part); - _streams[part] = File.Open(partPath, FileMode.Open, FileAccess.Read, FileShare.Read); + entry.OffsetInFile += header.DataOffset; } + package.Files.Add(PackagedFileInfo.CreateFromEntry(entry, _streams[entry.ArchivePart])); } - private Package ReadPackageV7(FileStream mainStream, BinaryReader reader) - { - var package = new Package(); - mainStream.Seek(0, SeekOrigin.Begin); - var header = BinUtils.ReadStruct(reader); + return package; + } + + private Package ReadPackageV10(FileStream mainStream, BinaryReader reader) + { + var package = new Package(); + mainStream.Seek(4, SeekOrigin.Begin); + var header = BinUtils.ReadStruct(reader); - package.Metadata.Flags = 0; - package.Metadata.Priority = 0; - package.Version = PackageVersion.V7; + package.Metadata.Flags = (PackageFlags)header.Flags; + package.Metadata.Priority = header.Priority; + package.Version = PackageVersion.V10; - if (metadataOnly) return package; + if (metadataOnly) return package; - OpenStreams(mainStream, (int) header.NumParts); - for (uint i = 0; i < header.NumFiles; i++) + OpenStreams(mainStream, header.NumParts); + for (uint i = 0; i < header.NumFiles; i++) + { + var entry = BinUtils.ReadStruct(reader); + if (entry.ArchivePart == 0) { - var entry = BinUtils.ReadStruct(reader); - if (entry.ArchivePart == 0) - { - entry.OffsetInFile += header.DataOffset; - } - package.Files.Add(PackagedFileInfo.CreateFromEntry(entry, _streams[entry.ArchivePart])); + entry.OffsetInFile += header.DataOffset; } - return package; + // Add missing compression level flags + entry.Flags = (entry.Flags & 0x0f) | 0x20; + package.Files.Add(PackagedFileInfo.CreateFromEntry(entry, _streams[entry.ArchivePart])); } - private Package ReadPackageV10(FileStream mainStream, BinaryReader reader) - { - var package = new Package(); - mainStream.Seek(4, SeekOrigin.Begin); - var header = BinUtils.ReadStruct(reader); + return package; + } - package.Metadata.Flags = (PackageFlags)header.Flags; - package.Metadata.Priority = header.Priority; - package.Version = PackageVersion.V10; + private Package ReadPackageV13(FileStream mainStream, BinaryReader reader) + { + var package = new Package(); + var header = BinUtils.ReadStruct(reader); - if (metadataOnly) return package; + if (header.Version != (ulong) PackageVersion.V13) + { + string msg = $"Unsupported package version {header.Version}; this package layout is only supported for {PackageVersion.V13}"; + throw new InvalidDataException(msg); + } - OpenStreams(mainStream, header.NumParts); - for (uint i = 0; i < header.NumFiles; i++) - { - var entry = 
BinUtils.ReadStruct(reader); - if (entry.ArchivePart == 0) - { - entry.OffsetInFile += header.DataOffset; - } + package.Metadata.Flags = (PackageFlags)header.Flags; + package.Metadata.Priority = header.Priority; + package.Version = PackageVersion.V13; - // Add missing compression level flags - entry.Flags = (entry.Flags & 0x0f) | 0x20; - package.Files.Add(PackagedFileInfo.CreateFromEntry(entry, _streams[entry.ArchivePart])); - } + if (metadataOnly) return package; - return package; - } + OpenStreams(mainStream, header.NumParts); + mainStream.Seek(header.FileListOffset, SeekOrigin.Begin); + int numFiles = reader.ReadInt32(); + int fileBufferSize = Marshal.SizeOf(typeof(FileEntry13)) * numFiles; + byte[] compressedFileList = reader.ReadBytes((int) header.FileListSize - 4); - private Package ReadPackageV13(FileStream mainStream, BinaryReader reader) + var uncompressedList = new byte[fileBufferSize]; + int uncompressedSize = LZ4Codec.Decode(compressedFileList, 0, compressedFileList.Length, uncompressedList, 0, fileBufferSize, true); + if (uncompressedSize != fileBufferSize) { - var package = new Package(); - var header = BinUtils.ReadStruct(reader); + string msg = $"LZ4 compressor disagrees about the size of file headers; expected {fileBufferSize}, got {uncompressedSize}"; + throw new InvalidDataException(msg); + } - if (header.Version != (ulong) PackageVersion.V13) - { - string msg = $"Unsupported package version {header.Version}; this package layout is only supported for {PackageVersion.V13}"; - throw new InvalidDataException(msg); - } + var ms = new MemoryStream(uncompressedList); + var msr = new BinaryReader(ms); - package.Metadata.Flags = (PackageFlags)header.Flags; - package.Metadata.Priority = header.Priority; - package.Version = PackageVersion.V13; + var entries = new FileEntry13[numFiles]; + BinUtils.ReadStructs(msr, entries); - if (metadataOnly) return package; + if ((package.Metadata.Flags & PackageFlags.Solid) == PackageFlags.Solid && numFiles > 0) + { + // Calculate compressed frame offset and bounds + uint totalUncompressedSize = 0; + uint totalSizeOnDisk = 0; + uint firstOffset = 0xffffffff; + uint lastOffset = 0; - OpenStreams(mainStream, header.NumParts); - mainStream.Seek(header.FileListOffset, SeekOrigin.Begin); - int numFiles = reader.ReadInt32(); - int fileBufferSize = Marshal.SizeOf(typeof(FileEntry13)) * numFiles; - byte[] compressedFileList = reader.ReadBytes((int) header.FileListSize - 4); + foreach (var entry in entries) + { + totalUncompressedSize += entry.UncompressedSize; + totalSizeOnDisk += entry.SizeOnDisk; + if (entry.OffsetInFile < firstOffset) + { + firstOffset = entry.OffsetInFile; + } + if (entry.OffsetInFile + entry.SizeOnDisk > lastOffset) + { + lastOffset = entry.OffsetInFile + entry.SizeOnDisk; + } + } - var uncompressedList = new byte[fileBufferSize]; - int uncompressedSize = LZ4Codec.Decode(compressedFileList, 0, compressedFileList.Length, uncompressedList, 0, fileBufferSize, true); - if (uncompressedSize != fileBufferSize) + if (firstOffset != 7 || lastOffset - firstOffset != totalSizeOnDisk) { - string msg = $"LZ4 compressor disagrees about the size of file headers; expected {fileBufferSize}, got {uncompressedSize}"; + string msg = $"Incorrectly compressed solid archive; offsets {firstOffset}/{lastOffset}, bytes {totalSizeOnDisk}"; throw new InvalidDataException(msg); } - var ms = new MemoryStream(uncompressedList); - var msr = new BinaryReader(ms); + // Decompress all files as a single frame (solid) + byte[] frame = new byte[lastOffset]; + 
mainStream.Seek(0, SeekOrigin.Begin); + mainStream.Read(frame, 0, (int)lastOffset); - var entries = new FileEntry13[numFiles]; - BinUtils.ReadStructs(msr, entries); + byte[] decompressed = Native.LZ4FrameCompressor.Decompress(frame); + var decompressedStream = new MemoryStream(decompressed); - if ((package.Metadata.Flags & PackageFlags.Solid) == PackageFlags.Solid && numFiles > 0) + // Update offsets to point to the decompressed chunk + uint offset = 7; + uint compressedOffset = 0; + foreach (var entry in entries) { - // Calculate compressed frame offset and bounds - uint totalUncompressedSize = 0; - uint totalSizeOnDisk = 0; - uint firstOffset = 0xffffffff; - uint lastOffset = 0; - - foreach (var entry in entries) + if (entry.OffsetInFile != offset) { - totalUncompressedSize += entry.UncompressedSize; - totalSizeOnDisk += entry.SizeOnDisk; - if (entry.OffsetInFile < firstOffset) - { - firstOffset = entry.OffsetInFile; - } - if (entry.OffsetInFile + entry.SizeOnDisk > lastOffset) - { - lastOffset = entry.OffsetInFile + entry.SizeOnDisk; - } + throw new InvalidDataException("File list in solid archive not contiguous"); } - if (firstOffset != 7 || lastOffset - firstOffset != totalSizeOnDisk) - { - string msg = $"Incorrectly compressed solid archive; offsets {firstOffset}/{lastOffset}, bytes {totalSizeOnDisk}"; - throw new InvalidDataException(msg); - } - - // Decompress all files as a single frame (solid) - byte[] frame = new byte[lastOffset]; - mainStream.Seek(0, SeekOrigin.Begin); - mainStream.Read(frame, 0, (int)lastOffset); - - byte[] decompressed = Native.LZ4FrameCompressor.Decompress(frame); - var decompressedStream = new MemoryStream(decompressed); - - // Update offsets to point to the decompressed chunk - uint offset = 7; - uint compressedOffset = 0; - foreach (var entry in entries) - { - if (entry.OffsetInFile != offset) - { - throw new InvalidDataException("File list in solid archive not contiguous"); - } - - var file = PackagedFileInfo.CreateSolidFromEntry(entry, _streams[entry.ArchivePart], compressedOffset, decompressedStream); - package.Files.Add(file); + var file = PackagedFileInfo.CreateSolidFromEntry(entry, _streams[entry.ArchivePart], compressedOffset, decompressedStream); + package.Files.Add(file); - offset += entry.SizeOnDisk; - compressedOffset += entry.UncompressedSize; - } + offset += entry.SizeOnDisk; + compressedOffset += entry.UncompressedSize; } - else + } + else + { + foreach (var entry in entries) { - foreach (var entry in entries) - { - package.Files.Add(PackagedFileInfo.CreateFromEntry(entry, _streams[entry.ArchivePart])); - } + package.Files.Add(PackagedFileInfo.CreateFromEntry(entry, _streams[entry.ArchivePart])); } - - return package; } - private void ReadFileListV15(BinaryReader reader, Package package) + return package; + } + + private void ReadFileListV15(BinaryReader reader, Package package) + { + int numFiles = reader.ReadInt32(); + int compressedSize = reader.ReadInt32(); + byte[] compressedFileList = reader.ReadBytes(compressedSize); + + int fileBufferSize = Marshal.SizeOf(typeof(FileEntry15)) * numFiles; + var uncompressedList = new byte[fileBufferSize]; + int uncompressedSize = LZ4Codec.Decode(compressedFileList, 0, compressedFileList.Length, uncompressedList, 0, fileBufferSize, true); + if (uncompressedSize != fileBufferSize) { - int numFiles = reader.ReadInt32(); - int compressedSize = reader.ReadInt32(); - byte[] compressedFileList = reader.ReadBytes(compressedSize); - - int fileBufferSize = Marshal.SizeOf(typeof(FileEntry15)) * numFiles; - var 
uncompressedList = new byte[fileBufferSize]; - int uncompressedSize = LZ4Codec.Decode(compressedFileList, 0, compressedFileList.Length, uncompressedList, 0, fileBufferSize, true); - if (uncompressedSize != fileBufferSize) - { - string msg = $"LZ4 compressor disagrees about the size of file headers; expected {fileBufferSize}, got {uncompressedSize}"; - throw new InvalidDataException(msg); - } + string msg = $"LZ4 compressor disagrees about the size of file headers; expected {fileBufferSize}, got {uncompressedSize}"; + throw new InvalidDataException(msg); + } - var ms = new MemoryStream(uncompressedList); - var msr = new BinaryReader(ms); + var ms = new MemoryStream(uncompressedList); + var msr = new BinaryReader(ms); - var entries = new FileEntry15[numFiles]; - BinUtils.ReadStructs(msr, entries); + var entries = new FileEntry15[numFiles]; + BinUtils.ReadStructs(msr, entries); - foreach (var entry in entries) - { - package.Files.Add(PackagedFileInfo.CreateFromEntry(entry, _streams[entry.ArchivePart])); - } + foreach (var entry in entries) + { + package.Files.Add(PackagedFileInfo.CreateFromEntry(entry, _streams[entry.ArchivePart])); } + } - private void ReadFileListV18(BinaryReader reader, Package package) + private void ReadFileListV18(BinaryReader reader, Package package) + { + int numFiles = reader.ReadInt32(); + int compressedSize = reader.ReadInt32(); + byte[] compressedFileList = reader.ReadBytes(compressedSize); + + int fileBufferSize = Marshal.SizeOf(typeof(FileEntry18)) * numFiles; + var uncompressedList = new byte[fileBufferSize]; + int uncompressedSize = LZ4Codec.Decode(compressedFileList, 0, compressedFileList.Length, uncompressedList, 0, fileBufferSize, false); + if (uncompressedSize != fileBufferSize) { - int numFiles = reader.ReadInt32(); - int compressedSize = reader.ReadInt32(); - byte[] compressedFileList = reader.ReadBytes(compressedSize); - - int fileBufferSize = Marshal.SizeOf(typeof(FileEntry18)) * numFiles; - var uncompressedList = new byte[fileBufferSize]; - int uncompressedSize = LZ4Codec.Decode(compressedFileList, 0, compressedFileList.Length, uncompressedList, 0, fileBufferSize, false); - if (uncompressedSize != fileBufferSize) - { - string msg = $"LZ4 compressor disagrees about the size of file headers; expected {fileBufferSize}, got {uncompressedSize}"; - throw new InvalidDataException(msg); - } + string msg = $"LZ4 compressor disagrees about the size of file headers; expected {fileBufferSize}, got {uncompressedSize}"; + throw new InvalidDataException(msg); + } - var ms = new MemoryStream(uncompressedList); - var msr = new BinaryReader(ms); + var ms = new MemoryStream(uncompressedList); + var msr = new BinaryReader(ms); - var entries = new FileEntry18[numFiles]; - BinUtils.ReadStructs(msr, entries); + var entries = new FileEntry18[numFiles]; + BinUtils.ReadStructs(msr, entries); - foreach (var entry in entries) - { - package.Files.Add(PackagedFileInfo.CreateFromEntry(entry, _streams[entry.ArchivePart])); - } + foreach (var entry in entries) + { + package.Files.Add(PackagedFileInfo.CreateFromEntry(entry, _streams[entry.ArchivePart])); } + } + + private Package ReadPackageV15(FileStream mainStream, BinaryReader reader) + { + var package = new Package(); + var header = BinUtils.ReadStruct(reader); - private Package ReadPackageV15(FileStream mainStream, BinaryReader reader) + if (header.Version != (ulong)PackageVersion.V15) { - var package = new Package(); - var header = BinUtils.ReadStruct(reader); + string msg = $"Unsupported package version {header.Version}; this 
layout is only supported for V15"; + throw new InvalidDataException(msg); + } - if (header.Version != (ulong)PackageVersion.V15) - { - string msg = $"Unsupported package version {header.Version}; this layout is only supported for V15"; - throw new InvalidDataException(msg); - } + package.Metadata.Flags = (PackageFlags)header.Flags; + package.Metadata.Priority = header.Priority; + package.Version = PackageVersion.V15; - package.Metadata.Flags = (PackageFlags)header.Flags; - package.Metadata.Priority = header.Priority; - package.Version = PackageVersion.V15; + if (metadataOnly) return package; - if (metadataOnly) return package; + OpenStreams(mainStream, 1); + mainStream.Seek((long)header.FileListOffset, SeekOrigin.Begin); + ReadFileListV15(reader, package); - OpenStreams(mainStream, 1); - mainStream.Seek((long)header.FileListOffset, SeekOrigin.Begin); - ReadFileListV15(reader, package); + return package; + } - return package; - } + private Package ReadPackageV16(FileStream mainStream, BinaryReader reader) + { + var package = new Package(); + var header = BinUtils.ReadStruct(reader); - private Package ReadPackageV16(FileStream mainStream, BinaryReader reader) + if (header.Version != (ulong)PackageVersion.V16) { - var package = new Package(); - var header = BinUtils.ReadStruct(reader); + string msg = $"Unsupported package version {header.Version}; this layout is only supported for V16"; + throw new InvalidDataException(msg); + } - if (header.Version != (ulong)PackageVersion.V16) - { - string msg = $"Unsupported package version {header.Version}; this layout is only supported for V16"; - throw new InvalidDataException(msg); - } + package.Metadata.Flags = (PackageFlags)header.Flags; + package.Metadata.Priority = header.Priority; + package.Version = PackageVersion.V16; - package.Metadata.Flags = (PackageFlags)header.Flags; - package.Metadata.Priority = header.Priority; - package.Version = PackageVersion.V16; + if (metadataOnly) return package; - if (metadataOnly) return package; + OpenStreams(mainStream, header.NumParts); + mainStream.Seek((long)header.FileListOffset, SeekOrigin.Begin); + ReadFileListV15(reader, package); - OpenStreams(mainStream, header.NumParts); - mainStream.Seek((long)header.FileListOffset, SeekOrigin.Begin); - ReadFileListV15(reader, package); + return package; + } - return package; - } + private Package ReadPackageV18(FileStream mainStream, BinaryReader reader) + { + var package = new Package(); + var header = BinUtils.ReadStruct(reader); - private Package ReadPackageV18(FileStream mainStream, BinaryReader reader) + if (header.Version != (ulong)PackageVersion.V18) { - var package = new Package(); - var header = BinUtils.ReadStruct(reader); + string msg = $"Unsupported package version {header.Version}; this layout is only supported for V18"; + throw new InvalidDataException(msg); + } - if (header.Version != (ulong)PackageVersion.V18) - { - string msg = $"Unsupported package version {header.Version}; this layout is only supported for V18"; - throw new InvalidDataException(msg); - } + package.Metadata.Flags = (PackageFlags)header.Flags; + package.Metadata.Priority = header.Priority; + package.Version = PackageVersion.V18; - package.Metadata.Flags = (PackageFlags)header.Flags; - package.Metadata.Priority = header.Priority; - package.Version = PackageVersion.V18; + if (metadataOnly) return package; - if (metadataOnly) return package; + OpenStreams(mainStream, header.NumParts); + mainStream.Seek((long)header.FileListOffset, SeekOrigin.Begin); + ReadFileListV18(reader, package); - 
OpenStreams(mainStream, header.NumParts); - mainStream.Seek((long)header.FileListOffset, SeekOrigin.Begin); - ReadFileListV18(reader, package); + return package; + } - return package; + public Package Read() + { + var mainStream = File.Open(path, FileMode.Open, FileAccess.Read, FileShare.Read); + using var reader = new BinaryReader(mainStream, new UTF8Encoding(), true); + + // Check for v13 package headers + mainStream.Seek(-8, SeekOrigin.End); + Int32 headerSize = reader.ReadInt32(); + byte[] signature = reader.ReadBytes(4); + if (Package.Signature.SequenceEqual(signature)) + { + mainStream.Seek(-headerSize, SeekOrigin.End); + return ReadPackageV13(mainStream, reader); } - public Package Read() + // Check for v10 package headers + mainStream.Seek(0, SeekOrigin.Begin); + signature = reader.ReadBytes(4); + Int32 version; + if (Package.Signature.SequenceEqual(signature)) { - var mainStream = File.Open(path, FileMode.Open, FileAccess.Read, FileShare.Read); - using var reader = new BinaryReader(mainStream, new UTF8Encoding(), true); - - // Check for v13 package headers - mainStream.Seek(-8, SeekOrigin.End); - Int32 headerSize = reader.ReadInt32(); - byte[] signature = reader.ReadBytes(4); - if (Package.Signature.SequenceEqual(signature)) + version = reader.ReadInt32(); + if (version == 10) { - mainStream.Seek(-headerSize, SeekOrigin.End); - return ReadPackageV13(mainStream, reader); + return ReadPackageV10(mainStream, reader); } - - // Check for v10 package headers - mainStream.Seek(0, SeekOrigin.Begin); - signature = reader.ReadBytes(4); - Int32 version; - if (Package.Signature.SequenceEqual(signature)) + else if (version == 15) { - version = reader.ReadInt32(); - if (version == 10) - { - return ReadPackageV10(mainStream, reader); - } - else if (version == 15) - { - mainStream.Seek(4, SeekOrigin.Begin); - return ReadPackageV15(mainStream, reader); - } - else if (version == 16) - { - mainStream.Seek(4, SeekOrigin.Begin); - return ReadPackageV16(mainStream, reader); - } - else if (version == 18) - { - mainStream.Seek(4, SeekOrigin.Begin); - return ReadPackageV18(mainStream, reader); - } - else - { - throw new InvalidDataException($"Package version v{version} not supported"); - } + mainStream.Seek(4, SeekOrigin.Begin); + return ReadPackageV15(mainStream, reader); } - - // Check for v9 and v7 package headers - mainStream.Seek(0, SeekOrigin.Begin); - version = reader.ReadInt32(); - if (version == 7 || version == 9) + else if (version == 16) + { + mainStream.Seek(4, SeekOrigin.Begin); + return ReadPackageV16(mainStream, reader); + } + else if (version == 18) { - return ReadPackageV7(mainStream, reader); + mainStream.Seek(4, SeekOrigin.Begin); + return ReadPackageV18(mainStream, reader); } + else + { + throw new InvalidDataException($"Package version v{version} not supported"); + } + } - throw new NotAPackageException("No valid signature found in package file"); + // Check for v9 and v7 package headers + mainStream.Seek(0, SeekOrigin.Begin); + version = reader.ReadInt32(); + if (version == 7 || version == 9) + { + return ReadPackageV7(mainStream, reader); } + + throw new NotAPackageException("No valid signature found in package file"); } } diff --git a/LSLib/LS/PackageWriter.cs b/LSLib/LS/PackageWriter.cs index 8ba81383..69c3ec57 100644 --- a/LSLib/LS/PackageWriter.cs +++ b/LSLib/LS/PackageWriter.cs @@ -9,489 +9,488 @@ using LSLib.Native; using LZ4; -namespace LSLib.LS +namespace LSLib.LS; + +public class PackageWriter(Package package, string path) : IDisposable { - public class 
PackageWriter(Package package, string path) : IDisposable + public delegate void WriteProgressDelegate(AbstractFileInfo abstractFile, long numerator, long denominator); + + private const long MaxPackageSizeDOS = 0x40000000; + private const long MaxPackageSizeBG3 = 0x100000000; + public CompressionMethod Compression = CompressionMethod.None; + public LSCompressionLevel LSCompressionLevel = LSCompressionLevel.DefaultCompression; + private readonly List _streams = []; + public PackageVersion Version = Package.CurrentVersion; + public WriteProgressDelegate WriteProgress = delegate { }; + + public void Dispose() { - public delegate void WriteProgressDelegate(AbstractFileInfo abstractFile, long numerator, long denominator); + foreach (Stream stream in _streams) + { + stream.Dispose(); + } + } - private const long MaxPackageSizeDOS = 0x40000000; - private const long MaxPackageSizeBG3 = 0x100000000; - public CompressionMethod Compression = CompressionMethod.None; - public LSCompressionLevel LSCompressionLevel = LSCompressionLevel.DefaultCompression; - private readonly List _streams = []; - public PackageVersion Version = Package.CurrentVersion; - public WriteProgressDelegate WriteProgress = delegate { }; + public int PaddingLength() => Version <= PackageVersion.V9 ? 0x8000 : 0x40; - public void Dispose() + public PackagedFileInfo WriteFile(AbstractFileInfo info) + { + // Assume that all files are written uncompressed (worst-case) when calculating package sizes + long size = (long)info.Size(); + if ((Version < PackageVersion.V15 && _streams.Last().Position + size > MaxPackageSizeDOS) + || (Version >= PackageVersion.V16 && _streams.Last().Position + size > MaxPackageSizeBG3)) { - foreach (Stream stream in _streams) - { - stream.Dispose(); - } + // Start a new package file if the current one is full. + string partPath = Package.MakePartFilename(path, _streams.Count); + var nextPart = File.Open(partPath, FileMode.Create, FileAccess.Write); + _streams.Add(nextPart); } - public int PaddingLength() => Version <= PackageVersion.V9 ? 0x8000 : 0x40; + var compression = Compression; + var compressionLevel = LSCompressionLevel; - public PackagedFileInfo WriteFile(AbstractFileInfo info) + if (info.Name.EndsWith(".gts") || info.Name.EndsWith(".gtp")) { - // Assume that all files are written uncompressed (worst-case) when calculating package sizes - long size = (long)info.Size(); - if ((Version < PackageVersion.V15 && _streams.Last().Position + size > MaxPackageSizeDOS) - || (Version >= PackageVersion.V16 && _streams.Last().Position + size > MaxPackageSizeBG3)) - { - // Start a new package file if the current one is full. 
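// WriteFile above keeps appending to the current part until the next file
// would push it past the per-part limit (MaxPackageSizeDOS = 0x40000000 bytes
// for the older formats, MaxPackageSizeBG3 = 0x100000000 bytes from V16 on),
// then opens a new part named by Package.MakePartFilename, defined earlier in
// this patch. A usage example with a made-up archive path:
//
//   Package.MakePartFilename("Mods/Example.pak", 1)   // -> "Mods/Example_1.pak"
//   Package.MakePartFilename("Mods/Example.pak", 2)   // -> "Mods/Example_2.pak"
//
// Further down, non-solid entries are padded with 0xAD bytes so that the next
// entry starts on a PaddingLength() boundary. The expression used there is the
// usual "round up to a multiple" pattern; a quick worked example with the V10+
// padding of 0x40 bytes (the input value is illustrative only):
internal static class PaddingExample
{
    public static long NextAlignedOffset(long alignTo, long padLength = 0x40)
    {
        // For alignTo = 0x1234: 0x1234 % 0x40 = 0x34, so 0x0C pad bytes are
        // written and the next entry starts at 0x1240, a multiple of 0x40.
        long padBytes = (padLength - alignTo % padLength) % padLength;
        return alignTo + padBytes;
    }
}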
- string partPath = Package.MakePartFilename(path, _streams.Count); - var nextPart = File.Open(partPath, FileMode.Create, FileAccess.Write); - _streams.Add(nextPart); - } + compression = CompressionMethod.None; + compressionLevel = LSCompressionLevel.FastCompression; + } - var compression = Compression; - var compressionLevel = LSCompressionLevel; + Stream stream = _streams.Last(); + var packaged = new PackagedFileInfo + { + PackageStream = stream, + Name = info.Name, + UncompressedSize = (ulong)size, + ArchivePart = (UInt32) (_streams.Count - 1), + OffsetInFile = (UInt32) stream.Position, + Flags = BinUtils.MakeCompressionFlags(compression, compressionLevel) + }; + + Stream packagedStream = info.MakeStream(); + byte[] compressed; + try + { + using var reader = new BinaryReader(packagedStream, Encoding.UTF8, true); + byte[] uncompressed = reader.ReadBytes((int)reader.BaseStream.Length); + compressed = BinUtils.Compress(uncompressed, compression, compressionLevel); + stream.Write(compressed, 0, compressed.Length); + } + finally + { + info.ReleaseStream(); + } - if (info.Name.EndsWith(".gts") || info.Name.EndsWith(".gtp")) - { - compression = CompressionMethod.None; - compressionLevel = LSCompressionLevel.FastCompression; - } + packaged.SizeOnDisk = (UInt64) (stream.Position - (long)packaged.OffsetInFile); + packaged.Crc = Crc32.Compute(compressed, 0); - Stream stream = _streams.Last(); - var packaged = new PackagedFileInfo - { - PackageStream = stream, - Name = info.Name, - UncompressedSize = (ulong)size, - ArchivePart = (UInt32) (_streams.Count - 1), - OffsetInFile = (UInt32) stream.Position, - Flags = BinUtils.MakeCompressionFlags(compression, compressionLevel) - }; - - Stream packagedStream = info.MakeStream(); - byte[] compressed; - try + if ((package.Metadata.Flags & PackageFlags.Solid) == 0) + { + int padLength = PaddingLength(); + long alignTo; + if (Version >= PackageVersion.V16) { - using var reader = new BinaryReader(packagedStream, Encoding.UTF8, true); - byte[] uncompressed = reader.ReadBytes((int)reader.BaseStream.Length); - compressed = BinUtils.Compress(uncompressed, compression, compressionLevel); - stream.Write(compressed, 0, compressed.Length); + alignTo = stream.Position - Marshal.SizeOf(typeof(LSPKHeader16)) - 4; } - finally + else { - info.ReleaseStream(); + alignTo = stream.Position; } - packaged.SizeOnDisk = (UInt64) (stream.Position - (long)packaged.OffsetInFile); - packaged.Crc = Crc32.Compute(compressed, 0); - - if ((package.Metadata.Flags & PackageFlags.Solid) == 0) + // Pad the file to a multiple of 64 bytes + var padBytes = (padLength - alignTo % padLength) % padLength; + var pad = new byte[padBytes]; + for (var i = 0; i < pad.Length; i++) { - int padLength = PaddingLength(); - long alignTo; - if (Version >= PackageVersion.V16) - { - alignTo = stream.Position - Marshal.SizeOf(typeof(LSPKHeader16)) - 4; - } - else - { - alignTo = stream.Position; - } + pad[i] = 0xAD; + } - // Pad the file to a multiple of 64 bytes - var padBytes = (padLength - alignTo % padLength) % padLength; - var pad = new byte[padBytes]; - for (var i = 0; i < pad.Length; i++) - { - pad[i] = 0xAD; - } + stream.Write(pad, 0, pad.Length); + } - stream.Write(pad, 0, pad.Length); - } + return packaged; + } - return packaged; + public void WriteV7(FileStream mainStream) + { + if (Compression == CompressionMethod.LZ4) + { + throw new ArgumentException("LZ4 compression is only supported by V10 and later package versions"); } - public void WriteV7(FileStream mainStream) + using var writer = new 
BinaryWriter(mainStream, new UTF8Encoding(), true); + var header = new LSPKHeader7 { - if (Compression == CompressionMethod.LZ4) - { - throw new ArgumentException("LZ4 compression is only supported by V10 and later package versions"); - } + Version = (uint)Version, + NumFiles = (UInt32)package.Files.Count, + FileListSize = (UInt32)(Marshal.SizeOf(typeof(FileEntry7)) * package.Files.Count) + }; + header.DataOffset = (UInt32)Marshal.SizeOf(typeof(LSPKHeader7)) + header.FileListSize; + int paddingLength = PaddingLength(); + if (header.DataOffset % paddingLength > 0) + { + header.DataOffset += (UInt32)(paddingLength - header.DataOffset % paddingLength); + } - using var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true); - var header = new LSPKHeader7 - { - Version = (uint)Version, - NumFiles = (UInt32)package.Files.Count, - FileListSize = (UInt32)(Marshal.SizeOf(typeof(FileEntry7)) * package.Files.Count) - }; - header.DataOffset = (UInt32)Marshal.SizeOf(typeof(LSPKHeader7)) + header.FileListSize; - int paddingLength = PaddingLength(); - if (header.DataOffset % paddingLength > 0) - { - header.DataOffset += (UInt32)(paddingLength - header.DataOffset % paddingLength); - } + // Write a placeholder instead of the actual headers; we'll write them after we + // compressed and flushed all files to disk + var placeholder = new byte[header.DataOffset]; + writer.Write(placeholder); + + long totalSize = package.Files.Sum(p => (long)p.Size()); + long currentSize = 0; + var writtenFiles = new List(); + foreach (AbstractFileInfo file in package.Files) + { + WriteProgress(file, currentSize, totalSize); + writtenFiles.Add(WriteFile(file)); + currentSize += (long)file.Size(); + } - // Write a placeholder instead of the actual headers; we'll write them after we - // compressed and flushed all files to disk - var placeholder = new byte[header.DataOffset]; - writer.Write(placeholder); + mainStream.Seek(0, SeekOrigin.Begin); + header.LittleEndian = 0; + header.NumParts = (UInt16)_streams.Count; + BinUtils.WriteStruct(writer, ref header); - long totalSize = package.Files.Sum(p => (long)p.Size()); - long currentSize = 0; - var writtenFiles = new List(); - foreach (AbstractFileInfo file in package.Files) + foreach (PackagedFileInfo file in writtenFiles) + { + FileEntry7 entry = file.MakeEntryV7(); + if (entry.ArchivePart == 0) { - WriteProgress(file, currentSize, totalSize); - writtenFiles.Add(WriteFile(file)); - currentSize += (long)file.Size(); + entry.OffsetInFile -= header.DataOffset; } - mainStream.Seek(0, SeekOrigin.Begin); - header.LittleEndian = 0; - header.NumParts = (UInt16)_streams.Count; - BinUtils.WriteStruct(writer, ref header); - - foreach (PackagedFileInfo file in writtenFiles) - { - FileEntry7 entry = file.MakeEntryV7(); - if (entry.ArchivePart == 0) - { - entry.OffsetInFile -= header.DataOffset; - } + BinUtils.WriteStruct(writer, ref entry); + } + } - BinUtils.WriteStruct(writer, ref entry); - } + public void WriteV10(FileStream mainStream) + { + using var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true); + var header = new LSPKHeader10 + { + Version = (uint)Version, + NumFiles = (UInt32)package.Files.Count, + FileListSize = (UInt32)(Marshal.SizeOf(typeof(FileEntry13)) * package.Files.Count) + }; + header.DataOffset = (UInt32)Marshal.SizeOf(typeof(LSPKHeader10)) + 4 + header.FileListSize; + int paddingLength = PaddingLength(); + if (header.DataOffset % paddingLength > 0) + { + header.DataOffset += (UInt32)(paddingLength - header.DataOffset % paddingLength); } - public void 
WriteV10(FileStream mainStream) + // Write a placeholder instead of the actual headers; we'll write them after we + // compressed and flushed all files to disk + var placeholder = new byte[header.DataOffset]; + writer.Write(placeholder); + + long totalSize = package.Files.Sum(p => (long)p.Size()); + long currentSize = 0; + var writtenFiles = new List(); + foreach (AbstractFileInfo file in package.Files) { - using var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true); - var header = new LSPKHeader10 - { - Version = (uint)Version, - NumFiles = (UInt32)package.Files.Count, - FileListSize = (UInt32)(Marshal.SizeOf(typeof(FileEntry13)) * package.Files.Count) - }; - header.DataOffset = (UInt32)Marshal.SizeOf(typeof(LSPKHeader10)) + 4 + header.FileListSize; - int paddingLength = PaddingLength(); - if (header.DataOffset % paddingLength > 0) - { - header.DataOffset += (UInt32)(paddingLength - header.DataOffset % paddingLength); - } + WriteProgress(file, currentSize, totalSize); + writtenFiles.Add(WriteFile(file)); + currentSize += (long)file.Size(); + } - // Write a placeholder instead of the actual headers; we'll write them after we - // compressed and flushed all files to disk - var placeholder = new byte[header.DataOffset]; - writer.Write(placeholder); + mainStream.Seek(0, SeekOrigin.Begin); + writer.Write(Package.Signature); + header.NumParts = (UInt16)_streams.Count; + header.Priority = package.Metadata.Priority; + header.Flags = (byte)package.Metadata.Flags; + BinUtils.WriteStruct(writer, ref header); - long totalSize = package.Files.Sum(p => (long)p.Size()); - long currentSize = 0; - var writtenFiles = new List(); - foreach (AbstractFileInfo file in package.Files) + foreach (PackagedFileInfo file in writtenFiles) + { + FileEntry13 entry = file.MakeEntryV13(); + if (entry.ArchivePart == 0) { - WriteProgress(file, currentSize, totalSize); - writtenFiles.Add(WriteFile(file)); - currentSize += (long)file.Size(); + entry.OffsetInFile -= header.DataOffset; } - mainStream.Seek(0, SeekOrigin.Begin); - writer.Write(Package.Signature); - header.NumParts = (UInt16)_streams.Count; - header.Priority = package.Metadata.Priority; - header.Flags = (byte)package.Metadata.Flags; - BinUtils.WriteStruct(writer, ref header); + // v10 packages don't support compression level in the flags field + entry.Flags &= 0x0f; + BinUtils.WriteStruct(writer, ref entry); + } + } - foreach (PackagedFileInfo file in writtenFiles) - { - FileEntry13 entry = file.MakeEntryV13(); - if (entry.ArchivePart == 0) - { - entry.OffsetInFile -= header.DataOffset; - } + public void WriteV13(FileStream mainStream) + { + long totalSize = package.Files.Sum(p => (long) p.Size()); + long currentSize = 0; - // v10 packages don't support compression level in the flags field - entry.Flags &= 0x0f; - BinUtils.WriteStruct(writer, ref entry); - } + var writtenFiles = new List(); + foreach (AbstractFileInfo file in package.Files) + { + WriteProgress(file, currentSize, totalSize); + writtenFiles.Add(WriteFile(file)); + currentSize += (long)file.Size(); } - public void WriteV13(FileStream mainStream) + using var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true); + var header = new LSPKHeader13 { - long totalSize = package.Files.Sum(p => (long) p.Size()); - long currentSize = 0; + Version = (uint)Version, + FileListOffset = (UInt32)mainStream.Position + }; - var writtenFiles = new List(); - foreach (AbstractFileInfo file in package.Files) - { - WriteProgress(file, currentSize, totalSize); - writtenFiles.Add(WriteFile(file)); - 
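
The alignment arithmetic used here is worth spelling out once: WriteV7/WriteV10 round DataOffset up to PaddingLength(), and WriteFile pads each written file out to the same boundary with 0xAD filler. A self-contained sketch of just those two formulas; the method names are invented for illustration.

    using System;

    static class AlignmentSketch
    {
        // Round `offset` up to the next multiple of `padding`
        // (the DataOffset adjustment in WriteV7/WriteV10 above).
        static uint RoundUp(uint offset, uint padding) =>
            offset % padding > 0 ? offset + (padding - offset % padding) : offset;

        // Number of filler bytes needed so that `position` becomes a multiple of
        // `padding` (the per-file 0xAD padding computed in WriteFile).
        static long PadBytes(long position, long padding) =>
            (padding - position % padding) % padding;

        static void Main()
        {
            Console.WriteLine(RoundUp(100, 64));  // 128
            Console.WriteLine(RoundUp(128, 64));  // 128 (already aligned)
            Console.WriteLine(PadBytes(100, 64)); // 28
            Console.WriteLine(PadBytes(128, 64)); // 0
        }
    }
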
currentSize += (long)file.Size(); - } + writer.Write((UInt32)writtenFiles.Count); - using var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true); - var header = new LSPKHeader13 - { - Version = (uint)Version, - FileListOffset = (UInt32)mainStream.Position - }; + var fileList = new MemoryStream(); + var fileListWriter = new BinaryWriter(fileList); + foreach (PackagedFileInfo file in writtenFiles) + { + FileEntry13 entry = file.MakeEntryV13(); + BinUtils.WriteStruct(fileListWriter, ref entry); + } + + byte[] fileListBuf = fileList.ToArray(); + fileListWriter.Dispose(); + byte[] compressedFileList = LZ4Codec.EncodeHC(fileListBuf, 0, fileListBuf.Length); + + writer.Write(compressedFileList); - writer.Write((UInt32)writtenFiles.Count); + header.FileListSize = (UInt32)mainStream.Position - header.FileListOffset; + header.NumParts = (UInt16)_streams.Count; + header.Priority = package.Metadata.Priority; + header.Flags = (byte)package.Metadata.Flags; + header.Md5 = ComputeArchiveHash(); + BinUtils.WriteStruct(writer, ref header); - var fileList = new MemoryStream(); - var fileListWriter = new BinaryWriter(fileList); - foreach (PackagedFileInfo file in writtenFiles) + writer.Write((UInt32)(8 + Marshal.SizeOf(typeof(LSPKHeader13)))); + writer.Write(Package.Signature); + } + + private List PackFiles() + { + long totalSize = package.Files.Sum(p => (long)p.Size()); + long currentSize = 0; + + var writtenFiles = new List(); + foreach (AbstractFileInfo file in package.Files) + { + WriteProgress(file, currentSize, totalSize); + writtenFiles.Add(WriteFile(file)); + currentSize += (long)file.Size(); + } + + return writtenFiles; + } + + private void WriteFileListV15(BinaryWriter metadataWriter, List files) + { + byte[] fileListBuf; + using (var fileList = new MemoryStream()) + using (var fileListWriter = new BinaryWriter(fileList)) + { + foreach (PackagedFileInfo file in files) { - FileEntry13 entry = file.MakeEntryV13(); + FileEntry15 entry = file.MakeEntryV15(); BinUtils.WriteStruct(fileListWriter, ref entry); } - byte[] fileListBuf = fileList.ToArray(); - fileListWriter.Dispose(); - byte[] compressedFileList = LZ4Codec.EncodeHC(fileListBuf, 0, fileListBuf.Length); + fileListBuf = fileList.ToArray(); + } + + byte[] compressedFileList = LZ4Codec.EncodeHC(fileListBuf, 0, fileListBuf.Length); - writer.Write(compressedFileList); + metadataWriter.Write((UInt32)files.Count); + metadataWriter.Write((UInt32)compressedFileList.Length); + metadataWriter.Write(compressedFileList); + } - header.FileListSize = (UInt32)mainStream.Position - header.FileListOffset; - header.NumParts = (UInt16)_streams.Count; + public void WriteV15(FileStream mainStream) + { + var header = new LSPKHeader15 + { + Version = (uint)Version + }; + + using (var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true)) + { + writer.Write(Package.Signature); + BinUtils.WriteStruct(writer, ref header); + } + + var writtenFiles = PackFiles(); + + using (var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true)) + { + header.FileListOffset = (UInt64)mainStream.Position; + WriteFileListV15(writer, writtenFiles); + + header.FileListSize = (UInt32)(mainStream.Position - (long)header.FileListOffset); header.Priority = package.Metadata.Priority; header.Flags = (byte)package.Metadata.Flags; header.Md5 = ComputeArchiveHash(); + mainStream.Seek(4, SeekOrigin.Begin); BinUtils.WriteStruct(writer, ref header); + } + } + + public void WriteV16(FileStream mainStream) + { + var header = new LSPKHeader16 + { + Version = (uint)Version + 
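
WriteV15, WriteV16 and WriteV18 all share the same shape: write a provisional header right after the 4-byte signature, pack the files, then seek back and overwrite the header once the real file-list offset and size are known. A stripped-down, self-contained illustration of that pattern with a made-up two-field header; nothing in it is LSLib API.

    using System;
    using System.IO;

    static class SeekBackSketch
    {
        static void Main()
        {
            using var ms = new MemoryStream();
            using var writer = new BinaryWriter(ms);

            writer.Write("LSPK"u8.ToArray());     // 4-byte signature
            long headerPos = ms.Position;
            writer.Write(0UL);                    // placeholder: file list offset (unknown yet)
            writer.Write(0U);                     // placeholder: file list size

            writer.Write(new byte[100]);          // stand-in for the packed file data
            ulong fileListOffset = (ulong)ms.Position;
            writer.Write(new byte[] { 1, 2, 3 }); // stand-in for the compressed file list
            long end = ms.Position;

            ms.Seek(headerPos, SeekOrigin.Begin); // jump back to just after the signature
            writer.Write(fileListOffset);         // and write the real values
            writer.Write((uint)(end - (long)fileListOffset));

            Console.WriteLine($"file list at {fileListOffset}, {end - (long)fileListOffset} bytes");
        }
    }
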
}; - writer.Write((UInt32)(8 + Marshal.SizeOf(typeof(LSPKHeader13)))); + using (var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true)) + { writer.Write(Package.Signature); + BinUtils.WriteStruct(writer, ref header); } - private List PackFiles() - { - long totalSize = package.Files.Sum(p => (long)p.Size()); - long currentSize = 0; + var writtenFiles = PackFiles(); - var writtenFiles = new List(); - foreach (AbstractFileInfo file in package.Files) - { - WriteProgress(file, currentSize, totalSize); - writtenFiles.Add(WriteFile(file)); - currentSize += (long)file.Size(); - } + using (var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true)) + { + header.FileListOffset = (UInt64)mainStream.Position; + WriteFileListV15(writer, writtenFiles); - return writtenFiles; + header.FileListSize = (UInt32)(mainStream.Position - (long)header.FileListOffset); + header.Priority = package.Metadata.Priority; + header.Flags = (byte)package.Metadata.Flags; + header.Md5 = ComputeArchiveHash(); + header.NumParts = (UInt16)_streams.Count; + mainStream.Seek(4, SeekOrigin.Begin); + BinUtils.WriteStruct(writer, ref header); } + } - private void WriteFileListV15(BinaryWriter metadataWriter, List files) + private void WriteFileListV18(BinaryWriter metadataWriter, List files) + { + byte[] fileListBuf; + using (var fileList = new MemoryStream()) + using (var fileListWriter = new BinaryWriter(fileList)) { - byte[] fileListBuf; - using (var fileList = new MemoryStream()) - using (var fileListWriter = new BinaryWriter(fileList)) + foreach (PackagedFileInfo file in files) { - foreach (PackagedFileInfo file in files) - { - FileEntry15 entry = file.MakeEntryV15(); - BinUtils.WriteStruct(fileListWriter, ref entry); - } - - fileListBuf = fileList.ToArray(); + FileEntry18 entry = file.MakeEntryV18(); + BinUtils.WriteStruct(fileListWriter, ref entry); } - byte[] compressedFileList = LZ4Codec.EncodeHC(fileListBuf, 0, fileListBuf.Length); - - metadataWriter.Write((UInt32)files.Count); - metadataWriter.Write((UInt32)compressedFileList.Length); - metadataWriter.Write(compressedFileList); + fileListBuf = fileList.ToArray(); } - public void WriteV15(FileStream mainStream) - { - var header = new LSPKHeader15 - { - Version = (uint)Version - }; + byte[] compressedFileList = LZ4Codec.EncodeHC(fileListBuf, 0, fileListBuf.Length); - using (var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true)) - { - writer.Write(Package.Signature); - BinUtils.WriteStruct(writer, ref header); - } + metadataWriter.Write((UInt32)files.Count); + metadataWriter.Write((UInt32)compressedFileList.Length); + metadataWriter.Write(compressedFileList); + } - var writtenFiles = PackFiles(); + public void WriteV18(FileStream mainStream) + { + var header = new LSPKHeader16 + { + Version = (uint)Version + }; - using (var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true)) - { - header.FileListOffset = (UInt64)mainStream.Position; - WriteFileListV15(writer, writtenFiles); - - header.FileListSize = (UInt32)(mainStream.Position - (long)header.FileListOffset); - header.Priority = package.Metadata.Priority; - header.Flags = (byte)package.Metadata.Flags; - header.Md5 = ComputeArchiveHash(); - mainStream.Seek(4, SeekOrigin.Begin); - BinUtils.WriteStruct(writer, ref header); - } + using (var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true)) + { + writer.Write(Package.Signature); + BinUtils.WriteStruct(writer, ref header); } - public void WriteV16(FileStream mainStream) - { - var header = new LSPKHeader16 - { - Version 
= (uint)Version - }; + var writtenFiles = PackFiles(); - using (var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true)) - { - writer.Write(Package.Signature); - BinUtils.WriteStruct(writer, ref header); - } + using (var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true)) + { + header.FileListOffset = (UInt64)mainStream.Position; + WriteFileListV18(writer, writtenFiles); - var writtenFiles = PackFiles(); + header.FileListSize = (UInt32)(mainStream.Position - (long)header.FileListOffset); + header.Priority = package.Metadata.Priority; + header.Flags = (byte)package.Metadata.Flags; + header.Md5 = ComputeArchiveHash(); + header.NumParts = (UInt16)_streams.Count; + mainStream.Seek(4, SeekOrigin.Begin); + BinUtils.WriteStruct(writer, ref header); + } + } - using (var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true)) - { - header.FileListOffset = (UInt64)mainStream.Position; - WriteFileListV15(writer, writtenFiles); - - header.FileListSize = (UInt32)(mainStream.Position - (long)header.FileListOffset); - header.Priority = package.Metadata.Priority; - header.Flags = (byte)package.Metadata.Flags; - header.Md5 = ComputeArchiveHash(); - header.NumParts = (UInt16)_streams.Count; - mainStream.Seek(4, SeekOrigin.Begin); - BinUtils.WriteStruct(writer, ref header); - } + public byte[] ComputeArchiveHash() + { + // MD5 is computed over the contents of all files in an alphabetically sorted order + List orderedFileList = package.Files.Select(item => item).ToList(); + if (Version < PackageVersion.V15) + { + orderedFileList.Sort((a, b) => String.CompareOrdinal(a.Name, b.Name)); } - private void WriteFileListV18(BinaryWriter metadataWriter, List files) + using MD5 md5 = MD5.Create(); + foreach (AbstractFileInfo file in orderedFileList) { - byte[] fileListBuf; - using (var fileList = new MemoryStream()) - using (var fileListWriter = new BinaryWriter(fileList)) + Stream packagedStream = file.MakeStream(); + try { - foreach (PackagedFileInfo file in files) + using (var reader = new BinaryReader(packagedStream)) { - FileEntry18 entry = file.MakeEntryV18(); - BinUtils.WriteStruct(fileListWriter, ref entry); + byte[] uncompressed = reader.ReadBytes((int)reader.BaseStream.Length); + md5.TransformBlock(uncompressed, 0, uncompressed.Length, uncompressed, 0); } - - fileListBuf = fileList.ToArray(); } + finally + { + file.ReleaseStream(); + } + } - byte[] compressedFileList = LZ4Codec.EncodeHC(fileListBuf, 0, fileListBuf.Length); + md5.TransformFinalBlock(new byte[0], 0, 0); + byte[] hash = md5.Hash; - metadataWriter.Write((UInt32)files.Count); - metadataWriter.Write((UInt32)compressedFileList.Length); - metadataWriter.Write(compressedFileList); + // All hash bytes are incremented by 1 + for (var i = 0; i < hash.Length; i++) + { + hash[i] += 1; } - public void WriteV18(FileStream mainStream) + return hash; + } + + public void Write() + { + var mainStream = File.Open(path, FileMode.Create, FileAccess.Write); + _streams.Add(mainStream); + + switch (Version) { - var header = new LSPKHeader16 + case PackageVersion.V18: { - Version = (uint)Version - }; - - using (var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true)) + WriteV18(mainStream); + break; + } + case PackageVersion.V16: { - writer.Write(Package.Signature); - BinUtils.WriteStruct(writer, ref header); + WriteV16(mainStream); + break; } - - var writtenFiles = PackFiles(); - - using (var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true)) + case PackageVersion.V15: { - header.FileListOffset = 
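
Two details of the ComputeArchiveHash method shown here matter when re-implementing a reader: the files are hashed in name order only for pre-V15 packages, and every byte of the final MD5 digest is incremented by one. A small standalone reproduction of the digest post-processing; BumpedMd5 is an invented name, not LSLib API.

    using System;
    using System.Security.Cryptography;
    using System.Text;

    static class ArchiveHashSketch
    {
        // Illustrative helper: MD5 over the concatenated file contents, with every
        // digest byte incremented by one afterwards, mirroring the post-processing
        // step in ComputeArchiveHash().
        static byte[] BumpedMd5(params byte[][] fileContents)
        {
            using var md5 = MD5.Create();
            foreach (var content in fileContents)
            {
                md5.TransformBlock(content, 0, content.Length, content, 0);
            }
            md5.TransformFinalBlock(Array.Empty<byte>(), 0, 0);

            byte[] hash = md5.Hash;
            for (int i = 0; i < hash.Length; i++)
            {
                hash[i] += 1; // byte arithmetic, so 0xFF wraps to 0x00
            }
            return hash;
        }

        static void Main()
        {
            var a = Encoding.UTF8.GetBytes("first file");
            var b = Encoding.UTF8.GetBytes("second file");
            // For pre-V15 packages the inputs must be ordered by file name first.
            Console.WriteLine(BitConverter.ToString(BumpedMd5(a, b)));
        }
    }
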
(UInt64)mainStream.Position; - WriteFileListV18(writer, writtenFiles); - - header.FileListSize = (UInt32)(mainStream.Position - (long)header.FileListOffset); - header.Priority = package.Metadata.Priority; - header.Flags = (byte)package.Metadata.Flags; - header.Md5 = ComputeArchiveHash(); - header.NumParts = (UInt16)_streams.Count; - mainStream.Seek(4, SeekOrigin.Begin); - BinUtils.WriteStruct(writer, ref header); + WriteV15(mainStream); + break; } - } - - public byte[] ComputeArchiveHash() - { - // MD5 is computed over the contents of all files in an alphabetically sorted order - List orderedFileList = package.Files.Select(item => item).ToList(); - if (Version < PackageVersion.V15) + case PackageVersion.V13: { - orderedFileList.Sort((a, b) => String.CompareOrdinal(a.Name, b.Name)); + WriteV13(mainStream); + break; } - - using MD5 md5 = MD5.Create(); - foreach (AbstractFileInfo file in orderedFileList) + case PackageVersion.V10: { - Stream packagedStream = file.MakeStream(); - try - { - using (var reader = new BinaryReader(packagedStream)) - { - byte[] uncompressed = reader.ReadBytes((int)reader.BaseStream.Length); - md5.TransformBlock(uncompressed, 0, uncompressed.Length, uncompressed, 0); - } - } - finally - { - file.ReleaseStream(); - } + WriteV10(mainStream); + break; } - - md5.TransformFinalBlock(new byte[0], 0, 0); - byte[] hash = md5.Hash; - - // All hash bytes are incremented by 1 - for (var i = 0; i < hash.Length; i++) + case PackageVersion.V9: + case PackageVersion.V7: { - hash[i] += 1; + WriteV7(mainStream); + break; } - - return hash; - } - - public void Write() - { - var mainStream = File.Open(path, FileMode.Create, FileAccess.Write); - _streams.Add(mainStream); - - switch (Version) + default: { - case PackageVersion.V18: - { - WriteV18(mainStream); - break; - } - case PackageVersion.V16: - { - WriteV16(mainStream); - break; - } - case PackageVersion.V15: - { - WriteV15(mainStream); - break; - } - case PackageVersion.V13: - { - WriteV13(mainStream); - break; - } - case PackageVersion.V10: - { - WriteV10(mainStream); - break; - } - case PackageVersion.V9: - case PackageVersion.V7: - { - WriteV7(mainStream); - break; - } - default: - { - throw new ArgumentException($"Cannot write version {Version} packages"); - } + throw new ArgumentException($"Cannot write version {Version} packages"); } } } diff --git a/LSLib/LS/Resource.cs b/LSLib/LS/Resource.cs index 5d6837d3..aa585fcc 100644 --- a/LSLib/LS/Resource.cs +++ b/LSLib/LS/Resource.cs @@ -3,225 +3,224 @@ using System.Linq; using System.Runtime.InteropServices; -namespace LSLib.LS -{ - public class InvalidFormatException(string message) : Exception(message) - { - } - - public struct PackedVersion - { - public UInt32 Major; - public UInt32 Minor; - public UInt32 Revision; - public UInt32 Build; +namespace LSLib.LS; - public static PackedVersion FromInt64(Int64 packed) - { - return new PackedVersion - { - Major = (UInt32)((packed >> 55) & 0x7f), - Minor = (UInt32)((packed >> 47) & 0xff), - Revision = (UInt32)((packed >> 31) & 0xffff), - Build = (UInt32)(packed & 0x7fffffff), - }; - } +public class InvalidFormatException(string message) : Exception(message) +{ +} - public static PackedVersion FromInt32(Int32 packed) - { - return new PackedVersion - { - Major = (UInt32)((packed >> 28) & 0x0f), - Minor = (UInt32)((packed >> 24) & 0x0f), - Revision = (UInt32)((packed >> 16) & 0xff), - Build = (UInt32)(packed & 0xffff), - }; - } +public struct PackedVersion +{ + public UInt32 Major; + public UInt32 Minor; + public UInt32 Revision; + public 
UInt32 Build; - public readonly Int32 ToVersion32() + public static PackedVersion FromInt64(Int64 packed) + { + return new PackedVersion { - return (Int32)((Major & 0x0f) << 28 | - (Minor & 0x0f) << 24 | - (Revision & 0xff) << 16 | - (Build & 0xffff) << 0); - } + Major = (UInt32)((packed >> 55) & 0x7f), + Minor = (UInt32)((packed >> 47) & 0xff), + Revision = (UInt32)((packed >> 31) & 0xffff), + Build = (UInt32)(packed & 0x7fffffff), + }; + } - public readonly Int64 ToVersion64() + public static PackedVersion FromInt32(Int32 packed) + { + return new PackedVersion { - return (Int64)(((Int64)Major & 0x7f) << 55 | - ((Int64)Minor & 0xff) << 47 | - ((Int64)Revision & 0xffff) << 31 | - ((Int64)Build & 0x7fffffff) << 0); - } + Major = (UInt32)((packed >> 28) & 0x0f), + Minor = (UInt32)((packed >> 24) & 0x0f), + Revision = (UInt32)((packed >> 16) & 0xff), + Build = (UInt32)(packed & 0xffff), + }; } - [StructLayout(LayoutKind.Sequential)] - public struct LSMetadata + public readonly Int32 ToVersion32() { - public const uint CurrentMajorVersion = 33; - - public UInt64 Timestamp; - public UInt32 MajorVersion; - public UInt32 MinorVersion; - public UInt32 Revision; - public UInt32 BuildNumber; + return (Int32)((Major & 0x0f) << 28 | + (Minor & 0x0f) << 24 | + (Revision & 0xff) << 16 | + (Build & 0xffff) << 0); } - [StructLayout(LayoutKind.Sequential)] - public struct LSBHeader + public readonly Int64 ToVersion64() { - /// - /// LSB file signature since BG3 - /// - public readonly static byte[] SignatureBG3 = "LSFM"u8.ToArray(); - - /// - /// LSB signature up to FW3 (DOS2 DE) - /// - public const uint SignatureFW3 = 0x40000000; - - public UInt32 Signature; - public UInt32 TotalSize; - public UInt32 BigEndian; - public UInt32 Unknown; - public LSMetadata Metadata; + return (Int64)(((Int64)Major & 0x7f) << 55 | + ((Int64)Minor & 0xff) << 47 | + ((Int64)Revision & 0xffff) << 31 | + ((Int64)Build & 0x7fffffff) << 0); } +} - public static class AttributeTypeMaps - { - public readonly static Dictionary TypeToId = new() - { - { "None", NodeAttribute.DataType.DT_None }, - { "uint8", NodeAttribute.DataType.DT_Byte }, - { "int16", NodeAttribute.DataType.DT_Short }, - { "uint16", NodeAttribute.DataType.DT_UShort }, - { "int32", NodeAttribute.DataType.DT_Int }, - { "uint32", NodeAttribute.DataType.DT_UInt }, - { "float", NodeAttribute.DataType.DT_Float }, - { "double", NodeAttribute.DataType.DT_Double }, - { "ivec2", NodeAttribute.DataType.DT_IVec2 }, - { "ivec3", NodeAttribute.DataType.DT_IVec3 }, - { "ivec4", NodeAttribute.DataType.DT_IVec4 }, - { "fvec2", NodeAttribute.DataType.DT_Vec2 }, - { "fvec3", NodeAttribute.DataType.DT_Vec3 }, - { "fvec4", NodeAttribute.DataType.DT_Vec4 }, - { "mat2x2", NodeAttribute.DataType.DT_Mat2 }, - { "mat3x3", NodeAttribute.DataType.DT_Mat3 }, - { "mat3x4", NodeAttribute.DataType.DT_Mat3x4 }, - { "mat4x3", NodeAttribute.DataType.DT_Mat4x3 }, - { "mat4x4", NodeAttribute.DataType.DT_Mat4 }, - { "bool", NodeAttribute.DataType.DT_Bool }, - { "string", NodeAttribute.DataType.DT_String }, - { "path", NodeAttribute.DataType.DT_Path }, - { "FixedString", NodeAttribute.DataType.DT_FixedString }, - { "LSString", NodeAttribute.DataType.DT_LSString }, - { "uint64", NodeAttribute.DataType.DT_ULongLong }, - { "ScratchBuffer", NodeAttribute.DataType.DT_ScratchBuffer }, - { "old_int64", NodeAttribute.DataType.DT_Long }, - { "int8", NodeAttribute.DataType.DT_Int8 }, - { "TranslatedString", NodeAttribute.DataType.DT_TranslatedString }, - { "WString", NodeAttribute.DataType.DT_WString }, - { 
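
The packed-version layouts above are 7/8/16/31 bits (major/minor/revision/build) for the 64-bit form and 4/4/8/16 bits for the 32-bit form; a worked round-trip makes the shifts and masks easy to verify. The sketch assumes a project reference to LSLib and uses only the PackedVersion members shown in this hunk.

    using System;
    using LSLib.LS; // assumes a project reference to LSLib

    static class PackedVersionSketch
    {
        static void Main()
        {
            // 4.1.2 build 100000, packed into the 7/8/16/31-bit layout of ToVersion64().
            long packed = (4L << 55) | (1L << 47) | (2L << 31) | 100000L;
            var v = PackedVersion.FromInt64(packed);
            Console.WriteLine($"{v.Major}.{v.Minor}.{v.Revision}.{v.Build}"); // 4.1.2.100000

            // Repacking through the struct gives the original value back.
            Console.WriteLine(v.ToVersion64() == packed);                     // True

            // The 32-bit layout only keeps 16 bits of the build number, so it truncates.
            var narrow = PackedVersion.FromInt32(v.ToVersion32());
            Console.WriteLine($"{narrow.Major}.{narrow.Minor}.{narrow.Revision}.{narrow.Build}"); // 4.1.2.34464
        }
    }
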
"LSWString", NodeAttribute.DataType.DT_LSWString }, - { "guid", NodeAttribute.DataType.DT_UUID }, - { "int64", NodeAttribute.DataType.DT_Int64 }, - { "TranslatedFSString", NodeAttribute.DataType.DT_TranslatedFSString }, - }; +[StructLayout(LayoutKind.Sequential)] +public struct LSMetadata +{ + public const uint CurrentMajorVersion = 33; - public readonly static Dictionary IdToType = new() - { - { NodeAttribute.DataType.DT_None, "None" }, - { NodeAttribute.DataType.DT_Byte, "uint8" }, - { NodeAttribute.DataType.DT_Short, "int16" }, - { NodeAttribute.DataType.DT_UShort, "uint16" }, - { NodeAttribute.DataType.DT_Int, "int32" }, - { NodeAttribute.DataType.DT_UInt, "uint32" }, - { NodeAttribute.DataType.DT_Float, "float" }, - { NodeAttribute.DataType.DT_Double, "double" }, - { NodeAttribute.DataType.DT_IVec2, "ivec2" }, - { NodeAttribute.DataType.DT_IVec3, "ivec3" }, - { NodeAttribute.DataType.DT_IVec4, "ivec4" }, - { NodeAttribute.DataType.DT_Vec2, "fvec2" }, - { NodeAttribute.DataType.DT_Vec3, "fvec3" }, - { NodeAttribute.DataType.DT_Vec4, "fvec4" }, - { NodeAttribute.DataType.DT_Mat2, "mat2x2" }, - { NodeAttribute.DataType.DT_Mat3, "mat3x3" }, - { NodeAttribute.DataType.DT_Mat3x4, "mat3x4" }, - { NodeAttribute.DataType.DT_Mat4x3, "mat4x3" }, - { NodeAttribute.DataType.DT_Mat4, "mat4x4" }, - { NodeAttribute.DataType.DT_Bool, "bool" }, - { NodeAttribute.DataType.DT_String, "string" }, - { NodeAttribute.DataType.DT_Path, "path" }, - { NodeAttribute.DataType.DT_FixedString, "FixedString" }, - { NodeAttribute.DataType.DT_LSString, "LSString" }, - { NodeAttribute.DataType.DT_ULongLong, "uint64" }, - { NodeAttribute.DataType.DT_ScratchBuffer, "ScratchBuffer" }, - { NodeAttribute.DataType.DT_Long, "old_int64" }, - { NodeAttribute.DataType.DT_Int8, "int8" }, - { NodeAttribute.DataType.DT_TranslatedString, "TranslatedString" }, - { NodeAttribute.DataType.DT_WString, "WString" }, - { NodeAttribute.DataType.DT_LSWString, "LSWString" }, - { NodeAttribute.DataType.DT_UUID, "guid" }, - { NodeAttribute.DataType.DT_Int64, "int64" }, - { NodeAttribute.DataType.DT_TranslatedFSString, "TranslatedFSString" }, - }; - } + public UInt64 Timestamp; + public UInt32 MajorVersion; + public UInt32 MinorVersion; + public UInt32 Revision; + public UInt32 BuildNumber; +} + +[StructLayout(LayoutKind.Sequential)] +public struct LSBHeader +{ + /// + /// LSB file signature since BG3 + /// + public readonly static byte[] SignatureBG3 = "LSFM"u8.ToArray(); + + /// + /// LSB signature up to FW3 (DOS2 DE) + /// + public const uint SignatureFW3 = 0x40000000; + + public UInt32 Signature; + public UInt32 TotalSize; + public UInt32 BigEndian; + public UInt32 Unknown; + public LSMetadata Metadata; +} - public class Resource +public static class AttributeTypeMaps +{ + public readonly static Dictionary TypeToId = new() + { + { "None", NodeAttribute.DataType.DT_None }, + { "uint8", NodeAttribute.DataType.DT_Byte }, + { "int16", NodeAttribute.DataType.DT_Short }, + { "uint16", NodeAttribute.DataType.DT_UShort }, + { "int32", NodeAttribute.DataType.DT_Int }, + { "uint32", NodeAttribute.DataType.DT_UInt }, + { "float", NodeAttribute.DataType.DT_Float }, + { "double", NodeAttribute.DataType.DT_Double }, + { "ivec2", NodeAttribute.DataType.DT_IVec2 }, + { "ivec3", NodeAttribute.DataType.DT_IVec3 }, + { "ivec4", NodeAttribute.DataType.DT_IVec4 }, + { "fvec2", NodeAttribute.DataType.DT_Vec2 }, + { "fvec3", NodeAttribute.DataType.DT_Vec3 }, + { "fvec4", NodeAttribute.DataType.DT_Vec4 }, + { "mat2x2", NodeAttribute.DataType.DT_Mat2 }, + { 
"mat3x3", NodeAttribute.DataType.DT_Mat3 }, + { "mat3x4", NodeAttribute.DataType.DT_Mat3x4 }, + { "mat4x3", NodeAttribute.DataType.DT_Mat4x3 }, + { "mat4x4", NodeAttribute.DataType.DT_Mat4 }, + { "bool", NodeAttribute.DataType.DT_Bool }, + { "string", NodeAttribute.DataType.DT_String }, + { "path", NodeAttribute.DataType.DT_Path }, + { "FixedString", NodeAttribute.DataType.DT_FixedString }, + { "LSString", NodeAttribute.DataType.DT_LSString }, + { "uint64", NodeAttribute.DataType.DT_ULongLong }, + { "ScratchBuffer", NodeAttribute.DataType.DT_ScratchBuffer }, + { "old_int64", NodeAttribute.DataType.DT_Long }, + { "int8", NodeAttribute.DataType.DT_Int8 }, + { "TranslatedString", NodeAttribute.DataType.DT_TranslatedString }, + { "WString", NodeAttribute.DataType.DT_WString }, + { "LSWString", NodeAttribute.DataType.DT_LSWString }, + { "guid", NodeAttribute.DataType.DT_UUID }, + { "int64", NodeAttribute.DataType.DT_Int64 }, + { "TranslatedFSString", NodeAttribute.DataType.DT_TranslatedFSString }, + }; + + public readonly static Dictionary IdToType = new() { - public LSMetadata Metadata; - public Dictionary Regions = []; + { NodeAttribute.DataType.DT_None, "None" }, + { NodeAttribute.DataType.DT_Byte, "uint8" }, + { NodeAttribute.DataType.DT_Short, "int16" }, + { NodeAttribute.DataType.DT_UShort, "uint16" }, + { NodeAttribute.DataType.DT_Int, "int32" }, + { NodeAttribute.DataType.DT_UInt, "uint32" }, + { NodeAttribute.DataType.DT_Float, "float" }, + { NodeAttribute.DataType.DT_Double, "double" }, + { NodeAttribute.DataType.DT_IVec2, "ivec2" }, + { NodeAttribute.DataType.DT_IVec3, "ivec3" }, + { NodeAttribute.DataType.DT_IVec4, "ivec4" }, + { NodeAttribute.DataType.DT_Vec2, "fvec2" }, + { NodeAttribute.DataType.DT_Vec3, "fvec3" }, + { NodeAttribute.DataType.DT_Vec4, "fvec4" }, + { NodeAttribute.DataType.DT_Mat2, "mat2x2" }, + { NodeAttribute.DataType.DT_Mat3, "mat3x3" }, + { NodeAttribute.DataType.DT_Mat3x4, "mat3x4" }, + { NodeAttribute.DataType.DT_Mat4x3, "mat4x3" }, + { NodeAttribute.DataType.DT_Mat4, "mat4x4" }, + { NodeAttribute.DataType.DT_Bool, "bool" }, + { NodeAttribute.DataType.DT_String, "string" }, + { NodeAttribute.DataType.DT_Path, "path" }, + { NodeAttribute.DataType.DT_FixedString, "FixedString" }, + { NodeAttribute.DataType.DT_LSString, "LSString" }, + { NodeAttribute.DataType.DT_ULongLong, "uint64" }, + { NodeAttribute.DataType.DT_ScratchBuffer, "ScratchBuffer" }, + { NodeAttribute.DataType.DT_Long, "old_int64" }, + { NodeAttribute.DataType.DT_Int8, "int8" }, + { NodeAttribute.DataType.DT_TranslatedString, "TranslatedString" }, + { NodeAttribute.DataType.DT_WString, "WString" }, + { NodeAttribute.DataType.DT_LSWString, "LSWString" }, + { NodeAttribute.DataType.DT_UUID, "guid" }, + { NodeAttribute.DataType.DT_Int64, "int64" }, + { NodeAttribute.DataType.DT_TranslatedFSString, "TranslatedFSString" }, + }; +} - public Resource() - { - Metadata.MajorVersion = 3; - } - } +public class Resource +{ + public LSMetadata Metadata; + public Dictionary Regions = []; - public class Region : Node + public Resource() { - public string RegionName; + Metadata.MajorVersion = 3; } +} - public class Node - { - public string Name; - public Node Parent; - public Dictionary Attributes = []; - public Dictionary> Children = []; +public class Region : Node +{ + public string RegionName; +} - public int ChildCount +public class Node +{ + public string Name; + public Node Parent; + public Dictionary Attributes = []; + public Dictionary> Children = []; + + public int ChildCount + { + get { - get - { - 
return - (from c in Children - select c.Value.Count).Sum(); - } + return + (from c in Children + select c.Value.Count).Sum(); } + } - public int TotalChildCount() + public int TotalChildCount() + { + int count = 0; + foreach (var key in Children) { - int count = 0; - foreach (var key in Children) + foreach (var child in key.Value) { - foreach (var child in key.Value) - { - count += 1 + child.TotalChildCount(); - } + count += 1 + child.TotalChildCount(); } - - return count; } - public void AppendChild(Node child) - { - if (!Children.TryGetValue(child.Name, out List children)) - { - children = []; - Children.Add(child.Name, children); - } + return count; + } - children.Add(child); + public void AppendChild(Node child) + { + if (!Children.TryGetValue(child.Name, out List children)) + { + children = []; + Children.Add(child.Name, children); } + + children.Add(child); } } diff --git a/LSLib/LS/ResourceUtils.cs b/LSLib/LS/ResourceUtils.cs index 1c51b52e..8ebf02c4 100644 --- a/LSLib/LS/ResourceUtils.cs +++ b/LSLib/LS/ResourceUtils.cs @@ -3,275 +3,274 @@ using System.IO; using LSLib.LS.Enums; -namespace LSLib.LS +namespace LSLib.LS; + +public class ResourceLoadParameters { - public class ResourceLoadParameters + /// + /// Byte-swap the last 8 bytes of GUIDs when serializing to/from string + /// + public bool ByteSwapGuids = true; + + public static ResourceLoadParameters FromGameVersion(Game game) { - /// - /// Byte-swap the last 8 bytes of GUIDs when serializing to/from string - /// - public bool ByteSwapGuids = true; + var p = new ResourceLoadParameters(); + // No game-specific settings yet + return p; + } - public static ResourceLoadParameters FromGameVersion(Game game) - { - var p = new ResourceLoadParameters(); - // No game-specific settings yet - return p; - } + public void ToSerializationSettings(NodeSerializationSettings settings) + { + settings.DefaultByteSwapGuids = ByteSwapGuids; + } +} - public void ToSerializationSettings(NodeSerializationSettings settings) +public class ResourceConversionParameters +{ + /// + /// Format of generated PAK files + /// + public PackageVersion PAKVersion; + + /// + /// Format of generated LSF files + /// + public LSFVersion LSF = LSFVersion.MaxWriteVersion; + + /// + /// Store sibling/neighbour node data in LSF files (usually done by savegames only) + /// + public bool LSFEncodeSiblingData = false; + + /// + /// Format of generated LSX files + /// + public LSXVersion LSX = LSXVersion.V4; + + /// + /// Pretty-print (format) LSX/LSJ files + /// + public bool PrettyPrint = true; + + /// + /// LSF/LSB compression method + /// + public CompressionMethod Compression = CompressionMethod.LZ4; + + /// + /// LSF/LSB compression level (i.e. 
size/compression time tradeoff) + /// + public LSCompressionLevel LSCompressionLevel = LSCompressionLevel.DefaultCompression; + + /// + /// Byte-swap the last 8 bytes of GUIDs when serializing to/from string + /// + public bool ByteSwapGuids = true; + + public static ResourceConversionParameters FromGameVersion(Game game) + { + return new ResourceConversionParameters { - settings.DefaultByteSwapGuids = ByteSwapGuids; - } + PAKVersion = game.PAKVersion(), + LSF = game.LSFVersion(), + LSX = game.LSXVersion() + }; } - public class ResourceConversionParameters + public void ToSerializationSettings(NodeSerializationSettings settings) { - /// - /// Format of generated PAK files - /// - public PackageVersion PAKVersion; - - /// - /// Format of generated LSF files - /// - public LSFVersion LSF = LSFVersion.MaxWriteVersion; - - /// - /// Store sibling/neighbour node data in LSF files (usually done by savegames only) - /// - public bool LSFEncodeSiblingData = false; - - /// - /// Format of generated LSX files - /// - public LSXVersion LSX = LSXVersion.V4; - - /// - /// Pretty-print (format) LSX/LSJ files - /// - public bool PrettyPrint = true; - - /// - /// LSF/LSB compression method - /// - public CompressionMethod Compression = CompressionMethod.LZ4; - - /// - /// LSF/LSB compression level (i.e. size/compression time tradeoff) - /// - public LSCompressionLevel LSCompressionLevel = LSCompressionLevel.DefaultCompression; - - /// - /// Byte-swap the last 8 bytes of GUIDs when serializing to/from string - /// - public bool ByteSwapGuids = true; - - public static ResourceConversionParameters FromGameVersion(Game game) - { - return new ResourceConversionParameters - { - PAKVersion = game.PAKVersion(), - LSF = game.LSFVersion(), - LSX = game.LSXVersion() - }; - } + settings.DefaultByteSwapGuids = ByteSwapGuids; + } +} - public void ToSerializationSettings(NodeSerializationSettings settings) +public class ResourceUtils +{ + public delegate void ProgressUpdateDelegate(string status, long numerator, long denominator); + public ProgressUpdateDelegate progressUpdate = delegate { }; + + public delegate void ErrorDelegate(string path, Exception e); + public ErrorDelegate errorDelegate = delegate { }; + + public static ResourceFormat ExtensionToResourceFormat(string path) + { + var extension = Path.GetExtension(path).ToLower(); + + return extension switch { - settings.DefaultByteSwapGuids = ByteSwapGuids; - } + ".lsx" => ResourceFormat.LSX, + ".lsb" => ResourceFormat.LSB, + ".lsf" or ".lsfx" or ".lsbc" or ".lsbs" => ResourceFormat.LSF, + ".lsj" => ResourceFormat.LSJ, + _ => throw new ArgumentException("Unrecognized file extension: " + extension), + }; + } + + public static Resource LoadResource(string inputPath, ResourceLoadParameters loadParams) + { + return LoadResource(inputPath, ExtensionToResourceFormat(inputPath), loadParams); } - public class ResourceUtils + public static Resource LoadResource(string inputPath, ResourceFormat format, ResourceLoadParameters loadParams) { - public delegate void ProgressUpdateDelegate(string status, long numerator, long denominator); - public ProgressUpdateDelegate progressUpdate = delegate { }; - - public delegate void ErrorDelegate(string path, Exception e); - public ErrorDelegate errorDelegate = delegate { }; + using var stream = File.Open(inputPath, FileMode.Open, FileAccess.Read, FileShare.Read); + return LoadResource(stream, format, loadParams); + } - public static ResourceFormat ExtensionToResourceFormat(string path) + public static Resource LoadResource(Stream 
stream, ResourceFormat format, ResourceLoadParameters loadParams) + { + switch (format) { - var extension = Path.GetExtension(path).ToLower(); + case ResourceFormat.LSX: + { + using var reader = new LSXReader(stream); + loadParams.ToSerializationSettings(reader.SerializationSettings); + return reader.Read(); + } - return extension switch - { - ".lsx" => ResourceFormat.LSX, - ".lsb" => ResourceFormat.LSB, - ".lsf" or ".lsfx" or ".lsbc" or ".lsbs" => ResourceFormat.LSF, - ".lsj" => ResourceFormat.LSJ, - _ => throw new ArgumentException("Unrecognized file extension: " + extension), - }; - } + case ResourceFormat.LSB: + { + using var reader = new LSBReader(stream); + return reader.Read(); + } - public static Resource LoadResource(string inputPath, ResourceLoadParameters loadParams) - { - return LoadResource(inputPath, ExtensionToResourceFormat(inputPath), loadParams); - } + case ResourceFormat.LSF: + { + using var reader = new LSFReader(stream); + return reader.Read(); + } - public static Resource LoadResource(string inputPath, ResourceFormat format, ResourceLoadParameters loadParams) - { - using var stream = File.Open(inputPath, FileMode.Open, FileAccess.Read, FileShare.Read); - return LoadResource(stream, format, loadParams); + case ResourceFormat.LSJ: + { + using var reader = new LSJReader(stream); + loadParams.ToSerializationSettings(reader.SerializationSettings); + return reader.Read(); + } + + default: + throw new ArgumentException("Invalid resource format"); } + } + + public static void SaveResource(Resource resource, string outputPath, ResourceConversionParameters conversionParams) + { + SaveResource(resource, outputPath, ExtensionToResourceFormat(outputPath), conversionParams); + } - public static Resource LoadResource(Stream stream, ResourceFormat format, ResourceLoadParameters loadParams) + public static void SaveResource(Resource resource, string outputPath, ResourceFormat format, ResourceConversionParameters conversionParams) + { + FileManager.TryToCreateDirectory(outputPath); + + using var file = File.Open(outputPath, FileMode.Create, FileAccess.Write); + switch (format) { - switch (format) - { - case ResourceFormat.LSX: + case ResourceFormat.LSX: + { + var writer = new LSXWriter(file) { - using var reader = new LSXReader(stream); - loadParams.ToSerializationSettings(reader.SerializationSettings); - return reader.Read(); - } + Version = conversionParams.LSX, + PrettyPrint = conversionParams.PrettyPrint + }; + conversionParams.ToSerializationSettings(writer.SerializationSettings); + writer.Write(resource); + break; + } - case ResourceFormat.LSB: - { - using var reader = new LSBReader(stream); - return reader.Read(); - } + case ResourceFormat.LSB: + { + var writer = new LSBWriter(file); + writer.Write(resource); + break; + } - case ResourceFormat.LSF: + case ResourceFormat.LSF: + { + var writer = new LSFWriter(file) { - using var reader = new LSFReader(stream); - return reader.Read(); - } + Version = conversionParams.LSF, + EncodeSiblingData = conversionParams.LSFEncodeSiblingData, + Compression = conversionParams.Compression, + LSCompressionLevel = conversionParams.LSCompressionLevel + }; + writer.Write(resource); + break; + } - case ResourceFormat.LSJ: + case ResourceFormat.LSJ: + { + var writer = new LSJWriter(file) { - using var reader = new LSJReader(stream); - loadParams.ToSerializationSettings(reader.SerializationSettings); - return reader.Read(); - } + PrettyPrint = conversionParams.PrettyPrint + }; + conversionParams.ToSerializationSettings(writer.SerializationSettings); + 
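
As a usage note for the LoadResource/SaveResource pair above, a one-off format conversion only takes a few lines. The paths and the Game enum member below are chosen for illustration, and a project reference to LSLib is assumed.

    using LSLib.LS;
    using LSLib.LS.Enums; // assumes a project reference to LSLib

    static class ConvertOneFileSketch
    {
        static void Main()
        {
            // Illustrative paths; the source and target formats are inferred from the
            // file extensions via ExtensionToResourceFormat().
            const string input = @"C:\temp\meta.lsx";
            const string output = @"C:\temp\meta.lsf";

            // Game.BaldursGate3 is used here purely for illustration.
            var loadParams = ResourceLoadParameters.FromGameVersion(Game.BaldursGate3);
            var conversionParams = ResourceConversionParameters.FromGameVersion(Game.BaldursGate3);

            Resource resource = ResourceUtils.LoadResource(input, loadParams);
            ResourceUtils.SaveResource(resource, output, conversionParams);
        }
    }
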
writer.Write(resource); + break; + } - default: - throw new ArgumentException("Invalid resource format"); - } + default: + throw new ArgumentException("Invalid resource format"); } + } - public static void SaveResource(Resource resource, string outputPath, ResourceConversionParameters conversionParams) + private bool IsA(string path, ResourceFormat format) + { + var extension = Path.GetExtension(path).ToLower(); + return format switch { - SaveResource(resource, outputPath, ExtensionToResourceFormat(outputPath), conversionParams); - } + ResourceFormat.LSX => extension == ".lsx", + ResourceFormat.LSB => extension == ".lsb", + ResourceFormat.LSF => extension == ".lsf" || extension == ".lsbc" || extension == ".lsfx", + ResourceFormat.LSJ => extension == ".lsj", + _ => false, + }; + } - public static void SaveResource(Resource resource, string outputPath, ResourceFormat format, ResourceConversionParameters conversionParams) + private void EnumerateFiles(List paths, string rootPath, string currentPath, ResourceFormat format) + { + foreach (string filePath in Directory.GetFiles(currentPath)) { - FileManager.TryToCreateDirectory(outputPath); - - using var file = File.Open(outputPath, FileMode.Create, FileAccess.Write); - switch (format) + if (IsA(filePath, format)) { - case ResourceFormat.LSX: - { - var writer = new LSXWriter(file) - { - Version = conversionParams.LSX, - PrettyPrint = conversionParams.PrettyPrint - }; - conversionParams.ToSerializationSettings(writer.SerializationSettings); - writer.Write(resource); - break; - } - - case ResourceFormat.LSB: - { - var writer = new LSBWriter(file); - writer.Write(resource); - break; - } + var relativePath = filePath[rootPath.Length..]; + if (relativePath[0] == '/' || relativePath[0] == '\\') + { + relativePath = relativePath[1..]; + } - case ResourceFormat.LSF: - { - var writer = new LSFWriter(file) - { - Version = conversionParams.LSF, - EncodeSiblingData = conversionParams.LSFEncodeSiblingData, - Compression = conversionParams.Compression, - LSCompressionLevel = conversionParams.LSCompressionLevel - }; - writer.Write(resource); - break; - } - - case ResourceFormat.LSJ: - { - var writer = new LSJWriter(file) - { - PrettyPrint = conversionParams.PrettyPrint - }; - conversionParams.ToSerializationSettings(writer.SerializationSettings); - writer.Write(resource); - break; - } - - default: - throw new ArgumentException("Invalid resource format"); + paths.Add(relativePath); } } - private bool IsA(string path, ResourceFormat format) + foreach (string directoryPath in Directory.GetDirectories(currentPath)) { - var extension = Path.GetExtension(path).ToLower(); - return format switch - { - ResourceFormat.LSX => extension == ".lsx", - ResourceFormat.LSB => extension == ".lsb", - ResourceFormat.LSF => extension == ".lsf" || extension == ".lsbc" || extension == ".lsfx", - ResourceFormat.LSJ => extension == ".lsj", - _ => false, - }; + EnumerateFiles(paths, rootPath, directoryPath, format); } + } - private void EnumerateFiles(List paths, string rootPath, string currentPath, ResourceFormat format) + public void ConvertResources(string inputDir, string outputDir, ResourceFormat inputFormat, ResourceFormat outputFormat, + ResourceLoadParameters loadParams, ResourceConversionParameters conversionParams) + { + this.progressUpdate("Enumerating files ...", 0, 1); + var paths = new List(); + EnumerateFiles(paths, inputDir, inputDir, inputFormat); + + this.progressUpdate("Converting resources ...", 0, 1); + for (var i = 0; i < paths.Count; i++) { - foreach (string 
filePath in Directory.GetFiles(currentPath)) - { - if (IsA(filePath, format)) - { - var relativePath = filePath[rootPath.Length..]; - if (relativePath[0] == '/' || relativePath[0] == '\\') - { - relativePath = relativePath[1..]; - } + var path = paths[i]; + var inPath = Path.Join(inputDir, path); + var outPath = Path.Join(outputDir, Path.ChangeExtension(path, outputFormat.ToString().ToLower())); - paths.Add(relativePath); - } - } + FileManager.TryToCreateDirectory(outPath); - foreach (string directoryPath in Directory.GetDirectories(currentPath)) + this.progressUpdate("Converting: " + inPath, i, paths.Count); + try { - EnumerateFiles(paths, rootPath, directoryPath, format); + var resource = LoadResource(inPath, inputFormat, loadParams); + SaveResource(resource, outPath, outputFormat, conversionParams); } - } - - public void ConvertResources(string inputDir, string outputDir, ResourceFormat inputFormat, ResourceFormat outputFormat, - ResourceLoadParameters loadParams, ResourceConversionParameters conversionParams) - { - this.progressUpdate("Enumerating files ...", 0, 1); - var paths = new List(); - EnumerateFiles(paths, inputDir, inputDir, inputFormat); - - this.progressUpdate("Converting resources ...", 0, 1); - for (var i = 0; i < paths.Count; i++) + catch (Exception ex) { - var path = paths[i]; - var inPath = Path.Join(inputDir, path); - var outPath = Path.Join(outputDir, Path.ChangeExtension(path, outputFormat.ToString().ToLower())); - - FileManager.TryToCreateDirectory(outPath); - - this.progressUpdate("Converting: " + inPath, i, paths.Count); - try - { - var resource = LoadResource(inPath, inputFormat, loadParams); - SaveResource(resource, outPath, outputFormat, conversionParams); - } - catch (Exception ex) - { - errorDelegate(inPath, ex); - } + errorDelegate(inPath, ex); } } } diff --git a/LSLib/LS/Resources/LSB/LSBReader.cs b/LSLib/LS/Resources/LSB/LSBReader.cs index 9df8ab78..41965b7a 100644 --- a/LSLib/LS/Resources/LSB/LSBReader.cs +++ b/LSLib/LS/Resources/LSB/LSBReader.cs @@ -2,222 +2,221 @@ using System.Collections.Generic; using System.IO; -namespace LSLib.LS +namespace LSLib.LS; + +public class LSBReader(Stream stream) : IDisposable { - public class LSBReader(Stream stream) : IDisposable - { - private BinaryReader reader; - private Dictionary staticStrings = []; - private bool IsBG3; + private BinaryReader reader; + private Dictionary staticStrings = []; + private bool IsBG3; - public void Dispose() - { - stream.Dispose(); - } + public void Dispose() + { + stream.Dispose(); + } - public Resource Read() + public Resource Read() + { + using (this.reader = new BinaryReader(stream)) { - using (this.reader = new BinaryReader(stream)) - { - // Check for BG3 header - var header = BinUtils.ReadStruct(reader); - if (header.Signature != BitConverter.ToUInt32(LSBHeader.SignatureBG3, 0) && header.Signature != LSBHeader.SignatureFW3) - throw new InvalidFormatException(String.Format("Illegal signature in LSB header ({0})", header.Signature)); + // Check for BG3 header + var header = BinUtils.ReadStruct(reader); + if (header.Signature != BitConverter.ToUInt32(LSBHeader.SignatureBG3, 0) && header.Signature != LSBHeader.SignatureFW3) + throw new InvalidFormatException(String.Format("Illegal signature in LSB header ({0})", header.Signature)); - if (stream.Length != header.TotalSize) - throw new InvalidFormatException(String.Format("Invalid LSB file size; expected {0}, got {1}", header.TotalSize, stream.Length)); + if (stream.Length != header.TotalSize) + throw new 
InvalidFormatException(String.Format("Invalid LSB file size; expected {0}, got {1}", header.TotalSize, stream.Length)); - // The game only uses little-endian files on all platforms currently and big-endian support isn't worth the hassle - if (header.BigEndian != 0) - throw new InvalidFormatException("Big-endian LSB files are not supported"); + // The game only uses little-endian files on all platforms currently and big-endian support isn't worth the hassle + if (header.BigEndian != 0) + throw new InvalidFormatException("Big-endian LSB files are not supported"); - IsBG3 = (header.Signature == BitConverter.ToUInt32(LSBHeader.SignatureBG3, 0)); - ReadStaticStrings(); + IsBG3 = (header.Signature == BitConverter.ToUInt32(LSBHeader.SignatureBG3, 0)); + ReadStaticStrings(); - Resource rsrc = new Resource - { - Metadata = header.Metadata - }; - ReadRegions(rsrc); - return rsrc; - } + Resource rsrc = new Resource + { + Metadata = header.Metadata + }; + ReadRegions(rsrc); + return rsrc; } + } - private void ReadRegions(Resource rsrc) + private void ReadRegions(Resource rsrc) + { + UInt32 regions = reader.ReadUInt32(); + for (UInt32 i = 0; i < regions; i++) { - UInt32 regions = reader.ReadUInt32(); - for (UInt32 i = 0; i < regions; i++) - { - UInt32 regionNameId = reader.ReadUInt32(); - UInt32 regionOffset = reader.ReadUInt32(); + UInt32 regionNameId = reader.ReadUInt32(); + UInt32 regionOffset = reader.ReadUInt32(); - Region rgn = new Region - { - RegionName = staticStrings[regionNameId] - }; - var lastRegionPos = stream.Position; - - stream.Seek(regionOffset, SeekOrigin.Begin); - ReadNode(rgn); - rsrc.Regions[rgn.RegionName] = rgn; - stream.Seek(lastRegionPos, SeekOrigin.Begin); - } + Region rgn = new Region + { + RegionName = staticStrings[regionNameId] + }; + var lastRegionPos = stream.Position; + + stream.Seek(regionOffset, SeekOrigin.Begin); + ReadNode(rgn); + rsrc.Regions[rgn.RegionName] = rgn; + stream.Seek(lastRegionPos, SeekOrigin.Begin); } + } - private void ReadNode(Node node) - { - UInt32 nodeNameId = reader.ReadUInt32(); - UInt32 attributeCount = reader.ReadUInt32(); - UInt32 childCount = reader.ReadUInt32(); - node.Name = staticStrings[nodeNameId]; + private void ReadNode(Node node) + { + UInt32 nodeNameId = reader.ReadUInt32(); + UInt32 attributeCount = reader.ReadUInt32(); + UInt32 childCount = reader.ReadUInt32(); + node.Name = staticStrings[nodeNameId]; - for (UInt32 i = 0; i < attributeCount; i++) - { - UInt32 attrNameId = reader.ReadUInt32(); - UInt32 attrTypeId = reader.ReadUInt32(); - if (attrTypeId > (int)NodeAttribute.DataType.DT_Max) - throw new InvalidFormatException(String.Format("Unsupported attribute data type: {0}", attrTypeId)); + for (UInt32 i = 0; i < attributeCount; i++) + { + UInt32 attrNameId = reader.ReadUInt32(); + UInt32 attrTypeId = reader.ReadUInt32(); + if (attrTypeId > (int)NodeAttribute.DataType.DT_Max) + throw new InvalidFormatException(String.Format("Unsupported attribute data type: {0}", attrTypeId)); - node.Attributes[staticStrings[attrNameId]] = ReadAttribute((NodeAttribute.DataType)attrTypeId); - } + node.Attributes[staticStrings[attrNameId]] = ReadAttribute((NodeAttribute.DataType)attrTypeId); + } - for (UInt32 i = 0; i < childCount; i++) + for (UInt32 i = 0; i < childCount; i++) + { + Node child = new Node { - Node child = new Node - { - Parent = node - }; - ReadNode(child); - node.AppendChild(child); - } + Parent = node + }; + ReadNode(child); + node.AppendChild(child); } + } - private NodeAttribute ReadAttribute(NodeAttribute.DataType type) + 
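
A short example of driving LSBReader and walking the resulting Resource tree (Regions, Nodes and their attributes, as defined earlier in this patch); the input path is illustrative and a project reference to LSLib is assumed.

    using System;
    using System.IO;
    using LSLib.LS; // assumes a project reference to LSLib

    static class LsbDumpSketch
    {
        static void Main()
        {
            // Illustrative path to any LSB file.
            using var reader = new LSBReader(File.Open(@"C:\temp\example.lsb",
                FileMode.Open, FileAccess.Read, FileShare.Read));
            Resource resource = reader.Read();

            foreach (var region in resource.Regions)
            {
                Console.WriteLine($"{region.Key}: {region.Value.Attributes.Count} attributes, " +
                                  $"{region.Value.TotalChildCount()} descendant nodes");
            }
        }
    }
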
private NodeAttribute ReadAttribute(NodeAttribute.DataType type) + { + switch (type) { - switch (type) - { - case NodeAttribute.DataType.DT_String: - case NodeAttribute.DataType.DT_Path: - case NodeAttribute.DataType.DT_FixedString: - case NodeAttribute.DataType.DT_LSString: + case NodeAttribute.DataType.DT_String: + case NodeAttribute.DataType.DT_Path: + case NodeAttribute.DataType.DT_FixedString: + case NodeAttribute.DataType.DT_LSString: + { + var attr = new NodeAttribute(type) { - var attr = new NodeAttribute(type) - { - Value = ReadString(true) - }; - return attr; - } + Value = ReadString(true) + }; + return attr; + } - case NodeAttribute.DataType.DT_WString: - case NodeAttribute.DataType.DT_LSWString: + case NodeAttribute.DataType.DT_WString: + case NodeAttribute.DataType.DT_LSWString: + { + var attr = new NodeAttribute(type) { - var attr = new NodeAttribute(type) - { - Value = ReadWideString(true) - }; - return attr; - } + Value = ReadWideString(true) + }; + return attr; + } + + case NodeAttribute.DataType.DT_TranslatedString: + { + var attr = new NodeAttribute(type); + var str = new TranslatedString(); - case NodeAttribute.DataType.DT_TranslatedString: + if (IsBG3) { - var attr = new NodeAttribute(type); - var str = new TranslatedString(); + str.Version = reader.ReadUInt16(); - if (IsBG3) + // Sometimes BG3 string keys still contain the value? + // Weird heuristic to find these cases + var test = reader.ReadUInt16(); + if (test == 0) { - str.Version = reader.ReadUInt16(); - - // Sometimes BG3 string keys still contain the value? - // Weird heuristic to find these cases - var test = reader.ReadUInt16(); - if (test == 0) - { - stream.Seek(-4, SeekOrigin.Current); - str.Version = 0; - str.Value = ReadString(true); - } - else - { - stream.Seek(-2, SeekOrigin.Current); - str.Value = null; - } + stream.Seek(-4, SeekOrigin.Current); + str.Version = 0; + str.Value = ReadString(true); } else { - str.Version = 0; - str.Value = ReadString(true); + stream.Seek(-2, SeekOrigin.Current); + str.Value = null; } - - str.Handle = ReadString(true); - attr.Value = str; - return attr; } - - case NodeAttribute.DataType.DT_ScratchBuffer: + else { - var attr = new NodeAttribute(type); - var bufferLength = reader.ReadInt32(); - attr.Value = reader.ReadBytes(bufferLength); - return attr; + str.Version = 0; + str.Value = ReadString(true); } - // DT_TranslatedFSString not supported in LSB - default: - return BinUtils.ReadAttribute(type, reader); - } + str.Handle = ReadString(true); + attr.Value = str; + return attr; + } + + case NodeAttribute.DataType.DT_ScratchBuffer: + { + var attr = new NodeAttribute(type); + var bufferLength = reader.ReadInt32(); + attr.Value = reader.ReadBytes(bufferLength); + return attr; + } + + // DT_TranslatedFSString not supported in LSB + default: + return BinUtils.ReadAttribute(type, reader); } + } - private void ReadStaticStrings() + private void ReadStaticStrings() + { + UInt32 strings = reader.ReadUInt32(); + for (UInt32 i = 0; i < strings; i++) { - UInt32 strings = reader.ReadUInt32(); - for (UInt32 i = 0; i < strings; i++) - { - string s = ReadString(false); - UInt32 index = reader.ReadUInt32(); - if (staticStrings.ContainsKey(index)) - throw new InvalidFormatException(String.Format("String ID {0} duplicated in static string map", index)); - staticStrings.Add(index, s); - } + string s = ReadString(false); + UInt32 index = reader.ReadUInt32(); + if (staticStrings.ContainsKey(index)) + throw new InvalidFormatException(String.Format("String ID {0} duplicated in static string 
map", index)); + staticStrings.Add(index, s); } + } - private string ReadString(bool nullTerminated) + private string ReadString(bool nullTerminated) + { + int length = reader.ReadInt32() - (nullTerminated ? 1 : 0); + byte[] bytes = reader.ReadBytes(length); + + // Remove stray null bytes at the end of the string + // Some LSB files seem to save translated string keys incurrectly, and append two NULL bytes + // (or one null byte and another stray byte) to the end of the value. + bool hasBogusNullBytes = false; + while (length > 0 && bytes[length - 1] == 0) { - int length = reader.ReadInt32() - (nullTerminated ? 1 : 0); - byte[] bytes = reader.ReadBytes(length); - - // Remove stray null bytes at the end of the string - // Some LSB files seem to save translated string keys incurrectly, and append two NULL bytes - // (or one null byte and another stray byte) to the end of the value. - bool hasBogusNullBytes = false; - while (length > 0 && bytes[length - 1] == 0) - { - length--; - hasBogusNullBytes = true; - } - - string str = System.Text.Encoding.UTF8.GetString(bytes, 0, length); + length--; + hasBogusNullBytes = true; + } - if (nullTerminated) - { - if (reader.ReadByte() != 0 && !hasBogusNullBytes) - throw new InvalidFormatException("Illegal null terminated string"); - } + string str = System.Text.Encoding.UTF8.GetString(bytes, 0, length); - return str; + if (nullTerminated) + { + if (reader.ReadByte() != 0 && !hasBogusNullBytes) + throw new InvalidFormatException("Illegal null terminated string"); } - private string ReadWideString(bool nullTerminated) - { - int length = reader.ReadInt32() - (nullTerminated ? 1 : 0); - byte[] bytes = reader.ReadBytes(length * 2); - string str = System.Text.Encoding.Unicode.GetString(bytes); - if (nullTerminated) - { - if (reader.ReadUInt16() != 0) - throw new InvalidFormatException("Illegal null terminated widestring"); - } + return str; + } - return str; + private string ReadWideString(bool nullTerminated) + { + int length = reader.ReadInt32() - (nullTerminated ? 
1 : 0); + byte[] bytes = reader.ReadBytes(length * 2); + string str = System.Text.Encoding.Unicode.GetString(bytes); + if (nullTerminated) + { + if (reader.ReadUInt16() != 0) + throw new InvalidFormatException("Illegal null terminated widestring"); } + + return str; } } diff --git a/LSLib/LS/Resources/LSB/LSBWriter.cs b/LSLib/LS/Resources/LSB/LSBWriter.cs index c45e7903..84254992 100644 --- a/LSLib/LS/Resources/LSB/LSBWriter.cs +++ b/LSLib/LS/Resources/LSB/LSBWriter.cs @@ -2,207 +2,206 @@ using System.Collections.Generic; using System.IO; -namespace LSLib.LS +namespace LSLib.LS; + +public class LSBWriter(Stream stream) { - public class LSBWriter(Stream stream) - { - private BinaryWriter writer; - private Dictionary staticStrings = []; - private UInt32 nextStaticStringId = 0; - private UInt32 Version; + private BinaryWriter writer; + private Dictionary staticStrings = []; + private UInt32 nextStaticStringId = 0; + private UInt32 Version; - public void Write(Resource rsrc) + public void Write(Resource rsrc) + { + Version = rsrc.Metadata.MajorVersion; + using (this.writer = new BinaryWriter(stream)) { - Version = rsrc.Metadata.MajorVersion; - using (this.writer = new BinaryWriter(stream)) + var header = new LSBHeader { - var header = new LSBHeader - { - TotalSize = 0, // Total size of file, will be updater after we finished serializing - BigEndian = 0, // Little-endian format - Unknown = 0, // Unknown - Metadata = rsrc.Metadata - }; + TotalSize = 0, // Total size of file, will be updater after we finished serializing + BigEndian = 0, // Little-endian format + Unknown = 0, // Unknown + Metadata = rsrc.Metadata + }; - if (rsrc.Metadata.MajorVersion >= 4) - { - header.Signature = BitConverter.ToUInt32(LSBHeader.SignatureBG3, 0); - } - else - { - header.Signature = LSBHeader.SignatureFW3; - } + if (rsrc.Metadata.MajorVersion >= 4) + { + header.Signature = BitConverter.ToUInt32(LSBHeader.SignatureBG3, 0); + } + else + { + header.Signature = LSBHeader.SignatureFW3; + } - BinUtils.WriteStruct(writer, ref header); + BinUtils.WriteStruct(writer, ref header); - CollectStaticStrings(rsrc); - WriteStaticStrings(); + CollectStaticStrings(rsrc); + WriteStaticStrings(); - WriteRegions(rsrc); + WriteRegions(rsrc); - header.TotalSize = (UInt32)stream.Position; - stream.Seek(0, SeekOrigin.Begin); - BinUtils.WriteStruct(writer, ref header); - } + header.TotalSize = (UInt32)stream.Position; + stream.Seek(0, SeekOrigin.Begin); + BinUtils.WriteStruct(writer, ref header); } + } - private void WriteRegions(Resource rsrc) + private void WriteRegions(Resource rsrc) + { + writer.Write((UInt32)rsrc.Regions.Count); + var regionMapOffset = stream.Position; + foreach (var rgn in rsrc.Regions) { - writer.Write((UInt32)rsrc.Regions.Count); - var regionMapOffset = stream.Position; - foreach (var rgn in rsrc.Regions) - { - writer.Write(staticStrings[rgn.Key]); - writer.Write((UInt32)0); // Offset of region, will be updater after we finished serializing - } - - List regionPositions = []; - foreach (var rgn in rsrc.Regions) - { - regionPositions.Add((UInt32)stream.Position); - WriteNode(rgn.Value); - } - - var endOffset = stream.Position; - stream.Seek(regionMapOffset, SeekOrigin.Begin); - foreach (var position in regionPositions) - { - stream.Seek(4, SeekOrigin.Current); - writer.Write(position); - } + writer.Write(staticStrings[rgn.Key]); + writer.Write((UInt32)0); // Offset of region, will be updater after we finished serializing + } - stream.Seek(endOffset, SeekOrigin.Begin); + List regionPositions = []; + foreach (var rgn 
in rsrc.Regions) + { + regionPositions.Add((UInt32)stream.Position); + WriteNode(rgn.Value); } - private void WriteNode(Node node) + var endOffset = stream.Position; + stream.Seek(regionMapOffset, SeekOrigin.Begin); + foreach (var position in regionPositions) { - writer.Write(staticStrings[node.Name]); - writer.Write((UInt32)node.Attributes.Count); - writer.Write((UInt32)node.ChildCount); + stream.Seek(4, SeekOrigin.Current); + writer.Write(position); + } - foreach (var attribute in node.Attributes) - { - writer.Write(staticStrings[attribute.Key]); - writer.Write((UInt32)attribute.Value.Type); - WriteAttribute(attribute.Value); - } + stream.Seek(endOffset, SeekOrigin.Begin); + } - foreach (var children in node.Children) - { - foreach (var child in children.Value) - WriteNode(child); - } - } + private void WriteNode(Node node) + { + writer.Write(staticStrings[node.Name]); + writer.Write((UInt32)node.Attributes.Count); + writer.Write((UInt32)node.ChildCount); - private void WriteAttribute(NodeAttribute attr) + foreach (var attribute in node.Attributes) { - switch (attr.Type) - { - case NodeAttribute.DataType.DT_String: - case NodeAttribute.DataType.DT_Path: - case NodeAttribute.DataType.DT_FixedString: - case NodeAttribute.DataType.DT_LSString: - WriteString((string)attr.Value, true); - break; + writer.Write(staticStrings[attribute.Key]); + writer.Write((UInt32)attribute.Value.Type); + WriteAttribute(attribute.Value); + } - case NodeAttribute.DataType.DT_WString: - case NodeAttribute.DataType.DT_LSWString: - WriteWideString((string)attr.Value, true); - break; + foreach (var children in node.Children) + { + foreach (var child in children.Value) + WriteNode(child); + } + } - case NodeAttribute.DataType.DT_TranslatedString: + private void WriteAttribute(NodeAttribute attr) + { + switch (attr.Type) + { + case NodeAttribute.DataType.DT_String: + case NodeAttribute.DataType.DT_Path: + case NodeAttribute.DataType.DT_FixedString: + case NodeAttribute.DataType.DT_LSString: + WriteString((string)attr.Value, true); + break; + + case NodeAttribute.DataType.DT_WString: + case NodeAttribute.DataType.DT_LSWString: + WriteWideString((string)attr.Value, true); + break; + + case NodeAttribute.DataType.DT_TranslatedString: + { + var str = (TranslatedString)attr.Value; + if (Version >= 4 && str.Value == null) { - var str = (TranslatedString)attr.Value; - if (Version >= 4 && str.Value == null) - { - writer.Write(str.Version); - } - else - { - WriteString(str.Value ?? "", true); - } - - WriteString(str.Handle, true); - break; + writer.Write(str.Version); } - - case NodeAttribute.DataType.DT_ScratchBuffer: + else { - var buffer = (byte[])attr.Value; - writer.Write((UInt32)buffer.Length); - writer.Write(buffer); - break; + WriteString(str.Value ?? 
"", true); } - // DT_TranslatedFSString not supported in LSB - default: - BinUtils.WriteAttribute(writer, attr); + WriteString(str.Handle, true); break; - } - } + } - private void CollectStaticStrings(Resource rsrc) - { - staticStrings.Clear(); - foreach (var rgn in rsrc.Regions) - { - AddStaticString(rgn.Key); - CollectStaticStrings(rgn.Value); - } + case NodeAttribute.DataType.DT_ScratchBuffer: + { + var buffer = (byte[])attr.Value; + writer.Write((UInt32)buffer.Length); + writer.Write(buffer); + break; + } + + // DT_TranslatedFSString not supported in LSB + default: + BinUtils.WriteAttribute(writer, attr); + break; } + } - private void CollectStaticStrings(Node node) + private void CollectStaticStrings(Resource rsrc) + { + staticStrings.Clear(); + foreach (var rgn in rsrc.Regions) { - AddStaticString(node.Name); - - foreach (var attr in node.Attributes) - { - AddStaticString(attr.Key); - } - - foreach (var children in node.Children) - { - foreach (var child in children.Value) - CollectStaticStrings(child); - } + AddStaticString(rgn.Key); + CollectStaticStrings(rgn.Value); } + } + + private void CollectStaticStrings(Node node) + { + AddStaticString(node.Name); - private void AddStaticString(string s) + foreach (var attr in node.Attributes) { - if (!staticStrings.ContainsKey(s)) - { - staticStrings.Add(s, nextStaticStringId++); - } + AddStaticString(attr.Key); } - private void WriteStaticStrings() + foreach (var children in node.Children) { - writer.Write((UInt32)staticStrings.Count); - foreach (var s in staticStrings) - { - WriteString(s.Key, false); - writer.Write(s.Value); - } + foreach (var child in children.Value) + CollectStaticStrings(child); } + } - private void WriteString(string s, bool nullTerminated) + private void AddStaticString(string s) + { + if (!staticStrings.ContainsKey(s)) { - byte[] utf = System.Text.Encoding.UTF8.GetBytes(s); - int length = utf.Length + (nullTerminated ? 1 : 0); - writer.Write(length); - writer.Write(utf); - if (nullTerminated) - writer.Write((Byte)0); + staticStrings.Add(s, nextStaticStringId++); } + } - private void WriteWideString(string s, bool nullTerminated) + private void WriteStaticStrings() + { + writer.Write((UInt32)staticStrings.Count); + foreach (var s in staticStrings) { - byte[] unicode = System.Text.Encoding.Unicode.GetBytes(s); - int length = (unicode.Length / 2) + (nullTerminated ? 1 : 0); - writer.Write(length); - writer.Write(unicode); - if (nullTerminated) - writer.Write((UInt16)0); + WriteString(s.Key, false); + writer.Write(s.Value); } } + + private void WriteString(string s, bool nullTerminated) + { + byte[] utf = System.Text.Encoding.UTF8.GetBytes(s); + int length = utf.Length + (nullTerminated ? 1 : 0); + writer.Write(length); + writer.Write(utf); + if (nullTerminated) + writer.Write((Byte)0); + } + + private void WriteWideString(string s, bool nullTerminated) + { + byte[] unicode = System.Text.Encoding.Unicode.GetBytes(s); + int length = (unicode.Length / 2) + (nullTerminated ? 
1 : 0); + writer.Write(length); + writer.Write(unicode); + if (nullTerminated) + writer.Write((UInt16)0); + } } diff --git a/LSLib/LS/Resources/LSF/LSFCommon.cs b/LSLib/LS/Resources/LSF/LSFCommon.cs index 70141db9..026827e7 100644 --- a/LSLib/LS/Resources/LSF/LSFCommon.cs +++ b/LSLib/LS/Resources/LSF/LSFCommon.cs @@ -1,401 +1,400 @@ using System; using System.Runtime.InteropServices; -namespace LSLib.LS +namespace LSLib.LS; + +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct LSFMagic { - [StructLayout(LayoutKind.Sequential, Pack = 1)] - internal struct LSFMagic - { - /// - /// LSOF file signature - /// - public readonly static byte[] Signature = "LSOF"u8.ToArray(); + /// + /// LSOF file signature + /// + public readonly static byte[] Signature = "LSOF"u8.ToArray(); - /// - /// LSOF file signature; should be the same as LSFHeader.Signature - /// - public UInt32 Magic; + /// + /// LSOF file signature; should be the same as LSFHeader.Signature + /// + public UInt32 Magic; - /// - /// Version of the LSOF file; D:OS EE is version 1/2, D:OS 2 is version 3 - /// - public UInt32 Version; - }; + /// + /// Version of the LSOF file; D:OS EE is version 1/2, D:OS 2 is version 3 + /// + public UInt32 Version; +}; - [StructLayout(LayoutKind.Sequential, Pack = 1)] - internal struct LSFHeader - { - /// - /// Possibly version number? (major, minor, rev, build) - /// - public Int32 EngineVersion; - }; +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct LSFHeader +{ + /// + /// Possibly version number? (major, minor, rev, build) + /// + public Int32 EngineVersion; +}; - [StructLayout(LayoutKind.Sequential, Pack = 1)] - internal struct LSFHeaderV5 - { - /// - /// Possibly version number? (major, minor, rev, build) - /// - public Int64 EngineVersion; - }; +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct LSFHeaderV5 +{ + /// + /// Possibly version number? (major, minor, rev, build) + /// + public Int64 EngineVersion; +}; - [StructLayout(LayoutKind.Sequential, Pack = 1)] - internal struct LSFMetadataV5 - { - /// - /// Total uncompressed size of the string hash table - /// - public UInt32 StringsUncompressedSize; - /// - /// Compressed size of the string hash table - /// - public UInt32 StringsSizeOnDisk; - /// - /// Total uncompressed size of the node list - /// - public UInt32 NodesUncompressedSize; - /// - /// Compressed size of the node list - /// - public UInt32 NodesSizeOnDisk; - /// - /// Total uncompressed size of the attribute list - /// - public UInt32 AttributesUncompressedSize; - /// - /// Compressed size of the attribute list - /// - public UInt32 AttributesSizeOnDisk; - /// - /// Total uncompressed size of the raw value buffer - /// - public UInt32 ValuesUncompressedSize; - /// - /// Compressed size of the raw value buffer - /// - public UInt32 ValuesSizeOnDisk; - /// - /// Compression method and level used for the string, node, attribute and value buffers. 
- /// Uses the same format as packages (see BinUtils.MakeCompressionFlags) - /// - public Byte CompressionFlags; - /// - /// Possibly unused, always 0 - /// - public Byte Unknown2; - public UInt16 Unknown3; - /// - /// Extended node/attribute format indicator, 0 for V2, 0/1 for V3 - /// - public UInt32 HasSiblingData; - } +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct LSFMetadataV5 +{ + /// + /// Total uncompressed size of the string hash table + /// + public UInt32 StringsUncompressedSize; + /// + /// Compressed size of the string hash table + /// + public UInt32 StringsSizeOnDisk; + /// + /// Total uncompressed size of the node list + /// + public UInt32 NodesUncompressedSize; + /// + /// Compressed size of the node list + /// + public UInt32 NodesSizeOnDisk; + /// + /// Total uncompressed size of the attribute list + /// + public UInt32 AttributesUncompressedSize; + /// + /// Compressed size of the attribute list + /// + public UInt32 AttributesSizeOnDisk; + /// + /// Total uncompressed size of the raw value buffer + /// + public UInt32 ValuesUncompressedSize; + /// + /// Compressed size of the raw value buffer + /// + public UInt32 ValuesSizeOnDisk; + /// + /// Compression method and level used for the string, node, attribute and value buffers. + /// Uses the same format as packages (see BinUtils.MakeCompressionFlags) + /// + public Byte CompressionFlags; + /// + /// Possibly unused, always 0 + /// + public Byte Unknown2; + public UInt16 Unknown3; + /// + /// Extended node/attribute format indicator, 0 for V2, 0/1 for V3 + /// + public UInt32 HasSiblingData; +} + +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct LSFMetadataV6 +{ + /// + /// Total uncompressed size of the string hash table + /// + public UInt32 StringsUncompressedSize; + /// + /// Compressed size of the string hash table + /// + public UInt32 StringsSizeOnDisk; + public UInt64 Unknown; + /// + /// Total uncompressed size of the node list + /// + public UInt32 NodesUncompressedSize; + /// + /// Compressed size of the node list + /// + public UInt32 NodesSizeOnDisk; + /// + /// Total uncompressed size of the attribute list + /// + public UInt32 AttributesUncompressedSize; + /// + /// Compressed size of the attribute list + /// + public UInt32 AttributesSizeOnDisk; + /// + /// Total uncompressed size of the raw value buffer + /// + public UInt32 ValuesUncompressedSize; + /// + /// Compressed size of the raw value buffer + /// + public UInt32 ValuesSizeOnDisk; + /// + /// Compression method and level used for the string, node, attribute and value buffers. 
+ /// Uses the same format as packages (see BinUtils.MakeCompressionFlags) + /// + public Byte CompressionFlags; + /// + /// Possibly unused, always 0 + /// + public Byte Unknown2; + public UInt16 Unknown3; + /// + /// Extended node/attribute format indicator, 0 for V2, 0/1 for V3 + /// + public UInt32 HasSiblingData; +} + +/// +/// Node (structure) entry in the LSF file +/// +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct LSFNodeEntryV2 +{ + /// + /// Name of this node + /// (16-bit MSB: index into name hash table, 16-bit LSB: offset in hash chain) + /// + public UInt32 NameHashTableIndex; + /// + /// Index of the first attribute of this node + /// (-1: node has no attributes) + /// + public Int32 FirstAttributeIndex; + /// + /// Index of the parent node + /// (-1: this node is a root region) + /// + public Int32 ParentIndex; - [StructLayout(LayoutKind.Sequential, Pack = 1)] - internal struct LSFMetadataV6 + /// + /// Index into name hash table + /// + public int NameIndex { - /// - /// Total uncompressed size of the string hash table - /// - public UInt32 StringsUncompressedSize; - /// - /// Compressed size of the string hash table - /// - public UInt32 StringsSizeOnDisk; - public UInt64 Unknown; - /// - /// Total uncompressed size of the node list - /// - public UInt32 NodesUncompressedSize; - /// - /// Compressed size of the node list - /// - public UInt32 NodesSizeOnDisk; - /// - /// Total uncompressed size of the attribute list - /// - public UInt32 AttributesUncompressedSize; - /// - /// Compressed size of the attribute list - /// - public UInt32 AttributesSizeOnDisk; - /// - /// Total uncompressed size of the raw value buffer - /// - public UInt32 ValuesUncompressedSize; - /// - /// Compressed size of the raw value buffer - /// - public UInt32 ValuesSizeOnDisk; - /// - /// Compression method and level used for the string, node, attribute and value buffers. 
- /// Uses the same format as packages (see BinUtils.MakeCompressionFlags) - /// - public Byte CompressionFlags; - /// - /// Possibly unused, always 0 - /// - public Byte Unknown2; - public UInt16 Unknown3; - /// - /// Extended node/attribute format indicator, 0 for V2, 0/1 for V3 - /// - public UInt32 HasSiblingData; + get { return (int)(NameHashTableIndex >> 16); } } /// - /// Node (structure) entry in the LSF file + /// Offset in hash chain /// - [StructLayout(LayoutKind.Sequential, Pack = 1)] - internal struct LSFNodeEntryV2 + public int NameOffset { - /// - /// Name of this node - /// (16-bit MSB: index into name hash table, 16-bit LSB: offset in hash chain) - /// - public UInt32 NameHashTableIndex; - /// - /// Index of the first attribute of this node - /// (-1: node has no attributes) - /// - public Int32 FirstAttributeIndex; - /// - /// Index of the parent node - /// (-1: this node is a root region) - /// - public Int32 ParentIndex; - - /// - /// Index into name hash table - /// - public int NameIndex - { - get { return (int)(NameHashTableIndex >> 16); } - } + get { return (int)(NameHashTableIndex & 0xffff); } + } +}; - /// - /// Offset in hash chain - /// - public int NameOffset - { - get { return (int)(NameHashTableIndex & 0xffff); } - } - }; +/// +/// Node (structure) entry in the LSF file +/// +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct LSFNodeEntryV3 +{ + /// + /// Name of this node + /// (16-bit MSB: index into name hash table, 16-bit LSB: offset in hash chain) + /// + public UInt32 NameHashTableIndex; + /// + /// Index of the parent node + /// (-1: this node is a root region) + /// + public Int32 ParentIndex; + /// + /// Index of the next sibling of this node + /// (-1: this is the last node) + /// + public Int32 NextSiblingIndex; + /// + /// Index of the first attribute of this node + /// (-1: node has no attributes) + /// + public Int32 FirstAttributeIndex; /// - /// Node (structure) entry in the LSF file + /// Index into name hash table /// - [StructLayout(LayoutKind.Sequential, Pack = 1)] - internal struct LSFNodeEntryV3 + public int NameIndex { - /// - /// Name of this node - /// (16-bit MSB: index into name hash table, 16-bit LSB: offset in hash chain) - /// - public UInt32 NameHashTableIndex; - /// - /// Index of the parent node - /// (-1: this node is a root region) - /// - public Int32 ParentIndex; - /// - /// Index of the next sibling of this node - /// (-1: this is the last node) - /// - public Int32 NextSiblingIndex; - /// - /// Index of the first attribute of this node - /// (-1: node has no attributes) - /// - public Int32 FirstAttributeIndex; - - /// - /// Index into name hash table - /// - public int NameIndex - { - get { return (int)(NameHashTableIndex >> 16); } - } - - /// - /// Offset in hash chain - /// - public int NameOffset - { - get { return (int)(NameHashTableIndex & 0xffff); } - } - }; + get { return (int)(NameHashTableIndex >> 16); } + } /// - /// Processed node information for a node in the LSF file + /// Offset in hash chain /// - internal class LSFNodeInfo + public int NameOffset { - /// - /// Index of the parent node - /// (-1: this node is a root region) - /// - public int ParentIndex; - /// - /// Index into name hash table - /// - public int NameIndex; - /// - /// Offset in hash chain - /// - public int NameOffset; - /// - /// Index of the first attribute of this node - /// (-1: node has no attributes) - /// - public int FirstAttributeIndex; - }; + get { return (int)(NameHashTableIndex & 0xffff); } + } +}; +/// +/// 
Processed node information for a node in the LSF file +/// +internal class LSFNodeInfo +{ /// - /// V2 attribute extension in the LSF file + /// Index of the parent node + /// (-1: this node is a root region) /// - [StructLayout(LayoutKind.Sequential, Pack = 1)] - internal struct LSFAttributeEntryV2 - { - /// - /// Name of this attribute - /// (16-bit MSB: index into name hash table, 16-bit LSB: offset in hash chain) - /// - public UInt32 NameHashTableIndex; + public int ParentIndex; + /// + /// Index into name hash table + /// + public int NameIndex; + /// + /// Offset in hash chain + /// + public int NameOffset; + /// + /// Index of the first attribute of this node + /// (-1: node has no attributes) + /// + public int FirstAttributeIndex; +}; - /// - /// 6-bit LSB: Type of this attribute (see NodeAttribute.DataType) - /// 26-bit MSB: Length of this attribute - /// - public UInt32 TypeAndLength; +/// +/// V2 attribute extension in the LSF file +/// +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct LSFAttributeEntryV2 +{ + /// + /// Name of this attribute + /// (16-bit MSB: index into name hash table, 16-bit LSB: offset in hash chain) + /// + public UInt32 NameHashTableIndex; - /// - /// Index of the node that this attribute belongs to - /// Note: These indexes are assigned seemingly arbitrarily, and are not neccessarily indices into the node list - /// - public Int32 NodeIndex; + /// + /// 6-bit LSB: Type of this attribute (see NodeAttribute.DataType) + /// 26-bit MSB: Length of this attribute + /// + public UInt32 TypeAndLength; - /// - /// Index into name hash table - /// - public int NameIndex - { - get { return (int)(NameHashTableIndex >> 16); } - } + /// + /// Index of the node that this attribute belongs to + /// Note: These indexes are assigned seemingly arbitrarily, and are not neccessarily indices into the node list + /// + public Int32 NodeIndex; - /// - /// Offset in hash chain - /// - public int NameOffset - { - get { return (int)(NameHashTableIndex & 0xffff); } - } + /// + /// Index into name hash table + /// + public int NameIndex + { + get { return (int)(NameHashTableIndex >> 16); } + } - /// - /// Type of this attribute (see NodeAttribute.DataType) - /// - public uint TypeId - { - get { return TypeAndLength & 0x3f; } - } + /// + /// Offset in hash chain + /// + public int NameOffset + { + get { return (int)(NameHashTableIndex & 0xffff); } + } - /// - /// Length of this attribute - /// - public uint Length - { - get { return TypeAndLength >> 6; } - } - }; + /// + /// Type of this attribute (see NodeAttribute.DataType) + /// + public uint TypeId + { + get { return TypeAndLength & 0x3f; } + } /// - /// V3 attribute extension in the LSF file + /// Length of this attribute /// - [StructLayout(LayoutKind.Sequential, Pack = 1)] - internal struct LSFAttributeEntryV3 + public uint Length { - /// - /// Name of this attribute - /// (16-bit MSB: index into name hash table, 16-bit LSB: offset in hash chain) - /// - public UInt32 NameHashTableIndex; + get { return TypeAndLength >> 6; } + } +}; - /// - /// 6-bit LSB: Type of this attribute (see NodeAttribute.DataType) - /// 26-bit MSB: Length of this attribute - /// - public UInt32 TypeAndLength; +/// +/// V3 attribute extension in the LSF file +/// +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct LSFAttributeEntryV3 +{ + /// + /// Name of this attribute + /// (16-bit MSB: index into name hash table, 16-bit LSB: offset in hash chain) + /// + public UInt32 NameHashTableIndex; - /// - /// Index of the node 
that this attribute belongs to - /// Note: These indexes are assigned seemingly arbitrarily, and are not neccessarily indices into the node list - /// - public Int32 NextAttributeIndex; + /// + /// 6-bit LSB: Type of this attribute (see NodeAttribute.DataType) + /// 26-bit MSB: Length of this attribute + /// + public UInt32 TypeAndLength; - /// - /// Absolute position of attribute value in the value stream - /// - public UInt32 Offset; + /// + /// Index of the node that this attribute belongs to + /// Note: These indexes are assigned seemingly arbitrarily, and are not neccessarily indices into the node list + /// + public Int32 NextAttributeIndex; - /// - /// Index into name hash table - /// - public int NameIndex - { - get { return (int)(NameHashTableIndex >> 16); } - } + /// + /// Absolute position of attribute value in the value stream + /// + public UInt32 Offset; - /// - /// Offset in hash chain - /// - public int NameOffset - { - get { return (int)(NameHashTableIndex & 0xffff); } - } + /// + /// Index into name hash table + /// + public int NameIndex + { + get { return (int)(NameHashTableIndex >> 16); } + } - /// - /// Type of this attribute (see NodeAttribute.DataType) - /// - public uint TypeId - { - get { return TypeAndLength & 0x3f; } - } + /// + /// Offset in hash chain + /// + public int NameOffset + { + get { return (int)(NameHashTableIndex & 0xffff); } + } - /// - /// Length of this attribute - /// - public uint Length - { - get { return TypeAndLength >> 6; } - } - }; + /// + /// Type of this attribute (see NodeAttribute.DataType) + /// + public uint TypeId + { + get { return TypeAndLength & 0x3f; } + } - internal class LSFAttributeInfo + /// + /// Length of this attribute + /// + public uint Length { - /// - /// Index into name hash table - /// - public int NameIndex; - /// - /// Offset in hash chain - /// - public int NameOffset; - /// - /// Type of this attribute (see NodeAttribute.DataType) - /// - public uint TypeId; - /// - /// Length of this attribute - /// - public uint Length; - /// - /// Absolute position of attribute data in the values section - /// - public uint DataOffset; - /// - /// Index of the next attribute in this node - /// (-1: this is the last attribute) - /// - public int NextAttributeIndex; - }; -} + get { return TypeAndLength >> 6; } + } +}; + +internal class LSFAttributeInfo +{ + /// + /// Index into name hash table + /// + public int NameIndex; + /// + /// Offset in hash chain + /// + public int NameOffset; + /// + /// Type of this attribute (see NodeAttribute.DataType) + /// + public uint TypeId; + /// + /// Length of this attribute + /// + public uint Length; + /// + /// Absolute position of attribute data in the values section + /// + public uint DataOffset; + /// + /// Index of the next attribute in this node + /// (-1: this is the last attribute) + /// + public int NextAttributeIndex; +}; diff --git a/LSLib/LS/Resources/LSF/LSFReader.cs b/LSLib/LS/Resources/LSF/LSFReader.cs index 20c53190..24466d70 100644 --- a/LSLib/LS/Resources/LSF/LSFReader.cs +++ b/LSLib/LS/Resources/LSF/LSFReader.cs @@ -7,609 +7,608 @@ using System.Text; using LSLib.LS.Enums; -namespace LSLib.LS +namespace LSLib.LS; + +public class LSFReader(Stream stream) : IDisposable { - public class LSFReader(Stream stream) : IDisposable + /// + /// Input stream + /// + private readonly Stream Stream = stream; + + /// + /// Static string hash map + /// + private List> Names; + /// + /// Preprocessed list of nodes (structures) + /// + private List Nodes; + /// + /// Preprocessed list of 
node attributes + /// + private List Attributes; + /// + /// Node instances + /// + private List NodeInstances; + /// + /// Raw value data stream + /// + private Stream Values; + /// + /// Version of the file we're serializing + /// + private LSFVersion Version; + /// + /// Game version that generated the LSF file + /// + private PackedVersion GameVersion; + private LSFMetadataV6 Metadata; + + public void Dispose() { - /// - /// Input stream - /// - private readonly Stream Stream = stream; - - /// - /// Static string hash map - /// - private List> Names; - /// - /// Preprocessed list of nodes (structures) - /// - private List Nodes; - /// - /// Preprocessed list of node attributes - /// - private List Attributes; - /// - /// Node instances - /// - private List NodeInstances; - /// - /// Raw value data stream - /// - private Stream Values; - /// - /// Version of the file we're serializing - /// - private LSFVersion Version; - /// - /// Game version that generated the LSF file - /// - private PackedVersion GameVersion; - private LSFMetadataV6 Metadata; - - public void Dispose() - { - Stream.Dispose(); - } + Stream.Dispose(); + } - /// - /// Reads the static string hash table from the specified stream. - /// - /// Stream to read the hash table from - private void ReadNames(Stream s) - { + /// + /// Reads the static string hash table from the specified stream. + /// + /// Stream to read the hash table from + private void ReadNames(Stream s) + { #if DEBUG_LSF_SERIALIZATION - Console.WriteLine(" ----- DUMP OF NAME TABLE -----"); + Console.WriteLine(" ----- DUMP OF NAME TABLE -----"); #endif - // Format: - // 32-bit hash entry count (N) - // N x 16-bit chain length (L) - // L x 16-bit string length (S) - // [S bytes of UTF-8 string data] + // Format: + // 32-bit hash entry count (N) + // N x 16-bit chain length (L) + // L x 16-bit string length (S) + // [S bytes of UTF-8 string data] - using var reader = new BinaryReader(s); - var numHashEntries = reader.ReadUInt32(); - while (numHashEntries-- > 0) - { - var hash = new List(); - Names.Add(hash); + using var reader = new BinaryReader(s); + var numHashEntries = reader.ReadUInt32(); + while (numHashEntries-- > 0) + { + var hash = new List(); + Names.Add(hash); - var numStrings = reader.ReadUInt16(); - while (numStrings-- > 0) - { - var nameLen = reader.ReadUInt16(); - byte[] bytes = reader.ReadBytes(nameLen); - var name = System.Text.Encoding.UTF8.GetString(bytes); - hash.Add(name); + var numStrings = reader.ReadUInt16(); + while (numStrings-- > 0) + { + var nameLen = reader.ReadUInt16(); + byte[] bytes = reader.ReadBytes(nameLen); + var name = System.Text.Encoding.UTF8.GetString(bytes); + hash.Add(name); #if DEBUG_LSF_SERIALIZATION - Console.WriteLine(String.Format("{0,3:X}/{1}: {2}", Names.Count - 1, hash.Count - 1, name)); + Console.WriteLine(String.Format("{0,3:X}/{1}: {2}", Names.Count - 1, hash.Count - 1, name)); #endif - } } } + } - /// - /// Reads the structure headers for the LSOF resource - /// - /// Stream to read the node headers from - /// Use the long (V3) on-disk node format - private void ReadNodes(Stream s, bool longNodes) - { + /// + /// Reads the structure headers for the LSOF resource + /// + /// Stream to read the node headers from + /// Use the long (V3) on-disk node format + private void ReadNodes(Stream s, bool longNodes) + { #if DEBUG_LSF_SERIALIZATION - Console.WriteLine(" ----- DUMP OF NODE TABLE -----"); + Console.WriteLine(" ----- DUMP OF NODE TABLE -----"); #endif - using var reader = new BinaryReader(s); - Int32 index 
= 0; - while (s.Position < s.Length) - { - var resolved = new LSFNodeInfo(); + using var reader = new BinaryReader(s); + Int32 index = 0; + while (s.Position < s.Length) + { + var resolved = new LSFNodeInfo(); #if DEBUG_LSF_SERIALIZATION - var pos = s.Position; + var pos = s.Position; #endif - if (longNodes) - { - var item = BinUtils.ReadStruct(reader); - resolved.ParentIndex = item.ParentIndex; - resolved.NameIndex = item.NameIndex; - resolved.NameOffset = item.NameOffset; - resolved.FirstAttributeIndex = item.FirstAttributeIndex; - } - else - { - var item = BinUtils.ReadStruct(reader); - resolved.ParentIndex = item.ParentIndex; - resolved.NameIndex = item.NameIndex; - resolved.NameOffset = item.NameOffset; - resolved.FirstAttributeIndex = item.FirstAttributeIndex; - } + if (longNodes) + { + var item = BinUtils.ReadStruct(reader); + resolved.ParentIndex = item.ParentIndex; + resolved.NameIndex = item.NameIndex; + resolved.NameOffset = item.NameOffset; + resolved.FirstAttributeIndex = item.FirstAttributeIndex; + } + else + { + var item = BinUtils.ReadStruct(reader); + resolved.ParentIndex = item.ParentIndex; + resolved.NameIndex = item.NameIndex; + resolved.NameOffset = item.NameOffset; + resolved.FirstAttributeIndex = item.FirstAttributeIndex; + } #if DEBUG_LSF_SERIALIZATION - Console.WriteLine(String.Format( - "{0}: {1} @ {2:X} (parent {3}, firstAttribute {4})", - index, Names[resolved.NameIndex][resolved.NameOffset], pos, resolved.ParentIndex, - resolved.FirstAttributeIndex - )); + Console.WriteLine(String.Format( + "{0}: {1} @ {2:X} (parent {3}, firstAttribute {4})", + index, Names[resolved.NameIndex][resolved.NameOffset], pos, resolved.ParentIndex, + resolved.FirstAttributeIndex + )); #endif - Nodes.Add(resolved); - index++; - } + Nodes.Add(resolved); + index++; } + } - /// - /// Reads the V2 attribute headers for the LSOF resource - /// - /// Stream to read the attribute headers from - private void ReadAttributesV2(Stream s) - { - using var reader = new BinaryReader(s); + /// + /// Reads the V2 attribute headers for the LSOF resource + /// + /// Stream to read the attribute headers from + private void ReadAttributesV2(Stream s) + { + using var reader = new BinaryReader(s); #if DEBUG_LSF_SERIALIZATION - var rawAttributes = new List(); + var rawAttributes = new List(); #endif - var prevAttributeRefs = new List(); - UInt32 dataOffset = 0; - Int32 index = 0; - while (s.Position < s.Length) - { - var attribute = BinUtils.ReadStruct(reader); + var prevAttributeRefs = new List(); + UInt32 dataOffset = 0; + Int32 index = 0; + while (s.Position < s.Length) + { + var attribute = BinUtils.ReadStruct(reader); - var resolved = new LSFAttributeInfo - { - NameIndex = attribute.NameIndex, - NameOffset = attribute.NameOffset, - TypeId = attribute.TypeId, - Length = attribute.Length, - DataOffset = dataOffset, - NextAttributeIndex = -1 - }; - - var nodeIndex = attribute.NodeIndex + 1; - if (prevAttributeRefs.Count > nodeIndex) + var resolved = new LSFAttributeInfo + { + NameIndex = attribute.NameIndex, + NameOffset = attribute.NameOffset, + TypeId = attribute.TypeId, + Length = attribute.Length, + DataOffset = dataOffset, + NextAttributeIndex = -1 + }; + + var nodeIndex = attribute.NodeIndex + 1; + if (prevAttributeRefs.Count > nodeIndex) + { + if (prevAttributeRefs[nodeIndex] != -1) { - if (prevAttributeRefs[nodeIndex] != -1) - { - Attributes[prevAttributeRefs[nodeIndex]].NextAttributeIndex = index; - } - - prevAttributeRefs[nodeIndex] = index; + 
Attributes[prevAttributeRefs[nodeIndex]].NextAttributeIndex = index; } - else - { - while (prevAttributeRefs.Count < nodeIndex) - { - prevAttributeRefs.Add(-1); - } - prevAttributeRefs.Add(index); + prevAttributeRefs[nodeIndex] = index; + } + else + { + while (prevAttributeRefs.Count < nodeIndex) + { + prevAttributeRefs.Add(-1); } + prevAttributeRefs.Add(index); + } + #if DEBUG_LSF_SERIALIZATION - rawAttributes.Add(attribute); + rawAttributes.Add(attribute); #endif - dataOffset += resolved.Length; - Attributes.Add(resolved); - index++; - } + dataOffset += resolved.Length; + Attributes.Add(resolved); + index++; + } #if DEBUG_LSF_SERIALIZATION - Console.WriteLine(" ----- DUMP OF ATTRIBUTE REFERENCES -----"); - for (int i = 0; i < prevAttributeRefs.Count; i++) - { - Console.WriteLine(String.Format("Node {0}: last attribute {1}", i, prevAttributeRefs[i])); - } + Console.WriteLine(" ----- DUMP OF ATTRIBUTE REFERENCES -----"); + for (int i = 0; i < prevAttributeRefs.Count; i++) + { + Console.WriteLine(String.Format("Node {0}: last attribute {1}", i, prevAttributeRefs[i])); + } - Console.WriteLine(" ----- DUMP OF V2 ATTRIBUTE TABLE -----"); - for (int i = 0; i < Attributes.Count; i++) - { - var resolved = Attributes[i]; - var attribute = rawAttributes[i]; - - var debug = String.Format( - "{0}: {1} (offset {2:X}, typeId {3}, nextAttribute {4}, node {5})", - i, Names[resolved.NameIndex][resolved.NameOffset], resolved.DataOffset, - resolved.TypeId, resolved.NextAttributeIndex, attribute.NodeIndex - ); - Console.WriteLine(debug); - } -#endif + Console.WriteLine(" ----- DUMP OF V2 ATTRIBUTE TABLE -----"); + for (int i = 0; i < Attributes.Count; i++) + { + var resolved = Attributes[i]; + var attribute = rawAttributes[i]; + + var debug = String.Format( + "{0}: {1} (offset {2:X}, typeId {3}, nextAttribute {4}, node {5})", + i, Names[resolved.NameIndex][resolved.NameOffset], resolved.DataOffset, + resolved.TypeId, resolved.NextAttributeIndex, attribute.NodeIndex + ); + Console.WriteLine(debug); } +#endif + } - /// - /// Reads the V3 attribute headers for the LSOF resource - /// - /// Stream to read the attribute headers from - private void ReadAttributesV3(Stream s) + /// + /// Reads the V3 attribute headers for the LSOF resource + /// + /// Stream to read the attribute headers from + private void ReadAttributesV3(Stream s) + { + using var reader = new BinaryReader(s); + while (s.Position < s.Length) { - using var reader = new BinaryReader(s); - while (s.Position < s.Length) - { - var attribute = BinUtils.ReadStruct(reader); + var attribute = BinUtils.ReadStruct(reader); - var resolved = new LSFAttributeInfo - { - NameIndex = attribute.NameIndex, - NameOffset = attribute.NameOffset, - TypeId = attribute.TypeId, - Length = attribute.Length, - DataOffset = attribute.Offset, - NextAttributeIndex = attribute.NextAttributeIndex - }; - - Attributes.Add(resolved); - } + var resolved = new LSFAttributeInfo + { + NameIndex = attribute.NameIndex, + NameOffset = attribute.NameOffset, + TypeId = attribute.TypeId, + Length = attribute.Length, + DataOffset = attribute.Offset, + NextAttributeIndex = attribute.NextAttributeIndex + }; + + Attributes.Add(resolved); + } #if DEBUG_LSF_SERIALIZATION - Console.WriteLine(" ----- DUMP OF V3 ATTRIBUTE TABLE -----"); - for (int i = 0; i < Attributes.Count; i++) - { - var resolved = Attributes[i]; - - var debug = String.Format( - "{0}: {1} (offset {2:X}, typeId {3}, nextAttribute {4})", - i, Names[resolved.NameIndex][resolved.NameOffset], resolved.DataOffset, - resolved.TypeId, 
resolved.NextAttributeIndex - ); - Console.WriteLine(debug); - } -#endif + Console.WriteLine(" ----- DUMP OF V3 ATTRIBUTE TABLE -----"); + for (int i = 0; i < Attributes.Count; i++) + { + var resolved = Attributes[i]; + + var debug = String.Format( + "{0}: {1} (offset {2:X}, typeId {3}, nextAttribute {4})", + i, Names[resolved.NameIndex][resolved.NameOffset], resolved.DataOffset, + resolved.TypeId, resolved.NextAttributeIndex + ); + Console.WriteLine(debug); } +#endif + } - private MemoryStream Decompress(BinaryReader reader, uint sizeOnDisk, uint uncompressedSize, string debugDumpTo, bool allowChunked) + private MemoryStream Decompress(BinaryReader reader, uint sizeOnDisk, uint uncompressedSize, string debugDumpTo, bool allowChunked) + { + if (sizeOnDisk == 0 && uncompressedSize != 0) // data is not compressed { - if (sizeOnDisk == 0 && uncompressedSize != 0) // data is not compressed - { - var buf = reader.ReadBytes((int)uncompressedSize); + var buf = reader.ReadBytes((int)uncompressedSize); #if DUMP_LSF_SERIALIZATION - using (var nodesFile = new FileStream(debugDumpTo, FileMode.Create, FileAccess.Write)) - { - nodesFile.Write(buf, 0, buf.Length); - } + using (var nodesFile = new FileStream(debugDumpTo, FileMode.Create, FileAccess.Write)) + { + nodesFile.Write(buf, 0, buf.Length); + } #endif - return new MemoryStream(buf); - } + return new MemoryStream(buf); + } - if (sizeOnDisk == 0 && uncompressedSize == 0) // no data - { - return new MemoryStream(); - } - - bool chunked = (Version >= LSFVersion.VerChunkedCompress && allowChunked); - bool isCompressed = BinUtils.CompressionFlagsToMethod(Metadata.CompressionFlags) != CompressionMethod.None; - uint compressedSize = isCompressed ? sizeOnDisk : uncompressedSize; - byte[] compressed = reader.ReadBytes((int)compressedSize); - var uncompressed = BinUtils.Decompress(compressed, (int)uncompressedSize, Metadata.CompressionFlags, chunked); + if (sizeOnDisk == 0 && uncompressedSize == 0) // no data + { + return new MemoryStream(); + } + + bool chunked = (Version >= LSFVersion.VerChunkedCompress && allowChunked); + bool isCompressed = BinUtils.CompressionFlagsToMethod(Metadata.CompressionFlags) != CompressionMethod.None; + uint compressedSize = isCompressed ? 
sizeOnDisk : uncompressedSize; + byte[] compressed = reader.ReadBytes((int)compressedSize); + var uncompressed = BinUtils.Decompress(compressed, (int)uncompressedSize, Metadata.CompressionFlags, chunked); #if DUMP_LSF_SERIALIZATION - using (var nodesFile = new FileStream(debugDumpTo, FileMode.Create, FileAccess.Write)) - { - nodesFile.Write(uncompressed, 0, uncompressed.Length); - } + using (var nodesFile = new FileStream(debugDumpTo, FileMode.Create, FileAccess.Write)) + { + nodesFile.Write(uncompressed, 0, uncompressed.Length); + } #endif - return new MemoryStream(uncompressed); + return new MemoryStream(uncompressed); + } + + private void ReadHeaders(BinaryReader reader) + { + var magic = BinUtils.ReadStruct(reader); + if (magic.Magic != BitConverter.ToUInt32(LSFMagic.Signature, 0)) + { + var msg = String.Format( + "Invalid LSF signature; expected {0,8:X}, got {1,8:X}", + BitConverter.ToUInt32(LSFMagic.Signature, 0), magic.Magic + ); + throw new InvalidDataException(msg); } - private void ReadHeaders(BinaryReader reader) + if (magic.Version < (ulong)LSFVersion.VerInitial || magic.Version > (ulong)LSFVersion.MaxReadVersion) { - var magic = BinUtils.ReadStruct(reader); - if (magic.Magic != BitConverter.ToUInt32(LSFMagic.Signature, 0)) + var msg = String.Format("LSF version {0} is not supported", magic.Version); + throw new InvalidDataException(msg); + } + + Version = (LSFVersion)magic.Version; + + if (Version >= LSFVersion.VerBG3ExtendedHeader) + { + var hdr = BinUtils.ReadStruct(reader); + GameVersion = PackedVersion.FromInt64(hdr.EngineVersion); + + // Workaround for merged LSF files with missing engine version number + if (GameVersion.Major == 0) { - var msg = String.Format( - "Invalid LSF signature; expected {0,8:X}, got {1,8:X}", - BitConverter.ToUInt32(LSFMagic.Signature, 0), magic.Magic - ); - throw new InvalidDataException(msg); + GameVersion.Major = 4; + GameVersion.Minor = 0; + GameVersion.Revision = 9; + GameVersion.Build = 0; } + } + else + { + var hdr = BinUtils.ReadStruct(reader); + GameVersion = PackedVersion.FromInt32(hdr.EngineVersion); + } - if (magic.Version < (ulong)LSFVersion.VerInitial || magic.Version > (ulong)LSFVersion.MaxReadVersion) + if (Version < LSFVersion.VerBG3AdditionalBlob) + { + var meta = BinUtils.ReadStruct(reader); + Metadata = new LSFMetadataV6 { - var msg = String.Format("LSF version {0} is not supported", magic.Version); - throw new InvalidDataException(msg); - } + StringsUncompressedSize = meta.StringsUncompressedSize, + StringsSizeOnDisk = meta.StringsSizeOnDisk, + NodesUncompressedSize = meta.NodesUncompressedSize, + NodesSizeOnDisk = meta.NodesSizeOnDisk, + AttributesUncompressedSize = meta.AttributesUncompressedSize, + AttributesSizeOnDisk = meta.AttributesSizeOnDisk, + ValuesUncompressedSize = meta.ValuesUncompressedSize, + ValuesSizeOnDisk = meta.ValuesSizeOnDisk, + CompressionFlags = meta.CompressionFlags, + HasSiblingData = meta.HasSiblingData + }; + } + else + { + Metadata = BinUtils.ReadStruct(reader); + } + } - Version = (LSFVersion)magic.Version; + public Resource Read() + { + using var reader = new BinaryReader(Stream); + ReadHeaders(reader); - if (Version >= LSFVersion.VerBG3ExtendedHeader) - { - var hdr = BinUtils.ReadStruct(reader); - GameVersion = PackedVersion.FromInt64(hdr.EngineVersion); + Names = []; + var namesStream = Decompress(reader, Metadata.StringsSizeOnDisk, Metadata.StringsUncompressedSize, "strings.bin", false); + using (namesStream) + { + ReadNames(namesStream); + } - // Workaround for merged LSF files with missing 
engine version number - if (GameVersion.Major == 0) - { - GameVersion.Major = 4; - GameVersion.Minor = 0; - GameVersion.Revision = 9; - GameVersion.Build = 0; - } - } - else - { - var hdr = BinUtils.ReadStruct(reader); - GameVersion = PackedVersion.FromInt32(hdr.EngineVersion); - } + Nodes = []; + var nodesStream = Decompress(reader, Metadata.NodesSizeOnDisk, Metadata.NodesUncompressedSize, "nodes.bin", true); + using (nodesStream) + { + var longNodes = Version >= LSFVersion.VerExtendedNodes + && Metadata.HasSiblingData == 1; + ReadNodes(nodesStream, longNodes); + } - if (Version < LSFVersion.VerBG3AdditionalBlob) + Attributes = []; + var attributesStream = Decompress(reader, Metadata.AttributesSizeOnDisk, Metadata.AttributesUncompressedSize, "attributes.bin", true); + using (attributesStream) + { + var hasSiblingData = Version >= LSFVersion.VerExtendedNodes + && Metadata.HasSiblingData == 1; + if (hasSiblingData) { - var meta = BinUtils.ReadStruct(reader); - Metadata = new LSFMetadataV6 - { - StringsUncompressedSize = meta.StringsUncompressedSize, - StringsSizeOnDisk = meta.StringsSizeOnDisk, - NodesUncompressedSize = meta.NodesUncompressedSize, - NodesSizeOnDisk = meta.NodesSizeOnDisk, - AttributesUncompressedSize = meta.AttributesUncompressedSize, - AttributesSizeOnDisk = meta.AttributesSizeOnDisk, - ValuesUncompressedSize = meta.ValuesUncompressedSize, - ValuesSizeOnDisk = meta.ValuesSizeOnDisk, - CompressionFlags = meta.CompressionFlags, - HasSiblingData = meta.HasSiblingData - }; + ReadAttributesV3(attributesStream); } else { - Metadata = BinUtils.ReadStruct(reader); + ReadAttributesV2(attributesStream); } } - public Resource Read() - { - using var reader = new BinaryReader(Stream); - ReadHeaders(reader); + this.Values = Decompress(reader, Metadata.ValuesSizeOnDisk, Metadata.ValuesUncompressedSize, "values.bin", true); - Names = []; - var namesStream = Decompress(reader, Metadata.StringsSizeOnDisk, Metadata.StringsUncompressedSize, "strings.bin", false); - using (namesStream) - { - ReadNames(namesStream); - } + Resource resource = new(); + ReadRegions(resource); - Nodes = []; - var nodesStream = Decompress(reader, Metadata.NodesSizeOnDisk, Metadata.NodesUncompressedSize, "nodes.bin", true); - using (nodesStream) + resource.Metadata.MajorVersion = GameVersion.Major; + resource.Metadata.MinorVersion = GameVersion.Minor; + resource.Metadata.Revision = GameVersion.Revision; + resource.Metadata.BuildNumber = GameVersion.Build; + + return resource; + } + + private void ReadRegions(Resource resource) + { + var attrReader = new BinaryReader(Values); + NodeInstances = []; + for (int i = 0; i < Nodes.Count; i++) + { + var defn = Nodes[i]; + if (defn.ParentIndex == -1) { - var longNodes = Version >= LSFVersion.VerExtendedNodes - && Metadata.HasSiblingData == 1; - ReadNodes(nodesStream, longNodes); + var region = new Region(); + ReadNode(defn, region, attrReader); + NodeInstances.Add(region); + region.RegionName = region.Name; + resource.Regions[region.Name] = region; } - - Attributes = []; - var attributesStream = Decompress(reader, Metadata.AttributesSizeOnDisk, Metadata.AttributesUncompressedSize, "attributes.bin", true); - using (attributesStream) + else { - var hasSiblingData = Version >= LSFVersion.VerExtendedNodes - && Metadata.HasSiblingData == 1; - if (hasSiblingData) - { - ReadAttributesV3(attributesStream); - } - else - { - ReadAttributesV2(attributesStream); - } + var node = new Node(); + ReadNode(defn, node, attrReader); + node.Parent = NodeInstances[defn.ParentIndex]; + 
NodeInstances.Add(node); + NodeInstances[defn.ParentIndex].AppendChild(node); } + } + } - this.Values = Decompress(reader, Metadata.ValuesSizeOnDisk, Metadata.ValuesUncompressedSize, "values.bin", true); - - Resource resource = new(); - ReadRegions(resource); - - resource.Metadata.MajorVersion = GameVersion.Major; - resource.Metadata.MinorVersion = GameVersion.Minor; - resource.Metadata.Revision = GameVersion.Revision; - resource.Metadata.BuildNumber = GameVersion.Build; + private void ReadNode(LSFNodeInfo defn, Node node, BinaryReader attributeReader) + { + node.Name = Names[defn.NameIndex][defn.NameOffset]; - return resource; - } +#if DEBUG_LSF_SERIALIZATION + Console.WriteLine(String.Format("Begin node {0}", node.Name)); +#endif - private void ReadRegions(Resource resource) + if (defn.FirstAttributeIndex != -1) { - var attrReader = new BinaryReader(Values); - NodeInstances = []; - for (int i = 0; i < Nodes.Count; i++) + var attribute = Attributes[defn.FirstAttributeIndex]; + while (true) { - var defn = Nodes[i]; - if (defn.ParentIndex == -1) + Values.Position = attribute.DataOffset; + var value = ReadAttribute((NodeAttribute.DataType)attribute.TypeId, attributeReader, attribute.Length); + node.Attributes[Names[attribute.NameIndex][attribute.NameOffset]] = value; + +#if DEBUG_LSF_SERIALIZATION + Console.WriteLine(String.Format(" {0:X}: {1} ({2})", attribute.DataOffset, Names[attribute.NameIndex][attribute.NameOffset], value)); +#endif + + if (attribute.NextAttributeIndex == -1) { - var region = new Region(); - ReadNode(defn, region, attrReader); - NodeInstances.Add(region); - region.RegionName = region.Name; - resource.Regions[region.Name] = region; + break; } else { - var node = new Node(); - ReadNode(defn, node, attrReader); - node.Parent = NodeInstances[defn.ParentIndex]; - NodeInstances.Add(node); - NodeInstances[defn.ParentIndex].AppendChild(node); + attribute = Attributes[attribute.NextAttributeIndex]; } } } + } - private void ReadNode(LSFNodeInfo defn, Node node, BinaryReader attributeReader) + private NodeAttribute ReadAttribute(NodeAttribute.DataType type, BinaryReader reader, uint length) + { + // LSF and LSB serialize the buffer types differently, so specialized + // code is added to the LSB and LSf serializers, and the common code is + // available in BinUtils.ReadAttribute() + switch (type) { - node.Name = Names[defn.NameIndex][defn.NameOffset]; - -#if DEBUG_LSF_SERIALIZATION - Console.WriteLine(String.Format("Begin node {0}", node.Name)); -#endif - - if (defn.FirstAttributeIndex != -1) - { - var attribute = Attributes[defn.FirstAttributeIndex]; - while (true) + case NodeAttribute.DataType.DT_String: + case NodeAttribute.DataType.DT_Path: + case NodeAttribute.DataType.DT_FixedString: + case NodeAttribute.DataType.DT_LSString: + case NodeAttribute.DataType.DT_WString: + case NodeAttribute.DataType.DT_LSWString: { - Values.Position = attribute.DataOffset; - var value = ReadAttribute((NodeAttribute.DataType)attribute.TypeId, attributeReader, attribute.Length); - node.Attributes[Names[attribute.NameIndex][attribute.NameOffset]] = value; + var attr = new NodeAttribute(type) + { + Value = ReadString(reader, (int)length) + }; + return attr; + } -#if DEBUG_LSF_SERIALIZATION - Console.WriteLine(String.Format(" {0:X}: {1} ({2})", attribute.DataOffset, Names[attribute.NameIndex][attribute.NameOffset], value)); -#endif + case NodeAttribute.DataType.DT_TranslatedString: + { + var attr = new NodeAttribute(type); + var str = new TranslatedString(); - if (attribute.NextAttributeIndex == -1) 
+ if (Version >= LSFVersion.VerBG3 || + (GameVersion.Major > 4 || + (GameVersion.Major == 4 && GameVersion.Revision > 0) || + (GameVersion.Major == 4 && GameVersion.Revision == 0 && GameVersion.Build >= 0x1a))) { - break; + str.Version = reader.ReadUInt16(); } else { - attribute = Attributes[attribute.NextAttributeIndex]; + str.Version = 0; + var valueLength = reader.ReadInt32(); + str.Value = ReadString(reader, valueLength); } - } - } - } - private NodeAttribute ReadAttribute(NodeAttribute.DataType type, BinaryReader reader, uint length) - { - // LSF and LSB serialize the buffer types differently, so specialized - // code is added to the LSB and LSf serializers, and the common code is - // available in BinUtils.ReadAttribute() - switch (type) - { - case NodeAttribute.DataType.DT_String: - case NodeAttribute.DataType.DT_Path: - case NodeAttribute.DataType.DT_FixedString: - case NodeAttribute.DataType.DT_LSString: - case NodeAttribute.DataType.DT_WString: - case NodeAttribute.DataType.DT_LSWString: - { - var attr = new NodeAttribute(type) - { - Value = ReadString(reader, (int)length) - }; - return attr; - } + var handleLength = reader.ReadInt32(); + str.Handle = ReadString(reader, handleLength); - case NodeAttribute.DataType.DT_TranslatedString: - { - var attr = new NodeAttribute(type); - var str = new TranslatedString(); - - if (Version >= LSFVersion.VerBG3 || - (GameVersion.Major > 4 || - (GameVersion.Major == 4 && GameVersion.Revision > 0) || - (GameVersion.Major == 4 && GameVersion.Revision == 0 && GameVersion.Build >= 0x1a))) - { - str.Version = reader.ReadUInt16(); - } - else - { - str.Version = 0; - var valueLength = reader.ReadInt32(); - str.Value = ReadString(reader, valueLength); - } - - var handleLength = reader.ReadInt32(); - str.Handle = ReadString(reader, handleLength); - - attr.Value = str; - return attr; - } + attr.Value = str; + return attr; + } - case NodeAttribute.DataType.DT_TranslatedFSString: + case NodeAttribute.DataType.DT_TranslatedFSString: + { + var attr = new NodeAttribute(type) { - var attr = new NodeAttribute(type) - { - Value = ReadTranslatedFSString(reader) - }; - return attr; - } + Value = ReadTranslatedFSString(reader) + }; + return attr; + } - case NodeAttribute.DataType.DT_ScratchBuffer: + case NodeAttribute.DataType.DT_ScratchBuffer: + { + var attr = new NodeAttribute(type) { - var attr = new NodeAttribute(type) - { - Value = reader.ReadBytes((int)length) - }; - return attr; - } + Value = reader.ReadBytes((int)length) + }; + return attr; + } - default: - return BinUtils.ReadAttribute(type, reader); - } + default: + return BinUtils.ReadAttribute(type, reader); } + } - private TranslatedFSString ReadTranslatedFSString(BinaryReader reader) - { - var str = new TranslatedFSString(); - - if (Version >= LSFVersion.VerBG3) - { - str.Version = reader.ReadUInt16(); - } - else - { - str.Version = 0; - var valueLength = reader.ReadInt32(); - str.Value = ReadString(reader, valueLength); - } + private TranslatedFSString ReadTranslatedFSString(BinaryReader reader) + { + var str = new TranslatedFSString(); - var handleLength = reader.ReadInt32(); - str.Handle = ReadString(reader, handleLength); + if (Version >= LSFVersion.VerBG3) + { + str.Version = reader.ReadUInt16(); + } + else + { + str.Version = 0; + var valueLength = reader.ReadInt32(); + str.Value = ReadString(reader, valueLength); + } - var arguments = reader.ReadInt32(); - str.Arguments = new List(arguments); - for (int i = 0; i < arguments; i++) - { - var arg = new TranslatedFSStringArgument(); - var 
argKeyLength = reader.ReadInt32(); - arg.Key = ReadString(reader, argKeyLength); + var handleLength = reader.ReadInt32(); + str.Handle = ReadString(reader, handleLength); - arg.String = ReadTranslatedFSString(reader); + var arguments = reader.ReadInt32(); + str.Arguments = new List(arguments); + for (int i = 0; i < arguments; i++) + { + var arg = new TranslatedFSStringArgument(); + var argKeyLength = reader.ReadInt32(); + arg.Key = ReadString(reader, argKeyLength); - var argValueLength = reader.ReadInt32(); - arg.Value = ReadString(reader, argValueLength); + arg.String = ReadTranslatedFSString(reader); - str.Arguments.Add(arg); - } + var argValueLength = reader.ReadInt32(); + arg.Value = ReadString(reader, argValueLength); - return str; + str.Arguments.Add(arg); } - private string ReadString(BinaryReader reader, int length) - { - var bytes = reader.ReadBytes(length - 1); + return str; + } - // Remove null bytes at the end of the string - int lastNull = bytes.Length; - while (lastNull > 0 && bytes[lastNull - 1] == 0) - lastNull--; + private string ReadString(BinaryReader reader, int length) + { + var bytes = reader.ReadBytes(length - 1); - var nullTerminator = reader.ReadByte(); - if (nullTerminator != 0) - { - throw new InvalidDataException("String is not null-terminated"); - } + // Remove null bytes at the end of the string + int lastNull = bytes.Length; + while (lastNull > 0 && bytes[lastNull - 1] == 0) + lastNull--; - return Encoding.UTF8.GetString(bytes, 0, lastNull); + var nullTerminator = reader.ReadByte(); + if (nullTerminator != 0) + { + throw new InvalidDataException("String is not null-terminated"); } - private string ReadString(BinaryReader reader) + return Encoding.UTF8.GetString(bytes, 0, lastNull); + } + + private string ReadString(BinaryReader reader) + { + List bytes = []; + while (true) { - List bytes = []; - while (true) + var b = reader.ReadByte(); + if (b != 0) { - var b = reader.ReadByte(); - if (b != 0) - { - bytes.Add(b); - } - else - { - break; - } + bytes.Add(b); + } + else + { + break; } - - return Encoding.UTF8.GetString(bytes.ToArray()); } + + return Encoding.UTF8.GetString(bytes.ToArray()); } } diff --git a/LSLib/LS/Resources/LSF/LSFWriter.cs b/LSLib/LS/Resources/LSF/LSFWriter.cs index ae4df84f..40804e0a 100644 --- a/LSLib/LS/Resources/LSF/LSFWriter.cs +++ b/LSLib/LS/Resources/LSF/LSFWriter.cs @@ -4,509 +4,508 @@ using System.Text; using LSLib.LS.Enums; -namespace LSLib.LS +namespace LSLib.LS; + +public class LSFWriter(Stream stream) { - public class LSFWriter(Stream stream) - { - private readonly static int StringHashMapSize = 0x200; + private readonly static int StringHashMapSize = 0x200; + + private readonly Stream Stream = stream; + private BinaryWriter Writer; + private LSMetadata Meta; + + private MemoryStream NodeStream; + private BinaryWriter NodeWriter; + private int NextNodeIndex = 0; + private Dictionary NodeIndices; - private readonly Stream Stream = stream; - private BinaryWriter Writer; - private LSMetadata Meta; + private MemoryStream AttributeStream; + private BinaryWriter AttributeWriter; + private int NextAttributeIndex = 0; - private MemoryStream NodeStream; - private BinaryWriter NodeWriter; - private int NextNodeIndex = 0; - private Dictionary NodeIndices; + private MemoryStream ValueStream; + private BinaryWriter ValueWriter; - private MemoryStream AttributeStream; - private BinaryWriter AttributeWriter; - private int NextAttributeIndex = 0; + private List> StringHashMap; + private List NextSiblingIndices; - private MemoryStream 
ValueStream; - private BinaryWriter ValueWriter; + public LSFVersion Version = LSFVersion.MaxWriteVersion; + public bool EncodeSiblingData = false; + public CompressionMethod Compression = CompressionMethod.LZ4; + public LSCompressionLevel LSCompressionLevel = LSCompressionLevel.DefaultCompression; - private List> StringHashMap; - private List NextSiblingIndices; + public void Write(Resource resource) + { + if (Version > LSFVersion.MaxWriteVersion) + { + var msg = String.Format("Writing LSF version {0} is not supported (highest is {1})", Version, LSFVersion.MaxWriteVersion); + throw new InvalidDataException(msg); + } - public LSFVersion Version = LSFVersion.MaxWriteVersion; - public bool EncodeSiblingData = false; - public CompressionMethod Compression = CompressionMethod.LZ4; - public LSCompressionLevel LSCompressionLevel = LSCompressionLevel.DefaultCompression; + Meta = resource.Metadata; - public void Write(Resource resource) + using (this.Writer = new BinaryWriter(Stream, Encoding.Default, true)) + using (this.NodeStream = new MemoryStream()) + using (this.NodeWriter = new BinaryWriter(NodeStream)) + using (this.AttributeStream = new MemoryStream()) + using (this.AttributeWriter = new BinaryWriter(AttributeStream)) + using (this.ValueStream = new MemoryStream()) + using (this.ValueWriter = new BinaryWriter(ValueStream)) { - if (Version > LSFVersion.MaxWriteVersion) + NextNodeIndex = 0; + NextAttributeIndex = 0; + NodeIndices = []; + NextSiblingIndices = null; + StringHashMap = new List>(StringHashMapSize); + while (StringHashMap.Count < StringHashMapSize) { - var msg = String.Format("Writing LSF version {0} is not supported (highest is {1})", Version, LSFVersion.MaxWriteVersion); - throw new InvalidDataException(msg); + StringHashMap.Add([]); } - Meta = resource.Metadata; - - using (this.Writer = new BinaryWriter(Stream, Encoding.Default, true)) - using (this.NodeStream = new MemoryStream()) - using (this.NodeWriter = new BinaryWriter(NodeStream)) - using (this.AttributeStream = new MemoryStream()) - using (this.AttributeWriter = new BinaryWriter(AttributeStream)) - using (this.ValueStream = new MemoryStream()) - using (this.ValueWriter = new BinaryWriter(ValueStream)) + if (EncodeSiblingData) { - NextNodeIndex = 0; - NextAttributeIndex = 0; - NodeIndices = []; - NextSiblingIndices = null; - StringHashMap = new List>(StringHashMapSize); - while (StringHashMap.Count < StringHashMapSize) - { - StringHashMap.Add([]); - } + ComputeSiblingIndices(resource); + } - if (EncodeSiblingData) - { - ComputeSiblingIndices(resource); - } + WriteRegions(resource); - WriteRegions(resource); + byte[] stringBuffer = null; + using (var stringStream = new MemoryStream()) + using (var stringWriter = new BinaryWriter(stringStream)) + { + WriteStaticStrings(stringWriter); + stringBuffer = stringStream.ToArray(); + } - byte[] stringBuffer = null; - using (var stringStream = new MemoryStream()) - using (var stringWriter = new BinaryWriter(stringStream)) - { - WriteStaticStrings(stringWriter); - stringBuffer = stringStream.ToArray(); - } + var nodeBuffer = NodeStream.ToArray(); + var attributeBuffer = AttributeStream.ToArray(); + var valueBuffer = ValueStream.ToArray(); - var nodeBuffer = NodeStream.ToArray(); - var attributeBuffer = AttributeStream.ToArray(); - var valueBuffer = ValueStream.ToArray(); + var magic = new LSFMagic + { + Magic = BitConverter.ToUInt32(LSFMagic.Signature, 0), + Version = (uint)Version + }; + BinUtils.WriteStruct(Writer, ref magic); - var magic = new LSFMagic + PackedVersion 
gameVersion = new() + { + Major = resource.Metadata.MajorVersion, + Minor = resource.Metadata.MinorVersion, + Revision = resource.Metadata.Revision, + Build = resource.Metadata.BuildNumber + }; + + if (Version < LSFVersion.VerBG3ExtendedHeader) + { + var header = new LSFHeader + { + EngineVersion = gameVersion.ToVersion32() + }; + BinUtils.WriteStruct(Writer, ref header); + } + else + { + var header = new LSFHeaderV5 { - Magic = BitConverter.ToUInt32(LSFMagic.Signature, 0), - Version = (uint)Version + EngineVersion = gameVersion.ToVersion64() }; - BinUtils.WriteStruct(Writer, ref magic); + BinUtils.WriteStruct(Writer, ref header); + } - PackedVersion gameVersion = new() + bool chunked = Version >= LSFVersion.VerChunkedCompress; + byte[] stringsCompressed = BinUtils.Compress(stringBuffer, Compression, LSCompressionLevel); + byte[] nodesCompressed = BinUtils.Compress(nodeBuffer, Compression, LSCompressionLevel, chunked); + byte[] attributesCompressed = BinUtils.Compress(attributeBuffer, Compression, LSCompressionLevel, chunked); + byte[] valuesCompressed = BinUtils.Compress(valueBuffer, Compression, LSCompressionLevel, chunked); + + if (Version < LSFVersion.VerBG3AdditionalBlob) + { + var meta = new LSFMetadataV5 { - Major = resource.Metadata.MajorVersion, - Minor = resource.Metadata.MinorVersion, - Revision = resource.Metadata.Revision, - Build = resource.Metadata.BuildNumber + StringsUncompressedSize = (UInt32)stringBuffer.Length, + NodesUncompressedSize = (UInt32)nodeBuffer.Length, + AttributesUncompressedSize = (UInt32)attributeBuffer.Length, + ValuesUncompressedSize = (UInt32)valueBuffer.Length }; - if (Version < LSFVersion.VerBG3ExtendedHeader) + if (Compression == CompressionMethod.None) { - var header = new LSFHeader - { - EngineVersion = gameVersion.ToVersion32() - }; - BinUtils.WriteStruct(Writer, ref header); + meta.StringsSizeOnDisk = 0; + meta.NodesSizeOnDisk = 0; + meta.AttributesSizeOnDisk = 0; + meta.ValuesSizeOnDisk = 0; } else { - var header = new LSFHeaderV5 - { - EngineVersion = gameVersion.ToVersion64() - }; - BinUtils.WriteStruct(Writer, ref header); + meta.StringsSizeOnDisk = (UInt32)stringsCompressed.Length; + meta.NodesSizeOnDisk = (UInt32)nodesCompressed.Length; + meta.AttributesSizeOnDisk = (UInt32)attributesCompressed.Length; + meta.ValuesSizeOnDisk = (UInt32)valuesCompressed.Length; } - bool chunked = Version >= LSFVersion.VerChunkedCompress; - byte[] stringsCompressed = BinUtils.Compress(stringBuffer, Compression, LSCompressionLevel); - byte[] nodesCompressed = BinUtils.Compress(nodeBuffer, Compression, LSCompressionLevel, chunked); - byte[] attributesCompressed = BinUtils.Compress(attributeBuffer, Compression, LSCompressionLevel, chunked); - byte[] valuesCompressed = BinUtils.Compress(valueBuffer, Compression, LSCompressionLevel, chunked); + meta.CompressionFlags = BinUtils.MakeCompressionFlags(Compression, LSCompressionLevel); + meta.Unknown2 = 0; + meta.Unknown3 = 0; + meta.HasSiblingData = EncodeSiblingData ? 
1u : 0u; - if (Version < LSFVersion.VerBG3AdditionalBlob) + BinUtils.WriteStruct(Writer, ref meta); + } + else + { + var meta = new LSFMetadataV6 { - var meta = new LSFMetadataV5 - { - StringsUncompressedSize = (UInt32)stringBuffer.Length, - NodesUncompressedSize = (UInt32)nodeBuffer.Length, - AttributesUncompressedSize = (UInt32)attributeBuffer.Length, - ValuesUncompressedSize = (UInt32)valueBuffer.Length - }; - - if (Compression == CompressionMethod.None) - { - meta.StringsSizeOnDisk = 0; - meta.NodesSizeOnDisk = 0; - meta.AttributesSizeOnDisk = 0; - meta.ValuesSizeOnDisk = 0; - } - else - { - meta.StringsSizeOnDisk = (UInt32)stringsCompressed.Length; - meta.NodesSizeOnDisk = (UInt32)nodesCompressed.Length; - meta.AttributesSizeOnDisk = (UInt32)attributesCompressed.Length; - meta.ValuesSizeOnDisk = (UInt32)valuesCompressed.Length; - } - - meta.CompressionFlags = BinUtils.MakeCompressionFlags(Compression, LSCompressionLevel); - meta.Unknown2 = 0; - meta.Unknown3 = 0; - meta.HasSiblingData = EncodeSiblingData ? 1u : 0u; + StringsUncompressedSize = (UInt32)stringBuffer.Length, + NodesUncompressedSize = (UInt32)nodeBuffer.Length, + AttributesUncompressedSize = (UInt32)attributeBuffer.Length, + ValuesUncompressedSize = (UInt32)valueBuffer.Length + }; - BinUtils.WriteStruct(Writer, ref meta); + if (Compression == CompressionMethod.None) + { + meta.StringsSizeOnDisk = 0; + meta.NodesSizeOnDisk = 0; + meta.AttributesSizeOnDisk = 0; + meta.ValuesSizeOnDisk = 0; } else { - var meta = new LSFMetadataV6 - { - StringsUncompressedSize = (UInt32)stringBuffer.Length, - NodesUncompressedSize = (UInt32)nodeBuffer.Length, - AttributesUncompressedSize = (UInt32)attributeBuffer.Length, - ValuesUncompressedSize = (UInt32)valueBuffer.Length - }; - - if (Compression == CompressionMethod.None) - { - meta.StringsSizeOnDisk = 0; - meta.NodesSizeOnDisk = 0; - meta.AttributesSizeOnDisk = 0; - meta.ValuesSizeOnDisk = 0; - } - else - { - meta.StringsSizeOnDisk = (UInt32)stringsCompressed.Length; - meta.NodesSizeOnDisk = (UInt32)nodesCompressed.Length; - meta.AttributesSizeOnDisk = (UInt32)attributesCompressed.Length; - meta.ValuesSizeOnDisk = (UInt32)valuesCompressed.Length; - } - - meta.Unknown = 0; - meta.CompressionFlags = BinUtils.MakeCompressionFlags(Compression, LSCompressionLevel); - meta.Unknown2 = 0; - meta.Unknown3 = 0; - meta.HasSiblingData = EncodeSiblingData ? 1u : 0u; - - BinUtils.WriteStruct(Writer, ref meta); + meta.StringsSizeOnDisk = (UInt32)stringsCompressed.Length; + meta.NodesSizeOnDisk = (UInt32)nodesCompressed.Length; + meta.AttributesSizeOnDisk = (UInt32)attributesCompressed.Length; + meta.ValuesSizeOnDisk = (UInt32)valuesCompressed.Length; } - Writer.Write(stringsCompressed, 0, stringsCompressed.Length); - Writer.Write(nodesCompressed, 0, nodesCompressed.Length); - Writer.Write(attributesCompressed, 0, attributesCompressed.Length); - Writer.Write(valuesCompressed, 0, valuesCompressed.Length); + meta.Unknown = 0; + meta.CompressionFlags = BinUtils.MakeCompressionFlags(Compression, LSCompressionLevel); + meta.Unknown2 = 0; + meta.Unknown3 = 0; + meta.HasSiblingData = EncodeSiblingData ? 
1u : 0u; + + BinUtils.WriteStruct(Writer, ref meta); } + + Writer.Write(stringsCompressed, 0, stringsCompressed.Length); + Writer.Write(nodesCompressed, 0, nodesCompressed.Length); + Writer.Write(attributesCompressed, 0, attributesCompressed.Length); + Writer.Write(valuesCompressed, 0, valuesCompressed.Length); } + } - private int ComputeSiblingIndices(Node node) - { - int index = NextNodeIndex; - NextNodeIndex++; - NextSiblingIndices.Add(-1); + private int ComputeSiblingIndices(Node node) + { + int index = NextNodeIndex; + NextNodeIndex++; + NextSiblingIndices.Add(-1); - int lastSiblingIndex = -1; - foreach (var children in node.Children) + int lastSiblingIndex = -1; + foreach (var children in node.Children) + { + foreach (var child in children.Value) { - foreach (var child in children.Value) + int childIndex = ComputeSiblingIndices(child); + if (lastSiblingIndex != -1) { - int childIndex = ComputeSiblingIndices(child); - if (lastSiblingIndex != -1) - { - NextSiblingIndices[lastSiblingIndex] = childIndex; - } - - lastSiblingIndex = childIndex; + NextSiblingIndices[lastSiblingIndex] = childIndex; } - } - return index; + lastSiblingIndex = childIndex; + } } - private void ComputeSiblingIndices(Resource resource) - { - NextNodeIndex = 0; - NextSiblingIndices = []; + return index; + } - int lastRegionIndex = -1; - foreach (var region in resource.Regions) - { - int regionIndex = ComputeSiblingIndices(region.Value); - if (lastRegionIndex != -1) - { - NextSiblingIndices[lastRegionIndex] = regionIndex; - } + private void ComputeSiblingIndices(Resource resource) + { + NextNodeIndex = 0; + NextSiblingIndices = []; - lastRegionIndex = regionIndex; + int lastRegionIndex = -1; + foreach (var region in resource.Regions) + { + int regionIndex = ComputeSiblingIndices(region.Value); + if (lastRegionIndex != -1) + { + NextSiblingIndices[lastRegionIndex] = regionIndex; } + + lastRegionIndex = regionIndex; } + } - private void WriteRegions(Resource resource) + private void WriteRegions(Resource resource) + { + NextNodeIndex = 0; + foreach (var region in resource.Regions) { - NextNodeIndex = 0; - foreach (var region in resource.Regions) + if (Version >= LSFVersion.VerExtendedNodes + && EncodeSiblingData) { - if (Version >= LSFVersion.VerExtendedNodes - && EncodeSiblingData) - { - WriteNodeV3(region.Value); - } - else - { - WriteNodeV2(region.Value); - } + WriteNodeV3(region.Value); + } + else + { + WriteNodeV2(region.Value); } } + } - private void WriteNodeAttributesV2(Node node) + private void WriteNodeAttributesV2(Node node) + { + UInt32 lastOffset = (UInt32)ValueStream.Position; + foreach (KeyValuePair entry in node.Attributes) { - UInt32 lastOffset = (UInt32)ValueStream.Position; - foreach (KeyValuePair entry in node.Attributes) - { - WriteAttributeValue(ValueWriter, entry.Value); + WriteAttributeValue(ValueWriter, entry.Value); - var attributeInfo = new LSFAttributeEntryV2(); - var length = (UInt32)ValueStream.Position - lastOffset; - attributeInfo.TypeAndLength = (UInt32)entry.Value.Type | (length << 6); - attributeInfo.NameHashTableIndex = AddStaticString(entry.Key); - attributeInfo.NodeIndex = NextNodeIndex; - BinUtils.WriteStruct(AttributeWriter, ref attributeInfo); - NextAttributeIndex++; + var attributeInfo = new LSFAttributeEntryV2(); + var length = (UInt32)ValueStream.Position - lastOffset; + attributeInfo.TypeAndLength = (UInt32)entry.Value.Type | (length << 6); + attributeInfo.NameHashTableIndex = AddStaticString(entry.Key); + attributeInfo.NodeIndex = NextNodeIndex; + 
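+ // Editor's note (illustrative, not part of the original patch): TypeAndLength packs the
+ // attribute's data type id into the low 6 bits and the serialized value length into the
+ // remaining upper bits. For example, a 4-byte value whose type id is 4 would be stored as
+ // 4 | (4 << 6) = 0x104; a reader can presumably recover the two fields again with
+ // (TypeAndLength & 0x3F) for the type and (TypeAndLength >> 6) for the length.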
BinUtils.WriteStruct(AttributeWriter, ref attributeInfo); + NextAttributeIndex++; - lastOffset = (UInt32)ValueStream.Position; - } + lastOffset = (UInt32)ValueStream.Position; } + } - private void WriteNodeAttributesV3(Node node) + private void WriteNodeAttributesV3(Node node) + { + UInt32 lastOffset = (UInt32)ValueStream.Position; + int numWritten = 0; + foreach (KeyValuePair entry in node.Attributes) { - UInt32 lastOffset = (UInt32)ValueStream.Position; - int numWritten = 0; - foreach (KeyValuePair entry in node.Attributes) + WriteAttributeValue(ValueWriter, entry.Value); + numWritten++; + + var attributeInfo = new LSFAttributeEntryV3(); + var length = (UInt32)ValueStream.Position - lastOffset; + attributeInfo.TypeAndLength = (UInt32)entry.Value.Type | (length << 6); + attributeInfo.NameHashTableIndex = AddStaticString(entry.Key); + if (numWritten == node.Attributes.Count) { - WriteAttributeValue(ValueWriter, entry.Value); - numWritten++; - - var attributeInfo = new LSFAttributeEntryV3(); - var length = (UInt32)ValueStream.Position - lastOffset; - attributeInfo.TypeAndLength = (UInt32)entry.Value.Type | (length << 6); - attributeInfo.NameHashTableIndex = AddStaticString(entry.Key); - if (numWritten == node.Attributes.Count) - { - attributeInfo.NextAttributeIndex = -1; - } - else - { - attributeInfo.NextAttributeIndex = NextAttributeIndex + 1; - } - attributeInfo.Offset = lastOffset; - BinUtils.WriteStruct(AttributeWriter, ref attributeInfo); + attributeInfo.NextAttributeIndex = -1; + } + else + { + attributeInfo.NextAttributeIndex = NextAttributeIndex + 1; + } + attributeInfo.Offset = lastOffset; + BinUtils.WriteStruct(AttributeWriter, ref attributeInfo); - NextAttributeIndex++; + NextAttributeIndex++; - lastOffset = (UInt32)ValueStream.Position; - } + lastOffset = (UInt32)ValueStream.Position; } + } - private void WriteNodeChildren(Node node) + private void WriteNodeChildren(Node node) + { + foreach (var children in node.Children) { - foreach (var children in node.Children) + foreach (var child in children.Value) { - foreach (var child in children.Value) + if (Version >= LSFVersion.VerExtendedNodes && EncodeSiblingData) { - if (Version >= LSFVersion.VerExtendedNodes && EncodeSiblingData) - { - WriteNodeV3(child); - } - else - { - WriteNodeV2(child); - } + WriteNodeV3(child); + } + else + { + WriteNodeV2(child); } } } + } - private void WriteNodeV2(Node node) + private void WriteNodeV2(Node node) + { + var nodeInfo = new LSFNodeEntryV2(); + if (node.Parent == null) { - var nodeInfo = new LSFNodeEntryV2(); - if (node.Parent == null) - { - nodeInfo.ParentIndex = -1; - } - else - { - nodeInfo.ParentIndex = NodeIndices[node.Parent]; - } + nodeInfo.ParentIndex = -1; + } + else + { + nodeInfo.ParentIndex = NodeIndices[node.Parent]; + } - nodeInfo.NameHashTableIndex = AddStaticString(node.Name); + nodeInfo.NameHashTableIndex = AddStaticString(node.Name); - if (node.Attributes.Count > 0) - { - nodeInfo.FirstAttributeIndex = NextAttributeIndex; - WriteNodeAttributesV2(node); - } - else - { - nodeInfo.FirstAttributeIndex = -1; - } + if (node.Attributes.Count > 0) + { + nodeInfo.FirstAttributeIndex = NextAttributeIndex; + WriteNodeAttributesV2(node); + } + else + { + nodeInfo.FirstAttributeIndex = -1; + } - BinUtils.WriteStruct(NodeWriter, ref nodeInfo); - NodeIndices[node] = NextNodeIndex; - NextNodeIndex++; + BinUtils.WriteStruct(NodeWriter, ref nodeInfo); + NodeIndices[node] = NextNodeIndex; + NextNodeIndex++; - WriteNodeChildren(node); - } + WriteNodeChildren(node); + } - private void 
WriteNodeV3(Node node) + private void WriteNodeV3(Node node) + { + var nodeInfo = new LSFNodeEntryV3(); + if (node.Parent == null) { - var nodeInfo = new LSFNodeEntryV3(); - if (node.Parent == null) - { - nodeInfo.ParentIndex = -1; - } - else - { - nodeInfo.ParentIndex = NodeIndices[node.Parent]; - } + nodeInfo.ParentIndex = -1; + } + else + { + nodeInfo.ParentIndex = NodeIndices[node.Parent]; + } - nodeInfo.NameHashTableIndex = AddStaticString(node.Name); + nodeInfo.NameHashTableIndex = AddStaticString(node.Name); - // Assumes we calculated indices first using ComputeSiblingIndices() - nodeInfo.NextSiblingIndex = NextSiblingIndices[NextNodeIndex]; + // Assumes we calculated indices first using ComputeSiblingIndices() + nodeInfo.NextSiblingIndex = NextSiblingIndices[NextNodeIndex]; - if (node.Attributes.Count > 0) - { - nodeInfo.FirstAttributeIndex = NextAttributeIndex; - WriteNodeAttributesV3(node); - } - else - { - nodeInfo.FirstAttributeIndex = -1; - } + if (node.Attributes.Count > 0) + { + nodeInfo.FirstAttributeIndex = NextAttributeIndex; + WriteNodeAttributesV3(node); + } + else + { + nodeInfo.FirstAttributeIndex = -1; + } - BinUtils.WriteStruct(NodeWriter, ref nodeInfo); - NodeIndices[node] = NextNodeIndex; - NextNodeIndex++; + BinUtils.WriteStruct(NodeWriter, ref nodeInfo); + NodeIndices[node] = NextNodeIndex; + NextNodeIndex++; - WriteNodeChildren(node); - } + WriteNodeChildren(node); + } - private void WriteTranslatedFSString(BinaryWriter writer, TranslatedFSString fs) + private void WriteTranslatedFSString(BinaryWriter writer, TranslatedFSString fs) + { + if (Version >= LSFVersion.VerBG3 || + (Meta.MajorVersion > 4 || + (Meta.MajorVersion == 4 && Meta.Revision > 0) || + (Meta.MajorVersion == 4 && Meta.Revision == 0 && Meta.BuildNumber >= 0x1a))) { - if (Version >= LSFVersion.VerBG3 || - (Meta.MajorVersion > 4 || - (Meta.MajorVersion == 4 && Meta.Revision > 0) || - (Meta.MajorVersion == 4 && Meta.Revision == 0 && Meta.BuildNumber >= 0x1a))) - { - writer.Write(fs.Version); - } - else - { - WriteStringWithLength(writer, fs.Value ?? ""); - } + writer.Write(fs.Version); + } + else + { + WriteStringWithLength(writer, fs.Value ?? 
""); + } - WriteStringWithLength(writer, fs.Handle); + WriteStringWithLength(writer, fs.Handle); - writer.Write((UInt32)fs.Arguments.Count); - foreach (var arg in fs.Arguments) - { - WriteStringWithLength(writer, arg.Key); - WriteTranslatedFSString(writer, arg.String); - WriteStringWithLength(writer, arg.Value); - } + writer.Write((UInt32)fs.Arguments.Count); + foreach (var arg in fs.Arguments) + { + WriteStringWithLength(writer, arg.Key); + WriteTranslatedFSString(writer, arg.String); + WriteStringWithLength(writer, arg.Value); } + } - private void WriteAttributeValue(BinaryWriter writer, NodeAttribute attr) + private void WriteAttributeValue(BinaryWriter writer, NodeAttribute attr) + { + switch (attr.Type) { - switch (attr.Type) - { - case NodeAttribute.DataType.DT_String: - case NodeAttribute.DataType.DT_Path: - case NodeAttribute.DataType.DT_FixedString: - case NodeAttribute.DataType.DT_LSString: - case NodeAttribute.DataType.DT_WString: - case NodeAttribute.DataType.DT_LSWString: - WriteString(writer, (string)attr.Value); - break; - - case NodeAttribute.DataType.DT_TranslatedString: + case NodeAttribute.DataType.DT_String: + case NodeAttribute.DataType.DT_Path: + case NodeAttribute.DataType.DT_FixedString: + case NodeAttribute.DataType.DT_LSString: + case NodeAttribute.DataType.DT_WString: + case NodeAttribute.DataType.DT_LSWString: + WriteString(writer, (string)attr.Value); + break; + + case NodeAttribute.DataType.DT_TranslatedString: + { + var ts = (TranslatedString)attr.Value; + if (Version >= LSFVersion.VerBG3) { - var ts = (TranslatedString)attr.Value; - if (Version >= LSFVersion.VerBG3) - { - writer.Write(ts.Version); - } - else - { - WriteStringWithLength(writer, ts.Value ?? ""); - } - - WriteStringWithLength(writer, ts.Handle); - break; + writer.Write(ts.Version); } - - case NodeAttribute.DataType.DT_TranslatedFSString: + else { - var fs = (TranslatedFSString)attr.Value; - WriteTranslatedFSString(writer, fs); - break; + WriteStringWithLength(writer, ts.Value ?? 
""); } - case NodeAttribute.DataType.DT_ScratchBuffer: - { - var buffer = (byte[])attr.Value; - writer.Write(buffer); - break; - } + WriteStringWithLength(writer, ts.Handle); + break; + } - default: - BinUtils.WriteAttribute(writer, attr); + case NodeAttribute.DataType.DT_TranslatedFSString: + { + var fs = (TranslatedFSString)attr.Value; + WriteTranslatedFSString(writer, fs); break; - } - } + } - private uint AddStaticString(string s) - { - var hashCode = (uint)s.GetHashCode(); - var bucket = (int)((hashCode & 0x1ff) ^ ((hashCode >> 9) & 0x1ff) ^ ((hashCode >> 18) & 0x1ff) ^ ((hashCode >> 27) & 0x1ff)); - for (int i = 0; i < StringHashMap[bucket].Count; i++) - { - if (StringHashMap[bucket][i].Equals(s)) + case NodeAttribute.DataType.DT_ScratchBuffer: { - return (uint)((bucket << 16) | i); + var buffer = (byte[])attr.Value; + writer.Write(buffer); + break; } - } - StringHashMap[bucket].Add(s); - return (uint)((bucket << 16) | (StringHashMap[bucket].Count - 1)); + default: + BinUtils.WriteAttribute(writer, attr); + break; } + } - private void WriteStaticStrings(BinaryWriter writer) + private uint AddStaticString(string s) + { + var hashCode = (uint)s.GetHashCode(); + var bucket = (int)((hashCode & 0x1ff) ^ ((hashCode >> 9) & 0x1ff) ^ ((hashCode >> 18) & 0x1ff) ^ ((hashCode >> 27) & 0x1ff)); + for (int i = 0; i < StringHashMap[bucket].Count; i++) { - writer.Write((UInt32)StringHashMap.Count); - for (int i = 0; i < StringHashMap.Count; i++) + if (StringHashMap[bucket][i].Equals(s)) { - var entry = StringHashMap[i]; - writer.Write((UInt16)entry.Count); - for (int j = 0; j < entry.Count; j++) - { - WriteStaticString(writer, entry[j]); - } + return (uint)((bucket << 16) | i); } } - private void WriteStaticString(BinaryWriter writer, string s) - { - byte[] utf = Encoding.UTF8.GetBytes(s); - writer.Write((UInt16)utf.Length); - writer.Write(utf); - } + StringHashMap[bucket].Add(s); + return (uint)((bucket << 16) | (StringHashMap[bucket].Count - 1)); + } - private void WriteStringWithLength(BinaryWriter writer, string s) + private void WriteStaticStrings(BinaryWriter writer) + { + writer.Write((UInt32)StringHashMap.Count); + for (int i = 0; i < StringHashMap.Count; i++) { - byte[] utf = Encoding.UTF8.GetBytes(s); - writer.Write((Int32)(utf.Length + 1)); - writer.Write(utf); - writer.Write((Byte)0); + var entry = StringHashMap[i]; + writer.Write((UInt16)entry.Count); + for (int j = 0; j < entry.Count; j++) + { + WriteStaticString(writer, entry[j]); + } } + } - private void WriteString(BinaryWriter writer, string s) - { - byte[] utf = Encoding.UTF8.GetBytes(s); - writer.Write(utf); - writer.Write((Byte)0); - } + private void WriteStaticString(BinaryWriter writer, string s) + { + byte[] utf = Encoding.UTF8.GetBytes(s); + writer.Write((UInt16)utf.Length); + writer.Write(utf); + } + + private void WriteStringWithLength(BinaryWriter writer, string s) + { + byte[] utf = Encoding.UTF8.GetBytes(s); + writer.Write((Int32)(utf.Length + 1)); + writer.Write(utf); + writer.Write((Byte)0); + } + + private void WriteString(BinaryWriter writer, string s) + { + byte[] utf = Encoding.UTF8.GetBytes(s); + writer.Write(utf); + writer.Write((Byte)0); } } diff --git a/LSLib/LS/Resources/LSJ/LSJReader.cs b/LSLib/LS/Resources/LSJ/LSJReader.cs index 1d89c98c..bb420db4 100644 --- a/LSLib/LS/Resources/LSJ/LSJReader.cs +++ b/LSLib/LS/Resources/LSJ/LSJReader.cs @@ -2,27 +2,26 @@ using System.IO; using Newtonsoft.Json; -namespace LSLib.LS +namespace LSLib.LS; + +public class LSJReader(Stream stream) : IDisposable { - public class 
LSJReader(Stream stream) : IDisposable - { - private readonly Stream stream = stream; - public NodeSerializationSettings SerializationSettings = new(); + private readonly Stream stream = stream; + public NodeSerializationSettings SerializationSettings = new(); - public void Dispose() - { - stream.Dispose(); - } + public void Dispose() + { + stream.Dispose(); + } - public Resource Read() - { - var settings = new JsonSerializerSettings(); - settings.Converters.Add(new LSJResourceConverter(SerializationSettings)); - var serializer = JsonSerializer.Create(settings); + public Resource Read() + { + var settings = new JsonSerializerSettings(); + settings.Converters.Add(new LSJResourceConverter(SerializationSettings)); + var serializer = JsonSerializer.Create(settings); - using var streamReader = new StreamReader(stream); - using var reader = new JsonTextReader(streamReader); - return serializer.Deserialize(reader); - } + using var streamReader = new StreamReader(stream); + using var reader = new JsonTextReader(streamReader); + return serializer.Deserialize(reader); } } diff --git a/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs b/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs index d3739b82..dfd91949 100644 --- a/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs +++ b/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs @@ -5,819 +5,818 @@ using System.Numerics; using System.Collections.Generic; -namespace LSLib.LS +namespace LSLib.LS; + +public class LSJResourceConverter(NodeSerializationSettings settings) : JsonConverter { - public class LSJResourceConverter(NodeSerializationSettings settings) : JsonConverter - { - private LSMetadata Metadata; - private readonly NodeSerializationSettings SerializationSettings = settings; + private LSMetadata Metadata; + private readonly NodeSerializationSettings SerializationSettings = settings; - public override bool CanConvert(Type objectType) - { - return objectType == typeof(Node) - || objectType == typeof(Resource); - } + public override bool CanConvert(Type objectType) + { + return objectType == typeof(Node) + || objectType == typeof(Resource); + } - private TranslatedFSStringArgument ReadFSStringArgument(JsonReader reader) + private TranslatedFSStringArgument ReadFSStringArgument(JsonReader reader) + { + var fs = new TranslatedFSStringArgument(); + string key = null; + while (reader.Read()) { - var fs = new TranslatedFSStringArgument(); - string key = null; - while (reader.Read()) + if (reader.TokenType == JsonToken.EndObject) { - if (reader.TokenType == JsonToken.EndObject) - { - break; - } - else if (reader.TokenType == JsonToken.PropertyName) - { - key = reader.Value.ToString(); - } - else if (reader.TokenType == JsonToken.String) + break; + } + else if (reader.TokenType == JsonToken.PropertyName) + { + key = reader.Value.ToString(); + } + else if (reader.TokenType == JsonToken.String) + { + if (key == "key") { - if (key == "key") - { - fs.Key = reader.Value.ToString(); - } - else if (key == "value") - { - fs.Value = reader.Value.ToString(); - } - else - { - throw new InvalidDataException("Unknown property encountered during TranslatedFSString argument parsing: " + key); - } + fs.Key = reader.Value.ToString(); } - else if (reader.TokenType == JsonToken.StartObject && key == "string") + else if (key == "value") { - fs.String = ReadTranslatedFSString(reader); + fs.Value = reader.Value.ToString(); } else { - throw new InvalidDataException("Unexpected JSON token during parsing of TranslatedFSString argument: " + reader.TokenType); + throw new 
InvalidDataException("Unknown property encountered during TranslatedFSString argument parsing: " + key); } } - - return fs; + else if (reader.TokenType == JsonToken.StartObject && key == "string") + { + fs.String = ReadTranslatedFSString(reader); + } + else + { + throw new InvalidDataException("Unexpected JSON token during parsing of TranslatedFSString argument: " + reader.TokenType); + } } - private TranslatedFSString ReadTranslatedFSString(JsonReader reader) - { - var fs = new TranslatedFSString(); - string key = ""; + return fs; + } - while (reader.Read()) + private TranslatedFSString ReadTranslatedFSString(JsonReader reader) + { + var fs = new TranslatedFSString(); + string key = ""; + + while (reader.Read()) + { + if (reader.TokenType == JsonToken.PropertyName) { - if (reader.TokenType == JsonToken.PropertyName) - { - key = reader.Value.ToString(); - } - else if (reader.TokenType == JsonToken.String) + key = reader.Value.ToString(); + } + else if (reader.TokenType == JsonToken.String) + { + if (key == "value") { - if (key == "value") + if (reader.Value != null) { - if (reader.Value != null) - { - fs.Value = reader.Value.ToString(); - } - else - { - fs.Value = null; - } - } - else if (key == "handle") - { - fs.Handle = reader.Value.ToString(); + fs.Value = reader.Value.ToString(); } else { - throw new InvalidDataException("Unknown TranslatedFSString property: " + key); + fs.Value = null; } } - else if (reader.TokenType == JsonToken.StartArray && key == "arguments") - { - fs.Arguments = ReadFSStringArguments(reader); - } - else if (reader.TokenType == JsonToken.EndObject) + else if (key == "handle") { - break; + fs.Handle = reader.Value.ToString(); } else { - throw new InvalidDataException("Unexpected JSON token during parsing of TranslatedFSString: " + reader.TokenType); + throw new InvalidDataException("Unknown TranslatedFSString property: " + key); } } - - return fs; + else if (reader.TokenType == JsonToken.StartArray && key == "arguments") + { + fs.Arguments = ReadFSStringArguments(reader); + } + else if (reader.TokenType == JsonToken.EndObject) + { + break; + } + else + { + throw new InvalidDataException("Unexpected JSON token during parsing of TranslatedFSString: " + reader.TokenType); + } } - private List ReadFSStringArguments(JsonReader reader) - { - var args = new List(); + return fs; + } + + private List ReadFSStringArguments(JsonReader reader) + { + var args = new List(); - while (reader.Read()) + while (reader.Read()) + { + if (reader.TokenType == JsonToken.StartObject) { - if (reader.TokenType == JsonToken.StartObject) - { - args.Add(ReadFSStringArgument(reader)); - } - else if (reader.TokenType == JsonToken.EndArray) - { - break; - } - else - { - throw new InvalidDataException("Unexpected JSON token during parsing of TranslatedFSString argument list: " + reader.TokenType); - } + args.Add(ReadFSStringArgument(reader)); + } + else if (reader.TokenType == JsonToken.EndArray) + { + break; + } + else + { + throw new InvalidDataException("Unexpected JSON token during parsing of TranslatedFSString argument list: " + reader.TokenType); } - - return args; } - private NodeAttribute ReadAttribute(JsonReader reader) + return args; + } + + private NodeAttribute ReadAttribute(JsonReader reader) + { + string key = "", handle = null; + List fsStringArguments = null; + NodeAttribute attribute = null; + while (reader.Read()) { - string key = "", handle = null; - List fsStringArguments = null; - NodeAttribute attribute = null; - while (reader.Read()) + if (reader.TokenType == 
JsonToken.EndObject) { - if (reader.TokenType == JsonToken.EndObject) - { - break; - } - else if (reader.TokenType == JsonToken.PropertyName) - { - key = reader.Value.ToString(); - } - else if (reader.TokenType == JsonToken.String - || reader.TokenType == JsonToken.Integer - || reader.TokenType == JsonToken.Float - || reader.TokenType == JsonToken.Boolean - || reader.TokenType == JsonToken.Null) + break; + } + else if (reader.TokenType == JsonToken.PropertyName) + { + key = reader.Value.ToString(); + } + else if (reader.TokenType == JsonToken.String + || reader.TokenType == JsonToken.Integer + || reader.TokenType == JsonToken.Float + || reader.TokenType == JsonToken.Boolean + || reader.TokenType == JsonToken.Null) + { + if (key == "type") { - if (key == "type") + if (!UInt32.TryParse((string)reader.Value, out uint type)) { - if (!UInt32.TryParse((string)reader.Value, out uint type)) - { - type = (uint)AttributeTypeMaps.TypeToId[(string)reader.Value]; - } + type = (uint)AttributeTypeMaps.TypeToId[(string)reader.Value]; + } - attribute = new NodeAttribute((NodeAttribute.DataType)type); - if (type == (uint)NodeAttribute.DataType.DT_TranslatedString) - { - attribute.Value = new TranslatedString - { - Handle = handle - }; - } - else if (type == (uint)NodeAttribute.DataType.DT_TranslatedFSString) + attribute = new NodeAttribute((NodeAttribute.DataType)type); + if (type == (uint)NodeAttribute.DataType.DT_TranslatedString) + { + attribute.Value = new TranslatedString { - attribute.Value = new TranslatedFSString - { - Handle = handle, - Arguments = fsStringArguments - }; - } + Handle = handle + }; } - else if (key == "value") + else if (type == (uint)NodeAttribute.DataType.DT_TranslatedFSString) { - switch (attribute.Type) + attribute.Value = new TranslatedFSString { - case NodeAttribute.DataType.DT_Byte: - attribute.Value = Convert.ToByte(reader.Value); - break; + Handle = handle, + Arguments = fsStringArguments + }; + } + } + else if (key == "value") + { + switch (attribute.Type) + { + case NodeAttribute.DataType.DT_Byte: + attribute.Value = Convert.ToByte(reader.Value); + break; - case NodeAttribute.DataType.DT_Short: - attribute.Value = Convert.ToInt16(reader.Value); - break; + case NodeAttribute.DataType.DT_Short: + attribute.Value = Convert.ToInt16(reader.Value); + break; - case NodeAttribute.DataType.DT_UShort: - attribute.Value = Convert.ToUInt16(reader.Value); - break; + case NodeAttribute.DataType.DT_UShort: + attribute.Value = Convert.ToUInt16(reader.Value); + break; - case NodeAttribute.DataType.DT_Int: - attribute.Value = Convert.ToInt32(reader.Value); - break; + case NodeAttribute.DataType.DT_Int: + attribute.Value = Convert.ToInt32(reader.Value); + break; - case NodeAttribute.DataType.DT_UInt: - attribute.Value = Convert.ToUInt32(reader.Value); - break; + case NodeAttribute.DataType.DT_UInt: + attribute.Value = Convert.ToUInt32(reader.Value); + break; - case NodeAttribute.DataType.DT_Float: - attribute.Value = Convert.ToSingle(reader.Value); - break; + case NodeAttribute.DataType.DT_Float: + attribute.Value = Convert.ToSingle(reader.Value); + break; - case NodeAttribute.DataType.DT_Double: - attribute.Value = Convert.ToDouble(reader.Value); - break; + case NodeAttribute.DataType.DT_Double: + attribute.Value = Convert.ToDouble(reader.Value); + break; - case NodeAttribute.DataType.DT_Bool: - attribute.Value = Convert.ToBoolean(reader.Value); - break; + case NodeAttribute.DataType.DT_Bool: + attribute.Value = Convert.ToBoolean(reader.Value); + break; - case 
NodeAttribute.DataType.DT_String: - case NodeAttribute.DataType.DT_Path: - case NodeAttribute.DataType.DT_FixedString: - case NodeAttribute.DataType.DT_LSString: - case NodeAttribute.DataType.DT_WString: - case NodeAttribute.DataType.DT_LSWString: - attribute.Value = reader.Value.ToString(); - break; + case NodeAttribute.DataType.DT_String: + case NodeAttribute.DataType.DT_Path: + case NodeAttribute.DataType.DT_FixedString: + case NodeAttribute.DataType.DT_LSString: + case NodeAttribute.DataType.DT_WString: + case NodeAttribute.DataType.DT_LSWString: + attribute.Value = reader.Value.ToString(); + break; - case NodeAttribute.DataType.DT_ULongLong: - if (reader.Value.GetType() == typeof(System.Int64)) - attribute.Value = Convert.ToUInt64((long)reader.Value); - else if (reader.Value.GetType() == typeof(BigInteger)) - attribute.Value = (ulong)((BigInteger)reader.Value); - else - attribute.Value = (ulong)reader.Value; - break; + case NodeAttribute.DataType.DT_ULongLong: + if (reader.Value.GetType() == typeof(System.Int64)) + attribute.Value = Convert.ToUInt64((long)reader.Value); + else if (reader.Value.GetType() == typeof(BigInteger)) + attribute.Value = (ulong)((BigInteger)reader.Value); + else + attribute.Value = (ulong)reader.Value; + break; - // TODO: Not sure if this is the correct format - case NodeAttribute.DataType.DT_ScratchBuffer: - attribute.Value = Convert.FromBase64String(reader.Value.ToString()); - break; + // TODO: Not sure if this is the correct format + case NodeAttribute.DataType.DT_ScratchBuffer: + attribute.Value = Convert.FromBase64String(reader.Value.ToString()); + break; - case NodeAttribute.DataType.DT_Long: - case NodeAttribute.DataType.DT_Int64: - attribute.Value = Convert.ToInt64(reader.Value); - break; + case NodeAttribute.DataType.DT_Long: + case NodeAttribute.DataType.DT_Int64: + attribute.Value = Convert.ToInt64(reader.Value); + break; - case NodeAttribute.DataType.DT_Int8: - attribute.Value = Convert.ToSByte(reader.Value); - break; + case NodeAttribute.DataType.DT_Int8: + attribute.Value = Convert.ToSByte(reader.Value); + break; - case NodeAttribute.DataType.DT_TranslatedString: - { - attribute.Value ??= new TranslatedString(); - - var ts = (TranslatedString)attribute.Value; - ts.Value = reader.Value.ToString(); - ts.Handle = handle; - break; - } - - case NodeAttribute.DataType.DT_TranslatedFSString: - { - attribute.Value ??= new TranslatedFSString(); - - var fsString = (TranslatedFSString)attribute.Value; - fsString.Value = reader.Value?.ToString(); - fsString.Handle = handle; - fsString.Arguments = fsStringArguments; - attribute.Value = fsString; - break; - } - - case NodeAttribute.DataType.DT_UUID: - if (SerializationSettings.ByteSwapGuids) - { - attribute.Value = NodeAttribute.ByteSwapGuid(new Guid(reader.Value.ToString())); - } - else - { - attribute.Value = new Guid(reader.Value.ToString()); - } + case NodeAttribute.DataType.DT_TranslatedString: + { + attribute.Value ??= new TranslatedString(); + + var ts = (TranslatedString)attribute.Value; + ts.Value = reader.Value.ToString(); + ts.Handle = handle; break; + } - case NodeAttribute.DataType.DT_IVec2: - case NodeAttribute.DataType.DT_IVec3: - case NodeAttribute.DataType.DT_IVec4: - { - string[] nums = reader.Value.ToString().Split(' '); - int length = attribute.GetColumns(); - if (length != nums.Length) - throw new FormatException(String.Format("A vector of length {0} was expected, got {1}", length, nums.Length)); - - int[] vec = new int[length]; - for (int i = 0; i < length; i++) - vec[i] = 
int.Parse(nums[i]); - - attribute.Value = vec; - break; - } - - case NodeAttribute.DataType.DT_Vec2: - case NodeAttribute.DataType.DT_Vec3: - case NodeAttribute.DataType.DT_Vec4: - { - string[] nums = reader.Value.ToString().Split(' '); - int length = attribute.GetColumns(); - if (length != nums.Length) - throw new FormatException(String.Format("A vector of length {0} was expected, got {1}", length, nums.Length)); - - float[] vec = new float[length]; - for (int i = 0; i < length; i++) - vec[i] = float.Parse(nums[i]); - - attribute.Value = vec; - break; - } - - case NodeAttribute.DataType.DT_Mat2: - case NodeAttribute.DataType.DT_Mat3: - case NodeAttribute.DataType.DT_Mat3x4: - case NodeAttribute.DataType.DT_Mat4x3: - case NodeAttribute.DataType.DT_Mat4: - var mat = Matrix.Parse(reader.Value.ToString()); - if (mat.cols != attribute.GetColumns() || mat.rows != attribute.GetRows()) - throw new FormatException("Invalid column/row count for matrix"); - attribute.Value = mat; + case NodeAttribute.DataType.DT_TranslatedFSString: + { + attribute.Value ??= new TranslatedFSString(); + + var fsString = (TranslatedFSString)attribute.Value; + fsString.Value = reader.Value?.ToString(); + fsString.Handle = handle; + fsString.Arguments = fsStringArguments; + attribute.Value = fsString; break; + } - case NodeAttribute.DataType.DT_None: - default: - throw new NotImplementedException("Don't know how to unserialize type " + attribute.Type.ToString()); - } - } - else if (key == "handle") - { - if (attribute != null) - { - if (attribute.Type == NodeAttribute.DataType.DT_TranslatedString) + case NodeAttribute.DataType.DT_UUID: + if (SerializationSettings.ByteSwapGuids) { - attribute.Value ??= new TranslatedString(); + attribute.Value = NodeAttribute.ByteSwapGuid(new Guid(reader.Value.ToString())); + } + else + { + attribute.Value = new Guid(reader.Value.ToString()); + } + break; + + case NodeAttribute.DataType.DT_IVec2: + case NodeAttribute.DataType.DT_IVec3: + case NodeAttribute.DataType.DT_IVec4: + { + string[] nums = reader.Value.ToString().Split(' '); + int length = attribute.GetColumns(); + if (length != nums.Length) + throw new FormatException(String.Format("A vector of length {0} was expected, got {1}", length, nums.Length)); + + int[] vec = new int[length]; + for (int i = 0; i < length; i++) + vec[i] = int.Parse(nums[i]); - ((TranslatedString)attribute.Value).Handle = reader.Value.ToString(); + attribute.Value = vec; + break; } - else if (attribute.Type == NodeAttribute.DataType.DT_TranslatedFSString) + + case NodeAttribute.DataType.DT_Vec2: + case NodeAttribute.DataType.DT_Vec3: + case NodeAttribute.DataType.DT_Vec4: { - attribute.Value ??= new TranslatedFSString(); + string[] nums = reader.Value.ToString().Split(' '); + int length = attribute.GetColumns(); + if (length != nums.Length) + throw new FormatException(String.Format("A vector of length {0} was expected, got {1}", length, nums.Length)); - ((TranslatedFSString)attribute.Value).Handle = reader.Value.ToString(); + float[] vec = new float[length]; + for (int i = 0; i < length; i++) + vec[i] = float.Parse(nums[i]); + + attribute.Value = vec; + break; } - } - else - { - handle = reader.Value.ToString(); - } + + case NodeAttribute.DataType.DT_Mat2: + case NodeAttribute.DataType.DT_Mat3: + case NodeAttribute.DataType.DT_Mat3x4: + case NodeAttribute.DataType.DT_Mat4x3: + case NodeAttribute.DataType.DT_Mat4: + var mat = Matrix.Parse(reader.Value.ToString()); + if (mat.cols != attribute.GetColumns() || mat.rows != attribute.GetRows()) + throw new 
FormatException("Invalid column/row count for matrix"); + attribute.Value = mat; + break; + + case NodeAttribute.DataType.DT_None: + default: + throw new NotImplementedException("Don't know how to unserialize type " + attribute.Type.ToString()); } - else if (key == "version") + } + else if (key == "handle") + { + if (attribute != null) { - attribute.Value ??= new TranslatedString(); + if (attribute.Type == NodeAttribute.DataType.DT_TranslatedString) + { + attribute.Value ??= new TranslatedString(); + + ((TranslatedString)attribute.Value).Handle = reader.Value.ToString(); + } + else if (attribute.Type == NodeAttribute.DataType.DT_TranslatedFSString) + { + attribute.Value ??= new TranslatedFSString(); - var ts = (TranslatedString)attribute.Value; - ts.Version = UInt16.Parse(reader.Value.ToString()); + ((TranslatedFSString)attribute.Value).Handle = reader.Value.ToString(); + } } else { - throw new InvalidDataException("Unknown property encountered during attribute parsing: " + key); + handle = reader.Value.ToString(); } } - else if (reader.TokenType == JsonToken.StartArray && key == "arguments") + else if (key == "version") { - var args = ReadFSStringArguments(reader); + attribute.Value ??= new TranslatedString(); - if (attribute.Value != null) - { - var fs = ((TranslatedFSString)attribute.Value); - fs.Arguments = args; - } - else - { - fsStringArguments = args; - } + var ts = (TranslatedString)attribute.Value; + ts.Version = UInt16.Parse(reader.Value.ToString()); } else { - throw new InvalidDataException("Unexpected JSON token during parsing of attribute: " + reader.TokenType); + throw new InvalidDataException("Unknown property encountered during attribute parsing: " + key); } } - - return attribute; - } - - private Node ReadNode(JsonReader reader, Node node) - { - string key = ""; - while (reader.Read()) + else if (reader.TokenType == JsonToken.StartArray && key == "arguments") { - if (reader.TokenType == JsonToken.EndObject) - { - break; - } - else if (reader.TokenType == JsonToken.PropertyName) - { - key = reader.Value.ToString(); - } - else if (reader.TokenType == JsonToken.StartObject) + var args = ReadFSStringArguments(reader); + + if (attribute.Value != null) { - var attribute = ReadAttribute(reader); - node.Attributes.Add(key, attribute); - } - else if (reader.TokenType == JsonToken.StartArray) - { - while (reader.Read()) - { - if (reader.TokenType == JsonToken.EndArray) - { - break; - } - else if (reader.TokenType == JsonToken.StartObject) - { - var childNode = new Node - { - Name = key - }; - ReadNode(reader, childNode); - node.AppendChild(childNode); - childNode.Parent = node; - } - else - { - throw new InvalidDataException("Unexpected JSON token during parsing of child node list: " + reader.TokenType); - } - } + var fs = ((TranslatedFSString)attribute.Value); + fs.Arguments = args; } else { - throw new InvalidDataException("Unexpected JSON token during parsing of node: " + reader.TokenType); + fsStringArguments = args; } } - - return node; + else + { + throw new InvalidDataException("Unexpected JSON token during parsing of attribute: " + reader.TokenType); + } } - private Resource ReadResource(JsonReader reader, Resource resource) - { - resource ??= new Resource(); + return attribute; + } - if (!reader.Read() || reader.TokenType != JsonToken.PropertyName || !reader.Value.Equals("save")) + private Node ReadNode(JsonReader reader, Node node) + { + string key = ""; + while (reader.Read()) + { + if (reader.TokenType == JsonToken.EndObject) { - throw new InvalidDataException("Expected 
JSON property 'save'"); + break; } - - if (!reader.Read() || reader.TokenType != JsonToken.StartObject) + else if (reader.TokenType == JsonToken.PropertyName) { - throw new InvalidDataException("Expected JSON object start token for 'save': " + reader.TokenType); + key = reader.Value.ToString(); } - - if (!reader.Read() || reader.TokenType != JsonToken.PropertyName || !reader.Value.Equals("header")) + else if (reader.TokenType == JsonToken.StartObject) { - throw new InvalidDataException("Expected JSON property 'header'"); + var attribute = ReadAttribute(reader); + node.Attributes.Add(key, attribute); } - - if (!reader.Read() || reader.TokenType != JsonToken.StartObject) + else if (reader.TokenType == JsonToken.StartArray) { - throw new InvalidDataException("Expected JSON object start token for 'header': " + reader.TokenType); - } - - string key = ""; - while (reader.Read()) - { - if (reader.TokenType == JsonToken.EndObject) + while (reader.Read()) { - break; - } - else if (reader.TokenType == JsonToken.PropertyName) - { - key = reader.Value.ToString(); - } - else if (reader.TokenType == JsonToken.String || reader.TokenType == JsonToken.Integer) - { - if (key == "time") + if (reader.TokenType == JsonToken.EndArray) { - resource.Metadata.Timestamp = Convert.ToUInt32(reader.Value); + break; } - else if (key == "version") + else if (reader.TokenType == JsonToken.StartObject) { - var pattern = @"^([0-9]+)\.([0-9]+)\.([0-9]+)\.([0-9]+)$"; - var re = new Regex(pattern); - var match = re.Match(reader.Value.ToString()); - if (match.Success) - { - resource.Metadata.MajorVersion = Convert.ToUInt32(match.Groups[1].Value); - resource.Metadata.MinorVersion = Convert.ToUInt32(match.Groups[2].Value); - resource.Metadata.Revision = Convert.ToUInt32(match.Groups[3].Value); - resource.Metadata.BuildNumber = Convert.ToUInt32(match.Groups[4].Value); - } - else + var childNode = new Node { - throw new InvalidDataException("Malformed version string: " + reader.Value.ToString()); - } + Name = key + }; + ReadNode(reader, childNode); + node.AppendChild(childNode); + childNode.Parent = node; } else { - throw new InvalidDataException("Unknown property encountered during header parsing: " + key); + throw new InvalidDataException("Unexpected JSON token during parsing of child node list: " + reader.TokenType); } } - else - { - throw new InvalidDataException("Unexpected JSON token during parsing of header: " + reader.TokenType); - } } - - if (!reader.Read() || reader.TokenType != JsonToken.PropertyName || !reader.Value.Equals("regions")) + else { - throw new InvalidDataException("Expected JSON property 'regions'"); + throw new InvalidDataException("Unexpected JSON token during parsing of node: " + reader.TokenType); } + } + + return node; + } - if (!reader.Read() || reader.TokenType != JsonToken.StartObject) + private Resource ReadResource(JsonReader reader, Resource resource) + { + resource ??= new Resource(); + + if (!reader.Read() || reader.TokenType != JsonToken.PropertyName || !reader.Value.Equals("save")) + { + throw new InvalidDataException("Expected JSON property 'save'"); + } + + if (!reader.Read() || reader.TokenType != JsonToken.StartObject) + { + throw new InvalidDataException("Expected JSON object start token for 'save': " + reader.TokenType); + } + + if (!reader.Read() || reader.TokenType != JsonToken.PropertyName || !reader.Value.Equals("header")) + { + throw new InvalidDataException("Expected JSON property 'header'"); + } + + if (!reader.Read() || reader.TokenType != JsonToken.StartObject) + { + throw new 
InvalidDataException("Expected JSON object start token for 'header': " + reader.TokenType); + } + + string key = ""; + while (reader.Read()) + { + if (reader.TokenType == JsonToken.EndObject) { - throw new InvalidDataException("Expected JSON object start token for 'regions': " + reader.TokenType); + break; } - - while (reader.Read()) + else if (reader.TokenType == JsonToken.PropertyName) { - if (reader.TokenType == JsonToken.EndObject) - { - break; - } - else if (reader.TokenType == JsonToken.PropertyName) + key = reader.Value.ToString(); + } + else if (reader.TokenType == JsonToken.String || reader.TokenType == JsonToken.Integer) + { + if (key == "time") { - key = reader.Value.ToString(); + resource.Metadata.Timestamp = Convert.ToUInt32(reader.Value); } - else if (reader.TokenType == JsonToken.StartObject) + else if (key == "version") { - var region = new Region(); - ReadNode(reader, region); - region.Name = key; - region.RegionName = key; - resource.Regions.Add(key, region); + var pattern = @"^([0-9]+)\.([0-9]+)\.([0-9]+)\.([0-9]+)$"; + var re = new Regex(pattern); + var match = re.Match(reader.Value.ToString()); + if (match.Success) + { + resource.Metadata.MajorVersion = Convert.ToUInt32(match.Groups[1].Value); + resource.Metadata.MinorVersion = Convert.ToUInt32(match.Groups[2].Value); + resource.Metadata.Revision = Convert.ToUInt32(match.Groups[3].Value); + resource.Metadata.BuildNumber = Convert.ToUInt32(match.Groups[4].Value); + } + else + { + throw new InvalidDataException("Malformed version string: " + reader.Value.ToString()); + } } else { - throw new InvalidDataException("Unexpected JSON token during parsing of region list: " + reader.TokenType); + throw new InvalidDataException("Unknown property encountered during header parsing: " + key); } } + else + { + throw new InvalidDataException("Unexpected JSON token during parsing of header: " + reader.TokenType); + } + } - return resource; + if (!reader.Read() || reader.TokenType != JsonToken.PropertyName || !reader.Value.Equals("regions")) + { + throw new InvalidDataException("Expected JSON property 'regions'"); } - public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) + if (!reader.Read() || reader.TokenType != JsonToken.StartObject) { - if (objectType == typeof(Node)) + throw new InvalidDataException("Expected JSON object start token for 'regions': " + reader.TokenType); + } + + while (reader.Read()) + { + if (reader.TokenType == JsonToken.EndObject) { - return ReadNode(reader, existingValue as Node); + break; } - else if (objectType == typeof(Resource)) + else if (reader.TokenType == JsonToken.PropertyName) { - return ReadResource(reader, existingValue as Resource); + key = reader.Value.ToString(); + } + else if (reader.TokenType == JsonToken.StartObject) + { + var region = new Region(); + ReadNode(reader, region); + region.Name = key; + region.RegionName = key; + resource.Regions.Add(key, region); } else { - throw new InvalidOperationException("Cannot unserialize unknown type"); + throw new InvalidDataException("Unexpected JSON token during parsing of region list: " + reader.TokenType); } } - private void WriteResource(JsonWriter writer, Resource resource, JsonSerializer serializer) - { - Metadata = resource.Metadata; - writer.WriteStartObject(); + return resource; + } - writer.WritePropertyName("save"); - writer.WriteStartObject(); + public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) + { + if 
(objectType == typeof(Node)) + { + return ReadNode(reader, existingValue as Node); + } + else if (objectType == typeof(Resource)) + { + return ReadResource(reader, existingValue as Resource); + } + else + { + throw new InvalidOperationException("Cannot unserialize unknown type"); + } + } - writer.WritePropertyName("header"); - writer.WriteStartObject(); - writer.WritePropertyName("time"); - writer.WriteValue(resource.Metadata.Timestamp); - writer.WritePropertyName("version"); - var versionString = resource.Metadata.MajorVersion.ToString() + "." - + resource.Metadata.MinorVersion.ToString() + "." - + resource.Metadata.Revision.ToString() + "." - + resource.Metadata.BuildNumber.ToString(); - writer.WriteValue(versionString); - writer.WriteEndObject(); + private void WriteResource(JsonWriter writer, Resource resource, JsonSerializer serializer) + { + Metadata = resource.Metadata; + writer.WriteStartObject(); + + writer.WritePropertyName("save"); + writer.WriteStartObject(); + + writer.WritePropertyName("header"); + writer.WriteStartObject(); + writer.WritePropertyName("time"); + writer.WriteValue(resource.Metadata.Timestamp); + writer.WritePropertyName("version"); + var versionString = resource.Metadata.MajorVersion.ToString() + "." + + resource.Metadata.MinorVersion.ToString() + "." + + resource.Metadata.Revision.ToString() + "." + + resource.Metadata.BuildNumber.ToString(); + writer.WriteValue(versionString); + writer.WriteEndObject(); + + writer.WritePropertyName("regions"); + writer.WriteStartObject(); + foreach (var region in resource.Regions) + { + writer.WritePropertyName(region.Key); + WriteNode(writer, region.Value, serializer); + } + writer.WriteEndObject(); - writer.WritePropertyName("regions"); - writer.WriteStartObject(); - foreach (var region in resource.Regions) - { - writer.WritePropertyName(region.Key); - WriteNode(writer, region.Value, serializer); - } - writer.WriteEndObject(); + writer.WriteEndObject(); + writer.WriteEndObject(); + } - writer.WriteEndObject(); - writer.WriteEndObject(); - } + private void WriteTranslatedFSString(JsonWriter writer, TranslatedFSString fs) + { + writer.WriteStartObject(); + writer.WritePropertyName("value"); + WriteTranslatedFSStringInner(writer, fs); + writer.WriteEndObject(); + } - private void WriteTranslatedFSString(JsonWriter writer, TranslatedFSString fs) + private void WriteTranslatedFSStringInner(JsonWriter writer, TranslatedFSString fs) + { + writer.WriteValue(fs.Value); + writer.WritePropertyName("handle"); + writer.WriteValue(fs.Handle); + writer.WritePropertyName("arguments"); + writer.WriteStartArray(); + for (int i = 0; i < fs.Arguments.Count; i++) { + var arg = fs.Arguments[i]; writer.WriteStartObject(); + writer.WritePropertyName("key"); + writer.WriteValue(arg.Key); + writer.WritePropertyName("string"); + WriteTranslatedFSString(writer, arg.String); writer.WritePropertyName("value"); - WriteTranslatedFSStringInner(writer, fs); + writer.WriteValue(arg.Value); writer.WriteEndObject(); } - private void WriteTranslatedFSStringInner(JsonWriter writer, TranslatedFSString fs) - { - writer.WriteValue(fs.Value); - writer.WritePropertyName("handle"); - writer.WriteValue(fs.Handle); - writer.WritePropertyName("arguments"); - writer.WriteStartArray(); - for (int i = 0; i < fs.Arguments.Count; i++) - { - var arg = fs.Arguments[i]; - writer.WriteStartObject(); - writer.WritePropertyName("key"); - writer.WriteValue(arg.Key); - writer.WritePropertyName("string"); - WriteTranslatedFSString(writer, arg.String); - 
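// Editor's note (illustrative, not part of the original patch): both the old and the new
// WriteTranslatedFSStringInner emit a TranslatedFSString as an object of roughly this shape:
//   { "value": "...", "handle": "...", "arguments": [ { "key": "...", "string": { ... }, "value": "..." } ] }
// where the nested "string" member recurses through WriteTranslatedFSString for each argument.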
writer.WritePropertyName("value"); - writer.WriteValue(arg.Value); - writer.WriteEndObject(); - } + writer.WriteEndArray(); + } - writer.WriteEndArray(); - } + private void WriteNode(JsonWriter writer, Node node, JsonSerializer serializer) + { + writer.WriteStartObject(); - private void WriteNode(JsonWriter writer, Node node, JsonSerializer serializer) + foreach (var attribute in node.Attributes) { + writer.WritePropertyName(attribute.Key); writer.WriteStartObject(); + writer.WritePropertyName("type"); + if (Metadata.MajorVersion >= 4) + { + writer.WriteValue(AttributeTypeMaps.IdToType[attribute.Value.Type]); + } + else + { + writer.WriteValue((int)attribute.Value.Type); + } - foreach (var attribute in node.Attributes) + if (attribute.Value.Type != NodeAttribute.DataType.DT_TranslatedString) { - writer.WritePropertyName(attribute.Key); - writer.WriteStartObject(); - writer.WritePropertyName("type"); - if (Metadata.MajorVersion >= 4) - { - writer.WriteValue(AttributeTypeMaps.IdToType[attribute.Value.Type]); - } - else - { - writer.WriteValue((int)attribute.Value.Type); - } + writer.WritePropertyName("value"); + } - if (attribute.Value.Type != NodeAttribute.DataType.DT_TranslatedString) - { - writer.WritePropertyName("value"); - } + switch (attribute.Value.Type) + { + case NodeAttribute.DataType.DT_Byte: + writer.WriteValue(Convert.ToByte(attribute.Value.Value)); + break; - switch (attribute.Value.Type) - { - case NodeAttribute.DataType.DT_Byte: - writer.WriteValue(Convert.ToByte(attribute.Value.Value)); - break; + case NodeAttribute.DataType.DT_Short: + writer.WriteValue(Convert.ToInt16(attribute.Value.Value)); + break; - case NodeAttribute.DataType.DT_Short: - writer.WriteValue(Convert.ToInt16(attribute.Value.Value)); - break; + case NodeAttribute.DataType.DT_UShort: + writer.WriteValue(Convert.ToUInt16(attribute.Value.Value)); + break; - case NodeAttribute.DataType.DT_UShort: - writer.WriteValue(Convert.ToUInt16(attribute.Value.Value)); - break; + case NodeAttribute.DataType.DT_Int: + writer.WriteValue(Convert.ToInt32(attribute.Value.Value)); + break; - case NodeAttribute.DataType.DT_Int: - writer.WriteValue(Convert.ToInt32(attribute.Value.Value)); - break; + case NodeAttribute.DataType.DT_UInt: + writer.WriteValue(Convert.ToUInt32(attribute.Value.Value)); + break; - case NodeAttribute.DataType.DT_UInt: - writer.WriteValue(Convert.ToUInt32(attribute.Value.Value)); - break; + case NodeAttribute.DataType.DT_Float: + writer.WriteValue(Convert.ToSingle(attribute.Value.Value)); + break; - case NodeAttribute.DataType.DT_Float: - writer.WriteValue(Convert.ToSingle(attribute.Value.Value)); - break; + case NodeAttribute.DataType.DT_Double: + writer.WriteValue(Convert.ToDouble(attribute.Value.Value)); + break; - case NodeAttribute.DataType.DT_Double: - writer.WriteValue(Convert.ToDouble(attribute.Value.Value)); - break; + case NodeAttribute.DataType.DT_Bool: + writer.WriteValue(Convert.ToBoolean(attribute.Value.Value)); + break; - case NodeAttribute.DataType.DT_Bool: - writer.WriteValue(Convert.ToBoolean(attribute.Value.Value)); - break; + case NodeAttribute.DataType.DT_String: + case NodeAttribute.DataType.DT_Path: + case NodeAttribute.DataType.DT_FixedString: + case NodeAttribute.DataType.DT_LSString: + case NodeAttribute.DataType.DT_WString: + case NodeAttribute.DataType.DT_LSWString: + writer.WriteValue(attribute.Value.AsString(SerializationSettings)); + break; - case NodeAttribute.DataType.DT_String: - case NodeAttribute.DataType.DT_Path: - case NodeAttribute.DataType.DT_FixedString: - case 
NodeAttribute.DataType.DT_LSString: - case NodeAttribute.DataType.DT_WString: - case NodeAttribute.DataType.DT_LSWString: - writer.WriteValue(attribute.Value.AsString(SerializationSettings)); - break; + case NodeAttribute.DataType.DT_ULongLong: + writer.WriteValue(Convert.ToUInt64(attribute.Value.Value)); + break; - case NodeAttribute.DataType.DT_ULongLong: - writer.WriteValue(Convert.ToUInt64(attribute.Value.Value)); - break; + // TODO: Not sure if this is the correct format + case NodeAttribute.DataType.DT_ScratchBuffer: + writer.WriteValue(Convert.ToBase64String((byte[])attribute.Value.Value)); + break; - // TODO: Not sure if this is the correct format - case NodeAttribute.DataType.DT_ScratchBuffer: - writer.WriteValue(Convert.ToBase64String((byte[])attribute.Value.Value)); - break; + case NodeAttribute.DataType.DT_Long: + case NodeAttribute.DataType.DT_Int64: + writer.WriteValue(Convert.ToInt64(attribute.Value.Value)); + break; - case NodeAttribute.DataType.DT_Long: - case NodeAttribute.DataType.DT_Int64: - writer.WriteValue(Convert.ToInt64(attribute.Value.Value)); - break; + case NodeAttribute.DataType.DT_Int8: + writer.WriteValue(Convert.ToSByte(attribute.Value.Value)); + break; - case NodeAttribute.DataType.DT_Int8: - writer.WriteValue(Convert.ToSByte(attribute.Value.Value)); - break; + case NodeAttribute.DataType.DT_TranslatedString: + { + var ts = (TranslatedString)attribute.Value.Value; - case NodeAttribute.DataType.DT_TranslatedString: + if (ts.Value != null) { - var ts = (TranslatedString)attribute.Value.Value; - - if (ts.Value != null) - { - writer.WritePropertyName("value"); - writer.WriteValue(ts.Value); - } - - if (ts.Version > 0) - { - writer.WritePropertyName("version"); - writer.WriteValue(ts.Version); - } - - writer.WritePropertyName("handle"); - writer.WriteValue(ts.Handle); - break; + writer.WritePropertyName("value"); + writer.WriteValue(ts.Value); } - case NodeAttribute.DataType.DT_TranslatedFSString: + if (ts.Version > 0) { - var fs = (TranslatedFSString)attribute.Value.Value; - WriteTranslatedFSStringInner(writer, fs); - break; + writer.WritePropertyName("version"); + writer.WriteValue(ts.Version); } - case NodeAttribute.DataType.DT_UUID: - if (SerializationSettings.ByteSwapGuids) - { - writer.WriteValue((NodeAttribute.ByteSwapGuid((Guid)attribute.Value.Value)).ToString()); - } - else - { - writer.WriteValue(((Guid)attribute.Value.Value).ToString()); - } + writer.WritePropertyName("handle"); + writer.WriteValue(ts.Handle); break; + } - // TODO: haven't seen any vectors/matrices in D:OS JSON files so far - case NodeAttribute.DataType.DT_Vec2: - case NodeAttribute.DataType.DT_Vec3: - case NodeAttribute.DataType.DT_Vec4: - { - var vec = (float[])attribute.Value.Value; - writer.WriteValue(String.Join(" ", vec)); - break; - } + case NodeAttribute.DataType.DT_TranslatedFSString: + { + var fs = (TranslatedFSString)attribute.Value.Value; + WriteTranslatedFSStringInner(writer, fs); + break; + } - case NodeAttribute.DataType.DT_IVec2: - case NodeAttribute.DataType.DT_IVec3: - case NodeAttribute.DataType.DT_IVec4: - { - var ivec = (int[])attribute.Value.Value; - writer.WriteValue(String.Join(" ", ivec)); - break; - } + case NodeAttribute.DataType.DT_UUID: + if (SerializationSettings.ByteSwapGuids) + { + writer.WriteValue((NodeAttribute.ByteSwapGuid((Guid)attribute.Value.Value)).ToString()); + } + else + { + writer.WriteValue(((Guid)attribute.Value.Value).ToString()); + } + break; - case NodeAttribute.DataType.DT_Mat2: - case NodeAttribute.DataType.DT_Mat3: - case 
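A small aside on the DT_ScratchBuffer case above (whose format the original TODO already marks as tentative): the raw byte[] payload is emitted as a Base64 string, so a round trip presumably looks like the sketch below; the buffer contents are placeholders.

using System;

internal static class ScratchBufferSketch
{
    public static void RoundTrip()
    {
        byte[] scratch = { 0x01, 0x02, 0x03, 0x04 };          // placeholder payload
        string encoded = Convert.ToBase64String(scratch);     // what WriteNode emits for DT_ScratchBuffer
        byte[] decoded = Convert.FromBase64String(encoded);   // the inverse a reader would apply
        System.Diagnostics.Debug.Assert(decoded.Length == scratch.Length);
    }
}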
NodeAttribute.DataType.DT_Mat3x4: - case NodeAttribute.DataType.DT_Mat4x3: - case NodeAttribute.DataType.DT_Mat4: - { - var mat = (Matrix)attribute.Value.Value; - var str = ""; - for (var r = 0; r < mat.rows; r++) - { - for (var c = 0; c < mat.cols; c++) - str += mat[r, c].ToString() + " "; - str += Environment.NewLine; - } + // TODO: haven't seen any vectors/matrices in D:OS JSON files so far + case NodeAttribute.DataType.DT_Vec2: + case NodeAttribute.DataType.DT_Vec3: + case NodeAttribute.DataType.DT_Vec4: + { + var vec = (float[])attribute.Value.Value; + writer.WriteValue(String.Join(" ", vec)); + break; + } - writer.WriteValue(str); - break; - } + case NodeAttribute.DataType.DT_IVec2: + case NodeAttribute.DataType.DT_IVec3: + case NodeAttribute.DataType.DT_IVec4: + { + var ivec = (int[])attribute.Value.Value; + writer.WriteValue(String.Join(" ", ivec)); + break; + } - case NodeAttribute.DataType.DT_None: - default: - throw new NotImplementedException("Don't know how to serialize type " + attribute.Value.Type.ToString()); - } + case NodeAttribute.DataType.DT_Mat2: + case NodeAttribute.DataType.DT_Mat3: + case NodeAttribute.DataType.DT_Mat3x4: + case NodeAttribute.DataType.DT_Mat4x3: + case NodeAttribute.DataType.DT_Mat4: + { + var mat = (Matrix)attribute.Value.Value; + var str = ""; + for (var r = 0; r < mat.rows; r++) + { + for (var c = 0; c < mat.cols; c++) + str += mat[r, c].ToString() + " "; + str += Environment.NewLine; + } - writer.WriteEndObject(); - } + writer.WriteValue(str); + break; + } - foreach (var children in node.Children) - { - writer.WritePropertyName(children.Key); - writer.WriteStartArray(); - foreach (var child in children.Value) - WriteNode(writer, child, serializer); - writer.WriteEndArray(); + case NodeAttribute.DataType.DT_None: + default: + throw new NotImplementedException("Don't know how to serialize type " + attribute.Value.Type.ToString()); } writer.WriteEndObject(); } - public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) + foreach (var children in node.Children) { - if (value is Node) - { - WriteNode(writer, value as Node, serializer); - } - else if (value is Resource) - { - WriteResource(writer, value as Resource, serializer); - } - else - { - throw new InvalidOperationException("Cannot serialize unknown type"); - } + writer.WritePropertyName(children.Key); + writer.WriteStartArray(); + foreach (var child in children.Value) + WriteNode(writer, child, serializer); + writer.WriteEndArray(); + } + + writer.WriteEndObject(); + } + + public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) + { + if (value is Node) + { + WriteNode(writer, value as Node, serializer); + } + else if (value is Resource) + { + WriteResource(writer, value as Resource, serializer); + } + else + { + throw new InvalidOperationException("Cannot serialize unknown type"); } } } diff --git a/LSLib/LS/Resources/LSJ/LSJWriter.cs b/LSLib/LS/Resources/LSJ/LSJWriter.cs index 64ca368f..bf5ef244 100644 --- a/LSLib/LS/Resources/LSJ/LSJWriter.cs +++ b/LSLib/LS/Resources/LSJ/LSJWriter.cs @@ -1,28 +1,27 @@ using System.IO; using Newtonsoft.Json; -namespace LSLib.LS +namespace LSLib.LS; + +public class LSJWriter(Stream stream) { - public class LSJWriter(Stream stream) - { - private readonly Stream stream = stream; - public bool PrettyPrint = false; - public NodeSerializationSettings SerializationSettings = new(); + private readonly Stream stream = stream; + public bool PrettyPrint = false; + public NodeSerializationSettings 
SerializationSettings = new(); - public void Write(Resource rsrc) + public void Write(Resource rsrc) + { + var settings = new JsonSerializerSettings { - var settings = new JsonSerializerSettings - { - Formatting = Formatting.Indented - }; - settings.Converters.Add(new LSJResourceConverter(SerializationSettings)); - var serializer = JsonSerializer.Create(settings); + Formatting = Formatting.Indented + }; + settings.Converters.Add(new LSJResourceConverter(SerializationSettings)); + var serializer = JsonSerializer.Create(settings); - using var streamWriter = new StreamWriter(stream); - using var writer = new JsonTextWriter(streamWriter); - writer.IndentChar = '\t'; - writer.Indentation = 1; - serializer.Serialize(writer, rsrc); - } + using var streamWriter = new StreamWriter(stream); + using var writer = new JsonTextWriter(streamWriter); + writer.IndentChar = '\t'; + writer.Indentation = 1; + serializer.Serialize(writer, rsrc); } } diff --git a/LSLib/LS/Resources/LSX/LSXReader.cs b/LSLib/LS/Resources/LSX/LSXReader.cs index c8d495f0..bafc32d8 100644 --- a/LSLib/LS/Resources/LSX/LSXReader.cs +++ b/LSLib/LS/Resources/LSX/LSXReader.cs @@ -6,280 +6,279 @@ using System.Linq; using System.Xml; -namespace LSLib.LS +namespace LSLib.LS; + +public class LSXReader(Stream stream) : IDisposable { - public class LSXReader(Stream stream) : IDisposable + private Stream stream = stream; + private XmlReader reader; + private Resource resource; + private Region currentRegion; + private List stack; + private int lastLine, lastColumn; + private LSXVersion Version = LSXVersion.V3; + public NodeSerializationSettings SerializationSettings = new(); + + public void Dispose() { - private Stream stream = stream; - private XmlReader reader; - private Resource resource; - private Region currentRegion; - private List stack; - private int lastLine, lastColumn; - private LSXVersion Version = LSXVersion.V3; - public NodeSerializationSettings SerializationSettings = new(); - - public void Dispose() - { - stream.Dispose(); - } + stream.Dispose(); + } + + private void ReadTranslatedFSString(TranslatedFSString fs) + { + fs.Value = reader["value"]; + fs.Handle = reader["handle"]; + Debug.Assert(fs.Handle != null); - private void ReadTranslatedFSString(TranslatedFSString fs) + var arguments = Convert.ToInt32(reader["arguments"]); + fs.Arguments = new List(arguments); + if (arguments > 0) { - fs.Value = reader["value"]; - fs.Handle = reader["handle"]; - Debug.Assert(fs.Handle != null); + while (reader.Read() && reader.NodeType != XmlNodeType.Element); + if (reader.Name != "arguments") + { + throw new InvalidFormatException(String.Format("Expected : {0}", reader.Name)); + } - var arguments = Convert.ToInt32(reader["arguments"]); - fs.Arguments = new List(arguments); - if (arguments > 0) + int processedArgs = 0; + while (processedArgs < arguments && reader.Read()) { - while (reader.Read() && reader.NodeType != XmlNodeType.Element); - if (reader.Name != "arguments") + if (reader.NodeType == XmlNodeType.Element) { - throw new InvalidFormatException(String.Format("Expected : {0}", reader.Name)); - } + if (reader.Name != "argument") + { + throw new InvalidFormatException(String.Format("Expected : {0}", reader.Name)); + } - int processedArgs = 0; - while (processedArgs < arguments && reader.Read()) - { - if (reader.NodeType == XmlNodeType.Element) + var arg = new TranslatedFSStringArgument { - if (reader.Name != "argument") - { - throw new InvalidFormatException(String.Format("Expected : {0}", reader.Name)); - } - - var arg = new 
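A minimal usage sketch for LSJWriter as reformatted above, assuming `resource` is a Resource already loaded through one of the LSLib readers; the output path is a placeholder.

using System.IO;
using LSLib.LS;

internal static class LsjWriterUsageSketch
{
    public static void Save(Resource resource, string path)
    {
        using var file = File.Create(path);
        var writer = new LSJWriter(file)
        {
            PrettyPrint = true    // exposed by LSJWriter; note the Write() above always uses Formatting.Indented
        };
        writer.Write(resource);
    }
}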
TranslatedFSStringArgument - { - Key = reader["key"], - Value = reader["value"] - }; - - while (reader.Read() && reader.NodeType != XmlNodeType.Element); - if (reader.Name != "string") - { - throw new InvalidFormatException(String.Format("Expected : {0}", reader.Name)); - } - - arg.String = new TranslatedFSString(); - ReadTranslatedFSString(arg.String); - - fs.Arguments.Add(arg); - processedArgs++; - - while (reader.Read() && reader.NodeType != XmlNodeType.EndElement); + Key = reader["key"], + Value = reader["value"] + }; + + while (reader.Read() && reader.NodeType != XmlNodeType.Element); + if (reader.Name != "string") + { + throw new InvalidFormatException(String.Format("Expected : {0}", reader.Name)); } - } - while (reader.Read() && reader.NodeType != XmlNodeType.EndElement); - // Close outer element - while (reader.Read() && reader.NodeType != XmlNodeType.EndElement); - Debug.Assert(processedArgs == arguments); + arg.String = new TranslatedFSString(); + ReadTranslatedFSString(arg.String); + + fs.Arguments.Add(arg); + processedArgs++; + + while (reader.Read() && reader.NodeType != XmlNodeType.EndElement); + } } + + while (reader.Read() && reader.NodeType != XmlNodeType.EndElement); + // Close outer element + while (reader.Read() && reader.NodeType != XmlNodeType.EndElement); + Debug.Assert(processedArgs == arguments); } + } - private void ReadElement() + private void ReadElement() + { + switch (reader.Name) { - switch (reader.Name) - { - case "save": - // Root element - if (stack.Count > 0) - throw new InvalidFormatException("Node was unexpected."); - break; - - case "header": - // LSX metadata part 1 - resource.Metadata.Timestamp = Convert.ToUInt64(reader["time"]); - break; - - case "version": - // LSX metadata part 2 - resource.Metadata.MajorVersion = Convert.ToUInt32(reader["major"]); - resource.Metadata.MinorVersion = Convert.ToUInt32(reader["minor"]); - resource.Metadata.Revision = Convert.ToUInt32(reader["revision"]); - resource.Metadata.BuildNumber = Convert.ToUInt32(reader["build"]); - Version = (resource.Metadata.MajorVersion >= 4) ? LSXVersion.V4 : LSXVersion.V3; - var lslibMeta = reader["lslib_meta"]; - SerializationSettings.InitFromMeta(lslibMeta ?? ""); - break; - - case "region": - if (currentRegion != null) - throw new InvalidFormatException("A can only start at the root level of a resource."); - - Debug.Assert(!reader.IsEmptyElement); - var region = new Region(); - region.RegionName = reader["id"]; - Debug.Assert(region.RegionName != null); - resource.Regions.Add(region.RegionName, region); - currentRegion = region; - break; - - case "node": - if (currentRegion == null) - throw new InvalidFormatException("A must be located inside a region."); - - Node node; - if (stack.Count == 0) - { - // The node is the root node of the region - node = currentRegion; - } - else + case "save": + // Root element + if (stack.Count > 0) + throw new InvalidFormatException("Node was unexpected."); + break; + + case "header": + // LSX metadata part 1 + resource.Metadata.Timestamp = Convert.ToUInt64(reader["time"]); + break; + + case "version": + // LSX metadata part 2 + resource.Metadata.MajorVersion = Convert.ToUInt32(reader["major"]); + resource.Metadata.MinorVersion = Convert.ToUInt32(reader["minor"]); + resource.Metadata.Revision = Convert.ToUInt32(reader["revision"]); + resource.Metadata.BuildNumber = Convert.ToUInt32(reader["build"]); + Version = (resource.Metadata.MajorVersion >= 4) ? 
LSXVersion.V4 : LSXVersion.V3; + var lslibMeta = reader["lslib_meta"]; + SerializationSettings.InitFromMeta(lslibMeta ?? ""); + break; + + case "region": + if (currentRegion != null) + throw new InvalidFormatException("A can only start at the root level of a resource."); + + Debug.Assert(!reader.IsEmptyElement); + var region = new Region(); + region.RegionName = reader["id"]; + Debug.Assert(region.RegionName != null); + resource.Regions.Add(region.RegionName, region); + currentRegion = region; + break; + + case "node": + if (currentRegion == null) + throw new InvalidFormatException("A must be located inside a region."); + + Node node; + if (stack.Count == 0) + { + // The node is the root node of the region + node = currentRegion; + } + else + { + // New node under the current parent + node = new Node { - // New node under the current parent - node = new Node - { - Parent = stack.Last() - }; - } + Parent = stack.Last() + }; + } - node.Name = reader["id"]; - Debug.Assert(node.Name != null); - node.Parent?.AppendChild(node); + node.Name = reader["id"]; + Debug.Assert(node.Name != null); + node.Parent?.AppendChild(node); - if (!reader.IsEmptyElement) - stack.Add(node); - break; + if (!reader.IsEmptyElement) + stack.Add(node); + break; - case "attribute": - UInt32 attrTypeId; - if (!UInt32.TryParse(reader["type"], out attrTypeId)) - { - attrTypeId = (uint)AttributeTypeMaps.TypeToId[reader["type"]]; - } + case "attribute": + UInt32 attrTypeId; + if (!UInt32.TryParse(reader["type"], out attrTypeId)) + { + attrTypeId = (uint)AttributeTypeMaps.TypeToId[reader["type"]]; + } - var attrName = reader["id"]; - if (attrTypeId > (int)NodeAttribute.DataType.DT_Max) - throw new InvalidFormatException(String.Format("Unsupported attribute data type: {0}", attrTypeId)); + var attrName = reader["id"]; + if (attrTypeId > (int)NodeAttribute.DataType.DT_Max) + throw new InvalidFormatException(String.Format("Unsupported attribute data type: {0}", attrTypeId)); - Debug.Assert(attrName != null); - var attr = new NodeAttribute((NodeAttribute.DataType)attrTypeId); + Debug.Assert(attrName != null); + var attr = new NodeAttribute((NodeAttribute.DataType)attrTypeId); - var attrValue = reader["value"]; - if (attrValue != null) - { - attr.FromString(attrValue, SerializationSettings); - } + var attrValue = reader["value"]; + if (attrValue != null) + { + attr.FromString(attrValue, SerializationSettings); + } - if (attr.Type == NodeAttribute.DataType.DT_TranslatedString) - { - attr.Value ??= new TranslatedString(); + if (attr.Type == NodeAttribute.DataType.DT_TranslatedString) + { + attr.Value ??= new TranslatedString(); - var ts = ((TranslatedString)attr.Value); - ts.Handle = reader["handle"]; - Debug.Assert(ts.Handle != null); + var ts = ((TranslatedString)attr.Value); + ts.Handle = reader["handle"]; + Debug.Assert(ts.Handle != null); - if (attrValue == null) - { - ts.Version = UInt16.Parse(reader["version"]); - } - } - else if (attr.Type == NodeAttribute.DataType.DT_TranslatedFSString) + if (attrValue == null) { - var fs = ((TranslatedFSString)attr.Value); - ReadTranslatedFSString(fs); + ts.Version = UInt16.Parse(reader["version"]); } + } + else if (attr.Type == NodeAttribute.DataType.DT_TranslatedFSString) + { + var fs = ((TranslatedFSString)attr.Value); + ReadTranslatedFSString(fs); + } - stack.Last().Attributes.Add(attrName, attr); - break; + stack.Last().Attributes.Add(attrName, attr); + break; - case "children": - // Child nodes are handled in the "node" case - break; + case "children": + // Child nodes are handled in 
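A sketch of the type-attribute fallback used in the "attribute" case above: BG3-era LSX stores the textual type name while older files store the numeric id, so the reader tries a numeric parse first and then consults AttributeTypeMaps.TypeToId.

using LSLib.LS;

internal static class AttributeTypeParseSketch
{
    // Accepts either a numeric id (D:OS-era files) or a name such as "FixedString" (BG3-era files).
    public static NodeAttribute.DataType Parse(string typeText)
    {
        if (uint.TryParse(typeText, out var numericId))
        {
            return (NodeAttribute.DataType)numericId;
        }

        return AttributeTypeMaps.TypeToId[typeText];
    }
}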
the "node" case + break; - default: - throw new InvalidFormatException(String.Format("Unknown element encountered: {0}", reader.Name)); - } + default: + throw new InvalidFormatException(String.Format("Unknown element encountered: {0}", reader.Name)); } + } - private void ReadEndElement() + private void ReadEndElement() + { + switch (reader.Name) { - switch (reader.Name) - { - case "save": - case "header": - case "version": - case "attribute": - case "children": - // These elements don't change the stack, just discard them - break; - - case "region": - Debug.Assert(stack.Count == 0); - Debug.Assert(currentRegion != null); - Debug.Assert(currentRegion.Name != null); - currentRegion = null; - break; - - case "node": - stack.RemoveAt(stack.Count - 1); - break; - - default: - throw new InvalidFormatException(String.Format("Unknown element encountered: {0}", reader.Name)); - } + case "save": + case "header": + case "version": + case "attribute": + case "children": + // These elements don't change the stack, just discard them + break; + + case "region": + Debug.Assert(stack.Count == 0); + Debug.Assert(currentRegion != null); + Debug.Assert(currentRegion.Name != null); + currentRegion = null; + break; + + case "node": + stack.RemoveAt(stack.Count - 1); + break; + + default: + throw new InvalidFormatException(String.Format("Unknown element encountered: {0}", reader.Name)); } + } - private void ReadInternal() + private void ReadInternal() + { + using (this.reader = XmlReader.Create(stream)) { - using (this.reader = XmlReader.Create(stream)) + try { - try + while (reader.Read()) { - while (reader.Read()) + if (reader.NodeType == XmlNodeType.Element) { - if (reader.NodeType == XmlNodeType.Element) - { - ReadElement(); - } - else if (reader.NodeType == XmlNodeType.EndElement) - { - ReadEndElement(); - } + ReadElement(); + } + else if (reader.NodeType == XmlNodeType.EndElement) + { + ReadEndElement(); } - } catch (Exception) - { - lastLine = ((IXmlLineInfo)reader).LineNumber; - lastColumn = ((IXmlLineInfo)reader).LinePosition; - throw; } + } catch (Exception) + { + lastLine = ((IXmlLineInfo)reader).LineNumber; + lastColumn = ((IXmlLineInfo)reader).LinePosition; + throw; } } + } - public Resource Read() - { - resource = new Resource(); - currentRegion = null; - stack = []; - lastLine = lastColumn = 0; - var resultResource = resource; + public Resource Read() + { + resource = new Resource(); + currentRegion = null; + stack = []; + lastLine = lastColumn = 0; + var resultResource = resource; - try - { - ReadInternal(); - } - catch (Exception e) + try + { + ReadInternal(); + } + catch (Exception e) + { + if (lastLine > 0) { - if (lastLine > 0) - { - throw new Exception($"Parsing error at or near line {lastLine}, column {lastColumn}:{Environment.NewLine}{e.Message}", e); - } - else - { - throw; - } + throw new Exception($"Parsing error at or near line {lastLine}, column {lastColumn}:{Environment.NewLine}{e.Message}", e); } - finally + else { - resource = null; - currentRegion = null; - stack = null; + throw; } - - return resultResource; } + finally + { + resource = null; + currentRegion = null; + stack = null; + } + + return resultResource; } } diff --git a/LSLib/LS/Resources/LSX/LSXWriter.cs b/LSLib/LS/Resources/LSX/LSXWriter.cs index 49488ae4..7a0d9bc5 100644 --- a/LSLib/LS/Resources/LSX/LSXWriter.cs +++ b/LSLib/LS/Resources/LSX/LSXWriter.cs @@ -3,148 +3,147 @@ using System.IO; using System.Xml; -namespace LSLib.LS +namespace LSLib.LS; + +public class LSXWriter(Stream stream) { - public class 
LSXWriter(Stream stream) - { - private readonly Stream stream = stream; - private XmlWriter writer; + private readonly Stream stream = stream; + private XmlWriter writer; - public bool PrettyPrint = false; - public LSXVersion Version = LSXVersion.V3; - public NodeSerializationSettings SerializationSettings = new(); + public bool PrettyPrint = false; + public LSXVersion Version = LSXVersion.V3; + public NodeSerializationSettings SerializationSettings = new(); - public void Write(Resource rsrc) + public void Write(Resource rsrc) + { + if (Version == LSXVersion.V3 && rsrc.Metadata.MajorVersion == 4) { - if (Version == LSXVersion.V3 && rsrc.Metadata.MajorVersion == 4) - { - throw new InvalidDataException("Cannot resave a BG3 (v4.x) resource in D:OS2 (v3.x) file format, maybe you have the wrong game selected?"); - } - - var settings = new XmlWriterSettings - { - Indent = PrettyPrint, - IndentChars = "\t" - }; + throw new InvalidDataException("Cannot resave a BG3 (v4.x) resource in D:OS2 (v3.x) file format, maybe you have the wrong game selected?"); + } - using (this.writer = XmlWriter.Create(stream, settings)) - { - writer.WriteStartElement("save"); + var settings = new XmlWriterSettings + { + Indent = PrettyPrint, + IndentChars = "\t" + }; - writer.WriteStartElement("version"); + using (this.writer = XmlWriter.Create(stream, settings)) + { + writer.WriteStartElement("save"); - writer.WriteAttributeString("major", rsrc.Metadata.MajorVersion.ToString()); - writer.WriteAttributeString("minor", rsrc.Metadata.MinorVersion.ToString()); - writer.WriteAttributeString("revision", rsrc.Metadata.Revision.ToString()); - writer.WriteAttributeString("build", rsrc.Metadata.BuildNumber.ToString()); - writer.WriteAttributeString("lslib_meta", SerializationSettings.BuildMeta()); - writer.WriteEndElement(); + writer.WriteStartElement("version"); - WriteRegions(rsrc); + writer.WriteAttributeString("major", rsrc.Metadata.MajorVersion.ToString()); + writer.WriteAttributeString("minor", rsrc.Metadata.MinorVersion.ToString()); + writer.WriteAttributeString("revision", rsrc.Metadata.Revision.ToString()); + writer.WriteAttributeString("build", rsrc.Metadata.BuildNumber.ToString()); + writer.WriteAttributeString("lslib_meta", SerializationSettings.BuildMeta()); + writer.WriteEndElement(); - writer.WriteEndElement(); - writer.Flush(); - } - } + WriteRegions(rsrc); - private void WriteRegions(Resource rsrc) - { - foreach (var region in rsrc.Regions) - { - writer.WriteStartElement("region"); - writer.WriteAttributeString("id", region.Key); - WriteNode(region.Value); - writer.WriteEndElement(); - } + writer.WriteEndElement(); + writer.Flush(); } + } - private void WriteTranslatedFSString(TranslatedFSString fs) + private void WriteRegions(Resource rsrc) + { + foreach (var region in rsrc.Regions) { - writer.WriteStartElement("string"); - writer.WriteAttributeString("value", fs.Value); - WriteTranslatedFSStringInner(fs); + writer.WriteStartElement("region"); + writer.WriteAttributeString("id", region.Key); + WriteNode(region.Value); writer.WriteEndElement(); } + } - private void WriteTranslatedFSStringInner(TranslatedFSString fs) - { - writer.WriteAttributeString("handle", fs.Handle); - writer.WriteAttributeString("arguments", fs.Arguments.Count.ToString()); + private void WriteTranslatedFSString(TranslatedFSString fs) + { + writer.WriteStartElement("string"); + writer.WriteAttributeString("value", fs.Value); + WriteTranslatedFSStringInner(fs); + writer.WriteEndElement(); + } + + private void 
WriteTranslatedFSStringInner(TranslatedFSString fs) + { + writer.WriteAttributeString("handle", fs.Handle); + writer.WriteAttributeString("arguments", fs.Arguments.Count.ToString()); - if (fs.Arguments.Count > 0) + if (fs.Arguments.Count > 0) + { + writer.WriteStartElement("arguments"); + for (int i = 0; i < fs.Arguments.Count; i++) { - writer.WriteStartElement("arguments"); - for (int i = 0; i < fs.Arguments.Count; i++) - { - var argument = fs.Arguments[i]; - writer.WriteStartElement("argument"); - writer.WriteAttributeString("key", argument.Key); - writer.WriteAttributeString("value", argument.Value); - WriteTranslatedFSString(argument.String); - writer.WriteEndElement(); - } + var argument = fs.Arguments[i]; + writer.WriteStartElement("argument"); + writer.WriteAttributeString("key", argument.Key); + writer.WriteAttributeString("value", argument.Value); + WriteTranslatedFSString(argument.String); writer.WriteEndElement(); } + writer.WriteEndElement(); } + } - private void WriteNode(Node node) - { - writer.WriteStartElement("node"); - writer.WriteAttributeString("id", node.Name); + private void WriteNode(Node node) + { + writer.WriteStartElement("node"); + writer.WriteAttributeString("id", node.Name); - foreach (var attribute in node.Attributes) + foreach (var attribute in node.Attributes) + { + writer.WriteStartElement("attribute"); + writer.WriteAttributeString("id", attribute.Key); + if (Version >= LSXVersion.V4) { - writer.WriteStartElement("attribute"); - writer.WriteAttributeString("id", attribute.Key); - if (Version >= LSXVersion.V4) - { - writer.WriteAttributeString("type", AttributeTypeMaps.IdToType[attribute.Value.Type]); - } - else - { - writer.WriteAttributeString("type", ((int)attribute.Value.Type).ToString()); - } + writer.WriteAttributeString("type", AttributeTypeMaps.IdToType[attribute.Value.Type]); + } + else + { + writer.WriteAttributeString("type", ((int)attribute.Value.Type).ToString()); + } - if (attribute.Value.Type == NodeAttribute.DataType.DT_TranslatedString) - { - var ts = ((TranslatedString)attribute.Value.Value); - writer.WriteAttributeString("handle", ts.Handle); - if (ts.Value != null) - { - writer.WriteAttributeString("value", ts.ToString()); - } - else - { - writer.WriteAttributeString("version", ts.Version.ToString()); - } - } - else if (attribute.Value.Type == NodeAttribute.DataType.DT_TranslatedFSString) + if (attribute.Value.Type == NodeAttribute.DataType.DT_TranslatedString) + { + var ts = ((TranslatedString)attribute.Value.Value); + writer.WriteAttributeString("handle", ts.Handle); + if (ts.Value != null) { - var fs = ((TranslatedFSString)attribute.Value.Value); - writer.WriteAttributeString("value", fs.Value); - WriteTranslatedFSStringInner(fs); + writer.WriteAttributeString("value", ts.ToString()); } else { - // Replace bogus 001F characters found in certain LSF nodes - writer.WriteAttributeString("value", attribute.Value.AsString(SerializationSettings).Replace("\x1f", "")); + writer.WriteAttributeString("version", ts.Version.ToString()); } - - writer.WriteEndElement(); } - - if (node.ChildCount > 0) + else if (attribute.Value.Type == NodeAttribute.DataType.DT_TranslatedFSString) { - writer.WriteStartElement("children"); - foreach (var children in node.Children) - { - foreach (var child in children.Value) - WriteNode(child); - } - writer.WriteEndElement(); + var fs = ((TranslatedFSString)attribute.Value.Value); + writer.WriteAttributeString("value", fs.Value); + WriteTranslatedFSStringInner(fs); + } + else + { + // Replace bogus 001F characters 
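A usage sketch, assuming placeholder paths, that round-trips an LSX document through the LSXReader/LSXWriter pair shown above; the version choice mirrors the V3/V4 guard in Write().

using System.IO;
using LSLib.LS;

internal static class LsxRoundTripSketch
{
    public static void Resave(string inputPath, string outputPath)
    {
        Resource resource;
        using (var input = File.OpenRead(inputPath))
        using (var reader = new LSXReader(input))
        {
            resource = reader.Read();
        }

        using var output = File.Create(outputPath);
        var writer = new LSXWriter(output)
        {
            PrettyPrint = true,
            // V4 for BG3-era resources; Write() above refuses to save a v4.x resource as V3.
            Version = resource.Metadata.MajorVersion >= 4 ? LSXVersion.V4 : LSXVersion.V3
        };
        writer.Write(resource);
    }
}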
found in certain LSF nodes + writer.WriteAttributeString("value", attribute.Value.AsString(SerializationSettings).Replace("\x1f", "")); } writer.WriteEndElement(); } + + if (node.ChildCount > 0) + { + writer.WriteStartElement("children"); + foreach (var children in node.Children) + { + foreach (var child in children.Value) + WriteNode(child); + } + writer.WriteEndElement(); + } + + writer.WriteEndElement(); } } diff --git a/LSLib/LS/Save/SavegameHelpers.cs b/LSLib/LS/Save/SavegameHelpers.cs index 938a8c43..c8fd5f4c 100644 --- a/LSLib/LS/Save/SavegameHelpers.cs +++ b/LSLib/LS/Save/SavegameHelpers.cs @@ -6,144 +6,143 @@ using System.IO; using System.Linq; -namespace LSLib.LS.Save +namespace LSLib.LS.Save; + +public class SavegameHelpers : IDisposable { - public class SavegameHelpers : IDisposable + private readonly PackageReader Reader; + private readonly Package Package; + + public SavegameHelpers(string path) { - private readonly PackageReader Reader; - private readonly Package Package; + Reader = new PackageReader(path); + Package = Reader.Read(); + } - public SavegameHelpers(string path) + public void Dispose() + { + Reader.Dispose(); + } + + public Resource LoadGlobals() + { + AbstractFileInfo globalsInfo = Package.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == "globals.lsf"); + if (globalsInfo == null) { - Reader = new PackageReader(path); - Package = Reader.Read(); + throw new InvalidDataException("The specified package is not a valid savegame (globals.lsf not found)"); } - public void Dispose() + Resource resource; + Stream rsrcStream = globalsInfo.MakeStream(); + try { - Reader.Dispose(); + using var rsrcReader = new LSFReader(rsrcStream); + resource = rsrcReader.Read(); } - - public Resource LoadGlobals() + finally { - AbstractFileInfo globalsInfo = Package.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == "globals.lsf"); - if (globalsInfo == null) - { - throw new InvalidDataException("The specified package is not a valid savegame (globals.lsf not found)"); - } + globalsInfo.ReleaseStream(); + } + + return resource; + } - Resource resource; - Stream rsrcStream = globalsInfo.MakeStream(); + public Story.Story LoadStory(Stream s) + { + var reader = new StoryReader(); + return reader.Read(s); + } + + public Story.Story LoadStory() + { + AbstractFileInfo storyInfo = Package.Files.FirstOrDefault(p => p.Name == "StorySave.bin"); + if (storyInfo != null) + { + Stream rsrcStream = storyInfo.MakeStream(); try { - using var rsrcReader = new LSFReader(rsrcStream); - resource = rsrcReader.Read(); + return LoadStory(rsrcStream); } finally { - globalsInfo.ReleaseStream(); + storyInfo.ReleaseStream(); } - - return resource; } - - public Story.Story LoadStory(Stream s) + else { - var reader = new StoryReader(); - return reader.Read(s); + var globals = LoadGlobals(); + + Node storyNode = globals.Regions["Story"].Children["Story"][0]; + var storyStream = new MemoryStream(storyNode.Attributes["Story"].Value as byte[] ?? 
throw new InvalidOperationException("Cannot proceed with null Story node")); + return LoadStory(storyStream); } + } - public Story.Story LoadStory() + public MemoryStream ResaveStoryToGlobals(Story.Story story, ResourceConversionParameters conversionParams) + { + var globals = LoadGlobals(); + + // Save story resource and pack into the Story.Story attribute in globals.lsf + using (var storyStream = new MemoryStream()) { - AbstractFileInfo storyInfo = Package.Files.FirstOrDefault(p => p.Name == "StorySave.bin"); - if (storyInfo != null) - { - Stream rsrcStream = storyInfo.MakeStream(); - try - { - return LoadStory(rsrcStream); - } - finally - { - storyInfo.ReleaseStream(); - } - } - else - { - var globals = LoadGlobals(); + var storyWriter = new StoryWriter(); + storyWriter.Write(storyStream, story, true); - Node storyNode = globals.Regions["Story"].Children["Story"][0]; - var storyStream = new MemoryStream(storyNode.Attributes["Story"].Value as byte[] ?? throw new InvalidOperationException("Cannot proceed with null Story node")); - return LoadStory(storyStream); - } + var storyNode = globals.Regions["Story"].Children["Story"][0]; + storyNode.Attributes["Story"].Value = storyStream.ToArray(); } - public MemoryStream ResaveStoryToGlobals(Story.Story story, ResourceConversionParameters conversionParams) + // Save globals.lsf + var rewrittenStream = new MemoryStream(); + var rsrcWriter = new LSFWriter(rewrittenStream) { - var globals = LoadGlobals(); - - // Save story resource and pack into the Story.Story attribute in globals.lsf - using (var storyStream = new MemoryStream()) - { - var storyWriter = new StoryWriter(); - storyWriter.Write(storyStream, story, true); - - var storyNode = globals.Regions["Story"].Children["Story"][0]; - storyNode.Attributes["Story"].Value = storyStream.ToArray(); - } + Version = conversionParams.LSF, + EncodeSiblingData = false + }; + rsrcWriter.Write(globals); + rewrittenStream.Seek(0, SeekOrigin.Begin); + return rewrittenStream; + } - // Save globals.lsf - var rewrittenStream = new MemoryStream(); - var rsrcWriter = new LSFWriter(rewrittenStream) - { - Version = conversionParams.LSF, - EncodeSiblingData = false - }; - rsrcWriter.Write(globals); - rewrittenStream.Seek(0, SeekOrigin.Begin); - return rewrittenStream; - } + public void ResaveStory(Story.Story story, Game game, string path) + { + // Re-package global.lsf/StorySave.bin + var rewrittenPackage = new Package(); + var conversionParams = ResourceConversionParameters.FromGameVersion(game); - public void ResaveStory(Story.Story story, Game game, string path) + AbstractFileInfo storyBin = Package.Files.FirstOrDefault(p => p.Name == "StorySave.bin"); + if (storyBin == null) { - // Re-package global.lsf/StorySave.bin - var rewrittenPackage = new Package(); - var conversionParams = ResourceConversionParameters.FromGameVersion(game); + var globalsStream = ResaveStoryToGlobals(story, conversionParams); - AbstractFileInfo storyBin = Package.Files.FirstOrDefault(p => p.Name == "StorySave.bin"); - if (storyBin == null) - { - var globalsStream = ResaveStoryToGlobals(story, conversionParams); - - AbstractFileInfo globalsLsf = Package.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == "globals.lsf"); - StreamFileInfo globalsRepacked = StreamFileInfo.CreateFromStream(globalsStream, globalsLsf.Name); - rewrittenPackage.Files.Add(globalsRepacked); + AbstractFileInfo globalsLsf = Package.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == "globals.lsf"); + StreamFileInfo globalsRepacked = 
StreamFileInfo.CreateFromStream(globalsStream, globalsLsf.Name); + rewrittenPackage.Files.Add(globalsRepacked); - List files = Package.Files.Where(x => x.Name.ToLowerInvariant() != "globals.lsf").ToList(); - rewrittenPackage.Files.AddRange(files); - } - else - { - // Save story resource and pack into the Story.Story attribute in globals.lsf - var storyStream = new MemoryStream(); - var storyWriter = new StoryWriter(); - storyWriter.Write(storyStream, story, true); - storyStream.Seek(0, SeekOrigin.Begin); + List files = Package.Files.Where(x => x.Name.ToLowerInvariant() != "globals.lsf").ToList(); + rewrittenPackage.Files.AddRange(files); + } + else + { + // Save story resource and pack into the Story.Story attribute in globals.lsf + var storyStream = new MemoryStream(); + var storyWriter = new StoryWriter(); + storyWriter.Write(storyStream, story, true); + storyStream.Seek(0, SeekOrigin.Begin); - StreamFileInfo storyRepacked = StreamFileInfo.CreateFromStream(storyStream, "StorySave.bin"); - rewrittenPackage.Files.Add(storyRepacked); + StreamFileInfo storyRepacked = StreamFileInfo.CreateFromStream(storyStream, "StorySave.bin"); + rewrittenPackage.Files.Add(storyRepacked); - List files = Package.Files.Where(x => x.Name != "StorySave.bin").ToList(); - rewrittenPackage.Files.AddRange(files); - } + List files = Package.Files.Where(x => x.Name != "StorySave.bin").ToList(); + rewrittenPackage.Files.AddRange(files); + } - using (var packageWriter = new PackageWriter(rewrittenPackage, path)) - { - packageWriter.Version = conversionParams.PAKVersion; - packageWriter.Compression = CompressionMethod.Zlib; - packageWriter.LSCompressionLevel = LSCompressionLevel.DefaultCompression; - packageWriter.Write(); - } + using (var packageWriter = new PackageWriter(rewrittenPackage, path)) + { + packageWriter.Version = conversionParams.PAKVersion; + packageWriter.Compression = CompressionMethod.Zlib; + packageWriter.LSCompressionLevel = LSCompressionLevel.DefaultCompression; + packageWriter.Write(); } } } diff --git a/LSLib/LS/Save/VariableManager.cs b/LSLib/LS/Save/VariableManager.cs index bbc3476b..beb6a00e 100644 --- a/LSLib/LS/Save/VariableManager.cs +++ b/LSLib/LS/Save/VariableManager.cs @@ -5,412 +5,411 @@ using System.Runtime.InteropServices; using System.Text; -namespace LSLib.LS.Save +namespace LSLib.LS.Save; + +public class OsirisVariableHelper { - public class OsirisVariableHelper + private Int32 NumericStringId; + private Dictionary IdentifierToKey = []; + private Dictionary KeyToIdentifier = []; + + public void Load(Node helper) { - private Int32 NumericStringId; - private Dictionary IdentifierToKey = []; - private Dictionary KeyToIdentifier = []; + NumericStringId = (Int32)helper.Attributes["NumericStringId"].Value; - public void Load(Node helper) + foreach (var mapping in helper.Children["IdentifierTable"]) { - NumericStringId = (Int32)helper.Attributes["NumericStringId"].Value; - - foreach (var mapping in helper.Children["IdentifierTable"]) - { - string name = (string)mapping.Attributes["MapKey"].Value; - Int32 index = (Int32)mapping.Attributes["MapValue"].Value; - IdentifierToKey.Add(name, index); - KeyToIdentifier.Add(index, name); - } + string name = (string)mapping.Attributes["MapKey"].Value; + Int32 index = (Int32)mapping.Attributes["MapValue"].Value; + IdentifierToKey.Add(name, index); + KeyToIdentifier.Add(index, name); } + } - public Int32 GetKey(string variableName) + public Int32 GetKey(string variableName) + { + return IdentifierToKey[variableName]; + } + + public string GetName(Int32 
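A hedged usage sketch for SavegameHelpers as reformatted above; the paths are placeholders and `game` is whichever value of LSLib's Game enum matches the savegame (the exact using directive for that enum depends on your LSLib layout).

using LSLib.LS;
using LSLib.LS.Save;

internal static class SavegameUsageSketch
{
    public static void EditAndResave(string savePath, string outputPath, Game game)
    {
        using var helpers = new SavegameHelpers(savePath);

        var globals = helpers.LoadGlobals();   // globals.lsf as a Resource tree (read-only here)
        var story = helpers.LoadStory();       // StorySave.bin, or the Story attribute inside globals.lsf

        // ...inspect or edit `story` here before repacking...

        helpers.ResaveStory(story, game, outputPath);
    }
}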
variableIndex) + { + return KeyToIdentifier[variableIndex]; + } +} + +abstract public class VariableHolder +{ + protected List Values = []; + private List Remaps = []; + + public TValue GetRaw(int index) + { + if (index == 0) { - return IdentifierToKey[variableName]; + return default; } - public string GetName(Int32 variableIndex) + var valueSlot = Remaps[index - 1]; + return Values[valueSlot]; + } + + public void Load(Node variableList) + { + LoadVariables(variableList); + + var remaps = (byte[])variableList.Attributes["Remaps"].Value; + + Remaps.Clear(); + Remaps.Capacity = remaps.Length / 2; + + using var ms = new MemoryStream(remaps); + using var reader = new BinaryReader(ms); + for (var i = 0; i < remaps.Length / 2; i++) { - return KeyToIdentifier[variableIndex]; + Remaps.Add(reader.ReadUInt16()); } } - abstract public class VariableHolder + abstract protected void LoadVariables(Node variableList); +} + +public class IntVariableHolder : VariableHolder +{ + public Int32? Get(int index) { - protected List Values = []; - private List Remaps = []; - - public TValue GetRaw(int index) + var raw = GetRaw(index); + if (raw == -1163005939) /* 0xbaadf00d */ { - if (index == 0) - { - return default; - } - - var valueSlot = Remaps[index - 1]; - return Values[valueSlot]; + return null; } - - public void Load(Node variableList) + else { - LoadVariables(variableList); + return raw; + } + } - var remaps = (byte[])variableList.Attributes["Remaps"].Value; + override protected void LoadVariables(Node variableList) + { + var variables = (byte[])variableList.Attributes["Variables"].Value; + var numVars = variables.Length / 4; - Remaps.Clear(); - Remaps.Capacity = remaps.Length / 2; + Values.Clear(); + Values.Capacity = numVars; - using var ms = new MemoryStream(remaps); - using var reader = new BinaryReader(ms); - for (var i = 0; i < remaps.Length / 2; i++) - { - Remaps.Add(reader.ReadUInt16()); - } + using var ms = new MemoryStream(variables); + using var reader = new BinaryReader(ms); + for (var i = 0; i < numVars; i++) + { + Values.Add(reader.ReadInt32()); } - - abstract protected void LoadVariables(Node variableList); } +} - public class IntVariableHolder : VariableHolder +public class Int64VariableHolder : VariableHolder +{ + public Int64? Get(int index) { - public Int32? Get(int index) + var raw = GetRaw(index); + if (raw == -4995072469926809587) /* 0xbaadf00dbaadf00d */ { - var raw = GetRaw(index); - if (raw == -1163005939) /* 0xbaadf00d */ - { - return null; - } - else - { - return raw; - } + return null; } - - override protected void LoadVariables(Node variableList) + else { - var variables = (byte[])variableList.Attributes["Variables"].Value; - var numVars = variables.Length / 4; + return raw; + } + } - Values.Clear(); - Values.Capacity = numVars; + override protected void LoadVariables(Node variableList) + { + var variables = (byte[])variableList.Attributes["Variables"].Value; + var numVars = variables.Length / 8; - using var ms = new MemoryStream(variables); - using var reader = new BinaryReader(ms); - for (var i = 0; i < numVars; i++) - { - Values.Add(reader.ReadInt32()); - } + Values.Clear(); + Values.Capacity = numVars; + + using var ms = new MemoryStream(variables); + using var reader = new BinaryReader(ms); + for (var i = 0; i < numVars; i++) + { + Values.Add(reader.ReadInt64()); } } +} - public class Int64VariableHolder : VariableHolder +public class FloatVariableHolder : VariableHolder +{ + public float? Get(int index) { - public Int64? 
Get(int index) + var raw = GetRaw(index); + var intFloat = BitConverter.ToUInt32(BitConverter.GetBytes(raw), 0); + if (intFloat == 0xbaadf00d) { - var raw = GetRaw(index); - if (raw == -4995072469926809587) /* 0xbaadf00dbaadf00d */ - { - return null; - } - else - { - return raw; - } + return null; } - - override protected void LoadVariables(Node variableList) + else { - var variables = (byte[])variableList.Attributes["Variables"].Value; - var numVars = variables.Length / 8; + return raw; + } + } - Values.Clear(); - Values.Capacity = numVars; + override protected void LoadVariables(Node variableList) + { + var variables = (byte[])variableList.Attributes["Variables"].Value; + var numVars = variables.Length / 4; - using var ms = new MemoryStream(variables); - using var reader = new BinaryReader(ms); - for (var i = 0; i < numVars; i++) - { - Values.Add(reader.ReadInt64()); - } + Values.Clear(); + Values.Capacity = numVars; + + using var ms = new MemoryStream(variables); + using var reader = new BinaryReader(ms); + for (var i = 0; i < numVars; i++) + { + Values.Add(reader.ReadSingle()); } } +} - public class FloatVariableHolder : VariableHolder +public class StringVariableHolder : VariableHolder +{ + public string Get(int index) { - public float? Get(int index) + var raw = GetRaw(index); + if (raw == "0xbaadf00d") { - var raw = GetRaw(index); - var intFloat = BitConverter.ToUInt32(BitConverter.GetBytes(raw), 0); - if (intFloat == 0xbaadf00d) - { - return null; - } - else - { - return raw; - } + return null; } - - override protected void LoadVariables(Node variableList) + else { - var variables = (byte[])variableList.Attributes["Variables"].Value; - var numVars = variables.Length / 4; + return raw; + } + } - Values.Clear(); - Values.Capacity = numVars; + override protected void LoadVariables(Node variableList) + { + var variables = (byte[])variableList.Attributes["Variables"].Value; - using var ms = new MemoryStream(variables); - using var reader = new BinaryReader(ms); - for (var i = 0; i < numVars; i++) - { - Values.Add(reader.ReadSingle()); - } + using var ms = new MemoryStream(variables); + using var reader = new BinaryReader(ms); + var numVars = reader.ReadInt32(); + + Values.Clear(); + Values.Capacity = numVars; + + for (var i = 0; i < numVars; i++) + { + var length = reader.ReadUInt16(); + var bytes = reader.ReadBytes(length); + var str = Encoding.UTF8.GetString(bytes); + Values.Add(str); } } +} - public class StringVariableHolder : VariableHolder +public class Float3VariableHolder : VariableHolder +{ + public Vector3? 
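The magic signed constants compared against in IntVariableHolder and Int64VariableHolder above are simply the 0xBAADF00D fill pattern reinterpreted as signed integers; this small editorial check confirms the equivalence.

using System.Diagnostics;

internal static class SentinelSketch
{
    public static void Check()
    {
        // 0xBAADF00D marks a deleted/unset variable slot.
        Debug.Assert(unchecked((int)0xbaadf00dU) == -1163005939);
        Debug.Assert(unchecked((long)0xbaadf00dbaadf00dUL) == -4995072469926809587L);
    }
}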
Get(int index) { - public string Get(int index) + var raw = GetRaw(index); + var intFloat = BitConverter.ToUInt32(BitConverter.GetBytes(raw.X), 0); + if (intFloat == 0xbaadf00d) { - var raw = GetRaw(index); - if (raw == "0xbaadf00d") - { - return null; - } - else - { - return raw; - } + return null; } - - override protected void LoadVariables(Node variableList) + else { - var variables = (byte[])variableList.Attributes["Variables"].Value; + return raw; + } + } - using var ms = new MemoryStream(variables); - using var reader = new BinaryReader(ms); - var numVars = reader.ReadInt32(); + override protected void LoadVariables(Node variableList) + { + var variables = (byte[])variableList.Attributes["Variables"].Value; + var numVars = variables.Length / 12; - Values.Clear(); - Values.Capacity = numVars; + Values.Clear(); + Values.Capacity = numVars; - for (var i = 0; i < numVars; i++) + using var ms = new MemoryStream(variables); + using var reader = new BinaryReader(ms); + for (var i = 0; i < numVars; i++) + { + Vector3 vec = new() { - var length = reader.ReadUInt16(); - var bytes = reader.ReadBytes(length); - var str = Encoding.UTF8.GetString(bytes); - Values.Add(str); - } + X = reader.ReadSingle(), + Y = reader.ReadSingle(), + Z = reader.ReadSingle() + }; + Values.Add(vec); } } +} + +internal enum VariableType +{ + Int = 0, + Int64 = 1, + Float = 2, + String = 3, + FixedString = 4, + Float3 = 5 +}; + +/// +/// Node (structure) entry in the LSF file +/// +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct Key2TableEntry +{ + /// + /// Index of variable from OsirisVariableHelper.IdentifierTable + /// + public UInt32 NameIndex; + /// + /// Index and type of value + /// + public UInt32 ValueIndexAndType; + /// + /// Handle of the object that this variable is assigned to. + /// + public UInt64 Handle; + + /// + /// Index of value in the appropriate variable list + /// + public int ValueIndex + { + get { return (int)((ValueIndexAndType >> 3) & 0x3ff); } + } + + /// + /// Type of value + /// + public VariableType ValueType + { + get { return (VariableType)(ValueIndexAndType & 7); } + } +}; - public class Float3VariableHolder : VariableHolder +public class VariableManager(OsirisVariableHelper variableHelper) +{ + private readonly Dictionary Keys = []; + private readonly IntVariableHolder IntList = new(); + private readonly Int64VariableHolder Int64List = new(); + private readonly FloatVariableHolder FloatList = new(); + private readonly StringVariableHolder StringList = new(); + private readonly StringVariableHolder FixedStringList = new(); + private readonly Float3VariableHolder Float3List = new(); + + public Dictionary GetAll(bool includeDeleted = false) { - public Vector3? Get(int index) + var variables = new Dictionary(); + foreach (var key in Keys.Values) { - var raw = GetRaw(index); - var intFloat = BitConverter.ToUInt32(BitConverter.GetBytes(raw.X), 0); - if (intFloat == 0xbaadf00d) - { - return null; - } - else + var name = variableHelper.GetName((int)key.NameIndex); + var value = includeDeleted ? 
GetRaw(key.ValueType, key.ValueIndex) : Get(key.ValueType, key.ValueIndex); + if (value != null) { - return raw; + variables.Add(name, value); } } - override protected void LoadVariables(Node variableList) - { - var variables = (byte[])variableList.Attributes["Variables"].Value; - var numVars = variables.Length / 12; + return variables; + } - Values.Clear(); - Values.Capacity = numVars; + public object Get(string name) + { + var index = variableHelper.GetKey(name); + var key = Keys[index]; + return Get(key.ValueType, key.ValueIndex); + } - using var ms = new MemoryStream(variables); - using var reader = new BinaryReader(ms); - for (var i = 0; i < numVars; i++) - { - Vector3 vec = new() - { - X = reader.ReadSingle(), - Y = reader.ReadSingle(), - Z = reader.ReadSingle() - }; - Values.Add(vec); - } - } + private object Get(VariableType type, int index) + { + return type switch + { + VariableType.Int => IntList.Get(index), + VariableType.Int64 => Int64List.Get(index), + VariableType.Float => FloatList.Get(index), + VariableType.String => StringList.Get(index), + VariableType.FixedString => FixedStringList.Get(index), + VariableType.Float3 => Float3List.Get(index), + _ => throw new ArgumentException("Unsupported variable type"), + }; } - internal enum VariableType + public object GetRaw(string name) { - Int = 0, - Int64 = 1, - Float = 2, - String = 3, - FixedString = 4, - Float3 = 5 - }; + var index = variableHelper.GetKey(name); + var key = Keys[index]; + return GetRaw(key.ValueType, key.ValueIndex); + } - /// - /// Node (structure) entry in the LSF file - /// - [StructLayout(LayoutKind.Sequential, Pack = 1)] - internal struct Key2TableEntry + private object GetRaw(VariableType type, int index) { - /// - /// Index of variable from OsirisVariableHelper.IdentifierTable - /// - public UInt32 NameIndex; - /// - /// Index and type of value - /// - public UInt32 ValueIndexAndType; - /// - /// Handle of the object that this variable is assigned to. 
- /// - public UInt64 Handle; - - /// - /// Index of value in the appropriate variable list - /// - public int ValueIndex + return type switch { - get { return (int)((ValueIndexAndType >> 3) & 0x3ff); } - } + VariableType.Int => IntList.GetRaw(index), + VariableType.Int64 => Int64List.GetRaw(index), + VariableType.Float => FloatList.GetRaw(index), + VariableType.String => StringList.GetRaw(index), + VariableType.FixedString => FixedStringList.GetRaw(index), + VariableType.Float3 => Float3List.GetRaw(index), + _ => throw new ArgumentException("Unsupported variable type"), + }; + } - /// - /// Type of value - /// - public VariableType ValueType + private void LoadKeys(byte[] handleList) + { + Keys.Clear(); + + using var ms = new MemoryStream(handleList); + using var reader = new BinaryReader(ms); + var numHandles = reader.ReadInt32(); + for (var i = 0; i < numHandles; i++) { - get { return (VariableType)(ValueIndexAndType & 7); } + var entry = BinUtils.ReadStruct(reader); + Keys.Add((int)entry.NameIndex, entry); } - }; + } - public class VariableManager(OsirisVariableHelper variableHelper) + public void Load(Node variableManager) { - private readonly Dictionary Keys = []; - private readonly IntVariableHolder IntList = new(); - private readonly Int64VariableHolder Int64List = new(); - private readonly FloatVariableHolder FloatList = new(); - private readonly StringVariableHolder StringList = new(); - private readonly StringVariableHolder FixedStringList = new(); - private readonly Float3VariableHolder Float3List = new(); - - public Dictionary GetAll(bool includeDeleted = false) + List nodes; + if (variableManager.Children.TryGetValue("IntList", out nodes)) { - var variables = new Dictionary(); - foreach (var key in Keys.Values) - { - var name = variableHelper.GetName((int)key.NameIndex); - var value = includeDeleted ? 
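A sketch of the bit layout behind Key2TableEntry.ValueIndexAndType above: the low three bits select the VariableType and the next ten bits index into the matching variable list. It assumes it sits alongside the internal VariableType enum defined above; the packed values themselves come from the savegame, nothing here is taken from a real file.

internal static class Key2TableSketch
{
    // Mirrors Key2TableEntry.ValueType / ValueIndex: type in bits 0-2, value index in bits 3-12.
    public static (VariableType Type, int Index) Unpack(uint valueIndexAndType)
    {
        var type = (VariableType)(valueIndexAndType & 7);
        var index = (int)((valueIndexAndType >> 3) & 0x3ff);
        return (type, index);
    }
}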
GetRaw(key.ValueType, key.ValueIndex) : Get(key.ValueType, key.ValueIndex); - if (value != null) - { - variables.Add(name, value); - } - } - - return variables; + IntList.Load(nodes[0]); } - public object Get(string name) + if (variableManager.Children.TryGetValue("Int64List", out nodes)) { - var index = variableHelper.GetKey(name); - var key = Keys[index]; - return Get(key.ValueType, key.ValueIndex); + Int64List.Load(nodes[0]); } - private object Get(VariableType type, int index) + if (variableManager.Children.TryGetValue("FloatList", out nodes)) { - return type switch - { - VariableType.Int => IntList.Get(index), - VariableType.Int64 => Int64List.Get(index), - VariableType.Float => FloatList.Get(index), - VariableType.String => StringList.Get(index), - VariableType.FixedString => FixedStringList.Get(index), - VariableType.Float3 => Float3List.Get(index), - _ => throw new ArgumentException("Unsupported variable type"), - }; + FloatList.Load(nodes[0]); } - public object GetRaw(string name) + if (variableManager.Children.TryGetValue("StringList", out nodes)) { - var index = variableHelper.GetKey(name); - var key = Keys[index]; - return GetRaw(key.ValueType, key.ValueIndex); + StringList.Load(nodes[0]); } - private object GetRaw(VariableType type, int index) + if (variableManager.Children.TryGetValue("FixedStringList", out nodes)) { - return type switch - { - VariableType.Int => IntList.GetRaw(index), - VariableType.Int64 => Int64List.GetRaw(index), - VariableType.Float => FloatList.GetRaw(index), - VariableType.String => StringList.GetRaw(index), - VariableType.FixedString => FixedStringList.GetRaw(index), - VariableType.Float3 => Float3List.GetRaw(index), - _ => throw new ArgumentException("Unsupported variable type"), - }; + FixedStringList.Load(nodes[0]); } - private void LoadKeys(byte[] handleList) + if (variableManager.Children.TryGetValue("Float3List", out nodes)) { - Keys.Clear(); - - using var ms = new MemoryStream(handleList); - using var reader = new BinaryReader(ms); - var numHandles = reader.ReadInt32(); - for (var i = 0; i < numHandles; i++) - { - var entry = BinUtils.ReadStruct(reader); - Keys.Add((int)entry.NameIndex, entry); - } + Float3List.Load(nodes[0]); } - public void Load(Node variableManager) + if (variableManager.Children.TryGetValue("Key2TableList", out nodes)) { - List nodes; - if (variableManager.Children.TryGetValue("IntList", out nodes)) - { - IntList.Load(nodes[0]); - } - - if (variableManager.Children.TryGetValue("Int64List", out nodes)) - { - Int64List.Load(nodes[0]); - } - - if (variableManager.Children.TryGetValue("FloatList", out nodes)) - { - FloatList.Load(nodes[0]); - } - - if (variableManager.Children.TryGetValue("StringList", out nodes)) - { - StringList.Load(nodes[0]); - } - - if (variableManager.Children.TryGetValue("FixedStringList", out nodes)) - { - FixedStringList.Load(nodes[0]); - } - - if (variableManager.Children.TryGetValue("Float3List", out nodes)) - { - Float3List.Load(nodes[0]); - } - - if (variableManager.Children.TryGetValue("Key2TableList", out nodes)) - { - var handleList = (byte[])nodes[0].Attributes["HandleList"].Value; - LoadKeys(handleList); - } + var handleList = (byte[])nodes[0].Attributes["HandleList"].Value; + LoadKeys(handleList); } } } diff --git a/LSLib/LS/Stats/Parser/PropertyDefinitions.cs b/LSLib/LS/Stats/Parser/PropertyDefinitions.cs index 24418698..913e742f 100644 --- a/LSLib/LS/Stats/Parser/PropertyDefinitions.cs +++ b/LSLib/LS/Stats/Parser/PropertyDefinitions.cs @@ -1,55 +1,54 @@ using System; using 
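A usage sketch tying OsirisVariableHelper and VariableManager together. The patch does not spell out where these nodes live inside the savegame tree, so the two Node parameters are assumptions you would resolve against globals.lsf yourself.

using System;
using LSLib.LS;
using LSLib.LS.Save;

internal static class VariableDumpSketch
{
    public static void Dump(Node osirisHelperNode, Node variableManagerNode)
    {
        var helper = new OsirisVariableHelper();
        helper.Load(osirisHelperNode);            // builds the identifier <-> key table

        var variables = new VariableManager(helper);
        variables.Load(variableManagerNode);      // loads the typed value lists and the Key2Table handles

        foreach (var kv in variables.GetAll())    // deleted (0xBAADF00D) entries are skipped by default
        {
            Console.WriteLine($"{kv.Key} = {kv.Value}");
        }
    }
}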
System.Collections.Generic; -namespace LSLib.LS.Stats.Properties +namespace LSLib.LS.Stats.Properties; + +public class Requirement +{ + // Requirement negation ("Immobile" vs. "!Immobile"). + public bool Not; + // Textual name of requirement + public string RequirementName; + // Integer requirement parameter + public int IntParam; + // Tag name parameter ("Tag" requirement only) + public string TagParam; +} + +public class Property +{ + public string TextKey; + public string Context; + public object Condition; + public PropertyAction Action; +} + +public class PropertyAction +{ + public string Action; + public List Arguments; +} + +public enum ConditionOperator +{ + And, + Or +}; + +public class Condition +{ + public bool Not; +} + +public class UnaryCondition : Condition +{ + public string ConditionType; + public string Argument; +} + +public class BinaryCondition : Condition { - public class Requirement - { - // Requirement negation ("Immobile" vs. "!Immobile"). - public bool Not; - // Textual name of requirement - public string RequirementName; - // Integer requirement parameter - public int IntParam; - // Tag name parameter ("Tag" requirement only) - public string TagParam; - } - - public class Property - { - public string TextKey; - public string Context; - public object Condition; - public PropertyAction Action; - } - - public class PropertyAction - { - public string Action; - public List Arguments; - } - - public enum ConditionOperator - { - And, - Or - }; - - public class Condition - { - public bool Not; - } - - public class UnaryCondition : Condition - { - public string ConditionType; - public string Argument; - } - - public class BinaryCondition : Condition - { - public Condition Left; - public Condition Right; - public ConditionOperator Operator; - } + public Condition Left; + public Condition Right; + public ConditionOperator Operator; } diff --git a/LSLib/LS/Stats/Parser/StatLuaParser.cs b/LSLib/LS/Stats/Parser/StatLuaParser.cs index 9aa624b4..f742a92d 100644 --- a/LSLib/LS/Stats/Parser/StatLuaParser.cs +++ b/LSLib/LS/Stats/Parser/StatLuaParser.cs @@ -1,24 +1,23 @@ using QUT.Gppg; -namespace LSLib.LS.Stats.Lua +namespace LSLib.LS.Stats.Lua; + +public partial class StatLuaScanner { - public partial class StatLuaScanner + public LexLocation LastLocation() { - public LexLocation LastLocation() - { - return new LexLocation(tokLin, tokCol, tokELin, tokECol); - } + return new LexLocation(tokLin, tokCol, tokELin, tokECol); } +} - public abstract class StatLuaScanBase : AbstractScanner - { - protected virtual bool yywrap() { return true; } - } +public abstract class StatLuaScanBase : AbstractScanner +{ + protected virtual bool yywrap() { return true; } +} - public partial class StatLuaParser +public partial class StatLuaParser +{ + public StatLuaParser(StatLuaScanner scnr) : base(scnr) { - public StatLuaParser(StatLuaScanner scnr) : base(scnr) - { - } } } \ No newline at end of file diff --git a/LSLib/LS/Stats/Parser/StatNodes.cs b/LSLib/LS/Stats/Parser/StatNodes.cs index ed29f3e2..d8e75e9e 100644 --- a/LSLib/LS/Stats/Parser/StatNodes.cs +++ b/LSLib/LS/Stats/Parser/StatNodes.cs @@ -2,35 +2,34 @@ using System; using System.Collections.Generic; -namespace LSLib.LS.Stats.StatParser +namespace LSLib.LS.Stats.StatParser; + +/// +/// List of stat properties +/// +public class StatDeclaration { - /// - /// List of stat properties - /// - public class StatDeclaration - { - public CodeLocation Location; - public Dictionary Properties = new Dictionary(); - public Dictionary PropertyLocations = 
new Dictionary(); - public bool WasInstantiated = false; - } + public CodeLocation Location; + public Dictionary Properties = new Dictionary(); + public Dictionary PropertyLocations = new Dictionary(); + public bool WasInstantiated = false; +} - /// - /// A string property of a stat entry (Key/value pair) - /// - public class StatProperty - { - public CodeLocation Location; - public String Key; - public object Value; - } +/// +/// A string property of a stat entry (Key/value pair) +/// +public class StatProperty +{ + public CodeLocation Location; + public String Key; + public object Value; +} - /// - /// An element of collection of a stat entry (Key/value pair) - /// - public class StatElement - { - public String Collection; - public object Value; - } +/// +/// An element of collection of a stat entry (Key/value pair) +/// +public class StatElement +{ + public String Collection; + public object Value; } diff --git a/LSLib/LS/Stats/Parser/StatParser.cs b/LSLib/LS/Stats/Parser/StatParser.cs index 1c3b2281..48687691 100644 --- a/LSLib/LS/Stats/Parser/StatParser.cs +++ b/LSLib/LS/Stats/Parser/StatParser.cs @@ -4,261 +4,260 @@ using System.Collections.Generic; using System.Text.RegularExpressions; -namespace LSLib.LS.Stats.StatParser -{ - /// - /// A collection of sub-stats. - /// - using StatCollection = List; +namespace LSLib.LS.Stats.StatParser; - /// - /// Declarations node - contains every declaration from the story header file. - /// - using StatDeclarations = List; +/// +/// A collection of sub-stats. +/// +using StatCollection = List; +/// +/// Declarations node - contains every declaration from the story header file. +/// +using StatDeclarations = List; - public abstract class StatScanBase : AbstractScanner - { - protected String fileName; - public override CodeLocation yylloc { get; set; } - - protected virtual bool yywrap() { return true; } +public abstract class StatScanBase : AbstractScanner +{ + protected String fileName; + + public override CodeLocation yylloc { get; set; } + + protected virtual bool yywrap() { return true; } - protected string MakeLiteral(string lit) => lit; + protected string MakeLiteral(string lit) => lit; + + protected string MakeString(string lit) + { + return MakeLiteral(Regex.Unescape(lit.Substring(1, lit.Length - 2))); + } - protected string MakeString(string lit) + protected StatProperty MakeDataProperty(CodeLocation location, string lit) + { + var re = new Regex(@"data\s+""([^""]+)""\s+""(.*)""\s*", RegexOptions.CultureInvariant); + var matches = re.Match(lit); + if (!matches.Success) { - return MakeLiteral(Regex.Unescape(lit.Substring(1, lit.Length - 2))); + throw new Exception("Stat data entry match error"); } - protected StatProperty MakeDataProperty(CodeLocation location, string lit) + return new StatProperty { - var re = new Regex(@"data\s+""([^""]+)""\s+""(.*)""\s*", RegexOptions.CultureInvariant); - var matches = re.Match(lit); - if (!matches.Success) - { - throw new Exception("Stat data entry match error"); - } + Key = matches.Groups[1].Value, + Value = matches.Groups[2].Value, + Location = location + }; + } +} - return new StatProperty - { - Key = matches.Groups[1].Value, - Value = matches.Groups[2].Value, - Location = location - }; - } +public partial class StatScanner +{ + public StatScanner(String fileName) + { + this.fileName = fileName; } - public partial class StatScanner + public CodeLocation LastLocation() { - public StatScanner(String fileName) - { - this.fileName = fileName; - } + return new CodeLocation(null, tokLin, tokCol, tokELin, 
tokECol); + } +} - public CodeLocation LastLocation() - { - return new CodeLocation(null, tokLin, tokCol, tokELin, tokECol); - } +public partial class StatParser +{ + public StatParser(StatScanner scnr) : base(scnr) + { } - public partial class StatParser + public StatDeclarations GetDeclarations() { - public StatParser(StatScanner scnr) : base(scnr) - { - } + return (StatDeclarations)CurrentSemanticValue; + } - public StatDeclarations GetDeclarations() - { - return (StatDeclarations)CurrentSemanticValue; - } + private StatDeclarations MakeDeclarationList() => new StatDeclarations(); - private StatDeclarations MakeDeclarationList() => new StatDeclarations(); + private StatDeclarations AddDeclaration(object declarations, object declaration) + { + var decls = (StatDeclarations)declarations; + decls.Add((StatDeclaration)declaration); + return decls; + } - private StatDeclarations AddDeclaration(object declarations, object declaration) - { - var decls = (StatDeclarations)declarations; - decls.Add((StatDeclaration)declaration); - return decls; - } + private StatDeclaration MakeDeclaration() => new StatDeclaration(); - private StatDeclaration MakeDeclaration() => new StatDeclaration(); + private StatDeclaration MakeDeclaration(CodeLocation location) => new StatDeclaration() + { + Location = location + }; - private StatDeclaration MakeDeclaration(CodeLocation location) => new StatDeclaration() + private StatDeclaration MakeDeclaration(CodeLocation location, StatProperty[] properties) + { + var decl = new StatDeclaration() { Location = location }; - - private StatDeclaration MakeDeclaration(CodeLocation location, StatProperty[] properties) + foreach (var prop in properties) { - var decl = new StatDeclaration() - { - Location = location - }; - foreach (var prop in properties) - { - AddProperty(decl, prop); - } - - return decl; + AddProperty(decl, prop); } - private StatDeclaration MakeDeclaration(StatProperty[] properties) - { - return MakeDeclaration(null, properties); - } + return decl; + } - private StatDeclaration MergeItemCombo(object comboNode, object resultNode) + private StatDeclaration MakeDeclaration(StatProperty[] properties) + { + return MakeDeclaration(null, properties); + } + + private StatDeclaration MergeItemCombo(object comboNode, object resultNode) + { + var combo = (StatDeclaration)comboNode; + var result = (StatDeclaration)resultNode; + foreach (var kv in result.Properties) { - var combo = (StatDeclaration)comboNode; - var result = (StatDeclaration)resultNode; - foreach (var kv in result.Properties) + if (kv.Key != "EntityType" && kv.Key != "Name") { - if (kv.Key != "EntityType" && kv.Key != "Name") - { - combo.Properties[kv.Key] = kv.Value; - } + combo.Properties[kv.Key] = kv.Value; } - - return combo; } - private StatDeclaration AddProperty(object declaration, object property) + return combo; + } + + private StatDeclaration AddProperty(object declaration, object property) + { + var decl = (StatDeclaration)declaration; + if (property is StatProperty) { - var decl = (StatDeclaration)declaration; - if (property is StatProperty) + var prop = (StatProperty)property; + decl.Properties[prop.Key] = prop.Value; + if (prop.Location != null) { - var prop = (StatProperty)property; - decl.Properties[prop.Key] = prop.Value; - if (prop.Location != null) - { - decl.PropertyLocations[prop.Key] = prop.Location; - } + decl.PropertyLocations[prop.Key] = prop.Location; } - else if (property is StatElement) + } + else if (property is StatElement) + { + var ele = (StatElement)property; + object 
cont; + if (!decl.Properties.TryGetValue(ele.Collection, out cont)) { - var ele = (StatElement)property; - object cont; - if (!decl.Properties.TryGetValue(ele.Collection, out cont)) - { - cont = new List(); - decl.Properties[ele.Collection] = cont; - } - - (cont as List).Add(ele.Value); + cont = new List(); + decl.Properties[ele.Collection] = cont; } - else if (property is StatDeclaration) + + (cont as List).Add(ele.Value); + } + else if (property is StatDeclaration) + { + var otherDecl = (StatDeclaration)property; + foreach (var kv in otherDecl.Properties) { - var otherDecl = (StatDeclaration)property; - foreach (var kv in otherDecl.Properties) - { - decl.Properties[kv.Key] = kv.Value; - } - - foreach (var kv in otherDecl.PropertyLocations) - { - decl.PropertyLocations[kv.Key] = kv.Value; - } + decl.Properties[kv.Key] = kv.Value; } - else + + foreach (var kv in otherDecl.PropertyLocations) { - throw new Exception("Unknown property type"); + decl.PropertyLocations[kv.Key] = kv.Value; } - - return decl; } - - private StatProperty MakeProperty(object key, object value) => new StatProperty() + else { - Key = (string)key, - Value = (string)value - }; + throw new Exception("Unknown property type"); + } - private StatProperty MakeProperty(String key, object value) => new StatProperty() - { - Key = key, - Value = (string)value - }; + return decl; + } - private StatProperty MakeProperty(String key, String value) => new StatProperty() - { - Key = key, - Value = value - }; + private StatProperty MakeProperty(object key, object value) => new StatProperty() + { + Key = (string)key, + Value = (string)value + }; - private StatProperty MakeProperty(CodeLocation location, object key, object value) => new StatProperty() - { - Key = (string)key, - Value = (string)value, - Location = location - }; + private StatProperty MakeProperty(String key, object value) => new StatProperty() + { + Key = key, + Value = (string)value + }; - private StatProperty MakeProperty(CodeLocation location, String key, object value) => new StatProperty() - { - Key = key, - Value = (string)value, - Location = location - }; + private StatProperty MakeProperty(String key, String value) => new StatProperty() + { + Key = key, + Value = value + }; - private StatProperty MakeProperty(CodeLocation location, String key, String value) => new StatProperty() - { - Key = key, - Value = value, - Location = location - }; + private StatProperty MakeProperty(CodeLocation location, object key, object value) => new StatProperty() + { + Key = (string)key, + Value = (string)value, + Location = location + }; + + private StatProperty MakeProperty(CodeLocation location, String key, object value) => new StatProperty() + { + Key = key, + Value = (string)value, + Location = location + }; - private StatElement MakeElement(String key, object value) + private StatProperty MakeProperty(CodeLocation location, String key, String value) => new StatProperty() + { + Key = key, + Value = value, + Location = location + }; + + private StatElement MakeElement(String key, object value) + { + if (value is string) { - if (value is string) + return new StatElement() { - return new StatElement() - { - Collection = key, - Value = (string)value - }; - } - else if (value is StatCollection) - { - return new StatElement() - { - Collection = key, - Value = (StatCollection)value - }; - } - else if (value is Dictionary) + Collection = key, + Value = (string)value + }; + } + else if (value is StatCollection) + { + return new StatElement() { - return new StatElement() - { - Collection = 
key, - Value = (Dictionary)value - }; - } - else if (value is StatDeclaration) + Collection = key, + Value = (StatCollection)value + }; + } + else if (value is Dictionary) + { + return new StatElement() { - return new StatElement() - { - Collection = key, - Value = ((StatDeclaration)value).Properties - }; - } - else + Collection = key, + Value = (Dictionary)value + }; + } + else if (value is StatDeclaration) + { + return new StatElement() { - throw new Exception("Unknown stat element type"); - } + Collection = key, + Value = ((StatDeclaration)value).Properties + }; } - - private StatCollection MakeCollection() => new List(); - - private StatCollection AddElement(object collection, object element) + else { - var coll = (StatCollection)collection; - var ele = (string)element; - coll.Add(ele); - - return coll; + throw new Exception("Unknown stat element type"); } + } + + private StatCollection MakeCollection() => new List(); - private string Unwrap(object node) => (string)node; + private StatCollection AddElement(object collection, object element) + { + var coll = (StatCollection)collection; + var ele = (string)element; + coll.Add(ele); + + return coll; } + + private string Unwrap(object node) => (string)node; } \ No newline at end of file diff --git a/LSLib/LS/Stats/Parser/StatPropertyParser.cs b/LSLib/LS/Stats/Parser/StatPropertyParser.cs index f94c0124..107726a0 100644 --- a/LSLib/LS/Stats/Parser/StatPropertyParser.cs +++ b/LSLib/LS/Stats/Parser/StatPropertyParser.cs @@ -6,287 +6,286 @@ using System.Text; using static LSLib.Granny.Model.CurveData.AnimationCurveData; -namespace LSLib.LS.Stats.Properties +namespace LSLib.LS.Stats.Properties; + +public partial class StatPropertyScanner { - public partial class StatPropertyScanner + public LexLocation LastLocation() { - public LexLocation LastLocation() - { - return new LexLocation(tokLin, tokCol, tokELin, tokECol); - } - - public int TokenStartPos() - { - return tokPos; - } - - public int TokenEndPos() - { - return tokEPos; - } - - private object MakeLiteral(string s) => s; + return new LexLocation(tokLin, tokCol, tokELin, tokECol); } - public abstract class StatPropertyScanBase : AbstractScanner + public int TokenStartPos() { - protected virtual bool yywrap() { return true; } + return tokPos; } - public class StatActionValidator + public int TokenEndPos() { - private StatDefinitionRepository Definitions; - private StatValueParserFactory ParserFactory; - private readonly ExpressionType ExprType; + return tokEPos; + } + + private object MakeLiteral(string s) => s; +} + +public abstract class StatPropertyScanBase : AbstractScanner +{ + protected virtual bool yywrap() { return true; } +} + +public class StatActionValidator +{ + private StatDefinitionRepository Definitions; + private StatValueParserFactory ParserFactory; + private readonly ExpressionType ExprType; + + public delegate void ErrorReportingDelegate(string message); + public event ErrorReportingDelegate OnError; - public delegate void ErrorReportingDelegate(string message); - public event ErrorReportingDelegate OnError; + public StatActionValidator(StatDefinitionRepository definitions, StatValueParserFactory parserFactory, ExpressionType type) + { + Definitions = definitions; + ParserFactory = parserFactory; + ExprType = type; + } - public StatActionValidator(StatDefinitionRepository definitions, StatValueParserFactory parserFactory, ExpressionType type) + public void Validate(PropertyAction action) + { + Dictionary functors = null; + switch (ExprType) { - Definitions = definitions; - 
ParserFactory = parserFactory; - ExprType = type; + case ExpressionType.Boost: functors = Definitions.Boosts; break; + case ExpressionType.Functor: functors = Definitions.Functors; break; + case ExpressionType.DescriptionParams: functors = Definitions.DescriptionParams; break; } - public void Validate(PropertyAction action) + if (!functors.TryGetValue(action.Action, out StatFunctorType functor)) { - Dictionary functors = null; - switch (ExprType) + if (ExprType != ExpressionType.DescriptionParams) { - case ExpressionType.Boost: functors = Definitions.Boosts; break; - case ExpressionType.Functor: functors = Definitions.Functors; break; - case ExpressionType.DescriptionParams: functors = Definitions.DescriptionParams; break; + OnError($"'{action.Action}' is not a valid {ExprType}"); } - if (!functors.TryGetValue(action.Action, out StatFunctorType functor)) - { - if (ExprType != ExpressionType.DescriptionParams) - { - OnError($"'{action.Action}' is not a valid {ExprType}"); - } + return; + } - return; + // Strip property contexts + var firstArg = 0; + while (firstArg < action.Arguments.Count) + { + var arg = action.Arguments[firstArg]; + if (arg == "SELF" + || arg == "OWNER" + || arg == "SWAP" + || arg == "OBSERVER_OBSERVER" + || arg == "OBSERVER_TARGET" + || arg == "OBSERVER_SOURCE") + { + firstArg++; } - - // Strip property contexts - var firstArg = 0; - while (firstArg < action.Arguments.Count) + else { - var arg = action.Arguments[firstArg]; - if (arg == "SELF" - || arg == "OWNER" - || arg == "SWAP" - || arg == "OBSERVER_OBSERVER" - || arg == "OBSERVER_TARGET" - || arg == "OBSERVER_SOURCE") - { - firstArg++; - } - else - { - break; - } + break; } + } - var args = action.Arguments.GetRange(firstArg, action.Arguments.Count - firstArg); + var args = action.Arguments.GetRange(firstArg, action.Arguments.Count - firstArg); - if (args.Count > functor.Args.Count) - { - OnError($"Too many arguments to '{action.Action}'; {args.Count} passed, expected at most {functor.Args.Count}"); - return; - } + if (args.Count > functor.Args.Count) + { + OnError($"Too many arguments to '{action.Action}'; {args.Count} passed, expected at most {functor.Args.Count}"); + return; + } - if (args.Count < functor.RequiredArgs) - { - OnError($"Not enough arguments to '{action.Action}'; {args.Count} passed, expected at least {functor.RequiredArgs}"); - return; - } + if (args.Count < functor.RequiredArgs) + { + OnError($"Not enough arguments to '{action.Action}'; {args.Count} passed, expected at least {functor.RequiredArgs}"); + return; + } - for (var i = 0; i < Math.Min(args.Count, functor.Args.Count); i++) - { - bool succeeded = false; - string errorText = null; + for (var i = 0; i < Math.Min(args.Count, functor.Args.Count); i++) + { + bool succeeded = false; + string errorText = null; - var arg = functor.Args[i]; - if (arg.Type.Length > 0) + var arg = functor.Args[i]; + if (arg.Type.Length > 0) + { + var parser = ParserFactory.CreateParser(arg.Type, null, null, Definitions); + parser.Parse(args[i], ref succeeded, ref errorText); + if (!succeeded) { - var parser = ParserFactory.CreateParser(arg.Type, null, null, Definitions); - parser.Parse(args[i], ref succeeded, ref errorText); - if (!succeeded) - { - OnError($"'{action.Action}' argument {i + 1}: {errorText}"); - } + OnError($"'{action.Action}' argument {i + 1}: {errorText}"); } } } } +} - public partial class StatPropertyParser +public partial class StatPropertyParser +{ + private IStatValueParser RequirementParser; + private StatEnumeration RequirementsWithArgument; + 
private int LiteralStart; + private StatActionValidator ActionValidator; + private byte[] Source; + + public delegate void ErrorReportingDelegate(string message); + public event ErrorReportingDelegate OnError; + + private StatPropertyScanner StatScanner; + + public StatPropertyParser(StatPropertyScanner scnr, StatDefinitionRepository definitions, + StatValueParserFactory parserFactory, byte[] source, ExpressionType type) : base(scnr) { - private IStatValueParser RequirementParser; - private StatEnumeration RequirementsWithArgument; - private int LiteralStart; - private StatActionValidator ActionValidator; - private byte[] Source; + StatScanner = scnr; + Source = source; + ActionValidator = new StatActionValidator(definitions, parserFactory, type); + ActionValidator.OnError += (message) => { OnError(message); }; + } - public delegate void ErrorReportingDelegate(string message); - public event ErrorReportingDelegate OnError; + public object GetParsedObject() + { + return CurrentSemanticValue; + } - private StatPropertyScanner StatScanner; + private List MakeRequirements() => new List(); - public StatPropertyParser(StatPropertyScanner scnr, StatDefinitionRepository definitions, - StatValueParserFactory parserFactory, byte[] source, ExpressionType type) : base(scnr) - { - StatScanner = scnr; - Source = source; - ActionValidator = new StatActionValidator(definitions, parserFactory, type); - ActionValidator.OnError += (message) => { OnError(message); }; - } + private List AddRequirement(object requirements, object requirement) + { + var req = requirements as List; + req.Add(requirement as Requirement); + return req; + } - public object GetParsedObject() - { - return CurrentSemanticValue; - } + private Requirement MakeNotRequirement(object requirement) + { + var req = requirement as Requirement; + req.Not = true; + return req; + } - private List MakeRequirements() => new List(); + private Requirement MakeRequirement(object name) + { + Validate(RequirementParser, name as string); - private List AddRequirement(object requirements, object requirement) + return new Requirement { - var req = requirements as List; - req.Add(requirement as Requirement); - return req; - } + Not = false, + RequirementName = name as string, + IntParam = 0, + TagParam = "" + }; + } - private Requirement MakeNotRequirement(object requirement) - { - var req = requirement as Requirement; - req.Not = true; - return req; - } + private Requirement MakeIntRequirement(object name, object intArg) + { + var reqmtName = name as string; + Validate(RequirementParser, reqmtName); - private Requirement MakeRequirement(object name) + if (!RequirementsWithArgument.ValueToIndexMap.ContainsKey(reqmtName)) { - Validate(RequirementParser, name as string); - - return new Requirement - { - Not = false, - RequirementName = name as string, - IntParam = 0, - TagParam = "" - }; + OnError?.Invoke($"Requirement '{reqmtName}' doesn't need any arguments"); } - private Requirement MakeIntRequirement(object name, object intArg) + return new Requirement { - var reqmtName = name as string; - Validate(RequirementParser, reqmtName); - - if (!RequirementsWithArgument.ValueToIndexMap.ContainsKey(reqmtName)) - { - OnError?.Invoke($"Requirement '{reqmtName}' doesn't need any arguments"); - } - - return new Requirement - { - Not = false, - RequirementName = reqmtName, - IntParam = Int32.Parse(intArg as string), - TagParam = "" - }; - } + Not = false, + RequirementName = reqmtName, + IntParam = Int32.Parse(intArg as string), + TagParam = "" + }; + } - private 
Requirement MakeTagRequirement(object name, object tag) + private Requirement MakeTagRequirement(object name, object tag) + { + return new Requirement { - return new Requirement - { - Not = false, - RequirementName = name as string, - IntParam = 0, - TagParam = tag as string - }; - } + Not = false, + RequirementName = name as string, + IntParam = 0, + TagParam = tag as string + }; + } - private List MakePropertyList() => new List(); + private List MakePropertyList() => new List(); - private List SetTextKey(object properties, object textKey) + private List SetTextKey(object properties, object textKey) + { + var props = properties as List; + var tk = (string)textKey; + foreach (var property in props) { - var props = properties as List; - var tk = (string)textKey; - foreach (var property in props) - { - property.TextKey = tk; - } - return props; + property.TextKey = tk; } + return props; + } - private List MergeProperties(object properties, object properties2) - { - var props = properties as List; - props.Concat(properties2 as List); - return props; - } + private List MergeProperties(object properties, object properties2) + { + var props = properties as List; + props.Concat(properties2 as List); + return props; + } - private List AddProperty(object properties, object property) - { - var props = properties as List; - props.Add(property as Property); - return props; - } + private List AddProperty(object properties, object property) + { + var props = properties as List; + props.Add(property as Property); + return props; + } - private Property MakeProperty(object context, object condition, object action) => new Property - { - Context = (string)context, - Condition = condition as object, - Action = action as PropertyAction - }; + private Property MakeProperty(object context, object condition, object action) => new Property + { + Context = (string)context, + Condition = condition as object, + Action = action as PropertyAction + }; - private List MakeArgumentList() => new List(); + private List MakeArgumentList() => new List(); - private List AddArgument(object arguments, object arg) - { - var args = arguments as List; - args.Add(arg == null ? "" : (string)arg); - return args; - } + private List AddArgument(object arguments, object arg) + { + var args = arguments as List; + args.Add(arg == null ? 
"" : (string)arg); + return args; + } - private PropertyAction MakeAction(object action, object arguments) + private PropertyAction MakeAction(object action, object arguments) + { + var act = new PropertyAction { - var act = new PropertyAction - { - Action = action as string, - Arguments = arguments as List - }; - ActionValidator.Validate(act); - return act; - } + Action = action as string, + Arguments = arguments as List + }; + ActionValidator.Validate(act); + return act; + } - private void Validate(IStatValueParser parser, string value) + private void Validate(IStatValueParser parser, string value) + { + if (parser != null) { - if (parser != null) + bool succeeded = false; + string errorText = null; + parser.Parse(value, ref succeeded, ref errorText); + if (!succeeded) { - bool succeeded = false; - string errorText = null; - parser.Parse(value, ref succeeded, ref errorText); - if (!succeeded) - { - errorText = $"'{value}': {errorText}"; - OnError?.Invoke(errorText); - } + errorText = $"'{value}': {errorText}"; + OnError?.Invoke(errorText); } } - - private object InitLiteral() - { - LiteralStart = StatScanner.TokenStartPos(); - return null; - } + } + + private object InitLiteral() + { + LiteralStart = StatScanner.TokenStartPos(); + return null; + } - private string MakeLiteral() - { - var val = Encoding.UTF8.GetString(Source, LiteralStart, StatScanner.TokenStartPos() - LiteralStart); - return val; - } + private string MakeLiteral() + { + var val = Encoding.UTF8.GetString(Source, LiteralStart, StatScanner.TokenStartPos() - LiteralStart); + return val; } } \ No newline at end of file diff --git a/LSLib/LS/Stats/StatDefinitions.cs b/LSLib/LS/Stats/StatDefinitions.cs index 3feddf40..b7f11eef 100644 --- a/LSLib/LS/Stats/StatDefinitions.cs +++ b/LSLib/LS/Stats/StatDefinitions.cs @@ -3,1118 +3,1118 @@ using System.IO; using System.Linq; -namespace LSLib.LS.Stats -{ - public class StatEnumeration(string name) - { - public readonly string Name = name; - public readonly List Values = []; - public readonly Dictionary ValueToIndexMap = []; - - public void AddItem(int index, string value) - { - if (Values.Count != index) - { - throw new Exception("Enumeration items must be added in order."); - } - - Values.Add(value); +namespace LSLib.LS.Stats; - // Some vanilla enums are bogus and contain names multiple times - ValueToIndexMap.TryAdd(value, index); - } +public class StatEnumeration(string name) +{ + public readonly string Name = name; + public readonly List Values = []; + public readonly Dictionary ValueToIndexMap = []; - public void AddItem(string value) + public void AddItem(int index, string value) + { + if (Values.Count != index) { - AddItem(Values.Count, value); + throw new Exception("Enumeration items must be added in order."); } - } - - public class StatField - { - public string Name; - public string Type; - public StatEnumeration EnumType; - public List ReferenceTypes; - private IStatValueParser parser; + Values.Add(value); - public IStatValueParser GetParser(StatValueParserFactory factory, StatDefinitionRepository definitions) - { - parser ??= factory.CreateParser(this, definitions); - return parser; - } + // Some vanilla enums are bogus and contain names multiple times + ValueToIndexMap.TryAdd(value, index); } - public class StatEntryType(string name, string nameProperty, string basedOnProperty) + public void AddItem(string value) { - public readonly string Name = name; - public readonly string NameProperty = nameProperty; - public readonly string BasedOnProperty = basedOnProperty; - public 
readonly Dictionary Fields = []; + AddItem(Values.Count, value); } +} - public class StatFunctorArgumentType - { - public string Name; - public string Type; - } +public class StatField +{ + public string Name; + public string Type; + public StatEnumeration EnumType; + public List ReferenceTypes; + + private IStatValueParser parser; - public class StatFunctorType + public IStatValueParser GetParser(StatValueParserFactory factory, StatDefinitionRepository definitions) { - public string Name; - public int RequiredArgs; - public List Args; + parser ??= factory.CreateParser(this, definitions); + return parser; } +} - public class StatDefinitionRepository - { - // Version of modified Enumerations.xml and StatObjectDefinitions.sod we expect - public const string CustomizationsVersion = "1"; +public class StatEntryType(string name, string nameProperty, string basedOnProperty) +{ + public readonly string Name = name; + public readonly string NameProperty = nameProperty; + public readonly string BasedOnProperty = basedOnProperty; + public readonly Dictionary Fields = []; +} - public readonly Dictionary Enumerations = []; - public readonly Dictionary Types = []; - public readonly Dictionary Functors = []; - public readonly Dictionary Boosts = []; - public readonly Dictionary DescriptionParams = []; +public class StatFunctorArgumentType +{ + public string Name; + public string Type; +} - private StatField AddField(StatEntryType defn, string name, string typeName) - { - var field = new StatField - { - Name = name, - Type = typeName - }; +public class StatFunctorType +{ + public string Name; + public int RequiredArgs; + public List Args; +} - if (Enumerations.TryGetValue(typeName, out StatEnumeration enumType) && enumType.Values.Count > 0) - { - field.EnumType = enumType; - } +public class StatDefinitionRepository +{ + // Version of modified Enumerations.xml and StatObjectDefinitions.sod we expect + public const string CustomizationsVersion = "1"; - defn.Fields.Add(name, field); - return field; - } + public readonly Dictionary Enumerations = []; + public readonly Dictionary Types = []; + public readonly Dictionary Functors = []; + public readonly Dictionary Boosts = []; + public readonly Dictionary DescriptionParams = []; - private void AddEnumeration(string name, List labels) + private StatField AddField(StatEntryType defn, string name, string typeName) + { + var field = new StatField { - var enumType = new StatEnumeration(name); - foreach (var label in labels) - { - enumType.AddItem(label); - } - Enumerations.Add(name, enumType); - } + Name = name, + Type = typeName + }; - private StatFunctorArgumentType MakeFunctorArg(string name, string type) + if (Enumerations.TryGetValue(typeName, out StatEnumeration enumType) && enumType.Values.Count > 0) { - return new StatFunctorArgumentType - { - Name = name, - Type = type - }; + field.EnumType = enumType; } - public void AddBoost(string name, int requiredArgs, List args) - { - AddFunctor(Boosts, name, requiredArgs, args); - } + defn.Fields.Add(name, field); + return field; + } - public void AddFunctor(string name, int requiredArgs, List args) + private void AddEnumeration(string name, List labels) + { + var enumType = new StatEnumeration(name); + foreach (var label in labels) { - AddFunctor(Functors, name, requiredArgs, args); + enumType.AddItem(label); } + Enumerations.Add(name, enumType); + } - public void AddDescriptionParams(string name, int requiredArgs, List args) + private StatFunctorArgumentType MakeFunctorArg(string name, string type) + { + return 
new StatFunctorArgumentType { - AddFunctor(DescriptionParams, name, requiredArgs, args); - } + Name = name, + Type = type + }; + } - public void AddFunctor(Dictionary dict, string name, int requiredArgs, List argDescs) - { - var args = new List(); - for (int i = 0; i < argDescs.Count; i += 2) - { - args.Add(MakeFunctorArg(argDescs[i], argDescs[i + 1])); - } + public void AddBoost(string name, int requiredArgs, List args) + { + AddFunctor(Boosts, name, requiredArgs, args); + } - AddFunctor(dict, name, requiredArgs, args); - } + public void AddFunctor(string name, int requiredArgs, List args) + { + AddFunctor(Functors, name, requiredArgs, args); + } - public void AddFunctor(Dictionary dict, string name, int requiredArgs, IEnumerable args) - { - var functor = new StatFunctorType - { - Name = name, - RequiredArgs = requiredArgs, - Args = args.ToList() - }; + public void AddDescriptionParams(string name, int requiredArgs, List args) + { + AddFunctor(DescriptionParams, name, requiredArgs, args); + } - dict.Add(name, functor); + public void AddFunctor(Dictionary dict, string name, int requiredArgs, List argDescs) + { + var args = new List(); + for (int i = 0; i < argDescs.Count; i += 2) + { + args.Add(MakeFunctorArg(argDescs[i], argDescs[i + 1])); } - public void LoadDefinitions(Stream stream) + AddFunctor(dict, name, requiredArgs, args); + } + + public void AddFunctor(Dictionary dict, string name, int requiredArgs, IEnumerable args) + { + var functor = new StatFunctorType { - StatEntryType defn = null; - string line; + Name = name, + RequiredArgs = requiredArgs, + Args = args.ToList() + }; + + dict.Add(name, functor); + } + + public void LoadDefinitions(Stream stream) + { + StatEntryType defn = null; + string line; - using (var reader = new StreamReader(stream)) - while ((line = reader.ReadLine()) != null) + using (var reader = new StreamReader(stream)) + while ((line = reader.ReadLine()) != null) + { + var trimmed = line.Trim(); + if (trimmed.Length > 0) { - var trimmed = line.Trim(); - if (trimmed.Length > 0) + if (trimmed.StartsWith("modifier type ")) { - if (trimmed.StartsWith("modifier type ")) - { - var name = trimmed[15..^1]; - defn = new StatEntryType(name, "Name", "Using"); - Types.Add(defn.Name, defn); - AddField(defn, "Name", "FixedString"); - var usingRef = AddField(defn, "Using", "StatReference"); - usingRef.ReferenceTypes = - [ - new StatReferenceConstraint - { - StatType = name - } - ]; - } - else if (trimmed.StartsWith("modifier \"")) - { - var nameEnd = trimmed.IndexOf('"', 10); - var name = trimmed[10..nameEnd]; - var typeName = trimmed.Substring(nameEnd + 3, trimmed.Length - nameEnd - 4); - AddField(defn, name, typeName); - } + var name = trimmed[15..^1]; + defn = new StatEntryType(name, "Name", "Using"); + Types.Add(defn.Name, defn); + AddField(defn, "Name", "FixedString"); + var usingRef = AddField(defn, "Using", "StatReference"); + usingRef.ReferenceTypes = + [ + new StatReferenceConstraint + { + StatType = name + } + ]; + } + else if (trimmed.StartsWith("modifier \"")) + { + var nameEnd = trimmed.IndexOf('"', 10); + var name = trimmed[10..nameEnd]; + var typeName = trimmed.Substring(nameEnd + 3, trimmed.Length - nameEnd - 4); + AddField(defn, name, typeName); } } + } - // Add builtins - var itemColor = new StatEntryType("ItemColor", "ItemColorName", null); - Types.Add(itemColor.Name, itemColor); - AddField(itemColor, "ItemColorName", "FixedString"); - AddField(itemColor, "Primary Color", "FixedString"); - AddField(itemColor, "Secondary Color", "FixedString"); - 
AddField(itemColor, "Tertiary Color", "FixedString"); - - var itemProgressionName = new StatEntryType("ItemProgressionNames", "Name", null); - Types.Add(itemProgressionName.Name, itemProgressionName); - AddField(itemProgressionName, "Name", "FixedString"); - AddField(itemProgressionName, "Names", "Passthrough"); - - var itemProgressionVisual = new StatEntryType("ItemProgressionVisuals", "Name", null); - Types.Add(itemProgressionVisual.Name, itemProgressionVisual); - AddField(itemProgressionVisual, "Name", "FixedString"); - // FIXME - AddField(itemProgressionVisual, "LevelGroups", "Passthrough"); - AddField(itemProgressionVisual, "NameGroups", "Passthrough"); - AddField(itemProgressionVisual, "RootGroups", "Passthrough"); - - var dataType = new StatEntryType("Data", "Key", null); - Types.Add(dataType.Name, dataType); - AddField(dataType, "Key", "FixedString"); - AddField(dataType, "Value", "FixedString"); - - AddEnumeration("ResurrectType", - [ - "Living", - "Guaranteed", - "Construct", - "Undead" - ]); - - AddEnumeration("SetStatusDurationType", - [ - "SetMinimum", - "ForceSet", - "Add", - "Multiply" - ]); - - AddEnumeration("ExecuteWeaponFunctorsType", - [ - "MainHand", - "OffHand", - "BothHands" - ]); - - AddEnumeration("SpellCooldownType", - [ - "Default", - "OncePerTurn", - "OncePerCombat", - "UntilRest", - "OncePerTurnNoRealtime", - "UntilShortRest", - "UntilPerRestPerItem", - "OncePerShortRestPerItem" - ]); - - AddEnumeration("SummonDuration", - [ - "UntilLongRest", - "Permanent" - ]); - - AddEnumeration("ForceFunctorOrigin", - [ - "OriginToEntity", - "OriginToTarget", - "TargetToEntity" - ]); - - AddEnumeration("ForceFunctorAggression", - [ - "Aggressive", - "Friendly", - "Neutral" - ]); - - AddEnumeration("StatItemSlot", - [ - "Helmet", - "Breast", - "Cloak", - "MeleeMainHand", - "MeleeOffHand", - "RangedMainHand", - "RangedOffHand", - "Ring", - "Underwear", - "Boots", - "Gloves", - "Amulet", - "Ring2", - "Wings", - "Horns", - "Overhead", - "MusicalInstrument", - "VanityBody", - "VanityBoots", - "MainHand", - "OffHand" - ]); - - AddEnumeration("Magical", - [ - "Magical", - "Nonmagical" - ]); - - AddEnumeration("Nonlethal", - [ - "Lethal", - "Nonlethal" - ]); - - AddEnumeration("AllEnum", - [ - "All" - ]); - - AddEnumeration("ZoneShape", - [ - "Cone", - "Square", - ]); - - AddEnumeration("SurfaceLayer", - [ - "Ground", - "Cloud", - ]); - - AddEnumeration("RollAdjustmentType", - [ - "All", - "Distribute", - ]); - - AddEnumeration("StatsRollType", - [ - "Attack", - "MeleeWeaponAttack", - "RangedWeaponAttack", - "MeleeSpellAttack", - "RangedSpellAttack", - "MeleeUnarmedAttack", - "RangedUnarmedAttack", - "SkillCheck", - "SavingThrow", - "RawAbility", - "Damage", - "MeleeOffHandWeaponAttack", - "RangedOffHandWeaponAttack", - "DeathSavingThrow", - "MeleeWeaponDamage", - "RangedWeaponDamage", - "MeleeSpellDamage", - "RangedSpellDamage", - "MeleeUnarmedDamage", - "RangedUnarmedDamage", - ]); - - AddEnumeration("AdvantageType", - [ - "AttackRoll", - "AttackTarget", - "SavingThrow", - "AllSavingThrows", - "Ability", - "AllAbilities", - "Skill", - "AllSkills", - "SourceDialogue", - "DeathSavingThrow", - "Concentration", - ]); - - AddEnumeration("SkillType", - [ - "Deception", - "Intimidation", - "Performance", - "Persuasion", - "Acrobatics", - "SleightOfHand", - "Stealth", - "Arcana", - "History", - "Investigation", - "Nature", - "Religion", - "Athletics", - "AnimalHandling", - "Insight", - "Medicine", - "Perception", - "Survival", - ]); - - AddEnumeration("CriticalHitType", - [ - 
"AttackTarget", - "AttackRoll" - ]); - - AddEnumeration("Result", - [ - "Success", - "Failure" - ]); - - AddEnumeration("CriticalHitResult", - [ - "Success", - "Failure" - ]); - - AddEnumeration("CriticalHitWhen", - [ - "Never", - "Always", - "ForcedAlways" - ]); - - AddEnumeration("MovementSpeedType", - [ - "Stroll", - "Walk", - "Run", - "Sprint", - ]); - - AddEnumeration("DamageReductionType", - [ - "Half", - "Flat", - "Threshold" - ]); - - AddEnumeration("AttackRollAbility", - [ - "SpellCastingAbility", - "UnarmedMeleeAbility", - "AttackAbility" - ]); - - AddEnumeration("HealingDirection", - [ - "Incoming", - "Outgoing" - ]); - - AddEnumeration("ResistanceBoostFlags", - [ - "None", - "Resistant", - "Immune", - "Vulnerable", - "BelowDamageThreshold", - "ResistantToMagical", - "ImmuneToMagical", - "VulnerableToMagical", - "ResistantToNonMagical", - "ImmuneToNonMagical", - "VulnerableToNonMagical", - ]); - - AddEnumeration("UnlockSpellType", - [ - "Singular", - "AddChildren", - "MostPowerful" - ]); - - AddEnumeration("ProficiencyBonusBoostType", - [ - "AttackRoll", - "AttackTarget", - "SavingThrow", - "AllSavingThrows", - "Ability", - "AllAbilities", - "Skill", - "AllSkills", - "SourceDialogue", - "WeaponActionDC" - ]); - - AddEnumeration("ResourceReplenishType", - [ - "Never", - "Default", - "Combat", - "Rest", - "ShortRest", - "FullRest", - "ExhaustedRest" - ]); - - AddEnumeration("AttackType", - [ - "DirectHit", - "MeleeWeaponAttack", - "RangedWeaponAttack", - "MeleeOffHandWeaponAttack", - "RangedOffHandWeaponAttack", - "MeleeSpellAttack", - "RangedSpellAttack", - "MeleeUnarmedAttack", - "RangedUnarmedAttack" - ]); - - AddEnumeration("DealDamageWeaponDamageType", - [ - "MainWeaponDamageType", - "OffhandWeaponDamageType", - "MainMeleeWeaponDamageType", - "OffhandMeleeWeaponDamageType", - "MainRangedWeaponDamageType", - "OffhandRangedWeaponDamageType", - "SourceWeaponDamageType", - "ThrownWeaponDamageType", - ]); - - AddEnumeration("EngineStatusType", - [ - "DYING", - "HEAL", - "KNOCKED_DOWN", - "TELEPORT_FALLING", - "BOOST", - "REACTION", - "STORY_FROZEN", - "SNEAKING", - "UNLOCK", - "FEAR", - "SMELLY", - "INVISIBLE", - "ROTATE", - "MATERIAL", - "CLIMBING", - "INCAPACITATED", - "INSURFACE", - "POLYMORPHED", - "EFFECT", - "DEACTIVATED", - "DOWNED", - ]); - - - // Add functors - AddFunctor("ApplyStatus", 1, [ - "StatusId", "StatusId", - "Chance", "Int", - "Duration", "Lua", - "StatusSpecificParam1", "String", - "StatusSpecificParam2", "Int", - "StatusSpecificParam3", "Int", - "StatsConditions", "Conditions", - "RequiresConcentration", "Boolean" - ]); - AddFunctor("SurfaceChange", 1, [ - "SurfaceChange", "Surface Change", - "Chance", "Float", - "Arg3", "Float", - "Arg4", "Float", - "Arg5", "Float" - ]); - AddFunctor("Resurrect", 0, [ - "Chance", "Float", - "HealthPercentage", "Float", - "Type", "ResurrectType" - ]); - AddFunctor("Sabotage", 0, [ - "Amount", "Int" - ]); - AddFunctor("Summon", 1, [ - "Template", "Guid", // Root template GUID - "Duration", "SummonDurationOrInt", - "AIHelper", "SpellId", - "Arg4", "Boolean", - "StackId", "String", - "StatusToApply1", "StatusId", - "StatusToApply2", "StatusId", - "StatusToApply3", "StatusId", - "StatusToApply4", "StatusId", - "Arg10", "Boolean", - ]); - AddFunctor("Force", 1, [ - "Distance", "Lua", - "Origin", "ForceFunctorOrigin", - "Aggression", "ForceFunctorAggression", - "Arg4", "Boolean", - "Arg5", "Boolean", - ]); - AddFunctor("Douse", 0, [ - "Arg1", "Float", - "Arg2", "Float" - ]); - AddFunctor("SwapPlaces", 0, [ - "Animation", "String", 
- "Arg2", "Boolean", - "Arg3", "Boolean" - ]); - AddFunctor("Pickup", 0, [ - "Arg1", "String" - ]); - AddFunctor("CreateSurface", 3, [ - "Radius", "Float", - "Duration", "Float", - "SurfaceType", "Surface Type", - "IsControlledByConcentration", "Boolean", - "Arg5", "Float", - "Arg6", "Boolean" - ]); - AddFunctor("CreateConeSurface", 3, [ - "Radius", "Float", - "Duration", "Float", - "SurfaceType", "Surface Type", - "IsControlledByConcentration", "Boolean", - "Arg5", "Float", - "Arg6", "Boolean" - ]); - AddFunctor("RemoveStatus", 1, [ - "StatusId", "StatusIdOrGroup" - ]); - AddFunctor("DealDamage", 1, [ - "Damage", "Lua", - "DamageType", "DamageTypeOrDealDamageWeaponDamageType", - "Magical", "Magical", - "Nonlethal", "Nonlethal", - "CoinMultiplier", "Int", - "Tooltip", "Guid", - "Arg7", "Boolean", - "Arg8", "Boolean", - "Arg9", "Boolean", - "Arg10", "Boolean", - ]); - AddFunctor("ExecuteWeaponFunctors", 0, [ - "WeaponType", "ExecuteWeaponFunctorsType" - ]); - AddFunctor("RegainHitPoints", 1, [ - "HitPoints", "Lua", - "Type", "ResurrectType" - ]); - AddFunctor("TeleportSource", 0, [ - "Arg1", "Boolean", - "Arg2", "Boolean", - ]); - AddFunctor("SetStatusDuration", 2, [ - "StatusId", "StatusId", - "Duration", "Float", - "ChangeType", "SetStatusDurationType", - ]); - AddFunctor("UseSpell", 1, [ - "SpellId", "SpellId", - "IgnoreHasSpell", "Boolean", - "IgnoreChecks", "Boolean", - "Arg4", "Boolean", - "SpellCastGuid", "Guid", - ]); - AddFunctor("UseActionResource", 1, [ - "ActionResource", "String", // Action resource name - "Amount", "String", // Float or percentage - "Level", "Int", - "Arg4", "Boolean" - ]); - AddFunctor("UseAttack", 0, [ - "IgnoreChecks", "Boolean" - ]); - AddFunctor("CreateExplosion", 0, [ - "SpellId", "SpellId" - ]); - AddFunctor("BreakConcentration", 0, []); - AddFunctor("ApplyEquipmentStatus", 2, [ - "ItemSlot", "StatItemSlot", - "StatusId", "StatusId", - "Chance", "Int", - "Duration", "Lua", - "StatusSpecificParam1", "String", - "StatusSpecificParam2", "Int", - "StatusSpecificParam3", "Int", - "StatsConditions", "Conditions", - "RequiresConcentration", "Boolean" - ]); - AddFunctor("RestoreResource", 2, [ - "ActionResource", "String", // Action resource name - "Amount", "Lua", // or percentage? - "Level", "Int" - ]); - AddFunctor("Spawn", 1, [ - "TemplateId", "Guid", // Root template Guid - "AiHelper", "String", // Should be SpellId, but seemingly defunct? - "StatusToApply1", "StatusId", - "StatusToApply2", "StatusId", - "StatusToApply3", "StatusId", - "StatusToApply4", "StatusId", - "Arg7", "Boolean" - ]); - AddFunctor("Stabilize", 0, []); - AddFunctor("Unlock", 0, []); - AddFunctor("ResetCombatTurn", 0, []); - AddFunctor("RemoveAuraByChildStatus", 1, [ - "StatusId", "StatusId" - ]); - AddFunctor("SummonInInventory", 1, [ - "TemplateId", "Guid", // Root template Guid - "Duration", "SummonDurationOrInt", - "Arg3", "Int", - "Arg4", "Boolean", - "Arg5", "Boolean", - "Arg6", "Boolean", - "Arg7", "Boolean", - "Arg8", "String", - "Arg9", "String", - "Arg10", "String", - "Arg11", "String", // etc. - ]); - AddFunctor("SpawnInInventory", 1, [ - "TemplateId", "Guid", // Root template Guid - "Arg2", "Int", - "Arg3", "Boolean", - "Arg4", "Boolean", - "Arg5", "Boolean", - "Arg6", "String", - "Arg7", "String", - "Arg8", "String", // etc. 
- ]); - AddFunctor("RemoveUniqueStatus", 1, [ - "StatusId", "StatusId" - ]); - AddFunctor("DisarmWeapon", 0, []); - AddFunctor("DisarmAndStealWeapon", 0, []); - AddFunctor("SwitchDeathType", 1, [ - "DeathType", "Death Type" - ]); - AddFunctor("TriggerRandomCast", 2, [ - "Arg1", "Int", - "Arg2", "Float", - "Arg3", "String", // RandomCastOutcomesID resource - "Arg4", "String", // RandomCastOutcomesID resource - "Arg5", "String", // RandomCastOutcomesID resource - "Arg6", "String", // RandomCastOutcomesID resource - ]); - AddFunctor("GainTemporaryHitPoints", 1, [ - "Amount", "Lua" - ]); - AddFunctor("FireProjectile", 1, [ - "Arg1", "String" - ]); - AddFunctor("ShortRest", 0, []); - AddFunctor("CreateZone", 0, [ - "Shape", "ZoneShape", - "Arg2", "Float", - "Duration", "Float", - "Arg4", "String", - "Arg5", "Boolean", - ]); - AddFunctor("DoTeleport", 0, [ - "Arg1", "Float" - ]); - AddFunctor("RegainTemporaryHitPoints", 1, [ - "Amount", "Lua" - ]); - AddFunctor("RemoveStatusByLevel", 1, [ - "StatusId", "StatusIdOrGroup", - "Arg2", "Int", - "Arg3", "Ability" - ]); - AddFunctor("SurfaceClearLayer", 0, [ - "Layer1", "SurfaceLayer", - "Layer2", "SurfaceLayer", - ]); - AddFunctor("Unsummon", 0, []); - AddFunctor("CreateWall", 0, []); - AddFunctor("Counterspell", 0, []); - AddFunctor("AdjustRoll", 1, [ - "Amount", "Lua", - "Type", "RollAdjustmentType", - "DamageType", "Damage Type", - ]); - AddFunctor("SpawnExtraProjectiles", 0, [ - "Arg1", "String", // ProjectileTypeId - ]); - AddFunctor("Kill", 0, []); - AddFunctor("TutorialEvent", 0, [ - "Event", "Guid", - ]); - AddFunctor("Drop", 0, [ - "Arg1", "String", - ]); - AddFunctor("ResetCooldowns", 1, [ - "Type", "SpellCooldownType", - ]); - AddFunctor("SetRoll", 1, [ - "Roll", "Int", - "DistributionOrDamageType", "RollAdjustmentTypeOrDamageType" - ]); - AddFunctor("SetDamageResistance", 1, [ - "DamageType", "Damage Type", - ]); - AddFunctor("SetReroll", 0, [ - "Roll", "Int", - "Arg2", "Boolean" - ]); - AddFunctor("SetAdvantage", 0, []); - AddFunctor("SetDisadvantage", 0, []); - AddFunctor("MaximizeRoll", 1, [ - "DamageType", "Damage Type" - ]); - AddFunctor("CameraWait", 0, [ - "Arg1", "Float" - ]); - - - - AddDescriptionParams("DealDamage", 1, [ - "Damage", "Lua", - "DamageType", "DamageTypeOrDealDamageWeaponDamageType", - "Magical", "Magical", - "Nonlethal", "Nonlethal", - "Arg5", "Int", - "Tooltip", "Guid", - ]); - AddDescriptionParams("RegainHitPoints", 1, [ - "HitPoints", "Lua", - "Tooltip", "Guid", - ]); - AddDescriptionParams("Distance", 1, [ - "Distance", "Float" - ]); - AddDescriptionParams("GainTemporaryHitPoints", 1, [ - "Amount", "Lua" - ]); - AddDescriptionParams("LevelMapValue", 1, [ - "LevelMap", "String" - ]); - AddDescriptionParams("ApplyStatus", 1, [ - "StatusId", "StatusId", - "Chance", "Int", - "Duration", "Lua", - "StatusSpecificParam1", "String", - "StatusSpecificParam2", "Int", - "StatusSpecificParam3", "Int", - "StatsConditions", "Conditions", - "RequiresConcentration", "Boolean" - ]); - - - - AddBoost("AC", 1, [ + // Add builtins + var itemColor = new StatEntryType("ItemColor", "ItemColorName", null); + Types.Add(itemColor.Name, itemColor); + AddField(itemColor, "ItemColorName", "FixedString"); + AddField(itemColor, "Primary Color", "FixedString"); + AddField(itemColor, "Secondary Color", "FixedString"); + AddField(itemColor, "Tertiary Color", "FixedString"); + + var itemProgressionName = new StatEntryType("ItemProgressionNames", "Name", null); + Types.Add(itemProgressionName.Name, itemProgressionName); + 
AddField(itemProgressionName, "Name", "FixedString"); + AddField(itemProgressionName, "Names", "Passthrough"); + + var itemProgressionVisual = new StatEntryType("ItemProgressionVisuals", "Name", null); + Types.Add(itemProgressionVisual.Name, itemProgressionVisual); + AddField(itemProgressionVisual, "Name", "FixedString"); + // FIXME + AddField(itemProgressionVisual, "LevelGroups", "Passthrough"); + AddField(itemProgressionVisual, "NameGroups", "Passthrough"); + AddField(itemProgressionVisual, "RootGroups", "Passthrough"); + + var dataType = new StatEntryType("Data", "Key", null); + Types.Add(dataType.Name, dataType); + AddField(dataType, "Key", "FixedString"); + AddField(dataType, "Value", "FixedString"); + + AddEnumeration("ResurrectType", + [ + "Living", + "Guaranteed", + "Construct", + "Undead" + ]); + + AddEnumeration("SetStatusDurationType", + [ + "SetMinimum", + "ForceSet", + "Add", + "Multiply" + ]); + + AddEnumeration("ExecuteWeaponFunctorsType", + [ + "MainHand", + "OffHand", + "BothHands" + ]); + + AddEnumeration("SpellCooldownType", + [ + "Default", + "OncePerTurn", + "OncePerCombat", + "UntilRest", + "OncePerTurnNoRealtime", + "UntilShortRest", + "UntilPerRestPerItem", + "OncePerShortRestPerItem" + ]); + + AddEnumeration("SummonDuration", + [ + "UntilLongRest", + "Permanent" + ]); + + AddEnumeration("ForceFunctorOrigin", + [ + "OriginToEntity", + "OriginToTarget", + "TargetToEntity" + ]); + + AddEnumeration("ForceFunctorAggression", + [ + "Aggressive", + "Friendly", + "Neutral" + ]); + + AddEnumeration("StatItemSlot", + [ + "Helmet", + "Breast", + "Cloak", + "MeleeMainHand", + "MeleeOffHand", + "RangedMainHand", + "RangedOffHand", + "Ring", + "Underwear", + "Boots", + "Gloves", + "Amulet", + "Ring2", + "Wings", + "Horns", + "Overhead", + "MusicalInstrument", + "VanityBody", + "VanityBoots", + "MainHand", + "OffHand" + ]); + + AddEnumeration("Magical", + [ + "Magical", + "Nonmagical" + ]); + + AddEnumeration("Nonlethal", + [ + "Lethal", + "Nonlethal" + ]); + + AddEnumeration("AllEnum", + [ + "All" + ]); + + AddEnumeration("ZoneShape", + [ + "Cone", + "Square", + ]); + + AddEnumeration("SurfaceLayer", + [ + "Ground", + "Cloud", + ]); + + AddEnumeration("RollAdjustmentType", + [ + "All", + "Distribute", + ]); + + AddEnumeration("StatsRollType", + [ + "Attack", + "MeleeWeaponAttack", + "RangedWeaponAttack", + "MeleeSpellAttack", + "RangedSpellAttack", + "MeleeUnarmedAttack", + "RangedUnarmedAttack", + "SkillCheck", + "SavingThrow", + "RawAbility", + "Damage", + "MeleeOffHandWeaponAttack", + "RangedOffHandWeaponAttack", + "DeathSavingThrow", + "MeleeWeaponDamage", + "RangedWeaponDamage", + "MeleeSpellDamage", + "RangedSpellDamage", + "MeleeUnarmedDamage", + "RangedUnarmedDamage", + ]); + + AddEnumeration("AdvantageType", + [ + "AttackRoll", + "AttackTarget", + "SavingThrow", + "AllSavingThrows", + "Ability", + "AllAbilities", + "Skill", + "AllSkills", + "SourceDialogue", + "DeathSavingThrow", + "Concentration", + ]); + + AddEnumeration("SkillType", + [ + "Deception", + "Intimidation", + "Performance", + "Persuasion", + "Acrobatics", + "SleightOfHand", + "Stealth", + "Arcana", + "History", + "Investigation", + "Nature", + "Religion", + "Athletics", + "AnimalHandling", + "Insight", + "Medicine", + "Perception", + "Survival", + ]); + + AddEnumeration("CriticalHitType", + [ + "AttackTarget", + "AttackRoll" + ]); + + AddEnumeration("Result", + [ + "Success", + "Failure" + ]); + + AddEnumeration("CriticalHitResult", + [ + "Success", + "Failure" + ]); + + AddEnumeration("CriticalHitWhen", 
+ [ + "Never", + "Always", + "ForcedAlways" + ]); + + AddEnumeration("MovementSpeedType", + [ + "Stroll", + "Walk", + "Run", + "Sprint", + ]); + + AddEnumeration("DamageReductionType", + [ + "Half", + "Flat", + "Threshold" + ]); + + AddEnumeration("AttackRollAbility", + [ + "SpellCastingAbility", + "UnarmedMeleeAbility", + "AttackAbility" + ]); + + AddEnumeration("HealingDirection", + [ + "Incoming", + "Outgoing" + ]); + + AddEnumeration("ResistanceBoostFlags", + [ + "None", + "Resistant", + "Immune", + "Vulnerable", + "BelowDamageThreshold", + "ResistantToMagical", + "ImmuneToMagical", + "VulnerableToMagical", + "ResistantToNonMagical", + "ImmuneToNonMagical", + "VulnerableToNonMagical", + ]); + + AddEnumeration("UnlockSpellType", + [ + "Singular", + "AddChildren", + "MostPowerful" + ]); + + AddEnumeration("ProficiencyBonusBoostType", + [ + "AttackRoll", + "AttackTarget", + "SavingThrow", + "AllSavingThrows", + "Ability", + "AllAbilities", + "Skill", + "AllSkills", + "SourceDialogue", + "WeaponActionDC" + ]); + + AddEnumeration("ResourceReplenishType", + [ + "Never", + "Default", + "Combat", + "Rest", + "ShortRest", + "FullRest", + "ExhaustedRest" + ]); + + AddEnumeration("AttackType", + [ + "DirectHit", + "MeleeWeaponAttack", + "RangedWeaponAttack", + "MeleeOffHandWeaponAttack", + "RangedOffHandWeaponAttack", + "MeleeSpellAttack", + "RangedSpellAttack", + "MeleeUnarmedAttack", + "RangedUnarmedAttack" + ]); + + AddEnumeration("DealDamageWeaponDamageType", + [ + "MainWeaponDamageType", + "OffhandWeaponDamageType", + "MainMeleeWeaponDamageType", + "OffhandMeleeWeaponDamageType", + "MainRangedWeaponDamageType", + "OffhandRangedWeaponDamageType", + "SourceWeaponDamageType", + "ThrownWeaponDamageType", + ]); + + AddEnumeration("EngineStatusType", + [ + "DYING", + "HEAL", + "KNOCKED_DOWN", + "TELEPORT_FALLING", + "BOOST", + "REACTION", + "STORY_FROZEN", + "SNEAKING", + "UNLOCK", + "FEAR", + "SMELLY", + "INVISIBLE", + "ROTATE", + "MATERIAL", + "CLIMBING", + "INCAPACITATED", + "INSURFACE", + "POLYMORPHED", + "EFFECT", + "DEACTIVATED", + "DOWNED", + ]); + + + // Add functors + AddFunctor("ApplyStatus", 1, [ + "StatusId", "StatusId", + "Chance", "Int", + "Duration", "Lua", + "StatusSpecificParam1", "String", + "StatusSpecificParam2", "Int", + "StatusSpecificParam3", "Int", + "StatsConditions", "Conditions", + "RequiresConcentration", "Boolean" + ]); + AddFunctor("SurfaceChange", 1, [ + "SurfaceChange", "Surface Change", + "Chance", "Float", + "Arg3", "Float", + "Arg4", "Float", + "Arg5", "Float" + ]); + AddFunctor("Resurrect", 0, [ + "Chance", "Float", + "HealthPercentage", "Float", + "Type", "ResurrectType" + ]); + AddFunctor("Sabotage", 0, [ + "Amount", "Int" + ]); + AddFunctor("Summon", 1, [ + "Template", "Guid", // Root template GUID + "Duration", "SummonDurationOrInt", + "AIHelper", "SpellId", + "Arg4", "Boolean", + "StackId", "String", + "StatusToApply1", "StatusId", + "StatusToApply2", "StatusId", + "StatusToApply3", "StatusId", + "StatusToApply4", "StatusId", + "Arg10", "Boolean", + ]); + AddFunctor("Force", 1, [ + "Distance", "Lua", + "Origin", "ForceFunctorOrigin", + "Aggression", "ForceFunctorAggression", + "Arg4", "Boolean", + "Arg5", "Boolean", + ]); + AddFunctor("Douse", 0, [ + "Arg1", "Float", + "Arg2", "Float" + ]); + AddFunctor("SwapPlaces", 0, [ + "Animation", "String", + "Arg2", "Boolean", + "Arg3", "Boolean" + ]); + AddFunctor("Pickup", 0, [ + "Arg1", "String" + ]); + AddFunctor("CreateSurface", 3, [ + "Radius", "Float", + "Duration", "Float", + "SurfaceType", "Surface Type", + 
"IsControlledByConcentration", "Boolean", + "Arg5", "Float", + "Arg6", "Boolean" + ]); + AddFunctor("CreateConeSurface", 3, [ + "Radius", "Float", + "Duration", "Float", + "SurfaceType", "Surface Type", + "IsControlledByConcentration", "Boolean", + "Arg5", "Float", + "Arg6", "Boolean" + ]); + AddFunctor("RemoveStatus", 1, [ + "StatusId", "StatusIdOrGroup" + ]); + AddFunctor("DealDamage", 1, [ + "Damage", "Lua", + "DamageType", "DamageTypeOrDealDamageWeaponDamageType", + "Magical", "Magical", + "Nonlethal", "Nonlethal", + "CoinMultiplier", "Int", + "Tooltip", "Guid", + "Arg7", "Boolean", + "Arg8", "Boolean", + "Arg9", "Boolean", + "Arg10", "Boolean", + ]); + AddFunctor("ExecuteWeaponFunctors", 0, [ + "WeaponType", "ExecuteWeaponFunctorsType" + ]); + AddFunctor("RegainHitPoints", 1, [ + "HitPoints", "Lua", + "Type", "ResurrectType" + ]); + AddFunctor("TeleportSource", 0, [ + "Arg1", "Boolean", + "Arg2", "Boolean", + ]); + AddFunctor("SetStatusDuration", 2, [ + "StatusId", "StatusId", + "Duration", "Float", + "ChangeType", "SetStatusDurationType", + ]); + AddFunctor("UseSpell", 1, [ + "SpellId", "SpellId", + "IgnoreHasSpell", "Boolean", + "IgnoreChecks", "Boolean", + "Arg4", "Boolean", + "SpellCastGuid", "Guid", + ]); + AddFunctor("UseActionResource", 1, [ + "ActionResource", "String", // Action resource name + "Amount", "String", // Float or percentage + "Level", "Int", + "Arg4", "Boolean" + ]); + AddFunctor("UseAttack", 0, [ + "IgnoreChecks", "Boolean" + ]); + AddFunctor("CreateExplosion", 0, [ + "SpellId", "SpellId" + ]); + AddFunctor("BreakConcentration", 0, []); + AddFunctor("ApplyEquipmentStatus", 2, [ + "ItemSlot", "StatItemSlot", + "StatusId", "StatusId", + "Chance", "Int", + "Duration", "Lua", + "StatusSpecificParam1", "String", + "StatusSpecificParam2", "Int", + "StatusSpecificParam3", "Int", + "StatsConditions", "Conditions", + "RequiresConcentration", "Boolean" + ]); + AddFunctor("RestoreResource", 2, [ + "ActionResource", "String", // Action resource name + "Amount", "Lua", // or percentage? + "Level", "Int" + ]); + AddFunctor("Spawn", 1, [ + "TemplateId", "Guid", // Root template Guid + "AiHelper", "String", // Should be SpellId, but seemingly defunct? + "StatusToApply1", "StatusId", + "StatusToApply2", "StatusId", + "StatusToApply3", "StatusId", + "StatusToApply4", "StatusId", + "Arg7", "Boolean" + ]); + AddFunctor("Stabilize", 0, []); + AddFunctor("Unlock", 0, []); + AddFunctor("ResetCombatTurn", 0, []); + AddFunctor("RemoveAuraByChildStatus", 1, [ + "StatusId", "StatusId" + ]); + AddFunctor("SummonInInventory", 1, [ + "TemplateId", "Guid", // Root template Guid + "Duration", "SummonDurationOrInt", + "Arg3", "Int", + "Arg4", "Boolean", + "Arg5", "Boolean", + "Arg6", "Boolean", + "Arg7", "Boolean", + "Arg8", "String", + "Arg9", "String", + "Arg10", "String", + "Arg11", "String", // etc. + ]); + AddFunctor("SpawnInInventory", 1, [ + "TemplateId", "Guid", // Root template Guid + "Arg2", "Int", + "Arg3", "Boolean", + "Arg4", "Boolean", + "Arg5", "Boolean", + "Arg6", "String", + "Arg7", "String", + "Arg8", "String", // etc. 
+ ]); + AddFunctor("RemoveUniqueStatus", 1, [ + "StatusId", "StatusId" + ]); + AddFunctor("DisarmWeapon", 0, []); + AddFunctor("DisarmAndStealWeapon", 0, []); + AddFunctor("SwitchDeathType", 1, [ + "DeathType", "Death Type" + ]); + AddFunctor("TriggerRandomCast", 2, [ + "Arg1", "Int", + "Arg2", "Float", + "Arg3", "String", // RandomCastOutcomesID resource + "Arg4", "String", // RandomCastOutcomesID resource + "Arg5", "String", // RandomCastOutcomesID resource + "Arg6", "String", // RandomCastOutcomesID resource + ]); + AddFunctor("GainTemporaryHitPoints", 1, [ + "Amount", "Lua" + ]); + AddFunctor("FireProjectile", 1, [ + "Arg1", "String" + ]); + AddFunctor("ShortRest", 0, []); + AddFunctor("CreateZone", 0, [ + "Shape", "ZoneShape", + "Arg2", "Float", + "Duration", "Float", + "Arg4", "String", + "Arg5", "Boolean", + ]); + AddFunctor("DoTeleport", 0, [ + "Arg1", "Float" + ]); + AddFunctor("RegainTemporaryHitPoints", 1, [ + "Amount", "Lua" + ]); + AddFunctor("RemoveStatusByLevel", 1, [ + "StatusId", "StatusIdOrGroup", + "Arg2", "Int", + "Arg3", "Ability" + ]); + AddFunctor("SurfaceClearLayer", 0, [ + "Layer1", "SurfaceLayer", + "Layer2", "SurfaceLayer", + ]); + AddFunctor("Unsummon", 0, []); + AddFunctor("CreateWall", 0, []); + AddFunctor("Counterspell", 0, []); + AddFunctor("AdjustRoll", 1, [ + "Amount", "Lua", + "Type", "RollAdjustmentType", + "DamageType", "Damage Type", + ]); + AddFunctor("SpawnExtraProjectiles", 0, [ + "Arg1", "String", // ProjectileTypeId + ]); + AddFunctor("Kill", 0, []); + AddFunctor("TutorialEvent", 0, [ + "Event", "Guid", + ]); + AddFunctor("Drop", 0, [ + "Arg1", "String", + ]); + AddFunctor("ResetCooldowns", 1, [ + "Type", "SpellCooldownType", + ]); + AddFunctor("SetRoll", 1, [ + "Roll", "Int", + "DistributionOrDamageType", "RollAdjustmentTypeOrDamageType" + ]); + AddFunctor("SetDamageResistance", 1, [ + "DamageType", "Damage Type", + ]); + AddFunctor("SetReroll", 0, [ + "Roll", "Int", + "Arg2", "Boolean" + ]); + AddFunctor("SetAdvantage", 0, []); + AddFunctor("SetDisadvantage", 0, []); + AddFunctor("MaximizeRoll", 1, [ + "DamageType", "Damage Type" + ]); + AddFunctor("CameraWait", 0, [ + "Arg1", "Float" + ]); + + + + AddDescriptionParams("DealDamage", 1, [ + "Damage", "Lua", + "DamageType", "DamageTypeOrDealDamageWeaponDamageType", + "Magical", "Magical", + "Nonlethal", "Nonlethal", + "Arg5", "Int", + "Tooltip", "Guid", + ]); + AddDescriptionParams("RegainHitPoints", 1, [ + "HitPoints", "Lua", + "Tooltip", "Guid", + ]); + AddDescriptionParams("Distance", 1, [ + "Distance", "Float" + ]); + AddDescriptionParams("GainTemporaryHitPoints", 1, [ + "Amount", "Lua" + ]); + AddDescriptionParams("LevelMapValue", 1, [ + "LevelMap", "String" + ]); + AddDescriptionParams("ApplyStatus", 1, [ + "StatusId", "StatusId", + "Chance", "Int", + "Duration", "Lua", + "StatusSpecificParam1", "String", + "StatusSpecificParam2", "Int", + "StatusSpecificParam3", "Int", + "StatsConditions", "Conditions", + "RequiresConcentration", "Boolean" + ]); + + + + AddBoost("AC", 1, [ "AC", "Int" - ]); - AddBoost("Ability", 2, [ + ]); + AddBoost("Ability", 2, [ "Ability", "Ability", "Amount", "Int", "Arg3", "Int", - ]); - AddBoost("RollBonus", 2, [ + ]); + AddBoost("RollBonus", 2, [ "RollType", "StatsRollType", "Bonus", "Lua", "Arg3", "String", - ]); - AddBoost("Advantage", 1, [ + ]); + AddBoost("Advantage", 1, [ "Type", "AdvantageType", "Arg2", "String", // Depends on type "Tag1", "String", // TagManager resource "Tag2", "String", // TagManager resource "Tag3", "String", // TagManager resource - ]); - 
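// Illustrative sketch, not part of this patch: the integer passed as the second argument to
// AddFunctor/AddDescriptionParams/AddBoost above is presumably the number of required
// arguments, with the remaining declared parameters optional. A validator could then check a
// call site roughly like this (the helper and its messages are hypothetical).
public static class FunctorArityCheck
{
    public static string Validate(string name, int requiredArgs, int declaredParams, int suppliedArgs)
    {
        if (suppliedArgs < requiredArgs)
            return $"{name}: expected at least {requiredArgs} argument(s), got {suppliedArgs}";
        if (suppliedArgs > declaredParams)
            return $"{name}: accepts at most {declaredParams} argument(s), got {suppliedArgs}";
        return null; // no error
    }
}
// e.g. Validate("ApplyStatus", requiredArgs: 1, declaredParams: 8, suppliedArgs: 0) reports an error.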
AddBoost("Disadvantage", 1, [ + ]); + AddBoost("Disadvantage", 1, [ "Type", "AdvantageType", "Arg2", "String", // Depends on type "Tag1", "String", // TagManager resource "Tag2", "String", // TagManager resource "Tag3", "String", // TagManager resource - ]); - AddBoost("ActionResource", 2, [ + ]); + AddBoost("ActionResource", 2, [ "Resource", "String", // Action resource name "Amount", "Float", "Level", "Int", - "DieType", "DieType", - ]); - AddBoost("CriticalHit", 3, [ + "DieType", "DieType", + ]); + AddBoost("CriticalHit", 3, [ "Type", "CriticalHitType", "Result", "CriticalHitResult", "When", "CriticalHitWhen", "Arg4", "Float", - ]); - AddBoost("AbilityFailedSavingThrow", 1, [ + ]); + AddBoost("AbilityFailedSavingThrow", 1, [ "Ability", "Ability" - ]); - AddBoost("Resistance", 2, [ - "DamageType", "AllOrDamageType", - "ResistanceBoostFlags", "ResistanceBoostFlags" - ]); - AddBoost("WeaponDamageResistance", 1, [ - "DamageType1", "Damage Type", - "DamageType2", "Damage Type", - "DamageType3", "Damage Type", - ]); - AddBoost("ProficiencyBonusOverride", 1, [ + ]); + AddBoost("Resistance", 2, [ + "DamageType", "AllOrDamageType", + "ResistanceBoostFlags", "ResistanceBoostFlags" + ]); + AddBoost("WeaponDamageResistance", 1, [ + "DamageType1", "Damage Type", + "DamageType2", "Damage Type", + "DamageType3", "Damage Type", + ]); + AddBoost("ProficiencyBonusOverride", 1, [ "Bonus", "Lua" - ]); - AddBoost("ActionResourceOverride", 2, [ - "Resource", "String", // Action resource name + ]); + AddBoost("ActionResourceOverride", 2, [ + "Resource", "String", // Action resource name "Amount", "Float", - "Level", "Int", - "DieType", "DieType", - ]); - AddBoost("AddProficiencyToAC", 0, []); - AddBoost("JumpMaxDistanceMultiplier", 1, [ + "Level", "Int", + "DieType", "DieType", + ]); + AddBoost("AddProficiencyToAC", 0, []); + AddBoost("JumpMaxDistanceMultiplier", 1, [ "Multiplier", "Float" - ]); - AddBoost("AddProficiencyToDamage", 0, []); - AddBoost("ActionResourceConsumeMultiplier", 3, [ - "Resource", "String", // Action resource name + ]); + AddBoost("AddProficiencyToDamage", 0, []); + AddBoost("ActionResourceConsumeMultiplier", 3, [ + "Resource", "String", // Action resource name "Multiplier", "Float", - "Level", "Int", - ]); - AddBoost("BlockVerbalComponent", 0, []); - AddBoost("BlockSomaticComponent", 0, []); - AddBoost("HalveWeaponDamage", 1, [ + "Level", "Int", + ]); + AddBoost("BlockVerbalComponent", 0, []); + AddBoost("BlockSomaticComponent", 0, []); + AddBoost("HalveWeaponDamage", 1, [ "Ability", "Ability" - ]); - AddBoost("UnlockSpell", 1, [ + ]); + AddBoost("UnlockSpell", 1, [ "SpellId", "SpellId", - "Type", "UnlockSpellType", - "SpellGuid", "String", // "None" or GUID or "" - "Cooldown", "SpellCooldownType", - "Ability", "Ability" - ]); - AddBoost("SourceAdvantageOnAttack", 0, [ + "Type", "UnlockSpellType", + "SpellGuid", "String", // "None" or GUID or "" + "Cooldown", "SpellCooldownType", + "Ability", "Ability" + ]); + AddBoost("SourceAdvantageOnAttack", 0, [ "Arg1", "Float" - ]); - AddBoost("ProficiencyBonus", 1, [ + ]); + AddBoost("ProficiencyBonus", 1, [ "Type", "ProficiencyBonusBoostType", - "Arg2", "String" - ]); - AddBoost("BlockSpellCast", 0, [ + "Arg2", "String" + ]); + AddBoost("BlockSpellCast", 0, [ "Arg1", "Float" - ]); - AddBoost("Proficiency", 1, [ + ]); + AddBoost("Proficiency", 1, [ "Arg1", "ProficiencyGroupFlags", "Arg2", "ProficiencyGroupFlags", "Arg3", "ProficiencyGroupFlags", - ]); - AddBoost("SourceAllyAdvantageOnAttack", 0, []); - AddBoost("IncreaseMaxHP", 1, [ + ]); + 
AddBoost("SourceAllyAdvantageOnAttack", 0, []); + AddBoost("IncreaseMaxHP", 1, [ "Amount", "String" // Lua or % - ]); - AddBoost("ActionResourceBlock", 1, [ - "Resource", "String", // Action resource name - "Level", "Int", - ]); - AddBoost("StatusImmunity", 1, [ + ]); + AddBoost("ActionResourceBlock", 1, [ + "Resource", "String", // Action resource name + "Level", "Int", + ]); + AddBoost("StatusImmunity", 1, [ "StatusId", "StatusIdOrGroup", "Tag1", "String", // Tag resource name "Tag2", "String", // Tag resource name "Tag3", "String", // Tag resource name "Tag4", "String", // Tag resource name "Tag5", "String", // Tag resource name - ]); - AddBoost("UseBoosts", 1, [ + ]); + AddBoost("UseBoosts", 1, [ "Arg1", "StatsFunctors" - ]); - AddBoost("CannotHarmCauseEntity", 1, [ + ]); + AddBoost("CannotHarmCauseEntity", 1, [ "Arg1", "String" - ]); - AddBoost("TemporaryHP", 1, [ + ]); + AddBoost("TemporaryHP", 1, [ "Amount", "Lua" - ]); - AddBoost("Weight", 1, [ + ]); + AddBoost("Weight", 1, [ "Weight", "Float" - ]); - AddBoost("WeightCategory", 1, [ + ]); + AddBoost("WeightCategory", 1, [ "Category", "Int" - ]); - AddBoost("FactionOverride", 1, [ + ]); + AddBoost("FactionOverride", 1, [ "Faction", "String" // Faction resource GUID or "Source" - ]); - AddBoost("ActionResourceMultiplier", 2, [ - "Resource", "String", // Action resource name + ]); + AddBoost("ActionResourceMultiplier", 2, [ + "Resource", "String", // Action resource name "Multiplier", "Int", - "Level", "Int", - ]); - AddBoost("BlockRegainHP", 0, [ + "Level", "Int", + ]); + AddBoost("BlockRegainHP", 0, [ "Type", "ResurrectTypes" - ]); - AddBoost("Initiative", 1, [ + ]); + AddBoost("Initiative", 1, [ "Initiative", "Int" - ]); - AddBoost("DarkvisionRange", 1, [ + ]); + AddBoost("DarkvisionRange", 1, [ "Range", "Float" - ]); - AddBoost("DarkvisionRangeMin", 1, [ - "Range", "Float" - ]); - AddBoost("DarkvisionRangeOverride", 1, [ - "Range", "Float" - ]); - AddBoost("Tag", 1, [ + ]); + AddBoost("DarkvisionRangeMin", 1, [ + "Range", "Float" + ]); + AddBoost("DarkvisionRangeOverride", 1, [ + "Range", "Float" + ]); + AddBoost("Tag", 1, [ "Arg1", "String" // Tag resource name - ]); - AddBoost("IgnoreDamageThreshold", 2, [ + ]); + AddBoost("IgnoreDamageThreshold", 2, [ "DamageType", "AllOrDamageType", - "Threshold", "Int" - ]); - AddBoost("Skill", 2, [ + "Threshold", "Int" + ]); + AddBoost("Skill", 2, [ "Skill", "SkillType", - "Amount", "Lua" - ]); - AddBoost("WeaponDamage", 2, [ + "Amount", "Lua" + ]); + AddBoost("WeaponDamage", 2, [ "Amount", "Lua", - "DamageType", "Damage Type", - "Arg3", "Boolean" - ]); - AddBoost("NullifyAbilityScore", 1, [ - "Ability", "Ability" - ]); - AddBoost("IgnoreFallDamage", 0, []); - AddBoost("Reroll", 3, [ + "DamageType", "Damage Type", + "Arg3", "Boolean" + ]); + AddBoost("NullifyAbilityScore", 1, [ + "Ability", "Ability" + ]); + AddBoost("IgnoreFallDamage", 0, []); + AddBoost("Reroll", 3, [ "RollType", "StatsRollType", - "RollBelow", "Int", - "Arg3", "Boolean" - ]); - AddBoost("DownedStatus", 1, [ + "RollBelow", "Int", + "Arg3", "Boolean" + ]); + AddBoost("DownedStatus", 1, [ "StatusId", "StatusId", - "Arg2", "Int" - ]); - AddBoost("Invulnerable", 0, []); - AddBoost("WeaponEnchantment", 1, [ + "Arg2", "Int" + ]); + AddBoost("Invulnerable", 0, []); + AddBoost("WeaponEnchantment", 1, [ "Enchantment", "Int" - ]); - AddBoost("GuaranteedChanceRollOutcome", 1, [ + ]); + AddBoost("GuaranteedChanceRollOutcome", 1, [ "Arg1", "Boolean" - ]); - AddBoost("Attribute", 1, [ + ]); + AddBoost("Attribute", 1, [ "Flags", 
"AttributeFlags" - ]); - AddBoost("IgnoreLeaveAttackRange", 0, []); - AddBoost("GameplayLight", 2, [ + ]); + AddBoost("IgnoreLeaveAttackRange", 0, []); + AddBoost("GameplayLight", 2, [ "Arg1", "Float", "Arg2", "Boolean", "Arg3", "Float", "Arg4", "Boolean" - ]); - AddBoost("DialogueBlock", 0, []); - AddBoost("DualWielding", 1, [ + ]); + AddBoost("DialogueBlock", 0, []); + AddBoost("DualWielding", 1, [ "DW", "Boolean" - ]); - AddBoost("Savant", 1, [ + ]); + AddBoost("Savant", 1, [ "SpellSchool", "SpellSchool" - ]); - AddBoost("MinimumRollResult", 2, [ + ]); + AddBoost("MinimumRollResult", 2, [ "RollType", "StatsRollType", - "MinResult", "Int" - ]); - AddBoost("Lootable", 0, []); - AddBoost("CharacterWeaponDamage", 1, [ + "MinResult", "Int" + ]); + AddBoost("Lootable", 0, []); + AddBoost("CharacterWeaponDamage", 1, [ "Amount", "Lua", - "DamageType", "Damage Type" - ]); - AddBoost("ProjectileDeflect", 0, [ + "DamageType", "Damage Type" + ]); + AddBoost("ProjectileDeflect", 0, [ "Type1", "String", "Type2", "String", - ]); - AddBoost("AbilityOverrideMinimum", 2, [ + ]); + AddBoost("AbilityOverrideMinimum", 2, [ "Ability", "Ability", - "Minimum", "Int" - ]); - AddBoost("ACOverrideFormula", 2, [ + "Minimum", "Int" + ]); + AddBoost("ACOverrideFormula", 2, [ "AC", "Int", - "Arg2", "Boolean", - "Ability1", "Ability", - "Ability2", "Ability", - "Ability3", "Ability", - ]); - AddBoost("FallDamageMultiplier", 1, [ + "Arg2", "Boolean", + "Ability1", "Ability", + "Ability2", "Ability", + "Ability3", "Ability", + ]); + AddBoost("FallDamageMultiplier", 1, [ "Multiplier", "Float" - ]); - AddBoost("ActiveCharacterLight", 1, [ + ]); + AddBoost("ActiveCharacterLight", 1, [ "Light", "String" - ]); - AddBoost("Invisibility", 0, []); - AddBoost("TwoWeaponFighting", 0, []); - AddBoost("WeaponAttackTypeOverride", 1, [ + ]); + AddBoost("Invisibility", 0, []); + AddBoost("TwoWeaponFighting", 0, []); + AddBoost("WeaponAttackTypeOverride", 1, [ "Type", "AttackType" - ]); - AddBoost("WeaponDamageDieOverride", 1, [ + ]); + AddBoost("WeaponDamageDieOverride", 1, [ "DamageDie", "String", // die, eg. 
1d10 - ]); - AddBoost("CarryCapacityMultiplier", 1, [ + ]); + AddBoost("CarryCapacityMultiplier", 1, [ "Multiplier", "Float" - ]); - AddBoost("WeaponProperty", 1, [ + ]); + AddBoost("WeaponProperty", 1, [ "Flags1", "WeaponFlags" - ]); - AddBoost("WeaponAttackRollAbilityOverride", 1, [ + ]); + AddBoost("WeaponAttackRollAbilityOverride", 1, [ "Ability", "AbilityOrAttackRollAbility" - ]); - AddBoost("BlockTravel", 0, []); - AddBoost("BlockGatherAtCamp", 0, []); - AddBoost("BlockAbilityModifierDamageBonus", 0, []); - AddBoost("VoicebarkBlock", 0, []); - AddBoost("HiddenDuringCinematic", 0, []); - AddBoost("SightRangeAdditive", 1, [ + ]); + AddBoost("BlockTravel", 0, []); + AddBoost("BlockGatherAtCamp", 0, []); + AddBoost("BlockAbilityModifierDamageBonus", 0, []); + AddBoost("VoicebarkBlock", 0, []); + AddBoost("HiddenDuringCinematic", 0, []); + AddBoost("SightRangeAdditive", 1, [ "Range", "Float" - ]); - AddBoost("SightRangeMinimum", 1, [ - "Range", "Float" - ]); - AddBoost("SightRangeMaximum", 1, [ - "Range", "Float" - ]); - AddBoost("SightRangeOverride", 1, [ - "Range", "Float" - ]); - AddBoost("CannotBeDisarmed", 0, []); - AddBoost("MovementSpeedLimit", 1, [ + ]); + AddBoost("SightRangeMinimum", 1, [ + "Range", "Float" + ]); + AddBoost("SightRangeMaximum", 1, [ + "Range", "Float" + ]); + AddBoost("SightRangeOverride", 1, [ + "Range", "Float" + ]); + AddBoost("CannotBeDisarmed", 0, []); + AddBoost("MovementSpeedLimit", 1, [ "Type", "MovementSpeedType" - ]); - AddBoost("NonLethal", 0, []); - AddBoost("UnlockSpellVariant", 1, [ + ]); + AddBoost("NonLethal", 0, []); + AddBoost("UnlockSpellVariant", 1, [ "Modification1", "Lua", // TODO - add Modification parser? "Modification2", "Lua", "Modification3", "Lua", @@ -1130,191 +1130,190 @@ public void LoadDefinitions(Stream stream) "Modification13", "Lua", "Modification14", "Lua", "Modification15", "Lua" - ]); - AddBoost("DetectDisturbancesBlock", 1, [ + ]); + AddBoost("DetectDisturbancesBlock", 1, [ "Arg1", "Boolean" - ]); - AddBoost("BlockAbilityModifierFromAC", 1, [ + ]); + AddBoost("BlockAbilityModifierFromAC", 1, [ "Ability", "Ability" - ]); - AddBoost("ScaleMultiplier", 0, [ + ]); + AddBoost("ScaleMultiplier", 0, [ "Multiplier", "Float" - ]); - AddBoost("CriticalDamageOnHit", 0, []); - AddBoost("DamageReduction", 2, [ + ]); + AddBoost("CriticalDamageOnHit", 0, []); + AddBoost("DamageReduction", 2, [ "DamageType", "AllOrDamageType", - "ReductionType", "DamageReductionType", - "Amount", "Lua" - ]); - AddBoost("ReduceCriticalAttackThreshold", 1, [ + "ReductionType", "DamageReductionType", + "Amount", "Lua" + ]); + AddBoost("ReduceCriticalAttackThreshold", 1, [ "Threshold", "Int", - "StatusId", "StatusIdOrGroup" - ]); - AddBoost("PhysicalForceRangeBonus", 1, [ + "StatusId", "StatusIdOrGroup" + ]); + AddBoost("PhysicalForceRangeBonus", 1, [ "Arg1", "String" - ]); - AddBoost("ObjectSize", 1, [ + ]); + AddBoost("ObjectSize", 1, [ "Size", "Int" - ]); - AddBoost("ObjectSizeOverride", 1, [ - "Size", "String" - ]); - AddBoost("ItemReturnToOwner", 0, []); - AddBoost("AiArchetypeOverride", 1, [ + ]); + AddBoost("ObjectSizeOverride", 1, [ + "Size", "String" + ]); + AddBoost("ItemReturnToOwner", 0, []); + AddBoost("AiArchetypeOverride", 1, [ "Archetype", "String", - "Arg2", "Int" - ]); - AddBoost("ExpertiseBonus", 1, [ + "Arg2", "Int" + ]); + AddBoost("ExpertiseBonus", 1, [ "Skill", "SkillType" - ]); - AddBoost("EntityThrowDamage", 1, [ + ]); + AddBoost("EntityThrowDamage", 1, [ "Die", "String", - "DamageType", "Damage Type" - ]); - 
AddBoost("WeaponDamageTypeOverride", 1, [ + "DamageType", "Damage Type" + ]); + AddBoost("WeaponDamageTypeOverride", 1, [ "DamageType", "Damage Type" - ]); - AddBoost("MaximizeHealing", 1, [ + ]); + AddBoost("MaximizeHealing", 1, [ "Direction", "HealingDirection", - "Type", "ResurrectType" - ]); - AddBoost("IgnoreEnterAttackRange", 0, []); - AddBoost("DamageBonus", 1, [ + "Type", "ResurrectType" + ]); + AddBoost("IgnoreEnterAttackRange", 0, []); + AddBoost("DamageBonus", 1, [ "Amount", "Lua", - "DamageType", "Damage Type", - "Arg3", "Boolean" - ]); - AddBoost("Detach", 0, []); - AddBoost("ConsumeItemBlock", 0, []); - AddBoost("AdvanceSpells", 1, [ + "DamageType", "Damage Type", + "Arg3", "Boolean" + ]); + AddBoost("Detach", 0, []); + AddBoost("ConsumeItemBlock", 0, []); + AddBoost("AdvanceSpells", 1, [ "SpellId", "SpellId", - "Arg2", "Int" - ]); - AddBoost("SpellResistance", 1, [ + "Arg2", "Int" + ]); + AddBoost("SpellResistance", 1, [ "Resistance", "ResistanceBoostFlags" - ]); - AddBoost("WeaponAttackRollBonus", 1, [ + ]); + AddBoost("WeaponAttackRollBonus", 1, [ "Amount", "Lua" - ]); - AddBoost("SpellSaveDC", 1, [ + ]); + AddBoost("SpellSaveDC", 1, [ "DC", "Int" - ]); - AddBoost("RedirectDamage", 1, [ + ]); + AddBoost("RedirectDamage", 1, [ "Arg1", "Float", "DamageType", "Damage Type", "DamageType2", "Damage Type", - "Arg4", "Boolean" - ]); - AddBoost("CanSeeThrough", 1, [ + "Arg4", "Boolean" + ]); + AddBoost("CanSeeThrough", 1, [ "CanSeeThrough", "Boolean" - ]); - AddBoost("CanShootThrough", 1, [ - "CanShootThrough", "Boolean" - ]); - AddBoost("CanWalkThrough", 1, [ - "CanWalkThrough", "Boolean" - ]); - AddBoost("MonkWeaponAttackOverride", 0, []); - AddBoost("MonkWeaponDamageDiceOverride", 1, [ + ]); + AddBoost("CanShootThrough", 1, [ + "CanShootThrough", "Boolean" + ]); + AddBoost("CanWalkThrough", 1, [ + "CanWalkThrough", "Boolean" + ]); + AddBoost("MonkWeaponAttackOverride", 0, []); + AddBoost("MonkWeaponDamageDiceOverride", 1, [ "Arg1", "Lua" - ]); - AddBoost("IntrinsicSummonerProficiency", 0, []); - AddBoost("HorizontalFOVOverride", 1, [ + ]); + AddBoost("IntrinsicSummonerProficiency", 0, []); + AddBoost("HorizontalFOVOverride", 1, [ "FOV", "Float" - ]); - AddBoost("CharacterUnarmedDamage", 1, [ + ]); + AddBoost("CharacterUnarmedDamage", 1, [ "Damage", "Lua", - "DamageType", "Damage Type" - ]); - AddBoost("UnarmedMagicalProperty", 0, []); - AddBoost("ActionResourceReplenishTypeOverride", 2, [ - "ActionResource", "String", // Action resource name - "ReplenishType", "ResourceReplenishType" - ]); - AddBoost("AreaDamageEvade", 0, []); - AddBoost("ActionResourcePreventReduction", 1, [ + "DamageType", "Damage Type" + ]); + AddBoost("UnarmedMagicalProperty", 0, []); + AddBoost("ActionResourceReplenishTypeOverride", 2, [ + "ActionResource", "String", // Action resource name + "ReplenishType", "ResourceReplenishType" + ]); + AddBoost("AreaDamageEvade", 0, []); + AddBoost("ActionResourcePreventReduction", 1, [ "ActionResource", "String", // Action resource name - "Level", "Int" - ]); - AddBoost("AttackSpellOverride", 1, [ + "Level", "Int" + ]); + AddBoost("AttackSpellOverride", 1, [ "AttackSpell", "SpellId", "OriginalSpell", "SpellId" - ]); - AddBoost("Lock", 0, [ + ]); + AddBoost("Lock", 0, [ "DC", "Guid" - ]); - AddBoost("NoAOEDamageOnLand", 0, []); - AddBoost("IgnorePointBlankDisadvantage", 1, [ + ]); + AddBoost("NoAOEDamageOnLand", 0, []); + AddBoost("IgnorePointBlankDisadvantage", 1, [ "Flags", "WeaponFlags" - ]); - AddBoost("CriticalHitExtraDice", 1, [ + ]); + 
AddBoost("CriticalHitExtraDice", 1, [ "ExtraDice", "Int", - "AttackType", "AttackType" - ]); - AddBoost("DodgeAttackRoll", 2, [ + "AttackType", "AttackType" + ]); + AddBoost("DodgeAttackRoll", 2, [ "Arg1", "Int", "Arg2", "Int", "Status", "StatusIdOrGroup" - ]); - AddBoost("GameplayObscurity", 1, [ + ]); + AddBoost("GameplayObscurity", 1, [ "Obscurity", "Float" - ]); - AddBoost("MaximumRollResult", 2, [ - "RollType", "StatsRollType", - "MinResult", "Int" - ]); - AddBoost("UnlockInterrupt", 1, [ + ]); + AddBoost("MaximumRollResult", 2, [ + "RollType", "StatsRollType", + "MinResult", "Int" + ]); + AddBoost("UnlockInterrupt", 1, [ "Interrupt", "Interrupt" - ]); - AddBoost("IntrinsicSourceProficiency", 0, []); - AddBoost("JumpMaxDistanceBonus", 1, [ + ]); + AddBoost("IntrinsicSourceProficiency", 0, []); + AddBoost("JumpMaxDistanceBonus", 1, [ "Bonus", "Float" - ]); - AddBoost("ArmorAbilityModifierCapOverride", 2, [ + ]); + AddBoost("ArmorAbilityModifierCapOverride", 2, [ "ArmorType", "ArmorType", - "Cap", "Int" - ]); - AddBoost("IgnoreResistance", 2, [ + "Cap", "Int" + ]); + AddBoost("IgnoreResistance", 2, [ "DamageType", "Damage Type", - "Flags", "ResistanceBoostFlags" - ]); - AddBoost("ConcentrationIgnoreDamage", 1, [ + "Flags", "ResistanceBoostFlags" + ]); + AddBoost("ConcentrationIgnoreDamage", 1, [ "SpellSchool", "SpellSchool" - ]); - AddBoost("LeaveTriggers", 0, []); - AddBoost("IgnoreLowGroundPenalty", 1, [ + ]); + AddBoost("LeaveTriggers", 0, []); + AddBoost("IgnoreLowGroundPenalty", 1, [ "RollType", "StatsRollType" - ]); - AddBoost("IgnoreSurfaceCover", 1, [ + ]); + AddBoost("IgnoreSurfaceCover", 1, [ "SurfaceType", "String" // Surface type - ]); - AddBoost("EnableBasicItemInteractions", 0, []); - AddBoost("SoundsBlocked", 0, []); - } + ]); + AddBoost("EnableBasicItemInteractions", 0, []); + AddBoost("SoundsBlocked", 0, []); + } - public void LoadEnumerations(Stream stream) - { - StatEnumeration curEnum = null; + public void LoadEnumerations(Stream stream) + { + StatEnumeration curEnum = null; - string line; + string line; - using var reader = new StreamReader(stream); - while ((line = reader.ReadLine()) != null) + using var reader = new StreamReader(stream); + while ((line = reader.ReadLine()) != null) + { + var trimmed = line.Trim(); + if (trimmed.Length > 0) { - var trimmed = line.Trim(); - if (trimmed.Length > 0) + if (trimmed.StartsWith("valuelist ")) + { + var name = trimmed[11..^1]; + curEnum = new StatEnumeration(name); + Enumerations.Add(curEnum.Name, curEnum); + } + else if (trimmed.StartsWith("value ")) { - if (trimmed.StartsWith("valuelist ")) - { - var name = trimmed[11..^1]; - curEnum = new StatEnumeration(name); - Enumerations.Add(curEnum.Name, curEnum); - } - else if (trimmed.StartsWith("value ")) - { - var label = trimmed[7..^1]; - curEnum.AddItem(label); - } + var label = trimmed[7..^1]; + curEnum.AddItem(label); } } } diff --git a/LSLib/LS/Stats/StatFileParser.cs b/LSLib/LS/Stats/StatFileParser.cs index bc72c6c5..7409b8b6 100644 --- a/LSLib/LS/Stats/StatFileParser.cs +++ b/LSLib/LS/Stats/StatFileParser.cs @@ -7,450 +7,448 @@ using System.Linq; using System.Xml; -namespace LSLib.LS.Stats +namespace LSLib.LS.Stats; + +public class StatEntry { - public class StatEntry - { - public string Name; - public StatEntryType Type; - public StatEntry BasedOn; - public CodeLocation Location; - public Dictionary Properties = []; - public Dictionary PropertyLocations = []; - } + public string Name; + public StatEntryType Type; + public StatEntry BasedOn; + public CodeLocation 
Location; + public Dictionary Properties = []; + public Dictionary PropertyLocations = []; +} +/// +/// Holder for stat loader diagnostic codes. +/// +public class DiagnosticCode +{ + /// + /// Syntax error in stat file. + /// + public const string StatSyntaxError = "S00"; + /// + /// Unable to determine type of stat declaration. + /// (Either the type was not specified, or no handler exists for the specified type) + /// + public const string StatEntityTypeUnknown = "S01"; /// - /// Holder for stat loader diagnostic codes. + /// The SkillType/StatusType specified was missing. /// - public class DiagnosticCode + public const string StatSubtypeMissing = "S02"; + /// + /// The base class specified in the "Using" section does not exist. + /// + public const string StatBaseClassNotKnown = "S03"; + /// + /// A stat declaration with the same key already exists. + /// + public const string StatNameDuplicate = "S04"; + /// + /// The property is not supported by the current stat type. + /// + public const string StatPropertyUnsupported = "S05"; + /// + /// Invalid property value. + /// + public const string StatPropertyValueInvalid = "S06"; + /// + /// The stat declaration has no name property. + /// + public const string StatNameMissing = "S07"; +} + +public class StatLoadingError +{ + public string Code; + public string Message; + public string Path; + public Int32 Line; + public string StatObjectName; +} + +public class StatLoadingContext +{ + public StatDefinitionRepository Definitions; + public List Errors = []; + public Dictionary> DeclarationsByType = []; + public Dictionary> ResolvedDeclarationsByType = []; + public Dictionary> GuidResources = []; + + public void LogError(string code, string message, string path = null, int line = 0, string statObjectName = null) { - /// - /// Syntax error in stat file. - /// - public const string StatSyntaxError = "S00"; - /// - /// Unable to determine type of stat declaration. - /// (Either the type was not specified, or no handler exists for the specified type) - /// - public const string StatEntityTypeUnknown = "S01"; - /// - /// The SkillType/StatusType specified was missing. - /// - public const string StatSubtypeMissing = "S02"; - /// - /// The base class specified in the "Using" section does not exist. - /// - public const string StatBaseClassNotKnown = "S03"; - /// - /// A stat declaration with the same key already exists. - /// - public const string StatNameDuplicate = "S04"; - /// - /// The property is not supported by the current stat type. - /// - public const string StatPropertyUnsupported = "S05"; - /// - /// Invalid property value. - /// - public const string StatPropertyValueInvalid = "S06"; - /// - /// The stat declaration has no name property. - /// - public const string StatNameMissing = "S07"; + Errors.Add(new StatLoadingError + { + Code = code, + Message = message, + Path = path, + Line = line > 0 ? 
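// Illustrative sketch, not part of this patch: one way a front-end might render the
// StatLoadingError records collected above. The field names come from the class; the output
// format and the sample values are made up.
public static class StatErrorFormatting
{
    public static string Format(string code, string path, int line, string statObjectName, string message)
    {
        var location = line > 0 ? $"{path}:{line}" : (path ?? "<unknown>");
        var subject = statObjectName != null ? $" [{statObjectName}]" : "";
        return $"{code} {location}{subject}: {message}";
    }
}
// e.g. Format("S03", "Spell_New.txt", 12, "MyStatusEntry", "references nonexistent base 'MISSING_BASE'")
//   -> "S03 Spell_New.txt:12 [MyStatusEntry]: references nonexistent base 'MISSING_BASE'"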
(line + 1) : 0, + StatObjectName = statObjectName + }); } +} - public class StatLoadingError +class StatEntryReferenceResolver(StatLoadingContext context) +{ + public bool AllowMappingErrors = false; + + private class BaseClassMapping { - public string Code; - public string Message; - public string Path; - public Int32 Line; - public string StatObjectName; + public StatDeclaration Declaration; + public StatDeclaration BaseClass; } - public class StatLoadingContext + public bool ResolveUsageRef( + StatEntryType type,StatDeclaration declaration, + Dictionary declarations, + out StatDeclaration basedOn) { - public StatDefinitionRepository Definitions; - public List Errors = []; - public Dictionary> DeclarationsByType = []; - public Dictionary> ResolvedDeclarationsByType = []; - public Dictionary> GuidResources = []; - - public void LogError(string code, string message, string path = null, int line = 0, string statObjectName = null) + var props = declaration.Properties; + var name = (string)props[type.NameProperty]; + if (type.BasedOnProperty != null && props.TryGetValue(type.BasedOnProperty, out object value)) { - Errors.Add(new StatLoadingError + var baseClass = (string)value; + + if (declarations.TryGetValue(baseClass, out StatDeclaration baseDeclaration)) + { + basedOn = baseDeclaration; + return true; + } + else { - Code = code, - Message = message, - Path = path, - Line = line > 0 ? (line + 1) : 0, - StatObjectName = statObjectName - }); + context.LogError(DiagnosticCode.StatBaseClassNotKnown, $"Stats entry '{name}' references nonexistent base '{baseClass}'", + declaration.Location.FileName, declaration.Location.StartLine, name); + basedOn = null; + return false; + } } + + basedOn = null; + return true; } - class StatEntryReferenceResolver(StatLoadingContext context) + private void PropagateInheritedProperties(StatDeclaration parent, StatDeclaration descendant) { - public bool AllowMappingErrors = false; - - private class BaseClassMapping + foreach (var prop in parent.Properties) { - public StatDeclaration Declaration; - public StatDeclaration BaseClass; + if (!descendant.Properties.ContainsKey(prop.Key)) + { + descendant.Properties[prop.Key] = prop.Value; + descendant.PropertyLocations[prop.Key] = parent.PropertyLocations[prop.Key]; + } } + } - public bool ResolveUsageRef( - StatEntryType type,StatDeclaration declaration, - Dictionary declarations, - out StatDeclaration basedOn) + private void PropagateInheritedProperties(List mappings) + { + foreach (var mapping in mappings) { - var props = declaration.Properties; - var name = (string)props[type.NameProperty]; - if (type.BasedOnProperty != null && props.TryGetValue(type.BasedOnProperty, out object value)) + if (mapping.BaseClass != null) { - var baseClass = (string)value; - - if (declarations.TryGetValue(baseClass, out StatDeclaration baseDeclaration)) - { - basedOn = baseDeclaration; - return true; - } - else - { - context.LogError(DiagnosticCode.StatBaseClassNotKnown, $"Stats entry '{name}' references nonexistent base '{baseClass}'", - declaration.Location.FileName, declaration.Location.StartLine, name); - basedOn = null; - return false; - } + PropagateInheritedProperties(mapping.BaseClass, mapping.Declaration); } - - basedOn = null; - return true; } + } + + public Dictionary ResolveUsageRefs(StatEntryType type, Dictionary declarations) + { + var mappings = new List(); + var resolved = new Dictionary(); - private void PropagateInheritedProperties(StatDeclaration parent, StatDeclaration descendant) + foreach (var declaration in 
declarations) { - foreach (var prop in parent.Properties) + if (declaration.Value.WasInstantiated) continue; + + var succeeded = ResolveUsageRef(type, declaration.Value, declarations, out StatDeclaration baseClass); + if (succeeded && baseClass != null) { - if (!descendant.Properties.ContainsKey(prop.Key)) + mappings.Add(new BaseClassMapping { - descendant.Properties[prop.Key] = prop.Value; - descendant.PropertyLocations[prop.Key] = parent.PropertyLocations[prop.Key]; - } + Declaration = declaration.Value, + BaseClass = baseClass + }); } - } - private void PropagateInheritedProperties(List mappings) - { - foreach (var mapping in mappings) + if (succeeded || AllowMappingErrors) { - if (mapping.BaseClass != null) - { - PropagateInheritedProperties(mapping.BaseClass, mapping.Declaration); - } + resolved.Add(declaration.Key, declaration.Value); } } - public Dictionary ResolveUsageRefs(StatEntryType type, Dictionary declarations) + PropagateInheritedProperties(mappings); + + return resolved; + } +} + +class StatLoaderReferenceValidator(StatLoadingContext ctx) : IStatReferenceValidator +{ + public bool IsValidReference(string reference, string statType) + { + if (ctx.DeclarationsByType.TryGetValue(statType, out var stats)) { - var mappings = new List(); - var resolved = new Dictionary(); + return stats.TryGetValue(reference, out _); + } - foreach (var declaration in declarations) - { - if (declaration.Value.WasInstantiated) continue; + return false; + } - var succeeded = ResolveUsageRef(type, declaration.Value, declarations, out StatDeclaration baseClass); - if (succeeded && baseClass != null) - { - mappings.Add(new BaseClassMapping - { - Declaration = declaration.Value, - BaseClass = baseClass - }); - } + public bool IsValidGuidResource(string name, string resourceType) + { + if (ctx.GuidResources.TryGetValue(resourceType, out var resources)) + { + return resources.TryGetValue(name, out _); + } - if (succeeded || AllowMappingErrors) - { - resolved.Add(declaration.Key, declaration.Value); - } - } + return false; + } +} + +public class StatLoader +{ + private readonly StatLoadingContext Context; + private readonly StatValueParserFactory ParserFactory; + private readonly StatLoaderReferenceValidator ReferenceValidator; - PropagateInheritedProperties(mappings); + public StatLoader(StatLoadingContext ctx) + { + Context = ctx; + ReferenceValidator = new StatLoaderReferenceValidator(ctx); + ParserFactory = new StatValueParserFactory(ReferenceValidator); + } - return resolved; + private List ParseStatStream(string path, Stream stream) + { + var scanner = new StatScanner(path); + scanner.SetSource(stream); + var parser = new StatParser.StatParser(scanner); + bool parsed = parser.Parse(); + if (!parsed) + { + var location = scanner.LastLocation(); + Context.LogError(DiagnosticCode.StatSyntaxError, $"Syntax error at or near line {location.StartLine}, column {location.StartColumn}", path, location.StartLine); } + + return parsed ? 
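// Illustrative sketch, not part of this patch: the "Using"/BasedOn inheritance resolved above
// boils down to copying every base-class property the derived entry does not override. With
// plain dictionaries standing in for StatDeclaration.Properties, that is:
using System.Collections.Generic;

public static class UsingInheritanceDemo
{
    public static void Inherit(Dictionary<string, object> baseProps, Dictionary<string, object> derivedProps)
    {
        foreach (var kv in baseProps)
        {
            // Derived values win; only missing keys are filled in from the base entry.
            if (!derivedProps.ContainsKey(kv.Key))
                derivedProps[kv.Key] = kv.Value;
        }
    }
}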
parser.GetDeclarations() : null; } - class StatLoaderReferenceValidator(StatLoadingContext ctx) : IStatReferenceValidator + private void AddDeclarations(List declarations) { - public bool IsValidReference(string reference, string statType) + foreach (var declaration in declarations) { - if (ctx.DeclarationsByType.TryGetValue(statType, out var stats)) + // Fixup type + if (!declaration.Properties.ContainsKey("EntityType")) { - return stats.TryGetValue(reference, out _); + Context.LogError(DiagnosticCode.StatEntityTypeUnknown, "Unable to determine type of stat declaration", declaration.Location.FileName, declaration.Location.StartLine); + continue; } + + var statType = declaration.Properties["EntityType"].ToString(); - return false; - } + if (!Context.Definitions.Types.TryGetValue(statType, out StatEntryType type)) + { + Context.LogError(DiagnosticCode.StatEntityTypeUnknown, $"No definition exists for stat type '{statType}'", declaration.Location.FileName, declaration.Location.StartLine); + continue; + } - public bool IsValidGuidResource(string name, string resourceType) - { - if (ctx.GuidResources.TryGetValue(resourceType, out var resources)) + if (!declaration.Properties.ContainsKey(type.NameProperty)) + { + Context.LogError(DiagnosticCode.StatNameMissing, $"Stat entry has no '{type.NameProperty}' property", declaration.Location.FileName, declaration.Location.StartLine); + continue; + } + + if (!Context.DeclarationsByType.TryGetValue(statType, out Dictionary declarationsByType)) { - return resources.TryGetValue(name, out _); + declarationsByType = []; + Context.DeclarationsByType[statType] = declarationsByType; } - return false; + // TODO - duplicate declaration check? + var name = declaration.Properties[type.NameProperty].ToString(); + declarationsByType[name] = declaration; } } - public class StatLoader + public void LoadStatsFromStream(string path, Stream stream) { - private readonly StatLoadingContext Context; - private readonly StatValueParserFactory ParserFactory; - private readonly StatLoaderReferenceValidator ReferenceValidator; - - public StatLoader(StatLoadingContext ctx) + var stats = ParseStatStream(path, stream); + if (stats != null) { - Context = ctx; - ReferenceValidator = new StatLoaderReferenceValidator(ctx); - ParserFactory = new StatValueParserFactory(ReferenceValidator); + AddDeclarations(stats); } + } - private List ParseStatStream(string path, Stream stream) + public void ResolveUsageRef() + { + var resolver = new StatEntryReferenceResolver(Context); + foreach (var type in Context.DeclarationsByType) { - var scanner = new StatScanner(path); - scanner.SetSource(stream); - var parser = new StatParser.StatParser(scanner); - bool parsed = parser.Parse(); - if (!parsed) - { - var location = scanner.LastLocation(); - Context.LogError(DiagnosticCode.StatSyntaxError, $"Syntax error at or near line {location.StartLine}, column {location.StartColumn}", path, location.StartLine); - } - - return parsed ? 
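// Illustrative sketch, not part of this patch: AddDeclarations above effectively maintains a
// two-level index, EntityType -> entry name -> declaration. A stripped-down version with plain
// dictionaries (and no diagnostics) looks like this:
using System.Collections.Generic;

public static class DeclarationIndexDemo
{
    public static void Add(
        Dictionary<string, Dictionary<string, object>> byType,
        string entityType, string name, object declaration)
    {
        if (!byType.TryGetValue(entityType, out var byName))
        {
            byName = new Dictionary<string, object>();
            byType[entityType] = byName;
        }

        // Later declarations with the same name overwrite earlier ones
        // (the original code leaves a duplicate-declaration check as a TODO).
        byName[name] = declaration;
    }
}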
parser.GetDeclarations() : null; + var typeDefn = Context.Definitions.Types[type.Key]; + Context.ResolvedDeclarationsByType[type.Key] = resolver.ResolveUsageRefs(typeDefn, type.Value); } + } - private void AddDeclarations(List declarations) + private object ParseProperty(StatEntryType type, string propertyName, object value, CodeLocation location, + string declarationName) + { + if (!type.Fields.TryGetValue(propertyName, out StatField field)) { - foreach (var declaration in declarations) - { - // Fixup type - if (!declaration.Properties.ContainsKey("EntityType")) - { - Context.LogError(DiagnosticCode.StatEntityTypeUnknown, "Unable to determine type of stat declaration", declaration.Location.FileName, declaration.Location.StartLine); - continue; - } - - var statType = declaration.Properties["EntityType"].ToString(); - - if (!Context.Definitions.Types.TryGetValue(statType, out StatEntryType type)) - { - Context.LogError(DiagnosticCode.StatEntityTypeUnknown, $"No definition exists for stat type '{statType}'", declaration.Location.FileName, declaration.Location.StartLine); - continue; - } - - if (!declaration.Properties.ContainsKey(type.NameProperty)) - { - Context.LogError(DiagnosticCode.StatNameMissing, $"Stat entry has no '{type.NameProperty}' property", declaration.Location.FileName, declaration.Location.StartLine); - continue; - } + Context.LogError(DiagnosticCode.StatPropertyUnsupported, $"Property '{propertyName}' is not supported on {type.Name} '{declarationName}'", + location?.FileName, location?.StartLine ?? 0, declarationName); + return null; + } - if (!Context.DeclarationsByType.TryGetValue(statType, out Dictionary declarationsByType)) - { - declarationsByType = []; - Context.DeclarationsByType[statType] = declarationsByType; - } + bool succeeded = false; + string errorText = null; + object parsed; - // TODO - duplicate declaration check? - var name = declaration.Properties[type.NameProperty].ToString(); - declarationsByType[name] = declaration; - } + if (value is String && propertyName.Length + ((string)value).Length > 4085) + { + parsed = null; + Context.LogError(DiagnosticCode.StatPropertyValueInvalid, $"{type.Name} '{declarationName}' has invalid {propertyName}: Line cannot be longer than 4095 characters", + location?.FileName, location?.StartLine ?? 0, declarationName); } - - public void LoadStatsFromStream(string path, Stream stream) + else if (field.Type != "Passthrough") { - var stats = ParseStatStream(path, stream); - if (stats != null) - { - AddDeclarations(stats); - } + var parser = field.GetParser(ParserFactory, Context.Definitions); + parsed = parser.Parse((string)value, ref succeeded, ref errorText); } - - public void ResolveUsageRef() + else { - var resolver = new StatEntryReferenceResolver(Context); - foreach (var type in Context.DeclarationsByType) - { - var typeDefn = Context.Definitions.Types[type.Key]; - Context.ResolvedDeclarationsByType[type.Key] = resolver.ResolveUsageRefs(typeDefn, type.Value); - } + parsed = value; + succeeded = true; } - private object ParseProperty(StatEntryType type, string propertyName, object value, CodeLocation location, - string declarationName) + if (errorText != null) { - if (!type.Fields.TryGetValue(propertyName, out StatField field)) - { - Context.LogError(DiagnosticCode.StatPropertyUnsupported, $"Property '{propertyName}' is not supported on {type.Name} '{declarationName}'", - location?.FileName, location?.StartLine ?? 
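// Illustrative note, not part of this patch: the 4085 threshold in ParseProperty above lines up
// with the 4095-character line limit mentioned in its error message once the surrounding stat
// file syntax is counted. Assuming the on-disk form is   data "Key" "Value"   the fixed syntax
// costs 10 characters, so key + value must stay within 4085:
public static class StatLineBudget
{
    public const int MaxLineLength = 4095;
    public const int SyntaxOverhead = 10;                               // `data "` (6) + `" "` (3) + closing `"` (1)
    public const int MaxKeyPlusValue = MaxLineLength - SyntaxOverhead;  // 4085, the threshold used above
}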
0, declarationName); - return null; - } - - bool succeeded = false; - string errorText = null; - object parsed; - - if (value is String && propertyName.Length + ((string)value).Length > 4085) + if (value is string v && v.Length > 500) { - parsed = null; - Context.LogError(DiagnosticCode.StatPropertyValueInvalid, $"{type.Name} '{declarationName}' has invalid {propertyName}: Line cannot be longer than 4095 characters", + Context.LogError(DiagnosticCode.StatPropertyValueInvalid, $"{type.Name} '{declarationName}' has invalid {propertyName}: {errorText}", location?.FileName, location?.StartLine ?? 0, declarationName); } - else if (field.Type != "Passthrough") - { - var parser = field.GetParser(ParserFactory, Context.Definitions); - parsed = parser.Parse((string)value, ref succeeded, ref errorText); - } - else - { - parsed = value; - succeeded = true; - } - - if (errorText != null) - { - if (value is string v && v.Length > 500) - { - Context.LogError(DiagnosticCode.StatPropertyValueInvalid, $"{type.Name} '{declarationName}' has invalid {propertyName}: {errorText}", - location?.FileName, location?.StartLine ?? 0, declarationName); - } - else - { - Context.LogError(DiagnosticCode.StatPropertyValueInvalid, $"{type.Name} '{declarationName}' has invalid {propertyName}: '{value}' ({errorText})", - location?.FileName, location?.StartLine ?? 0, declarationName); - } - } - - if (succeeded) - { - return parsed; - } else { - return null; + Context.LogError(DiagnosticCode.StatPropertyValueInvalid, $"{type.Name} '{declarationName}' has invalid {propertyName}: '{value}' ({errorText})", + location?.FileName, location?.StartLine ?? 0, declarationName); } } - private StatEntry InstantiateEntry(StatEntryType type, string declarationName, StatDeclaration declaration) + if (succeeded) + { + return parsed; + } + else { - return InstantiateEntryInternal(type, declarationName, declaration.Location, - declaration.Properties, declaration.PropertyLocations); + return null; } + } + + private StatEntry InstantiateEntry(StatEntryType type, string declarationName, StatDeclaration declaration) + { + return InstantiateEntryInternal(type, declarationName, declaration.Location, + declaration.Properties, declaration.PropertyLocations); + } - private StatEntry InstantiateEntryInternal(StatEntryType type, string declarationName, - CodeLocation location, Dictionary properties, Dictionary propertyLocations) + private StatEntry InstantiateEntryInternal(StatEntryType type, string declarationName, + CodeLocation location, Dictionary properties, Dictionary propertyLocations) + { + var entity = new StatEntry { - var entity = new StatEntry - { - Name = declarationName, - Type = type, - BasedOn = null, // FIXME - Location = location, - Properties = [], - PropertyLocations = propertyLocations - }; - - foreach (var property in properties) + Name = declarationName, + Type = type, + BasedOn = null, // FIXME + Location = location, + Properties = [], + PropertyLocations = propertyLocations + }; + + foreach (var property in properties) + { + if (property.Key == "EntityType") { - if (property.Key == "EntityType") - { - continue; - } - - propertyLocations.TryGetValue(property.Key, out CodeLocation propLocation); - var parsed = ParseProperty(type, property.Key, property.Value, propLocation, declarationName); - if (parsed != null) - { - entity.Properties.Add(property.Key, parsed); - } + continue; } - return entity; + propertyLocations.TryGetValue(property.Key, out CodeLocation propLocation); + var parsed = ParseProperty(type, property.Key, 
property.Value, propLocation, declarationName); + if (parsed != null) + { + entity.Properties.Add(property.Key, parsed); + } } - public void InstantiateEntries() + return entity; + } + + public void InstantiateEntries() + { + foreach (var type in Context.ResolvedDeclarationsByType) { - foreach (var type in Context.ResolvedDeclarationsByType) + var typeDefn = Context.Definitions.Types[type.Key]; + foreach (var declaration in type.Value) { - var typeDefn = Context.Definitions.Types[type.Key]; - foreach (var declaration in type.Value) + if (!declaration.Value.WasInstantiated) { - if (!declaration.Value.WasInstantiated) - { - InstantiateEntry(typeDefn, declaration.Key, declaration.Value); - declaration.Value.WasInstantiated = true; - } + InstantiateEntry(typeDefn, declaration.Key, declaration.Value); + declaration.Value.WasInstantiated = true; } } } + } - private void LoadGuidResources(Dictionary guidResources, XmlNodeList nodes) + private void LoadGuidResources(Dictionary guidResources, XmlNodeList nodes) + { + foreach (var node in nodes) { - foreach (var node in nodes) + var attributes = (node as XmlElement).GetElementsByTagName("attribute"); + foreach (var attribute in attributes) { - var attributes = (node as XmlElement).GetElementsByTagName("attribute"); - foreach (var attribute in attributes) + var attr = attribute as XmlElement; + if (attr.GetAttribute("id") == "Name") { - var attr = attribute as XmlElement; - if (attr.GetAttribute("id") == "Name") - { - var name = attr.GetAttribute("value"); - guidResources[name] = name; - break; - } + var name = attr.GetAttribute("value"); + guidResources[name] = name; + break; } } } + } - public void LoadGuidResources(XmlDocument doc, string typeName, string regionName) + public void LoadGuidResources(XmlDocument doc, string typeName, string regionName) + { + if (!Context.GuidResources.TryGetValue(typeName, out Dictionary guidResources)) { - if (!Context.GuidResources.TryGetValue(typeName, out Dictionary guidResources)) - { - guidResources = []; - Context.GuidResources[typeName] = guidResources; - } + guidResources = []; + Context.GuidResources[typeName] = guidResources; + } - var regions = doc.DocumentElement.GetElementsByTagName("region"); - foreach (var region in regions) + var regions = doc.DocumentElement.GetElementsByTagName("region"); + foreach (var region in regions) + { + if ((region as XmlElement).GetAttribute("id") == regionName) { - if ((region as XmlElement).GetAttribute("id") == regionName) + var root = (region as XmlElement).GetElementsByTagName("node"); + if (root.Count > 0) { - var root = (region as XmlElement).GetElementsByTagName("node"); - if (root.Count > 0) + var children = (root[0] as XmlElement).GetElementsByTagName("children"); + if (children.Count > 0) { - var children = (root[0] as XmlElement).GetElementsByTagName("children"); - if (children.Count > 0) - { - var resources = (children[0] as XmlElement).GetElementsByTagName("node"); - LoadGuidResources(guidResources, resources); - } + var resources = (children[0] as XmlElement).GetElementsByTagName("node"); + LoadGuidResources(guidResources, resources); } } } } + } - public void LoadActionResources(XmlDocument doc) - { - LoadGuidResources(doc, "ActionResource", "ActionResourceDefinitions"); - } - - public void LoadActionResourceGroups(XmlDocument doc) - { - LoadGuidResources(doc, "ActionResourceGroup", "ActionResourceGroupDefinitions"); - } + public void LoadActionResources(XmlDocument doc) + { + LoadGuidResources(doc, "ActionResource", "ActionResourceDefinitions"); } + 
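// Illustrative sketch, not part of this patch: LoadGuidResources above walks an LSX document of
// roughly the shape below. The element names and the region id come from the traversal itself;
// the node ids, the attribute type and the sample value are assumptions.
//
//   <save>
//     <region id="ActionResourceDefinitions">
//       <node id="root">
//         <children>
//           <node id="ActionResource">
//             <attribute id="Name" type="FixedString" value="ActionPoint" />
//           </node>
//         </children>
//       </node>
//     </region>
//   </save>
//
public static class GuidResourceLoadingExample
{
    // Hypothetical usage; the file path is made up.
    public static void LoadExample(StatLoader loader)
    {
        var doc = new System.Xml.XmlDocument();
        doc.Load("ActionResourceDefinitions.lsx");
        loader.LoadActionResources(doc);
    }
}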
public void LoadActionResourceGroups(XmlDocument doc) + { + LoadGuidResources(doc, "ActionResourceGroup", "ActionResourceGroupDefinitions"); + } } diff --git a/LSLib/LS/Stats/StatValueParsers.cs b/LSLib/LS/Stats/StatValueParsers.cs index fce4b975..43e5a679 100644 --- a/LSLib/LS/Stats/StatValueParsers.cs +++ b/LSLib/LS/Stats/StatValueParsers.cs @@ -6,634 +6,633 @@ using System.Linq; using System.Text; -namespace LSLib.LS.Stats +namespace LSLib.LS.Stats; + +public interface IStatValueParser { - public interface IStatValueParser - { - object Parse(string value, ref bool succeeded, ref string errorText); - } + object Parse(string value, ref bool succeeded, ref string errorText); +} - public class StatReferenceConstraint - { - public string StatType; - } +public class StatReferenceConstraint +{ + public string StatType; +} - public interface IStatReferenceValidator +public interface IStatReferenceValidator +{ + bool IsValidReference(string reference, string statType); + bool IsValidGuidResource(string name, string resourceType); +} + +public class BooleanParser : IStatValueParser +{ + public object Parse(string value, ref bool succeeded, ref string errorText) { - bool IsValidReference(string reference, string statType); - bool IsValidGuidResource(string name, string resourceType); + if (value == "true" || value == "false" || value == "") + { + succeeded = true; + return (value == "true"); + } + else + { + succeeded = false; + errorText = "expected boolean value 'true' or 'false'"; + return null; + } } +} - public class BooleanParser : IStatValueParser +public class Int32Parser : IStatValueParser +{ + public object Parse(string value, ref bool succeeded, ref string errorText) { - public object Parse(string value, ref bool succeeded, ref string errorText) + if (value == "") { - if (value == "true" || value == "false" || value == "") - { - succeeded = true; - return (value == "true"); - } - else - { - succeeded = false; - errorText = "expected boolean value 'true' or 'false'"; - return null; - } + succeeded = true; + return 0; + } + else if (Int32.TryParse(value, out int intval)) + { + succeeded = true; + return intval; + } + else + { + succeeded = false; + errorText = "expected an integer value"; + return null; } } +} - public class Int32Parser : IStatValueParser +public class FloatParser : IStatValueParser +{ + public object Parse(string value, ref bool succeeded, ref string errorText) { - public object Parse(string value, ref bool succeeded, ref string errorText) + if (value == "") { - if (value == "") - { - succeeded = true; - return 0; - } - else if (Int32.TryParse(value, out int intval)) - { - succeeded = true; - return intval; - } - else - { - succeeded = false; - errorText = "expected an integer value"; - return null; - } + succeeded = true; + return 0.0f; + } + else if (Single.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out float floatval)) + { + succeeded = true; + return floatval; + } + else + { + succeeded = false; + errorText = "expected a float value"; + return null; } } +} - public class FloatParser : IStatValueParser +public class EnumParser(StatEnumeration enumeration) : IStatValueParser +{ + private readonly StatEnumeration Enumeration = enumeration ?? 
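// Illustrative sketch, not part of this patch: how the IStatValueParser contract above is
// consumed. A parser reports success or failure through the ref parameters instead of throwing;
// the expected results in the comments mirror Int32Parser as defined in this file.
public static class ParserUsageDemo
{
    public static void Demo()
    {
        IStatValueParser parser = new Int32Parser();
        bool succeeded = false;
        string errorText = null;

        var parsed = parser.Parse("42", ref succeeded, ref errorText);
        // succeeded == true, parsed is the boxed int 42, errorText stays null.

        parser.Parse("forty-two", ref succeeded, ref errorText);
        // succeeded == false, errorText == "expected an integer value".
    }
}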
throw new ArgumentNullException(); + + public object Parse(string value, ref bool succeeded, ref string errorText) { - public object Parse(string value, ref bool succeeded, ref string errorText) + if (value == null || value == "") { - if (value == "") - { - succeeded = true; - return 0.0f; - } - else if (Single.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out float floatval)) + value = Enumeration.Values[0]; + } + + if (Enumeration.ValueToIndexMap.ContainsKey(value)) + { + succeeded = true; + return value; + } + else + { + succeeded = false; + if (Enumeration.Values.Count > 4) { - succeeded = true; - return floatval; + errorText = "expected one of: " + String.Join(", ", Enumeration.Values.Take(4)) + ", ..."; } else { - succeeded = false; - errorText = "expected a float value"; - return null; + errorText = "expected one of: " + String.Join(", ", Enumeration.Values); } + return null; } } +} + +public class MultiValueEnumParser(StatEnumeration enumeration) : IStatValueParser +{ + private readonly EnumParser Parser = new(enumeration); - public class EnumParser(StatEnumeration enumeration) : IStatValueParser + public object Parse(string value, ref bool succeeded, ref string errorText) { - private readonly StatEnumeration Enumeration = enumeration ?? throw new ArgumentNullException(); + succeeded = true; - public object Parse(string value, ref bool succeeded, ref string errorText) + if (value.Length == 0) { - if (value == null || value == "") - { - value = Enumeration.Values[0]; - } + return true; + } - if (Enumeration.ValueToIndexMap.ContainsKey(value)) - { - succeeded = true; - return value; - } - else + foreach (var item in value.Split([';'])) + { + Parser.Parse(item.Trim([' ']), ref succeeded, ref errorText); + if (!succeeded) { - succeeded = false; - if (Enumeration.Values.Count > 4) - { - errorText = "expected one of: " + String.Join(", ", Enumeration.Values.Take(4)) + ", ..."; - } - else - { - errorText = "expected one of: " + String.Join(", ", Enumeration.Values); - } + errorText = $"Value '{item}' not supported; {errorText}"; return null; } } + + return value; } +} - public class MultiValueEnumParser(StatEnumeration enumeration) : IStatValueParser +public class StringParser : IStatValueParser +{ + public object Parse(string value, ref bool succeeded, ref string errorText) { - private readonly EnumParser Parser = new(enumeration); - - public object Parse(string value, ref bool succeeded, ref string errorText) + if (value.Length > 2048) + { + errorText = "Value cannot be longer than 2048 characters"; + succeeded = false; + return null; + } + else { + errorText = null; succeeded = true; - - if (value.Length == 0) - { - return true; - } - - foreach (var item in value.Split([';'])) - { - Parser.Parse(item.Trim([' ']), ref succeeded, ref errorText); - if (!succeeded) - { - errorText = $"Value '{item}' not supported; {errorText}"; - return null; - } - } - return value; } } +} - public class StringParser : IStatValueParser +public class UUIDParser : IStatValueParser +{ + public object Parse(string value, ref bool succeeded, ref string errorText) { - public object Parse(string value, ref bool succeeded, ref string errorText) + if (value == "") { - if (value.Length > 2048) - { - errorText = "Value cannot be longer than 2048 characters"; - succeeded = false; - return null; - } - else - { - errorText = null; - succeeded = true; - return value; - } + succeeded = true; + return Guid.Empty; + } + else if (Guid.TryParseExact(value, "D", out Guid parsed)) + { + succeeded = true; + 
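// Illustrative sketch, not part of this patch: MultiValueEnumParser above accepts
// semicolon-separated lists such as "Resistant;ImmuneToMagical" (values from the
// ResistanceBoostFlags enumeration registered earlier) and validates each item individually.
// The split-and-trim it relies on, in isolation:
public static class MultiValueSplitDemo
{
    public static string[] SplitItems(string value)
    {
        var items = value.Split(';');
        for (var i = 0; i < items.Length; i++)
            items[i] = items[i].Trim(' ');
        return items;
    }
}
// SplitItems("Resistant; ImmuneToMagical") -> ["Resistant", "ImmuneToMagical"]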
return parsed; + } + else + { + errorText = $"'{value}' is not a valid UUID"; + succeeded = false; + return null; } } +} - public class UUIDParser : IStatValueParser +public class StatReferenceParser(IStatReferenceValidator validator, List constraints) : IStatValueParser +{ + public object Parse(string value, ref bool succeeded, ref string errorText) { - public object Parse(string value, ref bool succeeded, ref string errorText) + if (value == "") { - if (value == "") - { - succeeded = true; - return Guid.Empty; - } - else if (Guid.TryParseExact(value, "D", out Guid parsed)) - { - succeeded = true; - return parsed; - } - else - { - errorText = $"'{value}' is not a valid UUID"; - succeeded = false; - return null; - } + succeeded = true; + return value; } - } - public class StatReferenceParser(IStatReferenceValidator validator, List constraints) : IStatValueParser - { - public object Parse(string value, ref bool succeeded, ref string errorText) + foreach (var constraint in constraints) { - if (value == "") + if (validator.IsValidReference(value, constraint.StatType)) { succeeded = true; return value; } + } - foreach (var constraint in constraints) + var refTypes = String.Join("/", constraints.Select(c => c.StatType)); + errorText = $"'{value}' is not a valid {refTypes} reference"; + succeeded = false; + return null; + } +} + +public class MultiValueStatReferenceParser(IStatReferenceValidator validator, List constraints) : IStatValueParser +{ + private readonly StatReferenceParser Parser = new(validator, constraints); + + public object Parse(string value, ref bool succeeded, ref string errorText) + { + succeeded = true; + + foreach (var item in value.Split([';'])) + { + var trimmed = item.Trim([' ']); + if (trimmed.Length > 0) { - if (validator.IsValidReference(value, constraint.StatType)) + Parser.Parse(trimmed, ref succeeded, ref errorText); + if (!succeeded) { - succeeded = true; - return value; + return null; } } + } + + return value; + } +} + +public enum ExpressionType +{ + Boost, + Functor, + DescriptionParams +}; - var refTypes = String.Join("/", constraints.Select(c => c.StatType)); - errorText = $"'{value}' is not a valid {refTypes} reference"; +public class ExpressionParser(String validatorType, StatDefinitionRepository definitions, + StatValueParserFactory parserFactory, ExpressionType type) : IStatValueParser +{ + public virtual object Parse(string value, ref bool succeeded, ref string errorText) + { + var valueBytes = Encoding.UTF8.GetBytes("__TYPE_" + validatorType + "__ " + value.TrimEnd()); + using var buf = new MemoryStream(valueBytes); + List errorTexts = []; + + var scanner = new StatPropertyScanner(); + scanner.SetSource(buf); + var parser = new StatPropertyParser(scanner, definitions, parserFactory, valueBytes, type); + parser.OnError += (string message) => errorTexts.Add(message); + succeeded = parser.Parse(); + if (!succeeded) + { + var location = scanner.LastLocation(); + var column = location.StartColumn - 10 - validatorType.Length + 1; + errorText = $"Syntax error at or near character {column}"; + return null; + } + else if (errorTexts.Count > 0) + { succeeded = false; + errorText = String.Join("; ", errorTexts); return null; } + else + { + succeeded = true; + return parser.GetParsedObject(); + } } +} - public class MultiValueStatReferenceParser(IStatReferenceValidator validator, List constraints) : IStatValueParser +public class LuaExpressionParser : IStatValueParser +{ + public virtual object Parse(string value, ref bool succeeded, ref string errorText) { - 
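// Illustrative note, not part of this patch: ExpressionParser above prepends "__TYPE_<name>__ "
// so the property grammar knows which expression kind it is parsing, then undoes that prefix
// when reporting error columns. The prefix is 10 + validatorType.Length characters long, which
// is exactly what the column adjustment subtracts:
public static class ExpressionPrefixMath
{
    public static int PrefixLength(string validatorType)
        => "__TYPE_".Length + validatorType.Length + "__ ".Length; // 7 + n + 3 = 10 + n

    public static int ColumnInOriginalValue(int scannerColumn, string validatorType)
        => scannerColumn - PrefixLength(validatorType) + 1;
}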
private readonly StatReferenceParser Parser = new(validator, constraints); - - public object Parse(string value, ref bool succeeded, ref string errorText) + value = "BHAALS_BOON_SLAYER.Duration-1"; + var valueBytes = Encoding.UTF8.GetBytes(value); + using var buf = new MemoryStream(valueBytes); + var scanner = new Lua.StatLuaScanner(); + scanner.SetSource(buf); + var parser = new Lua.StatLuaParser(scanner); + succeeded = parser.Parse(); + if (!succeeded) + { + var location = scanner.LastLocation(); + errorText = $"Syntax error at or near character {location.StartColumn}"; + return null; + } + else { succeeded = true; - - foreach (var item in value.Split([';'])) - { - var trimmed = item.Trim([' ']); - if (trimmed.Length > 0) - { - Parser.Parse(trimmed, ref succeeded, ref errorText); - if (!succeeded) - { - return null; - } - } - } - - return value; + return null; } } +} - public enum ExpressionType +public class UseCostsParser(IStatReferenceValidator validator) : IStatValueParser +{ + public virtual object Parse(string value, ref bool succeeded, ref string errorText) { - Boost, - Functor, - DescriptionParams - }; + if (value.Length == 0) return value; - public class ExpressionParser(String validatorType, StatDefinitionRepository definitions, - StatValueParserFactory parserFactory, ExpressionType type) : IStatValueParser - { - public virtual object Parse(string value, ref bool succeeded, ref string errorText) + foreach (var resource in value.Split(';')) { - var valueBytes = Encoding.UTF8.GetBytes("__TYPE_" + validatorType + "__ " + value.TrimEnd()); - using var buf = new MemoryStream(valueBytes); - List errorTexts = []; + var res = resource.Trim(); + if (res.Length == 0) continue; - var scanner = new StatPropertyScanner(); - scanner.SetSource(buf); - var parser = new StatPropertyParser(scanner, definitions, parserFactory, valueBytes, type); - parser.OnError += (string message) => errorTexts.Add(message); - succeeded = parser.Parse(); - if (!succeeded) - { - var location = scanner.LastLocation(); - var column = location.StartColumn - 10 - validatorType.Length + 1; - errorText = $"Syntax error at or near character {column}"; - return null; - } - else if (errorTexts.Count > 0) + var parts = res.Split(':'); + if (parts.Length < 2 || parts.Length > 4) { - succeeded = false; - errorText = String.Join("; ", errorTexts); + errorText = $"Malformed use costs"; return null; } - else - { - succeeded = true; - return parser.GetParsedObject(); - } - } - } - public class LuaExpressionParser : IStatValueParser - { - public virtual object Parse(string value, ref bool succeeded, ref string errorText) - { - value = "BHAALS_BOON_SLAYER.Duration-1"; - var valueBytes = Encoding.UTF8.GetBytes(value); - using var buf = new MemoryStream(valueBytes); - var scanner = new Lua.StatLuaScanner(); - scanner.SetSource(buf); - var parser = new Lua.StatLuaParser(scanner); - succeeded = parser.Parse(); - if (!succeeded) + if (!validator.IsValidGuidResource(parts[0], "ActionResource") && !validator.IsValidGuidResource(parts[0], "ActionResourceGroup")) { - var location = scanner.LastLocation(); - errorText = $"Syntax error at or near character {location.StartColumn}"; + errorText = $"Nonexistent action resource or action resource group: {parts[0]}"; return null; } - else - { - succeeded = true; - return null; - } - } - } - public class UseCostsParser(IStatReferenceValidator validator) : IStatValueParser - { - public virtual object Parse(string value, ref bool succeeded, ref string errorText) - { - if (value.Length == 0) return 
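// Illustrative sketch, not part of this patch: judging by the checks UseCostsParser performs
// (here and just below), a use-cost entry is "Resource:Amount", optionally extended to
// "Resource:Amount:Level[:Level2]", with several entries joined by ';' and "Distance*<factor>"
// allowed as the amount. The resource names in the examples are placeholders, not values taken
// from game data.
public static class UseCostShapeDemo
{
    public static (string Resource, string Amount, string[] Levels) SplitOne(string entry)
    {
        var parts = entry.Trim().Split(':');
        return (parts[0], parts[1], parts[2..]);
    }
}
// SplitOne("ActionPoint:1")         -> ("ActionPoint", "1", [])
// SplitOne("SpellSlotsGroup:1:1:2") -> ("SpellSlotsGroup", "1", ["1", "2"])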
value; - - foreach (var resource in value.Split(';')) + var distanceExpr = parts[1].Split('*'); + if (distanceExpr[0] == "Distance") { - var res = resource.Trim(); - if (res.Length == 0) continue; - - var parts = res.Split(':'); - if (parts.Length < 2 || parts.Length > 4) + if (distanceExpr.Length > 1 && !Single.TryParse(distanceExpr[1], NumberStyles.Float, CultureInfo.InvariantCulture, out float floatval)) { - errorText = $"Malformed use costs"; + errorText = $"Malformed distance multiplier: {distanceExpr[1]}"; return null; } - if (!validator.IsValidGuidResource(parts[0], "ActionResource") && !validator.IsValidGuidResource(parts[0], "ActionResourceGroup")) - { - errorText = $"Nonexistent action resource or action resource group: {parts[0]}"; - return null; - } - - var distanceExpr = parts[1].Split('*'); - if (distanceExpr[0] == "Distance") - { - if (distanceExpr.Length > 1 && !Single.TryParse(distanceExpr[1], NumberStyles.Float, CultureInfo.InvariantCulture, out float floatval)) - { - errorText = $"Malformed distance multiplier: {distanceExpr[1]}"; - return null; - } - - } - else if (!Single.TryParse(parts[1], NumberStyles.Float, CultureInfo.InvariantCulture, out float floatval)) - { - errorText = $"Malformed resource amount: {parts[1]}"; - return null; - } - - if (parts.Length == 3 && !Int32.TryParse(parts[2], NumberStyles.Integer, CultureInfo.InvariantCulture, out int intval)) - { - errorText = $"Malformed level: {parts[2]}"; - return null; - } - - if (parts.Length == 4 && !Int32.TryParse(parts[3], NumberStyles.Integer, CultureInfo.InvariantCulture, out intval)) - { - errorText = $"Malformed level: {parts[3]}"; - return null; - } } - - succeeded = true; - return value; - } - } - - public class DiceRollParser : IStatValueParser - { - public virtual object Parse(string value, ref bool succeeded, ref string errorText) - { - if (value.Length == 0) return value; - - var parts = value.Split('d'); - if (parts.Length != 2 - || !Int32.TryParse(parts[0], NumberStyles.Integer, CultureInfo.InvariantCulture, out int numDice) - || !Int32.TryParse(parts[1], NumberStyles.Integer, CultureInfo.InvariantCulture, out int dieSize)) + else if (!Single.TryParse(parts[1], NumberStyles.Float, CultureInfo.InvariantCulture, out float floatval)) { - errorText = $"Malformed dice roll"; + errorText = $"Malformed resource amount: {parts[1]}"; return null; } - if (dieSize != 4 && dieSize != 6 && dieSize != 8 && dieSize != 10 && dieSize != 12 && dieSize != 20 && dieSize != 100) + if (parts.Length == 3 && !Int32.TryParse(parts[2], NumberStyles.Integer, CultureInfo.InvariantCulture, out int intval)) { - errorText = $"Invalid die size: {dieSize}"; + errorText = $"Malformed level: {parts[2]}"; return null; } - succeeded = true; - return value; + if (parts.Length == 4 && !Int32.TryParse(parts[3], NumberStyles.Integer, CultureInfo.InvariantCulture, out intval)) + { + errorText = $"Malformed level: {parts[3]}"; + return null; + } } + + succeeded = true; + return value; } +} - public class AnyParser(IEnumerable parsers, string message = null) : IStatValueParser +public class DiceRollParser : IStatValueParser +{ + public virtual object Parse(string value, ref bool succeeded, ref string errorText) { - private readonly List Parsers = parsers.ToList(); + if (value.Length == 0) return value; - public object Parse(string value, ref bool succeeded, ref string errorText) + var parts = value.Split('d'); + if (parts.Length != 2 + || !Int32.TryParse(parts[0], NumberStyles.Integer, CultureInfo.InvariantCulture, out int numDice) + || 
!Int32.TryParse(parts[1], NumberStyles.Integer, CultureInfo.InvariantCulture, out int dieSize)) { - List errors = []; - foreach (var parser in Parsers) - { - succeeded = false; - string error = null; - var result = parser.Parse(value, ref succeeded, ref error); - if (succeeded) - { - return result; - } - else - { - errors.Add(error); - } - } + errorText = $"Malformed dice roll"; + return null; + } - if (message != null && message.Length > 0) + if (dieSize != 4 && dieSize != 6 && dieSize != 8 && dieSize != 10 && dieSize != 12 && dieSize != 20 && dieSize != 100) + { + errorText = $"Invalid die size: {dieSize}"; + return null; + } + + succeeded = true; + return value; + } +} + +public class AnyParser(IEnumerable parsers, string message = null) : IStatValueParser +{ + private readonly List Parsers = parsers.ToList(); + + public object Parse(string value, ref bool succeeded, ref string errorText) + { + List errors = []; + foreach (var parser in Parsers) + { + succeeded = false; + string error = null; + var result = parser.Parse(value, ref succeeded, ref error); + if (succeeded) { - errorText = $"'{value}': {message}"; + return result; } else { - errorText = String.Join("; ", errors); + errors.Add(error); } + } - return null; + if (message != null && message.Length > 0) + { + errorText = $"'{value}': {message}"; + } + else + { + errorText = String.Join("; ", errors); } + + return null; } +} - public class AnyType +public class AnyType +{ + public List Types; + public string Message; +} + +public class StatValueParserFactory(IStatReferenceValidator referenceValidator) +{ + public IStatValueParser CreateReferenceParser(List constraints) { - public List Types; - public string Message; + return new StatReferenceParser(referenceValidator, constraints); } - public class StatValueParserFactory(IStatReferenceValidator referenceValidator) + public IStatValueParser CreateParser(StatField field, StatDefinitionRepository definitions) { - public IStatValueParser CreateReferenceParser(List constraints) + switch (field.Name) { - return new StatReferenceParser(referenceValidator, constraints); + case "Boosts": + case "DefaultBoosts": + case "BoostsOnEquipMainHand": + case "BoostsOnEquipOffHand": + return new ExpressionParser("Properties", definitions, this, ExpressionType.Boost); + + case "TooltipDamage": + case "TooltipDamageList": + case "TooltipStatusApply": + case "TooltipConditionalDamage": + return new ExpressionParser("Properties", definitions, this, ExpressionType.DescriptionParams); + + case "DescriptionParams": + case "ExtraDescriptionParams": + case "ShortDescriptionParams": + case "TooltipUpcastDescriptionParams": + return new ExpressionParser("DescriptionParams", definitions, this, ExpressionType.DescriptionParams); + + case "ConcentrationSpellID": + case "CombatAIOverrideSpell": + case "SpellContainerID": + case "FollowUpOriginalSpell": + case "RootSpellID": + return new StatReferenceParser(referenceValidator, + [ + new StatReferenceConstraint{ StatType = "SpellData" } + ]); + + case "ContainerSpells": + return new MultiValueStatReferenceParser(referenceValidator, + [ + new StatReferenceConstraint{ StatType = "SpellData" } + ]); + + case "InterruptPrototype": + return new StatReferenceParser(referenceValidator, + [ + new StatReferenceConstraint{ StatType = "InterruptData" } + ]); + + case "Passives": + case "PassivesOnEquip": + case "PassivesMainHand": + case "PassivesOffHand": + return new MultiValueStatReferenceParser(referenceValidator, + [ + new StatReferenceConstraint{ StatType = 
"PassiveData" } + ]); + + case "StatusOnEquip": + case "StatusInInventory": + return new MultiValueStatReferenceParser(referenceValidator, + [ + new StatReferenceConstraint{ StatType = "StatusData" } + ]); + + case "Cost": + case "UseCosts": + case "DualWieldingUseCosts": + case "ActionResources": + case "TooltipUseCosts": + case "RitualCosts": + case "HitCosts": + return new UseCostsParser(referenceValidator); + + case "Damage": + case "VersatileDamage": + case "StableRoll": + return new DiceRollParser(); + + case "Template": + case "StatusEffectOverride": + case "StatusEffectOnTurn": + case "ManagedStatusEffectGroup": + case "ApplyEffect": + case "SpellEffect": + case "StatusEffect": + case "DisappearEffect": + case "PreviewEffect": + case "PositionEffect": + case "HitEffect": + case "TargetEffect": + case "BeamEffect": + case "CastEffect": + case "PrepareEffect": + case "TooltipOnSave": + return new UUIDParser(); + + case "AmountOfTargets": + return new LuaExpressionParser(); } - public IStatValueParser CreateParser(StatField field, StatDefinitionRepository definitions) - { - switch (field.Name) - { - case "Boosts": - case "DefaultBoosts": - case "BoostsOnEquipMainHand": - case "BoostsOnEquipOffHand": - return new ExpressionParser("Properties", definitions, this, ExpressionType.Boost); - - case "TooltipDamage": - case "TooltipDamageList": - case "TooltipStatusApply": - case "TooltipConditionalDamage": - return new ExpressionParser("Properties", definitions, this, ExpressionType.DescriptionParams); - - case "DescriptionParams": - case "ExtraDescriptionParams": - case "ShortDescriptionParams": - case "TooltipUpcastDescriptionParams": - return new ExpressionParser("DescriptionParams", definitions, this, ExpressionType.DescriptionParams); - - case "ConcentrationSpellID": - case "CombatAIOverrideSpell": - case "SpellContainerID": - case "FollowUpOriginalSpell": - case "RootSpellID": - return new StatReferenceParser(referenceValidator, - [ - new StatReferenceConstraint{ StatType = "SpellData" } - ]); - - case "ContainerSpells": - return new MultiValueStatReferenceParser(referenceValidator, - [ - new StatReferenceConstraint{ StatType = "SpellData" } - ]); - - case "InterruptPrototype": - return new StatReferenceParser(referenceValidator, - [ - new StatReferenceConstraint{ StatType = "InterruptData" } - ]); - - case "Passives": - case "PassivesOnEquip": - case "PassivesMainHand": - case "PassivesOffHand": - return new MultiValueStatReferenceParser(referenceValidator, - [ - new StatReferenceConstraint{ StatType = "PassiveData" } - ]); - - case "StatusOnEquip": - case "StatusInInventory": - return new MultiValueStatReferenceParser(referenceValidator, - [ - new StatReferenceConstraint{ StatType = "StatusData" } - ]); - - case "Cost": - case "UseCosts": - case "DualWieldingUseCosts": - case "ActionResources": - case "TooltipUseCosts": - case "RitualCosts": - case "HitCosts": - return new UseCostsParser(referenceValidator); - - case "Damage": - case "VersatileDamage": - case "StableRoll": - return new DiceRollParser(); - - case "Template": - case "StatusEffectOverride": - case "StatusEffectOnTurn": - case "ManagedStatusEffectGroup": - case "ApplyEffect": - case "SpellEffect": - case "StatusEffect": - case "DisappearEffect": - case "PreviewEffect": - case "PositionEffect": - case "HitEffect": - case "TargetEffect": - case "BeamEffect": - case "CastEffect": - case "PrepareEffect": - case "TooltipOnSave": - return new UUIDParser(); - - case "AmountOfTargets": - return new LuaExpressionParser(); - } + 
return CreateParser(field.Type, field.EnumType, field.ReferenceTypes, definitions); + } - return CreateParser(field.Type, field.EnumType, field.ReferenceTypes, definitions); + public IStatValueParser CreateParser(string type, StatEnumeration enumType, List constraints, StatDefinitionRepository definitions) + { + if (enumType == null && definitions.Enumerations.TryGetValue(type, out StatEnumeration enumInfo) && enumInfo.Values.Count > 0) + { + enumType = enumInfo; } - public IStatValueParser CreateParser(string type, StatEnumeration enumType, List constraints, StatDefinitionRepository definitions) + if (enumType != null) { - if (enumType == null && definitions.Enumerations.TryGetValue(type, out StatEnumeration enumInfo) && enumInfo.Values.Count > 0) - { - enumType = enumInfo; + if (type == "SpellFlagList" + || type == "SpellCategoryFlags" + || type == "CinematicArenaFlags" + || type == "RestErrorFlags" + || type == "AuraFlags" + || type == "StatusEvent" + || type == "AIFlags" + || type == "WeaponFlags" + || type == "ProficiencyGroupFlags" + || type == "InterruptContext" + || type == "InterruptDefaultValue" + || type == "AttributeFlags" + || type == "PassiveFlags" + || type == "ResistanceFlags" + || type == "LineOfSightFlags" + || type == "StatusPropertyFlags" + || type == "StatusGroupFlags" + || type == "StatsFunctorContext") + { + return new MultiValueEnumParser(enumType); } - - if (enumType != null) + else { - if (type == "SpellFlagList" - || type == "SpellCategoryFlags" - || type == "CinematicArenaFlags" - || type == "RestErrorFlags" - || type == "AuraFlags" - || type == "StatusEvent" - || type == "AIFlags" - || type == "WeaponFlags" - || type == "ProficiencyGroupFlags" - || type == "InterruptContext" - || type == "InterruptDefaultValue" - || type == "AttributeFlags" - || type == "PassiveFlags" - || type == "ResistanceFlags" - || type == "LineOfSightFlags" - || type == "StatusPropertyFlags" - || type == "StatusGroupFlags" - || type == "StatsFunctorContext") - { - return new MultiValueEnumParser(enumType); - } - else - { - return new EnumParser(enumType); - } + return new EnumParser(enumType); } + } - return type switch - { - "Boolean" => new BooleanParser(), - "ConstantInt" or "Int" => new Int32Parser(), - "ConstantFloat" or "Float" => new FloatParser(), - "String" or "FixedString" or "TranslatedString" => new StringParser(), - "Guid" => new UUIDParser(), - "Requirements" => new ExpressionParser("Requirements", definitions, this, ExpressionType.Functor), - "StatsFunctors" => new ExpressionParser("Properties", definitions, this, ExpressionType.Functor), - "Lua" or "RollConditions" or "TargetConditions" or "Conditions" => new LuaExpressionParser(), - "UseCosts" => new UseCostsParser(referenceValidator), - "StatReference" => new StatReferenceParser(referenceValidator, constraints), - "StatusId" => new AnyParser(new List { - new EnumParser(definitions.Enumerations["EngineStatusType"]), - new StatReferenceParser(referenceValidator, - [ - new StatReferenceConstraint{ StatType = "StatusData" } - ]) - }, "Expected a status name"), - "ResurrectTypes" => new MultiValueEnumParser(definitions.Enumerations["ResurrectType"]), - "StatusIdOrGroup" => new AnyParser(new List { - new EnumParser(definitions.Enumerations["StatusGroupFlags"]), - new EnumParser(definitions.Enumerations["EngineStatusType"]), - new StatReferenceParser(referenceValidator, - [ - new StatReferenceConstraint{ StatType = "StatusData" } - ]) - }, "Expected a status or StatusGroup name"), - "SummonDurationOrInt" => new AnyParser(new 
List { - new EnumParser(definitions.Enumerations["SummonDuration"]), - new Int32Parser() - }), - "AllOrDamageType" => new AnyParser(new List { - new EnumParser(definitions.Enumerations["AllEnum"]), - new EnumParser(definitions.Enumerations["Damage Type"]), - }), - "RollAdjustmentTypeOrDamageType" => new AnyParser(new List { - new EnumParser(definitions.Enumerations["RollAdjustmentType"]), - new EnumParser(definitions.Enumerations["Damage Type"]), - }), - "AbilityOrAttackRollAbility" => new AnyParser(new List { - new EnumParser(definitions.Enumerations["Ability"]), - new EnumParser(definitions.Enumerations["AttackRollAbility"]), - }), - "DamageTypeOrDealDamageWeaponDamageType" => new AnyParser(new List { - new EnumParser(definitions.Enumerations["Damage Type"]), - new EnumParser(definitions.Enumerations["DealDamageWeaponDamageType"]), - }), - "SpellId" => new StatReferenceParser(referenceValidator, + return type switch + { + "Boolean" => new BooleanParser(), + "ConstantInt" or "Int" => new Int32Parser(), + "ConstantFloat" or "Float" => new FloatParser(), + "String" or "FixedString" or "TranslatedString" => new StringParser(), + "Guid" => new UUIDParser(), + "Requirements" => new ExpressionParser("Requirements", definitions, this, ExpressionType.Functor), + "StatsFunctors" => new ExpressionParser("Properties", definitions, this, ExpressionType.Functor), + "Lua" or "RollConditions" or "TargetConditions" or "Conditions" => new LuaExpressionParser(), + "UseCosts" => new UseCostsParser(referenceValidator), + "StatReference" => new StatReferenceParser(referenceValidator, constraints), + "StatusId" => new AnyParser(new List { + new EnumParser(definitions.Enumerations["EngineStatusType"]), + new StatReferenceParser(referenceValidator, [ - new StatReferenceConstraint{ StatType = "SpellData" } - ]), - "Interrupt" => new StatReferenceParser(referenceValidator, + new StatReferenceConstraint{ StatType = "StatusData" } + ]) + }, "Expected a status name"), + "ResurrectTypes" => new MultiValueEnumParser(definitions.Enumerations["ResurrectType"]), + "StatusIdOrGroup" => new AnyParser(new List { + new EnumParser(definitions.Enumerations["StatusGroupFlags"]), + new EnumParser(definitions.Enumerations["EngineStatusType"]), + new StatReferenceParser(referenceValidator, [ - new StatReferenceConstraint{ StatType = "InterruptData" } - ]), - // THESE NEED TO BE FIXED! 
- "StatusIDs" => new StringParser(), - _ => throw new ArgumentException($"Could not create parser for type '{type}'"), - }; - } + new StatReferenceConstraint{ StatType = "StatusData" } + ]) + }, "Expected a status or StatusGroup name"), + "SummonDurationOrInt" => new AnyParser(new List { + new EnumParser(definitions.Enumerations["SummonDuration"]), + new Int32Parser() + }), + "AllOrDamageType" => new AnyParser(new List { + new EnumParser(definitions.Enumerations["AllEnum"]), + new EnumParser(definitions.Enumerations["Damage Type"]), + }), + "RollAdjustmentTypeOrDamageType" => new AnyParser(new List { + new EnumParser(definitions.Enumerations["RollAdjustmentType"]), + new EnumParser(definitions.Enumerations["Damage Type"]), + }), + "AbilityOrAttackRollAbility" => new AnyParser(new List { + new EnumParser(definitions.Enumerations["Ability"]), + new EnumParser(definitions.Enumerations["AttackRollAbility"]), + }), + "DamageTypeOrDealDamageWeaponDamageType" => new AnyParser(new List { + new EnumParser(definitions.Enumerations["Damage Type"]), + new EnumParser(definitions.Enumerations["DealDamageWeaponDamageType"]), + }), + "SpellId" => new StatReferenceParser(referenceValidator, + [ + new StatReferenceConstraint{ StatType = "SpellData" } + ]), + "Interrupt" => new StatReferenceParser(referenceValidator, + [ + new StatReferenceConstraint{ StatType = "InterruptData" } + ]), + // THESE NEED TO BE FIXED! + "StatusIDs" => new StringParser(), + _ => throw new ArgumentException($"Could not create parser for type '{type}'"), + }; } } diff --git a/LSLib/LS/Story/Adapter.cs b/LSLib/LS/Story/Adapter.cs index c7cc799b..d08154a8 100644 --- a/LSLib/LS/Story/Adapter.cs +++ b/LSLib/LS/Story/Adapter.cs @@ -2,170 +2,169 @@ using System.Collections.Generic; using System.IO; -namespace LSLib.LS.Story +namespace LSLib.LS.Story; + +public class Adapter : OsirisSerializable { - public class Adapter : OsirisSerializable + /// + /// Unique identifier of this adapter + /// + public UInt32 Index; + /// + /// Constant output values + /// + public Tuple Constants; + /// + /// Contains input logical column indices for each output physical column. + /// A -1 means that the output column is a constant or null value; otherwise + /// the output column maps to the specified logical index from the input tuple. + /// + public List LogicalIndices; + /// + /// Logical index => physical index map of the output tuple + /// + public Dictionary LogicalToPhysicalMap; + /// + /// Node that we're attached to + /// + public Node OwnerNode; + + public void Read(OsiReader reader) { - /// - /// Unique identifier of this adapter - /// - public UInt32 Index; - /// - /// Constant output values - /// - public Tuple Constants; - /// - /// Contains input logical column indices for each output physical column. - /// A -1 means that the output column is a constant or null value; otherwise - /// the output column maps to the specified logical index from the input tuple. 
- /// - public List LogicalIndices; - /// - /// Logical index => physical index map of the output tuple - /// - public Dictionary LogicalToPhysicalMap; - /// - /// Node that we're attached to - /// - public Node OwnerNode; + Index = reader.ReadUInt32(); + Constants = new Tuple(); + Constants.Read(reader); - public void Read(OsiReader reader) + LogicalIndices = new List(); + var count = reader.ReadByte(); + while (count-- > 0) { - Index = reader.ReadUInt32(); - Constants = new Tuple(); - Constants.Read(reader); - - LogicalIndices = new List(); - var count = reader.ReadByte(); - while (count-- > 0) - { - LogicalIndices.Add(reader.ReadSByte()); - } - - LogicalToPhysicalMap = new Dictionary(); - count = reader.ReadByte(); - while (count-- > 0) - { - var key = reader.ReadByte(); - var value = reader.ReadByte(); - LogicalToPhysicalMap.Add(key, value); - } + LogicalIndices.Add(reader.ReadSByte()); } - public void Write(OsiWriter writer) + LogicalToPhysicalMap = new Dictionary(); + count = reader.ReadByte(); + while (count-- > 0) { - Constants.Write(writer); + var key = reader.ReadByte(); + var value = reader.ReadByte(); + LogicalToPhysicalMap.Add(key, value); + } + } - writer.Write((byte)LogicalIndices.Count); - foreach (var index in LogicalIndices) - { - writer.Write(index); - } + public void Write(OsiWriter writer) + { + Constants.Write(writer); - writer.Write((byte)LogicalToPhysicalMap.Count); - foreach (var pair in LogicalToPhysicalMap) - { - writer.Write(pair.Key); - writer.Write(pair.Value); - } + writer.Write((byte)LogicalIndices.Count); + foreach (var index in LogicalIndices) + { + writer.Write(index); + } + + writer.Write((byte)LogicalToPhysicalMap.Count); + foreach (var pair in LogicalToPhysicalMap) + { + writer.Write(pair.Key); + writer.Write(pair.Value); } + } - public Tuple Adapt(Tuple columns) + public Tuple Adapt(Tuple columns) + { + var result = new Tuple(); + for (var i = 0; i < LogicalIndices.Count; i++) { - var result = new Tuple(); - for (var i = 0; i < LogicalIndices.Count; i++) + var index = LogicalIndices[i]; + // If a logical index is present, emit a column from the input tuple + if (index != -1) { - var index = LogicalIndices[i]; - // If a logical index is present, emit a column from the input tuple - if (index != -1) + if (columns.Logical.ContainsKey(index)) { - if (columns.Logical.ContainsKey(index)) - { - var value = columns.Logical[index]; - result.Physical.Add(value); - } - else if (index == 0) - { - // Special case for savegames where adapters are padded with 0 logical indices - var nullValue = new Variable - { - TypeId = (uint)Value.Type.None, - Unused = true - }; - result.Physical.Add(nullValue); - } - else - { - throw new InvalidDataException($"Logical column index {index} does not exist in tuple."); - } + var value = columns.Logical[index]; + result.Physical.Add(value); } - // Otherwise check if a constant is mapped to the specified logical index - else if (Constants.Logical.ContainsKey(i)) + else if (index == 0) { - var value = Constants.Logical[i]; - result.Physical.Add(value); + // Special case for savegames where adapters are padded with 0 logical indices + var nullValue = new Variable + { + TypeId = (uint)Value.Type.None, + Unused = true + }; + result.Physical.Add(nullValue); } - // If we haven't found a constant, emit a null variable else { - var nullValue = new Variable(); - nullValue.TypeId = (uint)Value.Type.None; - nullValue.Unused = true; - result.Physical.Add(nullValue); + throw new InvalidDataException($"Logical column index {index} does not exist 
in tuple."); } } - - // Generate logical => physical mappings for the output tuple - foreach (var map in LogicalToPhysicalMap) + // Otherwise check if a constant is mapped to the specified logical index + else if (Constants.Logical.ContainsKey(i)) { - result.Logical.Add(map.Key, result.Physical[map.Value]); + var value = Constants.Logical[i]; + result.Physical.Add(value); } + // If we haven't found a constant, emit a null variable + else + { + var nullValue = new Variable(); + nullValue.TypeId = (uint)Value.Type.None; + nullValue.Unused = true; + result.Physical.Add(nullValue); + } + } - return result; + // Generate logical => physical mappings for the output tuple + foreach (var map in LogicalToPhysicalMap) + { + result.Logical.Add(map.Key, result.Physical[map.Value]); } - public void DebugDump(TextWriter writer, Story story) + return result; + } + + public void DebugDump(TextWriter writer, Story story) + { + writer.Write("Adapter - "); + if (OwnerNode != null && OwnerNode.Name.Length > 0) { - writer.Write("Adapter - "); - if (OwnerNode != null && OwnerNode.Name.Length > 0) - { - writer.WriteLine("Node {0}({1})", OwnerNode.Name, OwnerNode.NumParams); - } - else if (OwnerNode != null) - { - writer.WriteLine("Node <{0}>", OwnerNode.TypeName()); - } - else - { - writer.WriteLine("(Not owned)"); - } + writer.WriteLine("Node {0}({1})", OwnerNode.Name, OwnerNode.NumParams); + } + else if (OwnerNode != null) + { + writer.WriteLine("Node <{0}>", OwnerNode.TypeName()); + } + else + { + writer.WriteLine("(Not owned)"); + } - if (Constants.Logical.Count > 0) - { - writer.Write(" Constants: "); - Constants.DebugDump(writer, story); - writer.WriteLine(""); - } + if (Constants.Logical.Count > 0) + { + writer.Write(" Constants: "); + Constants.DebugDump(writer, story); + writer.WriteLine(""); + } - if (LogicalIndices.Count > 0) + if (LogicalIndices.Count > 0) + { + writer.Write(" Logical indices: "); + foreach (var index in LogicalIndices) { - writer.Write(" Logical indices: "); - foreach (var index in LogicalIndices) - { - writer.Write("{0}, ", index); - } - writer.WriteLine(""); + writer.Write("{0}, ", index); } + writer.WriteLine(""); + } - if (LogicalToPhysicalMap.Count > 0) + if (LogicalToPhysicalMap.Count > 0) + { + writer.Write(" Logical to physical mappings: "); + foreach (var pair in LogicalToPhysicalMap) { - writer.Write(" Logical to physical mappings: "); - foreach (var pair in LogicalToPhysicalMap) - { - writer.Write("{0} -> {1}, ", pair.Key, pair.Value); - } - writer.WriteLine(""); + writer.Write("{0} -> {1}, ", pair.Key, pair.Value); } + writer.WriteLine(""); } } } diff --git a/LSLib/LS/Story/Call.cs b/LSLib/LS/Story/Call.cs index bec1eb95..9c28a722 100644 --- a/LSLib/LS/Story/Call.cs +++ b/LSLib/LS/Story/Call.cs @@ -2,122 +2,121 @@ using System.Collections.Generic; using System.IO; -namespace LSLib.LS.Story +namespace LSLib.LS.Story; + +public class Call : OsirisSerializable { - public class Call : OsirisSerializable - { - public string Name; - public List Parameters; - public bool Negate; - public Int32 GoalIdOrDebugHook; + public string Name; + public List Parameters; + public bool Negate; + public Int32 GoalIdOrDebugHook; - public void Read(OsiReader reader) + public void Read(OsiReader reader) + { + Name = reader.ReadString(); + if (Name.Length > 0) { - Name = reader.ReadString(); - if (Name.Length > 0) + var hasParams = reader.ReadByte(); + if (hasParams > 0) { - var hasParams = reader.ReadByte(); - if (hasParams > 0) + Parameters = new List(); + var numParams = reader.ReadByte(); + 
while (numParams-- > 0) { - Parameters = new List(); - var numParams = reader.ReadByte(); - while (numParams-- > 0) - { - TypedValue param; - var type = reader.ReadByte(); - if (type == 1) - param = new Variable(); - else - param = new TypedValue(); - param.Read(reader); - Parameters.Add(param); - } + TypedValue param; + var type = reader.ReadByte(); + if (type == 1) + param = new Variable(); + else + param = new TypedValue(); + param.Read(reader); + Parameters.Add(param); } - - Negate = reader.ReadBoolean(); } - GoalIdOrDebugHook = reader.ReadInt32(); + Negate = reader.ReadBoolean(); } - public void Write(OsiWriter writer) + GoalIdOrDebugHook = reader.ReadInt32(); + } + + public void Write(OsiWriter writer) + { + writer.Write(Name); + if (Name.Length > 0) { - writer.Write(Name); - if (Name.Length > 0) + writer.Write(Parameters != null); + if (Parameters != null) { - writer.Write(Parameters != null); - if (Parameters != null) + writer.Write((byte)Parameters.Count); + foreach (var param in Parameters) { - writer.Write((byte)Parameters.Count); - foreach (var param in Parameters) - { - writer.Write(param is Variable); - param.Write(writer); - } + writer.Write(param is Variable); + param.Write(writer); } - - writer.Write(Negate); } - writer.Write(GoalIdOrDebugHook); + writer.Write(Negate); } - public void DebugDump(TextWriter writer, Story story) + writer.Write(GoalIdOrDebugHook); + } + + public void DebugDump(TextWriter writer, Story story) + { + if (Name.Length > 0) { - if (Name.Length > 0) + if (Negate) writer.Write("!"); + writer.Write("{0}(", Name); + if (Parameters != null) { - if (Negate) writer.Write("!"); - writer.Write("{0}(", Name); - if (Parameters != null) + for (var i = 0; i < Parameters.Count; i++) { - for (var i = 0; i < Parameters.Count; i++) - { - Parameters[i].DebugDump(writer, story); - if (i < Parameters.Count - 1) writer.Write(", "); - } + Parameters[i].DebugDump(writer, story); + if (i < Parameters.Count - 1) writer.Write(", "); } - - writer.Write(") "); } - if (GoalIdOrDebugHook != 0) + writer.Write(") "); + } + + if (GoalIdOrDebugHook != 0) + { + if (GoalIdOrDebugHook < 0) { - if (GoalIdOrDebugHook < 0) - { - writer.Write("", -GoalIdOrDebugHook); - } - else - { - var goal = story.Goals[(uint)GoalIdOrDebugHook]; - writer.Write("", GoalIdOrDebugHook, goal.Name); - } + writer.Write("", -GoalIdOrDebugHook); + } + else + { + var goal = story.Goals[(uint)GoalIdOrDebugHook]; + writer.Write("", GoalIdOrDebugHook, goal.Name); } } + } - public void MakeScript(TextWriter writer, Story story, Tuple tuple, bool printTypes) + public void MakeScript(TextWriter writer, Story story, Tuple tuple, bool printTypes) + { + if (Name.Length > 0) { - if (Name.Length > 0) + if (Negate) writer.Write("NOT "); + writer.Write("{0}(", Name); + if (Parameters != null) { - if (Negate) writer.Write("NOT "); - writer.Write("{0}(", Name); - if (Parameters != null) + for (var i = 0; i < Parameters.Count; i++) { - for (var i = 0; i < Parameters.Count; i++) - { - var param = Parameters[i]; - param.MakeScript(writer, story, tuple, printTypes); - if (i < Parameters.Count - 1) - writer.Write(", "); - } + var param = Parameters[i]; + param.MakeScript(writer, story, tuple, printTypes); + if (i < Parameters.Count - 1) + writer.Write(", "); } - - writer.Write(")"); } - if (GoalIdOrDebugHook > 0) - { - writer.Write("GoalCompleted"); - } + writer.Write(")"); + } + + if (GoalIdOrDebugHook > 0) + { + writer.Write("GoalCompleted"); } } } diff --git a/LSLib/LS/Story/Common.cs b/LSLib/LS/Story/Common.cs index 
7047c734..c6c1d93b 100644 --- a/LSLib/LS/Story/Common.cs +++ b/LSLib/LS/Story/Common.cs @@ -5,526 +5,525 @@ using System.Reflection; using System.Text; -namespace LSLib.LS.Story +namespace LSLib.LS.Story; + +public interface OsirisSerializable { - public interface OsirisSerializable - { - void Read(OsiReader reader); - void Write(OsiWriter writer); - } + void Read(OsiReader reader); + void Write(OsiWriter writer); +} +/// +/// Osiris file format version numbers +/// +public static class OsiVersion +{ /// - /// Osiris file format version numbers + /// Initial version /// - public static class OsiVersion - { - /// - /// Initial version - /// - public const uint VerInitial = 0x0100; - - /// - /// Added Init/Exit calls to goals - /// - public const uint VerAddInitExitCalls = 0x0101; - - /// - /// Added version string at the beginning of the OSI file - /// - public const uint VerAddVersionString = 0x0102; - - /// - /// Added debug flags in the header - /// - public const uint VerAddDebugFlags = 0x0103; - - /// - /// Started scrambling strings by xor-ing with 0xAD - /// - public const uint VerScramble = 0x0104; - - /// - /// Added custom (string) types - /// - public const uint VerAddTypeMap = 0x0105; - - /// - /// Added Query nodes - /// - public const uint VerAddQuery = 0x0106; - - /// - /// Types can be aliases of any builtin type, not just strings - /// - public const uint VerTypeAliases = 0x0109; - - /// - /// Added INT64, GUIDSTRING types - /// - public const uint VerEnhancedTypes = 0x010a; - - /// - /// Added external string table - /// - public const uint VerExternalStringTable = 0x010b; - - /// - /// Removed external string table - /// - public const uint VerRemoveExternalStringTable = 0x010c; - - /// - /// Added enumerations - /// - public const uint VerEnums = 0x010d; - - /// - /// Last supported Osi version - /// - public const uint VerLastSupported = VerEnums; - } - - public class OsiReader : BinaryReader - { - public byte Scramble = 0x00; - public UInt32 MinorVersion; - public UInt32 MajorVersion; - // Use 16-bit instead of 32-bit type IDs, BG3 Patch8+ - public bool? ShortTypeIds = null; - public Dictionary TypeAliases = new Dictionary(); - // TODO: Make RO! 
- public Story Story; - - public uint Ver - { - get { return ((uint)MajorVersion << 8) | (uint)MinorVersion; } - } + public const uint VerInitial = 0x0100; - public OsiReader(Stream stream, Story story) - : base(stream) - { - Story = story; - } + /// + /// Added Init/Exit calls to goals + /// + public const uint VerAddInitExitCalls = 0x0101; - public override string ReadString() - { - List bytes = new List(); - while (true) - { - var b = (byte)(ReadByte() ^ Scramble); - if (b != 0) - { - bytes.Add(b); - } - else - { - break; - } - } + /// + /// Added version string at the beginning of the OSI file + /// + public const uint VerAddVersionString = 0x0102; - return Encoding.UTF8.GetString(bytes.ToArray()); - } + /// + /// Added debug flags in the header + /// + public const uint VerAddDebugFlags = 0x0103; - public override bool ReadBoolean() - { - var b = ReadByte(); - if (b != 0 && b != 1) - { - throw new InvalidDataException("Invalid boolean value; expected 0 or 1."); - } + /// + /// Started scrambling strings by xor-ing with 0xAD + /// + public const uint VerScramble = 0x0104; - return b == 1; - } + /// + /// Added custom (string) types + /// + public const uint VerAddTypeMap = 0x0105; - public Guid ReadGuid() - { - var guid = ReadBytes(16); - return new Guid(guid); - } + /// + /// Added Query nodes + /// + public const uint VerAddQuery = 0x0106; - public List ReadList() where T : OsirisSerializable, new() - { - var items = new List(); - ReadList(items); - return items; - } + /// + /// Types can be aliases of any builtin type, not just strings + /// + public const uint VerTypeAliases = 0x0109; - public void ReadList(List items) where T : OsirisSerializable, new() - { - var count = ReadUInt32(); - while (count-- > 0) - { - var item = new T(); - item.Read(this); - items.Add(item); - } - } + /// + /// Added INT64, GUIDSTRING types + /// + public const uint VerEnhancedTypes = 0x010a; - public List ReadRefList() where T : OsiReference, new() - { - var items = new List(); - ReadRefList(items); - return items; - } + /// + /// Added external string table + /// + public const uint VerExternalStringTable = 0x010b; + + /// + /// Removed external string table + /// + public const uint VerRemoveExternalStringTable = 0x010c; + + /// + /// Added enumerations + /// + public const uint VerEnums = 0x010d; - public void ReadRefList(List items) where T : OsiReference, new() + /// + /// Last supported Osi version + /// + public const uint VerLastSupported = VerEnums; +} + +public class OsiReader : BinaryReader +{ + public byte Scramble = 0x00; + public UInt32 MinorVersion; + public UInt32 MajorVersion; + // Use 16-bit instead of 32-bit type IDs, BG3 Patch8+ + public bool? ShortTypeIds = null; + public Dictionary TypeAliases = new Dictionary(); + // TODO: Make RO! 
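// --- Illustrative sketch, not part of the patch (assumes System.Linq) ---------
// The version word and string scrambling used by the OSI reader/writer in this
// file; the string literal is just an example value.
uint ver = (1u << 8) | 0x0d;                 // MajorVersion 1, MinorVersion 0x0d == OsiVersion.VerEnums
bool hasEnums = ver >= OsiVersion.VerEnums;  // true: the enums section is present

// From VerScramble on, strings are XOR-ed with 0xAD and zero-terminated on disk;
// the writer emits the scramble byte itself as the terminator, which decodes to 0.
byte scramble = 0xAD;
byte[] onDisk = "GoalCompleted".Select(c => (byte)(c ^ scramble)).Append(scramble).ToArray();
string decoded = new string(onDisk.Select(b => (char)(b ^ scramble)).TakeWhile(ch => ch != '\0').ToArray()); // "GoalCompleted"
// ------------------------------------------------------------------------------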
+ public Story Story; + + public uint Ver + { + get { return ((uint)MajorVersion << 8) | (uint)MinorVersion; } + } + + public OsiReader(Stream stream, Story story) + : base(stream) + { + Story = story; + } + + public override string ReadString() + { + List bytes = new List(); + while (true) { - var count = ReadUInt32(); - while (count-- > 0) + var b = (byte)(ReadByte() ^ Scramble); + if (b != 0) + { + bytes.Add(b); + } + else { - var item = new T(); - item.BindStory(Story); - item.Read(this); - items.Add(item); + break; } } - public NodeReference ReadNodeRef() - { - var nodeRef = new NodeReference(); - nodeRef.BindStory(Story); - nodeRef.Read(this); - return nodeRef; - } + return Encoding.UTF8.GetString(bytes.ToArray()); + } - public AdapterReference ReadAdapterRef() + public override bool ReadBoolean() + { + var b = ReadByte(); + if (b != 0 && b != 1) { - var adapterRef = new AdapterReference(); - adapterRef.BindStory(Story); - adapterRef.Read(this); - return adapterRef; + throw new InvalidDataException("Invalid boolean value; expected 0 or 1."); } - public DatabaseReference ReadDatabaseRef() - { - var databaseRef = new DatabaseReference(); - databaseRef.BindStory(Story); - databaseRef.Read(this); - return databaseRef; - } + return b == 1; + } - public GoalReference ReadGoalRef() - { - var goalRef = new GoalReference(); - goalRef.BindStory(Story); - goalRef.Read(this); - return goalRef; - } + public Guid ReadGuid() + { + var guid = ReadBytes(16); + return new Guid(guid); } - public class OsiWriter : BinaryWriter + public List ReadList() where T : OsirisSerializable, new() { - public byte Scramble = 0x00; - public UInt32 MinorVersion; - public UInt32 MajorVersion; - // Use 16-bit instead of 32-bit type IDs, BG3 Patch8+ - public bool ShortTypeIds; - public Dictionary TypeAliases = new Dictionary(); - public Dictionary Enums = new Dictionary(); + var items = new List(); + ReadList(items); + return items; + } - public uint Ver + public void ReadList(List items) where T : OsirisSerializable, new() + { + var count = ReadUInt32(); + while (count-- > 0) { - get { return ((uint)MajorVersion << 8) | (uint)MinorVersion; } + var item = new T(); + item.Read(this); + items.Add(item); } + } - public OsiWriter(Stream stream, bool leaveOpen) - : base(stream, Encoding.UTF8, leaveOpen) - { - } + public List ReadRefList() where T : OsiReference, new() + { + var items = new List(); + ReadRefList(items); + return items; + } - public override void Write(String s) + public void ReadRefList(List items) where T : OsiReference, new() + { + var count = ReadUInt32(); + while (count-- > 0) { - var bytes = Encoding.UTF8.GetBytes(s); - for (var i = 0; i < bytes.Length; i++) - { - bytes[i] = (byte)(bytes[i] ^ Scramble); - } - Write(bytes, 0, bytes.Length); - Write(Scramble); + var item = new T(); + item.BindStory(Story); + item.Read(this); + items.Add(item); } + } - public override void Write(bool b) - { - Write((byte)(b ? 
1 : 0)); - } + public NodeReference ReadNodeRef() + { + var nodeRef = new NodeReference(); + nodeRef.BindStory(Story); + nodeRef.Read(this); + return nodeRef; + } - public void Write(Guid guid) - { - var bytes = guid.ToByteArray(); - Write(bytes, 0, bytes.Length); - } + public AdapterReference ReadAdapterRef() + { + var adapterRef = new AdapterReference(); + adapterRef.BindStory(Story); + adapterRef.Read(this); + return adapterRef; + } - public void WriteList(List list) where T : OsirisSerializable - { - Write((UInt32)list.Count); - foreach (var item in list) - { - item.Write(this); - } - } + public DatabaseReference ReadDatabaseRef() + { + var databaseRef = new DatabaseReference(); + databaseRef.BindStory(Story); + databaseRef.Read(this); + return databaseRef; + } + + public GoalReference ReadGoalRef() + { + var goalRef = new GoalReference(); + goalRef.BindStory(Story); + goalRef.Read(this); + return goalRef; } +} - public class SaveFileHeader : OsirisSerializable +public class OsiWriter : BinaryWriter +{ + public byte Scramble = 0x00; + public UInt32 MinorVersion; + public UInt32 MajorVersion; + // Use 16-bit instead of 32-bit type IDs, BG3 Patch8+ + public bool ShortTypeIds; + public Dictionary TypeAliases = new Dictionary(); + public Dictionary Enums = new Dictionary(); + + public uint Ver { - public string Version; - public byte MajorVersion; - public byte MinorVersion; - public bool BigEndian; - public byte Unused; - public UInt32 DebugFlags; + get { return ((uint)MajorVersion << 8) | (uint)MinorVersion; } + } + + public OsiWriter(Stream stream, bool leaveOpen) + : base(stream, Encoding.UTF8, leaveOpen) + { + } - public uint Ver + public override void Write(String s) + { + var bytes = Encoding.UTF8.GetBytes(s); + for (var i = 0; i < bytes.Length; i++) { - get { return ((uint)MajorVersion << 8) | (uint)MinorVersion; } + bytes[i] = (byte)(bytes[i] ^ Scramble); } + Write(bytes, 0, bytes.Length); + Write(Scramble); + } + + public override void Write(bool b) + { + Write((byte)(b ? 
1 : 0)); + } + + public void Write(Guid guid) + { + var bytes = guid.ToByteArray(); + Write(bytes, 0, bytes.Length); + } - public void Read(OsiReader reader) + public void WriteList(List list) where T : OsirisSerializable + { + Write((UInt32)list.Count); + foreach (var item in list) { - reader.ReadByte(); - Version = reader.ReadString(); - MajorVersion = reader.ReadByte(); - MinorVersion = reader.ReadByte(); - BigEndian = reader.ReadBoolean(); - Unused = reader.ReadByte(); - - if (Ver >= OsiVersion.VerAddVersionString) - reader.ReadBytes(0x80); // Version string buffer - - if (Ver >= OsiVersion.VerAddDebugFlags) - DebugFlags = reader.ReadUInt32(); - else - DebugFlags = 0; + item.Write(this); } + } +} - public void Write(OsiWriter writer) - { - writer.Write((byte)0); - writer.Write(Version); - writer.Write(MajorVersion); - writer.Write(MinorVersion); - writer.Write(BigEndian); - writer.Write(Unused); - - if (Ver >= OsiVersion.VerAddVersionString) - { - var versionString = String.Format("{0}.{1}", MajorVersion, MinorVersion); - var versionBytes = Encoding.UTF8.GetBytes(versionString); - byte[] version = new byte[0x80]; - versionBytes.CopyTo(version, 0); - writer.Write(version, 0, version.Length); - } +public class SaveFileHeader : OsirisSerializable +{ + public string Version; + public byte MajorVersion; + public byte MinorVersion; + public bool BigEndian; + public byte Unused; + public UInt32 DebugFlags; + + public uint Ver + { + get { return ((uint)MajorVersion << 8) | (uint)MinorVersion; } + } - if (Ver >= OsiVersion.VerAddDebugFlags) - writer.Write(DebugFlags); - } + public void Read(OsiReader reader) + { + reader.ReadByte(); + Version = reader.ReadString(); + MajorVersion = reader.ReadByte(); + MinorVersion = reader.ReadByte(); + BigEndian = reader.ReadBoolean(); + Unused = reader.ReadByte(); + + if (Ver >= OsiVersion.VerAddVersionString) + reader.ReadBytes(0x80); // Version string buffer + + if (Ver >= OsiVersion.VerAddDebugFlags) + DebugFlags = reader.ReadUInt32(); + else + DebugFlags = 0; } - public class OsirisType : OsirisSerializable + public void Write(OsiWriter writer) { - public byte Index; - public byte Alias; - public string Name; - public bool IsBuiltin; + writer.Write((byte)0); + writer.Write(Version); + writer.Write(MajorVersion); + writer.Write(MinorVersion); + writer.Write(BigEndian); + writer.Write(Unused); - public static OsirisType MakeBuiltin(byte index, string name) + if (Ver >= OsiVersion.VerAddVersionString) { - var type = new OsirisType(); - type.Index = index; - type.Alias = 0; - type.Name = name; - type.IsBuiltin = true; - return type; + var versionString = String.Format("{0}.{1}", MajorVersion, MinorVersion); + var versionBytes = Encoding.UTF8.GetBytes(versionString); + byte[] version = new byte[0x80]; + versionBytes.CopyTo(version, 0); + writer.Write(version, 0, version.Length); } - public void Read(OsiReader reader) - { - Name = reader.ReadString(); - Index = reader.ReadByte(); - IsBuiltin = false; + if (Ver >= OsiVersion.VerAddDebugFlags) + writer.Write(DebugFlags); + } +} - if (reader.Ver >= OsiVersion.VerTypeAliases) - { - Alias = reader.ReadByte(); - } - else - { - // D:OS 1 only supported string aliases - Alias = (int)Value.Type_OS1.String; - } - } +public class OsirisType : OsirisSerializable +{ + public byte Index; + public byte Alias; + public string Name; + public bool IsBuiltin; - public void Write(OsiWriter writer) - { - writer.Write(Name); - writer.Write(Index); + public static OsirisType MakeBuiltin(byte index, string name) + { + var type = new 
OsirisType(); + type.Index = index; + type.Alias = 0; + type.Name = name; + type.IsBuiltin = true; + return type; + } - if (writer.Ver >= OsiVersion.VerTypeAliases) - { - writer.Write(Alias); - } - } + public void Read(OsiReader reader) + { + Name = reader.ReadString(); + Index = reader.ReadByte(); + IsBuiltin = false; - public void DebugDump(TextWriter writer) + if (reader.Ver >= OsiVersion.VerTypeAliases) { - if (Alias == 0) - { - writer.WriteLine("{0}: {1}", Index, Name); - } - else - { - writer.WriteLine("{0}: {1} (Alias: {2})", Index, Name, Alias); - } + Alias = reader.ReadByte(); + } + else + { + // D:OS 1 only supported string aliases + Alias = (int)Value.Type_OS1.String; } } - public class OsirisEnumElement : OsirisSerializable + public void Write(OsiWriter writer) { - public String Name; - public UInt64 Value; + writer.Write(Name); + writer.Write(Index); - - public void Read(OsiReader reader) + if (writer.Ver >= OsiVersion.VerTypeAliases) { - Name = reader.ReadString(); - Value = reader.ReadUInt64(); + writer.Write(Alias); } + } - public void Write(OsiWriter writer) + public void DebugDump(TextWriter writer) + { + if (Alias == 0) { - writer.Write(Name); - writer.Write(Value); + writer.WriteLine("{0}: {1}", Index, Name); } - - public void DebugDump(TextWriter writer) + else { - writer.WriteLine("{0}: {1}", Name, Value); + writer.WriteLine("{0}: {1} (Alias: {2})", Index, Name, Alias); } } +} + +public class OsirisEnumElement : OsirisSerializable +{ + public String Name; + public UInt64 Value; - public class OsirisEnum : OsirisSerializable + + public void Read(OsiReader reader) { - public UInt16 UnderlyingType; - public List Elements; + Name = reader.ReadString(); + Value = reader.ReadUInt64(); + } + public void Write(OsiWriter writer) + { + writer.Write(Name); + writer.Write(Value); + } - public void Read(OsiReader reader) - { - UnderlyingType = reader.ReadUInt16(); - var elements = reader.ReadUInt32(); - Elements = new List(); - while (elements-- > 0) - { - var e = new OsirisEnumElement(); - e.Read(reader); - Elements.Add(e); - } - } + public void DebugDump(TextWriter writer) + { + writer.WriteLine("{0}: {1}", Name, Value); + } +} - public void Write(OsiWriter writer) - { - writer.Write(UnderlyingType); - writer.Write((UInt32)Elements.Count); +public class OsirisEnum : OsirisSerializable +{ + public UInt16 UnderlyingType; + public List Elements; - foreach (var e in Elements) - { - e.Write(writer); - } - } - public void DebugDump(TextWriter writer) + public void Read(OsiReader reader) + { + UnderlyingType = reader.ReadUInt16(); + var elements = reader.ReadUInt32(); + Elements = new List(); + while (elements-- > 0) { - writer.WriteLine("Type {0}", UnderlyingType); - foreach (var e in Elements) - { - e.DebugDump(writer); - } + var e = new OsirisEnumElement(); + e.Read(reader); + Elements.Add(e); } } - public class OsirisDivObject : OsirisSerializable + public void Write(OsiWriter writer) { - public string Name; - public byte Type; - public UInt32 Key1; - public UInt32 Key2; // Some ref? - public UInt32 Key3; // Type again? 
- public UInt32 Key4; + writer.Write(UnderlyingType); + writer.Write((UInt32)Elements.Count); - public void Read(OsiReader reader) + foreach (var e in Elements) { - Name = reader.ReadString(); - Type = reader.ReadByte(); - Key1 = reader.ReadUInt32(); - Key2 = reader.ReadUInt32(); - Key3 = reader.ReadUInt32(); - Key4 = reader.ReadUInt32(); + e.Write(writer); } + } - public void Write(OsiWriter writer) + public void DebugDump(TextWriter writer) + { + writer.WriteLine("Type {0}", UnderlyingType); + foreach (var e in Elements) { - writer.Write(Name); - writer.Write(Type); - writer.Write(Key1); - writer.Write(Key2); - writer.Write(Key3); - writer.Write(Key4); + e.DebugDump(writer); } + } +} - public void DebugDump(TextWriter writer) - { - writer.WriteLine("{0} {1} ({2}, {3}, {4}, {5})", Type, Name, Key1, Key2, Key3, Key4); - } +public class OsirisDivObject : OsirisSerializable +{ + public string Name; + public byte Type; + public UInt32 Key1; + public UInt32 Key2; // Some ref? + public UInt32 Key3; // Type again? + public UInt32 Key4; + + public void Read(OsiReader reader) + { + Name = reader.ReadString(); + Type = reader.ReadByte(); + Key1 = reader.ReadUInt32(); + Key2 = reader.ReadUInt32(); + Key3 = reader.ReadUInt32(); + Key4 = reader.ReadUInt32(); } - public enum EntryPoint : UInt32 + public void Write(OsiWriter writer) { - // The next node is not an AND/NOT AND expression - None = 0, - // This node is on the left side of the next AND/NOT AND expression - Left = 1, - // This node is on the right side of the next AND/NOT AND expression - Right = 2 - }; + writer.Write(Name); + writer.Write(Type); + writer.Write(Key1); + writer.Write(Key2); + writer.Write(Key3); + writer.Write(Key4); + } - public class NodeEntryItem : OsirisSerializable + public void DebugDump(TextWriter writer) { - public NodeReference NodeRef; - public EntryPoint EntryPoint; - public GoalReference GoalRef; + writer.WriteLine("{0} {1} ({2}, {3}, {4}, {5})", Type, Name, Key1, Key2, Key3, Key4); + } +} - public void Read(OsiReader reader) - { - NodeRef = reader.ReadNodeRef(); - EntryPoint = (EntryPoint)reader.ReadUInt32(); - GoalRef = reader.ReadGoalRef(); - } +public enum EntryPoint : UInt32 +{ + // The next node is not an AND/NOT AND expression + None = 0, + // This node is on the left side of the next AND/NOT AND expression + Left = 1, + // This node is on the right side of the next AND/NOT AND expression + Right = 2 +}; + +public class NodeEntryItem : OsirisSerializable +{ + public NodeReference NodeRef; + public EntryPoint EntryPoint; + public GoalReference GoalRef; - public void Write(OsiWriter writer) - { - NodeRef.Write(writer); - writer.Write((UInt32)EntryPoint); - GoalRef.Write(writer); - } + public void Read(OsiReader reader) + { + NodeRef = reader.ReadNodeRef(); + EntryPoint = (EntryPoint)reader.ReadUInt32(); + GoalRef = reader.ReadGoalRef(); + } - public void DebugDump(TextWriter writer, Story story) + public void Write(OsiWriter writer) + { + NodeRef.Write(writer); + writer.Write((UInt32)EntryPoint); + GoalRef.Write(writer); + } + + public void DebugDump(TextWriter writer, Story story) + { + if (NodeRef.IsValid) { - if (NodeRef.IsValid) + writer.Write("("); + NodeRef.DebugDump(writer, story); + if (GoalRef.IsValid) { - writer.Write("("); - NodeRef.DebugDump(writer, story); - if (GoalRef.IsValid) - { - writer.Write(", Entry Point {0}, Goal {1})", EntryPoint, GoalRef.Resolve().Name); - } - else - { - writer.Write(")"); - } + writer.Write(", Entry Point {0}, Goal {1})", EntryPoint, GoalRef.Resolve().Name); } else { - 
writer.Write("(none)"); + writer.Write(")"); } } + else + { + writer.Write("(none)"); + } } } diff --git a/LSLib/LS/Story/Compiler/CompilationContext.cs b/LSLib/LS/Story/Compiler/CompilationContext.cs index bd98efd0..dc1f54fb 100644 --- a/LSLib/LS/Story/Compiler/CompilationContext.cs +++ b/LSLib/LS/Story/Compiler/CompilationContext.cs @@ -2,644 +2,643 @@ using System; using System.Collections.Generic; -namespace LSLib.LS.Story.Compiler +namespace LSLib.LS.Story.Compiler; + +/// +/// Determines the game version we're targeting during compilation. +/// +public enum TargetGame +{ + DOS2, + DOS2DE, + BG3 +} + +/// +/// Type declaration +/// +public class ValueType { + // Type ID + public uint TypeId; + // Osiris builtin type ID + public Value.Type IntrinsicTypeId; + // Type name + public String Name; + /// - /// Determines the game version we're targeting during compilation. + /// Returns whether this type is an alias of the specified type. /// - public enum TargetGame + public bool IsAliasOf(ValueType type) { - DOS2, - DOS2DE, - BG3 + return + // The base types match + IntrinsicTypeId == type.IntrinsicTypeId + // The alias ID doesn't match + && TypeId != type.TypeId + // This type is an alias type + && TypeId != (uint)IntrinsicTypeId + // The other type is a base type + && type.TypeId == (uint)type.IntrinsicTypeId; } +} - /// - /// Type declaration - /// - public class ValueType +/// +/// Parameter direction. +/// Only relevant for queries, which get both input and output parameters. +/// +public enum ParamDirection +{ + In, + Out +} + +/// +/// Osiris internal function type. +/// +public enum FunctionType +{ + // Osiris items + // Query defined by the Osiris runtime + SysQuery, + // Call defined by the Osiris runtime + SysCall, + + // Application defined items + // Event defined by the application (D:OS) + Event, + // Query defined by the application (D:OS) + Query, + // Call defined by the application (D:OS) + Call, + + // User-defined items + // Proc (~call) defined in user code + Proc, + // Query defined in user code + UserQuery, + // Database defined in user code + Database +}; + +/// +/// Function name +/// In Osiris, multiple functions are allowed with the same name, +/// if they have different arity (number of parameters). +/// +public class FunctionNameAndArity : IEquatable +{ + // Function name + public readonly String Name; + // Number of parameters + public readonly int Arity; + + public FunctionNameAndArity(String name, int arity) { - // Type ID - public uint TypeId; - // Osiris builtin type ID - public Value.Type IntrinsicTypeId; - // Type name - public String Name; - - /// - /// Returns whether this type is an alias of the specified type. 
- /// - public bool IsAliasOf(ValueType type) - { - return - // The base types match - IntrinsicTypeId == type.IntrinsicTypeId - // The alias ID doesn't match - && TypeId != type.TypeId - // This type is an alias type - && TypeId != (uint)IntrinsicTypeId - // The other type is a base type - && type.TypeId == (uint)type.IntrinsicTypeId; - } + Name = name; + Arity = arity; } + public override bool Equals(object fun) + { + return Equals(fun as FunctionNameAndArity); + } + + public bool Equals(FunctionNameAndArity fun) + { + return Name.ToLowerInvariant() == fun.Name.ToLowerInvariant() + && Arity == fun.Arity; + } + + public override int GetHashCode() + { + return Name.ToLowerInvariant().GetHashCode() | Arity; + } + + public override string ToString() + { + return Name + "(" + Arity.ToString() + ")"; + } +} + +/// +/// Function parameter (or database column, depending on the function type) +/// +public class FunctionParam +{ + // Parameter direction, i.e. either In or Out. + public ParamDirection Direction; + // Parameter type + // For builtin functions this is always taken from the function header. + // For user defined functions this is inferred from code. + public ValueType Type; + // Parameter name + public String Name; +} + +public class FunctionSignature +{ + // Type of function (call, query, database, etc.) + public FunctionType Type; + // Function name + public String Name; + // List of arguments + public List Params; + // Indicates that we were able to infer the type of all parameters + public Boolean FullyTyped; + // Indicates that the database is "inserted into" in at least one place + public Boolean Inserted; + // Indicates that the database is "deleted from" in at least one place + public Boolean Deleted; + // Indicates that the function is "read" in at least one place + public Boolean Read; + + public FunctionNameAndArity GetNameAndArity() => new FunctionNameAndArity(Name, Params.Count); +} + +/// +/// Metadata for built-in functions +/// +public class BuiltinFunction +{ + public FunctionSignature Signature; + // Metadata passed from story headers. + // These aren't used at all during compilation and are only used in the compiled story file. + public UInt32 Meta1; + public UInt32 Meta2; + public UInt32 Meta3; + public UInt32 Meta4; +} + +/// +/// Diagnostic message level +/// +public enum MessageLevel +{ + Error, + Warning +} + +/// +/// Holder for compiler diagnostic codes. +/// +public class DiagnosticCode +{ + /// + /// Miscellaenous internal error - should not happen. + /// + public const String InternalError = "E00"; + /// + /// A type ID was declared multiple times in the story definition file. + /// + public const String TypeIdAlreadyDefined = "E01"; + /// + /// A type name (alias) was declared multiple times in the story definition file. + /// + public const String TypeNameAlreadyDefined = "E02"; + /// + /// The type ID is either an intrinsic ID or is outside the allowed range. + /// + public const String TypeIdInvalid = "E03"; + /// + /// The alias type ID doesn't point to a valid intrinsic type ID + /// + public const String IntrinsicTypeIdInvalid = "E04"; + + /// + /// A function with the same signature already exists. + /// + public const String SignatureAlreadyDefined = "E05"; + /// + /// The type of an argument could not be resolved in a builtin function. + /// (This only occurs when parsing story headers, not in goal code) + /// + public const String UnresolvedTypeInSignature = "E06"; + /// + /// A goal with the same name was seen earlier. 
+ /// + public const String GoalAlreadyDefined = "E07"; + /// + /// The parent goal specified in the goal script was not found. + /// + public const String UnresolvedGoal = "E08"; + /// + /// Failed to infer the type of a rule-local variable. + /// + public const String UnresolvedVariableType = "E09"; + /// + /// The function signature (full typed parameter list) of a function + /// could not be determined. This is likely the result of a failed type inference. + /// + public const String UnresolvedSignature = "E10"; + /// + /// The intrinsic type of a function parameter does not match the expected type. + /// + public const String LocalTypeMismatch = "E11"; + /// + /// Value with unknown type encountered during IR generation. + /// + public const String UnresolvedType = "E12"; + /// + /// PROC/QRY declarations must start with a PROC/QRY name as the first condition. + /// + public const String InvalidProcDefinition = "E13"; + /// + /// Fact contains a function that is not callable + /// (the function is not a call, database or proc). + /// + public const String InvalidSymbolInFact = "E14"; + /// + /// Rule action contains a function that is not callable + /// (the function is not a call, database or proc). + /// + public const String InvalidSymbolInStatement = "E15"; + /// + /// "NOT" action contains a non-database function. + /// + public const String CanOnlyDeleteFromDatabase = "E16"; /// - /// Parameter direction. - /// Only relevant for queries, which get both input and output parameters. + /// Initial PROC/QRY/IF function type differs from allowed type. /// - public enum ParamDirection + public const String InvalidSymbolInInitialCondition = "E17"; + /// + /// Condition contains a function that is not a query or database. + /// + public const String InvalidFunctionTypeInCondition = "E18"; + /// + /// Function name could not be resolved. + /// + public const String UnresolvedSymbol = "E19"; + /// + /// Use of less/greater operators on strings or guidstrings. + /// + public const String StringLtGtComparison = "W20"; + /// + /// The alias type of a function parameter does not match the expected type. + /// + public const String GuidAliasMismatch = "E21"; + /// + /// Object name GUID is prefixed with a type that is not known. + /// + public const String GuidPrefixNotKnown = "W22"; + /// + /// PROC_/QRY_ naming style violation. + /// + public const String RuleNamingStyle = "W23"; + /// + /// A rule variable was used in a read context, but was not yet bound. + /// + public const String ParamNotBound = "E24"; + /// + /// The database is likely unused or unpopulated. + /// (Written but not read, or vice versa) + /// + public const String UnusedDatabaseWarning = "W25"; + /// + /// The database is likely unused or unpopulated. + /// (Written but not read, or vice versa) + /// + public const String UnusedDatabaseError = "E25"; + /// + /// Database "DB_" naming convention violation. + /// + public const String DbNamingStyle = "W26"; + /// + /// Object name GUID could not be resolved to a game object. + /// + public const String UnresolvedGameObjectName = "W27"; + /// + /// Type of name GUID differs from type of game object. + /// + public const String GameObjectTypeMismatch = "W28"; + /// + /// Name part of name GUID differs from name of game object. + /// + public const String GameObjectNameMismatch = "W29"; + /// + /// Multiple definitions seen for the same function with different signatures. 
+ /// + public const String ProcTypeMismatch = "E30"; + /// + /// Attempted to cast a type to an unrelated/incompatible type (i.e. STRING to INTEGER) + /// + public const String CastToUnrelatedType = "E31"; + /// + /// Attempted to cast an alias to an unrelated alias (i.e. CHARACTERGUID to ITEMGUID) + /// + public const String CastToUnrelatedGuidAlias = "E32"; + /// + /// Left-hand side and right-hand side variables are the same in a binary operation. + /// This will result in an "invalid compare" error in runtime. + /// + public const String BinaryOperationSameRhsLhs = "E33"; + /// + /// comparison on types that have known bugs or side effects + /// (currently this only triggers on GUIDSTRING - STRING comparison) + /// + public const String RiskyComparison = "E34"; + /// + /// The database is possibly used in an incorrect way. + /// (Deleted and read, but not written) + /// + public const String UnwrittenDatabase = "W35"; +} + +public class Diagnostic +{ + public readonly CodeLocation Location; + public readonly MessageLevel Level; + public readonly String Code; + public readonly String Message; + + public Diagnostic(CodeLocation location, MessageLevel level, String code, String message) { - In, - Out + Location = location; + Level = level; + Code = code; + Message = message; } +} +public class CompilationLog +{ + public List Log = new List(); /// - /// Osiris internal function type. + /// Controls whether specific warnings are enabled or disabled. + /// All are enabled by default. /// - public enum FunctionType - { - // Osiris items - // Query defined by the Osiris runtime - SysQuery, - // Call defined by the Osiris runtime - SysCall, - - // Application defined items - // Event defined by the application (D:OS) - Event, - // Query defined by the application (D:OS) - Query, - // Call defined by the application (D:OS) - Call, - - // User-defined items - // Proc (~call) defined in user code - Proc, - // Query defined in user code - UserQuery, - // Database defined in user code - Database - }; - - /// - /// Function name - /// In Osiris, multiple functions are allowed with the same name, - /// if they have different arity (number of parameters). 
- /// - public class FunctionNameAndArity : IEquatable + public Dictionary WarningSwitches = new Dictionary(); + + public CompilationLog() { - // Function name - public readonly String Name; - // Number of parameters - public readonly int Arity; + WarningSwitches.Add(DiagnosticCode.RuleNamingStyle, false); + WarningSwitches.Add(DiagnosticCode.UnwrittenDatabase, false); + } - public FunctionNameAndArity(String name, int arity) - { - Name = name; - Arity = arity; - } + public void Warn(CodeLocation location, String code, String message) + { + if (WarningSwitches.TryGetValue(code, out bool enabled) && !enabled) return; - public override bool Equals(object fun) - { - return Equals(fun as FunctionNameAndArity); - } + var diag = new Diagnostic(location, MessageLevel.Warning, code, message); + Log.Add(diag); + } - public bool Equals(FunctionNameAndArity fun) - { - return Name.ToLowerInvariant() == fun.Name.ToLowerInvariant() - && Arity == fun.Arity; - } + public void Warn(CodeLocation location, String code, String format, object arg1) + { + var message = String.Format(format, arg1); + Warn(location, code, message); + } - public override int GetHashCode() - { - return Name.ToLowerInvariant().GetHashCode() | Arity; - } + public void Warn(CodeLocation location, String code, String format, object arg1, object arg2) + { + var message = String.Format(format, arg1, arg2); + Warn(location, code, message); + } - public override string ToString() - { - return Name + "(" + Arity.ToString() + ")"; - } + public void Warn(CodeLocation location, String code, String format, object arg1, object arg2, object arg3) + { + var message = String.Format(format, arg1, arg2, arg3); + Warn(location, code, message); } - /// - /// Function parameter (or database column, depending on the function type) - /// - public class FunctionParam + public void Warn(CodeLocation location, String code, String format, object arg1, object arg2, object arg3, object arg4) { - // Parameter direction, i.e. either In or Out. - public ParamDirection Direction; - // Parameter type - // For builtin functions this is always taken from the function header. - // For user defined functions this is inferred from code. - public ValueType Type; - // Parameter name - public String Name; + var message = String.Format(format, arg1, arg2, arg3, arg4); + Warn(location, code, message); } - public class FunctionSignature + public void Warn(CodeLocation location, String code, String format, object arg1, object arg2, object arg3, object arg4, object arg5) { - // Type of function (call, query, database, etc.) - public FunctionType Type; - // Function name - public String Name; - // List of arguments - public List Params; - // Indicates that we were able to infer the type of all parameters - public Boolean FullyTyped; - // Indicates that the database is "inserted into" in at least one place - public Boolean Inserted; - // Indicates that the database is "deleted from" in at least one place - public Boolean Deleted; - // Indicates that the function is "read" in at least one place - public Boolean Read; - - public FunctionNameAndArity GetNameAndArity() => new FunctionNameAndArity(Name, Params.Count); + var message = String.Format(format, arg1, arg2, arg3, arg4, arg5); + Warn(location, code, message); } - /// - /// Metadata for built-in functions - /// - public class BuiltinFunction + public void Error(CodeLocation location, String code, String message) { - public FunctionSignature Signature; - // Metadata passed from story headers. 
- // These aren't used at all during compilation and are only used in the compiled story file. - public UInt32 Meta1; - public UInt32 Meta2; - public UInt32 Meta3; - public UInt32 Meta4; + var diag = new Diagnostic(location, MessageLevel.Error, code, message); + Log.Add(diag); } - /// - /// Diagnostic message level - /// - public enum MessageLevel + public void Error(CodeLocation location, String code, String format, object arg1) { - Error, - Warning + var message = String.Format(format, arg1); + Error(location, code, message); } - /// - /// Holder for compiler diagnostic codes. - /// - public class DiagnosticCode + public void Error(CodeLocation location, String code, String format, object arg1, object arg2) { - /// - /// Miscellaenous internal error - should not happen. - /// - public const String InternalError = "E00"; - /// - /// A type ID was declared multiple times in the story definition file. - /// - public const String TypeIdAlreadyDefined = "E01"; - /// - /// A type name (alias) was declared multiple times in the story definition file. - /// - public const String TypeNameAlreadyDefined = "E02"; - /// - /// The type ID is either an intrinsic ID or is outside the allowed range. - /// - public const String TypeIdInvalid = "E03"; - /// - /// The alias type ID doesn't point to a valid intrinsic type ID - /// - public const String IntrinsicTypeIdInvalid = "E04"; - - /// - /// A function with the same signature already exists. - /// - public const String SignatureAlreadyDefined = "E05"; - /// - /// The type of an argument could not be resolved in a builtin function. - /// (This only occurs when parsing story headers, not in goal code) - /// - public const String UnresolvedTypeInSignature = "E06"; - /// - /// A goal with the same name was seen earlier. - /// - public const String GoalAlreadyDefined = "E07"; - /// - /// The parent goal specified in the goal script was not found. - /// - public const String UnresolvedGoal = "E08"; - /// - /// Failed to infer the type of a rule-local variable. - /// - public const String UnresolvedVariableType = "E09"; - /// - /// The function signature (full typed parameter list) of a function - /// could not be determined. This is likely the result of a failed type inference. - /// - public const String UnresolvedSignature = "E10"; - /// - /// The intrinsic type of a function parameter does not match the expected type. - /// - public const String LocalTypeMismatch = "E11"; - /// - /// Value with unknown type encountered during IR generation. - /// - public const String UnresolvedType = "E12"; - /// - /// PROC/QRY declarations must start with a PROC/QRY name as the first condition. - /// - public const String InvalidProcDefinition = "E13"; - /// - /// Fact contains a function that is not callable - /// (the function is not a call, database or proc). - /// - public const String InvalidSymbolInFact = "E14"; - /// - /// Rule action contains a function that is not callable - /// (the function is not a call, database or proc). - /// - public const String InvalidSymbolInStatement = "E15"; - /// - /// "NOT" action contains a non-database function. - /// - public const String CanOnlyDeleteFromDatabase = "E16"; - /// - /// Initial PROC/QRY/IF function type differs from allowed type. - /// - public const String InvalidSymbolInInitialCondition = "E17"; - /// - /// Condition contains a function that is not a query or database. - /// - public const String InvalidFunctionTypeInCondition = "E18"; - /// - /// Function name could not be resolved. 
- /// - public const String UnresolvedSymbol = "E19"; - /// - /// Use of less/greater operators on strings or guidstrings. - /// - public const String StringLtGtComparison = "W20"; - /// - /// The alias type of a function parameter does not match the expected type. - /// - public const String GuidAliasMismatch = "E21"; - /// - /// Object name GUID is prefixed with a type that is not known. - /// - public const String GuidPrefixNotKnown = "W22"; - /// - /// PROC_/QRY_ naming style violation. - /// - public const String RuleNamingStyle = "W23"; - /// - /// A rule variable was used in a read context, but was not yet bound. - /// - public const String ParamNotBound = "E24"; - /// - /// The database is likely unused or unpopulated. - /// (Written but not read, or vice versa) - /// - public const String UnusedDatabaseWarning = "W25"; - /// - /// The database is likely unused or unpopulated. - /// (Written but not read, or vice versa) - /// - public const String UnusedDatabaseError = "E25"; - /// - /// Database "DB_" naming convention violation. - /// - public const String DbNamingStyle = "W26"; - /// - /// Object name GUID could not be resolved to a game object. - /// - public const String UnresolvedGameObjectName = "W27"; - /// - /// Type of name GUID differs from type of game object. - /// - public const String GameObjectTypeMismatch = "W28"; - /// - /// Name part of name GUID differs from name of game object. - /// - public const String GameObjectNameMismatch = "W29"; - /// - /// Multiple definitions seen for the same function with different signatures. - /// - public const String ProcTypeMismatch = "E30"; - /// - /// Attempted to cast a type to an unrelated/incompatible type (i.e. STRING to INTEGER) - /// - public const String CastToUnrelatedType = "E31"; - /// - /// Attempted to cast an alias to an unrelated alias (i.e. CHARACTERGUID to ITEMGUID) - /// - public const String CastToUnrelatedGuidAlias = "E32"; - /// - /// Left-hand side and right-hand side variables are the same in a binary operation. - /// This will result in an "invalid compare" error in runtime. - /// - public const String BinaryOperationSameRhsLhs = "E33"; - /// - /// comparison on types that have known bugs or side effects - /// (currently this only triggers on GUIDSTRING - STRING comparison) - /// - public const String RiskyComparison = "E34"; - /// - /// The database is possibly used in an incorrect way. 
- /// (Deleted and read, but not written) - /// - public const String UnwrittenDatabase = "W35"; + var message = String.Format(format, arg1, arg2); + Error(location, code, message); } - public class Diagnostic + public void Error(CodeLocation location, String code, String format, object arg1, object arg2, object arg3) { - public readonly CodeLocation Location; - public readonly MessageLevel Level; - public readonly String Code; - public readonly String Message; + var message = String.Format(format, arg1, arg2, arg3); + Error(location, code, message); + } - public Diagnostic(CodeLocation location, MessageLevel level, String code, String message) - { - Location = location; - Level = level; - Code = code; - Message = message; - } + public void Error(CodeLocation location, String code, String format, object arg1, object arg2, object arg3, object arg4) + { + var message = String.Format(format, arg1, arg2, arg3, arg4); + Error(location, code, message); } - public class CompilationLog + public void Error(CodeLocation location, String code, String format, object arg1, object arg2, object arg3, object arg4, object arg5) { - public List Log = new List(); - /// - /// Controls whether specific warnings are enabled or disabled. - /// All are enabled by default. - /// - public Dictionary WarningSwitches = new Dictionary(); - - public CompilationLog() - { - WarningSwitches.Add(DiagnosticCode.RuleNamingStyle, false); - WarningSwitches.Add(DiagnosticCode.UnwrittenDatabase, false); - } + var message = String.Format(format, arg1, arg2, arg3, arg4, arg5); + Error(location, code, message); + } +} - public void Warn(CodeLocation location, String code, String message) - { - if (WarningSwitches.TryGetValue(code, out bool enabled) && !enabled) return; +public class GameObjectInfo +{ + public String Name; + public ValueType Type; +} - var diag = new Diagnostic(location, MessageLevel.Warning, code, message); - Log.Add(diag); - } +/// +/// Compilation context that holds input and intermediate data used during the compilation process. 
+/// +public class CompilationContext +{ + public const uint MaxIntrinsicTypeId = 5; - public void Warn(CodeLocation location, String code, String format, object arg1) - { - var message = String.Format(format, arg1); - Warn(location, code, message); - } + public Dictionary TypesById = new Dictionary(); + public Dictionary TypesByName = new Dictionary(); + public Dictionary GoalsByName = new Dictionary(); + public Dictionary Signatures = new Dictionary(); + public Dictionary Functions = new Dictionary(); + public Dictionary GameObjects = new Dictionary(); + public CompilationLog Log = new CompilationLog(); - public void Warn(CodeLocation location, String code, String format, object arg1, object arg2) + public CompilationContext() + { + RegisterIntrinsicTypes(); + } + + /// + /// Registers all Osiris builtin types that are not declared in the story header separately + /// + private void RegisterIntrinsicTypes() + { + var tUnknown = new ValueType { - var message = String.Format(format, arg1, arg2); - Warn(location, code, message); - } + Name = "NONE", + TypeId = 0, + IntrinsicTypeId = Value.Type.None + }; + AddType(tUnknown); - public void Warn(CodeLocation location, String code, String format, object arg1, object arg2, object arg3) + var tInteger = new ValueType { - var message = String.Format(format, arg1, arg2, arg3); - Warn(location, code, message); - } + Name = "INTEGER", + TypeId = 1, + IntrinsicTypeId = Value.Type.Integer + }; + AddType(tInteger); - public void Warn(CodeLocation location, String code, String format, object arg1, object arg2, object arg3, object arg4) + var tInteger64 = new ValueType { - var message = String.Format(format, arg1, arg2, arg3, arg4); - Warn(location, code, message); - } + Name = "INTEGER64", + TypeId = 2, + IntrinsicTypeId = Value.Type.Integer64 + }; + AddType(tInteger64); - public void Warn(CodeLocation location, String code, String format, object arg1, object arg2, object arg3, object arg4, object arg5) + var tReal = new ValueType { - var message = String.Format(format, arg1, arg2, arg3, arg4, arg5); - Warn(location, code, message); - } + Name = "REAL", + TypeId = 3, + IntrinsicTypeId = Value.Type.Float + }; + AddType(tReal); - public void Error(CodeLocation location, String code, String message) + var tString = new ValueType { - var diag = new Diagnostic(location, MessageLevel.Error, code, message); - Log.Add(diag); - } + Name = "STRING", + TypeId = 4, + IntrinsicTypeId = Value.Type.String + }; + AddType(tString); - public void Error(CodeLocation location, String code, String format, object arg1) + var tGuidString = new ValueType { - var message = String.Format(format, arg1); - Error(location, code, message); - } + Name = "GUIDSTRING", + TypeId = 5, + IntrinsicTypeId = Value.Type.GuidString + }; + AddType(tGuidString); + } + + private void AddType(ValueType type) + { + TypesById.Add(type.TypeId, type); + TypesByName.Add(type.Name, type); + } - public void Error(CodeLocation location, String code, String format, object arg1, object arg2) + public bool RegisterType(ValueType type) + { + if (TypesById.ContainsKey(type.TypeId)) { - var message = String.Format(format, arg1, arg2); - Error(location, code, message); + Log.Error(null, DiagnosticCode.TypeIdAlreadyDefined, "Type ID already in use"); + return false; } - public void Error(CodeLocation location, String code, String format, object arg1, object arg2, object arg3) + if (TypesByName.ContainsKey(type.Name)) { - var message = String.Format(format, arg1, arg2, arg3); - Error(location, code, message); + 
Log.Error(null, DiagnosticCode.TypeNameAlreadyDefined, "Type name already in use"); + return false; } - public void Error(CodeLocation location, String code, String format, object arg1, object arg2, object arg3, object arg4) + if (type.TypeId < MaxIntrinsicTypeId || type.TypeId > 255) { - var message = String.Format(format, arg1, arg2, arg3, arg4); - Error(location, code, message); + Log.Error(null, DiagnosticCode.TypeIdInvalid, "Type ID must be in the range 5..255"); + return false; } - public void Error(CodeLocation location, String code, String format, object arg1, object arg2, object arg3, object arg4, object arg5) + if (type.IntrinsicTypeId <= 0 || (uint)type.IntrinsicTypeId > MaxIntrinsicTypeId) { - var message = String.Format(format, arg1, arg2, arg3, arg4, arg5); - Error(location, code, message); + Log.Error(null, DiagnosticCode.TypeIdInvalid, "Alias type ID must refer to an intrinsic type"); + return false; } - } - public class GameObjectInfo - { - public String Name; - public ValueType Type; + AddType(type); + return true; } - /// - /// Compilation context that holds input and intermediate data used during the compilation process. - /// - public class CompilationContext + public bool RegisterFunction(FunctionSignature signature, object func) { - public const uint MaxIntrinsicTypeId = 5; - - public Dictionary TypesById = new Dictionary(); - public Dictionary TypesByName = new Dictionary(); - public Dictionary GoalsByName = new Dictionary(); - public Dictionary Signatures = new Dictionary(); - public Dictionary Functions = new Dictionary(); - public Dictionary GameObjects = new Dictionary(); - public CompilationLog Log = new CompilationLog(); - - public CompilationContext() + var nameAndArity = signature.GetNameAndArity(); + if (Signatures.ContainsKey(nameAndArity)) { - RegisterIntrinsicTypes(); + Log.Error(null, DiagnosticCode.SignatureAlreadyDefined, + String.Format("Signature already registered: {0}({1})", nameAndArity.Name, nameAndArity.Arity)); + return false; } - /// - /// Registers all Osiris builtin types that are not declared in the story header separately - /// - private void RegisterIntrinsicTypes() + Signatures.Add(nameAndArity, signature); + Functions.Add(nameAndArity, func); + return true; + } + + public bool RegisterGoal(IRGoal goal) + { + if (GoalsByName.ContainsKey(goal.Name)) { - var tUnknown = new ValueType - { - Name = "NONE", - TypeId = 0, - IntrinsicTypeId = Value.Type.None - }; - AddType(tUnknown); - - var tInteger = new ValueType - { - Name = "INTEGER", - TypeId = 1, - IntrinsicTypeId = Value.Type.Integer - }; - AddType(tInteger); - - var tInteger64 = new ValueType - { - Name = "INTEGER64", - TypeId = 2, - IntrinsicTypeId = Value.Type.Integer64 - }; - AddType(tInteger64); - - var tReal = new ValueType - { - Name = "REAL", - TypeId = 3, - IntrinsicTypeId = Value.Type.Float - }; - AddType(tReal); - - var tString = new ValueType - { - Name = "STRING", - TypeId = 4, - IntrinsicTypeId = Value.Type.String - }; - AddType(tString); - - var tGuidString = new ValueType - { - Name = "GUIDSTRING", - TypeId = 5, - IntrinsicTypeId = Value.Type.GuidString - }; - AddType(tGuidString); + Log.Error(null, DiagnosticCode.GoalAlreadyDefined, + String.Format("Goal already registered: {0}", goal.Name)); + return false; } - private void AddType(ValueType type) + GoalsByName.Add(goal.Name, goal); + return true; + } + + public ValueType LookupType(String typeName) + { + if (TypesByName.TryGetValue(typeName, out ValueType type)) { - TypesById.Add(type.TypeId, type); - 
TypesByName.Add(type.Name, type); + return type; } - - public bool RegisterType(ValueType type) + else { - if (TypesById.ContainsKey(type.TypeId)) - { - Log.Error(null, DiagnosticCode.TypeIdAlreadyDefined, "Type ID already in use"); - return false; - } - - if (TypesByName.ContainsKey(type.Name)) - { - Log.Error(null, DiagnosticCode.TypeNameAlreadyDefined, "Type name already in use"); - return false; - } - - if (type.TypeId < MaxIntrinsicTypeId || type.TypeId > 255) - { - Log.Error(null, DiagnosticCode.TypeIdInvalid, "Type ID must be in the range 5..255"); - return false; - } - - if (type.IntrinsicTypeId <= 0 || (uint)type.IntrinsicTypeId > MaxIntrinsicTypeId) - { - Log.Error(null, DiagnosticCode.TypeIdInvalid, "Alias type ID must refer to an intrinsic type"); - return false; - } - - AddType(type); - return true; + return null; } + } - public bool RegisterFunction(FunctionSignature signature, object func) + public FunctionSignature LookupSignature(FunctionNameAndArity name) + { + if (Signatures.TryGetValue(name, out FunctionSignature signature)) { - var nameAndArity = signature.GetNameAndArity(); - if (Signatures.ContainsKey(nameAndArity)) - { - Log.Error(null, DiagnosticCode.SignatureAlreadyDefined, - String.Format("Signature already registered: {0}({1})", nameAndArity.Name, nameAndArity.Arity)); - return false; - } - - Signatures.Add(nameAndArity, signature); - Functions.Add(nameAndArity, func); - return true; + return signature; } - - public bool RegisterGoal(IRGoal goal) + else { - if (GoalsByName.ContainsKey(goal.Name)) - { - Log.Error(null, DiagnosticCode.GoalAlreadyDefined, - String.Format("Goal already registered: {0}", goal.Name)); - return false; - } - - GoalsByName.Add(goal.Name, goal); - return true; + return null; } + } - public ValueType LookupType(String typeName) + public object LookupName(FunctionNameAndArity name) + { + if (Functions.TryGetValue(name, out object function)) { - if (TypesByName.TryGetValue(typeName, out ValueType type)) - { - return type; - } - else - { - return null; - } + return function; } - - public FunctionSignature LookupSignature(FunctionNameAndArity name) + else { - if (Signatures.TryGetValue(name, out FunctionSignature signature)) - { - return signature; - } - else - { - return null; - } + return null; } + } - public object LookupName(FunctionNameAndArity name) + public IRGoal LookupGoal(String name) + { + if (GoalsByName.TryGetValue(name, out IRGoal goal)) { - if (Functions.TryGetValue(name, out object function)) - { - return function; - } - else - { - return null; - } + return goal; } - - public IRGoal LookupGoal(String name) + else { - if (GoalsByName.TryGetValue(name, out IRGoal goal)) - { - return goal; - } - else - { - return null; - } + return null; } } } diff --git a/LSLib/LS/Story/Compiler/Compiler.cs b/LSLib/LS/Story/Compiler/Compiler.cs index bb4779db..109694a2 100644 --- a/LSLib/LS/Story/Compiler/Compiler.cs +++ b/LSLib/LS/Story/Compiler/Compiler.cs @@ -3,1255 +3,1254 @@ using System.Diagnostics; using System.Linq; -namespace LSLib.LS.Story.Compiler +namespace LSLib.LS.Story.Compiler; + +public class Compiler { - public class Compiler + public CompilationContext Context = new CompilationContext(); + public HashSet IgnoreUnusedDatabases = new HashSet(); + public TargetGame Game = TargetGame.DOS2; + public bool AllowTypeCoercion = false; + public HashSet TypeCoercionWhitelist; + + private string TypeToName(uint typeId) + { + var type = Context.TypesById[typeId]; + return type.Name; + } + + private string TypeToName(Value.Type typeId) { - 
public CompilationContext Context = new CompilationContext(); - public HashSet IgnoreUnusedDatabases = new HashSet(); - public TargetGame Game = TargetGame.DOS2; - public bool AllowTypeCoercion = false; - public HashSet TypeCoercionWhitelist; + return TypeToName((uint)typeId); + } - private string TypeToName(uint typeId) + private void VerifyParamCompatibility(FunctionSignature func, int paramIndex, FunctionParam param, IRValue value) + { + if (param.Type.IntrinsicTypeId != value.Type.IntrinsicTypeId) { - var type = Context.TypesById[typeId]; - return type.Name; + // BG3 allows promoting integer constants to float + if (Game == TargetGame.BG3 && value is IRConstant + && (param.Type.IntrinsicTypeId == Value.Type.Float || param.Type.IntrinsicTypeId == Value.Type.Integer64) + && value.Type.IntrinsicTypeId == Value.Type.Integer) + { + return; + } + + object paramName = (param.Name != null) ? (object)param.Name : paramIndex; + Context.Log.Error(value.Location, + DiagnosticCode.LocalTypeMismatch, + "Parameter {0} of {1} \"{2}\" expects {3}; {4} specified", + paramName, func.Type, func.Name, param.Type.Name, value.Type.Name); + return; } - private string TypeToName(Value.Type typeId) + if (IsGuidAliasToAliasCast(param.Type, value.Type)) { - return TypeToName((uint)typeId); + object paramName = (param.Name != null) ? (object)param.Name : paramIndex; + Context.Log.Error(value.Location, + DiagnosticCode.GuidAliasMismatch, + "Parameter {0} of {1} \"{2}\" has GUID type {3}; {4} specified", + paramName, func.Type, func.Name, param.Type.Name, value.Type.Name); + return; } + } - private void VerifyParamCompatibility(FunctionSignature func, int paramIndex, FunctionParam param, IRValue value) + private void VerifyIRFact(IRFact fact) + { + if (fact.Database == null) { - if (param.Type.IntrinsicTypeId != value.Type.IntrinsicTypeId) - { - // BG3 allows promoting integer constants to float - if (Game == TargetGame.BG3 && value is IRConstant - && (param.Type.IntrinsicTypeId == Value.Type.Float || param.Type.IntrinsicTypeId == Value.Type.Integer64) - && value.Type.IntrinsicTypeId == Value.Type.Integer) - { - return; - } - - object paramName = (param.Name != null) ? (object)param.Name : paramIndex; - Context.Log.Error(value.Location, - DiagnosticCode.LocalTypeMismatch, - "Parameter {0} of {1} \"{2}\" expects {3}; {4} specified", - paramName, func.Type, func.Name, param.Type.Name, value.Type.Name); - return; - } + return; + } - if (IsGuidAliasToAliasCast(param.Type, value.Type)) - { - object paramName = (param.Name != null) ? 
(object)param.Name : paramIndex; - Context.Log.Error(value.Location, - DiagnosticCode.GuidAliasMismatch, - "Parameter {0} of {1} \"{2}\" has GUID type {3}; {4} specified", - paramName, func.Type, func.Name, param.Type.Name, value.Type.Name); - return; - } + var db = Context.LookupSignature(fact.Database.Name); + if (db == null) + { + Context.Log.Error(fact.Location, + DiagnosticCode.UnresolvedSymbol, + "Database \"{0}\" could not be resolved", + fact.Database.Name); + return; } - private void VerifyIRFact(IRFact fact) + if (db.Type != FunctionType.Database + && db.Type != FunctionType.Call + && db.Type != FunctionType.SysCall + && db.Type != FunctionType.Proc) { - if (fact.Database == null) - { - return; - } + Context.Log.Error(fact.Location, + DiagnosticCode.InvalidSymbolInFact, + "Init/Exit actions can only reference databases, calls and PROCs; \"{0}\" is a {1}", + fact.Database.Name, db.Type); + return; + } - var db = Context.LookupSignature(fact.Database.Name); - if (db == null) - { - Context.Log.Error(fact.Location, - DiagnosticCode.UnresolvedSymbol, - "Database \"{0}\" could not be resolved", - fact.Database.Name); - return; - } + if (fact.Not) + { + db.Deleted = true; + } + else + { + db.Inserted = true; + } - if (db.Type != FunctionType.Database - && db.Type != FunctionType.Call - && db.Type != FunctionType.SysCall - && db.Type != FunctionType.Proc) - { - Context.Log.Error(fact.Location, - DiagnosticCode.InvalidSymbolInFact, - "Init/Exit actions can only reference databases, calls and PROCs; \"{0}\" is a {1}", - fact.Database.Name, db.Type); - return; - } + int index = 0; + foreach (var param in db.Params) + { + var ele = fact.Elements[index]; + index++; - if (fact.Not) - { - db.Deleted = true; - } - else + if (ele.Type == null) { - db.Inserted = true; + Context.Log.Error(ele.Location, + DiagnosticCode.InternalError, + "No type information available for fact argument"); + continue; } - int index = 0; - foreach (var param in db.Params) - { - var ele = fact.Elements[index]; - index++; + VerifyParamCompatibility(db, index, param, ele); + } + } - if (ele.Type == null) - { - Context.Log.Error(ele.Location, - DiagnosticCode.InternalError, - "No type information available for fact argument"); - continue; - } + private void VerifyIRStatement(IRRule rule, IRStatement statement) + { + if (statement.Func == null) return; - VerifyParamCompatibility(db, index, param, ele); - } + var func = Context.LookupSignature(statement.Func.Name); + if (func == null) + { + Context.Log.Error(statement.Location, + DiagnosticCode.UnresolvedSymbol, + "Symbol \"{0}\" could not be resolved", + statement.Func.Name); + return; } - private void VerifyIRStatement(IRRule rule, IRStatement statement) + if (!func.FullyTyped) { - if (statement.Func == null) return; + Context.Log.Error(statement.Location, + DiagnosticCode.UnresolvedSignature, + "Signature of \"{0}\" could not be determined", + statement.Func.Name); + return; + } - var func = Context.LookupSignature(statement.Func.Name); - if (func == null) - { - Context.Log.Error(statement.Location, - DiagnosticCode.UnresolvedSymbol, - "Symbol \"{0}\" could not be resolved", - statement.Func.Name); - return; - } + if (func.Type != FunctionType.Database + && func.Type != FunctionType.Call + && func.Type != FunctionType.SysCall + && func.Type != FunctionType.Proc) + { + Context.Log.Error(statement.Location, + DiagnosticCode.InvalidSymbolInStatement, + "KB rule actions can only reference databases, calls and PROCs; \"{0}\" is a {1}", + statement.Func.Name, func.Type); + 
return; + } - if (!func.FullyTyped) - { - Context.Log.Error(statement.Location, - DiagnosticCode.UnresolvedSignature, - "Signature of \"{0}\" could not be determined", - statement.Func.Name); - return; - } + if (statement.Not + && func.Type != FunctionType.Database) + { + Context.Log.Error(statement.Location, + DiagnosticCode.CanOnlyDeleteFromDatabase, + "KB rule NOT actions can only reference databases; \"{0}\" is a {1}", + statement.Func.Name, func.Type); + return; + } - if (func.Type != FunctionType.Database - && func.Type != FunctionType.Call - && func.Type != FunctionType.SysCall - && func.Type != FunctionType.Proc) - { - Context.Log.Error(statement.Location, - DiagnosticCode.InvalidSymbolInStatement, - "KB rule actions can only reference databases, calls and PROCs; \"{0}\" is a {1}", - statement.Func.Name, func.Type); - return; - } + if (statement.Not) + { + func.Deleted = true; + } + else + { + func.Inserted = true; + } - if (statement.Not - && func.Type != FunctionType.Database) - { - Context.Log.Error(statement.Location, - DiagnosticCode.CanOnlyDeleteFromDatabase, - "KB rule NOT actions can only reference databases; \"{0}\" is a {1}", - statement.Func.Name, func.Type); - return; - } + int index = 0; + foreach (var param in func.Params) + { + var ele = statement.Params[index]; - if (statement.Not) + ValueType type = ele.Type; + if (type == null) { - func.Deleted = true; - } - else - { - func.Inserted = true; + Context.Log.Error(ele.Location, + DiagnosticCode.InternalError, + "No type information available for statement argument"); + continue; } + + VerifyIRValue(rule, ele, func); + VerifyIRValueCall(rule, ele, func, index, -1, statement.Not); + VerifyParamCompatibility(func, index, param, ele); - int index = 0; - foreach (var param in func.Params) - { - var ele = statement.Params[index]; + index++; + } + } - ValueType type = ele.Type; - if (type == null) - { - Context.Log.Error(ele.Location, - DiagnosticCode.InternalError, - "No type information available for statement argument"); - continue; - } - - VerifyIRValue(rule, ele, func); - VerifyIRValueCall(rule, ele, func, index, -1, statement.Not); - VerifyParamCompatibility(func, index, param, ele); + private void VerifyIRVariable(IRRule rule, IRVariable variable, FunctionSignature func) + { + var ruleVar = rule.Variables[variable.Index]; + if (variable.Type == null) + { + Context.Log.Error(variable.Location, + DiagnosticCode.UnresolvedType, + "Type of variable {0} could not be determined", + ruleVar.Name); + return; + } - index++; - } + if (ruleVar.Type == null) + { + Context.Log.Error(variable.Location, + DiagnosticCode.UnresolvedType, + "Type of rule variable {0} could not be determined", + ruleVar.Name); + return; } - private void VerifyIRVariable(IRRule rule, IRVariable variable, FunctionSignature func) + if ((func == null || TypeCoercionWhitelist == null || !TypeCoercionWhitelist.Contains(func.GetNameAndArity().ToString())) + && !AllowTypeCoercion) { - var ruleVar = rule.Variables[variable.Index]; - if (variable.Type == null) + if (!AreIntrinsicTypesCompatible(ruleVar.Type.IntrinsicTypeId, variable.Type.IntrinsicTypeId)) { Context.Log.Error(variable.Location, - DiagnosticCode.UnresolvedType, - "Type of variable {0} could not be determined", - ruleVar.Name); + DiagnosticCode.CastToUnrelatedType, + "Cannot cast {1} variable {0} to unrelated type {2}", + ruleVar.Name, ruleVar.Type.Name, variable.Type.Name); return; } - if (ruleVar.Type == null) + if (IsRiskyComparison(ruleVar.Type.IntrinsicTypeId, variable.Type.IntrinsicTypeId)) { 
Context.Log.Error(variable.Location, - DiagnosticCode.UnresolvedType, - "Type of rule variable {0} could not be determined", - ruleVar.Name); + DiagnosticCode.RiskyComparison, + "Coercion of {1} variable {0} to {2} may trigger incorrect behavior", + ruleVar.Name, ruleVar.Type.Name, variable.Type.Name); return; } - if ((func == null || TypeCoercionWhitelist == null || !TypeCoercionWhitelist.Contains(func.GetNameAndArity().ToString())) - && !AllowTypeCoercion) + if (IsGuidAliasToAliasCast(ruleVar.Type, variable.Type)) { - if (!AreIntrinsicTypesCompatible(ruleVar.Type.IntrinsicTypeId, variable.Type.IntrinsicTypeId)) - { - Context.Log.Error(variable.Location, - DiagnosticCode.CastToUnrelatedType, - "Cannot cast {1} variable {0} to unrelated type {2}", - ruleVar.Name, ruleVar.Type.Name, variable.Type.Name); - return; - } - - if (IsRiskyComparison(ruleVar.Type.IntrinsicTypeId, variable.Type.IntrinsicTypeId)) - { - Context.Log.Error(variable.Location, - DiagnosticCode.RiskyComparison, - "Coercion of {1} variable {0} to {2} may trigger incorrect behavior", - ruleVar.Name, ruleVar.Type.Name, variable.Type.Name); - return; - } - - if (IsGuidAliasToAliasCast(ruleVar.Type, variable.Type)) - { - Context.Log.Error(variable.Location, - DiagnosticCode.CastToUnrelatedGuidAlias, - "{1} variable {0} converted to unrelated type {2}", - ruleVar.Name, ruleVar.Type.Name, variable.Type.Name); - } + Context.Log.Error(variable.Location, + DiagnosticCode.CastToUnrelatedGuidAlias, + "{1} variable {0} converted to unrelated type {2}", + ruleVar.Name, ruleVar.Type.Name, variable.Type.Name); } } + } - private void VerifyIRConstant(IRConstant constant) + private void VerifyIRConstant(IRConstant constant) + { + if (constant.Type.IntrinsicTypeId == Value.Type.GuidString) { - if (constant.Type.IntrinsicTypeId == Value.Type.GuidString) - { - var nameWithoutType = constant.StringValue; - ValueType type = null; + var nameWithoutType = constant.StringValue; + ValueType type = null; - // Check if the value is prefixed by any of the known GUID subtypes. - // If a match is found, verify that the type of the constant matched the GUID subtype. - var underscore = constant.StringValue.IndexOf('_'); - if (underscore != -1) + // Check if the value is prefixed by any of the known GUID subtypes. + // If a match is found, verify that the type of the constant matched the GUID subtype. 
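The checks below rely on the Osiris object-name convention TYPE_SomeName_<guid>: the text before the first underscore may name a GUID subtype, and the last 36 characters are the GUID itself. A minimal standalone sketch of that convention follows; the prefix list and sample value are hypothetical and stand in for the compiler's own type table, this is not the compiler's API.

using System;
using System.Collections.Generic;

static class GuidNameSketch
{
    // Hypothetical subset of GUID subtype prefixes; the real check consults the registered types.
    static readonly HashSet<string> KnownPrefixes = new HashSet<string>
    {
        "GUIDSTRING", "CHARACTERGUID", "ITEMGUID"
    };

    public static void Describe(string constant)
    {
        // Text before the first underscore may be a GUID subtype prefix.
        var underscore = constant.IndexOf('_');
        string prefix = underscore != -1 ? constant.Substring(0, underscore) : null;
        bool knownPrefix = prefix != null && KnownPrefixes.Contains(prefix);

        // The trailing 36 characters are treated as the GUID used for game object lookup.
        string guid = constant.Length >= 36 ? constant.Substring(constant.Length - 36) : null;

        Console.WriteLine($"prefix={prefix}, known={knownPrefix}, guid={guid}");
    }
}

// Example with a made-up value:
// GuidNameSketch.Describe("CHARACTERGUID_Sandbox_Hero_123e4567-e89b-12d3-a456-426614174000");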
+ var underscore = constant.StringValue.IndexOf('_'); + if (underscore != -1) + { + var prefix = constant.StringValue.Substring(0, underscore); + type = Context.LookupType(prefix); + if (type != null) { - var prefix = constant.StringValue.Substring(0, underscore); - type = Context.LookupType(prefix); - if (type != null) - { - nameWithoutType = constant.StringValue.Substring(underscore + 1); - if (constant.Type.TypeId > CompilationContext.MaxIntrinsicTypeId - && type.TypeId != constant.Type.TypeId) - { - Context.Log.Error(constant.Location, - DiagnosticCode.GuidAliasMismatch, - "GUID constant \"{0}\" has inferred type {1}", - constant.StringValue, constant.Type.Name); - } - } - else if (prefix.Contains("GUID") && Game != TargetGame.BG3) + nameWithoutType = constant.StringValue.Substring(underscore + 1); + if (constant.Type.TypeId > CompilationContext.MaxIntrinsicTypeId + && type.TypeId != constant.Type.TypeId) { - Context.Log.Warn(constant.Location, - DiagnosticCode.GuidPrefixNotKnown, - "GUID constant \"{0}\" is prefixed with unknown type {1}", - constant.StringValue, prefix); + Context.Log.Error(constant.Location, + DiagnosticCode.GuidAliasMismatch, + "GUID constant \"{0}\" has inferred type {1}", + constant.StringValue, constant.Type.Name); } } - - var guid = constant.StringValue.Substring(constant.StringValue.Length - 36); - if (!Context.GameObjects.TryGetValue(guid, out GameObjectInfo objectInfo)) + else if (prefix.Contains("GUID") && Game != TargetGame.BG3) { - Context.Log.Warn(constant.Location, - DiagnosticCode.UnresolvedGameObjectName, - "Object \"{0}\" could not be resolved", - constant.StringValue); - } - else - { - if (objectInfo.Name != nameWithoutType) - { - Context.Log.Warn(constant.Location, - DiagnosticCode.GameObjectNameMismatch, - "Constant \"{0}\" references game object with different name (\"{1}\")", - nameWithoutType, objectInfo.Name); - } - - if (constant.Type.TypeId != (uint)Value.Type.GuidString - && objectInfo.Type.TypeId != (uint)Value.Type.GuidString - && constant.Type.TypeId != objectInfo.Type.TypeId) - { - Context.Log.Warn(constant.Location, - DiagnosticCode.GameObjectTypeMismatch, - "Constant \"{0}\" of type {1} references game object of type {2}", - constant.StringValue, constant.Type.Name, objectInfo.Type.Name); - } + Context.Log.Warn(constant.Location, + DiagnosticCode.GuidPrefixNotKnown, + "GUID constant \"{0}\" is prefixed with unknown type {1}", + constant.StringValue, prefix); } } - } - private void VerifyIRValue(IRRule rule, IRValue value, FunctionSignature func) - { - if (value is IRConstant) + var guid = constant.StringValue.Substring(constant.StringValue.Length - 36); + if (!Context.GameObjects.TryGetValue(guid, out GameObjectInfo objectInfo)) { - VerifyIRConstant(value as IRConstant); + Context.Log.Warn(constant.Location, + DiagnosticCode.UnresolvedGameObjectName, + "Object \"{0}\" could not be resolved", + constant.StringValue); } else { - VerifyIRVariable(rule, value as IRVariable, func); + if (objectInfo.Name != nameWithoutType) + { + Context.Log.Warn(constant.Location, + DiagnosticCode.GameObjectNameMismatch, + "Constant \"{0}\" references game object with different name (\"{1}\")", + nameWithoutType, objectInfo.Name); + } + + if (constant.Type.TypeId != (uint)Value.Type.GuidString + && objectInfo.Type.TypeId != (uint)Value.Type.GuidString + && constant.Type.TypeId != objectInfo.Type.TypeId) + { + Context.Log.Warn(constant.Location, + DiagnosticCode.GameObjectTypeMismatch, + "Constant \"{0}\" of type {1} references game object of type {2}", + 
constant.StringValue, constant.Type.Name, objectInfo.Type.Name); + } } } + } - private void VerifyIRVariableCall(IRRule rule, IRVariable variable, FunctionSignature signature, Int32 parameterIndex, - Int32 conditionIndex, bool not) + private void VerifyIRValue(IRRule rule, IRValue value, FunctionSignature func) + { + if (value is IRConstant) { - var ruleVar = rule.Variables[variable.Index]; - var param = signature.Params[parameterIndex]; + VerifyIRConstant(value as IRConstant); + } + else + { + VerifyIRVariable(rule, value as IRVariable, func); + } + } + + private void VerifyIRVariableCall(IRRule rule, IRVariable variable, FunctionSignature signature, Int32 parameterIndex, + Int32 conditionIndex, bool not) + { + var ruleVar = rule.Variables[variable.Index]; + var param = signature.Params[parameterIndex]; - if (param.Direction == ParamDirection.Out && !not) + if (param.Direction == ParamDirection.Out && !not) + { + Debug.Assert(conditionIndex != -1); + if (ruleVar.FirstBindingIndex == -1) { - Debug.Assert(conditionIndex != -1); - if (ruleVar.FirstBindingIndex == -1) - { - ruleVar.FirstBindingIndex = conditionIndex; - } + ruleVar.FirstBindingIndex = conditionIndex; } - else if ( - // We're in the THEN section of a rule, so we cannot bind here - conditionIndex == -1 - // NOT conditions never bind, but they allow unbound unused variables - || (!ruleVar.IsUnused() && not) - || ( - // Databases and events always bind - signature.Type != FunctionType.Database - && signature.Type != FunctionType.Event - // PROC/QRYs bind if they're the first condition in a rule - && !(rule.Type == RuleType.Proc && conditionIndex == 0 && signature.Type == FunctionType.Proc) - && !(rule.Type == RuleType.Query && conditionIndex == 0 && signature.Type == FunctionType.UserQuery) - && param.Direction != ParamDirection.Out - ) + } + else if ( + // We're in the THEN section of a rule, so we cannot bind here + conditionIndex == -1 + // NOT conditions never bind, but they allow unbound unused variables + || (!ruleVar.IsUnused() && not) + || ( + // Databases and events always bind + signature.Type != FunctionType.Database + && signature.Type != FunctionType.Event + // PROC/QRYs bind if they're the first condition in a rule + && !(rule.Type == RuleType.Proc && conditionIndex == 0 && signature.Type == FunctionType.Proc) + && !(rule.Type == RuleType.Query && conditionIndex == 0 && signature.Type == FunctionType.UserQuery) + && param.Direction != ParamDirection.Out + ) + ) { + + if ( + // The variable was never bound + ruleVar.FirstBindingIndex == -1 + // The variable was bound after this node (so it is still unbound here) + || (conditionIndex != -1 && ruleVar.FirstBindingIndex >= conditionIndex) ) { - - if ( - // The variable was never bound - ruleVar.FirstBindingIndex == -1 - // The variable was bound after this node (so it is still unbound here) - || (conditionIndex != -1 && ruleVar.FirstBindingIndex >= conditionIndex) - ) { - object paramName = (param.Name != null) ? 
(object)param.Name : (parameterIndex + 1); - if (!ruleVar.IsUnused()) - { - Context.Log.Error(variable.Location, - DiagnosticCode.ParamNotBound, - "Variable {0} is not bound here (when used as parameter {1} of {2} \"{3}\")", - ruleVar.Name, paramName, signature.Type, signature.GetNameAndArity()); - } - else - { - Context.Log.Error(variable.Location, - DiagnosticCode.ParamNotBound, - "Parameter {0} of {1} \"{2}\" requires a variable or constant, not a placeholder", - paramName, signature.Type, signature.GetNameAndArity()); - } + object paramName = (param.Name != null) ? (object)param.Name : (parameterIndex + 1); + if (!ruleVar.IsUnused()) + { + Context.Log.Error(variable.Location, + DiagnosticCode.ParamNotBound, + "Variable {0} is not bound here (when used as parameter {1} of {2} \"{3}\")", + ruleVar.Name, paramName, signature.Type, signature.GetNameAndArity()); } - } - else - { - if (conditionIndex != -1 && ruleVar.FirstBindingIndex == -1 && !not) + else { - ruleVar.FirstBindingIndex = conditionIndex; + Context.Log.Error(variable.Location, + DiagnosticCode.ParamNotBound, + "Parameter {0} of {1} \"{2}\" requires a variable or constant, not a placeholder", + paramName, signature.Type, signature.GetNameAndArity()); } } } - - private void VerifyIRValueCall(IRRule rule, IRValue value, FunctionSignature signature, Int32 parameterIndex, - Int32 conditionIndex, bool not) + else { - if (value is IRVariable) + if (conditionIndex != -1 && ruleVar.FirstBindingIndex == -1 && !not) { - VerifyIRVariableCall(rule, value as IRVariable, signature, parameterIndex, conditionIndex, not); + ruleVar.FirstBindingIndex = conditionIndex; } } + } - private void VerifyIRFuncCondition(IRRule rule, IRFuncCondition condition, int conditionIndex) + private void VerifyIRValueCall(IRRule rule, IRValue value, FunctionSignature signature, Int32 parameterIndex, + Int32 conditionIndex, bool not) + { + if (value is IRVariable) { - // TODO - Merge FuncCondition and IRStatement base? - // Base --> IRParameterizedCall --> FuncCond: has (NOT) field - var func = Context.LookupSignature(condition.Func.Name); - if (func == null) + VerifyIRVariableCall(rule, value as IRVariable, signature, parameterIndex, conditionIndex, not); + } + } + + private void VerifyIRFuncCondition(IRRule rule, IRFuncCondition condition, int conditionIndex) + { + // TODO - Merge FuncCondition and IRStatement base? 
+ // Base --> IRParameterizedCall --> FuncCond: has (NOT) field + var func = Context.LookupSignature(condition.Func.Name); + if (func == null) + { + Context.Log.Error(condition.Location, + DiagnosticCode.UnresolvedSymbol, + "Symbol \"{0}\" could not be resolved", + condition.Func.Name); + return; + } + + if (!func.FullyTyped) + { + Context.Log.Error(condition.Location, + DiagnosticCode.UnresolvedSignature, + "Signature of \"{0}\" could not be determined", + condition.Func.Name); + return; + } + + func.Read = true; + + if (conditionIndex == 0) + { + switch (rule.Type) { - Context.Log.Error(condition.Location, - DiagnosticCode.UnresolvedSymbol, - "Symbol \"{0}\" could not be resolved", - condition.Func.Name); - return; - } + case RuleType.Proc: + if (func.Type != FunctionType.Proc) + { + Context.Log.Error(condition.Location, + DiagnosticCode.InvalidSymbolInInitialCondition, + "Initial proc condition can only be a PROC name; \"{0}\" is a {1}", + condition.Func.Name, func.Type); + return; + } + break; + + case RuleType.Query: + if (func.Type != FunctionType.UserQuery) + { + Context.Log.Error(condition.Location, + DiagnosticCode.InvalidSymbolInInitialCondition, + "Initial query condition can only be a user-defined QRY name; \"{0}\" is a {1}", + condition.Func.Name, func.Type); + return; + } + break; - if (!func.FullyTyped) + case RuleType.Rule: + if (func.Type != FunctionType.Event + && func.Type != FunctionType.Database) + { + Context.Log.Error(condition.Location, + DiagnosticCode.InvalidSymbolInInitialCondition, + "Initial rule condition can only be an event or a DB; \"{0}\" is a {1}", + condition.Func.Name, func.Type); + return; + } + break; + + default: + throw new Exception("Unknown rule type"); + } + } + else + { + if (func.Type != FunctionType.SysQuery + && func.Type != FunctionType.Query + && func.Type != FunctionType.Database + && func.Type != FunctionType.UserQuery) { Context.Log.Error(condition.Location, - DiagnosticCode.UnresolvedSignature, - "Signature of \"{0}\" could not be determined", - condition.Func.Name); + DiagnosticCode.InvalidFunctionTypeInCondition, + "Subsequent rule conditions can only be queries or DBs; \"{0}\" is a {1}", + condition.Func.Name, func.Type); return; } + } - func.Read = true; + int index = 0; + foreach (var param in func.Params) + { + var condParam = condition.Params[index]; + ValueType type = condParam.Type; - if (conditionIndex == 0) - { - switch (rule.Type) - { - case RuleType.Proc: - if (func.Type != FunctionType.Proc) - { - Context.Log.Error(condition.Location, - DiagnosticCode.InvalidSymbolInInitialCondition, - "Initial proc condition can only be a PROC name; \"{0}\" is a {1}", - condition.Func.Name, func.Type); - return; - } - break; - - case RuleType.Query: - if (func.Type != FunctionType.UserQuery) - { - Context.Log.Error(condition.Location, - DiagnosticCode.InvalidSymbolInInitialCondition, - "Initial query condition can only be a user-defined QRY name; \"{0}\" is a {1}", - condition.Func.Name, func.Type); - return; - } - break; - - case RuleType.Rule: - if (func.Type != FunctionType.Event - && func.Type != FunctionType.Database) - { - Context.Log.Error(condition.Location, - DiagnosticCode.InvalidSymbolInInitialCondition, - "Initial rule condition can only be an event or a DB; \"{0}\" is a {1}", - condition.Func.Name, func.Type); - return; - } - break; - - default: - throw new Exception("Unknown rule type"); - } - } - else + if (type == null) { - if (func.Type != FunctionType.SysQuery - && func.Type != FunctionType.Query - && func.Type != 
FunctionType.Database - && func.Type != FunctionType.UserQuery) - { - Context.Log.Error(condition.Location, - DiagnosticCode.InvalidFunctionTypeInCondition, - "Subsequent rule conditions can only be queries or DBs; \"{0}\" is a {1}", - condition.Func.Name, func.Type); - return; - } + Context.Log.Error(condParam.Location, + DiagnosticCode.InternalError, + "No type information available for func condition arg"); + continue; } - int index = 0; - foreach (var param in func.Params) - { - var condParam = condition.Params[index]; - ValueType type = condParam.Type; + VerifyIRValue(rule, condParam, func); + VerifyIRValueCall(rule, condParam, func, index, conditionIndex, condition.Not); + VerifyParamCompatibility(func, index, param, condParam); - if (type == null) - { - Context.Log.Error(condParam.Location, - DiagnosticCode.InternalError, - "No type information available for func condition arg"); - continue; - } + index++; + } + } + + private Value.Type IntrinsicTypeToCompatibilityType(Value.Type typeId) + { + switch ((Value.Type)typeId) + { + case Value.Type.Integer: + case Value.Type.Integer64: + case Value.Type.Float: + return Value.Type.Integer; - VerifyIRValue(rule, condParam, func); - VerifyIRValueCall(rule, condParam, func, index, conditionIndex, condition.Not); - VerifyParamCompatibility(func, index, param, condParam); + case Value.Type.String: + case Value.Type.GuidString: + return Value.Type.String; - index++; - } + default: + throw new ArgumentException("Cannot check compatibility of unknown types"); } + } - private Value.Type IntrinsicTypeToCompatibilityType(Value.Type typeId) - { - switch ((Value.Type)typeId) - { - case Value.Type.Integer: - case Value.Type.Integer64: - case Value.Type.Float: - return Value.Type.Integer; + private bool AreIntrinsicTypesCompatible(Value.Type type1, Value.Type type2) + { + Value.Type translatedType1 = IntrinsicTypeToCompatibilityType(type1), + translatedType2 = IntrinsicTypeToCompatibilityType(type2); + return translatedType1 == translatedType2; + } + + /// + /// Returns whether comparing the specified types is "risky", + /// i.e. if there is unexpected behavior or side effects. 
+ /// + private bool IsRiskyComparison(Value.Type type1, Value.Type type2) + { + return (type1 == Value.Type.String && type2 == Value.Type.GuidString) + || (type1 == Value.Type.GuidString && type2 == Value.Type.String); + } - case Value.Type.String: - case Value.Type.GuidString: - return Value.Type.String; + private bool IsGuidAliasToAliasCast(ValueType type1, ValueType type2) + { + return + type1.IntrinsicTypeId == type2.IntrinsicTypeId + && type1.IntrinsicTypeId == Value.Type.GuidString + && type1.TypeId != (int)Value.Type.GuidString + && type2.TypeId != (int)Value.Type.GuidString + && type1.TypeId != type2.TypeId; + } - default: - throw new ArgumentException("Cannot check compatibility of unknown types"); + private void VerifyIRBinaryConditionValue(IRRule rule, IRValue value, Int32 conditionIndex) + { + VerifyIRValue(rule, value, null); + + if (value is IRVariable) + { + var variable = value as IRVariable; + var ruleVar = rule.Variables[variable.Index]; + if (ruleVar.FirstBindingIndex == -1 || ruleVar.FirstBindingIndex >= conditionIndex) + { + Context.Log.Error(variable.Location, + DiagnosticCode.ParamNotBound, + "Variable {0} is not bound (when used in a binary expression)", + ruleVar.Name); } } + } + + private void VerifyIRBinaryCondition(IRRule rule, IRBinaryCondition condition, Int32 conditionIndex) + { + ValueType lhs = condition.LValue.Type, + rhs = condition.RValue.Type; - private bool AreIntrinsicTypesCompatible(Value.Type type1, Value.Type type2) + // Don't raise compiler errors if the untyped value is a variable, + // as we already have a separate rule-level error for untyped variables. + if ((lhs == null && condition.LValue is IRVariable) + || (rhs == null && condition.RValue is IRVariable)) { - Value.Type translatedType1 = IntrinsicTypeToCompatibilityType(type1), - translatedType2 = IntrinsicTypeToCompatibilityType(type2); - return translatedType1 == translatedType2; + return; } - /// - /// Returns whether comparing the specified types is "risky", - /// i.e. if there is unexpected behavior or side effects. 
- /// - private bool IsRiskyComparison(Value.Type type1, Value.Type type2) + if (condition.LValue is IRVariable + && condition.RValue is IRVariable + && (condition.LValue as IRVariable).Index == (condition.RValue as IRVariable).Index + // This bug was fixed in DOS2 DE + && Game == TargetGame.DOS2 + // There is a known bug in the main campaign that we have to ignore + && rule.Goal.Name != "EndGame_PrisonersDilemma") { - return (type1 == Value.Type.String && type2 == Value.Type.GuidString) - || (type1 == Value.Type.GuidString && type2 == Value.Type.String); + Context.Log.Error(condition.Location, + DiagnosticCode.BinaryOperationSameRhsLhs, + "Same variable used on both sides of a binary expression; this will result in an invalid compare in runtime"); + return; } - private bool IsGuidAliasToAliasCast(ValueType type1, ValueType type2) + VerifyIRBinaryConditionValue(rule, condition.LValue, conditionIndex); + VerifyIRBinaryConditionValue(rule, condition.RValue, conditionIndex); + + if (!AreIntrinsicTypesCompatible(lhs.IntrinsicTypeId, rhs.IntrinsicTypeId)) { - return - type1.IntrinsicTypeId == type2.IntrinsicTypeId - && type1.IntrinsicTypeId == Value.Type.GuidString - && type1.TypeId != (int)Value.Type.GuidString - && type2.TypeId != (int)Value.Type.GuidString - && type1.TypeId != type2.TypeId; + Context.Log.Error(condition.Location, + DiagnosticCode.LocalTypeMismatch, + "Type of left expression ({0}) differs from type of right expression ({1})", + TypeToName(lhs.IntrinsicTypeId), TypeToName(rhs.IntrinsicTypeId)); + return; } - private void VerifyIRBinaryConditionValue(IRRule rule, IRValue value, Int32 conditionIndex) + if (IsRiskyComparison(lhs.IntrinsicTypeId, rhs.IntrinsicTypeId)) { - VerifyIRValue(rule, value, null); - - if (value is IRVariable) - { - var variable = value as IRVariable; - var ruleVar = rule.Variables[variable.Index]; - if (ruleVar.FirstBindingIndex == -1 || ruleVar.FirstBindingIndex >= conditionIndex) - { - Context.Log.Error(variable.Location, - DiagnosticCode.ParamNotBound, - "Variable {0} is not bound (when used in a binary expression)", - ruleVar.Name); - } - } + Context.Log.Error(condition.Location, + DiagnosticCode.RiskyComparison, + "Comparison between {0} and {1} may trigger incorrect behavior", + TypeToName(lhs.IntrinsicTypeId), TypeToName(rhs.IntrinsicTypeId)); + return; } - private void VerifyIRBinaryCondition(IRRule rule, IRBinaryCondition condition, Int32 conditionIndex) + if (IsGuidAliasToAliasCast(lhs, rhs)) { - ValueType lhs = condition.LValue.Type, - rhs = condition.RValue.Type; - - // Don't raise compiler errors if the untyped value is a variable, - // as we already have a separate rule-level error for untyped variables. 
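For reference, the binary-condition checks in this hunk reduce to two small predicates over the intrinsic types: numeric intrinsics (INTEGER, INTEGER64, REAL) are mutually comparable, string-like intrinsics (STRING, GUIDSTRING) are mutually comparable, and the STRING/GUIDSTRING pairing is additionally flagged as risky. A rough standalone sketch with hypothetical enum and method names, not the compiler's own types:

enum IntrinsicSketch { None, Integer, Integer64, Float, String, GuidString }

static class CompatSketch
{
    static bool IsNumeric(IntrinsicSketch t) =>
        t == IntrinsicSketch.Integer || t == IntrinsicSketch.Integer64 || t == IntrinsicSketch.Float;

    static bool IsStringLike(IntrinsicSketch t) =>
        t == IntrinsicSketch.String || t == IntrinsicSketch.GuidString;

    // Mirrors the idea of AreIntrinsicTypesCompatible: both sides fall in the same compatibility class.
    public static bool Compatible(IntrinsicSketch a, IntrinsicSketch b) =>
        (IsNumeric(a) && IsNumeric(b)) || (IsStringLike(a) && IsStringLike(b));

    // Mirrors the idea of IsRiskyComparison: STRING compared against GUIDSTRING in either order.
    public static bool Risky(IntrinsicSketch a, IntrinsicSketch b) =>
        (a == IntrinsicSketch.String && b == IntrinsicSketch.GuidString)
        || (a == IntrinsicSketch.GuidString && b == IntrinsicSketch.String);
}

// Example: Compatible(Integer, Float) is true; Compatible(Integer, String) is false;
// Risky(String, GuidString) is true.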
- if ((lhs == null && condition.LValue is IRVariable) - || (rhs == null && condition.RValue is IRVariable)) - { - return; - } - - if (condition.LValue is IRVariable - && condition.RValue is IRVariable - && (condition.LValue as IRVariable).Index == (condition.RValue as IRVariable).Index - // This bug was fixed in DOS2 DE - && Game == TargetGame.DOS2 - // There is a known bug in the main campaign that we have to ignore - && rule.Goal.Name != "EndGame_PrisonersDilemma") - { - Context.Log.Error(condition.Location, - DiagnosticCode.BinaryOperationSameRhsLhs, - "Same variable used on both sides of a binary expression; this will result in an invalid compare in runtime"); - return; - } + Context.Log.Error(condition.Location, + DiagnosticCode.GuidAliasMismatch, + "GUID alias type of left expression ({0}) differs from type of right expression ({1})", + TypeToName(lhs.TypeId), TypeToName(rhs.TypeId)); + return; + } - VerifyIRBinaryConditionValue(rule, condition.LValue, conditionIndex); - VerifyIRBinaryConditionValue(rule, condition.RValue, conditionIndex); - - if (!AreIntrinsicTypesCompatible(lhs.IntrinsicTypeId, rhs.IntrinsicTypeId)) - { - Context.Log.Error(condition.Location, - DiagnosticCode.LocalTypeMismatch, - "Type of left expression ({0}) differs from type of right expression ({1})", - TypeToName(lhs.IntrinsicTypeId), TypeToName(rhs.IntrinsicTypeId)); - return; - } + // Using greater than/less than operators for strings and GUIDs is probably a mistake. + if ((lhs.IntrinsicTypeId == Value.Type.String + || lhs.IntrinsicTypeId == Value.Type.GuidString) + && (condition.Op == RelOpType.Greater + || condition.Op == RelOpType.GreaterOrEqual + || condition.Op == RelOpType.Less + || condition.Op == RelOpType.LessOrEqual)) + { + Context.Log.Warn(condition.Location, + DiagnosticCode.StringLtGtComparison, + "String comparison using operator {0} - probably a mistake?", + condition.Op); + return; + } + } - if (IsRiskyComparison(lhs.IntrinsicTypeId, rhs.IntrinsicTypeId)) + private void VerifyIRRule(IRRule rule) + { + if (rule.Type == RuleType.Proc || rule.Type == RuleType.Query) + { + var initialName = (rule.Conditions[0] as IRFuncCondition).Func.Name; + if (rule.Type == RuleType.Proc && initialName.Name.Length > 4 && initialName.Name.Substring(0, 4).ToUpper() != "PROC") { - Context.Log.Error(condition.Location, - DiagnosticCode.RiskyComparison, - "Comparison between {0} and {1} may trigger incorrect behavior", - TypeToName(lhs.IntrinsicTypeId), TypeToName(rhs.IntrinsicTypeId)); - return; + Context.Log.Warn(rule.Conditions[0].Location, + DiagnosticCode.RuleNamingStyle, + "Name of PROC \"{0}\" should start with the prefix \"PROC\"", + initialName); } - if (IsGuidAliasToAliasCast(lhs, rhs)) + if (rule.Type == RuleType.Query && initialName.Name.Length > 3 && initialName.Name.Substring(0, 3).ToUpper() != "QRY") { - Context.Log.Error(condition.Location, - DiagnosticCode.GuidAliasMismatch, - "GUID alias type of left expression ({0}) differs from type of right expression ({1})", - TypeToName(lhs.TypeId), TypeToName(rhs.TypeId)); - return; - } - - // Using greater than/less than operators for strings and GUIDs is probably a mistake. 
- if ((lhs.IntrinsicTypeId == Value.Type.String - || lhs.IntrinsicTypeId == Value.Type.GuidString) - && (condition.Op == RelOpType.Greater - || condition.Op == RelOpType.GreaterOrEqual - || condition.Op == RelOpType.Less - || condition.Op == RelOpType.LessOrEqual)) - { - Context.Log.Warn(condition.Location, - DiagnosticCode.StringLtGtComparison, - "String comparison using operator {0} - probably a mistake?", - condition.Op); - return; + Context.Log.Warn(rule.Conditions[0].Location, + DiagnosticCode.RuleNamingStyle, + "Name of Query \"{0}\" should start with the prefix \"QRY\"", + initialName); } } - private void VerifyIRRule(IRRule rule) + for (var i = 0; i < rule.Conditions.Count; i++) { - if (rule.Type == RuleType.Proc || rule.Type == RuleType.Query) + var condition = rule.Conditions[i]; + if (condition is IRBinaryCondition) { - var initialName = (rule.Conditions[0] as IRFuncCondition).Func.Name; - if (rule.Type == RuleType.Proc && initialName.Name.Length > 4 && initialName.Name.Substring(0, 4).ToUpper() != "PROC") - { - Context.Log.Warn(rule.Conditions[0].Location, - DiagnosticCode.RuleNamingStyle, - "Name of PROC \"{0}\" should start with the prefix \"PROC\"", - initialName); - } - - if (rule.Type == RuleType.Query && initialName.Name.Length > 3 && initialName.Name.Substring(0, 3).ToUpper() != "QRY") - { - Context.Log.Warn(rule.Conditions[0].Location, - DiagnosticCode.RuleNamingStyle, - "Name of Query \"{0}\" should start with the prefix \"QRY\"", - initialName); - } + VerifyIRBinaryCondition(rule, condition as IRBinaryCondition, i); } - - for (var i = 0; i < rule.Conditions.Count; i++) + else { - var condition = rule.Conditions[i]; - if (condition is IRBinaryCondition) - { - VerifyIRBinaryCondition(rule, condition as IRBinaryCondition, i); - } - else - { - VerifyIRFuncCondition(rule, condition as IRFuncCondition, i); - } + VerifyIRFuncCondition(rule, condition as IRFuncCondition, i); } + } - foreach (var action in rule.Actions) - { - VerifyIRStatement(rule, action); - } + foreach (var action in rule.Actions) + { + VerifyIRStatement(rule, action); + } - foreach (var variable in rule.Variables) + foreach (var variable in rule.Variables) + { + if (variable.Type == null) { - if (variable.Type == null) - { - // TODO - return location of first variable reference instead of rule - Context.Log.Error(rule.Location, - DiagnosticCode.UnresolvedVariableType, - "Variable \"{0}\" of rule could not be typed", - variable.Name); - } + // TODO - return location of first variable reference instead of rule + Context.Log.Error(rule.Location, + DiagnosticCode.UnresolvedVariableType, + "Variable \"{0}\" of rule could not be typed", + variable.Name); } } - - private void VerifyDatabases() + } + + private void VerifyDatabases() + { + foreach (var signature in Context.Signatures) { - foreach (var signature in Context.Signatures) + if (signature.Value.Type == FunctionType.Database + && signature.Key.Name.Substring(0, 2).ToUpper() != "DB") { - if (signature.Value.Type == FunctionType.Database - && signature.Key.Name.Substring(0, 2).ToUpper() != "DB") - { - // TODO - return location of declaration - Context.Log.Warn(null, - DiagnosticCode.DbNamingStyle, - "Name of database \"{0}\" should start with the prefix \"DB\"", - signature.Key.Name); - } + // TODO - return location of declaration + Context.Log.Warn(null, + DiagnosticCode.DbNamingStyle, + "Name of database \"{0}\" should start with the prefix \"DB\"", + signature.Key.Name); } } + } - private void VerifyUnusedDatabases() + private void 
VerifyUnusedDatabases() + { + foreach (var signature in Context.Signatures) { - foreach (var signature in Context.Signatures) + if (signature.Value.Type == FunctionType.Database + && !IgnoreUnusedDatabases.Contains(signature.Key)) { - if (signature.Value.Type == FunctionType.Database - && !IgnoreUnusedDatabases.Contains(signature.Key)) - { - Debug.Assert(signature.Value.Inserted - || signature.Value.Deleted - || signature.Value.Read); + Debug.Assert(signature.Value.Inserted + || signature.Value.Deleted + || signature.Value.Read); - if (!signature.Value.Read) + if (!signature.Value.Read) + { + // Unused databases are considered an error in DOS:2 DE. + if (Game == TargetGame.DOS2DE || Game == TargetGame.BG3) + { + // TODO - return location of declaration + Context.Log.Error(null, + DiagnosticCode.UnusedDatabaseError, + "{0} \"{1}\" is written to, but is never read", + signature.Value.Type, signature.Key); + } + else { - // Unused databases are considered an error in DOS:2 DE. - if (Game == TargetGame.DOS2DE || Game == TargetGame.BG3) - { - // TODO - return location of declaration - Context.Log.Error(null, - DiagnosticCode.UnusedDatabaseError, - "{0} \"{1}\" is written to, but is never read", - signature.Value.Type, signature.Key); - } - else - { - Context.Log.Warn(null, - DiagnosticCode.UnusedDatabaseWarning, - "{0} \"{1}\" is written to, but is never read", - signature.Value.Type, signature.Key); - } + Context.Log.Warn(null, + DiagnosticCode.UnusedDatabaseWarning, + "{0} \"{1}\" is written to, but is never read", + signature.Value.Type, signature.Key); } - - if (!signature.Value.Inserted - && !signature.Value.Deleted - && signature.Value.Read) + } + + if (!signature.Value.Inserted + && !signature.Value.Deleted + && signature.Value.Read) + { + // Unused databases are considered an error in DOS:2 DE. + if (Game == TargetGame.DOS2DE || Game == TargetGame.BG3) { - // Unused databases are considered an error in DOS:2 DE. 
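The PROC/QRY naming checks above and the DB check in VerifyDatabases all follow the same pattern - compare an upper-cased prefix of the symbol name. A condensed sketch; HasPrefix and the sample names are hypothetical, and unlike the raw Substring(0, 2) in VerifyDatabases this version also guards very short names, as the PROC/QRY checks do:

using System;

static class NamingChecks
{
    // True if the symbol name starts with the given prefix, compared case-insensitively.
    public static bool HasPrefix(string name, string prefix) =>
        name.Length >= prefix.Length
        && string.Equals(name.Substring(0, prefix.Length), prefix,
                         StringComparison.OrdinalIgnoreCase);
}

// NamingChecks.HasPrefix("PROC_GiveReward", "PROC") == true
// NamingChecks.HasPrefix("QRY_IsOpen", "QRY")       == true
// NamingChecks.HasPrefix("MyTable", "DB")           == false  -> DbNamingStyle warning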
- if (Game == TargetGame.DOS2DE || Game == TargetGame.BG3) - { - Context.Log.Error(null, - DiagnosticCode.UnusedDatabaseError, - "{0} \"{1}\" is read, but is never written to", - signature.Value.Type, signature.Key); - } - else - { - Context.Log.Warn(null, - DiagnosticCode.UnusedDatabaseWarning, - "{0} \"{1}\" is read, but is never written to", - signature.Value.Type, signature.Key); - } + Context.Log.Error(null, + DiagnosticCode.UnusedDatabaseError, + "{0} \"{1}\" is read, but is never written to", + signature.Value.Type, signature.Key); } - - if (!signature.Value.Inserted - && signature.Value.Deleted - && signature.Value.Read) + else { - // TODO - return location of declaration Context.Log.Warn(null, - DiagnosticCode.UnwrittenDatabase, - "{0} \"{1}\" is read and deleted, but is never inserted into", + DiagnosticCode.UnusedDatabaseWarning, + "{0} \"{1}\" is read, but is never written to", signature.Value.Type, signature.Key); } } + + if (!signature.Value.Inserted + && signature.Value.Deleted + && signature.Value.Read) + { + // TODO - return location of declaration + Context.Log.Warn(null, + DiagnosticCode.UnwrittenDatabase, + "{0} \"{1}\" is read and deleted, but is never inserted into", + signature.Value.Type, signature.Key); + } } } + } - public void VerifyIR() + public void VerifyIR() + { + foreach (var goal in Context.GoalsByName.Values) { - foreach (var goal in Context.GoalsByName.Values) + foreach (var parentGoal in goal.ParentTargetEdges) { - foreach (var parentGoal in goal.ParentTargetEdges) - { - if (Context.LookupGoal(parentGoal.Goal.Name) == null) - { - Context.Log.Error(parentGoal.Location, - DiagnosticCode.UnresolvedGoal, - "Parent goal of \"{0}\" could not be resolved: \"{1}\"", - goal.Name, parentGoal.Goal.Name); - } - } - - foreach (var fact in goal.InitSection) + if (Context.LookupGoal(parentGoal.Goal.Name) == null) { - VerifyIRFact(fact); + Context.Log.Error(parentGoal.Location, + DiagnosticCode.UnresolvedGoal, + "Parent goal of \"{0}\" could not be resolved: \"{1}\"", + goal.Name, parentGoal.Goal.Name); } + } - foreach (var rule in goal.KBSection) - { - VerifyIRRule(rule); - } + foreach (var fact in goal.InitSection) + { + VerifyIRFact(fact); + } - foreach (var fact in goal.ExitSection) - { - VerifyIRFact(fact); - } + foreach (var rule in goal.KBSection) + { + VerifyIRRule(rule); } - // Validate database names - // We do this here as there is no explicit declaration for databases, - // they are created implicitly on first use. - VerifyDatabases(); - VerifyUnusedDatabases(); + foreach (var fact in goal.ExitSection) + { + VerifyIRFact(fact); + } } - private ValueType ConstantTypeToValueType(IRConstantType type) + // Validate database names + // We do this here as there is no explicit declaration for databases, + // they are created implicitly on first use. 
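VerifyUnusedDatabases above effectively classifies every database by its three usage flags and then picks a severity based on the target game; a condensed sketch with hypothetical enum and method names (callers are expected to assert that at least one flag is set, mirroring the Debug.Assert above):

static class DatabaseUsage
{
    enum DbUsageIssue { None, WrittenButNeverRead, ReadButNeverWritten, ReadAndDeletedButNeverInserted }

    static DbUsageIssue Classify(bool inserted, bool deleted, bool read)
    {
        if (!read) return DbUsageIssue.WrittenButNeverRead;
        if (!inserted && !deleted) return DbUsageIssue.ReadButNeverWritten;
        if (!inserted && deleted) return DbUsageIssue.ReadAndDeletedButNeverInserted;
        return DbUsageIssue.None; // both written and read: nothing to report
    }

    // Written-but-never-read and read-but-never-written are errors when targeting
    // DOS2 DE or BG3 and warnings otherwise; read-and-deleted is always a warning.
    static bool IsError(DbUsageIssue issue, bool targetIsDos2DeOrBg3) =>
        targetIsDos2DeOrBg3
        && (issue == DbUsageIssue.WrittenButNeverRead
            || issue == DbUsageIssue.ReadButNeverWritten);
}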
+ VerifyDatabases(); + VerifyUnusedDatabases(); + } + + private ValueType ConstantTypeToValueType(IRConstantType type) + { + switch (type) { - switch (type) - { - case IRConstantType.Unknown: return null; - // TODO - lookup type ID from enum - case IRConstantType.Integer: return Context.TypesById[1]; - case IRConstantType.Float: return Context.TypesById[3]; - case IRConstantType.String: return Context.TypesById[4]; - case IRConstantType.Name: return Context.TypesById[5]; - default: throw new ArgumentException("Invalid IR constant type"); - } + case IRConstantType.Unknown: return null; + // TODO - lookup type ID from enum + case IRConstantType.Integer: return Context.TypesById[1]; + case IRConstantType.Float: return Context.TypesById[3]; + case IRConstantType.String: return Context.TypesById[4]; + case IRConstantType.Name: return Context.TypesById[5]; + default: throw new ArgumentException("Invalid IR constant type"); } + } - private ValueType DetermineSignature(IRConstant value) + private ValueType DetermineSignature(IRConstant value) + { + var irConst = value as IRConstant; + if (irConst.Type != null) { - var irConst = value as IRConstant; - if (irConst.Type != null) - { - return Context.LookupType(irConst.Type.Name); - } - else - { - return ConstantTypeToValueType(irConst.ValueType); - } + return Context.LookupType(irConst.Type.Name); + } + else + { + return ConstantTypeToValueType(irConst.ValueType); } + } - private ValueType DetermineSignature(IRRule rule, IRValue value) + private ValueType DetermineSignature(IRRule rule, IRValue value) + { + if (value is IRConstant) + { + return DetermineSignature(value as IRConstant); + } + else if (value is IRVariable) { - if (value is IRConstant) + if (value.Type != null) { - return DetermineSignature(value as IRConstant); + return value.Type; } - else if (value is IRVariable) - { - if (value.Type != null) - { - return value.Type; - } - var irVar = value as IRVariable; - var ruleVar = rule.Variables[irVar.Index]; - if (ruleVar.Type != null) - { - return ruleVar.Type; - } - else - { - return null; - } + var irVar = value as IRVariable; + var ruleVar = rule.Variables[irVar.Index]; + if (ruleVar.Type != null) + { + return ruleVar.Type; } else { - throw new ArgumentException("Invalid IR value type"); + return null; } } + else + { + throw new ArgumentException("Invalid IR value type"); + } + } - private bool ApplySignature(FunctionNameAndArity name, FunctionType? type, List paramTypes) + private bool ApplySignature(FunctionNameAndArity name, FunctionType? type, List paramTypes) + { + var registeredSignature = Context.LookupSignature(name); + var signature = registeredSignature; + if (signature != null && signature.FullyTyped) { - var registeredSignature = Context.LookupSignature(name); - var signature = registeredSignature; - if (signature != null && signature.FullyTyped) - { - throw new InvalidOperationException("Cannot apply signature to an already typed name"); - } + throw new InvalidOperationException("Cannot apply signature to an already typed name"); + } - if (signature == null) - { - signature = new FunctionSignature - { - Name = name.Name, - Type = (type == null) ? FunctionType.Database : (FunctionType)type, - Inserted = false, - Deleted = false, - Read = false - }; - } - else + if (signature == null) + { + signature = new FunctionSignature + { + Name = name.Name, + Type = (type == null) ? 
FunctionType.Database : (FunctionType)type, + Inserted = false, + Deleted = false, + Read = false + }; + } + else + { + if (type != null && signature.Type != type) { - if (type != null && signature.Type != type) - { - // TODO error code! - // TODO location of definition - Context.Log.Error(null, - DiagnosticCode.ProcTypeMismatch, - "Auto-typing name {0}: first seen as {1}, now seen as {2}", - name, signature.Type, type); - } + // TODO error code! + // TODO location of definition + Context.Log.Error(null, + DiagnosticCode.ProcTypeMismatch, + "Auto-typing name {0}: first seen as {1}, now seen as {2}", + name, signature.Type, type); } + } - signature.FullyTyped = !paramTypes.Any(ty => ty == null); - signature.Params = new List(paramTypes.Count); - foreach (var paramType in paramTypes) - { - var sigParam = new FunctionParam - { - Type = paramType, - Direction = ParamDirection.In, - Name = null - }; - signature.Params.Add(sigParam); - } - - if (registeredSignature == null) + signature.FullyTyped = !paramTypes.Any(ty => ty == null); + signature.Params = new List(paramTypes.Count); + foreach (var paramType in paramTypes) + { + var sigParam = new FunctionParam { - Context.RegisterFunction(signature, null); - } - - return signature.FullyTyped; + Type = paramType, + Direction = ParamDirection.In, + Name = null + }; + signature.Params.Add(sigParam); + } + + if (registeredSignature == null) + { + Context.RegisterFunction(signature, null); } - private bool TryPropagateSignature(IRRule rule, FunctionNameAndArity name, FunctionType? type, List parameters, - bool allowPartial, ref bool updated) + return signature.FullyTyped; + } + + private bool TryPropagateSignature(IRRule rule, FunctionNameAndArity name, FunctionType? type, List parameters, + bool allowPartial, ref bool updated) + { + // Build a signature with all parameters to make sure that all types can be resolved + var sig = new List(parameters.Count); + foreach (var param in parameters) { - // Build a signature with all parameters to make sure that all types can be resolved - var sig = new List(parameters.Count); - foreach (var param in parameters) + var paramSignature = DetermineSignature(rule, param); + if (paramSignature != null) + { + sig.Add(paramSignature); + } + else { - var paramSignature = DetermineSignature(rule, param); - if (paramSignature != null) + if (allowPartial) { - sig.Add(paramSignature); + sig.Add(null); } else { - if (allowPartial) - { - sig.Add(null); - } - else - { - return false; - } + return false; } } + } - // Apply signature to symbol - updated = true; - return ApplySignature(name, type, sig); + // Apply signature to symbol + updated = true; + return ApplySignature(name, type, sig); + } + + private bool PropagateSignature(FunctionNameAndArity name, FunctionType? type, List parameters) + { + // Build a signature with all parameters to make sure that all types can be resolved + var sig = new List(parameters.Count); + foreach (var param in parameters) + { + var paramSignature = DetermineSignature(param); + sig.Add(paramSignature); + } + + // Apply signature to symbol + ApplySignature(name, type, sig); + + return true; + } + + private bool PropagateSignatureIfRequired(IRRule rule, FunctionNameAndArity name, FunctionType? 
type, List parameters, bool allowPartial, ref bool updated) + { + var signature = Context.LookupSignature(name); + bool signatureOk = (signature != null && signature.FullyTyped); + if (!signatureOk && TryPropagateSignature(rule, name, type, parameters, allowPartial, ref updated)) + { + signature = Context.LookupSignature(name); + signatureOk = signature.FullyTyped; } - private bool PropagateSignature(FunctionNameAndArity name, FunctionType? type, List parameters) + if (signatureOk) { - // Build a signature with all parameters to make sure that all types can be resolved - var sig = new List(parameters.Count); - foreach (var param in parameters) + if (PropagateRuleTypesFromParamList(rule, parameters, signature)) { - var paramSignature = DetermineSignature(param); - sig.Add(paramSignature); + updated = true; } + } - // Apply signature to symbol - ApplySignature(name, type, sig); + return signatureOk; + } + private bool PropagateSignatureIfRequired(FunctionNameAndArity name, FunctionType? type, List parameters, ref bool updated) + { + var signature = Context.LookupSignature(name); + if (signature == null || !signature.FullyTyped) + { + updated = true; + return PropagateSignature(name, type, parameters); + } + else + { return true; } + } + + private bool PropagateIRVariableType(IRRule rule, IRVariable variable, ValueType type) + { + bool updated = false; + var ruleVar = rule.Variables[variable.Index]; + if (ruleVar.Type == null) + { + ruleVar.Type = type; + updated = true; + } - private bool PropagateSignatureIfRequired(IRRule rule, FunctionNameAndArity name, FunctionType? type, List parameters, bool allowPartial, ref bool updated) + if (variable.Type == null) { - var signature = Context.LookupSignature(name); - bool signatureOk = (signature != null && signature.FullyTyped); - if (!signatureOk && TryPropagateSignature(rule, name, type, parameters, allowPartial, ref updated)) + // If a more specific type alias is available from the rule variable, apply the + // rule type instead of the function argument type + if (ruleVar.Type.IsAliasOf(type)) { - signature = Context.LookupSignature(name); - signatureOk = signature.FullyTyped; + variable.Type = ruleVar.Type; } + else + { + variable.Type = type; + } + + updated = true; + } + + return updated; + } - if (signatureOk) + private bool PropagateRuleTypesFromParamList(IRRule rule, List parameters, FunctionSignature signature) + { + bool updated = false; + Int32 index = 0; + foreach (var param in parameters) + { + if (param is IRVariable) { - if (PropagateRuleTypesFromParamList(rule, parameters, signature)) + var irVar = param as IRVariable; + if (PropagateIRVariableType(rule, param as IRVariable, signature.Params[index].Type)) { updated = true; } } - return signatureOk; + index++; + } + + return updated; + } + + private bool PropagateRuleTypes(IRFact fact) + { + bool updated = false; + if (fact.Database != null) + { + PropagateSignatureIfRequired(fact.Database.Name, FunctionType.Database, fact.Elements, ref updated); } + return updated; + } - private bool PropagateSignatureIfRequired(FunctionNameAndArity name, FunctionType? 
type, List parameters, ref bool updated) + private bool PropagateRuleTypes(IRRule rule, IRBinaryCondition condition) + { + bool updated = false; + if (condition.LValue.Type == null + && condition.LValue is IRVariable) { - var signature = Context.LookupSignature(name); - if (signature == null || !signature.FullyTyped) + var lval = condition.LValue as IRVariable; + var ruleVariable = rule.Variables[lval.Index]; + if (ruleVariable.Type != null) { + lval.Type = ruleVariable.Type; updated = true; - return PropagateSignature(name, type, parameters); - } - else - { - return true; } } - private bool PropagateIRVariableType(IRRule rule, IRVariable variable, ValueType type) + if (condition.RValue.Type == null + && condition.RValue is IRVariable) { - bool updated = false; - var ruleVar = rule.Variables[variable.Index]; - if (ruleVar.Type == null) + var rval = condition.RValue as IRVariable; + var ruleVariable = rule.Variables[rval.Index]; + if (ruleVariable.Type != null) { - ruleVar.Type = type; + rval.Type = ruleVariable.Type; updated = true; } + } - if (variable.Type == null) + // TODO - handle implicit re-typing of rule variables? + + return updated; + } + + private Int32 ComputeTupleSize(IRRule rule, IRFuncCondition condition, Int32 lastTupleSize) + { + Int32 tupleSize = lastTupleSize; + foreach (var param in condition.Params) + { + if (param is IRVariable) { - // If a more specific type alias is available from the rule variable, apply the - // rule type instead of the function argument type - if (ruleVar.Type.IsAliasOf(type)) - { - variable.Type = ruleVar.Type; - } - else + var variable = param as IRVariable; + if (variable.Index >= tupleSize) { - variable.Type = type; + tupleSize = variable.Index + 1; } - - updated = true; } - - return updated; } - private bool PropagateRuleTypesFromParamList(IRRule rule, List parameters, FunctionSignature signature) + return tupleSize; + } + + private Int32 ComputeTupleSize(IRRule rule, IRBinaryCondition condition, Int32 lastTupleSize) + { + Int32 tupleSize = lastTupleSize; + if (condition.LValue is IRVariable) { - bool updated = false; - Int32 index = 0; - foreach (var param in parameters) + var variable = condition.LValue as IRVariable; + if (variable.Index >= tupleSize) { - if (param is IRVariable) - { - var irVar = param as IRVariable; - if (PropagateIRVariableType(rule, param as IRVariable, signature.Params[index].Type)) - { - updated = true; - } - } - - index++; + tupleSize = variable.Index + 1; } - - return updated; } - private bool PropagateRuleTypes(IRFact fact) + if (condition.RValue is IRVariable) { - bool updated = false; - if (fact.Database != null) + var variable = condition.RValue as IRVariable; + if (variable.Index >= tupleSize) { - PropagateSignatureIfRequired(fact.Database.Name, FunctionType.Database, fact.Elements, ref updated); + tupleSize = variable.Index + 1; } - return updated; } - private bool PropagateRuleTypes(IRRule rule, IRBinaryCondition condition) + return tupleSize; + } + + private bool PropagateRuleTypes(IRRule rule) + { + bool updated = false; + + Int32 lastTupleSize = 0; + foreach (var condition in rule.Conditions) { - bool updated = false; - if (condition.LValue.Type == null - && condition.LValue is IRVariable) + if (condition is IRFuncCondition) { - var lval = condition.LValue as IRVariable; - var ruleVariable = rule.Variables[lval.Index]; - if (ruleVariable.Type != null) + var func = condition as IRFuncCondition; + PropagateSignatureIfRequired(rule, func.Func.Name, null, func.Params, false, ref updated); + if 
(func.TupleSize == -1) { - lval.Type = ruleVariable.Type; + func.TupleSize = ComputeTupleSize(rule, func, lastTupleSize); updated = true; } } - - if (condition.RValue.Type == null - && condition.RValue is IRVariable) + else { - var rval = condition.RValue as IRVariable; - var ruleVariable = rule.Variables[rval.Index]; - if (ruleVariable.Type != null) + var bin = condition as IRBinaryCondition; + if (PropagateRuleTypes(rule, bin)) { - rval.Type = ruleVariable.Type; updated = true; } - } - - // TODO - handle implicit re-typing of rule variables? - - return updated; - } - private Int32 ComputeTupleSize(IRRule rule, IRFuncCondition condition, Int32 lastTupleSize) - { - Int32 tupleSize = lastTupleSize; - foreach (var param in condition.Params) - { - if (param is IRVariable) + if (bin.TupleSize == -1) { - var variable = param as IRVariable; - if (variable.Index >= tupleSize) - { - tupleSize = variable.Index + 1; - } + bin.TupleSize = ComputeTupleSize(rule, bin, lastTupleSize); + updated = true; } } - return tupleSize; + lastTupleSize = condition.TupleSize; } - private Int32 ComputeTupleSize(IRRule rule, IRBinaryCondition condition, Int32 lastTupleSize) + foreach (var action in rule.Actions) { - Int32 tupleSize = lastTupleSize; - if (condition.LValue is IRVariable) + if (action.Func != null) { - var variable = condition.LValue as IRVariable; - if (variable.Index >= tupleSize) - { - tupleSize = variable.Index + 1; - } + PropagateSignatureIfRequired(rule, action.Func.Name, null, action.Params, false, ref updated); } + } - if (condition.RValue is IRVariable) - { - var variable = condition.RValue as IRVariable; - if (variable.Index >= tupleSize) - { - tupleSize = variable.Index + 1; - } - } + return updated; + } - return tupleSize; - } + public bool PropagateRuleTypes() + { + bool updated = false; - private bool PropagateRuleTypes(IRRule rule) + foreach (var goal in Context.GoalsByName.Values) { - bool updated = false; - - Int32 lastTupleSize = 0; - foreach (var condition in rule.Conditions) + foreach (var fact in goal.InitSection) { - if (condition is IRFuncCondition) - { - var func = condition as IRFuncCondition; - PropagateSignatureIfRequired(rule, func.Func.Name, null, func.Params, false, ref updated); - if (func.TupleSize == -1) - { - func.TupleSize = ComputeTupleSize(rule, func, lastTupleSize); - updated = true; - } - } - else + if (PropagateRuleTypes(fact)) { - var bin = condition as IRBinaryCondition; - if (PropagateRuleTypes(rule, bin)) - { - updated = true; - } - - if (bin.TupleSize == -1) - { - bin.TupleSize = ComputeTupleSize(rule, bin, lastTupleSize); - updated = true; - } + updated = true; } - - lastTupleSize = condition.TupleSize; } - foreach (var action in rule.Actions) + foreach (var rule in goal.KBSection) { - if (action.Func != null) + if (PropagateRuleTypes(rule)) { - PropagateSignatureIfRequired(rule, action.Func.Name, null, action.Params, false, ref updated); + updated = true; } } - return updated; - } - - public bool PropagateRuleTypes() - { - bool updated = false; - - foreach (var goal in Context.GoalsByName.Values) + foreach (var fact in goal.ExitSection) { - foreach (var fact in goal.InitSection) + if (PropagateRuleTypes(fact)) { - if (PropagateRuleTypes(fact)) - { - updated = true; - } - } - - foreach (var rule in goal.KBSection) - { - if (PropagateRuleTypes(rule)) - { - updated = true; - } - } - - foreach (var fact in goal.ExitSection) - { - if (PropagateRuleTypes(fact)) - { - updated = true; - } + updated = true; } } - - return updated; } - private void 
AddQueryOrProc(IRRule rule) + return updated; + } + + private void AddQueryOrProc(IRRule rule) + { + // Check if all parameters in the PROC/QRY declaration are typed. + var procDefn = rule.Conditions[0]; + if (procDefn is IRFuncCondition) { - // Check if all parameters in the PROC/QRY declaration are typed. - var procDefn = rule.Conditions[0]; - if (procDefn is IRFuncCondition) + var def = procDefn as IRFuncCondition; + FunctionType type; + switch (rule.Type) { - var def = procDefn as IRFuncCondition; - FunctionType type; - switch (rule.Type) - { - case RuleType.Proc: type = FunctionType.Proc; break; - case RuleType.Query: type = FunctionType.UserQuery; break; - default: throw new InvalidOperationException("Cannot register this type as a PROC or QUERY"); - } - - bool updated = false; - if (!PropagateSignatureIfRequired(rule, def.Func.Name, type, def.Params, true, ref updated)) - { - // TODO - possibly a warning? - /*Context.Log.Error(procDefn.Location, - DiagnosticCode.InvalidProcDefinition, - "Signature must be completely typed in declaration of {0} {1}", - rule.Type, def.Func.Name);*/ - } + case RuleType.Proc: type = FunctionType.Proc; break; + case RuleType.Query: type = FunctionType.UserQuery; break; + default: throw new InvalidOperationException("Cannot register this type as a PROC or QUERY"); } - else + + bool updated = false; + if (!PropagateSignatureIfRequired(rule, def.Func.Name, type, def.Params, true, ref updated)) { - Context.Log.Error(procDefn.Location, + // TODO - possibly a warning? + /*Context.Log.Error(procDefn.Location, DiagnosticCode.InvalidProcDefinition, - "Declaration of a {0} must start with a {0} name and signature.", - rule.Type); + "Signature must be completely typed in declaration of {0} {1}", + rule.Type, def.Func.Name);*/ } } + else + { + Context.Log.Error(procDefn.Location, + DiagnosticCode.InvalidProcDefinition, + "Declaration of a {0} must start with a {0} name and signature.", + rule.Type); + } + } - public void AddGoal(IRGoal goal) + public void AddGoal(IRGoal goal) + { + Context.RegisterGoal(goal); + foreach (var rule in goal.KBSection) { - Context.RegisterGoal(goal); - foreach (var rule in goal.KBSection) + if (rule.Type == RuleType.Query + || rule.Type == RuleType.Proc) { - if (rule.Type == RuleType.Query - || rule.Type == RuleType.Proc) - { - AddQueryOrProc(rule); - } + AddQueryOrProc(rule); } } } diff --git a/LSLib/LS/Story/Compiler/DebugInfo.cs b/LSLib/LS/Story/Compiler/DebugInfo.cs index c8617fff..65d5cb23 100644 --- a/LSLib/LS/Story/Compiler/DebugInfo.cs +++ b/LSLib/LS/Story/Compiler/DebugInfo.cs @@ -4,125 +4,124 @@ using System.Text; using System.Threading.Tasks; -namespace LSLib.LS.Story.Compiler +namespace LSLib.LS.Story.Compiler; + +public class DatabaseDebugInfo { - public class DatabaseDebugInfo - { - // ID of database in generated story file - public UInt32 Id; - public String Name; - public List ParamTypes; - } + // ID of database in generated story file + public UInt32 Id; + public String Name; + public List ParamTypes; +} - public class ActionDebugInfo - { - // Location of action in source file - public UInt32 Line; - } +public class ActionDebugInfo +{ + // Location of action in source file + public UInt32 Line; +} - public class GoalDebugInfo - { - // ID of goal in generated story file - public UInt32 Id; - // Goal name - public String Name; - // Absolute path of goal source file - public String Path; - // Actions in INIT section - public List InitActions; - // Actions in EXIT section - public List ExitActions; - } +public class 
GoalDebugInfo +{ + // ID of goal in generated story file + public UInt32 Id; + // Goal name + public String Name; + // Absolute path of goal source file + public String Path; + // Actions in INIT section + public List InitActions; + // Actions in EXIT section + public List ExitActions; +} - public class RuleVariableDebugInfo - { - // Index of rule variable in local tuple - public UInt32 Index; - // Name of rule variable - public String Name; - // Type ID of rule variable - public UInt32 Type; - // Is the variable slot unused? (i.e. not bound to a physical column) - public bool Unused; - } +public class RuleVariableDebugInfo +{ + // Index of rule variable in local tuple + public UInt32 Index; + // Name of rule variable + public String Name; + // Type ID of rule variable + public UInt32 Type; + // Is the variable slot unused? (i.e. not bound to a physical column) + public bool Unused; +} - public class RuleDebugInfo - { - // Local index of rule - // (this is not stored in the story file and is only used by the debugger) - public UInt32 Id; - // ID of parent goal node - public UInt32 GoalId; - // Generated rule name (usually the name of the first condition) - public String Name; - // Rule local variables - public List Variables; - // Actions in THEN-part - public List Actions; - // Line number of the beginning of the "IF" section - public UInt32 ConditionsStartLine; - // Line number of the end of the "IF" section - public UInt32 ConditionsEndLine; - // Line number of the beginning of the "THEN" section - public UInt32 ActionsStartLine; - // Line number of the end of the "THEN" section - public UInt32 ActionsEndLine; - } +public class RuleDebugInfo +{ + // Local index of rule + // (this is not stored in the story file and is only used by the debugger) + public UInt32 Id; + // ID of parent goal node + public UInt32 GoalId; + // Generated rule name (usually the name of the first condition) + public String Name; + // Rule local variables + public List Variables; + // Actions in THEN-part + public List Actions; + // Line number of the beginning of the "IF" section + public UInt32 ConditionsStartLine; + // Line number of the end of the "IF" section + public UInt32 ConditionsEndLine; + // Line number of the beginning of the "THEN" section + public UInt32 ActionsStartLine; + // Line number of the end of the "THEN" section + public UInt32 ActionsEndLine; +} - public class NodeDebugInfo - { - // ID of node in generated story file - public UInt32 Id; - // Index of parent rule - public UInt32 RuleId; - // Location of action in source file - public Int32 Line; - // Local tuple to rule variable index mappings - public Dictionary ColumnToVariableMaps; - // ID of associated database node - public UInt32 DatabaseId; - // Name of node - public String Name; - // Type of node - public Node.Type Type; - // ID of left parent node - public UInt32 ParentNodeId; - // Function (query, proc, etc.) attached to this node - public FunctionNameAndArity FunctionName; - } +public class NodeDebugInfo +{ + // ID of node in generated story file + public UInt32 Id; + // Index of parent rule + public UInt32 RuleId; + // Location of action in source file + public Int32 Line; + // Local tuple to rule variable index mappings + public Dictionary ColumnToVariableMaps; + // ID of associated database node + public UInt32 DatabaseId; + // Name of node + public String Name; + // Type of node + public Node.Type Type; + // ID of left parent node + public UInt32 ParentNodeId; + // Function (query, proc, etc.) 
attached to this node + public FunctionNameAndArity FunctionName; +} - public class FunctionParamDebugInfo - { - // Intrinsic type ID - public UInt32 TypeId; - // Name of parameter - public String Name; - // Is an out param (ie. return value)? - public bool Out; - } +public class FunctionParamDebugInfo +{ + // Intrinsic type ID + public UInt32 TypeId; + // Name of parameter + public String Name; + // Is an out param (ie. return value)? + public bool Out; +} - public class FunctionDebugInfo - { - // Name of function - public String Name; - // Type of node - public List Params; - // Function type ID - public UInt32 TypeId; - } +public class FunctionDebugInfo +{ + // Name of function + public String Name; + // Type of node + public List Params; + // Function type ID + public UInt32 TypeId; +} - public class StoryDebugInfo - { - /// - /// Story debug info format version. Increment each time the format changes. - /// - public const UInt32 CurrentVersion = 2; +public class StoryDebugInfo +{ + /// + /// Story debug info format version. Increment each time the format changes. + /// + public const UInt32 CurrentVersion = 2; - public UInt32 Version; - public Dictionary Databases = new Dictionary(); - public Dictionary Goals = new Dictionary(); - public Dictionary Rules = new Dictionary(); - public Dictionary Nodes = new Dictionary(); - public Dictionary Functions = new Dictionary(); - } + public UInt32 Version; + public Dictionary Databases = new Dictionary(); + public Dictionary Goals = new Dictionary(); + public Dictionary Rules = new Dictionary(); + public Dictionary Nodes = new Dictionary(); + public Dictionary Functions = new Dictionary(); } diff --git a/LSLib/LS/Story/Compiler/HeaderLoader.cs b/LSLib/LS/Story/Compiler/HeaderLoader.cs index 2f4895a1..7b63f5c0 100644 --- a/LSLib/LS/Story/Compiler/HeaderLoader.cs +++ b/LSLib/LS/Story/Compiler/HeaderLoader.cs @@ -6,121 +6,120 @@ using System.Text; using System.Threading.Tasks; -namespace LSLib.LS.Story.Compiler +namespace LSLib.LS.Story.Compiler; + +/// +/// Responsible for parsing story header files (story_header.div), +/// and loading header definitions to the compilation context. +/// +public class StoryHeaderLoader { + private CompilationContext Context; + + public StoryHeaderLoader(CompilationContext context) + { + Context = context; + } + /// - /// Responsible for parsing story header files (story_header.div), - /// and loading header definitions to the compilation context. + /// Creates and loads a type alias (e.g. CHARACTERGUID, ITEMGUID, etc.) from an AST node. /// - public class StoryHeaderLoader + private bool LoadAliasFromAST(ASTAlias astAlias) { - private CompilationContext Context; - - public StoryHeaderLoader(CompilationContext context) + var type = new ValueType { - Context = context; - } + Name = astAlias.TypeName, + TypeId = astAlias.TypeId, + IntrinsicTypeId = (Value.Type)astAlias.AliasId + }; + return Context.RegisterType(type); + } - /// - /// Creates and loads a type alias (e.g. CHARACTERGUID, ITEMGUID, etc.) from an AST node. - /// - private bool LoadAliasFromAST(ASTAlias astAlias) + /// + /// Creates and loads a function declaration from an AST node. 
+ /// + private bool LoadFunctionFromAST(ASTFunction astFunction) + { + var args = new List(astFunction.Params.Count); + foreach (var astParam in astFunction.Params) { - var type = new ValueType + var type = Context.LookupType(astParam.Type); + // Since types and alias types are declared at the beginning of the + // story header, we shold have full type information here, so any + // unresolved types will be flagged as an error. + if (type == null) { - Name = astAlias.TypeName, - TypeId = astAlias.TypeId, - IntrinsicTypeId = (Value.Type)astAlias.AliasId + Context.Log.Error(null, DiagnosticCode.UnresolvedTypeInSignature, + String.Format("Function \"{0}({1})\" argument \"{2}\" has unresolved type \"{3}\"", + astFunction.Name, astFunction.Params.Count, astParam.Name, astParam.Type)); + continue; + } + + var param = new FunctionParam + { + Name = astParam.Name, + Type = type, + Direction = astParam.Direction }; - return Context.RegisterType(type); + args.Add(param); } - /// - /// Creates and loads a function declaration from an AST node. - /// - private bool LoadFunctionFromAST(ASTFunction astFunction) + var signature = new FunctionSignature { - var args = new List(astFunction.Params.Count); - foreach (var astParam in astFunction.Params) - { - var type = Context.LookupType(astParam.Type); - // Since types and alias types are declared at the beginning of the - // story header, we shold have full type information here, so any - // unresolved types will be flagged as an error. - if (type == null) - { - Context.Log.Error(null, DiagnosticCode.UnresolvedTypeInSignature, - String.Format("Function \"{0}({1})\" argument \"{2}\" has unresolved type \"{3}\"", - astFunction.Name, astFunction.Params.Count, astParam.Name, astParam.Type)); - continue; - } + Name = astFunction.Name, + Type = astFunction.Type, + Params = args, + FullyTyped = true, + Inserted = false, + Deleted = false, + Read = false + }; - var param = new FunctionParam - { - Name = astParam.Name, - Type = type, - Direction = astParam.Direction - }; - args.Add(param); - } + var func = new BuiltinFunction + { + Signature = signature, + Meta1 = astFunction.Meta1, + Meta2 = astFunction.Meta2, + Meta3 = astFunction.Meta3, + Meta4 = astFunction.Meta4 + }; - var signature = new FunctionSignature - { - Name = astFunction.Name, - Type = astFunction.Type, - Params = args, - FullyTyped = true, - Inserted = false, - Deleted = false, - Read = false - }; + return Context.RegisterFunction(signature, func); + } - var func = new BuiltinFunction - { - Signature = signature, - Meta1 = astFunction.Meta1, - Meta2 = astFunction.Meta2, - Meta3 = astFunction.Meta3, - Meta4 = astFunction.Meta4 - }; + /// + /// Parses a story header file into an AST. + /// + public ASTDeclarations ParseHeader(Stream stream) + { + var scanner = new HeaderScanner(); + scanner.SetSource(stream); + var parser = new HeaderParser.HeaderParser(scanner); + bool parsed = parser.Parse(); - return Context.RegisterFunction(signature, func); + if (parsed) + { + return parser.GetDeclarations(); } - - /// - /// Parses a story header file into an AST. - /// - public ASTDeclarations ParseHeader(Stream stream) + else { - var scanner = new HeaderScanner(); - scanner.SetSource(stream); - var parser = new HeaderParser.HeaderParser(scanner); - bool parsed = parser.Parse(); - - if (parsed) - { - return parser.GetDeclarations(); - } - else - { - return null; - } + return null; } + } - /// - /// Loads all declarations from a story header file. 
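A hypothetical end-to-end use of the loader above, assuming an existing CompilationContext and a path to a story_header.div file; ParseHeader returns null when the header fails to parse, so the result has to be checked before calling LoadHeader (TryLoadHeader and the path parameter are illustrative and not part of LSLib):

using System.IO;
using LSLib.LS.Story.Compiler;

static class HeaderLoadingExample
{
    static bool TryLoadHeader(CompilationContext context, string headerPath)
    {
        var loader = new StoryHeaderLoader(context);
        using (var stream = File.OpenRead(headerPath))
        {
            var declarations = loader.ParseHeader(stream);
            if (declarations == null)
            {
                return false; // scanner/parser rejected the header
            }

            // Registers all type aliases and builtin function signatures into the
            // compilation context; problems are reported through context.Log.
            loader.LoadHeader(declarations);
            return true;
        }
    }
}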
- /// - public void LoadHeader(ASTDeclarations declarations) + /// + /// Loads all declarations from a story header file. + /// + public void LoadHeader(ASTDeclarations declarations) + { + foreach (var alias in declarations.Aliases) { - foreach (var alias in declarations.Aliases) - { - LoadAliasFromAST(alias); - } + LoadAliasFromAST(alias); + } - foreach (var func in declarations.Functions) - { - LoadFunctionFromAST(func); - } + foreach (var func in declarations.Functions) + { + LoadFunctionFromAST(func); } } } diff --git a/LSLib/LS/Story/Compiler/IR.cs b/LSLib/LS/Story/Compiler/IR.cs index 64ae8c85..d6fb4bc1 100644 --- a/LSLib/LS/Story/Compiler/IR.cs +++ b/LSLib/LS/Story/Compiler/IR.cs @@ -5,341 +5,340 @@ using System.Text; using System.Threading.Tasks; -namespace LSLib.LS.Story.Compiler -{ - /// - /// Parent class for IR (Intermediate Representation) references. - /// These are names that were passed on from the AST, but - /// may not be defined at the time of parsing. - /// - public abstract class IRReference - { - public NameType Name; - protected CompilationContext Context; +namespace LSLib.LS.Story.Compiler; - public bool IsNull - { - get { return Name == null; } - } - - public bool IsValid - { - get { return Name != null; } - } - - public IRReference() - { - } +/// +/// Parent class for IR (Intermediate Representation) references. +/// These are names that were passed on from the AST, but +/// may not be defined at the time of parsing. +/// +public abstract class IRReference +{ + public NameType Name; + protected CompilationContext Context; - public IRReference(NameType name) - { - Name = name; - } + public bool IsNull + { + get { return Name == null; } + } - public void Bind(CompilationContext context) - { - if (Context == null) - Context = context; - else - throw new InvalidOperationException("Reference already bound to a compilation context!"); - } - - abstract public ReferencedType Resolve(); + public bool IsValid + { + get { return Name != null; } } - /// - /// Named reference to a story goal. - /// - public class IRGoalRef : IRReference + public IRReference() { - public IRGoalRef(String name) - : base(name) - { - } + } - public override IRGoal Resolve() - { - if (IsNull) - return null; - else - return Context.LookupGoal(Name); - } + public IRReference(NameType name) + { + Name = name; } - /// - /// Named reference to a story symbol (proc, query, event). - /// - public class IRSymbolRef : IRReference + public void Bind(CompilationContext context) { - public IRSymbolRef(FunctionNameAndArity name) - : base(name) - { - } + if (Context == null) + Context = context; + else + throw new InvalidOperationException("Reference already bound to a compilation context!"); + } + + abstract public ReferencedType Resolve(); +} - public override FunctionSignature Resolve() - { - if (IsNull) - return null; - else - return Context.LookupSignature(Name); - } +/// +/// Named reference to a story goal. +/// +public class IRGoalRef : IRReference +{ + public IRGoalRef(String name) + : base(name) + { } - /// - /// Goal dependency edge from subgial to parent - /// - public class IRTargetEdge + public override IRGoal Resolve() { - // Goal name - public IRGoalRef Goal; - // Location of code reference - public CodeLocation Location; + if (IsNull) + return null; + else + return Context.LookupGoal(Name); } +} - /// - /// Goal node - contains everything from a goal file. - /// - public class IRGoal +/// +/// Named reference to a story symbol (proc, query, event). 
+/// +public class IRSymbolRef : IRReference +{ + public IRSymbolRef(FunctionNameAndArity name) + : base(name) { - // Goal name (derived from filename) - public String Name; - // Facts in the INITSECTION part - public List InitSection; - // List of all production rules (including procs and queries) from the KBSECTION part - public List KBSection; - // Ffacts in the EXITSECTION part - public List ExitSection; - // Parent goals (if any) - public List ParentTargetEdges; - // Location of node in source code - public CodeLocation Location; } - /// - /// Osiris fact statement from the INIT or EXIT section. - /// - public class IRFact + public override FunctionSignature Resolve() { - // Database we're inserting into / deleting from - public IRSymbolRef Database; - // Fact negation ("DB_Something(1)" vs. "NOT DB_Something(1)"). - public bool Not; - // List of values in the fact tuple - public List Elements; - // Goal that we're completing - public IRGoal Goal; - // Location of node in source code - public CodeLocation Location; + if (IsNull) + return null; + else + return Context.LookupSignature(Name); } +} - /// - /// Describes a production rule in the KB section - /// - public class IRRule +/// +/// Goal dependency edge from subgial to parent +/// +public class IRTargetEdge +{ + // Goal name + public IRGoalRef Goal; + // Location of code reference + public CodeLocation Location; +} + +/// +/// Goal node - contains everything from a goal file. +/// +public class IRGoal +{ + // Goal name (derived from filename) + public String Name; + // Facts in the INITSECTION part + public List InitSection; + // List of all production rules (including procs and queries) from the KBSECTION part + public List KBSection; + // Ffacts in the EXITSECTION part + public List ExitSection; + // Parent goals (if any) + public List ParentTargetEdges; + // Location of node in source code + public CodeLocation Location; +} + +/// +/// Osiris fact statement from the INIT or EXIT section. +/// +public class IRFact +{ + // Database we're inserting into / deleting from + public IRSymbolRef Database; + // Fact negation ("DB_Something(1)" vs. "NOT DB_Something(1)"). 
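A hypothetical use of the late-bound symbol reference shown above: the name/arity pair is recorded at parse time, bound to a compilation context once, and only resolves to a FunctionSignature after the corresponding symbol has been registered (the database name below is invented):

using LSLib.LS.Story.Compiler;

static class SymbolRefExample
{
    static FunctionSignature LookupDatabase(CompilationContext context)
    {
        var dbRef = new IRSymbolRef(new FunctionNameAndArity("DB_MyMod_Flags", 1));
        dbRef.Bind(context);    // throws if the reference is bound a second time
        return dbRef.Resolve(); // null while DB_MyMod_Flags/1 is not yet registered
    }
}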
+ public bool Not; + // List of values in the fact tuple + public List Elements; + // Goal that we're completing + public IRGoal Goal; + // Location of node in source code + public CodeLocation Location; +} + +/// +/// Describes a production rule in the KB section +/// +public class IRRule +{ + public IRGoal Goal; + // Type of rule (if, proc or query) + public RuleType Type; + // Conditions/predicates + public List Conditions; + // Actions to execute on tuples that satisfy the conditions + public List Actions; + // Rule-local variables + public List Variables; + // Rule-local variables by name + public Dictionary VariablesByName; + // Location of node in source code + public CodeLocation Location; + + public IRRuleVariable FindOrAddVariable(String name, ValueType type) { - public IRGoal Goal; - // Type of rule (if, proc or query) - public RuleType Type; - // Conditions/predicates - public List Conditions; - // Actions to execute on tuples that satisfy the conditions - public List Actions; - // Rule-local variables - public List Variables; - // Rule-local variables by name - public Dictionary VariablesByName; - // Location of node in source code - public CodeLocation Location; - - public IRRuleVariable FindOrAddVariable(String name, ValueType type) + if (name.Length < 1 || name[0] != '_') { - if (name.Length < 1 || name[0] != '_') - { - throw new ArgumentException("Local variable name must start with an underscore"); - } + throw new ArgumentException("Local variable name must start with an underscore"); + } - IRRuleVariable v = null; - // Only resolve the variable if it has a name. - // Unnamed variables are never resolved by name, and all references are assigned - // to a separate variable "slot" - if (name.Length > 1) + IRRuleVariable v = null; + // Only resolve the variable if it has a name. + // Unnamed variables are never resolved by name, and all references are assigned + // to a separate variable "slot" + if (name.Length > 1) + { + VariablesByName.TryGetValue(name.ToLowerInvariant(), out v); + } + + if (v == null) + { + // Allocate a new variable slot if no variable with the same name exists + v = new IRRuleVariable { - VariablesByName.TryGetValue(name.ToLowerInvariant(), out v); - } + Index = Variables.Count, + Name = name, + Type = type, + FirstBindingIndex = -1 + }; + + Variables.Add(v); - if (v == null) + if (name.Length > 1) { - // Allocate a new variable slot if no variable with the same name exists - v = new IRRuleVariable - { - Index = Variables.Count, - Name = name, - Type = type, - FirstBindingIndex = -1 - }; - - Variables.Add(v); - - if (name.Length > 1) - { - VariablesByName.Add(name.ToLowerInvariant(), v); - } + VariablesByName.Add(name.ToLowerInvariant(), v); } - - return v; } - } - /// - /// Rule-level local variable. - /// - public class IRRuleVariable - { - // Index of the variable within the rule. - // Indices start from zero. - public Int32 Index; - // Local name of the variable. - // This is only used during compilation and is discarded - // when emitting the final story file. - public String Name; - // Type of the rule variable - public ValueType Type; - // Index of condition that first bound this variable - public Int32 FirstBindingIndex; - // TODO - add inferred type marker! - - public bool IsUnused() - { - return Name.Length == 1; - } + return v; } +} - /// - /// Production rule condition/predicate. - /// - public class IRCondition - { - // Number of columns in the output tuple of this condition. 
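A hypothetical illustration of FindOrAddVariable above: named variables resolve case-insensitively to a shared slot, while the anonymous "_" always receives a fresh slot. The generic arguments of Variables and VariablesByName are assumed to be IRRuleVariable, keyed by lower-cased name, matching how the method uses them.

using System.Collections.Generic;
using LSLib.LS.Story.Compiler;

static class RuleVariableExample
{
    static void Demo()
    {
        var rule = new IRRule
        {
            Variables = new List<IRRuleVariable>(),
            VariablesByName = new Dictionary<string, IRRuleVariable>()
        };

        // Names must start with an underscore, otherwise ArgumentException is thrown.
        var a = rule.FindOrAddVariable("_Char", null); // new slot 0 (type not yet known)
        var b = rule.FindOrAddVariable("_CHAR", null); // same slot as a: lookup is case-insensitive
        var c = rule.FindOrAddVariable("_", null);     // anonymous: always a fresh slot
        var d = rule.FindOrAddVariable("_", null);     // anonymous again: another fresh slot
        // a.Index == b.Index == 0, c.Index == 1, d.Index == 2
    }
}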
- public Int32 TupleSize; - // Location of node in source code - public CodeLocation Location; - } +/// +/// Rule-level local variable. +/// +public class IRRuleVariable +{ + // Index of the variable within the rule. + // Indices start from zero. + public Int32 Index; + // Local name of the variable. + // This is only used during compilation and is discarded + // when emitting the final story file. + public String Name; + // Type of the rule variable + public ValueType Type; + // Index of condition that first bound this variable + public Int32 FirstBindingIndex; + // TODO - add inferred type marker! - /// - /// "Function call-like" predicate - a div query, a user query or a database filter. - /// (i.e. "AND SomeFunc(1, 2)" or "AND NOT SomeFunc(1, 2)") - /// - public class IRFuncCondition : IRCondition + public bool IsUnused() { - // Query/Database name - // (We don't know yet whether this is a query or a database - this info will only be - // available during phase2 parsing) - public IRSymbolRef Func; - // Condition negation ("AND DB_Something(1)" vs. "AND NOT DB_Something(1)"). - public bool Not; - // List of query parameters / database tuple columns - public List Params; + return Name.Length == 1; } +} - /// - /// Predicate with a binary operator (i.e. "A >= B", "A == B", ...) - /// - public class IRBinaryCondition : IRCondition - { - // Left-hand value - public IRValue LValue; - // Operator - public RelOpType Op; - // Right-hand value - public IRValue RValue; - } - - /// - /// Statement in the THEN part of a rule. - /// This is either a builtin PROC call, user PROC call, a database insert/delete operation, - /// or a goal completion statement. - /// - public class IRStatement - { - // Proc/Database name - // (We don't know yet whether this is a PROC or a DB - this info will only be - // available during phase2 parsing) - public IRSymbolRef Func; - // Goal to complete - // (Reference is empty if this statement doesn't trigger a goal completion) - public IRGoal Goal; - // Statement negation ("DB_Something(1)" vs. "NOT DB_Something(1)"). - public bool Not; - // List of PROC parameters / database tuple columns - public List Params; - // Location of node in source code - public CodeLocation Location; - } +/// +/// Production rule condition/predicate. +/// +public class IRCondition +{ + // Number of columns in the output tuple of this condition. + public Int32 TupleSize; + // Location of node in source code + public CodeLocation Location; +} - public class IRValue - { - // Type of variable, if specified in the code. - // (e.g. "(ITEMGUID)_Var") - public ValueType Type; - // Location of node in source code - public CodeLocation Location; - } +/// +/// "Function call-like" predicate - a div query, a user query or a database filter. +/// (i.e. "AND SomeFunc(1, 2)" or "AND NOT SomeFunc(1, 2)") +/// +public class IRFuncCondition : IRCondition +{ + // Query/Database name + // (We don't know yet whether this is a query or a database - this info will only be + // available during phase2 parsing) + public IRSymbolRef Func; + // Condition negation ("AND DB_Something(1)" vs. "AND NOT DB_Something(1)"). + public bool Not; + // List of query parameters / database tuple columns + public List Params; +} - /// - /// Constant value type. This is the type we see during story - /// script parsing, which is not necessarily the same as the - /// Osiris type. 
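The two ComputeTupleSize overloads in Compiler.cs above implement one rule for the TupleSize field: a condition's output tuple must be wide enough for every variable slot it references and never shrinks below the previous condition's width. A condensed, standalone sketch over plain variable indices (the helper name and the _A/_B variable names in the comments are hypothetical):

using System.Collections.Generic;

static class TupleWidth
{
    static int ComputeTupleSize(int lastTupleSize, IEnumerable<int> variableIndices)
    {
        int tupleSize = lastTupleSize;
        foreach (var index in variableIndices)
        {
            if (index >= tupleSize)
            {
                tupleSize = index + 1; // widen to cover the highest slot referenced
            }
        }
        return tupleSize;
    }

    // ComputeTupleSize(0, new[] { 0, 1 }) == 2   first condition binds _A and _B
    // ComputeTupleSize(2, new[] { 1 })    == 2   re-using _B keeps the width
    // ComputeTupleSize(2, new[] { 3 })    == 4   binding a new slot widens the tuple
}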
- /// - public enum IRConstantType - { - Unknown = 0, - Integer = 1, - Float = 2, - String = 3, - Name = 4 - } +/// +/// Predicate with a binary operator (i.e. "A >= B", "A == B", ...) +/// +public class IRBinaryCondition : IRCondition +{ + // Left-hand value + public IRValue LValue; + // Operator + public RelOpType Op; + // Right-hand value + public IRValue RValue; +} + +/// +/// Statement in the THEN part of a rule. +/// This is either a builtin PROC call, user PROC call, a database insert/delete operation, +/// or a goal completion statement. +/// +public class IRStatement +{ + // Proc/Database name + // (We don't know yet whether this is a PROC or a DB - this info will only be + // available during phase2 parsing) + public IRSymbolRef Func; + // Goal to complete + // (Reference is empty if this statement doesn't trigger a goal completion) + public IRGoal Goal; + // Statement negation ("DB_Something(1)" vs. "NOT DB_Something(1)"). + public bool Not; + // List of PROC parameters / database tuple columns + public List Params; + // Location of node in source code + public CodeLocation Location; +} - /// - /// Constant scalar value. - /// - public class IRConstant : IRValue +public class IRValue +{ + // Type of variable, if specified in the code. + // (e.g. "(ITEMGUID)_Var") + public ValueType Type; + // Location of node in source code + public CodeLocation Location; +} + +/// +/// Constant value type. This is the type we see during story +/// script parsing, which is not necessarily the same as the +/// Osiris type. +/// +public enum IRConstantType +{ + Unknown = 0, + Integer = 1, + Float = 2, + String = 3, + Name = 4 +} + +/// +/// Constant scalar value. +/// +public class IRConstant : IRValue +{ + // Internal type of the constant + // This is not the same as the Osiris type; e.g. a value of type CHARACTERGUID + // will be stored with a constant type of "Name". It also doesn't differentiate + // between INT and INT64 as we don't know the exact Osiris type without contextual + // type inference, which will happen in later stages. + public IRConstantType ValueType; + // Was the type info retrieved from the AST or inferred? + public bool InferredType; + // Value of this constant if the type is Integer. + public Int64 IntegerValue; + // Value of this constant if the type is Float. + public Single FloatValue; + // Value of this constant if the type is String or Name. + public String StringValue; + + public override string ToString() { - // Internal type of the constant - // This is not the same as the Osiris type; e.g. a value of type CHARACTERGUID - // will be stored with a constant type of "Name". It also doesn't differentiate - // between INT and INT64 as we don't know the exact Osiris type without contextual - // type inference, which will happen in later stages. - public IRConstantType ValueType; - // Was the type info retrieved from the AST or inferred? - public bool InferredType; - // Value of this constant if the type is Integer. - public Int64 IntegerValue; - // Value of this constant if the type is Float. - public Single FloatValue; - // Value of this constant if the type is String or Name. 
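A few hypothetical IRConstant values as the parser would produce them, illustrating the comment above: a GUID-typed literal is still just a Name at this stage, and the exact Osiris type (e.g. CHARACTERGUID) is only attached during later type inference. The literal strings are invented.

using System;
using LSLib.LS.Story.Compiler;

static class ConstantExample
{
    static void Demo()
    {
        var count = new IRConstant { ValueType = IRConstantType.Integer, IntegerValue = 3 };
        var label = new IRConstant { ValueType = IRConstantType.String, StringValue = "Hello" };
        var guid  = new IRConstant
        {
            ValueType = IRConstantType.Name,
            StringValue = "CHARACTERGUID_Player_00000000-0000-0000-0000-000000000000"
        };

        Console.WriteLine(count); // 3
        Console.WriteLine(label); // "Hello"  (ToString re-adds the quotes)
        Console.WriteLine(guid);  // printed as-is, without quotes
    }
}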
- public String StringValue; - - public override string ToString() + switch (ValueType) { - switch (ValueType) - { - case IRConstantType.Unknown: return "(unknown)"; - case IRConstantType.Integer: return IntegerValue.ToString(); - case IRConstantType.Float: return FloatValue.ToString(); - case IRConstantType.String: return "\"" + StringValue + "\""; - case IRConstantType.Name: return StringValue; - default: return "(unknown type)"; - } + case IRConstantType.Unknown: return "(unknown)"; + case IRConstantType.Integer: return IntegerValue.ToString(); + case IRConstantType.Float: return FloatValue.ToString(); + case IRConstantType.String: return "\"" + StringValue + "\""; + case IRConstantType.Name: return StringValue; + default: return "(unknown type)"; } } +} - /// - /// Rule-local variable name. - /// (Any variable that begins with an underscore in the IF or THEN part of a rule) - /// - public class IRVariable : IRValue - { - // Index of variable in the rule variable list - public Int32 Index; - } +/// +/// Rule-local variable name. +/// (Any variable that begins with an underscore in the IF or THEN part of a rule) +/// +public class IRVariable : IRValue +{ + // Index of variable in the rule variable list + public Int32 Index; } diff --git a/LSLib/LS/Story/Compiler/IRGenerator.cs b/LSLib/LS/Story/Compiler/IRGenerator.cs index eebe685a..b3922eb1 100644 --- a/LSLib/LS/Story/Compiler/IRGenerator.cs +++ b/LSLib/LS/Story/Compiler/IRGenerator.cs @@ -6,306 +6,305 @@ using System.Text; using System.Threading.Tasks; -namespace LSLib.LS.Story.Compiler +namespace LSLib.LS.Story.Compiler; + +/// +/// Generates IR from story AST. +/// +public class IRGenerator { - /// - /// Generates IR from story AST. - /// - public class IRGenerator + private CompilationContext Context; + public CodeLocation LastLocation; + + public IRGenerator(CompilationContext context) { - private CompilationContext Context; - public CodeLocation LastLocation; + Context = context; + } - public IRGenerator(CompilationContext context) + private IRGoal ASTGoalToIR(ASTGoal astGoal) + { + var goal = new IRGoal { - Context = context; - } + InitSection = new List(astGoal.InitSection.Count), + KBSection = new List(astGoal.KBSection.Count), + ExitSection = new List(astGoal.ExitSection.Count), + ParentTargetEdges = new List(astGoal.ParentTargetEdges.Count), + Location = astGoal.Location + }; - private IRGoal ASTGoalToIR(ASTGoal astGoal) + foreach (var fact in astGoal.InitSection) { - var goal = new IRGoal - { - InitSection = new List(astGoal.InitSection.Count), - KBSection = new List(astGoal.KBSection.Count), - ExitSection = new List(astGoal.ExitSection.Count), - ParentTargetEdges = new List(astGoal.ParentTargetEdges.Count), - Location = astGoal.Location - }; - - foreach (var fact in astGoal.InitSection) - { - goal.InitSection.Add(ASTFactToIR(goal, fact)); - } - - foreach (var rule in astGoal.KBSection) - { - goal.KBSection.Add(ASTRuleToIR(goal, rule)); - } - - foreach (var fact in astGoal.ExitSection) - { - goal.ExitSection.Add(ASTFactToIR(goal, fact)); - } + goal.InitSection.Add(ASTFactToIR(goal, fact)); + } - foreach (var refGoal in astGoal.ParentTargetEdges) - { - var edge = new IRTargetEdge(); - edge.Goal = new IRGoalRef(refGoal.Goal); - edge.Location = refGoal.Location; - goal.ParentTargetEdges.Add(edge); - } + foreach (var rule in astGoal.KBSection) + { + goal.KBSection.Add(ASTRuleToIR(goal, rule)); + } - return goal; + foreach (var fact in astGoal.ExitSection) + { + goal.ExitSection.Add(ASTFactToIR(goal, fact)); } - private IRRule 
ASTRuleToIR(IRGoal goal, ASTRule astRule) + foreach (var refGoal in astGoal.ParentTargetEdges) { - var rule = new IRRule - { - Goal = goal, - Type = astRule.Type, - Conditions = new List(astRule.Conditions.Count), - Actions = new List(astRule.Actions.Count), - Variables = new List(), - VariablesByName = new Dictionary(), - Location = astRule.Location - }; + var edge = new IRTargetEdge(); + edge.Goal = new IRGoalRef(refGoal.Goal); + edge.Location = refGoal.Location; + goal.ParentTargetEdges.Add(edge); + } - foreach (var condition in astRule.Conditions) - { - rule.Conditions.Add(ASTConditionToIR(rule, condition)); - } + return goal; + } - foreach (var action in astRule.Actions) - { - rule.Actions.Add(ASTActionToIR(rule, action)); - } + private IRRule ASTRuleToIR(IRGoal goal, ASTRule astRule) + { + var rule = new IRRule + { + Goal = goal, + Type = astRule.Type, + Conditions = new List(astRule.Conditions.Count), + Actions = new List(astRule.Actions.Count), + Variables = new List(), + VariablesByName = new Dictionary(), + Location = astRule.Location + }; - return rule; + foreach (var condition in astRule.Conditions) + { + rule.Conditions.Add(ASTConditionToIR(rule, condition)); } - private IRStatement ASTActionToIR(IRRule rule, ASTAction astAction) + foreach (var action in astRule.Actions) { - if (astAction is ASTGoalCompletedAction) - { - var astGoal = astAction as ASTGoalCompletedAction; - return new IRStatement - { - Func = null, - Goal = rule.Goal, - Not = false, - Params = new List(), - Location = astAction.Location - }; - } - else if (astAction is ASTStatement) - { - var astStmt = astAction as ASTStatement; - var stmt = new IRStatement - { - Func = new IRSymbolRef(new FunctionNameAndArity(astStmt.Name, astStmt.Params.Count)), - Goal = null, - Not = astStmt.Not, - Params = new List(astStmt.Params.Count), - Location = astAction.Location - }; + rule.Actions.Add(ASTActionToIR(rule, action)); + } - foreach (var param in astStmt.Params) - { - stmt.Params.Add(ASTValueToIR(rule, param)); - } + return rule; + } - return stmt; - } - else + private IRStatement ASTActionToIR(IRRule rule, ASTAction astAction) + { + if (astAction is ASTGoalCompletedAction) + { + var astGoal = astAction as ASTGoalCompletedAction; + return new IRStatement { - throw new InvalidOperationException("Cannot convert unknown AST condition type to IR"); - } + Func = null, + Goal = rule.Goal, + Not = false, + Params = new List(), + Location = astAction.Location + }; } - - private IRCondition ASTConditionToIR(IRRule rule, ASTCondition astCondition) + else if (astAction is ASTStatement) { - if (astCondition is ASTFuncCondition) + var astStmt = astAction as ASTStatement; + var stmt = new IRStatement { - var astFunc = astCondition as ASTFuncCondition; - var func = new IRFuncCondition - { - Func = new IRSymbolRef(new FunctionNameAndArity(astFunc.Name, astFunc.Params.Count)), - Not = astFunc.Not, - Params = new List(astFunc.Params.Count), - TupleSize = -1, - Location = astCondition.Location - }; - - foreach (var param in astFunc.Params) - { - func.Params.Add(ASTValueToIR(rule, param)); - } + Func = new IRSymbolRef(new FunctionNameAndArity(astStmt.Name, astStmt.Params.Count)), + Goal = null, + Not = astStmt.Not, + Params = new List(astStmt.Params.Count), + Location = astAction.Location + }; - return func; - } - else if (astCondition is ASTBinaryCondition) - { - var astBin = astCondition as ASTBinaryCondition; - return new IRBinaryCondition - { - LValue = ASTValueToIR(rule, astBin.LValue), - Op = astBin.Op, - RValue = ASTValueToIR(rule, 
astBin.RValue), - TupleSize = -1, - Location = astCondition.Location - }; - } - else + foreach (var param in astStmt.Params) { - throw new InvalidOperationException("Cannot convert unknown AST condition type to IR"); + stmt.Params.Add(ASTValueToIR(rule, param)); } - } - private IRValue ASTValueToIR(IRRule rule, ASTRValue astValue) + return stmt; + } + else { - if (astValue is ASTConstantValue) - { - return ASTConstantToIR(astValue as ASTConstantValue); - } - else if (astValue is ASTLocalVar) - { - var astVar = astValue as ASTLocalVar; - // TODO - compiler error if type resolution fails - ValueType type; - if (astVar.Type != null) - { - type = Context.LookupType(astVar.Type); - if (type == null) - { - Context.Log.Error(astVar.Location, DiagnosticCode.UnresolvedType, - String.Format("Type \"{0}\" does not exist", astVar.Type)); - } - } - else - { - type = null; - } - - var ruleVar = rule.FindOrAddVariable(astVar.Name, type); - - return new IRVariable - { - Index = ruleVar.Index, - Type = type, - Location = astValue.Location - }; - } - else - { - throw new InvalidOperationException("Cannot convert unknown AST value type to IR"); - } + throw new InvalidOperationException("Cannot convert unknown AST condition type to IR"); } + } - private IRFact ASTFactToIR(IRGoal goal, ASTBaseFact astFact) + private IRCondition ASTConditionToIR(IRRule rule, ASTCondition astCondition) + { + if (astCondition is ASTFuncCondition) { - if (astFact is ASTFact) + var astFunc = astCondition as ASTFuncCondition; + var func = new IRFuncCondition { - var f = astFact as ASTFact; - var fact = new IRFact - { - Database = new IRSymbolRef(new FunctionNameAndArity(f.Database, f.Elements.Count)), - Not = f.Not, - Elements = new List(f.Elements.Count), - Goal = null, - Location = f.Location - }; - - foreach (var element in f.Elements) - { - fact.Elements.Add(ASTConstantToIR(element)); - } + Func = new IRSymbolRef(new FunctionNameAndArity(astFunc.Name, astFunc.Params.Count)), + Not = astFunc.Not, + Params = new List(astFunc.Params.Count), + TupleSize = -1, + Location = astCondition.Location + }; - return fact; - } - else if (astFact is ASTGoalCompletedFact) - { - var f = astFact as ASTGoalCompletedFact; - return new IRFact - { - Database = null, - Not = false, - Elements = new List(), - Goal = goal, - Location = f.Location - }; - } - else + foreach (var param in astFunc.Params) { - throw new InvalidOperationException("Cannot convert unknown AST fact type to IR"); + func.Params.Add(ASTValueToIR(rule, param)); } - } - // TODO - un-copy + move to constant code? 
- private ValueType ConstantTypeToValueType(IRConstantType type) + return func; + } + else if (astCondition is ASTBinaryCondition) { - switch (type) + var astBin = astCondition as ASTBinaryCondition; + return new IRBinaryCondition { - case IRConstantType.Unknown: return null; - // TODO - lookup type ID from enum - case IRConstantType.Integer: return Context.TypesById[1]; - case IRConstantType.Float: return Context.TypesById[3]; - case IRConstantType.String: return Context.TypesById[4]; - case IRConstantType.Name: return Context.TypesById[5]; - default: throw new ArgumentException("Invalid IR constant type"); - } + LValue = ASTValueToIR(rule, astBin.LValue), + Op = astBin.Op, + RValue = ASTValueToIR(rule, astBin.RValue), + TupleSize = -1, + Location = astCondition.Location + }; + } + else + { + throw new InvalidOperationException("Cannot convert unknown AST condition type to IR"); } + } - private IRConstant ASTConstantToIR(ASTConstantValue astConstant) + private IRValue ASTValueToIR(IRRule rule, ASTRValue astValue) + { + if (astValue is ASTConstantValue) + { + return ASTConstantToIR(astValue as ASTConstantValue); + } + else if (astValue is ASTLocalVar) { + var astVar = astValue as ASTLocalVar; + // TODO - compiler error if type resolution fails ValueType type; - if (astConstant.TypeName != null) + if (astVar.Type != null) { - type = Context.LookupType(astConstant.TypeName); + type = Context.LookupType(astVar.Type); if (type == null) { - Context.Log.Error(astConstant.Location, DiagnosticCode.UnresolvedType, - String.Format("Type \"{0}\" does not exist", astConstant.TypeName)); + Context.Log.Error(astVar.Location, DiagnosticCode.UnresolvedType, + String.Format("Type \"{0}\" does not exist", astVar.Type)); } } else { - type = ConstantTypeToValueType(astConstant.Type); + type = null; } - return new IRConstant + var ruleVar = rule.FindOrAddVariable(astVar.Name, type); + + return new IRVariable { - ValueType = astConstant.Type, + Index = ruleVar.Index, Type = type, - InferredType = astConstant.TypeName != null, - IntegerValue = astConstant.IntegerValue, - FloatValue = astConstant.FloatValue, - StringValue = astConstant.StringValue, - Location = astConstant.Location + Location = astValue.Location }; } + else + { + throw new InvalidOperationException("Cannot convert unknown AST value type to IR"); + } + } - public ASTGoal ParseGoal(String path, Stream stream) + private IRFact ASTFactToIR(IRGoal goal, ASTBaseFact astFact) + { + if (astFact is ASTFact) { - var scanner = new GoalScanner(path); - scanner.SetSource(stream); - var parser = new GoalParser.GoalParser(scanner); - bool parsed = parser.Parse(); + var f = astFact as ASTFact; + var fact = new IRFact + { + Database = new IRSymbolRef(new FunctionNameAndArity(f.Database, f.Elements.Count)), + Not = f.Not, + Elements = new List(f.Elements.Count), + Goal = null, + Location = f.Location + }; - if (parsed) + foreach (var element in f.Elements) { - return parser.GetGoal(); + fact.Elements.Add(ASTConstantToIR(element)); } - else + + return fact; + } + else if (astFact is ASTGoalCompletedFact) + { + var f = astFact as ASTGoalCompletedFact; + return new IRFact + { + Database = null, + Not = false, + Elements = new List(), + Goal = goal, + Location = f.Location + }; + } + else + { + throw new InvalidOperationException("Cannot convert unknown AST fact type to IR"); + } + } + + // TODO - un-copy + move to constant code? 
+ private ValueType ConstantTypeToValueType(IRConstantType type) + { + switch (type) + { + case IRConstantType.Unknown: return null; + // TODO - lookup type ID from enum + case IRConstantType.Integer: return Context.TypesById[1]; + case IRConstantType.Float: return Context.TypesById[3]; + case IRConstantType.String: return Context.TypesById[4]; + case IRConstantType.Name: return Context.TypesById[5]; + default: throw new ArgumentException("Invalid IR constant type"); + } + } + + private IRConstant ASTConstantToIR(ASTConstantValue astConstant) + { + ValueType type; + if (astConstant.TypeName != null) + { + type = Context.LookupType(astConstant.TypeName); + if (type == null) { - this.LastLocation = scanner.LastLocation(); - return null; + Context.Log.Error(astConstant.Location, DiagnosticCode.UnresolvedType, + String.Format("Type \"{0}\" does not exist", astConstant.TypeName)); } } + else + { + type = ConstantTypeToValueType(astConstant.Type); + } - public IRGoal GenerateGoalIR(ASTGoal goal) + return new IRConstant { - return ASTGoalToIR(goal); + ValueType = astConstant.Type, + Type = type, + InferredType = astConstant.TypeName != null, + IntegerValue = astConstant.IntegerValue, + FloatValue = astConstant.FloatValue, + StringValue = astConstant.StringValue, + Location = astConstant.Location + }; + } + + public ASTGoal ParseGoal(String path, Stream stream) + { + var scanner = new GoalScanner(path); + scanner.SetSource(stream); + var parser = new GoalParser.GoalParser(scanner); + bool parsed = parser.Parse(); + + if (parsed) + { + return parser.GetGoal(); + } + else + { + this.LastLocation = scanner.LastLocation(); + return null; } } + + public IRGoal GenerateGoalIR(ASTGoal goal) + { + return ASTGoalToIR(goal); + } } diff --git a/LSLib/LS/Story/Compiler/Preprocessor.cs b/LSLib/LS/Story/Compiler/Preprocessor.cs index eb3da2a6..d90fc5e1 100644 --- a/LSLib/LS/Story/Compiler/Preprocessor.cs +++ b/LSLib/LS/Story/Compiler/Preprocessor.cs @@ -4,68 +4,67 @@ using System.Text; using System.Threading.Tasks; -namespace LSLib.LS.Story.Compiler +namespace LSLib.LS.Story.Compiler; + +public class Preprocessor { - public class Preprocessor + public bool Preprocess(String script, ref String preprocessed) { - public bool Preprocess(String script, ref String preprocessed) + if (script.IndexOf("/* [OSITOOLS_ONLY]", StringComparison.Ordinal) == -1 + && script.IndexOf("// [BEGIN_NO_OSITOOLS]", StringComparison.Ordinal) == -1) + { + return false; + } + + var builder = new StringBuilder(script.Length); + + int pos = 0; + while (pos < script.Length) { - if (script.IndexOf("/* [OSITOOLS_ONLY]", StringComparison.Ordinal) == -1 - && script.IndexOf("// [BEGIN_NO_OSITOOLS]", StringComparison.Ordinal) == -1) + var next = script.IndexOf("/* [OSITOOLS_ONLY]", pos, StringComparison.Ordinal); + if (next == -1) { - return false; + builder.Append(script.Substring(pos)); + break; } - var builder = new StringBuilder(script.Length); - - int pos = 0; - while (pos < script.Length) + var end = script.IndexOf("*/", next, StringComparison.Ordinal); + if (end == -1) { - var next = script.IndexOf("/* [OSITOOLS_ONLY]", pos, StringComparison.Ordinal); - if (next == -1) - { - builder.Append(script.Substring(pos)); - break; - } - - var end = script.IndexOf("*/", next, StringComparison.Ordinal); - if (end == -1) - { - builder.Append(script.Substring(pos)); - break; - } - - builder.Append(script.Substring(pos, next - pos)); - builder.Append(script.Substring(next + 19, end - next - 19)); - pos = end + 2; + builder.Append(script.Substring(pos)); 
+ break; } - var ph1 = builder.ToString(); - var builderPh2 = new StringBuilder(ph1.Length); + builder.Append(script.Substring(pos, next - pos)); + builder.Append(script.Substring(next + 19, end - next - 19)); + pos = end + 2; + } - pos = 0; - while (pos < ph1.Length) - { - int next = ph1.IndexOf("// [BEGIN_NO_OSITOOLS]", pos, StringComparison.Ordinal); - if (next == -1) - { - builderPh2.Append(ph1.Substring(pos)); - break; - } + var ph1 = builder.ToString(); + var builderPh2 = new StringBuilder(ph1.Length); - var end = ph1.IndexOf("// [END_NO_OSITOOLS]", next, StringComparison.Ordinal); - if (end == -1) - { - builderPh2.Append(ph1.Substring(pos)); - break; - } + pos = 0; + while (pos < ph1.Length) + { + int next = ph1.IndexOf("// [BEGIN_NO_OSITOOLS]", pos, StringComparison.Ordinal); + if (next == -1) + { + builderPh2.Append(ph1.Substring(pos)); + break; + } - builderPh2.Append(ph1.Substring(pos, next - pos)); - pos = end + 21; + var end = ph1.IndexOf("// [END_NO_OSITOOLS]", next, StringComparison.Ordinal); + if (end == -1) + { + builderPh2.Append(ph1.Substring(pos)); + break; } - preprocessed = builderPh2.ToString(); - return true; + builderPh2.Append(ph1.Substring(pos, next - pos)); + pos = end + 21; } + + preprocessed = builderPh2.ToString(); + return true; } } diff --git a/LSLib/LS/Story/Compiler/StoryEmitter.cs b/LSLib/LS/Story/Compiler/StoryEmitter.cs index ca544ea5..c1c8d42b 100644 --- a/LSLib/LS/Story/Compiler/StoryEmitter.cs +++ b/LSLib/LS/Story/Compiler/StoryEmitter.cs @@ -7,359 +7,324 @@ using System.Text; using System.Threading.Tasks; -namespace LSLib.LS.Story.Compiler +namespace LSLib.LS.Story.Compiler; + +/// +/// Type of reference being made to a named function. +/// +public enum NameRefType { - /// - /// Type of reference being made to a named function. 
- /// - public enum NameRefType + // Function is not referenced, only emitted + None, + // Function is referenced in the IF part of a rule + Condition, + // Function referenced in the THEN part of a rule, or init/exit section of a goal + Action +} + +public class StoryEmitter +{ + private CompilationContext Context; + private Story Story; + private Dictionary Goals = new Dictionary(); + private Dictionary Databases = new Dictionary(); + private Dictionary Funcs = new Dictionary(); + private Dictionary FuncEntries = new Dictionary(); + private Dictionary Rules = new Dictionary(); + public StoryDebugInfo DebugInfo; + + public StoryEmitter(CompilationContext context) { - // Function is not referenced, only emitted - None, - // Function is referenced in the IF part of a rule - Condition, - // Function referenced in the THEN part of a rule, or init/exit section of a goal - Action + Context = context; } - public class StoryEmitter + public void EnableDebugInfo() { - private CompilationContext Context; - private Story Story; - private Dictionary Goals = new Dictionary(); - private Dictionary Databases = new Dictionary(); - private Dictionary Funcs = new Dictionary(); - private Dictionary FuncEntries = new Dictionary(); - private Dictionary Rules = new Dictionary(); - public StoryDebugInfo DebugInfo; + DebugInfo = new StoryDebugInfo(); + DebugInfo.Version = StoryDebugInfo.CurrentVersion; + } - public StoryEmitter(CompilationContext context) + private void AddStoryTypes() + { + foreach (var type in Context.TypesById) { - Context = context; - } + var osiType = new OsirisType(); + osiType.Index = (byte)type.Value.TypeId; + if (type.Value.TypeId == (uint)type.Value.IntrinsicTypeId) + { + osiType.Alias = (byte)0; + osiType.IsBuiltin = true; + } + else + { + osiType.Alias = (byte)type.Value.IntrinsicTypeId; + osiType.IsBuiltin = false; + } - public void EnableDebugInfo() - { - DebugInfo = new StoryDebugInfo(); - DebugInfo.Version = StoryDebugInfo.CurrentVersion; + osiType.Name = type.Value.Name; + Story.Types.Add(osiType.Index, osiType); } + } - private void AddStoryTypes() + private TypedValue EmitTypedValue(IRConstant constant) + { + var osiValue = new TypedValue { - foreach (var type in Context.TypesById) - { - var osiType = new OsirisType(); - osiType.Index = (byte)type.Value.TypeId; - if (type.Value.TypeId == (uint)type.Value.IntrinsicTypeId) - { - osiType.Alias = (byte)0; - osiType.IsBuiltin = true; - } - else - { - osiType.Alias = (byte)type.Value.IntrinsicTypeId; - osiType.IsBuiltin = false; - } - - osiType.Name = type.Value.Name; - Story.Types.Add(osiType.Index, osiType); - } - } + TypeId = constant.Type.TypeId, + IntValue = (int)constant.IntegerValue, + Int64Value = constant.IntegerValue, + FloatValue = constant.FloatValue, + StringValue = constant.StringValue, + + IsValid = true, + OutParam = false, + IsAType = false + }; + + return osiValue; + } - private TypedValue EmitTypedValue(IRConstant constant) + private TypedValue EmitTypedValue(IRValue val) + { + if (val is IRVariable) { - var osiValue = new TypedValue + var variable = val as IRVariable; + return new Variable { - TypeId = constant.Type.TypeId, - IntValue = (int)constant.IntegerValue, - Int64Value = constant.IntegerValue, - FloatValue = constant.FloatValue, - StringValue = constant.StringValue, - - IsValid = true, + TypeId = val.Type.TypeId, + IsValid = false, OutParam = false, - IsAType = false + IsAType = true, + Index = (sbyte)variable.Index, + Unused = false, + Adapted = true }; - - return osiValue; } - - private TypedValue 
EmitTypedValue(IRValue val) + else { - if (val is IRVariable) - { - var variable = val as IRVariable; - return new Variable - { - TypeId = val.Type.TypeId, - IsValid = false, - OutParam = false, - IsAType = true, - Index = (sbyte)variable.Index, - Unused = false, - Adapted = true - }; - } - else - { - return EmitTypedValue(val as IRConstant); - } + return EmitTypedValue(val as IRConstant); } + } - private Value EmitValue(IRConstant constant) + private Value EmitValue(IRConstant constant) + { + var osiValue = new Value { - var osiValue = new Value - { - TypeId = constant.Type.TypeId, - IntValue = (int)constant.IntegerValue, - Int64Value = constant.IntegerValue, - FloatValue = constant.FloatValue, - StringValue = constant.StringValue - }; - - return osiValue; - } + TypeId = constant.Type.TypeId, + IntValue = (int)constant.IntegerValue, + Int64Value = constant.IntegerValue, + FloatValue = constant.FloatValue, + StringValue = constant.StringValue + }; + + return osiValue; + } - private LS.Story.FunctionSignature EmitFunctionSignature(FunctionSignature signature) + private LS.Story.FunctionSignature EmitFunctionSignature(FunctionSignature signature) + { + var osiSignature = new LS.Story.FunctionSignature { - var osiSignature = new LS.Story.FunctionSignature - { - Name = signature.Name, - OutParamMask = new List(signature.Params.Count / 8 + 1), - Parameters = new ParameterList - { - Types = new List(signature.Params.Count) - } - }; - - var outParamBytes = ((signature.Params.Count + 7) & ~7) >> 3; - for (var outByte = 0; outByte < outParamBytes; outByte++) - { - byte outParamByte = 0; - for (var i = outByte * 8; i < Math.Min((outByte + 1) * 8, signature.Params.Count); i++) - { - if (signature.Params[i].Direction == ParamDirection.Out) - { - outParamByte |= (byte)(0x80 >> (i & 7)); - } - } - - osiSignature.OutParamMask.Add(outParamByte); - } - - foreach (var param in signature.Params) + Name = signature.Name, + OutParamMask = new List(signature.Params.Count / 8 + 1), + Parameters = new ParameterList { - osiSignature.Parameters.Types.Add(param.Type.TypeId); + Types = new List(signature.Params.Count) } + }; - return osiSignature; - } - - private void AddNodeDebugInfo(Node node, CodeLocation location, Int32 numColumns, IRRule rule) + var outParamBytes = ((signature.Params.Count + 7) & ~7) >> 3; + for (var outByte = 0; outByte < outParamBytes; outByte++) { - if (DebugInfo != null) + byte outParamByte = 0; + for (var i = outByte * 8; i < Math.Min((outByte + 1) * 8, signature.Params.Count); i++) { - var nodeDebug = new NodeDebugInfo - { - Id = node.Index, - RuleId = 0, - Line = location != null ? 
location.StartLine : 0, - ColumnToVariableMaps = new Dictionary(), - DatabaseId = node.DatabaseRef.Index, - Name = node.Name, - Type = node.NodeType(), - ParentNodeId = 0 - }; - - if (node is JoinNode) - { - nodeDebug.ParentNodeId = (node as JoinNode).LeftParentRef.Index; - } - else if (node is RelNode) + if (signature.Params[i].Direction == ParamDirection.Out) { - nodeDebug.ParentNodeId = (node as RelNode).ParentRef.Index; - } - - if (node.Name != "") - { - nodeDebug.FunctionName = new FunctionNameAndArity(node.Name, node.NumParams); - } - - if (location != null) - { - var columnIndex = 0; - var variableIndex = 0; - while (columnIndex < numColumns) - { - if (!rule.Variables[variableIndex].IsUnused()) - { - nodeDebug.ColumnToVariableMaps.Add(columnIndex, variableIndex); - columnIndex++; - } - - variableIndex++; - } + outParamByte |= (byte)(0x80 >> (i & 7)); } - - DebugInfo.Nodes.Add(nodeDebug.Id, nodeDebug); } - } - private void AddNodeWithoutDebugInfo(Node node) - { - node.Index = (uint)Story.Nodes.Count + 1; - Story.Nodes.Add(node.Index, node); + osiSignature.OutParamMask.Add(outParamByte); } - private void AddNode(Node node) + foreach (var param in signature.Params) { - AddNodeWithoutDebugInfo(node); - AddNodeDebugInfo(node, null, 0, null); + osiSignature.Parameters.Types.Add(param.Type.TypeId); } - private Function EmitFunction(LS.Story.FunctionType type, FunctionSignature signature, NodeReference nodeRef) + return osiSignature; + } + + private void AddNodeDebugInfo(Node node, CodeLocation location, Int32 numColumns, IRRule rule) + { + if (DebugInfo != null) { - var osiFunc = new Function - { - Line = 0, - ConditionReferences = 0, - ActionReferences = 0, - NodeRef = nodeRef, - Type = type, - Meta1 = 0, - Meta2 = 0, - Meta3 = 0, - Meta4 = 0, - Name = EmitFunctionSignature(signature) + var nodeDebug = new NodeDebugInfo + { + Id = node.Index, + RuleId = 0, + Line = location != null ? 
location.StartLine : 0, + ColumnToVariableMaps = new Dictionary(), + DatabaseId = node.DatabaseRef.Index, + Name = node.Name, + Type = node.NodeType(), + ParentNodeId = 0 }; - var sig = signature.GetNameAndArity(); - FuncEntries.Add(sig, osiFunc); - Story.Functions.Add(osiFunc); - Story.FunctionSignatureMap.Add(sig.Name + "/" + sig.Arity.ToString(), osiFunc); + if (node is JoinNode) + { + nodeDebug.ParentNodeId = (node as JoinNode).LeftParentRef.Index; + } + else if (node is RelNode) + { + nodeDebug.ParentNodeId = (node as RelNode).ParentRef.Index; + } - if (DebugInfo != null) + if (node.Name != "") { - var funcDebug = new FunctionDebugInfo - { - Name = osiFunc.Name.Name, - Params = new List(), - TypeId = (UInt32)osiFunc.Type - }; + nodeDebug.FunctionName = new FunctionNameAndArity(node.Name, node.NumParams); + } - foreach (var param in signature.Params) + if (location != null) + { + var columnIndex = 0; + var variableIndex = 0; + while (columnIndex < numColumns) { - funcDebug.Params.Add(new FunctionParamDebugInfo + if (!rule.Variables[variableIndex].IsUnused()) { - TypeId = (UInt32)param.Type.IntrinsicTypeId, - Name = param.Name, - Out = param.Direction == ParamDirection.Out - }); - } + nodeDebug.ColumnToVariableMaps.Add(columnIndex, variableIndex); + columnIndex++; + } - DebugInfo.Functions.Add(signature.GetNameAndArity(), funcDebug); + variableIndex++; + } } - return osiFunc; + DebugInfo.Nodes.Add(nodeDebug.Id, nodeDebug); } + } - private Function EmitFunction(LS.Story.FunctionType type, FunctionSignature signature, NodeReference nodeRef, BuiltinFunction builtin) - { - var osiFunc = EmitFunction(type, signature, nodeRef); - osiFunc.Meta1 = builtin.Meta1; - osiFunc.Meta2 = builtin.Meta2; - osiFunc.Meta3 = builtin.Meta3; - osiFunc.Meta4 = builtin.Meta4; - return osiFunc; - } + private void AddNodeWithoutDebugInfo(Node node) + { + node.Index = (uint)Story.Nodes.Count + 1; + Story.Nodes.Add(node.Index, node); + } + + private void AddNode(Node node) + { + AddNodeWithoutDebugInfo(node); + AddNodeDebugInfo(node, null, 0, null); + } - private InternalQueryNode EmitSysQuery(FunctionSignature signature, NameRefType refType) + private Function EmitFunction(LS.Story.FunctionType type, FunctionSignature signature, NodeReference nodeRef) + { + var osiFunc = new Function { - var builtin = Context.LookupName(signature.GetNameAndArity()) as BuiltinFunction; - InternalQueryNode osiQuery = null; - if (refType == NameRefType.Condition) + Line = 0, + ConditionReferences = 0, + ActionReferences = 0, + NodeRef = nodeRef, + Type = type, + Meta1 = 0, + Meta2 = 0, + Meta3 = 0, + Meta4 = 0, + Name = EmitFunctionSignature(signature) + }; + + var sig = signature.GetNameAndArity(); + FuncEntries.Add(sig, osiFunc); + Story.Functions.Add(osiFunc); + Story.FunctionSignatureMap.Add(sig.Name + "/" + sig.Arity.ToString(), osiFunc); + + if (DebugInfo != null) + { + var funcDebug = new FunctionDebugInfo + { + Name = osiFunc.Name.Name, + Params = new List(), + TypeId = (UInt32)osiFunc.Type + }; + + foreach (var param in signature.Params) { - osiQuery = new InternalQueryNode + funcDebug.Params.Add(new FunctionParamDebugInfo { - DatabaseRef = new DatabaseReference(), - Name = signature.Name, - NumParams = (byte)signature.Params.Count - }; - AddNode(osiQuery); + TypeId = (UInt32)param.Type.IntrinsicTypeId, + Name = param.Name, + Out = param.Direction == ParamDirection.Out + }); } - EmitFunction(LS.Story.FunctionType.SysQuery, signature, new NodeReference(Story, osiQuery), builtin); - return osiQuery; + 
DebugInfo.Functions.Add(signature.GetNameAndArity(), funcDebug); } - private void EmitSysCall(FunctionSignature signature) - { - var builtin = Context.LookupName(signature.GetNameAndArity()) as BuiltinFunction; - EmitFunction(LS.Story.FunctionType.SysCall, signature, new NodeReference(), builtin); - } + return osiFunc; + } + + private Function EmitFunction(LS.Story.FunctionType type, FunctionSignature signature, NodeReference nodeRef, BuiltinFunction builtin) + { + var osiFunc = EmitFunction(type, signature, nodeRef); + osiFunc.Meta1 = builtin.Meta1; + osiFunc.Meta2 = builtin.Meta2; + osiFunc.Meta3 = builtin.Meta3; + osiFunc.Meta4 = builtin.Meta4; + return osiFunc; + } - private ProcNode EmitEvent(FunctionSignature signature, NameRefType refType) + private InternalQueryNode EmitSysQuery(FunctionSignature signature, NameRefType refType) + { + var builtin = Context.LookupName(signature.GetNameAndArity()) as BuiltinFunction; + InternalQueryNode osiQuery = null; + if (refType == NameRefType.Condition) { - var builtin = Context.LookupName(signature.GetNameAndArity()) as BuiltinFunction; - ProcNode osiProc = null; - if (refType == NameRefType.Condition) + osiQuery = new InternalQueryNode { - osiProc = new ProcNode - { - DatabaseRef = new DatabaseReference(), - Name = signature.Name, - NumParams = (byte)signature.Params.Count, - ReferencedBy = new List() - }; - AddNode(osiProc); - } - - EmitFunction(LS.Story.FunctionType.Event, signature, new NodeReference(Story, osiProc), builtin); - return osiProc; + DatabaseRef = new DatabaseReference(), + Name = signature.Name, + NumParams = (byte)signature.Params.Count + }; + AddNode(osiQuery); } - private ProcNode EmitCall(FunctionSignature signature, NameRefType refType) - { - var builtin = Context.LookupName(signature.GetNameAndArity()) as BuiltinFunction; - ProcNode osiProc = null; - if (refType == NameRefType.Condition) - { - osiProc = new ProcNode - { - DatabaseRef = new DatabaseReference(), - Name = signature.Name, - NumParams = (byte)signature.Params.Count, - ReferencedBy = new List() - }; - AddNode(osiProc); - } + EmitFunction(LS.Story.FunctionType.SysQuery, signature, new NodeReference(Story, osiQuery), builtin); + return osiQuery; + } - EmitFunction(LS.Story.FunctionType.Call, signature, new NodeReference(Story, osiProc), builtin); - return osiProc; - } + private void EmitSysCall(FunctionSignature signature) + { + var builtin = Context.LookupName(signature.GetNameAndArity()) as BuiltinFunction; + EmitFunction(LS.Story.FunctionType.SysCall, signature, new NodeReference(), builtin); + } - private DivQueryNode EmitQuery(FunctionSignature signature, NameRefType refType) + private ProcNode EmitEvent(FunctionSignature signature, NameRefType refType) + { + var builtin = Context.LookupName(signature.GetNameAndArity()) as BuiltinFunction; + ProcNode osiProc = null; + if (refType == NameRefType.Condition) { - var builtin = Context.LookupName(signature.GetNameAndArity()) as BuiltinFunction; - DivQueryNode osiQuery = null; - if (refType == NameRefType.Condition) + osiProc = new ProcNode { - osiQuery = new DivQueryNode - { - DatabaseRef = new DatabaseReference(), - Name = signature.Name, - NumParams = (byte)signature.Params.Count - }; - AddNode(osiQuery); - } - - EmitFunction(LS.Story.FunctionType.Query, signature, new NodeReference(Story, osiQuery), builtin); - return osiQuery; + DatabaseRef = new DatabaseReference(), + Name = signature.Name, + NumParams = (byte)signature.Params.Count, + ReferencedBy = new List() + }; + AddNode(osiProc); } - private ProcNode 
EmitProc(FunctionSignature signature) + EmitFunction(LS.Story.FunctionType.Event, signature, new NodeReference(Story, osiProc), builtin); + return osiProc; + } + + private ProcNode EmitCall(FunctionSignature signature, NameRefType refType) + { + var builtin = Context.LookupName(signature.GetNameAndArity()) as BuiltinFunction; + ProcNode osiProc = null; + if (refType == NameRefType.Condition) { - var osiProc = new ProcNode + osiProc = new ProcNode { DatabaseRef = new DatabaseReference(), Name = signature.Name, @@ -367,1108 +332,1142 @@ private ProcNode EmitProc(FunctionSignature signature) ReferencedBy = new List() }; AddNode(osiProc); - - EmitFunction(LS.Story.FunctionType.Proc, signature, new NodeReference(Story, osiProc)); - return osiProc; } - private UserQueryNode EmitUserQuery(FunctionSignature signature) + EmitFunction(LS.Story.FunctionType.Call, signature, new NodeReference(Story, osiProc), builtin); + return osiProc; + } + + private DivQueryNode EmitQuery(FunctionSignature signature, NameRefType refType) + { + var builtin = Context.LookupName(signature.GetNameAndArity()) as BuiltinFunction; + DivQueryNode osiQuery = null; + if (refType == NameRefType.Condition) { - var osiQuery = new UserQueryNode + osiQuery = new DivQueryNode { DatabaseRef = new DatabaseReference(), Name = signature.Name, NumParams = (byte)signature.Params.Count }; AddNode(osiQuery); - - EmitFunction(LS.Story.FunctionType.Database, signature, new NodeReference(Story, osiQuery)); - return osiQuery; } - private DatabaseNode EmitDatabase(FunctionSignature signature) - { - var osiDb = new Database - { - Index = (uint)Story.Databases.Count + 1, - Parameters = new ParameterList - { - Types = new List(signature.Params.Count) - }, - OwnerNode = null, - FactsPosition = 0 - }; + EmitFunction(LS.Story.FunctionType.Query, signature, new NodeReference(Story, osiQuery), builtin); + return osiQuery; + } - foreach (var param in signature.Params) - { - osiDb.Parameters.Types.Add(param.Type.TypeId); - } + private ProcNode EmitProc(FunctionSignature signature) + { + var osiProc = new ProcNode + { + DatabaseRef = new DatabaseReference(), + Name = signature.Name, + NumParams = (byte)signature.Params.Count, + ReferencedBy = new List() + }; + AddNode(osiProc); + + EmitFunction(LS.Story.FunctionType.Proc, signature, new NodeReference(Story, osiProc)); + return osiProc; + } - osiDb.Facts = new FactCollection(osiDb, Story); - Story.Databases.Add(osiDb.Index, osiDb); + private UserQueryNode EmitUserQuery(FunctionSignature signature) + { + var osiQuery = new UserQueryNode + { + DatabaseRef = new DatabaseReference(), + Name = signature.Name, + NumParams = (byte)signature.Params.Count + }; + AddNode(osiQuery); + + EmitFunction(LS.Story.FunctionType.Database, signature, new NodeReference(Story, osiQuery)); + return osiQuery; + } - var osiDbNode = new DatabaseNode + private DatabaseNode EmitDatabase(FunctionSignature signature) + { + var osiDb = new Database + { + Index = (uint)Story.Databases.Count + 1, + Parameters = new ParameterList { - DatabaseRef = new DatabaseReference(Story, osiDb), - Name = signature.Name, - NumParams = (byte)signature.Params.Count, - ReferencedBy = new List() - }; - AddNode(osiDbNode); + Types = new List(signature.Params.Count) + }, + OwnerNode = null, + FactsPosition = 0 + }; - osiDb.OwnerNode = osiDbNode; + foreach (var param in signature.Params) + { + osiDb.Parameters.Types.Add(param.Type.TypeId); + } - EmitFunction(LS.Story.FunctionType.Database, signature, new NodeReference(Story, osiDbNode)); + osiDb.Facts = new 
FactCollection(osiDb, Story); + Story.Databases.Add(osiDb.Index, osiDb); - if (DebugInfo != null) - { - var dbDebug = new DatabaseDebugInfo - { - Id = osiDb.Index, - Name = signature.Name, - ParamTypes = new List() - }; - foreach (var param in signature.Params) - { - dbDebug.ParamTypes.Add(param.Type.TypeId); - } + var osiDbNode = new DatabaseNode + { + DatabaseRef = new DatabaseReference(Story, osiDb), + Name = signature.Name, + NumParams = (byte)signature.Params.Count, + ReferencedBy = new List() + }; + AddNode(osiDbNode); - DebugInfo.Databases.Add(dbDebug.Id, dbDebug); - } + osiDb.OwnerNode = osiDbNode; - return osiDbNode; - } + EmitFunction(LS.Story.FunctionType.Database, signature, new NodeReference(Story, osiDbNode)); - private Database EmitIntermediateDatabase(IRRule rule, int tupleSize, Node ownerNode) + if (DebugInfo != null) { - var paramTypes = new List(tupleSize); - for (var i = 0; i < tupleSize; i++) + var dbDebug = new DatabaseDebugInfo { - var param = rule.Variables[i]; - if (!param.IsUnused()) - { - paramTypes.Add(param.Type.TypeId); - } - } - - if (paramTypes.Count == 0) + Id = osiDb.Index, + Name = signature.Name, + ParamTypes = new List() + }; + foreach (var param in signature.Params) { - return null; + dbDebug.ParamTypes.Add(param.Type.TypeId); } - var osiDb = new Database - { - Index = (uint)Story.Databases.Count + 1, - Parameters = new ParameterList - { - Types = paramTypes - }, - OwnerNode = ownerNode, - FactsPosition = 0 - }; + DebugInfo.Databases.Add(dbDebug.Id, dbDebug); + } - osiDb.Facts = new FactCollection(osiDb, Story); - Story.Databases.Add(osiDb.Index, osiDb); + return osiDbNode; + } - if (DebugInfo != null) + private Database EmitIntermediateDatabase(IRRule rule, int tupleSize, Node ownerNode) + { + var paramTypes = new List(tupleSize); + for (var i = 0; i < tupleSize; i++) + { + var param = rule.Variables[i]; + if (!param.IsUnused()) { - var dbDebug = new DatabaseDebugInfo - { - Id = osiDb.Index, - Name = "", - ParamTypes = new List() - }; - foreach (var paramType in paramTypes) - { - dbDebug.ParamTypes.Add(paramType); - } - - DebugInfo.Databases.Add(dbDebug.Id, dbDebug); + paramTypes.Add(param.Type.TypeId); } + } - return osiDb; + if (paramTypes.Count == 0) + { + return null; } - private Node EmitName(FunctionNameAndArity name, NameRefType refType) + var osiDb = new Database { - Node node = null; - if (!Funcs.TryGetValue(name, out node)) + Index = (uint)Story.Databases.Count + 1, + Parameters = new ParameterList { - var signature = Context.LookupSignature(name); - switch (signature.Type) - { - case FunctionType.SysQuery: node = EmitSysQuery(signature, refType); break; - case FunctionType.SysCall: EmitSysCall(signature); break; - case FunctionType.Event: node = EmitEvent(signature, refType); break; - case FunctionType.Query: node = EmitQuery(signature, refType); break; - case FunctionType.Call: node = EmitCall(signature, refType); break; - case FunctionType.Database: node = EmitDatabase(signature); break; - case FunctionType.Proc: node = EmitProc(signature); break; - case FunctionType.UserQuery: node = EmitUserQuery(signature); break; - default: throw new ArgumentException("Invalid function type"); - } + Types = paramTypes + }, + OwnerNode = ownerNode, + FactsPosition = 0 + }; - Funcs.Add(name, node); - } + osiDb.Facts = new FactCollection(osiDb, Story); + Story.Databases.Add(osiDb.Index, osiDb); - var func = FuncEntries[name]; - switch (refType) + if (DebugInfo != null) + { + var dbDebug = new DatabaseDebugInfo { - case NameRefType.None: - break; - case 
NameRefType.Condition: - func.ConditionReferences++; - if (node == null) - { - throw new InvalidOperationException("Tried to emit a condition reference after a node was already generated"); - } - break; - case NameRefType.Action: - func.ActionReferences++; - break; - } - - if (node is UserQueryNode) + Id = osiDb.Index, + Name = "", + ParamTypes = new List() + }; + foreach (var paramType in paramTypes) { - // We need to add a reference to the user query definition entry as well - var defnName = new FunctionNameAndArity(name.Name + "__DEF__", name.Arity); - if (FuncEntries.TryGetValue(defnName, out Function defn)) - { - switch (refType) - { - case NameRefType.Condition: - defn.ConditionReferences++; - break; - case NameRefType.Action: - defn.ActionReferences++; - break; - } - } + dbDebug.ParamTypes.Add(paramType); } - - return node; + + DebugInfo.Databases.Add(dbDebug.Id, dbDebug); } - private Call EmitCall(IRFact fact) + return osiDb; + } + + private Node EmitName(FunctionNameAndArity name, NameRefType refType) + { + Node node = null; + if (!Funcs.TryGetValue(name, out node)) { - if (fact.Database != null) + var signature = Context.LookupSignature(name); + switch (signature.Type) { - EmitName(fact.Database.Name, NameRefType.Action); + case FunctionType.SysQuery: node = EmitSysQuery(signature, refType); break; + case FunctionType.SysCall: EmitSysCall(signature); break; + case FunctionType.Event: node = EmitEvent(signature, refType); break; + case FunctionType.Query: node = EmitQuery(signature, refType); break; + case FunctionType.Call: node = EmitCall(signature, refType); break; + case FunctionType.Database: node = EmitDatabase(signature); break; + case FunctionType.Proc: node = EmitProc(signature); break; + case FunctionType.UserQuery: node = EmitUserQuery(signature); break; + default: throw new ArgumentException("Invalid function type"); + } - var osiCall = new Call - { - Name = fact.Database.Name.Name, - Parameters = new List(fact.Elements.Count), - Negate = fact.Not, - // TODO const - InvalidGoalId? - GoalIdOrDebugHook = 0 - }; + Funcs.Add(name, node); + } - foreach (var param in fact.Elements) + var func = FuncEntries[name]; + switch (refType) + { + case NameRefType.None: + break; + case NameRefType.Condition: + func.ConditionReferences++; + if (node == null) { - var osiParam = EmitTypedValue(param); - osiCall.Parameters.Add(osiParam); + throw new InvalidOperationException("Tried to emit a condition reference after a node was already generated"); } - - return osiCall; - } - else - { - return new Call - { - Name = "", - Parameters = new List(), - Negate = false, - GoalIdOrDebugHook = (int)Goals[fact.Goal].Index - }; - } + break; + case NameRefType.Action: + func.ActionReferences++; + break; } - private Call EmitCall(IRStatement statement) + if (node is UserQueryNode) { - if (statement.Goal != null) - { - return new Call - { - Name = "", - Parameters = new List(statement.Params.Count), - Negate = false, - GoalIdOrDebugHook = (int)Goals[statement.Goal].Index - }; - } - else - { - var name = Context.LookupSignature(statement.Func.Name); - EmitName(statement.Func.Name, NameRefType.Action); - - var osiCall = new Call - { - Name = statement.Func.Name.Name, - Parameters = new List(statement.Params.Count), - Negate = statement.Not, - // TODO const - InvalidGoalId? - // TODO - use statement goal id if available? 
- GoalIdOrDebugHook = 0 - }; - - foreach (var param in statement.Params) - { - var osiParam = EmitTypedValue(param); - osiCall.Parameters.Add(osiParam); + // We need to add a reference to the user query definition entry as well + var defnName = new FunctionNameAndArity(name.Name + "__DEF__", name.Arity); + if (FuncEntries.TryGetValue(defnName, out Function defn)) + { + switch (refType) + { + case NameRefType.Condition: + defn.ConditionReferences++; + break; + case NameRefType.Action: + defn.ActionReferences++; + break; } - - return osiCall; } } + + return node; + } - private void AddJoinTarget(Node node, Node target, EntryPoint entryPoint, Goal goal) + private Call EmitCall(IRFact fact) + { + if (fact.Database != null) { - var targetRef = new NodeEntryItem + EmitName(fact.Database.Name, NameRefType.Action); + + var osiCall = new Call { - NodeRef = new NodeReference(Story, target), - EntryPoint = entryPoint, - GoalRef = new GoalReference(Story, goal) + Name = fact.Database.Name.Name, + Parameters = new List(fact.Elements.Count), + Negate = fact.Not, + // TODO const - InvalidGoalId? + GoalIdOrDebugHook = 0 }; - if (node is TreeNode) - { - var treeNode = node as TreeNode; - Debug.Assert(treeNode.NextNode == null); - treeNode.NextNode = targetRef; - } - else if (node is DataNode) + foreach (var param in fact.Elements) { - var dataNode = node as DataNode; - dataNode.ReferencedBy.Add(targetRef); + var osiParam = EmitTypedValue(param); + osiCall.Parameters.Add(osiParam); } - if (target is RelNode) - { - Debug.Assert(entryPoint == EntryPoint.None); - var relNode = target as RelNode; - relNode.ParentRef = new NodeReference(Story, node); - } - else + return osiCall; + } + else + { + return new Call { - var joinNode = target as JoinNode; - if (entryPoint == EntryPoint.Left) - { - joinNode.LeftParentRef = new NodeReference(Story, node); - } - else - { - Debug.Assert(entryPoint == EntryPoint.Right); - joinNode.RightParentRef = new NodeReference(Story, node); - } - } + Name = "", + Parameters = new List(), + Negate = false, + GoalIdOrDebugHook = (int)Goals[fact.Goal].Index + }; } + } - private Adapter EmitAdapter() + private Call EmitCall(IRStatement statement) + { + if (statement.Goal != null) { - var adapter = new Adapter + return new Call { - Index = (uint)Story.Adapters.Count + 1, - Constants = new Tuple(), - LogicalIndices = new List(), - LogicalToPhysicalMap = new Dictionary() + Name = "", + Parameters = new List(statement.Params.Count), + Negate = false, + GoalIdOrDebugHook = (int)Goals[statement.Goal].Index }; - Story.Adapters.Add(adapter.Index, adapter); - return adapter; } - - private Adapter EmitIdentityMappingAdapter(IRRule rule, int tupleSize, bool allowPartialPhysicalRow) + else { - var adapter = EmitAdapter(); + var name = Context.LookupSignature(statement.Func.Name); + EmitName(statement.Func.Name, NameRefType.Action); + + var osiCall = new Call + { + Name = statement.Func.Name.Name, + Parameters = new List(statement.Params.Count), + Negate = statement.Not, + // TODO const - InvalidGoalId? + // TODO - use statement goal id if available? 
+ GoalIdOrDebugHook = 0 + }; - if (tupleSize > rule.Variables.Count) + foreach (var param in statement.Params) { - tupleSize = rule.Variables.Count; + var osiParam = EmitTypedValue(param); + osiCall.Parameters.Add(osiParam); } - for (var i = 0; i < tupleSize; i++) - { - if (rule.Variables[i].IsUnused()) - { - if (!allowPartialPhysicalRow) - { - adapter.LogicalIndices.Add((sbyte)-1); - } - } - else - { - adapter.LogicalIndices.Add((sbyte)i); - adapter.LogicalToPhysicalMap.Add((byte)i, (byte)(adapter.LogicalIndices.Count - 1)); - } - } - - return adapter; + return osiCall; } + } - private Adapter EmitJoinAdapter(IRFuncCondition condition, IRRule rule) + private void AddJoinTarget(Node node, Node target, EntryPoint entryPoint, Goal goal) + { + var targetRef = new NodeEntryItem { - var adapter = EmitAdapter(); - - for (var i = 0; i < condition.Params.Count; i++) - { - var param = condition.Params[i]; - if (param is IRConstant) - { - var osiConst = EmitValue(param as IRConstant); - adapter.Constants.Physical.Add(osiConst); - adapter.Constants.Logical.Add(i, osiConst); - adapter.LogicalIndices.Add((sbyte)-1); - } - else - { - var variable = param as IRVariable; - if (rule.Variables[variable.Index].IsUnused()) - { - adapter.LogicalIndices.Add((sbyte)-1); - } - else - { - adapter.LogicalIndices.Add((sbyte)variable.Index); - if (!adapter.LogicalToPhysicalMap.ContainsKey((byte)variable.Index)) - { - adapter.LogicalToPhysicalMap.Add((byte)variable.Index, (byte)(adapter.LogicalIndices.Count - 1)); - } - } - } - } - - var sortedMap = new Dictionary(adapter.LogicalToPhysicalMap.Count); - foreach (var mapping in adapter.LogicalToPhysicalMap.OrderBy(v => v.Key)) - { - sortedMap.Add(mapping.Key, mapping.Value); - } - adapter.LogicalToPhysicalMap = sortedMap; + NodeRef = new NodeReference(Story, target), + EntryPoint = entryPoint, + GoalRef = new GoalReference(Story, goal) + }; - return adapter; + if (node is TreeNode) + { + var treeNode = node as TreeNode; + Debug.Assert(treeNode.NextNode == null); + treeNode.NextNode = targetRef; + } + else if (node is DataNode) + { + var dataNode = node as DataNode; + dataNode.ReferencedBy.Add(targetRef); } - private Adapter EmitNodeAdapter(IRRule rule, IRCondition condition, Node node) + if (target is RelNode) { - if (node is DataNode || node is QueryNode) - { - return EmitJoinAdapter(condition as IRFuncCondition, rule); - } - else if (node is RelOpNode) - { - // (node as RelOpNode).AdapterRef.Resolve().LogicalIndices.Count - return EmitIdentityMappingAdapter(rule, (int)condition.TupleSize, true); - } - else if (node is JoinNode) + Debug.Assert(entryPoint == EntryPoint.None); + var relNode = target as RelNode; + relNode.ParentRef = new NodeReference(Story, node); + } + else + { + var joinNode = target as JoinNode; + if (entryPoint == EntryPoint.Left) { - return EmitIdentityMappingAdapter(rule, (int)condition.TupleSize, true); + joinNode.LeftParentRef = new NodeReference(Story, node); } else { - throw new ArgumentException("Unable to emit an adapter for this node type."); + Debug.Assert(entryPoint == EntryPoint.Right); + joinNode.RightParentRef = new NodeReference(Story, node); } } + } - private JoinNode EmitJoin(Node left, IRCondition leftCondition, IRFuncCondition rightCondition, IRRule rule, Goal goal, ReferencedDatabaseInfo referencedDb) + private Adapter EmitAdapter() + { + var adapter = new Adapter { - if (referencedDb.DbNodeRef.IsValid) - { - referencedDb.Indirection++; - } + Index = (uint)Story.Adapters.Count + 1, + Constants = new Tuple(), + LogicalIndices = new 
List(), + LogicalToPhysicalMap = new Dictionary() + }; + Story.Adapters.Add(adapter.Index, adapter); + return adapter; + } - var right = EmitName(rightCondition.Func.Name, NameRefType.Condition); - JoinNode osiCall; - if (rightCondition.Not) - { - osiCall = new NotAndNode(); - } - else - { - osiCall = new AndNode(); - } + private Adapter EmitIdentityMappingAdapter(IRRule rule, int tupleSize, bool allowPartialPhysicalRow) + { + var adapter = EmitAdapter(); - var leftAdapter = EmitNodeAdapter(rule, leftCondition, left); - var rightAdapter = EmitNodeAdapter(rule, rightCondition, right); + if (tupleSize > rule.Variables.Count) + { + tupleSize = rule.Variables.Count; + } - DatabaseReference database; - Database db = null; - if (left.DatabaseRef.IsValid && right.DatabaseRef.IsValid) + for (var i = 0; i < tupleSize; i++) + { + if (rule.Variables[i].IsUnused()) { - db = EmitIntermediateDatabase(rule, (int)rightCondition.TupleSize, null); - if (db != null) - { - database = new DatabaseReference(Story, db); - } - else + if (!allowPartialPhysicalRow) { - database = new DatabaseReference(); + adapter.LogicalIndices.Add((sbyte)-1); } } else { - database = new DatabaseReference(); + adapter.LogicalIndices.Add((sbyte)i); + adapter.LogicalToPhysicalMap.Add((byte)i, (byte)(adapter.LogicalIndices.Count - 1)); } + } - // VERY TODO - osiCall.DatabaseRef = database; - osiCall.Name = ""; - osiCall.NumParams = 0; - osiCall.LeftParentRef = new NodeReference(); - osiCall.RightParentRef = new NodeReference(); - osiCall.LeftAdapterRef = new AdapterReference(Story, leftAdapter); - osiCall.RightAdapterRef = new AdapterReference(Story, rightAdapter); - if (db == null) - { - osiCall.LeftDatabaseNodeRef = referencedDb.DbNodeRef; - osiCall.LeftDatabaseIndirection = referencedDb.Indirection; - osiCall.LeftDatabaseJoin = referencedDb.JoinRef; - } - else - { - osiCall.LeftDatabaseNodeRef = new NodeReference(); - osiCall.LeftDatabaseIndirection = 0; - osiCall.LeftDatabaseJoin = new NodeEntryItem - { - NodeRef = new NodeReference(), - EntryPoint = EntryPoint.None, - GoalRef = new GoalReference() - }; - } + return adapter; + } - SortedSet uniqueLogicalIndices = new SortedSet(); - foreach (var columnIndex in leftAdapter.LogicalToPhysicalMap.Keys) - { - uniqueLogicalIndices.Add(columnIndex); - } + private Adapter EmitJoinAdapter(IRFuncCondition condition, IRRule rule) + { + var adapter = EmitAdapter(); - foreach (var columnIndex in rightAdapter.LogicalToPhysicalMap.Keys) + for (var i = 0; i < condition.Params.Count; i++) + { + var param = condition.Params[i]; + if (param is IRConstant) { - uniqueLogicalIndices.Add(columnIndex); + var osiConst = EmitValue(param as IRConstant); + adapter.Constants.Physical.Add(osiConst); + adapter.Constants.Logical.Add(i, osiConst); + adapter.LogicalIndices.Add((sbyte)-1); } - - AddNodeWithoutDebugInfo(osiCall); - - if (db != null) + else { - referencedDb.DbNodeRef = new NodeReference(Story, osiCall); - referencedDb.Indirection = 0; - referencedDb.JoinRef = new NodeEntryItem + var variable = param as IRVariable; + if (rule.Variables[variable.Index].IsUnused()) { - NodeRef = new NodeReference(Story, osiCall), - GoalRef = new GoalReference(Story, goal), - EntryPoint = EntryPoint.None - }; - } - else if (referencedDb.DbNodeRef.IsValid - && left.DatabaseRef.IsValid) - { - referencedDb.JoinRef = new NodeEntryItem + adapter.LogicalIndices.Add((sbyte)-1); + } + else { - NodeRef = new NodeReference(Story, osiCall), - GoalRef = new GoalReference(Story, goal), - EntryPoint = EntryPoint.Left - }; - 
osiCall.LeftDatabaseJoin = referencedDb.JoinRef; + adapter.LogicalIndices.Add((sbyte)variable.Index); + if (!adapter.LogicalToPhysicalMap.ContainsKey((byte)variable.Index)) + { + adapter.LogicalToPhysicalMap.Add((byte)variable.Index, (byte)(adapter.LogicalIndices.Count - 1)); + } + } } + } - if (right is DatabaseNode && db == null) - { - osiCall.RightDatabaseNodeRef = new NodeReference(Story, right); - osiCall.RightDatabaseIndirection = 1; - osiCall.RightDatabaseJoin = new NodeEntryItem - { - NodeRef = new NodeReference(Story, osiCall), - EntryPoint = EntryPoint.Right, - GoalRef = new GoalReference(Story, goal) - }; - } - else - { - osiCall.RightDatabaseNodeRef = new NodeReference(); - osiCall.RightDatabaseIndirection = 0; - osiCall.RightDatabaseJoin = new NodeEntryItem - { - NodeRef = new NodeReference(), - EntryPoint = EntryPoint.None, - GoalRef = new GoalReference() - }; - } + var sortedMap = new Dictionary(adapter.LogicalToPhysicalMap.Count); + foreach (var mapping in adapter.LogicalToPhysicalMap.OrderBy(v => v.Key)) + { + sortedMap.Add(mapping.Key, mapping.Value); + } + adapter.LogicalToPhysicalMap = sortedMap; - AddJoinTarget(left, osiCall, EntryPoint.Left, goal); - AddJoinTarget(right, osiCall, EntryPoint.Right, goal); - - AddNodeDebugInfo(osiCall, rightCondition.Location, uniqueLogicalIndices.Count, rule); + return adapter; + } - if (osiCall.RightDatabaseIndirection != 0 - && osiCall.LeftDatabaseIndirection != 0 - && osiCall.RightDatabaseIndirection < osiCall.LeftDatabaseIndirection) - { - referencedDb.DbNodeRef = osiCall.RightDatabaseNodeRef; - referencedDb.Indirection = osiCall.RightDatabaseIndirection; - referencedDb.JoinRef = osiCall.RightDatabaseJoin; - } + private Adapter EmitNodeAdapter(IRRule rule, IRCondition condition, Node node) + { + if (node is DataNode || node is QueryNode) + { + return EmitJoinAdapter(condition as IRFuncCondition, rule); + } + else if (node is RelOpNode) + { + // (node as RelOpNode).AdapterRef.Resolve().LogicalIndices.Count + return EmitIdentityMappingAdapter(rule, (int)condition.TupleSize, true); + } + else if (node is JoinNode) + { + return EmitIdentityMappingAdapter(rule, (int)condition.TupleSize, true); + } + else + { + throw new ArgumentException("Unable to emit an adapter for this node type."); + } + } - return osiCall; + private JoinNode EmitJoin(Node left, IRCondition leftCondition, IRFuncCondition rightCondition, IRRule rule, Goal goal, ReferencedDatabaseInfo referencedDb) + { + if (referencedDb.DbNodeRef.IsValid) + { + referencedDb.Indirection++; } - private RelOpNode EmitRelOp(IRRule rule, IRBinaryCondition condition, ReferencedDatabaseInfo referencedDb, - IRCondition previousCondition, Node previousNode) + var right = EmitName(rightCondition.Func.Name, NameRefType.Condition); + JoinNode osiCall; + if (rightCondition.Not) { - if (referencedDb.DbNodeRef.IsValid) - { - referencedDb.Indirection++; - } + osiCall = new NotAndNode(); + } + else + { + osiCall = new AndNode(); + } + + var leftAdapter = EmitNodeAdapter(rule, leftCondition, left); + var rightAdapter = EmitNodeAdapter(rule, rightCondition, right); - DatabaseReference database; - Database db = null; - if (previousNode.DatabaseRef.IsValid) + DatabaseReference database; + Database db = null; + if (left.DatabaseRef.IsValid && right.DatabaseRef.IsValid) + { + db = EmitIntermediateDatabase(rule, (int)rightCondition.TupleSize, null); + if (db != null) { - db = EmitIntermediateDatabase(rule, (int)condition.TupleSize, null); database = new DatabaseReference(Story, db); } else { database = new 
DatabaseReference(); } + } + else + { + database = new DatabaseReference(); + } - var adapter = EmitNodeAdapter(rule, previousCondition, previousNode); - var osiRelOp = new RelOpNode + // VERY TODO + osiCall.DatabaseRef = database; + osiCall.Name = ""; + osiCall.NumParams = 0; + osiCall.LeftParentRef = new NodeReference(); + osiCall.RightParentRef = new NodeReference(); + osiCall.LeftAdapterRef = new AdapterReference(Story, leftAdapter); + osiCall.RightAdapterRef = new AdapterReference(Story, rightAdapter); + if (db == null) + { + osiCall.LeftDatabaseNodeRef = referencedDb.DbNodeRef; + osiCall.LeftDatabaseIndirection = referencedDb.Indirection; + osiCall.LeftDatabaseJoin = referencedDb.JoinRef; + } + else + { + osiCall.LeftDatabaseNodeRef = new NodeReference(); + osiCall.LeftDatabaseIndirection = 0; + osiCall.LeftDatabaseJoin = new NodeEntryItem { - DatabaseRef = database, - Name = "", - NumParams = 0, - - ParentRef = null, - AdapterRef = new AdapterReference(Story, adapter), - - RelOp = condition.Op + NodeRef = new NodeReference(), + EntryPoint = EntryPoint.None, + GoalRef = new GoalReference() }; + } + + SortedSet uniqueLogicalIndices = new SortedSet(); + foreach (var columnIndex in leftAdapter.LogicalToPhysicalMap.Keys) + { + uniqueLogicalIndices.Add(columnIndex); + } + + foreach (var columnIndex in rightAdapter.LogicalToPhysicalMap.Keys) + { + uniqueLogicalIndices.Add(columnIndex); + } + + AddNodeWithoutDebugInfo(osiCall); - if (condition.LValue is IRConstant) + if (db != null) + { + referencedDb.DbNodeRef = new NodeReference(Story, osiCall); + referencedDb.Indirection = 0; + referencedDb.JoinRef = new NodeEntryItem { - osiRelOp.LeftValue = EmitValue(condition.LValue as IRConstant); - osiRelOp.LeftValueIndex = -1; - } - else + NodeRef = new NodeReference(Story, osiCall), + GoalRef = new GoalReference(Story, goal), + EntryPoint = EntryPoint.None + }; + } + else if (referencedDb.DbNodeRef.IsValid + && left.DatabaseRef.IsValid) + { + referencedDb.JoinRef = new NodeEntryItem { - osiRelOp.LeftValue = new Value - { - TypeId = (uint)Value.Type.None - }; - osiRelOp.LeftValueIndex = (sbyte)(condition.LValue as IRVariable).Index; - } + NodeRef = new NodeReference(Story, osiCall), + GoalRef = new GoalReference(Story, goal), + EntryPoint = EntryPoint.Left + }; + osiCall.LeftDatabaseJoin = referencedDb.JoinRef; + } - if (condition.RValue is IRConstant) + if (right is DatabaseNode && db == null) + { + osiCall.RightDatabaseNodeRef = new NodeReference(Story, right); + osiCall.RightDatabaseIndirection = 1; + osiCall.RightDatabaseJoin = new NodeEntryItem { - osiRelOp.RightValue = EmitValue(condition.RValue as IRConstant); - osiRelOp.RightValueIndex = -1; - } - else + NodeRef = new NodeReference(Story, osiCall), + EntryPoint = EntryPoint.Right, + GoalRef = new GoalReference(Story, goal) + }; + } + else + { + osiCall.RightDatabaseNodeRef = new NodeReference(); + osiCall.RightDatabaseIndirection = 0; + osiCall.RightDatabaseJoin = new NodeEntryItem { - osiRelOp.RightValue = new Value - { - TypeId = (uint)Value.Type.None - }; - osiRelOp.RightValueIndex = (sbyte)(condition.RValue as IRVariable).Index; - } + NodeRef = new NodeReference(), + EntryPoint = EntryPoint.None, + GoalRef = new GoalReference() + }; + } - if (db != null) - { - db.OwnerNode = osiRelOp; + AddJoinTarget(left, osiCall, EntryPoint.Left, goal); + AddJoinTarget(right, osiCall, EntryPoint.Right, goal); + + AddNodeDebugInfo(osiCall, rightCondition.Location, uniqueLogicalIndices.Count, rule); - osiRelOp.RelDatabaseNodeRef = new NodeReference(); - 
osiRelOp.RelJoin = new NodeEntryItem - { - NodeRef = new NodeReference(), - GoalRef = new GoalReference(), - EntryPoint = EntryPoint.None - }; - osiRelOp.RelDatabaseIndirection = 0; - } - else - { - osiRelOp.RelDatabaseNodeRef = referencedDb.DbNodeRef; - osiRelOp.RelJoin = referencedDb.JoinRef; - osiRelOp.RelDatabaseIndirection = referencedDb.Indirection; - } + if (osiCall.RightDatabaseIndirection != 0 + && osiCall.LeftDatabaseIndirection != 0 + && osiCall.RightDatabaseIndirection < osiCall.LeftDatabaseIndirection) + { + referencedDb.DbNodeRef = osiCall.RightDatabaseNodeRef; + referencedDb.Indirection = osiCall.RightDatabaseIndirection; + referencedDb.JoinRef = osiCall.RightDatabaseJoin; + } - AddNodeWithoutDebugInfo(osiRelOp); + return osiCall; + } - if (db != null) - { - referencedDb.DbNodeRef = new NodeReference(Story, osiRelOp); - referencedDb.Indirection = 0; - referencedDb.JoinRef = new NodeEntryItem - { - NodeRef = new NodeReference(), - EntryPoint = EntryPoint.None, - GoalRef = new GoalReference() - }; - } + private RelOpNode EmitRelOp(IRRule rule, IRBinaryCondition condition, ReferencedDatabaseInfo referencedDb, + IRCondition previousCondition, Node previousNode) + { + if (referencedDb.DbNodeRef.IsValid) + { + referencedDb.Indirection++; + } - return osiRelOp; + DatabaseReference database; + Database db = null; + if (previousNode.DatabaseRef.IsValid) + { + db = EmitIntermediateDatabase(rule, (int)condition.TupleSize, null); + database = new DatabaseReference(Story, db); + } + else + { + database = new DatabaseReference(); } - private Variable EmitVariable(IRRuleVariable variable) + var adapter = EmitNodeAdapter(rule, previousCondition, previousNode); + var osiRelOp = new RelOpNode { - return new Variable + DatabaseRef = database, + Name = "", + NumParams = 0, + + ParentRef = null, + AdapterRef = new AdapterReference(Story, adapter), + + RelOp = condition.Op + }; + + if (condition.LValue is IRConstant) + { + osiRelOp.LeftValue = EmitValue(condition.LValue as IRConstant); + osiRelOp.LeftValueIndex = -1; + } + else + { + osiRelOp.LeftValue = new Value { - TypeId = variable.Type.TypeId, - IsValid = false, - OutParam = false, - IsAType = true, - Index = (sbyte)variable.Index, - Unused = variable.IsUnused(), - Adapted = !variable.IsUnused(), - VariableName = variable.Name + TypeId = (uint)Value.Type.None }; + osiRelOp.LeftValueIndex = (sbyte)(condition.LValue as IRVariable).Index; } - private RuleNode EmitRuleNode(IRRule rule, Goal goal, ReferencedDatabaseInfo referencedDb, IRCondition lastCondition, Node previousNode) + if (condition.RValue is IRConstant) { - if (referencedDb.DbNodeRef.IsValid) + osiRelOp.RightValue = EmitValue(condition.RValue as IRConstant); + osiRelOp.RightValueIndex = -1; + } + else + { + osiRelOp.RightValue = new Value { - referencedDb.Indirection++; - } + TypeId = (uint)Value.Type.None + }; + osiRelOp.RightValueIndex = (sbyte)(condition.RValue as IRVariable).Index; + } - DatabaseReference database; - Database db = null; - if (previousNode.DatabaseRef.IsValid) - { - db = EmitIntermediateDatabase(rule, (int)rule.Variables.Count, null); - if (db != null) - { - database = new DatabaseReference(Story, db); + if (db != null) + { + db.OwnerNode = osiRelOp; - // TODO - set Dummy referencedDb - referencedDb = new ReferencedDatabaseInfo - { - DbNodeRef = new NodeReference(), - Indirection = 0, - JoinRef = new NodeEntryItem - { - NodeRef = new NodeReference(), - GoalRef = new GoalReference(), - EntryPoint = EntryPoint.None - } - }; - } - else - { - database = new 
DatabaseReference(); - } - } - else + osiRelOp.RelDatabaseNodeRef = new NodeReference(); + osiRelOp.RelJoin = new NodeEntryItem { - database = new DatabaseReference(); - } + NodeRef = new NodeReference(), + GoalRef = new GoalReference(), + EntryPoint = EntryPoint.None + }; + osiRelOp.RelDatabaseIndirection = 0; + } + else + { + osiRelOp.RelDatabaseNodeRef = referencedDb.DbNodeRef; + osiRelOp.RelJoin = referencedDb.JoinRef; + osiRelOp.RelDatabaseIndirection = referencedDb.Indirection; + } - Adapter adapter = EmitNodeAdapter(rule, lastCondition, previousNode); - var osiRule = new RuleNode - { - DatabaseRef = database, - Name = "", - NumParams = 0, + AddNodeWithoutDebugInfo(osiRelOp); - NextNode = new NodeEntryItem - { - NodeRef = new NodeReference(), - EntryPoint = EntryPoint.None, - GoalRef = new GoalReference() - }, - ParentRef = null, - AdapterRef = new AdapterReference(Story, adapter), - RelDatabaseNodeRef = referencedDb.DbNodeRef, - RelJoin = referencedDb.JoinRef, - RelDatabaseIndirection = referencedDb.Indirection, - - Calls = new List(rule.Actions.Count), - Variables = new List(rule.Variables.Count), - Line = 0, - DerivedGoalRef = new GoalReference(Story, goal), - IsQuery = (rule.Type == RuleType.Query) + if (db != null) + { + referencedDb.DbNodeRef = new NodeReference(Story, osiRelOp); + referencedDb.Indirection = 0; + referencedDb.JoinRef = new NodeEntryItem + { + NodeRef = new NodeReference(), + EntryPoint = EntryPoint.None, + GoalRef = new GoalReference() }; + } - foreach (var variable in rule.Variables) - { - osiRule.Variables.Add(EmitVariable(variable)); - } + return osiRelOp; + } - if (db != null) - { - db.OwnerNode = osiRule; - } + private Variable EmitVariable(IRRuleVariable variable) + { + return new Variable + { + TypeId = variable.Type.TypeId, + IsValid = false, + OutParam = false, + IsAType = true, + Index = (sbyte)variable.Index, + Unused = variable.IsUnused(), + Adapted = !variable.IsUnused(), + VariableName = variable.Name + }; + } - AddNodeWithoutDebugInfo(osiRule); + private RuleNode EmitRuleNode(IRRule rule, Goal goal, ReferencedDatabaseInfo referencedDb, IRCondition lastCondition, Node previousNode) + { + if (referencedDb.DbNodeRef.IsValid) + { + referencedDb.Indirection++; + } - if (referencedDb.DbNodeRef.IsValid && referencedDb.Indirection == 1) + DatabaseReference database; + Database db = null; + if (previousNode.DatabaseRef.IsValid) + { + db = EmitIntermediateDatabase(rule, (int)rule.Variables.Count, null); + if (db != null) { - osiRule.RelJoin = new NodeEntryItem + database = new DatabaseReference(Story, db); + + // TODO - set Dummy referencedDb + referencedDb = new ReferencedDatabaseInfo { - NodeRef = new NodeReference(Story, osiRule), - GoalRef = new GoalReference(Story, goal), - EntryPoint = EntryPoint.None + DbNodeRef = new NodeReference(), + Indirection = 0, + JoinRef = new NodeEntryItem + { + NodeRef = new NodeReference(), + GoalRef = new GoalReference(), + EntryPoint = EntryPoint.None + } }; } - - return osiRule; + else + { + database = new DatabaseReference(); + } + } + else + { + database = new DatabaseReference(); } - private void EmitRuleActions(IRRule rule, RuleNode osiRule) + Adapter adapter = EmitNodeAdapter(rule, lastCondition, previousNode); + var osiRule = new RuleNode { - foreach (var action in rule.Actions) + DatabaseRef = database, + Name = "", + NumParams = 0, + + NextNode = new NodeEntryItem { - osiRule.Calls.Add(EmitCall(action)); - } + NodeRef = new NodeReference(), + EntryPoint = EntryPoint.None, + GoalRef = new GoalReference() + }, + 
ParentRef = null, + AdapterRef = new AdapterReference(Story, adapter), + RelDatabaseNodeRef = referencedDb.DbNodeRef, + RelJoin = referencedDb.JoinRef, + RelDatabaseIndirection = referencedDb.Indirection, + + Calls = new List(rule.Actions.Count), + Variables = new List(rule.Variables.Count), + Line = 0, + DerivedGoalRef = new GoalReference(Story, goal), + IsQuery = (rule.Type == RuleType.Query) + }; + + foreach (var variable in rule.Variables) + { + osiRule.Variables.Add(EmitVariable(variable)); } - private ProcNode EmitUserQueryDefinition(FunctionSignature signature, Function queryFunc) + if (db != null) { - var osiProc = new ProcNode - { - DatabaseRef = new DatabaseReference(), - Name = signature.Name, - NumParams = (byte)signature.Params.Count, - ReferencedBy = new List() - }; - AddNode(osiProc); + db.OwnerNode = osiRule; + } + + AddNodeWithoutDebugInfo(osiRule); - var aliasedSignature = new FunctionSignature + if (referencedDb.DbNodeRef.IsValid && referencedDb.Indirection == 1) + { + osiRule.RelJoin = new NodeEntryItem { - FullyTyped = signature.FullyTyped, - Name = signature.Name + "__DEF__", - Params = signature.Params, - Type = signature.Type, - Inserted = signature.Inserted, - Deleted = signature.Deleted, - Read = signature.Read + NodeRef = new NodeReference(Story, osiRule), + GoalRef = new GoalReference(Story, goal), + EntryPoint = EntryPoint.None }; + } - var osiFunc = EmitFunction(LS.Story.FunctionType.UserQuery, aliasedSignature, new NodeReference(Story, osiProc)); - if (queryFunc != null) - { - osiFunc.ConditionReferences = queryFunc.ConditionReferences; - osiFunc.ActionReferences = queryFunc.ActionReferences; - } - return osiProc; + return osiRule; + } + + private void EmitRuleActions(IRRule rule, RuleNode osiRule) + { + foreach (var action in rule.Actions) + { + osiRule.Calls.Add(EmitCall(action)); } + } - private Node EmitUserQueryInitialFunc(IRFuncCondition condition) + private ProcNode EmitUserQueryDefinition(FunctionSignature signature, Function queryFunc) + { + var osiProc = new ProcNode { - var signature = Context.LookupSignature(condition.Func.Name); - var name = new FunctionNameAndArity(signature.Name + "__DEF__", signature.Params.Count); - if (!Funcs.TryGetValue(name, out Node initialFunc)) - { - Function osiUserQuery = null; - FuncEntries.TryGetValue(signature.GetNameAndArity(), out osiUserQuery); - initialFunc = EmitUserQueryDefinition(signature, osiUserQuery); - Funcs.Add(name, initialFunc); - } + DatabaseRef = new DatabaseReference(), + Name = signature.Name, + NumParams = (byte)signature.Params.Count, + ReferencedBy = new List() + }; + AddNode(osiProc); + + var aliasedSignature = new FunctionSignature + { + FullyTyped = signature.FullyTyped, + Name = signature.Name + "__DEF__", + Params = signature.Params, + Type = signature.Type, + Inserted = signature.Inserted, + Deleted = signature.Deleted, + Read = signature.Read + }; + + var osiFunc = EmitFunction(LS.Story.FunctionType.UserQuery, aliasedSignature, new NodeReference(Story, osiProc)); + if (queryFunc != null) + { + osiFunc.ConditionReferences = queryFunc.ConditionReferences; + osiFunc.ActionReferences = queryFunc.ActionReferences; + } + return osiProc; + } - return initialFunc; + private Node EmitUserQueryInitialFunc(IRFuncCondition condition) + { + var signature = Context.LookupSignature(condition.Func.Name); + var name = new FunctionNameAndArity(signature.Name + "__DEF__", signature.Params.Count); + if (!Funcs.TryGetValue(name, out Node initialFunc)) + { + Function osiUserQuery = null; + 
FuncEntries.TryGetValue(signature.GetNameAndArity(), out osiUserQuery); + initialFunc = EmitUserQueryDefinition(signature, osiUserQuery); + Funcs.Add(name, initialFunc); } - private class ReferencedDatabaseInfo + return initialFunc; + } + + private class ReferencedDatabaseInfo + { + public NodeReference DbNodeRef = new NodeReference(); + public byte Indirection = 0; + public NodeEntryItem JoinRef = new NodeEntryItem + { + NodeRef = new NodeReference(), + EntryPoint = EntryPoint.None, + GoalRef = new GoalReference() + }; + } + + private RuleNode EmitRule(IRRule rule, Goal goal) + { + var referencedDb = new ReferencedDatabaseInfo(); + var initialCall = rule.Conditions[0] as IRFuncCondition; + Node initialFunc; + if (rule.Type == RuleType.Query) + { + initialFunc = EmitUserQueryInitialFunc(initialCall); + } + else { - public NodeReference DbNodeRef = new NodeReference(); - public byte Indirection = 0; - public NodeEntryItem JoinRef = new NodeEntryItem + initialFunc = EmitName(initialCall.Func.Name, NameRefType.Condition); + if (initialFunc is DatabaseNode) { - NodeRef = new NodeReference(), - EntryPoint = EntryPoint.None, - GoalRef = new GoalReference() - }; + referencedDb.Indirection = 0; + referencedDb.DbNodeRef = new NodeReference(Story, initialFunc); + } } - private RuleNode EmitRule(IRRule rule, Goal goal) + var lastConditionNode = initialFunc; + IRCondition lastCondition = initialCall; + for (var i = 1; i < rule.Conditions.Count; i++) { - var referencedDb = new ReferencedDatabaseInfo(); - var initialCall = rule.Conditions[0] as IRFuncCondition; - Node initialFunc; - if (rule.Type == RuleType.Query) + var condition = rule.Conditions[i]; + if (condition is IRBinaryCondition) { - initialFunc = EmitUserQueryInitialFunc(initialCall); + var relOp = EmitRelOp(rule, condition as IRBinaryCondition, referencedDb, lastCondition, lastConditionNode); + AddJoinTarget(lastConditionNode, relOp, EntryPoint.None, goal); + AddNodeDebugInfo(relOp, condition.Location, relOp.AdapterRef.Resolve().LogicalToPhysicalMap.Count, rule); + lastConditionNode = relOp; } else { - initialFunc = EmitName(initialCall.Func.Name, NameRefType.Condition); - if (initialFunc is DatabaseNode) - { - referencedDb.Indirection = 0; - referencedDb.DbNodeRef = new NodeReference(Story, initialFunc); - } - } - - var lastConditionNode = initialFunc; - IRCondition lastCondition = initialCall; - for (var i = 1; i < rule.Conditions.Count; i++) - { - var condition = rule.Conditions[i]; - if (condition is IRBinaryCondition) - { - var relOp = EmitRelOp(rule, condition as IRBinaryCondition, referencedDb, lastCondition, lastConditionNode); - AddJoinTarget(lastConditionNode, relOp, EntryPoint.None, goal); - AddNodeDebugInfo(relOp, condition.Location, relOp.AdapterRef.Resolve().LogicalToPhysicalMap.Count, rule); - lastConditionNode = relOp; - } - else - { - var func = condition as IRFuncCondition; - var leftFunc = (i == 1) ? initialCall : null; - var join = EmitJoin(lastConditionNode, lastCondition, func, rule, goal, referencedDb); - lastConditionNode = join; - } - lastCondition = condition; + var func = condition as IRFuncCondition; + var leftFunc = (i == 1) ? 
initialCall : null; + var join = EmitJoin(lastConditionNode, lastCondition, func, rule, goal, referencedDb); + lastConditionNode = join; } + lastCondition = condition; + } - var osiRule = EmitRuleNode(rule, goal, referencedDb, lastCondition, lastConditionNode); - AddJoinTarget(lastConditionNode, osiRule, EntryPoint.None, goal); - Rules.Add(rule, osiRule); + var osiRule = EmitRuleNode(rule, goal, referencedDb, lastCondition, lastConditionNode); + AddJoinTarget(lastConditionNode, osiRule, EntryPoint.None, goal); + Rules.Add(rule, osiRule); - var validVariables = rule.Variables.Where(v => !v.IsUnused()).Count(); - AddNodeDebugInfo(osiRule, rule.Location, validVariables, rule); + var validVariables = rule.Variables.Where(v => !v.IsUnused()).Count(); + AddNodeDebugInfo(osiRule, rule.Location, validVariables, rule); - if (DebugInfo != null) + if (DebugInfo != null) + { + var ruleDebug = new RuleDebugInfo + { + Id = osiRule.Index, + GoalId = (UInt32)Story.Goals.Count, + Name = (rule.Conditions.First() as IRFuncCondition).Func.Name.ToString(), + Variables = new List(), + Actions = new List(), + ConditionsStartLine = (uint)rule.Location.StartLine, + ConditionsEndLine = (uint)rule.Conditions.Last().Location.EndLine, + ActionsStartLine = (uint)rule.Actions.First().Location.StartLine, + ActionsEndLine = (uint)rule.Location.EndLine + }; + + foreach (var variable in rule.Variables) { - var ruleDebug = new RuleDebugInfo + var varDebug = new RuleVariableDebugInfo { - Id = osiRule.Index, - GoalId = (UInt32)Story.Goals.Count, - Name = (rule.Conditions.First() as IRFuncCondition).Func.Name.ToString(), - Variables = new List(), - Actions = new List(), - ConditionsStartLine = (uint)rule.Location.StartLine, - ConditionsEndLine = (uint)rule.Conditions.Last().Location.EndLine, - ActionsStartLine = (uint)rule.Actions.First().Location.StartLine, - ActionsEndLine = (uint)rule.Location.EndLine + Index = (UInt32)variable.Index, + Name = variable.Name, + Type = (UInt32)variable.Type.IntrinsicTypeId, + Unused = variable.IsUnused() }; - - foreach (var variable in rule.Variables) - { - var varDebug = new RuleVariableDebugInfo - { - Index = (UInt32)variable.Index, - Name = variable.Name, - Type = (UInt32)variable.Type.IntrinsicTypeId, - Unused = variable.IsUnused() - }; - ruleDebug.Variables.Add(varDebug); - } - - foreach (var action in rule.Actions) + ruleDebug.Variables.Add(varDebug); + } + + foreach (var action in rule.Actions) + { + ruleDebug.Actions.Add(new ActionDebugInfo { - ruleDebug.Actions.Add(new ActionDebugInfo - { - Line = (uint)action.Location.StartLine - }); - } - - DebugInfo.Rules.Add(ruleDebug.Id, ruleDebug); + Line = (uint)action.Location.StartLine + }); } - return osiRule; + DebugInfo.Rules.Add(ruleDebug.Id, ruleDebug); } - private void EmitGoalActions(IRGoal goal, Goal osiGoal) + return osiRule; + } + + private void EmitGoalActions(IRGoal goal, Goal osiGoal) + { + foreach (var fact in goal.InitSection) { - foreach (var fact in goal.InitSection) - { - var call = EmitCall(fact); - osiGoal.InitCalls.Add(call); - } + var call = EmitCall(fact); + osiGoal.InitCalls.Add(call); + } - foreach (var fact in goal.ExitSection) - { - var call = EmitCall(fact); - osiGoal.ExitCalls.Add(call); - } + foreach (var fact in goal.ExitSection) + { + var call = EmitCall(fact); + osiGoal.ExitCalls.Add(call); } + } - private Goal EmitGoal(IRGoal goal) + private Goal EmitGoal(IRGoal goal) + { + var osiGoal = new Goal(Story) { - var osiGoal = new Goal(Story) - { - Index = (uint)(Story.Goals.Count + 1), - Name = goal.Name, - 
InitCalls = new List(goal.InitSection.Count), - ExitCalls = new List(goal.ExitSection.Count), - ParentGoals = new List(), - SubGoals = new List() - }; + Index = (uint)(Story.Goals.Count + 1), + Name = goal.Name, + InitCalls = new List(goal.InitSection.Count), + ExitCalls = new List(goal.ExitSection.Count), + ParentGoals = new List(), + SubGoals = new List() + }; + + if (goal.ParentTargetEdges.Count > 0) + { + // TODO const + osiGoal.SubGoalCombination = 1; // SGC_AND ? + osiGoal.Flags = 2; // HasParentGoal flag ? + } + else + { + osiGoal.SubGoalCombination = 0; + osiGoal.Flags = 0; + } - if (goal.ParentTargetEdges.Count > 0) + if (DebugInfo != null) + { + string canonicalizedPath; + if (File.Exists(goal.Location.FileName)) { - // TODO const - osiGoal.SubGoalCombination = 1; // SGC_AND ? - osiGoal.Flags = 2; // HasParentGoal flag ? + canonicalizedPath = Path.GetFullPath(goal.Location.FileName); } else { - osiGoal.SubGoalCombination = 0; - osiGoal.Flags = 0; + canonicalizedPath = goal.Location.FileName; } - if (DebugInfo != null) + var goalDebug = new GoalDebugInfo { - string canonicalizedPath; - if (File.Exists(goal.Location.FileName)) - { - canonicalizedPath = Path.GetFullPath(goal.Location.FileName); - } - else - { - canonicalizedPath = goal.Location.FileName; - } - - var goalDebug = new GoalDebugInfo - { - Id = osiGoal.Index, - Name = goal.Name, - Path = canonicalizedPath, - InitActions = new List(), - ExitActions = new List() - }; + Id = osiGoal.Index, + Name = goal.Name, + Path = canonicalizedPath, + InitActions = new List(), + ExitActions = new List() + }; - foreach (var action in goal.InitSection) + foreach (var action in goal.InitSection) + { + goalDebug.InitActions.Add(new ActionDebugInfo { - goalDebug.InitActions.Add(new ActionDebugInfo - { - Line = (uint)action.Location.StartLine - }); - } + Line = (uint)action.Location.StartLine + }); + } - foreach (var action in goal.ExitSection) + foreach (var action in goal.ExitSection) + { + goalDebug.ExitActions.Add(new ActionDebugInfo { - goalDebug.ExitActions.Add(new ActionDebugInfo - { - Line = (uint)action.Location.StartLine - }); - } - - DebugInfo.Goals.Add(goalDebug.Id, goalDebug); + Line = (uint)action.Location.StartLine + }); } - return osiGoal; + DebugInfo.Goals.Add(goalDebug.Id, goalDebug); } - - private void EmitGoals() + + return osiGoal; + } + + private void EmitGoals() + { + foreach (var goal in Context.GoalsByName) { - foreach (var goal in Context.GoalsByName) + var osiGoal = EmitGoal(goal.Value); + osiGoal.Index = (uint)Story.Goals.Count + 1; + Goals.Add(goal.Value, osiGoal); + Story.Goals.Add(osiGoal.Index, osiGoal); + + foreach (var rule in goal.Value.KBSection) { - var osiGoal = EmitGoal(goal.Value); - osiGoal.Index = (uint)Story.Goals.Count + 1; - Goals.Add(goal.Value, osiGoal); - Story.Goals.Add(osiGoal.Index, osiGoal); + var firstNodeIndex = (uint)Story.Nodes.Count + 1; + var osiRule = EmitRule(rule, osiGoal); - foreach (var rule in goal.Value.KBSection) + if (DebugInfo != null) { - var firstNodeIndex = (uint)Story.Nodes.Count + 1; - var osiRule = EmitRule(rule, osiGoal); - - if (DebugInfo != null) + var lastNodeIndex = (uint)Story.Nodes.Count; + for (var i = firstNodeIndex; i <= lastNodeIndex; i++) { - var lastNodeIndex = (uint)Story.Nodes.Count; - for (var i = firstNodeIndex; i <= lastNodeIndex; i++) + var osiNode = Story.Nodes[i]; + if (osiNode is TreeNode + || osiNode is RelNode + || i == lastNodeIndex) { - var osiNode = Story.Nodes[i]; - if (osiNode is TreeNode - || osiNode is RelNode - || i == lastNodeIndex) - { 
- DebugInfo.Nodes[i].RuleId = osiRule.Index; - } + DebugInfo.Nodes[i].RuleId = osiRule.Index; } } } } + } - foreach (var goal in Goals) - { - EmitGoalActions(goal.Key, goal.Value); - } - - foreach (var rule in Rules) - { - EmitRuleActions(rule.Key, rule.Value); - } + foreach (var goal in Goals) + { + EmitGoalActions(goal.Key, goal.Value); + } + + foreach (var rule in Rules) + { + EmitRuleActions(rule.Key, rule.Value); } + } - /// - /// Add parent goal/subgoal mapping to the story. - /// This needs to be done after all goals were generated, as we need the Osiris goal - /// object ID-s to make goal references. - /// - private void EmitParentGoals() + /// + /// Add parent goal/subgoal mapping to the story. + /// This needs to be done after all goals were generated, as we need the Osiris goal + /// object ID-s to make goal references. + /// + private void EmitParentGoals() + { + foreach (var goal in Context.GoalsByName) { - foreach (var goal in Context.GoalsByName) + var osiGoal = Goals[goal.Value]; + foreach (var parent in goal.Value.ParentTargetEdges) { - var osiGoal = Goals[goal.Value]; - foreach (var parent in goal.Value.ParentTargetEdges) - { - var parentGoal = Context.LookupGoal(parent.Goal.Name); - var osiParentGoal = Goals[parentGoal]; - osiGoal.ParentGoals.Add(new GoalReference(Story, osiParentGoal)); - osiParentGoal.SubGoals.Add(new GoalReference(Story, osiGoal)); - } + var parentGoal = Context.LookupGoal(parent.Goal.Name); + var osiParentGoal = Goals[parentGoal]; + osiGoal.ParentGoals.Add(new GoalReference(Story, osiParentGoal)); + osiParentGoal.SubGoals.Add(new GoalReference(Story, osiGoal)); } } + } - /// - /// Generates a function entry for each function in the story header that was not referenced - /// from the story scripts. The Osiris runtime crashes if some functions from the story - /// header are not included in the final story file. - /// - private void EmitHeaderFunctions() + /// + /// Generates a function entry for each function in the story header that was not referenced + /// from the story scripts. The Osiris runtime crashes if some functions from the story + /// header are not included in the final story file. + /// + private void EmitHeaderFunctions() + { + foreach (var signature in Context.Signatures) { - foreach (var signature in Context.Signatures) + if (signature.Value.Type == FunctionType.SysCall + || signature.Value.Type == FunctionType.SysQuery + || signature.Value.Type == FunctionType.Call + || signature.Value.Type == FunctionType.Query + || signature.Value.Type == FunctionType.Event) { - if (signature.Value.Type == FunctionType.SysCall - || signature.Value.Type == FunctionType.SysQuery - || signature.Value.Type == FunctionType.Call - || signature.Value.Type == FunctionType.Query - || signature.Value.Type == FunctionType.Event) + if (!Funcs.TryGetValue(signature.Key, out Node funcNode)) { - if (!Funcs.TryGetValue(signature.Key, out Node funcNode)) - { - EmitName(signature.Value.GetNameAndArity(), NameRefType.None); - } + EmitName(signature.Value.GetNameAndArity(), NameRefType.None); } } } + } - public Story EmitStory() + public Story EmitStory() + { + Story = new Story { - Story = new Story + MajorVersion = (byte)(OsiVersion.VerLastSupported >> 8), + MinorVersion = (byte)(OsiVersion.VerLastSupported & 0xff), + Header = new SaveFileHeader { + Version = "Osiris save file dd. 03/30/17 07:28:20. 
Version 1.8.", + BigEndian = false, + DebugFlags = 0x000C10A0, MajorVersion = (byte)(OsiVersion.VerLastSupported >> 8), MinorVersion = (byte)(OsiVersion.VerLastSupported & 0xff), - Header = new SaveFileHeader - { - Version = "Osiris save file dd. 03/30/17 07:28:20. Version 1.8.", - BigEndian = false, - DebugFlags = 0x000C10A0, - MajorVersion = (byte)(OsiVersion.VerLastSupported >> 8), - MinorVersion = (byte)(OsiVersion.VerLastSupported & 0xff), - Unused = 0 - }, - Types = new Dictionary(), - DivObjects = new List(), - Functions = new List(), - Nodes = new Dictionary(), - Adapters = new Dictionary(), - Databases = new Dictionary(), - Goals = new Dictionary(), - GlobalActions = new List(), - ExternalStringTable = new List(), - FunctionSignatureMap = new Dictionary() - }; - - // TODO HEADER - - AddStoryTypes(); - EmitGoals(); - EmitHeaderFunctions(); - EmitParentGoals(); - - return Story; - } + Unused = 0 + }, + Types = new Dictionary(), + DivObjects = new List(), + Functions = new List(), + Nodes = new Dictionary(), + Adapters = new Dictionary(), + Databases = new Dictionary(), + Goals = new Dictionary(), + GlobalActions = new List(), + ExternalStringTable = new List(), + FunctionSignatureMap = new Dictionary() + }; + + // TODO HEADER + + AddStoryTypes(); + EmitGoals(); + EmitHeaderFunctions(); + EmitParentGoals(); + + return Story; } } diff --git a/LSLib/LS/Story/DataNode.cs b/LSLib/LS/Story/DataNode.cs index 45545534..ff31500d 100644 --- a/LSLib/LS/Story/DataNode.cs +++ b/LSLib/LS/Story/DataNode.cs @@ -1,55 +1,54 @@ using System.Collections.Generic; using System.IO; -namespace LSLib.LS.Story +namespace LSLib.LS.Story; + +public abstract class DataNode : Node { - public abstract class DataNode : Node + public List ReferencedBy; + + public override void Read(OsiReader reader) { - public List ReferencedBy; + base.Read(reader); + ReferencedBy = reader.ReadList(); + } - public override void Read(OsiReader reader) - { - base.Read(reader); - ReferencedBy = reader.ReadList(); - } + public override void Write(OsiWriter writer) + { + base.Write(writer); + writer.WriteList(ReferencedBy); + } - public override void Write(OsiWriter writer) - { - base.Write(writer); - writer.WriteList(ReferencedBy); - } + public override void PostLoad(Story story) + { + base.PostLoad(story); - public override void PostLoad(Story story) + foreach (var reference in ReferencedBy) { - base.PostLoad(story); - - foreach (var reference in ReferencedBy) + if (reference.NodeRef.IsValid) { - if (reference.NodeRef.IsValid) + var ruleNode = reference.NodeRef.Resolve(); + if (!reference.GoalRef.IsNull && + ruleNode is RuleNode) { - var ruleNode = reference.NodeRef.Resolve(); - if (!reference.GoalRef.IsNull && - ruleNode is RuleNode) - { - (ruleNode as RuleNode).DerivedGoalRef = new GoalReference(story, reference.GoalRef.Index); - } + (ruleNode as RuleNode).DerivedGoalRef = new GoalReference(story, reference.GoalRef.Index); } } } + } - public override void DebugDump(TextWriter writer, Story story) - { - base.DebugDump(writer, story); + public override void DebugDump(TextWriter writer, Story story) + { + base.DebugDump(writer, story); - if (ReferencedBy.Count > 0) + if (ReferencedBy.Count > 0) + { + writer.WriteLine(" Referenced By:"); + foreach (var entry in ReferencedBy) { - writer.WriteLine(" Referenced By:"); - foreach (var entry in ReferencedBy) - { - writer.Write(" "); - entry.DebugDump(writer, story); - writer.WriteLine(""); - } + writer.Write(" "); + entry.DebugDump(writer, story); + writer.WriteLine(""); } } } diff --git 
a/LSLib/LS/Story/Database.cs b/LSLib/LS/Story/Database.cs index 810b3117..538fb194 100644 --- a/LSLib/LS/Story/Database.cs +++ b/LSLib/LS/Story/Database.cs @@ -3,246 +3,245 @@ using System.ComponentModel; using System.IO; -namespace LSLib.LS.Story -{ - public class Fact : OsirisSerializable - { - public List Columns; +namespace LSLib.LS.Story; - public void Read(OsiReader reader) - { - Columns = new List(); - var count = reader.ReadByte(); - while (count-- > 0) - { - var value = new Value(); - value.Read(reader); - Columns.Add(value); - } - } +public class Fact : OsirisSerializable +{ + public List Columns; - public void Write(OsiWriter writer) + public void Read(OsiReader reader) + { + Columns = new List(); + var count = reader.ReadByte(); + while (count-- > 0) { - writer.Write((byte)Columns.Count); - foreach (var column in Columns) - { - column.Write(writer); - } + var value = new Value(); + value.Read(reader); + Columns.Add(value); } + } - public void DebugDump(TextWriter writer, Story story) + public void Write(OsiWriter writer) + { + writer.Write((byte)Columns.Count); + foreach (var column in Columns) { - writer.Write("("); - for (var i = 0; i < Columns.Count; i++) - { - Columns[i].DebugDump(writer, story); - if (i < Columns.Count - 1) writer.Write(", "); - } - writer.Write(")"); + column.Write(writer); } } - internal class FactPropertyDescriptor : PropertyDescriptor + public void DebugDump(TextWriter writer, Story story) { - public int Index { get; private set; } - public Value.Type BaseType { get; private set; } - public byte Type { get; private set; } - - public FactPropertyDescriptor(int index, Value.Type baseType, byte type) - : base(index.ToString(), new Attribute[0]) + writer.Write("("); + for (var i = 0; i < Columns.Count; i++) { - Index = index; - BaseType = baseType; - Type = type; + Columns[i].DebugDump(writer, story); + if (i < Columns.Count - 1) writer.Write(", "); } + writer.Write(")"); + } +} - public override bool CanResetValue(object component) - { - return false; - } +internal class FactPropertyDescriptor : PropertyDescriptor +{ + public int Index { get; private set; } + public Value.Type BaseType { get; private set; } + public byte Type { get; private set; } - public override Type ComponentType - { - get { return typeof(Fact); } - } + public FactPropertyDescriptor(int index, Value.Type baseType, byte type) + : base(index.ToString(), new Attribute[0]) + { + Index = index; + BaseType = baseType; + Type = type; + } - public override object GetValue(object component) - { - Fact fact = (Fact)component; - return fact.Columns[Index].ToString(); - } + public override bool CanResetValue(object component) + { + return false; + } - public override bool IsReadOnly - { - get { return false; } - } + public override Type ComponentType + { + get { return typeof(Fact); } + } - public override Type PropertyType - { - get - { - switch (BaseType) - { - case Value.Type.Integer: return typeof(Int32); - case Value.Type.Integer64: return typeof(Int64); - case Value.Type.Float: return typeof(Single); - case Value.Type.String: - case Value.Type.GuidString: return typeof(String); - case Value.Type.None: - default: throw new InvalidOperationException("Cannot retrieve type of an unknown column"); - } - } - } + public override object GetValue(object component) + { + Fact fact = (Fact)component; + return fact.Columns[Index].ToString(); + } - public override void ResetValue(object component) - { - throw new NotImplementedException(); - } + public override bool IsReadOnly + { + get { return false; 
} + } - public override void SetValue(object component, object value) + public override Type PropertyType + { + get { - Fact fact = (Fact)component; - var column = fact.Columns[Index]; - switch (BaseType) { - case Value.Type.Integer: - { - if (value is String) column.IntValue = Int32.Parse((String)value); - else if (value is Int32) column.IntValue = (Int32)value; - else throw new ArgumentException("Invalid Int32 value"); - break; - } - - case Value.Type.Integer64: - { - if (value is String) column.Int64Value = Int64.Parse((String)value); - else if (value is Int64) column.Int64Value = (Int64)value; - else throw new ArgumentException("Invalid Int64 value"); - break; - } - - case Value.Type.Float: - { - if (value is String) column.FloatValue = Single.Parse((String)value); - else if (value is Single) column.FloatValue = (Single)value; - else throw new ArgumentException("Invalid float value"); - break; - } - + case Value.Type.Integer: return typeof(Int32); + case Value.Type.Integer64: return typeof(Int64); + case Value.Type.Float: return typeof(Single); case Value.Type.String: - case Value.Type.GuidString: - { - column.StringValue = (String)value; - break; - } - + case Value.Type.GuidString: return typeof(String); case Value.Type.None: - default: - throw new InvalidOperationException("Cannot retrieve type of an unknown column"); + default: throw new InvalidOperationException("Cannot retrieve type of an unknown column"); } } - - public override bool ShouldSerializeValue(object component) - { - return false; - } } + public override void ResetValue(object component) + { + throw new NotImplementedException(); + } - public class FactCollection : List, ITypedList + public override void SetValue(object component, object value) { - private Story Story; - private Database Database; - private PropertyDescriptorCollection Properties; + Fact fact = (Fact)component; + var column = fact.Columns[Index]; - public FactCollection(Database database, Story story) - : base() + switch (BaseType) { - Database = database; - Story = story; - } + case Value.Type.Integer: + { + if (value is String) column.IntValue = Int32.Parse((String)value); + else if (value is Int32) column.IntValue = (Int32)value; + else throw new ArgumentException("Invalid Int32 value"); + break; + } - public PropertyDescriptorCollection GetItemProperties(PropertyDescriptor[] listAccessors) - { - if (Properties == null) - { - var props = new List(); - var types = Database.Parameters.Types; - for (var i = 0; i < types.Count; i++) + case Value.Type.Integer64: { - var type = Story.Types[types[i]]; - Value.Type baseType; - if (type.Alias != 0) - baseType = (Value.Type)type.Alias; - else - baseType = (Value.Type)type.Index; - props.Add(new FactPropertyDescriptor(i, baseType, type.Index)); + if (value is String) column.Int64Value = Int64.Parse((String)value); + else if (value is Int64) column.Int64Value = (Int64)value; + else throw new ArgumentException("Invalid Int64 value"); + break; } - Properties = new PropertyDescriptorCollection(props.ToArray(), true); - } + case Value.Type.Float: + { + if (value is String) column.FloatValue = Single.Parse((String)value); + else if (value is Single) column.FloatValue = (Single)value; + else throw new ArgumentException("Invalid float value"); + break; + } - return Properties; - } + case Value.Type.String: + case Value.Type.GuidString: + { + column.StringValue = (String)value; + break; + } - public string GetListName(PropertyDescriptor[] listAccessors) - { - return ""; + case Value.Type.None: + default: + throw new 
InvalidOperationException("Cannot retrieve type of an unknown column"); } } - public class Database : OsirisSerializable + public override bool ShouldSerializeValue(object component) + { + return false; + } +} + + +public class FactCollection : List, ITypedList +{ + private Story Story; + private Database Database; + private PropertyDescriptorCollection Properties; + + public FactCollection(Database database, Story story) + : base() { - public UInt32 Index; - public ParameterList Parameters; - public FactCollection Facts; - public Node OwnerNode; - public long FactsPosition; + Database = database; + Story = story; + } - public void Read(OsiReader reader) + public PropertyDescriptorCollection GetItemProperties(PropertyDescriptor[] listAccessors) + { + if (Properties == null) { - Index = reader.ReadUInt32(); - Parameters = new ParameterList(); - Parameters.Read(reader); + var props = new List(); + var types = Database.Parameters.Types; + for (var i = 0; i < types.Count; i++) + { + var type = Story.Types[types[i]]; + Value.Type baseType; + if (type.Alias != 0) + baseType = (Value.Type)type.Alias; + else + baseType = (Value.Type)type.Index; + props.Add(new FactPropertyDescriptor(i, baseType, type.Index)); + } - FactsPosition = reader.BaseStream.Position; - Facts = new FactCollection(this, reader.Story); - reader.ReadList(Facts); + Properties = new PropertyDescriptorCollection(props.ToArray(), true); } - public void Write(OsiWriter writer) + return Properties; + } + + public string GetListName(PropertyDescriptor[] listAccessors) + { + return ""; + } +} + +public class Database : OsirisSerializable +{ + public UInt32 Index; + public ParameterList Parameters; + public FactCollection Facts; + public Node OwnerNode; + public long FactsPosition; + + public void Read(OsiReader reader) + { + Index = reader.ReadUInt32(); + Parameters = new ParameterList(); + Parameters.Read(reader); + + FactsPosition = reader.BaseStream.Position; + Facts = new FactCollection(this, reader.Story); + reader.ReadList(Facts); + } + + public void Write(OsiWriter writer) + { + Parameters.Write(writer); + writer.WriteList(Facts); + } + + public void DebugDump(TextWriter writer, Story story) + { + if (OwnerNode != null && OwnerNode.Name.Length > 0) { - Parameters.Write(writer); - writer.WriteList(Facts); + writer.Write("{0}({1})", OwnerNode.Name, OwnerNode.NumParams); } - - public void DebugDump(TextWriter writer, Story story) + else if (OwnerNode != null) { - if (OwnerNode != null && OwnerNode.Name.Length > 0) - { - writer.Write("{0}({1})", OwnerNode.Name, OwnerNode.NumParams); - } - else if (OwnerNode != null) - { - writer.Write("<{0}>", OwnerNode.TypeName()); - } - else - { - writer.Write("(Not owned)"); - } + writer.Write("<{0}>", OwnerNode.TypeName()); + } + else + { + writer.Write("(Not owned)"); + } - writer.Write(" @ {0:X}: ", FactsPosition); - Parameters.DebugDump(writer, story); + writer.Write(" @ {0:X}: ", FactsPosition); + Parameters.DebugDump(writer, story); - writer.WriteLine(""); - writer.WriteLine(" Facts: "); - foreach (var fact in Facts) - { - writer.Write(" "); - fact.DebugDump(writer, story); - writer.WriteLine(); - } + writer.WriteLine(""); + writer.WriteLine(" Facts: "); + foreach (var fact in Facts) + { + writer.Write(" "); + fact.DebugDump(writer, story); + writer.WriteLine(); } } } diff --git a/LSLib/LS/Story/DatabaseNode.cs b/LSLib/LS/Story/DatabaseNode.cs index 3184049c..9e04b1ff 100644 --- a/LSLib/LS/Story/DatabaseNode.cs +++ b/LSLib/LS/Story/DatabaseNode.cs @@ -1,24 +1,23 @@ using System.IO; 
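The FactCollection / FactPropertyDescriptor pair restructured in the Database.cs hunk above implements ITypedList, exposing one synthetic column per entry in Database.Parameters.Types (with type aliases resolved back to their base Value.Type). A minimal usage sketch of that surface, assuming a story and database have already been loaded; the helper class name and console output below are illustrative only and not part of the patch:

using System;
using System.ComponentModel;
using LSLib.LS.Story;

static class FactDump
{
    // Prints every fact of a database, one comma-separated row per fact,
    // using the property descriptors built by FactCollection.GetItemProperties.
    public static void DumpFacts(Database db)
    {
        PropertyDescriptorCollection columns = db.Facts.GetItemProperties(null);
        foreach (Fact fact in db.Facts)
        {
            var cells = new string[columns.Count];
            for (int i = 0; i < columns.Count; i++)
            {
                // FactPropertyDescriptor.GetValue renders the underlying Value as a string.
                cells[i] = (string)columns[i].GetValue(fact);
            }
            Console.WriteLine(string.Join(", ", cells));
        }
    }
}

Because the descriptors are typed from the story's type table, the same collection can be handed directly to a WinForms BindingSource or DataGridView, which is presumably why ITypedList is implemented here instead of exposing a plain list of rows.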
-namespace LSLib.LS.Story +namespace LSLib.LS.Story; + +public class DatabaseNode : DataNode { - public class DatabaseNode : DataNode + public override Type NodeType() { - public override Type NodeType() - { - return Type.Database; - } + return Type.Database; + } - public override string TypeName() - { - return "Database"; - } + public override string TypeName() + { + return "Database"; + } - public override void MakeScript(TextWriter writer, Story story, Tuple tuple, bool printTypes) - { - writer.Write("{0}(", Name); - tuple.MakeScript(writer, story, printTypes); - writer.WriteLine(")"); - } + public override void MakeScript(TextWriter writer, Story story, Tuple tuple, bool printTypes) + { + writer.Write("{0}(", Name); + tuple.MakeScript(writer, story, printTypes); + writer.WriteLine(")"); } } diff --git a/LSLib/LS/Story/DebugExport.cs b/LSLib/LS/Story/DebugExport.cs index 026a69a5..0fbdf83d 100644 --- a/LSLib/LS/Story/DebugExport.cs +++ b/LSLib/LS/Story/DebugExport.cs @@ -4,533 +4,532 @@ using System.Linq; using Newtonsoft.Json; -namespace LSLib.LS.Story +namespace LSLib.LS.Story; + +public class StoryDebugExportVisitor { - public class StoryDebugExportVisitor - { - private Stream stream; - private JsonTextWriter writer; + private Stream stream; + private JsonTextWriter writer; - public StoryDebugExportVisitor(Stream outputStream) - { - stream = outputStream; - } + public StoryDebugExportVisitor(Stream outputStream) + { + stream = outputStream; + } - public void Visit(Story story) + public void Visit(Story story) + { + using (var streamWriter = new StreamWriter(stream)) + using (this.writer = new JsonTextWriter(streamWriter)) { - using (var streamWriter = new StreamWriter(stream)) - using (this.writer = new JsonTextWriter(streamWriter)) - { - writer.IndentChar = '\t'; - writer.Indentation = 1; - writer.Formatting = Newtonsoft.Json.Formatting.Indented; - - writer.WriteStartObject(); - - writer.WritePropertyName("types"); - writer.WriteStartObject(); - foreach (var type in story.Types) - { - writer.WritePropertyName(type.Key.ToString()); - Visit(type.Value); - } - writer.WriteEndObject(); - - writer.WritePropertyName("objects"); - writer.WriteStartObject(); - foreach (var obj in story.DivObjects) - { - writer.WritePropertyName(obj.Name); - Visit(obj); - } - writer.WriteEndObject(); + writer.IndentChar = '\t'; + writer.Indentation = 1; + writer.Formatting = Newtonsoft.Json.Formatting.Indented; - writer.WritePropertyName("functions"); - writer.WriteStartObject(); - Int32 funcId = 1; - foreach (var fun in story.Functions) - { - writer.WritePropertyName(funcId.ToString()); - funcId++; - Visit(fun); - } - writer.WriteEndObject(); + writer.WriteStartObject(); - writer.WritePropertyName("nodes"); - writer.WriteStartObject(); - foreach (var node in story.Nodes) - { - writer.WritePropertyName(node.Key.ToString()); - writer.WriteStartObject(); - VisitNode(node.Value); - writer.WriteEndObject(); - } - writer.WriteEndObject(); + writer.WritePropertyName("types"); + writer.WriteStartObject(); + foreach (var type in story.Types) + { + writer.WritePropertyName(type.Key.ToString()); + Visit(type.Value); + } + writer.WriteEndObject(); - writer.WritePropertyName("adapters"); - writer.WriteStartObject(); - foreach (var adapter in story.Adapters) - { - writer.WritePropertyName(adapter.Key.ToString()); - Visit(adapter.Value); - } - writer.WriteEndObject(); + writer.WritePropertyName("objects"); + writer.WriteStartObject(); + foreach (var obj in story.DivObjects) + { + writer.WritePropertyName(obj.Name); + 
Visit(obj); + } + writer.WriteEndObject(); - writer.WritePropertyName("databases"); - writer.WriteStartObject(); - foreach (var database in story.Databases) - { - writer.WritePropertyName(database.Key.ToString()); - Visit(database.Value); - } - writer.WriteEndObject(); + writer.WritePropertyName("functions"); + writer.WriteStartObject(); + Int32 funcId = 1; + foreach (var fun in story.Functions) + { + writer.WritePropertyName(funcId.ToString()); + funcId++; + Visit(fun); + } + writer.WriteEndObject(); - writer.WritePropertyName("goals"); + writer.WritePropertyName("nodes"); + writer.WriteStartObject(); + foreach (var node in story.Nodes) + { + writer.WritePropertyName(node.Key.ToString()); writer.WriteStartObject(); - foreach (var goal in story.Goals) - { - writer.WritePropertyName(goal.Key.ToString()); - Visit(goal.Value); - } - writer.WriteEndObject(); - + VisitNode(node.Value); writer.WriteEndObject(); } - } + writer.WriteEndObject(); - public void Visit(OsirisType type) - { + writer.WritePropertyName("adapters"); writer.WriteStartObject(); - writer.WritePropertyName("name"); - writer.WriteValue(type.Name); + foreach (var adapter in story.Adapters) + { + writer.WritePropertyName(adapter.Key.ToString()); + Visit(adapter.Value); + } writer.WriteEndObject(); - } - public void Visit(OsirisDivObject obj) - { + writer.WritePropertyName("databases"); writer.WriteStartObject(); - writer.WritePropertyName("name"); - writer.WriteValue(obj.Name); - writer.WritePropertyName("type"); - writer.WriteValue(obj.Type); + foreach (var database in story.Databases) + { + writer.WritePropertyName(database.Key.ToString()); + Visit(database.Value); + } writer.WriteEndObject(); - } - - public void Visit(NodeReference r) - { - if (r.IsNull) - writer.WriteNull(); - else - writer.WriteValue(r.Index); - } - public void Visit(FunctionSignature fun) - { + writer.WritePropertyName("goals"); writer.WriteStartObject(); - writer.WritePropertyName("name"); - writer.WriteValue(fun.Name); - writer.WritePropertyName("out"); - writer.WriteValue(fun.OutParamMask[0]); - writer.WritePropertyName("params"); - Visit(fun.Parameters); + foreach (var goal in story.Goals) + { + writer.WritePropertyName(goal.Key.ToString()); + Visit(goal.Value); + } writer.WriteEndObject(); - } - public void Visit(Function fun) - { - writer.WriteStartObject(); - writer.WritePropertyName("signature"); - Visit(fun.Name); - writer.WritePropertyName("type"); - writer.WriteValue(fun.Type.ToString()); - writer.WritePropertyName("ref"); - Visit(fun.NodeRef); writer.WriteEndObject(); } + } - public void VisitNode(Node node) - { - if (node is RelOpNode) - Visit(node as RelOpNode); - else if (node is RuleNode) - Visit(node as RuleNode); - //else if (node is RelNode) - // Visit(node as RelNode); - else if (node is UserQueryNode) - Visit(node as QueryNode); - else if (node is InternalQueryNode) - Visit(node as QueryNode); - else if (node is DivQueryNode) - Visit(node as QueryNode); - //else if (node is QueryNode) - // Visit(node as QueryNode); - else if (node is AndNode) - Visit(node as JoinNode); - else if (node is NotAndNode) - Visit(node as JoinNode); - //else if (node is JoinNode) - // Visit(node as JoinNode); - //else if (node is TreeNode) - // Visit(node as TreeNode); - else if (node is ProcNode) - Visit(node as DataNode); - else if (node is DatabaseNode) - Visit(node as DataNode); - // else if (node is DataNode) - // Visit(node as DataNode); - else - throw new Exception("Unsupported node type"); - } + public void Visit(OsirisType type) + { + 
writer.WriteStartObject(); + writer.WritePropertyName("name"); + writer.WriteValue(type.Name); + writer.WriteEndObject(); + } - public void Visit(Value val) - { - writer.WritePropertyName("type"); - writer.WriteValue(val.TypeId); - writer.WritePropertyName("value"); - writer.WriteValue(val.ToString()); - } + public void Visit(OsirisDivObject obj) + { + writer.WriteStartObject(); + writer.WritePropertyName("name"); + writer.WriteValue(obj.Name); + writer.WritePropertyName("type"); + writer.WriteValue(obj.Type); + writer.WriteEndObject(); + } - public void Visit(TypedValue val) - { - Visit(val as Value); - writer.WritePropertyName("valid"); - writer.WriteValue(val.IsValid); - writer.WritePropertyName("out"); - writer.WriteValue(val.OutParam); - writer.WritePropertyName("isType"); - writer.WriteValue(val.IsAType); - } + public void Visit(NodeReference r) + { + if (r.IsNull) + writer.WriteNull(); + else + writer.WriteValue(r.Index); + } - public void Visit(Variable var) - { - Visit(var as TypedValue); - writer.WritePropertyName("index"); - writer.WriteValue(var.Index); - writer.WritePropertyName("unused"); - writer.WriteValue(var.Unused); - writer.WritePropertyName("adapted"); - writer.WriteValue(var.Adapted); - writer.WritePropertyName("name"); - writer.WriteValue(var.VariableName); - } + public void Visit(FunctionSignature fun) + { + writer.WriteStartObject(); + writer.WritePropertyName("name"); + writer.WriteValue(fun.Name); + writer.WritePropertyName("out"); + writer.WriteValue(fun.OutParamMask[0]); + writer.WritePropertyName("params"); + Visit(fun.Parameters); + writer.WriteEndObject(); + } - public void VisitVar(Value val) - { - writer.WriteStartObject(); - if (val is Variable) - Visit(val as Variable); - else if (val is TypedValue) - Visit(val as TypedValue); - else - Visit(val); - writer.WriteEndObject(); - } + public void Visit(Function fun) + { + writer.WriteStartObject(); + writer.WritePropertyName("signature"); + Visit(fun.Name); + writer.WritePropertyName("type"); + writer.WriteValue(fun.Type.ToString()); + writer.WritePropertyName("ref"); + Visit(fun.NodeRef); + writer.WriteEndObject(); + } - public void Visit(AdapterReference r) - { - if (r.IsNull) - writer.WriteNull(); - else - writer.WriteValue(r.Index); - } + public void VisitNode(Node node) + { + if (node is RelOpNode) + Visit(node as RelOpNode); + else if (node is RuleNode) + Visit(node as RuleNode); + //else if (node is RelNode) + // Visit(node as RelNode); + else if (node is UserQueryNode) + Visit(node as QueryNode); + else if (node is InternalQueryNode) + Visit(node as QueryNode); + else if (node is DivQueryNode) + Visit(node as QueryNode); + //else if (node is QueryNode) + // Visit(node as QueryNode); + else if (node is AndNode) + Visit(node as JoinNode); + else if (node is NotAndNode) + Visit(node as JoinNode); + //else if (node is JoinNode) + // Visit(node as JoinNode); + //else if (node is TreeNode) + // Visit(node as TreeNode); + else if (node is ProcNode) + Visit(node as DataNode); + else if (node is DatabaseNode) + Visit(node as DataNode); + // else if (node is DataNode) + // Visit(node as DataNode); + else + throw new Exception("Unsupported node type"); + } - public void Visit(DatabaseReference r) - { - if (r.IsNull) - writer.WriteNull(); - else - writer.WriteValue(r.Index); - } + public void Visit(Value val) + { + writer.WritePropertyName("type"); + writer.WriteValue(val.TypeId); + writer.WritePropertyName("value"); + writer.WriteValue(val.ToString()); + } - public void Visit(GoalReference r) - { - if (r.IsNull) 
- writer.WriteNull(); - else - writer.WriteValue(r.Index); - } + public void Visit(TypedValue val) + { + Visit(val as Value); + writer.WritePropertyName("valid"); + writer.WriteValue(val.IsValid); + writer.WritePropertyName("out"); + writer.WriteValue(val.OutParam); + writer.WritePropertyName("isType"); + writer.WriteValue(val.IsAType); + } - public void Visit(NodeEntryItem entry) - { - writer.WriteStartObject(); - writer.WritePropertyName("node"); - Visit(entry.NodeRef); - writer.WritePropertyName("entry"); - writer.WriteValue(entry.EntryPoint); - writer.WritePropertyName("goal"); - Visit(entry.GoalRef); - writer.WriteEndObject(); - } + public void Visit(Variable var) + { + Visit(var as TypedValue); + writer.WritePropertyName("index"); + writer.WriteValue(var.Index); + writer.WritePropertyName("unused"); + writer.WriteValue(var.Unused); + writer.WritePropertyName("adapted"); + writer.WriteValue(var.Adapted); + writer.WritePropertyName("name"); + writer.WriteValue(var.VariableName); + } - public void Visit(RelOpNode node) - { - Visit(node as RelNode); - writer.WritePropertyName("op"); - writer.WriteValue(node.RelOp.ToString()); - writer.WritePropertyName("left"); - VisitVar(node.LeftValue); - writer.WritePropertyName("leftIndex"); - writer.WriteValue(node.LeftValueIndex); - writer.WritePropertyName("right"); - VisitVar(node.RightValue); - writer.WritePropertyName("rightIndex"); - writer.WriteValue(node.RightValueIndex); - } + public void VisitVar(Value val) + { + writer.WriteStartObject(); + if (val is Variable) + Visit(val as Variable); + else if (val is TypedValue) + Visit(val as TypedValue); + else + Visit(val); + writer.WriteEndObject(); + } - public void Visit(RuleNode node) - { - Visit(node as RelNode); - writer.WritePropertyName("calls"); - writer.WriteStartArray(); - foreach (var call in node.Calls) - { - Visit(call); - } - writer.WriteEndArray(); + public void Visit(AdapterReference r) + { + if (r.IsNull) + writer.WriteNull(); + else + writer.WriteValue(r.Index); + } - writer.WritePropertyName("variables"); - writer.WriteStartArray(); - foreach (var v in node.Variables) - { - VisitVar(v); - } - writer.WriteEndArray(); + public void Visit(DatabaseReference r) + { + if (r.IsNull) + writer.WriteNull(); + else + writer.WriteValue(r.Index); + } - writer.WritePropertyName("line"); - writer.WriteValue(node.Line); - writer.WritePropertyName("query"); - writer.WriteValue(node.IsQuery); - } + public void Visit(GoalReference r) + { + if (r.IsNull) + writer.WriteNull(); + else + writer.WriteValue(r.Index); + } - public void Visit(RelNode node) - { - Visit(node as TreeNode); - writer.WritePropertyName("parent"); - Visit(node.ParentRef); - writer.WritePropertyName("adapter"); - Visit(node.AdapterRef); - writer.WritePropertyName("databaseNode"); - Visit(node.RelDatabaseNodeRef); - writer.WritePropertyName("databaseJoin"); - Visit(node.RelJoin); - writer.WritePropertyName("databaseIndirection"); - writer.WriteValue(node.RelDatabaseIndirection); - } + public void Visit(NodeEntryItem entry) + { + writer.WriteStartObject(); + writer.WritePropertyName("node"); + Visit(entry.NodeRef); + writer.WritePropertyName("entry"); + writer.WriteValue(entry.EntryPoint); + writer.WritePropertyName("goal"); + Visit(entry.GoalRef); + writer.WriteEndObject(); + } - public void Visit(TreeNode node) - { - Visit(node as Node); - writer.WritePropertyName("next"); - Visit(node.NextNode); - } + public void Visit(RelOpNode node) + { + Visit(node as RelNode); + writer.WritePropertyName("op"); + 
writer.WriteValue(node.RelOp.ToString()); + writer.WritePropertyName("left"); + VisitVar(node.LeftValue); + writer.WritePropertyName("leftIndex"); + writer.WriteValue(node.LeftValueIndex); + writer.WritePropertyName("right"); + VisitVar(node.RightValue); + writer.WritePropertyName("rightIndex"); + writer.WriteValue(node.RightValueIndex); + } - public void Visit(Node node) + public void Visit(RuleNode node) + { + Visit(node as RelNode); + writer.WritePropertyName("calls"); + writer.WriteStartArray(); + foreach (var call in node.Calls) { - writer.WritePropertyName("type"); - writer.WriteValue(node.TypeName()); - writer.WritePropertyName("name"); - writer.WriteValue(node.Name); - writer.WritePropertyName("numParams"); - writer.WriteValue(node.NumParams); - writer.WritePropertyName("nodeDb"); - Visit(node.DatabaseRef); + Visit(call); } + writer.WriteEndArray(); - public void Visit(QueryNode node) + writer.WritePropertyName("variables"); + writer.WriteStartArray(); + foreach (var v in node.Variables) { - Visit(node as Node); + VisitVar(v); } + writer.WriteEndArray(); - public void Visit(JoinNode node) - { - Visit(node as Node); + writer.WritePropertyName("line"); + writer.WriteValue(node.Line); + writer.WritePropertyName("query"); + writer.WriteValue(node.IsQuery); + } - writer.WritePropertyName("left"); - writer.WriteStartObject(); - writer.WritePropertyName("parent"); - Visit(node.LeftParentRef); - writer.WritePropertyName("adapter"); - Visit(node.LeftAdapterRef); - writer.WritePropertyName("databaseNode"); - Visit(node.LeftDatabaseNodeRef); - writer.WritePropertyName("databaseJoin"); - Visit(node.LeftDatabaseJoin); - writer.WritePropertyName("databaseIndirection"); - writer.WriteValue(node.LeftDatabaseIndirection); - writer.WriteEndObject(); + public void Visit(RelNode node) + { + Visit(node as TreeNode); + writer.WritePropertyName("parent"); + Visit(node.ParentRef); + writer.WritePropertyName("adapter"); + Visit(node.AdapterRef); + writer.WritePropertyName("databaseNode"); + Visit(node.RelDatabaseNodeRef); + writer.WritePropertyName("databaseJoin"); + Visit(node.RelJoin); + writer.WritePropertyName("databaseIndirection"); + writer.WriteValue(node.RelDatabaseIndirection); + } - writer.WritePropertyName("right"); - writer.WriteStartObject(); - writer.WritePropertyName("parent"); - Visit(node.RightParentRef); - writer.WritePropertyName("adapter"); - Visit(node.RightAdapterRef); - writer.WritePropertyName("databaseNode"); - Visit(node.RightDatabaseNodeRef); - writer.WritePropertyName("databaseJoin"); - Visit(node.RightDatabaseJoin); - writer.WritePropertyName("databaseIndirection"); - writer.WriteValue(node.RightDatabaseIndirection); - writer.WriteEndObject(); - } + public void Visit(TreeNode node) + { + Visit(node as Node); + writer.WritePropertyName("next"); + Visit(node.NextNode); + } + + public void Visit(Node node) + { + writer.WritePropertyName("type"); + writer.WriteValue(node.TypeName()); + writer.WritePropertyName("name"); + writer.WriteValue(node.Name); + writer.WritePropertyName("numParams"); + writer.WriteValue(node.NumParams); + writer.WritePropertyName("nodeDb"); + Visit(node.DatabaseRef); + } + + public void Visit(QueryNode node) + { + Visit(node as Node); + } + + public void Visit(JoinNode node) + { + Visit(node as Node); + + writer.WritePropertyName("left"); + writer.WriteStartObject(); + writer.WritePropertyName("parent"); + Visit(node.LeftParentRef); + writer.WritePropertyName("adapter"); + Visit(node.LeftAdapterRef); + writer.WritePropertyName("databaseNode"); + 
Visit(node.LeftDatabaseNodeRef); + writer.WritePropertyName("databaseJoin"); + Visit(node.LeftDatabaseJoin); + writer.WritePropertyName("databaseIndirection"); + writer.WriteValue(node.LeftDatabaseIndirection); + writer.WriteEndObject(); + + writer.WritePropertyName("right"); + writer.WriteStartObject(); + writer.WritePropertyName("parent"); + Visit(node.RightParentRef); + writer.WritePropertyName("adapter"); + Visit(node.RightAdapterRef); + writer.WritePropertyName("databaseNode"); + Visit(node.RightDatabaseNodeRef); + writer.WritePropertyName("databaseJoin"); + Visit(node.RightDatabaseJoin); + writer.WritePropertyName("databaseIndirection"); + writer.WriteValue(node.RightDatabaseIndirection); + writer.WriteEndObject(); + } - public void Visit(DataNode node) + public void Visit(DataNode node) + { + Visit(node as Node); + + writer.WritePropertyName("references"); + writer.WriteStartArray(); + foreach (var r in node.ReferencedBy) { - Visit(node as Node); + Visit(r); + } + writer.WriteEndArray(); + } - writer.WritePropertyName("references"); - writer.WriteStartArray(); - foreach (var r in node.ReferencedBy) - { - Visit(r); - } - writer.WriteEndArray(); + public void Visit(Tuple tuple) + { + writer.WriteStartObject(); + var keys = tuple.Logical.Keys.ToArray(); + for (var i = 0; i < tuple.Logical.Count; i++) + { + writer.WritePropertyName(keys[i].ToString()); + VisitVar(tuple.Logical[keys[i]]); } + writer.WriteEndObject(); + } - public void Visit(Tuple tuple) + public void Visit(Adapter adapter) + { + writer.WriteStartObject(); + writer.WritePropertyName("constants"); + Visit(adapter.Constants); + + writer.WritePropertyName("logical"); + writer.WriteStartArray(); + foreach (var index in adapter.LogicalIndices) { - writer.WriteStartObject(); - var keys = tuple.Logical.Keys.ToArray(); - for (var i = 0; i < tuple.Logical.Count; i++) - { - writer.WritePropertyName(keys[i].ToString()); - VisitVar(tuple.Logical[keys[i]]); - } - writer.WriteEndObject(); + writer.WriteValue(index); } + writer.WriteEndArray(); - public void Visit(Adapter adapter) + writer.WritePropertyName("mappings"); + writer.WriteStartObject(); + foreach (var index in adapter.LogicalToPhysicalMap) { - writer.WriteStartObject(); - writer.WritePropertyName("constants"); - Visit(adapter.Constants); + writer.WritePropertyName(index.Key.ToString()); + writer.WriteValue(index.Value); + } + writer.WriteEndObject(); - writer.WritePropertyName("logical"); - writer.WriteStartArray(); - foreach (var index in adapter.LogicalIndices) + writer.WritePropertyName("output"); + writer.WriteStartArray(); + for (var i = 0; i < adapter.LogicalIndices.Count; i++) + { + var index = adapter.LogicalIndices[i]; + // If a logical index is present, emit a column from the input tuple + if (index != -1) { - writer.WriteValue(index); + writer.WriteValue(String.Format("input[{0}]", index)); } - writer.WriteEndArray(); - - writer.WritePropertyName("mappings"); - writer.WriteStartObject(); - foreach (var index in adapter.LogicalToPhysicalMap) + // Otherwise check if a constant is mapped to the specified logical index + else if (adapter.Constants.Logical.ContainsKey(i)) { - writer.WritePropertyName(index.Key.ToString()); - writer.WriteValue(index.Value); + var value = adapter.Constants.Logical[i]; + VisitVar(value); } - writer.WriteEndObject(); - - writer.WritePropertyName("output"); - writer.WriteStartArray(); - for (var i = 0; i < adapter.LogicalIndices.Count; i++) + // If we haven't found a constant, emit a null variable + else { - var index = 
adapter.LogicalIndices[i]; - // If a logical index is present, emit a column from the input tuple - if (index != -1) - { - writer.WriteValue(String.Format("input[{0}]", index)); - } - // Otherwise check if a constant is mapped to the specified logical index - else if (adapter.Constants.Logical.ContainsKey(i)) - { - var value = adapter.Constants.Logical[i]; - VisitVar(value); - } - // If we haven't found a constant, emit a null variable - else - { - writer.WriteNull(); - } + writer.WriteNull(); } - writer.WriteEndArray(); - - writer.WriteEndObject(); } + writer.WriteEndArray(); - public void Visit(ParameterList args) - { - writer.WriteStartArray(); - foreach (var arg in args.Types) - { - writer.WriteValue(arg); - } - writer.WriteEndArray(); - } + writer.WriteEndObject(); + } - public void Visit(Fact fact) + public void Visit(ParameterList args) + { + writer.WriteStartArray(); + foreach (var arg in args.Types) { - writer.WriteStartArray(); - foreach (var val in fact.Columns) - { - VisitVar(val); - } - writer.WriteEndArray(); + writer.WriteValue(arg); } + writer.WriteEndArray(); + } - public void Visit(FactCollection facts) + public void Visit(Fact fact) + { + writer.WriteStartArray(); + foreach (var val in fact.Columns) { - writer.WriteStartArray(); - foreach (var fact in facts) - { - Visit(fact); - } - writer.WriteEndArray(); + VisitVar(val); } + writer.WriteEndArray(); + } - public void Visit(Database db) + public void Visit(FactCollection facts) + { + writer.WriteStartArray(); + foreach (var fact in facts) { - writer.WriteStartObject(); - writer.WritePropertyName("columns"); - Visit(db.Parameters); - writer.WritePropertyName("facts"); - Visit(db.Facts); - writer.WriteEndObject(); + Visit(fact); } + writer.WriteEndArray(); + } - public void Visit(Call call) - { - writer.WriteStartObject(); - writer.WritePropertyName("negate"); - writer.WriteValue(call.Negate); - writer.WritePropertyName("name"); - writer.WriteValue(call.Name); - if (call.Parameters != null) - { - writer.WritePropertyName("params"); - writer.WriteStartArray(); - foreach (var arg in call.Parameters) - { - VisitVar(arg); - } - writer.WriteEndArray(); - } - writer.WriteEndObject(); - } + public void Visit(Database db) + { + writer.WriteStartObject(); + writer.WritePropertyName("columns"); + Visit(db.Parameters); + writer.WritePropertyName("facts"); + Visit(db.Facts); + writer.WriteEndObject(); + } - public void Visit(List calls) + public void Visit(Call call) + { + writer.WriteStartObject(); + writer.WritePropertyName("negate"); + writer.WriteValue(call.Negate); + writer.WritePropertyName("name"); + writer.WriteValue(call.Name); + if (call.Parameters != null) { + writer.WritePropertyName("params"); writer.WriteStartArray(); - foreach (var call in calls) + foreach (var arg in call.Parameters) { - Visit(call); + VisitVar(arg); } writer.WriteEndArray(); } + writer.WriteEndObject(); + } - public void Visit(Goal goal) + public void Visit(List calls) + { + writer.WriteStartArray(); + foreach (var call in calls) { - writer.WriteStartObject(); - writer.WritePropertyName("name"); - writer.WriteValue(goal.Name); - writer.WritePropertyName("sgc"); - writer.WriteValue(goal.SubGoalCombination); - writer.WritePropertyName("init"); - Visit(goal.InitCalls); - writer.WritePropertyName("exit"); - Visit(goal.ExitCalls); - writer.WriteEndObject(); + Visit(call); } + writer.WriteEndArray(); + } + + public void Visit(Goal goal) + { + writer.WriteStartObject(); + writer.WritePropertyName("name"); + writer.WriteValue(goal.Name); + 
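// --- Illustrative sketch, not part of the patch ---
// The "output" array emitted by the adapter dump above encodes the column mapping rule: for each
// logical column, either copy an input column, substitute an adapter-bound constant, or leave the
// column unbound. The helper below restates that rule on plain collections; the method and
// parameter names are invented for this example and only mirror Adapter.LogicalIndices and
// Adapter.Constants.Logical. (Requires System and System.Collections.Generic.)
static List<string> DescribeAdapterOutput(IList<int> logicalIndices, IDictionary<int, string> constants)
{
    var output = new List<string>();
    for (var i = 0; i < logicalIndices.Count; i++)
    {
        var index = logicalIndices[i];
        if (index != -1)
        {
            // Output column i is copied from column `index` of the input tuple
            output.Add(String.Format("input[{0}]", index));
        }
        else if (constants.TryGetValue(i, out var constant))
        {
            // Output column i is filled from a constant bound by the adapter
            output.Add(constant);
        }
        else
        {
            // Neither an input column nor a constant; the column stays unbound (dumped as null)
            output.Add(null);
        }
    }
    return output;
}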
writer.WritePropertyName("sgc"); + writer.WriteValue(goal.SubGoalCombination); + writer.WritePropertyName("init"); + Visit(goal.InitCalls); + writer.WritePropertyName("exit"); + Visit(goal.ExitCalls); + writer.WriteEndObject(); } } diff --git a/LSLib/LS/Story/Function.cs b/LSLib/LS/Story/Function.cs index c85bc27e..0058aeaf 100644 --- a/LSLib/LS/Story/Function.cs +++ b/LSLib/LS/Story/Function.cs @@ -2,179 +2,178 @@ using System.Collections.Generic; using System.IO; -namespace LSLib.LS.Story +namespace LSLib.LS.Story; + +public class FunctionSignature : OsirisSerializable { - public class FunctionSignature : OsirisSerializable - { - public string Name; - public List OutParamMask; - public ParameterList Parameters; + public string Name; + public List OutParamMask; + public ParameterList Parameters; - public void Read(OsiReader reader) + public void Read(OsiReader reader) + { + Name = reader.ReadString(); + OutParamMask = new List(); + var outParamBytes = reader.ReadUInt32(); + while (outParamBytes-- > 0) { - Name = reader.ReadString(); - OutParamMask = new List(); - var outParamBytes = reader.ReadUInt32(); - while (outParamBytes-- > 0) - { - OutParamMask.Add(reader.ReadByte()); - } - - Parameters = new ParameterList(); - Parameters.Read(reader); + OutParamMask.Add(reader.ReadByte()); } - public void Write(OsiWriter writer) - { - writer.Write(Name); + Parameters = new ParameterList(); + Parameters.Read(reader); + } - writer.Write((UInt32)OutParamMask.Count); - foreach (var b in OutParamMask) - { - writer.Write(b); - } + public void Write(OsiWriter writer) + { + writer.Write(Name); - Parameters.Write(writer); + writer.Write((UInt32)OutParamMask.Count); + foreach (var b in OutParamMask) + { + writer.Write(b); } - public void DebugDump(TextWriter writer, Story story) + Parameters.Write(writer); + } + + public void DebugDump(TextWriter writer, Story story) + { + writer.Write(Name); + writer.Write("("); + for (var i = 0; i < Parameters.Types.Count; i++) { - writer.Write(Name); - writer.Write("("); - for (var i = 0; i < Parameters.Types.Count; i++) - { - var type = story.Types[Parameters.Types[i]]; - var isOutParam = ((OutParamMask[i >> 3] << (i & 7)) & 0x80) == 0x80; - if (isOutParam) writer.Write("out "); - writer.Write(type.Name); - if (i < Parameters.Types.Count - 1) writer.Write(", "); - } - writer.Write(")"); + var type = story.Types[Parameters.Types[i]]; + var isOutParam = ((OutParamMask[i >> 3] << (i & 7)) & 0x80) == 0x80; + if (isOutParam) writer.Write("out "); + writer.Write(type.Name); + if (i < Parameters.Types.Count - 1) writer.Write(", "); } + writer.Write(")"); } +} - public class ParameterList : OsirisSerializable - { - public List Types; +public class ParameterList : OsirisSerializable +{ + public List Types; - public void Read(OsiReader reader) + public void Read(OsiReader reader) + { + Types = new List(); + var count = reader.ReadByte(); + while (count-- > 0) { - Types = new List(); - var count = reader.ReadByte(); - while (count-- > 0) + // BG3 heuristic: Patch 8 doesn't increment the version number but changes type ID format, + // so we need to detect it by checking if a 32-bit type ID would be valid. + if (reader.ShortTypeIds == null) { - // BG3 heuristic: Patch 8 doesn't increment the version number but changes type ID format, - // so we need to detect it by checking if a 32-bit type ID would be valid. 
- if (reader.ShortTypeIds == null) - { - var id = reader.ReadUInt32(); - reader.BaseStream.Position -= 4; - reader.ShortTypeIds = (id > 0xff); - } - - if (reader.ShortTypeIds == true) - { - Types.Add(reader.ReadUInt16()); - } - else - { - Types.Add(reader.ReadUInt32()); - } + var id = reader.ReadUInt32(); + reader.BaseStream.Position -= 4; + reader.ShortTypeIds = (id > 0xff); } - } - public void Write(OsiWriter writer) - { - writer.Write((byte)Types.Count); - foreach (var type in Types) + if (reader.ShortTypeIds == true) + { + Types.Add(reader.ReadUInt16()); + } + else { - if (writer.ShortTypeIds) - { - writer.Write((UInt16)type); - } - else - { - writer.Write(type); - } + Types.Add(reader.ReadUInt32()); } } + } - public void DebugDump(TextWriter writer, Story story) + public void Write(OsiWriter writer) + { + writer.Write((byte)Types.Count); + foreach (var type in Types) { - for (var i = 0; i < Types.Count; i++) + if (writer.ShortTypeIds) { - writer.Write(story.Types[Types[i]].Name); - if (i < Types.Count - 1) writer.Write(", "); + writer.Write((UInt16)type); } + else + { + writer.Write(type); + } + } + } + + public void DebugDump(TextWriter writer, Story story) + { + for (var i = 0; i < Types.Count; i++) + { + writer.Write(story.Types[Types[i]].Name); + if (i < Types.Count - 1) writer.Write(", "); } } +} + +public enum FunctionType +{ + Event = 1, + Query = 2, + Call = 3, + Database = 4, + Proc = 5, + SysQuery = 6, + SysCall = 7, + UserQuery = 8 +} - public enum FunctionType +public class Function : OsirisSerializable +{ + public UInt32 Line; + public UInt32 ConditionReferences; + public UInt32 ActionReferences; + public NodeReference NodeRef; + public FunctionType Type; + public UInt32 Meta1; + public UInt32 Meta2; + public UInt32 Meta3; + public UInt32 Meta4; + public FunctionSignature Name; + + public void Read(OsiReader reader) { - Event = 1, - Query = 2, - Call = 3, - Database = 4, - Proc = 5, - SysQuery = 6, - SysCall = 7, - UserQuery = 8 + Line = reader.ReadUInt32(); + ConditionReferences = reader.ReadUInt32(); + ActionReferences = reader.ReadUInt32(); + NodeRef = reader.ReadNodeRef(); + Type = (FunctionType)reader.ReadByte(); + Meta1 = reader.ReadUInt32(); + Meta2 = reader.ReadUInt32(); + Meta3 = reader.ReadUInt32(); + Meta4 = reader.ReadUInt32(); + Name = new FunctionSignature(); + Name.Read(reader); } - public class Function : OsirisSerializable + public void Write(OsiWriter writer) { - public UInt32 Line; - public UInt32 ConditionReferences; - public UInt32 ActionReferences; - public NodeReference NodeRef; - public FunctionType Type; - public UInt32 Meta1; - public UInt32 Meta2; - public UInt32 Meta3; - public UInt32 Meta4; - public FunctionSignature Name; - - public void Read(OsiReader reader) - { - Line = reader.ReadUInt32(); - ConditionReferences = reader.ReadUInt32(); - ActionReferences = reader.ReadUInt32(); - NodeRef = reader.ReadNodeRef(); - Type = (FunctionType)reader.ReadByte(); - Meta1 = reader.ReadUInt32(); - Meta2 = reader.ReadUInt32(); - Meta3 = reader.ReadUInt32(); - Meta4 = reader.ReadUInt32(); - Name = new FunctionSignature(); - Name.Read(reader); - } + writer.Write(Line); + writer.Write(ConditionReferences); + writer.Write(ActionReferences); + NodeRef.Write(writer); + writer.Write((byte)Type); + writer.Write(Meta1); + writer.Write(Meta2); + writer.Write(Meta3); + writer.Write(Meta4); + Name.Write(writer); + } - public void Write(OsiWriter writer) + public void DebugDump(TextWriter writer, Story story) + { + writer.Write("{0} ", Type.ToString()); + 
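// --- Illustrative sketch, not part of the patch ---
// FunctionSignature.OutParamMask (above) packs one bit per parameter, eight parameters per byte,
// most significant bit first. The expression used in DebugDump,
//     ((OutParamMask[i >> 3] << (i & 7)) & 0x80) == 0x80
// tests exactly that bit; a more conventional spelling of the same test (helper name invented):
static bool IsOutParam(IReadOnlyList<byte> outParamMask, int i)
{
    return (outParamMask[i / 8] & (0x80 >> (i % 8))) != 0;
}
// Example: a mask byte of 0x40 (binary 0100_0000) marks parameter 1 as "out" and all others as "in".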
Name.DebugDump(writer, story); + if (NodeRef.IsValid) { - writer.Write(Line); - writer.Write(ConditionReferences); - writer.Write(ActionReferences); - NodeRef.Write(writer); - writer.Write((byte)Type); - writer.Write(Meta1); - writer.Write(Meta2); - writer.Write(Meta3); - writer.Write(Meta4); - Name.Write(writer); + var node = NodeRef.Resolve(); + writer.Write(" @ {0}({1})", node.Name, node.NumParams); } - public void DebugDump(TextWriter writer, Story story) - { - writer.Write("{0} ", Type.ToString()); - Name.DebugDump(writer, story); - if (NodeRef.IsValid) - { - var node = NodeRef.Resolve(); - writer.Write(" @ {0}({1})", node.Name, node.NumParams); - } - - writer.Write(" CondRefs {0}, ActRefs {1}", ConditionReferences, ActionReferences); - writer.WriteLine(" Meta ({0}, {1}, {2}, {3})", Meta1, Meta2, Meta3, Meta4); - } + writer.Write(" CondRefs {0}, ActRefs {1}", ConditionReferences, ActionReferences); + writer.WriteLine(" Meta ({0}, {1}, {2}, {3})", Meta1, Meta2, Meta3, Meta4); } } diff --git a/LSLib/LS/Story/Goal.cs b/LSLib/LS/Story/Goal.cs index 8a3ea045..a26d9c11 100644 --- a/LSLib/LS/Story/Goal.cs +++ b/LSLib/LS/Story/Goal.cs @@ -2,162 +2,161 @@ using System.Collections.Generic; using System.IO; -namespace LSLib.LS.Story +namespace LSLib.LS.Story; + +public class Goal : OsirisSerializable { - public class Goal : OsirisSerializable + public UInt32 Index; + public string Name; + public byte SubGoalCombination; + public List ParentGoals; + public List SubGoals; + public byte Flags; // 0x02 = Child goal + public List InitCalls; + public List ExitCalls; + public Story Story; + + public Goal(Story story) { - public UInt32 Index; - public string Name; - public byte SubGoalCombination; - public List ParentGoals; - public List SubGoals; - public byte Flags; // 0x02 = Child goal - public List InitCalls; - public List ExitCalls; - public Story Story; - - public Goal(Story story) + Story = story; + } + + public void Read(OsiReader reader) + { + Index = reader.ReadUInt32(); + Name = reader.ReadString(); + SubGoalCombination = reader.ReadByte(); + + ParentGoals = reader.ReadRefList(); + SubGoals = reader.ReadRefList(); + + Flags = reader.ReadByte(); + + if (reader.Ver >= OsiVersion.VerAddInitExitCalls) { - Story = story; + InitCalls = reader.ReadList(); + ExitCalls = reader.ReadList(); } - - public void Read(OsiReader reader) + else { - Index = reader.ReadUInt32(); - Name = reader.ReadString(); - SubGoalCombination = reader.ReadByte(); + InitCalls = new List(); + ExitCalls = new List(); + } + } - ParentGoals = reader.ReadRefList(); - SubGoals = reader.ReadRefList(); + public void Write(OsiWriter writer) + { + writer.Write(Index); + writer.Write(Name); + writer.Write(SubGoalCombination); - Flags = reader.ReadByte(); + writer.WriteList(ParentGoals); + writer.WriteList(SubGoals); - if (reader.Ver >= OsiVersion.VerAddInitExitCalls) - { - InitCalls = reader.ReadList(); - ExitCalls = reader.ReadList(); - } - else - { - InitCalls = new List(); - ExitCalls = new List(); - } - } + writer.Write(Flags); - public void Write(OsiWriter writer) + if (writer.Ver >= OsiVersion.VerAddInitExitCalls) { - writer.Write(Index); - writer.Write(Name); - writer.Write(SubGoalCombination); - - writer.WriteList(ParentGoals); - writer.WriteList(SubGoals); + writer.WriteList(InitCalls); + writer.WriteList(ExitCalls); + } + } - writer.Write(Flags); + public void DebugDump(TextWriter writer, Story story) + { + writer.WriteLine("{0}: SGC {1}, Flags {2}", Name, SubGoalCombination, Flags); - if (writer.Ver >= 
OsiVersion.VerAddInitExitCalls) + if (ParentGoals.Count > 0) + { + writer.Write(" Parent goals: "); + foreach (var goalRef in ParentGoals) { - writer.WriteList(InitCalls); - writer.WriteList(ExitCalls); + var goal = goalRef.Resolve(); + writer.Write("#{0} {1}, ", goal.Index, goal.Name); } + writer.WriteLine(); } - public void DebugDump(TextWriter writer, Story story) + if (SubGoals.Count > 0) { - writer.WriteLine("{0}: SGC {1}, Flags {2}", Name, SubGoalCombination, Flags); - - if (ParentGoals.Count > 0) + writer.Write(" Subgoals: "); + foreach (var goalRef in SubGoals) { - writer.Write(" Parent goals: "); - foreach (var goalRef in ParentGoals) - { - var goal = goalRef.Resolve(); - writer.Write("#{0} {1}, ", goal.Index, goal.Name); - } - writer.WriteLine(); + var goal = goalRef.Resolve(); + writer.Write("#{0} {1}, ", goal.Index, goal.Name); } + writer.WriteLine(); + } - if (SubGoals.Count > 0) + if (InitCalls.Count > 0) + { + writer.WriteLine(" Init Calls: "); + foreach (var call in InitCalls) { - writer.Write(" Subgoals: "); - foreach (var goalRef in SubGoals) - { - var goal = goalRef.Resolve(); - writer.Write("#{0} {1}, ", goal.Index, goal.Name); - } + writer.Write(" "); + call.DebugDump(writer, story); writer.WriteLine(); } + } - if (InitCalls.Count > 0) - { - writer.WriteLine(" Init Calls: "); - foreach (var call in InitCalls) - { - writer.Write(" "); - call.DebugDump(writer, story); - writer.WriteLine(); - } - } - - if (ExitCalls.Count > 0) + if (ExitCalls.Count > 0) + { + writer.WriteLine(" Exit Calls: "); + foreach (var call in ExitCalls) { - writer.WriteLine(" Exit Calls: "); - foreach (var call in ExitCalls) - { - writer.Write(" "); - call.DebugDump(writer, story); - writer.WriteLine(); - } + writer.Write(" "); + call.DebugDump(writer, story); + writer.WriteLine(); } } + } - public void MakeScript(TextWriter writer, Story story) - { - writer.WriteLine("Version 1"); - writer.WriteLine("SubGoalCombiner SGC_AND"); - writer.WriteLine(); - writer.WriteLine("INITSECTION"); + public void MakeScript(TextWriter writer, Story story) + { + writer.WriteLine("Version 1"); + writer.WriteLine("SubGoalCombiner SGC_AND"); + writer.WriteLine(); + writer.WriteLine("INITSECTION"); - var nullTuple = new Tuple(); - foreach (var call in InitCalls) - { - call.MakeScript(writer, story, nullTuple, false); - writer.WriteLine(";"); - } + var nullTuple = new Tuple(); + foreach (var call in InitCalls) + { + call.MakeScript(writer, story, nullTuple, false); + writer.WriteLine(";"); + } - writer.WriteLine(); - writer.WriteLine("KBSECTION"); + writer.WriteLine(); + writer.WriteLine("KBSECTION"); - foreach (var node in story.Nodes) + foreach (var node in story.Nodes) + { + if (node.Value is RuleNode) { - if (node.Value is RuleNode) + var rule = node.Value as RuleNode; + if (rule.DerivedGoalRef != null && rule.DerivedGoalRef.Index == Index) { - var rule = node.Value as RuleNode; - if (rule.DerivedGoalRef != null && rule.DerivedGoalRef.Index == Index) - { - node.Value.MakeScript(writer, story, nullTuple, false); - writer.WriteLine(); - } + node.Value.MakeScript(writer, story, nullTuple, false); + writer.WriteLine(); } } + } - writer.WriteLine(); - writer.WriteLine("EXITSECTION"); + writer.WriteLine(); + writer.WriteLine("EXITSECTION"); - foreach (var call in ExitCalls) - { - call.MakeScript(writer, story, nullTuple, false); - writer.WriteLine(";"); - } + foreach (var call in ExitCalls) + { + call.MakeScript(writer, story, nullTuple, false); + writer.WriteLine(";"); + } - writer.WriteLine("ENDEXITSECTION"); - 
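// --- Illustrative sketch, not part of the patch ---
// Overall shape of the text MakeScript emits for one goal. The section keywords come from the
// code above (the ParentTargetEdge lines are written just below); the fact and rule bodies are
// placeholders, since their exact text depends on Call.MakeScript and Node.MakeScript.
//
//   Version 1
//   SubGoalCombiner SGC_AND
//
//   INITSECTION
//   <one init call per line, each terminated by ";">
//
//   KBSECTION
//   <every RuleNode whose DerivedGoalRef points at this goal>
//
//   EXITSECTION
//   <one exit call per line, each terminated by ";">
//   ENDEXITSECTION
//
//   ParentTargetEdge "<parent goal name>"     (one line per parent goal)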
writer.WriteLine(); + writer.WriteLine("ENDEXITSECTION"); + writer.WriteLine(); - foreach (var goalRef in ParentGoals) - { - var goal = goalRef.Resolve(); - writer.WriteLine("ParentTargetEdge \"{0}\"", goal.Name); - } + foreach (var goalRef in ParentGoals) + { + var goal = goalRef.Resolve(); + writer.WriteLine("ParentTargetEdge \"{0}\"", goal.Name); } } } diff --git a/LSLib/LS/Story/GoalParser/ASTNodes.cs b/LSLib/LS/Story/GoalParser/ASTNodes.cs index 7990b343..5089736d 100644 --- a/LSLib/LS/Story/GoalParser/ASTNodes.cs +++ b/LSLib/LS/Story/GoalParser/ASTNodes.cs @@ -2,183 +2,182 @@ using System; using System.Collections.Generic; -namespace LSLib.LS.Story.GoalParser +namespace LSLib.LS.Story.GoalParser; + +/// +/// Goal node - contains everything from a goal file. +/// +public class ASTGoal +{ + // Facts in the INITSECTION part + public List InitSection; + // List of all production rules (including procs and queries) from the KBSECTION part + public List KBSection; + // Ffacts in the EXITSECTION part + public List ExitSection; + // Names of parent goals (if any) + public List ParentTargetEdges; + // Location of node in source code + public CodeLocation Location; +} + +/// +/// Name of a single parent target edge (i.e. parent goal name). +/// This is discarded during parsing and does not appear in the final AST. +/// +public class ASTParentTargetEdge +{ + // Location of node in source code + public CodeLocation Location; + // Parent goal name + public String Goal; +} + +/// +/// Osiris statement from the INIT or EXIT section. +/// +public class ASTBaseFact +{ + // Location of fact in source code + public CodeLocation Location; +} + +/// +/// Osiris fact statement from the INIT or EXIT section. +/// +public class ASTFact : ASTBaseFact +{ + // Name of database we're inserting into / deleting from + public String Database; + // Fact negation ("DB_Something(1)" vs. "NOT DB_Something(1)"). + public bool Not; + // List of values in the fact tuple + public List Elements; +} + +/// +/// Osiris GoalCompleted statement from the INIT or EXIT section. +/// +public class ASTGoalCompletedFact : ASTBaseFact +{ +} + +/// +/// Describes a production rule in the KB section +/// +public class ASTRule +{ + // Location of rule in source code + public CodeLocation Location; + // Type of rule (if, proc or query) + public RuleType Type; + // Conditions/predicates + public List Conditions; + // Actions to execute on tuples that satisfy the conditions + public List Actions; +} + +/// +/// Production rule condition/predicate. +/// +public class ASTCondition +{ + // Location of condition in source code + public CodeLocation Location; +} + +/// +/// "Function call-like" predicate - a div query, a user query or a database filter. +/// (i.e. "AND SomeFunc(1, 2)" or "AND NOT SomeFunc(1, 2)") +/// +public class ASTFuncCondition : ASTCondition +{ + // Query/Database name + // (We don't know yet whether this is a query or a database - this info will only be + // available during phase2 parsing) + public String Name; + // Condition negation ("AND DB_Something(1)" vs. "AND NOT DB_Something(1)"). + public bool Not; + // List of query parameters / database tuple columns + public List Params; +} + +/// +/// Predicate with a binary operator (i.e. "A >= B", "A == B", ...) 
+/// +public class ASTBinaryCondition : ASTCondition +{ + // Left-hand value + public ASTRValue LValue; + // Operator + public RelOpType Op; + // Right-hand value + public ASTRValue RValue; +} + +public class ASTAction +{ + // Location of action in source code + public CodeLocation Location; +} + +public class ASTGoalCompletedAction : ASTAction +{ +} + +/// +/// Statement in the THEN part of a rule. +/// This is either a builtin PROC call, user PROC call, or a database insert/delete operation. +/// +public class ASTStatement : ASTAction +{ + // Proc/Database name + // (We don't know yet whether this is a PROC or a DB - this info will only be + // available during phase2 parsing) + public String Name; + // Statement negation ("DB_Something(1)" vs. "NOT DB_Something(1)"). + public bool Not; + // List of PROC parameters / database tuple columns + public List Params; +} + +public class ASTRValue +{ + // Location of node in source code + public CodeLocation Location; +} + +/// +/// Constant scalar value. +/// +public class ASTConstantValue : ASTRValue +{ + // Type of value, if specified in the code. + // (e.g. "(INT64)123") + public String TypeName; + // Internal type of the constant + // This is not the same as the Osiris type; e.g. a value of type CHARACTERGUID + // will be stored with a constant type of "Name". It also doesn't differentiate + // between INT and INT64 as we don't know the exact Osiris type without contextual + // type inference, which will happen in later stages. + public IRConstantType Type; + // Value of this constant if the type is Integer. + public Int64 IntegerValue; + // Value of this constant if the type is Float. + public Single FloatValue; + // Value of this constant if the type is String or Name. + public String StringValue; +} + +/// +/// Rule-local variable name. +/// (Any variable that begins with an underscore in the IF or THEN part of a rule) +/// +public class ASTLocalVar : ASTRValue { - /// - /// Goal node - contains everything from a goal file. - /// - public class ASTGoal - { - // Facts in the INITSECTION part - public List InitSection; - // List of all production rules (including procs and queries) from the KBSECTION part - public List KBSection; - // Ffacts in the EXITSECTION part - public List ExitSection; - // Names of parent goals (if any) - public List ParentTargetEdges; - // Location of node in source code - public CodeLocation Location; - } - - /// - /// Name of a single parent target edge (i.e. parent goal name). - /// This is discarded during parsing and does not appear in the final AST. - /// - public class ASTParentTargetEdge - { - // Location of node in source code - public CodeLocation Location; - // Parent goal name - public String Goal; - } - - /// - /// Osiris statement from the INIT or EXIT section. - /// - public class ASTBaseFact - { - // Location of fact in source code - public CodeLocation Location; - } - - /// - /// Osiris fact statement from the INIT or EXIT section. - /// - public class ASTFact : ASTBaseFact - { - // Name of database we're inserting into / deleting from - public String Database; - // Fact negation ("DB_Something(1)" vs. "NOT DB_Something(1)"). - public bool Not; - // List of values in the fact tuple - public List Elements; - } - - /// - /// Osiris GoalCompleted statement from the INIT or EXIT section. 
- /// - public class ASTGoalCompletedFact : ASTBaseFact - { - } - - /// - /// Describes a production rule in the KB section - /// - public class ASTRule - { - // Location of rule in source code - public CodeLocation Location; - // Type of rule (if, proc or query) - public RuleType Type; - // Conditions/predicates - public List Conditions; - // Actions to execute on tuples that satisfy the conditions - public List Actions; - } - - /// - /// Production rule condition/predicate. - /// - public class ASTCondition - { - // Location of condition in source code - public CodeLocation Location; - } - - /// - /// "Function call-like" predicate - a div query, a user query or a database filter. - /// (i.e. "AND SomeFunc(1, 2)" or "AND NOT SomeFunc(1, 2)") - /// - public class ASTFuncCondition : ASTCondition - { - // Query/Database name - // (We don't know yet whether this is a query or a database - this info will only be - // available during phase2 parsing) - public String Name; - // Condition negation ("AND DB_Something(1)" vs. "AND NOT DB_Something(1)"). - public bool Not; - // List of query parameters / database tuple columns - public List Params; - } - - /// - /// Predicate with a binary operator (i.e. "A >= B", "A == B", ...) - /// - public class ASTBinaryCondition : ASTCondition - { - // Left-hand value - public ASTRValue LValue; - // Operator - public RelOpType Op; - // Right-hand value - public ASTRValue RValue; - } - - public class ASTAction - { - // Location of action in source code - public CodeLocation Location; - } - - public class ASTGoalCompletedAction : ASTAction - { - } - - /// - /// Statement in the THEN part of a rule. - /// This is either a builtin PROC call, user PROC call, or a database insert/delete operation. - /// - public class ASTStatement : ASTAction - { - // Proc/Database name - // (We don't know yet whether this is a PROC or a DB - this info will only be - // available during phase2 parsing) - public String Name; - // Statement negation ("DB_Something(1)" vs. "NOT DB_Something(1)"). - public bool Not; - // List of PROC parameters / database tuple columns - public List Params; - } - - public class ASTRValue - { - // Location of node in source code - public CodeLocation Location; - } - - /// - /// Constant scalar value. - /// - public class ASTConstantValue : ASTRValue - { - // Type of value, if specified in the code. - // (e.g. "(INT64)123") - public String TypeName; - // Internal type of the constant - // This is not the same as the Osiris type; e.g. a value of type CHARACTERGUID - // will be stored with a constant type of "Name". It also doesn't differentiate - // between INT and INT64 as we don't know the exact Osiris type without contextual - // type inference, which will happen in later stages. - public IRConstantType Type; - // Value of this constant if the type is Integer. - public Int64 IntegerValue; - // Value of this constant if the type is Float. - public Single FloatValue; - // Value of this constant if the type is String or Name. - public String StringValue; - } - - /// - /// Rule-local variable name. - /// (Any variable that begins with an underscore in the IF or THEN part of a rule) - /// - public class ASTLocalVar : ASTRValue - { - // Type of variable, if specified in the code. - // (e.g. "(ITEMGUID)_Var") - public String Type; - // Name of variable. - public String Name; - } + // Type of variable, if specified in the code. + // (e.g. "(ITEMGUID)_Var") + public String Type; + // Name of variable. 
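// --- Illustrative sketch, not part of the patch ---
// How a trivial KB rule such as
//     IF DB_MyDb(1) AND NOT SomeQuery(_X) THEN DB_Other(_X);
// roughly lands in the AST classes above. In practice GoalParser builds these through its Make*
// helpers; the rule text, names and values here are invented for the example, and ASTRule.Type
// is left unset because the RuleType members are not shown in this patch.
var rule = new ASTRule
{
    Conditions = new List<ASTCondition>
    {
        new ASTFuncCondition
        {
            Name = "DB_MyDb",
            Not = false,
            Params = new List<ASTRValue>
            {
                new ASTConstantValue { Type = IRConstantType.Integer, IntegerValue = 1 }
            }
        },
        new ASTFuncCondition
        {
            Name = "SomeQuery",
            Not = true,
            Params = new List<ASTRValue> { new ASTLocalVar { Name = "_X" } }
        }
    },
    Actions = new List<ASTAction>
    {
        new ASTStatement
        {
            Name = "DB_Other",
            Not = false,
            Params = new List<ASTRValue> { new ASTLocalVar { Name = "_X" } }
        }
    }
};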
+ public String Name; } diff --git a/LSLib/LS/Story/GoalParser/GoalParser.cs b/LSLib/LS/Story/GoalParser/GoalParser.cs index 0c0cfb8d..f9d9a9f4 100644 --- a/LSLib/LS/Story/GoalParser/GoalParser.cs +++ b/LSLib/LS/Story/GoalParser/GoalParser.cs @@ -6,450 +6,449 @@ using QUT.Gppg; using System.Collections.Generic; -namespace LSLib.LS.Story.GoalParser +namespace LSLib.LS.Story.GoalParser; + +/// +/// Parameter list of a statement in the THEN part of a rule. +/// This is discarded during parsing and does not appear in the final AST. +/// +using ASTStatementParamList = List; + +/// +/// List of parent goals. +/// This is discarded during parsing and does not appear in the final AST. +/// +using ASTParentTargetEdgeList = List; + +/// +/// List of facts in an INIT or EXIT section. +/// This is discarded during parsing and does not appear in the final AST. +/// +using ASTFactList = List; + +/// +/// List of scalar values in a fact tuple +/// This is discarded during parsing and does not appear in the final AST. +/// +using ASTFactElementList = List; + +/// +/// List of production rules in the KB section +/// This is discarded during parsing and does not appear in the final AST. +/// +using ASTRuleList = List; + +/// +/// List of conditions/predicates in a production rule +/// This is discarded during parsing and does not appear in the final AST. +/// +using ASTConditionList = List; + +/// +/// Condition query parameter / database tuple column list +/// This is discarded during parsing and does not appear in the final AST. +/// +using ASTConditionParamList = List; + +/// +/// List of actions in the THEN part of a rule +/// This is discarded during parsing and does not appear in the final AST. +/// +using ASTActionList = List; + + +internal class ParserConstants { + public static CultureInfo ParserCulture = new CultureInfo("en-US"); +} + +public class CodeLocation : IMerge +{ + private string fileName; + private int startLine; // start line + private int startColumn; // start column + private int endLine; // end line + private int endColumn; // end column + /// - /// Parameter list of a statement in the THEN part of a rule. - /// This is discarded during parsing and does not appear in the final AST. + /// The line at which the text span starts. /// - using ASTStatementParamList = List; + public string FileName { get { return fileName; } } /// - /// List of parent goals. - /// This is discarded during parsing and does not appear in the final AST. + /// The line at which the text span starts. /// - using ASTParentTargetEdgeList = List; + public int StartLine { get { return startLine; } } /// - /// List of facts in an INIT or EXIT section. - /// This is discarded during parsing and does not appear in the final AST. + /// The column at which the text span starts. /// - using ASTFactList = List; + public int StartColumn { get { return startColumn; } } /// - /// List of scalar values in a fact tuple - /// This is discarded during parsing and does not appear in the final AST. + /// The line on which the text span ends. /// - using ASTFactElementList = List; + public int EndLine { get { return endLine; } } /// - /// List of production rules in the KB section - /// This is discarded during parsing and does not appear in the final AST. + /// The column of the first character + /// beyond the end of the text span. 
/// - using ASTRuleList = List; + public int EndColumn { get { return endColumn; } } /// - /// List of conditions/predicates in a production rule - /// This is discarded during parsing and does not appear in the final AST. + /// Default no-arg constructor. /// - using ASTConditionList = List; + public CodeLocation() { } /// - /// Condition query parameter / database tuple column list - /// This is discarded during parsing and does not appear in the final AST. + /// Constructor for text-span with given start and end. /// - using ASTConditionParamList = List; + /// start line + /// start column + /// end line + /// end column + public CodeLocation(string fl, int sl, int sc, int el, int ec) + { + fileName = fl; + startLine = sl; + startColumn = sc; + endLine = el; + endColumn = ec; + } /// - /// List of actions in the THEN part of a rule - /// This is discarded during parsing and does not appear in the final AST. + /// Create a text location which spans from the + /// start of "this" to the end of the argument "last" /// - using ASTActionList = List; - - - internal class ParserConstants + /// The last location in the result span + /// The merged span + public CodeLocation Merge(CodeLocation last) { - public static CultureInfo ParserCulture = new CultureInfo("en-US"); + return new CodeLocation(this.fileName, this.startLine, this.startColumn, last.endLine, last.endColumn); } +} - public class CodeLocation : IMerge - { - private string fileName; - private int startLine; // start line - private int startColumn; // start column - private int endLine; // end line - private int endColumn; // end column - - /// - /// The line at which the text span starts. - /// - public string FileName { get { return fileName; } } - - /// - /// The line at which the text span starts. - /// - public int StartLine { get { return startLine; } } - - /// - /// The column at which the text span starts. - /// - public int StartColumn { get { return startColumn; } } - - /// - /// The line on which the text span ends. - /// - public int EndLine { get { return endLine; } } - - /// - /// The column of the first character - /// beyond the end of the text span. - /// - public int EndColumn { get { return endColumn; } } - - /// - /// Default no-arg constructor. - /// - public CodeLocation() { } - - /// - /// Constructor for text-span with given start and end. 
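// --- Illustrative usage sketch, not part of the patch ---
// CodeLocation.Merge (above) builds the span of a whole production from the spans of its first
// and last tokens, which is why the class implements GPPG's IMerge interface. The file name and
// positions below are invented sample values.
var first = new CodeLocation("Story_Example.div", 10, 5, 10, 12);   // e.g. the "_Player" token
var last  = new CodeLocation("Story_Example.div", 10, 16, 10, 17);  // e.g. the "1" token
CodeLocation whole = first.Merge(last);
// `whole` now covers line 10, columns 5..17: the start of `first` through the end of `last`.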
- /// - /// start line - /// start column - /// end line - /// end column - public CodeLocation(string fl, int sl, int sc, int el, int ec) - { - fileName = fl; - startLine = sl; - startColumn = sc; - endLine = el; - endColumn = ec; - } +public abstract class GoalScanBase : AbstractScanner +{ + protected String fileName; - /// - /// Create a text location which spans from the - /// start of "this" to the end of the argument "last" - /// - /// The last location in the result span - /// The merged span - public CodeLocation Merge(CodeLocation last) - { - return new CodeLocation(this.fileName, this.startLine, this.startColumn, last.endLine, last.endColumn); - } - } + public override CodeLocation yylloc { get; set; } - public abstract class GoalScanBase : AbstractScanner - { - protected String fileName; + protected virtual bool yywrap() { return true; } - public override CodeLocation yylloc { get; set; } + protected string MakeLiteral(string lit) => lit; - protected virtual bool yywrap() { return true; } + protected string MakeString(string lit) + { + return MakeLiteral(Regex.Unescape(lit.Substring(1, lit.Length - 2))); + } +} - protected string MakeLiteral(string lit) => lit; +public sealed partial class GoalScanner : GoalScanBase +{ + public GoalScanner(String fileName) + { + this.fileName = fileName; + } - protected string MakeString(string lit) - { - return MakeLiteral(Regex.Unescape(lit.Substring(1, lit.Length - 2))); - } + public CodeLocation LastLocation() + { + return new CodeLocation(null, tokLin, tokCol, tokELin, tokECol); } +} - public sealed partial class GoalScanner : GoalScanBase +public partial class GoalParser +{ + public GoalParser(GoalScanner scnr) : base(scnr) { - public GoalScanner(String fileName) - { - this.fileName = fileName; - } + } - public CodeLocation LastLocation() - { - return new CodeLocation(null, tokLin, tokCol, tokELin, tokECol); - } + public ASTGoal GetGoal() + { + return (ASTGoal)CurrentSemanticValue; } - public partial class GoalParser + private ASTGoal MakeGoal(CodeLocation location, object version, object subGoalCombiner, object initSection, + object kbSection, object exitSection, object parentTargetEdges) => new ASTGoal() { - public GoalParser(GoalScanner scnr) : base(scnr) - { - } + // TODO verison, SGC + InitSection = (ASTFactList)initSection, + KBSection = (ASTRuleList)kbSection, + ExitSection = (ASTFactList)exitSection, + ParentTargetEdges = (ASTParentTargetEdgeList)parentTargetEdges, + Location = location + }; - public ASTGoal GetGoal() - { - return (ASTGoal)CurrentSemanticValue; - } + private ASTParentTargetEdgeList MakeParentTargetEdgeList() => new ASTParentTargetEdgeList(); - private ASTGoal MakeGoal(CodeLocation location, object version, object subGoalCombiner, object initSection, - object kbSection, object exitSection, object parentTargetEdges) => new ASTGoal() - { - // TODO verison, SGC - InitSection = (ASTFactList)initSection, - KBSection = (ASTRuleList)kbSection, - ExitSection = (ASTFactList)exitSection, - ParentTargetEdges = (ASTParentTargetEdgeList)parentTargetEdges, - Location = location - }; + private ASTParentTargetEdgeList MakeParentTargetEdgeList(object parentTargetEdgeList, object edge) + { + var edges = (ASTParentTargetEdgeList)parentTargetEdgeList; + edges.Add((ASTParentTargetEdge)edge); + return edges; + } - private ASTParentTargetEdgeList MakeParentTargetEdgeList() => new ASTParentTargetEdgeList(); + private ASTParentTargetEdge MakeParentTargetEdge(CodeLocation location, object goal) => new ASTParentTargetEdge() + { + Location = 
location, + Goal = (string)goal + }; - private ASTParentTargetEdgeList MakeParentTargetEdgeList(object parentTargetEdgeList, object edge) - { - var edges = (ASTParentTargetEdgeList)parentTargetEdgeList; - edges.Add((ASTParentTargetEdge)edge); - return edges; - } + private ASTFactList MakeFactList() => new ASTFactList(); + + private ASTFactList MakeFactList(object factList, object fact) + { + var facts = (ASTFactList)factList; + facts.Add((ASTBaseFact)fact); + return facts; + } - private ASTParentTargetEdge MakeParentTargetEdge(CodeLocation location, object goal) => new ASTParentTargetEdge() - { - Location = location, - Goal = (string)goal - }; + private ASTFact MakeNotFact(CodeLocation location, object fact) + { + var factStmt = (ASTFact)fact; + factStmt.Location = location; + factStmt.Not = true; + return factStmt; + } - private ASTFactList MakeFactList() => new ASTFactList(); - - private ASTFactList MakeFactList(object factList, object fact) - { - var facts = (ASTFactList)factList; - facts.Add((ASTBaseFact)fact); - return facts; - } + private ASTFact MakeFactStatement(CodeLocation location, object database, object elements) => new ASTFact() + { + Location = location, + Database = (string)database, + Not = false, + Elements = (ASTFactElementList)elements + }; - private ASTFact MakeNotFact(CodeLocation location, object fact) - { - var factStmt = (ASTFact)fact; - factStmt.Location = location; - factStmt.Not = true; - return factStmt; - } + private ASTGoalCompletedFact MakeGoalCompletedFact(CodeLocation location) => new ASTGoalCompletedFact + { + Location = location + }; - private ASTFact MakeFactStatement(CodeLocation location, object database, object elements) => new ASTFact() - { - Location = location, - Database = (string)database, - Not = false, - Elements = (ASTFactElementList)elements - }; + private ASTFactElementList MakeFactElementList() => new ASTFactElementList(); - private ASTGoalCompletedFact MakeGoalCompletedFact(CodeLocation location) => new ASTGoalCompletedFact - { - Location = location - }; + private ASTFactElementList MakeFactElementList(object element) + { + var elements = new ASTFactElementList(); + elements.Add((ASTConstantValue)element); + return elements; + } - private ASTFactElementList MakeFactElementList() => new ASTFactElementList(); + private ASTFactElementList MakeFactElementList(object elementList, object element) + { + var elements = (ASTFactElementList)elementList; + elements.Add((ASTConstantValue)element); + return elements; + } - private ASTFactElementList MakeFactElementList(object element) - { - var elements = new ASTFactElementList(); - elements.Add((ASTConstantValue)element); - return elements; - } + private ASTRuleList MakeRuleList() => new ASTRuleList(); - private ASTFactElementList MakeFactElementList(object elementList, object element) - { - var elements = (ASTFactElementList)elementList; - elements.Add((ASTConstantValue)element); - return elements; - } + private ASTRuleList MakeRuleList(object ruleList, object rule) + { + var rules = (ASTRuleList)ruleList; + rules.Add((ASTRule)rule); + return rules; + } - private ASTRuleList MakeRuleList() => new ASTRuleList(); + private ASTRule MakeRule(CodeLocation location, object ruleType, object conditions, object actions) => new ASTRule() + { + Location = location, + Type = (RuleType)ruleType, + Conditions = (ASTConditionList)conditions, + Actions = (ASTActionList)actions + }; - private ASTRuleList MakeRuleList(object ruleList, object rule) - { - var rules = (ASTRuleList)ruleList; - rules.Add((ASTRule)rule); - 
return rules; - } + private RuleType MakeRuleType(RuleType type) => type; - private ASTRule MakeRule(CodeLocation location, object ruleType, object conditions, object actions) => new ASTRule() + private ASTConditionList MakeConditionList() => new ASTConditionList(); + + private ASTConditionList MakeConditionList(object condition) + { + var conditions = new ASTConditionList { - Location = location, - Type = (RuleType)ruleType, - Conditions = (ASTConditionList)conditions, - Actions = (ASTActionList)actions + (ASTCondition)condition }; + return conditions; + } - private RuleType MakeRuleType(RuleType type) => type; - - private ASTConditionList MakeConditionList() => new ASTConditionList(); - - private ASTConditionList MakeConditionList(object condition) - { - var conditions = new ASTConditionList - { - (ASTCondition)condition - }; - return conditions; - } + private ASTConditionList MakeConditionList(object conditionList, object condition) + { + var conditions = (ASTConditionList)conditionList; + conditions.Add((ASTCondition)condition); + return conditions; + } - private ASTConditionList MakeConditionList(object conditionList, object condition) - { - var conditions = (ASTConditionList)conditionList; - conditions.Add((ASTCondition)condition); - return conditions; - } + private ASTFuncCondition MakeFuncCondition(CodeLocation location, object name, object paramList, bool not) => new ASTFuncCondition() + { + Location = location, + Name = (string)name, + Not = not, + Params = (ASTConditionParamList)paramList + }; - private ASTFuncCondition MakeFuncCondition(CodeLocation location, object name, object paramList, bool not) => new ASTFuncCondition() + private ASTFuncCondition MakeObjectFuncCondition(CodeLocation location, object thisValue, object name, object paramList, bool not) + { + var condParams = (ASTConditionParamList)paramList; + condParams.Insert(0, (ASTRValue)thisValue); + return new ASTFuncCondition() { Location = location, Name = (string)name, Not = not, - Params = (ASTConditionParamList)paramList + Params = condParams }; + } - private ASTFuncCondition MakeObjectFuncCondition(CodeLocation location, object thisValue, object name, object paramList, bool not) - { - var condParams = (ASTConditionParamList)paramList; - condParams.Insert(0, (ASTRValue)thisValue); - return new ASTFuncCondition() - { - Location = location, - Name = (string)name, - Not = not, - Params = condParams - }; + private ASTBinaryCondition MakeNegatedBinaryCondition(CodeLocation location, object lvalue, object op, object rvalue) + { + var cond = MakeBinaryCondition(location, lvalue, op, rvalue); + switch (cond.Op) + { + case RelOpType.Less: cond.Op = RelOpType.GreaterOrEqual; break; + case RelOpType.LessOrEqual: cond.Op = RelOpType.Greater; break; + case RelOpType.Greater: cond.Op = RelOpType.LessOrEqual; break; + case RelOpType.GreaterOrEqual: cond.Op = RelOpType.Less; break; + case RelOpType.Equal: cond.Op = RelOpType.NotEqual; break; + case RelOpType.NotEqual: cond.Op = RelOpType.Equal; break; + default: throw new InvalidOperationException("Cannot negate unknown binary operator"); } - private ASTBinaryCondition MakeNegatedBinaryCondition(CodeLocation location, object lvalue, object op, object rvalue) - { - var cond = MakeBinaryCondition(location, lvalue, op, rvalue); - switch (cond.Op) - { - case RelOpType.Less: cond.Op = RelOpType.GreaterOrEqual; break; - case RelOpType.LessOrEqual: cond.Op = RelOpType.Greater; break; - case RelOpType.Greater: cond.Op = RelOpType.LessOrEqual; break; - case RelOpType.GreaterOrEqual: 
cond.Op = RelOpType.Less; break; - case RelOpType.Equal: cond.Op = RelOpType.NotEqual; break; - case RelOpType.NotEqual: cond.Op = RelOpType.Equal; break; - default: throw new InvalidOperationException("Cannot negate unknown binary operator"); - } - - return cond; - } + return cond; + } - private ASTBinaryCondition MakeBinaryCondition(CodeLocation location, object lvalue, object op, object rvalue) => new ASTBinaryCondition() - { - Location = location, - LValue = (ASTRValue)lvalue, - Op = (RelOpType)op, - RValue = (ASTRValue)rvalue - }; + private ASTBinaryCondition MakeBinaryCondition(CodeLocation location, object lvalue, object op, object rvalue) => new ASTBinaryCondition() + { + Location = location, + LValue = (ASTRValue)lvalue, + Op = (RelOpType)op, + RValue = (ASTRValue)rvalue + }; - private ASTConditionParamList MakeConditionParamList() => new ASTConditionParamList(); + private ASTConditionParamList MakeConditionParamList() => new ASTConditionParamList(); - private ASTConditionParamList MakeConditionParamList(object param) + private ASTConditionParamList MakeConditionParamList(object param) + { + var list = new ASTConditionParamList { - var list = new ASTConditionParamList - { - (ASTRValue)param - }; - return list; - } + (ASTRValue)param + }; + return list; + } - private ASTConditionParamList MakeConditionParamList(object list, object param) - { - var conditionParamList = (ASTConditionParamList)list; - conditionParamList.Add((ASTRValue)param); - return conditionParamList; - } + private ASTConditionParamList MakeConditionParamList(object list, object param) + { + var conditionParamList = (ASTConditionParamList)list; + conditionParamList.Add((ASTRValue)param); + return conditionParamList; + } - private RelOpType MakeOperator(RelOpType op) => op; + private RelOpType MakeOperator(RelOpType op) => op; - private ASTActionList MakeActionList() => new ASTActionList(); + private ASTActionList MakeActionList() => new ASTActionList(); - private ASTActionList MakeActionList(object actionList, object action) - { - var actions = (ASTActionList)actionList; - actions.Add((ASTAction)action); - return actions; - } + private ASTActionList MakeActionList(object actionList, object action) + { + var actions = (ASTActionList)actionList; + actions.Add((ASTAction)action); + return actions; + } - private ASTAction MakeGoalCompletedAction(CodeLocation location) => new ASTGoalCompletedAction - { - Location = location - }; + private ASTAction MakeGoalCompletedAction(CodeLocation location) => new ASTGoalCompletedAction + { + Location = location + }; - private ASTStatement MakeActionStatement(CodeLocation location, object name, object paramList, bool not) => new ASTStatement + private ASTStatement MakeActionStatement(CodeLocation location, object name, object paramList, bool not) => new ASTStatement + { + Location = location, + Name = (string)name, + Not = not, + Params = (ASTStatementParamList)paramList + }; + + private ASTStatement MakeActionStatement(CodeLocation location, object thisValue, object name, object paramList, bool not) + { + var stmt = new ASTStatement { Location = location, Name = (string)name, Not = not, Params = (ASTStatementParamList)paramList }; + stmt.Params.Insert(0, (ASTRValue)thisValue); + return stmt; + } - private ASTStatement MakeActionStatement(CodeLocation location, object thisValue, object name, object paramList, bool not) - { - var stmt = new ASTStatement - { - Location = location, - Name = (string)name, - Not = not, - Params = (ASTStatementParamList)paramList - }; - 
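// --- Illustrative sketch, not part of the patch ---
// MakeNegatedBinaryCondition (above) stores a negated comparison by flipping the operator, so a
// negated "<" is kept as ">=", a negated "==" as "!=", and so on; the AST never needs a separate
// negated-comparison node. The same mapping as a C# switch expression (helper name invented):
static RelOpType Negate(RelOpType op) => op switch
{
    RelOpType.Less           => RelOpType.GreaterOrEqual,
    RelOpType.LessOrEqual    => RelOpType.Greater,
    RelOpType.Greater        => RelOpType.LessOrEqual,
    RelOpType.GreaterOrEqual => RelOpType.Less,
    RelOpType.Equal          => RelOpType.NotEqual,
    RelOpType.NotEqual       => RelOpType.Equal,
    _ => throw new InvalidOperationException("Cannot negate unknown binary operator")
};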
stmt.Params.Insert(0, (ASTRValue)thisValue); - return stmt; - } - - private ASTStatementParamList MakeActionParamList() => new ASTStatementParamList(); - - private ASTStatementParamList MakeActionParamList(object param) - { - var list = new ASTStatementParamList - { - (ASTRValue)param - }; - return list; - } - - private ASTStatementParamList MakeActionParamList(object list, object param) - { - var actionParamList = (ASTStatementParamList)list; - actionParamList.Add((ASTRValue)param); - return actionParamList; - } + private ASTStatementParamList MakeActionParamList() => new ASTStatementParamList(); - private ASTLocalVar MakeLocalVar(CodeLocation location, object varName) => new ASTLocalVar() + private ASTStatementParamList MakeActionParamList(object param) + { + var list = new ASTStatementParamList { - Location = location, - Name = (string)varName + (ASTRValue)param }; + return list; + } - private ASTLocalVar MakeLocalVar(CodeLocation location, object typeName, object varName) => new ASTLocalVar() - { - Location = location, - Type = (string)typeName, - Name = (string)varName - }; + private ASTStatementParamList MakeActionParamList(object list, object param) + { + var actionParamList = (ASTStatementParamList)list; + actionParamList.Add((ASTRValue)param); + return actionParamList; + } - private ASTConstantValue MakeTypedConstant(CodeLocation location, object typeName, object constant) - { - var c = (ASTConstantValue)constant; - return new ASTConstantValue() - { - Location = location, - TypeName = (string)typeName, - Type = c.Type, - StringValue = c.StringValue, - FloatValue = c.FloatValue, - IntegerValue = c.IntegerValue, - }; - } + private ASTLocalVar MakeLocalVar(CodeLocation location, object varName) => new ASTLocalVar() + { + Location = location, + Name = (string)varName + }; - private ASTConstantValue MakeConstGuidString(CodeLocation location, object val) => new ASTConstantValue() - { - Location = location, - Type = IRConstantType.Name, - StringValue = (string)val - }; + private ASTLocalVar MakeLocalVar(CodeLocation location, object typeName, object varName) => new ASTLocalVar() + { + Location = location, + Type = (string)typeName, + Name = (string)varName + }; - private ASTConstantValue MakeConstString(CodeLocation location, object val) => new ASTConstantValue() + private ASTConstantValue MakeTypedConstant(CodeLocation location, object typeName, object constant) + { + var c = (ASTConstantValue)constant; + return new ASTConstantValue() { Location = location, - Type = IRConstantType.String, - StringValue = (string)val + TypeName = (string)typeName, + Type = c.Type, + StringValue = c.StringValue, + FloatValue = c.FloatValue, + IntegerValue = c.IntegerValue, }; + } - private ASTConstantValue MakeConstInteger(CodeLocation location, object val) => new ASTConstantValue() - { - Location = location, - Type = IRConstantType.Integer, - IntegerValue = Int64.Parse((string)val, ParserConstants.ParserCulture.NumberFormat) - }; + private ASTConstantValue MakeConstGuidString(CodeLocation location, object val) => new ASTConstantValue() + { + Location = location, + Type = IRConstantType.Name, + StringValue = (string)val + }; - private ASTConstantValue MakeConstFloat(CodeLocation location, object val) => new ASTConstantValue() - { - Location = location, - Type = IRConstantType.Float, - FloatValue = Single.Parse((string)val, ParserConstants.ParserCulture.NumberFormat) - }; - } + private ASTConstantValue MakeConstString(CodeLocation location, object val) => new ASTConstantValue() + { + Location = location, + 
Type = IRConstantType.String, + StringValue = (string)val + }; + + private ASTConstantValue MakeConstInteger(CodeLocation location, object val) => new ASTConstantValue() + { + Location = location, + Type = IRConstantType.Integer, + IntegerValue = Int64.Parse((string)val, ParserConstants.ParserCulture.NumberFormat) + }; + + private ASTConstantValue MakeConstFloat(CodeLocation location, object val) => new ASTConstantValue() + { + Location = location, + Type = IRConstantType.Float, + FloatValue = Single.Parse((string)val, ParserConstants.ParserCulture.NumberFormat) + }; } \ No newline at end of file diff --git a/LSLib/LS/Story/HeaderParser/ASTNodes.cs b/LSLib/LS/Story/HeaderParser/ASTNodes.cs index 3e9c8ad2..4bc27a44 100644 --- a/LSLib/LS/Story/HeaderParser/ASTNodes.cs +++ b/LSLib/LS/Story/HeaderParser/ASTNodes.cs @@ -2,123 +2,122 @@ using System; using System.Collections.Generic; -namespace LSLib.LS.Story.HeaderParser +namespace LSLib.LS.Story.HeaderParser; + +/// +/// Base class for all AST nodes. +/// (This doesn't do anything meaningful, it is needed only to +/// provide the GPPG parser a semantic value base class.) +/// +public class ASTNode { - /// - /// Base class for all AST nodes. - /// (This doesn't do anything meaningful, it is needed only to - /// provide the GPPG parser a semantic value base class.) - /// - public class ASTNode - { - } +} + +/// +/// Declarations node - contains every declaration from the story header file. +/// +public class ASTDeclarations : ASTNode +{ + // Debug options + public List Options = new List(); + // Declared type aliases + public List Aliases = new List(); + // Declared functions + public List Functions = new List(); +} - /// - /// Declarations node - contains every declaration from the story header file. - /// - public class ASTDeclarations : ASTNode - { - // Debug options - public List Options = new List(); - // Declared type aliases - public List Aliases = new List(); - // Declared functions - public List Functions = new List(); - } +/// +/// Function type wrapper node +/// This is discarded during parsing and does not appear in the final AST. +/// +public class ASTFunctionTypeNode : ASTNode +{ + // Type of function (SysQuery, SysCall, Event, etc.) + public Compiler.FunctionType Type; +} - /// - /// Function type wrapper node - /// This is discarded during parsing and does not appear in the final AST. - /// - public class ASTFunctionTypeNode : ASTNode - { - // Type of function (SysQuery, SysCall, Event, etc.) - public Compiler.FunctionType Type; - } +/// +/// Function meta-information +/// This is discarded during parsing and does not appear in the final AST. +/// +public class ASTFunctionMetadata : ASTNode +{ + public UInt32 Meta1; + public UInt32 Meta2; + public UInt32 Meta3; + public UInt32 Meta4; +} - /// - /// Function meta-information - /// This is discarded during parsing and does not appear in the final AST. - /// - public class ASTFunctionMetadata : ASTNode - { - public UInt32 Meta1; - public UInt32 Meta2; - public UInt32 Meta3; - public UInt32 Meta4; - } +/// +/// Describes a built-in function with its name, number and parameters. +/// +public class ASTFunction : ASTNode +{ + // Type of function (SysQuery, SysCall, Event, etc.) + public Compiler.FunctionType Type; + // Name of the function + public String Name; + // Function parameters + public List Params; + // Function metadata for Osiris internal use - mostly unknown. 
+ public UInt32 Meta1; + public UInt32 Meta2; + public UInt32 Meta3; + public UInt32 Meta4; +} - /// - /// Describes a built-in function with its name, number and parameters. - /// - public class ASTFunction : ASTNode - { - // Type of function (SysQuery, SysCall, Event, etc.) - public Compiler.FunctionType Type; - // Name of the function - public String Name; - // Function parameters - public List Params; - // Function metadata for Osiris internal use - mostly unknown. - public UInt32 Meta1; - public UInt32 Meta2; - public UInt32 Meta3; - public UInt32 Meta4; - } +/// +/// List of function parameters +/// This is discarded during parsing and does not appear in the final AST. +/// +public class ASTFunctionParamList : ASTNode +{ + // Function parameters + public List Params = new List(); +} - /// - /// List of function parameters - /// This is discarded during parsing and does not appear in the final AST. - /// - public class ASTFunctionParamList : ASTNode - { - // Function parameters - public List Params = new List(); - } +/// +/// Typed (and optionally direction marked) parameter of a function +/// +public class ASTFunctionParam : ASTNode +{ + // Parameter name + public String Name; + // Parameter type + public String Type; + // Parameter direction (IN/OUT) + // This is only meaningful for Query and SysQuery, for all other types direction is always "IN". + public ParamDirection Direction; +} - /// - /// Typed (and optionally direction marked) parameter of a function - /// - public class ASTFunctionParam : ASTNode - { - // Parameter name - public String Name; - // Parameter type - public String Type; - // Parameter direction (IN/OUT) - // This is only meaningful for Query and SysQuery, for all other types direction is always "IN". - public ParamDirection Direction; - } - - /// - /// Type alias - defines a new type name and type ID, and maps it to an existing base type. - /// - public class ASTAlias : ASTNode - { - // Name of the new type - public String TypeName; - // ID of the new type (must be a new type ID) - public uint TypeId; - // ID of the type this type is mapped to (must be an existing type ID) - public uint AliasId; - } +/// +/// Type alias - defines a new type name and type ID, and maps it to an existing base type. +/// +public class ASTAlias : ASTNode +{ + // Name of the new type + public String TypeName; + // ID of the new type (must be a new type ID) + public uint TypeId; + // ID of the type this type is mapped to (must be an existing type ID) + public uint AliasId; +} - /// - /// Debug/compiler option - /// This is discarded during parsing and does not appear in the final AST. - /// - public class ASTOption : ASTNode - { - // Name of debug option - public String Name; - } +/// +/// Debug/compiler option +/// This is discarded during parsing and does not appear in the final AST. +/// +public class ASTOption : ASTNode +{ + // Name of debug option + public String Name; +} - /// - /// String literal from lexing stage (yytext). - /// This is discarded during parsing and does not appear in the final AST. - /// - public class ASTLiteral : ASTNode - { - public String Literal; - } +/// +/// String literal from lexing stage (yytext). +/// This is discarded during parsing and does not appear in the final AST. 
+/// +public class ASTLiteral : ASTNode +{ + public String Literal; } diff --git a/LSLib/LS/Story/HeaderParser/HeaderParser.cs b/LSLib/LS/Story/HeaderParser/HeaderParser.cs index 4522827d..e93a3cac 100644 --- a/LSLib/LS/Story/HeaderParser/HeaderParser.cs +++ b/LSLib/LS/Story/HeaderParser/HeaderParser.cs @@ -3,126 +3,125 @@ using System; using System.Text.RegularExpressions; -namespace LSLib.LS.Story.HeaderParser +namespace LSLib.LS.Story.HeaderParser; + +public abstract class HeaderScanBase : AbstractScanner { - public abstract class HeaderScanBase : AbstractScanner + protected virtual bool yywrap() { return true; } + + protected ASTLiteral MakeLiteral(string lit) => new ASTLiteral() { - protected virtual bool yywrap() { return true; } + Literal = lit + }; - protected ASTLiteral MakeLiteral(string lit) => new ASTLiteral() - { - Literal = lit - }; + protected ASTLiteral MakeString(string lit) + { + return MakeLiteral(Regex.Unescape(lit.Substring(1, lit.Length - 2))); + } +} - protected ASTLiteral MakeString(string lit) - { - return MakeLiteral(Regex.Unescape(lit.Substring(1, lit.Length - 2))); - } +public partial class HeaderParser +{ + public HeaderParser(HeaderScanner scnr) : base(scnr) + { + } + + public ASTDeclarations GetDeclarations() + { + return CurrentSemanticValue as ASTDeclarations; } + + private ASTDeclarations MakeDeclarationList() => new ASTDeclarations(); - public partial class HeaderParser + private ASTDeclarations MakeDeclarationList(ASTNode declarations, ASTNode declaration) { - public HeaderParser(HeaderScanner scnr) : base(scnr) + var decls = declarations as ASTDeclarations; + if (declaration is ASTOption) { + decls.Options.Add((declaration as ASTOption).Name); } - - public ASTDeclarations GetDeclarations() + else if (declaration is ASTAlias) { - return CurrentSemanticValue as ASTDeclarations; + decls.Aliases.Add(declaration as ASTAlias); } - - private ASTDeclarations MakeDeclarationList() => new ASTDeclarations(); - - private ASTDeclarations MakeDeclarationList(ASTNode declarations, ASTNode declaration) + else if (declaration is ASTFunction) { - var decls = declarations as ASTDeclarations; - if (declaration is ASTOption) - { - decls.Options.Add((declaration as ASTOption).Name); - } - else if (declaration is ASTAlias) - { - decls.Aliases.Add(declaration as ASTAlias); - } - else if (declaration is ASTFunction) - { - decls.Functions.Add(declaration as ASTFunction); - } - else - { - throw new InvalidOperationException("Tried to add unknown node to ASTDeclaration"); - } - return decls; + decls.Functions.Add(declaration as ASTFunction); } - - private ASTFunction MakeFunction(ASTNode type, ASTNode name, ASTNode args, ASTNode metadata) + else { - var meta = metadata as ASTFunctionMetadata; - return new ASTFunction() - { - Type = (type as ASTFunctionTypeNode).Type, - Name = (name as ASTLiteral).Literal, - Params = (args as ASTFunctionParamList).Params, - Meta1 = meta.Meta1, - Meta2 = meta.Meta2, - Meta3 = meta.Meta3, - Meta4 = meta.Meta4 - }; + throw new InvalidOperationException("Tried to add unknown node to ASTDeclaration"); } + return decls; + } - private ASTFunctionTypeNode MakeFunctionType(Compiler.FunctionType type) => new ASTFunctionTypeNode() + private ASTFunction MakeFunction(ASTNode type, ASTNode name, ASTNode args, ASTNode metadata) + { + var meta = metadata as ASTFunctionMetadata; + return new ASTFunction() { - Type = type + Type = (type as ASTFunctionTypeNode).Type, + Name = (name as ASTLiteral).Literal, + Params = (args as ASTFunctionParamList).Params, + Meta1 = 
meta.Meta1, + Meta2 = meta.Meta2, + Meta3 = meta.Meta3, + Meta4 = meta.Meta4 }; + } - private ASTFunctionMetadata MakeFunctionMetadata(ASTNode meta1, ASTNode meta2, ASTNode meta3, ASTNode meta4) => new ASTFunctionMetadata() - { - Meta1 = uint.Parse((meta1 as ASTLiteral).Literal), - Meta2 = uint.Parse((meta2 as ASTLiteral).Literal), - Meta3 = uint.Parse((meta3 as ASTLiteral).Literal), - Meta4 = uint.Parse((meta4 as ASTLiteral).Literal) - }; - - private ASTFunctionParamList MakeFunctionParamList() => new ASTFunctionParamList(); + private ASTFunctionTypeNode MakeFunctionType(Compiler.FunctionType type) => new ASTFunctionTypeNode() + { + Type = type + }; - private ASTFunctionParamList MakeFunctionParamList(ASTNode param) - { - var list = new ASTFunctionParamList(); - list.Params.Add(param as ASTFunctionParam); - return list; - } + private ASTFunctionMetadata MakeFunctionMetadata(ASTNode meta1, ASTNode meta2, ASTNode meta3, ASTNode meta4) => new ASTFunctionMetadata() + { + Meta1 = uint.Parse((meta1 as ASTLiteral).Literal), + Meta2 = uint.Parse((meta2 as ASTLiteral).Literal), + Meta3 = uint.Parse((meta3 as ASTLiteral).Literal), + Meta4 = uint.Parse((meta4 as ASTLiteral).Literal) + }; + + private ASTFunctionParamList MakeFunctionParamList() => new ASTFunctionParamList(); - private ASTFunctionParamList MakeFunctionParamList(ASTNode list, ASTNode param) - { - var paramList = list as ASTFunctionParamList; - paramList.Params.Add(param as ASTFunctionParam); - return paramList; - } + private ASTFunctionParamList MakeFunctionParamList(ASTNode param) + { + var list = new ASTFunctionParamList(); + list.Params.Add(param as ASTFunctionParam); + return list; + } - private ASTFunctionParam MakeParam(ASTNode type, ASTNode name) => new ASTFunctionParam() - { - Name = (name as ASTLiteral).Literal, - Type = (type as ASTLiteral).Literal, - Direction = ParamDirection.In - }; + private ASTFunctionParamList MakeFunctionParamList(ASTNode list, ASTNode param) + { + var paramList = list as ASTFunctionParamList; + paramList.Params.Add(param as ASTFunctionParam); + return paramList; + } - private ASTFunctionParam MakeParam(ParamDirection direction, ASTNode type, ASTNode name) => new ASTFunctionParam() - { - Name = (name as ASTLiteral).Literal, - Type = (type as ASTLiteral).Literal, - Direction = direction - }; + private ASTFunctionParam MakeParam(ASTNode type, ASTNode name) => new ASTFunctionParam() + { + Name = (name as ASTLiteral).Literal, + Type = (type as ASTLiteral).Literal, + Direction = ParamDirection.In + }; - private ASTAlias MakeAlias(ASTNode typeName, ASTNode typeId, ASTNode aliasId) => new ASTAlias() - { - TypeName = (typeName as ASTLiteral).Literal, - TypeId = uint.Parse((typeId as ASTLiteral).Literal), - AliasId = uint.Parse((aliasId as ASTLiteral).Literal) - }; + private ASTFunctionParam MakeParam(ParamDirection direction, ASTNode type, ASTNode name) => new ASTFunctionParam() + { + Name = (name as ASTLiteral).Literal, + Type = (type as ASTLiteral).Literal, + Direction = direction + }; - private ASTOption MakeOption(ASTNode option) => new ASTOption() - { - Name = (option as ASTLiteral).Literal - }; - } + private ASTAlias MakeAlias(ASTNode typeName, ASTNode typeId, ASTNode aliasId) => new ASTAlias() + { + TypeName = (typeName as ASTLiteral).Literal, + TypeId = uint.Parse((typeId as ASTLiteral).Literal), + AliasId = uint.Parse((aliasId as ASTLiteral).Literal) + }; + + private ASTOption MakeOption(ASTNode option) => new ASTOption() + { + Name = (option as ASTLiteral).Literal + }; } \ No newline at end of file 
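
As a quick orientation for the refactored header parser above: the GPPG semantic actions (MakeParam, MakeFunction, MakeDeclarationList) only assemble plain AST objects out of ASTLiteral tokens. The sketch below is not part of the patch; it hand-builds the same shape for a single event declaration so the data flow is easier to follow. It assumes the generic arguments that appear stripped in the quoted diff (e.g. List<ASTFunctionParam>, List<ASTFunction>), the same using directives as the parser sources, and made-up names ("MyEvent", "_Target", "GUIDSTRING"); "Event" is taken from the FunctionType comment in ASTNodes.cs, and the metadata values are placeholders.

    using System.Collections.Generic;

    namespace LSLib.LS.Story.HeaderParser;

    public static class HeaderAstSketch
    {
        public static ASTDeclarations BuildSample()
        {
            // What MakeParam(type, name) yields: an IN parameter with literal name/type.
            var param = new ASTFunctionParam
            {
                Name = "_Target",        // hypothetical parameter name
                Type = "GUIDSTRING",     // hypothetical parameter type
                Direction = ParamDirection.In
            };

            // What MakeFunction(type, name, args, metadata) yields for an event declaration.
            var fn = new ASTFunction
            {
                Type = Compiler.FunctionType.Event,
                Name = "MyEvent",        // hypothetical event name
                Params = new List<ASTFunctionParam> { param },
                Meta1 = 1, Meta2 = 0, Meta3 = 0, Meta4 = 0   // placeholder metadata
            };

            // What MakeDeclarationList(declarations, declaration) does for a function node.
            var decls = new ASTDeclarations();
            decls.Functions.Add(fn);
            return decls;
        }
    }
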
diff --git a/LSLib/LS/Story/Join.cs b/LSLib/LS/Story/Join.cs index 4370542f..10bc7283 100644 --- a/LSLib/LS/Story/Join.cs +++ b/LSLib/LS/Story/Join.cs @@ -1,178 +1,177 @@ using System.IO; -namespace LSLib.LS.Story +namespace LSLib.LS.Story; + +public abstract class JoinNode : TreeNode { - public abstract class JoinNode : TreeNode + public NodeReference LeftParentRef; + public NodeReference RightParentRef; + public AdapterReference LeftAdapterRef; + public AdapterReference RightAdapterRef; + public NodeReference LeftDatabaseNodeRef; + public byte LeftDatabaseIndirection; + public NodeEntryItem LeftDatabaseJoin; + public NodeReference RightDatabaseNodeRef; + public byte RightDatabaseIndirection; + public NodeEntryItem RightDatabaseJoin; + + public override void Read(OsiReader reader) { - public NodeReference LeftParentRef; - public NodeReference RightParentRef; - public AdapterReference LeftAdapterRef; - public AdapterReference RightAdapterRef; - public NodeReference LeftDatabaseNodeRef; - public byte LeftDatabaseIndirection; - public NodeEntryItem LeftDatabaseJoin; - public NodeReference RightDatabaseNodeRef; - public byte RightDatabaseIndirection; - public NodeEntryItem RightDatabaseJoin; - - public override void Read(OsiReader reader) - { - base.Read(reader); - LeftParentRef = reader.ReadNodeRef(); - RightParentRef = reader.ReadNodeRef(); - LeftAdapterRef = reader.ReadAdapterRef(); - RightAdapterRef = reader.ReadAdapterRef(); - - LeftDatabaseNodeRef = reader.ReadNodeRef(); - LeftDatabaseJoin = new NodeEntryItem(); - LeftDatabaseJoin.Read(reader); - LeftDatabaseIndirection = reader.ReadByte(); - - RightDatabaseNodeRef = reader.ReadNodeRef(); - RightDatabaseJoin = new NodeEntryItem(); - RightDatabaseJoin.Read(reader); - RightDatabaseIndirection = reader.ReadByte(); - } + base.Read(reader); + LeftParentRef = reader.ReadNodeRef(); + RightParentRef = reader.ReadNodeRef(); + LeftAdapterRef = reader.ReadAdapterRef(); + RightAdapterRef = reader.ReadAdapterRef(); + + LeftDatabaseNodeRef = reader.ReadNodeRef(); + LeftDatabaseJoin = new NodeEntryItem(); + LeftDatabaseJoin.Read(reader); + LeftDatabaseIndirection = reader.ReadByte(); + + RightDatabaseNodeRef = reader.ReadNodeRef(); + RightDatabaseJoin = new NodeEntryItem(); + RightDatabaseJoin.Read(reader); + RightDatabaseIndirection = reader.ReadByte(); + } - public override void Write(OsiWriter writer) - { - base.Write(writer); - LeftParentRef.Write(writer); - RightParentRef.Write(writer); - LeftAdapterRef.Write(writer); - RightAdapterRef.Write(writer); - - LeftDatabaseNodeRef.Write(writer); - LeftDatabaseJoin.Write(writer); - writer.Write(LeftDatabaseIndirection); - - RightDatabaseNodeRef.Write(writer); - RightDatabaseJoin.Write(writer); - writer.Write(RightDatabaseIndirection); - } + public override void Write(OsiWriter writer) + { + base.Write(writer); + LeftParentRef.Write(writer); + RightParentRef.Write(writer); + LeftAdapterRef.Write(writer); + RightAdapterRef.Write(writer); + + LeftDatabaseNodeRef.Write(writer); + LeftDatabaseJoin.Write(writer); + writer.Write(LeftDatabaseIndirection); + + RightDatabaseNodeRef.Write(writer); + RightDatabaseJoin.Write(writer); + writer.Write(RightDatabaseIndirection); + } - public override void PostLoad(Story story) - { - base.PostLoad(story); + public override void PostLoad(Story story) + { + base.PostLoad(story); - if (LeftAdapterRef.IsValid) + if (LeftAdapterRef.IsValid) + { + var adapter = LeftAdapterRef.Resolve(); + if (adapter.OwnerNode != null) { - var adapter = LeftAdapterRef.Resolve(); - if 
(adapter.OwnerNode != null) - { - throw new InvalidDataException("An adapter cannot be assigned to multiple join/rel nodes!"); - } - - adapter.OwnerNode = this; + throw new InvalidDataException("An adapter cannot be assigned to multiple join/rel nodes!"); } - if (RightAdapterRef.IsValid) - { - var adapter = RightAdapterRef.Resolve(); - if (adapter.OwnerNode != null) - { - throw new InvalidDataException("An adapter cannot be assigned to multiple join/rel nodes!"); - } - - adapter.OwnerNode = this; - } + adapter.OwnerNode = this; } - public override void DebugDump(TextWriter writer, Story story) + if (RightAdapterRef.IsValid) { - base.DebugDump(writer, story); - - writer.Write(" Left:"); - if (LeftParentRef.IsValid) - { - writer.Write(" Parent "); - LeftParentRef.DebugDump(writer, story); - } - - if (LeftAdapterRef.IsValid) + var adapter = RightAdapterRef.Resolve(); + if (adapter.OwnerNode != null) { - writer.Write(" Adapter "); - LeftAdapterRef.DebugDump(writer, story); + throw new InvalidDataException("An adapter cannot be assigned to multiple join/rel nodes!"); } - if (LeftDatabaseNodeRef.IsValid) - { - writer.Write(" DbNode "); - LeftDatabaseNodeRef.DebugDump(writer, story); - writer.Write(" Indirection {0}", LeftDatabaseIndirection); - writer.Write(" Join "); - LeftDatabaseJoin.DebugDump(writer, story); - } - - writer.WriteLine(""); - - writer.Write(" Right:"); - if (RightParentRef.IsValid) - { - writer.Write(" Parent "); - RightParentRef.DebugDump(writer, story); - } - - if (RightAdapterRef.IsValid) - { - writer.Write(" Adapter "); - RightAdapterRef.DebugDump(writer, story); - } - - if (RightDatabaseNodeRef.IsValid) - { - writer.Write(" DbNode "); - RightDatabaseNodeRef.DebugDump(writer, story); - writer.Write(" Indirection {0}", RightDatabaseIndirection); - writer.Write(" Join "); - RightDatabaseJoin.DebugDump(writer, story); - } - - writer.WriteLine(""); + adapter.OwnerNode = this; } } - public class AndNode : JoinNode + public override void DebugDump(TextWriter writer, Story story) { - public override Type NodeType() + base.DebugDump(writer, story); + + writer.Write(" Left:"); + if (LeftParentRef.IsValid) { - return Type.And; + writer.Write(" Parent "); + LeftParentRef.DebugDump(writer, story); } - public override string TypeName() + if (LeftAdapterRef.IsValid) { - return "And"; + writer.Write(" Adapter "); + LeftAdapterRef.DebugDump(writer, story); } - public override void MakeScript(TextWriter writer, Story story, Tuple tuple, bool printTypes) + if (LeftDatabaseNodeRef.IsValid) { - var leftTuple = LeftAdapterRef.Resolve().Adapt(tuple); - LeftParentRef.Resolve().MakeScript(writer, story, leftTuple, printTypes); - writer.WriteLine("AND"); - var rightTuple = RightAdapterRef.Resolve().Adapt(tuple); - RightParentRef.Resolve().MakeScript(writer, story, rightTuple, false); + writer.Write(" DbNode "); + LeftDatabaseNodeRef.DebugDump(writer, story); + writer.Write(" Indirection {0}", LeftDatabaseIndirection); + writer.Write(" Join "); + LeftDatabaseJoin.DebugDump(writer, story); } - } - public class NotAndNode : JoinNode - { - public override Type NodeType() + writer.WriteLine(""); + + writer.Write(" Right:"); + if (RightParentRef.IsValid) { - return Type.NotAnd; + writer.Write(" Parent "); + RightParentRef.DebugDump(writer, story); } - public override string TypeName() + if (RightAdapterRef.IsValid) { - return "Not And"; + writer.Write(" Adapter "); + RightAdapterRef.DebugDump(writer, story); } - public override void MakeScript(TextWriter writer, Story story, Tuple tuple, bool printTypes) 
+ if (RightDatabaseNodeRef.IsValid) { - var leftTuple = LeftAdapterRef.Resolve().Adapt(tuple); - LeftParentRef.Resolve().MakeScript(writer, story, leftTuple, printTypes); - writer.WriteLine("AND NOT"); - var rightTuple = RightAdapterRef.Resolve().Adapt(tuple); - RightParentRef.Resolve().MakeScript(writer, story, rightTuple, false); + writer.Write(" DbNode "); + RightDatabaseNodeRef.DebugDump(writer, story); + writer.Write(" Indirection {0}", RightDatabaseIndirection); + writer.Write(" Join "); + RightDatabaseJoin.DebugDump(writer, story); } + + writer.WriteLine(""); + } +} + +public class AndNode : JoinNode +{ + public override Type NodeType() + { + return Type.And; + } + + public override string TypeName() + { + return "And"; + } + + public override void MakeScript(TextWriter writer, Story story, Tuple tuple, bool printTypes) + { + var leftTuple = LeftAdapterRef.Resolve().Adapt(tuple); + LeftParentRef.Resolve().MakeScript(writer, story, leftTuple, printTypes); + writer.WriteLine("AND"); + var rightTuple = RightAdapterRef.Resolve().Adapt(tuple); + RightParentRef.Resolve().MakeScript(writer, story, rightTuple, false); + } +} + +public class NotAndNode : JoinNode +{ + public override Type NodeType() + { + return Type.NotAnd; + } + + public override string TypeName() + { + return "Not And"; + } + + public override void MakeScript(TextWriter writer, Story story, Tuple tuple, bool printTypes) + { + var leftTuple = LeftAdapterRef.Resolve().Adapt(tuple); + LeftParentRef.Resolve().MakeScript(writer, story, leftTuple, printTypes); + writer.WriteLine("AND NOT"); + var rightTuple = RightAdapterRef.Resolve().Adapt(tuple); + RightParentRef.Resolve().MakeScript(writer, story, rightTuple, false); } } diff --git a/LSLib/LS/Story/Node.cs b/LSLib/LS/Story/Node.cs index 4823a4bf..358bfbc1 100644 --- a/LSLib/LS/Story/Node.cs +++ b/LSLib/LS/Story/Node.cs @@ -1,131 +1,130 @@ using System; using System.IO; -namespace LSLib.LS.Story +namespace LSLib.LS.Story; + +public abstract class Node : OsirisSerializable { - public abstract class Node : OsirisSerializable + public enum Type : byte { - public enum Type : byte - { - Database = 1, - Proc = 2, - DivQuery = 3, - And = 4, - NotAnd = 5, - RelOp = 6, - Rule = 7, - InternalQuery = 8, - UserQuery = 9 - }; - - public UInt32 Index; - public DatabaseReference DatabaseRef; - public string Name; - public byte NumParams; - - public virtual void Read(OsiReader reader) + Database = 1, + Proc = 2, + DivQuery = 3, + And = 4, + NotAnd = 5, + RelOp = 6, + Rule = 7, + InternalQuery = 8, + UserQuery = 9 + }; + + public UInt32 Index; + public DatabaseReference DatabaseRef; + public string Name; + public byte NumParams; + + public virtual void Read(OsiReader reader) + { + DatabaseRef = reader.ReadDatabaseRef(); + Name = reader.ReadString(); + if (Name.Length > 0) { - DatabaseRef = reader.ReadDatabaseRef(); - Name = reader.ReadString(); - if (Name.Length > 0) - { - NumParams = reader.ReadByte(); - } + NumParams = reader.ReadByte(); } + } - public virtual void Write(OsiWriter writer) - { - DatabaseRef.Write(writer); - writer.Write(Name); - if (Name.Length > 0) - writer.Write(NumParams); - } + public virtual void Write(OsiWriter writer) + { + DatabaseRef.Write(writer); + writer.Write(Name); + if (Name.Length > 0) + writer.Write(NumParams); + } - public abstract Type NodeType(); + public abstract Type NodeType(); - public abstract string TypeName(); + public abstract string TypeName(); - public abstract void MakeScript(TextWriter writer, Story story, Tuple tuple, bool printTypes = 
false); + public abstract void MakeScript(TextWriter writer, Story story, Tuple tuple, bool printTypes = false); - public virtual void PostLoad(Story story) + public virtual void PostLoad(Story story) + { + if (DatabaseRef.IsValid) { - if (DatabaseRef.IsValid) + var database = DatabaseRef.Resolve(); + if (database.OwnerNode != null) { - var database = DatabaseRef.Resolve(); - if (database.OwnerNode != null) - { - throw new InvalidDataException("A database cannot be assigned to multiple database nodes!"); - } - - database.OwnerNode = this; + throw new InvalidDataException("A database cannot be assigned to multiple database nodes!"); } - } - public virtual void PreSave(Story story) - { + database.OwnerNode = this; } + } + + public virtual void PreSave(Story story) + { + } - public virtual void PostSave(Story story) + public virtual void PostSave(Story story) + { + } + + public virtual void DebugDump(TextWriter writer, Story story) + { + if (Name.Length > 0) { + writer.Write("{0}({1}): ", Name, NumParams); } - public virtual void DebugDump(TextWriter writer, Story story) + writer.Write("<{0}>", TypeName()); + if (DatabaseRef.IsValid) { - if (Name.Length > 0) - { - writer.Write("{0}({1}): ", Name, NumParams); - } - - writer.Write("<{0}>", TypeName()); - if (DatabaseRef.IsValid) - { - writer.Write(", Database "); - DatabaseRef.DebugDump(writer, story); - } - - writer.WriteLine(); + writer.Write(", Database "); + DatabaseRef.DebugDump(writer, story); } + + writer.WriteLine(); } +} + +public abstract class TreeNode : Node +{ + public NodeEntryItem NextNode; - public abstract class TreeNode : Node + public override void Read(OsiReader reader) { - public NodeEntryItem NextNode; + base.Read(reader); + NextNode = new NodeEntryItem(); + NextNode.Read(reader); + } - public override void Read(OsiReader reader) - { - base.Read(reader); - NextNode = new NodeEntryItem(); - NextNode.Read(reader); - } + public override void Write(OsiWriter writer) + { + base.Write(writer); + NextNode.Write(writer); + } - public override void Write(OsiWriter writer) - { - base.Write(writer); - NextNode.Write(writer); - } + public override void PostLoad(Story story) + { + base.PostLoad(story); - public override void PostLoad(Story story) + if (NextNode.NodeRef.IsValid) { - base.PostLoad(story); - - if (NextNode.NodeRef.IsValid) + var nextNode = NextNode.NodeRef.Resolve(); + if (nextNode is RuleNode) { - var nextNode = NextNode.NodeRef.Resolve(); - if (nextNode is RuleNode) - { - (nextNode as RuleNode).DerivedGoalRef = new GoalReference(story, NextNode.GoalRef.Index); - } + (nextNode as RuleNode).DerivedGoalRef = new GoalReference(story, NextNode.GoalRef.Index); } } + } - public override void DebugDump(TextWriter writer, Story story) - { - base.DebugDump(writer, story); + public override void DebugDump(TextWriter writer, Story story) + { + base.DebugDump(writer, story); - writer.Write(" Next: "); - NextNode.DebugDump(writer, story); - writer.WriteLine(""); - } + writer.Write(" Next: "); + NextNode.DebugDump(writer, story); + writer.WriteLine(""); } } diff --git a/LSLib/LS/Story/Proc.cs b/LSLib/LS/Story/Proc.cs index 1ef3f0c3..e2f780d3 100644 --- a/LSLib/LS/Story/Proc.cs +++ b/LSLib/LS/Story/Proc.cs @@ -1,24 +1,23 @@ using System.IO; -namespace LSLib.LS.Story +namespace LSLib.LS.Story; + +public class ProcNode : DataNode { - public class ProcNode : DataNode + public override Type NodeType() { - public override Type NodeType() - { - return Type.Proc; - } + return Type.Proc; + } - public override string TypeName() - { - return 
"Proc"; - } + public override string TypeName() + { + return "Proc"; + } - public override void MakeScript(TextWriter writer, Story story, Tuple tuple, bool printTypes) - { - writer.Write("{0}(", Name); - tuple.MakeScript(writer, story, true); - writer.WriteLine(")"); - } + public override void MakeScript(TextWriter writer, Story story, Tuple tuple, bool printTypes) + { + writer.Write("{0}(", Name); + tuple.MakeScript(writer, story, true); + writer.WriteLine(")"); } } diff --git a/LSLib/LS/Story/Query.cs b/LSLib/LS/Story/Query.cs index 8ebf4849..0286088f 100644 --- a/LSLib/LS/Story/Query.cs +++ b/LSLib/LS/Story/Query.cs @@ -1,53 +1,52 @@ using System.IO; -namespace LSLib.LS.Story +namespace LSLib.LS.Story; + +public abstract class QueryNode : Node +{ + public override void MakeScript(TextWriter writer, Story story, Tuple tuple, bool printTypes) + { + writer.Write("{0}(", Name); + tuple.MakeScript(writer, story, printTypes); + writer.WriteLine(")"); + } +} + +public class DivQueryNode : QueryNode +{ + public override Type NodeType() + { + return Type.DivQuery; + } + + public override string TypeName() + { + return "Div Query"; + } +} + +public class InternalQueryNode : QueryNode { - public abstract class QueryNode : Node + public override Type NodeType() { - public override void MakeScript(TextWriter writer, Story story, Tuple tuple, bool printTypes) - { - writer.Write("{0}(", Name); - tuple.MakeScript(writer, story, printTypes); - writer.WriteLine(")"); - } + return Type.InternalQuery; } - public class DivQueryNode : QueryNode + public override string TypeName() { - public override Type NodeType() - { - return Type.DivQuery; - } - - public override string TypeName() - { - return "Div Query"; - } + return "Internal Query"; } +} - public class InternalQueryNode : QueryNode +public class UserQueryNode : QueryNode +{ + public override Type NodeType() { - public override Type NodeType() - { - return Type.InternalQuery; - } - - public override string TypeName() - { - return "Internal Query"; - } + return Type.UserQuery; } - public class UserQueryNode : QueryNode + public override string TypeName() { - public override Type NodeType() - { - return Type.UserQuery; - } - - public override string TypeName() - { - return "User Query"; - } + return "User Query"; } } diff --git a/LSLib/LS/Story/Reference.cs b/LSLib/LS/Story/Reference.cs index a9b14a77..29a1f8ff 100644 --- a/LSLib/LS/Story/Reference.cs +++ b/LSLib/LS/Story/Reference.cs @@ -1,216 +1,215 @@ using System; using System.IO; -namespace LSLib.LS.Story +namespace LSLib.LS.Story; + +public abstract class OsiReference : OsirisSerializable { - public abstract class OsiReference : OsirisSerializable + public const UInt32 NullReference = 0; + // TODO: hide! + public UInt32 Index = NullReference; + protected Story Story; + + public bool IsNull { - public const UInt32 NullReference = 0; - // TODO: hide! 
- public UInt32 Index = NullReference; - protected Story Story; + get { return Index == NullReference; } + } - public bool IsNull - { - get { return Index == NullReference; } - } + public bool IsValid + { + get { return Index != NullReference; } + } - public bool IsValid - { - get { return Index != NullReference; } - } + public OsiReference() + { + } - public OsiReference() - { - } + public OsiReference(Story story, UInt32 reference) + { + Story = story; + Index = reference; + } - public OsiReference(Story story, UInt32 reference) - { + public void BindStory(Story story) + { + if (Story == null) Story = story; - Index = reference; - } + else + throw new InvalidOperationException("Reference already bound to a story!"); + } - public void BindStory(Story story) - { - if (Story == null) - Story = story; - else - throw new InvalidOperationException("Reference already bound to a story!"); - } + public void Read(OsiReader reader) + { + Index = reader.ReadUInt32(); + } - public void Read(OsiReader reader) - { - Index = reader.ReadUInt32(); - } + public void Write(OsiWriter writer) + { + writer.Write(Index); + } - public void Write(OsiWriter writer) - { - writer.Write(Index); - } + abstract public T Resolve(); - abstract public T Resolve(); + abstract public void DebugDump(TextWriter writer, Story story); +} - abstract public void DebugDump(TextWriter writer, Story story); +public class NodeReference : OsiReference +{ + public NodeReference() + : base() + { } - public class NodeReference : OsiReference + public NodeReference(Story story, UInt32 reference) + : base(story, reference) { - public NodeReference() - : base() - { - } + } - public NodeReference(Story story, UInt32 reference) - : base(story, reference) - { - } + public NodeReference(Story story, Node reference) + : base(story, reference == null ? NullReference : reference.Index) + { + } - public NodeReference(Story story, Node reference) - : base(story, reference == null ? 
NullReference : reference.Index) - { - } + public override Node Resolve() + { + if (Index == NullReference) + return null; + else + return Story.Nodes[Index]; + } - public override Node Resolve() + public override void DebugDump(TextWriter writer, Story story) + { + if (!IsValid) { - if (Index == NullReference) - return null; - else - return Story.Nodes[Index]; + writer.Write("(None)"); } - - public override void DebugDump(TextWriter writer, Story story) + else { - if (!IsValid) + var node = Resolve(); + if (node.Name.Length > 0) { - writer.Write("(None)"); + writer.Write("#{0} <{1}({2}) {3}>", Index, node.Name, node.NumParams, node.TypeName()); } else { - var node = Resolve(); - if (node.Name.Length > 0) - { - writer.Write("#{0} <{1}({2}) {3}>", Index, node.Name, node.NumParams, node.TypeName()); - } - else - { - writer.Write("#{0} <{1}>", Index, node.TypeName()); - } + writer.Write("#{0} <{1}>", Index, node.TypeName()); } } } +} - public class AdapterReference : OsiReference +public class AdapterReference : OsiReference +{ + public AdapterReference() + : base() { - public AdapterReference() - : base() - { - } + } - public AdapterReference(Story story, UInt32 reference) - : base(story, reference) - { - } + public AdapterReference(Story story, UInt32 reference) + : base(story, reference) + { + } - public AdapterReference(Story story, Adapter reference) - : base(story, reference.Index) - { - } + public AdapterReference(Story story, Adapter reference) + : base(story, reference.Index) + { + } - public override Adapter Resolve() + public override Adapter Resolve() + { + if (Index == NullReference) + return null; + else + return Story.Adapters[Index]; + } + + public override void DebugDump(TextWriter writer, Story story) + { + if (!IsValid) { - if (Index == NullReference) - return null; - else - return Story.Adapters[Index]; + writer.Write("(None)"); } - - public override void DebugDump(TextWriter writer, Story story) + else { - if (!IsValid) - { - writer.Write("(None)"); - } - else - { - writer.Write("#{0}", Index); - } + writer.Write("#{0}", Index); } } +} - public class DatabaseReference : OsiReference +public class DatabaseReference : OsiReference +{ + public DatabaseReference() + : base() { - public DatabaseReference() - : base() - { - } + } - public DatabaseReference(Story story, UInt32 reference) - : base(story, reference) - { - } + public DatabaseReference(Story story, UInt32 reference) + : base(story, reference) + { + } - public DatabaseReference(Story story, Database reference) - : base(story, reference.Index) - { - } + public DatabaseReference(Story story, Database reference) + : base(story, reference.Index) + { + } + + public override Database Resolve() + { + if (Index == NullReference) + return null; + else + return Story.Databases[Index]; + } - public override Database Resolve() + public override void DebugDump(TextWriter writer, Story story) + { + if (!IsValid) { - if (Index == NullReference) - return null; - else - return Story.Databases[Index]; + writer.Write("(None)"); } - - public override void DebugDump(TextWriter writer, Story story) + else { - if (!IsValid) - { - writer.Write("(None)"); - } - else - { - writer.Write("#{0}", Index); - } + writer.Write("#{0}", Index); } } +} - public class GoalReference : OsiReference +public class GoalReference : OsiReference +{ + public GoalReference() + : base() { - public GoalReference() - : base() - { - } + } - public GoalReference(Story story, UInt32 reference) - : base(story, reference) - { - } + public GoalReference(Story story, 
UInt32 reference) + : base(story, reference) + { + } - public GoalReference(Story story, Goal reference) - : base(story, reference.Index) - { - } + public GoalReference(Story story, Goal reference) + : base(story, reference.Index) + { + } - public override Goal Resolve() + public override Goal Resolve() + { + if (Index == NullReference) + return null; + else + return Story.Goals[Index]; + } + + public override void DebugDump(TextWriter writer, Story story) + { + if (!IsValid) { - if (Index == NullReference) - return null; - else - return Story.Goals[Index]; + writer.Write("(None)"); } - - public override void DebugDump(TextWriter writer, Story story) + else { - if (!IsValid) - { - writer.Write("(None)"); - } - else - { - var goal = Resolve(); - writer.Write("#{0} <{1}>", Index, goal.Name); - } + var goal = Resolve(); + writer.Write("#{0} <{1}>", Index, goal.Name); } } } diff --git a/LSLib/LS/Story/Rel.cs b/LSLib/LS/Story/Rel.cs index 726b0c3b..f2e7efc2 100644 --- a/LSLib/LS/Story/Rel.cs +++ b/LSLib/LS/Story/Rel.cs @@ -1,81 +1,80 @@ using System.IO; -namespace LSLib.LS.Story +namespace LSLib.LS.Story; + +public abstract class RelNode : TreeNode { - public abstract class RelNode : TreeNode + public NodeReference ParentRef; + public AdapterReference AdapterRef; + public NodeReference RelDatabaseNodeRef; + public NodeEntryItem RelJoin; + public byte RelDatabaseIndirection; + + public override void Read(OsiReader reader) { - public NodeReference ParentRef; - public AdapterReference AdapterRef; - public NodeReference RelDatabaseNodeRef; - public NodeEntryItem RelJoin; - public byte RelDatabaseIndirection; + base.Read(reader); + ParentRef = reader.ReadNodeRef(); + AdapterRef = reader.ReadAdapterRef(); - public override void Read(OsiReader reader) - { - base.Read(reader); - ParentRef = reader.ReadNodeRef(); - AdapterRef = reader.ReadAdapterRef(); + RelDatabaseNodeRef = reader.ReadNodeRef(); + RelJoin = new NodeEntryItem(); + RelJoin.Read(reader); + RelDatabaseIndirection = reader.ReadByte(); + } - RelDatabaseNodeRef = reader.ReadNodeRef(); - RelJoin = new NodeEntryItem(); - RelJoin.Read(reader); - RelDatabaseIndirection = reader.ReadByte(); - } + public override void Write(OsiWriter writer) + { + base.Write(writer); + ParentRef.Write(writer); + AdapterRef.Write(writer); - public override void Write(OsiWriter writer) - { - base.Write(writer); - ParentRef.Write(writer); - AdapterRef.Write(writer); + RelDatabaseNodeRef.Write(writer); + RelJoin.Write(writer); + writer.Write(RelDatabaseIndirection); + } - RelDatabaseNodeRef.Write(writer); - RelJoin.Write(writer); - writer.Write(RelDatabaseIndirection); - } + public override void PostLoad(Story story) + { + base.PostLoad(story); - public override void PostLoad(Story story) + if (AdapterRef.IsValid) { - base.PostLoad(story); - - if (AdapterRef.IsValid) + var adapter = AdapterRef.Resolve(); + if (adapter.OwnerNode != null) { - var adapter = AdapterRef.Resolve(); - if (adapter.OwnerNode != null) - { - throw new InvalidDataException("An adapter cannot be assigned to multiple join/rel nodes!"); - } - - adapter.OwnerNode = this; + throw new InvalidDataException("An adapter cannot be assigned to multiple join/rel nodes!"); } - } - public override void DebugDump(TextWriter writer, Story story) - { - base.DebugDump(writer, story); + adapter.OwnerNode = this; + } + } - writer.Write(" "); - if (ParentRef.IsValid) - { - writer.Write(" Parent "); - ParentRef.DebugDump(writer, story); - } + public override void DebugDump(TextWriter writer, Story story) + { + 
base.DebugDump(writer, story); - if (AdapterRef.IsValid) - { - writer.Write(" Adapter "); - AdapterRef.DebugDump(writer, story); - } + writer.Write(" "); + if (ParentRef.IsValid) + { + writer.Write(" Parent "); + ParentRef.DebugDump(writer, story); + } - if (RelDatabaseNodeRef.IsValid) - { - writer.Write(" DbNode "); - RelDatabaseNodeRef.DebugDump(writer, story); - writer.Write(" Indirection {0}", RelDatabaseIndirection); - writer.Write(" Join "); - RelJoin.DebugDump(writer, story); - } + if (AdapterRef.IsValid) + { + writer.Write(" Adapter "); + AdapterRef.DebugDump(writer, story); + } - writer.WriteLine(""); + if (RelDatabaseNodeRef.IsValid) + { + writer.Write(" DbNode "); + RelDatabaseNodeRef.DebugDump(writer, story); + writer.Write(" Indirection {0}", RelDatabaseIndirection); + writer.Write(" Join "); + RelJoin.DebugDump(writer, story); } + + writer.WriteLine(""); } } diff --git a/LSLib/LS/Story/RelOp.cs b/LSLib/LS/Story/RelOp.cs index 334a0180..56044555 100644 --- a/LSLib/LS/Story/RelOp.cs +++ b/LSLib/LS/Story/RelOp.cs @@ -1,107 +1,106 @@ using System; using System.IO; -namespace LSLib.LS.Story +namespace LSLib.LS.Story; + +public enum RelOpType : byte { - public enum RelOpType : byte - { - Less = 0, - LessOrEqual = 1, - Greater = 2, - GreaterOrEqual = 3, - Equal = 4, - NotEqual = 5 - }; - - public class RelOpNode : RelNode + Less = 0, + LessOrEqual = 1, + Greater = 2, + GreaterOrEqual = 3, + Equal = 4, + NotEqual = 5 +}; + +public class RelOpNode : RelNode +{ + public sbyte LeftValueIndex; + public sbyte RightValueIndex; + public Value LeftValue; + public Value RightValue; + public RelOpType RelOp; + + public override void Read(OsiReader reader) { - public sbyte LeftValueIndex; - public sbyte RightValueIndex; - public Value LeftValue; - public Value RightValue; - public RelOpType RelOp; + base.Read(reader); + LeftValueIndex = reader.ReadSByte(); + RightValueIndex = reader.ReadSByte(); - public override void Read(OsiReader reader) - { - base.Read(reader); - LeftValueIndex = reader.ReadSByte(); - RightValueIndex = reader.ReadSByte(); + LeftValue = new Value(); + LeftValue.Read(reader); - LeftValue = new Value(); - LeftValue.Read(reader); + RightValue = new Value(); + RightValue.Read(reader); - RightValue = new Value(); - RightValue.Read(reader); + RelOp = (RelOpType)reader.ReadInt32(); + } - RelOp = (RelOpType)reader.ReadInt32(); - } + public override void Write(OsiWriter writer) + { + base.Write(writer); + writer.Write(LeftValueIndex); + writer.Write(RightValueIndex); - public override void Write(OsiWriter writer) - { - base.Write(writer); - writer.Write(LeftValueIndex); - writer.Write(RightValueIndex); + LeftValue.Write(writer); + RightValue.Write(writer); + writer.Write((UInt32)RelOp); + } - LeftValue.Write(writer); - RightValue.Write(writer); - writer.Write((UInt32)RelOp); - } + public override Type NodeType() + { + return Type.RelOp; + } - public override Type NodeType() - { - return Type.RelOp; - } + public override string TypeName() + { + return String.Format("RelOp {0}", RelOp); + } - public override string TypeName() - { - return String.Format("RelOp {0}", RelOp); - } + public override void DebugDump(TextWriter writer, Story story) + { + base.DebugDump(writer, story); + + writer.Write(" Left Value: "); + if (LeftValueIndex != -1) + writer.Write("[Source Column {0}]", LeftValueIndex); + else + LeftValue.DebugDump(writer, story); + writer.WriteLine(); + + writer.Write(" Right Value: "); + if (RightValueIndex != -1) + writer.Write("[Source Column {0}]", RightValueIndex); + else + 
RightValue.DebugDump(writer, story); + writer.WriteLine(); + } - public override void DebugDump(TextWriter writer, Story story) - { - base.DebugDump(writer, story); - - writer.Write(" Left Value: "); - if (LeftValueIndex != -1) - writer.Write("[Source Column {0}]", LeftValueIndex); - else - LeftValue.DebugDump(writer, story); - writer.WriteLine(); - - writer.Write(" Right Value: "); - if (RightValueIndex != -1) - writer.Write("[Source Column {0}]", RightValueIndex); - else - RightValue.DebugDump(writer, story); - writer.WriteLine(); - } + public override void MakeScript(TextWriter writer, Story story, Tuple tuple, bool printTypes) + { + var adaptedTuple = AdapterRef.Resolve().Adapt(tuple); + ParentRef.Resolve().MakeScript(writer, story, adaptedTuple, printTypes); + writer.WriteLine("AND"); + + if (LeftValueIndex != -1) + adaptedTuple.Logical[LeftValueIndex].MakeScript(writer, story, tuple); + else + LeftValue.MakeScript(writer, story, tuple); - public override void MakeScript(TextWriter writer, Story story, Tuple tuple, bool printTypes) + switch (RelOp) { - var adaptedTuple = AdapterRef.Resolve().Adapt(tuple); - ParentRef.Resolve().MakeScript(writer, story, adaptedTuple, printTypes); - writer.WriteLine("AND"); - - if (LeftValueIndex != -1) - adaptedTuple.Logical[LeftValueIndex].MakeScript(writer, story, tuple); - else - LeftValue.MakeScript(writer, story, tuple); - - switch (RelOp) - { - case RelOpType.Less: writer.Write(" < "); break; - case RelOpType.LessOrEqual: writer.Write(" <= "); break; - case RelOpType.Greater: writer.Write(" > "); break; - case RelOpType.GreaterOrEqual: writer.Write(" >= "); break; - case RelOpType.Equal: writer.Write(" == "); break; - case RelOpType.NotEqual: writer.Write(" != "); break; - } - - if (RightValueIndex != -1) - adaptedTuple.Logical[RightValueIndex].MakeScript(writer, story, tuple); - else - RightValue.MakeScript(writer, story, tuple); - writer.WriteLine(); + case RelOpType.Less: writer.Write(" < "); break; + case RelOpType.LessOrEqual: writer.Write(" <= "); break; + case RelOpType.Greater: writer.Write(" > "); break; + case RelOpType.GreaterOrEqual: writer.Write(" >= "); break; + case RelOpType.Equal: writer.Write(" == "); break; + case RelOpType.NotEqual: writer.Write(" != "); break; } + + if (RightValueIndex != -1) + adaptedTuple.Logical[RightValueIndex].MakeScript(writer, story, tuple); + else + RightValue.MakeScript(writer, story, tuple); + writer.WriteLine(); } } diff --git a/LSLib/LS/Story/Rule.cs b/LSLib/LS/Story/Rule.cs index 816e6328..cea2c30a 100644 --- a/LSLib/LS/Story/Rule.cs +++ b/LSLib/LS/Story/Rule.cs @@ -2,250 +2,249 @@ using System.Collections.Generic; using System.IO; -namespace LSLib.LS.Story +namespace LSLib.LS.Story; + +public enum RuleType { - public enum RuleType - { - Rule, - Proc, - Query - }; + Rule, + Proc, + Query +}; - public class RuleNode : RelNode +public class RuleNode : RelNode +{ + public List Calls; + public List Variables; + public UInt32 Line; + public GoalReference DerivedGoalRef; + public bool IsQuery; + + public override void Read(OsiReader reader) { - public List Calls; - public List Variables; - public UInt32 Line; - public GoalReference DerivedGoalRef; - public bool IsQuery; + base.Read(reader); + Calls = reader.ReadList(); - public override void Read(OsiReader reader) + Variables = new List(); + var variables = reader.ReadByte(); + while (variables-- > 0) { - base.Read(reader); - Calls = reader.ReadList(); - - Variables = new List(); - var variables = reader.ReadByte(); - while (variables-- > 0) + var type 
= reader.ReadByte(); + if (type != 1) throw new InvalidDataException("Illegal value type in rule variable list"); + var variable = new Variable(); + variable.Read(reader); + if (variable.Adapted) { - var type = reader.ReadByte(); - if (type != 1) throw new InvalidDataException("Illegal value type in rule variable list"); - var variable = new Variable(); - variable.Read(reader); - if (variable.Adapted) - { - variable.VariableName = String.Format("_Var{0}", Variables.Count + 1); - } - - Variables.Add(variable); + variable.VariableName = String.Format("_Var{0}", Variables.Count + 1); } - Line = reader.ReadUInt32(); - - if (reader.Ver >= OsiVersion.VerAddQuery) - IsQuery = reader.ReadBoolean(); - else - IsQuery = false; + Variables.Add(variable); } - public override void Write(OsiWriter writer) - { - base.Write(writer); - writer.WriteList(Calls); + Line = reader.ReadUInt32(); - writer.Write((byte)Variables.Count); - foreach (var variable in Variables) - { - writer.Write((byte)1); - variable.Write(writer); - } + if (reader.Ver >= OsiVersion.VerAddQuery) + IsQuery = reader.ReadBoolean(); + else + IsQuery = false; + } - writer.Write(Line); - if (writer.Ver >= OsiVersion.VerAddQuery) - writer.Write(IsQuery); - } + public override void Write(OsiWriter writer) + { + base.Write(writer); + writer.WriteList(Calls); - public override Type NodeType() + writer.Write((byte)Variables.Count); + foreach (var variable in Variables) { - return Type.Rule; + writer.Write((byte)1); + variable.Write(writer); } - public override string TypeName() - { - if (IsQuery) - return "Query Rule"; - else - return "Rule"; - } + writer.Write(Line); + if (writer.Ver >= OsiVersion.VerAddQuery) + writer.Write(IsQuery); + } - public override void DebugDump(TextWriter writer, Story story) - { - base.DebugDump(writer, story); + public override Type NodeType() + { + return Type.Rule; + } - writer.WriteLine(" Variables: "); - foreach (var v in Variables) - { - writer.Write(" "); - v.DebugDump(writer, story); - writer.WriteLine(""); - } + public override string TypeName() + { + if (IsQuery) + return "Query Rule"; + else + return "Rule"; + } - writer.WriteLine(" Calls: "); - foreach (var call in Calls) - { - writer.Write(" "); - call.DebugDump(writer, story); - writer.WriteLine(""); - } + public override void DebugDump(TextWriter writer, Story story) + { + base.DebugDump(writer, story); + + writer.WriteLine(" Variables: "); + foreach (var v in Variables) + { + writer.Write(" "); + v.DebugDump(writer, story); + writer.WriteLine(""); } - public Node GetRoot(Story story) + writer.WriteLine(" Calls: "); + foreach (var call in Calls) { - Node parent = this; - for (;;) - { - if (parent is RelNode) - { - var rel = parent as RelNode; - parent = rel.ParentRef.Resolve(); - } - else if (parent is JoinNode) - { - var join = parent as JoinNode; - parent = join.LeftParentRef.Resolve(); - } - else - { - return parent; - } - } + writer.Write(" "); + call.DebugDump(writer, story); + writer.WriteLine(""); } + } - public RuleType? 
GetRuleType(Story story) + public Node GetRoot(Story story) + { + Node parent = this; + for (;;) { - var root = GetRoot(story); - if (root is DatabaseNode) + if (parent is RelNode) { - return RuleType.Rule; + var rel = parent as RelNode; + parent = rel.ParentRef.Resolve(); } - else if (root is ProcNode) + else if (parent is JoinNode) { - var querySig = root.Name + "__DEF__/" + root.NumParams.ToString(); - var sig = root.Name + "/" + root.NumParams.ToString(); - - if (!story.FunctionSignatureMap.TryGetValue(querySig, out Function func) - && !story.FunctionSignatureMap.TryGetValue(sig, out func)) - { - return null; - } - - switch (func.Type) - { - case FunctionType.Event: - return RuleType.Rule; - - case FunctionType.Proc: - return RuleType.Proc; - - case FunctionType.UserQuery: - return RuleType.Query; - - default: - throw new InvalidDataException($"Unsupported root function type: {func.Type}"); - } + var join = parent as JoinNode; + parent = join.LeftParentRef.Resolve(); } else { - throw new InvalidDataException("Cannot export rules with this root node"); + return parent; } } + } - public Tuple MakeInitialTuple() + public RuleType? GetRuleType(Story story) + { + var root = GetRoot(story); + if (root is DatabaseNode) { - var tuple = new Tuple(); - for (int i = 0; i < Variables.Count; i++) - { - tuple.Physical.Add(Variables[i]); - tuple.Logical.Add(i, Variables[i]); - } - - return tuple; + return RuleType.Rule; } - - public override void MakeScript(TextWriter writer, Story story, Tuple tuple, bool printTypes) + else if (root is ProcNode) { - var ruleType = GetRuleType(story); - if (ruleType == null) - { - return; - } + var querySig = root.Name + "__DEF__/" + root.NumParams.ToString(); + var sig = root.Name + "/" + root.NumParams.ToString(); - switch (ruleType) + if (!story.FunctionSignatureMap.TryGetValue(querySig, out Function func) + && !story.FunctionSignatureMap.TryGetValue(sig, out func)) { - case RuleType.Proc: writer.WriteLine("PROC"); break; - case RuleType.Query: writer.WriteLine("QRY"); break; - case RuleType.Rule: writer.WriteLine("IF"); break; + return null; } - var initialTuple = MakeInitialTuple(); - if (AdapterRef.IsValid) + switch (func.Type) { - var adapter = AdapterRef.Resolve(); - initialTuple = adapter.Adapt(initialTuple); - } + case FunctionType.Event: + return RuleType.Rule; - printTypes = printTypes || ruleType == RuleType.Proc || ruleType == RuleType.Query; - ParentRef.Resolve().MakeScript(writer, story, initialTuple, printTypes); - writer.WriteLine("THEN"); - foreach (var call in Calls) - { - call.MakeScript(writer, story, initialTuple, false); - writer.WriteLine(";"); + case FunctionType.Proc: + return RuleType.Proc; + + case FunctionType.UserQuery: + return RuleType.Query; + + default: + throw new InvalidDataException($"Unsupported root function type: {func.Type}"); } } + else + { + throw new InvalidDataException("Cannot export rules with this root node"); + } + } - private void RemoveQueryPostfix(Story story) + public Tuple MakeInitialTuple() + { + var tuple = new Tuple(); + for (int i = 0; i < Variables.Count; i++) { - // Remove the __DEF__ postfix that is added to the end of Query nodes - if (IsQuery) - { - var ruleRoot = GetRoot(story); - if (ruleRoot.Name != null && - ruleRoot.Name.Length > 7 && - ruleRoot.Name.Substring(ruleRoot.Name.Length - 7) == "__DEF__") - { - ruleRoot.Name = ruleRoot.Name.Substring(0, ruleRoot.Name.Length - 7); - } - } + tuple.Physical.Add(Variables[i]); + tuple.Logical.Add(i, Variables[i]); + } + + return tuple; + } + + public override 
void MakeScript(TextWriter writer, Story story, Tuple tuple, bool printTypes) + { + var ruleType = GetRuleType(story); + if (ruleType == null) + { + return; + } + + switch (ruleType) + { + case RuleType.Proc: writer.WriteLine("PROC"); break; + case RuleType.Query: writer.WriteLine("QRY"); break; + case RuleType.Rule: writer.WriteLine("IF"); break; } - public override void PostLoad(Story story) + var initialTuple = MakeInitialTuple(); + if (AdapterRef.IsValid) { - base.PostLoad(story); - RemoveQueryPostfix(story); + var adapter = AdapterRef.Resolve(); + initialTuple = adapter.Adapt(initialTuple); } - public override void PreSave(Story story) + printTypes = printTypes || ruleType == RuleType.Proc || ruleType == RuleType.Query; + ParentRef.Resolve().MakeScript(writer, story, initialTuple, printTypes); + writer.WriteLine("THEN"); + foreach (var call in Calls) { - base.PreSave(story); + call.MakeScript(writer, story, initialTuple, false); + writer.WriteLine(";"); + } + } - // Re-add the __DEF__ postfix that is added to the end of Query nodes - if (IsQuery) + private void RemoveQueryPostfix(Story story) + { + // Remove the __DEF__ postfix that is added to the end of Query nodes + if (IsQuery) + { + var ruleRoot = GetRoot(story); + if (ruleRoot.Name != null && + ruleRoot.Name.Length > 7 && + ruleRoot.Name.Substring(ruleRoot.Name.Length - 7) == "__DEF__") { - var ruleRoot = GetRoot(story); - if (ruleRoot.Name != null && - (ruleRoot.Name.Length < 7 || - ruleRoot.Name.Substring(ruleRoot.Name.Length - 7) != "__DEF__")) - { - ruleRoot.Name += "__DEF__"; - } + ruleRoot.Name = ruleRoot.Name.Substring(0, ruleRoot.Name.Length - 7); } } + } - public override void PostSave(Story story) + public override void PostLoad(Story story) + { + base.PostLoad(story); + RemoveQueryPostfix(story); + } + + public override void PreSave(Story story) + { + base.PreSave(story); + + // Re-add the __DEF__ postfix that is added to the end of Query nodes + if (IsQuery) { - base.PostSave(story); - RemoveQueryPostfix(story); + var ruleRoot = GetRoot(story); + if (ruleRoot.Name != null && + (ruleRoot.Name.Length < 7 || + ruleRoot.Name.Substring(ruleRoot.Name.Length - 7) != "__DEF__")) + { + ruleRoot.Name += "__DEF__"; + } } } + + public override void PostSave(Story story) + { + base.PostSave(story); + RemoveQueryPostfix(story); + } } diff --git a/LSLib/LS/Story/Story.cs b/LSLib/LS/Story/Story.cs index bf11b970..6805c373 100644 --- a/LSLib/LS/Story/Story.cs +++ b/LSLib/LS/Story/Story.cs @@ -3,558 +3,557 @@ using System.IO; using System.Linq; -namespace LSLib.LS.Story +namespace LSLib.LS.Story; + +public class Story { - public class Story + public byte MinorVersion; + public byte MajorVersion; + // Use 16-bit instead of 32-bit type IDs, BG3 Patch8+ + public bool ShortTypeIds; + public SaveFileHeader Header; + public Dictionary Enums; + public Dictionary Types; + public List DivObjects; + public List Functions; + public Dictionary Nodes; + public Dictionary Adapters; + public Dictionary Databases; + public Dictionary Goals; + public List GlobalActions; + public List ExternalStringTable; + public Dictionary FunctionSignatureMap; + + public uint Version { - public byte MinorVersion; - public byte MajorVersion; - // Use 16-bit instead of 32-bit type IDs, BG3 Patch8+ - public bool ShortTypeIds; - public SaveFileHeader Header; - public Dictionary Enums; - public Dictionary Types; - public List DivObjects; - public List Functions; - public Dictionary Nodes; - public Dictionary Adapters; - public Dictionary Databases; - public Dictionary 
Goals; - public List GlobalActions; - public List ExternalStringTable; - public Dictionary FunctionSignatureMap; - - public uint Version + get { - get - { - return ((uint)MajorVersion << 8) | (uint)MinorVersion; - } + return ((uint)MajorVersion << 8) | (uint)MinorVersion; } + } - public void DebugDump(TextWriter writer) + public void DebugDump(TextWriter writer) + { + writer.WriteLine(" --- ENUMS ---"); + foreach (var e in Enums) { - writer.WriteLine(" --- ENUMS ---"); - foreach (var e in Enums) - { - e.Value.DebugDump(writer); - } - - writer.WriteLine(" --- TYPES ---"); - foreach (var type in Types) - { - type.Value.DebugDump(writer); - } - - writer.WriteLine(); - writer.WriteLine(" --- DIV OBJECTS ---"); - foreach (var obj in DivObjects) - { - obj.DebugDump(writer); - } + e.Value.DebugDump(writer); + } + + writer.WriteLine(" --- TYPES ---"); + foreach (var type in Types) + { + type.Value.DebugDump(writer); + } - writer.WriteLine(); - writer.WriteLine(" --- FUNCTIONS ---"); - foreach (var function in Functions) - { - function.DebugDump(writer, this); - } + writer.WriteLine(); + writer.WriteLine(" --- DIV OBJECTS ---"); + foreach (var obj in DivObjects) + { + obj.DebugDump(writer); + } - writer.WriteLine(); - writer.WriteLine(" --- NODES ---"); - foreach (var node in Nodes) - { - writer.Write("#{0} ", node.Key); - node.Value.DebugDump(writer, this); - writer.WriteLine(); - } + writer.WriteLine(); + writer.WriteLine(" --- FUNCTIONS ---"); + foreach (var function in Functions) + { + function.DebugDump(writer, this); + } + writer.WriteLine(); + writer.WriteLine(" --- NODES ---"); + foreach (var node in Nodes) + { + writer.Write("#{0} ", node.Key); + node.Value.DebugDump(writer, this); writer.WriteLine(); - writer.WriteLine(" --- ADAPTERS ---"); - foreach (var adapter in Adapters) - { - writer.Write("#{0} ", adapter.Key); - adapter.Value.DebugDump(writer, this); - } + } - writer.WriteLine(); - writer.WriteLine(" --- DATABASES ---"); - foreach (var database in Databases) - { - writer.Write("#{0} ", database.Key); - database.Value.DebugDump(writer, this); - } + writer.WriteLine(); + writer.WriteLine(" --- ADAPTERS ---"); + foreach (var adapter in Adapters) + { + writer.Write("#{0} ", adapter.Key); + adapter.Value.DebugDump(writer, this); + } - writer.WriteLine(); - writer.WriteLine(" --- GOALS ---"); - foreach (var goal in Goals) - { - writer.Write("#{0} ", goal.Key); - goal.Value.DebugDump(writer, this); - writer.WriteLine(); - } + writer.WriteLine(); + writer.WriteLine(" --- DATABASES ---"); + foreach (var database in Databases) + { + writer.Write("#{0} ", database.Key); + database.Value.DebugDump(writer, this); + } + writer.WriteLine(); + writer.WriteLine(" --- GOALS ---"); + foreach (var goal in Goals) + { + writer.Write("#{0} ", goal.Key); + goal.Value.DebugDump(writer, this); writer.WriteLine(); - writer.WriteLine(" --- GLOBAL ACTIONS ---"); - foreach (var call in GlobalActions) - { - call.DebugDump(writer, this); - writer.WriteLine(); - } } - public uint FindBuiltinTypeId(uint typeId) + writer.WriteLine(); + writer.WriteLine(" --- GLOBAL ACTIONS ---"); + foreach (var call in GlobalActions) { - var aliasId = typeId; + call.DebugDump(writer, this); + writer.WriteLine(); + } + } - while (typeId != 0 && Types[aliasId].Alias != 0) - { - aliasId = Types[aliasId].Alias; - } + public uint FindBuiltinTypeId(uint typeId) + { + var aliasId = typeId; - return aliasId; + while (typeId != 0 && Types[aliasId].Alias != 0) + { + aliasId = Types[aliasId].Alias; } + + return aliasId; } +} - public class 
StoryReader +public class StoryReader +{ + public StoryReader() { - public StoryReader() - { - } + } - private List ReadStrings(OsiReader reader) + private List ReadStrings(OsiReader reader) + { + var stringTable = new List(); + var count = reader.ReadUInt32(); + while (count-- > 0) { - var stringTable = new List(); - var count = reader.ReadUInt32(); - while (count-- > 0) - { - stringTable.Add(reader.ReadString()); - } - - return stringTable; + stringTable.Add(reader.ReadString()); } - private Dictionary ReadTypes(OsiReader reader) - { - var types = new Dictionary(); - var count = reader.ReadUInt32(); - while (count-- > 0) - { - var type = new OsirisType(); - type.Read(reader); - types.Add(type.Index, type); - } + return stringTable; + } - return types; + private Dictionary ReadTypes(OsiReader reader) + { + var types = new Dictionary(); + var count = reader.ReadUInt32(); + while (count-- > 0) + { + var type = new OsirisType(); + type.Read(reader); + types.Add(type.Index, type); } - private Dictionary ReadEnums(OsiReader reader) - { - var enums = new Dictionary(); - var count = reader.ReadUInt32(); - while (count-- > 0) - { - var e = new OsirisEnum(); - e.Read(reader); - enums.Add(e.UnderlyingType, e); - } + return types; + } - return enums; + private Dictionary ReadEnums(OsiReader reader) + { + var enums = new Dictionary(); + var count = reader.ReadUInt32(); + while (count-- > 0) + { + var e = new OsirisEnum(); + e.Read(reader); + enums.Add(e.UnderlyingType, e); } - private Dictionary ReadNodes(OsiReader reader) + return enums; + } + + private Dictionary ReadNodes(OsiReader reader) + { + var nodes = new Dictionary(); + var count = reader.ReadUInt32(); + while (count-- > 0) { - var nodes = new Dictionary(); - var count = reader.ReadUInt32(); - while (count-- > 0) + Node node = null; + var type = reader.ReadByte(); + var nodeId = reader.ReadUInt32(); + switch ((Node.Type)type) { - Node node = null; - var type = reader.ReadByte(); - var nodeId = reader.ReadUInt32(); - switch ((Node.Type)type) - { - case Node.Type.Database: - node = new DatabaseNode(); - break; + case Node.Type.Database: + node = new DatabaseNode(); + break; - case Node.Type.Proc: - node = new ProcNode(); - break; + case Node.Type.Proc: + node = new ProcNode(); + break; - case Node.Type.DivQuery: - node = new DivQueryNode(); - break; + case Node.Type.DivQuery: + node = new DivQueryNode(); + break; - case Node.Type.InternalQuery: - node = new InternalQueryNode(); - break; + case Node.Type.InternalQuery: + node = new InternalQueryNode(); + break; - case Node.Type.And: - node = new AndNode(); - break; + case Node.Type.And: + node = new AndNode(); + break; - case Node.Type.NotAnd: - node = new NotAndNode(); - break; + case Node.Type.NotAnd: + node = new NotAndNode(); + break; - case Node.Type.RelOp: - node = new RelOpNode(); - break; + case Node.Type.RelOp: + node = new RelOpNode(); + break; - case Node.Type.Rule: - node = new RuleNode(); - break; + case Node.Type.Rule: + node = new RuleNode(); + break; - case Node.Type.UserQuery: - node = new UserQueryNode(); - break; + case Node.Type.UserQuery: + node = new UserQueryNode(); + break; - default: - throw new NotImplementedException("No serializer found for this node type"); - } - - node.Read(reader); - nodes.Add(nodeId, node); + default: + throw new NotImplementedException("No serializer found for this node type"); } - return nodes; + node.Read(reader); + nodes.Add(nodeId, node); } - private Dictionary ReadAdapters(OsiReader reader) - { - var adapters = new Dictionary(); - var 
count = reader.ReadUInt32(); - while (count-- > 0) - { - var adapter = new Adapter(); - adapter.Read(reader); - adapters.Add(adapter.Index, adapter); - } + return nodes; + } - return adapters; + private Dictionary ReadAdapters(OsiReader reader) + { + var adapters = new Dictionary(); + var count = reader.ReadUInt32(); + while (count-- > 0) + { + var adapter = new Adapter(); + adapter.Read(reader); + adapters.Add(adapter.Index, adapter); } - private Dictionary ReadDatabases(OsiReader reader) - { - var databases = new Dictionary(); - var count = reader.ReadUInt32(); - while (count-- > 0) - { - var database = new Database(); - database.Read(reader); - databases.Add(database.Index, database); - } + return adapters; + } - return databases; + private Dictionary ReadDatabases(OsiReader reader) + { + var databases = new Dictionary(); + var count = reader.ReadUInt32(); + while (count-- > 0) + { + var database = new Database(); + database.Read(reader); + databases.Add(database.Index, database); } - private Dictionary ReadGoals(OsiReader reader, Story story) - { - var goals = new Dictionary(); - var count = reader.ReadUInt32(); - while (count-- > 0) - { - var goal = new Goal(story); - goal.Read(reader); - goals.Add(goal.Index, goal); - } + return databases; + } - return goals; + private Dictionary ReadGoals(OsiReader reader, Story story) + { + var goals = new Dictionary(); + var count = reader.ReadUInt32(); + while (count-- > 0) + { + var goal = new Goal(story); + goal.Read(reader); + goals.Add(goal.Index, goal); } - private Dictionary ReadTypes(OsiReader reader, Story story) + return goals; + } + + private Dictionary ReadTypes(OsiReader reader, Story story) + { + if (reader.Ver < OsiVersion.VerAddTypeMap) { - if (reader.Ver < OsiVersion.VerAddTypeMap) - { - return new Dictionary(); - } + return new Dictionary(); + } - var types = ReadTypes(reader); + var types = ReadTypes(reader); - // Find outermost types - foreach (var type in types) + // Find outermost types + foreach (var type in types) + { + if (type.Value.Alias != 0) { - if (type.Value.Alias != 0) - { - var aliasId = type.Value.Alias; - - while (aliasId != 0 && types.ContainsKey(aliasId) && types[aliasId].Alias != 0) - { - aliasId = types[aliasId].Alias; - } + var aliasId = type.Value.Alias; - reader.TypeAliases.Add(type.Key, aliasId); + while (aliasId != 0 && types.ContainsKey(aliasId) && types[aliasId].Alias != 0) + { + aliasId = types[aliasId].Alias; } - } - return types; + reader.TypeAliases.Add(type.Key, aliasId); + } } - public Story Read(Stream stream) + return types; + } + + public Story Read(Stream stream) + { + var story = new Story(); + using (var reader = new OsiReader(stream, story)) { - var story = new Story(); - using (var reader = new OsiReader(stream, story)) + var header = new SaveFileHeader(); + header.Read(reader); + reader.MinorVersion = header.MinorVersion; + reader.MajorVersion = header.MajorVersion; + story.MinorVersion = header.MinorVersion; + story.MajorVersion = header.MajorVersion; + + if (reader.Ver > OsiVersion.VerLastSupported) { - var header = new SaveFileHeader(); - header.Read(reader); - reader.MinorVersion = header.MinorVersion; - reader.MajorVersion = header.MajorVersion; - story.MinorVersion = header.MinorVersion; - story.MajorVersion = header.MajorVersion; - - if (reader.Ver > OsiVersion.VerLastSupported) - { - var msg = String.Format( - "Osiris version v{0}.{1} unsupported; this tool supports loading up to version 1.12.", - reader.MajorVersion, reader.MinorVersion - ); - throw new 
InvalidDataException(msg); - } + var msg = String.Format( + "Osiris version v{0}.{1} unsupported; this tool supports loading up to version 1.12.", + reader.MajorVersion, reader.MinorVersion + ); + throw new InvalidDataException(msg); + } - if (reader.Ver < OsiVersion.VerRemoveExternalStringTable) - { - reader.ShortTypeIds = false; - } - else if (reader.Ver >= OsiVersion.VerEnums) - { - reader.ShortTypeIds = true; - } + if (reader.Ver < OsiVersion.VerRemoveExternalStringTable) + { + reader.ShortTypeIds = false; + } + else if (reader.Ver >= OsiVersion.VerEnums) + { + reader.ShortTypeIds = true; + } - if (reader.Ver >= OsiVersion.VerScramble) - reader.Scramble = 0xAD; + if (reader.Ver >= OsiVersion.VerScramble) + reader.Scramble = 0xAD; - story.Types = ReadTypes(reader, story); + story.Types = ReadTypes(reader, story); - if (reader.Ver >= OsiVersion.VerExternalStringTable && reader.Ver < OsiVersion.VerRemoveExternalStringTable) - story.ExternalStringTable = ReadStrings(reader); - else - story.ExternalStringTable = new List(); + if (reader.Ver >= OsiVersion.VerExternalStringTable && reader.Ver < OsiVersion.VerRemoveExternalStringTable) + story.ExternalStringTable = ReadStrings(reader); + else + story.ExternalStringTable = new List(); - story.Types[0] = OsirisType.MakeBuiltin(0, "UNKNOWN"); - story.Types[1] = OsirisType.MakeBuiltin(1, "INTEGER"); + story.Types[0] = OsirisType.MakeBuiltin(0, "UNKNOWN"); + story.Types[1] = OsirisType.MakeBuiltin(1, "INTEGER"); - if (reader.Ver >= OsiVersion.VerEnhancedTypes) + if (reader.Ver >= OsiVersion.VerEnhancedTypes) + { + story.Types[2] = OsirisType.MakeBuiltin(2, "INTEGER64"); + story.Types[3] = OsirisType.MakeBuiltin(3, "REAL"); + story.Types[4] = OsirisType.MakeBuiltin(4, "STRING"); + // BG3 defines GUIDSTRING in the .osi file + if (!story.Types.ContainsKey(5)) { - story.Types[2] = OsirisType.MakeBuiltin(2, "INTEGER64"); - story.Types[3] = OsirisType.MakeBuiltin(3, "REAL"); - story.Types[4] = OsirisType.MakeBuiltin(4, "STRING"); - // BG3 defines GUIDSTRING in the .osi file - if (!story.Types.ContainsKey(5)) - { - story.Types[5] = OsirisType.MakeBuiltin(5, "GUIDSTRING"); - } + story.Types[5] = OsirisType.MakeBuiltin(5, "GUIDSTRING"); } - else - { - story.Types[2] = OsirisType.MakeBuiltin(2, "FLOAT"); - story.Types[3] = OsirisType.MakeBuiltin(3, "STRING"); + } + else + { + story.Types[2] = OsirisType.MakeBuiltin(2, "FLOAT"); + story.Types[3] = OsirisType.MakeBuiltin(3, "STRING"); - // Populate custom type IDs for versions that had no type alias map - if (reader.Ver < OsiVersion.VerAddTypeMap) + // Populate custom type IDs for versions that had no type alias map + if (reader.Ver < OsiVersion.VerAddTypeMap) + { + for (byte typeId = 4; typeId <= 17; typeId++) { - for (byte typeId = 4; typeId <= 17; typeId++) - { - story.Types[typeId] = OsirisType.MakeBuiltin(typeId, $"TYPE{typeId}"); - story.Types[typeId].Alias = 3; - reader.TypeAliases.Add(typeId, 3); - } + story.Types[typeId] = OsirisType.MakeBuiltin(typeId, $"TYPE{typeId}"); + story.Types[typeId].Alias = 3; + reader.TypeAliases.Add(typeId, 3); } } + } - if (reader.Ver >= OsiVersion.VerEnums) - { - story.Enums = ReadEnums(reader); - } - else - { - story.Enums = new Dictionary(); - } - - story.DivObjects = reader.ReadList(); - story.Functions = reader.ReadList(); - story.Nodes = ReadNodes(reader); - story.Adapters = ReadAdapters(reader); - story.Databases = ReadDatabases(reader); - story.Goals = ReadGoals(reader, story); - story.GlobalActions = reader.ReadList(); - story.ShortTypeIds = 
(bool)reader.ShortTypeIds; - - story.FunctionSignatureMap = new Dictionary(); - foreach (var func in story.Functions) - { - story.FunctionSignatureMap.Add(func.Name.Name + "/" + func.Name.Parameters.Types.Count.ToString(), func); - } + if (reader.Ver >= OsiVersion.VerEnums) + { + story.Enums = ReadEnums(reader); + } + else + { + story.Enums = new Dictionary(); + } - foreach (var node in story.Nodes) - { - node.Value.PostLoad(story); - } + story.DivObjects = reader.ReadList(); + story.Functions = reader.ReadList(); + story.Nodes = ReadNodes(reader); + story.Adapters = ReadAdapters(reader); + story.Databases = ReadDatabases(reader); + story.Goals = ReadGoals(reader, story); + story.GlobalActions = reader.ReadList(); + story.ShortTypeIds = (bool)reader.ShortTypeIds; + + story.FunctionSignatureMap = new Dictionary(); + foreach (var func in story.Functions) + { + story.FunctionSignatureMap.Add(func.Name.Name + "/" + func.Name.Parameters.Types.Count.ToString(), func); + } - return story; + foreach (var node in story.Nodes) + { + node.Value.PostLoad(story); } + + return story; } } +} - public class StoryWriter - { - private OsiWriter Writer; +public class StoryWriter +{ + private OsiWriter Writer; - public StoryWriter() - { + public StoryWriter() + { - } + } - private void WriteStrings(List stringTable) + private void WriteStrings(List stringTable) + { + Writer.Write((UInt32)stringTable.Count); + foreach (var s in stringTable) { - Writer.Write((UInt32)stringTable.Count); - foreach (var s in stringTable) - { - Writer.Write(s); - } + Writer.Write(s); } + } - private void WriteTypes(IList types, Story story) + private void WriteTypes(IList types, Story story) + { + Writer.Write((UInt32)types.Count); + foreach (var type in types) { - Writer.Write((UInt32)types.Count); - foreach (var type in types) + type.Write(Writer); + if (type.Alias != 0) { - type.Write(Writer); - if (type.Alias != 0) - { - Writer.TypeAliases.Add(type.Index, story.FindBuiltinTypeId(type.Index)); - } + Writer.TypeAliases.Add(type.Index, story.FindBuiltinTypeId(type.Index)); } } + } - private void WriteNodes(Dictionary nodes) + private void WriteNodes(Dictionary nodes) + { + Writer.Write((UInt32)nodes.Count); + foreach (var node in nodes) { - Writer.Write((UInt32)nodes.Count); - foreach (var node in nodes) - { - Writer.Write((byte)node.Value.NodeType()); - Writer.Write(node.Key); - node.Value.Write(Writer); - } + Writer.Write((byte)node.Value.NodeType()); + Writer.Write(node.Key); + node.Value.Write(Writer); } + } - private void WriteAdapters(Dictionary adapters) + private void WriteAdapters(Dictionary adapters) + { + Writer.Write((UInt32)adapters.Count); + foreach (var adapter in adapters) { - Writer.Write((UInt32)adapters.Count); - foreach (var adapter in adapters) - { - Writer.Write(adapter.Key); - adapter.Value.Write(Writer); - } + Writer.Write(adapter.Key); + adapter.Value.Write(Writer); } + } - private void WriteDatabases(Dictionary databases) + private void WriteDatabases(Dictionary databases) + { + Writer.Write((UInt32)databases.Count); + foreach (var database in databases) { - Writer.Write((UInt32)databases.Count); - foreach (var database in databases) - { - Writer.Write(database.Key); - database.Value.Write(Writer); - } + Writer.Write(database.Key); + database.Value.Write(Writer); } + } - private void WriteGoals(Dictionary goals) + private void WriteGoals(Dictionary goals) + { + Writer.Write((UInt32)goals.Count); + foreach (var goal in goals) { - Writer.Write((UInt32)goals.Count); - foreach (var goal in goals) - { - 
goal.Value.Write(Writer); - } + goal.Value.Write(Writer); } + } - public void Write(Stream stream, Story story, bool leaveOpen) + public void Write(Stream stream, Story story, bool leaveOpen) + { + using (Writer = new OsiWriter(stream, leaveOpen)) { - using (Writer = new OsiWriter(stream, leaveOpen)) + foreach (var node in story.Nodes) { - foreach (var node in story.Nodes) - { - node.Value.PreSave(story); - } + node.Value.PreSave(story); + } - Writer.MajorVersion = story.MajorVersion; - Writer.MinorVersion = story.MinorVersion; - Writer.ShortTypeIds = story.ShortTypeIds; - Writer.Enums = story.Enums; + Writer.MajorVersion = story.MajorVersion; + Writer.MinorVersion = story.MinorVersion; + Writer.ShortTypeIds = story.ShortTypeIds; + Writer.Enums = story.Enums; - var header = new SaveFileHeader(); - if (Writer.Ver >= OsiVersion.VerExternalStringTable) + var header = new SaveFileHeader(); + if (Writer.Ver >= OsiVersion.VerExternalStringTable) + { + if (Writer.ShortTypeIds) { - if (Writer.ShortTypeIds) - { - header.Version = "Osiris save file dd. 07/09/22 00:20:54. Version 1.8."; - } - else - { - header.Version = "Osiris save file dd. 03/30/17 07:28:20. Version 1.8."; - } + header.Version = "Osiris save file dd. 07/09/22 00:20:54. Version 1.8."; } else { - header.Version = "Osiris save file dd. 02/10/15 12:44:13. Version 1.5."; - } - header.MajorVersion = story.MajorVersion; - header.MinorVersion = story.MinorVersion; - header.BigEndian = false; - header.Unused = 0; - // Debug flags used in D:OS EE and D:OS 2 - header.DebugFlags = 0x000C10A0; - header.Write(Writer); - - if (Writer.Ver > OsiVersion.VerLastSupported) - { - var msg = String.Format( - "Osiris version v{0}.{1} unsupported; this tool supports saving up to version 1.11.", - Writer.MajorVersion, Writer.MinorVersion - ); - throw new InvalidDataException(msg); + header.Version = "Osiris save file dd. 03/30/17 07:28:20. Version 1.8."; } + } + else + { + header.Version = "Osiris save file dd. 02/10/15 12:44:13. 
Version 1.5."; + } + header.MajorVersion = story.MajorVersion; + header.MinorVersion = story.MinorVersion; + header.BigEndian = false; + header.Unused = 0; + // Debug flags used in D:OS EE and D:OS 2 + header.DebugFlags = 0x000C10A0; + header.Write(Writer); + + if (Writer.Ver > OsiVersion.VerLastSupported) + { + var msg = String.Format( + "Osiris version v{0}.{1} unsupported; this tool supports saving up to version 1.11.", + Writer.MajorVersion, Writer.MinorVersion + ); + throw new InvalidDataException(msg); + } - if (Writer.Ver >= OsiVersion.VerScramble) - Writer.Scramble = 0xAD; + if (Writer.Ver >= OsiVersion.VerScramble) + Writer.Scramble = 0xAD; - if (Writer.Ver >= OsiVersion.VerAddTypeMap) + if (Writer.Ver >= OsiVersion.VerAddTypeMap) + { + List types; + if (Writer.Ver >= OsiVersion.VerEnums) { - List types; - if (Writer.Ver >= OsiVersion.VerEnums) - { - // BG3 Patch 9 writes all types to the blob except type 0 - types = story.Types.Values.Where(t => t.Name != "UNKNOWN").ToList(); - } - else - { - // Don't export builtin types, only externally declared ones - types = story.Types.Values.Where(t => !t.IsBuiltin).ToList(); - } - - WriteTypes(types, story); + // BG3 Patch 9 writes all types to the blob except type 0 + types = story.Types.Values.Where(t => t.Name != "UNKNOWN").ToList(); } - - if (Writer.Ver >= OsiVersion.VerEnums) + else { - Writer.WriteList(story.Enums.Values.ToList()); + // Don't export builtin types, only externally declared ones + types = story.Types.Values.Where(t => !t.IsBuiltin).ToList(); } - // TODO: regenerate string table? - if (Writer.Ver >= OsiVersion.VerExternalStringTable && Writer.Ver < OsiVersion.VerRemoveExternalStringTable) - WriteStrings(story.ExternalStringTable); + WriteTypes(types, story); + } - Writer.WriteList(story.DivObjects); - Writer.WriteList(story.Functions); - WriteNodes(story.Nodes); - WriteAdapters(story.Adapters); - WriteDatabases(story.Databases); - WriteGoals(story.Goals); - Writer.WriteList(story.GlobalActions); + if (Writer.Ver >= OsiVersion.VerEnums) + { + Writer.WriteList(story.Enums.Values.ToList()); + } - foreach (var node in story.Nodes) - { - node.Value.PostSave(story); - } + // TODO: regenerate string table? 
+ if (Writer.Ver >= OsiVersion.VerExternalStringTable && Writer.Ver < OsiVersion.VerRemoveExternalStringTable) + WriteStrings(story.ExternalStringTable); + + Writer.WriteList(story.DivObjects); + Writer.WriteList(story.Functions); + WriteNodes(story.Nodes); + WriteAdapters(story.Adapters); + WriteDatabases(story.Databases); + WriteGoals(story.Goals); + Writer.WriteList(story.GlobalActions); + + foreach (var node in story.Nodes) + { + node.Value.PostSave(story); } } } diff --git a/LSLib/LS/Story/Value.cs b/LSLib/LS/Story/Value.cs index 449995b8..a854d77f 100644 --- a/LSLib/LS/Story/Value.cs +++ b/LSLib/LS/Story/Value.cs @@ -3,250 +3,154 @@ using System.IO; using System.Linq; -namespace LSLib.LS.Story +namespace LSLib.LS.Story; + +public class Value : OsirisSerializable { - public class Value : OsirisSerializable + // Original Sin 2 (v1.11) Type ID-s + public enum Type : uint { - // Original Sin 2 (v1.11) Type ID-s - public enum Type : uint - { - None = 0, - Integer = 1, - Integer64 = 2, - Float = 3, - String = 4, - GuidString = 5 - } + None = 0, + Integer = 1, + Integer64 = 2, + Float = 3, + String = 4, + GuidString = 5 + } - // Original Sin 1 (v1.0 - v1.7) Type ID-s - public enum Type_OS1 : uint - { - None = 0, - Integer = 1, - Float = 2, - String = 3 - } + // Original Sin 1 (v1.0 - v1.7) Type ID-s + public enum Type_OS1 : uint + { + None = 0, + Integer = 1, + Float = 2, + String = 3 + } - public UInt32 TypeId; - public Int32 IntValue; - public Int64 Int64Value; - public Single FloatValue; - public String StringValue; + public UInt32 TypeId; + public Int32 IntValue; + public Int64 Int64Value; + public Single FloatValue; + public String StringValue; - public override string ToString() + public override string ToString() + { + switch ((Type)TypeId) { - switch ((Type)TypeId) - { - case Type.None: return ""; - case Type.Integer: return IntValue.ToString(); - case Type.Integer64: return Int64Value.ToString(); - case Type.Float: return FloatValue.ToString(); - case Type.String: return StringValue; - case Type.GuidString: return StringValue; - default: return StringValue; - } + case Type.None: return ""; + case Type.Integer: return IntValue.ToString(); + case Type.Integer64: return Int64Value.ToString(); + case Type.Float: return FloatValue.ToString(); + case Type.String: return StringValue; + case Type.GuidString: return StringValue; + default: return StringValue; } + } - public static uint ConvertOS1ToOS2Type(uint os1TypeId) + public static uint ConvertOS1ToOS2Type(uint os1TypeId) + { + // Convert D:OS 1 type ID to D:OS 2 type ID + switch ((Type_OS1)os1TypeId) { - // Convert D:OS 1 type ID to D:OS 2 type ID - switch ((Type_OS1)os1TypeId) - { - case Type_OS1.None: - return (uint)Type.None; + case Type_OS1.None: + return (uint)Type.None; - case Type_OS1.Integer: - return (uint)Type.Integer; + case Type_OS1.Integer: + return (uint)Type.Integer; - case Type_OS1.Float: - return (uint)Type.Float; + case Type_OS1.Float: + return (uint)Type.Float; - case Type_OS1.String: - return (uint)Type.String; + case Type_OS1.String: + return (uint)Type.String; - default: - return os1TypeId; - } + default: + return os1TypeId; } + } - public static uint ConvertOS2ToOS1Type(uint os2TypeId) + public static uint ConvertOS2ToOS1Type(uint os2TypeId) + { + // Convert D:OS 2 type ID to D:OS 1 type ID + switch ((Type)os2TypeId) { - // Convert D:OS 2 type ID to D:OS 1 type ID - switch ((Type)os2TypeId) - { - case Type.None: - return (uint)Type_OS1.None; + case Type.None: + return (uint)Type_OS1.None; - case Type.Integer: - 
case Type.Integer64: - return (uint)Type_OS1.Integer; + case Type.Integer: + case Type.Integer64: + return (uint)Type_OS1.Integer; - case Type.Float: - return (uint)Type_OS1.Float; + case Type.Float: + return (uint)Type_OS1.Float; - case Type.String: - case Type.GuidString: - return (uint)Type_OS1.String; + case Type.String: + case Type.GuidString: + return (uint)Type_OS1.String; - default: - return os2TypeId; - } + default: + return os2TypeId; } + } - public Type GetBuiltinTypeId(Story story) + public Type GetBuiltinTypeId(Story story) + { + var aliasId = story.FindBuiltinTypeId(TypeId); + + if (story.Version < OsiVersion.VerEnhancedTypes) + { + return (Type)ConvertOS1ToOS2Type(aliasId); + } + else { - var aliasId = story.FindBuiltinTypeId(TypeId); + return (Type)aliasId; + } + } - if (story.Version < OsiVersion.VerEnhancedTypes) + public virtual void Read(OsiReader reader) + { + // possibly isReference? + var wtf = reader.ReadByte(); + if (wtf == '1') + { + if (reader.ShortTypeIds == true) { - return (Type)ConvertOS1ToOS2Type(aliasId); + TypeId = reader.ReadUInt16(); } else { - return (Type)aliasId; + TypeId = reader.ReadUInt32(); } - } - public virtual void Read(OsiReader reader) + IntValue = reader.ReadInt32(); + } + else if (wtf == '0') { - // possibly isReference? - var wtf = reader.ReadByte(); - if (wtf == '1') - { - if (reader.ShortTypeIds == true) - { - TypeId = reader.ReadUInt16(); - } - else - { - TypeId = reader.ReadUInt32(); - } - - IntValue = reader.ReadInt32(); - } - else if (wtf == '0') - { - if (reader.ShortTypeIds == true) - { - TypeId = reader.ReadUInt16(); - } - else - { - TypeId = reader.ReadUInt32(); - } - - uint writtenTypeId = TypeId; - - uint alias; - bool dos1alias = false; - if (reader.TypeAliases.TryGetValue(writtenTypeId, out alias)) - { - writtenTypeId = alias; - if (reader.Ver < OsiVersion.VerEnhancedTypes) - { - dos1alias = true; - } - } - - if (reader.Ver < OsiVersion.VerEnhancedTypes) - { - // Convert D:OS 1 type ID to D:OS 2 type ID - writtenTypeId = ConvertOS1ToOS2Type(writtenTypeId); - } - - switch ((Type)writtenTypeId) - { - case Type.None: - break; - - case Type.Integer: - IntValue = reader.ReadInt32(); - break; - - case Type.Integer64: - Int64Value = reader.ReadInt64(); - break; - - case Type.Float: - FloatValue = reader.ReadSingle(); - break; - - case Type.GuidString: - case Type.String: - // D:OS 1 aliased strings didn't have a flag byte - if (dos1alias) - { - StringValue = reader.ReadString(); - } - else if (reader.ReadByte() > 0) - { - StringValue = reader.ReadString(); - } - break; - - default: - StringValue = reader.ReadString(); - break; - } - } - else if (wtf == 'e') + if (reader.ShortTypeIds == true) { TypeId = reader.ReadUInt16(); - - OsirisEnum e; - if (!reader.Story.Enums.TryGetValue(TypeId, out e)) - { - throw new InvalidDataException($"Enum label serialized for a non-enum type: {TypeId}"); - } - - StringValue = reader.ReadString(); - var ele = e.Elements.Find(v => v.Name == StringValue); - if (ele == null) - { - throw new InvalidDataException($"Enumeration {TypeId} has no label named '{StringValue}'"); - } } else { - throw new InvalidDataException("Unrecognized value type"); + TypeId = reader.ReadUInt32(); } - } - - public virtual void Write(OsiWriter writer) - { - if (writer.Enums.ContainsKey(TypeId)) - { - writer.Write((byte)'e'); - writer.Write((UInt16)TypeId); - writer.Write(StringValue); - return; - } - - // TODO: Is the == 0x31 case ever used when reading? 
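// A reduced sketch of the value framing handled by Value.Read/Write in this file:
// one tag byte ('0' = plain typed value, 'e' = enum label, '1' = possibly a reference,
// per the comment in Read), then a type ID whose width depends on ShortTypeIds, then a
// type-dependent payload. The sketch uses a plain BinaryReader and deliberately ignores
// the scramble byte, the type alias map and the 'e'/'1' branches handled by the real
// OsiReader-based code.
static Value ReadPlainValueSketch(BinaryReader r, bool shortTypeIds)
{
    var tag = r.ReadByte();
    if (tag != '0')
        throw new InvalidDataException("Sketch only covers the '0' (plain value) tag");

    var v = new Value();
    v.TypeId = shortTypeIds ? r.ReadUInt16() : r.ReadUInt32();

    switch ((Value.Type)v.TypeId)
    {
        case Value.Type.None:      break;
        case Value.Type.Integer:   v.IntValue   = r.ReadInt32();  break;
        case Value.Type.Integer64: v.Int64Value = r.ReadInt64();  break;
        case Value.Type.Float:     v.FloatValue = r.ReadSingle(); break;
        default:
            // String / GuidString: presence flag byte, then the string payload
            if (r.ReadByte() > 0)
                v.StringValue = r.ReadString();
            break;
    }

    return v;
}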
- writer.Write((byte)'0'); uint writtenTypeId = TypeId; - bool aliased = false; + uint alias; - if (writer.TypeAliases.TryGetValue(TypeId, out alias)) + bool dos1alias = false; + if (reader.TypeAliases.TryGetValue(writtenTypeId, out alias)) { - aliased = true; writtenTypeId = alias; + if (reader.Ver < OsiVersion.VerEnhancedTypes) + { + dos1alias = true; + } } - if (writer.ShortTypeIds) - { - writer.Write((UInt16)TypeId); - } - else - { - writer.Write(TypeId); - } - - if (writer.Ver < OsiVersion.VerEnhancedTypes) + if (reader.Ver < OsiVersion.VerEnhancedTypes) { - // Make sure that we're serializing using the D:OS2 type ID - // (The alias map contains the D:OS 1 ID) + // Convert D:OS 1 type ID to D:OS 2 type ID writtenTypeId = ConvertOS1ToOS2Type(writtenTypeId); } @@ -256,272 +160,367 @@ public virtual void Write(OsiWriter writer) break; case Type.Integer: - writer.Write(IntValue); + IntValue = reader.ReadInt32(); break; case Type.Integer64: - // D:OS 1 aliased strings didn't have a flag byte - if (writer.Ver >= OsiVersion.VerEnhancedTypes) - { - writer.Write(Int64Value); - } - else - { - writer.Write((int)Int64Value); - } - + Int64Value = reader.ReadInt64(); break; case Type.Float: - writer.Write(FloatValue); + FloatValue = reader.ReadSingle(); break; - case Type.String: case Type.GuidString: - if (!aliased || (writer.Ver >= OsiVersion.VerEnhancedTypes)) + case Type.String: + // D:OS 1 aliased strings didn't have a flag byte + if (dos1alias) { - writer.Write(StringValue != null); + StringValue = reader.ReadString(); + } + else if (reader.ReadByte() > 0) + { + StringValue = reader.ReadString(); } - - if (StringValue != null) - writer.Write(StringValue); break; default: - writer.Write(StringValue); + StringValue = reader.ReadString(); break; } } - - public virtual void DebugDump(TextWriter writer, Story story) + else if (wtf == 'e') { - var builtinTypeId = GetBuiltinTypeId(story); + TypeId = reader.ReadUInt16(); - switch (builtinTypeId) + OsirisEnum e; + if (!reader.Story.Enums.TryGetValue(TypeId, out e)) { - case Type.None: - writer.Write(""); - break; + throw new InvalidDataException($"Enum label serialized for a non-enum type: {TypeId}"); + } - case Type.Integer: - writer.Write(IntValue); - break; + StringValue = reader.ReadString(); + var ele = e.Elements.Find(v => v.Name == StringValue); + if (ele == null) + { + throw new InvalidDataException($"Enumeration {TypeId} has no label named '{StringValue}'"); + } + } + else + { + throw new InvalidDataException("Unrecognized value type"); + } + } - case Type.Integer64: - writer.Write(Int64Value); - break; + public virtual void Write(OsiWriter writer) + { + if (writer.Enums.ContainsKey(TypeId)) + { + writer.Write((byte)'e'); + writer.Write((UInt16)TypeId); + writer.Write(StringValue); + return; + } - case Type.Float: - writer.Write(FloatValue); - break; + // TODO: Is the == 0x31 case ever used when reading? 
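// Write-side counterpart of the framing sketched above: the leading tag byte selects
// the encoding. Enum-typed values are written as 'e' + 16-bit type ID + label string;
// everything else starts with '0' followed by a 16- or 32-bit type ID. The writer type
// and method name below are illustrative stand-ins for OsiWriter.
static void WriteValueTagSketch(BinaryWriter w, uint typeId, bool isEnumType, bool shortTypeIds)
{
    if (isEnumType)
    {
        w.Write((byte)'e');
        w.Write((ushort)typeId);   // enum labels always use the 16-bit type ID
        return;
    }

    w.Write((byte)'0');
    if (shortTypeIds)
        w.Write((ushort)typeId);   // newer save versions (ShortTypeIds set)
    else
        w.Write(typeId);           // older saves store the full 32-bit type ID
}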
+ writer.Write((byte)'0'); - case Type.String: - writer.Write("'{0}'", StringValue); - break; + uint writtenTypeId = TypeId; + bool aliased = false; + uint alias; + if (writer.TypeAliases.TryGetValue(TypeId, out alias)) + { + aliased = true; + writtenTypeId = alias; + } - case Type.GuidString: - writer.Write(StringValue); - break; + if (writer.ShortTypeIds) + { + writer.Write((UInt16)TypeId); + } + else + { + writer.Write(TypeId); + } - default: - throw new Exception("Unsupported builtin type ID"); - } + if (writer.Ver < OsiVersion.VerEnhancedTypes) + { + // Make sure that we're serializing using the D:OS2 type ID + // (The alias map contains the D:OS 1 ID) + writtenTypeId = ConvertOS1ToOS2Type(writtenTypeId); } - public virtual void MakeScript(TextWriter writer, Story story, Tuple tuple, bool printTypes = false) + switch ((Type)writtenTypeId) { - var builtinTypeId = GetBuiltinTypeId(story); + case Type.None: + break; - switch (builtinTypeId) - { - case Type.None: - throw new InvalidDataException("Script cannot contain unknown values"); + case Type.Integer: + writer.Write(IntValue); + break; - case Type.Integer: - writer.Write(IntValue); - break; + case Type.Integer64: + // D:OS 1 aliased strings didn't have a flag byte + if (writer.Ver >= OsiVersion.VerEnhancedTypes) + { + writer.Write(Int64Value); + } + else + { + writer.Write((int)Int64Value); + } - case Type.Integer64: - writer.Write(IntValue); - break; + break; - case Type.Float: - writer.Write((decimal)FloatValue); - break; + case Type.Float: + writer.Write(FloatValue); + break; - case Type.String: - writer.Write("\"{0}\"", StringValue); - break; + case Type.String: + case Type.GuidString: + if (!aliased || (writer.Ver >= OsiVersion.VerEnhancedTypes)) + { + writer.Write(StringValue != null); + } - case Type.GuidString: + if (StringValue != null) writer.Write(StringValue); - break; + break; - default: - throw new Exception("Unsupported builtin type ID"); - } + default: + writer.Write(StringValue); + break; } } - public class TypedValue : Value + public virtual void DebugDump(TextWriter writer, Story story) { - public bool IsValid; - public bool OutParam; - public bool IsAType; + var builtinTypeId = GetBuiltinTypeId(story); - public override void Read(OsiReader reader) + switch (builtinTypeId) { - base.Read(reader); - IsValid = reader.ReadBoolean(); - OutParam = reader.ReadBoolean(); - IsAType = reader.ReadBoolean(); - } + case Type.None: + writer.Write(""); + break; - public override void Write(OsiWriter writer) - { - base.Write(writer); - writer.Write(IsValid); - writer.Write(OutParam); - writer.Write(IsAType); + case Type.Integer: + writer.Write(IntValue); + break; + + case Type.Integer64: + writer.Write(Int64Value); + break; + + case Type.Float: + writer.Write(FloatValue); + break; + + case Type.String: + writer.Write("'{0}'", StringValue); + break; + + case Type.GuidString: + writer.Write(StringValue); + break; + + default: + throw new Exception("Unsupported builtin type ID"); } + } + + public virtual void MakeScript(TextWriter writer, Story story, Tuple tuple, bool printTypes = false) + { + var builtinTypeId = GetBuiltinTypeId(story); - public override void DebugDump(TextWriter writer, Story story) + switch (builtinTypeId) { - if (IsValid) writer.Write("valid "); - if (OutParam) writer.Write("out "); - if (IsAType) writer.Write("type "); + case Type.None: + throw new InvalidDataException("Script cannot contain unknown values"); - if (IsValid) - { - base.DebugDump(writer, story); - } - else - { - writer.Write("<{0}>", 
story.Types[TypeId].Name); - } + case Type.Integer: + writer.Write(IntValue); + break; + + case Type.Integer64: + writer.Write(IntValue); + break; + + case Type.Float: + writer.Write((decimal)FloatValue); + break; + + case Type.String: + writer.Write("\"{0}\"", StringValue); + break; + + case Type.GuidString: + writer.Write(StringValue); + break; + + default: + throw new Exception("Unsupported builtin type ID"); } } +} + +public class TypedValue : Value +{ + public bool IsValid; + public bool OutParam; + public bool IsAType; + + public override void Read(OsiReader reader) + { + base.Read(reader); + IsValid = reader.ReadBoolean(); + OutParam = reader.ReadBoolean(); + IsAType = reader.ReadBoolean(); + } + + public override void Write(OsiWriter writer) + { + base.Write(writer); + writer.Write(IsValid); + writer.Write(OutParam); + writer.Write(IsAType); + } - public class Variable : TypedValue + public override void DebugDump(TextWriter writer, Story story) { - public sbyte Index; - public bool Unused; - public bool Adapted; - public string VariableName; + if (IsValid) writer.Write("valid "); + if (OutParam) writer.Write("out "); + if (IsAType) writer.Write("type "); - public override void Read(OsiReader reader) + if (IsValid) { - base.Read(reader); - Index = reader.ReadSByte(); - Unused = reader.ReadBoolean(); - Adapted = reader.ReadBoolean(); + base.DebugDump(writer, story); } - - public override void Write(OsiWriter writer) + else { - base.Write(writer); - writer.Write(Index); - writer.Write(Unused); - writer.Write(Adapted); + writer.Write("<{0}>", story.Types[TypeId].Name); } + } +} + +public class Variable : TypedValue +{ + public sbyte Index; + public bool Unused; + public bool Adapted; + public string VariableName; - public override void DebugDump(TextWriter writer, Story story) + public override void Read(OsiReader reader) + { + base.Read(reader); + Index = reader.ReadSByte(); + Unused = reader.ReadBoolean(); + Adapted = reader.ReadBoolean(); + } + + public override void Write(OsiWriter writer) + { + base.Write(writer); + writer.Write(Index); + writer.Write(Unused); + writer.Write(Adapted); + } + + public override void DebugDump(TextWriter writer, Story story) + { + writer.Write("#{0} ", Index); + if (VariableName != null && VariableName.Length > 0) writer.Write("'{0}' ", VariableName); + if (Unused) writer.Write("unused "); + if (Adapted) writer.Write("adapted "); + base.DebugDump(writer, story); + } + + public override void MakeScript(TextWriter writer, Story story, Tuple tuple, bool printTypes = false) + { + if (Unused) { - writer.Write("#{0} ", Index); - if (VariableName != null && VariableName.Length > 0) writer.Write("'{0}' ", VariableName); - if (Unused) writer.Write("unused "); - if (Adapted) writer.Write("adapted "); - base.DebugDump(writer, story); - } + if (printTypes && TypeId > 0) + { + writer.Write("({0})", story.Types[TypeId].Name); + } - public override void MakeScript(TextWriter writer, Story story, Tuple tuple, bool printTypes = false) + writer.Write("_"); + } + else if (Adapted) { - if (Unused) + if (VariableName != null && VariableName.Length > 0) { if (printTypes && TypeId > 0) { writer.Write("({0})", story.Types[TypeId].Name); } - writer.Write("_"); - } - else if (Adapted) - { - if (VariableName != null && VariableName.Length > 0) - { - if (printTypes && TypeId > 0) - { - writer.Write("({0})", story.Types[TypeId].Name); - } - - writer.Write(VariableName); - } - else - { - tuple.Logical[Index].MakeScript(writer, story, null); - } + writer.Write(VariableName); } 
else { - base.MakeScript(writer, story, tuple); + tuple.Logical[Index].MakeScript(writer, story, null); } } + else + { + base.MakeScript(writer, story, tuple); + } } +} + +public class Tuple : OsirisSerializable +{ + public List Physical = new List(); + public Dictionary Logical = new Dictionary(); - public class Tuple : OsirisSerializable + public void Read(OsiReader reader) { - public List Physical = new List(); - public Dictionary Logical = new Dictionary(); + Physical.Clear(); + Logical.Clear(); - public void Read(OsiReader reader) + var count = reader.ReadByte(); + while (count-- > 0) { - Physical.Clear(); - Logical.Clear(); + var index = reader.ReadByte(); + var value = new Value(); + value.Read(reader); - var count = reader.ReadByte(); - while (count-- > 0) - { - var index = reader.ReadByte(); - var value = new Value(); - value.Read(reader); - - Physical.Add(value); - Logical.Add(index, value); - } + Physical.Add(value); + Logical.Add(index, value); } + } - public void Write(OsiWriter writer) + public void Write(OsiWriter writer) + { + writer.Write((byte)Logical.Count); + foreach (var logical in Logical) { - writer.Write((byte)Logical.Count); - foreach (var logical in Logical) - { - writer.Write((byte)logical.Key); - logical.Value.Write(writer); - } + writer.Write((byte)logical.Key); + logical.Value.Write(writer); } + } - public void DebugDump(TextWriter writer, Story story) + public void DebugDump(TextWriter writer, Story story) + { + writer.Write("("); + var keys = Logical.Keys.ToArray(); + for (var i = 0; i < Logical.Count; i++) { - writer.Write("("); - var keys = Logical.Keys.ToArray(); - for (var i = 0; i < Logical.Count; i++) - { - writer.Write("{0}: ", keys[i]); - Logical[keys[i]].DebugDump(writer, story); - if (i < Logical.Count - 1) writer.Write(", "); - } - writer.Write(")"); + writer.Write("{0}: ", keys[i]); + Logical[keys[i]].DebugDump(writer, story); + if (i < Logical.Count - 1) writer.Write(", "); } + writer.Write(")"); + } - public void MakeScript(TextWriter writer, Story story, bool printTypes = false) + public void MakeScript(TextWriter writer, Story story, bool printTypes = false) + { + for (var i = 0; i < Physical.Count; i++) { - for (var i = 0; i < Physical.Count; i++) - { - var value = Physical[i]; - value.MakeScript(writer, story, null, printTypes); - if (i < Physical.Count - 1) - writer.Write(", "); - } + var value = Physical[i]; + value.MakeScript(writer, story, null, printTypes); + if (i < Physical.Count - 1) + writer.Write(", "); } } } diff --git a/LSLib/VirtualTextures/BC5Image.cs b/LSLib/VirtualTextures/BC5Image.cs index 37b0aae1..96327a82 100644 --- a/LSLib/VirtualTextures/BC5Image.cs +++ b/LSLib/VirtualTextures/BC5Image.cs @@ -4,154 +4,153 @@ using System.Collections.Generic; using System.IO; -namespace LSLib.VirtualTextures +namespace LSLib.VirtualTextures; + +public class BC5Image { - public class BC5Image + public byte[] Data; + public int Width; + public int Height; + + public BC5Image(byte[] data, int width, int height) { - public byte[] Data; - public int Width; - public int Height; + Data = data; + Width = width; + Height = height; + } - public BC5Image(byte[] data, int width, int height) + public BC5Image(int width, int height) + { + Data = new byte[width * height]; + Width = width; + Height = height; + } + + public int CalculateOffset(int x, int y) + { + if ((x % 4) != 0 || (y % 4) != 0) { - Data = data; - Width = width; - Height = height; + throw new ArgumentException("BC coordinates must be multiples if 4"); } - public BC5Image(int width, int 
height) + return ((x >> 2) + (y >> 2) * (Width >> 2)) << 4; + } + + public void CopyTo(BC5Image destination, int srcX, int srcY, int dstX, int dstY, int width, int height) + { + if ((srcX % 4) != 0 || (srcY % 4) != 0 || (dstX % 4) != 0 || (dstY % 4) != 0 || (width % 4) != 0 || (height % 4) != 0) { - Data = new byte[width * height]; - Width = width; - Height = height; + throw new ArgumentException("BC coordinates must be multiples of 4"); } - public int CalculateOffset(int x, int y) + if (srcX < 0 || dstX < 0 || srcY < 0 || dstY < 0 + || srcX + width > Width + || srcY + height > Height + || dstX + width > destination.Width + || dstY + height > destination.Height) { - if ((x % 4) != 0 || (y % 4) != 0) - { - throw new ArgumentException("BC coordinates must be multiples if 4"); - } - - return ((x >> 2) + (y >> 2) * (Width >> 2)) << 4; + throw new ArgumentException("Texture coordinates out of bounds"); } - public void CopyTo(BC5Image destination, int srcX, int srcY, int dstX, int dstY, int width, int height) + var wrX = dstX; + var wrY = dstY; + for (var y = srcY; y < srcY + height; y += 4) { - if ((srcX % 4) != 0 || (srcY % 4) != 0 || (dstX % 4) != 0 || (dstY % 4) != 0 || (width % 4) != 0 || (height % 4) != 0) - { - throw new ArgumentException("BC coordinates must be multiples of 4"); - } - - if (srcX < 0 || dstX < 0 || srcY < 0 || dstY < 0 - || srcX + width > Width - || srcY + height > Height - || dstX + width > destination.Width - || dstY + height > destination.Height) + for (var x = srcX; x < srcX + width; x += 4) { - throw new ArgumentException("Texture coordinates out of bounds"); + var srcoff = CalculateOffset(x, y); + var dstoff = destination.CalculateOffset(wrX, wrY); + Array.Copy(Data, srcoff, destination.Data, dstoff, 16); + wrX += 4; } - var wrX = dstX; - var wrY = dstY; - for (var y = srcY; y < srcY + height; y += 4) - { - for (var x = srcX; x < srcX + width; x += 4) - { - var srcoff = CalculateOffset(x, y); - var dstoff = destination.CalculateOffset(wrX, wrY); - Array.Copy(Data, srcoff, destination.Data, dstoff, 16); - wrX += 4; - } - - wrY += 4; - wrX = dstX; - } + wrY += 4; + wrX = dstX; } + } - public void SaveDDS(string path) + public void SaveDDS(string path) + { + var header = new DDSHeader { - var header = new DDSHeader - { - dwMagic = DDSHeader.DDSMagic, - dwSize = DDSHeader.HeaderSize, - dwFlags = 0x1007, - dwWidth = (uint)Width, - dwHeight = (uint)Height, - dwPitchOrLinearSize = (uint)(Width * Height), - dwDepth = 1, - dwMipMapCount = 1, - - dwPFSize = 32, - dwPFFlags = 0x04, - dwFourCC = DDSHeader.FourCC_DXT5, - - dwCaps = 0x1000 - }; - - using var pagef = new FileStream(path, FileMode.Create, FileAccess.Write); - using var bw = new BinaryWriter(pagef); - BinUtils.WriteStruct(bw, ref header); - bw.Write(Data, 0, Data.Length); - } + dwMagic = DDSHeader.DDSMagic, + dwSize = DDSHeader.HeaderSize, + dwFlags = 0x1007, + dwWidth = (uint)Width, + dwHeight = (uint)Height, + dwPitchOrLinearSize = (uint)(Width * Height), + dwDepth = 1, + dwMipMapCount = 1, + + dwPFSize = 32, + dwPFFlags = 0x04, + dwFourCC = DDSHeader.FourCC_DXT5, + + dwCaps = 0x1000 + }; + + using var pagef = new FileStream(path, FileMode.Create, FileAccess.Write); + using var bw = new BinaryWriter(pagef); + BinUtils.WriteStruct(bw, ref header); + bw.Write(Data, 0, Data.Length); } +} + +public class BC5Mips +{ + public List Mips; - public class BC5Mips + public void LoadDDS(string path) { - public List Mips; + using var f = new FileStream(path, FileMode.Open, FileAccess.Read); + using var reader = new 
BinaryReader(f); + var header = BinUtils.ReadStruct(reader); + Mips = []; - public void LoadDDS(string path) + if (header.dwMagic != DDSHeader.DDSMagic) { - using var f = new FileStream(path, FileMode.Open, FileAccess.Read); - using var reader = new BinaryReader(f); - var header = BinUtils.ReadStruct(reader); - Mips = []; - - if (header.dwMagic != DDSHeader.DDSMagic) - { - throw new InvalidDataException($"{path}: Incorrect DDS signature, or file is not a DDS file"); - } + throw new InvalidDataException($"{path}: Incorrect DDS signature, or file is not a DDS file"); + } - if (header.dwSize != DDSHeader.HeaderSize) - { - throw new InvalidDataException($"{path}: Incorrect DDS header size"); - } + if (header.dwSize != DDSHeader.HeaderSize) + { + throw new InvalidDataException($"{path}: Incorrect DDS header size"); + } - if ((header.dwFlags & 0xffff) != 0x1007) - { - throw new InvalidDataException($"{path}: Incorrect DDS texture flags"); - } + if ((header.dwFlags & 0xffff) != 0x1007) + { + throw new InvalidDataException($"{path}: Incorrect DDS texture flags"); + } - if (header.dwDepth != 0 && header.dwDepth != 1) - { - throw new InvalidDataException($"{path}: Only single-layer textures are supported"); - } + if (header.dwDepth != 0 && header.dwDepth != 1) + { + throw new InvalidDataException($"{path}: Only single-layer textures are supported"); + } - if ((header.dwPFFlags & 4) != 4) - { - throw new InvalidDataException($"{path}: DDS does not have a valid FourCC code"); - } + if ((header.dwPFFlags & 4) != 4) + { + throw new InvalidDataException($"{path}: DDS does not have a valid FourCC code"); + } - if (header.FourCCName != "DXT5") - { - throw new InvalidDataException($"{path}: Expected a DXT5 encoded texture, got: " + header.FourCCName); - } + if (header.FourCCName != "DXT5") + { + throw new InvalidDataException($"{path}: Expected a DXT5 encoded texture, got: " + header.FourCCName); + } - Int32 mips = 1; - if ((header.dwFlags & 0x20000) == 0x20000) - { - mips = (Int32)header.dwMipMapCount; - } + Int32 mips = 1; + if ((header.dwFlags & 0x20000) == 0x20000) + { + mips = (Int32)header.dwMipMapCount; + } - Mips = new List(mips); - for (var i = 0; i < mips; i++) - { - var width = Math.Max((int)header.dwWidth >> i, 1); - var height = Math.Max((int)header.dwHeight >> i, 1); - var bytes = Math.Max(width / 4, 1) * Math.Max(height / 4, 1) * 16; - var blob = reader.ReadBytes(bytes); - Mips.Add(new BC5Image(blob, width, height)); - } + Mips = new List(mips); + for (var i = 0; i < mips; i++) + { + var width = Math.Max((int)header.dwWidth >> i, 1); + var height = Math.Max((int)header.dwHeight >> i, 1); + var bytes = Math.Max(width / 4, 1) * Math.Max(height / 4, 1) * 16; + var blob = reader.ReadBytes(bytes); + Mips.Add(new BC5Image(blob, width, height)); } } } diff --git a/LSLib/VirtualTextures/Build.cs b/LSLib/VirtualTextures/Build.cs index 0e5c5627..b202e43a 100644 --- a/LSLib/VirtualTextures/Build.cs +++ b/LSLib/VirtualTextures/Build.cs @@ -5,1128 +5,1127 @@ using System.Runtime.InteropServices; using System.Xml; -namespace LSLib.VirtualTextures +namespace LSLib.VirtualTextures; + +public class TextureDescriptor +{ + public string Name; + public List Layers; +} + +public class TileSetDescriptor { - public class TextureDescriptor + public string Name; + public List Textures = []; + public TileSetConfiguration Config = new(); + public string RootPath; + public string SourceTexturePath; + public string VirtualTexturePath; + + public void Load(string path) { - public string Name; - public List Layers; + using 
var f = new FileStream(path, FileMode.Open, FileAccess.Read); + var doc = new XmlDocument(); + doc.Load(f); + Load(doc); } - public class TileSetDescriptor + public void Load(XmlDocument doc) { - public string Name; - public List Textures = []; - public TileSetConfiguration Config = new(); - public string RootPath; - public string SourceTexturePath; - public string VirtualTexturePath; - - public void Load(string path) + var version = doc.DocumentElement.GetAttribute("Version"); + if (version == null || !Int32.TryParse(version, out int versionNum) || versionNum != 2) { - using var f = new FileStream(path, FileMode.Open, FileAccess.Read); - var doc = new XmlDocument(); - doc.Load(f); - Load(doc); + throw new InvalidDataException("Expected TileSet XML descriptor version 2"); } - public void Load(XmlDocument doc) + Name = doc.DocumentElement.GetAttribute("Name"); + Config.GTSName = Name; + Config.Layers = []; + + var tileSetConfig = doc.DocumentElement.GetElementsByTagName("TileSetConfig"); + foreach (var node in (tileSetConfig[0] as XmlElement).ChildNodes) { - var version = doc.DocumentElement.GetAttribute("Version"); - if (version == null || !Int32.TryParse(version, out int versionNum) || versionNum != 2) + if (node is XmlElement) { - throw new InvalidDataException("Expected TileSet XML descriptor version 2"); - } + var key = (node as XmlElement).Name; + var value = (node as XmlElement).InnerText; - Name = doc.DocumentElement.GetAttribute("Name"); - Config.GTSName = Name; - Config.Layers = []; - - var tileSetConfig = doc.DocumentElement.GetElementsByTagName("TileSetConfig"); - foreach (var node in (tileSetConfig[0] as XmlElement).ChildNodes) - { - if (node is XmlElement) + switch (key) { - var key = (node as XmlElement).Name; - var value = (node as XmlElement).InnerText; - - switch (key) - { - case "TileWidth": Config.TileWidth = Int32.Parse(value); break; - case "TileHeight": Config.TileHeight = Int32.Parse(value); break; - case "TileBorder": Config.TileBorder = Int32.Parse(value); break; - case "Compression": Config.Compression = (TileCompressionPreference)Enum.Parse(typeof(TileCompressionPreference), value); break; - case "PageSize": Config.PageSize = Int32.Parse(value); break; - case "OneFilePerGTex": Config.OneFilePerGTex = Boolean.Parse(value); break; - case "BackfillPages": Config.BackfillPages = Boolean.Parse(value); break; - case "EmbedMips": Config.EmbedMips = Boolean.Parse(value); break; - case "EmbedTopLevelMips": Config.EmbedTopLevelMips = Boolean.Parse(value); break; - default: throw new InvalidDataException($"Unsupported configuration key: {key}"); - } + case "TileWidth": Config.TileWidth = Int32.Parse(value); break; + case "TileHeight": Config.TileHeight = Int32.Parse(value); break; + case "TileBorder": Config.TileBorder = Int32.Parse(value); break; + case "Compression": Config.Compression = (TileCompressionPreference)Enum.Parse(typeof(TileCompressionPreference), value); break; + case "PageSize": Config.PageSize = Int32.Parse(value); break; + case "OneFilePerGTex": Config.OneFilePerGTex = Boolean.Parse(value); break; + case "BackfillPages": Config.BackfillPages = Boolean.Parse(value); break; + case "EmbedMips": Config.EmbedMips = Boolean.Parse(value); break; + case "EmbedTopLevelMips": Config.EmbedTopLevelMips = Boolean.Parse(value); break; + default: throw new InvalidDataException($"Unsupported configuration key: {key}"); } } + } - var paths = doc.DocumentElement.GetElementsByTagName("Paths"); - foreach (var node in (paths[0] as XmlElement).ChildNodes) + var paths = 
doc.DocumentElement.GetElementsByTagName("Paths"); + foreach (var node in (paths[0] as XmlElement).ChildNodes) + { + if (node is XmlElement) { - if (node is XmlElement) - { - var key = (node as XmlElement).Name; - var value = (node as XmlElement).InnerText; + var key = (node as XmlElement).Name; + var value = (node as XmlElement).InnerText; - switch (key) - { - case "SourceTextures": SourceTexturePath = Path.Combine(RootPath, value); break; - case "VirtualTextures": VirtualTexturePath = Path.Combine(RootPath, value); break; - default: throw new InvalidDataException($"Unsupported path type: {key}"); - } + switch (key) + { + case "SourceTextures": SourceTexturePath = Path.Combine(RootPath, value); break; + case "VirtualTextures": VirtualTexturePath = Path.Combine(RootPath, value); break; + default: throw new InvalidDataException($"Unsupported path type: {key}"); } } + } - var layers = doc.DocumentElement.GetElementsByTagName("Layers"); - foreach (var node in (layers[0] as XmlElement).GetElementsByTagName("Layer")) + var layers = doc.DocumentElement.GetElementsByTagName("Layers"); + foreach (var node in (layers[0] as XmlElement).GetElementsByTagName("Layer")) + { + Config.Layers.Add(new BuildLayer { - Config.Layers.Add(new BuildLayer - { - DataType = (GTSDataType)Enum.Parse(typeof(GTSDataType), (node as XmlElement).GetAttribute("Type")), - Name = (node as XmlElement).GetAttribute("Name") - }); - } + DataType = (GTSDataType)Enum.Parse(typeof(GTSDataType), (node as XmlElement).GetAttribute("Type")), + Name = (node as XmlElement).GetAttribute("Name") + }); + } + + if (Config.Layers.Count == 0) + { + throw new InvalidDataException("No tile set layers specified"); + } - if (Config.Layers.Count == 0) + var textures = doc.DocumentElement.GetElementsByTagName("Texture"); + foreach (var texture in textures) + { + var tex = new TextureDescriptor() { - throw new InvalidDataException("No tile set layers specified"); - } + Name = (texture as XmlElement).GetAttribute("Name"), + Layers = [] + }; + Textures.Add(tex); - var textures = doc.DocumentElement.GetElementsByTagName("Texture"); - foreach (var texture in textures) + foreach (var layer in Config.Layers) { - var tex = new TextureDescriptor() - { - Name = (texture as XmlElement).GetAttribute("Name"), - Layers = [] - }; - Textures.Add(tex); + tex.Layers.Add(null); + } - foreach (var layer in Config.Layers) + var texLayers = (texture as XmlElement).GetElementsByTagName("Layer"); + foreach (var layerNode in texLayers) + { + var name = (layerNode as XmlElement).GetAttribute("Name"); + var index = Config.Layers.FindIndex(ly => ly.Name == name); + if (index == -1) { - tex.Layers.Add(null); + throw new InvalidDataException($"Layer does not exist: '{name}'"); } - var texLayers = (texture as XmlElement).GetElementsByTagName("Layer"); - foreach (var layerNode in texLayers) - { - var name = (layerNode as XmlElement).GetAttribute("Name"); - var index = Config.Layers.FindIndex(ly => ly.Name == name); - if (index == -1) - { - throw new InvalidDataException($"Layer does not exist: '{name}'"); - } - - tex.Layers[index] = (layerNode as XmlElement).GetAttribute("Source"); - } + tex.Layers[index] = (layerNode as XmlElement).GetAttribute("Source"); } } } +} - public class BuildTile - { - public BC5Image Image; - public BC5Image EmbeddedMip; - public CompressedTile Compressed; - - // Set during initialization - public int Layer; - public GTSCodec Codec; - public GTSDataType DataType; - - // Set during layout - public int Level; - public int X; - public int Y; - - // Set 
during page file build - public bool AddedToPageFile = false; - public int PageFileIndex; - public int PageIndex; - public int ChunkIndex; - } +public class BuildTile +{ + public BC5Image Image; + public BC5Image EmbeddedMip; + public CompressedTile Compressed; + + // Set during initialization + public int Layer; + public GTSCodec Codec; + public GTSDataType DataType; + + // Set during layout + public int Level; + public int X; + public int Y; + + // Set during page file build + public bool AddedToPageFile = false; + public int PageFileIndex; + public int PageIndex; + public int ChunkIndex; +} - public class BuildLayer - { - public GTSDataType DataType; - public string Name; +public class BuildLayer +{ + public GTSDataType DataType; + public string Name; - public List Levels; - } + public List Levels; +} - public class TileSetConfiguration - { - public string GTSName; - public Int32 TileWidth = 0x80; - public Int32 TileHeight = 0x80; - public Int32 TileBorder = 8; - public List Layers; - public TileCompressionPreference Compression = TileCompressionPreference.Best; - public Int32 PageSize = 0x100000; - public bool OneFilePerGTex = true; - public bool BackfillPages = false; - public bool EmbedMips = true; - public bool EmbedTopLevelMips = false; - } +public class TileSetConfiguration +{ + public string GTSName; + public Int32 TileWidth = 0x80; + public Int32 TileHeight = 0x80; + public Int32 TileBorder = 8; + public List Layers; + public TileCompressionPreference Compression = TileCompressionPreference.Best; + public Int32 PageSize = 0x100000; + public bool OneFilePerGTex = true; + public bool BackfillPages = false; + public bool EmbedMips = true; + public bool EmbedTopLevelMips = false; +} - public class BuildLayerTexture - { - public string Path; - public int FirstMip; - public BC5Mips Mips; - } +public class BuildLayerTexture +{ + public string Path; + public int FirstMip; + public BC5Mips Mips; +} - public class BuildLevel +public class BuildLevel +{ + public int Level; // Level index (0..n) + public int Width; + public int Height; + public int TilesX; + public int TilesY; + public int PaddedTileWidth; + public int PaddedTileHeight; + public BuildTile[] Tiles; + + public BuildTile Get(int x, int y) { - public int Level; // Level index (0..n) - public int Width; - public int Height; - public int TilesX; - public int TilesY; - public int PaddedTileWidth; - public int PaddedTileHeight; - public BuildTile[] Tiles; - - public BuildTile Get(int x, int y) - { - if (x >= TilesX || y >= TilesY) - { - throw new ArgumentException("Invalid tile index"); - } - - var off = x + TilesX * y; - return Tiles[off]; - } - - public BuildTile GetOrCreateTile(int x, int y, int layer, GTSCodec codec, GTSDataType dataType) + if (x >= TilesX || y >= TilesY) { - if (x >= TilesX || y >= TilesY) - { - throw new ArgumentException("Invalid tile index"); - } - - var off = x + TilesX * y; - if (Tiles[off] == null) - { - Tiles[off] = new BuildTile - { - Image = new BC5Image(PaddedTileWidth, PaddedTileHeight), - Layer = layer, - Codec = codec, - DataType = dataType - }; - } - - return Tiles[off]; + throw new ArgumentException("Invalid tile index"); } - } - public class BuildTexture - { - public string Name; - public int Width; - public int Height; - // Position at level 0 (including FirstMip) - public int X; - public int Y; - public List Layers; + var off = x + TilesX * y; + return Tiles[off]; } - public class TileSetBuildData + public BuildTile GetOrCreateTile(int x, int y, int layer, GTSCodec codec, GTSDataType 
dataType) { - public List Layers; - public string GTSName; - // Size of tile including borders - public int PaddedTileWidth; - public int PaddedTileHeight; - // Size of tile excluding borders from adjacent tiles - public int RawTileWidth; - public int RawTileHeight; - // Size of tile border - public int TileBorder; - // Total size of tileset in pixels - public int TotalWidth; - public int TotalHeight; - // Number of mip levels to save in page files - public int PageFileLevels; - // Number of mip levels to generate - public int BuildLevels; - // First mip level to save in a separate mip page file - public int MipFileStartLevel; - } - - public class ParameterBlock - { - public GTSCodec Codec; - public GTSDataType DataType; - public TileCompressionMethod Compression; - public UInt32 ParameterBlockID; - } - - public class ParameterBlockContainer - { - public List ParameterBlocks = []; - private UInt32 NextParameterBlockID = 1; - - public ParameterBlock GetOrAdd(GTSCodec codec, GTSDataType dataType, TileCompressionMethod compression) + if (x >= TilesX || y >= TilesY) { - foreach (var block in ParameterBlocks) - { - if (block.Codec == codec && block.DataType == dataType && block.Compression == compression) - { - return block; - } - } + throw new ArgumentException("Invalid tile index"); + } - var newBlock = new ParameterBlock + var off = x + TilesX * y; + if (Tiles[off] == null) + { + Tiles[off] = new BuildTile { + Image = new BC5Image(PaddedTileWidth, PaddedTileHeight), + Layer = layer, Codec = codec, - DataType = dataType, - Compression = compression, - ParameterBlockID = NextParameterBlockID++ + DataType = dataType }; - ParameterBlocks.Add(newBlock); - - return newBlock; } + + return Tiles[off]; } +} - public class TileSetBuilder - { - private readonly TileSetBuildData BuildData; - private readonly TileSetConfiguration Config; - private readonly TileCompressor Compressor; - private readonly ParameterBlockContainer ParameterBlocks; +public class BuildTexture +{ + public string Name; + public int Width; + public int Height; + // Position at level 0 (including FirstMip) + public int X; + public int Y; + public List Layers; +} - public VirtualTileSet TileSet; - public List Textures; - public List PageFiles; +public class TileSetBuildData +{ + public List Layers; + public string GTSName; + // Size of tile including borders + public int PaddedTileWidth; + public int PaddedTileHeight; + // Size of tile excluding borders from adjacent tiles + public int RawTileWidth; + public int RawTileHeight; + // Size of tile border + public int TileBorder; + // Total size of tileset in pixels + public int TotalWidth; + public int TotalHeight; + // Number of mip levels to save in page files + public int PageFileLevels; + // Number of mip levels to generate + public int BuildLevels; + // First mip level to save in a separate mip page file + public int MipFileStartLevel; +} - public delegate void BuildStepDelegate(string step); - public BuildStepDelegate OnStepStarted = delegate { }; - public delegate void BuildStepProgressDelegate(int numerator, int denumerator); - public BuildStepProgressDelegate OnStepProgress = delegate { }; +public class ParameterBlock +{ + public GTSCodec Codec; + public GTSDataType DataType; + public TileCompressionMethod Compression; + public UInt32 ParameterBlockID; +} - private List PerLevelFlatTiles; +public class ParameterBlockContainer +{ + public List ParameterBlocks = []; + private UInt32 NextParameterBlockID = 1; - public TileSetBuilder(TileSetConfiguration config) + public 
ParameterBlock GetOrAdd(GTSCodec codec, GTSDataType dataType, TileCompressionMethod compression) + { + foreach (var block in ParameterBlocks) { - BuildData = new TileSetBuildData + if (block.Codec == codec && block.DataType == dataType && block.Compression == compression) { - Layers = config.Layers, - GTSName = config.GTSName, - PaddedTileWidth = config.TileWidth + 2 * config.TileBorder, - PaddedTileHeight = config.TileHeight + 2 * config.TileBorder, - RawTileWidth = config.TileWidth, - RawTileHeight = config.TileHeight, - TileBorder = config.TileBorder - }; - Config = config; - - Compressor = new TileCompressor(); - ParameterBlocks = new ParameterBlockContainer(); - Compressor.Preference = Config.Compression; - Compressor.ParameterBlocks = ParameterBlocks; - - Textures = []; + return block; + } } - public void AddTexture(string name, List texturePaths) + var newBlock = new ParameterBlock { - var tex = new BuildTexture - { - Name = name, - Width = 0, - Height = 0, - X = 0, - Y = 0, - Layers = [] - }; - - foreach (var path in texturePaths) - { - if (path != null) - { - var mips = new BC5Mips(); - mips.LoadDDS(path); - if (mips.Mips.Count <= 1) - { - throw new InvalidDataException($"Texture must include mipmaps: {path}"); - } + Codec = codec, + DataType = dataType, + Compression = compression, + ParameterBlockID = NextParameterBlockID++ + }; + ParameterBlocks.Add(newBlock); + + return newBlock; + } +} - var mip = mips.Mips[0]; - if ((mip.Width % BuildData.RawTileWidth) != 0 - || (mip.Height % BuildData.RawTileHeight) != 0) - { - throw new InvalidDataException($"Texture {path} size ({mip.Width}x{mip.Height}) must be a multiple of the virtual tile size ({BuildData.RawTileWidth}x{BuildData.RawTileHeight})"); - } +public class TileSetBuilder +{ + private readonly TileSetBuildData BuildData; + private readonly TileSetConfiguration Config; + private readonly TileCompressor Compressor; + private readonly ParameterBlockContainer ParameterBlocks; - if ((mip.Width & (mip.Width - 1)) != 0 - || (mip.Height & (mip.Height - 1)) != 0) - { - throw new InvalidDataException($"Texture {path} size ({mip.Width}x{mip.Height}) must be a multiple of two"); - } + public VirtualTileSet TileSet; + public List Textures; + public List PageFiles; - tex.Layers.Add(new BuildLayerTexture - { - Path = path, - FirstMip = 0, - Mips = mips - }); - } - else - { - tex.Layers.Add(null); - } - } + public delegate void BuildStepDelegate(string step); + public BuildStepDelegate OnStepStarted = delegate { }; + public delegate void BuildStepProgressDelegate(int numerator, int denumerator); + public BuildStepProgressDelegate OnStepProgress = delegate { }; - // Figure out top-level size for texture across all layers - foreach (var layer in tex.Layers) - { - if (layer == null) continue; + private List PerLevelFlatTiles; - tex.Width = Math.Max(tex.Width, layer.Mips.Mips[0].Width); - tex.Height = Math.Max(tex.Height, layer.Mips.Mips[0].Height); - } + public TileSetBuilder(TileSetConfiguration config) + { + BuildData = new TileSetBuildData + { + Layers = config.Layers, + GTSName = config.GTSName, + PaddedTileWidth = config.TileWidth + 2 * config.TileBorder, + PaddedTileHeight = config.TileHeight + 2 * config.TileBorder, + RawTileWidth = config.TileWidth, + RawTileHeight = config.TileHeight, + TileBorder = config.TileBorder + }; + Config = config; + + Compressor = new TileCompressor(); + ParameterBlocks = new ParameterBlockContainer(); + Compressor.Preference = Config.Compression; + Compressor.ParameterBlocks = ParameterBlocks; + + Textures = 
[]; + } - // Adjust first layer index for textures - foreach (var layer in tex.Layers) + public void AddTexture(string name, List texturePaths) + { + var tex = new BuildTexture + { + Name = name, + Width = 0, + Height = 0, + X = 0, + Y = 0, + Layers = [] + }; + + foreach (var path in texturePaths) + { + if (path != null) { - if (layer == null) continue; - - var mip = layer.Mips.Mips[0]; - if (mip.Width > tex.Width || mip.Height > tex.Height) + var mips = new BC5Mips(); + mips.LoadDDS(path); + if (mips.Mips.Count <= 1) { - throw new InvalidDataException($"Top-level texture size mismatch; texture {layer.Path} is {mip.Width}x{mip.Height}, size across all layers is {tex.Width}x{tex.Height}"); + throw new InvalidDataException($"Texture must include mipmaps: {path}"); } - var mulW = tex.Width / mip.Width; - var mulH = tex.Height / mip.Height; - - if ((tex.Width % mip.Width) != 0 || (tex.Height % mip.Height) != 0 - || mulW != mulH - // Check if total layer size size is a power-of-two of the texture size - || (mulW & (mulW - 1)) != 0) + var mip = mips.Mips[0]; + if ((mip.Width % BuildData.RawTileWidth) != 0 + || (mip.Height % BuildData.RawTileHeight) != 0) { - throw new InvalidDataException($"Texture sizes within all layers should be multiples of each other; texture {layer.Path} is {mip.Width}x{mip.Height}, size across all layers is {tex.Width}x{tex.Height}"); + throw new InvalidDataException($"Texture {path} size ({mip.Width}x{mip.Height}) must be a multiple of the virtual tile size ({BuildData.RawTileWidth}x{BuildData.RawTileHeight})"); } - // Adjust first mip index based on texture size - while (mulW > 1) + if ((mip.Width & (mip.Width - 1)) != 0 + || (mip.Height & (mip.Height - 1)) != 0) { - mulW >>= 1; - layer.FirstMip++; + throw new InvalidDataException($"Texture {path} size ({mip.Width}x{mip.Height}) must be a multiple of two"); } + + tex.Layers.Add(new BuildLayerTexture + { + Path = path, + FirstMip = 0, + Mips = mips + }); + } + else + { + tex.Layers.Add(null); } + } - Console.WriteLine($"Added GTex {tex.Name} ({tex.Width}x{tex.Height})"); - Textures.Add(tex); + // Figure out top-level size for texture across all layers + foreach (var layer in tex.Layers) + { + if (layer == null) continue; + + tex.Width = Math.Max(tex.Width, layer.Mips.Mips[0].Width); + tex.Height = Math.Max(tex.Height, layer.Mips.Mips[0].Height); } - private void BuildParameterBlocks() + // Adjust first layer index for textures + foreach (var layer in tex.Layers) { - var blocks = ParameterBlocks.ParameterBlocks; - TileSet.ParameterBlockHeaders = new GTSParameterBlockHeader[blocks.Count]; - TileSet.ParameterBlocks = []; + if (layer == null) continue; - for (var i = 0; i < blocks.Count; i++) + var mip = layer.Mips.Mips[0]; + if (mip.Width > tex.Width || mip.Height > tex.Height) { - var block = blocks[i]; - ref var header = ref TileSet.ParameterBlockHeaders[i]; - - header.ParameterBlockID = block.ParameterBlockID; - header.Codec = block.Codec; + throw new InvalidDataException($"Top-level texture size mismatch; texture {layer.Path} is {mip.Width}x{mip.Height}, size across all layers is {tex.Width}x{tex.Height}"); + } - switch (block.Codec) - { - case GTSCodec.BC: - header.ParameterBlockSize = (uint)Marshal.SizeOf(typeof(GTSBCParameterBlock)); + var mulW = tex.Width / mip.Width; + var mulH = tex.Height / mip.Height; - string compression1, compression2; - switch (block.Compression) - { - case TileCompressionMethod.Raw: - compression1 = "raw"; - compression2 = ""; - break; - - case TileCompressionMethod.LZ4: - compression1 = 
"lz4"; - compression2 = "lz40.1.0"; - break; - - case TileCompressionMethod.LZ77: - compression1 = "lz77"; - compression2 = "fastlz0.1.0"; - break; - - default: - throw new ArgumentException("Unsupported compression method"); - } + if ((tex.Width % mip.Width) != 0 || (tex.Height % mip.Height) != 0 + || mulW != mulH + // Check if total layer size size is a power-of-two of the texture size + || (mulW & (mulW - 1)) != 0) + { + throw new InvalidDataException($"Texture sizes within all layers should be multiples of each other; texture {layer.Path} is {mip.Width}x{mip.Height}, size across all layers is {tex.Width}x{tex.Height}"); + } - TileSet.ParameterBlocks[block.ParameterBlockID] = new GTSBCParameterBlock - { - Version = 0x238e, - CompressionName1 = compression1, - CompressionName2 = compression2, - B = 0, - C1 = 0, - C2 = 0, - BCField3 = 0, - DataType = (Byte)block.DataType, - D = 0, - FourCC = 0x20334342, - E1 = 0, - SaveMip = 1, - E3 = 0, - E4 = 0, - F = 0 - }; - break; - - case GTSCodec.Uniform: - header.ParameterBlockSize = (uint)Marshal.SizeOf(typeof(GTSUniformParameterBlock)); - TileSet.ParameterBlocks[block.ParameterBlockID] = new GTSUniformParameterBlock - { - Version = 0x42, - A_Unused = 0, - Width = 4, - Height = 1, - DataType = block.DataType - }; - break; - - default: - throw new ArgumentException("Unsupported codec type"); - } + // Adjust first mip index based on texture size + while (mulW > 1) + { + mulW >>= 1; + layer.FirstMip++; } } - private void BuildFourCC() - { - var fourCC = new TileSetFourCC(); - var meta = FourCCElement.Make("META"); - fourCC.Root = meta; - - var atlas = FourCCElement.Make("ATLS"); - meta.Children.Add(atlas); - - var textures = FourCCElement.Make("TXTS"); - atlas.Children.Add(textures); + Console.WriteLine($"Added GTex {tex.Name} ({tex.Width}x{tex.Height})"); + Textures.Add(tex); + } - foreach (var texture in Textures) - { - var tex = FourCCElement.Make("TXTR"); - textures.Children.Add(tex); - tex.Children.Add(FourCCElement.Make("NAME", texture.Name)); - tex.Children.Add(FourCCElement.Make("WDTH", (uint)texture.Width)); - tex.Children.Add(FourCCElement.Make("HGHT", (uint)texture.Height)); - tex.Children.Add(FourCCElement.Make("XXXX", (uint)texture.X)); - tex.Children.Add(FourCCElement.Make("YYYY", (uint)texture.Y)); - tex.Children.Add(FourCCElement.Make("ADDR", "None")); - tex.Children.Add(FourCCElement.Make("SRGB", FourCCElementType.BinaryInt, [ - 0x01, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00 - ])); - tex.Children.Add(FourCCElement.Make("THMB", FourCCElementType.BinaryGuid, [ - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - ])); - } + private void BuildParameterBlocks() + { + var blocks = ParameterBlocks.ParameterBlocks; + TileSet.ParameterBlockHeaders = new GTSParameterBlockHeader[blocks.Count]; + TileSet.ParameterBlocks = []; - var project = FourCCElement.Make("PROJ", ""); - meta.Children.Add(project); + for (var i = 0; i < blocks.Count; i++) + { + var block = blocks[i]; + ref var header = ref TileSet.ParameterBlockHeaders[i]; - var layers = FourCCElement.Make("LINF"); - meta.Children.Add(layers); + header.ParameterBlockID = block.ParameterBlockID; + header.Codec = block.Codec; - for (var i = 0; i < BuildData.Layers.Count; i++) + switch (block.Codec) { - var layerInfo = 
FourCCElement.Make("LAYR"); - layers.Children.Add(layerInfo); - layerInfo.Children.Add(FourCCElement.Make("INDX", (uint)i)); - layerInfo.Children.Add(FourCCElement.Make("TYPE", "BC3")); - layerInfo.Children.Add(FourCCElement.Make("NAME", BuildData.Layers[i].Name)); - } + case GTSCodec.BC: + header.ParameterBlockSize = (uint)Marshal.SizeOf(typeof(GTSBCParameterBlock)); - var info = FourCCElement.Make("INFO"); - meta.Children.Add(info); + string compression1, compression2; + switch (block.Compression) + { + case TileCompressionMethod.Raw: + compression1 = "raw"; + compression2 = ""; + break; + + case TileCompressionMethod.LZ4: + compression1 = "lz4"; + compression2 = "lz40.1.0"; + break; + + case TileCompressionMethod.LZ77: + compression1 = "lz77"; + compression2 = "fastlz0.1.0"; + break; + + default: + throw new ArgumentException("Unsupported compression method"); + } - var compiler = FourCCElement.Make("COMP"); - info.Children.Add(compiler); + TileSet.ParameterBlocks[block.ParameterBlockID] = new GTSBCParameterBlock + { + Version = 0x238e, + CompressionName1 = compression1, + CompressionName2 = compression2, + B = 0, + C1 = 0, + C2 = 0, + BCField3 = 0, + DataType = (Byte)block.DataType, + D = 0, + FourCC = 0x20334342, + E1 = 0, + SaveMip = 1, + E3 = 0, + E4 = 0, + F = 0 + }; + break; + + case GTSCodec.Uniform: + header.ParameterBlockSize = (uint)Marshal.SizeOf(typeof(GTSUniformParameterBlock)); + TileSet.ParameterBlocks[block.ParameterBlockID] = new GTSUniformParameterBlock + { + Version = 0x42, + A_Unused = 0, + Width = 4, + Height = 1, + DataType = block.DataType + }; + break; + + default: + throw new ArgumentException("Unsupported codec type"); + } + } + } - var compVer = FourCCElement.Make("CMPW"); - compiler.Children.Add(compVer); - compVer.Children.Add(FourCCElement.Make("MAJR", 5)); - compVer.Children.Add(FourCCElement.Make("MINR", 0)); + private void BuildFourCC() + { + var fourCC = new TileSetFourCC(); + var meta = FourCCElement.Make("META"); + fourCC.Root = meta; - var buildVer = FourCCElement.Make("BLDV"); - compiler.Children.Add(buildVer); - buildVer.Children.Add(FourCCElement.Make("MAJR", 5)); - buildVer.Children.Add(FourCCElement.Make("MINR", 1)); - buildVer.Children.Add(FourCCElement.Make("BINF", "LSLib")); + var atlas = FourCCElement.Make("ATLS"); + meta.Children.Add(atlas); - info.Children.Add(FourCCElement.Make("DATE", "02-08-2023 07:49:30.7662814 PM +02:00")); - info.Children.Add(FourCCElement.Make("BLKS", "4096")); - info.Children.Add(FourCCElement.Make("TILE", "Software")); - info.Children.Add(FourCCElement.Make("BDPR", "default")); - info.Children.Add(FourCCElement.Make("LTMP", 0)); + var textures = FourCCElement.Make("TXTS"); + atlas.Children.Add(textures); - TileSet.FourCCMetadata = fourCC; + foreach (var texture in Textures) + { + var tex = FourCCElement.Make("TXTR"); + textures.Children.Add(tex); + tex.Children.Add(FourCCElement.Make("NAME", texture.Name)); + tex.Children.Add(FourCCElement.Make("WDTH", (uint)texture.Width)); + tex.Children.Add(FourCCElement.Make("HGHT", (uint)texture.Height)); + tex.Children.Add(FourCCElement.Make("XXXX", (uint)texture.X)); + tex.Children.Add(FourCCElement.Make("YYYY", (uint)texture.Y)); + tex.Children.Add(FourCCElement.Make("ADDR", "None")); + tex.Children.Add(FourCCElement.Make("SRGB", FourCCElementType.BinaryInt, [ + 0x01, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00 + ])); + tex.Children.Add(FourCCElement.Make("THMB", FourCCElementType.BinaryGuid, [ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + ])); } - private void CalculateGeometry() - { - var geom = new TileSetGeometryCalculator - { - BuildData = BuildData, - Textures = Textures - }; - geom.Update(); + var project = FourCCElement.Make("PROJ", ""); + meta.Children.Add(project); - Console.WriteLine($"Tile set geometry: {BuildData.TotalWidth}x{BuildData.TotalHeight} ({BuildData.TotalWidth/BuildData.RawTileWidth}x{BuildData.TotalHeight/BuildData.RawTileHeight} tiles), {BuildData.RawTileWidth}x{BuildData.RawTileHeight} tile size, {BuildData.PaddedTileWidth}x{BuildData.PaddedTileHeight} tile size with adjacency data"); - } + var layers = FourCCElement.Make("LINF"); + meta.Children.Add(layers); - private static int Clamp(int x, int min, int max) + for (var i = 0; i < BuildData.Layers.Count; i++) { - return Math.Min(max, Math.Max(x, min)); + var layerInfo = FourCCElement.Make("LAYR"); + layers.Children.Add(layerInfo); + layerInfo.Children.Add(FourCCElement.Make("INDX", (uint)i)); + layerInfo.Children.Add(FourCCElement.Make("TYPE", "BC3")); + layerInfo.Children.Add(FourCCElement.Make("NAME", BuildData.Layers[i].Name)); } - private void StitchPartialTile(BuildTile tile, BC5Image source, int tileX, int tileY, int sourceX, int sourceY, int width, int height) + var info = FourCCElement.Make("INFO"); + meta.Children.Add(info); + + var compiler = FourCCElement.Make("COMP"); + info.Children.Add(compiler); + + var compVer = FourCCElement.Make("CMPW"); + compiler.Children.Add(compVer); + compVer.Children.Add(FourCCElement.Make("MAJR", 5)); + compVer.Children.Add(FourCCElement.Make("MINR", 0)); + + var buildVer = FourCCElement.Make("BLDV"); + compiler.Children.Add(buildVer); + buildVer.Children.Add(FourCCElement.Make("MAJR", 5)); + buildVer.Children.Add(FourCCElement.Make("MINR", 1)); + buildVer.Children.Add(FourCCElement.Make("BINF", "LSLib")); + + info.Children.Add(FourCCElement.Make("DATE", "02-08-2023 07:49:30.7662814 PM +02:00")); + info.Children.Add(FourCCElement.Make("BLKS", "4096")); + info.Children.Add(FourCCElement.Make("TILE", "Software")); + info.Children.Add(FourCCElement.Make("BDPR", "default")); + info.Children.Add(FourCCElement.Make("LTMP", 0)); + + TileSet.FourCCMetadata = fourCC; + } + + private void CalculateGeometry() + { + var geom = new TileSetGeometryCalculator { - source.CopyTo( - tile.Image, - sourceX, sourceY, - tileX + BuildData.TileBorder, - tileY + BuildData.TileBorder, - width, height - ); - } + BuildData = BuildData, + Textures = Textures + }; + geom.Update(); + + Console.WriteLine($"Tile set geometry: {BuildData.TotalWidth}x{BuildData.TotalHeight} ({BuildData.TotalWidth/BuildData.RawTileWidth}x{BuildData.TotalHeight/BuildData.RawTileHeight} tiles), {BuildData.RawTileWidth}x{BuildData.RawTileHeight} tile size, {BuildData.PaddedTileWidth}x{BuildData.PaddedTileHeight} tile size with adjacency data"); + } + + private static int Clamp(int x, int min, int max) + { + return Math.Min(max, Math.Max(x, min)); + } + + private void StitchPartialTile(BuildTile tile, BC5Image source, int tileX, int tileY, int sourceX, int sourceY, int width, int height) + { + source.CopyTo( + tile.Image, + sourceX, sourceY, + tileX + BuildData.TileBorder, + tileY + BuildData.TileBorder, + width, height + ); + } - private void StitchTiles(BuildLevel level, int layer, int x, int y, BC5Image mip) + 
private void StitchTiles(BuildLevel level, int layer, int x, int y, BC5Image mip) + { + var layerInfo = BuildData.Layers[layer]; + var firstTileX = x / BuildData.RawTileWidth; + var firstTileY = y / BuildData.RawTileHeight; + var lastTileX = (x + mip.Width - 1) / BuildData.RawTileWidth; + var lastTileY = (y + mip.Height - 1) / BuildData.RawTileHeight; + + int sourceY = 0; + for (var tileY = firstTileY; tileY <= lastTileY; tileY++) { - var layerInfo = BuildData.Layers[layer]; - var firstTileX = x / BuildData.RawTileWidth; - var firstTileY = y / BuildData.RawTileHeight; - var lastTileX = (x + mip.Width - 1) / BuildData.RawTileWidth; - var lastTileY = (y + mip.Height - 1) / BuildData.RawTileHeight; - - int sourceY = 0; - for (var tileY = firstTileY; tileY <= lastTileY; tileY++) + int sourceX = 0; + for (var tileX = firstTileX; tileX <= lastTileX; tileX++) { - int sourceX = 0; - for (var tileX = firstTileX; tileX <= lastTileX; tileX++) - { - var tileXPixelsMin = tileX * BuildData.RawTileWidth; - var tileYPixelsMin = tileY * BuildData.RawTileHeight; - var tileXPixelsMax = tileXPixelsMin + BuildData.RawTileWidth; - var tileYPixelsMax = tileYPixelsMin + BuildData.RawTileHeight; + var tileXPixelsMin = tileX * BuildData.RawTileWidth; + var tileYPixelsMin = tileY * BuildData.RawTileHeight; + var tileXPixelsMax = tileXPixelsMin + BuildData.RawTileWidth; + var tileYPixelsMax = tileYPixelsMin + BuildData.RawTileHeight; - var stitchXMin = Clamp(x, tileXPixelsMin, tileXPixelsMax); - var stitchYMin = Clamp(y, tileYPixelsMin, tileYPixelsMax); - var stitchXMax = Clamp(x + mip.Width, tileXPixelsMin, tileXPixelsMax); - var stitchYMax = Clamp(y + mip.Height, tileYPixelsMin, tileYPixelsMax); + var stitchXMin = Clamp(x, tileXPixelsMin, tileXPixelsMax); + var stitchYMin = Clamp(y, tileYPixelsMin, tileYPixelsMax); + var stitchXMax = Clamp(x + mip.Width, tileXPixelsMin, tileXPixelsMax); + var stitchYMax = Clamp(y + mip.Height, tileYPixelsMin, tileYPixelsMax); - var stitchW = stitchXMax - stitchXMin; - var stitchH = stitchYMax - stitchYMin; + var stitchW = stitchXMax - stitchXMin; + var stitchH = stitchYMax - stitchYMin; - // GIGA JANK - if (stitchW >= 4 && stitchH >= 4) - { - var tile = level.GetOrCreateTile(tileX, tileY, layer, GTSCodec.BC, layerInfo.DataType); - StitchPartialTile(tile, mip, - stitchXMin - tileXPixelsMin, - stitchYMin - tileYPixelsMin, - sourceX, sourceY, - stitchXMax - stitchXMin, - stitchYMax - stitchYMin - ); - } - - sourceX += BuildData.RawTileWidth; + // GIGA JANK + if (stitchW >= 4 && stitchH >= 4) + { + var tile = level.GetOrCreateTile(tileX, tileY, layer, GTSCodec.BC, layerInfo.DataType); + StitchPartialTile(tile, mip, + stitchXMin - tileXPixelsMin, + stitchYMin - tileYPixelsMin, + sourceX, sourceY, + stitchXMax - stitchXMin, + stitchYMax - stitchYMin + ); } - sourceY += BuildData.RawTileHeight; + sourceX += BuildData.RawTileWidth; } + + sourceY += BuildData.RawTileHeight; } + } - private void BuildTextureTiles(BuildTexture texture, int level, int layerIndex, BuildLayer layer, BC5Image mip) + private void BuildTextureTiles(BuildTexture texture, int level, int layerIndex, BuildLayer layer, BC5Image mip) + { + var x = texture.X >> level; + var y = texture.Y >> level; + StitchTiles(layer.Levels[level], layerIndex, x, y, mip); + } + + private void BuildTextureTiles(BuildTexture texture, int layerIndex, BuildLayerTexture texLayer, BuildLayer layer) + { + if (texLayer.FirstMip + texLayer.Mips.Mips.Count < BuildData.BuildLevels) { - var x = texture.X >> level; - var y = texture.Y >> level; - 
StitchTiles(layer.Levels[level], layerIndex, x, y, mip); + throw new InvalidDataException($"Insufficient mip layers in texture '{texture.Name}', layer '{layer.Name}'; got {texLayer.FirstMip}+{texLayer.Mips.Mips.Count}, virtual texture has {BuildData.BuildLevels}"); } - private void BuildTextureTiles(BuildTexture texture, int layerIndex, BuildLayerTexture texLayer, BuildLayer layer) + for (var i = texLayer.FirstMip; i < BuildData.BuildLevels; i++) { - if (texLayer.FirstMip + texLayer.Mips.Mips.Count < BuildData.BuildLevels) - { - throw new InvalidDataException($"Insufficient mip layers in texture '{texture.Name}', layer '{layer.Name}'; got {texLayer.FirstMip}+{texLayer.Mips.Mips.Count}, virtual texture has {BuildData.BuildLevels}"); - } - - for (var i = texLayer.FirstMip; i < BuildData.BuildLevels; i++) - { - BuildTextureTiles(texture, i, layerIndex, layer, texLayer.Mips.Mips[i - texLayer.FirstMip]); - } + BuildTextureTiles(texture, i, layerIndex, layer, texLayer.Mips.Mips[i - texLayer.FirstMip]); } + } - private void BuildTiles() + private void BuildTiles() + { + foreach (var texture in Textures) { - foreach (var texture in Textures) + for (var layerIdx = 0; layerIdx < texture.Layers.Count; layerIdx++) { - for (var layerIdx = 0; layerIdx < texture.Layers.Count; layerIdx++) + if (texture.Layers[layerIdx] != null) { - if (texture.Layers[layerIdx] != null) - { - BuildTextureTiles(texture, layerIdx, texture.Layers[layerIdx], BuildData.Layers[layerIdx]); - } + BuildTextureTiles(texture, layerIdx, texture.Layers[layerIdx], BuildData.Layers[layerIdx]); } } } + } - private void BuildTileBorders(BuildLevel level) + private void BuildTileBorders(BuildLevel level) + { + for (var y = 0; y < level.TilesY; y++) { - for (var y = 0; y < level.TilesY; y++) + for (var x = 0; x < level.TilesX; x++) { - for (var x = 0; x < level.TilesX; x++) + var tile = level.Get(x, y); + if (tile == null) continue; + + // Left + if (x > 0) { - var tile = level.Get(x, y); - if (tile == null) continue; + level.Get(x - 1, y)?.Image.CopyTo(tile.Image, + BuildData.RawTileWidth, 0, + 0, 0, + BuildData.TileBorder, BuildData.PaddedTileHeight); + } - // Left - if (x > 0) - { - level.Get(x - 1, y)?.Image.CopyTo(tile.Image, - BuildData.RawTileWidth, 0, - 0, 0, - BuildData.TileBorder, BuildData.PaddedTileHeight); - } + // Right + if (x + 1 < level.TilesX) + { + level.Get(x + 1, y)?.Image.CopyTo(tile.Image, + BuildData.TileBorder, 0, + BuildData.RawTileWidth + BuildData.TileBorder, 0, + BuildData.TileBorder, BuildData.PaddedTileHeight); + } - // Right - if (x + 1 < level.TilesX) - { - level.Get(x + 1, y)?.Image.CopyTo(tile.Image, - BuildData.TileBorder, 0, - BuildData.RawTileWidth + BuildData.TileBorder, 0, - BuildData.TileBorder, BuildData.PaddedTileHeight); - } + // Top + if (y > 0) + { + level.Get(x, y - 1)?.Image.CopyTo(tile.Image, + 0, BuildData.RawTileHeight, + 0, 0, + BuildData.PaddedTileWidth, BuildData.TileBorder); + } - // Top - if (y > 0) - { - level.Get(x, y - 1)?.Image.CopyTo(tile.Image, - 0, BuildData.RawTileHeight, - 0, 0, - BuildData.PaddedTileWidth, BuildData.TileBorder); - } + // Bottom + if (y + 1 < level.TilesY) + { + level.Get(x, y + 1)?.Image.CopyTo(tile.Image, + 0, BuildData.TileBorder, + 0, BuildData.RawTileHeight + BuildData.TileBorder, + BuildData.PaddedTileWidth, BuildData.TileBorder); - // Bottom - if (y + 1 < level.TilesY) + // Bottom Left corner + if (x > 0) { - level.Get(x, y + 1)?.Image.CopyTo(tile.Image, - 0, BuildData.TileBorder, + level.Get(x - 1, y + 1)?.Image.CopyTo(tile.Image, + 
BuildData.RawTileWidth, BuildData.TileBorder, 0, BuildData.RawTileHeight + BuildData.TileBorder, - BuildData.PaddedTileWidth, BuildData.TileBorder); - - // Bottom Left corner - if (x > 0) - { - level.Get(x - 1, y + 1)?.Image.CopyTo(tile.Image, - BuildData.RawTileWidth, BuildData.TileBorder, - 0, BuildData.RawTileHeight + BuildData.TileBorder, - BuildData.TileBorder, BuildData.TileBorder); - } + BuildData.TileBorder, BuildData.TileBorder); + } - // Bottom Right corner - if (x + 1 < level.TilesX) - { - level.Get(x + 1, y + 1)?.Image.CopyTo(tile.Image, - BuildData.TileBorder, BuildData.TileBorder, - BuildData.RawTileWidth + BuildData.TileBorder, BuildData.RawTileHeight + BuildData.TileBorder, - BuildData.TileBorder, BuildData.TileBorder); - } + // Bottom Right corner + if (x + 1 < level.TilesX) + { + level.Get(x + 1, y + 1)?.Image.CopyTo(tile.Image, + BuildData.TileBorder, BuildData.TileBorder, + BuildData.RawTileWidth + BuildData.TileBorder, BuildData.RawTileHeight + BuildData.TileBorder, + BuildData.TileBorder, BuildData.TileBorder); } } } } + } - private void BuildTileBorders() + private void BuildTileBorders() + { + foreach (var layer in BuildData.Layers) { - foreach (var layer in BuildData.Layers) + foreach (var level in layer.Levels) { - foreach (var level in layer.Levels) - { - BuildTileBorders(level); - } + BuildTileBorders(level); } } + } - private void EmbedTileMips(BuildLayer layer, BuildLevel level) + private void EmbedTileMips(BuildLayer layer, BuildLevel level) + { + for (var y = 0; y < level.TilesY; y++) { - for (var y = 0; y < level.TilesY; y++) + for (var x = 0; x < level.TilesX; x++) { - for (var x = 0; x < level.TilesX; x++) + var tile = level.Get(x, y); + if (tile != null) { - var tile = level.Get(x, y); - if (tile != null) + if (level.Level + 1 < BuildData.BuildLevels) { - if (level.Level + 1 < BuildData.BuildLevels) + var nextLevelTile = layer.Levels[level.Level + 1].Get(x / 2, y / 2); + if (nextLevelTile != null) { - var nextLevelTile = layer.Levels[level.Level + 1].Get(x / 2, y / 2); - if (nextLevelTile != null) - { - var nextMip = new BC5Image(BuildData.PaddedTileWidth / 2, BuildData.PaddedTileHeight / 2); - var mipX = (x & 1) * (BuildData.RawTileWidth / 2) + BuildData.TileBorder / 2; - var mipY = (y & 1) * (BuildData.RawTileHeight / 2) + BuildData.TileBorder / 2; - nextLevelTile.Image.CopyTo(nextMip, mipX, mipY, 0, 0, BuildData.PaddedTileWidth / 2, BuildData.PaddedTileHeight / 2); - tile.EmbeddedMip = nextMip; - } + var nextMip = new BC5Image(BuildData.PaddedTileWidth / 2, BuildData.PaddedTileHeight / 2); + var mipX = (x & 1) * (BuildData.RawTileWidth / 2) + BuildData.TileBorder / 2; + var mipY = (y & 1) * (BuildData.RawTileHeight / 2) + BuildData.TileBorder / 2; + nextLevelTile.Image.CopyTo(nextMip, mipX, mipY, 0, 0, BuildData.PaddedTileWidth / 2, BuildData.PaddedTileHeight / 2); + tile.EmbeddedMip = nextMip; } } } } } + } - private void EmbedTileMips() + private void EmbedTileMips() + { + foreach (var layer in BuildData.Layers) { - foreach (var layer in BuildData.Layers) + foreach (var level in layer.Levels) { - foreach (var level in layer.Levels) + if (level.Level > 0 || Config.EmbedTopLevelMips) { - if (level.Level > 0 || Config.EmbedTopLevelMips) - { - EmbedTileMips(layer, level); - } + EmbedTileMips(layer, level); } } } + } - private void BuildGTSHeaders() + private void BuildGTSHeaders() + { + // Configuration-independent defaults + ref GTSHeader header = ref TileSet.Header; + header.Magic = GTSHeader.GRPGMagic; + header.Version = GTSHeader.CurrentVersion; + 
header.Unused = 0; + header.GUID = Guid.NewGuid(); + header.I6 = 0; + header.I7 = 0; + header.M = 0; + header.N = 0; + header.O = 0; + header.P = 0; + header.Q = 0; + header.R = 0; + header.S = 0; + header.PageSize = (UInt32)Config.PageSize; + header.XJJ = 0; + header.XKK = 0; + header.XLL = 0; + header.XMM = 0; + + header.TileWidth = BuildData.PaddedTileWidth; + header.TileHeight = BuildData.PaddedTileHeight; + header.TileBorder = BuildData.TileBorder; + } + + private void BuildPageFiles() + { + var builder = new PageFileSetBuilder(BuildData, Config); + if (Config.OneFilePerGTex) { - // Configuration-independent defaults - ref GTSHeader header = ref TileSet.Header; - header.Magic = GTSHeader.GRPGMagic; - header.Version = GTSHeader.CurrentVersion; - header.Unused = 0; - header.GUID = Guid.NewGuid(); - header.I6 = 0; - header.I7 = 0; - header.M = 0; - header.N = 0; - header.O = 0; - header.P = 0; - header.Q = 0; - header.R = 0; - header.S = 0; - header.PageSize = (UInt32)Config.PageSize; - header.XJJ = 0; - header.XKK = 0; - header.XLL = 0; - header.XMM = 0; - - header.TileWidth = BuildData.PaddedTileWidth; - header.TileHeight = BuildData.PaddedTileHeight; - header.TileBorder = BuildData.TileBorder; + PageFiles = builder.BuildFilePerGTex(Textures); } - - private void BuildPageFiles() + else { - var builder = new PageFileSetBuilder(BuildData, Config); - if (Config.OneFilePerGTex) - { - PageFiles = builder.BuildFilePerGTex(Textures); - } - else - { - PageFiles = builder.BuildSingleFile(); - } + PageFiles = builder.BuildSingleFile(); + } - TileSet.PageFileInfos = []; - uint firstPageIndex = 0; - foreach (var file in PageFiles) + TileSet.PageFileInfos = []; + uint firstPageIndex = 0; + foreach (var file in PageFiles) + { + var fileInfo = new PageFileInfo { - var fileInfo = new PageFileInfo + Meta = new GTSPageFileInfo { - Meta = new GTSPageFileInfo - { - FileName = file.FileName, - NumPages = (uint)file.Pages.Count, - Checksum = file.Checksum, - F = 2 - }, - FirstPageIndex = firstPageIndex, - FileName = file.FileName - }; - TileSet.PageFileInfos.Add(fileInfo); - firstPageIndex += (uint)file.Pages.Count; - } + FileName = file.FileName, + NumPages = (uint)file.Pages.Count, + Checksum = file.Checksum, + F = 2 + }, + FirstPageIndex = firstPageIndex, + FileName = file.FileName + }; + TileSet.PageFileInfos.Add(fileInfo); + firstPageIndex += (uint)file.Pages.Count; } + } + + private void BuildGTS() + { + TileSet = new VirtualTileSet(); + BuildGTSHeaders(); - private void BuildGTS() + TileSet.TileSetLayers = new GTSTileSetLayer[BuildData.Layers.Count]; + for (int i = 0; i < BuildData.Layers.Count; i++) { - TileSet = new VirtualTileSet(); - BuildGTSHeaders(); + var layer = BuildData.Layers[i]; + ref var gtsLayer = ref TileSet.TileSetLayers[i]; + gtsLayer.DataType = layer.DataType; + gtsLayer.B = -1; + } - TileSet.TileSetLayers = new GTSTileSetLayer[BuildData.Layers.Count]; - for (int i = 0; i < BuildData.Layers.Count; i++) - { - var layer = BuildData.Layers[i]; - ref var gtsLayer = ref TileSet.TileSetLayers[i]; - gtsLayer.DataType = layer.DataType; - gtsLayer.B = -1; - } + var levels = BuildData.Layers[0].Levels; - var levels = BuildData.Layers[0].Levels; + TileSet.TileSetLevels = new GTSTileSetLevel[BuildData.PageFileLevels]; + for (int i = 0; i < BuildData.PageFileLevels; i++) + { + var level = levels[i]; + ref var gtsLevel = ref TileSet.TileSetLevels[i]; + gtsLevel.Width = (uint)level.TilesX; + gtsLevel.Height = (uint)level.TilesY; + } - TileSet.TileSetLevels = new 
GTSTileSetLevel[BuildData.PageFileLevels]; - for (int i = 0; i < BuildData.PageFileLevels; i++) - { - var level = levels[i]; - ref var gtsLevel = ref TileSet.TileSetLevels[i]; - gtsLevel.Width = (uint)level.TilesX; - gtsLevel.Height = (uint)level.TilesY; - } + OnStepStarted("Generating tile lists"); + BuildFlatTileList(); + OnStepStarted("Encoding tiles"); + CompressTiles(); - OnStepStarted("Generating tile lists"); - BuildFlatTileList(); - OnStepStarted("Encoding tiles"); - CompressTiles(); + OnStepStarted("Building page files"); + BuildPageFiles(); - OnStepStarted("Building page files"); - BuildPageFiles(); + OnStepStarted("Building metadata"); + BuildTileInfos(); + BuildTileDownsampleInfos(); - OnStepStarted("Building metadata"); - BuildTileInfos(); - BuildTileDownsampleInfos(); + BuildParameterBlocks(); + BuildFourCC(); + } - BuildParameterBlocks(); - BuildFourCC(); - } + public void BuildFlatTileList() + { + PerLevelFlatTiles = new List(BuildData.PageFileLevels); - public void BuildFlatTileList() + for (var level = 0; level < BuildData.PageFileLevels; level++) { - PerLevelFlatTiles = new List(BuildData.PageFileLevels); + var levelInfo = BuildData.Layers[0].Levels[level]; + var flatTiles = new BuildTile[levelInfo.TilesX * levelInfo.TilesY * BuildData.Layers.Count]; + PerLevelFlatTiles.Add(flatTiles); - for (var level = 0; level < BuildData.PageFileLevels; level++) + var tileIdx = 0; + for (var y = 0; y < levelInfo.TilesY; y++) { - var levelInfo = BuildData.Layers[0].Levels[level]; - var flatTiles = new BuildTile[levelInfo.TilesX * levelInfo.TilesY * BuildData.Layers.Count]; - PerLevelFlatTiles.Add(flatTiles); - - var tileIdx = 0; - for (var y = 0; y < levelInfo.TilesY; y++) + for (var x = 0; x < levelInfo.TilesX; x++) { - for (var x = 0; x < levelInfo.TilesX; x++) + for (var layer = 0; layer < BuildData.Layers.Count; layer++) { - for (var layer = 0; layer < BuildData.Layers.Count; layer++) + var tile = BuildData.Layers[layer].Levels[level].Get(x, y); + if (tile != null) { - var tile = BuildData.Layers[layer].Levels[level].Get(x, y); - if (tile != null) - { - tile.Layer = layer; - tile.Level = level; - tile.X = x; - tile.Y = y; - flatTiles[tileIdx] = tile; - } - else - { - flatTiles[tileIdx] = null; - } - - tileIdx++; + tile.Layer = layer; + tile.Level = level; + tile.X = x; + tile.Y = y; + flatTiles[tileIdx] = tile; + } + else + { + flatTiles[tileIdx] = null; } + + tileIdx++; } } } } + } - public void CompressTiles() - { - var numTiles = PerLevelFlatTiles.Sum(tiles => tiles.Length); - var nextTile = 0; + public void CompressTiles() + { + var numTiles = PerLevelFlatTiles.Sum(tiles => tiles.Length); + var nextTile = 0; - foreach (var level in PerLevelFlatTiles) + foreach (var level in PerLevelFlatTiles) + { + foreach (var tile in level) { - foreach (var tile in level) + OnStepProgress(nextTile++, numTiles); + if (tile != null) { - OnStepProgress(nextTile++, numTiles); - if (tile != null) - { - Compressor.Compress(tile); - } + Compressor.Compress(tile); } } } + } - public void BuildTileInfos() - { - TileSet.PerLevelFlatTileIndices = new List(BuildData.PageFileLevels); - PerLevelFlatTiles = new List(BuildData.PageFileLevels); + public void BuildTileInfos() + { + TileSet.PerLevelFlatTileIndices = new List(BuildData.PageFileLevels); + PerLevelFlatTiles = new List(BuildData.PageFileLevels); - var flatTileInfos = new List(); - var packedTileIds = new List(); + var flatTileInfos = new List(); + var packedTileIds = new List(); - for (var level = 0; level < BuildData.PageFileLevels; level++) - { 
- var levelInfo = BuildData.Layers[0].Levels[level]; - var flatTileIndices = new UInt32[levelInfo.TilesX * levelInfo.TilesY * BuildData.Layers.Count]; - TileSet.PerLevelFlatTileIndices.Add(flatTileIndices); + for (var level = 0; level < BuildData.PageFileLevels; level++) + { + var levelInfo = BuildData.Layers[0].Levels[level]; + var flatTileIndices = new UInt32[levelInfo.TilesX * levelInfo.TilesY * BuildData.Layers.Count]; + TileSet.PerLevelFlatTileIndices.Add(flatTileIndices); - var flatTiles = new BuildTile[levelInfo.TilesX * levelInfo.TilesY * BuildData.Layers.Count]; - PerLevelFlatTiles.Add(flatTiles); + var flatTiles = new BuildTile[levelInfo.TilesX * levelInfo.TilesY * BuildData.Layers.Count]; + PerLevelFlatTiles.Add(flatTiles); - var tileIdx = 0; - for (var y = 0; y < levelInfo.TilesY; y++) + var tileIdx = 0; + for (var y = 0; y < levelInfo.TilesY; y++) + { + for (var x = 0; x < levelInfo.TilesX; x++) { - for (var x = 0; x < levelInfo.TilesX; x++) + for (var layer = 0; layer < BuildData.Layers.Count; layer++) { - for (var layer = 0; layer < BuildData.Layers.Count; layer++) + var tile = BuildData.Layers[layer].Levels[level].Get(x, y); + if (tile != null) { - var tile = BuildData.Layers[layer].Levels[level].Get(x, y); - if (tile != null) - { - var flatTileIdx = (uint)flatTileInfos.Count; - var packedTileIdx = (uint)packedTileIds.Count; + var flatTileIdx = (uint)flatTileInfos.Count; + var packedTileIdx = (uint)packedTileIds.Count; - var packedTile = new GTSPackedTileID((uint)layer, (uint)level, (uint)x, (uint)y); - packedTileIds.Add(packedTile); + var packedTile = new GTSPackedTileID((uint)layer, (uint)level, (uint)x, (uint)y); + packedTileIds.Add(packedTile); - var tileInfo = new GTSFlatTileInfo - { - PageFileIndex = (UInt16)tile.PageFileIndex, - PageIndex = (UInt16)tile.PageIndex, - ChunkIndex = (UInt16)tile.ChunkIndex, - D = 1, - PackedTileIndex = packedTileIdx - }; - flatTileInfos.Add(tileInfo); - - flatTileIndices[tileIdx] = flatTileIdx; - flatTiles[tileIdx] = tile; - } - else + var tileInfo = new GTSFlatTileInfo { - flatTileIndices[tileIdx] = 0xFFFFFFFF; - flatTiles[tileIdx] = null; - } - - tileIdx++; + PageFileIndex = (UInt16)tile.PageFileIndex, + PageIndex = (UInt16)tile.PageIndex, + ChunkIndex = (UInt16)tile.ChunkIndex, + D = 1, + PackedTileIndex = packedTileIdx + }; + flatTileInfos.Add(tileInfo); + + flatTileIndices[tileIdx] = flatTileIdx; + flatTiles[tileIdx] = tile; } + else + { + flatTileIndices[tileIdx] = 0xFFFFFFFF; + flatTiles[tileIdx] = null; + } + + tileIdx++; } } } - - TileSet.PackedTileIDs = packedTileIds.ToArray(); - TileSet.FlatTileInfos = flatTileInfos.ToArray(); } - public void BuildTileDownsampleInfos() + TileSet.PackedTileIDs = packedTileIds.ToArray(); + TileSet.FlatTileInfos = flatTileInfos.ToArray(); + } + + public void BuildTileDownsampleInfos() + { + for (var level = 0; level < BuildData.PageFileLevels; level++) { - for (var level = 0; level < BuildData.PageFileLevels; level++) - { - var levelInfo = BuildData.Layers[0].Levels[level]; - var flatTileIndices = TileSet.PerLevelFlatTileIndices[level]; + var levelInfo = BuildData.Layers[0].Levels[level]; + var flatTileIndices = TileSet.PerLevelFlatTileIndices[level]; - var tileIdx = 0; - for (var y = 0; y < levelInfo.TilesY; y++) + var tileIdx = 0; + for (var y = 0; y < levelInfo.TilesY; y++) + { + for (var x = 0; x < levelInfo.TilesX; x++) { - for (var x = 0; x < levelInfo.TilesX; x++) + for (var layer = 0; layer < BuildData.Layers.Count; layer++) { - for (var layer = 0; layer < BuildData.Layers.Count; layer++) 
+ if (flatTileIndices[tileIdx] == 0xFFFFFFFF) { - if (flatTileIndices[tileIdx] == 0xFFFFFFFF) + for (var downsampleLevel = level + 1; downsampleLevel < BuildData.PageFileLevels; downsampleLevel++) { - for (var downsampleLevel = level + 1; downsampleLevel < BuildData.PageFileLevels; downsampleLevel++) + var downsampleX = x >> (downsampleLevel - level); + var downsampleY = y >> (downsampleLevel - level); + + var dsIndices = TileSet.PerLevelFlatTileIndices[downsampleLevel]; + var dsIndex = dsIndices[layer + BuildData.Layers.Count * (downsampleX + downsampleY * BuildData.Layers[layer].Levels[downsampleLevel].TilesX)]; + if ((dsIndex & 0x80000000) == 0) { - var downsampleX = x >> (downsampleLevel - level); - var downsampleY = y >> (downsampleLevel - level); - - var dsIndices = TileSet.PerLevelFlatTileIndices[downsampleLevel]; - var dsIndex = dsIndices[layer + BuildData.Layers.Count * (downsampleX + downsampleY * BuildData.Layers[layer].Levels[downsampleLevel].TilesX)]; - if ((dsIndex & 0x80000000) == 0) - { - flatTileIndices[tileIdx] = dsIndex | 0x80000000; - break; - } + flatTileIndices[tileIdx] = dsIndex | 0x80000000; + break; } } - - tileIdx++; } + + tileIdx++; } } } } + } - public void Build(string dir) + public void Build(string dir) + { + OnStepStarted("Calculating geometry"); + CalculateGeometry(); + OnStepStarted("Building tiles"); + BuildTiles(); + OnStepStarted("Building tile borders"); + BuildTileBorders(); + OnStepStarted("Embedding tile mipmaps"); + if (Config.EmbedMips) { - OnStepStarted("Calculating geometry"); - CalculateGeometry(); - OnStepStarted("Building tiles"); - BuildTiles(); - OnStepStarted("Building tile borders"); - BuildTileBorders(); - OnStepStarted("Embedding tile mipmaps"); - if (Config.EmbedMips) - { - EmbedTileMips(); - } + EmbedTileMips(); + } - BuildGTS(); + BuildGTS(); - long tileBytes = 0, embeddedMipBytes = 0, tileCompressedBytes = 0, pages = 0, chunks = 0, levelTiles = 0; + long tileBytes = 0, embeddedMipBytes = 0, tileCompressedBytes = 0, pages = 0, chunks = 0, levelTiles = 0; - foreach (var pageFile in PageFiles) + foreach (var pageFile in PageFiles) + { + pages += pageFile.Pages.Count; + foreach (var page in pageFile.Pages) { - pages += pageFile.Pages.Count; - foreach (var page in pageFile.Pages) - { - chunks += page.Chunks.Count; - } + chunks += page.Chunks.Count; } + } - foreach (var level in PerLevelFlatTiles) + foreach (var level in PerLevelFlatTiles) + { + levelTiles += level.Length; + foreach (var tile in level) { - levelTiles += level.Length; - foreach (var tile in level) + if (tile != null) { - if (tile != null) + tileBytes += tile.Image.Data.Length; + if (tile.EmbeddedMip != null) { - tileBytes += tile.Image.Data.Length; - if (tile.EmbeddedMip != null) - { - embeddedMipBytes += tile.EmbeddedMip.Data.Length; - } - - tileCompressedBytes += tile.Compressed.Data.Length; + embeddedMipBytes += tile.EmbeddedMip.Data.Length; } + + tileCompressedBytes += tile.Compressed.Data.Length; } } + } - Console.WriteLine($"Flat tiles: {levelTiles} total, {TileSet.FlatTileInfos.Length} in use"); - Console.WriteLine($"Generated {PageFiles.Count} page files, {pages} pages, {chunks} chunks"); - Console.WriteLine($"Raw tile data: {tileBytes / 1024} KB tiles, {embeddedMipBytes / 1024} KB embedded mips, {tileCompressedBytes / 1024} KB transcoded, {pages*Config.PageSize/1024} KB pages total"); + Console.WriteLine($"Flat tiles: {levelTiles} total, {TileSet.FlatTileInfos.Length} in use"); + Console.WriteLine($"Generated {PageFiles.Count} page files, {pages} pages, {chunks} 
chunks"); + Console.WriteLine($"Raw tile data: {tileBytes / 1024} KB tiles, {embeddedMipBytes / 1024} KB embedded mips, {tileCompressedBytes / 1024} KB transcoded, {pages*Config.PageSize/1024} KB pages total"); - OnStepStarted("Saving tile set"); - TileSet.Save(Path.Join(dir, BuildData.GTSName + ".gts")); + OnStepStarted("Saving tile set"); + TileSet.Save(Path.Join(dir, BuildData.GTSName + ".gts")); - foreach (var file in PageFiles) - { - OnStepStarted($"Saving page file: {file.FileName}"); - file.Save(Path.Join(dir, file.FileName)); - } + foreach (var file in PageFiles) + { + OnStepStarted($"Saving page file: {file.FileName}"); + file.Save(Path.Join(dir, file.FileName)); } } } diff --git a/LSLib/VirtualTextures/Compression.cs b/LSLib/VirtualTextures/Compression.cs index 8a59fad6..b473b8c5 100644 --- a/LSLib/VirtualTextures/Compression.cs +++ b/LSLib/VirtualTextures/Compression.cs @@ -2,153 +2,152 @@ using System.IO; using LZ4; -namespace LSLib.VirtualTextures +namespace LSLib.VirtualTextures; + +public enum TileCompressionMethod { - public enum TileCompressionMethod - { - Raw, - LZ4, - LZ77 - }; + Raw, + LZ4, + LZ77 +}; - public enum TileCompressionPreference - { - Uncompressed, - Best, - LZ4, - LZ77 - }; +public enum TileCompressionPreference +{ + Uncompressed, + Best, + LZ4, + LZ77 +}; - public class CompressedTile - { - public TileCompressionMethod Method; - public UInt32 ParameterBlockID; - public byte[] Data; - } +public class CompressedTile +{ + public TileCompressionMethod Method; + public UInt32 ParameterBlockID; + public byte[] Data; +} - public class TileCompressor - { - public ParameterBlockContainer ParameterBlocks; - public TileCompressionPreference Preference = TileCompressionPreference.Best; +public class TileCompressor +{ + public ParameterBlockContainer ParameterBlocks; + public TileCompressionPreference Preference = TileCompressionPreference.Best; - private byte[] GetRawBytes(BuildTile tile) + private byte[] GetRawBytes(BuildTile tile) + { + if (tile.EmbeddedMip == null) { - if (tile.EmbeddedMip == null) - { - return tile.Image.Data; - } - else - { - var data = new byte[tile.Image.Data.Length + tile.EmbeddedMip.Data.Length]; - Array.Copy(tile.Image.Data, 0, data, 0, tile.Image.Data.Length); - Array.Copy(tile.EmbeddedMip.Data, 0, data, tile.Image.Data.Length, tile.EmbeddedMip.Data.Length); - return data; - } + return tile.Image.Data; } - - public static byte[] CompressLZ4(byte[] raw) + else { - return LZ4Codec.EncodeHC(raw, 0, raw.Length); + var data = new byte[tile.Image.Data.Length + tile.EmbeddedMip.Data.Length]; + Array.Copy(tile.Image.Data, 0, data, 0, tile.Image.Data.Length); + Array.Copy(tile.EmbeddedMip.Data, 0, data, tile.Image.Data.Length, tile.EmbeddedMip.Data.Length); + return data; } + } - public static byte[] CompressLZ77(byte[] raw) - { - return Native.FastLZCompressor.Compress(raw, 2); - } + public static byte[] CompressLZ4(byte[] raw) + { + return LZ4Codec.EncodeHC(raw, 0, raw.Length); + } + + public static byte[] CompressLZ77(byte[] raw) + { + return Native.FastLZCompressor.Compress(raw, 2); + } - public byte[] Compress(byte[] uncompressed, out TileCompressionMethod method) + public byte[] Compress(byte[] uncompressed, out TileCompressionMethod method) + { + switch (Preference) { - switch (Preference) - { - case TileCompressionPreference.Uncompressed: - method = TileCompressionMethod.Raw; - return uncompressed; - - case TileCompressionPreference.Best: - var lz4 = CompressLZ4(uncompressed); - var lz77 = CompressLZ77(uncompressed); - if (lz4.Length <= 
lz77.Length) - { - method = TileCompressionMethod.LZ4; - return lz4; - } - else - { - method = TileCompressionMethod.LZ77; - return lz77; - } - - case TileCompressionPreference.LZ4: + case TileCompressionPreference.Uncompressed: + method = TileCompressionMethod.Raw; + return uncompressed; + + case TileCompressionPreference.Best: + var lz4 = CompressLZ4(uncompressed); + var lz77 = CompressLZ77(uncompressed); + if (lz4.Length <= lz77.Length) + { method = TileCompressionMethod.LZ4; - return CompressLZ4(uncompressed); - - case TileCompressionPreference.LZ77: + return lz4; + } + else + { method = TileCompressionMethod.LZ77; - return CompressLZ77(uncompressed); + return lz77; + } + + case TileCompressionPreference.LZ4: + method = TileCompressionMethod.LZ4; + return CompressLZ4(uncompressed); - default: - throw new ArgumentException("Invalid compression preference"); - } + case TileCompressionPreference.LZ77: + method = TileCompressionMethod.LZ77; + return CompressLZ77(uncompressed); + + default: + throw new ArgumentException("Invalid compression preference"); } + } - public CompressedTile Compress(BuildTile tile) + public CompressedTile Compress(BuildTile tile) + { + if (tile.Compressed != null) { - if (tile.Compressed != null) - { - return tile.Compressed; - } + return tile.Compressed; + } - var uncompressed = GetRawBytes(tile); - var compressed = new CompressedTile(); - compressed.Data = Compress(uncompressed, out compressed.Method); + var uncompressed = GetRawBytes(tile); + var compressed = new CompressedTile(); + compressed.Data = Compress(uncompressed, out compressed.Method); - var paramBlock = ParameterBlocks.GetOrAdd(tile.Codec, tile.DataType, compressed.Method); - compressed.ParameterBlockID = paramBlock.ParameterBlockID; + var paramBlock = ParameterBlocks.GetOrAdd(tile.Codec, tile.DataType, compressed.Method); + compressed.ParameterBlockID = paramBlock.ParameterBlockID; - tile.Compressed = compressed; - return compressed; - } + tile.Compressed = compressed; + return compressed; + } - public TileCompressionMethod GetMethod(string method1, string method2) + public TileCompressionMethod GetMethod(string method1, string method2) + { + if (method1 == "lz77" && method2 == "fastlz0.1.0") { - if (method1 == "lz77" && method2 == "fastlz0.1.0") - { - return TileCompressionMethod.LZ77; - } - else if (method1 == "lz4" && method2 == "lz40.1.0") - { - return TileCompressionMethod.LZ4; - } - else if (method1 == "raw") - { - return TileCompressionMethod.Raw; - } - else - { - throw new InvalidDataException($"Unsupported compression format: '{method1}', '{method2}'"); - } + return TileCompressionMethod.LZ77; } - - public byte[] Decompress(byte[] compressed, int outputSize, string method1, string method2) + else if (method1 == "lz4" && method2 == "lz40.1.0") + { + return TileCompressionMethod.LZ4; + } + else if (method1 == "raw") + { + return TileCompressionMethod.Raw; + } + else { - return Decompress(compressed, outputSize, GetMethod(method1, method2)); + throw new InvalidDataException($"Unsupported compression format: '{method1}', '{method2}'"); } + } - public byte[] Decompress(byte[] compressed, int outputSize, TileCompressionMethod method) + public byte[] Decompress(byte[] compressed, int outputSize, string method1, string method2) + { + return Decompress(compressed, outputSize, GetMethod(method1, method2)); + } + + public byte[] Decompress(byte[] compressed, int outputSize, TileCompressionMethod method) + { + switch (method) { - switch (method) - { - case TileCompressionMethod.Raw: - return 
compressed; - case TileCompressionMethod.LZ4: - var decompressed = new byte[outputSize]; - LZ4Codec.Decode(compressed, 0, compressed.Length, decompressed, 0, outputSize, true); - return decompressed; - case TileCompressionMethod.LZ77: - return Native.FastLZCompressor.Decompress(compressed, outputSize); - default: - throw new ArgumentException(); - } + case TileCompressionMethod.Raw: + return compressed; + case TileCompressionMethod.LZ4: + var decompressed = new byte[outputSize]; + LZ4Codec.Decode(compressed, 0, compressed.Length, decompressed, 0, outputSize, true); + return decompressed; + case TileCompressionMethod.LZ77: + return Native.FastLZCompressor.Decompress(compressed, outputSize); + default: + throw new ArgumentException(); } } } diff --git a/LSLib/VirtualTextures/Geometry.cs b/LSLib/VirtualTextures/Geometry.cs index 53fee2bc..596e8e63 100644 --- a/LSLib/VirtualTextures/Geometry.cs +++ b/LSLib/VirtualTextures/Geometry.cs @@ -1,183 +1,182 @@ using System; using System.Collections.Generic; -namespace LSLib.VirtualTextures +namespace LSLib.VirtualTextures; + +public class TileSetGeometryCalculator { - public class TileSetGeometryCalculator - { - public List Textures; - public TileSetBuildData BuildData; + public List Textures; + public TileSetBuildData BuildData; - private int PlacementTileWidth = 0x1000; - private int PlacementTileHeight = 0x1000; - private int PlacementGridWidth; - private int PlacementGridHeight; - private BuildTexture[] PlacementGrid; + private int PlacementTileWidth = 0x1000; + private int PlacementTileHeight = 0x1000; + private int PlacementGridWidth; + private int PlacementGridHeight; + private BuildTexture[] PlacementGrid; + + private void ResizePlacementGrid(int w, int h) + { + PlacementGridWidth = w; + PlacementGridHeight = h; + PlacementGrid = new BuildTexture[w * h]; + } - private void ResizePlacementGrid(int w, int h) + private void GrowPlacementGrid() + { + if (PlacementGridWidth <= PlacementGridHeight) { - PlacementGridWidth = w; - PlacementGridHeight = h; - PlacementGrid = new BuildTexture[w * h]; + ResizePlacementGrid(PlacementGridWidth * 2, PlacementGridHeight); } - - private void GrowPlacementGrid() + else { - if (PlacementGridWidth <= PlacementGridHeight) - { - ResizePlacementGrid(PlacementGridWidth * 2, PlacementGridHeight); - } - else - { - ResizePlacementGrid(PlacementGridWidth, PlacementGridHeight * 2); - } + ResizePlacementGrid(PlacementGridWidth, PlacementGridHeight * 2); } + } - private bool TryToPlaceTexture(BuildTexture texture, int texX, int texY) - { - var width = texture.Width / BuildData.RawTileWidth / PlacementTileWidth; - var height = texture.Height / BuildData.RawTileHeight / PlacementTileHeight; + private bool TryToPlaceTexture(BuildTexture texture, int texX, int texY) + { + var width = texture.Width / BuildData.RawTileWidth / PlacementTileWidth; + var height = texture.Height / BuildData.RawTileHeight / PlacementTileHeight; - for (var y = texY; y < texY + height; y++) + for (var y = texY; y < texY + height; y++) + { + for (var x = texX; x < texX + width; x++) { - for (var x = texX; x < texX + width; x++) + if (PlacementGrid[x + y * PlacementGridWidth] != null) { - if (PlacementGrid[x + y * PlacementGridWidth] != null) - { - return false; - } + return false; } } + } - texture.X = texX * PlacementTileWidth * BuildData.RawTileWidth; - texture.Y = texY * PlacementTileHeight * BuildData.RawTileHeight; + texture.X = texX * PlacementTileWidth * BuildData.RawTileWidth; + texture.Y = texY * PlacementTileHeight * BuildData.RawTileHeight; - 
for (var y = texY; y < texY + height; y++) + for (var y = texY; y < texY + height; y++) + { + for (var x = texX; x < texX + width; x++) { - for (var x = texX; x < texX + width; x++) - { - PlacementGrid[x + y * PlacementGridWidth] = texture; - } + PlacementGrid[x + y * PlacementGridWidth] = texture; } - - return true; } - private bool TryToPlaceTexture(BuildTexture texture) - { - var width = texture.Width / BuildData.RawTileWidth / PlacementTileWidth; - var height = texture.Height / BuildData.RawTileHeight / PlacementTileHeight; + return true; + } + + private bool TryToPlaceTexture(BuildTexture texture) + { + var width = texture.Width / BuildData.RawTileWidth / PlacementTileWidth; + var height = texture.Height / BuildData.RawTileHeight / PlacementTileHeight; - for (var y = 0; y < PlacementGridHeight - height + 1; y++) + for (var y = 0; y < PlacementGridHeight - height + 1; y++) + { + for (var x = 0; x < PlacementGridWidth - width + 1; x++) { - for (var x = 0; x < PlacementGridWidth - width + 1; x++) + if (TryToPlaceTexture(texture, x, y)) { - if (TryToPlaceTexture(texture, x, y)) - { - return true; - } + return true; } } - - return false; } - private bool PlaceAllTextures() + return false; + } + + private bool PlaceAllTextures() + { + foreach (var tex in Textures) { - foreach (var tex in Textures) + if (!TryToPlaceTexture(tex)) { - if (!TryToPlaceTexture(tex)) - { - return false; - } + return false; } - - return true; } - private void DoAutoPlacement() + return true; + } + + private void DoAutoPlacement() + { + var startingX = 0; + var startingY = 0; + + foreach (var tex in Textures) { - var startingX = 0; - var startingY = 0; + PlacementTileWidth = Math.Min(PlacementTileWidth, tex.Width / BuildData.RawTileWidth); + PlacementTileHeight = Math.Min(PlacementTileHeight, tex.Height / BuildData.RawTileHeight); + startingX = Math.Max(startingX, tex.Width / BuildData.RawTileWidth); + startingY = Math.Max(startingY, tex.Height / BuildData.RawTileHeight); + } - foreach (var tex in Textures) - { - PlacementTileWidth = Math.Min(PlacementTileWidth, tex.Width / BuildData.RawTileWidth); - PlacementTileHeight = Math.Min(PlacementTileHeight, tex.Height / BuildData.RawTileHeight); - startingX = Math.Max(startingX, tex.Width / BuildData.RawTileWidth); - startingY = Math.Max(startingY, tex.Height / BuildData.RawTileHeight); - } + ResizePlacementGrid(startingX / PlacementTileWidth, startingY / PlacementTileHeight); - ResizePlacementGrid(startingX / PlacementTileWidth, startingY / PlacementTileHeight); + while (!PlaceAllTextures()) + { + GrowPlacementGrid(); + } - while (!PlaceAllTextures()) - { - GrowPlacementGrid(); - } + BuildData.TotalWidth = PlacementTileWidth * PlacementGridWidth * BuildData.RawTileWidth; + BuildData.TotalHeight = PlacementTileHeight * PlacementGridHeight * BuildData.RawTileWidth; + } - BuildData.TotalWidth = PlacementTileWidth * PlacementGridWidth * BuildData.RawTileWidth; - BuildData.TotalHeight = PlacementTileHeight * PlacementGridHeight * BuildData.RawTileWidth; + private void UpdateGeometry() + { + var minTexSize = 0x10000; + foreach (var tex in Textures) + { + minTexSize = Math.Min(minTexSize, Math.Min(tex.Height, tex.Width)); } - private void UpdateGeometry() + BuildData.MipFileStartLevel = 0; + while (minTexSize >= BuildData.RawTileHeight) { - var minTexSize = 0x10000; - foreach (var tex in Textures) - { - minTexSize = Math.Min(minTexSize, Math.Min(tex.Height, tex.Width)); - } + BuildData.MipFileStartLevel++; + minTexSize >>= 1; + } - BuildData.MipFileStartLevel = 0; - while 
(minTexSize >= BuildData.RawTileHeight) - { - BuildData.MipFileStartLevel++; - minTexSize >>= 1; - } + // Max W/H of all textures + var maxSize = Math.Max(BuildData.TotalWidth, BuildData.TotalHeight); + BuildData.PageFileLevels = 0; + while (maxSize >= BuildData.RawTileHeight) + { + BuildData.PageFileLevels++; + maxSize >>= 1; + } - // Max W/H of all textures - var maxSize = Math.Max(BuildData.TotalWidth, BuildData.TotalHeight); - BuildData.PageFileLevels = 0; - while (maxSize >= BuildData.RawTileHeight) - { - BuildData.PageFileLevels++; - maxSize >>= 1; - } + BuildData.BuildLevels = BuildData.PageFileLevels + 1; - BuildData.BuildLevels = BuildData.PageFileLevels + 1; + foreach (var layer in BuildData.Layers) + { + var levelWidth = BuildData.TotalWidth; + var levelHeight = BuildData.TotalHeight; - foreach (var layer in BuildData.Layers) + layer.Levels = new List(BuildData.BuildLevels); + for (var i = 0; i < BuildData.BuildLevels; i++) { - var levelWidth = BuildData.TotalWidth; - var levelHeight = BuildData.TotalHeight; - - layer.Levels = new List(BuildData.BuildLevels); - for (var i = 0; i < BuildData.BuildLevels; i++) + var tilesX = levelWidth / BuildData.RawTileWidth + (((levelWidth % BuildData.RawTileWidth) > 0) ? 1 : 0); + var tilesY = levelHeight / BuildData.RawTileHeight + (((levelHeight % BuildData.RawTileHeight) > 0) ? 1 : 0); + var level = new BuildLevel { - var tilesX = levelWidth / BuildData.RawTileWidth + (((levelWidth % BuildData.RawTileWidth) > 0) ? 1 : 0); - var tilesY = levelHeight / BuildData.RawTileHeight + (((levelHeight % BuildData.RawTileHeight) > 0) ? 1 : 0); - var level = new BuildLevel - { - Level = i, - Width = tilesX * BuildData.RawTileWidth, - Height = tilesY * BuildData.RawTileHeight, - TilesX = tilesX, - TilesY = tilesY, - PaddedTileWidth = BuildData.PaddedTileWidth, - PaddedTileHeight = BuildData.PaddedTileHeight, - Tiles = new BuildTile[tilesX * tilesY] - }; - layer.Levels.Add(level); - - levelWidth = Math.Max(1, levelWidth >> 1); - levelHeight = Math.Max(1, levelHeight >> 1); - } + Level = i, + Width = tilesX * BuildData.RawTileWidth, + Height = tilesY * BuildData.RawTileHeight, + TilesX = tilesX, + TilesY = tilesY, + PaddedTileWidth = BuildData.PaddedTileWidth, + PaddedTileHeight = BuildData.PaddedTileHeight, + Tiles = new BuildTile[tilesX * tilesY] + }; + layer.Levels.Add(level); + + levelWidth = Math.Max(1, levelWidth >> 1); + levelHeight = Math.Max(1, levelHeight >> 1); } } + } - public void Update() - { - DoAutoPlacement(); - UpdateGeometry(); - } + public void Update() + { + DoAutoPlacement(); + UpdateGeometry(); } } diff --git a/LSLib/VirtualTextures/PageFile.cs b/LSLib/VirtualTextures/PageFile.cs index 68f8f434..b76ad953 100644 --- a/LSLib/VirtualTextures/PageFile.cs +++ b/LSLib/VirtualTextures/PageFile.cs @@ -3,78 +3,77 @@ using System.Collections.Generic; using System.IO; -namespace LSLib.VirtualTextures +namespace LSLib.VirtualTextures; + +public class PageFile : IDisposable { - public class PageFile : IDisposable - { - private readonly VirtualTileSet TileSet; - private readonly FileStream Stream; - private readonly BinaryReader Reader; - public GTPHeader Header; - private readonly List ChunkOffsets; + private readonly VirtualTileSet TileSet; + private readonly FileStream Stream; + private readonly BinaryReader Reader; + public GTPHeader Header; + private readonly List ChunkOffsets; - public PageFile(VirtualTileSet tileset, string path) - { - TileSet = tileset; - Stream = new FileStream(path, FileMode.Open, FileAccess.Read); - Reader = new 
BinaryReader(Stream); + public PageFile(VirtualTileSet tileset, string path) + { + TileSet = tileset; + Stream = new FileStream(path, FileMode.Open, FileAccess.Read); + Reader = new BinaryReader(Stream); - Header = BinUtils.ReadStruct(Reader); + Header = BinUtils.ReadStruct(Reader); - var numPages = Stream.Length / tileset.Header.PageSize; - ChunkOffsets = []; + var numPages = Stream.Length / tileset.Header.PageSize; + ChunkOffsets = []; - for (var page = 0; page < numPages; page++) - { - var numOffsets = Reader.ReadUInt32(); - var offsets = new UInt32[numOffsets]; - BinUtils.ReadStructs(Reader, offsets); - ChunkOffsets.Add(offsets); + for (var page = 0; page < numPages; page++) + { + var numOffsets = Reader.ReadUInt32(); + var offsets = new UInt32[numOffsets]; + BinUtils.ReadStructs(Reader, offsets); + ChunkOffsets.Add(offsets); - Stream.Position = (page + 1) * tileset.Header.PageSize; - } + Stream.Position = (page + 1) * tileset.Header.PageSize; } + } - public void Dispose() - { - Reader.Dispose(); - Stream.Dispose(); - } + public void Dispose() + { + Reader.Dispose(); + Stream.Dispose(); + } - private byte[] DoUnpackTileBC(GTPChunkHeader header, int outputSize, TileCompressor compressor) - { - var parameterBlock = (GTSBCParameterBlock)TileSet.ParameterBlocks[header.ParameterBlockID]; - var compressed = Reader.ReadBytes((int)header.Size); - return compressor.Decompress(compressed, outputSize, parameterBlock.CompressionName1, parameterBlock.CompressionName2); - } + private byte[] DoUnpackTileBC(GTPChunkHeader header, int outputSize, TileCompressor compressor) + { + var parameterBlock = (GTSBCParameterBlock)TileSet.ParameterBlocks[header.ParameterBlockID]; + var compressed = Reader.ReadBytes((int)header.Size); + return compressor.Decompress(compressed, outputSize, parameterBlock.CompressionName1, parameterBlock.CompressionName2); + } - private byte[] DoUnpackTileUniform(GTPChunkHeader header) - { - var parameterBlock = (GTSUniformParameterBlock)TileSet.ParameterBlocks[header.ParameterBlockID]; + private byte[] DoUnpackTileUniform(GTPChunkHeader header) + { + var parameterBlock = (GTSUniformParameterBlock)TileSet.ParameterBlocks[header.ParameterBlockID]; - byte[] img = new byte[TileSet.Header.TileWidth * TileSet.Header.TileHeight]; - Array.Clear(img, 0, img.Length); - return img; - } + byte[] img = new byte[TileSet.Header.TileWidth * TileSet.Header.TileHeight]; + Array.Clear(img, 0, img.Length); + return img; + } - public byte[] UnpackTile(int pageIndex, int chunkIndex, int outputSize, TileCompressor compressor) + public byte[] UnpackTile(int pageIndex, int chunkIndex, int outputSize, TileCompressor compressor) + { + Stream.Position = ChunkOffsets[pageIndex][chunkIndex] + (pageIndex * TileSet.Header.PageSize); + var chunkHeader = BinUtils.ReadStruct(Reader); + return chunkHeader.Codec switch { - Stream.Position = ChunkOffsets[pageIndex][chunkIndex] + (pageIndex * TileSet.Header.PageSize); - var chunkHeader = BinUtils.ReadStruct(Reader); - return chunkHeader.Codec switch - { - GTSCodec.Uniform => DoUnpackTileUniform(chunkHeader), - GTSCodec.BC => DoUnpackTileBC(chunkHeader, outputSize, compressor), - _ => throw new InvalidDataException($"Unsupported codec: {chunkHeader.Codec}"), - }; - } + GTSCodec.Uniform => DoUnpackTileUniform(chunkHeader), + GTSCodec.BC => DoUnpackTileBC(chunkHeader, outputSize, compressor), + _ => throw new InvalidDataException($"Unsupported codec: {chunkHeader.Codec}"), + }; + } - public BC5Image UnpackTileBC5(int pageIndex, int chunkIndex, TileCompressor compressor) - 
{ - var compressedSize = 16 * ((TileSet.Header.TileWidth + 3) / 4) * ((TileSet.Header.TileHeight + 3) / 4) - + 16 * ((TileSet.Header.TileWidth/2 + 3) / 4) * ((TileSet.Header.TileHeight/2 + 3) / 4); - var chunk = UnpackTile(pageIndex, chunkIndex, compressedSize, compressor); - return new BC5Image(chunk, TileSet.Header.TileWidth, TileSet.Header.TileHeight); - } + public BC5Image UnpackTileBC5(int pageIndex, int chunkIndex, TileCompressor compressor) + { + var compressedSize = 16 * ((TileSet.Header.TileWidth + 3) / 4) * ((TileSet.Header.TileHeight + 3) / 4) + + 16 * ((TileSet.Header.TileWidth/2 + 3) / 4) * ((TileSet.Header.TileHeight/2 + 3) / 4); + var chunk = UnpackTile(pageIndex, chunkIndex, compressedSize, compressor); + return new BC5Image(chunk, TileSet.Header.TileWidth, TileSet.Header.TileHeight); } } diff --git a/LSLib/VirtualTextures/PageFileBuild.cs b/LSLib/VirtualTextures/PageFileBuild.cs index d6a7a50b..a9270ff7 100644 --- a/LSLib/VirtualTextures/PageFileBuild.cs +++ b/LSLib/VirtualTextures/PageFileBuild.cs @@ -5,285 +5,284 @@ using System.Linq; using System.Runtime.InteropServices; -namespace LSLib.VirtualTextures +namespace LSLib.VirtualTextures; + + +public class BuiltChunk { + public GTSCodec Codec; + public UInt32 ParameterBlockID; + public byte[] EncodedBlob; + public int ChunkIndex; + public UInt32 OffsetInPage; +} - public class BuiltChunk +public class PageBuilder +{ + public PageFileBuilder PageFile; + public List Chunks; + public int PageFileIndex; + public int PageIndex; + public int Budget = 0; + + public PageBuilder() { - public GTSCodec Codec; - public UInt32 ParameterBlockID; - public byte[] EncodedBlob; - public int ChunkIndex; - public UInt32 OffsetInPage; + Chunks = []; } - public class PageBuilder + public bool TryAdd(BuildTile tile) { - public PageFileBuilder PageFile; - public List Chunks; - public int PageFileIndex; - public int PageIndex; - public int Budget = 0; - - public PageBuilder() + if (tile.AddedToPageFile) { - Chunks = []; + throw new InvalidOperationException("Tried to add tile to page file multiple times"); } - public bool TryAdd(BuildTile tile) + var chunkSize = 4 + Marshal.SizeOf(typeof(GTPChunkHeader)) + tile.Compressed.Data.Length; + if (Budget + chunkSize > PageFile.Config.PageSize) { - if (tile.AddedToPageFile) - { - throw new InvalidOperationException("Tried to add tile to page file multiple times"); - } - - var chunkSize = 4 + Marshal.SizeOf(typeof(GTPChunkHeader)) + tile.Compressed.Data.Length; - if (Budget + chunkSize > PageFile.Config.PageSize) - { - return false; - } - - var chunk = new BuiltChunk - { - Codec = GTSCodec.BC, - ParameterBlockID = tile.Compressed.ParameterBlockID, - EncodedBlob = tile.Compressed.Data, - ChunkIndex = Chunks.Count - }; - - tile.AddedToPageFile = true; - tile.PageFileIndex = PageFileIndex; - tile.PageIndex = PageIndex; - tile.ChunkIndex = chunk.ChunkIndex; - Chunks.Add(chunk); - Budget += chunkSize; - return true; + return false; } + + var chunk = new BuiltChunk + { + Codec = GTSCodec.BC, + ParameterBlockID = tile.Compressed.ParameterBlockID, + EncodedBlob = tile.Compressed.Data, + ChunkIndex = Chunks.Count + }; + + tile.AddedToPageFile = true; + tile.PageFileIndex = PageFileIndex; + tile.PageIndex = PageIndex; + tile.ChunkIndex = chunk.ChunkIndex; + Chunks.Add(chunk); + Budget += chunkSize; + return true; } +} - public class PageFileBuilder(TileSetConfiguration config) +public class PageFileBuilder(TileSetConfiguration config) +{ + public readonly TileSetConfiguration Config = config; + public List Pages = 
[]; + public string Name; + public string FileName; + public Guid Checksum; + public int PageFileIndex; + + public void AddTile(BuildTile tile) { - public readonly TileSetConfiguration Config = config; - public List Pages = []; - public string Name; - public string FileName; - public Guid Checksum; - public int PageFileIndex; - - public void AddTile(BuildTile tile) + if (Config.BackfillPages) { - if (Config.BackfillPages) + foreach (var page in Pages) { - foreach (var page in Pages) + if (page.TryAdd(tile)) { - if (page.TryAdd(tile)) - { - return; - } + return; } } + } - if (Pages.Count == 0 || !Pages.Last().TryAdd(tile)) + if (Pages.Count == 0 || !Pages.Last().TryAdd(tile)) + { + var newPage = new PageBuilder { - var newPage = new PageBuilder - { - PageFile = this, - PageFileIndex = PageFileIndex, - PageIndex = Pages.Count - }; - - if (newPage.PageIndex == 0) - { - newPage.Budget += Marshal.SizeOf(typeof(GTPHeader)); - } + PageFile = this, + PageFileIndex = PageFileIndex, + PageIndex = Pages.Count + }; - Pages.Add(newPage); - newPage.TryAdd(tile); + if (newPage.PageIndex == 0) + { + newPage.Budget += Marshal.SizeOf(typeof(GTPHeader)); } + + Pages.Add(newPage); + newPage.TryAdd(tile); } + } - public void Save(string path) + public void Save(string path) + { + using var stream = new FileStream(path, FileMode.Create, FileAccess.ReadWrite); + using var writer = new BinaryWriter(stream); + Save(stream, writer); + } + + public void SaveChunk(BinaryWriter writer, BuiltChunk chunk) + { + var header = new GTPChunkHeader { - using var stream = new FileStream(path, FileMode.Create, FileAccess.ReadWrite); - using var writer = new BinaryWriter(stream); - Save(stream, writer); - } + Codec = chunk.Codec, + ParameterBlockID = chunk.ParameterBlockID, + Size = (UInt32)chunk.EncodedBlob.Length + }; + BinUtils.WriteStruct(writer, ref header); + writer.Write(chunk.EncodedBlob); + } - public void SaveChunk(BinaryWriter writer, BuiltChunk chunk) + public void Save(Stream s, BinaryWriter writer) + { + var header = new GTPHeader { - var header = new GTPChunkHeader - { - Codec = chunk.Codec, - ParameterBlockID = chunk.ParameterBlockID, - Size = (UInt32)chunk.EncodedBlob.Length - }; - BinUtils.WriteStruct(writer, ref header); - writer.Write(chunk.EncodedBlob); - } + Magic = GTPHeader.HeaderMagic, + Version = GTPHeader.DefaultVersion, + GUID = Checksum + }; + BinUtils.WriteStruct(writer, ref header); - public void Save(Stream s, BinaryWriter writer) + for (var i = 0; i < Pages.Count; i++) { - var header = new GTPHeader - { - Magic = GTPHeader.HeaderMagic, - Version = GTPHeader.DefaultVersion, - GUID = Checksum - }; - BinUtils.WriteStruct(writer, ref header); + var page = Pages[i]; - for (var i = 0; i < Pages.Count; i++) + writer.Write((UInt32)page.Chunks.Count); + foreach (var chunk in page.Chunks) { - var page = Pages[i]; - - writer.Write((UInt32)page.Chunks.Count); - foreach (var chunk in page.Chunks) - { - writer.Write(chunk.OffsetInPage); - } + writer.Write(chunk.OffsetInPage); + } - foreach (var chunk in page.Chunks) - { - chunk.OffsetInPage = (uint)(s.Position % Config.PageSize); - SaveChunk(writer, chunk); - } + foreach (var chunk in page.Chunks) + { + chunk.OffsetInPage = (uint)(s.Position % Config.PageSize); + SaveChunk(writer, chunk); + } - var padSize = (Config.PageSize - (s.Position % Config.PageSize) % Config.PageSize); - if (padSize > 0) - { - var pad = new byte[padSize]; - Array.Clear(pad, 0, (int)padSize); - writer.Write(pad); - } + var padSize = (Config.PageSize - (s.Position % Config.PageSize) % 
Config.PageSize); + if (padSize > 0) + { + var pad = new byte[padSize]; + Array.Clear(pad, 0, (int)padSize); + writer.Write(pad); } + } - for (var i = 0; i < Pages.Count; i++) + for (var i = 0; i < Pages.Count; i++) + { + var page = Pages[i]; + s.Position = (i * Config.PageSize); + if (i == 0) { - var page = Pages[i]; - s.Position = (i * Config.PageSize); - if (i == 0) - { - s.Position += Marshal.SizeOf(typeof(GTPHeader)); - } + s.Position += Marshal.SizeOf(typeof(GTPHeader)); + } - writer.Write((UInt32)page.Chunks.Count); - foreach (var chunk in page.Chunks) - { - writer.Write(chunk.OffsetInPage); - } + writer.Write((UInt32)page.Chunks.Count); + foreach (var chunk in page.Chunks) + { + writer.Write(chunk.OffsetInPage); } } } +} - public class PageFileSetBuilder(TileSetBuildData buildData, TileSetConfiguration config) - { - private readonly TileSetBuildData BuildData = buildData; - private readonly TileSetConfiguration Config = config; +public class PageFileSetBuilder(TileSetBuildData buildData, TileSetConfiguration config) +{ + private readonly TileSetBuildData BuildData = buildData; + private readonly TileSetConfiguration Config = config; - private void BuildPageFile(PageFileBuilder file, int level, int minTileX, int minTileY, int maxTileX, int maxTileY) + private void BuildPageFile(PageFileBuilder file, int level, int minTileX, int minTileY, int maxTileX, int maxTileY) + { + for (var y = minTileY; y <= maxTileY; y++) { - for (var y = minTileY; y <= maxTileY; y++) + for (var x = minTileX; x <= maxTileX; x++) { - for (var x = minTileX; x <= maxTileX; x++) + for (var layer = 0; layer < BuildData.Layers.Count; layer++) { - for (var layer = 0; layer < BuildData.Layers.Count; layer++) + var tile = BuildData.Layers[layer].Levels[level].Get(x, y); + if (tile != null) { - var tile = BuildData.Layers[layer].Levels[level].Get(x, y); - if (tile != null) - { - file.AddTile(tile); - } + file.AddTile(tile); } } } } + } - private void BuildPageFile(PageFileBuilder file, BuildTexture texture) + private void BuildPageFile(PageFileBuilder file, BuildTexture texture) + { + for (var level = 0; level < BuildData.MipFileStartLevel; level++) { - for (var level = 0; level < BuildData.MipFileStartLevel; level++) - { - var x = texture.X >> level; - var y = texture.Y >> level; - var width = texture.Width >> level; - var height = texture.Height >> level; + var x = texture.X >> level; + var y = texture.Y >> level; + var width = texture.Width >> level; + var height = texture.Height >> level; - var minTileX = x / BuildData.RawTileWidth; - var minTileY = y / BuildData.RawTileHeight; - var maxTileX = (x + width - 1) / BuildData.RawTileWidth; - var maxTileY = (y + height - 1) / BuildData.RawTileHeight; + var minTileX = x / BuildData.RawTileWidth; + var minTileY = y / BuildData.RawTileHeight; + var maxTileX = (x + width - 1) / BuildData.RawTileWidth; + var maxTileY = (y + height - 1) / BuildData.RawTileHeight; - BuildPageFile(file, level, minTileX, minTileY, maxTileX, maxTileY); - } + BuildPageFile(file, level, minTileX, minTileY, maxTileX, maxTileY); } + } - private void BuildMipPageFile(PageFileBuilder file) + private void BuildMipPageFile(PageFileBuilder file) + { + for (var level = BuildData.MipFileStartLevel; level < BuildData.PageFileLevels; level++) { - for (var level = BuildData.MipFileStartLevel; level < BuildData.PageFileLevels; level++) - { - var lvl = BuildData.Layers[0].Levels[level]; - BuildPageFile(file, level, 0, 0, lvl.TilesX - 1, lvl.TilesY - 1); - } + var lvl = BuildData.Layers[0].Levels[level]; + 
BuildPageFile(file, level, 0, 0, lvl.TilesX - 1, lvl.TilesY - 1); } + } - private void BuildFullPageFile(PageFileBuilder file) + private void BuildFullPageFile(PageFileBuilder file) + { + for (var level = 0; level < BuildData.PageFileLevels; level++) { - for (var level = 0; level < BuildData.PageFileLevels; level++) - { - var lvl = BuildData.Layers[0].Levels[level]; - BuildPageFile(file, level, 0, 0, lvl.TilesX - 1, lvl.TilesY - 1); - } + var lvl = BuildData.Layers[0].Levels[level]; + BuildPageFile(file, level, 0, 0, lvl.TilesX - 1, lvl.TilesY - 1); } + } - public List BuildFilePerGTex(List textures) - { - var pageFiles = new List(); - - uint firstPageIndex = 0; - foreach (var texture in textures) - { - var file = new PageFileBuilder(Config) - { - Name = texture.Name, - FileName = BuildData.GTSName + "_" + texture.Name + ".gtp", - Checksum = Guid.NewGuid(), - PageFileIndex = pageFiles.Count - }; - pageFiles.Add(file); - BuildPageFile(file, texture); - - firstPageIndex += (uint)file.Pages.Count; - } + public List BuildFilePerGTex(List textures) + { + var pageFiles = new List(); - if (BuildData.MipFileStartLevel < BuildData.PageFileLevels) + uint firstPageIndex = 0; + foreach (var texture in textures) + { + var file = new PageFileBuilder(Config) { - var file = new PageFileBuilder(Config) - { - Name = "Mips", - FileName = BuildData.GTSName + "_Mips.gtp", - Checksum = Guid.NewGuid(), - PageFileIndex = pageFiles.Count - }; - pageFiles.Add(file); - BuildMipPageFile(file); - } + Name = texture.Name, + FileName = BuildData.GTSName + "_" + texture.Name + ".gtp", + Checksum = Guid.NewGuid(), + PageFileIndex = pageFiles.Count + }; + pageFiles.Add(file); + BuildPageFile(file, texture); - return pageFiles; + firstPageIndex += (uint)file.Pages.Count; } - public List BuildSingleFile() + if (BuildData.MipFileStartLevel < BuildData.PageFileLevels) { - var pageFiles = new List(); - var file = new PageFileBuilder(Config) { - Name = "Global", - FileName = BuildData.GTSName + ".gtp", + Name = "Mips", + FileName = BuildData.GTSName + "_Mips.gtp", Checksum = Guid.NewGuid(), PageFileIndex = pageFiles.Count }; pageFiles.Add(file); - BuildFullPageFile(file); - - return pageFiles; + BuildMipPageFile(file); } + + return pageFiles; + } + + public List BuildSingleFile() + { + var pageFiles = new List(); + + var file = new PageFileBuilder(Config) + { + Name = "Global", + FileName = BuildData.GTSName + ".gtp", + Checksum = Guid.NewGuid(), + PageFileIndex = pageFiles.Count + }; + pageFiles.Add(file); + BuildFullPageFile(file); + + return pageFiles; } } diff --git a/LSLib/VirtualTextures/VirtualTexture.cs b/LSLib/VirtualTextures/VirtualTexture.cs index e920c7dc..fadce1cf 100644 --- a/LSLib/VirtualTextures/VirtualTexture.cs +++ b/LSLib/VirtualTextures/VirtualTexture.cs @@ -5,669 +5,668 @@ using System.IO; using System.Text; -namespace LSLib.VirtualTextures +namespace LSLib.VirtualTextures; + +public struct PageFileInfo { - public struct PageFileInfo - { - public GTSPageFileInfo Meta; - public uint FirstPageIndex; - public string FileName; - } + public GTSPageFileInfo Meta; + public uint FirstPageIndex; + public string FileName; +} - public enum FourCCElementType - { - Node, - Int, - String, - BinaryInt, - BinaryGuid - }; - - public class FourCCElement +public enum FourCCElementType +{ + Node, + Int, + String, + BinaryInt, + BinaryGuid +}; + +public class FourCCElement +{ + public FourCCElementType Type; + public string FourCC; + public string Str; + public uint UInt; + public byte[] Blob; + public List Children; + + public 
static FourCCElement Make(string fourCC) { - public FourCCElementType Type; - public string FourCC; - public string Str; - public uint UInt; - public byte[] Blob; - public List Children; - - public static FourCCElement Make(string fourCC) - { - return new FourCCElement - { - Type = FourCCElementType.Node, - FourCC = fourCC, - Children = [] - }; - } - - public static FourCCElement Make(string fourCC, uint value) - { - return new FourCCElement - { - Type = FourCCElementType.Int, - FourCC = fourCC, - UInt = value - }; - } - - public static FourCCElement Make(string fourCC, string value) - { - return new FourCCElement - { - Type = FourCCElementType.String, - FourCC = fourCC, - Str = value - }; - } - - public static FourCCElement Make(string fourCC, FourCCElementType type, byte[] value) + return new FourCCElement { - return new FourCCElement - { - Type = type, - FourCC = fourCC, - Blob = value - }; - } + Type = FourCCElementType.Node, + FourCC = fourCC, + Children = [] + }; + } - public FourCCElement GetChild(string fourCC) + public static FourCCElement Make(string fourCC, uint value) + { + return new FourCCElement { - foreach (var child in Children) - { - if (child.FourCC == fourCC) - { - return child; - } - } - - return null; - } + Type = FourCCElementType.Int, + FourCC = fourCC, + UInt = value + }; } - public class FourCCTextureMeta + public static FourCCElement Make(string fourCC, string value) { - public string Name; - public int X; - public int Y; - public int Width; - public int Height; + return new FourCCElement + { + Type = FourCCElementType.String, + FourCC = fourCC, + Str = value + }; } - public class TileSetFourCC + public static FourCCElement Make(string fourCC, FourCCElementType type, byte[] value) { - public FourCCElement Root; - - public void Read(Stream fs, BinaryReader reader, long length) + return new FourCCElement { - var fourCCs = new List(); - Read(fs, reader, length, fourCCs); - Root = fourCCs[0]; - } + Type = type, + FourCC = fourCC, + Blob = value + }; + } - public void Read(Stream fs, BinaryReader reader, long length, List elements) + public FourCCElement GetChild(string fourCC) + { + foreach (var child in Children) { - var end = fs.Position + length; - while (fs.Position < end) + if (child.FourCC == fourCC) { - var cc = new FourCCElement(); - var header = BinUtils.ReadStruct(reader); - cc.FourCC = header.FourCCName; - - Int32 valueSize = header.Length; - if (header.ExtendedLength == 1) - { - valueSize |= ((int)reader.ReadUInt32() << 16); - } - - switch (header.Format) - { - case 1: - { - cc.Type = FourCCElementType.Node; - cc.Children = []; - Read(fs, reader, valueSize, cc.Children); - break; - } - - case 2: - { - cc.Type = FourCCElementType.String; - - var str = reader.ReadBytes(valueSize - 2); - cc.Str = Encoding.Unicode.GetString(str); - var nullterm = reader.ReadUInt16(); // null terminator - Debug.Assert(nullterm == 0); - break; - } - - case 3: - { - cc.Type = FourCCElementType.Int; - Debug.Assert(valueSize == 4); - cc.UInt = reader.ReadUInt32(); - break; - } - - case 8: - { - cc.Type = FourCCElementType.BinaryInt; - cc.Blob = reader.ReadBytes(valueSize); - break; - } - - case 0x0D: - { - cc.Type = FourCCElementType.BinaryGuid; - cc.Blob = reader.ReadBytes(valueSize); - break; - } - - default: - throw new Exception($"Unrecognized FourCC type tag: {header.Format}"); - } - - if ((fs.Position % 4) != 0) - { - fs.Position += 4 - (fs.Position % 4); - } - - elements.Add(cc); + return child; } - - Debug.Assert(fs.Position == end); } + return null; + } +} - public List 
ExtractTextureMetadata() - { - var metaList = new List(); - var textures = Root.GetChild("ATLS").GetChild("TXTS").Children; - foreach (var tex in textures) - { - var meta = new FourCCTextureMeta - { - Name = tex.GetChild("NAME").Str, - Width = (int)tex.GetChild("WDTH").UInt, - Height = (int)tex.GetChild("HGHT").UInt, - X = (int)tex.GetChild("XXXX").UInt, - Y = (int)tex.GetChild("YYYY").UInt - }; - metaList.Add(meta); - } +public class FourCCTextureMeta +{ + public string Name; + public int X; + public int Y; + public int Width; + public int Height; +} - return metaList; - } +public class TileSetFourCC +{ + public FourCCElement Root; - public void Write(Stream fs, BinaryWriter writer) - { - Write(fs, writer, Root); - } + public void Read(Stream fs, BinaryReader reader, long length) + { + var fourCCs = new List(); + Read(fs, reader, length, fourCCs); + Root = fourCCs[0]; + } - public void Write(Stream fs, BinaryWriter writer, FourCCElement element) + public void Read(Stream fs, BinaryReader reader, long length, List elements) + { + var end = fs.Position + length; + while (fs.Position < end) { - var header = new GTSFourCCMetadata - { - FourCCName = element.FourCC - }; + var cc = new FourCCElement(); + var header = BinUtils.ReadStruct(reader); + cc.FourCC = header.FourCCName; - var length = element.Type switch + Int32 valueSize = header.Length; + if (header.ExtendedLength == 1) { - FourCCElementType.Node => (uint)0x10000000, - FourCCElementType.Int => (uint)4, - FourCCElementType.String => (UInt32)Encoding.Unicode.GetBytes(element.Str).Length + 2, - FourCCElementType.BinaryInt or FourCCElementType.BinaryGuid => (UInt32)element.Blob.Length, - _ => throw new InvalidDataException($"Unsupported FourCC value type: {element.Type}"), - }; - - header.Format = element.Type switch - { - FourCCElementType.Node => 1, - FourCCElementType.Int => 3, - FourCCElementType.String => 2, - FourCCElementType.BinaryInt => 8, - FourCCElementType.BinaryGuid => 0xD, - _ => throw new InvalidDataException($"Unsupported FourCC value type: {element.Type}"), - }; - - header.Length = (UInt16)(length & 0xffff); - if (length > 0xffff) - { - header.ExtendedLength = 1; + valueSize |= ((int)reader.ReadUInt32() << 16); } - BinUtils.WriteStruct(writer, ref header); - - if (length > 0xffff) + switch (header.Format) { - UInt32 extraLength = length >> 16; - writer.Write(extraLength); - } + case 1: + { + cc.Type = FourCCElementType.Node; + cc.Children = []; + Read(fs, reader, valueSize, cc.Children); + break; + } - switch (element.Type) - { - case FourCCElementType.Node: + case 2: { - var lengthOffset = fs.Position - 6; - var childrenOffset = fs.Position; - foreach (var child in element.Children) - { - Write(fs, writer, child); - } - var endOffset = fs.Position; - var childrenSize = (UInt32)(endOffset - childrenOffset); - - // Re-write node header with final node size - fs.Position = lengthOffset; - writer.Write((UInt32)childrenSize); - fs.Position = endOffset; + cc.Type = FourCCElementType.String; + var str = reader.ReadBytes(valueSize - 2); + cc.Str = Encoding.Unicode.GetString(str); + var nullterm = reader.ReadUInt16(); // null terminator + Debug.Assert(nullterm == 0); break; } - case FourCCElementType.Int: - writer.Write(element.UInt); - break; + case 3: + { + cc.Type = FourCCElementType.Int; + Debug.Assert(valueSize == 4); + cc.UInt = reader.ReadUInt32(); + break; + } - case FourCCElementType.String: - writer.Write(Encoding.Unicode.GetBytes(element.Str)); - writer.Write((UInt16)0); // null terminator - break; + case 8: + { + 
cc.Type = FourCCElementType.BinaryInt; + cc.Blob = reader.ReadBytes(valueSize); + break; + } - case FourCCElementType.BinaryInt: - case FourCCElementType.BinaryGuid: - writer.Write(element.Blob); - break; + case 0x0D: + { + cc.Type = FourCCElementType.BinaryGuid; + cc.Blob = reader.ReadBytes(valueSize); + break; + } default: - throw new InvalidDataException($"Unsupported FourCC value type: {element.Type}"); + throw new Exception($"Unrecognized FourCC type tag: {header.Format}"); } - while ((fs.Position % 4) != 0) + if ((fs.Position % 4) != 0) { - writer.Write((Byte)0); + fs.Position += 4 - (fs.Position % 4); } + + elements.Add(cc); } + + Debug.Assert(fs.Position == end); } - public class VirtualTileSet : IDisposable + + public List ExtractTextureMetadata() { - public String PagePath; - public GTSHeader Header; - public GTSTileSetLayer[] TileSetLayers; - public GTSTileSetLevel[] TileSetLevels; - public List PerLevelFlatTileIndices; - public GTSParameterBlockHeader[] ParameterBlockHeaders; - public Dictionary ParameterBlocks; - public List PageFileInfos; - public TileSetFourCC FourCCMetadata; - public GTSThumbnailInfo[] ThumbnailInfos; - public GTSPackedTileID[] PackedTileIDs; - public GTSFlatTileInfo[] FlatTileInfos; - - private readonly Dictionary PageFiles = []; - private readonly TileCompressor Compressor; - - public VirtualTileSet(string path, string pagePath) + var metaList = new List(); + var textures = Root.GetChild("ATLS").GetChild("TXTS").Children; + foreach (var tex in textures) { - PagePath = pagePath; - Compressor = new TileCompressor(); - - using var fs = new FileStream(path, FileMode.Open, FileAccess.Read); - using var reader = new BinaryReader(fs); - LoadFromStream(fs, reader, false); + var meta = new FourCCTextureMeta + { + Name = tex.GetChild("NAME").Str, + Width = (int)tex.GetChild("WDTH").UInt, + Height = (int)tex.GetChild("HGHT").UInt, + X = (int)tex.GetChild("XXXX").UInt, + Y = (int)tex.GetChild("YYYY").UInt + }; + metaList.Add(meta); } - public VirtualTileSet(string path) : this(path, Path.GetDirectoryName(path)) - { - } + return metaList; + } - public VirtualTileSet() + public void Write(Stream fs, BinaryWriter writer) + { + Write(fs, writer, Root); + } + + public void Write(Stream fs, BinaryWriter writer, FourCCElement element) + { + var header = new GTSFourCCMetadata { - } + FourCCName = element.FourCC + }; - public void Save(string path) + var length = element.Type switch { - using var fs = new FileStream(path, FileMode.Create, FileAccess.Write); - using var writer = new BinaryWriter(fs); - SaveToStream(fs, writer); + FourCCElementType.Node => (uint)0x10000000, + FourCCElementType.Int => (uint)4, + FourCCElementType.String => (UInt32)Encoding.Unicode.GetBytes(element.Str).Length + 2, + FourCCElementType.BinaryInt or FourCCElementType.BinaryGuid => (UInt32)element.Blob.Length, + _ => throw new InvalidDataException($"Unsupported FourCC value type: {element.Type}"), + }; + + header.Format = element.Type switch + { + FourCCElementType.Node => 1, + FourCCElementType.Int => 3, + FourCCElementType.String => 2, + FourCCElementType.BinaryInt => 8, + FourCCElementType.BinaryGuid => 0xD, + _ => throw new InvalidDataException($"Unsupported FourCC value type: {element.Type}"), + }; + + header.Length = (UInt16)(length & 0xffff); + if (length > 0xffff) + { + header.ExtendedLength = 1; } - public void Dispose() + BinUtils.WriteStruct(writer, ref header); + + if (length > 0xffff) { - foreach (var pageFile in PageFiles) - { - pageFile.Value.Dispose(); - } + UInt32 extraLength = 
length >> 16; + writer.Write(extraLength); } - private void LoadThumbnails(Stream fs, BinaryReader reader) + switch (element.Type) { - fs.Position = (long)Header.ThumbnailsOffset; - var thumbHdr = BinUtils.ReadStruct(reader); - ThumbnailInfos = new GTSThumbnailInfo[thumbHdr.NumThumbnails]; - BinUtils.ReadStructs(reader, ThumbnailInfos); - - foreach (var thumb in ThumbnailInfos) - { - // Decompress thumbnail blob - fs.Position = (uint)thumb.OffsetInFile; - var inb = new byte[thumb.CompressedSize]; - reader.Read(inb, 0, inb.Length); - var thumbnailBlob = Native.FastLZCompressor.Decompress(inb, Math.Max(thumb.Unknown2, thumb.Unknown3) * 0x100); - - var numSections = reader.ReadUInt32(); - var parameterBlockSize = reader.ReadUInt32(); - reader.ReadUInt32(); - var e4 = BinUtils.ReadStruct(reader); - int sectionNo = 0; - numSections -= 2; - - while (numSections-- > 0) + case FourCCElementType.Node: { - var mipLevelSize = reader.ReadUInt32(); - if (mipLevelSize > 0x10000) + var lengthOffset = fs.Position - 6; + var childrenOffset = fs.Position; + foreach (var child in element.Children) { - fs.Position -= 4; - break; + Write(fs, writer, child); } + var endOffset = fs.Position; + var childrenSize = (UInt32)(endOffset - childrenOffset); - var inf = new byte[mipLevelSize]; - reader.Read(inf, 0, inf.Length); + // Re-write node header with final node size + fs.Position = lengthOffset; + writer.Write((UInt32)childrenSize); + fs.Position = endOffset; - sectionNo++; + break; } - } + + case FourCCElementType.Int: + writer.Write(element.UInt); + break; + + case FourCCElementType.String: + writer.Write(Encoding.Unicode.GetBytes(element.Str)); + writer.Write((UInt16)0); // null terminator + break; + + case FourCCElementType.BinaryInt: + case FourCCElementType.BinaryGuid: + writer.Write(element.Blob); + break; + + default: + throw new InvalidDataException($"Unsupported FourCC value type: {element.Type}"); } - public void LoadFromStream(Stream fs, BinaryReader reader, bool loadThumbnails) + while ((fs.Position % 4) != 0) { - Header = BinUtils.ReadStruct(reader); + writer.Write((Byte)0); + } + } +} - fs.Position = (uint)Header.LayersOffset; - TileSetLayers = new GTSTileSetLayer[Header.NumLayers]; - BinUtils.ReadStructs(reader, TileSetLayers); +public class VirtualTileSet : IDisposable +{ + public String PagePath; + public GTSHeader Header; + public GTSTileSetLayer[] TileSetLayers; + public GTSTileSetLevel[] TileSetLevels; + public List PerLevelFlatTileIndices; + public GTSParameterBlockHeader[] ParameterBlockHeaders; + public Dictionary ParameterBlocks; + public List PageFileInfos; + public TileSetFourCC FourCCMetadata; + public GTSThumbnailInfo[] ThumbnailInfos; + public GTSPackedTileID[] PackedTileIDs; + public GTSFlatTileInfo[] FlatTileInfos; + + private readonly Dictionary PageFiles = []; + private readonly TileCompressor Compressor; + + public VirtualTileSet(string path, string pagePath) + { + PagePath = pagePath; + Compressor = new TileCompressor(); - fs.Position = (uint)Header.LevelsOffset; - TileSetLevels = new GTSTileSetLevel[Header.NumLevels]; - BinUtils.ReadStructs(reader, TileSetLevels); + using var fs = new FileStream(path, FileMode.Open, FileAccess.Read); + using var reader = new BinaryReader(fs); + LoadFromStream(fs, reader, false); + } - PerLevelFlatTileIndices = []; - foreach (var level in TileSetLevels) - { - fs.Position = (uint)level.FlatTileIndicesOffset; - var tileIndices = new UInt32[level.Height * level.Width * Header.NumLayers]; - BinUtils.ReadStructs(reader, tileIndices); - 
PerLevelFlatTileIndices.Add(tileIndices); - } + public VirtualTileSet(string path) : this(path, Path.GetDirectoryName(path)) + { + } - fs.Position = (uint)Header.ParameterBlockHeadersOffset; - ParameterBlockHeaders = new GTSParameterBlockHeader[Header.ParameterBlockHeadersCount]; - BinUtils.ReadStructs(reader, ParameterBlockHeaders); + public VirtualTileSet() + { + } - ParameterBlocks = []; - foreach (var hdr in ParameterBlockHeaders) - { - fs.Position = (uint)hdr.FileInfoOffset; - if (hdr.Codec == GTSCodec.BC) - { - Debug.Assert(hdr.ParameterBlockSize == 0x38); - var bc = BinUtils.ReadStruct(reader); - ParameterBlocks.Add(hdr.ParameterBlockID, bc); - Debug.Assert(bc.Version == 0x238e); - Debug.Assert(bc.B == 0); - Debug.Assert(bc.C1 == 0); - Debug.Assert(bc.C2 == 0); - Debug.Assert(bc.BCField3 == 0); - Debug.Assert(bc.DataType == (Byte)GTSDataType.R8G8B8A8_SRGB || bc.DataType == (Byte)GTSDataType.X8Y8Z8W8); - Debug.Assert(bc.D == 0); - Debug.Assert(bc.FourCC == 0x20334342); - Debug.Assert(bc.E1 == 0); - Debug.Assert(bc.SaveMip == 1); - Debug.Assert(bc.E3 == 0); - Debug.Assert(bc.E4 == 0); - Debug.Assert(bc.F == 0); - } - else - { - Debug.Assert(hdr.Codec == GTSCodec.Uniform); - Debug.Assert(hdr.ParameterBlockSize == 0x10); - - var blk = BinUtils.ReadStruct(reader); - Debug.Assert(blk.Version == 0x42); - Debug.Assert(blk.A_Unused == 0); - Debug.Assert(blk.Width == 4); - Debug.Assert(blk.Height == 1); - Debug.Assert(blk.DataType == GTSDataType.R8G8B8A8_SRGB || blk.DataType == GTSDataType.X8Y8Z8W8); - ParameterBlocks.Add(hdr.ParameterBlockID, blk); - } - } + public void Save(string path) + { + using var fs = new FileStream(path, FileMode.Create, FileAccess.Write); + using var writer = new BinaryWriter(fs); + SaveToStream(fs, writer); + } - fs.Position = (long)Header.PageFileMetadataOffset; - var pageFileInfos = new GTSPageFileInfo[Header.NumPageFiles]; - BinUtils.ReadStructs(reader, pageFileInfos); + public void Dispose() + { + foreach (var pageFile in PageFiles) + { + pageFile.Value.Dispose(); + } + } + + private void LoadThumbnails(Stream fs, BinaryReader reader) + { + fs.Position = (long)Header.ThumbnailsOffset; + var thumbHdr = BinUtils.ReadStruct(reader); + ThumbnailInfos = new GTSThumbnailInfo[thumbHdr.NumThumbnails]; + BinUtils.ReadStructs(reader, ThumbnailInfos); - PageFileInfos = []; - uint nextPageIndex = 0; - foreach (var info in pageFileInfos) + foreach (var thumb in ThumbnailInfos) + { + // Decompress thumbnail blob + fs.Position = (uint)thumb.OffsetInFile; + var inb = new byte[thumb.CompressedSize]; + reader.Read(inb, 0, inb.Length); + var thumbnailBlob = Native.FastLZCompressor.Decompress(inb, Math.Max(thumb.Unknown2, thumb.Unknown3) * 0x100); + + var numSections = reader.ReadUInt32(); + var parameterBlockSize = reader.ReadUInt32(); + reader.ReadUInt32(); + var e4 = BinUtils.ReadStruct(reader); + int sectionNo = 0; + numSections -= 2; + + while (numSections-- > 0) { - PageFileInfos.Add(new PageFileInfo + var mipLevelSize = reader.ReadUInt32(); + if (mipLevelSize > 0x10000) { - Meta = info, - FirstPageIndex = nextPageIndex, - FileName = info.FileName - }); - nextPageIndex += info.NumPages; - } + fs.Position -= 4; + break; + } - fs.Position = (long)Header.FourCCListOffset; - FourCCMetadata = new TileSetFourCC(); - FourCCMetadata.Read(fs, reader, Header.FourCCListSize); + var inf = new byte[mipLevelSize]; + reader.Read(inf, 0, inf.Length); - if (loadThumbnails) - { - LoadThumbnails(fs, reader); + sectionNo++; } + } + } + + public void LoadFromStream(Stream fs, BinaryReader 
reader, bool loadThumbnails) + { + Header = BinUtils.ReadStruct(reader); - fs.Position = (long)Header.PackedTileIDsOffset; - PackedTileIDs = new GTSPackedTileID[Header.NumPackedTileIDs]; - BinUtils.ReadStructs(reader, PackedTileIDs); + fs.Position = (uint)Header.LayersOffset; + TileSetLayers = new GTSTileSetLayer[Header.NumLayers]; + BinUtils.ReadStructs(reader, TileSetLayers); - fs.Position = (long)Header.FlatTileInfoOffset; - FlatTileInfos = new GTSFlatTileInfo[Header.NumFlatTileInfos]; - BinUtils.ReadStructs(reader, FlatTileInfos); - } + fs.Position = (uint)Header.LevelsOffset; + TileSetLevels = new GTSTileSetLevel[Header.NumLevels]; + BinUtils.ReadStructs(reader, TileSetLevels); - public void SaveToStream(Stream fs, BinaryWriter writer) + PerLevelFlatTileIndices = []; + foreach (var level in TileSetLevels) { - BinUtils.WriteStruct(writer, ref Header); + fs.Position = (uint)level.FlatTileIndicesOffset; + var tileIndices = new UInt32[level.Height * level.Width * Header.NumLayers]; + BinUtils.ReadStructs(reader, tileIndices); + PerLevelFlatTileIndices.Add(tileIndices); + } - Header.LayersOffset = (ulong)fs.Position; - Header.NumLayers = (uint)TileSetLayers.Length; - BinUtils.WriteStructs(writer, TileSetLayers); + fs.Position = (uint)Header.ParameterBlockHeadersOffset; + ParameterBlockHeaders = new GTSParameterBlockHeader[Header.ParameterBlockHeadersCount]; + BinUtils.ReadStructs(reader, ParameterBlockHeaders); - for (var i = 0; i < TileSetLevels.Length; i++) + ParameterBlocks = []; + foreach (var hdr in ParameterBlockHeaders) + { + fs.Position = (uint)hdr.FileInfoOffset; + if (hdr.Codec == GTSCodec.BC) { - ref var level = ref TileSetLevels[i]; - level.FlatTileIndicesOffset = (ulong)fs.Position; - - var tileIndices = PerLevelFlatTileIndices[i]; - Debug.Assert(tileIndices.Length == level.Height * level.Width * Header.NumLayers); - - BinUtils.WriteStructs(writer, tileIndices); + Debug.Assert(hdr.ParameterBlockSize == 0x38); + var bc = BinUtils.ReadStruct(reader); + ParameterBlocks.Add(hdr.ParameterBlockID, bc); + Debug.Assert(bc.Version == 0x238e); + Debug.Assert(bc.B == 0); + Debug.Assert(bc.C1 == 0); + Debug.Assert(bc.C2 == 0); + Debug.Assert(bc.BCField3 == 0); + Debug.Assert(bc.DataType == (Byte)GTSDataType.R8G8B8A8_SRGB || bc.DataType == (Byte)GTSDataType.X8Y8Z8W8); + Debug.Assert(bc.D == 0); + Debug.Assert(bc.FourCC == 0x20334342); + Debug.Assert(bc.E1 == 0); + Debug.Assert(bc.SaveMip == 1); + Debug.Assert(bc.E3 == 0); + Debug.Assert(bc.E4 == 0); + Debug.Assert(bc.F == 0); } + else + { + Debug.Assert(hdr.Codec == GTSCodec.Uniform); + Debug.Assert(hdr.ParameterBlockSize == 0x10); + + var blk = BinUtils.ReadStruct(reader); + Debug.Assert(blk.Version == 0x42); + Debug.Assert(blk.A_Unused == 0); + Debug.Assert(blk.Width == 4); + Debug.Assert(blk.Height == 1); + Debug.Assert(blk.DataType == GTSDataType.R8G8B8A8_SRGB || blk.DataType == GTSDataType.X8Y8Z8W8); + ParameterBlocks.Add(hdr.ParameterBlockID, blk); + } + } - Header.LevelsOffset = (ulong)fs.Position; - Header.NumLevels = (uint)TileSetLevels.Length; - BinUtils.WriteStructs(writer, TileSetLevels); - - Header.ParameterBlockHeadersOffset = (ulong)fs.Position; - Header.ParameterBlockHeadersCount = (uint)ParameterBlockHeaders.Length; - BinUtils.WriteStructs(writer, ParameterBlockHeaders); + fs.Position = (long)Header.PageFileMetadataOffset; + var pageFileInfos = new GTSPageFileInfo[Header.NumPageFiles]; + BinUtils.ReadStructs(reader, pageFileInfos); - for (var i = 0; i < ParameterBlockHeaders.Length; i++) + PageFileInfos = []; + uint 
nextPageIndex = 0; + foreach (var info in pageFileInfos) + { + PageFileInfos.Add(new PageFileInfo { - ref var hdr = ref ParameterBlockHeaders[i]; - hdr.FileInfoOffset = (ulong)fs.Position; - - if (hdr.Codec == GTSCodec.BC) - { - var block = (GTSBCParameterBlock)ParameterBlocks[hdr.ParameterBlockID]; - BinUtils.WriteStruct(writer, ref block); - } - else - { - Debug.Assert(hdr.Codec == GTSCodec.Uniform); - hdr.ParameterBlockSize = 0x10; + Meta = info, + FirstPageIndex = nextPageIndex, + FileName = info.FileName + }); + nextPageIndex += info.NumPages; + } - var block = (GTSUniformParameterBlock)ParameterBlocks[hdr.ParameterBlockID]; - BinUtils.WriteStruct(writer, ref block); - } - } + fs.Position = (long)Header.FourCCListOffset; + FourCCMetadata = new TileSetFourCC(); + FourCCMetadata.Read(fs, reader, Header.FourCCListSize); - Header.PageFileMetadataOffset = (ulong)fs.Position; - Header.NumPageFiles = (uint)PageFileInfos.Count; + if (loadThumbnails) + { + LoadThumbnails(fs, reader); + } - for (var i = 0; i < PageFileInfos.Count; i++) - { - var pageFile = PageFileInfos[i]; - BinUtils.WriteStruct(writer, ref pageFile.Meta); - } + fs.Position = (long)Header.PackedTileIDsOffset; + PackedTileIDs = new GTSPackedTileID[Header.NumPackedTileIDs]; + BinUtils.ReadStructs(reader, PackedTileIDs); - Header.FourCCListOffset = (ulong)fs.Position; - FourCCMetadata.Write(fs, writer); - Header.FourCCListSize = (uint)((ulong)fs.Position - Header.FourCCListOffset); + fs.Position = (long)Header.FlatTileInfoOffset; + FlatTileInfos = new GTSFlatTileInfo[Header.NumFlatTileInfos]; + BinUtils.ReadStructs(reader, FlatTileInfos); + } - Header.ThumbnailsOffset = (ulong)fs.Position; - var thumbHdr = new GTSThumbnailInfoHeader - { - NumThumbnails = 0 - }; - BinUtils.WriteStruct(writer, ref thumbHdr); + public void SaveToStream(Stream fs, BinaryWriter writer) + { + BinUtils.WriteStruct(writer, ref Header); - Header.PackedTileIDsOffset = (ulong)fs.Position; - Header.NumPackedTileIDs = (uint)PackedTileIDs.Length; - BinUtils.WriteStructs(writer, PackedTileIDs); + Header.LayersOffset = (ulong)fs.Position; + Header.NumLayers = (uint)TileSetLayers.Length; + BinUtils.WriteStructs(writer, TileSetLayers); - Header.FlatTileInfoOffset = (ulong)fs.Position; - Header.NumFlatTileInfos = (uint)FlatTileInfos.Length; - BinUtils.WriteStructs(writer, FlatTileInfos); + for (var i = 0; i < TileSetLevels.Length; i++) + { + ref var level = ref TileSetLevels[i]; + level.FlatTileIndicesOffset = (ulong)fs.Position; - // Re-write structures that contain offset information - fs.Position = 0; - BinUtils.WriteStruct(writer, ref Header); + var tileIndices = PerLevelFlatTileIndices[i]; + Debug.Assert(tileIndices.Length == level.Height * level.Width * Header.NumLayers); - fs.Position = (long)Header.ParameterBlockHeadersOffset; - BinUtils.WriteStructs(writer, ParameterBlockHeaders); + BinUtils.WriteStructs(writer, tileIndices); } - public bool GetTileInfo(int level, int layer, int x, int y, ref GTSFlatTileInfo tile) + Header.LevelsOffset = (ulong)fs.Position; + Header.NumLevels = (uint)TileSetLevels.Length; + BinUtils.WriteStructs(writer, TileSetLevels); + + Header.ParameterBlockHeadersOffset = (ulong)fs.Position; + Header.ParameterBlockHeadersCount = (uint)ParameterBlockHeaders.Length; + BinUtils.WriteStructs(writer, ParameterBlockHeaders); + + for (var i = 0; i < ParameterBlockHeaders.Length; i++) { - var tileIndices = PerLevelFlatTileIndices[level]; - var tileIndex = tileIndices[layer + Header.NumLayers * (x + y * TileSetLevels[level].Width)]; - if 
((tileIndex & 0x80000000) == 0) + ref var hdr = ref ParameterBlockHeaders[i]; + hdr.FileInfoOffset = (ulong)fs.Position; + + if (hdr.Codec == GTSCodec.BC) { - tile = FlatTileInfos[tileIndex]; - return true; + var block = (GTSBCParameterBlock)ParameterBlocks[hdr.ParameterBlockID]; + BinUtils.WriteStruct(writer, ref block); } else { - return false; + Debug.Assert(hdr.Codec == GTSCodec.Uniform); + hdr.ParameterBlockSize = 0x10; + + var block = (GTSUniformParameterBlock)ParameterBlocks[hdr.ParameterBlockID]; + BinUtils.WriteStruct(writer, ref block); } } - public PageFile GetOrLoadPageFile(int pageFileIdx) - { - if (!PageFiles.TryGetValue(pageFileIdx, out PageFile file)) - { - var meta = PageFileInfos[pageFileIdx]; - file = new PageFile(this, Path.Join(PagePath, meta.FileName)); - PageFiles.Add(pageFileIdx, file); - } + Header.PageFileMetadataOffset = (ulong)fs.Position; + Header.NumPageFiles = (uint)PageFileInfos.Count; - return file; + for (var i = 0; i < PageFileInfos.Count; i++) + { + var pageFile = PageFileInfos[i]; + BinUtils.WriteStruct(writer, ref pageFile.Meta); } - public void StitchTexture(int level, int layer, int minX, int minY, int maxX, int maxY, BC5Image output) + Header.FourCCListOffset = (ulong)fs.Position; + FourCCMetadata.Write(fs, writer); + Header.FourCCListSize = (uint)((ulong)fs.Position - Header.FourCCListOffset); + + Header.ThumbnailsOffset = (ulong)fs.Position; + var thumbHdr = new GTSThumbnailInfoHeader { - var tileWidth = Header.TileWidth - Header.TileBorder * 2; - var tileHeight = Header.TileHeight - Header.TileBorder * 2; - GTSFlatTileInfo tileInfo = new(); - for (var y = minY; y <= maxY; y++) - { - for (var x = minX; x <= maxX; x++) - { - if (GetTileInfo(level, layer, x, y, ref tileInfo)) - { - var pageFile = GetOrLoadPageFile(tileInfo.PageFileIndex); - var tile = pageFile.UnpackTileBC5(tileInfo.PageIndex, tileInfo.ChunkIndex, Compressor); - tile.CopyTo(output, 8, 8, (x - minX) * tileWidth, (y - minY) * tileHeight, tileWidth, tileHeight); - } - } - } + NumThumbnails = 0 + }; + BinUtils.WriteStruct(writer, ref thumbHdr); + + Header.PackedTileIDsOffset = (ulong)fs.Position; + Header.NumPackedTileIDs = (uint)PackedTileIDs.Length; + BinUtils.WriteStructs(writer, PackedTileIDs); + + Header.FlatTileInfoOffset = (ulong)fs.Position; + Header.NumFlatTileInfos = (uint)FlatTileInfos.Length; + BinUtils.WriteStructs(writer, FlatTileInfos); + + // Re-write structures that contain offset information + fs.Position = 0; + BinUtils.WriteStruct(writer, ref Header); + + fs.Position = (long)Header.ParameterBlockHeadersOffset; + BinUtils.WriteStructs(writer, ParameterBlockHeaders); + } + + public bool GetTileInfo(int level, int layer, int x, int y, ref GTSFlatTileInfo tile) + { + var tileIndices = PerLevelFlatTileIndices[level]; + var tileIndex = tileIndices[layer + Header.NumLayers * (x + y * TileSetLevels[level].Width)]; + if ((tileIndex & 0x80000000) == 0) + { + tile = FlatTileInfos[tileIndex]; + return true; } + else + { + return false; + } + } - public BC5Image ExtractTexture(int level, int layer, int minX, int minY, int maxX, int maxY) + public PageFile GetOrLoadPageFile(int pageFileIdx) + { + if (!PageFiles.TryGetValue(pageFileIdx, out PageFile file)) { - var width = (maxX - minX + 1) * (Header.TileWidth - Header.TileBorder * 2); - var height = (maxY - minY + 1) * (Header.TileHeight - Header.TileBorder * 2); - var stitched = new BC5Image(width, height); - StitchTexture(level, layer, minX, minY, maxX, maxY, stitched); - return stitched; + var meta = PageFileInfos[pageFileIdx]; 
+ file = new PageFile(this, Path.Join(PagePath, meta.FileName)); + PageFiles.Add(pageFileIdx, file); } - public int FindPageFile(string name) + return file; + } + + public void StitchTexture(int level, int layer, int minX, int minY, int maxX, int maxY, BC5Image output) + { + var tileWidth = Header.TileWidth - Header.TileBorder * 2; + var tileHeight = Header.TileHeight - Header.TileBorder * 2; + GTSFlatTileInfo tileInfo = new(); + for (var y = minY; y <= maxY; y++) { - for (var i = 0; i < PageFileInfos.Count; i++) + for (var x = minX; x <= maxX; x++) { - if (PageFileInfos[i].FileName.Contains(name)) + if (GetTileInfo(level, layer, x, y, ref tileInfo)) { - return i; + var pageFile = GetOrLoadPageFile(tileInfo.PageFileIndex); + var tile = pageFile.UnpackTileBC5(tileInfo.PageIndex, tileInfo.ChunkIndex, Compressor); + tile.CopyTo(output, 8, 8, (x - minX) * tileWidth, (y - minY) * tileHeight, tileWidth, tileHeight); } } - - return -1; } + } - public void ReleasePageFiles() - { - this.PageFiles.Clear(); - } + public BC5Image ExtractTexture(int level, int layer, int minX, int minY, int maxX, int maxY) + { + var width = (maxX - minX + 1) * (Header.TileWidth - Header.TileBorder * 2); + var height = (maxY - minY + 1) * (Header.TileHeight - Header.TileBorder * 2); + var stitched = new BC5Image(width, height); + StitchTexture(level, layer, minX, minY, maxX, maxY, stitched); + return stitched; + } - public BC5Image ExtractTexture(int level, int layer, FourCCTextureMeta tex) + public int FindPageFile(string name) + { + for (var i = 0; i < PageFileInfos.Count; i++) { - var tlW = Header.TileWidth - Header.TileBorder * 2; - var tlH = Header.TileHeight - Header.TileBorder * 2; - var tX = tex.X / tlW; - var tY = tex.Y / tlH; - var tW = tex.Width / tlW; - var tH = tex.Height / tlH; - var lv = (1 << level); - - var minX = (tX / lv) + ((tX % lv) > 0 ? 1 : 0); - var minY = (tY / lv) + ((tY % lv) > 0 ? 1 : 0); - var maxX = ((tX+tW) / lv) + (((tX + tW) % lv) > 0 ? 1 : 0) - 1; - var maxY = ((tY+tH) / lv) + (((tY + tH) % lv) > 0 ? 1 : 0) - 1; - - return ExtractTextureIfExists(level, layer, minX, minY, maxX, maxY); + if (PageFileInfos[i].FileName.Contains(name)) + { + return i; + } } - public BC5Image ExtractTextureIfExists(int levelIndex, int layer, int minX, int minY, int maxX, int maxY) + return -1; + } + + public void ReleasePageFiles() + { + this.PageFiles.Clear(); + } + + public BC5Image ExtractTexture(int level, int layer, FourCCTextureMeta tex) + { + var tlW = Header.TileWidth - Header.TileBorder * 2; + var tlH = Header.TileHeight - Header.TileBorder * 2; + var tX = tex.X / tlW; + var tY = tex.Y / tlH; + var tW = tex.Width / tlW; + var tH = tex.Height / tlH; + var lv = (1 << level); + + var minX = (tX / lv) + ((tX % lv) > 0 ? 1 : 0); + var minY = (tY / lv) + ((tY % lv) > 0 ? 1 : 0); + var maxX = ((tX+tW) / lv) + (((tX + tW) % lv) > 0 ? 1 : 0) - 1; + var maxY = ((tY+tH) / lv) + (((tY + tH) % lv) > 0 ? 
1 : 0) - 1; + + return ExtractTextureIfExists(level, layer, minX, minY, maxX, maxY); + } + + public BC5Image ExtractTextureIfExists(int levelIndex, int layer, int minX, int minY, int maxX, int maxY) + { + GTSFlatTileInfo tile = new(); + for (var x = minX; x <= maxX; x++) { - GTSFlatTileInfo tile = new(); - for (var x = minX; x <= maxX; x++) + for (var y = minY; y <= maxY; y++) { - for (var y = minY; y <= maxY; y++) + if (!GetTileInfo(levelIndex, layer, x, y, ref tile)) { - if (!GetTileInfo(levelIndex, layer, x, y, ref tile)) - { - return null; - } + return null; } } - - return ExtractTexture(levelIndex, layer, minX, minY, maxX, maxY); } + + return ExtractTexture(levelIndex, layer, minX, minY, maxX, maxY); } } diff --git a/LSLib/VirtualTextures/VirtualTextureFormats.cs b/LSLib/VirtualTextures/VirtualTextureFormats.cs index 0a7f6a4d..93d318b0 100644 --- a/LSLib/VirtualTextures/VirtualTextureFormats.cs +++ b/LSLib/VirtualTextures/VirtualTextureFormats.cs @@ -3,396 +3,395 @@ using System.Runtime.InteropServices; using System.Text; -namespace LSLib.VirtualTextures +namespace LSLib.VirtualTextures; + +[StructLayout(LayoutKind.Sequential, Pack = 1)] +public struct DDSHeader { - [StructLayout(LayoutKind.Sequential, Pack = 1)] - public struct DDSHeader + public const UInt32 DDSMagic = 0x20534444; + public const UInt32 HeaderSize = 0x7c; + public const UInt32 FourCC_DXT5 = 0x35545844; + + public UInt32 dwMagic; + public UInt32 dwSize; + public UInt32 dwFlags; + public UInt32 dwHeight; + public UInt32 dwWidth; + public UInt32 dwPitchOrLinearSize; + public UInt32 dwDepth; + public UInt32 dwMipMapCount; + [MarshalAs(UnmanagedType.ByValArray, SizeConst = 11)] + public UInt32[] dwReserved1; + + public UInt32 dwPFSize; + public UInt32 dwPFFlags; + public UInt32 dwFourCC; + public UInt32 dwRGBBitCount; + public UInt32 dwRBitMask; + public UInt32 dwGBitMask; + public UInt32 dwBBitMask; + public UInt32 dwABitMask; + + public UInt32 dwCaps; + public UInt32 dwCaps2; + public UInt32 dwCaps3; + public UInt32 dwCaps4; + public UInt32 dwReserved2; + + public string FourCCName { - public const UInt32 DDSMagic = 0x20534444; - public const UInt32 HeaderSize = 0x7c; - public const UInt32 FourCC_DXT5 = 0x35545844; - - public UInt32 dwMagic; - public UInt32 dwSize; - public UInt32 dwFlags; - public UInt32 dwHeight; - public UInt32 dwWidth; - public UInt32 dwPitchOrLinearSize; - public UInt32 dwDepth; - public UInt32 dwMipMapCount; - [MarshalAs(UnmanagedType.ByValArray, SizeConst = 11)] - public UInt32[] dwReserved1; - - public UInt32 dwPFSize; - public UInt32 dwPFFlags; - public UInt32 dwFourCC; - public UInt32 dwRGBBitCount; - public UInt32 dwRBitMask; - public UInt32 dwGBitMask; - public UInt32 dwBBitMask; - public UInt32 dwABitMask; - - public UInt32 dwCaps; - public UInt32 dwCaps2; - public UInt32 dwCaps3; - public UInt32 dwCaps4; - public UInt32 dwReserved2; - - public string FourCCName + get { - get - { - return Char.ToString((char)(dwFourCC & 0xff)) - + Char.ToString((char)((dwFourCC >> 8) & 0xff)) - + Char.ToString((char)((dwFourCC >> 16) & 0xff)) - + Char.ToString((char)((dwFourCC >> 24) & 0xff)); - } - - set - { - dwFourCC = (uint)value[0] - | ((uint)value[1] << 8) - | ((uint)value[2] << 16) - | ((uint)value[3] << 24); - } + return Char.ToString((char)(dwFourCC & 0xff)) + + Char.ToString((char)((dwFourCC >> 8) & 0xff)) + + Char.ToString((char)((dwFourCC >> 16) & 0xff)) + + Char.ToString((char)((dwFourCC >> 24) & 0xff)); } - }; - public enum GTSDataType : UInt32 - { - R8G8B8_SRGB = 0, - R8G8B8A8_SRGB = 1, - 
X8Y8Z0_TANGENT = 2, - R8G8B8_LINEAR = 3, - R8G8B8A8_LINEAR = 4, - X8 = 5, - X8Y8 = 6, - X8Y8Z8 = 7, - X8Y8Z8W8 = 8, - X16 = 9, - X16Y16 = 10, - X16Y16Z16 = 11, - X16Y16Z16W16 = 12, - X32 = 13, - X32_FLOAT = 14, - X32Y32 = 15, - X32Y32_FLOAT = 16, - X32Y32Z32 = 17, - X32Y32Z32_FLOAT = 18, - R32G32B32 = 19, - R32G32B32_FLOAT = 20, - X32Y32Z32W32 = 21, - X32Y32Z32W32_FLOAT = 22, - R32G32B32A32 = 23, - R32G32B32A32_FLOAT = 24, - R16G16B16_FLOAT = 25, - R16G16B16A16_FLOAT = 26 - }; - - public enum GTSCodec : UInt32 - { - Uniform = 0, - Color420 = 1, - Normal = 2, - RawColor = 3, - Binary = 4, - Codec15Color420 = 5, - Codec15Normal = 6, - RawNormal = 7, - Half = 8, - BC = 9, - MultiChannel = 10, - ASTC = 11 - }; - - [StructLayout(LayoutKind.Sequential, Pack = 1)] - public struct GTSHeader - { - public const UInt32 GRPGMagic = 0x47505247; // 'GRPG' - public const UInt32 CurrentVersion = 5; - - public UInt32 Magic; - public UInt32 Version; - public UInt32 Unused; - public Guid GUID; - public UInt32 NumLayers; - public UInt64 LayersOffset; - public UInt32 NumLevels; - public UInt64 LevelsOffset; - public Int32 TileWidth; - public Int32 TileHeight; - public Int32 TileBorder; - - public UInt32 I2; // Some tile count? - public UInt32 NumFlatTileInfos; - public UInt64 FlatTileInfoOffset; - public UInt32 I6; - public UInt32 I7; - - public UInt32 NumPackedTileIDs; - public UInt64 PackedTileIDsOffset; - - public UInt32 M; - public UInt32 N; - public UInt32 O; - public UInt32 P; - public UInt32 Q; - public UInt32 R; - public UInt32 S; - - public UInt32 PageSize; - public UInt32 NumPageFiles; - public UInt64 PageFileMetadataOffset; - - public UInt32 FourCCListSize; - public UInt64 FourCCListOffset; - - public UInt32 ParameterBlockHeadersCount; - public UInt64 ParameterBlockHeadersOffset; - - public UInt64 ThumbnailsOffset; - public UInt32 XJJ; - public UInt32 XKK; - public UInt32 XLL; - public UInt32 XMM; + set + { + dwFourCC = (uint)value[0] + | ((uint)value[1] << 8) + | ((uint)value[2] << 16) + | ((uint)value[3] << 24); + } } +}; - [StructLayout(LayoutKind.Sequential, Pack = 1)] - public struct GTSTileSetLayer - { - public GTSDataType DataType; - public Int32 B; // -1 - } +public enum GTSDataType : UInt32 +{ + R8G8B8_SRGB = 0, + R8G8B8A8_SRGB = 1, + X8Y8Z0_TANGENT = 2, + R8G8B8_LINEAR = 3, + R8G8B8A8_LINEAR = 4, + X8 = 5, + X8Y8 = 6, + X8Y8Z8 = 7, + X8Y8Z8W8 = 8, + X16 = 9, + X16Y16 = 10, + X16Y16Z16 = 11, + X16Y16Z16W16 = 12, + X32 = 13, + X32_FLOAT = 14, + X32Y32 = 15, + X32Y32_FLOAT = 16, + X32Y32Z32 = 17, + X32Y32Z32_FLOAT = 18, + R32G32B32 = 19, + R32G32B32_FLOAT = 20, + X32Y32Z32W32 = 21, + X32Y32Z32W32_FLOAT = 22, + R32G32B32A32 = 23, + R32G32B32A32_FLOAT = 24, + R16G16B16_FLOAT = 25, + R16G16B16A16_FLOAT = 26 +}; + +public enum GTSCodec : UInt32 +{ + Uniform = 0, + Color420 = 1, + Normal = 2, + RawColor = 3, + Binary = 4, + Codec15Color420 = 5, + Codec15Normal = 6, + RawNormal = 7, + Half = 8, + BC = 9, + MultiChannel = 10, + ASTC = 11 +}; + +[StructLayout(LayoutKind.Sequential, Pack = 1)] +public struct GTSHeader +{ + public const UInt32 GRPGMagic = 0x47505247; // 'GRPG' + public const UInt32 CurrentVersion = 5; + + public UInt32 Magic; + public UInt32 Version; + public UInt32 Unused; + public Guid GUID; + public UInt32 NumLayers; + public UInt64 LayersOffset; + public UInt32 NumLevels; + public UInt64 LevelsOffset; + public Int32 TileWidth; + public Int32 TileHeight; + public Int32 TileBorder; + + public UInt32 I2; // Some tile count? 
+ public UInt32 NumFlatTileInfos; + public UInt64 FlatTileInfoOffset; + public UInt32 I6; + public UInt32 I7; + + public UInt32 NumPackedTileIDs; + public UInt64 PackedTileIDsOffset; + + public UInt32 M; + public UInt32 N; + public UInt32 O; + public UInt32 P; + public UInt32 Q; + public UInt32 R; + public UInt32 S; + + public UInt32 PageSize; + public UInt32 NumPageFiles; + public UInt64 PageFileMetadataOffset; + + public UInt32 FourCCListSize; + public UInt64 FourCCListOffset; + + public UInt32 ParameterBlockHeadersCount; + public UInt64 ParameterBlockHeadersOffset; + + public UInt64 ThumbnailsOffset; + public UInt32 XJJ; + public UInt32 XKK; + public UInt32 XLL; + public UInt32 XMM; +} - [StructLayout(LayoutKind.Sequential, Pack = 1)] - public struct GTSTileSetLevel - { - public UInt32 Width; // Width in tiles - public UInt32 Height; // Height in tiles - public UInt64 FlatTileIndicesOffset; // Flat tiles offset in file - } +[StructLayout(LayoutKind.Sequential, Pack = 1)] +public struct GTSTileSetLayer +{ + public GTSDataType DataType; + public Int32 B; // -1 +} - [StructLayout(LayoutKind.Sequential, Pack = 1)] - public struct GTSParameterBlockHeader - { - public UInt32 ParameterBlockID; - public GTSCodec Codec; - public UInt32 ParameterBlockSize; - public UInt64 FileInfoOffset; - } +[StructLayout(LayoutKind.Sequential, Pack = 1)] +public struct GTSTileSetLevel +{ + public UInt32 Width; // Width in tiles + public UInt32 Height; // Height in tiles + public UInt64 FlatTileIndicesOffset; // Flat tiles offset in file +} - [StructLayout(LayoutKind.Sequential, Pack = 1)] - public struct GTSBCParameterBlock - { - public UInt16 Version; - [MarshalAs(UnmanagedType.ByValArray, SizeConst = 16)] - public byte[] Compression1; - [MarshalAs(UnmanagedType.ByValArray, SizeConst = 16)] - public byte[] Compression2; +[StructLayout(LayoutKind.Sequential, Pack = 1)] +public struct GTSParameterBlockHeader +{ + public UInt32 ParameterBlockID; + public GTSCodec Codec; + public UInt32 ParameterBlockSize; + public UInt64 FileInfoOffset; +} + +[StructLayout(LayoutKind.Sequential, Pack = 1)] +public struct GTSBCParameterBlock +{ + public UInt16 Version; + [MarshalAs(UnmanagedType.ByValArray, SizeConst = 16)] + public byte[] Compression1; + [MarshalAs(UnmanagedType.ByValArray, SizeConst = 16)] + public byte[] Compression2; - public string CompressionName1 + public string CompressionName1 + { + get { - get - { - int len; - for (len = 0; len < Compression1.Length && Compression1[len] != 0; len ++) {} - return Encoding.UTF8.GetString(Compression1, 0, len); - } - set - { - Compression1 = new byte[0x10]; - Array.Clear(Compression1, 0, 0x10); - byte[] encoded = Encoding.UTF8.GetBytes(value); - Array.Copy(encoded, Compression1, encoded.Length); - } + int len; + for (len = 0; len < Compression1.Length && Compression1[len] != 0; len ++) {} + return Encoding.UTF8.GetString(Compression1, 0, len); } - - public string CompressionName2 + set { - get - { - int len; - for (len = 0; len < Compression2.Length && Compression2[len] != 0; len ++) {} - return Encoding.UTF8.GetString(Compression2, 0, len); - } - set - { - Compression2 = new byte[0x10]; - Array.Clear(Compression2, 0, 0x10); - byte[] encoded = Encoding.UTF8.GetBytes(value); - Array.Copy(encoded, Compression2, encoded.Length); - } + Compression1 = new byte[0x10]; + Array.Clear(Compression1, 0, 0x10); + byte[] encoded = Encoding.UTF8.GetBytes(value); + Array.Copy(encoded, Compression1, encoded.Length); } - - public UInt32 B; - public Byte C1; - public Byte C2; - public Byte 
BCField3; - public Byte DataType; - public UInt16 D; - public UInt32 FourCC; - public Byte E1; - public Byte SaveMip; - public Byte E3; - public Byte E4; - public UInt32 F; } - [StructLayout(LayoutKind.Sequential, Pack = 1)] - public struct GTSUniformParameterBlock + public string CompressionName2 { - public UInt16 Version; - public UInt16 A_Unused; - public UInt32 Width; - public UInt32 Height; - public GTSDataType DataType; + get + { + int len; + for (len = 0; len < Compression2.Length && Compression2[len] != 0; len ++) {} + return Encoding.UTF8.GetString(Compression2, 0, len); + } + set + { + Compression2 = new byte[0x10]; + Array.Clear(Compression2, 0, 0x10); + byte[] encoded = Encoding.UTF8.GetBytes(value); + Array.Copy(encoded, Compression2, encoded.Length); + } } - [StructLayout(LayoutKind.Sequential, Pack = 1)] - public struct GTSPageFileInfo - { - [MarshalAs(UnmanagedType.ByValArray, SizeConst = 512)] - public byte[] FileNameBuf; + public UInt32 B; + public Byte C1; + public Byte C2; + public Byte BCField3; + public Byte DataType; + public UInt16 D; + public UInt32 FourCC; + public Byte E1; + public Byte SaveMip; + public Byte E3; + public Byte E4; + public UInt32 F; +} - public UInt32 NumPages; - public Guid Checksum; - public UInt32 F; // 2 +[StructLayout(LayoutKind.Sequential, Pack = 1)] +public struct GTSUniformParameterBlock +{ + public UInt16 Version; + public UInt16 A_Unused; + public UInt32 Width; + public UInt32 Height; + public GTSDataType DataType; +} + +[StructLayout(LayoutKind.Sequential, Pack = 1)] +public struct GTSPageFileInfo +{ + [MarshalAs(UnmanagedType.ByValArray, SizeConst = 512)] + public byte[] FileNameBuf; + + public UInt32 NumPages; + public Guid Checksum; + public UInt32 F; // 2 - public string FileName + public string FileName + { + get { - get + int nameLen; + for (nameLen = 0; nameLen < FileNameBuf.Length && FileNameBuf[nameLen] != 0; nameLen += 2) { - int nameLen; - for (nameLen = 0; nameLen < FileNameBuf.Length && FileNameBuf[nameLen] != 0; nameLen += 2) - { - } - return Encoding.Unicode.GetString(FileNameBuf, 0, nameLen); - } - set - { - FileNameBuf = new byte[512]; - Array.Clear(FileNameBuf, 0, 512); - byte[] encoded = Encoding.Unicode.GetBytes(value); - Array.Copy(encoded, FileNameBuf, encoded.Length); } + return Encoding.Unicode.GetString(FileNameBuf, 0, nameLen); + } + set + { + FileNameBuf = new byte[512]; + Array.Clear(FileNameBuf, 0, 512); + byte[] encoded = Encoding.Unicode.GetBytes(value); + Array.Copy(encoded, FileNameBuf, encoded.Length); } } +} - [StructLayout(LayoutKind.Sequential, Pack = 1)] - public struct GTSFourCCMetadata - { - public UInt32 FourCC; - public Byte Format; - public Byte ExtendedLength; - public UInt16 Length; +[StructLayout(LayoutKind.Sequential, Pack = 1)] +public struct GTSFourCCMetadata +{ + public UInt32 FourCC; + public Byte Format; + public Byte ExtendedLength; + public UInt16 Length; - public string FourCCName + public string FourCCName + { + get { - get - { - return Char.ToString((char)(FourCC & 0xff)) - + Char.ToString((char)((FourCC >> 8) & 0xff)) - + Char.ToString((char)((FourCC >> 16) & 0xff)) - + Char.ToString((char)((FourCC >> 24) & 0xff)); - } + return Char.ToString((char)(FourCC & 0xff)) + + Char.ToString((char)((FourCC >> 8) & 0xff)) + + Char.ToString((char)((FourCC >> 16) & 0xff)) + + Char.ToString((char)((FourCC >> 24) & 0xff)); + } - set - { - FourCC = (uint)value[0] - | ((uint)value[1] << 8) - | ((uint)value[2] << 16) - | ((uint)value[3] << 24); - } + set + { + FourCC = (uint)value[0] + | 
((uint)value[1] << 8) + | ((uint)value[2] << 16) + | ((uint)value[3] << 24); } } +} - [StructLayout(LayoutKind.Sequential, Pack = 1)] - public struct GTSThumbnailInfoHeader - { - public UInt32 NumThumbnails; - public UInt32 A; - public UInt32 B; - } +[StructLayout(LayoutKind.Sequential, Pack = 1)] +public struct GTSThumbnailInfoHeader +{ + public UInt32 NumThumbnails; + public UInt32 A; + public UInt32 B; +} - [StructLayout(LayoutKind.Sequential, Pack = 1)] - public struct GTSThumbnailInfo - { - public Guid GUID; - public UInt64 OffsetInFile; - public UInt32 CompressedSize; - public UInt32 Unknown1; - public UInt16 Unknown2; - public UInt16 Unknown3; - } +[StructLayout(LayoutKind.Sequential, Pack = 1)] +public struct GTSThumbnailInfo +{ + public Guid GUID; + public UInt64 OffsetInFile; + public UInt32 CompressedSize; + public UInt32 Unknown1; + public UInt16 Unknown2; + public UInt16 Unknown3; +} - [StructLayout(LayoutKind.Sequential, Pack = 1)] - public struct GTSPackedTileID(UInt32 layer, UInt32 level, UInt32 x, UInt32 y) - { - public UInt32 Val = (layer & 0xF) - | ((level & 0xF) << 4) - | ((y & 0xFFF) << 8) - | ((x & 0xFFF) << 20); +[StructLayout(LayoutKind.Sequential, Pack = 1)] +public struct GTSPackedTileID(UInt32 layer, UInt32 level, UInt32 x, UInt32 y) +{ + public UInt32 Val = (layer & 0xF) + | ((level & 0xF) << 4) + | ((y & 0xFFF) << 8) + | ((x & 0xFFF) << 20); - public UInt32 Layer + public UInt32 Layer + { + get { - get - { - return Val & 0x0F; - } + return Val & 0x0F; } + } - public UInt32 Level + public UInt32 Level + { + get { - get - { - return (Val >> 4) & 0x0F; - } + return (Val >> 4) & 0x0F; } + } - public UInt32 Y + public UInt32 Y + { + get { - get - { - return (Val >> 8) & 0x0FFF; - } + return (Val >> 8) & 0x0FFF; } + } - public UInt32 X + public UInt32 X + { + get { - get - { - return Val >> 20; - } + return Val >> 20; } } +} - [StructLayout(LayoutKind.Sequential, Pack = 1)] - public struct GTSFlatTileInfo - { - public UInt16 PageFileIndex; // Index of file in PageFileInfos - public UInt16 PageIndex; // Index of 1MB page - public UInt16 ChunkIndex; // Index of entry within page - public UInt16 D; // Always 1? - public UInt32 PackedTileIndex; // Index of tile in PackedTileIDs - } +[StructLayout(LayoutKind.Sequential, Pack = 1)] +public struct GTSFlatTileInfo +{ + public UInt16 PageFileIndex; // Index of file in PageFileInfos + public UInt16 PageIndex; // Index of 1MB page + public UInt16 ChunkIndex; // Index of entry within page + public UInt16 D; // Always 1? 
+ public UInt32 PackedTileIndex; // Index of tile in PackedTileIDs +} - [StructLayout(LayoutKind.Sequential, Pack = 1)] - public struct GTPHeader - { - public const UInt32 HeaderMagic = 0x50415247; - public const UInt32 DefaultVersion = 4; +[StructLayout(LayoutKind.Sequential, Pack = 1)] +public struct GTPHeader +{ + public const UInt32 HeaderMagic = 0x50415247; + public const UInt32 DefaultVersion = 4; - public UInt32 Magic; - public UInt32 Version; - public Guid GUID; - } + public UInt32 Magic; + public UInt32 Version; + public Guid GUID; +} - [StructLayout(LayoutKind.Sequential, Pack = 1)] - public struct GTPChunkHeader - { - public GTSCodec Codec; - public UInt32 ParameterBlockID; - public UInt32 Size; - } +[StructLayout(LayoutKind.Sequential, Pack = 1)] +public struct GTPChunkHeader +{ + public GTSCodec Codec; + public UInt32 ParameterBlockID; + public UInt32 Size; } diff --git a/RconClient/DosPackets.cs b/RconClient/DosPackets.cs index 5bac88c7..6892043b 100644 --- a/RconClient/DosPackets.cs +++ b/RconClient/DosPackets.cs @@ -2,164 +2,163 @@ using System.Collections.Generic; using System.Text; -namespace LSLib.Rcon.DosPackets +namespace LSLib.Rcon.DosPackets; + +public enum DosPacketId : byte +{ + DosUnknown87 = 0x87, + DosEnumerationList = 0x8B, + DosDisconnectConsole = 0x89, + DosConsoleResponse = 0x8A, + DosSendConsoleCommand = 0x8B +}; + +public class DosUnknown87 : Packet { - public enum DosPacketId : byte + public void Read(BinaryReaderBE Reader) { - DosUnknown87 = 0x87, - DosEnumerationList = 0x8B, - DosDisconnectConsole = 0x89, - DosConsoleResponse = 0x8A, - DosSendConsoleCommand = 0x8B - }; + } - public class DosUnknown87 : Packet + public void Write(BinaryWriterBE Writer) { - public void Read(BinaryReaderBE Reader) - { - } - - public void Write(BinaryWriterBE Writer) - { - throw new NotImplementedException(); - } + throw new NotImplementedException(); } +} - public class DosEnumeration +public class DosEnumeration +{ + public String Name; + public Byte Type; + public List Values; +} + +public class DosEnumerationList : Packet +{ + public List Enumerations; + + private static String ReadString(BinaryReaderBE Reader) { - public String Name; - public Byte Type; - public List Values; + var length = Reader.ReadInt32(); + var strBytes = Reader.ReadBytes(length); + return Encoding.UTF8.GetString(strBytes); } - public class DosEnumerationList : Packet + public void Read(BinaryReaderBE Reader) { - public List Enumerations; - - private static String ReadString(BinaryReaderBE Reader) + Enumerations = new List(); + var numEnums = Reader.ReadUInt32(); + for (var i = 0; i < numEnums; i++) { - var length = Reader.ReadInt32(); - var strBytes = Reader.ReadBytes(length); - return Encoding.UTF8.GetString(strBytes); - } + var enumeration = new DosEnumeration(); + enumeration.Name = ReadString(Reader); + enumeration.Type = Reader.ReadByte(); + enumeration.Values = new List(); - public void Read(BinaryReaderBE Reader) - { - Enumerations = new List(); - var numEnums = Reader.ReadUInt32(); - for (var i = 0; i < numEnums; i++) + var numElems = Reader.ReadUInt32(); + for (var j = 0; j < numElems; j++) { - var enumeration = new DosEnumeration(); - enumeration.Name = ReadString(Reader); - enumeration.Type = Reader.ReadByte(); - enumeration.Values = new List(); - - var numElems = Reader.ReadUInt32(); - for (var j = 0; j < numElems; j++) - { - enumeration.Values.Add(ReadString(Reader)); - } - - Enumerations.Add(enumeration); + enumeration.Values.Add(ReadString(Reader)); } - } - public void 
Write(BinaryWriterBE Writer) - { - throw new NotImplementedException(); + Enumerations.Add(enumeration); } } - public class DosDisconnectConsole : Packet + public void Write(BinaryWriterBE Writer) { - public void Read(BinaryReaderBE Reader) - { - throw new NotImplementedException(); - } + throw new NotImplementedException(); + } +} + +public class DosDisconnectConsole : Packet +{ + public void Read(BinaryReaderBE Reader) + { + throw new NotImplementedException(); + } - public void Write(BinaryWriterBE Writer) + public void Write(BinaryWriterBE Writer) + { + Writer.Write((Byte)DosPacketId.DosDisconnectConsole); + byte[] pkt = new byte[] { - Writer.Write((Byte)DosPacketId.DosDisconnectConsole); - byte[] pkt = new byte[] - { - 0x00, 0x00, 0x00, 0x60, - 0x00, 0x08, 0x0A, 0x00, - 0x00, 0x09, 0x00, 0x00, - 0x00, 0x15 - }; - Writer.Write(pkt); - } + 0x00, 0x00, 0x00, 0x60, + 0x00, 0x08, 0x0A, 0x00, + 0x00, 0x09, 0x00, 0x00, + 0x00, 0x15 + }; + Writer.Write(pkt); } +} + +public class DosSendConsoleCommand : Packet +{ + public String Command; + public String[] Arguments; - public class DosSendConsoleCommand : Packet + public void Read(BinaryReaderBE Reader) { - public String Command; - public String[] Arguments; + throw new NotImplementedException(); + } - public void Read(BinaryReaderBE Reader) + public void Write(BinaryWriterBE Writer) + { + Writer.Write((Byte)DosPacketId.DosSendConsoleCommand); + Writer.Write((UInt32)1); + byte[] cmd = Encoding.UTF8.GetBytes(Command); + Writer.Write((UInt32)cmd.Length); + Writer.Write(cmd); + Writer.Write((Byte)0); + + if (Arguments == null) { - throw new NotImplementedException(); + Writer.Write((UInt32)0); } - - public void Write(BinaryWriterBE Writer) + else { - Writer.Write((Byte)DosPacketId.DosSendConsoleCommand); - Writer.Write((UInt32)1); - byte[] cmd = Encoding.UTF8.GetBytes(Command); - Writer.Write((UInt32)cmd.Length); - Writer.Write(cmd); - Writer.Write((Byte)0); - - if (Arguments == null) + Writer.Write((UInt32)Arguments.Length); + for (var i = 0; i < Arguments.Length; i++) { - Writer.Write((UInt32)0); + byte[] arg = Encoding.UTF8.GetBytes(Arguments[i]); + Writer.Write((UInt32)arg.Length); + Writer.Write(arg); + Writer.Write((Byte)0); } - else - { - Writer.Write((UInt32)Arguments.Length); - for (var i = 0; i < Arguments.Length; i++) - { - byte[] arg = Encoding.UTF8.GetBytes(Arguments[i]); - Writer.Write((UInt32)arg.Length); - Writer.Write(arg); - Writer.Write((Byte)0); - } - } - - Writer.Write((UInt16)0); } + + Writer.Write((UInt16)0); } +} - public class DosConsoleResponse : Packet +public class DosConsoleResponse : Packet +{ + public class ConsoleLine { - public class ConsoleLine - { - public String Line; - public UInt32 Level; - }; + public String Line; + public UInt32 Level; + }; - public ConsoleLine[] Lines; + public ConsoleLine[] Lines; - public void Read(BinaryReaderBE Reader) + public void Read(BinaryReaderBE Reader) + { + var lines = Reader.ReadUInt32(); + Lines = new ConsoleLine[lines]; + for (var i = 0; i < lines; i++) { - var lines = Reader.ReadUInt32(); - Lines = new ConsoleLine[lines]; - for (var i = 0; i < lines; i++) - { - var consoleLine = new ConsoleLine(); - var length = Reader.ReadUInt32(); - var unknown = Reader.ReadByte(); - var length2 = Reader.ReadUInt32(); - var line = Reader.ReadBytes((int)length); - consoleLine.Level = Reader.ReadUInt32(); - consoleLine.Line = Encoding.UTF8.GetString(line); - Lines[i] = consoleLine; - } + var consoleLine = new ConsoleLine(); + var length = Reader.ReadUInt32(); + var unknown = 
Reader.ReadByte(); + var length2 = Reader.ReadUInt32(); + var line = Reader.ReadBytes((int)length); + consoleLine.Level = Reader.ReadUInt32(); + consoleLine.Line = Encoding.UTF8.GetString(line); + Lines[i] = consoleLine; } + } - public void Write(BinaryWriterBE Writer) - { - throw new NotImplementedException(); - } + public void Write(BinaryWriterBE Writer) + { + throw new NotImplementedException(); } } diff --git a/RconClient/Encapsulation.cs b/RconClient/Encapsulation.cs index 2bc7b505..89c835c8 100644 --- a/RconClient/Encapsulation.cs +++ b/RconClient/Encapsulation.cs @@ -1,133 +1,132 @@ using System; -namespace LSLib.Rcon +namespace LSLib.Rcon; + +public enum EncapsulatedReliability +{ + Unreliable = 0, + UnreliableSequenced = 1, + Reliable = 2, + ReliableOrdered = 3, + ReliableSequenced = 4, + UnreliableAcked = 5, + RelaibleAcked = 6, + ReliableOrderedAcked = 7 +} + +public struct EncapsulatedFlags { - public enum EncapsulatedReliability + public EncapsulatedReliability Reliability; + public bool Split; + + public void Read(BinaryReaderBE reader) { - Unreliable = 0, - UnreliableSequenced = 1, - Reliable = 2, - ReliableOrdered = 3, - ReliableSequenced = 4, - UnreliableAcked = 5, - RelaibleAcked = 6, - ReliableOrderedAcked = 7 + Byte flags = reader.ReadByte(); + Split = (flags & 0x10) == 0x10; + Reliability = (EncapsulatedReliability)(flags >> 5); } - public struct EncapsulatedFlags + public void Write(BinaryWriterBE writer) { - public EncapsulatedReliability Reliability; - public bool Split; + Byte flags = (Byte)(((Byte)Reliability << 5) + | (Split ? 0x10 : 0x00)); + writer.Write(flags); + } - public void Read(BinaryReaderBE reader) - { - Byte flags = reader.ReadByte(); - Split = (flags & 0x10) == 0x10; - Reliability = (EncapsulatedReliability)(flags >> 5); - } + public bool IsReliable() + { + return Reliability == EncapsulatedReliability.Reliable + || Reliability == EncapsulatedReliability.ReliableOrdered + || Reliability == EncapsulatedReliability.ReliableSequenced + || Reliability == EncapsulatedReliability.RelaibleAcked + || Reliability == EncapsulatedReliability.ReliableOrderedAcked; + } - public void Write(BinaryWriterBE writer) + public bool IsOrdered() + { + return Reliability == EncapsulatedReliability.ReliableOrdered + || Reliability == EncapsulatedReliability.ReliableOrderedAcked; + } + + public bool IsSequenced() + { + return Reliability == EncapsulatedReliability.UnreliableSequenced + || Reliability == EncapsulatedReliability.ReliableSequenced; + } +} + +public class EncapsulatedPacket : Packet +{ + public EncapsulatedFlags Flags; + public UInt16 Length; + public SequenceNumber MessageIndex; + public SequenceNumber SequenceIndex; + public SequenceNumber OrderIndex; + public Byte OrderChannel; + public UInt32 SplitCount; + public UInt16 SplitId; + public UInt32 SplitIndex; + public byte[] Payload; + + public void Read(BinaryReaderBE Reader) + { + Flags.Read(Reader); + Length = Reader.ReadUInt16BE(); + + if (Flags.IsReliable()) { - Byte flags = (Byte)(((Byte)Reliability << 5) - | (Split ? 
0x10 : 0x00)); - writer.Write(flags); + MessageIndex.Read(Reader); } - public bool IsReliable() + if (Flags.IsSequenced()) { - return Reliability == EncapsulatedReliability.Reliable - || Reliability == EncapsulatedReliability.ReliableOrdered - || Reliability == EncapsulatedReliability.ReliableSequenced - || Reliability == EncapsulatedReliability.RelaibleAcked - || Reliability == EncapsulatedReliability.ReliableOrderedAcked; + SequenceIndex.Read(Reader); } - public bool IsOrdered() + if (Flags.IsSequenced() || Flags.IsOrdered()) { - return Reliability == EncapsulatedReliability.ReliableOrdered - || Reliability == EncapsulatedReliability.ReliableOrderedAcked; + OrderIndex.Read(Reader); + OrderChannel = Reader.ReadByte(); } - public bool IsSequenced() + if (Flags.Split) { - return Reliability == EncapsulatedReliability.UnreliableSequenced - || Reliability == EncapsulatedReliability.ReliableSequenced; + SplitCount = Reader.ReadUInt32BE(); + SplitId = Reader.ReadUInt16BE(); + SplitIndex = Reader.ReadUInt32BE(); } + + Payload = Reader.ReadBytes(Length); } - public class EncapsulatedPacket : Packet + public void Write(BinaryWriterBE Writer) { - public EncapsulatedFlags Flags; - public UInt16 Length; - public SequenceNumber MessageIndex; - public SequenceNumber SequenceIndex; - public SequenceNumber OrderIndex; - public Byte OrderChannel; - public UInt32 SplitCount; - public UInt16 SplitId; - public UInt32 SplitIndex; - public byte[] Payload; - - public void Read(BinaryReaderBE Reader) + Flags.Write(Writer); + Writer.WriteBE(Length); + + if (Flags.IsReliable()) { - Flags.Read(Reader); - Length = Reader.ReadUInt16BE(); - - if (Flags.IsReliable()) - { - MessageIndex.Read(Reader); - } - - if (Flags.IsSequenced()) - { - SequenceIndex.Read(Reader); - } - - if (Flags.IsSequenced() || Flags.IsOrdered()) - { - OrderIndex.Read(Reader); - OrderChannel = Reader.ReadByte(); - } - - if (Flags.Split) - { - SplitCount = Reader.ReadUInt32BE(); - SplitId = Reader.ReadUInt16BE(); - SplitIndex = Reader.ReadUInt32BE(); - } - - Payload = Reader.ReadBytes(Length); + MessageIndex.Write(Writer); } - public void Write(BinaryWriterBE Writer) + if (Flags.IsSequenced()) { - Flags.Write(Writer); - Writer.WriteBE(Length); - - if (Flags.IsReliable()) - { - MessageIndex.Write(Writer); - } - - if (Flags.IsSequenced()) - { - SequenceIndex.Write(Writer); - } - - if (Flags.IsSequenced() || Flags.IsOrdered()) - { - OrderIndex.Write(Writer); - Writer.Write(OrderChannel); - } - - if (Flags.Split) - { - Writer.WriteBE(SplitCount); - Writer.WriteBE(SplitId); - Writer.WriteBE(SplitIndex); - } - - Writer.Write(Payload); + SequenceIndex.Write(Writer); } + + if (Flags.IsSequenced() || Flags.IsOrdered()) + { + OrderIndex.Write(Writer); + Writer.Write(OrderChannel); + } + + if (Flags.Split) + { + Writer.WriteBE(SplitCount); + Writer.WriteBE(SplitId); + Writer.WriteBE(SplitIndex); + } + + Writer.Write(Payload); } } diff --git a/RconClient/RakNetClient.cs b/RconClient/RakNetClient.cs index db2dfeae..33feee3e 100644 --- a/RconClient/RakNetClient.cs +++ b/RconClient/RakNetClient.cs @@ -4,182 +4,181 @@ using System.Net; using System.Net.Sockets; -namespace LSLib.Rcon +namespace LSLib.Rcon; + +public class AsyncUdpClient { - public class AsyncUdpClient - { - private UdpClient Socket; - public readonly UInt16 Port; + private UdpClient Socket; + public readonly UInt16 Port; - public delegate void PacketReceivedDelegate(IPEndPoint address, byte[] packet); - public PacketReceivedDelegate PacketReceived = delegate { }; + public delegate void 
PacketReceivedDelegate(IPEndPoint address, byte[] packet); + public PacketReceivedDelegate PacketReceived = delegate { }; - public AsyncUdpClient() - { - Random rnd = new Random(); - // Select a port number over 10000 as low port numbers - // are frequently used by various server apps. - Port = (UInt16)((rnd.Next() % (65536 - 10000)) + 10000); - Socket = new UdpClient(Port); - } - - public void RunLoop() + public AsyncUdpClient() + { + Random rnd = new Random(); + // Select a port number over 10000 as low port numbers + // are frequently used by various server apps. + Port = (UInt16)((rnd.Next() % (65536 - 10000)) + 10000); + Socket = new UdpClient(Port); + } + + public void RunLoop() + { + while (true) { - while (true) + IPEndPoint source = new IPEndPoint(0, 0); + byte[] packet; + try { - IPEndPoint source = new IPEndPoint(0, 0); - byte[] packet; - try + packet = Socket.Receive(ref source); + } + catch (SocketException e) + { + // WSAECONNRESET - This may happen if the Rcon server is + // not running on the port we're trying to send messages to. + if (e.ErrorCode == 10054) { - packet = Socket.Receive(ref source); + Console.ForegroundColor = ConsoleColor.Red; + Console.WriteLine("Received connection reset - Rcon server probably not running."); + Console.ResetColor(); + break; } - catch (SocketException e) + else { - // WSAECONNRESET - This may happen if the Rcon server is - // not running on the port we're trying to send messages to. - if (e.ErrorCode == 10054) - { - Console.ForegroundColor = ConsoleColor.Red; - Console.WriteLine("Received connection reset - Rcon server probably not running."); - Console.ResetColor(); - break; - } - else - { - throw e; - } + throw e; } - - PacketReceived(source, packet); } - } - public void Send(IPEndPoint address, byte[] packet) - { - Socket.Send(packet, packet.Length, address); + PacketReceived(source, packet); } } - public class RakNetSocket + public void Send(IPEndPoint address, byte[] packet) { - private AsyncUdpClient Socket; - private byte[] ClientId; - private RakNetSession Session; + Socket.Send(packet, packet.Length, address); + } +} + +public class RakNetSocket +{ + private AsyncUdpClient Socket; + private byte[] ClientId; + private RakNetSession Session; - public delegate void SessionEstablishedDelegate(RakNetSession session); - public SessionEstablishedDelegate SessionEstablished = delegate { }; + public delegate void SessionEstablishedDelegate(RakNetSession session); + public SessionEstablishedDelegate SessionEstablished = delegate { }; - public RakNetSocket() - { - Socket = new AsyncUdpClient(); - Socket.PacketReceived += this.OnPacketReceived; + public RakNetSocket() + { + Socket = new AsyncUdpClient(); + Socket.PacketReceived += this.OnPacketReceived; + + ClientId = new byte[8]; + var random = new Random(); + random.NextBytes(ClientId); + } - ClientId = new byte[8]; - var random = new Random(); - random.NextBytes(ClientId); + private Packet DecodePacket(Byte id, BinaryReaderBE reader) + { + Packet packet = null; + switch ((PacketId)id) + { + case PacketId.OpenConnectionRequest1: packet = new OpenConnectionRequest1(); break; + case PacketId.OpenConnectionResponse1: packet = new OpenConnectionResponse1(); break; + case PacketId.OpenConnectionRequest2: packet = new OpenConnectionRequest2(); break; + case PacketId.OpenConnectionResponse2: packet = new OpenConnectionResponse2(); break; + default: throw new InvalidDataException("Unrecognized packet ID"); } - private Packet DecodePacket(Byte id, BinaryReaderBE reader) + packet.Read(reader); + 
return packet; + } + + private void HandleConnectionResponse1(IPEndPoint address, OpenConnectionResponse1 response) + { + var connReq = new OpenConnectionRequest2 { - Packet packet = null; - switch ((PacketId)id) + Magic = RakNetConstants.Magic, + ClientId = ClientId, + Address = new RakAddress { - case PacketId.OpenConnectionRequest1: packet = new OpenConnectionRequest1(); break; - case PacketId.OpenConnectionResponse1: packet = new OpenConnectionResponse1(); break; - case PacketId.OpenConnectionRequest2: packet = new OpenConnectionRequest2(); break; - case PacketId.OpenConnectionResponse2: packet = new OpenConnectionResponse2(); break; - default: throw new InvalidDataException("Unrecognized packet ID"); - } + Address = (UInt32)IPAddress.Parse("127.0.0.1").Address, + Port = Socket.Port + }, + MTU = 1200 + }; + Send(address, connReq); + } - packet.Read(reader); - return packet; - } + private void HandleConnectionResponse2(IPEndPoint address, OpenConnectionResponse2 response) + { + Session = new RakNetSession(this, address, ClientId); + SessionEstablished(Session); + Session.OnConnected(); + } - private void HandleConnectionResponse1(IPEndPoint address, OpenConnectionResponse1 response) + private void HandlePacket(IPEndPoint address, Packet packet) + { + if (packet is OpenConnectionResponse1) { - var connReq = new OpenConnectionRequest2 - { - Magic = RakNetConstants.Magic, - ClientId = ClientId, - Address = new RakAddress - { - Address = (UInt32)IPAddress.Parse("127.0.0.1").Address, - Port = Socket.Port - }, - MTU = 1200 - }; - Send(address, connReq); + HandleConnectionResponse1(address, packet as OpenConnectionResponse1); } - - private void HandleConnectionResponse2(IPEndPoint address, OpenConnectionResponse2 response) + else if (packet is OpenConnectionResponse2) + { + HandleConnectionResponse2(address, packet as OpenConnectionResponse2); + } + else { - Session = new RakNetSession(this, address, ClientId); - SessionEstablished(Session); - Session.OnConnected(); + throw new NotImplementedException("Packet type not handled"); } + } - private void HandlePacket(IPEndPoint address, Packet packet) + private void OnPacketReceived(IPEndPoint address, byte[] packet) + { + using (var stream = new MemoryStream(packet)) + using (var reader = new BinaryReaderBE(stream)) { - if (packet is OpenConnectionResponse1) - { - HandleConnectionResponse1(address, packet as OpenConnectionResponse1); - } - else if (packet is OpenConnectionResponse2) + byte id = reader.ReadByte(); + if (id < 0x80) { - HandleConnectionResponse2(address, packet as OpenConnectionResponse2); + var decoded = DecodePacket(id, reader); + HandlePacket(address, decoded); } else { - throw new NotImplementedException("Packet type not handled"); - } - } - - private void OnPacketReceived(IPEndPoint address, byte[] packet) - { - using (var stream = new MemoryStream(packet)) - using (var reader = new BinaryReaderBE(stream)) - { - byte id = reader.ReadByte(); - if (id < 0x80) + if (Session != null) { - var decoded = DecodePacket(id, reader); - HandlePacket(address, decoded); + Session.HandlePacket(id, reader); } else { - if (Session != null) - { - Session.HandlePacket(id, reader); - } - else - { - throw new Exception("Unhandled session packet - no session established!"); - } + throw new Exception("Unhandled session packet - no session established!"); } } } + } - public void Send(IPEndPoint address, Packet packet) + public void Send(IPEndPoint address, Packet packet) + { + using (var stream = new MemoryStream()) + using (var writer = new 
BinaryWriterBE(stream)) { - using (var stream = new MemoryStream()) - using (var writer = new BinaryWriterBE(stream)) - { - packet.Write(writer); - stream.SetLength(stream.Position); - Socket.Send(address, stream.ToArray()); - } + packet.Write(writer); + stream.SetLength(stream.Position); + Socket.Send(address, stream.ToArray()); } + } - public void BeginConnection(IPEndPoint address) + public void BeginConnection(IPEndPoint address) + { + var connReq = new OpenConnectionRequest1 { - var connReq = new OpenConnectionRequest1 - { - Magic = RakNetConstants.Magic, - Protocol = RakNetConstants.ProtocolVersion - }; - Send(address, connReq); + Magic = RakNetConstants.Magic, + Protocol = RakNetConstants.ProtocolVersion + }; + Send(address, connReq); - Socket.RunLoop(); - } + Socket.RunLoop(); } } diff --git a/RconClient/RakNetCommon.cs b/RconClient/RakNetCommon.cs index 65898209..18539c9e 100644 --- a/RconClient/RakNetCommon.cs +++ b/RconClient/RakNetCommon.cs @@ -1,57 +1,56 @@ using System; -namespace LSLib.Rcon +namespace LSLib.Rcon; + +public enum PacketId : byte { - public enum PacketId : byte - { - ConnectedPing = 0x00, - UnconnectedPing = 0x01, - ConnectedPong = 0x03, - OpenConnectionRequest1 = 0x05, - OpenConnectionResponse1 = 0x06, - OpenConnectionRequest2 = 0x07, - OpenConnectionResponse2 = 0x08, - ConnectionRequest = 0x09, - ConnectionRequestAccepted = 0x10, - NewIncomingConnection = 0x13, - DisconnectionNotification = 0x15, - UnconnectedPong = 0x1C, - EncapsulatedData = 0x84, - ACK = 0xC0 - }; + ConnectedPing = 0x00, + UnconnectedPing = 0x01, + ConnectedPong = 0x03, + OpenConnectionRequest1 = 0x05, + OpenConnectionResponse1 = 0x06, + OpenConnectionRequest2 = 0x07, + OpenConnectionResponse2 = 0x08, + ConnectionRequest = 0x09, + ConnectionRequestAccepted = 0x10, + NewIncomingConnection = 0x13, + DisconnectionNotification = 0x15, + UnconnectedPong = 0x1C, + EncapsulatedData = 0x84, + ACK = 0xC0 +}; - public class RakNetConstants - { - public const Byte ProtocolVersion = 6; - public static readonly byte[] Magic = new byte[] { 0x00, 0xff, 0xff, 0x00, 0xfe, 0xfe, 0xfe, 0xfe, 0xfd, 0xfd, 0xfd, 0xfd, 0x12, 0x34, 0x56, 0x78 }; - } +public class RakNetConstants +{ + public const Byte ProtocolVersion = 6; + public static readonly byte[] Magic = new byte[] { 0x00, 0xff, 0xff, 0x00, 0xfe, 0xfe, 0xfe, 0xfe, 0xfd, 0xfd, 0xfd, 0xfd, 0x12, 0x34, 0x56, 0x78 }; +} - public interface Packet +public interface Packet +{ + void Read(BinaryReaderBE Reader); + void Write(BinaryWriterBE Writer); +} + +public struct SequenceNumber +{ + public UInt32 Number; + + public void Read(BinaryReaderBE Reader) { - void Read(BinaryReaderBE Reader); - void Write(BinaryWriterBE Writer); + Byte b1 = Reader.ReadByte(); + Byte b2 = Reader.ReadByte(); + Byte b3 = Reader.ReadByte(); + Number = (UInt32)b1 | ((UInt32)b2 << 8) | ((UInt32)b3 << 16); } - public struct SequenceNumber + public void Write(BinaryWriterBE Writer) { - public UInt32 Number; - - public void Read(BinaryReaderBE Reader) - { - Byte b1 = Reader.ReadByte(); - Byte b2 = Reader.ReadByte(); - Byte b3 = Reader.ReadByte(); - Number = (UInt32)b1 | ((UInt32)b2 << 8) | ((UInt32)b3 << 16); - } - - public void Write(BinaryWriterBE Writer) - { - Byte b1 = (Byte)(Number & 0xff); - Byte b2 = (Byte)((Number >> 8) & 0xff); - Byte b3 = (Byte)((Number >> 16) & 0xff); - Writer.Write(b1); - Writer.Write(b2); - Writer.Write(b3); - } + Byte b1 = (Byte)(Number & 0xff); + Byte b2 = (Byte)((Number >> 8) & 0xff); + Byte b3 = (Byte)((Number >> 16) & 0xff); + Writer.Write(b1); + 
Writer.Write(b2); + Writer.Write(b3); } } diff --git a/RconClient/RakNetPackets.cs b/RconClient/RakNetPackets.cs index cd29543d..54f103a4 100644 --- a/RconClient/RakNetPackets.cs +++ b/RconClient/RakNetPackets.cs @@ -2,313 +2,310 @@ using System.Collections.Generic; using System.IO; -namespace LSLib.Rcon.Packets -{ - public struct RakAddress - { - public UInt32 Address; - public UInt16 Port; +namespace LSLib.Rcon.Packets; - public void Read(BinaryReaderBE Reader) - { - var type = Reader.ReadByte(); - if (type != 4) throw new InvalidDataException("Only IPv4 addresses are supported"); - Address = ~Reader.ReadUInt32(); - Port = Reader.ReadUInt16BE(); - } +public struct RakAddress +{ + public UInt32 Address; + public UInt16 Port; - public void Write(BinaryWriterBE Writer) - { - Writer.Write((byte)4); - Writer.Write(~Address); - Writer.WriteBE(Port); - } + public void Read(BinaryReaderBE Reader) + { + var type = Reader.ReadByte(); + if (type != 4) throw new InvalidDataException("Only IPv4 addresses are supported"); + Address = ~Reader.ReadUInt32(); + Port = Reader.ReadUInt16BE(); } - public class OpenConnectionRequest1 : Packet + public void Write(BinaryWriterBE Writer) { - public byte[] Magic; - public Byte Protocol; + Writer.Write((byte)4); + Writer.Write(~Address); + Writer.WriteBE(Port); + } +} - public void Read(BinaryReaderBE Reader) - { - Magic = Reader.ReadBytes(16); - Protocol = Reader.ReadByte(); - } +public class OpenConnectionRequest1 : Packet +{ + public byte[] Magic; + public Byte Protocol; - public void Write(BinaryWriterBE Writer) - { - Writer.Write((byte)PacketId.OpenConnectionRequest1); - Writer.Write(Magic); - Writer.Write(Protocol); - byte[] pad = new byte[0x482]; - Writer.Write(pad); - } + public void Read(BinaryReaderBE Reader) + { + Magic = Reader.ReadBytes(16); + Protocol = Reader.ReadByte(); } - public class OpenConnectionResponse1 : Packet + public void Write(BinaryWriterBE Writer) { - public byte[] Magic; - public byte[] ServerId; - public Byte Security; - public UInt16 MTU; + Writer.Write((byte)PacketId.OpenConnectionRequest1); + Writer.Write(Magic); + Writer.Write(Protocol); + byte[] pad = new byte[0x482]; + Writer.Write(pad); + } +} - public void Read(BinaryReaderBE Reader) - { - Magic = Reader.ReadBytes(16); - ServerId = Reader.ReadBytes(8); - Security = Reader.ReadByte(); - MTU = Reader.ReadUInt16BE(); - } +public class OpenConnectionResponse1 : Packet +{ + public byte[] Magic; + public byte[] ServerId; + public Byte Security; + public UInt16 MTU; - public void Write(BinaryWriterBE Writer) - { - Writer.Write((byte)PacketId.OpenConnectionResponse1); - Writer.Write(Magic); - Writer.Write(ServerId); - Writer.Write(Security); - Writer.WriteBE(MTU); - byte[] pad = new byte[0x480]; - Writer.Write(pad); - } + public void Read(BinaryReaderBE Reader) + { + Magic = Reader.ReadBytes(16); + ServerId = Reader.ReadBytes(8); + Security = Reader.ReadByte(); + MTU = Reader.ReadUInt16BE(); } - public class OpenConnectionRequest2 : Packet + public void Write(BinaryWriterBE Writer) { - public byte[] Magic; - public RakAddress Address; - public UInt16 MTU; - public byte[] ClientId; + Writer.Write((byte)PacketId.OpenConnectionResponse1); + Writer.Write(Magic); + Writer.Write(ServerId); + Writer.Write(Security); + Writer.WriteBE(MTU); + byte[] pad = new byte[0x480]; + Writer.Write(pad); + } +} - public void Read(BinaryReaderBE Reader) - { - Magic = Reader.ReadBytes(16); - Address.Read(Reader); - MTU = Reader.ReadUInt16BE(); - ClientId = Reader.ReadBytes(8); - } +public class 
OpenConnectionRequest2 : Packet +{ + public byte[] Magic; + public RakAddress Address; + public UInt16 MTU; + public byte[] ClientId; - public void Write(BinaryWriterBE Writer) - { - Writer.Write((byte)PacketId.OpenConnectionRequest2); - Writer.Write(Magic); - Address.Write(Writer); - Writer.WriteBE(MTU); - Writer.Write(ClientId); - } + public void Read(BinaryReaderBE Reader) + { + Magic = Reader.ReadBytes(16); + Address.Read(Reader); + MTU = Reader.ReadUInt16BE(); + ClientId = Reader.ReadBytes(8); } - public class OpenConnectionResponse2 : Packet + public void Write(BinaryWriterBE Writer) { - public byte[] Magic; - public byte[] ServerId; - public RakAddress Address; - public UInt16 MTU; - public Byte Security; + Writer.Write((byte)PacketId.OpenConnectionRequest2); + Writer.Write(Magic); + Address.Write(Writer); + Writer.WriteBE(MTU); + Writer.Write(ClientId); + } +} - public void Read(BinaryReaderBE Reader) - { - Magic = Reader.ReadBytes(16); - ServerId = Reader.ReadBytes(8); - Address.Read(Reader); - MTU = Reader.ReadUInt16BE(); - Security = Reader.ReadByte(); - } +public class OpenConnectionResponse2 : Packet +{ + public byte[] Magic; + public byte[] ServerId; + public RakAddress Address; + public UInt16 MTU; + public Byte Security; - public void Write(BinaryWriterBE Writer) - { - Writer.Write((byte)PacketId.OpenConnectionRequest2); - Writer.Write(Magic); - Writer.Write(ServerId); - Address.Write(Writer); - Writer.WriteBE(MTU); - Writer.Write(Security); - } + public void Read(BinaryReaderBE Reader) + { + Magic = Reader.ReadBytes(16); + ServerId = Reader.ReadBytes(8); + Address.Read(Reader); + MTU = Reader.ReadUInt16BE(); + Security = Reader.ReadByte(); } - public class ConnectionRequest : Packet + public void Write(BinaryWriterBE Writer) { - public byte[] ClientId; - public UInt32 Time; - public Byte Security; + Writer.Write((byte)PacketId.OpenConnectionRequest2); + Writer.Write(Magic); + Writer.Write(ServerId); + Address.Write(Writer); + Writer.WriteBE(MTU); + Writer.Write(Security); + } +} - public void Read(BinaryReaderBE Reader) - { - ClientId = Reader.ReadBytes(8); - Reader.ReadUInt32(); // Unknown - Time = Reader.ReadUInt32BE(); - Security = Reader.ReadByte(); - } +public class ConnectionRequest : Packet +{ + public byte[] ClientId; + public UInt32 Time; + public Byte Security; - public void Write(BinaryWriterBE Writer) - { - Writer.Write((Byte)PacketId.ConnectionRequest); - Writer.Write(ClientId); - Writer.Write((UInt32)0); - Writer.Write(Time); - Writer.Write(Security); - } + public void Read(BinaryReaderBE Reader) + { + ClientId = Reader.ReadBytes(8); + Reader.ReadUInt32(); // Unknown + Time = Reader.ReadUInt32BE(); + Security = Reader.ReadByte(); } - public class ConnectionRequestAccepted : Packet + public void Write(BinaryWriterBE Writer) { - public void Read(BinaryReaderBE Reader) - { - // TODO - Unknown. - } + Writer.Write((Byte)PacketId.ConnectionRequest); + Writer.Write(ClientId); + Writer.Write((UInt32)0); + Writer.Write(Time); + Writer.Write(Security); + } +} - public void Write(BinaryWriterBE Writer) - { - throw new NotImplementedException(); - } +public class ConnectionRequestAccepted : Packet +{ + public void Read(BinaryReaderBE Reader) + { + // TODO - Unknown. 
} - public class NewIncomingConnection : Packet + public void Write(BinaryWriterBE Writer) { - public void Read(BinaryReaderBE Reader) - { - } + throw new NotImplementedException(); + } +} - public void Write(BinaryWriterBE Writer) - { - byte[] pkt = new byte[] - { - // Message ID - 0x13, - // List of addresses? - 0x04, 0x80, 0xff, 0xff, 0xfe, 0x15, 0x0c, - 0x04, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, - 0x04, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, - 0x04, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, - 0x04, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, - 0x04, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, - 0x04, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, - 0x04, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, - 0x04, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, - 0x04, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, - 0x04, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, - // Ping time - 0x18, 0x8e, 0x2f, 0x3f, - 0x00, 0x00, 0x00, 0x00, - // Pong time - 0x18, 0x8e, 0x2f, 0x3f - }; - Writer.Write(pkt); - } +public class NewIncomingConnection : Packet +{ + public void Read(BinaryReaderBE Reader) + { } - public class DisconnectionNotification : Packet + public void Write(BinaryWriterBE Writer) { - public void Read(BinaryReaderBE Reader) + byte[] pkt = new byte[] { - } + // Message ID + 0x13, + // List of addresses? + 0x04, 0x80, 0xff, 0xff, 0xfe, 0x15, 0x0c, + 0x04, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, + 0x04, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, + 0x04, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, + 0x04, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, + 0x04, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, + 0x04, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, + 0x04, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, + 0x04, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, + 0x04, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, + 0x04, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, + // Ping time + 0x18, 0x8e, 0x2f, 0x3f, + 0x00, 0x00, 0x00, 0x00, + // Pong time + 0x18, 0x8e, 0x2f, 0x3f + }; + Writer.Write(pkt); + } +} - public void Write(BinaryWriterBE Writer) - { - Writer.Write((Byte)PacketId.DisconnectionNotification); - } +public class DisconnectionNotification : Packet +{ + public void Read(BinaryReaderBE Reader) + { } - public class ConnectedPing : Packet + public void Write(BinaryWriterBE Writer) { - public UInt32 SendTime; + Writer.Write((Byte)PacketId.DisconnectionNotification); + } +} - public void Read(BinaryReaderBE Reader) - { - SendTime = Reader.ReadUInt32BE(); - } +public class ConnectedPing : Packet +{ + public UInt32 SendTime; - public void Write(BinaryWriterBE Writer) - { - Writer.Write((Byte)PacketId.ConnectedPing); - Writer.WriteBE(SendTime); - } + public void Read(BinaryReaderBE Reader) + { + SendTime = Reader.ReadUInt32BE(); } - public class ConnectedPong : Packet + public void Write(BinaryWriterBE Writer) { - public UInt32 ReceiveTime; - public UInt32 SendTime; + Writer.Write((Byte)PacketId.ConnectedPing); + Writer.WriteBE(SendTime); + } +} - public void Read(BinaryReaderBE Reader) - { - ReceiveTime = Reader.ReadUInt32BE(); - SendTime = Reader.ReadUInt32BE(); - } +public class ConnectedPong : Packet +{ + public UInt32 ReceiveTime; + public UInt32 SendTime; - public void Write(BinaryWriterBE Writer) - { - Writer.Write((Byte)PacketId.ConnectedPong); - Writer.WriteBE(ReceiveTime); - Writer.WriteBE(SendTime); - } + public void Read(BinaryReaderBE Reader) + { + ReceiveTime = Reader.ReadUInt32BE(); + SendTime = Reader.ReadUInt32BE(); } - public class DataPacket : Packet + public void Write(BinaryWriterBE Writer) { - public Byte Id; - public SequenceNumber Sequence; - public Packet WrappedPacket; + 
Writer.Write((Byte)PacketId.ConnectedPong); + Writer.WriteBE(ReceiveTime); + Writer.WriteBE(SendTime); + } +} - public void Read(BinaryReaderBE Reader) - { - Sequence.Read(Reader); - WrappedPacket.Read(Reader); - } +public class DataPacket : Packet +{ + public Byte Id; + public SequenceNumber Sequence; + public Packet WrappedPacket; - public void Write(BinaryWriterBE Writer) - { - Writer.Write(Id); - Sequence.Write(Writer); - WrappedPacket.Write(Writer); - } + public void Read(BinaryReaderBE Reader) + { + Sequence.Read(Reader); + WrappedPacket.Read(Reader); } - public class Acknowledgement : Packet + public void Write(BinaryWriterBE Writer) { - public List SequenceNumbers; + Writer.Write(Id); + Sequence.Write(Writer); + WrappedPacket.Write(Writer); + } +} - public void Read(BinaryReaderBE Reader) +public class Acknowledgement : Packet +{ + public List SequenceNumbers; + + public void Read(BinaryReaderBE Reader) + { + SequenceNumbers = new List(); + UInt16 numAcks = Reader.ReadUInt16BE(); + for (var i = 0; i < numAcks; i++) { - SequenceNumbers = new List(); - UInt16 numAcks = Reader.ReadUInt16BE(); - for (var i = 0; i < numAcks; i++) + Byte type = Reader.ReadByte(); + if (type == 0) { - Byte type = Reader.ReadByte(); - if (type == 0) - { - SequenceNumber first = new SequenceNumber(), - last = new SequenceNumber(); - first.Read(Reader); - last.Read(Reader); - for (UInt32 seq = first.Number; seq < last.Number; seq++) - { - SequenceNumber num = new SequenceNumber(); - num.Number = seq; - SequenceNumbers.Add(num); - } - } - else + SequenceNumber first = new SequenceNumber(), + last = new SequenceNumber(); + first.Read(Reader); + last.Read(Reader); + for (UInt32 seq = first.Number; seq < last.Number; seq++) { SequenceNumber num = new SequenceNumber(); - num.Read(Reader); + num.Number = seq; SequenceNumbers.Add(num); } } - } - - public void Write(BinaryWriterBE Writer) - { - Writer.Write((Byte)PacketId.ACK); - Writer.WriteBE((UInt16)SequenceNumbers.Count); - foreach (var seq in SequenceNumbers) + else { - Writer.Write((Byte)1); - seq.Write(Writer); + SequenceNumber num = new SequenceNumber(); + num.Read(Reader); + SequenceNumbers.Add(num); } } } - + public void Write(BinaryWriterBE Writer) + { + Writer.Write((Byte)PacketId.ACK); + Writer.WriteBE((UInt16)SequenceNumbers.Count); + foreach (var seq in SequenceNumbers) + { + Writer.Write((Byte)1); + seq.Write(Writer); + } + } } diff --git a/RconClient/RakNetSession.cs b/RconClient/RakNetSession.cs index 9c03674b..1b1cd4f0 100644 --- a/RconClient/RakNetSession.cs +++ b/RconClient/RakNetSession.cs @@ -4,274 +4,273 @@ using System.IO; using System.Net; -namespace LSLib.Rcon +namespace LSLib.Rcon; + +public class SplitPacket +{ + public UInt16 Index; + public UInt32 Available; + public byte[][] Buffers; +} + +public class RakNetSession { - public class SplitPacket + private RakNetSocket Socket; + private IPEndPoint Address; + private byte[] ClientId; + + private UInt32 NextPacketId = 0; + private UInt32 NextReliableId = 0; + private UInt32 NextSequenceId = 0; + private UInt32 NextOrderId = 0; + private Dictionary Splits; + + public delegate Packet PacketConstructorDelegate(Byte id); + public PacketConstructorDelegate PacketConstructor = delegate { return null; }; + + public delegate void PacketReceivedDelegate(RakNetSession session, Packet packet); + public PacketReceivedDelegate PacketReceived = delegate { }; + + public delegate void SessionDisconnectedDelegate(RakNetSession session); + public SessionDisconnectedDelegate SessionDisconnected = delegate { 
}; + + public RakNetSession(RakNetSocket Socket, IPEndPoint Address, byte[] ClientId) { - public UInt16 Index; - public UInt32 Available; - public byte[][] Buffers; + this.Socket = Socket; + this.Address = Address; + this.ClientId = ClientId; + Splits = new Dictionary(); } - public class RakNetSession + private void HandleConnectedPing(ConnectedPing packet) { - private RakNetSocket Socket; - private IPEndPoint Address; - private byte[] ClientId; - - private UInt32 NextPacketId = 0; - private UInt32 NextReliableId = 0; - private UInt32 NextSequenceId = 0; - private UInt32 NextOrderId = 0; - private Dictionary Splits; - - public delegate Packet PacketConstructorDelegate(Byte id); - public PacketConstructorDelegate PacketConstructor = delegate { return null; }; + var pong = new ConnectedPong(); + pong.ReceiveTime = packet.SendTime; + pong.SendTime = packet.SendTime; + SendEncapsulated(pong, EncapsulatedReliability.Unreliable); + } - public delegate void PacketReceivedDelegate(RakNetSession session, Packet packet); - public PacketReceivedDelegate PacketReceived = delegate { }; + private void HandleConnectionRequestAccepted(ConnectionRequestAccepted packet) + { + var ackReq = new NewIncomingConnection(); + SendEncapsulated(ackReq, EncapsulatedReliability.ReliableOrdered); + } - public delegate void SessionDisconnectedDelegate(RakNetSession session); - public SessionDisconnectedDelegate SessionDisconnected = delegate { }; + private void HandleDisconnectionNotification(DisconnectionNotification packet) + { + SessionDisconnected(this); + } - public RakNetSession(RakNetSocket Socket, IPEndPoint Address, byte[] ClientId) + private void HandleEncapsulatedPayload(byte[] payload) + { + using (var encapMemory = new MemoryStream(payload)) + using (var encapStream = new BinaryReaderBE(encapMemory)) { - this.Socket = Socket; - this.Address = Address; - this.ClientId = ClientId; - Splits = new Dictionary(); + var encapId = encapStream.ReadByte(); + HandlePacketDecapsulated(encapId, encapStream); } + } - private void HandleConnectedPing(ConnectedPing packet) + private void HandleSplitPacket(EncapsulatedPacket packet) + { + SplitPacket split = null; + if (!Splits.TryGetValue(packet.SplitId, out split)) { - var pong = new ConnectedPong(); - pong.ReceiveTime = packet.SendTime; - pong.SendTime = packet.SendTime; - SendEncapsulated(pong, EncapsulatedReliability.Unreliable); + split = new SplitPacket(); + split.Index = packet.SplitId; + split.Available = 0; + split.Buffers = new byte[packet.SplitCount][]; + Splits.Add(packet.SplitId, split); } - private void HandleConnectionRequestAccepted(ConnectionRequestAccepted packet) + if (split.Buffers.Length != packet.SplitCount) { - var ackReq = new NewIncomingConnection(); - SendEncapsulated(ackReq, EncapsulatedReliability.ReliableOrdered); + throw new InvalidDataException("Packet split count mismatch"); } - private void HandleDisconnectionNotification(DisconnectionNotification packet) + if (split.Buffers[packet.SplitIndex] != null) { - SessionDisconnected(this); + return; } - private void HandleEncapsulatedPayload(byte[] payload) + split.Buffers[packet.SplitIndex] = packet.Payload; + split.Available++; + + if (split.Available == split.Buffers.Length) { - using (var encapMemory = new MemoryStream(payload)) - using (var encapStream = new BinaryReaderBE(encapMemory)) + Splits.Remove(split.Index); + using (var memory = new MemoryStream()) + using (var stream = new BinaryWriter(memory)) { - var encapId = encapStream.ReadByte(); - HandlePacketDecapsulated(encapId, 
encapStream); + foreach (var buffer in split.Buffers) + { + stream.Write(buffer); + } + + memory.SetLength(memory.Position); + HandleEncapsulatedPayload(memory.ToArray()); } } + } - private void HandleSplitPacket(EncapsulatedPacket packet) + private void SendAcknowledgement(SequenceNumber sequence) + { + var ack = new Acknowledgement { - SplitPacket split = null; - if (!Splits.TryGetValue(packet.SplitId, out split)) - { - split = new SplitPacket(); - split.Index = packet.SplitId; - split.Available = 0; - split.Buffers = new byte[packet.SplitCount][]; - Splits.Add(packet.SplitId, split); - } - - if (split.Buffers.Length != packet.SplitCount) - { - throw new InvalidDataException("Packet split count mismatch"); - } + SequenceNumbers = new List { sequence } + }; + Socket.Send(Address, ack); + } - if (split.Buffers[packet.SplitIndex] != null) - { - return; - } + private void HandleEncapsulatedPacket(DataPacket data, EncapsulatedPacket packet) + { + if (packet.Flags.Split) + { + HandleSplitPacket(packet); + } + else + { + HandleEncapsulatedPayload(packet.Payload); + } - split.Buffers[packet.SplitIndex] = packet.Payload; - split.Available++; + if (packet.Flags.IsReliable()) + { + SendAcknowledgement(data.Sequence); + } + } - if (split.Available == split.Buffers.Length) - { - Splits.Remove(split.Index); - using (var memory = new MemoryStream()) - using (var stream = new BinaryWriter(memory)) - { - foreach (var buffer in split.Buffers) - { - stream.Write(buffer); - } + private void HandlePacketDecapsulated(Byte id, BinaryReaderBE reader) + { + var packet = DecodePacketDecapsulated(id, reader); - memory.SetLength(memory.Position); - HandleEncapsulatedPayload(memory.ToArray()); - } - } + if (packet is ConnectedPing) + { + HandleConnectedPing(packet as ConnectedPing); } - - private void SendAcknowledgement(SequenceNumber sequence) + else if (packet is ConnectionRequestAccepted) { - var ack = new Acknowledgement - { - SequenceNumbers = new List { sequence } - }; - Socket.Send(Address, ack); + HandleConnectionRequestAccepted(packet as ConnectionRequestAccepted); } - - private void HandleEncapsulatedPacket(DataPacket data, EncapsulatedPacket packet) + else if (packet is DisconnectionNotification) { - if (packet.Flags.Split) - { - HandleSplitPacket(packet); - } - else - { - HandleEncapsulatedPayload(packet.Payload); - } - - if (packet.Flags.IsReliable()) - { - SendAcknowledgement(data.Sequence); - } + HandleDisconnectionNotification(packet as DisconnectionNotification); } - - private void HandlePacketDecapsulated(Byte id, BinaryReaderBE reader) + else if (id >= 0x80) { - var packet = DecodePacketDecapsulated(id, reader); - - if (packet is ConnectedPing) - { - HandleConnectedPing(packet as ConnectedPing); - } - else if (packet is ConnectionRequestAccepted) - { - HandleConnectionRequestAccepted(packet as ConnectionRequestAccepted); - } - else if (packet is DisconnectionNotification) - { - HandleDisconnectionNotification(packet as DisconnectionNotification); - } - else if (id >= 0x80) - { - PacketReceived(this, packet); - } - else - { - throw new Exception("Unhandled encapsulated packet"); - } + PacketReceived(this, packet); } - - public void HandlePacket(Byte id, BinaryReaderBE reader) + else { - var packet = DecodePacket(id, reader); - - if (packet is Acknowledgement) - { - // TODO - ACK mechanism not handled - } - else if (packet is DataPacket) - { - var encap = (packet as DataPacket).WrappedPacket as EncapsulatedPacket; - HandleEncapsulatedPacket(packet as DataPacket, encap); - } - else - { - throw new 
Exception("Unhandled packet"); - } + throw new Exception("Unhandled encapsulated packet"); } + } - private Packet DecodePacket(Byte id, BinaryReaderBE reader) - { - Packet packet = null; - if (id >= 0x80 && id < 0xA0) - { - var dataPkt = new DataPacket(); - dataPkt.WrappedPacket = new EncapsulatedPacket(); - packet = dataPkt; - } - else - { - switch ((PacketId)id) - { - case PacketId.ACK: packet = new Acknowledgement(); break; - default: throw new InvalidDataException("Unrecognized packet ID"); - } - } + public void HandlePacket(Byte id, BinaryReaderBE reader) + { + var packet = DecodePacket(id, reader); - packet.Read(reader); - return packet; + if (packet is Acknowledgement) + { + // TODO - ACK mechanism not handled + } + else if (packet is DataPacket) + { + var encap = (packet as DataPacket).WrappedPacket as EncapsulatedPacket; + HandleEncapsulatedPacket(packet as DataPacket, encap); + } + else + { + throw new Exception("Unhandled packet"); } + } - private Packet DecodePacketDecapsulated(Byte id, BinaryReaderBE reader) + private Packet DecodePacket(Byte id, BinaryReaderBE reader) + { + Packet packet = null; + if (id >= 0x80 && id < 0xA0) + { + var dataPkt = new DataPacket(); + dataPkt.WrappedPacket = new EncapsulatedPacket(); + packet = dataPkt; + } + else { - Packet packet = null; switch ((PacketId)id) { - case PacketId.ConnectedPing: packet = new ConnectedPing(); break; - case PacketId.ConnectionRequest: packet = new ConnectionRequest(); break; - case PacketId.ConnectionRequestAccepted: packet = new ConnectionRequestAccepted(); break; - case PacketId.DisconnectionNotification: packet = new DisconnectionNotification(); break; - default: - packet = PacketConstructor(id); - if (packet == null) throw new InvalidDataException("Unrecognized encapsulated packet ID"); - break; + case PacketId.ACK: packet = new Acknowledgement(); break; + default: throw new InvalidDataException("Unrecognized packet ID"); } - - packet.Read(reader); - return packet; } - public void SendEncapsulated(Packet packet, EncapsulatedReliability reliability) + packet.Read(reader); + return packet; + } + + private Packet DecodePacketDecapsulated(Byte id, BinaryReaderBE reader) + { + Packet packet = null; + switch ((PacketId)id) { - var dataPkt = new DataPacket(); - dataPkt.Id = (byte)PacketId.EncapsulatedData; - dataPkt.Sequence.Number = NextPacketId++; + case PacketId.ConnectedPing: packet = new ConnectedPing(); break; + case PacketId.ConnectionRequest: packet = new ConnectionRequest(); break; + case PacketId.ConnectionRequestAccepted: packet = new ConnectionRequestAccepted(); break; + case PacketId.DisconnectionNotification: packet = new DisconnectionNotification(); break; + default: + packet = PacketConstructor(id); + if (packet == null) throw new InvalidDataException("Unrecognized encapsulated packet ID"); + break; + } - var encapPkt = new EncapsulatedPacket(); - encapPkt.Flags.Reliability = reliability; - if (encapPkt.Flags.IsReliable()) - { - encapPkt.MessageIndex.Number = NextReliableId++; - } + packet.Read(reader); + return packet; + } - if (encapPkt.Flags.IsSequenced()) - { - encapPkt.SequenceIndex.Number = NextSequenceId++; - } + public void SendEncapsulated(Packet packet, EncapsulatedReliability reliability) + { + var dataPkt = new DataPacket(); + dataPkt.Id = (byte)PacketId.EncapsulatedData; + dataPkt.Sequence.Number = NextPacketId++; - if (encapPkt.Flags.IsSequenced() || encapPkt.Flags.IsOrdered()) - { - encapPkt.OrderChannel = 0; - encapPkt.OrderIndex.Number = NextOrderId++; - } + var encapPkt = new 
EncapsulatedPacket(); + encapPkt.Flags.Reliability = reliability; + if (encapPkt.Flags.IsReliable()) + { + encapPkt.MessageIndex.Number = NextReliableId++; + } - using (var memory = new MemoryStream()) - using (var stream = new BinaryWriterBE(memory)) - { - packet.Write(stream); - memory.SetLength(memory.Position); - encapPkt.Payload = memory.ToArray(); - encapPkt.Length = (UInt16)(encapPkt.Payload.Length * 8); - } + if (encapPkt.Flags.IsSequenced()) + { + encapPkt.SequenceIndex.Number = NextSequenceId++; + } - dataPkt.WrappedPacket = encapPkt; - Socket.Send(Address, dataPkt); + if (encapPkt.Flags.IsSequenced() || encapPkt.Flags.IsOrdered()) + { + encapPkt.OrderChannel = 0; + encapPkt.OrderIndex.Number = NextOrderId++; } - public void OnConnected() + using (var memory = new MemoryStream()) + using (var stream = new BinaryWriterBE(memory)) { - var currentTimestamp = (UInt32)(DateTime.UtcNow - new DateTime(1970, 1, 1, 0, 0, 0, 0)).TotalSeconds; - var connReq = new ConnectionRequest - { - ClientId = ClientId, - Time = currentTimestamp, - Security = 0 - }; - SendEncapsulated(connReq, EncapsulatedReliability.Reliable); + packet.Write(stream); + memory.SetLength(memory.Position); + encapPkt.Payload = memory.ToArray(); + encapPkt.Length = (UInt16)(encapPkt.Payload.Length * 8); } + + dataPkt.WrappedPacket = encapPkt; + Socket.Send(Address, dataPkt); + } + + public void OnConnected() + { + var currentTimestamp = (UInt32)(DateTime.UtcNow - new DateTime(1970, 1, 1, 0, 0, 0, 0)).TotalSeconds; + var connReq = new ConnectionRequest + { + ClientId = ClientId, + Time = currentTimestamp, + Security = 0 + }; + SendEncapsulated(connReq, EncapsulatedReliability.Reliable); } } diff --git a/RconClient/Rcon.cs b/RconClient/Rcon.cs index bd3eed79..eaf80650 100644 --- a/RconClient/Rcon.cs +++ b/RconClient/Rcon.cs @@ -4,155 +4,154 @@ using System.Net; using System.Timers; -namespace LSLib.Rcon +namespace LSLib.Rcon; + +public class RconApp { - public class RconApp - { - static private bool Executed = false; - static private bool ReceivedEvents = false; - static private RakNetSession Session; - static private string Command; - static private string[] Arguments; + static private bool Executed = false; + static private bool ReceivedEvents = false; + static private RakNetSession Session; + static private string Command; + static private string[] Arguments; - static void Main(string[] args) + static void Main(string[] args) + { + if (args.Length < 2) { - if (args.Length < 2) - { - Console.ForegroundColor = ConsoleColor.Red; - Console.Write("Usage: Rcon [ ...]"); - Console.ResetColor(); - Environment.Exit(1); - } + Console.ForegroundColor = ConsoleColor.Red; + Console.Write("Usage: Rcon [ ...]"); + Console.ResetColor(); + Environment.Exit(1); + } - var ipPort = args[0].Split(':'); - var port = Int32.Parse(ipPort[1]); - Command = args[1]; - Arguments = new string[args.Length - 2]; - Array.Copy(args, 2, Arguments, 0, args.Length - 2); - - var socket = new RakNetSocket(); - socket.SessionEstablished += OnSessionEstablished; + var ipPort = args[0].Split(':'); + var port = Int32.Parse(ipPort[1]); + Command = args[1]; + Arguments = new string[args.Length - 2]; + Array.Copy(args, 2, Arguments, 0, args.Length - 2); + + var socket = new RakNetSocket(); + socket.SessionEstablished += OnSessionEstablished; - // Create a disconnect timer to make sure that we disconnect after the last console message - var timer = new System.Timers.Timer(3000); - timer.Elapsed += OnTimedEvent; - timer.Enabled = true; - - IPEndPoint target = new 
IPEndPoint(IPAddress.Parse(ipPort[0]), port); - socket.BeginConnection(target); - } + // Create a disconnect timer to make sure that we disconnect after the last console message + var timer = new System.Timers.Timer(3000); + timer.Elapsed += OnTimedEvent; + timer.Enabled = true; + + IPEndPoint target = new IPEndPoint(IPAddress.Parse(ipPort[0]), port); + socket.BeginConnection(target); + } - static void OnTimedEvent(object source, ElapsedEventArgs e) + static void OnTimedEvent(object source, ElapsedEventArgs e) + { + if (!ReceivedEvents) { - if (!ReceivedEvents) - { - ReceivedEvents = true; + ReceivedEvents = true; - if (Session != null) - { - Console.WriteLine("Disconnecting."); - var disconnectCmd = new DosDisconnectConsole(); - Session.SendEncapsulated(disconnectCmd, EncapsulatedReliability.ReliableOrdered); - } - else - { - Console.ForegroundColor = ConsoleColor.Red; - Console.WriteLine("Timed out waiting for session establishment"); - Console.ResetColor(); - } + if (Session != null) + { + Console.WriteLine("Disconnecting."); + var disconnectCmd = new DosDisconnectConsole(); + Session.SendEncapsulated(disconnectCmd, EncapsulatedReliability.ReliableOrdered); } else { - ReceivedEvents = false; + Console.ForegroundColor = ConsoleColor.Red; + Console.WriteLine("Timed out waiting for session establishment"); + Console.ResetColor(); } } + else + { + ReceivedEvents = false; + } + } - static Packet OnPacketParse(Byte id) + static Packet OnPacketParse(Byte id) + { + switch ((DosPacketId)id) { - switch ((DosPacketId)id) - { - case DosPacketId.DosUnknown87: return new DosUnknown87(); - case DosPacketId.DosEnumerationList: return new DosEnumerationList(); - case DosPacketId.DosConsoleResponse: return new DosConsoleResponse(); - default: return null; - } + case DosPacketId.DosUnknown87: return new DosUnknown87(); + case DosPacketId.DosEnumerationList: return new DosEnumerationList(); + case DosPacketId.DosConsoleResponse: return new DosConsoleResponse(); + default: return null; } + } + + static void OnSessionEstablished(RakNetSession session) + { + Console.WriteLine("RakNet session established to Rcon server."); + session.PacketConstructor += OnPacketParse; + session.PacketReceived += OnPacketReceived; + session.SessionDisconnected += OnSessionDisconnected; + Session = session; + } - static void OnSessionEstablished(RakNetSession session) + static void OnSessionDisconnected(RakNetSession session) + { + if (!Executed) + { + Console.ForegroundColor = ConsoleColor.Red; + Console.WriteLine("Received DisconnectionNotification before console command could be sent."); + Console.ResetColor(); + } + else { - Console.WriteLine("RakNet session established to Rcon server."); - session.PacketConstructor += OnPacketParse; - session.PacketReceived += OnPacketReceived; - session.SessionDisconnected += OnSessionDisconnected; - Session = session; + Console.WriteLine("Closed connection to Rcon server."); } - static void OnSessionDisconnected(RakNetSession session) + Environment.Exit(0); + } + + static void OnPacketReceived(RakNetSession session, Packet packet) + { + if (packet is DosUnknown87) + { + // Unknown. 
+ } + else if (packet is DosEnumerationList) { if (!Executed) { - Console.ForegroundColor = ConsoleColor.Red; - Console.WriteLine("Received DisconnectionNotification before console command could be sent."); - Console.ResetColor(); - } - else - { - Console.WriteLine("Closed connection to Rcon server."); + Console.WriteLine("Sending console command:"); + Console.WriteLine("> " + Command + " " + String.Join(" ", Arguments)); + var consoleCmd = new DosSendConsoleCommand + { + Command = Command, + Arguments = Arguments + }; + session.SendEncapsulated(consoleCmd, EncapsulatedReliability.ReliableOrdered); } - Environment.Exit(0); + ReceivedEvents = true; } - - static void OnPacketReceived(RakNetSession session, Packet packet) + else if (packet is DosConsoleResponse) { - if (packet is DosUnknown87) - { - // Unknown. - } - else if (packet is DosEnumerationList) + bool hasResult = false; + var lines = (packet as DosConsoleResponse).Lines; + foreach (var line in lines) { - if (!Executed) + switch (line.Level) { - Console.WriteLine("Sending console command:"); - Console.WriteLine("> " + Command + " " + String.Join(" ", Arguments)); - var consoleCmd = new DosSendConsoleCommand - { - Command = Command, - Arguments = Arguments - }; - session.SendEncapsulated(consoleCmd, EncapsulatedReliability.ReliableOrdered); + case 4: Console.ForegroundColor = ConsoleColor.Green; hasResult = true; break; + case 5: Console.ForegroundColor = ConsoleColor.Red; hasResult = true; break; + default: Console.ResetColor(); break; } - - ReceivedEvents = true; + Console.WriteLine(line.Line); } - else if (packet is DosConsoleResponse) - { - bool hasResult = false; - var lines = (packet as DosConsoleResponse).Lines; - foreach (var line in lines) - { - switch (line.Level) - { - case 4: Console.ForegroundColor = ConsoleColor.Green; hasResult = true; break; - case 5: Console.ForegroundColor = ConsoleColor.Red; hasResult = true; break; - default: Console.ResetColor(); break; - } - Console.WriteLine(line.Line); - } - Console.ResetColor(); - Executed = true; - ReceivedEvents = true; + Console.ResetColor(); + Executed = true; + ReceivedEvents = true; - if (hasResult) - { - var disconnectCmd = new DosDisconnectConsole(); - Session.SendEncapsulated(disconnectCmd, EncapsulatedReliability.ReliableOrdered); - } - } - else + if (hasResult) { - throw new Exception("Unhandled DOS encapsulated packet"); + var disconnectCmd = new DosDisconnectConsole(); + Session.SendEncapsulated(disconnectCmd, EncapsulatedReliability.ReliableOrdered); } } + else + { + throw new Exception("Unhandled DOS encapsulated packet"); + } } } diff --git a/RconClient/Utils.cs b/RconClient/Utils.cs index 7d8b63b1..69a9f541 100644 --- a/RconClient/Utils.cs +++ b/RconClient/Utils.cs @@ -1,51 +1,50 @@ using System; using System.IO; -namespace LSLib.Rcon -{ - public class BinaryWriterBE : BinaryWriter - { - public BinaryWriterBE(Stream s) - : base(s) - { } +namespace LSLib.Rcon; - public void WriteBE(UInt16 value) - { - UInt16 be = (ushort)((ushort)((value & 0xff) << 8) | ((value >> 8) & 0xff)); - Write(be); - } +public class BinaryWriterBE : BinaryWriter +{ + public BinaryWriterBE(Stream s) + : base(s) + { } - public void WriteBE(UInt32 value) - { - // swap adjacent 16-bit blocks - UInt32 be = (value >> 16) | (value << 16); - // swap adjacent 8-bit blocks - be = ((be & 0xFF00FF00) >> 8) | ((be & 0x00FF00FF) << 8); - Write(be); - } + public void WriteBE(UInt16 value) + { + UInt16 be = (ushort)((ushort)((value & 0xff) << 8) | ((value >> 8) & 0xff)); + Write(be); } - public class 
BinaryReaderBE : BinaryReader + public void WriteBE(UInt32 value) { - public BinaryReaderBE(Stream s) - : base(s) - { } + // swap adjacent 16-bit blocks + UInt32 be = (value >> 16) | (value << 16); + // swap adjacent 8-bit blocks + be = ((be & 0xFF00FF00) >> 8) | ((be & 0x00FF00FF) << 8); + Write(be); + } +} - public UInt16 ReadUInt16BE() - { - UInt16 be = ReadUInt16(); - UInt16 le = (ushort)((ushort)((be & 0xff) << 8) | ((be >> 8) & 0xff)); - return le; - } +public class BinaryReaderBE : BinaryReader +{ + public BinaryReaderBE(Stream s) + : base(s) + { } - public UInt32 ReadUInt32BE() - { - UInt32 be = ReadUInt32(); - // swap adjacent 16-bit blocks - UInt32 le = (be >> 16) | (be << 16); - // swap adjacent 8-bit blocks - le = ((le & 0xFF00FF00) >> 8) | ((le & 0x00FF00FF) << 8); - return le; - } + public UInt16 ReadUInt16BE() + { + UInt16 be = ReadUInt16(); + UInt16 le = (ushort)((ushort)((be & 0xff) << 8) | ((be >> 8) & 0xff)); + return le; + } + + public UInt32 ReadUInt32BE() + { + UInt32 be = ReadUInt32(); + // swap adjacent 16-bit blocks + UInt32 le = (be >> 16) | (be << 16); + // swap adjacent 8-bit blocks + le = ((le & 0xFF00FF00) >> 8) | ((le & 0x00FF00FF) << 8); + return le; } } diff --git a/StatParser/Arguments.cs b/StatParser/Arguments.cs index 33e22afd..14e19363 100644 --- a/StatParser/Arguments.cs +++ b/StatParser/Arguments.cs @@ -1,44 +1,43 @@ using CommandLineParser.Arguments; -namespace LSTools.StatParser +namespace LSTools.StatParser; + +public class CommandLineArguments { - public class CommandLineArguments - { - [SwitchArgument("no-packages", false, - Description = "Don't look for goal files inside packages", - Optional = true - )] - public bool NoPackages; + [SwitchArgument("no-packages", false, + Description = "Don't look for goal files inside packages", + Optional = true + )] + public bool NoPackages; - [ValueArgument(typeof(string), "mod", - Description = "Mod to add", - AllowMultiple = true, - ValueOptional = false, - Optional = false - )] - public string[] Mods; + [ValueArgument(typeof(string), "mod", + Description = "Mod to add", + AllowMultiple = true, + ValueOptional = false, + Optional = false + )] + public string[] Mods; - [ValueArgument(typeof(string), "dependency", - Description = "Dependencies to add", - AllowMultiple = true, - ValueOptional = false, - Optional = true - )] - public string[] Dependencies; + [ValueArgument(typeof(string), "dependency", + Description = "Dependencies to add", + AllowMultiple = true, + ValueOptional = false, + Optional = true + )] + public string[] Dependencies; - [ValueArgument(typeof(string), "game-data-path", - Description = "Game data path", - ValueOptional = false, - Optional = true - )] - public string GameDataPath; + [ValueArgument(typeof(string), "game-data-path", + Description = "Game data path", + ValueOptional = false, + Optional = true + )] + public string GameDataPath; - [ValueArgument(typeof(string), "package-paths", - Description = "Additional package path(s)", - AllowMultiple = true, - ValueOptional = false, - Optional = true - )] - public string[] PackagePaths; - } + [ValueArgument(typeof(string), "package-paths", + Description = "Additional package path(s)", + AllowMultiple = true, + ValueOptional = false, + Optional = true + )] + public string[] PackagePaths; } diff --git a/StatParser/Program.cs b/StatParser/Program.cs index d4099d23..e56d80bb 100644 --- a/StatParser/Program.cs +++ b/StatParser/Program.cs @@ -2,68 +2,67 @@ using System; using System.Collections.Generic; -namespace LSTools.StatParser +namespace 
LSTools.StatParser;
+
+class Program
 {
-    class Program
+    static int Run(CommandLineArguments args)
     {
-        static int Run(CommandLineArguments args)
+        using (var statChecker = new StatChecker(args.GameDataPath))
         {
-            using (var statChecker = new StatChecker(args.GameDataPath))
-            {
-                statChecker.LoadPackages = !args.NoPackages;
-
-                var mods = new List<string>(args.Mods);
-                var dependencies = new List<string>(args.Dependencies);
-                var packagePaths = new List<string>(args.PackagePaths);
-                statChecker.Check(mods, dependencies, packagePaths);
-            }
+            statChecker.LoadPackages = !args.NoPackages;

-            return 0;
+            var mods = new List<string>(args.Mods);
+            var dependencies = new List<string>(args.Dependencies);
+            var packagePaths = new List<string>(args.PackagePaths);
+            statChecker.Check(mods, dependencies, packagePaths);
         }

-        static void Main(string[] args)
+        return 0;
+    }
+
+    static void Main(string[] args)
+    {
+        if (args.Length == 0)
         {
-            if (args.Length == 0)
-            {
-                Console.WriteLine("Usage: StatParser ");
-                Console.WriteLine(" --game-data-path - Location of the game Data folder");
-                Console.WriteLine(" --package-paths - Location of additional packages to load");
-                Console.WriteLine(" --dependency - Load stat files from the specified mod as a dependency");
-                Console.WriteLine(" --mod - Check all stat files from the specified mod");
-                Console.WriteLine(" --no-packages - Don't load files from packages");
-                Environment.Exit(1);
-            }
+            Console.WriteLine("Usage: StatParser ");
+            Console.WriteLine(" --game-data-path - Location of the game Data folder");
+            Console.WriteLine(" --package-paths - Location of additional packages to load");
+            Console.WriteLine(" --dependency - Load stat files from the specified mod as a dependency");
+            Console.WriteLine(" --mod - Check all stat files from the specified mod");
+            Console.WriteLine(" --no-packages - Don't load files from packages");
+            Environment.Exit(1);
+        }

-            CommandLineParser.CommandLineParser parser = new CommandLineParser.CommandLineParser();
+        CommandLineParser.CommandLineParser parser = new CommandLineParser.CommandLineParser();

-            var argv = new CommandLineArguments();
+        var argv = new CommandLineArguments();

-            parser.ExtractArgumentAttributes(argv);
+        parser.ExtractArgumentAttributes(argv);

-            try
-            {
-                parser.ParseCommandLine(args);
-            }
-            catch (CommandLineArgumentException e)
-            {
-                Console.ForegroundColor = ConsoleColor.Red;
-                Console.WriteLine($"Argument --{e.Argument}: {e.Message}");
-                Console.ResetColor();
-                Environment.Exit(1);
-            }
-            catch (CommandLineException e)
-            {
-                Console.ForegroundColor = ConsoleColor.Red;
-                Console.WriteLine(e.Message);
-                Console.ResetColor();
-                Environment.Exit(1);
-            }
+        try
+        {
+            parser.ParseCommandLine(args);
+        }
+        catch (CommandLineArgumentException e)
+        {
+            Console.ForegroundColor = ConsoleColor.Red;
+            Console.WriteLine($"Argument --{e.Argument}: {e.Message}");
+            Console.ResetColor();
+            Environment.Exit(1);
+        }
+        catch (CommandLineException e)
+        {
+            Console.ForegroundColor = ConsoleColor.Red;
+            Console.WriteLine(e.Message);
+            Console.ResetColor();
+            Environment.Exit(1);
+        }

-            if (parser.ParsingSucceeded)
-            {
-                var exitCode = Run(argv);
-                Environment.Exit(exitCode);
-            }
+        if (parser.ParsingSucceeded)
+        {
+            var exitCode = Run(argv);
+            Environment.Exit(exitCode);
         }
     }
 }
diff --git a/StatParser/StatChecker.cs b/StatParser/StatChecker.cs
index 87beaad7..a76aee66 100644
--- a/StatParser/StatChecker.cs
+++ b/StatParser/StatChecker.cs
@@ -6,159 +6,158 @@ using System.Linq;
 using System.Xml;

-namespace LSTools.StatParser
-{
-    class StatChecker : IDisposable
-    {
-        private string 
GameDataPath; - private ModResources Mods = new ModResources(); - private StatDefinitionRepository Definitions; - private StatLoadingContext Context; - private StatLoader Loader; +namespace LSTools.StatParser; - public bool LoadPackages = true; +class StatChecker : IDisposable +{ + private string GameDataPath; + private ModResources Mods = new ModResources(); + private StatDefinitionRepository Definitions; + private StatLoadingContext Context; + private StatLoader Loader; + public bool LoadPackages = true; - public StatChecker(string gameDataPath) - { - GameDataPath = gameDataPath; - } - public void Dispose() - { - Mods.Dispose(); - } + public StatChecker(string gameDataPath) + { + GameDataPath = gameDataPath; + } - private void LoadStats(ModInfo mod) - { - foreach (var file in mod.Stats) - { - var statStream = file.Value.MakeStream(); - try - { - Loader.LoadStatsFromStream(file.Key, statStream); - } - finally - { - file.Value.ReleaseStream(); - } - } - } + public void Dispose() + { + Mods.Dispose(); + } - private XmlDocument LoadXml(AbstractFileInfo file) + private void LoadStats(ModInfo mod) + { + foreach (var file in mod.Stats) { - if (file == null) return null; - - var stream = file.MakeStream(); + var statStream = file.Value.MakeStream(); try { - var doc = new XmlDocument(); - doc.Load(stream); - return doc; + Loader.LoadStatsFromStream(file.Key, statStream); } finally { - file.ReleaseStream(); + file.Value.ReleaseStream(); } } + } - private void LoadGuidResources(ModInfo mod) - { - var actionResources = LoadXml(mod.ActionResourcesFile); - if (actionResources != null) - { - Loader.LoadActionResources(actionResources); - } + private XmlDocument LoadXml(AbstractFileInfo file) + { + if (file == null) return null; - var actionResourceGroups = LoadXml(mod.ActionResourceGroupsFile); - if (actionResourceGroups != null) - { - Loader.LoadActionResourceGroups(actionResourceGroups); - } + var stream = file.MakeStream(); + try + { + var doc = new XmlDocument(); + doc.Load(stream); + return doc; } - - private void LoadMod(string modName) + finally { - if (!Mods.Mods.TryGetValue(modName, out ModInfo mod)) - { - throw new Exception($"Mod not found: {modName}"); - } + file.ReleaseStream(); + } + } - LoadStats(mod); - LoadGuidResources(mod); + private void LoadGuidResources(ModInfo mod) + { + var actionResources = LoadXml(mod.ActionResourcesFile); + if (actionResources != null) + { + Loader.LoadActionResources(actionResources); } - private void LoadStatDefinitions(ModResources resources) + var actionResourceGroups = LoadXml(mod.ActionResourceGroupsFile); + if (actionResourceGroups != null) { - Definitions = new StatDefinitionRepository(); - Definitions.LoadEnumerations(resources.Mods["Shared"].ValueListsFile.MakeStream()); - Definitions.LoadDefinitions(resources.Mods["Shared"].ModifiersFile.MakeStream()); + Loader.LoadActionResourceGroups(actionResourceGroups); } + } - private void CompilationDiagnostic(StatLoadingError message) + private void LoadMod(string modName) + { + if (!Mods.Mods.TryGetValue(modName, out ModInfo mod)) { - if (message.Code == DiagnosticCode.StatSyntaxError) - { - Console.ForegroundColor = ConsoleColor.Red; - Console.Write("ERR! 
"); - } - else - { - Console.ForegroundColor = ConsoleColor.DarkYellow; - Console.Write("WARN "); - } + throw new Exception($"Mod not found: {modName}"); + } - if (message.Path != null) - { - var baseName = Path.GetFileName(message.Path); - Console.Write($"{baseName}:{message.Line}: "); - } + LoadStats(mod); + LoadGuidResources(mod); + } - Console.WriteLine("[{0}] {1}", message.Code, message.Message); - Console.ResetColor(); + private void LoadStatDefinitions(ModResources resources) + { + Definitions = new StatDefinitionRepository(); + Definitions.LoadEnumerations(resources.Mods["Shared"].ValueListsFile.MakeStream()); + Definitions.LoadDefinitions(resources.Mods["Shared"].ModifiersFile.MakeStream()); + } + + private void CompilationDiagnostic(StatLoadingError message) + { + if (message.Code == DiagnosticCode.StatSyntaxError) + { + Console.ForegroundColor = ConsoleColor.Red; + Console.Write("ERR! "); + } + else + { + Console.ForegroundColor = ConsoleColor.DarkYellow; + Console.Write("WARN "); } - public void Check(List mods, List dependencies, List packagePaths) + if (message.Path != null) { - Context = new StatLoadingContext(); + var baseName = Path.GetFileName(message.Path); + Console.Write($"{baseName}:{message.Line}: "); + } - Loader = new StatLoader(Context); - - var visitor = new ModPathVisitor(Mods) - { - Game = LSLib.LS.Story.Compiler.TargetGame.DOS2DE, - CollectStats = true, - CollectGuidResources = true, - LoadPackages = LoadPackages - }; - visitor.Discover(GameDataPath); - packagePaths.ForEach(path => visitor.DiscoverUserPackages(path)); - - LoadStatDefinitions(visitor.Resources); - Context.Definitions = Definitions; - - foreach (var modName in dependencies) - { - LoadMod(modName); - } + Console.WriteLine("[{0}] {1}", message.Code, message.Message); + Console.ResetColor(); + } - Loader.ResolveUsageRef(); - Loader.InstantiateEntries(); + public void Check(List mods, List dependencies, List packagePaths) + { + Context = new StatLoadingContext(); - Context.Errors.Clear(); + Loader = new StatLoader(Context); + + var visitor = new ModPathVisitor(Mods) + { + Game = LSLib.LS.Story.Compiler.TargetGame.DOS2DE, + CollectStats = true, + CollectGuidResources = true, + LoadPackages = LoadPackages + }; + visitor.Discover(GameDataPath); + packagePaths.ForEach(path => visitor.DiscoverUserPackages(path)); + + LoadStatDefinitions(visitor.Resources); + Context.Definitions = Definitions; + + foreach (var modName in dependencies) + { + LoadMod(modName); + } - foreach (var modName in mods) - { - LoadMod(modName); - } + Loader.ResolveUsageRef(); + Loader.InstantiateEntries(); - Loader.ResolveUsageRef(); - Loader.InstantiateEntries(); + Context.Errors.Clear(); - foreach (var message in Context.Errors) - { - CompilationDiagnostic(message); - } + foreach (var modName in mods) + { + LoadMod(modName); + } + + Loader.ResolveUsageRef(); + Loader.InstantiateEntries(); + + foreach (var message in Context.Errors) + { + CompilationDiagnostic(message); } } } diff --git a/StoryCompiler/DebugInfoSaver.cs b/StoryCompiler/DebugInfoSaver.cs index 1ece6b7c..081a0b95 100644 --- a/StoryCompiler/DebugInfoSaver.cs +++ b/StoryCompiler/DebugInfoSaver.cs @@ -5,201 +5,200 @@ using LSLib.LS.Story.Compiler; using System.Text; -namespace LSTools.StoryCompiler +namespace LSTools.StoryCompiler; + +class DebugInfoSaver { - class DebugInfoSaver + private DatabaseDebugInfoMsg ToProtobuf(DatabaseDebugInfo debugInfo) { - private DatabaseDebugInfoMsg ToProtobuf(DatabaseDebugInfo debugInfo) + var msg = new DatabaseDebugInfoMsg { - var msg 
= new DatabaseDebugInfoMsg - { - Id = debugInfo.Id, - Name = debugInfo.Name - }; - foreach (var paramType in debugInfo.ParamTypes) - { - msg.ParamTypes.Add(paramType); - } - - return msg; - } - - private GoalDebugInfoMsg ToProtobuf(GoalDebugInfo debugInfo) + Id = debugInfo.Id, + Name = debugInfo.Name + }; + foreach (var paramType in debugInfo.ParamTypes) { - var msg = new GoalDebugInfoMsg - { - Id = debugInfo.Id, - Name = debugInfo.Name, - Path = debugInfo.Path - }; - - foreach (var action in debugInfo.InitActions) - { - var varAct = ToProtobuf(action); - msg.InitActions.Add(varAct); - } + msg.ParamTypes.Add(paramType); + } - foreach (var action in debugInfo.ExitActions) - { - var varAct = ToProtobuf(action); - msg.ExitActions.Add(varAct); - } + return msg; + } - return msg; - } + private GoalDebugInfoMsg ToProtobuf(GoalDebugInfo debugInfo) + { + var msg = new GoalDebugInfoMsg + { + Id = debugInfo.Id, + Name = debugInfo.Name, + Path = debugInfo.Path + }; - private RuleVariableDebugInfoMsg ToProtobuf(RuleVariableDebugInfo debugInfo) + foreach (var action in debugInfo.InitActions) { - return new RuleVariableDebugInfoMsg - { - Index = debugInfo.Index, - Name = debugInfo.Name, - Type = debugInfo.Type, - Unused = debugInfo.Unused - }; + var varAct = ToProtobuf(action); + msg.InitActions.Add(varAct); } - private ActionDebugInfoMsg ToProtobuf(ActionDebugInfo debugInfo) + foreach (var action in debugInfo.ExitActions) { - return new ActionDebugInfoMsg - { - Line = debugInfo.Line - }; + var varAct = ToProtobuf(action); + msg.ExitActions.Add(varAct); } - private RuleDebugInfoMsg ToProtobuf(RuleDebugInfo debugInfo) + return msg; + } + + private RuleVariableDebugInfoMsg ToProtobuf(RuleVariableDebugInfo debugInfo) + { + return new RuleVariableDebugInfoMsg { - var msg = new RuleDebugInfoMsg - { - Id = debugInfo.Id, - GoalId = debugInfo.GoalId, - Name = debugInfo.Name, - ConditionsStartLine = debugInfo.ConditionsStartLine, - ConditionsEndLine = debugInfo.ConditionsEndLine, - ActionsStartLine = debugInfo.ActionsStartLine, - ActionsEndLine = debugInfo.ActionsEndLine - }; - - foreach (var variable in debugInfo.Variables) - { - var varMsg = ToProtobuf(variable); - msg.Variables.Add(varMsg); - } + Index = debugInfo.Index, + Name = debugInfo.Name, + Type = debugInfo.Type, + Unused = debugInfo.Unused + }; + } - foreach (var action in debugInfo.Actions) - { - var varAct = ToProtobuf(action); - msg.Actions.Add(varAct); - } + private ActionDebugInfoMsg ToProtobuf(ActionDebugInfo debugInfo) + { + return new ActionDebugInfoMsg + { + Line = debugInfo.Line + }; + } - return msg; + private RuleDebugInfoMsg ToProtobuf(RuleDebugInfo debugInfo) + { + var msg = new RuleDebugInfoMsg + { + Id = debugInfo.Id, + GoalId = debugInfo.GoalId, + Name = debugInfo.Name, + ConditionsStartLine = debugInfo.ConditionsStartLine, + ConditionsEndLine = debugInfo.ConditionsEndLine, + ActionsStartLine = debugInfo.ActionsStartLine, + ActionsEndLine = debugInfo.ActionsEndLine + }; + + foreach (var variable in debugInfo.Variables) + { + var varMsg = ToProtobuf(variable); + msg.Variables.Add(varMsg); } - private NodeDebugInfoMsg ToProtobuf(NodeDebugInfo debugInfo) + foreach (var action in debugInfo.Actions) { - var msg = new NodeDebugInfoMsg - { - Id = debugInfo.Id, - RuleId = debugInfo.RuleId, - Line = (UInt32)debugInfo.Line, - DatabaseId = debugInfo.DatabaseId, - Name = debugInfo.Name, - Type = (NodeDebugInfoMsg.Types.NodeType)debugInfo.Type, - ParentNodeId = debugInfo.ParentNodeId, - FunctionName = debugInfo.FunctionName != null ? 
debugInfo.FunctionName.Name : "", - FunctionArity = debugInfo.FunctionName != null ? (uint)debugInfo.FunctionName.Arity : 0 - }; - - foreach (var map in debugInfo.ColumnToVariableMaps) - { - msg.ColumnMaps.Add((UInt32)map.Key, (UInt32)map.Value); - } - - return msg; + var varAct = ToProtobuf(action); + msg.Actions.Add(varAct); } - private FunctionParamDebugInfoMsg ToProtobuf(FunctionParamDebugInfo debugInfo) + return msg; + } + + private NodeDebugInfoMsg ToProtobuf(NodeDebugInfo debugInfo) + { + var msg = new NodeDebugInfoMsg { - return new FunctionParamDebugInfoMsg - { - TypeId = debugInfo.TypeId, - Name = debugInfo.Name ?? "", - Out = debugInfo.Out - }; + Id = debugInfo.Id, + RuleId = debugInfo.RuleId, + Line = (UInt32)debugInfo.Line, + DatabaseId = debugInfo.DatabaseId, + Name = debugInfo.Name, + Type = (NodeDebugInfoMsg.Types.NodeType)debugInfo.Type, + ParentNodeId = debugInfo.ParentNodeId, + FunctionName = debugInfo.FunctionName != null ? debugInfo.FunctionName.Name : "", + FunctionArity = debugInfo.FunctionName != null ? (uint)debugInfo.FunctionName.Arity : 0 + }; + + foreach (var map in debugInfo.ColumnToVariableMaps) + { + msg.ColumnMaps.Add((UInt32)map.Key, (UInt32)map.Value); } - private FunctionDebugInfoMsg ToProtobuf(FunctionDebugInfo debugInfo) + return msg; + } + + private FunctionParamDebugInfoMsg ToProtobuf(FunctionParamDebugInfo debugInfo) + { + return new FunctionParamDebugInfoMsg { - var msg = new FunctionDebugInfoMsg - { - Name = debugInfo.Name, - TypeId = debugInfo.TypeId - }; + TypeId = debugInfo.TypeId, + Name = debugInfo.Name ?? "", + Out = debugInfo.Out + }; + } - foreach (var param in debugInfo.Params) - { - msg.Params.Add(ToProtobuf(param)); - } + private FunctionDebugInfoMsg ToProtobuf(FunctionDebugInfo debugInfo) + { + var msg = new FunctionDebugInfoMsg + { + Name = debugInfo.Name, + TypeId = debugInfo.TypeId + }; - return msg; + foreach (var param in debugInfo.Params) + { + msg.Params.Add(ToProtobuf(param)); } - private StoryDebugInfoMsg ToProtobuf(StoryDebugInfo debugInfo) - { - var msg = new StoryDebugInfoMsg(); - msg.Version = debugInfo.Version; + return msg; + } - foreach (var db in debugInfo.Databases) - { - var dbMsg = ToProtobuf(db.Value); - msg.Databases.Add(dbMsg); - } + private StoryDebugInfoMsg ToProtobuf(StoryDebugInfo debugInfo) + { + var msg = new StoryDebugInfoMsg(); + msg.Version = debugInfo.Version; - foreach (var goal in debugInfo.Goals) - { - var goalMsg = ToProtobuf(goal.Value); - msg.Goals.Add(goalMsg); - } + foreach (var db in debugInfo.Databases) + { + var dbMsg = ToProtobuf(db.Value); + msg.Databases.Add(dbMsg); + } - foreach (var rule in debugInfo.Rules) - { - var ruleMsg = ToProtobuf(rule.Value); - msg.Rules.Add(ruleMsg); - } + foreach (var goal in debugInfo.Goals) + { + var goalMsg = ToProtobuf(goal.Value); + msg.Goals.Add(goalMsg); + } - foreach (var node in debugInfo.Nodes) - { - var nodeMsg = ToProtobuf(node.Value); - msg.Nodes.Add(nodeMsg); - } + foreach (var rule in debugInfo.Rules) + { + var ruleMsg = ToProtobuf(rule.Value); + msg.Rules.Add(ruleMsg); + } - foreach (var func in debugInfo.Functions) - { - var funcMsg = ToProtobuf(func.Value); - msg.Functions.Add(funcMsg); - } + foreach (var node in debugInfo.Nodes) + { + var nodeMsg = ToProtobuf(node.Value); + msg.Nodes.Add(nodeMsg); + } - return msg; + foreach (var func in debugInfo.Functions) + { + var funcMsg = ToProtobuf(func.Value); + msg.Functions.Add(funcMsg); } - public void Save(Stream stream, StoryDebugInfo debugInfo) + return msg; + } + + public void Save(Stream 
stream, StoryDebugInfo debugInfo) + { + var msg = ToProtobuf(debugInfo); + using (var ms = new MemoryStream()) + using (var codedStream = new CodedOutputStream(ms)) { - var msg = ToProtobuf(debugInfo); - using (var ms = new MemoryStream()) - using (var codedStream = new CodedOutputStream(ms)) + msg.WriteTo(codedStream); + codedStream.Flush(); + + byte[] proto = ms.ToArray(); + byte flags = BinUtils.MakeCompressionFlags(LSLib.LS.Enums.CompressionMethod.LZ4, LSLib.LS.Enums.LSCompressionLevel.FastCompression); + byte[] compressed = BinUtils.Compress(proto, flags); + stream.Write(compressed, 0, compressed.Length); + + using (var writer = new BinaryWriter(stream, Encoding.UTF8, true)) { - msg.WriteTo(codedStream); - codedStream.Flush(); - - byte[] proto = ms.ToArray(); - byte flags = BinUtils.MakeCompressionFlags(LSLib.LS.Enums.CompressionMethod.LZ4, LSLib.LS.Enums.LSCompressionLevel.FastCompression); - byte[] compressed = BinUtils.Compress(proto, flags); - stream.Write(compressed, 0, compressed.Length); - - using (var writer = new BinaryWriter(stream, Encoding.UTF8, true)) - { - writer.Write((UInt32)proto.Length); - } + writer.Write((UInt32)proto.Length); } } } diff --git a/StoryCompiler/Log.cs b/StoryCompiler/Log.cs index 05ed1349..6134f325 100644 --- a/StoryCompiler/Log.cs +++ b/StoryCompiler/Log.cs @@ -6,216 +6,215 @@ using System.IO; using System.Text; -namespace LSTools.StoryCompiler +namespace LSTools.StoryCompiler; + +public interface Logger +{ + void CompilationStarted(); + void CompilationFinished(bool succeeded); + + void TaskStarted(string name); + void TaskFinished(); + + void CompilationDiagnostic(Diagnostic message); +} + +public class ConsoleLogger : Logger { - public interface Logger + private Stopwatch compilationTimer = new Stopwatch(); + private Stopwatch taskTimer = new Stopwatch(); + + public void CompilationStarted() { - void CompilationStarted(); - void CompilationFinished(bool succeeded); + compilationTimer.Restart(); + } - void TaskStarted(string name); - void TaskFinished(); + public void CompilationFinished(bool succeeded) + { + compilationTimer.Stop(); + Console.WriteLine("Compilation took: {0} ms", compilationTimer.Elapsed.Seconds * 1000 + compilationTimer.Elapsed.Milliseconds); + } - void CompilationDiagnostic(Diagnostic message); + public void TaskStarted(string name) + { + Console.Write(name + " ... "); + taskTimer.Restart(); } - public class ConsoleLogger : Logger + public void TaskFinished() { - private Stopwatch compilationTimer = new Stopwatch(); - private Stopwatch taskTimer = new Stopwatch(); + taskTimer.Stop(); + Console.WriteLine("{0} ms", taskTimer.Elapsed.Seconds * 1000 + taskTimer.Elapsed.Milliseconds); + } - public void CompilationStarted() + public void CompilationDiagnostic(Diagnostic message) + { + switch (message.Level) { - compilationTimer.Restart(); + case MessageLevel.Error: + Console.ForegroundColor = ConsoleColor.Red; + Console.Write("ERR! "); + break; + + case MessageLevel.Warning: + Console.ForegroundColor = ConsoleColor.DarkYellow; + Console.Write("WARN "); + break; } - public void CompilationFinished(bool succeeded) + if (message.Location != null) { - compilationTimer.Stop(); - Console.WriteLine("Compilation took: {0} ms", compilationTimer.Elapsed.Seconds * 1000 + compilationTimer.Elapsed.Milliseconds); + Console.Write($"{message.Location.FileName}:{message.Location.StartLine}:{message.Location.StartColumn}: "); } - public void TaskStarted(string name) - { - Console.Write(name + " ... 
"); - taskTimer.Restart(); - } + Console.WriteLine("[{0}] {1}", message.Code, message.Message); + Console.ResetColor(); + } +} - public void TaskFinished() - { - taskTimer.Stop(); - Console.WriteLine("{0} ms", taskTimer.Elapsed.Seconds * 1000 + taskTimer.Elapsed.Milliseconds); - } +public class JsonLogConverter : JsonConverter +{ + public override bool CanConvert(Type objectType) + { + return + objectType.Equals(typeof(JsonLoggerOutput)) + || objectType.Equals(typeof(Diagnostic)); + } + + public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) + { + throw new NotImplementedException(); + } - public void CompilationDiagnostic(Diagnostic message) + public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) + { + if (value is JsonLoggerOutput) { - switch (message.Level) + var output = value as JsonLoggerOutput; + writer.WriteStartObject(); + + writer.WritePropertyName("successful"); + writer.WriteValue(output.Succeeded); + + writer.WritePropertyName("stats"); + writer.WriteStartObject(); + foreach (var time in output.StepTimes) { - case MessageLevel.Error: - Console.ForegroundColor = ConsoleColor.Red; - Console.Write("ERR! "); - break; - - case MessageLevel.Warning: - Console.ForegroundColor = ConsoleColor.DarkYellow; - Console.Write("WARN "); - break; + writer.WritePropertyName(time.Key); + writer.WriteValue(time.Value); } + writer.WriteEndObject(); - if (message.Location != null) + writer.WritePropertyName("messages"); + writer.WriteStartArray(); + foreach (var diagnostic in output.Diagnostics) { - Console.Write($"{message.Location.FileName}:{message.Location.StartLine}:{message.Location.StartColumn}: "); + serializer.Serialize(writer, diagnostic); } + writer.WriteEndArray(); - Console.WriteLine("[{0}] {1}", message.Code, message.Message); - Console.ResetColor(); - } - } - - public class JsonLogConverter : JsonConverter - { - public override bool CanConvert(Type objectType) - { - return - objectType.Equals(typeof(JsonLoggerOutput)) - || objectType.Equals(typeof(Diagnostic)); + writer.WriteEndObject(); } - - public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) + else if (value is Diagnostic) { - throw new NotImplementedException(); - } + var diagnostic = value as Diagnostic; + writer.WriteStartObject(); - public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer) - { - if (value is JsonLoggerOutput) + writer.WritePropertyName("location"); + if (diagnostic.Location != null) { - var output = value as JsonLoggerOutput; writer.WriteStartObject(); - writer.WritePropertyName("successful"); - writer.WriteValue(output.Succeeded); + writer.WritePropertyName("file"); + writer.WriteValue(diagnostic.Location.FileName); - writer.WritePropertyName("stats"); - writer.WriteStartObject(); - foreach (var time in output.StepTimes) - { - writer.WritePropertyName(time.Key); - writer.WriteValue(time.Value); - } - writer.WriteEndObject(); + writer.WritePropertyName("StartLine"); + writer.WriteValue(diagnostic.Location.StartLine); - writer.WritePropertyName("messages"); - writer.WriteStartArray(); - foreach (var diagnostic in output.Diagnostics) - { - serializer.Serialize(writer, diagnostic); - } - writer.WriteEndArray(); + writer.WritePropertyName("StartColumn"); + writer.WriteValue(diagnostic.Location.StartColumn); - writer.WriteEndObject(); - } - else if (value is Diagnostic) - { - var diagnostic = value as Diagnostic; - 
writer.WriteStartObject(); - - writer.WritePropertyName("location"); - if (diagnostic.Location != null) - { - writer.WriteStartObject(); - - writer.WritePropertyName("file"); - writer.WriteValue(diagnostic.Location.FileName); - - writer.WritePropertyName("StartLine"); - writer.WriteValue(diagnostic.Location.StartLine); - - writer.WritePropertyName("StartColumn"); - writer.WriteValue(diagnostic.Location.StartColumn); - - writer.WritePropertyName("EndLine"); - writer.WriteValue(diagnostic.Location.EndLine); - - writer.WritePropertyName("EndColumn"); - writer.WriteValue(diagnostic.Location.EndColumn); - - writer.WriteEndObject(); - } - else - { - writer.WriteNull(); - } - - writer.WritePropertyName("code"); - writer.WriteValue(diagnostic.Code); - - writer.WritePropertyName("level"); - writer.WriteValue(diagnostic.Level); + writer.WritePropertyName("EndLine"); + writer.WriteValue(diagnostic.Location.EndLine); - writer.WritePropertyName("message"); - writer.WriteValue(diagnostic.Message); + writer.WritePropertyName("EndColumn"); + writer.WriteValue(diagnostic.Location.EndColumn); writer.WriteEndObject(); } else { - throw new InvalidOperationException(); + writer.WriteNull(); } + + writer.WritePropertyName("code"); + writer.WriteValue(diagnostic.Code); + + writer.WritePropertyName("level"); + writer.WriteValue(diagnostic.Level); + + writer.WritePropertyName("message"); + writer.WriteValue(diagnostic.Message); + + writer.WriteEndObject(); + } + else + { + throw new InvalidOperationException(); } } +} + +class JsonLoggerOutput +{ + public Dictionary StepTimes = new Dictionary(); + public List Diagnostics = new List(); + public bool Succeeded; +} - class JsonLoggerOutput +class JsonLogger : Logger +{ + private Stopwatch TaskTimer = new Stopwatch(); + private JsonLoggerOutput Output = new JsonLoggerOutput(); + private String CurrentStep; + + public void CompilationStarted() { - public Dictionary StepTimes = new Dictionary(); - public List Diagnostics = new List(); - public bool Succeeded; } - class JsonLogger : Logger + public void CompilationFinished(bool succeeded) { - private Stopwatch TaskTimer = new Stopwatch(); - private JsonLoggerOutput Output = new JsonLoggerOutput(); - private String CurrentStep; - - public void CompilationStarted() - { - } + Output.Succeeded = succeeded; + var serializer = new JsonSerializer(); + serializer.Converters.Add(new JsonLogConverter()); - public void CompilationFinished(bool succeeded) + using (var memory = new MemoryStream()) { - Output.Succeeded = succeeded; - var serializer = new JsonSerializer(); - serializer.Converters.Add(new JsonLogConverter()); - - using (var memory = new MemoryStream()) + using (var stream = new StreamWriter(memory)) + using (JsonWriter writer = new JsonTextWriter(stream)) { - using (var stream = new StreamWriter(memory)) - using (JsonWriter writer = new JsonTextWriter(stream)) - { - serializer.Serialize(writer, Output); - } - - var json = Encoding.UTF8.GetString(memory.ToArray()); - Console.Write(json); + serializer.Serialize(writer, Output); } - } - public void TaskStarted(string name) - { - CurrentStep = name; - TaskTimer.Restart(); + var json = Encoding.UTF8.GetString(memory.ToArray()); + Console.Write(json); } + } - public void TaskFinished() - { - TaskTimer.Stop(); - Output.StepTimes.Add(CurrentStep, TaskTimer.Elapsed.Seconds * 60 + TaskTimer.Elapsed.Milliseconds); - } + public void TaskStarted(string name) + { + CurrentStep = name; + TaskTimer.Restart(); + } - public void CompilationDiagnostic(Diagnostic message) - { - 
Output.Diagnostics.Add(message);
-        }
+        Output.Diagnostics.Add(message);
     }
 }
diff --git a/StoryCompiler/ModCompiler.cs b/StoryCompiler/ModCompiler.cs
index e4a4b636..e95fef1a 100644
--- a/StoryCompiler/ModCompiler.cs
+++ b/StoryCompiler/ModCompiler.cs
@@ -11,509 +11,508 @@ using System.Text.RegularExpressions;
 using System.Threading.Tasks;

-namespace LSTools.StoryCompiler
+namespace LSTools.StoryCompiler;
+
+class ModCompiler : IDisposable
 {
-    class ModCompiler : IDisposable
+    class GoalScript
     {
-        class GoalScript
-        {
-            public string Name;
-            public string Path;
-            public byte[] ScriptBody;
-        }
+        public string Name;
+        public string Path;
+        public byte[] ScriptBody;
+    }

-        private Logger Logger;
-        private String GameDataPath;
-        private Compiler Compiler = new Compiler();
-        private ModResources Mods = new ModResources();
-        private List<GoalScript> GoalScripts = new List<GoalScript>();
-        private List<byte[]> GameObjectLSFs = new List<byte[]>();
-        private bool HasErrors = false;
-        private HashSet<string> TypeCoercionWhitelist;
-
-        public bool CheckOnly = false;
-        public bool CheckGameObjects = false;
-        public bool LoadPackages = true;
-        public bool AllowTypeCoercion = false;
-        public bool OsiExtender = false;
-        public TargetGame Game = TargetGame.DOS2;
-
-        public ModCompiler(Logger logger, String gameDataPath)
-        {
-            Logger = logger;
-            GameDataPath = gameDataPath;
-        }
+    private Logger Logger;
+    private String GameDataPath;
+    private Compiler Compiler = new Compiler();
+    private ModResources Mods = new ModResources();
+    private List<GoalScript> GoalScripts = new List<GoalScript>();
+    private List<byte[]> GameObjectLSFs = new List<byte[]>();
+    private bool HasErrors = false;
+    private HashSet<string> TypeCoercionWhitelist;
+
+    public bool CheckOnly = false;
+    public bool CheckGameObjects = false;
+    public bool LoadPackages = true;
+    public bool AllowTypeCoercion = false;
+    public bool OsiExtender = false;
+    public TargetGame Game = TargetGame.DOS2;
+
+    public ModCompiler(Logger logger, String gameDataPath)
+    {
+        Logger = logger;
+        GameDataPath = gameDataPath;
+    }
+
+    public void Dispose()
+    {
+        Mods.Dispose();
+    }

-        public void Dispose()
+    private void LoadStoryHeaders(Stream stream)
+    {
+        var hdrLoader = new StoryHeaderLoader(Compiler.Context);
+        var declarations = hdrLoader.ParseHeader(stream);
+        if (declarations == null)
         {
-            Mods.Dispose();
+            throw new Exception("Failed to parse story header file");
         }

-        private void LoadStoryHeaders(Stream stream)
+        hdrLoader.LoadHeader(declarations);
+    }
+
+    private void LoadTypeCoercionWhitelist(Stream stream)
+    {
+        TypeCoercionWhitelist = new HashSet<string>();
+        using (var reader = new StreamReader(stream))
         {
-            var hdrLoader = new StoryHeaderLoader(Compiler.Context);
-            var declarations = hdrLoader.ParseHeader(stream);
-            if (declarations == null)
+            while (!reader.EndOfStream)
             {
-                throw new Exception("Failed to parse story header file");
+                var func = reader.ReadLine().Trim();
+                if (func.Length > 0)
+                {
+                    TypeCoercionWhitelist.Add(func);
+                }
             }
+        }
+    }

-            hdrLoader.LoadHeader(declarations);
+    public void SetWarningOptions(Dictionary<string, bool> options)
+    {
+        foreach (var option in options)
+        {
+            Compiler.Context.Log.WarningSwitches[option.Key] = option.Value;
         }
+    }
+
+    class IRBuildTasks
+    {
+        public ConcurrentQueue<GoalScript> Inputs = new ConcurrentQueue<GoalScript>();
+        public ConcurrentQueue<IRGoal> IRs = new ConcurrentQueue<IRGoal>();
+    }

-        private void LoadTypeCoercionWhitelist(Stream stream)
+    
private void BuildIR(IRBuildTasks tasks) + { + var goalLoader = new IRGenerator(Compiler.Context); + while (tasks.Inputs.TryDequeue(out GoalScript script)) { - TypeCoercionWhitelist = new HashSet(); - using (var reader = new StreamReader(stream)) + using (var stream = new MemoryStream(script.ScriptBody)) { - while (!reader.EndOfStream) + var ast = goalLoader.ParseGoal(script.Path, stream); + + if (ast != null) { - var func = reader.ReadLine().Trim(); - if (func.Length > 0) - { - TypeCoercionWhitelist.Add(func); - } + var ir = goalLoader.GenerateGoalIR(ast); + ir.Name = script.Name; + tasks.IRs.Enqueue(ir); + } + else + { + var msg = new Diagnostic(goalLoader.LastLocation, MessageLevel.Error, "X00", $"Could not parse goal file " + script.Name); + Logger.CompilationDiagnostic(msg); + HasErrors = true; } } } + } - public void SetWarningOptions(Dictionary options) + private List ParallelBuildIR() + { + var tasks = new IRBuildTasks(); + foreach (var script in GoalScripts) { - foreach (var option in options) - { - Compiler.Context.Log.WarningSwitches[option.Key] = option.Value; - } + tasks.Inputs.Enqueue(script); } - class IRBuildTasks + IRBuildTasks[] threadTasks = new[] { tasks, tasks, tasks, tasks }; + Task.WhenAll(threadTasks.Select(task => Task.Run(() => { BuildIR(task); }))).Wait(); + + var sorted = new SortedDictionary(); + while (tasks.IRs.TryDequeue(out IRGoal goal)) { - public ConcurrentQueue Inputs = new ConcurrentQueue(); - public ConcurrentQueue IRs = new ConcurrentQueue(); + sorted[goal.Name] = goal; } - private void BuildIR(IRBuildTasks tasks) - { - var goalLoader = new IRGenerator(Compiler.Context); - while (tasks.Inputs.TryDequeue(out GoalScript script)) - { - using (var stream = new MemoryStream(script.ScriptBody)) - { - var ast = goalLoader.ParseGoal(script.Path, stream); + return sorted.Values.ToList(); + } - if (ast != null) - { - var ir = goalLoader.GenerateGoalIR(ast); - ir.Name = script.Name; - tasks.IRs.Enqueue(ir); - } - else - { - var msg = new Diagnostic(goalLoader.LastLocation, MessageLevel.Error, "X00", $"Could not parse goal file " + script.Name); - Logger.CompilationDiagnostic(msg); - HasErrors = true; - } - } - } - } + class PreprocessTasks + { + public ConcurrentQueue Inputs = new ConcurrentQueue(); + } - private List ParallelBuildIR() + private void Preprocess(PreprocessTasks tasks) + { + var preprocessor = new Preprocessor(); + while (tasks.Inputs.TryDequeue(out GoalScript script)) { - var tasks = new IRBuildTasks(); - foreach (var script in GoalScripts) + var scriptText = Encoding.UTF8.GetString(script.ScriptBody); + string preprocessed = null; + if (preprocessor.Preprocess(scriptText, ref preprocessed)) { - tasks.Inputs.Enqueue(script); + script.ScriptBody = Encoding.UTF8.GetBytes(preprocessed); } + } + } - IRBuildTasks[] threadTasks = new[] { tasks, tasks, tasks, tasks }; - Task.WhenAll(threadTasks.Select(task => Task.Run(() => { BuildIR(task); }))).Wait(); + private void ParallelPreprocess() + { + var tasks = new PreprocessTasks(); + foreach (var script in GoalScripts) + { + tasks.Inputs.Enqueue(script); + } - var sorted = new SortedDictionary(); - while (tasks.IRs.TryDequeue(out IRGoal goal)) - { - sorted[goal.Name] = goal; - } + PreprocessTasks[] threadTasks = new[] { tasks, tasks, tasks, tasks }; + Task.WhenAll(threadTasks.Select(task => Task.Run(() => { Preprocess(task); }))).Wait(); + } - return sorted.Values.ToList(); + private void LoadGameObjects(Resource resource) + { + if (!resource.Regions.TryGetValue("Templates", out Region templates)) + { + // 
TODO - log error + return; } - class PreprocessTasks + if (!templates.Children.TryGetValue("GameObjects", out List gameObjects)) { - public ConcurrentQueue Inputs = new ConcurrentQueue(); + // TODO - log error + return; } - private void Preprocess(PreprocessTasks tasks) + foreach (var gameObject in gameObjects) { - var preprocessor = new Preprocessor(); - while (tasks.Inputs.TryDequeue(out GoalScript script)) + if (gameObject.Attributes.TryGetValue("MapKey", out NodeAttribute objectGuid) + && gameObject.Attributes.TryGetValue("Name", out NodeAttribute objectName) + && gameObject.Attributes.TryGetValue("Type", out NodeAttribute objectType)) { - var scriptText = Encoding.UTF8.GetString(script.ScriptBody); - string preprocessed = null; - if (preprocessor.Preprocess(scriptText, ref preprocessed)) + LSLib.LS.Story.Compiler.ValueType type = null; + switch ((string)objectType.Value) + { + case "item": type = Compiler.Context.LookupType("ITEMGUID"); break; + case "character": type = Compiler.Context.LookupType("CHARACTERGUID"); break; + case "trigger": type = Compiler.Context.LookupType("TRIGGERGUID"); break; + default: + // TODO - log unknown type + break; + } + + if (type != null) { - script.ScriptBody = Encoding.UTF8.GetBytes(preprocessed); + var gameObjectInfo = new GameObjectInfo + { + Name = objectName.Value + "_" + objectGuid.Value, + Type = type + }; + Compiler.Context.GameObjects[(string)objectGuid.Value] = gameObjectInfo; } } } + } - private void ParallelPreprocess() + private void LoadGlobals() + { + foreach (var lsf in GameObjectLSFs) { - var tasks = new PreprocessTasks(); - foreach (var script in GoalScripts) + using (var stream = new MemoryStream(lsf)) + using (var reader = new LSFReader(stream)) { - tasks.Inputs.Enqueue(script); + var resource = reader.Read(); + LoadGameObjects(resource); } - - PreprocessTasks[] threadTasks = new[] { tasks, tasks, tasks, tasks }; - Task.WhenAll(threadTasks.Select(task => Task.Run(() => { Preprocess(task); }))).Wait(); } + } - private void LoadGameObjects(Resource resource) + private void LoadGoals(ModInfo mod) + { + foreach (var file in mod.Scripts) { - if (!resource.Regions.TryGetValue("Templates", out Region templates)) - { - // TODO - log error - return; - } - - if (!templates.Children.TryGetValue("GameObjects", out List gameObjects)) - { - // TODO - log error - return; - } - - foreach (var gameObject in gameObjects) + var scriptStream = file.Value.MakeStream(); + try { - if (gameObject.Attributes.TryGetValue("MapKey", out NodeAttribute objectGuid) - && gameObject.Attributes.TryGetValue("Name", out NodeAttribute objectName) - && gameObject.Attributes.TryGetValue("Type", out NodeAttribute objectType)) + using (var reader = new BinaryReader(scriptStream)) { - LSLib.LS.Story.Compiler.ValueType type = null; - switch ((string)objectType.Value) + string path; + if (file.Value is PackagedFileInfo) { - case "item": type = Compiler.Context.LookupType("ITEMGUID"); break; - case "character": type = Compiler.Context.LookupType("CHARACTERGUID"); break; - case "trigger": type = Compiler.Context.LookupType("TRIGGERGUID"); break; - default: - // TODO - log unknown type - break; + var pkgd = file.Value as PackagedFileInfo; + path = (pkgd.PackageStream as FileStream).Name + ":/" + pkgd.Name; } - - if (type != null) + else { - var gameObjectInfo = new GameObjectInfo - { - Name = objectName.Value + "_" + objectGuid.Value, - Type = type - }; - Compiler.Context.GameObjects[(string)objectGuid.Value] = gameObjectInfo; + var fs = file.Value as FilesystemFileInfo; + path 
= fs.FilesystemPath; } + + var script = new GoalScript + { + Name = Path.GetFileNameWithoutExtension(file.Value.Name), + Path = path, + ScriptBody = reader.ReadBytes((int)scriptStream.Length) + }; + GoalScripts.Add(script); } } - } - - private void LoadGlobals() - { - foreach (var lsf in GameObjectLSFs) + finally { - using (var stream = new MemoryStream(lsf)) - using (var reader = new LSFReader(stream)) - { - var resource = reader.Read(); - LoadGameObjects(resource); - } + file.Value.ReleaseStream(); } } + } - private void LoadGoals(ModInfo mod) + private void LoadOrphanQueryIgnores(ModInfo mod) + { + if (mod.OrphanQueryIgnoreList == null) return; + + var ignoreStream = mod.OrphanQueryIgnoreList.MakeStream(); + try { - foreach (var file in mod.Scripts) + using (var reader = new StreamReader(ignoreStream)) { - var scriptStream = file.Value.MakeStream(); - try + var ignoreRe = new Regex("^([a-zA-Z0-9_]+)\\s+([0-9]+)$"); + + while (!reader.EndOfStream) { - using (var reader = new BinaryReader(scriptStream)) + string ignoreLine = reader.ReadLine(); + var match = ignoreRe.Match(ignoreLine); + if (match.Success) { - string path; - if (file.Value is PackagedFileInfo) - { - var pkgd = file.Value as PackagedFileInfo; - path = (pkgd.PackageStream as FileStream).Name + ":/" + pkgd.Name; - } - else - { - var fs = file.Value as FilesystemFileInfo; - path = fs.FilesystemPath; - } - - var script = new GoalScript - { - Name = Path.GetFileNameWithoutExtension(file.Value.Name), - Path = path, - ScriptBody = reader.ReadBytes((int)scriptStream.Length) - }; - GoalScripts.Add(script); + var signature = new FunctionNameAndArity( + match.Groups[1].Value, Int32.Parse(match.Groups[2].Value)); + Compiler.IgnoreUnusedDatabases.Add(signature); } } - finally - { - file.Value.ReleaseStream(); - } } } + finally + { + mod.OrphanQueryIgnoreList.ReleaseStream(); + } + } - private void LoadOrphanQueryIgnores(ModInfo mod) + private void LoadGameObjects(ModInfo mod) + { + foreach (var file in mod.Globals) { - if (mod.OrphanQueryIgnoreList == null) return; - - var ignoreStream = mod.OrphanQueryIgnoreList.MakeStream(); + var globalStream = file.Value.MakeStream(); try { - using (var reader = new StreamReader(ignoreStream)) + using (var reader = new BinaryReader(globalStream)) { - var ignoreRe = new Regex("^([a-zA-Z0-9_]+)\\s+([0-9]+)$"); - - while (!reader.EndOfStream) - { - string ignoreLine = reader.ReadLine(); - var match = ignoreRe.Match(ignoreLine); - if (match.Success) - { - var signature = new FunctionNameAndArity( - match.Groups[1].Value, Int32.Parse(match.Groups[2].Value)); - Compiler.IgnoreUnusedDatabases.Add(signature); - } - } + var globalLsf = reader.ReadBytes((int)globalStream.Length); + GameObjectLSFs.Add(globalLsf); } } finally { - mod.OrphanQueryIgnoreList.ReleaseStream(); + file.Value.ReleaseStream(); } } - private void LoadGameObjects(ModInfo mod) + foreach (var file in mod.LevelObjects) { - foreach (var file in mod.Globals) + var objectStream = file.Value.MakeStream(); + try { - var globalStream = file.Value.MakeStream(); - try + using (var reader = new BinaryReader(objectStream)) { - using (var reader = new BinaryReader(globalStream)) - { - var globalLsf = reader.ReadBytes((int)globalStream.Length); - GameObjectLSFs.Add(globalLsf); - } - } - finally - { - file.Value.ReleaseStream(); + var levelLsf = reader.ReadBytes((int)objectStream.Length); + GameObjectLSFs.Add(levelLsf); } } - - foreach (var file in mod.LevelObjects) + finally { - var objectStream = file.Value.MakeStream(); - try - { - using (var reader = 
new BinaryReader(objectStream)) - { - var levelLsf = reader.ReadBytes((int)objectStream.Length); - GameObjectLSFs.Add(levelLsf); - } - } - finally - { - file.Value.ReleaseStream(); - } + file.Value.ReleaseStream(); } } + } - private void LoadMod(string modName) + private void LoadMod(string modName) + { + if (!Mods.Mods.TryGetValue(modName, out ModInfo mod)) { - if (!Mods.Mods.TryGetValue(modName, out ModInfo mod)) - { - throw new Exception($"Mod not found: {modName}"); - } + throw new Exception($"Mod not found: {modName}"); + } - LoadGoals(mod); - LoadOrphanQueryIgnores(mod); + LoadGoals(mod); + LoadOrphanQueryIgnores(mod); - if (CheckGameObjects) - { - LoadGameObjects(mod); - } + if (CheckGameObjects) + { + LoadGameObjects(mod); } + } + + public bool Compile(string outputPath, string debugInfoPath, List mods) + { + Logger.CompilationStarted(); + HasErrors = false; + Compiler.Game = Game; + Compiler.AllowTypeCoercion = AllowTypeCoercion; - public bool Compile(string outputPath, string debugInfoPath, List mods) + if (mods.Count > 0) { - Logger.CompilationStarted(); - HasErrors = false; - Compiler.Game = Game; - Compiler.AllowTypeCoercion = AllowTypeCoercion; + Logger.TaskStarted("Discovering module files"); + var visitor = new ModPathVisitor(Mods) + { + Game = Game, + CollectStoryGoals = true, + CollectGlobals = CheckGameObjects, + CollectLevels = CheckGameObjects, + LoadPackages = LoadPackages + }; + visitor.Discover(GameDataPath); + Logger.TaskFinished(); - if (mods.Count > 0) + Logger.TaskStarted("Loading module files"); + if (CheckGameObjects) { - Logger.TaskStarted("Discovering module files"); - var visitor = new ModPathVisitor(Mods) + var nullGameObject = new GameObjectInfo { - Game = Game, - CollectStoryGoals = true, - CollectGlobals = CheckGameObjects, - CollectLevels = CheckGameObjects, - LoadPackages = LoadPackages + Name = "NULL_00000000-0000-0000-0000-000000000000", + Type = Compiler.Context.LookupType("GUIDSTRING") }; - visitor.Discover(GameDataPath); - Logger.TaskFinished(); - - Logger.TaskStarted("Loading module files"); - if (CheckGameObjects) - { - var nullGameObject = new GameObjectInfo - { - Name = "NULL_00000000-0000-0000-0000-000000000000", - Type = Compiler.Context.LookupType("GUIDSTRING") - }; - Compiler.Context.GameObjects.Add("00000000-0000-0000-0000-000000000000", nullGameObject); - } - - foreach (var modName in mods) - { - LoadMod(modName); - } - - AbstractFileInfo storyHeaderFile = null; - AbstractFileInfo typeCoercionWhitelistFile = null; - var modsSearchPath = mods.ToList(); - modsSearchPath.Reverse(); - foreach (var modName in modsSearchPath) - { - if (storyHeaderFile == null && Mods.Mods[modName].StoryHeaderFile != null) - { - storyHeaderFile = Mods.Mods[modName].StoryHeaderFile; - } + Compiler.Context.GameObjects.Add("00000000-0000-0000-0000-000000000000", nullGameObject); + } - if (typeCoercionWhitelistFile == null && Mods.Mods[modName].TypeCoercionWhitelistFile != null) - { - typeCoercionWhitelistFile = Mods.Mods[modName].TypeCoercionWhitelistFile; - } - } + foreach (var modName in mods) + { + LoadMod(modName); + } - if (storyHeaderFile != null) - { - var storyStream = storyHeaderFile.MakeStream(); - LoadStoryHeaders(storyStream); - storyHeaderFile.ReleaseStream(); - } - else + AbstractFileInfo storyHeaderFile = null; + AbstractFileInfo typeCoercionWhitelistFile = null; + var modsSearchPath = mods.ToList(); + modsSearchPath.Reverse(); + foreach (var modName in modsSearchPath) + { + if (storyHeaderFile == null && Mods.Mods[modName].StoryHeaderFile != null) 
{ - Logger.CompilationDiagnostic(new Diagnostic(null, MessageLevel.Error, "X00", "Unable to locate story header file (story_header.div)")); - HasErrors = true; + storyHeaderFile = Mods.Mods[modName].StoryHeaderFile; } - if (typeCoercionWhitelistFile != null) + if (typeCoercionWhitelistFile == null && Mods.Mods[modName].TypeCoercionWhitelistFile != null) { - var typeCoercionStream = typeCoercionWhitelistFile.MakeStream(); - LoadTypeCoercionWhitelist(typeCoercionStream); - typeCoercionWhitelistFile.ReleaseStream(); - Compiler.TypeCoercionWhitelist = TypeCoercionWhitelist; + typeCoercionWhitelistFile = Mods.Mods[modName].TypeCoercionWhitelistFile; } - - Logger.TaskFinished(); } - if (CheckGameObjects) + if (storyHeaderFile != null) { - Logger.TaskStarted("Loading game objects"); - LoadGlobals(); - Logger.TaskFinished(); + var storyStream = storyHeaderFile.MakeStream(); + LoadStoryHeaders(storyStream); + storyHeaderFile.ReleaseStream(); } else { - Compiler.Context.Log.WarningSwitches[DiagnosticCode.UnresolvedGameObjectName] = false; + Logger.CompilationDiagnostic(new Diagnostic(null, MessageLevel.Error, "X00", "Unable to locate story header file (story_header.div)")); + HasErrors = true; } - if (OsiExtender) + if (typeCoercionWhitelistFile != null) { - Logger.TaskStarted("Precompiling scripts"); - ParallelPreprocess(); - Logger.TaskFinished(); + var typeCoercionStream = typeCoercionWhitelistFile.MakeStream(); + LoadTypeCoercionWhitelist(typeCoercionStream); + typeCoercionWhitelistFile.ReleaseStream(); + Compiler.TypeCoercionWhitelist = TypeCoercionWhitelist; } - var asts = new Dictionary(); - var goalLoader = new IRGenerator(Compiler.Context); + Logger.TaskFinished(); + } - Logger.TaskStarted("Generating IR"); - var orderedGoalAsts = ParallelBuildIR(); - foreach (var goal in orderedGoalAsts) - { - Compiler.AddGoal(goal); - } + if (CheckGameObjects) + { + Logger.TaskStarted("Loading game objects"); + LoadGlobals(); + Logger.TaskFinished(); + } + else + { + Compiler.Context.Log.WarningSwitches[DiagnosticCode.UnresolvedGameObjectName] = false; + } + + if (OsiExtender) + { + Logger.TaskStarted("Precompiling scripts"); + ParallelPreprocess(); Logger.TaskFinished(); + } + var asts = new Dictionary(); + var goalLoader = new IRGenerator(Compiler.Context); - bool updated; - var iter = 1; - do - { - Logger.TaskStarted($"Propagating rule types {iter}"); - updated = Compiler.PropagateRuleTypes(); - Logger.TaskFinished(); + Logger.TaskStarted("Generating IR"); + var orderedGoalAsts = ParallelBuildIR(); + foreach (var goal in orderedGoalAsts) + { + Compiler.AddGoal(goal); + } + Logger.TaskFinished(); - if (iter++ > 10) - { - Compiler.Context.Log.Error(null, DiagnosticCode.InternalError, - "Maximal number of rule propagation retries exceeded"); - break; - } - } while (updated); - Logger.TaskStarted("Checking for unresolved references"); - Compiler.VerifyIR(); + bool updated; + var iter = 1; + do + { + Logger.TaskStarted($"Propagating rule types {iter}"); + updated = Compiler.PropagateRuleTypes(); Logger.TaskFinished(); - foreach (var message in Compiler.Context.Log.Log) + if (iter++ > 10) { - Logger.CompilationDiagnostic(message); - if (message.Level == MessageLevel.Error) - { - HasErrors = true; - } + Compiler.Context.Log.Error(null, DiagnosticCode.InternalError, + "Maximal number of rule propagation retries exceeded"); + break; } + } while (updated); - if (!HasErrors && !CheckOnly) + Logger.TaskStarted("Checking for unresolved references"); + Compiler.VerifyIR(); + Logger.TaskFinished(); + + foreach (var 
message in Compiler.Context.Log.Log) + { + Logger.CompilationDiagnostic(message); + if (message.Level == MessageLevel.Error) { - Logger.TaskStarted("Generating story nodes"); - var emitter = new StoryEmitter(Compiler.Context); - if (debugInfoPath != null) - { - emitter.EnableDebugInfo(); - } + HasErrors = true; + } + } - var story = emitter.EmitStory(); - Logger.TaskFinished(); + if (!HasErrors && !CheckOnly) + { + Logger.TaskStarted("Generating story nodes"); + var emitter = new StoryEmitter(Compiler.Context); + if (debugInfoPath != null) + { + emitter.EnableDebugInfo(); + } - Logger.TaskStarted("Saving story binary"); - using (var file = new FileStream(outputPath, FileMode.Create, FileAccess.Write)) - { - var writer = new StoryWriter(); - writer.Write(file, story, false); - } - Logger.TaskFinished(); + var story = emitter.EmitStory(); + Logger.TaskFinished(); + + Logger.TaskStarted("Saving story binary"); + using (var file = new FileStream(outputPath, FileMode.Create, FileAccess.Write)) + { + var writer = new StoryWriter(); + writer.Write(file, story, false); + } + Logger.TaskFinished(); - if (debugInfoPath != null) + if (debugInfoPath != null) + { + Logger.TaskStarted("Saving debug info"); + using (var file = new FileStream(debugInfoPath, FileMode.Create, FileAccess.Write)) { - Logger.TaskStarted("Saving debug info"); - using (var file = new FileStream(debugInfoPath, FileMode.Create, FileAccess.Write)) - { - var writer = new DebugInfoSaver(); - writer.Save(file, emitter.DebugInfo); - } - Logger.TaskFinished(); + var writer = new DebugInfoSaver(); + writer.Save(file, emitter.DebugInfo); } + Logger.TaskFinished(); } - - Logger.CompilationFinished(!HasErrors); - return !HasErrors; } + + Logger.CompilationFinished(!HasErrors); + return !HasErrors; } } diff --git a/StoryCompiler/Program.cs b/StoryCompiler/Program.cs index d5e0ddbd..29b81f03 100644 --- a/StoryCompiler/Program.cs +++ b/StoryCompiler/Program.cs @@ -5,130 +5,129 @@ using System.Collections.Generic; using LSLib.LS.Story.Compiler; -namespace LSTools.StoryCompiler +namespace LSTools.StoryCompiler; + +class Program { - class Program + static void DebugDump(string storyPath, string debugPath) { - static void DebugDump(string storyPath, string debugPath) + Story story; + using (var file = new FileStream(storyPath, FileMode.Open, FileAccess.Read, FileShare.Read)) { - Story story; - using (var file = new FileStream(storyPath, FileMode.Open, FileAccess.Read, FileShare.Read)) - { - var reader = new StoryReader(); - story = reader.Read(file); - } - - using (var debugFile = new FileStream(debugPath, FileMode.Create, FileAccess.Write)) + var reader = new StoryReader(); + story = reader.Read(file); + } + + using (var debugFile = new FileStream(debugPath, FileMode.Create, FileAccess.Write)) + { + using (var writer = new StreamWriter(debugFile)) { - using (var writer = new StreamWriter(debugFile)) - { - story.DebugDump(writer); - } + story.DebugDump(writer); } } + } - static int Run(CommandLineArguments args) + static int Run(CommandLineArguments args) + { + Logger logger; + if (args.JsonOutput) + { + logger = new JsonLogger(); + } + else { - Logger logger; - if (args.JsonOutput) + logger = new ConsoleLogger(); + } + + using (var modCompiler = new ModCompiler(logger, args.GameDataPath)) + { + modCompiler.SetWarningOptions(CommandLineArguments.GetWarningOptions(args.Warnings)); + modCompiler.CheckGameObjects = args.CheckGameObjects; + modCompiler.CheckOnly = args.CheckOnly; + modCompiler.LoadPackages = !args.NoPackages; + 
modCompiler.AllowTypeCoercion = args.AllowTypeCoercion; + modCompiler.OsiExtender = args.OsiExtender; + if (args.Game == "dos2") + { + modCompiler.Game = TargetGame.DOS2; + } + else if (args.Game == "dos2de") + { + modCompiler.Game = TargetGame.DOS2DE; + } + else if (args.Game == "bg3") { - logger = new JsonLogger(); + modCompiler.Game = TargetGame.BG3; } else { - logger = new ConsoleLogger(); + throw new ArgumentException("Unsupported game type"); } - using (var modCompiler = new ModCompiler(logger, args.GameDataPath)) + var mods = new List(args.Mods); + if (!modCompiler.Compile(args.OutputPath, args.DebugInfoOutputPath, mods)) { - modCompiler.SetWarningOptions(CommandLineArguments.GetWarningOptions(args.Warnings)); - modCompiler.CheckGameObjects = args.CheckGameObjects; - modCompiler.CheckOnly = args.CheckOnly; - modCompiler.LoadPackages = !args.NoPackages; - modCompiler.AllowTypeCoercion = args.AllowTypeCoercion; - modCompiler.OsiExtender = args.OsiExtender; - if (args.Game == "dos2") - { - modCompiler.Game = TargetGame.DOS2; - } - else if (args.Game == "dos2de") - { - modCompiler.Game = TargetGame.DOS2DE; - } - else if (args.Game == "bg3") - { - modCompiler.Game = TargetGame.BG3; - } - else - { - throw new ArgumentException("Unsupported game type"); - } - - var mods = new List(args.Mods); - if (!modCompiler.Compile(args.OutputPath, args.DebugInfoOutputPath, mods)) - { - return 3; - } - - if (args.DebugLogOutputPath != null && !args.CheckOnly) - { - DebugDump(args.OutputPath, args.DebugLogOutputPath); - } + return 3; } - return 0; + if (args.DebugLogOutputPath != null && !args.CheckOnly) + { + DebugDump(args.OutputPath, args.DebugLogOutputPath); + } } - static void Main(string[] args) + return 0; + } + + static void Main(string[] args) + { + if (args.Length == 0) { - if (args.Length == 0) - { - Console.WriteLine("Usage: StoryCompiler "); - Console.WriteLine(" --game-data-path - Location of the game Data folder"); - Console.WriteLine(" --game - Which game to target during compilation"); - Console.WriteLine(" --output - Compiled story output path"); - Console.WriteLine(" --debug-info - Debugging symbols path"); - Console.WriteLine(" --debug-log - Debug output log path"); - Console.WriteLine(" --mod - Check and compile all goals from the specified mod"); - Console.WriteLine(" --no-warn - Suppress warnings with diagnostic code "); - Console.WriteLine(" --check-only - Only check scripts for errors, don't generate compiled story file"); - Console.WriteLine(" --check-names - Verify game object names (slow!)"); - Console.WriteLine(" --no-packages - Don't load files from packages"); - Console.WriteLine(" --allow-type-coercion - Allow \"casting\" between unrelated types"); - Console.WriteLine(" --osi-extender - Compile using Osiris Extender features"); - Environment.Exit(1); - } + Console.WriteLine("Usage: StoryCompiler "); + Console.WriteLine(" --game-data-path - Location of the game Data folder"); + Console.WriteLine(" --game - Which game to target during compilation"); + Console.WriteLine(" --output - Compiled story output path"); + Console.WriteLine(" --debug-info - Debugging symbols path"); + Console.WriteLine(" --debug-log - Debug output log path"); + Console.WriteLine(" --mod - Check and compile all goals from the specified mod"); + Console.WriteLine(" --no-warn - Suppress warnings with diagnostic code "); + Console.WriteLine(" --check-only - Only check scripts for errors, don't generate compiled story file"); + Console.WriteLine(" --check-names - Verify game object names (slow!)"); + 
Console.WriteLine(" --no-packages - Don't load files from packages"); + Console.WriteLine(" --allow-type-coercion - Allow \"casting\" between unrelated types"); + Console.WriteLine(" --osi-extender - Compile using Osiris Extender features"); + Environment.Exit(1); + } - CommandLineParser.CommandLineParser parser = new CommandLineParser.CommandLineParser(); + CommandLineParser.CommandLineParser parser = new CommandLineParser.CommandLineParser(); - var argv = new CommandLineArguments(); + var argv = new CommandLineArguments(); - parser.ExtractArgumentAttributes(argv); + parser.ExtractArgumentAttributes(argv); - try - { - parser.ParseCommandLine(args); - } - catch (CommandLineArgumentException e) - { - Console.ForegroundColor = ConsoleColor.Red; - Console.WriteLine($"Argument --{e.Argument}: {e.Message}"); - Console.ResetColor(); - Environment.Exit(1); - } - catch (CommandLineException e) - { - Console.ForegroundColor = ConsoleColor.Red; - Console.WriteLine(e.Message); - Console.ResetColor(); - Environment.Exit(1); - } + try + { + parser.ParseCommandLine(args); + } + catch (CommandLineArgumentException e) + { + Console.ForegroundColor = ConsoleColor.Red; + Console.WriteLine($"Argument --{e.Argument}: {e.Message}"); + Console.ResetColor(); + Environment.Exit(1); + } + catch (CommandLineException e) + { + Console.ForegroundColor = ConsoleColor.Red; + Console.WriteLine(e.Message); + Console.ResetColor(); + Environment.Exit(1); + } - if (parser.ParsingSucceeded) - { - var exitCode = Run(argv); - Environment.Exit(exitCode); - } + if (parser.ParsingSucceeded) + { + var exitCode = Run(argv); + Environment.Exit(exitCode); } } } diff --git a/StoryDecompiler/Arguments.cs b/StoryDecompiler/Arguments.cs index 98d53b9d..733c02be 100644 --- a/StoryDecompiler/Arguments.cs +++ b/StoryDecompiler/Arguments.cs @@ -1,28 +1,27 @@ using CommandLineParser.Arguments; using System; -namespace LSTools.StoryDecompiler +namespace LSTools.StoryDecompiler; + +public class CommandLineArguments { - public class CommandLineArguments - { - [ValueArgument(typeof(string), "input", - Description = "Compiled story/savegame file path", - ValueOptional = false, - Optional = false - )] - public string InputPath; + [ValueArgument(typeof(string), "input", + Description = "Compiled story/savegame file path", + ValueOptional = false, + Optional = false + )] + public string InputPath; - [ValueArgument(typeof(string), "output", - Description = "Goal output directory", - ValueOptional = false, - Optional = false - )] - public string OutputPath; + [ValueArgument(typeof(string), "output", + Description = "Goal output directory", + ValueOptional = false, + Optional = false + )] + public string OutputPath; - [SwitchArgument("debug-log", false, - Description = "Generate story debug log", - Optional = true - )] - public bool DebugLog; - } + [SwitchArgument("debug-log", false, + Description = "Generate story debug log", + Optional = true + )] + public bool DebugLog; } diff --git a/StoryDecompiler/Program.cs b/StoryDecompiler/Program.cs index b722b679..65f10947 100644 --- a/StoryDecompiler/Program.cs +++ b/StoryDecompiler/Program.cs @@ -6,156 +6,155 @@ using System.Linq; using System.Collections.Generic; -namespace LSTools.StoryDecompiler +namespace LSTools.StoryDecompiler; + +class Program { - class Program + private static MemoryStream LoadStoryStreamFromSave(String path) { - private static MemoryStream LoadStoryStreamFromSave(String path) + using (var packageReader = new PackageReader(path)) { - using (var packageReader = new 
PackageReader(path)) - { - Package package = packageReader.Read(); + Package package = packageReader.Read(); - AbstractFileInfo globalsFile = package.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == "globals.lsf"); - if (globalsFile == null) - { - throw new Exception("Could not find globals.lsf in savegame archive."); - } + AbstractFileInfo globalsFile = package.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == "globals.lsf"); + if (globalsFile == null) + { + throw new Exception("Could not find globals.lsf in savegame archive."); + } - Resource resource; - Stream rsrcStream = globalsFile.MakeStream(); - try - { - using (var rsrcReader = new LSFReader(rsrcStream)) - { - resource = rsrcReader.Read(); - } - } - finally + Resource resource; + Stream rsrcStream = globalsFile.MakeStream(); + try + { + using (var rsrcReader = new LSFReader(rsrcStream)) { - globalsFile.ReleaseStream(); + resource = rsrcReader.Read(); } - - LSLib.LS.Node storyNode = resource.Regions["Story"].Children["Story"][0]; - var storyBlob = storyNode.Attributes["Story"].Value as byte[]; - var storyStream = new MemoryStream(storyBlob); - return storyStream; } - } + finally + { + globalsFile.ReleaseStream(); + } - private static Stream LoadStoryStreamFromFile(String path) - { - return new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read); + LSLib.LS.Node storyNode = resource.Regions["Story"].Children["Story"][0]; + var storyBlob = storyNode.Attributes["Story"].Value as byte[]; + var storyStream = new MemoryStream(storyBlob); + return storyStream; } + } - private static Story LoadStory(String path) - { - string extension = Path.GetExtension(path).ToLower(); + private static Stream LoadStoryStreamFromFile(String path) + { + return new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read); + } - Stream storyStream; - switch (extension) - { - case ".lsv": - storyStream = LoadStoryStreamFromSave(path); - break; + private static Story LoadStory(String path) + { + string extension = Path.GetExtension(path).ToLower(); - case ".osi": - storyStream = LoadStoryStreamFromFile(path); - break; + Stream storyStream; + switch (extension) + { + case ".lsv": + storyStream = LoadStoryStreamFromSave(path); + break; - default: - throw new Exception($"Unsupported story/save extension: {extension}"); - } + case ".osi": + storyStream = LoadStoryStreamFromFile(path); + break; - using (storyStream) - { - var reader = new StoryReader(); - return reader.Read(storyStream); - } + default: + throw new Exception($"Unsupported story/save extension: {extension}"); + } + + using (storyStream) + { + var reader = new StoryReader(); + return reader.Read(storyStream); } + } - private static void DebugDumpStory(Story story, String debugLogPath) + private static void DebugDumpStory(Story story, String debugLogPath) + { + using (var debugFile = new FileStream(debugLogPath, FileMode.Create, FileAccess.Write)) { - using (var debugFile = new FileStream(debugLogPath, FileMode.Create, FileAccess.Write)) + using (var writer = new StreamWriter(debugFile)) { - using (var writer = new StreamWriter(debugFile)) - { - story.DebugDump(writer); - } + story.DebugDump(writer); } } + } - private static void DecompileStoryGoals(Story story, String outputDir) + private static void DecompileStoryGoals(Story story, String outputDir) + { + foreach (KeyValuePair goal in story.Goals) { - foreach (KeyValuePair goal in story.Goals) + string filePath = Path.Combine(outputDir, $"{goal.Value.Name}.txt"); + using (var goalFile = new FileStream(filePath, 
FileMode.Create, FileAccess.Write)) { - string filePath = Path.Combine(outputDir, $"{goal.Value.Name}.txt"); - using (var goalFile = new FileStream(filePath, FileMode.Create, FileAccess.Write)) + using (var writer = new StreamWriter(goalFile)) { - using (var writer = new StreamWriter(goalFile)) - { - goal.Value.MakeScript(writer, story); - } + goal.Value.MakeScript(writer, story); } } } + } - private static void Run(CommandLineArguments args) - { - Console.WriteLine($"Loading story from {args.InputPath} ..."); - var story = LoadStory(args.InputPath); - - if (args.DebugLog) - { - Console.WriteLine($"Exporting debug log ..."); - string debugLogPath = Path.Combine(args.OutputPath, "debug.log"); - DebugDumpStory(story, debugLogPath); - } + private static void Run(CommandLineArguments args) + { + Console.WriteLine($"Loading story from {args.InputPath} ..."); + var story = LoadStory(args.InputPath); - Console.WriteLine($"Exporting goals ..."); - DecompileStoryGoals(story, args.OutputPath); + if (args.DebugLog) + { + Console.WriteLine($"Exporting debug log ..."); + string debugLogPath = Path.Combine(args.OutputPath, "debug.log"); + DebugDumpStory(story, debugLogPath); } - static void Main(string[] args) + Console.WriteLine($"Exporting goals ..."); + DecompileStoryGoals(story, args.OutputPath); + } + + static void Main(string[] args) + { + if (args.Length == 0) { - if (args.Length == 0) - { - Console.WriteLine("Usage: StoryDecompiler "); - Console.WriteLine(" --input - Compiled story/savegame file path"); - Console.WriteLine(" --output - Goal output directory"); - Console.WriteLine(" --debug-log - Generate story debug log"); - Environment.Exit(1); - } + Console.WriteLine("Usage: StoryDecompiler "); + Console.WriteLine(" --input - Compiled story/savegame file path"); + Console.WriteLine(" --output - Goal output directory"); + Console.WriteLine(" --debug-log - Generate story debug log"); + Environment.Exit(1); + } - CommandLineParser.CommandLineParser parser = new CommandLineParser.CommandLineParser(); + CommandLineParser.CommandLineParser parser = new CommandLineParser.CommandLineParser(); - var argv = new CommandLineArguments(); + var argv = new CommandLineArguments(); - parser.ExtractArgumentAttributes(argv); + parser.ExtractArgumentAttributes(argv); - try - { - parser.ParseCommandLine(args); - } - catch (CommandLineArgumentException e) - { - Console.ForegroundColor = ConsoleColor.Red; - Console.WriteLine($"Argument --{e.Argument}: {e.Message}"); - Console.ResetColor(); - Environment.Exit(1); - } - catch (CommandLineException e) - { - Console.ForegroundColor = ConsoleColor.Red; - Console.WriteLine(e.Message); - Console.ResetColor(); - Environment.Exit(1); - } + try + { + parser.ParseCommandLine(args); + } + catch (CommandLineArgumentException e) + { + Console.ForegroundColor = ConsoleColor.Red; + Console.WriteLine($"Argument --{e.Argument}: {e.Message}"); + Console.ResetColor(); + Environment.Exit(1); + } + catch (CommandLineException e) + { + Console.ForegroundColor = ConsoleColor.Red; + Console.WriteLine(e.Message); + Console.ResetColor(); + Environment.Exit(1); + } - if (parser.ParsingSucceeded) - { - Run(argv); - } + if (parser.ParsingSucceeded) + { + Run(argv); } } } diff --git a/VTexTool/Program.cs b/VTexTool/Program.cs index 5ceceac4..de947e72 100644 --- a/VTexTool/Program.cs +++ b/VTexTool/Program.cs @@ -4,52 +4,51 @@ using System.IO; using System.Linq; -namespace LSTools.StoryDecompiler +namespace LSTools.VTexTool; + +class Program { - class Program + static void Main(string[] args) { - 
static void Main(string[] args) + if (args.Length != 2) { - if (args.Length != 2) - { - Console.WriteLine("Usage: VTexTool.exe "); - Environment.Exit(1); - } + Console.WriteLine("Usage: VTexTool.exe "); + Environment.Exit(1); + } - Console.WriteLine($"LSLib Virtual Tile Set Generator (v{Common.MajorVersion}.{Common.MinorVersion}.{Common.PatchVersion})"); + Console.WriteLine($"LSLib Virtual Tile Set Generator (v{Common.MajorVersion}.{Common.MinorVersion}.{Common.PatchVersion})"); - try + try + { + var configPath = Path.Combine(args[0], args[1]); + var descriptor = new TileSetDescriptor { - var configPath = Path.Combine(args[0], args[1]); - var descriptor = new TileSetDescriptor - { - RootPath = args[0] - }; - descriptor.Load(configPath); - - var builder = new TileSetBuilder(descriptor.Config); - foreach (var texture in descriptor.Textures) - { - var layerPaths = texture.Layers.Select(name => name != null ? Path.Combine(descriptor.SourceTexturePath, name) : null).ToList(); - builder.AddTexture(texture.Name, layerPaths); - } + RootPath = args[0] + }; + descriptor.Load(configPath); - builder.Build(descriptor.VirtualTexturePath); - } - catch (InvalidDataException e) + var builder = new TileSetBuilder(descriptor.Config); + foreach (var texture in descriptor.Textures) { - Console.ForegroundColor = ConsoleColor.Red; - Console.WriteLine(e.Message); - Console.ForegroundColor = ConsoleColor.Gray; - Environment.Exit(1); - } - catch (FileNotFoundException e) - { - Console.ForegroundColor = ConsoleColor.Red; - Console.WriteLine(e.Message); - Console.ForegroundColor = ConsoleColor.Gray; - Environment.Exit(1); + var layerPaths = texture.Layers.Select(name => name != null ? Path.Combine(descriptor.SourceTexturePath, name) : null).ToList(); + builder.AddTexture(texture.Name, layerPaths); } + + builder.Build(descriptor.VirtualTexturePath); + } + catch (InvalidDataException e) + { + Console.ForegroundColor = ConsoleColor.Red; + Console.WriteLine(e.Message); + Console.ForegroundColor = ConsoleColor.Gray; + Environment.Exit(1); + } + catch (FileNotFoundException e) + { + Console.ForegroundColor = ConsoleColor.Red; + Console.WriteLine(e.Message); + Console.ForegroundColor = ConsoleColor.Gray; + Environment.Exit(1); } } } From 4b240747c85f83cdc3223c61c37829731bf7f644 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Mon, 11 Dec 2023 12:03:28 +0100 Subject: [PATCH 046/139] Remove CRC from LSLibNative --- LSLib/LS/PackageCommon.cs | 3 +- LSLib/LS/PackageWriter.cs | 4 +- LSLib/LSLib.csproj | 1 + LSLibNative/LSLibNative.vcxproj | 2 - LSLibNative/LSLibNative.vcxproj.filters | 6 -- LSLibNative/crc32.cpp | 74 ------------------------- LSLibNative/crc32.h | 16 ------ 7 files changed, 5 insertions(+), 101 deletions(-) delete mode 100644 LSLibNative/crc32.cpp delete mode 100644 LSLibNative/crc32.h diff --git a/LSLib/LS/PackageCommon.cs b/LSLib/LS/PackageCommon.cs index 7a7ba5cd..f6b43fbb 100644 --- a/LSLib/LS/PackageCommon.cs +++ b/LSLib/LS/PackageCommon.cs @@ -1,6 +1,7 @@ using System; using System.Collections.Generic; using System.IO; +using System.IO.Hashing; using System.Linq; using System.Runtime.InteropServices; using System.Text; @@ -282,7 +283,7 @@ public override Stream MakeStream() if (Crc != 0) { - UInt32 computedCrc = Crc32.Compute(compressed, 0); + UInt32 computedCrc = Crc32.HashToUInt32(compressed); if (computedCrc != Crc) { string msg = $"CRC check failed on file '{Name}', archive is possibly corrupted. 
Expected {Crc,8:X}, got {computedCrc,8:X}"; diff --git a/LSLib/LS/PackageWriter.cs b/LSLib/LS/PackageWriter.cs index 69c3ec57..ff63c2b4 100644 --- a/LSLib/LS/PackageWriter.cs +++ b/LSLib/LS/PackageWriter.cs @@ -1,12 +1,12 @@ using System; using System.Collections.Generic; using System.IO; +using System.IO.Hashing; using System.Linq; using System.Runtime.InteropServices; using System.Security.Cryptography; using System.Text; using LSLib.LS.Enums; -using LSLib.Native; using LZ4; namespace LSLib.LS; @@ -81,7 +81,7 @@ public PackagedFileInfo WriteFile(AbstractFileInfo info) } packaged.SizeOnDisk = (UInt64) (stream.Position - (long)packaged.OffsetInFile); - packaged.Crc = Crc32.Compute(compressed, 0); + packaged.Crc = Crc32.HashToUInt32(compressed); if ((package.Metadata.Flags & PackageFlags.Solid) == 0) { diff --git a/LSLib/LSLib.csproj b/LSLib/LSLib.csproj index ee8119d0..5a71b1a4 100644 --- a/LSLib/LSLib.csproj +++ b/LSLib/LSLib.csproj @@ -31,6 +31,7 @@ + diff --git a/LSLibNative/LSLibNative.vcxproj b/LSLibNative/LSLibNative.vcxproj index 6e8dd79b..2d072f91 100644 --- a/LSLibNative/LSLibNative.vcxproj +++ b/LSLibNative/LSLibNative.vcxproj @@ -114,7 +114,6 @@ - @@ -127,7 +126,6 @@ - false false diff --git a/LSLibNative/LSLibNative.vcxproj.filters b/LSLibNative/LSLibNative.vcxproj.filters index 04a9107c..8e60d1e1 100644 --- a/LSLibNative/LSLibNative.vcxproj.filters +++ b/LSLibNative/LSLibNative.vcxproj.filters @@ -45,9 +45,6 @@ Header Files - - Header Files - Header Files @@ -77,9 +74,6 @@ Source Files - - Source Files - Source Files diff --git a/LSLibNative/crc32.cpp b/LSLibNative/crc32.cpp deleted file mode 100644 index 8950b3dc..00000000 --- a/LSLibNative/crc32.cpp +++ /dev/null @@ -1,74 +0,0 @@ -#pragma once - -#include "crc32.h" - -namespace LSLib { - namespace Native { - static uint32_t Crc32Lookup[8][0x100]; - static bool Crc32LookupTableInitialized = false; - - void InitCrc32LookupTable() - { - if (Crc32LookupTableInitialized) return; - - for (unsigned int i = 0; i <= 0xFF; i++) - { - uint32_t crc = i; - for (unsigned int j = 0; j < 8; j++) - crc = (crc >> 1) ^ (-int(crc & 1) & 0xedb88320u); - Crc32Lookup[0][i] = crc; - } - - for (unsigned int i = 0; i <= 0xFF; i++) - { - Crc32Lookup[1][i] = (Crc32Lookup[0][i] >> 8) ^ Crc32Lookup[0][Crc32Lookup[0][i] & 0xFF]; - Crc32Lookup[2][i] = (Crc32Lookup[1][i] >> 8) ^ Crc32Lookup[0][Crc32Lookup[1][i] & 0xFF]; - Crc32Lookup[3][i] = (Crc32Lookup[2][i] >> 8) ^ Crc32Lookup[0][Crc32Lookup[2][i] & 0xFF]; - Crc32Lookup[4][i] = (Crc32Lookup[3][i] >> 8) ^ Crc32Lookup[0][Crc32Lookup[3][i] & 0xFF]; - Crc32Lookup[5][i] = (Crc32Lookup[4][i] >> 8) ^ Crc32Lookup[0][Crc32Lookup[4][i] & 0xFF]; - Crc32Lookup[6][i] = (Crc32Lookup[5][i] >> 8) ^ Crc32Lookup[0][Crc32Lookup[5][i] & 0xFF]; - Crc32Lookup[7][i] = (Crc32Lookup[6][i] >> 8) ^ Crc32Lookup[0][Crc32Lookup[6][i] & 0xFF]; - } - - Crc32LookupTableInitialized = true; - } - - uint32_t Crc32::Compute(array ^ input, uint32_t previousCrc32) - { - if (input->Length == 0) - { - return previousCrc32; - } - - pin_ptr inputPin(&input[input->GetLowerBound(0)]); - - uint32_t * current = (uint32_t *)inputPin; - int length = input->Length; - uint32_t crc = ~previousCrc32; - - InitCrc32LookupTable(); - - // process eight bytes at once - while (length >= 8) - { - uint32_t one = *current++ ^ crc; - uint32_t two = *current++; - crc = Crc32Lookup[7][one & 0xFF] ^ - Crc32Lookup[6][(one >> 8) & 0xFF] ^ - Crc32Lookup[5][(one >> 16) & 0xFF] ^ - Crc32Lookup[4][one >> 24] ^ - Crc32Lookup[3][two & 0xFF] ^ - Crc32Lookup[2][(two >> 8) & 
0xFF] ^ - Crc32Lookup[1][(two >> 16) & 0xFF] ^ - Crc32Lookup[0][two >> 24]; - length -= 8; - } - byte * currentChar = (byte *)current; - // remaining 1 to 7 bytes - while (length--) - crc = (crc >> 8) ^ Crc32Lookup[0][(crc & 0xFF) ^ *currentChar++]; - - return ~crc; - } - } -} diff --git a/LSLibNative/crc32.h b/LSLibNative/crc32.h deleted file mode 100644 index 62b954a2..00000000 --- a/LSLibNative/crc32.h +++ /dev/null @@ -1,16 +0,0 @@ -#pragma once - -#include - -using namespace System; -using namespace System::Collections::Generic; - -namespace LSLib { - namespace Native { - public ref class Crc32 abstract sealed - { - public: - static uint32_t Compute(array ^ input, uint32_t previousCrc32); - }; - } -} From fcce1518b714ceab850f97c54c462442d5827300 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Mon, 11 Dec 2023 12:04:06 +0100 Subject: [PATCH 047/139] Oopsy --- LSLib/Granny/GR2/Format.cs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/LSLib/Granny/GR2/Format.cs b/LSLib/Granny/GR2/Format.cs index d4c20cd6..a1ec12ef 100644 --- a/LSLib/Granny/GR2/Format.cs +++ b/LSLib/Granny/GR2/Format.cs @@ -6,6 +6,7 @@ using OpenTK.Mathematics; using System.IO; using System.Reflection; +using System.IO.Hashing; namespace LSLib.Granny.GR2; @@ -457,7 +458,7 @@ public UInt32 CalculateCRC(Stream stream) stream.Seek(totalHeaderSize, SeekOrigin.Begin); byte[] body = new byte[fileSize - totalHeaderSize]; stream.Read(body, 0, (int)(fileSize - totalHeaderSize)); - UInt32 crc = Native.Crc32.Compute(body, 0); + UInt32 crc = Crc32.HashToUInt32(body); stream.Seek(originalPos, SeekOrigin.Begin); return crc; } From 412442a3f9cc8cc86aade9e9b80361526db7ef9d Mon Sep 17 00:00:00 2001 From: Norbyte Date: Mon, 11 Dec 2023 16:59:54 +0100 Subject: [PATCH 048/139] Fix truncation of zlib stream --- LSLib/LS/BinUtils.cs | 8 +++++--- LSLib/LS/PackageWriter.cs | 2 +- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/LSLib/LS/BinUtils.cs b/LSLib/LS/BinUtils.cs index da01fec6..99d1c770 100644 --- a/LSLib/LS/BinUtils.cs +++ b/LSLib/LS/BinUtils.cs @@ -385,10 +385,12 @@ public static byte[] CompressZlib(byte[] uncompressed, LSCompressionLevel compre }; using var outputStream = new MemoryStream(); - using var compressor = new ZLibStream(outputStream, level); + using (var compressor = new ZLibStream(outputStream, level, true)) + { + compressor.Write(uncompressed, 0, uncompressed.Length); + } + - compressor.Write(uncompressed, 0, uncompressed.Length); - compressor.Flush(); return outputStream.ToArray(); } diff --git a/LSLib/LS/PackageWriter.cs b/LSLib/LS/PackageWriter.cs index ff63c2b4..956d50f5 100644 --- a/LSLib/LS/PackageWriter.cs +++ b/LSLib/LS/PackageWriter.cs @@ -49,7 +49,7 @@ public PackagedFileInfo WriteFile(AbstractFileInfo info) var compression = Compression; var compressionLevel = LSCompressionLevel; - if (info.Name.EndsWith(".gts") || info.Name.EndsWith(".gtp")) + if (info.Name.EndsWith(".gts") || info.Name.EndsWith(".gtp") || size == 0) { compression = CompressionMethod.None; compressionLevel = LSCompressionLevel.FastCompression; From 95dfd14c282861c748142dc23526fad81826d8e0 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Thu, 14 Dec 2023 18:02:48 +0100 Subject: [PATCH 049/139] Fix indentation --- LSLib/LS/Stats/StatDefinitions.cs | 276 +++++++++++++++--------------- 1 file changed, 138 insertions(+), 138 deletions(-) diff --git a/LSLib/LS/Stats/StatDefinitions.cs b/LSLib/LS/Stats/StatDefinitions.cs index b7f11eef..d0f068c7 100644 --- a/LSLib/LS/Stats/StatDefinitions.cs +++ 
b/LSLib/LS/Stats/StatDefinitions.cs @@ -842,46 +842,46 @@ public void LoadDefinitions(Stream stream) AddBoost("AC", 1, [ - "AC", "Int" + "AC", "Int" ]); AddBoost("Ability", 2, [ - "Ability", "Ability", - "Amount", "Int", - "Arg3", "Int", + "Ability", "Ability", + "Amount", "Int", + "Arg3", "Int", ]); AddBoost("RollBonus", 2, [ - "RollType", "StatsRollType", - "Bonus", "Lua", - "Arg3", "String", + "RollType", "StatsRollType", + "Bonus", "Lua", + "Arg3", "String", ]); AddBoost("Advantage", 1, [ - "Type", "AdvantageType", - "Arg2", "String", // Depends on type - "Tag1", "String", // TagManager resource - "Tag2", "String", // TagManager resource - "Tag3", "String", // TagManager resource + "Type", "AdvantageType", + "Arg2", "String", // Depends on type + "Tag1", "String", // TagManager resource + "Tag2", "String", // TagManager resource + "Tag3", "String", // TagManager resource ]); AddBoost("Disadvantage", 1, [ - "Type", "AdvantageType", - "Arg2", "String", // Depends on type - "Tag1", "String", // TagManager resource - "Tag2", "String", // TagManager resource - "Tag3", "String", // TagManager resource + "Type", "AdvantageType", + "Arg2", "String", // Depends on type + "Tag1", "String", // TagManager resource + "Tag2", "String", // TagManager resource + "Tag3", "String", // TagManager resource ]); AddBoost("ActionResource", 2, [ - "Resource", "String", // Action resource name - "Amount", "Float", - "Level", "Int", + "Resource", "String", // Action resource name + "Amount", "Float", + "Level", "Int", "DieType", "DieType", ]); AddBoost("CriticalHit", 3, [ - "Type", "CriticalHitType", - "Result", "CriticalHitResult", - "When", "CriticalHitWhen", - "Arg4", "Float", + "Type", "CriticalHitType", + "Result", "CriticalHitResult", + "When", "CriticalHitWhen", + "Arg4", "Float", ]); AddBoost("AbilityFailedSavingThrow", 1, [ - "Ability", "Ability" + "Ability", "Ability" ]); AddBoost("Resistance", 2, [ "DamageType", "AllOrDamageType", @@ -893,11 +893,11 @@ public void LoadDefinitions(Stream stream) "DamageType3", "Damage Type", ]); AddBoost("ProficiencyBonusOverride", 1, [ - "Bonus", "Lua" + "Bonus", "Lua" ]); AddBoost("ActionResourceOverride", 2, [ "Resource", "String", // Action resource name - "Amount", "Float", + "Amount", "Float", "Level", "Int", "DieType", "DieType", ]); @@ -908,83 +908,83 @@ public void LoadDefinitions(Stream stream) AddBoost("AddProficiencyToDamage", 0, []); AddBoost("ActionResourceConsumeMultiplier", 3, [ "Resource", "String", // Action resource name - "Multiplier", "Float", + "Multiplier", "Float", "Level", "Int", ]); AddBoost("BlockVerbalComponent", 0, []); AddBoost("BlockSomaticComponent", 0, []); AddBoost("HalveWeaponDamage", 1, [ - "Ability", "Ability" + "Ability", "Ability" ]); AddBoost("UnlockSpell", 1, [ - "SpellId", "SpellId", + "SpellId", "SpellId", "Type", "UnlockSpellType", "SpellGuid", "String", // "None" or GUID or "" "Cooldown", "SpellCooldownType", "Ability", "Ability" ]); AddBoost("SourceAdvantageOnAttack", 0, [ - "Arg1", "Float" + "Arg1", "Float" ]); AddBoost("ProficiencyBonus", 1, [ - "Type", "ProficiencyBonusBoostType", + "Type", "ProficiencyBonusBoostType", "Arg2", "String" ]); AddBoost("BlockSpellCast", 0, [ - "Arg1", "Float" + "Arg1", "Float" ]); AddBoost("Proficiency", 1, [ - "Arg1", "ProficiencyGroupFlags", - "Arg2", "ProficiencyGroupFlags", - "Arg3", "ProficiencyGroupFlags", + "Arg1", "ProficiencyGroupFlags", + "Arg2", "ProficiencyGroupFlags", + "Arg3", "ProficiencyGroupFlags", ]); AddBoost("SourceAllyAdvantageOnAttack", 0, []); 
AddBoost("IncreaseMaxHP", 1, [ - "Amount", "String" // Lua or % + "Amount", "String" // Lua or % ]); AddBoost("ActionResourceBlock", 1, [ "Resource", "String", // Action resource name "Level", "Int", ]); AddBoost("StatusImmunity", 1, [ - "StatusId", "StatusIdOrGroup", - "Tag1", "String", // Tag resource name - "Tag2", "String", // Tag resource name - "Tag3", "String", // Tag resource name - "Tag4", "String", // Tag resource name - "Tag5", "String", // Tag resource name + "StatusId", "StatusIdOrGroup", + "Tag1", "String", // Tag resource name + "Tag2", "String", // Tag resource name + "Tag3", "String", // Tag resource name + "Tag4", "String", // Tag resource name + "Tag5", "String", // Tag resource name ]); AddBoost("UseBoosts", 1, [ - "Arg1", "StatsFunctors" + "Arg1", "StatsFunctors" ]); AddBoost("CannotHarmCauseEntity", 1, [ - "Arg1", "String" + "Arg1", "String" ]); AddBoost("TemporaryHP", 1, [ - "Amount", "Lua" + "Amount", "Lua" ]); AddBoost("Weight", 1, [ - "Weight", "Float" + "Weight", "Float" ]); AddBoost("WeightCategory", 1, [ - "Category", "Int" + "Category", "Int" ]); AddBoost("FactionOverride", 1, [ - "Faction", "String" // Faction resource GUID or "Source" + "Faction", "String" // Faction resource GUID or "Source" ]); AddBoost("ActionResourceMultiplier", 2, [ "Resource", "String", // Action resource name - "Multiplier", "Int", + "Multiplier", "Int", "Level", "Int", ]); AddBoost("BlockRegainHP", 0, [ - "Type", "ResurrectTypes" + "Type", "ResurrectTypes" ]); AddBoost("Initiative", 1, [ - "Initiative", "Int" + "Initiative", "Int" ]); AddBoost("DarkvisionRange", 1, [ - "Range", "Float" + "Range", "Float" ]); AddBoost("DarkvisionRangeMin", 1, [ "Range", "Float" @@ -993,18 +993,18 @@ public void LoadDefinitions(Stream stream) "Range", "Float" ]); AddBoost("Tag", 1, [ - "Arg1", "String" // Tag resource name + "Arg1", "String" // Tag resource name ]); AddBoost("IgnoreDamageThreshold", 2, [ - "DamageType", "AllOrDamageType", + "DamageType", "AllOrDamageType", "Threshold", "Int" ]); AddBoost("Skill", 2, [ - "Skill", "SkillType", + "Skill", "SkillType", "Amount", "Lua" ]); AddBoost("WeaponDamage", 2, [ - "Amount", "Lua", + "Amount", "Lua", "DamageType", "Damage Type", "Arg3", "Boolean" ]); @@ -1013,84 +1013,84 @@ public void LoadDefinitions(Stream stream) ]); AddBoost("IgnoreFallDamage", 0, []); AddBoost("Reroll", 3, [ - "RollType", "StatsRollType", + "RollType", "StatsRollType", "RollBelow", "Int", "Arg3", "Boolean" ]); AddBoost("DownedStatus", 1, [ - "StatusId", "StatusId", + "StatusId", "StatusId", "Arg2", "Int" ]); AddBoost("Invulnerable", 0, []); AddBoost("WeaponEnchantment", 1, [ - "Enchantment", "Int" + "Enchantment", "Int" ]); AddBoost("GuaranteedChanceRollOutcome", 1, [ - "Arg1", "Boolean" + "Arg1", "Boolean" ]); AddBoost("Attribute", 1, [ - "Flags", "AttributeFlags" + "Flags", "AttributeFlags" ]); AddBoost("IgnoreLeaveAttackRange", 0, []); AddBoost("GameplayLight", 2, [ - "Arg1", "Float", - "Arg2", "Boolean", - "Arg3", "Float", - "Arg4", "Boolean" + "Arg1", "Float", + "Arg2", "Boolean", + "Arg3", "Float", + "Arg4", "Boolean" ]); AddBoost("DialogueBlock", 0, []); AddBoost("DualWielding", 1, [ - "DW", "Boolean" + "DW", "Boolean" ]); AddBoost("Savant", 1, [ - "SpellSchool", "SpellSchool" + "SpellSchool", "SpellSchool" ]); AddBoost("MinimumRollResult", 2, [ - "RollType", "StatsRollType", + "RollType", "StatsRollType", "MinResult", "Int" ]); AddBoost("Lootable", 0, []); AddBoost("CharacterWeaponDamage", 1, [ - "Amount", "Lua", + "Amount", "Lua", "DamageType", "Damage Type" ]); 
AddBoost("ProjectileDeflect", 0, [ - "Type1", "String", - "Type2", "String", + "Type1", "String", + "Type2", "String", ]); AddBoost("AbilityOverrideMinimum", 2, [ - "Ability", "Ability", + "Ability", "Ability", "Minimum", "Int" ]); AddBoost("ACOverrideFormula", 2, [ - "AC", "Int", + "AC", "Int", "Arg2", "Boolean", "Ability1", "Ability", "Ability2", "Ability", "Ability3", "Ability", ]); AddBoost("FallDamageMultiplier", 1, [ - "Multiplier", "Float" + "Multiplier", "Float" ]); AddBoost("ActiveCharacterLight", 1, [ - "Light", "String" + "Light", "String" ]); AddBoost("Invisibility", 0, []); AddBoost("TwoWeaponFighting", 0, []); AddBoost("WeaponAttackTypeOverride", 1, [ - "Type", "AttackType" + "Type", "AttackType" ]); AddBoost("WeaponDamageDieOverride", 1, [ - "DamageDie", "String", // die, eg. 1d10 + "DamageDie", "String", // die, eg. 1d10 ]); AddBoost("CarryCapacityMultiplier", 1, [ - "Multiplier", "Float" + "Multiplier", "Float" ]); AddBoost("WeaponProperty", 1, [ - "Flags1", "WeaponFlags" + "Flags1", "WeaponFlags" ]); AddBoost("WeaponAttackRollAbilityOverride", 1, [ - "Ability", "AbilityOrAttackRollAbility" + "Ability", "AbilityOrAttackRollAbility" ]); AddBoost("BlockTravel", 0, []); AddBoost("BlockGatherAtCamp", 0, []); @@ -1098,7 +1098,7 @@ public void LoadDefinitions(Stream stream) AddBoost("VoicebarkBlock", 0, []); AddBoost("HiddenDuringCinematic", 0, []); AddBoost("SightRangeAdditive", 1, [ - "Range", "Float" + "Range", "Float" ]); AddBoost("SightRangeMinimum", 1, [ "Range", "Float" @@ -1115,98 +1115,98 @@ public void LoadDefinitions(Stream stream) ]); AddBoost("NonLethal", 0, []); AddBoost("UnlockSpellVariant", 1, [ - "Modification1", "Lua", // TODO - add Modification parser? - "Modification2", "Lua", - "Modification3", "Lua", - "Modification4", "Lua", - "Modification5", "Lua", - "Modification6", "Lua", - "Modification7", "Lua", - "Modification8", "Lua", - "Modification9", "Lua", - "Modification10", "Lua", - "Modification11", "Lua", - "Modification12", "Lua", - "Modification13", "Lua", - "Modification14", "Lua", - "Modification15", "Lua" + "Modification1", "Lua", // TODO - add Modification parser? 
+ "Modification2", "Lua", + "Modification3", "Lua", + "Modification4", "Lua", + "Modification5", "Lua", + "Modification6", "Lua", + "Modification7", "Lua", + "Modification8", "Lua", + "Modification9", "Lua", + "Modification10", "Lua", + "Modification11", "Lua", + "Modification12", "Lua", + "Modification13", "Lua", + "Modification14", "Lua", + "Modification15", "Lua" ]); AddBoost("DetectDisturbancesBlock", 1, [ - "Arg1", "Boolean" + "Arg1", "Boolean" ]); AddBoost("BlockAbilityModifierFromAC", 1, [ - "Ability", "Ability" + "Ability", "Ability" ]); AddBoost("ScaleMultiplier", 0, [ - "Multiplier", "Float" + "Multiplier", "Float" ]); AddBoost("CriticalDamageOnHit", 0, []); AddBoost("DamageReduction", 2, [ - "DamageType", "AllOrDamageType", + "DamageType", "AllOrDamageType", "ReductionType", "DamageReductionType", "Amount", "Lua" ]); AddBoost("ReduceCriticalAttackThreshold", 1, [ - "Threshold", "Int", + "Threshold", "Int", "StatusId", "StatusIdOrGroup" ]); AddBoost("PhysicalForceRangeBonus", 1, [ - "Arg1", "String" + "Arg1", "String" ]); AddBoost("ObjectSize", 1, [ - "Size", "Int" + "Size", "Int" ]); AddBoost("ObjectSizeOverride", 1, [ "Size", "String" ]); AddBoost("ItemReturnToOwner", 0, []); AddBoost("AiArchetypeOverride", 1, [ - "Archetype", "String", + "Archetype", "String", "Arg2", "Int" ]); AddBoost("ExpertiseBonus", 1, [ - "Skill", "SkillType" + "Skill", "SkillType" ]); AddBoost("EntityThrowDamage", 1, [ - "Die", "String", + "Die", "String", "DamageType", "Damage Type" ]); AddBoost("WeaponDamageTypeOverride", 1, [ - "DamageType", "Damage Type" + "DamageType", "Damage Type" ]); AddBoost("MaximizeHealing", 1, [ - "Direction", "HealingDirection", + "Direction", "HealingDirection", "Type", "ResurrectType" ]); AddBoost("IgnoreEnterAttackRange", 0, []); AddBoost("DamageBonus", 1, [ - "Amount", "Lua", + "Amount", "Lua", "DamageType", "Damage Type", "Arg3", "Boolean" ]); AddBoost("Detach", 0, []); AddBoost("ConsumeItemBlock", 0, []); AddBoost("AdvanceSpells", 1, [ - "SpellId", "SpellId", + "SpellId", "SpellId", "Arg2", "Int" ]); AddBoost("SpellResistance", 1, [ - "Resistance", "ResistanceBoostFlags" + "Resistance", "ResistanceBoostFlags" ]); AddBoost("WeaponAttackRollBonus", 1, [ - "Amount", "Lua" + "Amount", "Lua" ]); AddBoost("SpellSaveDC", 1, [ - "DC", "Int" + "DC", "Int" ]); AddBoost("RedirectDamage", 1, [ - "Arg1", "Float", - "DamageType", "Damage Type", - "DamageType2", "Damage Type", + "Arg1", "Float", + "DamageType", "Damage Type", + "DamageType2", "Damage Type", "Arg4", "Boolean" ]); AddBoost("CanSeeThrough", 1, [ - "CanSeeThrough", "Boolean" + "CanSeeThrough", "Boolean" ]); AddBoost("CanShootThrough", 1, [ "CanShootThrough", "Boolean" @@ -1216,14 +1216,14 @@ public void LoadDefinitions(Stream stream) ]); AddBoost("MonkWeaponAttackOverride", 0, []); AddBoost("MonkWeaponDamageDiceOverride", 1, [ - "Arg1", "Lua" + "Arg1", "Lua" ]); AddBoost("IntrinsicSummonerProficiency", 0, []); AddBoost("HorizontalFOVOverride", 1, [ - "FOV", "Float" + "FOV", "Float" ]); AddBoost("CharacterUnarmedDamage", 1, [ - "Damage", "Lua", + "Damage", "Lua", "DamageType", "Damage Type" ]); AddBoost("UnarmedMagicalProperty", 0, []); @@ -1233,60 +1233,60 @@ public void LoadDefinitions(Stream stream) ]); AddBoost("AreaDamageEvade", 0, []); AddBoost("ActionResourcePreventReduction", 1, [ - "ActionResource", "String", // Action resource name + "ActionResource", "String", // Action resource name "Level", "Int" ]); AddBoost("AttackSpellOverride", 1, [ - "AttackSpell", "SpellId", - "OriginalSpell", "SpellId" + 
"AttackSpell", "SpellId", + "OriginalSpell", "SpellId" ]); AddBoost("Lock", 0, [ - "DC", "Guid" + "DC", "Guid" ]); AddBoost("NoAOEDamageOnLand", 0, []); AddBoost("IgnorePointBlankDisadvantage", 1, [ - "Flags", "WeaponFlags" + "Flags", "WeaponFlags" ]); AddBoost("CriticalHitExtraDice", 1, [ - "ExtraDice", "Int", + "ExtraDice", "Int", "AttackType", "AttackType" ]); AddBoost("DodgeAttackRoll", 2, [ - "Arg1", "Int", - "Arg2", "Int", - "Status", "StatusIdOrGroup" + "Arg1", "Int", + "Arg2", "Int", + "Status", "StatusIdOrGroup" ]); AddBoost("GameplayObscurity", 1, [ - "Obscurity", "Float" + "Obscurity", "Float" ]); AddBoost("MaximumRollResult", 2, [ "RollType", "StatsRollType", "MinResult", "Int" ]); AddBoost("UnlockInterrupt", 1, [ - "Interrupt", "Interrupt" + "Interrupt", "Interrupt" ]); AddBoost("IntrinsicSourceProficiency", 0, []); AddBoost("JumpMaxDistanceBonus", 1, [ - "Bonus", "Float" + "Bonus", "Float" ]); AddBoost("ArmorAbilityModifierCapOverride", 2, [ - "ArmorType", "ArmorType", + "ArmorType", "ArmorType", "Cap", "Int" ]); AddBoost("IgnoreResistance", 2, [ - "DamageType", "Damage Type", + "DamageType", "Damage Type", "Flags", "ResistanceBoostFlags" ]); AddBoost("ConcentrationIgnoreDamage", 1, [ - "SpellSchool", "SpellSchool" + "SpellSchool", "SpellSchool" ]); AddBoost("LeaveTriggers", 0, []); AddBoost("IgnoreLowGroundPenalty", 1, [ - "RollType", "StatsRollType" + "RollType", "StatsRollType" ]); AddBoost("IgnoreSurfaceCover", 1, [ - "SurfaceType", "String" // Surface type + "SurfaceType", "String" // Surface type ]); AddBoost("EnableBasicItemInteractions", 0, []); AddBoost("SoundsBlocked", 0, []); From 876bd44ca492f4e9b498a9798e560ae49e2b9f55 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sat, 16 Dec 2023 19:00:56 +0100 Subject: [PATCH 050/139] Add more VT optimizations --- LSLib/LS/Common.cs | 2 +- LSLib/VirtualTextures/BC5Image.cs | 1 + LSLib/VirtualTextures/Build.cs | 103 +++++++++++++++++-------- LSLib/VirtualTextures/PageFileBuild.cs | 89 ++++++++++++++++++--- make-release.bat | 44 +++++++++++ 5 files changed, 194 insertions(+), 45 deletions(-) create mode 100644 make-release.bat diff --git a/LSLib/LS/Common.cs b/LSLib/LS/Common.cs index 5f6ccc25..7a4ca861 100644 --- a/LSLib/LS/Common.cs +++ b/LSLib/LS/Common.cs @@ -9,7 +9,7 @@ public static class Common public const int MinorVersion = 19; - public const int PatchVersion = 0; + public const int PatchVersion = 1; // Version of LSTools profile data in generated DAE files public const int ColladaMetadataVersion = 3; diff --git a/LSLib/VirtualTextures/BC5Image.cs b/LSLib/VirtualTextures/BC5Image.cs index 96327a82..ffc3b8f1 100644 --- a/LSLib/VirtualTextures/BC5Image.cs +++ b/LSLib/VirtualTextures/BC5Image.cs @@ -22,6 +22,7 @@ public BC5Image(byte[] data, int width, int height) public BC5Image(int width, int height) { Data = new byte[width * height]; + Array.Clear(Data); Width = width; Height = height; } diff --git a/LSLib/VirtualTextures/Build.cs b/LSLib/VirtualTextures/Build.cs index b202e43a..82e5d359 100644 --- a/LSLib/VirtualTextures/Build.cs +++ b/LSLib/VirtualTextures/Build.cs @@ -59,8 +59,10 @@ public void Load(XmlDocument doc) case "PageSize": Config.PageSize = Int32.Parse(value); break; case "OneFilePerGTex": Config.OneFilePerGTex = Boolean.Parse(value); break; case "BackfillPages": Config.BackfillPages = Boolean.Parse(value); break; + case "DeduplicateTiles": Config.DeduplicateTiles = Boolean.Parse(value); break; case "EmbedMips": Config.EmbedMips = Boolean.Parse(value); break; case "EmbedTopLevelMips": Config.EmbedTopLevelMips 
= Boolean.Parse(value); break; + case "ZeroBorders": Config.ZeroBorders = Boolean.Parse(value); break; default: throw new InvalidDataException($"Unsupported configuration key: {key}"); } } @@ -150,6 +152,7 @@ public class BuildTile public int PageFileIndex; public int PageIndex; public int ChunkIndex; + public BuildTile DuplicateOf; } public class BuildLayer @@ -169,10 +172,12 @@ public class TileSetConfiguration public List Layers; public TileCompressionPreference Compression = TileCompressionPreference.Best; public Int32 PageSize = 0x100000; - public bool OneFilePerGTex = true; - public bool BackfillPages = false; + public bool OneFilePerGTex = false; + public bool BackfillPages = true; + public bool DeduplicateTiles = true; public bool EmbedMips = true; - public bool EmbedTopLevelMips = false; + public bool EmbedTopLevelMips = true; + public bool ZeroBorders = false; } public class BuildLayerTexture @@ -303,6 +308,7 @@ public class TileSetBuilder private readonly TileSetConfiguration Config; private readonly TileCompressor Compressor; private readonly ParameterBlockContainer ParameterBlocks; + private PageFileSetBuilder SetBuilder; public VirtualTileSet TileSet; public List Textures; @@ -840,16 +846,16 @@ private void BuildGTSHeaders() header.TileBorder = BuildData.TileBorder; } - private void BuildPageFiles() + private void PreparePageFiles() { - var builder = new PageFileSetBuilder(BuildData, Config); + SetBuilder = new PageFileSetBuilder(BuildData, Config); if (Config.OneFilePerGTex) { - PageFiles = builder.BuildFilePerGTex(Textures); + PageFiles = SetBuilder.BuildFilePerGTex(Textures); } else { - PageFiles = builder.BuildSingleFile(); + PageFiles = SetBuilder.BuildSingleFile(); } TileSet.PageFileInfos = []; @@ -898,13 +904,20 @@ private void BuildGTS() gtsLevel.Height = (uint)level.TilesY; } - OnStepStarted("Generating tile lists"); - BuildFlatTileList(); + OnStepStarted("Preparing page files"); + PreparePageFiles(); + + OnStepStarted("Deduplicating tiles"); + SetBuilder.DeduplicateTiles(); + OnStepStarted("Encoding tiles"); CompressTiles(); OnStepStarted("Building page files"); - BuildPageFiles(); + SetBuilder.CommitPageFiles(); + + OnStepStarted("Generating tile lists"); + BuildFlatTileList(); OnStepStarted("Building metadata"); BuildTileInfos(); @@ -954,15 +967,15 @@ public void BuildFlatTileList() public void CompressTiles() { - var numTiles = PerLevelFlatTiles.Sum(tiles => tiles.Length); + var numTiles = PageFiles.Sum(pf => pf.PendingTiles.Count); var nextTile = 0; - foreach (var level in PerLevelFlatTiles) + foreach (var file in PageFiles) { - foreach (var tile in level) + foreach (var tile in file.PendingTiles) { OnStepProgress(nextTile++, numTiles); - if (tile != null) + if (tile.DuplicateOf == null) { Compressor.Compress(tile); } @@ -975,6 +988,7 @@ public void BuildTileInfos() TileSet.PerLevelFlatTileIndices = new List(BuildData.PageFileLevels); PerLevelFlatTiles = new List(BuildData.PageFileLevels); + var flatTileMap = new Dictionary(); var flatTileInfos = new List(); var packedTileIds = new List(); @@ -997,21 +1011,33 @@ public void BuildTileInfos() var tile = BuildData.Layers[layer].Levels[level].Get(x, y); if (tile != null) { - var flatTileIdx = (uint)flatTileInfos.Count; + uint flatTileIdx; var packedTileIdx = (uint)packedTileIds.Count; var packedTile = new GTSPackedTileID((uint)layer, (uint)level, (uint)x, (uint)y); packedTileIds.Add(packedTile); - var tileInfo = new GTSFlatTileInfo + var tileKey = (long)tile.ChunkIndex + | ((long)tile.PageIndex << 16) + | 
((long)tile.ChunkIndex << 32); + if (flatTileMap.TryGetValue(tileKey, out uint dupTileIdx)) + { + flatTileIdx = dupTileIdx; + } + else { - PageFileIndex = (UInt16)tile.PageFileIndex, - PageIndex = (UInt16)tile.PageIndex, - ChunkIndex = (UInt16)tile.ChunkIndex, - D = 1, - PackedTileIndex = packedTileIdx - }; - flatTileInfos.Add(tileInfo); + flatTileIdx = (uint)flatTileInfos.Count; + flatTileMap[tileKey] = flatTileIdx; + var tileInfo = new GTSFlatTileInfo + { + PageFileIndex = (UInt16)tile.PageFileIndex, + PageIndex = (UInt16)tile.PageIndex, + ChunkIndex = (UInt16)tile.ChunkIndex, + D = 1, + PackedTileIndex = packedTileIdx + }; + flatTileInfos.Add(tileInfo); + } flatTileIndices[tileIdx] = flatTileIdx; flatTiles[tileIdx] = tile; @@ -1074,10 +1100,16 @@ public void Build(string dir) { OnStepStarted("Calculating geometry"); CalculateGeometry(); + OnStepStarted("Building tiles"); BuildTiles(); - OnStepStarted("Building tile borders"); - BuildTileBorders(); + + if (BuildData.TileBorder > 0 && !Config.ZeroBorders) + { + OnStepStarted("Building tile borders"); + BuildTileBorders(); + } + OnStepStarted("Embedding tile mipmaps"); if (Config.EmbedMips) { @@ -1086,7 +1118,7 @@ public void Build(string dir) BuildGTS(); - long tileBytes = 0, embeddedMipBytes = 0, tileCompressedBytes = 0, pages = 0, chunks = 0, levelTiles = 0; + long tileBytes = 0, embeddedMipBytes = 0, tileCompressedBytes = 0, pages = 0, chunks = 0, levelTiles = 0, duplicates = 0; foreach (var pageFile in PageFiles) { @@ -1104,18 +1136,25 @@ public void Build(string dir) { if (tile != null) { - tileBytes += tile.Image.Data.Length; - if (tile.EmbeddedMip != null) + if (tile.DuplicateOf == null) { - embeddedMipBytes += tile.EmbeddedMip.Data.Length; - } + tileBytes += tile.Image.Data.Length; + if (tile.EmbeddedMip != null) + { + embeddedMipBytes += tile.EmbeddedMip.Data.Length; + } - tileCompressedBytes += tile.Compressed.Data.Length; + tileCompressedBytes += tile.Compressed.Data.Length; + } + else + { + duplicates++; + } } } } - Console.WriteLine($"Flat tiles: {levelTiles} total, {TileSet.FlatTileInfos.Length} in use"); + Console.WriteLine($"Tile map: {levelTiles} total, {TileSet.FlatTileInfos.Length} in use, {duplicates} duplicates"); Console.WriteLine($"Generated {PageFiles.Count} page files, {pages} pages, {chunks} chunks"); Console.WriteLine($"Raw tile data: {tileBytes / 1024} KB tiles, {embeddedMipBytes / 1024} KB embedded mips, {tileCompressedBytes / 1024} KB transcoded, {pages*Config.PageSize/1024} KB pages total"); diff --git a/LSLib/VirtualTextures/PageFileBuild.cs b/LSLib/VirtualTextures/PageFileBuild.cs index a9270ff7..99fe0f09 100644 --- a/LSLib/VirtualTextures/PageFileBuild.cs +++ b/LSLib/VirtualTextures/PageFileBuild.cs @@ -4,6 +4,7 @@ using System.IO; using System.Linq; using System.Runtime.InteropServices; +using System.Security.Cryptography; namespace LSLib.VirtualTextures; @@ -70,7 +71,53 @@ public class PageFileBuilder(TileSetConfiguration config) public Guid Checksum; public int PageFileIndex; + public List PendingTiles = []; + public List> Duplicates = []; + public void AddTile(BuildTile tile) + { + PendingTiles.Add(tile); + } + + public void DeduplicateTiles() + { + var digests = new Dictionary(); + + foreach (var tile in PendingTiles) + { + var digest = new Guid(MD5.HashData(tile.Image.Data)); + if (!digests.TryAdd(digest, tile)) + { + tile.DuplicateOf = digests[digest]; + Duplicates.Add(Tuple.Create(tile, digests[digest])); + } + } + + PendingTiles = [.. 
digests.Values]; + } + + public void CommitTiles() + { + foreach (var tile in PendingTiles) + { + CommitTile(tile); + } + + PendingTiles.Clear(); + + foreach (var dup in Duplicates) + { + dup.Item1.AddedToPageFile = true; + dup.Item1.PageFileIndex = dup.Item2.PageFileIndex; + dup.Item1.PageIndex = dup.Item2.PageIndex; + dup.Item1.ChunkIndex = dup.Item2.ChunkIndex; + dup.Item1.DuplicateOf = dup.Item2; + } + + Duplicates.Clear(); + } + + private void CommitTile(BuildTile tile) { if (Config.BackfillPages) { @@ -178,6 +225,7 @@ public class PageFileSetBuilder(TileSetBuildData buildData, TileSetConfiguration { private readonly TileSetBuildData BuildData = buildData; private readonly TileSetConfiguration Config = config; + private List PageFiles = []; private void BuildPageFile(PageFileBuilder file, int level, int minTileX, int minTileY, int maxTileX, int maxTileY) { @@ -235,8 +283,6 @@ private void BuildFullPageFile(PageFileBuilder file) public List BuildFilePerGTex(List textures) { - var pageFiles = new List(); - uint firstPageIndex = 0; foreach (var texture in textures) { @@ -245,9 +291,9 @@ public List BuildFilePerGTex(List textures) Name = texture.Name, FileName = BuildData.GTSName + "_" + texture.Name + ".gtp", Checksum = Guid.NewGuid(), - PageFileIndex = pageFiles.Count + PageFileIndex = PageFiles.Count }; - pageFiles.Add(file); + PageFiles.Add(file); BuildPageFile(file, texture); firstPageIndex += (uint)file.Pages.Count; @@ -260,29 +306,48 @@ public List BuildFilePerGTex(List textures) Name = "Mips", FileName = BuildData.GTSName + "_Mips.gtp", Checksum = Guid.NewGuid(), - PageFileIndex = pageFiles.Count + PageFileIndex = PageFiles.Count }; - pageFiles.Add(file); + PageFiles.Add(file); BuildMipPageFile(file); } - return pageFiles; + return PageFiles; } public List BuildSingleFile() { - var pageFiles = new List(); - var file = new PageFileBuilder(Config) { Name = "Global", FileName = BuildData.GTSName + ".gtp", Checksum = Guid.NewGuid(), - PageFileIndex = pageFiles.Count + PageFileIndex = PageFiles.Count }; - pageFiles.Add(file); + PageFiles.Add(file); BuildFullPageFile(file); - return pageFiles; + return PageFiles; + } + + public void DeduplicateTiles() + { + if (Config.DeduplicateTiles) + { + foreach (var file in PageFiles) + { + file.DeduplicateTiles(); + } + } + } + + public List CommitPageFiles() + { + foreach (var file in PageFiles) + { + file.CommitTiles(); + } + + return PageFiles; } } diff --git a/make-release.bat b/make-release.bat new file mode 100644 index 00000000..90249ffc --- /dev/null +++ b/make-release.bat @@ -0,0 +1,44 @@ +mkdir Release\Packed +mkdir Release\Packed\Tools + +copy RconClient\bin\Release\net8.0\*.config Release\Packed\Tools\ +copy RconClient\bin\Release\net8.0\*.runtimeconfig.json Release\Packed\Tools\ +copy RconClient\bin\Release\net8.0\*.dll Release\Packed\Tools\ +copy RconClient\bin\Release\net8.0\*.exe Release\Packed\Tools\ + +copy StoryCompiler\bin\Release\net8.0\*.config Release\Packed\Tools\ +copy StoryCompiler\bin\Release\net8.0\*.runtimeconfig.json Release\Packed\Tools\ +copy StoryCompiler\bin\Release\net8.0\*.dll Release\Packed\Tools\ +copy StoryCompiler\bin\Release\net8.0\*.exe Release\Packed\Tools\ + +copy VTexTool\bin\Release\net8.0\*.config Release\Packed\Tools\ +copy VTexTool\bin\Release\net8.0\*.runtimeconfig.json Release\Packed\Tools\ +copy VTexTool\bin\Release\net8.0\*.dll Release\Packed\Tools\ +copy VTexTool\bin\Release\net8.0\*.exe Release\Packed\Tools\ + +copy StoryDecompiler\bin\Release\net8.0\*.config Release\Packed\Tools\ +copy 
StoryDecompiler\bin\Release\net8.0\*.runtimeconfig.json Release\Packed\Tools\ +copy StoryDecompiler\bin\Release\net8.0\*.dll Release\Packed\Tools\ +copy StoryDecompiler\bin\Release\net8.0\*.exe Release\Packed\Tools\ + +copy DebuggerFrontend\bin\Release\net8.0\*.config Release\Packed\Tools\ +copy DebuggerFrontend\bin\Release\net8.0\*.runtimeconfig.json Release\Packed\Tools\ +copy DebuggerFrontend\bin\Release\net8.0\*.dll Release\Packed\Tools\ +copy DebuggerFrontend\bin\Release\net8.0\*.exe Release\Packed\Tools\ + +copy StatParser\bin\Release\net8.0\*.config Release\Packed\Tools\ +copy StatParser\bin\Release\net8.0\*.runtimeconfig.json Release\Packed\Tools\ +copy StatParser\bin\Release\net8.0\*.dll Release\Packed\Tools\ +copy StatParser\bin\Release\net8.0\*.exe Release\Packed\Tools\ + +copy Divine\bin\Release\net8.0\*.config Release\Packed\Tools\ +copy Divine\bin\Release\net8.0\*.runtimeconfig.json Release\Packed\Tools\ +copy Divine\bin\Release\net8.0\*.dll Release\Packed\Tools\ +copy Divine\bin\Release\net8.0\*.exe Release\Packed\Tools\ + +copy ConverterApp\bin\Release\net8.0-windows\*.config Release\Packed\ +copy ConverterApp\bin\Release\net8.0-windows\*.runtimeconfig.json Release\Packed\ +copy ConverterApp\bin\Release\net8.0-windows\*.dll Release\Packed\ +copy ConverterApp\bin\Release\net8.0-windows\*.exe Release\Packed\ + +pause \ No newline at end of file From 05e4a94c68f6c4fd89c4cadd4aa2c926027dd3c8 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sun, 17 Dec 2023 00:02:59 +0100 Subject: [PATCH 051/139] Fix offset calculation bug while building page file --- LSLib/VirtualTextures/PageFileBuild.cs | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/LSLib/VirtualTextures/PageFileBuild.cs b/LSLib/VirtualTextures/PageFileBuild.cs index 99fe0f09..b4f2ec98 100644 --- a/LSLib/VirtualTextures/PageFileBuild.cs +++ b/LSLib/VirtualTextures/PageFileBuild.cs @@ -136,7 +136,8 @@ private void CommitTile(BuildTile tile) { PageFile = this, PageFileIndex = PageFileIndex, - PageIndex = Pages.Count + PageIndex = Pages.Count, + Budget = 4 }; if (newPage.PageIndex == 0) @@ -194,6 +195,11 @@ public void Save(Stream s, BinaryWriter writer) SaveChunk(writer, chunk); } + if (s.Position > Config.PageSize * (i+1)) + { + throw new Exception($"Overrun while writing page {i} of page file {Name}"); + } + var padSize = (Config.PageSize - (s.Position % Config.PageSize) % Config.PageSize); if (padSize > 0) { From 86f6f94ca63bfa95065eff7065451251fa26cd0b Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sun, 17 Dec 2023 00:14:20 +0100 Subject: [PATCH 052/139] Fix GTP padding logic --- LSLib/VirtualTextures/PageFileBuild.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/LSLib/VirtualTextures/PageFileBuild.cs b/LSLib/VirtualTextures/PageFileBuild.cs index b4f2ec98..31ebeecc 100644 --- a/LSLib/VirtualTextures/PageFileBuild.cs +++ b/LSLib/VirtualTextures/PageFileBuild.cs @@ -200,7 +200,7 @@ public void Save(Stream s, BinaryWriter writer) throw new Exception($"Overrun while writing page {i} of page file {Name}"); } - var padSize = (Config.PageSize - (s.Position % Config.PageSize) % Config.PageSize); + var padSize = (Config.PageSize - (s.Position % Config.PageSize)) % Config.PageSize; if (padSize > 0) { var pad = new byte[padSize]; From 275e7ae0a14423d35e34ad78f76e7edec9ebdee7 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Tue, 19 Dec 2023 18:38:56 +0100 Subject: [PATCH 053/139] Simplify package build logic --- ConverterApp/DebugDumper.cs | 6 +- ConverterApp/OsirisPane.cs | 2 +- 
ConverterApp/PackagePane.cs | 2 +- Divine/CLI/CommandLineActions.cs | 2 +- Divine/CLI/CommandLinePackageProcessor.cs | 25 +- LSLib/Granny/GR2/Format.cs | 6 +- LSLib/LS/BinUtils.cs | 16 + LSLib/LS/Mods/ModResources.cs | 52 +-- LSLib/LS/PackageCommon.cs | 423 +++------------------ LSLib/LS/PackageFormat.cs | 438 ++++++++++++++++++++++ LSLib/LS/PackageReader.cs | 349 +++++------------ LSLib/LS/PackageWriter.cs | 346 +++++------------ LSLib/LS/Save/SavegameHelpers.cs | 12 +- StatParser/StatChecker.cs | 2 +- StoryCompiler/ModCompiler.cs | 6 +- StoryDecompiler/Program.cs | 2 +- 16 files changed, 751 insertions(+), 938 deletions(-) create mode 100644 LSLib/LS/PackageFormat.cs diff --git a/ConverterApp/DebugDumper.cs b/ConverterApp/DebugDumper.cs index c847c8d9..bee240d7 100644 --- a/ConverterApp/DebugDumper.cs +++ b/ConverterApp/DebugDumper.cs @@ -90,7 +90,7 @@ private void DoLsxConversion() private Resource LoadPackagedResource(string path) { - AbstractFileInfo fileInfo = SavePackage.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == path); + var fileInfo = SavePackage.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == path); if (fileInfo == null) { throw new ArgumentException($"Could not locate file in package: '{path}"); @@ -256,7 +256,7 @@ private void RunTasks() } ReportProgress(70, "Loading story ..."); - AbstractFileInfo storySave = SavePackage.Files.FirstOrDefault(p => p.Name == "StorySave.bin"); + var storySave = SavePackage.Files.FirstOrDefault(p => p.Name == "StorySave.bin"); Stream storyStream; if (storySave != null) { @@ -299,7 +299,7 @@ public void Run() { SavePackage = packageReader.Read(); - AbstractFileInfo abstractFileInfo = SavePackage.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == "globals.lsf"); + var abstractFileInfo = SavePackage.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == "globals.lsf"); if (abstractFileInfo == null) { MessageBox.Show("The specified package is not a valid savegame (globals.lsf not found)", "Load Failed", MessageBoxButtons.OK, MessageBoxIcon.Error); diff --git a/ConverterApp/OsirisPane.cs b/ConverterApp/OsirisPane.cs index a3ccefeb..27c54816 100644 --- a/ConverterApp/OsirisPane.cs +++ b/ConverterApp/OsirisPane.cs @@ -84,7 +84,7 @@ public Resource LoadResourceFromSave(string path) var packageReader = new PackageReader(path); Package package = packageReader.Read(); - AbstractFileInfo abstractFileInfo = package.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == "globals.lsf"); + var abstractFileInfo = package.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == "globals.lsf"); if (abstractFileInfo == null) { MessageBox.Show("The specified package is not a valid savegame (globals.lsf not found)", "Load Failed", MessageBoxButtons.OK, MessageBoxIcon.Error); diff --git a/ConverterApp/PackagePane.cs b/ConverterApp/PackagePane.cs index 0db0ef7f..a395df16 100644 --- a/ConverterApp/PackagePane.cs +++ b/ConverterApp/PackagePane.cs @@ -32,7 +32,7 @@ public PackagePane(ISettingsDataSource settingsDataSource) #endif } - private void PackageProgressUpdate(string status, long numerator, long denominator, AbstractFileInfo file) + private void PackageProgressUpdate(string status, long numerator, long denominator, IAbstractFileInfo file) { if (file != null) { diff --git a/Divine/CLI/CommandLineActions.cs b/Divine/CLI/CommandLineActions.cs index c68f12b8..125708e2 100644 --- a/Divine/CLI/CommandLineActions.cs +++ b/Divine/CLI/CommandLineActions.cs @@ -125,7 +125,7 @@ private static void SetUpAndValidate(CommandLineArguments args) private static 
void Process(CommandLineArguments args) { - Func filter; + Func filter; if (args.Expression != null) { diff --git a/Divine/CLI/CommandLinePackageProcessor.cs b/Divine/CLI/CommandLinePackageProcessor.cs index b0bafab5..a208f0c5 100644 --- a/Divine/CLI/CommandLinePackageProcessor.cs +++ b/Divine/CLI/CommandLinePackageProcessor.cs @@ -16,7 +16,7 @@ public static void Create() CreatePackageResource(); } - public static void ListFiles(Func filter = null) + public static void ListFiles(Func filter = null) { if (CommandLineActions.SourcePath == null) { @@ -41,7 +41,7 @@ private static void ExtractSingleFile(string packagePath, string destinationPath { Package package = reader.Read(); // Try to match by full path - AbstractFileInfo file = package.Files.Find(fileInfo => string.Compare(fileInfo.Name, packagedPath, StringComparison.OrdinalIgnoreCase) == 0 && !fileInfo.IsDeletion()); + var file = package.Files.Find(fileInfo => string.Compare(fileInfo.Name, packagedPath, StringComparison.OrdinalIgnoreCase) == 0 && !fileInfo.IsDeletion()); if (file == null) { // Try to match by filename only @@ -79,22 +79,21 @@ private static void ExtractSingleFile(string packagePath, string destinationPath } } - private static void ListPackageFiles(string packagePath, Func filter = null) + private static void ListPackageFiles(string packagePath, Func filter = null) { try { using (var reader = new PackageReader(packagePath)) { Package package = reader.Read(); + var files = package.Files; - List files = package.Files; + if (filter != null) + { + files = files.FindAll(obj => filter(obj)); + } - if (filter != null) - { - files = files.FindAll(obj => filter(obj)); - } - - foreach (AbstractFileInfo fileInfo in files.OrderBy(obj => obj.Name)) + foreach (var fileInfo in files.OrderBy(obj => obj.Name)) { Console.WriteLine($"{fileInfo.Name}\t{fileInfo.Size()}\t{fileInfo.CRC()}"); } @@ -111,7 +110,7 @@ private static void ListPackageFiles(string packagePath, Func filter = null) + public static void Extract(Func filter = null) { if (CommandLineActions.SourcePath == null) { @@ -127,7 +126,7 @@ public static void Extract(Func filter = null) } } - public static void BatchExtract(Func filter = null) + public static void BatchExtract(Func filter = null) { string[] files = Directory.GetFiles(CommandLineActions.SourcePath, $"*.{Args.InputFormat}"); @@ -173,7 +172,7 @@ private static void CreatePackageResource(string file = "") CommandLineLogger.LogInfo("Package created successfully."); } - private static void ExtractPackageResource(string file = "", string folder = "", Func filter = null) + private static void ExtractPackageResource(string file = "", string folder = "", Func filter = null) { if (string.IsNullOrEmpty(file)) { diff --git a/LSLib/Granny/GR2/Format.cs b/LSLib/Granny/GR2/Format.cs index a1ec12ef..6913b148 100644 --- a/LSLib/Granny/GR2/Format.cs +++ b/LSLib/Granny/GR2/Format.cs @@ -344,7 +344,8 @@ public void SetFormat(Format format, bool alternateSignature) Format.LittleEndian32 => LittleEndian32Magic2, Format.LittleEndian64 => LittleEndian64Magic2, Format.BigEndian32 => BigEndian32Magic2, - Format.BigEndian64 => BigEndian64Magic2 + Format.BigEndian64 => BigEndian64Magic2, + _ => throw new InvalidDataException("Invalid GR2 signature") }; } else @@ -354,7 +355,8 @@ public void SetFormat(Format format, bool alternateSignature) Format.LittleEndian32 => LittleEndian32Magic, Format.LittleEndian64 => LittleEndian64Magic, Format.BigEndian32 => BigEndian32Magic, - Format.BigEndian64 => BigEndian64Magic + Format.BigEndian64 => 
BigEndian64Magic, + _ => throw new InvalidDataException("Invalid GR2 signature") }; } } diff --git a/LSLib/LS/BinUtils.cs b/LSLib/LS/BinUtils.cs index 99d1c770..6b240639 100644 --- a/LSLib/LS/BinUtils.cs +++ b/LSLib/LS/BinUtils.cs @@ -4,6 +4,7 @@ using System.Runtime.InteropServices; using LSLib.LS.Enums; using System.IO.Compression; +using System.Text; namespace LSLib.LS; @@ -61,6 +62,21 @@ public static void WriteStructs(BinaryWriter writer, T[] elements) writer.Write(writeBuffer); } + public static String NullTerminatedBytesToString(byte[] b) + { + int len; + for (len = 0; len < b.Length && b[len] != 0; len++) {} + return Encoding.UTF8.GetString(b, 0, len); + } + + public static byte[] StringToNullTerminatedBytes(string s, int length) + { + var b = new byte[length]; + int len = Encoding.UTF8.GetBytes(s, b); + Array.Clear(b, len, b.Length - len); + return b; + } + public static NodeAttribute ReadAttribute(NodeAttribute.DataType type, BinaryReader reader) { var attr = new NodeAttribute(type); diff --git a/LSLib/LS/Mods/ModResources.cs b/LSLib/LS/Mods/ModResources.cs index 6eff5156..39b7954c 100644 --- a/LSLib/LS/Mods/ModResources.cs +++ b/LSLib/LS/Mods/ModResources.cs @@ -10,19 +10,19 @@ namespace LSLib.LS; public class ModInfo(string name) { public string Name = name; - public AbstractFileInfo Meta; - public Dictionary Scripts = []; - public Dictionary Stats = []; - public Dictionary Globals = []; - public Dictionary LevelObjects = []; - public AbstractFileInfo OrphanQueryIgnoreList; - public AbstractFileInfo StoryHeaderFile; - public AbstractFileInfo TypeCoercionWhitelistFile; - public AbstractFileInfo ModifiersFile; - public AbstractFileInfo ValueListsFile; - public AbstractFileInfo ActionResourcesFile; - public AbstractFileInfo ActionResourceGroupsFile; - public List TagFiles = []; + public IAbstractFileInfo Meta; + public Dictionary Scripts = []; + public Dictionary Stats = []; + public Dictionary Globals = []; + public Dictionary LevelObjects = []; + public IAbstractFileInfo OrphanQueryIgnoreList; + public IAbstractFileInfo StoryHeaderFile; + public IAbstractFileInfo TypeCoercionWhitelistFile; + public IAbstractFileInfo ModifiersFile; + public IAbstractFileInfo ValueListsFile; + public IAbstractFileInfo ActionResourcesFile; + public IAbstractFileInfo ActionResourceGroupsFile; + public List TagFiles = []; } public class ModResources : IDisposable @@ -97,32 +97,32 @@ private ModInfo GetMod(string modName) return mod; } - private void AddMetadataToMod(string modName, AbstractFileInfo file) + private void AddMetadataToMod(string modName, IAbstractFileInfo file) { GetMod(modName).Meta = file; } - private void AddStatToMod(string modName, string path, AbstractFileInfo file) + private void AddStatToMod(string modName, string path, IAbstractFileInfo file) { GetMod(modName).Stats[path] = file; } - private void AddScriptToMod(string modName, string scriptName, AbstractFileInfo file) + private void AddScriptToMod(string modName, string scriptName, IAbstractFileInfo file) { GetMod(modName).Scripts[scriptName] = file; } - private void AddGlobalsToMod(string modName, string path, AbstractFileInfo file) + private void AddGlobalsToMod(string modName, string path, IAbstractFileInfo file) { GetMod(modName).Globals[path] = file; } - private void AddLevelObjectsToMod(string modName, string path, AbstractFileInfo file) + private void AddLevelObjectsToMod(string modName, string path, IAbstractFileInfo file) { GetMod(modName).LevelObjects[path] = file; } - private void 
DiscoverPackagedFile(AbstractFileInfo file) + private void DiscoverPackagedFile(IAbstractFileInfo file) { if (file.IsDeletion()) return; @@ -347,7 +347,7 @@ private void DiscoverModGoals(string modName, string modPath) var fileInfo = new FilesystemFileInfo { FilesystemPath = Path.Join(goalPath, goalFile), - Name = goalFile + FileName = goalFile }; AddScriptToMod(modName, goalFile, fileInfo); } @@ -366,7 +366,7 @@ private void DiscoverModStats(string modName, string modPublicPath) var fileInfo = new FilesystemFileInfo { FilesystemPath = Path.Join(statsPath, statFile), - Name = statFile + FileName = statFile }; AddStatToMod(modName, statFile, fileInfo); } @@ -385,7 +385,7 @@ private void DiscoverModGlobals(string modName, string modPath) var fileInfo = new FilesystemFileInfo { FilesystemPath = Path.Join(globalsPath, globalFile), - Name = globalFile + FileName = globalFile }; AddGlobalsToMod(modName, globalFile, fileInfo); } @@ -405,7 +405,7 @@ private void DiscoverModLevelObjects(string modName, string modPath) var fileInfo = new FilesystemFileInfo { FilesystemPath = Path.Join(levelsPath, levelFile), - Name = levelFile + FileName = levelFile }; AddLevelObjectsToMod(modName, levelFile, fileInfo); } @@ -426,7 +426,7 @@ public void DiscoverModDirectory(string modName, string modPath, string publicPa var fileInfo = new FilesystemFileInfo { FilesystemPath = headerPath, - Name = headerPath + FileName = headerPath }; GetMod(modName).StoryHeaderFile = fileInfo; } @@ -437,7 +437,7 @@ public void DiscoverModDirectory(string modName, string modPath, string publicPa var fileInfo = new FilesystemFileInfo { FilesystemPath = orphanQueryIgnoresPath, - Name = orphanQueryIgnoresPath + FileName = orphanQueryIgnoresPath }; GetMod(modName).OrphanQueryIgnoreList = fileInfo; } @@ -448,7 +448,7 @@ public void DiscoverModDirectory(string modName, string modPath, string publicPa var fileInfo = new FilesystemFileInfo { FilesystemPath = typeCoercionWhitelistPath, - Name = typeCoercionWhitelistPath + FileName = typeCoercionWhitelistPath }; GetMod(modName).TypeCoercionWhitelistFile = fileInfo; } diff --git a/LSLib/LS/PackageCommon.cs b/LSLib/LS/PackageCommon.cs index f6b43fbb..cd4a5eb0 100644 --- a/LSLib/LS/PackageCommon.cs +++ b/LSLib/LS/PackageCommon.cs @@ -3,171 +3,21 @@ using System.IO; using System.IO.Hashing; using System.Linq; -using System.Runtime.InteropServices; using System.Text; using LSLib.LS.Enums; -using LSLib.Native; namespace LSLib.LS; -[StructLayout(LayoutKind.Sequential, Pack = 1)] -internal struct LSPKHeader7 +public interface IAbstractFileInfo { - public UInt32 Version; - public UInt32 DataOffset; - public UInt32 NumParts; - public UInt32 FileListSize; - public Byte LittleEndian; - public UInt32 NumFiles; -} - -[StructLayout(LayoutKind.Sequential, Pack = 1)] -internal struct FileEntry7 -{ - [MarshalAs(UnmanagedType.ByValArray, SizeConst = 256)] - public byte[] Name; - - public UInt32 OffsetInFile; - public UInt32 SizeOnDisk; - public UInt32 UncompressedSize; - public UInt32 ArchivePart; -} - -[StructLayout(LayoutKind.Sequential, Pack = 1)] -internal struct LSPKHeader10 -{ - public UInt32 Version; - public UInt32 DataOffset; - public UInt32 FileListSize; - public UInt16 NumParts; - public Byte Flags; - public Byte Priority; - public UInt32 NumFiles; -} - -[StructLayout(LayoutKind.Sequential, Pack = 1)] -internal struct LSPKHeader13 -{ - public UInt32 Version; - public UInt32 FileListOffset; - public UInt32 FileListSize; - public UInt16 NumParts; - public Byte Flags; - public Byte Priority; - - 
[MarshalAs(UnmanagedType.ByValArray, SizeConst = 16)] - public byte[] Md5; -} - -[StructLayout(LayoutKind.Sequential, Pack = 1)] -internal struct LSPKHeader15 -{ - public UInt32 Version; - public UInt64 FileListOffset; - public UInt32 FileListSize; - public Byte Flags; - public Byte Priority; - - [MarshalAs(UnmanagedType.ByValArray, SizeConst = 16)] - public byte[] Md5; -} - -[StructLayout(LayoutKind.Sequential, Pack = 1)] -internal struct LSPKHeader16 -{ - public UInt32 Version; - public UInt64 FileListOffset; - public UInt32 FileListSize; - public Byte Flags; - public Byte Priority; - - [MarshalAs(UnmanagedType.ByValArray, SizeConst = 16)] - public byte[] Md5; - - public UInt16 NumParts; -} - -[Flags] -public enum PackageFlags -{ - /// - /// Allow memory-mapped access to the files in this archive. - /// - AllowMemoryMapping = 0x02, - /// - /// All files are compressed into a single LZ4 stream - /// - Solid = 0x04, - /// - /// Archive contents should be preloaded on game startup. - /// - Preload = 0x08 -}; - -public class PackageMetadata -{ - /// - /// Package flags bitmask. Allowed values are in the PackageFlags enumeration. - /// - public PackageFlags Flags = 0; - /// - /// Load priority. Packages with higher priority are loaded later (i.e. they override earlier packages). - /// - public Byte Priority = 0; -} - -[StructLayout(LayoutKind.Sequential, Pack = 1)] -internal struct FileEntry13 -{ - [MarshalAs(UnmanagedType.ByValArray, SizeConst = 256)] - public byte[] Name; - - public UInt32 OffsetInFile; - public UInt32 SizeOnDisk; - public UInt32 UncompressedSize; - public UInt32 ArchivePart; - public UInt32 Flags; - public UInt32 Crc; -} - -[StructLayout(LayoutKind.Sequential, Pack = 1)] -internal struct FileEntry15 -{ - [MarshalAs(UnmanagedType.ByValArray, SizeConst = 256)] - public byte[] Name; - - public UInt64 OffsetInFile; - public UInt64 SizeOnDisk; - public UInt64 UncompressedSize; - public UInt32 ArchivePart; - public UInt32 Flags; - public UInt32 Crc; - public UInt32 Unknown2; -} - -[StructLayout(LayoutKind.Sequential, Pack = 1)] -internal struct FileEntry18 -{ - [MarshalAs(UnmanagedType.ByValArray, SizeConst = 256)] - public byte[] Name; - - public UInt32 OffsetInFile1; - public UInt16 OffsetInFile2; - public Byte ArchivePart; - public Byte Flags; - public UInt32 SizeOnDisk; - public UInt32 UncompressedSize; -} - -public abstract class AbstractFileInfo -{ - public String Name; - + public abstract String GetName(); public abstract UInt64 Size(); public abstract UInt32 CRC(); public abstract Stream MakeStream(); public abstract void ReleaseStream(); public abstract bool IsDeletion(); + + public string Name { get { return GetName(); } } } @@ -224,17 +74,11 @@ public override long Position public override void Flush() { } } -public class PackagedFileInfo : AbstractFileInfo, IDisposable +public class PackagedFileInfo : PackagedFileInfoCommon, IAbstractFileInfo, IDisposable { - public UInt32 ArchivePart; - public UInt32 Crc; - public UInt32 Flags; - public UInt64 OffsetInFile; public Stream PackageStream; - public UInt64 SizeOnDisk; - public UInt64 UncompressedSize; public bool Solid; - public UInt32 SolidOffset; + public ulong SolidOffset; public Stream SolidStream; private Stream _uncompressedStream; @@ -243,11 +87,13 @@ public void Dispose() ReleaseStream(); } - public override UInt64 Size() => (Flags & 0x0F) == 0 ? SizeOnDisk : UncompressedSize; + public String GetName() => FileName; - public override UInt32 CRC() => Crc; + public UInt64 Size() => (Flags & 0x0F) == 0 ? 
SizeOnDisk : UncompressedSize; - public override Stream MakeStream() + public UInt32 CRC() => Crc; + + public Stream MakeStream() { if (IsDeletion()) { @@ -268,7 +114,7 @@ public override Stream MakeStream() if (SizeOnDisk > 0x7fffffff) { - throw new InvalidDataException($"File '{Name}' is over 2GB ({SizeOnDisk} bytes), which is not supported yet!"); + throw new InvalidDataException($"File '{FileName}' is over 2GB ({SizeOnDisk} bytes), which is not supported yet!"); } var compressed = new byte[SizeOnDisk]; @@ -286,14 +132,14 @@ public override Stream MakeStream() UInt32 computedCrc = Crc32.HashToUInt32(compressed); if (computedCrc != Crc) { - string msg = $"CRC check failed on file '{Name}', archive is possibly corrupted. Expected {Crc,8:X}, got {computedCrc,8:X}"; + string msg = $"CRC check failed on file '{FileName}', archive is possibly corrupted. Expected {Crc,8:X}, got {computedCrc,8:X}"; throw new InvalidDataException(msg); } } if (Solid) { - SolidStream.Seek(SolidOffset, SeekOrigin.Begin); + SolidStream.Seek((long)SolidOffset, SeekOrigin.Begin); byte[] uncompressed = new byte[UncompressedSize]; SolidStream.Read(uncompressed, 0, (int)UncompressedSize); _uncompressedStream = new MemoryStream(uncompressed); @@ -307,7 +153,7 @@ public override Stream MakeStream() return _uncompressedStream; } - public override void ReleaseStream() + public void ReleaseStream() { if (_uncompressedStream == null) { @@ -318,210 +164,44 @@ public override void ReleaseStream() _uncompressedStream = null; } - internal static PackagedFileInfo CreateFromEntry(FileEntry13 entry, Stream dataStream) - { - var info = new PackagedFileInfo - { - PackageStream = dataStream, - OffsetInFile = entry.OffsetInFile, - SizeOnDisk = entry.SizeOnDisk, - UncompressedSize = entry.UncompressedSize, - ArchivePart = entry.ArchivePart, - Flags = entry.Flags, - Crc = entry.Crc, - Solid = false - }; - - int nameLen; - for (nameLen = 0; nameLen < entry.Name.Length && entry.Name[nameLen] != 0; nameLen++) - { - } - info.Name = Encoding.UTF8.GetString(entry.Name, 0, nameLen); - - uint compressionMethod = entry.Flags & 0x0F; - if (compressionMethod > 2 || (entry.Flags & ~0x7F) != 0) - { - string msg = $"File '{info.Name}' has unsupported flags: {entry.Flags}"; - throw new InvalidDataException(msg); - } - - return info; - } - - internal static PackagedFileInfo CreateFromEntry(FileEntry15 entry, Stream dataStream) - { - var info = new PackagedFileInfo - { - PackageStream = dataStream, - OffsetInFile = entry.OffsetInFile, - SizeOnDisk = entry.SizeOnDisk, - UncompressedSize = entry.UncompressedSize, - ArchivePart = entry.ArchivePart, - Flags = entry.Flags, - Crc = entry.Crc, - Solid = false - }; - - int nameLen; - for (nameLen = 0; nameLen < entry.Name.Length && entry.Name[nameLen] != 0; nameLen++) - { - } - info.Name = Encoding.UTF8.GetString(entry.Name, 0, nameLen); - - uint compressionMethod = entry.Flags & 0x0F; - if (compressionMethod > 2 || (entry.Flags & ~0x7F) != 0) - { - string msg = $"File '{info.Name}' has unsupported flags: {entry.Flags}"; - throw new InvalidDataException(msg); - } - - return info; - } - - internal static PackagedFileInfo CreateFromEntry(FileEntry18 entry, Stream dataStream) + internal static PackagedFileInfo CreateFromEntry(ILSPKFile entry, Stream dataStream) { var info = new PackagedFileInfo { PackageStream = dataStream, - OffsetInFile = entry.OffsetInFile1 | ((ulong)entry.OffsetInFile2 << 32), - SizeOnDisk = entry.SizeOnDisk, - UncompressedSize = entry.UncompressedSize, - ArchivePart = entry.ArchivePart, - Flags 
= entry.Flags, - Crc = 0, Solid = false }; - int nameLen; - for (nameLen = 0; nameLen < entry.Name.Length && entry.Name[nameLen] != 0; nameLen++) - { - } - info.Name = Encoding.UTF8.GetString(entry.Name, 0, nameLen); + entry.ToCommon(info); - uint compressionMethod = (uint)entry.Flags & 0x0F; - if (compressionMethod > 2 || ((uint)entry.Flags & ~0x7F) != 0) + var compressionMethod = info.Flags & 0x0F; + if (compressionMethod > 2 || (info.Flags & ~0x7F) != 0) { - string msg = $"File '{info.Name}' has unsupported flags: {entry.Flags}"; + string msg = $"File '{info.FileName}' has unsupported flags: {info.Flags}"; throw new InvalidDataException(msg); } return info; } - internal static PackagedFileInfo CreateSolidFromEntry(FileEntry13 entry, Stream dataStream, uint solidOffset, Stream solidStream) - { - var info = CreateFromEntry(entry, dataStream); - info.Solid = true; - info.SolidOffset = solidOffset; - info.SolidStream = solidStream; - return info; - } - - internal static PackagedFileInfo CreateFromEntry(FileEntry7 entry, Stream dataStream) - { - var info = new PackagedFileInfo - { - PackageStream = dataStream - }; - - int nameLen; - for (nameLen = 0; nameLen < entry.Name.Length && entry.Name[nameLen] != 0; nameLen++) - { - } - info.Name = Encoding.UTF8.GetString(entry.Name, 0, nameLen); - - info.OffsetInFile = entry.OffsetInFile; - info.SizeOnDisk = entry.SizeOnDisk; - info.UncompressedSize = entry.UncompressedSize; - info.ArchivePart = entry.ArchivePart; - info.Crc = 0; - - info.Flags = entry.UncompressedSize > 0 ? BinUtils.MakeCompressionFlags(CompressionMethod.Zlib, LSCompressionLevel.DefaultCompression) : (uint) 0; - - return info; - } - - internal FileEntry7 MakeEntryV7() - { - var entry = new FileEntry7 - { - Name = new byte[256] - }; - byte[] encodedName = Encoding.UTF8.GetBytes(Name.Replace('\\', '/')); - Array.Copy(encodedName, entry.Name, encodedName.Length); - - entry.OffsetInFile = (uint)OffsetInFile; - entry.SizeOnDisk = (uint)SizeOnDisk; - entry.UncompressedSize = (Flags & 0x0F) == 0 ? 0 : (uint)UncompressedSize; - entry.ArchivePart = ArchivePart; - return entry; - } - - internal FileEntry13 MakeEntryV13() - { - var entry = new FileEntry13 - { - Name = new byte[256] - }; - byte[] encodedName = Encoding.UTF8.GetBytes(Name.Replace('\\', '/')); - Array.Copy(encodedName, entry.Name, encodedName.Length); - - entry.OffsetInFile = (uint)OffsetInFile; - entry.SizeOnDisk = (uint)SizeOnDisk; - entry.UncompressedSize = (Flags & 0x0F) == 0 ? 0 : (uint)UncompressedSize; - entry.ArchivePart = ArchivePart; - entry.Flags = Flags; - entry.Crc = Crc; - return entry; - } - - internal FileEntry15 MakeEntryV15() - { - var entry = new FileEntry15 - { - Name = new byte[256], - OffsetInFile = OffsetInFile, - SizeOnDisk = SizeOnDisk, - UncompressedSize = (Flags & 0x0F) == 0 ? 0 : UncompressedSize, - Flags = Flags, - Crc = Crc, - ArchivePart = ArchivePart, - Unknown2 = 0 - }; - byte[] encodedName = Encoding.UTF8.GetBytes(Name.Replace('\\', '/')); - Array.Copy(encodedName, entry.Name, encodedName.Length); - - return entry; - } - - internal FileEntry18 MakeEntryV18() + internal void MakeSolid(ulong solidOffset, Stream solidStream) { - var entry = new FileEntry18 - { - Name = new byte[256], - OffsetInFile1 = (uint)(OffsetInFile & 0xffffffff), - OffsetInFile2 = (ushort)((OffsetInFile >> 32) & 0xffff), - SizeOnDisk = (uint)SizeOnDisk, - UncompressedSize = (Flags & 0x0F) == 0 ? 
0 : (uint)UncompressedSize, - Flags = (byte)Flags, - ArchivePart = (byte)ArchivePart - }; - byte[] encodedName = Encoding.UTF8.GetBytes(Name.Replace('\\', '/')); - Array.Copy(encodedName, entry.Name, encodedName.Length); - - return entry; + Solid = true; + SolidOffset = solidOffset; + SolidStream = solidStream; } - public override bool IsDeletion() + public bool IsDeletion() { - return OffsetInFile == 0xdeadbeefdeadbeef; + return (OffsetInFile & 0x0000ffffffffffff) == 0xbeefdeadbeef; } } -public class FilesystemFileInfo : AbstractFileInfo, IDisposable +public class FilesystemFileInfo : IAbstractFileInfo, IDisposable { public long CachedSize; public string FilesystemPath; + public string FileName; private FileStream _stream; public void Dispose() @@ -529,13 +209,15 @@ public void Dispose() ReleaseStream(); } - public override UInt64 Size() => (UInt64) CachedSize; + public String GetName() => FileName; + + public UInt64 Size() => (UInt64) CachedSize; - public override UInt32 CRC() => throw new NotImplementedException("!"); + public UInt32 CRC() => throw new NotImplementedException("!"); - public override Stream MakeStream() => _stream ??= File.Open(FilesystemPath, FileMode.Open, FileAccess.Read, FileShare.Read); + public Stream MakeStream() => _stream ??= File.Open(FilesystemPath, FileMode.Open, FileAccess.Read, FileShare.Read); - public override void ReleaseStream() + public void ReleaseStream() { _stream?.Dispose(); _stream = null; @@ -545,7 +227,7 @@ public static FilesystemFileInfo CreateFromEntry(string filesystemPath, string n { var info = new FilesystemFileInfo { - Name = name, + FileName = name, FilesystemPath = filesystemPath }; @@ -554,23 +236,26 @@ public static FilesystemFileInfo CreateFromEntry(string filesystemPath, string n return info; } - public override bool IsDeletion() + public bool IsDeletion() { return false; } } -public class StreamFileInfo : AbstractFileInfo +public class StreamFileInfo : IAbstractFileInfo { public Stream Stream; + public String FileName; + + public String GetName() => FileName; - public override UInt64 Size() => (UInt64) Stream.Length; + public UInt64 Size() => (UInt64) Stream.Length; - public override UInt32 CRC() => throw new NotImplementedException("!"); + public UInt32 CRC() => throw new NotImplementedException("!"); - public override Stream MakeStream() => Stream; + public Stream MakeStream() => Stream; - public override void ReleaseStream() + public void ReleaseStream() { } @@ -578,13 +263,13 @@ public static StreamFileInfo CreateFromStream(Stream stream, string name) { var info = new StreamFileInfo { - Name = name, + FileName = name, Stream = stream }; return info; } - public override bool IsDeletion() + public bool IsDeletion() { return false; } @@ -596,8 +281,8 @@ public class Package public readonly static byte[] Signature = [ 0x4C, 0x53, 0x50, 0x4B ]; - public PackageMetadata Metadata = new(); - public List Files = []; + public PackageHeaderCommon Metadata = new(); + public List Files = []; public PackageVersion Version; public static string MakePartFilename(string path, int part) @@ -620,23 +305,23 @@ public class PackageCreationOptions public class Packager { - public delegate void ProgressUpdateDelegate(string status, long numerator, long denominator, AbstractFileInfo file); + public delegate void ProgressUpdateDelegate(string status, long numerator, long denominator, IAbstractFileInfo file); public ProgressUpdateDelegate ProgressUpdate = delegate { }; - private void WriteProgressUpdate(AbstractFileInfo file, long numerator, long 
denominator) + private void WriteProgressUpdate(IAbstractFileInfo file, long numerator, long denominator) { ProgressUpdate(file.Name, numerator, denominator, file); } - public void UncompressPackage(Package package, string outputPath, Func filter = null) + public void UncompressPackage(Package package, string outputPath, Func filter = null) { if (outputPath.Length > 0 && !outputPath.EndsWith(Path.DirectorySeparatorChar.ToString(), StringComparison.InvariantCultureIgnoreCase)) { outputPath += Path.DirectorySeparatorChar; } - List files = package.Files; + List files = package.Files; if (filter != null) { @@ -647,7 +332,7 @@ public void UncompressPackage(Package package, string outputPath, Func filter = null) + public void UncompressPackage(string packagePath, string outputPath, Func filter = null) { ProgressUpdate("Reading package headers ...", 0, 1, null); using var reader = new PackageReader(packagePath); diff --git a/LSLib/LS/PackageFormat.cs b/LSLib/LS/PackageFormat.cs new file mode 100644 index 00000000..703b47ba --- /dev/null +++ b/LSLib/LS/PackageFormat.cs @@ -0,0 +1,438 @@ +using LSLib.Granny; +using LSLib.LS.Enums; +using System; +using System.Runtime.InteropServices; +using System.Text; + +namespace LSLib.LS; + +public class PackageHeaderCommon +{ + public UInt32 Version; + public UInt64 FileListOffset; + // Size of file list; used for legacy (<= v10) packages only + public UInt32 FileListSize; + // Number of packed files; used for legacy (<= v10) packages only + public UInt32 NumFiles; + public UInt32 NumParts; + // Offset of packed data in archive part 0; used for legacy (<= v10) packages only + public UInt32 DataOffset; + public PackageFlags Flags; + public Byte Priority; + public byte[] Md5; +} + +internal interface ILSPKHeader +{ + public PackageHeaderCommon ToCommonHeader(); + abstract public static ILSPKHeader FromCommonHeader(PackageHeaderCommon h); +} + + +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct LSPKHeader7 : ILSPKHeader +{ + public UInt32 Version; + public UInt32 DataOffset; + public UInt32 NumParts; + public UInt32 FileListSize; + public Byte LittleEndian; + public UInt32 NumFiles; + + public readonly PackageHeaderCommon ToCommonHeader() + { + return new PackageHeaderCommon + { + Version = Version, + DataOffset = DataOffset, + FileListOffset = (ulong)Marshal.SizeOf(typeof(LSPKHeader7)), + FileListSize = FileListSize, + NumFiles = NumFiles, + NumParts = NumParts, + Flags = 0, + Priority = 0, + Md5 = null + }; + } + + public static ILSPKHeader FromCommonHeader(PackageHeaderCommon h) + { + return new LSPKHeader7 + { + Version = h.Version, + DataOffset = (uint)(h.FileListOffset + h.FileListSize), + NumParts = h.NumParts, + FileListSize = h.FileListSize, + LittleEndian = 0, + NumFiles = h.NumFiles + }; + } +} + +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct LSPKHeader10 : ILSPKHeader +{ + public UInt32 Version; + public UInt32 DataOffset; + public UInt32 FileListSize; + public UInt16 NumParts; + public Byte Flags; + public Byte Priority; + public UInt32 NumFiles; + + public readonly PackageHeaderCommon ToCommonHeader() + { + return new PackageHeaderCommon + { + Version = Version, + DataOffset = DataOffset, + FileListOffset = (ulong)Marshal.SizeOf(typeof(LSPKHeader7)), + FileListSize = FileListSize, + NumFiles = NumFiles, + NumParts = NumParts, + Flags = (PackageFlags)Flags, + Priority = Priority, + Md5 = null + }; + } + + public static ILSPKHeader FromCommonHeader(PackageHeaderCommon h) + { + return new LSPKHeader10 + { + Version = 
h.Version, + DataOffset = (uint)(h.FileListOffset + 4 + h.FileListSize), + FileListSize = h.FileListSize, + NumParts = (UInt16)h.NumParts, + Flags = (byte)h.Flags, + Priority = h.Priority, + NumFiles = h.NumFiles + }; + } +} + +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct LSPKHeader13 : ILSPKHeader +{ + public UInt32 Version; + public UInt32 FileListOffset; + public UInt32 FileListSize; + public UInt16 NumParts; + public Byte Flags; + public Byte Priority; + + [MarshalAs(UnmanagedType.ByValArray, SizeConst = 16)] + public byte[] Md5; + + public readonly PackageHeaderCommon ToCommonHeader() + { + return new PackageHeaderCommon + { + Version = Version, + DataOffset = 0, + FileListOffset = FileListOffset, + FileListSize = FileListSize, + NumParts = NumParts, + Flags = (PackageFlags)Flags, + Priority = Priority, + Md5 = Md5 + }; + } + + public static ILSPKHeader FromCommonHeader(PackageHeaderCommon h) + { + return new LSPKHeader13 + { + Version = h.Version, + FileListOffset = (UInt32)h.FileListOffset, + FileListSize = h.FileListSize, + NumParts = (UInt16)h.NumParts, + Flags = (byte)h.Flags, + Priority = h.Priority, + Md5 = h.Md5 + }; + } +} + +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct LSPKHeader15 : ILSPKHeader +{ + public UInt32 Version; + public UInt64 FileListOffset; + public UInt32 FileListSize; + public Byte Flags; + public Byte Priority; + + [MarshalAs(UnmanagedType.ByValArray, SizeConst = 16)] + public byte[] Md5; + + public readonly PackageHeaderCommon ToCommonHeader() + { + return new PackageHeaderCommon + { + Version = Version, + DataOffset = 0, + FileListOffset = FileListOffset, + FileListSize = FileListSize, + NumParts = 1, + Flags = (PackageFlags)Flags, + Priority = Priority, + Md5 = Md5 + }; + } + + public static ILSPKHeader FromCommonHeader(PackageHeaderCommon h) + { + return new LSPKHeader15 + { + Version = h.Version, + FileListOffset = (UInt32)h.FileListOffset, + FileListSize = h.FileListSize, + Flags = (byte)h.Flags, + Priority = h.Priority, + Md5 = h.Md5 + }; + } +} + +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct LSPKHeader16 : ILSPKHeader +{ + public UInt32 Version; + public UInt64 FileListOffset; + public UInt32 FileListSize; + public Byte Flags; + public Byte Priority; + + [MarshalAs(UnmanagedType.ByValArray, SizeConst = 16)] + public byte[] Md5; + + public UInt16 NumParts; + + public readonly PackageHeaderCommon ToCommonHeader() + { + return new PackageHeaderCommon + { + Version = Version, + FileListOffset = FileListOffset, + FileListSize = FileListSize, + NumParts = NumParts, + Flags = (PackageFlags)Flags, + Priority = Priority, + Md5 = Md5 + }; + } + + public static ILSPKHeader FromCommonHeader(PackageHeaderCommon h) + { + return new LSPKHeader16 + { + Version = h.Version, + FileListOffset = (UInt32)h.FileListOffset, + FileListSize = h.FileListSize, + Flags = (byte)h.Flags, + Priority = h.Priority, + Md5 = h.Md5, + NumParts = (UInt16)h.NumParts + }; + } +} + +[Flags] +public enum PackageFlags +{ + /// + /// Allow memory-mapped access to the files in this archive. + /// + AllowMemoryMapping = 0x02, + /// + /// All files are compressed into a single LZ4 stream + /// + Solid = 0x04, + /// + /// Archive contents should be preloaded on game startup. 
+ /// + Preload = 0x08 +}; + + + +abstract public class PackagedFileInfoCommon +{ + public string FileName; + public UInt32 ArchivePart; + public UInt32 Crc; + public Byte Flags; + public UInt64 OffsetInFile; + public UInt64 SizeOnDisk; + public UInt64 UncompressedSize; +} + +internal interface ILSPKFile +{ + public void ToCommon(PackagedFileInfoCommon info); + abstract public static ILSPKFile FromCommon(PackagedFileInfoCommon info); + public UInt16 ArchivePartNumber(); +} + +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct FileEntry7 : ILSPKFile +{ + [MarshalAs(UnmanagedType.ByValArray, SizeConst = 256)] + public byte[] Name; + + public UInt32 OffsetInFile; + public UInt32 SizeOnDisk; + public UInt32 UncompressedSize; + public UInt32 ArchivePart; + + public readonly void ToCommon(PackagedFileInfoCommon info) + { + info.FileName = BinUtils.NullTerminatedBytesToString(Name); + info.ArchivePart = ArchivePart; + info.Crc = 0; + info.Flags = UncompressedSize > 0 ? BinUtils.MakeCompressionFlags(CompressionMethod.Zlib, LSCompressionLevel.DefaultCompression) : (byte)0; + info.OffsetInFile = OffsetInFile; + info.SizeOnDisk = SizeOnDisk; + info.UncompressedSize = UncompressedSize; + } + + public static ILSPKFile FromCommon(PackagedFileInfoCommon info) + { + return new FileEntry7 + { + Name = BinUtils.StringToNullTerminatedBytes(info.FileName, 256), + OffsetInFile = (uint)info.OffsetInFile, + SizeOnDisk = (uint)info.SizeOnDisk, + UncompressedSize = (info.Flags & 0x0F) == 0 ? 0 : (uint)info.UncompressedSize, + ArchivePart = info.ArchivePart + }; + } + + public readonly UInt16 ArchivePartNumber() => (UInt16)ArchivePart; +} + +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct FileEntry10 : ILSPKFile +{ + [MarshalAs(UnmanagedType.ByValArray, SizeConst = 256)] + public byte[] Name; + + public UInt32 OffsetInFile; + public UInt32 SizeOnDisk; + public UInt32 UncompressedSize; + public UInt32 ArchivePart; + public UInt32 Flags; + public UInt32 Crc; + + public readonly void ToCommon(PackagedFileInfoCommon info) + { + info.FileName = BinUtils.NullTerminatedBytesToString(Name); + info.ArchivePart = ArchivePart; + info.Crc = Crc; + info.Flags = (Byte)Flags; + info.OffsetInFile = OffsetInFile; + info.SizeOnDisk = SizeOnDisk; + info.UncompressedSize = UncompressedSize; + } + + public static ILSPKFile FromCommon(PackagedFileInfoCommon info) + { + return new FileEntry10 + { + Name = BinUtils.StringToNullTerminatedBytes(info.FileName, 256), + OffsetInFile = (uint)info.OffsetInFile, + SizeOnDisk = (uint)info.SizeOnDisk, + UncompressedSize = (info.Flags & 0x0F) == 0 ? 
0 : (uint)info.UncompressedSize, + ArchivePart = info.ArchivePart, + Flags = info.Flags, + Crc = info.Crc + }; + } + + public readonly UInt16 ArchivePartNumber() => (UInt16)ArchivePart; +} + +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct FileEntry15 : ILSPKFile +{ + [MarshalAs(UnmanagedType.ByValArray, SizeConst = 256)] + public byte[] Name; + + public UInt64 OffsetInFile; + public UInt64 SizeOnDisk; + public UInt64 UncompressedSize; + public UInt32 ArchivePart; + public UInt32 Flags; + public UInt32 Crc; + public UInt32 Unknown2; + + public readonly void ToCommon(PackagedFileInfoCommon info) + { + info.FileName = BinUtils.NullTerminatedBytesToString(Name); + info.ArchivePart = ArchivePart; + info.Crc = Crc; + info.Flags = (Byte)Flags; + info.OffsetInFile = OffsetInFile; + info.SizeOnDisk = SizeOnDisk; + info.UncompressedSize = UncompressedSize; + } + + public static ILSPKFile FromCommon(PackagedFileInfoCommon info) + { + return new FileEntry15 + { + Name = BinUtils.StringToNullTerminatedBytes(info.FileName, 256), + OffsetInFile = (uint)info.OffsetInFile, + SizeOnDisk = (uint)info.SizeOnDisk, + UncompressedSize = (info.Flags & 0x0F) == 0 ? 0 : (uint)info.UncompressedSize, + ArchivePart = info.ArchivePart, + Flags = info.Flags, + Crc = info.Crc, + Unknown2 = 0 + }; + } + + public readonly UInt16 ArchivePartNumber() => (UInt16)ArchivePart; +} + +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct FileEntry18 : ILSPKFile +{ + [MarshalAs(UnmanagedType.ByValArray, SizeConst = 256)] + public byte[] Name; + + public UInt32 OffsetInFile1; + public UInt16 OffsetInFile2; + public Byte ArchivePart; + public Byte Flags; + public UInt32 SizeOnDisk; + public UInt32 UncompressedSize; + + public readonly void ToCommon(PackagedFileInfoCommon info) + { + info.FileName = BinUtils.NullTerminatedBytesToString(Name); + info.ArchivePart = ArchivePart; + info.Crc = 0; + info.Flags = Flags; + info.OffsetInFile = OffsetInFile1 | ((ulong)OffsetInFile2 << 32); + info.SizeOnDisk = SizeOnDisk; + info.UncompressedSize = UncompressedSize; + } + + public static ILSPKFile FromCommon(PackagedFileInfoCommon info) + { + return new FileEntry18 + { + Name = BinUtils.StringToNullTerminatedBytes(info.FileName, 256), + OffsetInFile1 = (uint)(info.OffsetInFile & 0xffffffff), + OffsetInFile2 = (ushort)((info.OffsetInFile >> 32) & 0xffff), + ArchivePart = (byte)info.ArchivePart, + Flags = info.Flags, + SizeOnDisk = (uint)info.SizeOnDisk, + UncompressedSize = (info.Flags & 0x0F) == 0 ? 0 : (uint)info.UncompressedSize + }; + } + + public readonly UInt16 ArchivePartNumber() => ArchivePart; +} diff --git a/LSLib/LS/PackageReader.cs b/LSLib/LS/PackageReader.cs index 67b30d8b..89895034 100644 --- a/LSLib/LS/PackageReader.cs +++ b/LSLib/LS/PackageReader.cs @@ -25,11 +25,11 @@ public NotAPackageException(string message, Exception innerException) : base(mes public class PackageReader(string path, bool metadataOnly = false) : IDisposable { - private Stream[] _streams; + private Stream[] Streams; public void Dispose() { - foreach (Stream stream in _streams ?? []) + foreach (Stream stream in Streams ?? 
[]) { stream?.Dispose(); } @@ -38,179 +38,32 @@ public void Dispose() private void OpenStreams(FileStream mainStream, int numParts) { // Open a stream for each file chunk - _streams = new Stream[numParts]; - _streams[0] = mainStream; + Streams = new Stream[numParts]; + Streams[0] = mainStream; for (var part = 1; part < numParts; part++) { string partPath = Package.MakePartFilename(path, part); - _streams[part] = File.Open(partPath, FileMode.Open, FileAccess.Read, FileShare.Read); + Streams[part] = File.Open(partPath, FileMode.Open, FileAccess.Read, FileShare.Read); } } - private Package ReadPackageV7(FileStream mainStream, BinaryReader reader) + private void ReadCompressedFileList(BinaryReader reader, Package package) where TFile : ILSPKFile { - var package = new Package(); - mainStream.Seek(0, SeekOrigin.Begin); - var header = BinUtils.ReadStruct(reader); - - package.Metadata.Flags = 0; - package.Metadata.Priority = 0; - package.Version = PackageVersion.V7; - - if (metadataOnly) return package; - - OpenStreams(mainStream, (int) header.NumParts); - for (uint i = 0; i < header.NumFiles; i++) - { - var entry = BinUtils.ReadStruct(reader); - if (entry.ArchivePart == 0) - { - entry.OffsetInFile += header.DataOffset; - } - package.Files.Add(PackagedFileInfo.CreateFromEntry(entry, _streams[entry.ArchivePart])); - } - - return package; - } - - private Package ReadPackageV10(FileStream mainStream, BinaryReader reader) - { - var package = new Package(); - mainStream.Seek(4, SeekOrigin.Begin); - var header = BinUtils.ReadStruct(reader); - - package.Metadata.Flags = (PackageFlags)header.Flags; - package.Metadata.Priority = header.Priority; - package.Version = PackageVersion.V10; - - if (metadataOnly) return package; - - OpenStreams(mainStream, header.NumParts); - for (uint i = 0; i < header.NumFiles; i++) - { - var entry = BinUtils.ReadStruct(reader); - if (entry.ArchivePart == 0) - { - entry.OffsetInFile += header.DataOffset; - } - - // Add missing compression level flags - entry.Flags = (entry.Flags & 0x0f) | 0x20; - package.Files.Add(PackagedFileInfo.CreateFromEntry(entry, _streams[entry.ArchivePart])); - } - - return package; - } - - private Package ReadPackageV13(FileStream mainStream, BinaryReader reader) - { - var package = new Package(); - var header = BinUtils.ReadStruct(reader); - - if (header.Version != (ulong) PackageVersion.V13) - { - string msg = $"Unsupported package version {header.Version}; this package layout is only supported for {PackageVersion.V13}"; - throw new InvalidDataException(msg); - } - - package.Metadata.Flags = (PackageFlags)header.Flags; - package.Metadata.Priority = header.Priority; - package.Version = PackageVersion.V13; - - if (metadataOnly) return package; - - OpenStreams(mainStream, header.NumParts); - mainStream.Seek(header.FileListOffset, SeekOrigin.Begin); int numFiles = reader.ReadInt32(); - int fileBufferSize = Marshal.SizeOf(typeof(FileEntry13)) * numFiles; - byte[] compressedFileList = reader.ReadBytes((int) header.FileListSize - 4); - - var uncompressedList = new byte[fileBufferSize]; - int uncompressedSize = LZ4Codec.Decode(compressedFileList, 0, compressedFileList.Length, uncompressedList, 0, fileBufferSize, true); - if (uncompressedSize != fileBufferSize) - { - string msg = $"LZ4 compressor disagrees about the size of file headers; expected {fileBufferSize}, got {uncompressedSize}"; - throw new InvalidDataException(msg); - } - - var ms = new MemoryStream(uncompressedList); - var msr = new BinaryReader(ms); - - var entries = new FileEntry13[numFiles]; - 
BinUtils.ReadStructs(msr, entries); - - if ((package.Metadata.Flags & PackageFlags.Solid) == PackageFlags.Solid && numFiles > 0) + int compressedSize; + if (package.Metadata.Version > 13) { - // Calculate compressed frame offset and bounds - uint totalUncompressedSize = 0; - uint totalSizeOnDisk = 0; - uint firstOffset = 0xffffffff; - uint lastOffset = 0; - - foreach (var entry in entries) - { - totalUncompressedSize += entry.UncompressedSize; - totalSizeOnDisk += entry.SizeOnDisk; - if (entry.OffsetInFile < firstOffset) - { - firstOffset = entry.OffsetInFile; - } - if (entry.OffsetInFile + entry.SizeOnDisk > lastOffset) - { - lastOffset = entry.OffsetInFile + entry.SizeOnDisk; - } - } - - if (firstOffset != 7 || lastOffset - firstOffset != totalSizeOnDisk) - { - string msg = $"Incorrectly compressed solid archive; offsets {firstOffset}/{lastOffset}, bytes {totalSizeOnDisk}"; - throw new InvalidDataException(msg); - } - - // Decompress all files as a single frame (solid) - byte[] frame = new byte[lastOffset]; - mainStream.Seek(0, SeekOrigin.Begin); - mainStream.Read(frame, 0, (int)lastOffset); - - byte[] decompressed = Native.LZ4FrameCompressor.Decompress(frame); - var decompressedStream = new MemoryStream(decompressed); - - // Update offsets to point to the decompressed chunk - uint offset = 7; - uint compressedOffset = 0; - foreach (var entry in entries) - { - if (entry.OffsetInFile != offset) - { - throw new InvalidDataException("File list in solid archive not contiguous"); - } - - var file = PackagedFileInfo.CreateSolidFromEntry(entry, _streams[entry.ArchivePart], compressedOffset, decompressedStream); - package.Files.Add(file); - - offset += entry.SizeOnDisk; - compressedOffset += entry.UncompressedSize; - } + compressedSize = reader.ReadInt32(); } else { - foreach (var entry in entries) - { - package.Files.Add(PackagedFileInfo.CreateFromEntry(entry, _streams[entry.ArchivePart])); - } + compressedSize = (int)package.Metadata.FileListSize - 4; } - return package; - } - - private void ReadFileListV15(BinaryReader reader, Package package) - { - int numFiles = reader.ReadInt32(); - int compressedSize = reader.ReadInt32(); byte[] compressedFileList = reader.ReadBytes(compressedSize); - int fileBufferSize = Marshal.SizeOf(typeof(FileEntry15)) * numFiles; + int fileBufferSize = Marshal.SizeOf(typeof(TFile)) * numFiles; var uncompressedList = new byte[fileBufferSize]; int uncompressedSize = LZ4Codec.Decode(compressedFileList, 0, compressedFileList.Length, uncompressedList, 0, fileBufferSize, true); if (uncompressedSize != fileBufferSize) @@ -222,112 +75,119 @@ private void ReadFileListV15(BinaryReader reader, Package package) var ms = new MemoryStream(uncompressedList); var msr = new BinaryReader(ms); - var entries = new FileEntry15[numFiles]; + var entries = new TFile[numFiles]; BinUtils.ReadStructs(msr, entries); foreach (var entry in entries) { - package.Files.Add(PackagedFileInfo.CreateFromEntry(entry, _streams[entry.ArchivePart])); + package.Files.Add(PackagedFileInfo.CreateFromEntry(entry, Streams[entry.ArchivePartNumber()])); } } - private void ReadFileListV18(BinaryReader reader, Package package) + private void ReadFileList(BinaryReader reader, Package package) where TFile : ILSPKFile { - int numFiles = reader.ReadInt32(); - int compressedSize = reader.ReadInt32(); - byte[] compressedFileList = reader.ReadBytes(compressedSize); - - int fileBufferSize = Marshal.SizeOf(typeof(FileEntry18)) * numFiles; - var uncompressedList = new byte[fileBufferSize]; - int uncompressedSize = 
LZ4Codec.Decode(compressedFileList, 0, compressedFileList.Length, uncompressedList, 0, fileBufferSize, false);
-        if (uncompressedSize != fileBufferSize)
-        {
-            string msg = $"LZ4 compressor disagrees about the size of file headers; expected {fileBufferSize}, got {uncompressedSize}";
-            throw new InvalidDataException(msg);
-        }
-
-        var ms = new MemoryStream(uncompressedList);
-        var msr = new BinaryReader(ms);
-
-        var entries = new FileEntry18[numFiles];
-        BinUtils.ReadStructs(msr, entries);
+        var entries = new TFile[package.Metadata.NumFiles];
+        BinUtils.ReadStructs(reader, entries);
         foreach (var entry in entries)
         {
-            package.Files.Add(PackagedFileInfo.CreateFromEntry(entry, _streams[entry.ArchivePart]));
+            var file = PackagedFileInfo.CreateFromEntry(entry, Streams[entry.ArchivePartNumber()]);
+            if (file.ArchivePart == 0)
+            {
+                file.OffsetInFile += package.Metadata.DataOffset;
+            }
+
+            package.Files.Add(file);
         }
     }
-    private Package ReadPackageV15(FileStream mainStream, BinaryReader reader)
+    private Package ReadHeaderAndFileList<THeader, TFile>(FileStream mainStream, BinaryReader reader)
+        where THeader : ILSPKHeader
+        where TFile : ILSPKFile
     {
         var package = new Package();
-        var header = BinUtils.ReadStruct<LSPKHeader15>(reader);
+        var header = BinUtils.ReadStruct<THeader>(reader);
-        if (header.Version != (ulong)PackageVersion.V15)
-        {
-            string msg = $"Unsupported package version {header.Version}; this layout is only supported for V15";
-            throw new InvalidDataException(msg);
-        }
-
-        package.Metadata.Flags = (PackageFlags)header.Flags;
-        package.Metadata.Priority = header.Priority;
-        package.Version = PackageVersion.V15;
+        package.Metadata = header.ToCommonHeader();
+        package.Version = (PackageVersion)package.Metadata.Version;
         if (metadataOnly) return package;
-        OpenStreams(mainStream, 1);
-        mainStream.Seek((long)header.FileListOffset, SeekOrigin.Begin);
-        ReadFileListV15(reader, package);
+        OpenStreams(mainStream, (int)package.Metadata.NumParts);
-        return package;
-    }
-
-    private Package ReadPackageV16(FileStream mainStream, BinaryReader reader)
-    {
-        var package = new Package();
-        var header = BinUtils.ReadStruct<LSPKHeader16>(reader);
-
-        if (header.Version != (ulong)PackageVersion.V16)
+        if (package.Metadata.Version > 10)
         {
-            string msg = $"Unsupported package version {header.Version}; this layout is only supported for V16";
-            throw new InvalidDataException(msg);
+            mainStream.Seek((long)package.Metadata.FileListOffset, SeekOrigin.Begin);
+            ReadCompressedFileList<TFile>(reader, package);
+        }
+        else
+        {
+            ReadFileList<TFile>(reader, package);
         }
-        package.Metadata.Flags = (PackageFlags)header.Flags;
-        package.Metadata.Priority = header.Priority;
-        package.Version = PackageVersion.V16;
-
-        if (metadataOnly) return package;
-
-        OpenStreams(mainStream, header.NumParts);
-        mainStream.Seek((long)header.FileListOffset, SeekOrigin.Begin);
-        ReadFileListV15(reader, package);
+        if (((PackageFlags)package.Metadata.Flags).HasFlag(PackageFlags.Solid) && package.Files.Count > 0)
+        {
+            UnpackSolidSegment(mainStream, package);
+        }
         return package;
     }
-    private Package ReadPackageV18(FileStream mainStream, BinaryReader reader)
+    private void UnpackSolidSegment(FileStream mainStream, Package package)
     {
-        var package = new Package();
-        var header = BinUtils.ReadStruct<LSPKHeader16>(reader);
+        // Calculate compressed frame offset and bounds
+        ulong totalUncompressedSize = 0;
+        ulong totalSizeOnDisk = 0;
+        ulong firstOffset = 0xffffffff;
+        ulong lastOffset = 0;
-        if (header.Version != (ulong)PackageVersion.V18)
+        foreach (var entry in package.Files)
         {
-            string msg = $"Unsupported package version {header.Version}; this layout is only supported for V18";
+            var file = entry as PackagedFileInfo;
+
+            totalUncompressedSize += file.UncompressedSize;
+            totalSizeOnDisk += file.SizeOnDisk;
+            if (file.OffsetInFile < firstOffset)
+            {
+                firstOffset = file.OffsetInFile;
+            }
+            if (file.OffsetInFile + file.SizeOnDisk > lastOffset)
+            {
+                lastOffset = file.OffsetInFile + file.SizeOnDisk;
+            }
+        }
+
+        if (firstOffset != 7 || lastOffset - firstOffset != totalSizeOnDisk)
+        {
+            string msg = $"Incorrectly compressed solid archive; offsets {firstOffset}/{lastOffset}, bytes {totalSizeOnDisk}";
             throw new InvalidDataException(msg);
         }
-        package.Metadata.Flags = (PackageFlags)header.Flags;
-        package.Metadata.Priority = header.Priority;
-        package.Version = PackageVersion.V18;
+        // Decompress all files as a single frame (solid)
+        byte[] frame = new byte[lastOffset];
+        mainStream.Seek(0, SeekOrigin.Begin);
+        mainStream.Read(frame, 0, (int)lastOffset);
-        if (metadataOnly) return package;
+        byte[] decompressed = Native.LZ4FrameCompressor.Decompress(frame);
+        var decompressedStream = new MemoryStream(decompressed);
-        OpenStreams(mainStream, header.NumParts);
-        mainStream.Seek((long)header.FileListOffset, SeekOrigin.Begin);
-        ReadFileListV18(reader, package);
+        // Update offsets to point to the decompressed chunk
+        ulong offset = 7;
+        ulong compressedOffset = 0;
+        foreach (var entry in package.Files)
+        {
+            var file = entry as PackagedFileInfo;
-        return package;
+            if (file.OffsetInFile != offset)
+            {
+                throw new InvalidDataException("File list in solid archive not contiguous");
+            }
+
+            file.MakeSolid(compressedOffset, decompressedStream);
+
+            offset += file.SizeOnDisk;
+            compressedOffset += file.UncompressedSize;
+        }
     }
     public Package Read()
@@ -342,7 +202,7 @@ public Package Read()
         if (Package.Signature.SequenceEqual(signature))
         {
             mainStream.Seek(-headerSize, SeekOrigin.End);
-            return ReadPackageV13(mainStream, reader);
+            return ReadHeaderAndFileList<LSPKHeader13, FileEntry10>(mainStream, reader);
         }
         // Check for v10 package headers
@@ -352,29 +212,15 @@ public Package Read()
         if (Package.Signature.SequenceEqual(signature))
         {
             version = reader.ReadInt32();
-            if (version == 10)
-            {
-                return ReadPackageV10(mainStream, reader);
-            }
-            else if (version == 15)
+            mainStream.Seek(4, SeekOrigin.Begin);
+            return version switch
             {
-                mainStream.Seek(4, SeekOrigin.Begin);
-                return ReadPackageV15(mainStream, reader);
-            }
-            else if (version == 16)
-            {
-                mainStream.Seek(4, SeekOrigin.Begin);
-                return ReadPackageV16(mainStream, reader);
-            }
-            else if (version == 18)
-            {
-                mainStream.Seek(4, SeekOrigin.Begin);
-                return ReadPackageV18(mainStream, reader);
-            }
-            else
-            {
-                throw new InvalidDataException($"Package version v{version} not supported");
-            }
+                10 => ReadHeaderAndFileList<LSPKHeader10, FileEntry10>(mainStream, reader),
+                15 => ReadHeaderAndFileList<LSPKHeader15, FileEntry15>(mainStream, reader),
+                16 => ReadHeaderAndFileList<LSPKHeader16, FileEntry15>(mainStream, reader),
+                18 => ReadHeaderAndFileList<LSPKHeader16, FileEntry18>(mainStream, reader),
+                _ => throw new InvalidDataException($"Package version v{version} not supported")
+            };
         }
         // Check for v9 and v7 package headers
@@ -382,7 +228,8 @@ public Package Read()
         version = reader.ReadInt32();
         if (version == 7 || version == 9)
         {
-            return ReadPackageV7(mainStream, reader);
+            mainStream.Seek(0, SeekOrigin.Begin);
+            return ReadHeaderAndFileList<LSPKHeader7, FileEntry7>(mainStream, reader);
         }
         throw new NotAPackageException("No valid signature found in package file");
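The generic ReadHeaderAndFileList<THeader, TFile> above works because ILSPKHeader and ILSPKFile pair an instance conversion to the shared PackageHeaderCommon / PackagedFileInfoCommon types with a static abstract factory going the other way (static abstract interface members, a C# 11 / .NET 7 feature). A minimal self-contained sketch of the same pattern; CommonHeader, IVersionedHeader, HeaderV1 and Build are illustrative names only, not LSLib types:

    using System;

    // Shared, version-independent header fields (stand-in for PackageHeaderCommon).
    public class CommonHeader
    {
        public uint Version;
        public ulong FileListOffset;
    }

    // Mirrors the shape of ILSPKHeader: an instance method for reading,
    // a static abstract factory for writing. Requires C# 11 / .NET 7 or later.
    public interface IVersionedHeader
    {
        CommonHeader ToCommon();
        static abstract IVersionedHeader FromCommon(CommonHeader h);
    }

    // One hypothetical on-disk layout implementing the interface.
    public struct HeaderV1 : IVersionedHeader
    {
        public uint Version;
        public uint FileListOffset;

        public readonly CommonHeader ToCommon() =>
            new CommonHeader { Version = Version, FileListOffset = FileListOffset };

        public static IVersionedHeader FromCommon(CommonHeader h) =>
            new HeaderV1 { Version = h.Version, FileListOffset = (uint)h.FileListOffset };
    }

    public static class VersionedHeaderDemo
    {
        // Generic over the concrete struct, like PackV13<THeader, TFile> below:
        // the static abstract member is resolved against the type argument.
        public static THeader Build<THeader>(CommonHeader common)
            where THeader : IVersionedHeader
        {
            return (THeader)THeader.FromCommon(common);
        }

        public static void Main()
        {
            var v1 = Build<HeaderV1>(new CommonHeader { Version = 1, FileListOffset = 64 });
            Console.WriteLine(v1.Version); // prints 1
        }
    }

This is why the reader only needs header.ToCommonHeader() plus the package version to pick a (THeader, TFile) pair, while the writer can emit any on-disk layout from the one shared PackageHeaderCommon.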
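The PackageWriter changes below keep the multi-part splitting behaviour of the old WriteV* methods: once the current part would grow past the size cap (MaxPackageSizeDOS = 1 GiB, MaxPackageSizeBG3 = 4 GiB), WriteFile opens the next part via Package.MakePartFilename and continues writing there. A rough sketch of that rollover decision, assuming part files follow a "Name_1.pak" naming pattern; PartSplitSketch, PickPartStream and MaxPartSize are illustrative names, not LSLib API:

    using System.Collections.Generic;
    using System.IO;

    public static class PartSplitSketch
    {
        // Illustrative cap; the real writer picks 1 GiB or 4 GiB depending on package version.
        private const long MaxPartSize = 0x40000000;

        // Returns the stream the next file should be written to, opening a new
        // "_N" part file when the current one would overflow.
        public static Stream PickPartStream(List<Stream> parts, string pakPath, long nextFileSize)
        {
            if (parts[parts.Count - 1].Position + nextFileSize > MaxPartSize)
            {
                // e.g. "Textures.pak" -> "Textures_1.pak", "Textures_2.pak", ...
                string partPath = Path.Combine(
                    Path.GetDirectoryName(pakPath) ?? "",
                    Path.GetFileNameWithoutExtension(pakPath) + "_" + parts.Count + Path.GetExtension(pakPath));
                parts.Add(File.Open(partPath, FileMode.Create, FileAccess.Write));
            }

            return parts[parts.Count - 1];
        }
    }

Whether a given entry was compressed at all is still signalled in the low nibble of its flags byte, which is why (Flags & 0x0F) == 0 appears throughout as the "stored uncompressed" check.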
diff --git a/LSLib/LS/PackageWriter.cs b/LSLib/LS/PackageWriter.cs
index 956d50f5..88718141 100644
--- a/LSLib/LS/PackageWriter.cs
+++ b/LSLib/LS/PackageWriter.cs
@@ -3,29
+3,32 @@ using System.IO; using System.IO.Hashing; using System.Linq; +using System.Reflection.PortableExecutable; using System.Runtime.InteropServices; using System.Security.Cryptography; using System.Text; +using LSLib.Granny.GR2; using LSLib.LS.Enums; +using LSLib.VirtualTextures; using LZ4; namespace LSLib.LS; public class PackageWriter(Package package, string path) : IDisposable { - public delegate void WriteProgressDelegate(AbstractFileInfo abstractFile, long numerator, long denominator); + public delegate void WriteProgressDelegate(IAbstractFileInfo abstractFile, long numerator, long denominator); private const long MaxPackageSizeDOS = 0x40000000; private const long MaxPackageSizeBG3 = 0x100000000; public CompressionMethod Compression = CompressionMethod.None; public LSCompressionLevel LSCompressionLevel = LSCompressionLevel.DefaultCompression; - private readonly List _streams = []; + private readonly List Streams = []; public PackageVersion Version = Package.CurrentVersion; public WriteProgressDelegate WriteProgress = delegate { }; public void Dispose() { - foreach (Stream stream in _streams) + foreach (Stream stream in Streams) { stream.Dispose(); } @@ -33,17 +36,17 @@ public void Dispose() public int PaddingLength() => Version <= PackageVersion.V9 ? 0x8000 : 0x40; - public PackagedFileInfo WriteFile(AbstractFileInfo info) + public PackagedFileInfo WriteFile(IAbstractFileInfo info) { // Assume that all files are written uncompressed (worst-case) when calculating package sizes long size = (long)info.Size(); - if ((Version < PackageVersion.V15 && _streams.Last().Position + size > MaxPackageSizeDOS) - || (Version >= PackageVersion.V16 && _streams.Last().Position + size > MaxPackageSizeBG3)) + if ((Version < PackageVersion.V15 && Streams.Last().Position + size > MaxPackageSizeDOS) + || (Version >= PackageVersion.V16 && Streams.Last().Position + size > MaxPackageSizeBG3)) { // Start a new package file if the current one is full. 
- string partPath = Package.MakePartFilename(path, _streams.Count); + string partPath = Package.MakePartFilename(path, Streams.Count); var nextPart = File.Open(partPath, FileMode.Create, FileAccess.Write); - _streams.Add(nextPart); + Streams.Add(nextPart); } var compression = Compression; @@ -55,13 +58,13 @@ public PackagedFileInfo WriteFile(AbstractFileInfo info) compressionLevel = LSCompressionLevel.FastCompression; } - Stream stream = _streams.Last(); + Stream stream = Streams.Last(); var packaged = new PackagedFileInfo { PackageStream = stream, - Name = info.Name, + FileName = info.Name, UncompressedSize = (ulong)size, - ArchivePart = (UInt32) (_streams.Count - 1), + ArchivePart = (UInt32) (Streams.Count - 1), OffsetInFile = (UInt32) stream.Position, Flags = BinUtils.MakeCompressionFlags(compression, compressionLevel) }; @@ -83,7 +86,7 @@ public PackagedFileInfo WriteFile(AbstractFileInfo info) packaged.SizeOnDisk = (UInt64) (stream.Position - (long)packaged.OffsetInFile); packaged.Crc = Crc32.HashToUInt32(compressed); - if ((package.Metadata.Flags & PackageFlags.Solid) == 0) + if (!package.Metadata.Flags.HasFlag(PackageFlags.Solid)) { int padLength = PaddingLength(); long alignTo; @@ -110,155 +113,60 @@ public PackagedFileInfo WriteFile(AbstractFileInfo info) return packaged; } - public void WriteV7(FileStream mainStream) + private void PackV7(FileStream mainStream) + where THeader : ILSPKHeader + where TFile : ILSPKFile { - if (Compression == CompressionMethod.LZ4) - { - throw new ArgumentException("LZ4 compression is only supported by V10 and later package versions"); - } + package.Metadata.NumFiles = (uint)package.Files.Count; + package.Metadata.FileListSize = (UInt32)(Marshal.SizeOf(typeof(TFile)) * package.Files.Count); using var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true); - var header = new LSPKHeader7 - { - Version = (uint)Version, - NumFiles = (UInt32)package.Files.Count, - FileListSize = (UInt32)(Marshal.SizeOf(typeof(FileEntry7)) * package.Files.Count) - }; - header.DataOffset = (UInt32)Marshal.SizeOf(typeof(LSPKHeader7)) + header.FileListSize; - int paddingLength = PaddingLength(); - if (header.DataOffset % paddingLength > 0) - { - header.DataOffset += (UInt32)(paddingLength - header.DataOffset % paddingLength); - } - // Write a placeholder instead of the actual headers; we'll write them after we - // compressed and flushed all files to disk - var placeholder = new byte[header.DataOffset]; - writer.Write(placeholder); + package.Metadata.DataOffset = 4 + (UInt32)Marshal.SizeOf(typeof(THeader)) + package.Metadata.FileListSize; - long totalSize = package.Files.Sum(p => (long)p.Size()); - long currentSize = 0; - var writtenFiles = new List(); - foreach (AbstractFileInfo file in package.Files) - { - WriteProgress(file, currentSize, totalSize); - writtenFiles.Add(WriteFile(file)); - currentSize += (long)file.Size(); - } - - mainStream.Seek(0, SeekOrigin.Begin); - header.LittleEndian = 0; - header.NumParts = (UInt16)_streams.Count; - BinUtils.WriteStruct(writer, ref header); - - foreach (PackagedFileInfo file in writtenFiles) - { - FileEntry7 entry = file.MakeEntryV7(); - if (entry.ArchivePart == 0) - { - entry.OffsetInFile -= header.DataOffset; - } - - BinUtils.WriteStruct(writer, ref entry); - } - } - - public void WriteV10(FileStream mainStream) - { - using var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true); - var header = new LSPKHeader10 - { - Version = (uint)Version, - NumFiles = (UInt32)package.Files.Count, - FileListSize = 
(UInt32)(Marshal.SizeOf(typeof(FileEntry13)) * package.Files.Count) - }; - header.DataOffset = (UInt32)Marshal.SizeOf(typeof(LSPKHeader10)) + 4 + header.FileListSize; int paddingLength = PaddingLength(); - if (header.DataOffset % paddingLength > 0) + if (package.Metadata.DataOffset % paddingLength > 0) { - header.DataOffset += (UInt32)(paddingLength - header.DataOffset % paddingLength); + package.Metadata.DataOffset += (UInt32)(paddingLength - package.Metadata.DataOffset % paddingLength); } // Write a placeholder instead of the actual headers; we'll write them after we // compressed and flushed all files to disk - var placeholder = new byte[header.DataOffset]; + var placeholder = new byte[package.Metadata.DataOffset]; writer.Write(placeholder); - long totalSize = package.Files.Sum(p => (long)p.Size()); - long currentSize = 0; - var writtenFiles = new List(); - foreach (AbstractFileInfo file in package.Files) - { - WriteProgress(file, currentSize, totalSize); - writtenFiles.Add(WriteFile(file)); - currentSize += (long)file.Size(); - } + var writtenFiles = PackFiles(); mainStream.Seek(0, SeekOrigin.Begin); writer.Write(Package.Signature); - header.NumParts = (UInt16)_streams.Count; - header.Priority = package.Metadata.Priority; - header.Flags = (byte)package.Metadata.Flags; - BinUtils.WriteStruct(writer, ref header); + package.Metadata.NumParts = (UInt16)Streams.Count; + package.Metadata.Md5 = ComputeArchiveHash(); - foreach (PackagedFileInfo file in writtenFiles) - { - FileEntry13 entry = file.MakeEntryV13(); - if (entry.ArchivePart == 0) - { - entry.OffsetInFile -= header.DataOffset; - } + var header = (THeader)THeader.FromCommonHeader(package.Metadata); + BinUtils.WriteStruct(writer, ref header); - // v10 packages don't support compression level in the flags field - entry.Flags &= 0x0f; - BinUtils.WriteStruct(writer, ref entry); - } + WriteFileList(writer, writtenFiles); } - public void WriteV13(FileStream mainStream) + private void PackV13(FileStream mainStream) + where THeader : ILSPKHeader + where TFile : ILSPKFile { - long totalSize = package.Files.Sum(p => (long) p.Size()); - long currentSize = 0; - - var writtenFiles = new List(); - foreach (AbstractFileInfo file in package.Files) - { - WriteProgress(file, currentSize, totalSize); - writtenFiles.Add(WriteFile(file)); - currentSize += (long)file.Size(); - } + var writtenFiles = PackFiles(); using var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true); - var header = new LSPKHeader13 - { - Version = (uint)Version, - FileListOffset = (UInt32)mainStream.Position - }; - writer.Write((UInt32)writtenFiles.Count); + package.Metadata.FileListOffset = (UInt64)mainStream.Position; + WriteCompressedFileList(writer, writtenFiles); - var fileList = new MemoryStream(); - var fileListWriter = new BinaryWriter(fileList); - foreach (PackagedFileInfo file in writtenFiles) - { - FileEntry13 entry = file.MakeEntryV13(); - BinUtils.WriteStruct(fileListWriter, ref entry); - } + package.Metadata.FileListSize = (UInt32)(mainStream.Position - (long)package.Metadata.FileListOffset); + package.Metadata.Md5 = ComputeArchiveHash(); + package.Metadata.NumParts = (UInt16)Streams.Count; - byte[] fileListBuf = fileList.ToArray(); - fileListWriter.Dispose(); - byte[] compressedFileList = LZ4Codec.EncodeHC(fileListBuf, 0, fileListBuf.Length); - - writer.Write(compressedFileList); - - header.FileListSize = (UInt32)mainStream.Position - header.FileListOffset; - header.NumParts = (UInt16)_streams.Count; - header.Priority = package.Metadata.Priority; - 
header.Flags = (byte)package.Metadata.Flags; - header.Md5 = ComputeArchiveHash(); + var header = (THeader)THeader.FromCommonHeader(package.Metadata); BinUtils.WriteStruct(writer, ref header); - writer.Write((UInt32)(8 + Marshal.SizeOf(typeof(LSPKHeader13)))); + writer.Write((UInt32)(8 + Marshal.SizeOf(typeof(THeader)))); writer.Write(Package.Signature); } @@ -268,7 +176,7 @@ private List PackFiles() long currentSize = 0; var writtenFiles = new List(); - foreach (AbstractFileInfo file in package.Files) + foreach (var file in package.Files) { WriteProgress(file, currentSize, totalSize); writtenFiles.Add(WriteFile(file)); @@ -278,88 +186,26 @@ private List PackFiles() return writtenFiles; } - private void WriteFileListV15(BinaryWriter metadataWriter, List files) + private void WriteFileList(BinaryWriter metadataWriter, List files) + where TFile : ILSPKFile { - byte[] fileListBuf; - using (var fileList = new MemoryStream()) - using (var fileListWriter = new BinaryWriter(fileList)) + foreach (var file in files) { - foreach (PackagedFileInfo file in files) + if (file.ArchivePart == 0) { - FileEntry15 entry = file.MakeEntryV15(); - BinUtils.WriteStruct(fileListWriter, ref entry); + file.OffsetInFile -= package.Metadata.DataOffset; } - fileListBuf = fileList.ToArray(); - } - - byte[] compressedFileList = LZ4Codec.EncodeHC(fileListBuf, 0, fileListBuf.Length); - - metadataWriter.Write((UInt32)files.Count); - metadataWriter.Write((UInt32)compressedFileList.Length); - metadataWriter.Write(compressedFileList); - } - - public void WriteV15(FileStream mainStream) - { - var header = new LSPKHeader15 - { - Version = (uint)Version - }; + // <= v10 packages don't support compression level in the flags field + file.Flags &= 0x0f; - using (var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true)) - { - writer.Write(Package.Signature); - BinUtils.WriteStruct(writer, ref header); - } - - var writtenFiles = PackFiles(); - - using (var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true)) - { - header.FileListOffset = (UInt64)mainStream.Position; - WriteFileListV15(writer, writtenFiles); - - header.FileListSize = (UInt32)(mainStream.Position - (long)header.FileListOffset); - header.Priority = package.Metadata.Priority; - header.Flags = (byte)package.Metadata.Flags; - header.Md5 = ComputeArchiveHash(); - mainStream.Seek(4, SeekOrigin.Begin); - BinUtils.WriteStruct(writer, ref header); + var entry = (TFile)TFile.FromCommon(file); + BinUtils.WriteStruct(metadataWriter, ref entry); } } - public void WriteV16(FileStream mainStream) - { - var header = new LSPKHeader16 - { - Version = (uint)Version - }; - - using (var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true)) - { - writer.Write(Package.Signature); - BinUtils.WriteStruct(writer, ref header); - } - - var writtenFiles = PackFiles(); - - using (var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true)) - { - header.FileListOffset = (UInt64)mainStream.Position; - WriteFileListV15(writer, writtenFiles); - - header.FileListSize = (UInt32)(mainStream.Position - (long)header.FileListOffset); - header.Priority = package.Metadata.Priority; - header.Flags = (byte)package.Metadata.Flags; - header.Md5 = ComputeArchiveHash(); - header.NumParts = (UInt16)_streams.Count; - mainStream.Seek(4, SeekOrigin.Begin); - BinUtils.WriteStruct(writer, ref header); - } - } - - private void WriteFileListV18(BinaryWriter metadataWriter, List files) + private void WriteCompressedFileList(BinaryWriter metadataWriter, List files) + where TFile : 
ILSPKFile { byte[] fileListBuf; using (var fileList = new MemoryStream()) @@ -367,7 +213,7 @@ private void WriteFileListV18(BinaryWriter metadataWriter, List PackageVersion.V13) + { + metadataWriter.Write((UInt32)compressedFileList.Length); + } + else + { + package.Metadata.FileListSize = (uint)compressedFileList.Length + 4; + } + metadataWriter.Write(compressedFileList); } - public void WriteV18(FileStream mainStream) + private void PackV15(FileStream mainStream) + where THeader : ILSPKHeader + where TFile : ILSPKFile { - var header = new LSPKHeader16 - { - Version = (uint)Version - }; - using (var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true)) { writer.Write(Package.Signature); + var header = (THeader)THeader.FromCommonHeader(package.Metadata); BinUtils.WriteStruct(writer, ref header); } @@ -398,15 +251,15 @@ public void WriteV18(FileStream mainStream) using (var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true)) { - header.FileListOffset = (UInt64)mainStream.Position; - WriteFileListV18(writer, writtenFiles); - - header.FileListSize = (UInt32)(mainStream.Position - (long)header.FileListOffset); - header.Priority = package.Metadata.Priority; - header.Flags = (byte)package.Metadata.Flags; - header.Md5 = ComputeArchiveHash(); - header.NumParts = (UInt16)_streams.Count; + package.Metadata.FileListOffset = (UInt64)mainStream.Position; + WriteCompressedFileList(writer, writtenFiles); + + package.Metadata.FileListSize = (UInt32)(mainStream.Position - (long)package.Metadata.FileListOffset); + package.Metadata.Md5 = ComputeArchiveHash(); + package.Metadata.NumParts = (UInt16)Streams.Count; + mainStream.Seek(4, SeekOrigin.Begin); + var header = (THeader)THeader.FromCommonHeader(package.Metadata); BinUtils.WriteStruct(writer, ref header); } } @@ -414,14 +267,14 @@ public void WriteV18(FileStream mainStream) public byte[] ComputeArchiveHash() { // MD5 is computed over the contents of all files in an alphabetically sorted order - List orderedFileList = package.Files.Select(item => item).ToList(); + var orderedFileList = package.Files.Select(item => item).ToList(); if (Version < PackageVersion.V15) { orderedFileList.Sort((a, b) => String.CompareOrdinal(a.Name, b.Name)); } using MD5 md5 = MD5.Create(); - foreach (AbstractFileInfo file in orderedFileList) + foreach (var file in orderedFileList) { Stream packagedStream = file.MakeStream(); try @@ -453,45 +306,18 @@ public byte[] ComputeArchiveHash() public void Write() { var mainStream = File.Open(path, FileMode.Create, FileAccess.Write); - _streams.Add(mainStream); + Streams.Add(mainStream); switch (Version) { - case PackageVersion.V18: - { - WriteV18(mainStream); - break; - } - case PackageVersion.V16: - { - WriteV16(mainStream); - break; - } - case PackageVersion.V15: - { - WriteV15(mainStream); - break; - } - case PackageVersion.V13: - { - WriteV13(mainStream); - break; - } - case PackageVersion.V10: - { - WriteV10(mainStream); - break; - } + case PackageVersion.V18: PackV15(mainStream); break; + case PackageVersion.V16: PackV15(mainStream); break; + case PackageVersion.V15: PackV15(mainStream); break; + case PackageVersion.V13: PackV13(mainStream); break; + case PackageVersion.V10: PackV7(mainStream); break; case PackageVersion.V9: - case PackageVersion.V7: - { - WriteV7(mainStream); - break; - } - default: - { - throw new ArgumentException($"Cannot write version {Version} packages"); - } + case PackageVersion.V7: PackV7(mainStream); break; + default: throw new ArgumentException($"Cannot write version {Version} 
packages"); } } } diff --git a/LSLib/LS/Save/SavegameHelpers.cs b/LSLib/LS/Save/SavegameHelpers.cs index c8fd5f4c..e2e401f5 100644 --- a/LSLib/LS/Save/SavegameHelpers.cs +++ b/LSLib/LS/Save/SavegameHelpers.cs @@ -26,7 +26,7 @@ public void Dispose() public Resource LoadGlobals() { - AbstractFileInfo globalsInfo = Package.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == "globals.lsf"); + var globalsInfo = Package.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == "globals.lsf"); if (globalsInfo == null) { throw new InvalidDataException("The specified package is not a valid savegame (globals.lsf not found)"); @@ -55,7 +55,7 @@ public Story.Story LoadStory(Stream s) public Story.Story LoadStory() { - AbstractFileInfo storyInfo = Package.Files.FirstOrDefault(p => p.Name == "StorySave.bin"); + var storyInfo = Package.Files.FirstOrDefault(p => p.Name == "StorySave.bin"); if (storyInfo != null) { Stream rsrcStream = storyInfo.MakeStream(); @@ -110,16 +110,16 @@ public void ResaveStory(Story.Story story, Game game, string path) var rewrittenPackage = new Package(); var conversionParams = ResourceConversionParameters.FromGameVersion(game); - AbstractFileInfo storyBin = Package.Files.FirstOrDefault(p => p.Name == "StorySave.bin"); + var storyBin = Package.Files.FirstOrDefault(p => p.Name == "StorySave.bin"); if (storyBin == null) { var globalsStream = ResaveStoryToGlobals(story, conversionParams); - AbstractFileInfo globalsLsf = Package.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == "globals.lsf"); + var globalsLsf = Package.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == "globals.lsf"); StreamFileInfo globalsRepacked = StreamFileInfo.CreateFromStream(globalsStream, globalsLsf.Name); rewrittenPackage.Files.Add(globalsRepacked); - List files = Package.Files.Where(x => x.Name.ToLowerInvariant() != "globals.lsf").ToList(); + var files = Package.Files.Where(x => x.Name.ToLowerInvariant() != "globals.lsf").ToList(); rewrittenPackage.Files.AddRange(files); } else @@ -133,7 +133,7 @@ public void ResaveStory(Story.Story story, Game game, string path) StreamFileInfo storyRepacked = StreamFileInfo.CreateFromStream(storyStream, "StorySave.bin"); rewrittenPackage.Files.Add(storyRepacked); - List files = Package.Files.Where(x => x.Name != "StorySave.bin").ToList(); + var files = Package.Files.Where(x => x.Name != "StorySave.bin").ToList(); rewrittenPackage.Files.AddRange(files); } diff --git a/StatParser/StatChecker.cs b/StatParser/StatChecker.cs index a76aee66..b97343e0 100644 --- a/StatParser/StatChecker.cs +++ b/StatParser/StatChecker.cs @@ -45,7 +45,7 @@ private void LoadStats(ModInfo mod) } } - private XmlDocument LoadXml(AbstractFileInfo file) + private XmlDocument LoadXml(IAbstractFileInfo file) { if (file == null) return null; diff --git a/StoryCompiler/ModCompiler.cs b/StoryCompiler/ModCompiler.cs index e95fef1a..fc5bb3db 100644 --- a/StoryCompiler/ModCompiler.cs +++ b/StoryCompiler/ModCompiler.cs @@ -237,7 +237,7 @@ private void LoadGoals(ModInfo mod) if (file.Value is PackagedFileInfo) { var pkgd = file.Value as PackagedFileInfo; - path = (pkgd.PackageStream as FileStream).Name + ":/" + pkgd.Name; + path = (pkgd.PackageStream as FileStream).Name + ":/" + pkgd.FileName; } else { @@ -381,8 +381,8 @@ public bool Compile(string outputPath, string debugInfoPath, List mods) LoadMod(modName); } - AbstractFileInfo storyHeaderFile = null; - AbstractFileInfo typeCoercionWhitelistFile = null; + IAbstractFileInfo storyHeaderFile = null; + IAbstractFileInfo typeCoercionWhitelistFile = null; 
var modsSearchPath = mods.ToList(); modsSearchPath.Reverse(); foreach (var modName in modsSearchPath) diff --git a/StoryDecompiler/Program.cs b/StoryDecompiler/Program.cs index 65f10947..727c95d0 100644 --- a/StoryDecompiler/Program.cs +++ b/StoryDecompiler/Program.cs @@ -16,7 +16,7 @@ private static MemoryStream LoadStoryStreamFromSave(String path) { Package package = packageReader.Read(); - AbstractFileInfo globalsFile = package.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == "globals.lsf"); + var globalsFile = package.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == "globals.lsf"); if (globalsFile == null) { throw new Exception("Could not find globals.lsf in savegame archive."); From deed7011b2e8323e3b7b7a82dce5f202298c65bd Mon Sep 17 00:00:00 2001 From: Norbyte Date: Thu, 21 Dec 2023 16:11:16 +0100 Subject: [PATCH 054/139] Package logic reworks --- ConverterApp/ConverterAppSettings.cs | 1 + ConverterApp/DebugDumper.cs | 41 ++- ConverterApp/OsirisPane.cs | 20 +- ConverterApp/PackagePane.cs | 65 ++--- DebuggerFrontend/DebugInfoLoader.cs | 2 +- Divine/CLI/CommandLineArguments.cs | 30 ++- Divine/CLI/CommandLinePackageProcessor.cs | 80 +++--- LSLib/LS/BinUtils.cs | 224 +++++++++++----- LSLib/LS/Enums/Compression.cs | 75 ++++++ LSLib/LS/Enums/CompressionFlags.cs | 8 - LSLib/LS/Enums/CompressionLevel.cs | 8 - LSLib/LS/Enums/CompressionMethod.cs | 8 - LSLib/LS/Enums/PackageVersion.cs | 35 ++- LSLib/LS/Mods/ModResources.cs | 12 +- LSLib/LS/PackageCommon.cs | 297 ++++++---------------- LSLib/LS/PackageFormat.cs | 79 +++--- LSLib/LS/PackageReader.cs | 202 +++++++++------ LSLib/LS/PackageWriter.cs | 204 ++++++++------- LSLib/LS/ResourceUtils.cs | 4 +- LSLib/LS/Resources/LSF/LSFCommon.cs | 4 +- LSLib/LS/Resources/LSF/LSFReader.cs | 9 +- LSLib/LS/Resources/LSF/LSFWriter.cs | 14 +- LSLib/LS/Save/SavegameHelpers.cs | 85 +++---- StatParser/StatChecker.cs | 31 +-- StoryCompiler/DebugInfoSaver.cs | 2 +- StoryCompiler/ModCompiler.cs | 117 +++------ StoryDecompiler/Program.cs | 18 +- 27 files changed, 854 insertions(+), 821 deletions(-) create mode 100644 LSLib/LS/Enums/Compression.cs delete mode 100644 LSLib/LS/Enums/CompressionFlags.cs delete mode 100644 LSLib/LS/Enums/CompressionLevel.cs delete mode 100644 LSLib/LS/Enums/CompressionMethod.cs diff --git a/ConverterApp/ConverterAppSettings.cs b/ConverterApp/ConverterAppSettings.cs index 94d8863b..e377ce57 100644 --- a/ConverterApp/ConverterAppSettings.cs +++ b/ConverterApp/ConverterAppSettings.cs @@ -1,4 +1,5 @@ using LSLib.Granny.Model; +using LSLib.LS; using LSLib.LS.Enums; using System; using System.Collections.Generic; diff --git a/ConverterApp/DebugDumper.cs b/ConverterApp/DebugDumper.cs index bee240d7..3a020106 100644 --- a/ConverterApp/DebugDumper.cs +++ b/ConverterApp/DebugDumper.cs @@ -63,7 +63,7 @@ public DebugDumperTask() private void DoExtractPackage() { var packager = new Packager(); - packager.ProgressUpdate = (file, numerator, denominator, fileInfo) => { + packager.ProgressUpdate = (file, numerator, denominator) => { ReportProgress(5 + (int)(numerator * 15 / denominator), "Extracting: " + file); }; packager.UncompressPackage(SavePackage, ExtractionPath); @@ -97,17 +97,10 @@ private Resource LoadPackagedResource(string path) } Resource resource; - Stream rsrcStream = fileInfo.MakeStream(); - try + using var rsrcStream = fileInfo.CreateContentReader(); + using (var rsrcReader = new LSFReader(rsrcStream)) { - using (var rsrcReader = new LSFReader(rsrcStream)) - { - resource = rsrcReader.Read(); - } - } - finally - { - 
fileInfo.ReleaseStream(); + resource = rsrcReader.Read(); } return resource; @@ -260,7 +253,7 @@ private void RunTasks() Stream storyStream; if (storySave != null) { - storyStream = storySave.MakeStream(); + storyStream = storySave.CreateContentReader(); } else { @@ -295,20 +288,20 @@ public void Run() { ReportProgress(0, "Reading package ..."); - using (var packageReader = new PackageReader(SaveFilePath)) - { - SavePackage = packageReader.Read(); + var packageReader = new PackageReader(); + using var savePackage = packageReader.Read(SaveFilePath); - var abstractFileInfo = SavePackage.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == "globals.lsf"); - if (abstractFileInfo == null) - { - MessageBox.Show("The specified package is not a valid savegame (globals.lsf not found)", "Load Failed", MessageBoxButtons.OK, MessageBoxIcon.Error); - return; - } + SavePackage = savePackage; + var abstractFileInfo = SavePackage.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == "globals.lsf"); + if (abstractFileInfo == null) + { + MessageBox.Show("The specified package is not a valid savegame (globals.lsf not found)", "Load Failed", MessageBoxButtons.OK, MessageBoxIcon.Error); + return; + } - RunTasks(); + RunTasks(); + SavePackage = null; - MessageBox.Show($"Savegame dumped to {DataDumpPath}."); - } + MessageBox.Show($"Savegame dumped to {DataDumpPath}."); } } diff --git a/ConverterApp/OsirisPane.cs b/ConverterApp/OsirisPane.cs index 27c54816..00b0ca33 100644 --- a/ConverterApp/OsirisPane.cs +++ b/ConverterApp/OsirisPane.cs @@ -3,7 +3,6 @@ using System.Drawing; using System.IO; using System.Linq; -using System.Runtime.CompilerServices; using System.Windows.Forms; using LSLib.LS; using LSLib.LS.Enums; @@ -81,8 +80,8 @@ private void LoadStory() public Resource LoadResourceFromSave(string path) { - var packageReader = new PackageReader(path); - Package package = packageReader.Read(); + var packageReader = new PackageReader(); + using var package = packageReader.Read(path); var abstractFileInfo = package.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == "globals.lsf"); if (abstractFileInfo == null) @@ -92,18 +91,9 @@ public Resource LoadResourceFromSave(string path) } Resource resource; - Stream rsrcStream = abstractFileInfo.MakeStream(); - try - { - using (var rsrcReader = new LSFReader(rsrcStream)) - { - resource = rsrcReader.Read(); - } - } - finally - { - abstractFileInfo.ReleaseStream(); - } + using var rsrcStream = abstractFileInfo.CreateContentReader(); + using var rsrcReader = new LSFReader(rsrcStream); + resource = rsrcReader.Read(); return resource; } diff --git a/ConverterApp/PackagePane.cs b/ConverterApp/PackagePane.cs index a395df16..6e227b88 100644 --- a/ConverterApp/PackagePane.cs +++ b/ConverterApp/PackagePane.cs @@ -32,25 +32,22 @@ public PackagePane(ISettingsDataSource settingsDataSource) #endif } - private void PackageProgressUpdate(string status, long numerator, long denominator, IAbstractFileInfo file) + private void PackageProgressUpdate(string status, long numerator, long denominator) { - if (file != null) + // Throttle the progress displays to 10 updates per second to prevent UI + // updates from slowing down the compression/decompression process + if (_displayTimer == null) { - // Throttle the progress displays to 10 updates per second to prevent UI - // updates from slowing down the compression/decompression process - if (_displayTimer == null) - { - _displayTimer = new Stopwatch(); - _displayTimer.Start(); - } - else if (_displayTimer.ElapsedMilliseconds < 100) - { - 
return; - } - else - { - _displayTimer.Restart(); - } + _displayTimer = new Stopwatch(); + _displayTimer.Start(); + } + else if (_displayTimer.ElapsedMilliseconds < 100) + { + return; + } + else + { + _displayTimer.Restart(); } packageProgressLabel.Text = status; @@ -88,10 +85,12 @@ private void extractPackageBtn_Click(object sender, EventArgs e) MessageBox.Show($"The specified file ({extractPackagePath.Text}) is not an PAK package or savegame archive.", "Extraction Failed", MessageBoxButtons.OK, MessageBoxIcon.Warning); } } +#if !DEBUG catch (Exception exc) { MessageBox.Show($"Internal error!{Environment.NewLine}{Environment.NewLine}{exc}", "Extraction Failed", MessageBoxButtons.OK, MessageBoxIcon.Error); } +#endif finally { packageProgressLabel.Text = ""; @@ -120,68 +119,70 @@ private void createPackageBtn_Click(object sender, EventArgs e) try { - var options = new PackageCreationOptions(); - options.Version = SelectedPackageVersion(); + var build = new PackageBuildData(); + build.Version = SelectedPackageVersion(); switch (compressionMethod.SelectedIndex) { case 1: { - options.Compression = CompressionMethod.Zlib; + build.Compression = CompressionMethod.Zlib; + build.CompressionLevel = LSCompressionLevel.Fast; break; } case 2: { - options.Compression = CompressionMethod.Zlib; - options.FastCompression = false; + build.Compression = CompressionMethod.Zlib; break; } case 3: { - options.Compression = CompressionMethod.LZ4; + build.Compression = CompressionMethod.LZ4; + build.CompressionLevel = LSCompressionLevel.Fast; break; } case 4: { - options.Compression = CompressionMethod.LZ4; - options.FastCompression = false; + build.Compression = CompressionMethod.LZ4; break; } } // Fallback to Zlib, if the package version doesn't support LZ4 - if (options.Compression == CompressionMethod.LZ4 && options.Version <= PackageVersion.V9) + if (build.Compression == CompressionMethod.LZ4 && build.Version <= PackageVersion.V9) { - options.Compression = CompressionMethod.Zlib; + build.Compression = CompressionMethod.Zlib; } if (solid.Checked) { - options.Flags |= PackageFlags.Solid; + build.Flags |= PackageFlags.Solid; } if (allowMemoryMapping.Checked) { - options.Flags |= PackageFlags.AllowMemoryMapping; + build.Flags |= PackageFlags.AllowMemoryMapping; } if (preloadIntoCache.Checked) { - options.Flags |= PackageFlags.Preload; + build.Flags |= PackageFlags.Preload; } - options.Priority = (byte)packagePriority.Value; + build.Priority = (byte)packagePriority.Value; var packager = new Packager(); packager.ProgressUpdate += PackageProgressUpdate; - packager.CreatePackage(createPackagePath.Text, createSrcPath.Text, options); + packager.CreatePackage(createPackagePath.Text, createSrcPath.Text, build).Wait(); MessageBox.Show("Package created successfully."); } +#if !DEBUG catch (Exception exc) { MessageBox.Show($"Internal error!{Environment.NewLine}{Environment.NewLine}{exc}", "Package Build Failed", MessageBoxButtons.OK, MessageBoxIcon.Error); } +#endif finally { packageProgressLabel.Text = ""; diff --git a/DebuggerFrontend/DebugInfoLoader.cs b/DebuggerFrontend/DebugInfoLoader.cs index cf2962fc..c6f97109 100644 --- a/DebuggerFrontend/DebugInfoLoader.cs +++ b/DebuggerFrontend/DebugInfoLoader.cs @@ -203,7 +203,7 @@ public StoryDebugInfo Load(byte[] msgPayload) var compressed = new byte[msgPayload.Length - 4]; Array.Copy(msgPayload, 0, compressed, 0, msgPayload.Length - 4); - byte flags = BinUtils.MakeCompressionFlags(LSLib.LS.Enums.CompressionMethod.LZ4, LSLib.LS.Enums.LSCompressionLevel.FastCompression); + var 
flags = BinUtils.MakeCompressionFlags(CompressionMethod.LZ4, LSCompressionLevel.Fast); byte[] decompressed = BinUtils.Decompress(compressed, (int)decompressedSize, flags); var msg = StoryDebugInfoMsg.Parser.ParseFrom(decompressed); var debugInfo = FromProtobuf(msg); diff --git a/Divine/CLI/CommandLineArguments.cs b/Divine/CLI/CommandLineArguments.cs index dffdee3d..cf1ee673 100644 --- a/Divine/CLI/CommandLineArguments.cs +++ b/Divine/CLI/CommandLineArguments.cs @@ -4,6 +4,7 @@ using System.Linq; using CommandLineParser.Arguments; using LSLib.Granny.Model; +using LSLib.LS; using LSLib.LS.Enums; namespace Divine.CLI; @@ -95,7 +96,7 @@ public class CommandLineArguments ValueOptional = false, Optional = true )] - public string CompressionMethod; + public string PakCompressionMethod; // @formatter:off [EnumeratedValueArgument(typeof(string), 'e', "gr2-options", @@ -296,33 +297,35 @@ public static ResourceFormat GetResourceFormatByString(string resourceFormat) public static Dictionary GetCompressionOptions(string compressionOption, PackageVersion packageVersion) { CompressionMethod compression; - var fastCompression = true; + LSCompressionLevel level; switch (compressionOption) { case "zlibfast": { - compression = LSLib.LS.Enums.CompressionMethod.Zlib; + compression = CompressionMethod.Zlib; + level = LSCompressionLevel.Fast; break; } case "zlib": { - compression = LSLib.LS.Enums.CompressionMethod.Zlib; - fastCompression = false; + compression = CompressionMethod.Zlib; + level = LSCompressionLevel.Default; break; } case "lz4": { - compression = LSLib.LS.Enums.CompressionMethod.LZ4; + compression = CompressionMethod.LZ4; + level = LSCompressionLevel.Fast; break; } case "lz4hc": { - compression = LSLib.LS.Enums.CompressionMethod.LZ4; - fastCompression = false; + compression = CompressionMethod.LZ4; + level = LSCompressionLevel.Default; break; } @@ -330,22 +333,23 @@ public static Dictionary GetCompressionOptions(string compressio case "none": default: { - compression = LSLib.LS.Enums.CompressionMethod.None; + compression = CompressionMethod.None; + level = LSCompressionLevel.Default; break; } } // fallback to zlib, if the package version doesn't support lz4 - if (compression == LSLib.LS.Enums.CompressionMethod.LZ4 && packageVersion <= LSLib.LS.Enums.PackageVersion.V9) + if (compression == CompressionMethod.LZ4 && packageVersion <= PackageVersion.V9) { - compression = LSLib.LS.Enums.CompressionMethod.Zlib; - fastCompression = false; + compression = CompressionMethod.Zlib; + level = LSCompressionLevel.Default; } var compressionOptions = new Dictionary { { "Compression", compression }, - { "FastCompression", fastCompression } + { "CompressionLevel", level } }; return compressionOptions; diff --git a/Divine/CLI/CommandLinePackageProcessor.cs b/Divine/CLI/CommandLinePackageProcessor.cs index a208f0c5..bd1ab938 100644 --- a/Divine/CLI/CommandLinePackageProcessor.cs +++ b/Divine/CLI/CommandLinePackageProcessor.cs @@ -37,36 +37,25 @@ private static void ExtractSingleFile(string packagePath, string destinationPath { try { - using (var reader = new PackageReader(packagePath)) + var reader = new PackageReader(); + using var package = reader.Read(packagePath); + + // Try to match by full path + var file = package.Files.Find(fileInfo => string.Compare(fileInfo.Name, packagedPath, StringComparison.OrdinalIgnoreCase) == 0 && !fileInfo.IsDeletion()); + if (file == null) { - Package package = reader.Read(); - // Try to match by full path - var file = package.Files.Find(fileInfo => string.Compare(fileInfo.Name, 
packagedPath, StringComparison.OrdinalIgnoreCase) == 0 && !fileInfo.IsDeletion()); + // Try to match by filename only + file = package.Files.Find(fileInfo => string.Compare(Path.GetFileName(fileInfo.Name), packagedPath, StringComparison.OrdinalIgnoreCase) == 0); if (file == null) { - // Try to match by filename only - file = package.Files.Find(fileInfo => string.Compare(Path.GetFileName(fileInfo.Name), packagedPath, StringComparison.OrdinalIgnoreCase) == 0); - if (file == null) - { - CommandLineLogger.LogError($"Package doesn't contain file named '{packagedPath}'"); - return; - } - } - - using (var fs = new FileStream(destinationPath, FileMode.Create, FileAccess.Write)) - { - try - { - Stream stream = file.MakeStream(); - stream.CopyTo(fs); - } - finally - { - file.ReleaseStream(); - } - + CommandLineLogger.LogError($"Package doesn't contain file named '{packagedPath}'"); + return; } } + + using var fs = new FileStream(destinationPath, FileMode.Create, FileAccess.Write); + using var source = file.CreateContentReader(); + source.CopyTo(fs); } catch (NotAPackageException) { @@ -83,20 +72,18 @@ private static void ListPackageFiles(string packagePath, Func filter(obj)); - } + if (filter != null) + { + files = files.FindAll(obj => filter(obj)); + } - foreach (var fileInfo in files.OrderBy(obj => obj.Name)) - { - Console.WriteLine($"{fileInfo.Name}\t{fileInfo.Size()}\t{fileInfo.CRC()}"); - } + foreach (var fileInfo in files.OrderBy(obj => obj.Name)) + { + Console.WriteLine($"{fileInfo.Name}\t{fileInfo.Size()}\t{fileInfo.CRC()}"); } } catch (NotAPackageException) @@ -153,21 +140,18 @@ private static void CreatePackageResource(string file = "") CommandLineLogger.LogDebug($"Using destination path: {file}"); } - var options = new PackageCreationOptions(); - options.Version = CommandLineActions.PackageVersion; - - options.Priority = (byte)CommandLineActions.PackagePriority; - - Dictionary compressionOptions = CommandLineArguments.GetCompressionOptions(Path.GetExtension(file)?.ToLower() == ".lsv" ? "zlib" : Args.CompressionMethod, options.Version); + var build = new PackageBuildData(); + build.Version = CommandLineActions.PackageVersion; + build.Priority = (byte)CommandLineActions.PackagePriority; - options.Compression = (CompressionMethod)compressionOptions["Compression"]; - options.FastCompression = (bool)compressionOptions["FastCompression"]; + Dictionary compressionOptions = CommandLineArguments.GetCompressionOptions(Path.GetExtension(file)?.ToLower() == ".lsv" ? "zlib" : Args.PakCompressionMethod, build.Version); + build.Compression = (CompressionMethod)compressionOptions["Compression"]; + build.CompressionLevel = (LSCompressionLevel)compressionOptions["CompressionLevel"]; - var fast = options.FastCompression ? 
"Fast" : "Normal"; - CommandLineLogger.LogDebug($"Using compression method: {options.Compression.ToString()} ({fast})"); + CommandLineLogger.LogDebug($"Using compression method: {build.Compression} (build.CompressionLevel)"); var packager = new Packager(); - packager.CreatePackage(file, CommandLineActions.SourcePath, options); + packager.CreatePackage(file, CommandLineActions.SourcePath, build).Wait(); CommandLineLogger.LogInfo("Package created successfully."); } diff --git a/LSLib/LS/BinUtils.cs b/LSLib/LS/BinUtils.cs index 6b240639..9c5a2034 100644 --- a/LSLib/LS/BinUtils.cs +++ b/LSLib/LS/BinUtils.cs @@ -5,9 +5,129 @@ using LSLib.LS.Enums; using System.IO.Compression; using System.Text; +using System.Threading.Tasks; +using System.Threading; +using System.IO.MemoryMappedFiles; namespace LSLib.LS; + +public class ReadOnlySubstream : Stream +{ + private readonly Stream SourceStream; + private readonly long FileOffset; + private readonly long Size; + private long CurPosition = 0; + + public ReadOnlySubstream(Stream sourceStream, long offset, long size) + { + SourceStream = sourceStream; + FileOffset = offset; + Size = size; + } + + public override bool CanRead { get { return true; } } + public override bool CanSeek { get { return false; } } + + public override int Read(byte[] buffer, int offset, int count) + { + SourceStream.Seek(FileOffset + CurPosition, SeekOrigin.Begin); + long readable = Size - CurPosition; + int bytesToRead = (readable < count) ? (int)readable : count; + var read = SourceStream.Read(buffer, offset, bytesToRead); + CurPosition += read; + return read; + } + + public override Task ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) + { + SourceStream.Seek(FileOffset + CurPosition, SeekOrigin.Begin); + long readable = Size - CurPosition; + int bytesToRead = (readable < count) ? 
(int)readable : count; + CurPosition += bytesToRead; + return SourceStream.ReadAsync(buffer, offset, bytesToRead, cancellationToken); + } + + public override long Seek(long offset, SeekOrigin origin) + { + throw new NotSupportedException(); + } + + + public override long Position + { + get { return CurPosition; } + set { throw new NotSupportedException(); } + } + + public override bool CanTimeout { get { return SourceStream.CanTimeout; } } + public override bool CanWrite { get { return false; } } + public override long Length { get { return Size; } } + public override void SetLength(long value) { throw new NotSupportedException(); } + public override void Write(byte[] buffer, int offset, int count) { throw new NotSupportedException(); } + public override void Flush() { } +} + + +public class LZ4DecompressionStream : Stream +{ + private readonly MemoryMappedViewAccessor View; + private readonly long Offset; + private readonly int Size; + private readonly int DecompressedSize; + private MemoryStream Decompressed; + + public LZ4DecompressionStream(MemoryMappedViewAccessor view, long offset, int size, int decompressedSize) + { + View = view; + Offset = offset; + Size = size; + DecompressedSize = decompressedSize; + } + + private void DoDecompression() + { + var compressed = new byte[Size]; + View.ReadArray(Offset, compressed, 0, Size); + + var decompressed = new byte[DecompressedSize]; + LZ4Codec.Decode(compressed, 0, compressed.Length, decompressed, 0, DecompressedSize, true); + Decompressed = new MemoryStream(decompressed); + } + + public override bool CanRead { get { return true; } } + public override bool CanSeek { get { return false; } } + + public override int Read(byte[] buffer, int offset, int count) + { + if (Decompressed == null) + { + DoDecompression(); + } + + return Decompressed.Read(buffer, offset, count); + } + + public override long Seek(long offset, SeekOrigin origin) + { + throw new NotSupportedException(); + } + + + public override long Position + { + get { return Decompressed?.Position ?? 
0; } + set { throw new NotSupportedException(); } + } + + public override bool CanTimeout { get { return false; } } + public override bool CanWrite { get { return false; } } + public override long Length { get { return DecompressedSize; } } + public override void SetLength(long value) { throw new NotSupportedException(); } + public override void Write(byte[] buffer, int offset, int count) { throw new NotSupportedException(); } + public override void Flush() { } +} + public static class BinUtils { public static T ReadStruct(BinaryReader reader) @@ -284,54 +404,14 @@ public static void WriteAttribute(BinaryWriter writer, NodeAttribute attr) } } - public static CompressionMethod CompressionFlagsToMethod(byte flags) - { - return (flags & 0x0f) switch - { - (int)CompressionMethod.None => CompressionMethod.None, - (int)CompressionMethod.Zlib => CompressionMethod.Zlib, - (int)CompressionMethod.LZ4 => CompressionMethod.LZ4, - _ => throw new ArgumentException("Invalid compression method") - }; - } - - public static LSCompressionLevel CompressionFlagsToLevel(byte flags) + public static CompressionFlags MakeCompressionFlags(CompressionMethod method, LSCompressionLevel level) { - return (flags & 0xf0) switch - { - (int)CompressionFlags.FastCompress => LSCompressionLevel.FastCompression, - (int)CompressionFlags.DefaultCompress => LSCompressionLevel.DefaultCompression, - (int)CompressionFlags.MaxCompressionLevel => LSCompressionLevel.MaxCompression, - _ => throw new ArgumentException("Invalid compression flags") - }; + return method.ToFlags() | level.ToFlags(); } - public static byte MakeCompressionFlags(CompressionMethod method, LSCompressionLevel level) + public static byte[] Decompress(byte[] compressed, int decompressedSize, CompressionFlags compression, bool chunked = false) { - if (method == CompressionMethod.None) - { - return 0; - } - - byte flags = 0; - if (method == CompressionMethod.Zlib) - flags = 0x1; - else if (method == CompressionMethod.LZ4) - flags = 0x2; - - if (level == LSCompressionLevel.FastCompression) - flags |= 0x10; - else if (level == LSCompressionLevel.DefaultCompression) - flags |= 0x20; - else if (level == LSCompressionLevel.MaxCompression) - flags |= 0x40; - - return flags; - } - - public static byte[] Decompress(byte[] compressed, int decompressedSize, byte compressionFlags, bool chunked = false) - { - switch (CompressionFlagsToMethod(compressionFlags)) + switch (compression.Method()) { case CompressionMethod.None: return compressed; @@ -362,46 +442,68 @@ public static byte[] Decompress(byte[] compressed, int decompressedSize, byte co else { var decompressed = new byte[decompressedSize]; - LZ4Codec.Decode(compressed, 0, compressed.Length, decompressed, 0, decompressedSize, true); + var resultSize = LZ4Codec.Decode(compressed, 0, compressed.Length, decompressed, 0, decompressedSize, true); + if (resultSize != decompressedSize) + { + string msg = $"LZ4 compressor disagrees about the size of compressed buffer; expected {decompressedSize}, got {resultSize}"; + throw new InvalidDataException(msg); + } return decompressed; } default: - { - var msg = String.Format("No decompressor found for this format: {0}", compressionFlags); - throw new InvalidDataException(msg); - } + throw new InvalidDataException($"No decompressor found for this format: {compression}"); + } + } + + public static Stream Decompress(MemoryMappedFile file, MemoryMappedViewAccessor view, long sourceOffset, + int sourceSize, int decompressedSize, CompressionFlags compression) + { + switch (compression.Method()) + 
{ + case CompressionMethod.None: + return file.CreateViewStream(sourceOffset, sourceSize, MemoryMappedFileAccess.Read); + + case CompressionMethod.Zlib: + var sourceStream = file.CreateViewStream(sourceOffset, sourceSize, MemoryMappedFileAccess.Read); + return new ZLibStream(sourceStream, CompressionMode.Decompress); + + case CompressionMethod.LZ4: + return new LZ4DecompressionStream(view, sourceOffset, sourceSize, decompressedSize); + + default: + throw new InvalidDataException($"No decompressor found for this format: {compression}"); } } - public static byte[] Compress(byte[] uncompressed, byte compressionFlags) + public static byte[] Compress(byte[] uncompressed, CompressionFlags compression) { - return Compress(uncompressed, (CompressionMethod)(compressionFlags & 0x0F), CompressionFlagsToLevel(compressionFlags)); + return Compress(uncompressed, compression.Method(), compression.Level()); } - public static byte[] Compress(byte[] uncompressed, CompressionMethod method, LSCompressionLevel compressionLevel, bool chunked = false) + public static byte[] Compress(byte[] uncompressed, CompressionMethod method, LSCompressionLevel level, bool chunked = false) { return method switch { CompressionMethod.None => uncompressed, - CompressionMethod.Zlib => CompressZlib(uncompressed, compressionLevel), - CompressionMethod.LZ4 => CompressLZ4(uncompressed, compressionLevel, chunked), + CompressionMethod.Zlib => CompressZlib(uncompressed, level), + CompressionMethod.LZ4 => CompressLZ4(uncompressed, level, chunked), _ => throw new ArgumentException("Invalid compression method specified") }; } - public static byte[] CompressZlib(byte[] uncompressed, LSCompressionLevel compressionLevel) + public static byte[] CompressZlib(byte[] uncompressed, LSCompressionLevel level) { - var level = compressionLevel switch + var zLevel = level switch { - LSCompressionLevel.FastCompression => CompressionLevel.Fastest, - LSCompressionLevel.DefaultCompression => CompressionLevel.Optimal, - LSCompressionLevel.MaxCompression => CompressionLevel.SmallestSize, + LSCompressionLevel.Fast => CompressionLevel.Fastest, + LSCompressionLevel.Default => CompressionLevel.Optimal, + LSCompressionLevel.Max => CompressionLevel.SmallestSize, _ => throw new ArgumentException() }; using var outputStream = new MemoryStream(); - using (var compressor = new ZLibStream(outputStream, level, true)) + using (var compressor = new ZLibStream(outputStream, zLevel, true)) { compressor.Write(uncompressed, 0, uncompressed.Length); } @@ -416,7 +518,7 @@ public static byte[] CompressLZ4(byte[] uncompressed, LSCompressionLevel compres { return Native.LZ4FrameCompressor.Compress(uncompressed); } - else if (compressionLevel == LSCompressionLevel.FastCompression) + else if (compressionLevel == LSCompressionLevel.Fast) { return LZ4Codec.Encode(uncompressed, 0, uncompressed.Length); } diff --git a/LSLib/LS/Enums/Compression.cs b/LSLib/LS/Enums/Compression.cs new file mode 100644 index 00000000..aa8884f6 --- /dev/null +++ b/LSLib/LS/Enums/Compression.cs @@ -0,0 +1,75 @@ +using System; + +namespace LSLib.LS; + +public enum CompressionMethod +{ + None, + Zlib, + LZ4 +}; + +public enum LSCompressionLevel +{ + Fast, + Default, + Max +}; + +public enum CompressionFlags : byte +{ + MethodNone = 0, + MethodZlib = 1, + MethodLZ4 = 2, + FastCompress = 0x10, + DefaultCompress = 0x20, + MaxCompress = 0x40 +}; + +public static class CompressionFlagExtensions +{ + public static CompressionMethod Method(this CompressionFlags f) + { + return (CompressionFlags)((byte)f & 0x0F) 
switch + { + CompressionFlags.MethodNone => CompressionMethod.None, + CompressionFlags.MethodZlib => CompressionMethod.Zlib, + CompressionFlags.MethodLZ4 => CompressionMethod.LZ4, + _ => throw new NotSupportedException($"Unsupported compression method: {(byte)f & 0x0F}") + }; + } + + public static LSCompressionLevel Level(this CompressionFlags f) + { + return (CompressionFlags)((byte)f & 0xF0) switch + { + CompressionFlags.FastCompress => LSCompressionLevel.Fast, + CompressionFlags.DefaultCompress => LSCompressionLevel.Default, + CompressionFlags.MaxCompress => LSCompressionLevel.Max, + // Ignore unknown compression levels since they have no impact on actual decompression logic + _ => LSCompressionLevel.Default + }; + } + + public static CompressionFlags ToFlags(this CompressionMethod method) + { + return method switch + { + CompressionMethod.None => CompressionFlags.MethodNone, + CompressionMethod.Zlib => CompressionFlags.MethodZlib, + CompressionMethod.LZ4 => CompressionFlags.MethodLZ4, + _ => throw new NotSupportedException($"Unsupported compression method: {method}") + }; + } + + public static CompressionFlags ToFlags(this LSCompressionLevel level) + { + return level switch + { + LSCompressionLevel.Fast => CompressionFlags.FastCompress, + LSCompressionLevel.Default => CompressionFlags.DefaultCompress, + LSCompressionLevel.Max => CompressionFlags.MaxCompress, + _ => throw new NotSupportedException($"Unsupported compression level: {level}") + }; + } +} diff --git a/LSLib/LS/Enums/CompressionFlags.cs b/LSLib/LS/Enums/CompressionFlags.cs deleted file mode 100644 index 131976e0..00000000 --- a/LSLib/LS/Enums/CompressionFlags.cs +++ /dev/null @@ -1,8 +0,0 @@ -namespace LSLib.LS.Enums; - -public enum CompressionFlags -{ - FastCompress = 0x10, - DefaultCompress = 0x20, - MaxCompressionLevel = 0x40 -}; \ No newline at end of file diff --git a/LSLib/LS/Enums/CompressionLevel.cs b/LSLib/LS/Enums/CompressionLevel.cs deleted file mode 100644 index 669ad4c8..00000000 --- a/LSLib/LS/Enums/CompressionLevel.cs +++ /dev/null @@ -1,8 +0,0 @@ -namespace LSLib.LS.Enums; - -public enum LSCompressionLevel -{ - FastCompression, - DefaultCompression, - MaxCompression -}; \ No newline at end of file diff --git a/LSLib/LS/Enums/CompressionMethod.cs b/LSLib/LS/Enums/CompressionMethod.cs deleted file mode 100644 index 84ef27de..00000000 --- a/LSLib/LS/Enums/CompressionMethod.cs +++ /dev/null @@ -1,8 +0,0 @@ -namespace LSLib.LS.Enums; - -public enum CompressionMethod -{ - None = 0, - Zlib = 1, - LZ4 = 2 -}; \ No newline at end of file diff --git a/LSLib/LS/Enums/PackageVersion.cs b/LSLib/LS/Enums/PackageVersion.cs index 35613c7d..6ebddeba 100644 --- a/LSLib/LS/Enums/PackageVersion.cs +++ b/LSLib/LS/Enums/PackageVersion.cs @@ -1,4 +1,6 @@ -namespace LSLib.LS.Enums; +using System; + +namespace LSLib.LS.Enums; public enum PackageVersion { @@ -10,3 +12,34 @@ public enum PackageVersion V16 = 16, // BG3 EA Patch4 V18 = 18 // BG3 Release }; +public static class PackageVersionExtensions +{ + public static bool HasCrc(this PackageVersion ver) + { + return ver >= PackageVersion.V10 && ver <= PackageVersion.V16; + } + + public static long MaxPackageSize(this PackageVersion ver) + { + if (ver <= PackageVersion.V15) + { + return 0x40000000; + } + else + { + return 0x100000000; + } + } + + public static int PaddingSize(this PackageVersion ver) + { + if (ver <= PackageVersion.V9) + { + return 0x1000; + } + else + { + return 0x40; + } + } +} diff --git a/LSLib/LS/Mods/ModResources.cs b/LSLib/LS/Mods/ModResources.cs index 
39b7954c..8838e845 100644 --- a/LSLib/LS/Mods/ModResources.cs +++ b/LSLib/LS/Mods/ModResources.cs @@ -28,7 +28,7 @@ public class ModInfo(string name) public class ModResources : IDisposable { public Dictionary Mods = []; - public List LoadedPackages = []; + public List LoadedPackages = []; public void Dispose() { @@ -254,9 +254,9 @@ private void DiscoverPackagedFile(IAbstractFileInfo file) public void DiscoverPackage(string packagePath) { - var reader = new PackageReader(packagePath); - Resources.LoadedPackages.Add(reader); - var package = reader.Read(); + var reader = new PackageReader(); + var package = reader.Read(packagePath); + Resources.LoadedPackages.Add(package); foreach (var file in package.Files) { @@ -302,8 +302,8 @@ public void DiscoverBuiltinPackages(string gameDataPath) // Don't load 2nd, 3rd, ... parts of a multi-part archive && !archivePartRe.IsMatch(baseName)) { - var reader = new PackageReader(path, true); - var package = reader.Read(); + var reader = new PackageReader(); + var package = reader.Read(path, true); packagePriorities.Add(new Tuple(path, package.Metadata.Priority)); } } diff --git a/LSLib/LS/PackageCommon.cs b/LSLib/LS/PackageCommon.cs index cd4a5eb0..7158b0db 100644 --- a/LSLib/LS/PackageCommon.cs +++ b/LSLib/LS/PackageCommon.cs @@ -1,9 +1,9 @@ using System; using System.Collections.Generic; using System.IO; -using System.IO.Hashing; +using System.IO.MemoryMappedFiles; using System.Linq; -using System.Text; +using System.Threading.Tasks; using LSLib.LS.Enums; namespace LSLib.LS; @@ -13,174 +13,55 @@ public interface IAbstractFileInfo public abstract String GetName(); public abstract UInt64 Size(); public abstract UInt32 CRC(); - public abstract Stream MakeStream(); - public abstract void ReleaseStream(); + public abstract Stream CreateContentReader(); public abstract bool IsDeletion(); public string Name { get { return GetName(); } } } -public class UncompressedPackagedFileStream : Stream +public class PackagedFileInfo : PackagedFileInfoCommon, IAbstractFileInfo { - private readonly Stream PackageStream; - private readonly PackagedFileInfo FileInfo; - - public UncompressedPackagedFileStream(Stream packageStream, PackagedFileInfo fileInfo) - { - PackageStream = packageStream; - FileInfo = fileInfo; - PackageStream.Seek((long)fileInfo.OffsetInFile, SeekOrigin.Begin); - - if ((CompressionMethod)(FileInfo.Flags & 0x0F) != CompressionMethod.None) - { - throw new ArgumentException("We only support uncompressed files!"); - } - } - - public override bool CanRead { get { return true; } } - public override bool CanSeek { get { return false; } } - - public override int Read(byte[] buffer, int offset, int count) - { - if (PackageStream.Position < (long)FileInfo.OffsetInFile - || PackageStream.Position > (long)FileInfo.OffsetInFile + (long)FileInfo.SizeOnDisk) - { - throw new Exception("Stream at unexpected position while reading packaged file?"); - } - - long readable = (long)FileInfo.SizeOnDisk - Position; - int bytesToRead = (readable < count) ? 
(int)readable : count; - return PackageStream.Read(buffer, offset, bytesToRead); - } - - public override long Seek(long offset, SeekOrigin origin) - { - throw new NotSupportedException(); - } - - - public override long Position - { - get { return PackageStream.Position - (long)FileInfo.OffsetInFile; } - set { throw new NotSupportedException(); } - } - - public override bool CanTimeout { get { return PackageStream.CanTimeout; } } - public override bool CanWrite { get { return false; } } - public override long Length { get { return (long)FileInfo.SizeOnDisk; } } - public override void SetLength(long value) { throw new NotSupportedException(); } - public override void Write(byte[] buffer, int offset, int count) { throw new NotSupportedException(); } - public override void Flush() { } -} - -public class PackagedFileInfo : PackagedFileInfoCommon, IAbstractFileInfo, IDisposable -{ - public Stream PackageStream; + public MemoryMappedFile PackageFile; + public MemoryMappedViewAccessor PackageView; public bool Solid; public ulong SolidOffset; public Stream SolidStream; - private Stream _uncompressedStream; - - public void Dispose() - { - ReleaseStream(); - } public String GetName() => FileName; - public UInt64 Size() => (Flags & 0x0F) == 0 ? SizeOnDisk : UncompressedSize; + public UInt64 Size() => Flags.Method() == CompressionMethod.None ? SizeOnDisk : UncompressedSize; public UInt32 CRC() => Crc; - public Stream MakeStream() + public Stream CreateContentReader() { if (IsDeletion()) { throw new InvalidOperationException("Cannot open file stream for a deleted file"); } - if (_uncompressedStream != null) - { - return _uncompressedStream; - } - - if ((CompressionMethod)(Flags & 0x0F) == CompressionMethod.None && !Solid) - { - // Use direct stream read for non-compressed files - _uncompressedStream = new UncompressedPackagedFileStream(PackageStream, this); - return _uncompressedStream; - } - - if (SizeOnDisk > 0x7fffffff) - { - throw new InvalidDataException($"File '{FileName}' is over 2GB ({SizeOnDisk} bytes), which is not supported yet!"); - } - - var compressed = new byte[SizeOnDisk]; - - PackageStream.Seek((long)OffsetInFile, SeekOrigin.Begin); - int readSize = PackageStream.Read(compressed, 0, (int)SizeOnDisk); - if (readSize != (long)SizeOnDisk) - { - string msg = $"Failed to read {SizeOnDisk} bytes from archive (only got {readSize})"; - throw new InvalidDataException(msg); - } - - if (Crc != 0) - { - UInt32 computedCrc = Crc32.HashToUInt32(compressed); - if (computedCrc != Crc) - { - string msg = $"CRC check failed on file '{FileName}', archive is possibly corrupted. 
Expected {Crc,8:X}, got {computedCrc,8:X}"; - throw new InvalidDataException(msg); - } - } - if (Solid) { SolidStream.Seek((long)SolidOffset, SeekOrigin.Begin); - byte[] uncompressed = new byte[UncompressedSize]; - SolidStream.Read(uncompressed, 0, (int)UncompressedSize); - _uncompressedStream = new MemoryStream(uncompressed); + return new ReadOnlySubstream(SolidStream, (long)SolidOffset, (long)UncompressedSize); } else { - byte[] uncompressed = BinUtils.Decompress(compressed, (int)Size(), (byte)Flags); - _uncompressedStream = new MemoryStream(uncompressed); - } - - return _uncompressedStream; - } - - public void ReleaseStream() - { - if (_uncompressedStream == null) - { - return; + return BinUtils.Decompress(PackageFile, PackageView, (long)OffsetInFile, (int)SizeOnDisk, (int)UncompressedSize, Flags); } - - _uncompressedStream.Dispose(); - _uncompressedStream = null; } - internal static PackagedFileInfo CreateFromEntry(ILSPKFile entry, Stream dataStream) + internal static PackagedFileInfo CreateFromEntry(ILSPKFile entry, MemoryMappedFile file, MemoryMappedViewAccessor view) { var info = new PackagedFileInfo { - PackageStream = dataStream, + PackageFile = file, + PackageView = view, Solid = false }; entry.ToCommon(info); - - var compressionMethod = info.Flags & 0x0F; - if (compressionMethod > 2 || (info.Flags & ~0x7F) != 0) - { - string msg = $"File '{info.FileName}' has unsupported flags: {info.Flags}"; - throw new InvalidDataException(msg); - } - return info; } @@ -197,17 +78,11 @@ public bool IsDeletion() } } -public class FilesystemFileInfo : IAbstractFileInfo, IDisposable +public class FilesystemFileInfo : IAbstractFileInfo { public long CachedSize; public string FilesystemPath; public string FileName; - private FileStream _stream; - - public void Dispose() - { - ReleaseStream(); - } public String GetName() => FileName; @@ -215,13 +90,7 @@ public void Dispose() public UInt32 CRC() => throw new NotImplementedException("!"); - public Stream MakeStream() => _stream ??= File.Open(FilesystemPath, FileMode.Open, FileAccess.Read, FileShare.Read); - - public void ReleaseStream() - { - _stream?.Dispose(); - _stream = null; - } + public Stream CreateContentReader() => File.Open(FilesystemPath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite); public static FilesystemFileInfo CreateFromEntry(string filesystemPath, string name) { @@ -253,11 +122,7 @@ public class StreamFileInfo : IAbstractFileInfo public UInt32 CRC() => throw new NotImplementedException("!"); - public Stream MakeStream() => Stream; - - public void ReleaseStream() - { - } + public Stream CreateContentReader() => Stream; public static StreamFileInfo CreateFromStream(Stream stream, string name) { @@ -275,43 +140,76 @@ public bool IsDeletion() } } -public class Package +public class PackageBuildInputFile { - public const PackageVersion CurrentVersion = PackageVersion.V18; + public string Path; + public string FilesystemPath; + public byte[] Body; - public readonly static byte[] Signature = [ 0x4C, 0x53, 0x50, 0x4B ]; + public Stream MakeInputStream() + { + if (Body != null) + { + return new MemoryStream(Body); + } + else + { + return new FileStream(FilesystemPath, FileMode.Open, FileAccess.Read, FileShare.Read); + } + } - public PackageHeaderCommon Metadata = new(); - public List Files = []; - public PackageVersion Version; + public long Size() + { + if (Body != null) + { + return Body.Length; + } + else + { + return new FileInfo(FilesystemPath).Length; + } + } + + public static PackageBuildInputFile CreateFromBlob(byte[] body, 
string path) + { + return new PackageBuildInputFile + { + Path = path, + Body = body + }; + } - public static string MakePartFilename(string path, int part) + public static PackageBuildInputFile CreateFromFilesystem(string filesystemPath, string path) { - string dirName = Path.GetDirectoryName(path); - string baseName = Path.GetFileNameWithoutExtension(path); - string extension = Path.GetExtension(path); - return $"{dirName}/{baseName}_{part}{extension}"; + return new PackageBuildInputFile + { + Path = path, + FilesystemPath = filesystemPath + }; } } -public class PackageCreationOptions +public class PackageBuildData { - public PackageVersion Version = PackageVersion.V16; + public PackageVersion Version = PackageHeaderCommon.CurrentVersion; public CompressionMethod Compression = CompressionMethod.None; - public bool FastCompression = true; + public LSCompressionLevel CompressionLevel = LSCompressionLevel.Default; public PackageFlags Flags = 0; public byte Priority = 0; + // Calculate full archive checksum? + public bool Hash = false; + public List Files = []; } public class Packager { - public delegate void ProgressUpdateDelegate(string status, long numerator, long denominator, IAbstractFileInfo file); + public delegate void ProgressUpdateDelegate(string status, long numerator, long denominator); public ProgressUpdateDelegate ProgressUpdate = delegate { }; - private void WriteProgressUpdate(IAbstractFileInfo file, long numerator, long denominator) + private void WriteProgressUpdate(PackageBuildInputFile file, long numerator, long denominator) { - ProgressUpdate(file.Name, numerator, denominator, file); + ProgressUpdate(file.Path, numerator, denominator); } public void UncompressPackage(Package package, string outputPath, Func filter = null) @@ -331,10 +229,9 @@ public void UncompressPackage(Package package, string outputPath, Func (long)p.Size()); long currentSize = 0; - var buffer = new byte[32768]; foreach (var file in files) { - ProgressUpdate(file.Name, currentSize, totalSize, file); + ProgressUpdate(file.Name, currentSize, totalSize); currentSize += (long)file.Size(); if (file.IsDeletion()) continue; @@ -343,70 +240,44 @@ public void UncompressPackage(Package package, string outputPath, Func 0) - { - outFile.Write(buffer, 0, read); - } - } - finally - { - file.ReleaseStream(); - } + using var inStream = file.CreateContentReader(); + using var outFile = File.Open(outPath, FileMode.Create, FileAccess.Write); + inStream.CopyTo(outFile); } } public void UncompressPackage(string packagePath, string outputPath, Func filter = null) { - ProgressUpdate("Reading package headers ...", 0, 1, null); - using var reader = new PackageReader(packagePath); - Package package = reader.Read(); + ProgressUpdate("Reading package headers ...", 0, 1); + var reader = new PackageReader(); + using var package = reader.Read(packagePath); UncompressPackage(package, outputPath, filter); } - private static Package CreatePackageFromPath(string path) + private static void AddFilesFromPath(PackageBuildData build, string path) { - var package = new Package(); - if (!path.EndsWith(Path.DirectorySeparatorChar.ToString(), StringComparison.InvariantCultureIgnoreCase)) { path += Path.DirectorySeparatorChar; } - Dictionary files = Directory.EnumerateFiles(path, "*.*", SearchOption.AllDirectories) - .ToDictionary(k => k.Replace(path, string.Empty), v => v); - - foreach (KeyValuePair file in files) - { - FilesystemFileInfo fileInfo = FilesystemFileInfo.CreateFromEntry(file.Value, file.Key); - package.Files.Add(fileInfo); - 
fileInfo.Dispose(); - } - - return package; + foreach (var file in Directory.EnumerateFiles(path, "*.*", SearchOption.AllDirectories)) + { + var name = Path.GetRelativePath(path, file); + build.Files.Add(PackageBuildInputFile.CreateFromFilesystem(file, name)); + } } - public void CreatePackage(string packagePath, string inputPath, PackageCreationOptions options) + public async Task CreatePackage(string packagePath, string inputPath, PackageBuildData build) { FileManager.TryToCreateDirectory(packagePath); - ProgressUpdate("Enumerating files ...", 0, 1, null); - Package package = CreatePackageFromPath(inputPath); - package.Metadata.Flags = options.Flags; - package.Metadata.Priority = options.Priority; + ProgressUpdate("Enumerating files ...", 0, 1); + AddFilesFromPath(build, inputPath); - ProgressUpdate("Creating archive ...", 0, 1, null); - using var writer = new PackageWriter(package, packagePath); + ProgressUpdate("Creating archive ...", 0, 1); + using var writer = new PackageWriter(build, packagePath); writer.WriteProgress += WriteProgressUpdate; - writer.Version = options.Version; - writer.Compression = options.Compression; - writer.LSCompressionLevel = options.FastCompression ? LSCompressionLevel.FastCompression : LSCompressionLevel.DefaultCompression; writer.Write(); } } diff --git a/LSLib/LS/PackageFormat.cs b/LSLib/LS/PackageFormat.cs index 703b47ba..074b9f01 100644 --- a/LSLib/LS/PackageFormat.cs +++ b/LSLib/LS/PackageFormat.cs @@ -1,13 +1,16 @@ -using LSLib.Granny; +using LSLib.Granny.GR2; using LSLib.LS.Enums; using System; +using System.Reflection.PortableExecutable; using System.Runtime.InteropServices; -using System.Text; namespace LSLib.LS; public class PackageHeaderCommon { + public const PackageVersion CurrentVersion = PackageVersion.V18; + public const UInt32 Signature = 0x4B50534C; + public UInt32 Version; public UInt64 FileListOffset; // Size of file list; used for legacy (<= v10) packages only @@ -155,20 +158,18 @@ public static ILSPKHeader FromCommonHeader(PackageHeaderCommon h) } [StructLayout(LayoutKind.Sequential, Pack = 1)] -internal struct LSPKHeader15 : ILSPKHeader +internal unsafe struct LSPKHeader15 : ILSPKHeader { public UInt32 Version; public UInt64 FileListOffset; public UInt32 FileListSize; public Byte Flags; public Byte Priority; - - [MarshalAs(UnmanagedType.ByValArray, SizeConst = 16)] - public byte[] Md5; + public fixed byte Md5[16]; public readonly PackageHeaderCommon ToCommonHeader() { - return new PackageHeaderCommon + var header = new PackageHeaderCommon { Version = Version, DataOffset = 0, @@ -177,41 +178,48 @@ public readonly PackageHeaderCommon ToCommonHeader() NumParts = 1, Flags = (PackageFlags)Flags, Priority = Priority, - Md5 = Md5 + Md5 = new byte[16] }; + + fixed (byte* md = Md5) + { + Marshal.Copy(new IntPtr(md), header.Md5, 0, 0x10); + } + + return header; } public static ILSPKHeader FromCommonHeader(PackageHeaderCommon h) { - return new LSPKHeader15 + var header = new LSPKHeader15 { Version = h.Version, FileListOffset = (UInt32)h.FileListOffset, FileListSize = h.FileListSize, Flags = (byte)h.Flags, - Priority = h.Priority, - Md5 = h.Md5 + Priority = h.Priority }; + + Marshal.Copy(h.Md5, 0, new IntPtr(header.Md5), 0x10); + return header; } } [StructLayout(LayoutKind.Sequential, Pack = 1)] -internal struct LSPKHeader16 : ILSPKHeader +internal unsafe struct LSPKHeader16 : ILSPKHeader { public UInt32 Version; public UInt64 FileListOffset; public UInt32 FileListSize; public Byte Flags; public Byte Priority; - - 
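With the options folded into PackageBuildData, building a package becomes a single asynchronous call. A sketch of packing a directory into a .pak; the paths and compression settings are illustrative only:

using System;
using System.Threading.Tasks;
using LSLib.LS;
using LSLib.LS.Enums;

static async Task BuildPakAsync()
{
    var build = new PackageBuildData
    {
        Version = PackageHeaderCommon.CurrentVersion,   // V18
        Compression = CompressionMethod.LZ4,
        CompressionLevel = LSCompressionLevel.Default,
        Hash = true                                     // also emit the archive MD5
    };

    var packager = new Packager();
    packager.ProgressUpdate = (status, numerator, denominator) =>
        Console.WriteLine($"{status}: {numerator}/{denominator}");

    // Hypothetical output package and input directory.
    await packager.CreatePackage(@"C:\Out\MyMod.pak", @"C:\Work\MyMod", build);
}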
[MarshalAs(UnmanagedType.ByValArray, SizeConst = 16)] - public byte[] Md5; + public fixed byte Md5[16]; public UInt16 NumParts; public readonly PackageHeaderCommon ToCommonHeader() { - return new PackageHeaderCommon + var header = new PackageHeaderCommon { Version = Version, FileListOffset = FileListOffset, @@ -219,22 +227,31 @@ public readonly PackageHeaderCommon ToCommonHeader() NumParts = NumParts, Flags = (PackageFlags)Flags, Priority = Priority, - Md5 = Md5 + Md5 = new byte[16] }; + + fixed (byte* md = Md5) + { + Marshal.Copy(new IntPtr(md), header.Md5, 0, 0x10); + } + + return header; } public static ILSPKHeader FromCommonHeader(PackageHeaderCommon h) { - return new LSPKHeader16 + var header = new LSPKHeader16 { Version = h.Version, FileListOffset = (UInt32)h.FileListOffset, FileListSize = h.FileListSize, Flags = (byte)h.Flags, Priority = h.Priority, - Md5 = h.Md5, NumParts = (UInt16)h.NumParts }; + + Marshal.Copy(h.Md5, 0, new IntPtr(header.Md5), 0x10); + return header; } } @@ -262,7 +279,7 @@ abstract public class PackagedFileInfoCommon public string FileName; public UInt32 ArchivePart; public UInt32 Crc; - public Byte Flags; + public CompressionFlags Flags; public UInt64 OffsetInFile; public UInt64 SizeOnDisk; public UInt64 UncompressedSize; @@ -291,7 +308,7 @@ public readonly void ToCommon(PackagedFileInfoCommon info) info.FileName = BinUtils.NullTerminatedBytesToString(Name); info.ArchivePart = ArchivePart; info.Crc = 0; - info.Flags = UncompressedSize > 0 ? BinUtils.MakeCompressionFlags(CompressionMethod.Zlib, LSCompressionLevel.DefaultCompression) : (byte)0; + info.Flags = UncompressedSize > 0 ? BinUtils.MakeCompressionFlags(CompressionMethod.Zlib, LSCompressionLevel.Default) : 0; info.OffsetInFile = OffsetInFile; info.SizeOnDisk = SizeOnDisk; info.UncompressedSize = UncompressedSize; @@ -304,7 +321,7 @@ public static ILSPKFile FromCommon(PackagedFileInfoCommon info) Name = BinUtils.StringToNullTerminatedBytes(info.FileName, 256), OffsetInFile = (uint)info.OffsetInFile, SizeOnDisk = (uint)info.SizeOnDisk, - UncompressedSize = (info.Flags & 0x0F) == 0 ? 0 : (uint)info.UncompressedSize, + UncompressedSize = info.Flags.Method() == CompressionMethod.None ? 0 : (uint)info.UncompressedSize, ArchivePart = info.ArchivePart }; } @@ -330,7 +347,7 @@ public readonly void ToCommon(PackagedFileInfoCommon info) info.FileName = BinUtils.NullTerminatedBytesToString(Name); info.ArchivePart = ArchivePart; info.Crc = Crc; - info.Flags = (Byte)Flags; + info.Flags = (CompressionFlags)Flags; info.OffsetInFile = OffsetInFile; info.SizeOnDisk = SizeOnDisk; info.UncompressedSize = UncompressedSize; @@ -343,9 +360,9 @@ public static ILSPKFile FromCommon(PackagedFileInfoCommon info) Name = BinUtils.StringToNullTerminatedBytes(info.FileName, 256), OffsetInFile = (uint)info.OffsetInFile, SizeOnDisk = (uint)info.SizeOnDisk, - UncompressedSize = (info.Flags & 0x0F) == 0 ? 0 : (uint)info.UncompressedSize, + UncompressedSize = info.Flags.Method() == CompressionMethod.None ? 
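Since the file-table flags are now typed as CompressionFlags rather than a raw Byte, callers query the compression method through Method() instead of masking with 0x0F. A small round-trip sketch, assuming MakeCompressionFlags returns the typed flags as it is used above:

using LSLib.LS;
using LSLib.LS.Enums;

// Pack method + level into the on-disk flag value...
CompressionFlags flags = BinUtils.MakeCompressionFlags(CompressionMethod.LZ4, LSCompressionLevel.Default);

// ...and read the method back without manual bit masking.
bool isStoredUncompressed = flags.Method() == CompressionMethod.None;   // false here
byte legacyByte = (byte)flags;                                          // cast when a header still wants the raw byte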
0 : (uint)info.UncompressedSize, ArchivePart = info.ArchivePart, - Flags = info.Flags, + Flags = (byte)info.Flags, Crc = info.Crc }; } @@ -372,7 +389,7 @@ public readonly void ToCommon(PackagedFileInfoCommon info) info.FileName = BinUtils.NullTerminatedBytesToString(Name); info.ArchivePart = ArchivePart; info.Crc = Crc; - info.Flags = (Byte)Flags; + info.Flags = (CompressionFlags)Flags; info.OffsetInFile = OffsetInFile; info.SizeOnDisk = SizeOnDisk; info.UncompressedSize = UncompressedSize; @@ -385,9 +402,9 @@ public static ILSPKFile FromCommon(PackagedFileInfoCommon info) Name = BinUtils.StringToNullTerminatedBytes(info.FileName, 256), OffsetInFile = (uint)info.OffsetInFile, SizeOnDisk = (uint)info.SizeOnDisk, - UncompressedSize = (info.Flags & 0x0F) == 0 ? 0 : (uint)info.UncompressedSize, + UncompressedSize = info.Flags.Method() == CompressionMethod.None ? 0 : (uint)info.UncompressedSize, ArchivePart = info.ArchivePart, - Flags = info.Flags, + Flags = (Byte)info.Flags, Crc = info.Crc, Unknown2 = 0 }; @@ -414,7 +431,7 @@ public readonly void ToCommon(PackagedFileInfoCommon info) info.FileName = BinUtils.NullTerminatedBytesToString(Name); info.ArchivePart = ArchivePart; info.Crc = 0; - info.Flags = Flags; + info.Flags = (CompressionFlags)Flags; info.OffsetInFile = OffsetInFile1 | ((ulong)OffsetInFile2 << 32); info.SizeOnDisk = SizeOnDisk; info.UncompressedSize = UncompressedSize; @@ -428,9 +445,9 @@ public static ILSPKFile FromCommon(PackagedFileInfoCommon info) OffsetInFile1 = (uint)(info.OffsetInFile & 0xffffffff), OffsetInFile2 = (ushort)((info.OffsetInFile >> 32) & 0xffff), ArchivePart = (byte)info.ArchivePart, - Flags = info.Flags, + Flags = (byte)info.Flags, SizeOnDisk = (uint)info.SizeOnDisk, - UncompressedSize = (info.Flags & 0x0F) == 0 ? 0 : (uint)info.UncompressedSize + UncompressedSize = info.Flags.Method() == CompressionMethod.None ? 0 : (uint)info.UncompressedSize }; } diff --git a/LSLib/LS/PackageReader.cs b/LSLib/LS/PackageReader.cs index 89895034..193d910b 100644 --- a/LSLib/LS/PackageReader.cs +++ b/LSLib/LS/PackageReader.cs @@ -5,6 +5,10 @@ using System.Text; using LZ4; using LSLib.LS.Enums; +using System.IO.MemoryMappedFiles; +using System.Reflection; +using LSLib.VirtualTextures; +using System.Collections.Generic; namespace LSLib.LS; @@ -23,117 +27,164 @@ public NotAPackageException(string message, Exception innerException) : base(mes } } -public class PackageReader(string path, bool metadataOnly = false) : IDisposable +public class Package : IDisposable { - private Stream[] Streams; + public readonly string PackagePath; + internal readonly MemoryMappedFile MetadataFile; + internal readonly MemoryMappedViewAccessor MetadataView; - public void Dispose() + internal MemoryMappedFile[] Parts; + internal MemoryMappedViewAccessor[] Views; + + public PackageHeaderCommon Metadata; + public List Files = []; + + public PackageVersion Version { - foreach (Stream stream in Streams ?? 
[]) - { - stream?.Dispose(); - } + get { return (PackageVersion)Metadata.Version; } } - private void OpenStreams(FileStream mainStream, int numParts) + public void OpenPart(int index, string path) + { + var file = File.OpenRead(path); + Parts[index] = MemoryMappedFile.CreateFromFile(file, null, file.Length, MemoryMappedFileAccess.Read, HandleInheritability.None, false); + Views[index] = MetadataFile.CreateViewAccessor(0, file.Length, MemoryMappedFileAccess.Read); + } + + public void OpenStreams(int numParts) { // Open a stream for each file chunk - Streams = new Stream[numParts]; - Streams[0] = mainStream; + Parts = new MemoryMappedFile[numParts]; + Views = new MemoryMappedViewAccessor[numParts]; + + Parts[0] = MetadataFile; + Views[0] = MetadataView; for (var part = 1; part < numParts; part++) { - string partPath = Package.MakePartFilename(path, part); - Streams[part] = File.Open(partPath, FileMode.Open, FileAccess.Read, FileShare.Read); + string partPath = Package.MakePartFilename(PackagePath, part); + OpenPart(part, partPath); } } - private void ReadCompressedFileList(BinaryReader reader, Package package) where TFile : ILSPKFile + internal Package(string path) { - int numFiles = reader.ReadInt32(); - int compressedSize; - if (package.Metadata.Version > 13) + PackagePath = path; + var file = File.OpenRead(PackagePath); + MetadataFile = MemoryMappedFile.CreateFromFile(file, null, file.Length, MemoryMappedFileAccess.Read, HandleInheritability.None, false); + MetadataView = MetadataFile.CreateViewAccessor(0, file.Length, MemoryMappedFileAccess.Read); + } + + public void Dispose() + { + MetadataView?.Dispose(); + MetadataFile?.Dispose(); + + foreach (var view in Views ?? []) { - compressedSize = reader.ReadInt32(); + view?.Dispose(); } - else + + foreach (var file in Parts ?? 
[]) { - compressedSize = (int)package.Metadata.FileListSize - 4; + file?.Dispose(); } + } + + public static string MakePartFilename(string path, int part) + { + string dirName = Path.GetDirectoryName(path); + string baseName = Path.GetFileNameWithoutExtension(path); + string extension = Path.GetExtension(path); + return Path.Join(dirName, $"{baseName}_{part}{extension}"); + } +} - byte[] compressedFileList = reader.ReadBytes(compressedSize); +public class PackageReader +{ + private bool MetadataOnly; + private Package Pak; - int fileBufferSize = Marshal.SizeOf(typeof(TFile)) * numFiles; - var uncompressedList = new byte[fileBufferSize]; - int uncompressedSize = LZ4Codec.Decode(compressedFileList, 0, compressedFileList.Length, uncompressedList, 0, fileBufferSize, true); - if (uncompressedSize != fileBufferSize) + private void ReadCompressedFileList(MemoryMappedViewAccessor view, long offset) + where TFile : struct, ILSPKFile + { + int numFiles = view.ReadInt32(offset); + byte[] compressed; + if (Pak.Metadata.Version > 13) { - string msg = $"LZ4 compressor disagrees about the size of file headers; expected {fileBufferSize}, got {uncompressedSize}"; - throw new InvalidDataException(msg); + int compressedSize = view.ReadInt32(offset + 4); + compressed = new byte[compressedSize]; + view.ReadArray(offset + 8, compressed, 0, compressedSize); + } + else + { + compressed = new byte[(int)Pak.Metadata.FileListSize - 4]; + view.ReadArray(offset + 4, compressed, 0, (int)Pak.Metadata.FileListSize - 4); } - var ms = new MemoryStream(uncompressedList); - var msr = new BinaryReader(ms); + int fileBufferSize = Marshal.SizeOf(typeof(TFile)) * numFiles; + var fileBuf = BinUtils.Decompress(compressed, fileBufferSize, CompressionFlags.MethodLZ4); + + using var ms = new MemoryStream(fileBuf); + using var msr = new BinaryReader(ms); var entries = new TFile[numFiles]; BinUtils.ReadStructs(msr, entries); foreach (var entry in entries) { - package.Files.Add(PackagedFileInfo.CreateFromEntry(entry, Streams[entry.ArchivePartNumber()])); + Pak.Files.Add(PackagedFileInfo.CreateFromEntry(entry, Pak.Parts[entry.ArchivePartNumber()], Pak.Views[entry.ArchivePartNumber()])); } } - private void ReadFileList(BinaryReader reader, Package package) where TFile : ILSPKFile + private void ReadFileList(MemoryMappedViewAccessor view, long offset) + where TFile : struct, ILSPKFile { - var entries = new TFile[package.Metadata.NumFiles]; - BinUtils.ReadStructs(reader, entries); + var entries = new TFile[Pak.Metadata.NumFiles]; + view.ReadArray(offset, entries, 0, (int)Pak.Metadata.NumFiles); foreach (var entry in entries) { - var file = PackagedFileInfo.CreateFromEntry(entry, Streams[entry.ArchivePartNumber()]); + var file = PackagedFileInfo.CreateFromEntry(entry, Pak.Parts[entry.ArchivePartNumber()], Pak.Views[entry.ArchivePartNumber()]); if (file.ArchivePart == 0) { - file.OffsetInFile += package.Metadata.DataOffset; + file.OffsetInFile += Pak.Metadata.DataOffset; } - package.Files.Add(file); + Pak.Files.Add(file); } } - private Package ReadHeaderAndFileList(FileStream mainStream, BinaryReader reader) - where THeader : ILSPKHeader - where TFile : ILSPKFile + private Package ReadHeaderAndFileList(MemoryMappedViewAccessor view, long offset) + where THeader : struct, ILSPKHeader + where TFile : struct, ILSPKFile { - var package = new Package(); - var header = BinUtils.ReadStruct(reader); + view.Read(offset, out var header); - package.Metadata = header.ToCommonHeader(); - package.Version = (PackageVersion)package.Metadata.Version; + 
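MakePartFilename keeps the established naming scheme for multi-part archives, now built with Path.Join: the part index is spliced between the base name and the extension. For example:

using LSLib.LS;

// A hypothetical multi-part archive:
var part1 = Package.MakePartFilename(@"C:\Game\Data\Textures.pak", 1);  // -> C:\Game\Data\Textures_1.pak
var part2 = Package.MakePartFilename(@"C:\Game\Data\Textures.pak", 2);  // -> C:\Game\Data\Textures_2.pak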
Pak.Metadata = header.ToCommonHeader(); - if (metadataOnly) return package; + if (MetadataOnly) return Pak; - OpenStreams(mainStream, (int)package.Metadata.NumParts); + Pak.OpenStreams((int)Pak.Metadata.NumParts); - if (package.Metadata.Version > 10) + if (Pak.Metadata.Version > 10) { - mainStream.Seek((long)package.Metadata.FileListOffset, SeekOrigin.Begin); - ReadCompressedFileList(reader, package); + ReadCompressedFileList(view, (long)Pak.Metadata.FileListOffset); } else { - ReadFileList(reader, package); + ReadFileList(view, offset + Marshal.SizeOf()); } - if (((PackageFlags)package.Metadata.Flags).HasFlag(PackageFlags.Solid) && package.Files.Count > 0) + if (Pak.Metadata.Flags.HasFlag(PackageFlags.Solid) && Pak.Files.Count > 0) { - UnpackSolidSegment(mainStream, package); + UnpackSolidSegment(view); } - return package; + return Pak; } - private void UnpackSolidSegment(FileStream mainStream, Package package) + private void UnpackSolidSegment(MemoryMappedViewAccessor view) { // Calculate compressed frame offset and bounds ulong totalUncompressedSize = 0; @@ -141,7 +192,7 @@ private void UnpackSolidSegment(FileStream mainStream, Package package) ulong firstOffset = 0xffffffff; ulong lastOffset = 0; - foreach (var entry in package.Files) + foreach (var entry in Pak.Files) { var file = entry as PackagedFileInfo; @@ -165,8 +216,7 @@ private void UnpackSolidSegment(FileStream mainStream, Package package) // Decompress all files as a single frame (solid) byte[] frame = new byte[lastOffset]; - mainStream.Seek(0, SeekOrigin.Begin); - mainStream.Read(frame, 0, (int)lastOffset); + view.ReadArray(0, frame, 0, (int)lastOffset); byte[] decompressed = Native.LZ4FrameCompressor.Decompress(frame); var decompressedStream = new MemoryStream(decompressed); @@ -174,7 +224,7 @@ private void UnpackSolidSegment(FileStream mainStream, Package package) // Update offsets to point to the decompressed chunk ulong offset = 7; ulong compressedOffset = 0; - foreach (var entry in package.Files) + foreach (var entry in Pak.Files) { var file = entry as PackagedFileInfo; @@ -190,46 +240,42 @@ private void UnpackSolidSegment(FileStream mainStream, Package package) } } - public Package Read() + public Package Read(string path, bool metadataOnly = false) { - var mainStream = File.Open(path, FileMode.Open, FileAccess.Read, FileShare.Read); - using var reader = new BinaryReader(mainStream, new UTF8Encoding(), true); + MetadataOnly = metadataOnly; + + Pak = new Package(path); + var view = Pak.MetadataView; // Check for v13 package headers - mainStream.Seek(-8, SeekOrigin.End); - Int32 headerSize = reader.ReadInt32(); - byte[] signature = reader.ReadBytes(4); - if (Package.Signature.SequenceEqual(signature)) + var headerSize = view.ReadInt32(view.Capacity - 8); + var signature = view.ReadUInt32(view.Capacity - 4); + if (signature == PackageHeaderCommon.Signature) { - mainStream.Seek(-headerSize, SeekOrigin.End); - return ReadHeaderAndFileList(mainStream, reader); + return ReadHeaderAndFileList(view, view.Capacity - headerSize); } // Check for v10 package headers - mainStream.Seek(0, SeekOrigin.Begin); - signature = reader.ReadBytes(4); + signature = view.ReadUInt32(0); Int32 version; - if (Package.Signature.SequenceEqual(signature)) + if (signature == PackageHeaderCommon.Signature) { - version = reader.ReadInt32(); - mainStream.Seek(4, SeekOrigin.Begin); + version = view.ReadInt32(4); return version switch { - 10 => ReadHeaderAndFileList(mainStream, reader), - 15 => ReadHeaderAndFileList(mainStream, reader), - 16 => 
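The byte-array signature compare becomes a single UInt32 compare; 0x4B50534C is simply the ASCII bytes 'L' 'S' 'P' 'K' interpreted as a little-endian integer, so the on-disk format is unchanged. A quick check of that equivalence:

using System;
using System.Text;

// "LSPK" as it appears on disk...
byte[] magic = Encoding.ASCII.GetBytes("LSPK");      // 0x4C 0x53 0x50 0x4B

// ...equals the new constant when read as a little-endian UInt32.
uint signature = BitConverter.ToUInt32(magic, 0);
Console.WriteLine(signature == 0x4B50534C);          // True on little-endian platforms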
ReadHeaderAndFileList(mainStream, reader), - 18 => ReadHeaderAndFileList(mainStream, reader), + 10 => ReadHeaderAndFileList(view, 4), + 15 => ReadHeaderAndFileList(view, 4), + 16 => ReadHeaderAndFileList(view, 4), + 18 => ReadHeaderAndFileList(view, 4), _ => throw new InvalidDataException($"Package version v{version} not supported") }; } // Check for v9 and v7 package headers - mainStream.Seek(0, SeekOrigin.Begin); - version = reader.ReadInt32(); + version = view.ReadInt32(4); if (version == 7 || version == 9) { - mainStream.Seek(0, SeekOrigin.Begin); - return ReadHeaderAndFileList(mainStream, reader); + return ReadHeaderAndFileList(view, 0); } throw new NotAPackageException("No valid signature found in package file"); diff --git a/LSLib/LS/PackageWriter.cs b/LSLib/LS/PackageWriter.cs index 88718141..98d3fbe0 100644 --- a/LSLib/LS/PackageWriter.cs +++ b/LSLib/LS/PackageWriter.cs @@ -3,27 +3,26 @@ using System.IO; using System.IO.Hashing; using System.Linq; -using System.Reflection.PortableExecutable; using System.Runtime.InteropServices; using System.Security.Cryptography; using System.Text; -using LSLib.Granny.GR2; using LSLib.LS.Enums; -using LSLib.VirtualTextures; using LZ4; namespace LSLib.LS; -public class PackageWriter(Package package, string path) : IDisposable + + +public class PackageBuildTransientFile : PackagedFileInfoCommon +{ +} + +public class PackageWriter(PackageBuildData Build, string PackagePath) : IDisposable { - public delegate void WriteProgressDelegate(IAbstractFileInfo abstractFile, long numerator, long denominator); + public delegate void WriteProgressDelegate(PackageBuildInputFile file, long numerator, long denominator); - private const long MaxPackageSizeDOS = 0x40000000; - private const long MaxPackageSizeBG3 = 0x100000000; - public CompressionMethod Compression = CompressionMethod.None; - public LSCompressionLevel LSCompressionLevel = LSCompressionLevel.DefaultCompression; + private readonly PackageHeaderCommon Metadata = new(); private readonly List Streams = []; - public PackageVersion Version = Package.CurrentVersion; public WriteProgressDelegate WriteProgress = delegate { }; public void Dispose() @@ -34,63 +33,58 @@ public void Dispose() } } - public int PaddingLength() => Version <= PackageVersion.V9 ? 0x8000 : 0x40; - - public PackagedFileInfo WriteFile(IAbstractFileInfo info) + private PackageBuildTransientFile WriteFile(PackageBuildInputFile input) { - // Assume that all files are written uncompressed (worst-case) when calculating package sizes - long size = (long)info.Size(); - if ((Version < PackageVersion.V15 && Streams.Last().Position + size > MaxPackageSizeDOS) - || (Version >= PackageVersion.V16 && Streams.Last().Position + size > MaxPackageSizeBG3)) + using var inputStream = input.MakeInputStream(); + + var compression = Build.Compression; + var compressionLevel = Build.CompressionLevel; + + if (input.Path.EndsWith(".gts") || input.Path.EndsWith(".gtp") || inputStream.Length == 0) { - // Start a new package file if the current one is full. 
- string partPath = Package.MakePartFilename(path, Streams.Count); - var nextPart = File.Open(partPath, FileMode.Create, FileAccess.Write); - Streams.Add(nextPart); + compression = CompressionMethod.None; + compressionLevel = LSCompressionLevel.Fast; } - var compression = Compression; - var compressionLevel = LSCompressionLevel; + var uncompressed = new byte[inputStream.Length]; + inputStream.ReadExactly(uncompressed, 0, uncompressed.Length); + var compressed = BinUtils.Compress(uncompressed, compression, compressionLevel); - if (info.Name.EndsWith(".gts") || info.Name.EndsWith(".gtp") || size == 0) + if (Streams.Last().Position + compressed.Length > Build.Version.MaxPackageSize()) { - compression = CompressionMethod.None; - compressionLevel = LSCompressionLevel.FastCompression; + // Start a new package file if the current one is full. + string partPath = Package.MakePartFilename(PackagePath, Streams.Count); + var nextPart = File.Open(partPath, FileMode.Create, FileAccess.Write); + Streams.Add(nextPart); } Stream stream = Streams.Last(); - var packaged = new PackagedFileInfo + var packaged = new PackageBuildTransientFile { - PackageStream = stream, - FileName = info.Name, - UncompressedSize = (ulong)size, - ArchivePart = (UInt32) (Streams.Count - 1), - OffsetInFile = (UInt32) stream.Position, + FileName = input.Path, + UncompressedSize = (ulong)uncompressed.Length, + SizeOnDisk = (ulong)compressed.Length, + ArchivePart = (UInt32)(Streams.Count - 1), + OffsetInFile = (ulong)stream.Position, Flags = BinUtils.MakeCompressionFlags(compression, compressionLevel) }; - Stream packagedStream = info.MakeStream(); - byte[] compressed; - try + stream.Write(compressed, 0, compressed.Length); + + if (Build.Version.HasCrc()) { - using var reader = new BinaryReader(packagedStream, Encoding.UTF8, true); - byte[] uncompressed = reader.ReadBytes((int)reader.BaseStream.Length); - compressed = BinUtils.Compress(uncompressed, compression, compressionLevel); - stream.Write(compressed, 0, compressed.Length); + packaged.Crc = Crc32.HashToUInt32(compressed); } - finally + else { - info.ReleaseStream(); + packaged.Crc = 0; } - packaged.SizeOnDisk = (UInt64) (stream.Position - (long)packaged.OffsetInFile); - packaged.Crc = Crc32.HashToUInt32(compressed); - - if (!package.Metadata.Flags.HasFlag(PackageFlags.Solid)) + if (!Build.Flags.HasFlag(PackageFlags.Solid)) { - int padLength = PaddingLength(); + int padLength = Build.Version.PaddingSize(); long alignTo; - if (Version >= PackageVersion.V16) + if (Build.Version >= PackageVersion.V16) { alignTo = stream.Position - Marshal.SizeOf(typeof(LSPKHeader16)) - 4; } @@ -117,32 +111,32 @@ private void PackV7(FileStream mainStream) where THeader : ILSPKHeader where TFile : ILSPKFile { - package.Metadata.NumFiles = (uint)package.Files.Count; - package.Metadata.FileListSize = (UInt32)(Marshal.SizeOf(typeof(TFile)) * package.Files.Count); + Metadata.NumFiles = (uint)Build.Files.Count; + Metadata.FileListSize = (UInt32)(Marshal.SizeOf(typeof(TFile)) * Build.Files.Count); using var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true); - package.Metadata.DataOffset = 4 + (UInt32)Marshal.SizeOf(typeof(THeader)) + package.Metadata.FileListSize; + Metadata.DataOffset = 4 + (UInt32)Marshal.SizeOf(typeof(THeader)) + Metadata.FileListSize; - int paddingLength = PaddingLength(); - if (package.Metadata.DataOffset % paddingLength > 0) + int paddingLength = Build.Version.PaddingSize(); + if (Metadata.DataOffset % paddingLength > 0) { - package.Metadata.DataOffset += 
(UInt32)(paddingLength - package.Metadata.DataOffset % paddingLength); + Metadata.DataOffset += (UInt32)(paddingLength - Metadata.DataOffset % paddingLength); } // Write a placeholder instead of the actual headers; we'll write them after we // compressed and flushed all files to disk - var placeholder = new byte[package.Metadata.DataOffset]; + var placeholder = new byte[Metadata.DataOffset]; writer.Write(placeholder); var writtenFiles = PackFiles(); mainStream.Seek(0, SeekOrigin.Begin); - writer.Write(Package.Signature); - package.Metadata.NumParts = (UInt16)Streams.Count; - package.Metadata.Md5 = ComputeArchiveHash(); + writer.Write(PackageHeaderCommon.Signature); + Metadata.NumParts = (UInt16)Streams.Count; + Metadata.Md5 = ComputeArchiveHash(); - var header = (THeader)THeader.FromCommonHeader(package.Metadata); + var header = (THeader)THeader.FromCommonHeader(Metadata); BinUtils.WriteStruct(writer, ref header); WriteFileList(writer, writtenFiles); @@ -156,62 +150,62 @@ private void PackV13(FileStream mainStream) using var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true); - package.Metadata.FileListOffset = (UInt64)mainStream.Position; + Metadata.FileListOffset = (UInt64)mainStream.Position; WriteCompressedFileList(writer, writtenFiles); - package.Metadata.FileListSize = (UInt32)(mainStream.Position - (long)package.Metadata.FileListOffset); - package.Metadata.Md5 = ComputeArchiveHash(); - package.Metadata.NumParts = (UInt16)Streams.Count; + Metadata.FileListSize = (UInt32)(mainStream.Position - (long)Metadata.FileListOffset); + Metadata.Md5 = ComputeArchiveHash(); + Metadata.NumParts = (UInt16)Streams.Count; - var header = (THeader)THeader.FromCommonHeader(package.Metadata); + var header = (THeader)THeader.FromCommonHeader(Metadata); BinUtils.WriteStruct(writer, ref header); writer.Write((UInt32)(8 + Marshal.SizeOf(typeof(THeader)))); - writer.Write(Package.Signature); + writer.Write(PackageHeaderCommon.Signature); } - private List PackFiles() + private List PackFiles() { - long totalSize = package.Files.Sum(p => (long)p.Size()); + long totalSize = Build.Files.Sum(p => (long)p.Size()); long currentSize = 0; - var writtenFiles = new List(); - foreach (var file in package.Files) + var writtenFiles = new List(); + foreach (var file in Build.Files) { WriteProgress(file, currentSize, totalSize); writtenFiles.Add(WriteFile(file)); - currentSize += (long)file.Size(); + currentSize += file.Size(); } return writtenFiles; } - private void WriteFileList(BinaryWriter metadataWriter, List files) + private void WriteFileList(BinaryWriter metadataWriter, List files) where TFile : ILSPKFile { foreach (var file in files) { if (file.ArchivePart == 0) { - file.OffsetInFile -= package.Metadata.DataOffset; + file.OffsetInFile -= Metadata.DataOffset; } // <= v10 packages don't support compression level in the flags field - file.Flags &= 0x0f; + file.Flags = (CompressionFlags)((byte)file.Flags & 0x0f); var entry = (TFile)TFile.FromCommon(file); BinUtils.WriteStruct(metadataWriter, ref entry); } } - private void WriteCompressedFileList(BinaryWriter metadataWriter, List files) + private void WriteCompressedFileList(BinaryWriter metadataWriter, List files) where TFile : ILSPKFile { byte[] fileListBuf; using (var fileList = new MemoryStream()) using (var fileListWriter = new BinaryWriter(fileList)) { - foreach (PackagedFileInfo file in files) + foreach (var file in files) { var entry = (TFile)TFile.FromCommon(file); BinUtils.WriteStruct(fileListWriter, ref entry); @@ -224,13 +218,13 @@ private void 
WriteCompressedFileList(BinaryWriter metadataWriter, List PackageVersion.V13) + if (Build.Version > PackageVersion.V13) { metadataWriter.Write((UInt32)compressedFileList.Length); } else { - package.Metadata.FileListSize = (uint)compressedFileList.Length + 4; + Metadata.FileListSize = (uint)compressedFileList.Length + 4; } metadataWriter.Write(compressedFileList); @@ -242,8 +236,8 @@ private void PackV15(FileStream mainStream) { using (var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true)) { - writer.Write(Package.Signature); - var header = (THeader)THeader.FromCommonHeader(package.Metadata); + writer.Write(PackageHeaderCommon.Signature); + var header = (THeader)THeader.FromCommonHeader(Metadata); BinUtils.WriteStruct(writer, ref header); } @@ -251,15 +245,23 @@ private void PackV15(FileStream mainStream) using (var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true)) { - package.Metadata.FileListOffset = (UInt64)mainStream.Position; + Metadata.FileListOffset = (UInt64)mainStream.Position; WriteCompressedFileList(writer, writtenFiles); - package.Metadata.FileListSize = (UInt32)(mainStream.Position - (long)package.Metadata.FileListOffset); - package.Metadata.Md5 = ComputeArchiveHash(); - package.Metadata.NumParts = (UInt16)Streams.Count; + Metadata.FileListSize = (UInt32)(mainStream.Position - (long)Metadata.FileListOffset); + if (Build.Hash) + { + Metadata.Md5 = ComputeArchiveHash(); + } + else + { + Metadata.Md5 = new byte[0x10]; + } + + Metadata.NumParts = (UInt16)Streams.Count; mainStream.Seek(4, SeekOrigin.Begin); - var header = (THeader)THeader.FromCommonHeader(package.Metadata); + var header = (THeader)THeader.FromCommonHeader(Metadata); BinUtils.WriteStruct(writer, ref header); } } @@ -267,31 +269,23 @@ private void PackV15(FileStream mainStream) public byte[] ComputeArchiveHash() { // MD5 is computed over the contents of all files in an alphabetically sorted order - var orderedFileList = package.Files.Select(item => item).ToList(); - if (Version < PackageVersion.V15) + var orderedFileList = Build.Files.Select(item => item).ToList(); + if (Build.Version < PackageVersion.V15) { - orderedFileList.Sort((a, b) => String.CompareOrdinal(a.Name, b.Name)); + orderedFileList.Sort((a, b) => String.CompareOrdinal(a.Path, b.Path)); } using MD5 md5 = MD5.Create(); foreach (var file in orderedFileList) { - Stream packagedStream = file.MakeStream(); - try - { - using (var reader = new BinaryReader(packagedStream)) - { - byte[] uncompressed = reader.ReadBytes((int)reader.BaseStream.Length); - md5.TransformBlock(uncompressed, 0, uncompressed.Length, uncompressed, 0); - } - } - finally - { - file.ReleaseStream(); - } + using var packagedStream = file.MakeInputStream(); + using var reader = new BinaryReader(packagedStream); + + byte[] uncompressed = reader.ReadBytes((int)reader.BaseStream.Length); + md5.TransformBlock(uncompressed, 0, uncompressed.Length, uncompressed, 0); } - md5.TransformFinalBlock(new byte[0], 0, 0); + md5.TransformFinalBlock(Array.Empty(), 0, 0); byte[] hash = md5.Hash; // All hash bytes are incremented by 1 @@ -305,10 +299,14 @@ public byte[] ComputeArchiveHash() public void Write() { - var mainStream = File.Open(path, FileMode.Create, FileAccess.Write); + var mainStream = File.Open(PackagePath, FileMode.Create, FileAccess.Write); Streams.Add(mainStream); - switch (Version) + Metadata.Version = (UInt32)Build.Version; + Metadata.Flags = Build.Flags; + Metadata.Priority = Build.Priority; + + switch (Build.Version) { case PackageVersion.V18: 
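ComputeArchiveHash keeps the LSPK convention: MD5 over the uncompressed contents of the inputs (sorted by path for pre-V15 packages), with every digest byte then incremented by one before it is stored in the header. A sketch of producing the stored form from a plain MD5, with the byte array standing in for the concatenated file contents:

using System.Security.Cryptography;

static byte[] ToStoredArchiveHash(byte[] concatenatedContents)
{
    // Plain MD5 of the payload...
    byte[] hash = MD5.HashData(concatenatedContents);

    // ...then bump each byte by one, matching ComputeArchiveHash().
    for (var i = 0; i < hash.Length; i++)
    {
        hash[i] += 1;
    }

    return hash;
}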
PackV15(mainStream); break; case PackageVersion.V16: PackV15(mainStream); break; @@ -317,7 +315,7 @@ public void Write() case PackageVersion.V10: PackV7(mainStream); break; case PackageVersion.V9: case PackageVersion.V7: PackV7(mainStream); break; - default: throw new ArgumentException($"Cannot write version {Version} packages"); + default: throw new ArgumentException($"Cannot write version {Build.Version} packages"); } } } diff --git a/LSLib/LS/ResourceUtils.cs b/LSLib/LS/ResourceUtils.cs index 8ebf02c4..08c564f9 100644 --- a/LSLib/LS/ResourceUtils.cs +++ b/LSLib/LS/ResourceUtils.cs @@ -60,7 +60,7 @@ public class ResourceConversionParameters /// /// LSF/LSB compression level (i.e. size/compression time tradeoff) /// - public LSCompressionLevel LSCompressionLevel = LSCompressionLevel.DefaultCompression; + public LSCompressionLevel CompressionLevel = LSCompressionLevel.Default; /// /// Byte-swap the last 8 bytes of GUIDs when serializing to/from string @@ -189,7 +189,7 @@ public static void SaveResource(Resource resource, string outputPath, ResourceFo Version = conversionParams.LSF, EncodeSiblingData = conversionParams.LSFEncodeSiblingData, Compression = conversionParams.Compression, - LSCompressionLevel = conversionParams.LSCompressionLevel + CompressionLevel = conversionParams.CompressionLevel }; writer.Write(resource); break; diff --git a/LSLib/LS/Resources/LSF/LSFCommon.cs b/LSLib/LS/Resources/LSF/LSFCommon.cs index 026827e7..ab6c5ea0 100644 --- a/LSLib/LS/Resources/LSF/LSFCommon.cs +++ b/LSLib/LS/Resources/LSF/LSFCommon.cs @@ -79,7 +79,7 @@ internal struct LSFMetadataV5 /// Compression method and level used for the string, node, attribute and value buffers. /// Uses the same format as packages (see BinUtils.MakeCompressionFlags) /// - public Byte CompressionFlags; + public CompressionFlags CompressionFlags; /// /// Possibly unused, always 0 /// @@ -131,7 +131,7 @@ internal struct LSFMetadataV6 /// Compression method and level used for the string, node, attribute and value buffers. /// Uses the same format as packages (see BinUtils.MakeCompressionFlags) /// - public Byte CompressionFlags; + public CompressionFlags CompressionFlags; /// /// Possibly unused, always 0 /// diff --git a/LSLib/LS/Resources/LSF/LSFReader.cs b/LSLib/LS/Resources/LSF/LSFReader.cs index 24466d70..1f79d2bc 100644 --- a/LSLib/LS/Resources/LSF/LSFReader.cs +++ b/LSLib/LS/Resources/LSF/LSFReader.cs @@ -9,7 +9,7 @@ namespace LSLib.LS; -public class LSFReader(Stream stream) : IDisposable +public class LSFReader(Stream stream, bool keepOpen = false) : IDisposable { /// /// Input stream @@ -48,7 +48,10 @@ public class LSFReader(Stream stream) : IDisposable public void Dispose() { - Stream.Dispose(); + if (!keepOpen) + { + Stream.Dispose(); + } } /// @@ -281,7 +284,7 @@ private MemoryStream Decompress(BinaryReader reader, uint sizeOnDisk, uint uncom } bool chunked = (Version >= LSFVersion.VerChunkedCompress && allowChunked); - bool isCompressed = BinUtils.CompressionFlagsToMethod(Metadata.CompressionFlags) != CompressionMethod.None; + bool isCompressed = Metadata.CompressionFlags.Method() != CompressionMethod.None; uint compressedSize = isCompressed ? 
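LSFReader's new keepOpen flag lets it read from a stream the caller still needs afterwards (for example an entry stream inside an open package) without disposing it. A short sketch, assuming the stream is positioned at the start of an LSF resource:

using System.IO;
using LSLib.LS;

static Resource ReadLsfKeepingStreamAlive(Stream stream)
{
    // keepOpen: true leaves `stream` usable after the reader is disposed.
    using var reader = new LSFReader(stream, keepOpen: true);
    return reader.Read();
}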
sizeOnDisk : uncompressedSize; byte[] compressed = reader.ReadBytes((int)compressedSize); var uncompressed = BinUtils.Decompress(compressed, (int)uncompressedSize, Metadata.CompressionFlags, chunked); diff --git a/LSLib/LS/Resources/LSF/LSFWriter.cs b/LSLib/LS/Resources/LSF/LSFWriter.cs index 40804e0a..0982cb5d 100644 --- a/LSLib/LS/Resources/LSF/LSFWriter.cs +++ b/LSLib/LS/Resources/LSF/LSFWriter.cs @@ -32,7 +32,7 @@ public class LSFWriter(Stream stream) public LSFVersion Version = LSFVersion.MaxWriteVersion; public bool EncodeSiblingData = false; public CompressionMethod Compression = CompressionMethod.LZ4; - public LSCompressionLevel LSCompressionLevel = LSCompressionLevel.DefaultCompression; + public LSCompressionLevel CompressionLevel = LSCompressionLevel.Default; public void Write(Resource resource) { @@ -114,10 +114,10 @@ public void Write(Resource resource) } bool chunked = Version >= LSFVersion.VerChunkedCompress; - byte[] stringsCompressed = BinUtils.Compress(stringBuffer, Compression, LSCompressionLevel); - byte[] nodesCompressed = BinUtils.Compress(nodeBuffer, Compression, LSCompressionLevel, chunked); - byte[] attributesCompressed = BinUtils.Compress(attributeBuffer, Compression, LSCompressionLevel, chunked); - byte[] valuesCompressed = BinUtils.Compress(valueBuffer, Compression, LSCompressionLevel, chunked); + byte[] stringsCompressed = BinUtils.Compress(stringBuffer, Compression, CompressionLevel); + byte[] nodesCompressed = BinUtils.Compress(nodeBuffer, Compression, CompressionLevel, chunked); + byte[] attributesCompressed = BinUtils.Compress(attributeBuffer, Compression, CompressionLevel, chunked); + byte[] valuesCompressed = BinUtils.Compress(valueBuffer, Compression, CompressionLevel, chunked); if (Version < LSFVersion.VerBG3AdditionalBlob) { @@ -144,7 +144,7 @@ public void Write(Resource resource) meta.ValuesSizeOnDisk = (UInt32)valuesCompressed.Length; } - meta.CompressionFlags = BinUtils.MakeCompressionFlags(Compression, LSCompressionLevel); + meta.CompressionFlags = BinUtils.MakeCompressionFlags(Compression, CompressionLevel); meta.Unknown2 = 0; meta.Unknown3 = 0; meta.HasSiblingData = EncodeSiblingData ? 1u : 0u; @@ -177,7 +177,7 @@ public void Write(Resource resource) } meta.Unknown = 0; - meta.CompressionFlags = BinUtils.MakeCompressionFlags(Compression, LSCompressionLevel); + meta.CompressionFlags = BinUtils.MakeCompressionFlags(Compression, CompressionLevel); meta.Unknown2 = 0; meta.Unknown3 = 0; meta.HasSiblingData = EncodeSiblingData ? 
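On the write side the knob is now called CompressionLevel, mirroring PackageBuildData. A minimal sketch of saving a resource with the renamed property; the output path is illustrative:

using System.IO;
using LSLib.LS;
using LSLib.LS.Enums;

static void SaveLsf(Resource resource, string outputPath)
{
    using var stream = File.Open(outputPath, FileMode.Create, FileAccess.Write);
    var writer = new LSFWriter(stream)
    {
        Compression = CompressionMethod.LZ4,
        CompressionLevel = LSCompressionLevel.Default   // property formerly named LSCompressionLevel
    };
    writer.Write(resource);
}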
1u : 0u; diff --git a/LSLib/LS/Save/SavegameHelpers.cs b/LSLib/LS/Save/SavegameHelpers.cs index e2e401f5..7910bbf7 100644 --- a/LSLib/LS/Save/SavegameHelpers.cs +++ b/LSLib/LS/Save/SavegameHelpers.cs @@ -1,5 +1,4 @@ -using LSLib.Granny; -using LSLib.LS.Enums; +using LSLib.LS.Enums; using LSLib.LS.Story; using System; using System.Collections.Generic; @@ -10,18 +9,17 @@ namespace LSLib.LS.Save; public class SavegameHelpers : IDisposable { - private readonly PackageReader Reader; private readonly Package Package; public SavegameHelpers(string path) { - Reader = new PackageReader(path); - Package = Reader.Read(); + var reader = new PackageReader(); + Package = reader.Read(path); } public void Dispose() { - Reader.Dispose(); + Package.Dispose(); } public Resource LoadGlobals() @@ -32,19 +30,9 @@ public Resource LoadGlobals() throw new InvalidDataException("The specified package is not a valid savegame (globals.lsf not found)"); } - Resource resource; - Stream rsrcStream = globalsInfo.MakeStream(); - try - { - using var rsrcReader = new LSFReader(rsrcStream); - resource = rsrcReader.Read(); - } - finally - { - globalsInfo.ReleaseStream(); - } - - return resource; + using var rsrcStream = globalsInfo.CreateContentReader(); + using var rsrcReader = new LSFReader(rsrcStream); + return rsrcReader.Read(); } public Story.Story LoadStory(Stream s) @@ -58,15 +46,8 @@ public Story.Story LoadStory() var storyInfo = Package.Files.FirstOrDefault(p => p.Name == "StorySave.bin"); if (storyInfo != null) { - Stream rsrcStream = storyInfo.MakeStream(); - try - { - return LoadStory(rsrcStream); - } - finally - { - storyInfo.ReleaseStream(); - } + using var rsrcStream = storyInfo.CreateContentReader(); + return LoadStory(rsrcStream); } else { @@ -78,7 +59,7 @@ public Story.Story LoadStory() } } - public MemoryStream ResaveStoryToGlobals(Story.Story story, ResourceConversionParameters conversionParams) + public byte[] ResaveStoryToGlobals(Story.Story story, ResourceConversionParameters conversionParams) { var globals = LoadGlobals(); @@ -101,26 +82,38 @@ public MemoryStream ResaveStoryToGlobals(Story.Story story, ResourceConversionPa }; rsrcWriter.Write(globals); rewrittenStream.Seek(0, SeekOrigin.Begin); - return rewrittenStream; + return rewrittenStream.ToArray(); } public void ResaveStory(Story.Story story, Game game, string path) { // Re-package global.lsf/StorySave.bin - var rewrittenPackage = new Package(); var conversionParams = ResourceConversionParameters.FromGameVersion(game); + var build = new PackageBuildData + { + Version = conversionParams.PAKVersion, + Compression = CompressionMethod.Zlib, + CompressionLevel = LSCompressionLevel.Default + }; + var storyBin = Package.Files.FirstOrDefault(p => p.Name == "StorySave.bin"); if (storyBin == null) { - var globalsStream = ResaveStoryToGlobals(story, conversionParams); + var globals = ResaveStoryToGlobals(story, conversionParams); var globalsLsf = Package.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == "globals.lsf"); - StreamFileInfo globalsRepacked = StreamFileInfo.CreateFromStream(globalsStream, globalsLsf.Name); - rewrittenPackage.Files.Add(globalsRepacked); + var globalsRepacked = PackageBuildInputFile.CreateFromBlob(globals, globalsLsf.Name); + build.Files.Add(globalsRepacked); + + foreach (var file in Package.Files.Where(x => x.Name.ToLowerInvariant() != "globals.lsf")) + { + using var stream = file.CreateContentReader(); + var contents = new byte[stream.Length]; + stream.ReadExactly(contents, 0, contents.Length); - var files = 
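SavegameHelpers now owns the Package directly and reads globals.lsf through the same disposable-stream pattern. A usage sketch for pulling the globals resource out of a savegame; the path is hypothetical:

using LSLib.LS;
using LSLib.LS.Save;

using var helpers = new SavegameHelpers(@"C:\Saves\MySave.lsv");   // hypothetical savegame path
Resource globals = helpers.LoadGlobals();
// Disposing the helper also disposes the underlying package.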
Package.Files.Where(x => x.Name.ToLowerInvariant() != "globals.lsf").ToList(); - rewrittenPackage.Files.AddRange(files); + build.Files.Add(PackageBuildInputFile.CreateFromBlob(contents, file.Name)); + } } else { @@ -128,20 +121,22 @@ public void ResaveStory(Story.Story story, Game game, string path) var storyStream = new MemoryStream(); var storyWriter = new StoryWriter(); storyWriter.Write(storyStream, story, true); - storyStream.Seek(0, SeekOrigin.Begin); - StreamFileInfo storyRepacked = StreamFileInfo.CreateFromStream(storyStream, "StorySave.bin"); - rewrittenPackage.Files.Add(storyRepacked); + var storyRepacked = PackageBuildInputFile.CreateFromBlob(storyStream.ToArray(), "StorySave.bin"); + build.Files.Add(storyRepacked); + + foreach (var file in Package.Files.Where(x => x.Name.ToLowerInvariant() != "StorySave.bin")) + { + using var stream = file.CreateContentReader(); + var contents = new byte[stream.Length]; + stream.ReadExactly(contents, 0, contents.Length); - var files = Package.Files.Where(x => x.Name != "StorySave.bin").ToList(); - rewrittenPackage.Files.AddRange(files); + build.Files.Add(PackageBuildInputFile.CreateFromBlob(contents, file.Name)); + } } - using (var packageWriter = new PackageWriter(rewrittenPackage, path)) + using (var packageWriter = new PackageWriter(build, path)) { - packageWriter.Version = conversionParams.PAKVersion; - packageWriter.Compression = CompressionMethod.Zlib; - packageWriter.LSCompressionLevel = LSCompressionLevel.DefaultCompression; packageWriter.Write(); } } diff --git a/StatParser/StatChecker.cs b/StatParser/StatChecker.cs index b97343e0..83da8ff6 100644 --- a/StatParser/StatChecker.cs +++ b/StatParser/StatChecker.cs @@ -33,15 +33,8 @@ private void LoadStats(ModInfo mod) { foreach (var file in mod.Stats) { - var statStream = file.Value.MakeStream(); - try - { - Loader.LoadStatsFromStream(file.Key, statStream); - } - finally - { - file.Value.ReleaseStream(); - } + using var statStream = file.Value.CreateContentReader(); + Loader.LoadStatsFromStream(file.Key, statStream); } } @@ -49,17 +42,11 @@ private XmlDocument LoadXml(IAbstractFileInfo file) { if (file == null) return null; - var stream = file.MakeStream(); - try - { - var doc = new XmlDocument(); - doc.Load(stream); - return doc; - } - finally - { - file.ReleaseStream(); - } + using var stream = file.CreateContentReader(); + + var doc = new XmlDocument(); + doc.Load(stream); + return doc; } private void LoadGuidResources(ModInfo mod) @@ -91,8 +78,8 @@ private void LoadMod(string modName) private void LoadStatDefinitions(ModResources resources) { Definitions = new StatDefinitionRepository(); - Definitions.LoadEnumerations(resources.Mods["Shared"].ValueListsFile.MakeStream()); - Definitions.LoadDefinitions(resources.Mods["Shared"].ModifiersFile.MakeStream()); + Definitions.LoadEnumerations(resources.Mods["Shared"].ValueListsFile.CreateContentReader()); + Definitions.LoadDefinitions(resources.Mods["Shared"].ModifiersFile.CreateContentReader()); } private void CompilationDiagnostic(StatLoadingError message) diff --git a/StoryCompiler/DebugInfoSaver.cs b/StoryCompiler/DebugInfoSaver.cs index 081a0b95..4ab88409 100644 --- a/StoryCompiler/DebugInfoSaver.cs +++ b/StoryCompiler/DebugInfoSaver.cs @@ -192,7 +192,7 @@ public void Save(Stream stream, StoryDebugInfo debugInfo) codedStream.Flush(); byte[] proto = ms.ToArray(); - byte flags = BinUtils.MakeCompressionFlags(LSLib.LS.Enums.CompressionMethod.LZ4, LSLib.LS.Enums.LSCompressionLevel.FastCompression); + var flags = 
BinUtils.MakeCompressionFlags(CompressionMethod.LZ4, LSCompressionLevel.Fast); byte[] compressed = BinUtils.Compress(proto, flags); stream.Write(compressed, 0, compressed.Length); diff --git a/StoryCompiler/ModCompiler.cs b/StoryCompiler/ModCompiler.cs index fc5bb3db..b57da836 100644 --- a/StoryCompiler/ModCompiler.cs +++ b/StoryCompiler/ModCompiler.cs @@ -228,36 +228,28 @@ private void LoadGoals(ModInfo mod) { foreach (var file in mod.Scripts) { - var scriptStream = file.Value.MakeStream(); - try - { - using (var reader = new BinaryReader(scriptStream)) - { - string path; - if (file.Value is PackagedFileInfo) - { - var pkgd = file.Value as PackagedFileInfo; - path = (pkgd.PackageStream as FileStream).Name + ":/" + pkgd.FileName; - } - else - { - var fs = file.Value as FilesystemFileInfo; - path = fs.FilesystemPath; - } + using var scriptStream = file.Value.CreateContentReader(); + using var reader = new BinaryReader(scriptStream); - var script = new GoalScript - { - Name = Path.GetFileNameWithoutExtension(file.Value.Name), - Path = path, - ScriptBody = reader.ReadBytes((int)scriptStream.Length) - }; - GoalScripts.Add(script); - } + string path; + if (file.Value is PackagedFileInfo) + { + var pkgd = file.Value as PackagedFileInfo; + path = "packaged:/" + pkgd.FileName; } - finally + else { - file.Value.ReleaseStream(); + var fs = file.Value as FilesystemFileInfo; + path = fs.FilesystemPath; } + + var script = new GoalScript + { + Name = Path.GetFileNameWithoutExtension(file.Value.Name), + Path = path, + ScriptBody = reader.ReadBytes((int)scriptStream.Length) + }; + GoalScripts.Add(script); } } @@ -265,66 +257,41 @@ private void LoadOrphanQueryIgnores(ModInfo mod) { if (mod.OrphanQueryIgnoreList == null) return; - var ignoreStream = mod.OrphanQueryIgnoreList.MakeStream(); - try + using var ignoreStream = mod.OrphanQueryIgnoreList.CreateContentReader(); + using var reader = new StreamReader(ignoreStream); + + var ignoreRe = new Regex("^([a-zA-Z0-9_]+)\\s+([0-9]+)$"); + while (!reader.EndOfStream) { - using (var reader = new StreamReader(ignoreStream)) + string ignoreLine = reader.ReadLine(); + var match = ignoreRe.Match(ignoreLine); + if (match.Success) { - var ignoreRe = new Regex("^([a-zA-Z0-9_]+)\\s+([0-9]+)$"); - - while (!reader.EndOfStream) - { - string ignoreLine = reader.ReadLine(); - var match = ignoreRe.Match(ignoreLine); - if (match.Success) - { - var signature = new FunctionNameAndArity( - match.Groups[1].Value, Int32.Parse(match.Groups[2].Value)); - Compiler.IgnoreUnusedDatabases.Add(signature); - } - } + var signature = new FunctionNameAndArity( + match.Groups[1].Value, Int32.Parse(match.Groups[2].Value)); + Compiler.IgnoreUnusedDatabases.Add(signature); } } - finally - { - mod.OrphanQueryIgnoreList.ReleaseStream(); - } } private void LoadGameObjects(ModInfo mod) { foreach (var file in mod.Globals) { - var globalStream = file.Value.MakeStream(); - try - { - using (var reader = new BinaryReader(globalStream)) - { - var globalLsf = reader.ReadBytes((int)globalStream.Length); - GameObjectLSFs.Add(globalLsf); - } - } - finally - { - file.Value.ReleaseStream(); - } + using var globalStream = file.Value.CreateContentReader(); + using var reader = new BinaryReader(globalStream); + + var globalLsf = reader.ReadBytes((int)globalStream.Length); + GameObjectLSFs.Add(globalLsf); } foreach (var file in mod.LevelObjects) { - var objectStream = file.Value.MakeStream(); - try - { - using (var reader = new BinaryReader(objectStream)) - { - var levelLsf = 
reader.ReadBytes((int)objectStream.Length); - GameObjectLSFs.Add(levelLsf); - } - } - finally - { - file.Value.ReleaseStream(); - } + using var objectStream = file.Value.CreateContentReader(); + using var reader = new BinaryReader(objectStream); + + var levelLsf = reader.ReadBytes((int)objectStream.Length); + GameObjectLSFs.Add(levelLsf); } } @@ -400,9 +367,8 @@ public bool Compile(string outputPath, string debugInfoPath, List mods) if (storyHeaderFile != null) { - var storyStream = storyHeaderFile.MakeStream(); + using var storyStream = storyHeaderFile.CreateContentReader(); LoadStoryHeaders(storyStream); - storyHeaderFile.ReleaseStream(); } else { @@ -412,9 +378,8 @@ public bool Compile(string outputPath, string debugInfoPath, List mods) if (typeCoercionWhitelistFile != null) { - var typeCoercionStream = typeCoercionWhitelistFile.MakeStream(); + using var typeCoercionStream = typeCoercionWhitelistFile.CreateContentReader(); LoadTypeCoercionWhitelist(typeCoercionStream); - typeCoercionWhitelistFile.ReleaseStream(); Compiler.TypeCoercionWhitelist = TypeCoercionWhitelist; } diff --git a/StoryDecompiler/Program.cs b/StoryDecompiler/Program.cs index 727c95d0..e750e73d 100644 --- a/StoryDecompiler/Program.cs +++ b/StoryDecompiler/Program.cs @@ -12,10 +12,9 @@ class Program { private static MemoryStream LoadStoryStreamFromSave(String path) { - using (var packageReader = new PackageReader(path)) + var reader = new PackageReader(); + using (var package = reader.Read(path)) { - Package package = packageReader.Read(); - var globalsFile = package.Files.FirstOrDefault(p => p.Name.ToLowerInvariant() == "globals.lsf"); if (globalsFile == null) { @@ -23,17 +22,10 @@ private static MemoryStream LoadStoryStreamFromSave(String path) } Resource resource; - Stream rsrcStream = globalsFile.MakeStream(); - try - { - using (var rsrcReader = new LSFReader(rsrcStream)) - { - resource = rsrcReader.Read(); - } - } - finally + using (var rsrcStream = globalsFile.CreateContentReader()) + using (var rsrcReader = new LSFReader(rsrcStream)) { - globalsFile.ReleaseStream(); + resource = rsrcReader.Read(); } LSLib.LS.Node storyNode = resource.Regions["Story"].Children["Story"][0]; From 58c7f30e56524d253e4e5df4b119fbb3ae7a22fa Mon Sep 17 00:00:00 2001 From: Norbyte Date: Fri, 22 Dec 2023 16:13:06 +0100 Subject: [PATCH 055/139] Add VFS --- Divine/CLI/CommandLineActions.cs | 2 +- Divine/CLI/CommandLinePackageProcessor.cs | 12 +- LSLib/LS/Mods/ModResources.cs | 493 +++++----------------- LSLib/LS/PackageCommon.cs | 90 +--- LSLib/LS/PackageFormat.cs | 18 +- LSLib/LS/PackageReader.cs | 6 +- LSLib/LS/PackageWriter.cs | 2 +- LSLib/LS/VFS.cs | 358 ++++++++++++++++ StatParser/StatChecker.cs | 37 +- StoryCompiler/ModCompiler.cs | 52 +-- 10 files changed, 541 insertions(+), 529 deletions(-) create mode 100644 LSLib/LS/VFS.cs diff --git a/Divine/CLI/CommandLineActions.cs b/Divine/CLI/CommandLineActions.cs index 125708e2..533be3b7 100644 --- a/Divine/CLI/CommandLineActions.cs +++ b/Divine/CLI/CommandLineActions.cs @@ -125,7 +125,7 @@ private static void SetUpAndValidate(CommandLineArguments args) private static void Process(CommandLineArguments args) { - Func filter; + Func filter; if (args.Expression != null) { diff --git a/Divine/CLI/CommandLinePackageProcessor.cs b/Divine/CLI/CommandLinePackageProcessor.cs index bd1ab938..66fad61f 100644 --- a/Divine/CLI/CommandLinePackageProcessor.cs +++ b/Divine/CLI/CommandLinePackageProcessor.cs @@ -16,7 +16,7 @@ public static void Create() CreatePackageResource(); } - public static void 
ListFiles(Func filter = null) + public static void ListFiles(Func filter = null) { if (CommandLineActions.SourcePath == null) { @@ -68,7 +68,7 @@ private static void ExtractSingleFile(string packagePath, string destinationPath } } - private static void ListPackageFiles(string packagePath, Func filter = null) + private static void ListPackageFiles(string packagePath, Func filter = null) { try { @@ -83,7 +83,7 @@ private static void ListPackageFiles(string packagePath, Func obj.Name)) { - Console.WriteLine($"{fileInfo.Name}\t{fileInfo.Size()}\t{fileInfo.CRC()}"); + Console.WriteLine($"{fileInfo.Name}\t{fileInfo.Size()}\t{fileInfo.Crc}"); } } catch (NotAPackageException) @@ -97,7 +97,7 @@ private static void ListPackageFiles(string packagePath, Func filter = null) + public static void Extract(Func filter = null) { if (CommandLineActions.SourcePath == null) { @@ -113,7 +113,7 @@ public static void Extract(Func filter = null) } } - public static void BatchExtract(Func filter = null) + public static void BatchExtract(Func filter = null) { string[] files = Directory.GetFiles(CommandLineActions.SourcePath, $"*.{Args.InputFormat}"); @@ -156,7 +156,7 @@ private static void CreatePackageResource(string file = "") CommandLineLogger.LogInfo("Package created successfully."); } - private static void ExtractPackageResource(string file = "", string folder = "", Func filter = null) + private static void ExtractPackageResource(string file = "", string folder = "", Func filter = null) { if (string.IsNullOrEmpty(file)) { diff --git a/LSLib/LS/Mods/ModResources.cs b/LSLib/LS/Mods/ModResources.cs index 8838e845..84fcacaf 100644 --- a/LSLib/LS/Mods/ModResources.cs +++ b/LSLib/LS/Mods/ModResources.cs @@ -10,19 +10,23 @@ namespace LSLib.LS; public class ModInfo(string name) { public string Name = name; - public IAbstractFileInfo Meta; - public Dictionary Scripts = []; - public Dictionary Stats = []; - public Dictionary Globals = []; - public Dictionary LevelObjects = []; - public IAbstractFileInfo OrphanQueryIgnoreList; - public IAbstractFileInfo StoryHeaderFile; - public IAbstractFileInfo TypeCoercionWhitelistFile; - public IAbstractFileInfo ModifiersFile; - public IAbstractFileInfo ValueListsFile; - public IAbstractFileInfo ActionResourcesFile; - public IAbstractFileInfo ActionResourceGroupsFile; - public List TagFiles = []; + + public string ModsPath; + public string PublicPath; + + public string Meta; + public List Scripts = []; + public List Stats = []; + public List Globals = []; + public List LevelObjects = []; + public string OrphanQueryIgnoreList; + public string StoryHeaderFile; + public string TypeCoercionWhitelistFile; + public string ModifiersFile; + public string ValueListsFile; + public string ActionResourcesFile; + public string ActionResourceGroupsFile; + public List TagFiles = []; } public class ModResources : IDisposable @@ -39,19 +43,12 @@ public void Dispose() public partial class ModPathVisitor { - private static readonly Regex metaRe = MetaRegex(); - private static readonly Regex scriptRe = ScriptRegex(); - private static readonly Regex statRe = StatRegex(); - private static readonly Regex staticLsxRe = StaticLsxRegex(); - private static readonly Regex statStructureRe = StatStructureRegex(); - private static readonly Regex orphanQueryIgnoresRe = OrphanQueryIgnoresRegex(); - private static readonly Regex storyDefinitionsRe = StoryDefinitionsRegex(); - private static readonly Regex typeCoercionWhitelistRe = TypeCoercionWhitelistRegex(); - private static readonly Regex globalsRe = 
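The extraction filters now take the packaged file entry itself, so callers can filter with a plain lambda over the entry's fields. A sketch of extracting only .lsf resources; the paths are hypothetical and the predicate is assumed to match on the entry's Name, as the listing code above does:

using LSLib.LS;

var packager = new Packager();
packager.UncompressPackage(
    @"C:\Game\Data\Example.pak",          // hypothetical package
    @"C:\Out\Example",                    // hypothetical output directory
    file => file.Name.EndsWith(".lsf"));  // keep only LSF resources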
GlobalsRegex(); - private static readonly Regex levelObjectsRe = LevelObjectsRegex(); // Pattern for excluding subsequent parts of a multi-part archive public static readonly Regex archivePartRe = ArchivePartRegex(); + public const string ModsPath = "Mods"; + public const string PublicPath = "Public"; + public readonly ModResources Resources; public bool CollectStoryGoals = false; @@ -59,31 +56,13 @@ public partial class ModPathVisitor public bool CollectGlobals = false; public bool CollectLevels = false; public bool CollectGuidResources = false; - public bool LoadPackages = true; public TargetGame Game = TargetGame.DOS2; + public VFS FS; - public ModPathVisitor(ModResources resources) + public ModPathVisitor(ModResources resources, VFS fs) { Resources = resources; - } - - private static void EnumerateFiles(List paths, string rootPath, string currentPath, string pattern) - { - foreach (string filePath in Directory.GetFiles(currentPath, pattern)) - { - var relativePath = filePath[rootPath.Length..]; - if (relativePath[0] == '/' || relativePath[0] == '\\') - { - relativePath = relativePath[1..]; - } - - paths.Add(relativePath); - } - - foreach (string directoryPath in Directory.GetDirectories(currentPath)) - { - EnumerateFiles(paths, rootPath, directoryPath, pattern); - } + FS = fs; } private ModInfo GetMod(string modName) @@ -97,443 +76,183 @@ private ModInfo GetMod(string modName) return mod; } - private void AddMetadataToMod(string modName, IAbstractFileInfo file) + private void AddGlobalsToMod(string modName, string path) { - GetMod(modName).Meta = file; + GetMod(modName).Globals.Add(path); } - private void AddStatToMod(string modName, string path, IAbstractFileInfo file) + private void AddLevelObjectsToMod(string modName, string path) { - GetMod(modName).Stats[path] = file; + GetMod(modName).LevelObjects.Add(path); } - private void AddScriptToMod(string modName, string scriptName, IAbstractFileInfo file) + private void DiscoverModGoals(ModInfo mod) { - GetMod(modName).Scripts[scriptName] = file; - } - - private void AddGlobalsToMod(string modName, string path, IAbstractFileInfo file) - { - GetMod(modName).Globals[path] = file; - } - - private void AddLevelObjectsToMod(string modName, string path, IAbstractFileInfo file) - { - GetMod(modName).LevelObjects[path] = file; - } - - private void DiscoverPackagedFile(IAbstractFileInfo file) - { - if (file.IsDeletion()) return; - - if (file.Name.EndsWith("meta.lsx", StringComparison.Ordinal)) - { - var match = metaRe.Match(file.Name); - if (match != null && match.Success) - { - AddMetadataToMod(match.Groups[1].Value, file); - } - } + var goalPath = Path.Join(mod.ModsPath, @"Story/RawFiles/Goals"); + if (!FS.DirectoryExists(goalPath)) return; - if (CollectStoryGoals) - { - if (file.Name.EndsWith(".txt", StringComparison.Ordinal) && file.Name.Contains("/Story/RawFiles/Goals")) - { - var match = scriptRe.Match(file.Name); - if (match != null && match.Success) - { - AddScriptToMod(match.Groups[1].Value, match.Groups[2].Value, file); - } - } - - if (file.Name.EndsWith("/Story/story_orphanqueries_ignore_local.txt", StringComparison.Ordinal)) - { - var match = orphanQueryIgnoresRe.Match(file.Name); - if (match != null && match.Success) - { - GetMod(match.Groups[1].Value).OrphanQueryIgnoreList = file; - } - } - - if (file.Name.EndsWith("/Story/RawFiles/story_header.div", StringComparison.Ordinal)) - { - var match = storyDefinitionsRe.Match(file.Name); - if (match != null && match.Success) - { - GetMod(match.Groups[1].Value).StoryHeaderFile = file; - } - 
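Discovery now goes through the VFS abstraction: the visitor only needs existence checks and filtered enumeration, regardless of whether the data lives loose on disk or inside a package. A sketch reusing the DiscoverModGoals pattern for another file type; how the VFS instance itself is constructed is not shown in this hunk, so fs is assumed to be already initialized:

using System.Collections.Generic;
using System.IO;
using LSLib.LS;

static List<string> FindTagFiles(VFS fs, string publicPath)
{
    var results = new List<string>();
    var tagsPath = Path.Join(publicPath, "Tags");

    // Same shape as DiscoverModGoals: an existence check, then a filtered enumeration.
    if (!fs.DirectoryExists(tagsPath)) return results;

    foreach (var file in fs.EnumerateFiles(tagsPath, false, p => Path.GetExtension(p) == ".lsx"))
    {
        results.Add(file);
    }

    return results;
}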
} - - if (file.Name.EndsWith("/Story/RawFiles/TypeCoercionWhitelist.txt", StringComparison.Ordinal)) - { - var match = typeCoercionWhitelistRe.Match(file.Name); - if (match != null && match.Success) - { - GetMod(match.Groups[1].Value).TypeCoercionWhitelistFile = file; - } - } - } + var goalFiles = FS.EnumerateFiles(goalPath, false, p => Path.GetExtension(p) == ".txt"); - if (CollectStats) - { - if (file.Name.EndsWith(".txt", StringComparison.Ordinal)) - { - if (file.Name.Contains("/Stats/Generated/Data")) - { - var match = statRe.Match(file.Name); - if (match != null && match.Success) - { - AddStatToMod(match.Groups[1].Value, match.Groups[2].Value, file); - } - } - else if (file.Name.Contains("/Stats/Generated/Structure")) - { - var match = statStructureRe.Match(file.Name); - if (match != null && match.Success) - { - if (file.Name.EndsWith("Modifiers.txt")) - { - GetMod(match.Groups[1].Value).ModifiersFile = file; - } - else if (file.Name.EndsWith("ValueLists.txt")) - { - GetMod(match.Groups[1].Value).ValueListsFile = file; - } - } - } - } - } - - if (CollectGuidResources) - { - if (file.Name.EndsWith(".lsx", StringComparison.Ordinal)) - { - var match = staticLsxRe.Match(file.Name); - if (match != null && match.Success) - { - if (match.Groups[2].Value == "ActionResourceDefinitions/ActionResourceDefinitions.lsx") - { - GetMod(match.Groups[1].Value).ActionResourcesFile = file; - } - else if (match.Groups[2].Value == "ActionResourceGroupDefinitions/ActionResourceGroupDefinitions.lsx") - { - GetMod(match.Groups[1].Value).ActionResourceGroupsFile = file; - } - else if (match.Groups[2].Value.StartsWith("Tags/")) - { - GetMod(match.Groups[1].Value).TagFiles.Add(file); - } - } - } - } - - if (CollectGlobals) - { - if (file.Name.EndsWith(".lsf", StringComparison.Ordinal) && file.Name.Contains("/Globals/")) - { - var match = globalsRe.Match(file.Name); - if (match != null && match.Success) - { - AddGlobalsToMod(match.Groups[1].Value, match.Groups[0].Value, file); - } - } - } - - if (CollectLevels) + foreach (var goalFile in goalFiles) { - if (file.Name.EndsWith(".lsf", StringComparison.Ordinal) && file.Name.Contains("/Levels/")) - { - var match = levelObjectsRe.Match(file.Name); - if (match != null && match.Success) - { - AddLevelObjectsToMod(match.Groups[1].Value, match.Groups[0].Value, file); - } - } + mod.Scripts.Add(goalFile); } } - public void DiscoverPackage(string packagePath) + private void DiscoverModStats(ModInfo mod) { - var reader = new PackageReader(); - var package = reader.Read(packagePath); - Resources.LoadedPackages.Add(package); + var statsPath = Path.Join(mod.PublicPath, @"Stats/Generated/Data"); + if (!FS.DirectoryExists(statsPath)) return; - foreach (var file in package.Files) + var statFiles = FS.EnumerateFiles(statsPath, false, p => Path.GetExtension(p) == ".txt"); + + foreach (var statFile in statFiles) { - DiscoverPackagedFile(file); + mod.Stats.Add(statFile); } } - public void DiscoverBuiltinPackages(string gameDataPath) + private void DiscoverModStatsStructure(ModInfo mod) { - // List of packages we won't ever load - // These packages don't contain any mod resources, but have a large - // file table that makes loading unneccessarily slow. 
- HashSet packageBlacklist = - [ - "Assets.pak", - "Effects.pak", - "Engine.pak", - "EngineShaders.pak", - "Game.pak", - "GamePlatform.pak", - "Gustav_NavCloud.pak", - "Gustav_Textures.pak", - "Gustav_Video.pak", - "Icons.pak", - "LowTex.pak", - "Materials.pak", - "Minimaps.pak", - "Models.pak", - "PsoCache.pak", - "SharedSoundBanks.pak", - "SharedSounds.pak", - "Textures.pak", - "VirtualTextures.pak" - ]; - - // Collect priority value from headers - var packagePriorities = new List>(); - - foreach (var path in Directory.GetFiles(gameDataPath, "*.pak")) + var modifiersPath = Path.Join(mod.PublicPath, @"Stats/Generated/Structure/Modifiers.txt"); + if (FS.FileExists(modifiersPath)) { - var baseName = Path.GetFileName(path); - if (!packageBlacklist.Contains(baseName) - // Don't load 2nd, 3rd, ... parts of a multi-part archive - && !archivePartRe.IsMatch(baseName)) - { - var reader = new PackageReader(); - var package = reader.Read(path, true); - packagePriorities.Add(new Tuple(path, package.Metadata.Priority)); - } + mod.ModifiersFile = modifiersPath; } - - packagePriorities.Sort( - delegate (Tuple a, Tuple b) - { - return a.Item2.CompareTo(b.Item2); - } - ); - - // Load non-patch packages first - foreach (var package in packagePriorities) + + var valueListsPath = Path.Join(mod.PublicPath, @"Stats/Generated/Structure/ValueLists.txt"); + if (FS.FileExists(valueListsPath)) { - DiscoverPackage(package.Item1); + mod.ValueListsFile = valueListsPath; } } - public void DiscoverUserPackages(string gameDataPath) + private void DiscoverModGuidResources(ModInfo mod) { - foreach (var packagePath in Directory.GetFiles(gameDataPath, "*.pak")) + var actionResGrpPath = Path.Join(mod.PublicPath, @"ActionResourceGroupDefinitions/ActionResourceGroupDefinitions.lsx"); + if (FS.FileExists(actionResGrpPath)) { - // Don't load 2nd, 3rd, ... 
parts of a multi-part archive - if (!archivePartRe.IsMatch(packagePath)) - { - DiscoverPackage(packagePath); - } + mod.ActionResourceGroupsFile = actionResGrpPath; } - } - private void DiscoverModGoals(string modName, string modPath) - { - var goalPath = Path.Join(modPath, @"Story\RawFiles\Goals"); - if (!Directory.Exists(goalPath)) return; - - List goalFiles = []; - EnumerateFiles(goalFiles, goalPath, goalPath, "*.txt"); - - foreach (var goalFile in goalFiles) + var actionResPath = Path.Join(mod.PublicPath, @"ActionResourceDefinitions/ActionResourceDefinitions.lsx"); + if (FS.FileExists(actionResPath)) { - var fileInfo = new FilesystemFileInfo - { - FilesystemPath = Path.Join(goalPath, goalFile), - FileName = goalFile - }; - AddScriptToMod(modName, goalFile, fileInfo); + mod.ActionResourcesFile = actionResPath; } - } - private void DiscoverModStats(string modName, string modPublicPath) - { - var statsPath = Path.Join(modPublicPath, @"Stats\Generated\Data"); - if (!Directory.Exists(statsPath)) return; - - List statFiles = []; - EnumerateFiles(statFiles, statsPath, statsPath, "*.txt"); - - foreach (var statFile in statFiles) + var tagPath = Path.Join(mod.PublicPath, @"Tags"); + if (FS.DirectoryExists(tagPath)) { - var fileInfo = new FilesystemFileInfo + var tagFiles = FS.EnumerateFiles(tagPath, false, p => Path.GetExtension(p) == ".lsf"); + + foreach (var tagFile in tagFiles) { - FilesystemPath = Path.Join(statsPath, statFile), - FileName = statFile - }; - AddStatToMod(modName, statFile, fileInfo); + mod.TagFiles.Add(tagFile); + } } } - private void DiscoverModGlobals(string modName, string modPath) + private void DiscoverModGlobals(ModInfo mod) { - var globalsPath = Path.Join(modPath, "Globals"); - if (!Directory.Exists(globalsPath)) return; + var globalsPath = Path.Join(mod.ModsPath, "Globals"); + if (!FS.DirectoryExists(globalsPath)) return; - List globalFiles = []; - EnumerateFiles(globalFiles, globalsPath, globalsPath, "*.lsf"); + var globalFiles = FS.EnumerateFiles(globalsPath, false, p => Path.GetExtension(p) == ".lsf"); foreach (var globalFile in globalFiles) { - var fileInfo = new FilesystemFileInfo - { - FilesystemPath = Path.Join(globalsPath, globalFile), - FileName = globalFile - }; - AddGlobalsToMod(modName, globalFile, fileInfo); + mod.Globals.Add(globalFile); } } - private void DiscoverModLevelObjects(string modName, string modPath) + private void DiscoverModLevelObjects(ModInfo mod) { - var levelsPath = Path.Join(modPath, "Levels"); - if (!Directory.Exists(levelsPath)) return; + var levelsPath = Path.Join(mod.ModsPath, "Levels"); + if (!FS.DirectoryExists(levelsPath)) return; - List levelFiles = []; - EnumerateFiles(levelFiles, levelsPath, levelsPath, "*.lsf"); + var levelFiles = FS.EnumerateFiles(levelsPath, false, p => Path.GetExtension(p) == ".lsf"); - var levelObjectsRe = LevelObjectsLocalRegex(); foreach (var levelFile in levelFiles) { - var fileInfo = new FilesystemFileInfo - { - FilesystemPath = Path.Join(levelsPath, levelFile), - FileName = levelFile - }; - AddLevelObjectsToMod(modName, levelFile, fileInfo); + mod.LevelObjects.Add(levelFile); } } - public void DiscoverModDirectory(string modName, string modPath, string publicPath) + public void DiscoverModDirectory(ModInfo mod) { - // Trigger mod entry creation even if there are no resources - GetMod(modName); - if (CollectStoryGoals) { - DiscoverModGoals(modName, modPath); + DiscoverModGoals(mod); - var headerPath = Path.Join(modPath, @"Story\RawFiles\story_header.div"); - if (File.Exists(headerPath)) + var 
headerPath = Path.Join(mod.ModsPath, @"Story/RawFiles/story_header.div"); + if (FS.FileExists(headerPath)) { - var fileInfo = new FilesystemFileInfo - { - FilesystemPath = headerPath, - FileName = headerPath - }; - GetMod(modName).StoryHeaderFile = fileInfo; + mod.StoryHeaderFile = headerPath; } - var orphanQueryIgnoresPath = Path.Join(modPath, @"Story\story_orphanqueries_ignore_local.txt"); - if (File.Exists(orphanQueryIgnoresPath)) + var orphanQueryIgnoresPath = Path.Join(mod.ModsPath, @"Story/story_orphanqueries_ignore_local.txt"); + if (FS.FileExists(orphanQueryIgnoresPath)) { - var fileInfo = new FilesystemFileInfo - { - FilesystemPath = orphanQueryIgnoresPath, - FileName = orphanQueryIgnoresPath - }; - GetMod(modName).OrphanQueryIgnoreList = fileInfo; + mod.OrphanQueryIgnoreList = orphanQueryIgnoresPath; } - var typeCoercionWhitelistPath = Path.Join(modPath, @"Story\RawFiles\TypeCoercionWhitelist.txt"); - if (File.Exists(typeCoercionWhitelistPath)) + var typeCoercionWhitelistPath = Path.Join(mod.ModsPath, @"Story/RawFiles/TypeCoercionWhitelist.txt"); + if (FS.FileExists(typeCoercionWhitelistPath)) { - var fileInfo = new FilesystemFileInfo - { - FilesystemPath = typeCoercionWhitelistPath, - FileName = typeCoercionWhitelistPath - }; - GetMod(modName).TypeCoercionWhitelistFile = fileInfo; + mod.TypeCoercionWhitelistFile = typeCoercionWhitelistPath; } } if (CollectStats) { - DiscoverModStats(modName, publicPath); + DiscoverModStats(mod); + DiscoverModStatsStructure(mod); + } + + if (CollectGuidResources) + { + DiscoverModGuidResources(mod); } if (CollectGlobals) { - DiscoverModGlobals(modName, modPath); + DiscoverModGlobals(mod); } if (CollectLevels) { - DiscoverModLevelObjects(modName, modPath); + DiscoverModLevelObjects(mod); } } - public void DiscoverMods(string gameDataPath) + public void DiscoverMods() { - var modsPath = Path.Combine(gameDataPath, "Mods"); - var publicPath = Path.Combine(gameDataPath, "Public"); + var modPaths = FS.EnumerateDirectories(ModsPath); - if (Directory.Exists(modsPath)) + foreach (var modPath in modPaths) { - var modPaths = Directory.GetDirectories(modsPath); + var modName = Path.GetFileName(modPath); + var metaPath = Path.Combine(modPath, "meta.lsx"); - foreach (var modPath in modPaths) + if (FS.FileExists(metaPath)) { - if (File.Exists(Path.Combine(modPath, "meta.lsx"))) - { - var modName = Path.GetFileNameWithoutExtension(modPath); - var modPublicPath = Path.Combine(publicPath, Path.GetFileName(modPath)); - DiscoverModDirectory(modName, modPath, modPublicPath); - } + var mod = GetMod(modName); + mod.ModsPath = modPath; + mod.PublicPath = Path.Combine(PublicPath, Path.GetFileName(modPath)); + mod.Meta = metaPath; + + DiscoverModDirectory(mod); } } } - public void Discover(String gameDataPath) + public void Discover() { - if (LoadPackages) - { - DiscoverBuiltinPackages(gameDataPath); - } - - DiscoverMods(gameDataPath); + DiscoverMods(); } - [GeneratedRegex("^Mods/([^/]+)/meta\\.lsx$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] - private static partial Regex MetaRegex(); - - [GeneratedRegex("^Mods/([^/]+)/Story/RawFiles/Goals/(.*\\.txt)$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] - private static partial Regex ScriptRegex(); - - [GeneratedRegex("^Public/([^/]+)/Stats/Generated/Data/(.*\\.txt)$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] - private static partial Regex StatRegex(); - - [GeneratedRegex("^Public/([^/]+)/(.*\\.lsx)$", 
RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] - private static partial Regex StaticLsxRegex(); - - [GeneratedRegex("^Public/([^/]+)/Stats/Generated/Structure/(.*\\.txt)$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] - private static partial Regex StatStructureRegex(); - - [GeneratedRegex("^Mods/([^/]+)/Story/story_orphanqueries_ignore_local\\.txt$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] - private static partial Regex OrphanQueryIgnoresRegex(); - - [GeneratedRegex("^Mods/([^/]+)/Story/RawFiles/story_header\\.div$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] - private static partial Regex StoryDefinitionsRegex(); - - [GeneratedRegex("^Mods/([^/]+)/Story/RawFiles/TypeCoercionWhitelist\\.txt$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] - private static partial Regex TypeCoercionWhitelistRegex(); - - [GeneratedRegex("^Mods/([^/]+)/Globals/.*/.*/.*\\.lsf$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] - private static partial Regex GlobalsRegex(); - - [GeneratedRegex("^Mods/([^/]+)/Levels/.*/(Characters|Items|Triggers)/.*\\.lsf$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] - private static partial Regex LevelObjectsRegex(); [GeneratedRegex("^(.*)_[0-9]+\\.pak$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] private static partial Regex ArchivePartRegex(); - - [GeneratedRegex("^(Characters|Items|Triggers)/.*\\.lsf$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant)] - private static partial Regex LevelObjectsLocalRegex(); } diff --git a/LSLib/LS/PackageCommon.cs b/LSLib/LS/PackageCommon.cs index 7158b0db..b14932f6 100644 --- a/LSLib/LS/PackageCommon.cs +++ b/LSLib/LS/PackageCommon.cs @@ -8,32 +8,17 @@ namespace LSLib.LS; -public interface IAbstractFileInfo -{ - public abstract String GetName(); - public abstract UInt64 Size(); - public abstract UInt32 CRC(); - public abstract Stream CreateContentReader(); - public abstract bool IsDeletion(); - - public string Name { get { return GetName(); } } -} - - -public class PackagedFileInfo : PackagedFileInfoCommon, IAbstractFileInfo +public class PackagedFileInfo : PackagedFileInfoCommon { + public Package Package; public MemoryMappedFile PackageFile; public MemoryMappedViewAccessor PackageView; public bool Solid; public ulong SolidOffset; public Stream SolidStream; - public String GetName() => FileName; - public UInt64 Size() => Flags.Method() == CompressionMethod.None ? 
SizeOnDisk : UncompressedSize; - public UInt32 CRC() => Crc; - public Stream CreateContentReader() { if (IsDeletion()) @@ -52,10 +37,11 @@ public Stream CreateContentReader() } } - internal static PackagedFileInfo CreateFromEntry(ILSPKFile entry, MemoryMappedFile file, MemoryMappedViewAccessor view) + internal static PackagedFileInfo CreateFromEntry(Package package, ILSPKFile entry, MemoryMappedFile file, MemoryMappedViewAccessor view) { var info = new PackagedFileInfo { + Package = package, PackageFile = file, PackageView = view, Solid = false @@ -78,68 +64,6 @@ public bool IsDeletion() } } -public class FilesystemFileInfo : IAbstractFileInfo -{ - public long CachedSize; - public string FilesystemPath; - public string FileName; - - public String GetName() => FileName; - - public UInt64 Size() => (UInt64) CachedSize; - - public UInt32 CRC() => throw new NotImplementedException("!"); - - public Stream CreateContentReader() => File.Open(FilesystemPath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite); - - public static FilesystemFileInfo CreateFromEntry(string filesystemPath, string name) - { - var info = new FilesystemFileInfo - { - FileName = name, - FilesystemPath = filesystemPath - }; - - var fsInfo = new FileInfo(filesystemPath); - info.CachedSize = fsInfo.Length; - return info; - } - - public bool IsDeletion() - { - return false; - } -} - -public class StreamFileInfo : IAbstractFileInfo -{ - public Stream Stream; - public String FileName; - - public String GetName() => FileName; - - public UInt64 Size() => (UInt64) Stream.Length; - - public UInt32 CRC() => throw new NotImplementedException("!"); - - public Stream CreateContentReader() => Stream; - - public static StreamFileInfo CreateFromStream(Stream stream, string name) - { - var info = new StreamFileInfo - { - FileName = name, - Stream = stream - }; - return info; - } - - public bool IsDeletion() - { - return false; - } -} - public class PackageBuildInputFile { public string Path; @@ -212,14 +136,14 @@ private void WriteProgressUpdate(PackageBuildInputFile file, long numerator, lon ProgressUpdate(file.Path, numerator, denominator); } - public void UncompressPackage(Package package, string outputPath, Func filter = null) + public void UncompressPackage(Package package, string outputPath, Func filter = null) { if (outputPath.Length > 0 && !outputPath.EndsWith(Path.DirectorySeparatorChar.ToString(), StringComparison.InvariantCultureIgnoreCase)) { outputPath += Path.DirectorySeparatorChar; } - List files = package.Files; + List files = package.Files; if (filter != null) { @@ -246,7 +170,7 @@ public void UncompressPackage(Package package, string outputPath, Func filter = null) + public void UncompressPackage(string packagePath, string outputPath, Func filter = null) { ProgressUpdate("Reading package headers ...", 0, 1); var reader = new PackageReader(); diff --git a/LSLib/LS/PackageFormat.cs b/LSLib/LS/PackageFormat.cs index 074b9f01..37e27b29 100644 --- a/LSLib/LS/PackageFormat.cs +++ b/LSLib/LS/PackageFormat.cs @@ -276,7 +276,7 @@ public enum PackageFlags abstract public class PackagedFileInfoCommon { - public string FileName; + public string Name; public UInt32 ArchivePart; public UInt32 Crc; public CompressionFlags Flags; @@ -305,7 +305,7 @@ internal struct FileEntry7 : ILSPKFile public readonly void ToCommon(PackagedFileInfoCommon info) { - info.FileName = BinUtils.NullTerminatedBytesToString(Name); + info.Name = BinUtils.NullTerminatedBytesToString(Name); info.ArchivePart = ArchivePart; info.Crc = 0; info.Flags = 
UncompressedSize > 0 ? BinUtils.MakeCompressionFlags(CompressionMethod.Zlib, LSCompressionLevel.Default) : 0; @@ -318,7 +318,7 @@ public static ILSPKFile FromCommon(PackagedFileInfoCommon info) { return new FileEntry7 { - Name = BinUtils.StringToNullTerminatedBytes(info.FileName, 256), + Name = BinUtils.StringToNullTerminatedBytes(info.Name, 256), OffsetInFile = (uint)info.OffsetInFile, SizeOnDisk = (uint)info.SizeOnDisk, UncompressedSize = info.Flags.Method() == CompressionMethod.None ? 0 : (uint)info.UncompressedSize, @@ -344,7 +344,7 @@ internal struct FileEntry10 : ILSPKFile public readonly void ToCommon(PackagedFileInfoCommon info) { - info.FileName = BinUtils.NullTerminatedBytesToString(Name); + info.Name = BinUtils.NullTerminatedBytesToString(Name); info.ArchivePart = ArchivePart; info.Crc = Crc; info.Flags = (CompressionFlags)Flags; @@ -357,7 +357,7 @@ public static ILSPKFile FromCommon(PackagedFileInfoCommon info) { return new FileEntry10 { - Name = BinUtils.StringToNullTerminatedBytes(info.FileName, 256), + Name = BinUtils.StringToNullTerminatedBytes(info.Name, 256), OffsetInFile = (uint)info.OffsetInFile, SizeOnDisk = (uint)info.SizeOnDisk, UncompressedSize = info.Flags.Method() == CompressionMethod.None ? 0 : (uint)info.UncompressedSize, @@ -386,7 +386,7 @@ internal struct FileEntry15 : ILSPKFile public readonly void ToCommon(PackagedFileInfoCommon info) { - info.FileName = BinUtils.NullTerminatedBytesToString(Name); + info.Name = BinUtils.NullTerminatedBytesToString(Name); info.ArchivePart = ArchivePart; info.Crc = Crc; info.Flags = (CompressionFlags)Flags; @@ -399,7 +399,7 @@ public static ILSPKFile FromCommon(PackagedFileInfoCommon info) { return new FileEntry15 { - Name = BinUtils.StringToNullTerminatedBytes(info.FileName, 256), + Name = BinUtils.StringToNullTerminatedBytes(info.Name, 256), OffsetInFile = (uint)info.OffsetInFile, SizeOnDisk = (uint)info.SizeOnDisk, UncompressedSize = info.Flags.Method() == CompressionMethod.None ? 
0 : (uint)info.UncompressedSize, @@ -428,7 +428,7 @@ internal struct FileEntry18 : ILSPKFile public readonly void ToCommon(PackagedFileInfoCommon info) { - info.FileName = BinUtils.NullTerminatedBytesToString(Name); + info.Name = BinUtils.NullTerminatedBytesToString(Name); info.ArchivePart = ArchivePart; info.Crc = 0; info.Flags = (CompressionFlags)Flags; @@ -441,7 +441,7 @@ public static ILSPKFile FromCommon(PackagedFileInfoCommon info) { return new FileEntry18 { - Name = BinUtils.StringToNullTerminatedBytes(info.FileName, 256), + Name = BinUtils.StringToNullTerminatedBytes(info.Name, 256), OffsetInFile1 = (uint)(info.OffsetInFile & 0xffffffff), OffsetInFile2 = (ushort)((info.OffsetInFile >> 32) & 0xffff), ArchivePart = (byte)info.ArchivePart, diff --git a/LSLib/LS/PackageReader.cs b/LSLib/LS/PackageReader.cs index 193d910b..5e3f50c3 100644 --- a/LSLib/LS/PackageReader.cs +++ b/LSLib/LS/PackageReader.cs @@ -37,7 +37,7 @@ public class Package : IDisposable internal MemoryMappedViewAccessor[] Views; public PackageHeaderCommon Metadata; - public List Files = []; + public List Files = []; public PackageVersion Version { @@ -133,7 +133,7 @@ private void ReadCompressedFileList(MemoryMappedViewAccessor view, long o foreach (var entry in entries) { - Pak.Files.Add(PackagedFileInfo.CreateFromEntry(entry, Pak.Parts[entry.ArchivePartNumber()], Pak.Views[entry.ArchivePartNumber()])); + Pak.Files.Add(PackagedFileInfo.CreateFromEntry(Pak, entry, Pak.Parts[entry.ArchivePartNumber()], Pak.Views[entry.ArchivePartNumber()])); } } @@ -145,7 +145,7 @@ private void ReadFileList(MemoryMappedViewAccessor view, long offset) foreach (var entry in entries) { - var file = PackagedFileInfo.CreateFromEntry(entry, Pak.Parts[entry.ArchivePartNumber()], Pak.Views[entry.ArchivePartNumber()]); + var file = PackagedFileInfo.CreateFromEntry(Pak, entry, Pak.Parts[entry.ArchivePartNumber()], Pak.Views[entry.ArchivePartNumber()]); if (file.ArchivePart == 0) { file.OffsetInFile += Pak.Metadata.DataOffset; diff --git a/LSLib/LS/PackageWriter.cs b/LSLib/LS/PackageWriter.cs index 98d3fbe0..7b9524ae 100644 --- a/LSLib/LS/PackageWriter.cs +++ b/LSLib/LS/PackageWriter.cs @@ -61,7 +61,7 @@ private PackageBuildTransientFile WriteFile(PackageBuildInputFile input) Stream stream = Streams.Last(); var packaged = new PackageBuildTransientFile { - FileName = input.Path, + Name = input.Path, UncompressedSize = (ulong)uncompressed.Length, SizeOnDisk = (ulong)compressed.Length, ArchivePart = (UInt32)(Streams.Count - 1), diff --git a/LSLib/LS/VFS.cs b/LSLib/LS/VFS.cs new file mode 100644 index 00000000..5ade6d86 --- /dev/null +++ b/LSLib/LS/VFS.cs @@ -0,0 +1,358 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace LSLib.LS; + +public class VFSDirectory +{ + public Dictionary Dirs; + public Dictionary Files; + + public VFSDirectory GetOrAddDirectory(string name) + { + Dirs ??= []; + + if (!Dirs.TryGetValue(name, out var dir)) + { + dir = new VFSDirectory(); + Dirs[name] = dir; + } + + return dir; + } + + public bool TryGetDirectory(string name, out VFSDirectory dir) + { + if (Dirs?.TryGetValue(name, out dir) == true) + { + return true; + } + + dir = null; + return false; + } + + public void AddFile(string name, PackagedFileInfo file) + { + Files ??= []; + + if (!Files.TryGetValue(name, out var curFile) || curFile.Package.Metadata.Priority < file.Package.Metadata.Priority) + { + Files[name] = file; + } + } + + public bool TryGetFile(string name, 
out PackagedFileInfo file) + { + if (Files?.TryGetValue(name, out file) == true) + { + return true; + } + + file = null; + return false; + } +} + +public class VFS : IDisposable +{ + private List Packages = []; + private string RootDir; + private VFSDirectory Root = new(); + + public void Dispose() + { + Packages.ForEach(p => p.Dispose()); + } + + public void AttachRoot(string path) + { + RootDir = path; + } + + public void AttachGameDirectory(string gameDataPath) + { + AttachRoot(gameDataPath); + + // List of packages we won't ever load + // These packages don't contain any mod resources, but have a large + // file table that makes loading unneccessarily slow. + HashSet packageBlacklist = + [ + "Assets.pak", + "Effects.pak", + "Engine.pak", + "EngineShaders.pak", + "Game.pak", + "GamePlatform.pak", + "Gustav_NavCloud.pak", + "Gustav_Textures.pak", + "Gustav_Video.pak", + "Icons.pak", + "LowTex.pak", + "Materials.pak", + "Minimaps.pak", + "Models.pak", + "PsoCache.pak", + "SharedSoundBanks.pak", + "SharedSounds.pak", + "Textures.pak", + "VirtualTextures.pak" + ]; + + // Collect priority value from headers + var packagePriorities = new List>(); + + foreach (var path in Directory.GetFiles(gameDataPath, "*.pak")) + { + var baseName = Path.GetFileName(path); + if (!packageBlacklist.Contains(baseName) + // Don't load 2nd, 3rd, ... parts of a multi-part archive + && !ModPathVisitor.archivePartRe.IsMatch(baseName)) + { + AttachPackage(path); + } + } + } + + public void AttachPackage(string path) + { + var reader = new PackageReader(); + var package = reader.Read(path); + Packages.Add(package); + } + + public void FinishBuild() + { + foreach (var package in Packages) + { + foreach (var file in package.Files) + { + TryAddFile(file); + } + } + } + + private void TryAddFile(PackagedFileInfo file) + { + var path = file.Name; + var namePos = 0; + var node = Root; + do + { + var endPos = path.IndexOf('/', namePos); + if (endPos >= 0) + { + node = node.GetOrAddDirectory(path.Substring(namePos, endPos - namePos)); + namePos = endPos + 1; + } + else + { + node.AddFile(path.Substring(namePos), file); + break; + } + } while (true); + } + + public VFSDirectory FindVFSDirectory(string path) + { + var namePos = 0; + var node = Root; + do + { + var endPos = path.IndexOf('/', namePos); + if (endPos >= 0) + { + if (!node.TryGetDirectory(path.Substring(namePos, endPos - namePos), out node)) + { + return null; + } + + namePos = endPos + 1; + } + else + { + if (node.TryGetDirectory(path.Substring(namePos), out node)) + { + return node; + } + else + { + return null; + } + } + } while (true); + } + + public bool DirectoryExists(string path) + { + if (FindVFSDirectory(path) != null) return true; + return RootDir != null && Directory.Exists(Path.Combine(RootDir, path)); + } + + public PackagedFileInfo FindVFSFile(string path) + { + var namePos = 0; + var node = Root; + do + { + var endPos = path.IndexOf('/', namePos); + if (endPos >= 0) + { + if (!node.TryGetDirectory(path.Substring(namePos, endPos - namePos), out node)) + { + return null; + } + + namePos = endPos + 1; + } + else + { + if (node.TryGetFile(path.Substring(namePos), out var file)) + { + return file; + } + else + { + return null; + } + } + } while (true); + } + + public bool FileExists(string path) + { + if (FindVFSFile(path) != null) return true; + return RootDir != null && File.Exists(Path.Combine(RootDir, path)); + } + + public List EnumerateFiles(string path, bool recursive = false) + { + return EnumerateFiles(path, recursive, (path) => true); + } + + 
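FindVFSFile and FindVFSDirectory walk the in-memory tree built by FinishBuild, while FileExists and DirectoryExists fall back to the attached root directory, so packaged and loose files resolve through the same calls. A minimal, illustrative use of this API follows; the sketch class name, paths and using directives are assumptions, and the real consumers are the StatChecker and ModCompiler hunks further down.

using System;
using System.IO;
using LSLib.LS;

static class VfsLookupSketch
{
    static void InspectMod(string gameDataPath)
    {
        using var vfs = new VFS();
        vfs.AttachGameDirectory(gameDataPath);
        vfs.FinishBuild();

        // Lookups use forward-slash paths, matching the .pak file table entries.
        var goals = vfs.EnumerateFiles("Mods/GustavDev/Story/RawFiles/Goals", false,
            p => Path.GetExtension(p) == ".txt");
        Console.WriteLine($"{goals.Count} goal scripts found");

        // TryOpen/Open (defined a few methods below) return a readable stream,
        // falling back to the root directory when no package contains the entry.
        if (vfs.TryOpen("Mods/GustavDev/meta.lsx", out var meta))
        {
            using (meta) { /* parse meta.lsx here */ }
        }
    }
}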
public List EnumerateFiles(string path, bool recursive, Func filter) + { + List results = []; + EnumerateFiles(results, path, recursive, filter); + return results; + } + + public void EnumerateFiles(List results, string path, bool recursive, Func filter) + { + var dir = FindVFSDirectory(path); + if (dir != null) + { + EnumerateFiles(results, dir, recursive, filter); + } + + if (RootDir != null) + { + var files = Directory.EnumerateFiles(path, "*", recursive ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly); + foreach (var file in files) + { + if (filter(file)) + { + results.Add(Path.GetRelativePath(path, file)); + } + } + } + } + + public List EnumerateDirectories(string path) + { + List results = []; + EnumerateDirectories(results, path); + return results; + } + + public void EnumerateDirectories(List results, string path) + { + var dir = FindVFSDirectory(path); + if (dir?.Dirs != null) + { + foreach (var subdir in dir.Dirs) + { + results.Add(subdir.Key); + } + } + + if (RootDir != null) + { + foreach (var subdir in Directory.EnumerateDirectories(path)) + { + results.Add(Path.GetRelativePath(path, subdir)); + } + } + } + + private void EnumerateFiles(List results, VFSDirectory dir, bool recursive, Func filter) + { + if (dir.Files != null) + { + foreach (var file in dir.Files) + { + if (filter(file.Key)) + { + results.Add(file.Key); + } + } + } + + if (recursive && dir.Dirs != null) + { + foreach (var subdir in dir.Dirs) + { + EnumerateFiles(results, subdir.Value, recursive, filter); + } + } + } + + public bool TryOpenFromVFS(string path, out Stream stream) + { + var file = FindVFSFile(path); + if (file != null) + { + stream = file.CreateContentReader(); + return true; + } + else + { + stream = null; + return false; + } + } + + public bool TryOpen(string path, out Stream stream) + { + if (TryOpenFromVFS(path, out stream)) return true; + + if (RootDir != null) + { + try + { + stream = File.OpenRead(Path.Join(RootDir, path)); + return true; + } + catch (FileNotFoundException) { } + catch (DirectoryNotFoundException) { } + } + + stream = null; + return false; + } + + public Stream Open(string path) + { + if (!TryOpen(path, out var stream)) + { + throw new FileNotFoundException($"File not found in VFS: {path}", path); + } + + return stream; + } +} diff --git a/StatParser/StatChecker.cs b/StatParser/StatChecker.cs index 83da8ff6..6bd80207 100644 --- a/StatParser/StatChecker.cs +++ b/StatParser/StatChecker.cs @@ -11,6 +11,7 @@ namespace LSTools.StatParser; class StatChecker : IDisposable { private string GameDataPath; + private VFS FS; private ModResources Mods = new ModResources(); private StatDefinitionRepository Definitions; private StatLoadingContext Context; @@ -33,16 +34,16 @@ private void LoadStats(ModInfo mod) { foreach (var file in mod.Stats) { - using var statStream = file.Value.CreateContentReader(); - Loader.LoadStatsFromStream(file.Key, statStream); + using var statStream = FS.Open(file); + Loader.LoadStatsFromStream(file, statStream); } } - private XmlDocument LoadXml(IAbstractFileInfo file) + private XmlDocument LoadXml(string path) { - if (file == null) return null; + if (path == null) return null; - using var stream = file.CreateContentReader(); + using var stream = FS.Open(path); var doc = new XmlDocument(); doc.Load(stream); @@ -78,8 +79,8 @@ private void LoadMod(string modName) private void LoadStatDefinitions(ModResources resources) { Definitions = new StatDefinitionRepository(); - 
Definitions.LoadEnumerations(resources.Mods["Shared"].ValueListsFile.CreateContentReader()); - Definitions.LoadDefinitions(resources.Mods["Shared"].ModifiersFile.CreateContentReader()); + Definitions.LoadEnumerations(FS.Open(resources.Mods["Shared"].ValueListsFile)); + Definitions.LoadDefinitions(FS.Open(resources.Mods["Shared"].ModifiersFile)); } private void CompilationDiagnostic(StatLoadingError message) @@ -110,16 +111,26 @@ public void Check(List mods, List dependencies, List pac Context = new StatLoadingContext(); Loader = new StatLoader(Context); - - var visitor = new ModPathVisitor(Mods) + + FS = new VFS(); + if (LoadPackages) + { + FS.AttachGameDirectory(GameDataPath); + } + else + { + FS.AttachRoot(GameDataPath); + } + packagePaths.ForEach(path => FS.AttachPackage(path)); + FS.FinishBuild(); + + var visitor = new ModPathVisitor(Mods, FS) { Game = LSLib.LS.Story.Compiler.TargetGame.DOS2DE, CollectStats = true, - CollectGuidResources = true, - LoadPackages = LoadPackages + CollectGuidResources = true }; - visitor.Discover(GameDataPath); - packagePaths.ForEach(path => visitor.DiscoverUserPackages(path)); + visitor.Discover(); LoadStatDefinitions(visitor.Resources); Context.Definitions = Definitions; diff --git a/StoryCompiler/ModCompiler.cs b/StoryCompiler/ModCompiler.cs index b57da836..84f26976 100644 --- a/StoryCompiler/ModCompiler.cs +++ b/StoryCompiler/ModCompiler.cs @@ -24,6 +24,7 @@ class GoalScript private Logger Logger; private String GameDataPath; + private VFS FS; private Compiler Compiler = new Compiler(); private ModResources Mods = new ModResources(); private List GoalScripts = new List(); @@ -228,25 +229,13 @@ private void LoadGoals(ModInfo mod) { foreach (var file in mod.Scripts) { - using var scriptStream = file.Value.CreateContentReader(); + using var scriptStream = FS.Open(file); using var reader = new BinaryReader(scriptStream); - string path; - if (file.Value is PackagedFileInfo) - { - var pkgd = file.Value as PackagedFileInfo; - path = "packaged:/" + pkgd.FileName; - } - else - { - var fs = file.Value as FilesystemFileInfo; - path = fs.FilesystemPath; - } - var script = new GoalScript { - Name = Path.GetFileNameWithoutExtension(file.Value.Name), - Path = path, + Name = Path.GetFileNameWithoutExtension(file), + Path = file, ScriptBody = reader.ReadBytes((int)scriptStream.Length) }; GoalScripts.Add(script); @@ -257,7 +246,7 @@ private void LoadOrphanQueryIgnores(ModInfo mod) { if (mod.OrphanQueryIgnoreList == null) return; - using var ignoreStream = mod.OrphanQueryIgnoreList.CreateContentReader(); + using var ignoreStream = FS.Open(mod.OrphanQueryIgnoreList); using var reader = new StreamReader(ignoreStream); var ignoreRe = new Regex("^([a-zA-Z0-9_]+)\\s+([0-9]+)$"); @@ -278,7 +267,7 @@ private void LoadGameObjects(ModInfo mod) { foreach (var file in mod.Globals) { - using var globalStream = file.Value.CreateContentReader(); + using var globalStream = FS.Open(file); using var reader = new BinaryReader(globalStream); var globalLsf = reader.ReadBytes((int)globalStream.Length); @@ -287,7 +276,7 @@ private void LoadGameObjects(ModInfo mod) foreach (var file in mod.LevelObjects) { - using var objectStream = file.Value.CreateContentReader(); + using var objectStream = FS.Open(file); using var reader = new BinaryReader(objectStream); var levelLsf = reader.ReadBytes((int)objectStream.Length); @@ -320,16 +309,27 @@ public bool Compile(string outputPath, string debugInfoPath, List mods) if (mods.Count > 0) { + Logger.TaskStarted("Building VFS"); + FS = new VFS(); + if 
(LoadPackages) + { + FS.AttachGameDirectory(GameDataPath); + } + else + { + FS.AttachRoot(GameDataPath); + } + FS.FinishBuild(); + Logger.TaskStarted("Discovering module files"); - var visitor = new ModPathVisitor(Mods) + var visitor = new ModPathVisitor(Mods, FS) { Game = Game, CollectStoryGoals = true, CollectGlobals = CheckGameObjects, - CollectLevels = CheckGameObjects, - LoadPackages = LoadPackages + CollectLevels = CheckGameObjects }; - visitor.Discover(GameDataPath); + visitor.Discover(); Logger.TaskFinished(); Logger.TaskStarted("Loading module files"); @@ -348,8 +348,8 @@ public bool Compile(string outputPath, string debugInfoPath, List mods) LoadMod(modName); } - IAbstractFileInfo storyHeaderFile = null; - IAbstractFileInfo typeCoercionWhitelistFile = null; + string storyHeaderFile = null; + string typeCoercionWhitelistFile = null; var modsSearchPath = mods.ToList(); modsSearchPath.Reverse(); foreach (var modName in modsSearchPath) @@ -367,7 +367,7 @@ public bool Compile(string outputPath, string debugInfoPath, List mods) if (storyHeaderFile != null) { - using var storyStream = storyHeaderFile.CreateContentReader(); + using var storyStream = FS.Open(storyHeaderFile); LoadStoryHeaders(storyStream); } else @@ -378,7 +378,7 @@ public bool Compile(string outputPath, string debugInfoPath, List mods) if (typeCoercionWhitelistFile != null) { - using var typeCoercionStream = typeCoercionWhitelistFile.CreateContentReader(); + using var typeCoercionStream = FS.Open(typeCoercionWhitelistFile); LoadTypeCoercionWhitelist(typeCoercionStream); Compiler.TypeCoercionWhitelist = TypeCoercionWhitelist; } From 0325ac8f8de21f2931d6c86fe57952a4557dab9e Mon Sep 17 00:00:00 2001 From: Norbyte Date: Fri, 22 Dec 2023 22:03:13 +0100 Subject: [PATCH 056/139] Various package loading/VFS fixes --- LSLib/LS/BinUtils.cs | 6 +++ LSLib/LS/PackageFormat.cs | 1 - LSLib/LS/PackageReader.cs | 11 +++-- LSLib/LS/PackageWriter.cs | 2 - LSLib/LS/VFS.cs | 99 +++++++++++++++++++++++---------------- 5 files changed, 71 insertions(+), 48 deletions(-) diff --git a/LSLib/LS/BinUtils.cs b/LSLib/LS/BinUtils.cs index 9c5a2034..dfca1f11 100644 --- a/LSLib/LS/BinUtils.cs +++ b/LSLib/LS/BinUtils.cs @@ -459,6 +459,12 @@ public static byte[] Decompress(byte[] compressed, int decompressedSize, Compres public static Stream Decompress(MemoryMappedFile file, MemoryMappedViewAccessor view, long sourceOffset, int sourceSize, int decompressedSize, CompressionFlags compression) { + // MemoryMappedView considers a size of 0 to mean "entire stream" + if (sourceSize == 0) + { + return new MemoryStream(); + } + switch (compression.Method()) { case CompressionMethod.None: diff --git a/LSLib/LS/PackageFormat.cs b/LSLib/LS/PackageFormat.cs index 37e27b29..1460810a 100644 --- a/LSLib/LS/PackageFormat.cs +++ b/LSLib/LS/PackageFormat.cs @@ -1,7 +1,6 @@ using LSLib.Granny.GR2; using LSLib.LS.Enums; using System; -using System.Reflection.PortableExecutable; using System.Runtime.InteropServices; namespace LSLib.LS; diff --git a/LSLib/LS/PackageReader.cs b/LSLib/LS/PackageReader.cs index 5e3f50c3..433f8e71 100644 --- a/LSLib/LS/PackageReader.cs +++ b/LSLib/LS/PackageReader.cs @@ -48,7 +48,7 @@ public void OpenPart(int index, string path) { var file = File.OpenRead(path); Parts[index] = MemoryMappedFile.CreateFromFile(file, null, file.Length, MemoryMappedFileAccess.Read, HandleInheritability.None, false); - Views[index] = MetadataFile.CreateViewAccessor(0, file.Length, MemoryMappedFileAccess.Read); + Views[index] = MetadataFile.CreateViewAccessor(0, 0, 
MemoryMappedFileAccess.Read); } public void OpenStreams(int numParts) @@ -169,6 +169,7 @@ private Package ReadHeaderAndFileList(MemoryMappedViewAccessor v if (Pak.Metadata.Version > 10) { + Pak.Metadata.DataOffset = (uint)(offset + Marshal.SizeOf()); ReadCompressedFileList(view, (long)Pak.Metadata.FileListOffset); } else @@ -208,21 +209,21 @@ private void UnpackSolidSegment(MemoryMappedViewAccessor view) } } - if (firstOffset != 7 || lastOffset - firstOffset != totalSizeOnDisk) + if (firstOffset != Pak.Metadata.DataOffset + 7 || lastOffset - firstOffset != totalSizeOnDisk) { string msg = $"Incorrectly compressed solid archive; offsets {firstOffset}/{lastOffset}, bytes {totalSizeOnDisk}"; throw new InvalidDataException(msg); } // Decompress all files as a single frame (solid) - byte[] frame = new byte[lastOffset]; - view.ReadArray(0, frame, 0, (int)lastOffset); + byte[] frame = new byte[lastOffset - Pak.Metadata.DataOffset]; + view.ReadArray(Pak.Metadata.DataOffset, frame, 0, (int)(lastOffset - Pak.Metadata.DataOffset)); byte[] decompressed = Native.LZ4FrameCompressor.Decompress(frame); var decompressedStream = new MemoryStream(decompressed); // Update offsets to point to the decompressed chunk - ulong offset = 7; + ulong offset = Pak.Metadata.DataOffset + 7; ulong compressedOffset = 0; foreach (var entry in Pak.Files) { diff --git a/LSLib/LS/PackageWriter.cs b/LSLib/LS/PackageWriter.cs index 7b9524ae..a2ced39d 100644 --- a/LSLib/LS/PackageWriter.cs +++ b/LSLib/LS/PackageWriter.cs @@ -11,8 +11,6 @@ namespace LSLib.LS; - - public class PackageBuildTransientFile : PackagedFileInfoCommon { } diff --git a/LSLib/LS/VFS.cs b/LSLib/LS/VFS.cs index 5ade6d86..bded8626 100644 --- a/LSLib/LS/VFS.cs +++ b/LSLib/LS/VFS.cs @@ -9,16 +9,18 @@ namespace LSLib.LS; public class VFSDirectory { + public string Path; public Dictionary Dirs; public Dictionary Files; - public VFSDirectory GetOrAddDirectory(string name) + public VFSDirectory GetOrAddDirectory(string absolutePath, string name) { Dirs ??= []; if (!Dirs.TryGetValue(name, out var dir)) { dir = new VFSDirectory(); + dir.Path = absolutePath; Dirs[name] = dir; } @@ -74,35 +76,39 @@ public void AttachRoot(string path) RootDir = path; } - public void AttachGameDirectory(string gameDataPath) + public void AttachGameDirectory(string gameDataPath, bool excludeAssets = true) { AttachRoot(gameDataPath); // List of packages we won't ever load // These packages don't contain any mod resources, but have a large // file table that makes loading unneccessarily slow. 
- HashSet packageBlacklist = - [ - "Assets.pak", - "Effects.pak", - "Engine.pak", - "EngineShaders.pak", - "Game.pak", - "GamePlatform.pak", - "Gustav_NavCloud.pak", - "Gustav_Textures.pak", - "Gustav_Video.pak", - "Icons.pak", - "LowTex.pak", - "Materials.pak", - "Minimaps.pak", - "Models.pak", - "PsoCache.pak", - "SharedSoundBanks.pak", - "SharedSounds.pak", - "Textures.pak", - "VirtualTextures.pak" - ]; + HashSet packageBlacklist = []; + + if (excludeAssets) + { + packageBlacklist = [ + "Assets.pak", + "Effects.pak", + "Engine.pak", + "EngineShaders.pak", + "Game.pak", + "GamePlatform.pak", + "Gustav_NavCloud.pak", + "Gustav_Textures.pak", + "Gustav_Video.pak", + "Icons.pak", + "LowTex.pak", + "Materials.pak", + "Minimaps.pak", + "Models.pak", + "PsoCache.pak", + "SharedSoundBanks.pak", + "SharedSounds.pak", + "Textures.pak", + "VirtualTextures.pak" + ]; + } // Collect priority value from headers var packagePriorities = new List>(); @@ -147,7 +153,7 @@ private void TryAddFile(PackagedFileInfo file) var endPos = path.IndexOf('/', namePos); if (endPos >= 0) { - node = node.GetOrAddDirectory(path.Substring(namePos, endPos - namePos)); + node = node.GetOrAddDirectory(path.Substring(0, endPos), path.Substring(namePos, endPos - namePos)); namePos = endPos + 1; } else @@ -190,7 +196,7 @@ public VFSDirectory FindVFSDirectory(string path) public bool DirectoryExists(string path) { - if (FindVFSDirectory(path) != null) return true; + if (FindVFSDirectory(Canonicalize(path)) != null) return true; return RootDir != null && Directory.Exists(Path.Combine(RootDir, path)); } @@ -224,9 +230,14 @@ public PackagedFileInfo FindVFSFile(string path) } while (true); } + public string Canonicalize(string path) + { + return path.Replace('\\', '/'); + } + public bool FileExists(string path) { - if (FindVFSFile(path) != null) return true; + if (FindVFSFile(Canonicalize(path)) != null) return true; return RootDir != null && File.Exists(Path.Combine(RootDir, path)); } @@ -244,7 +255,7 @@ public List EnumerateFiles(string path, bool recursive, Func results, string path, bool recursive, Func filter) { - var dir = FindVFSDirectory(path); + var dir = FindVFSDirectory(Canonicalize(path)); if (dir != null) { EnumerateFiles(results, dir, recursive, filter); @@ -252,12 +263,16 @@ public void EnumerateFiles(List results, string path, bool recursive, Fu if (RootDir != null) { - var files = Directory.EnumerateFiles(path, "*", recursive ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly); - foreach (var file in files) + var fsDir = Path.Join(RootDir, path); + if (Directory.Exists(fsDir)) { - if (filter(file)) + var files = Directory.EnumerateFiles(fsDir, "*", recursive ? 
SearchOption.AllDirectories : SearchOption.TopDirectoryOnly); + foreach (var file in files) { - results.Add(Path.GetRelativePath(path, file)); + if (filter(file)) + { + results.Add(Path.GetRelativePath(RootDir, file)); + } } } } @@ -272,20 +287,24 @@ public List EnumerateDirectories(string path) public void EnumerateDirectories(List results, string path) { - var dir = FindVFSDirectory(path); + var dir = FindVFSDirectory(Canonicalize(path)); if (dir?.Dirs != null) { foreach (var subdir in dir.Dirs) { - results.Add(subdir.Key); + results.Add(subdir.Value.Path); } } if (RootDir != null) { - foreach (var subdir in Directory.EnumerateDirectories(path)) + var fsDir = Path.Join(RootDir, path); + if (Directory.Exists(fsDir)) { - results.Add(Path.GetRelativePath(path, subdir)); + foreach (var subdir in Directory.EnumerateDirectories(fsDir)) + { + results.Add(Path.GetRelativePath(RootDir, subdir)); + } } } } @@ -296,9 +315,9 @@ private void EnumerateFiles(List results, VFSDirectory dir, bool recursi { foreach (var file in dir.Files) { - if (filter(file.Key)) + if (!file.Value.IsDeletion() && filter(file.Key)) { - results.Add(file.Key); + results.Add(file.Value.Name); } } } @@ -314,8 +333,8 @@ private void EnumerateFiles(List results, VFSDirectory dir, bool recursi public bool TryOpenFromVFS(string path, out Stream stream) { - var file = FindVFSFile(path); - if (file != null) + var file = FindVFSFile(Canonicalize(path)); + if (file != null && !file.IsDeletion()) { stream = file.CreateContentReader(); return true; From dfda62a2e2b44e98d8b46952daf4033a361dd733 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Fri, 29 Dec 2023 19:49:55 +0100 Subject: [PATCH 057/139] Allow partial LSX serialization of resources --- LSLib/LS/Resources/LSX/LSXWriter.cs | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/LSLib/LS/Resources/LSX/LSXWriter.cs b/LSLib/LS/Resources/LSX/LSXWriter.cs index 7a0d9bc5..a618063b 100644 --- a/LSLib/LS/Resources/LSX/LSXWriter.cs +++ b/LSLib/LS/Resources/LSX/LSXWriter.cs @@ -14,9 +14,9 @@ public class LSXWriter(Stream stream) public LSXVersion Version = LSXVersion.V3; public NodeSerializationSettings SerializationSettings = new(); - public void Write(Resource rsrc) + private XmlWriter PrepareWrite(uint? 
majorVersion) { - if (Version == LSXVersion.V3 && rsrc.Metadata.MajorVersion == 4) + if (Version == LSXVersion.V3 && majorVersion != null && majorVersion == 4) { throw new InvalidDataException("Cannot resave a BG3 (v4.x) resource in D:OS2 (v3.x) file format, maybe you have the wrong game selected?"); } @@ -27,7 +27,12 @@ public void Write(Resource rsrc) IndentChars = "\t" }; - using (this.writer = XmlWriter.Create(stream, settings)) + return XmlWriter.Create(stream, settings); + } + + public void Write(Resource rsrc) + { + using (this.writer = PrepareWrite(rsrc.Metadata.MajorVersion)) { writer.WriteStartElement("save"); @@ -43,7 +48,14 @@ public void Write(Resource rsrc) WriteRegions(rsrc); writer.WriteEndElement(); - writer.Flush(); + } + } + + public void Write(Node node) + { + using (this.writer = PrepareWrite(null)) + { + WriteNode(node); } } From 786b998a552b5833d860044559e7829f133a8c6c Mon Sep 17 00:00:00 2001 From: Norbyte Date: Fri, 29 Dec 2023 19:50:15 +0100 Subject: [PATCH 058/139] Add support for reading BG3-style LSX vectors/matrices --- LSLib/LS/Resources/LSX/LSXReader.cs | 68 ++++++++++++++++++++++++++++- 1 file changed, 67 insertions(+), 1 deletion(-) diff --git a/LSLib/LS/Resources/LSX/LSXReader.cs b/LSLib/LS/Resources/LSX/LSXReader.cs index bafc32d8..9516dd87 100644 --- a/LSLib/LS/Resources/LSX/LSXReader.cs +++ b/LSLib/LS/Resources/LSX/LSXReader.cs @@ -1,4 +1,5 @@ using LSLib.LS.Enums; +using OpenTK.Mathematics; using System; using System.Collections.Generic; using System.Diagnostics; @@ -18,6 +19,8 @@ public class LSXReader(Stream stream) : IDisposable private int lastLine, lastColumn; private LSXVersion Version = LSXVersion.V3; public NodeSerializationSettings SerializationSettings = new(); + private NodeAttribute LastAttribute = null; + private int ValueOffset = 0; public void Dispose() { @@ -163,6 +166,27 @@ private void ReadElement() { attr.FromString(attrValue, SerializationSettings); } + else + { + // Preallocate value for vector/matrix types + switch (attr.Type) + { + case NodeAttribute.DataType.DT_Vec2: attr.Value = new float[2]; break; + case NodeAttribute.DataType.DT_Vec3: attr.Value = new float[3]; break; + case NodeAttribute.DataType.DT_Vec4: attr.Value = new float[4]; break; + case NodeAttribute.DataType.DT_Mat2: attr.Value = new float[2*2]; break; + case NodeAttribute.DataType.DT_Mat3: attr.Value = new float[3*3]; break; + case NodeAttribute.DataType.DT_Mat3x4: attr.Value = new float[3*4]; break; + case NodeAttribute.DataType.DT_Mat4: attr.Value = new float[4*4]; break; + case NodeAttribute.DataType.DT_Mat4x3: attr.Value = new float[4*3]; break; + case NodeAttribute.DataType.DT_TranslatedString: break; + case NodeAttribute.DataType.DT_TranslatedFSString: break; + default: throw new Exception($"Attribute of type {attr.Type} should have an inline value!"); + } + + ValueOffset = 0; + LastAttribute = attr; + } if (attr.Type == NodeAttribute.DataType.DT_TranslatedString) { @@ -186,12 +210,45 @@ private void ReadElement() stack.Last().Attributes.Add(attrName, attr); break; + case "float2": + { + var val = (float[])LastAttribute.Value; + val[ValueOffset++] = Single.Parse(reader["x"]); + val[ValueOffset++] = Single.Parse(reader["y"]); + break; + } + + case "float3": + { + var val = (float[])LastAttribute.Value; + val[ValueOffset++] = Single.Parse(reader["x"]); + val[ValueOffset++] = Single.Parse(reader["y"]); + val[ValueOffset++] = Single.Parse(reader["z"]); + break; + } + + case "float4": + { + var val = (float[])LastAttribute.Value; + val[ValueOffset++] = 
Single.Parse(reader["x"]); + val[ValueOffset++] = Single.Parse(reader["y"]); + val[ValueOffset++] = Single.Parse(reader["z"]); + val[ValueOffset++] = Single.Parse(reader["w"]); + break; + } + + case "mat2": + case "mat3": + case "mat4": + // These are read in the float2/3/4 nodes + break; + case "children": // Child nodes are handled in the "node" case break; default: - throw new InvalidFormatException(String.Format("Unknown element encountered: {0}", reader.Name)); + throw new InvalidFormatException($"Unknown element encountered: {reader.Name}"); } } @@ -218,6 +275,15 @@ private void ReadEndElement() stack.RemoveAt(stack.Count - 1); break; + // Value nodes, processed in ReadElement() + case "float2": + case "float3": + case "float4": + case "mat2": + case "mat3": + case "mat4": + break; + default: throw new InvalidFormatException(String.Format("Unknown element encountered: {0}", reader.Name)); } From b18e70dc81a6d84e9e2707abea7c3697ab56d2a5 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Fri, 29 Dec 2023 19:50:26 +0100 Subject: [PATCH 059/139] Fix empty TranslatedString string representation --- LSLib/LS/NodeAttribute.cs | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/LSLib/LS/NodeAttribute.cs b/LSLib/LS/NodeAttribute.cs index 57789919..a66bcb23 100644 --- a/LSLib/LS/NodeAttribute.cs +++ b/LSLib/LS/NodeAttribute.cs @@ -12,7 +12,14 @@ public class TranslatedString public override string ToString() { - return Value; + if (Value != null && Value != "") + { + return Value; + } + else + { + return Handle + ";" + Version; + } } } From 0f8248fd09ccad555970f3d53117b2ed07fd4599 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Fri, 29 Dec 2023 19:51:08 +0100 Subject: [PATCH 060/139] Workaround for reading loca from unseekable stream --- LSLib/LS/Localization.cs | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/LSLib/LS/Localization.cs b/LSLib/LS/Localization.cs index 0d359709..6e028b4c 100644 --- a/LSLib/LS/Localization.cs +++ b/LSLib/LS/Localization.cs @@ -87,7 +87,11 @@ public LocaResource Read() var entries = new LocaEntry[header.NumEntries]; BinUtils.ReadStructs(reader, entries); - Stream.Position = header.TextsOffset; + if (Stream.Position != header.TextsOffset) + { + Stream.Position = header.TextsOffset; + } + foreach (var entry in entries) { var text = Encoding.UTF8.GetString(reader.ReadBytes((int)entry.Length - 1)); @@ -237,7 +241,7 @@ public enum LocaFormat Xml }; -public class LocaUtils +public static class LocaUtils { public static LocaFormat ExtensionToFileFormat(string path) { From c25bfc4f4a8a99fb7618bec256cb4d1966df8b42 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Fri, 29 Dec 2023 19:51:40 +0100 Subject: [PATCH 061/139] Fix VFS path handling --- LSLib/LS/VFS.cs | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/LSLib/LS/VFS.cs b/LSLib/LS/VFS.cs index bded8626..f7d93fe0 100644 --- a/LSLib/LS/VFS.cs +++ b/LSLib/LS/VFS.cs @@ -352,13 +352,12 @@ public bool TryOpen(string path, out Stream stream) if (RootDir != null) { - try + var realPath = Path.Join(RootDir, path); + if (File.Exists(realPath)) { - stream = File.OpenRead(Path.Join(RootDir, path)); + stream = File.OpenRead(realPath); return true; } - catch (FileNotFoundException) { } - catch (DirectoryNotFoundException) { } } stream = null; From 68ea05c90beccb7b764193d84df9c7a499c70ae9 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Fri, 29 Dec 2023 19:51:56 +0100 Subject: [PATCH 062/139] Read localization packages during VFS startup --- LSLib/LS/VFS.cs | 17 ++++++++++++++++- 1 
file changed, 16 insertions(+), 1 deletion(-) diff --git a/LSLib/LS/VFS.cs b/LSLib/LS/VFS.cs index f7d93fe0..08d34b4c 100644 --- a/LSLib/LS/VFS.cs +++ b/LSLib/LS/VFS.cs @@ -106,7 +106,11 @@ public void AttachGameDirectory(string gameDataPath, bool excludeAssets = true) "SharedSoundBanks.pak", "SharedSounds.pak", "Textures.pak", - "VirtualTextures.pak" + "VirtualTextures.pak", + // Localization + "English_Animations.pak", + "VoiceMeta.pak", + "Voice.pak" ]; } @@ -123,6 +127,17 @@ public void AttachGameDirectory(string gameDataPath, bool excludeAssets = true) AttachPackage(path); } } + + foreach (var path in Directory.GetFiles(Path.Join(gameDataPath, "Localization"), "*.pak")) + { + var baseName = Path.GetFileName(path); + if (!packageBlacklist.Contains(baseName) + // Don't load 2nd, 3rd, ... parts of a multi-part archive + && !ModPathVisitor.archivePartRe.IsMatch(baseName)) + { + AttachPackage(path); + } + } } public void AttachPackage(string path) From 93de70e5a0bedf6d282c19c32f65327ee8cb774a Mon Sep 17 00:00:00 2001 From: Norbyte Date: Fri, 29 Dec 2023 19:52:10 +0100 Subject: [PATCH 063/139] Fix empty page list after VT build --- LSLib/VirtualTextures/Build.cs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/LSLib/VirtualTextures/Build.cs b/LSLib/VirtualTextures/Build.cs index 82e5d359..8043613b 100644 --- a/LSLib/VirtualTextures/Build.cs +++ b/LSLib/VirtualTextures/Build.cs @@ -857,7 +857,10 @@ private void PreparePageFiles() { PageFiles = SetBuilder.BuildSingleFile(); } + } + private void BuildPageFileMetadata() + { TileSet.PageFileInfos = []; uint firstPageIndex = 0; foreach (var file in PageFiles) @@ -917,6 +920,7 @@ private void BuildGTS() SetBuilder.CommitPageFiles(); OnStepStarted("Generating tile lists"); + BuildPageFileMetadata(); BuildFlatTileList(); OnStepStarted("Building metadata"); From 104312bfea2004c71c4b4e5bc4b7943f32231d56 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Fri, 29 Dec 2023 19:52:39 +0100 Subject: [PATCH 064/139] Fix tile set grid grow logic --- LSLib/LS/Common.cs | 2 +- LSLib/VirtualTextures/Geometry.cs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/LSLib/LS/Common.cs b/LSLib/LS/Common.cs index 7a4ca861..3134e332 100644 --- a/LSLib/LS/Common.cs +++ b/LSLib/LS/Common.cs @@ -9,7 +9,7 @@ public static class Common public const int MinorVersion = 19; - public const int PatchVersion = 1; + public const int PatchVersion = 2; // Version of LSTools profile data in generated DAE files public const int ColladaMetadataVersion = 3; diff --git a/LSLib/VirtualTextures/Geometry.cs b/LSLib/VirtualTextures/Geometry.cs index 596e8e63..17e6a68c 100644 --- a/LSLib/VirtualTextures/Geometry.cs +++ b/LSLib/VirtualTextures/Geometry.cs @@ -23,7 +23,7 @@ private void ResizePlacementGrid(int w, int h) private void GrowPlacementGrid() { - if (PlacementGridWidth <= PlacementGridHeight) + if (PlacementGridWidth * PlacementTileWidth <= PlacementGridHeight * PlacementTileHeight) { ResizePlacementGrid(PlacementGridWidth * 2, PlacementGridHeight); } From bc22053b8c52d5a5cc3fd3304ef38de27a1ea158 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sat, 30 Dec 2023 16:00:05 +0100 Subject: [PATCH 065/139] Fix VT mip count calculation to match graphine --- LSLib/VirtualTextures/Geometry.cs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/LSLib/VirtualTextures/Geometry.cs b/LSLib/VirtualTextures/Geometry.cs index 17e6a68c..8f684949 100644 --- a/LSLib/VirtualTextures/Geometry.cs +++ b/LSLib/VirtualTextures/Geometry.cs @@ -124,23 +124,23 @@ private 
void UpdateGeometry() var minTexSize = 0x10000; foreach (var tex in Textures) { - minTexSize = Math.Min(minTexSize, Math.Min(tex.Height, tex.Width)); + minTexSize = Math.Min(minTexSize, Math.Min(tex.Height / BuildData.RawTileHeight, tex.Width / BuildData.RawTileHeight)); } BuildData.MipFileStartLevel = 0; - while (minTexSize >= BuildData.RawTileHeight) + while (minTexSize > 0) { BuildData.MipFileStartLevel++; minTexSize >>= 1; } - // Max W/H of all textures - var maxSize = Math.Max(BuildData.TotalWidth, BuildData.TotalHeight); + // Min W/H of all textures + var minSize = Math.Min(BuildData.TotalWidth / BuildData.RawTileHeight, BuildData.TotalHeight / BuildData.RawTileHeight); BuildData.PageFileLevels = 0; - while (maxSize >= BuildData.RawTileHeight) + while (minSize > 0) { BuildData.PageFileLevels++; - maxSize >>= 1; + minSize >>= 1; } BuildData.BuildLevels = BuildData.PageFileLevels + 1; From 08caebed541306b59a3672dc9a3983bb2f115f75 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Mon, 1 Jan 2024 20:55:02 +0100 Subject: [PATCH 066/139] Various header management fixes --- LSLib/LS/BinUtils.cs | 16 ++++++++++++++++ LSLib/LS/PackageFormat.cs | 26 +++++++++++++++++--------- LSLib/LS/PackageReader.cs | 2 +- LSLib/LS/PackageWriter.cs | 1 + 4 files changed, 35 insertions(+), 10 deletions(-) diff --git a/LSLib/LS/BinUtils.cs b/LSLib/LS/BinUtils.cs index dfca1f11..34da3a41 100644 --- a/LSLib/LS/BinUtils.cs +++ b/LSLib/LS/BinUtils.cs @@ -156,6 +156,22 @@ public static void ReadStructs(BinaryReader reader, T[] elements) handle.Free(); } + public static void ReadStructs(MemoryMappedViewAccessor view, long offset, T[] elements) + { + int elementSize = Marshal.SizeOf(typeof(T)); + int bytes = elementSize * elements.Length; + byte[] readBuffer = new byte[bytes]; + view.ReadArray(offset, readBuffer, 0, bytes); + GCHandle handle = GCHandle.Alloc(readBuffer, GCHandleType.Pinned); + var addr = handle.AddrOfPinnedObject(); + for (var i = 0; i < elements.Length; i++) + { + var elementAddr = new IntPtr(addr.ToInt64() + elementSize * i); + elements[i] = Marshal.PtrToStructure(elementAddr); + } + handle.Free(); + } + public static void WriteStruct(BinaryWriter writer, ref T inStruct) { int count = Marshal.SizeOf(typeof(T)); diff --git a/LSLib/LS/PackageFormat.cs b/LSLib/LS/PackageFormat.cs index 1460810a..bdf6edce 100644 --- a/LSLib/LS/PackageFormat.cs +++ b/LSLib/LS/PackageFormat.cs @@ -1,6 +1,7 @@ using LSLib.Granny.GR2; using LSLib.LS.Enums; using System; +using System.Reflection.PortableExecutable; using System.Runtime.InteropServices; namespace LSLib.LS; @@ -114,7 +115,7 @@ public static ILSPKHeader FromCommonHeader(PackageHeaderCommon h) } [StructLayout(LayoutKind.Sequential, Pack = 1)] -internal struct LSPKHeader13 : ILSPKHeader +internal unsafe struct LSPKHeader13 : ILSPKHeader { public UInt32 Version; public UInt32 FileListOffset; @@ -122,13 +123,11 @@ internal struct LSPKHeader13 : ILSPKHeader public UInt16 NumParts; public Byte Flags; public Byte Priority; - - [MarshalAs(UnmanagedType.ByValArray, SizeConst = 16)] - public byte[] Md5; + public fixed byte Md5[16]; public readonly PackageHeaderCommon ToCommonHeader() { - return new PackageHeaderCommon + var header = new PackageHeaderCommon { Version = Version, DataOffset = 0, @@ -137,22 +136,31 @@ public readonly PackageHeaderCommon ToCommonHeader() NumParts = NumParts, Flags = (PackageFlags)Flags, Priority = Priority, - Md5 = Md5 + Md5 = new byte[16] }; + + fixed (byte* md = Md5) + { + Marshal.Copy(new IntPtr(md), header.Md5, 0, 0x10); + } + + return 
header; } public static ILSPKHeader FromCommonHeader(PackageHeaderCommon h) { - return new LSPKHeader13 + var header = new LSPKHeader13 { Version = h.Version, FileListOffset = (UInt32)h.FileListOffset, FileListSize = h.FileListSize, NumParts = (UInt16)h.NumParts, Flags = (byte)h.Flags, - Priority = h.Priority, - Md5 = h.Md5 + Priority = h.Priority }; + + Marshal.Copy(h.Md5, 0, new IntPtr(header.Md5), 0x10); + return header; } } diff --git a/LSLib/LS/PackageReader.cs b/LSLib/LS/PackageReader.cs index 433f8e71..a9abc500 100644 --- a/LSLib/LS/PackageReader.cs +++ b/LSLib/LS/PackageReader.cs @@ -141,7 +141,7 @@ private void ReadFileList(MemoryMappedViewAccessor view, long offset) where TFile : struct, ILSPKFile { var entries = new TFile[Pak.Metadata.NumFiles]; - view.ReadArray(offset, entries, 0, (int)Pak.Metadata.NumFiles); + BinUtils.ReadStructs(view, offset, entries); foreach (var entry in entries) { diff --git a/LSLib/LS/PackageWriter.cs b/LSLib/LS/PackageWriter.cs index a2ced39d..5624f29d 100644 --- a/LSLib/LS/PackageWriter.cs +++ b/LSLib/LS/PackageWriter.cs @@ -303,6 +303,7 @@ public void Write() Metadata.Version = (UInt32)Build.Version; Metadata.Flags = Build.Flags; Metadata.Priority = Build.Priority; + Metadata.Md5 = new byte[16]; switch (Build.Version) { From cce5f57f33dc37d51f0729dae103df8500d2c43d Mon Sep 17 00:00:00 2001 From: Norbyte Date: Tue, 2 Jan 2024 16:29:50 +0100 Subject: [PATCH 067/139] Fix resource leak on failed package read --- LSLib/LS/PackageReader.cs | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/LSLib/LS/PackageReader.cs b/LSLib/LS/PackageReader.cs index a9abc500..ded94ae9 100644 --- a/LSLib/LS/PackageReader.cs +++ b/LSLib/LS/PackageReader.cs @@ -241,10 +241,8 @@ private void UnpackSolidSegment(MemoryMappedViewAccessor view) } } - public Package Read(string path, bool metadataOnly = false) + public Package ReadInternal(string path) { - MetadataOnly = metadataOnly; - Pak = new Package(path); var view = Pak.MetadataView; @@ -281,4 +279,19 @@ public Package Read(string path, bool metadataOnly = false) throw new NotAPackageException("No valid signature found in package file"); } + + public Package Read(string path, bool metadataOnly = false) + { + MetadataOnly = metadataOnly; + + try + { + return ReadInternal(path); + } + catch (Exception) + { + Pak.Dispose(); + throw; + } + } } From d315471e13a9d7db80d91aee7e307f9c6c83e821 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Tue, 2 Jan 2024 16:30:05 +0100 Subject: [PATCH 068/139] v7/v9 package padding logic fix --- LSLib/LS/PackageFormat.cs | 4 ++-- LSLib/LS/PackageReader.cs | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/LSLib/LS/PackageFormat.cs b/LSLib/LS/PackageFormat.cs index bdf6edce..9051982a 100644 --- a/LSLib/LS/PackageFormat.cs +++ b/LSLib/LS/PackageFormat.cs @@ -63,7 +63,7 @@ public static ILSPKHeader FromCommonHeader(PackageHeaderCommon h) return new LSPKHeader7 { Version = h.Version, - DataOffset = (uint)(h.FileListOffset + h.FileListSize), + DataOffset = h.DataOffset, NumParts = h.NumParts, FileListSize = h.FileListSize, LittleEndian = 0, @@ -104,7 +104,7 @@ public static ILSPKHeader FromCommonHeader(PackageHeaderCommon h) return new LSPKHeader10 { Version = h.Version, - DataOffset = (uint)(h.FileListOffset + 4 + h.FileListSize), + DataOffset = h.DataOffset, FileListSize = h.FileListSize, NumParts = (UInt16)h.NumParts, Flags = (byte)h.Flags, diff --git a/LSLib/LS/PackageReader.cs b/LSLib/LS/PackageReader.cs index ded94ae9..f433ef47 100644 --- 
a/LSLib/LS/PackageReader.cs +++ b/LSLib/LS/PackageReader.cs @@ -271,7 +271,7 @@ public Package ReadInternal(string path) } // Check for v9 and v7 package headers - version = view.ReadInt32(4); + version = view.ReadInt32(0); if (version == 7 || version == 9) { return ReadHeaderAndFileList(view, 0); From fcdfec2562e77397c4ee3dc72a2185c8a29a855a Mon Sep 17 00:00:00 2001 From: Norbyte Date: Tue, 2 Jan 2024 16:30:20 +0100 Subject: [PATCH 069/139] Package path canonicalization fix --- LSLib/LS/PackageWriter.cs | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/LSLib/LS/PackageWriter.cs b/LSLib/LS/PackageWriter.cs index 5624f29d..f8f9d4d6 100644 --- a/LSLib/LS/PackageWriter.cs +++ b/LSLib/LS/PackageWriter.cs @@ -59,7 +59,7 @@ private PackageBuildTransientFile WriteFile(PackageBuildInputFile input) Stream stream = Streams.Last(); var packaged = new PackageBuildTransientFile { - Name = input.Path, + Name = input.Path.Replace('\\', '/'), UncompressedSize = (ulong)uncompressed.Length, SizeOnDisk = (ulong)compressed.Length, ArchivePart = (UInt32)(Streams.Count - 1), @@ -109,12 +109,22 @@ private void PackV7(FileStream mainStream) where THeader : ILSPKHeader where TFile : ILSPKFile { + // <= v9 packages don't support LZ4 + if ((Build.Version == PackageVersion.V7 || Build.Version == PackageVersion.V9) && Build.Compression == CompressionMethod.LZ4) + { + Build.Compression = CompressionMethod.Zlib; + } + Metadata.NumFiles = (uint)Build.Files.Count; Metadata.FileListSize = (UInt32)(Marshal.SizeOf(typeof(TFile)) * Build.Files.Count); using var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true); - Metadata.DataOffset = 4 + (UInt32)Marshal.SizeOf(typeof(THeader)) + Metadata.FileListSize; + Metadata.DataOffset = (UInt32)Marshal.SizeOf(typeof(THeader)) + Metadata.FileListSize; + if (Metadata.Version >= 10) + { + Metadata.DataOffset += 4; + } int paddingLength = Build.Version.PaddingSize(); if (Metadata.DataOffset % paddingLength > 0) @@ -130,7 +140,10 @@ private void PackV7(FileStream mainStream) var writtenFiles = PackFiles(); mainStream.Seek(0, SeekOrigin.Begin); - writer.Write(PackageHeaderCommon.Signature); + if (Metadata.Version >= 10) + { + writer.Write(PackageHeaderCommon.Signature); + } Metadata.NumParts = (UInt16)Streams.Count; Metadata.Md5 = ComputeArchiveHash(); From e9dc952496bc7f9b42a54325460afede8965bbd3 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sat, 6 Jan 2024 12:36:57 +0100 Subject: [PATCH 070/139] Fix debug dumping from compressed stream --- ConverterApp/DebugDumper.cs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/ConverterApp/DebugDumper.cs b/ConverterApp/DebugDumper.cs index 3a020106..df1d2ad8 100644 --- a/ConverterApp/DebugDumper.cs +++ b/ConverterApp/DebugDumper.cs @@ -253,7 +253,10 @@ private void RunTasks() Stream storyStream; if (storySave != null) { - storyStream = storySave.CreateContentReader(); + var bin = storySave.CreateContentReader(); + storyStream = new MemoryStream(); + bin.CopyTo(storyStream); + storyStream.Position = 0; } else { From 7175e27106f3c601a912eb77d197e8e6302ba705 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sat, 6 Jan 2024 12:37:09 +0100 Subject: [PATCH 071/139] Fix reading stat enumerations --- LSLib/LS/Mods/ModResources.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/LSLib/LS/Mods/ModResources.cs b/LSLib/LS/Mods/ModResources.cs index 84fcacaf..2d5ecf01 100644 --- a/LSLib/LS/Mods/ModResources.cs +++ b/LSLib/LS/Mods/ModResources.cs @@ -120,7 +120,7 @@ private void 
DiscoverModStatsStructure(ModInfo mod) mod.ModifiersFile = modifiersPath; } - var valueListsPath = Path.Join(mod.PublicPath, @"Stats/Generated/Structure/ValueLists.txt"); + var valueListsPath = Path.Join(mod.PublicPath, @"Stats/Generated/Structure/Base/ValueLists.txt"); if (FS.FileExists(valueListsPath)) { mod.ValueListsFile = valueListsPath; From 38986c1afad781942ff5390dc0e70fb482b50831 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sat, 6 Jan 2024 12:37:27 +0100 Subject: [PATCH 072/139] Avoid throwing when a pak could not be fully loaded --- LSLib/LS/PackageReader.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/LSLib/LS/PackageReader.cs b/LSLib/LS/PackageReader.cs index f433ef47..81fca9d9 100644 --- a/LSLib/LS/PackageReader.cs +++ b/LSLib/LS/PackageReader.cs @@ -290,7 +290,7 @@ public Package Read(string path, bool metadataOnly = false) } catch (Exception) { - Pak.Dispose(); + Pak?.Dispose(); throw; } } From 571ca427c9871d1ff475a13a30ff000b747f2d3f Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sat, 6 Jan 2024 12:41:42 +0100 Subject: [PATCH 073/139] Use correct line numbers for diagnostics --- LSLib/LS/Stats/Parser/Stat.lex | 2 +- LSLib/LS/Stats/Parser/StatParser.cs | 4 ++-- LSLib/LS/Stats/StatFileParser.cs | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/LSLib/LS/Stats/Parser/Stat.lex b/LSLib/LS/Stats/Parser/Stat.lex index dd7ce328..ac117c1f 100644 --- a/LSLib/LS/Stats/Parser/Stat.lex +++ b/LSLib/LS/Stats/Parser/Stat.lex @@ -10,7 +10,7 @@ namechar [a-zA-Z_] %% -data([ ]+)\"([^\"]+)\"([ ]+)\"(.*)\" { yylval = MakeDataProperty(yylloc, yytext); return (int)StatTokens.DATA_ITEM; } +data([ ]+)\"([^\"]+)\"([ ]+)\"(.*)\" { yylval = MakeDataProperty(tokLin, tokCol, tokELin, tokECol, yytext); return (int)StatTokens.DATA_ITEM; } /* Reserved words */ "new" return (int)StatTokens.NEW; diff --git a/LSLib/LS/Stats/Parser/StatParser.cs b/LSLib/LS/Stats/Parser/StatParser.cs index 48687691..b6fec8c7 100644 --- a/LSLib/LS/Stats/Parser/StatParser.cs +++ b/LSLib/LS/Stats/Parser/StatParser.cs @@ -32,7 +32,7 @@ protected string MakeString(string lit) return MakeLiteral(Regex.Unescape(lit.Substring(1, lit.Length - 2))); } - protected StatProperty MakeDataProperty(CodeLocation location, string lit) + protected StatProperty MakeDataProperty(int startLine, int startCol, int endLine, int endCol, string lit) { var re = new Regex(@"data\s+""([^""]+)""\s+""(.*)""\s*", RegexOptions.CultureInvariant); var matches = re.Match(lit); @@ -45,7 +45,7 @@ protected StatProperty MakeDataProperty(CodeLocation location, string lit) { Key = matches.Groups[1].Value, Value = matches.Groups[2].Value, - Location = location + Location = new CodeLocation(null, startLine, startCol, endLine, endCol) }; } } diff --git a/LSLib/LS/Stats/StatFileParser.cs b/LSLib/LS/Stats/StatFileParser.cs index 7409b8b6..a352bbe4 100644 --- a/LSLib/LS/Stats/StatFileParser.cs +++ b/LSLib/LS/Stats/StatFileParser.cs @@ -83,7 +83,7 @@ public void LogError(string code, string message, string path = null, int line = Code = code, Message = message, Path = path, - Line = line > 0 ? 
(line + 1) : 0, + Line = line, StatObjectName = statObjectName }); } From 8924b57c76cae6f009a41ea0576f4d2bfa466744 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sat, 6 Jan 2024 12:46:21 +0100 Subject: [PATCH 074/139] Allow disabling reference validation --- .gitignore | 13 +--- LSLib/LS/Stats/Parser/StatPropertyParser.cs | 14 ++-- LSLib/LS/Stats/StatFileParser.cs | 8 +- LSLib/LS/Stats/StatValueParsers.cs | 83 ++++++++++++--------- 4 files changed, 67 insertions(+), 51 deletions(-) diff --git a/.gitignore b/.gitignore index 6f53fd7c..71a86311 100644 --- a/.gitignore +++ b/.gitignore @@ -51,30 +51,25 @@ _ReSharper.Caches/ */x64 /External/gplex /External/gppg -/External/bullet-2.77 /External/protoc /External/protobuf-3.6.1 /External/vcpkg-master /External/x64-windows -/External/Glasses /External/*.zip -/TestApp /packages LSTools.VC.db *.opendb +launchSettings.json # Generated GpLex/GpPg files *.lex.cs *.yy.cs # Generated Protobuf files -DebuggerFrontend/osidebug.pb.h -DebuggerFrontend/osidebug.pb.cc DebuggerFrontend/Osidebug.cs StoryCompiler/Debuginfo.cs DebuggerFrontend/Debuginfo.cs DebuggerFrontend/DbgProtocol.cs -WwiseTool -UnpackGrid/Properties/launchSettings.json -TerrainFixup/Properties/launchSettings.json -VTexTool/Properties/launchSettings.json +StatFastParser +LSLibSearch +LSLibSearchIndexer diff --git a/LSLib/LS/Stats/Parser/StatPropertyParser.cs b/LSLib/LS/Stats/Parser/StatPropertyParser.cs index 107726a0..1ec955ba 100644 --- a/LSLib/LS/Stats/Parser/StatPropertyParser.cs +++ b/LSLib/LS/Stats/Parser/StatPropertyParser.cs @@ -36,15 +36,17 @@ public abstract class StatPropertyScanBase : AbstractScanner 0) { var parser = ParserFactory.CreateParser(arg.Type, null, null, Definitions); - parser.Parse(args[i], ref succeeded, ref errorText); + parser.Parse(Context, args[i], ref succeeded, ref errorText); if (!succeeded) { OnError($"'{action.Action}' argument {i + 1}: {errorText}"); @@ -126,6 +128,7 @@ public partial class StatPropertyParser { private IStatValueParser RequirementParser; private StatEnumeration RequirementsWithArgument; + private DiagnosticContext Context; private int LiteralStart; private StatActionValidator ActionValidator; private byte[] Source; @@ -136,11 +139,12 @@ public partial class StatPropertyParser private StatPropertyScanner StatScanner; public StatPropertyParser(StatPropertyScanner scnr, StatDefinitionRepository definitions, - StatValueParserFactory parserFactory, byte[] source, ExpressionType type) : base(scnr) + DiagnosticContext ctx, StatValueParserFactory parserFactory, byte[] source, ExpressionType type) : base(scnr) { + Context = ctx; StatScanner = scnr; Source = source; - ActionValidator = new StatActionValidator(definitions, parserFactory, type); + ActionValidator = new StatActionValidator(definitions, ctx, parserFactory, type); ActionValidator.OnError += (message) => { OnError(message); }; } @@ -268,7 +272,7 @@ private void Validate(IStatValueParser parser, string value) { bool succeeded = false; string errorText = null; - parser.Parse(value, ref succeeded, ref errorText); + parser.Parse(Context, value, ref succeeded, ref errorText); if (!succeeded) { errorText = $"'{value}': {errorText}"; diff --git a/LSLib/LS/Stats/StatFileParser.cs b/LSLib/LS/Stats/StatFileParser.cs index a352bbe4..e299f95a 100644 --- a/LSLib/LS/Stats/StatFileParser.cs +++ b/LSLib/LS/Stats/StatFileParser.cs @@ -210,12 +210,14 @@ public class StatLoader private readonly StatLoadingContext Context; private readonly StatValueParserFactory ParserFactory; private readonly 
StatLoaderReferenceValidator ReferenceValidator; + public readonly DiagnosticContext DiagContext; public StatLoader(StatLoadingContext ctx) { Context = ctx; - ReferenceValidator = new StatLoaderReferenceValidator(ctx); - ParserFactory = new StatValueParserFactory(ReferenceValidator); + ReferenceValidator = new(ctx); + ParserFactory = new(ReferenceValidator); + DiagContext = new(); } private List ParseStatStream(string path, Stream stream) @@ -312,7 +314,7 @@ private object ParseProperty(StatEntryType type, string propertyName, object val else if (field.Type != "Passthrough") { var parser = field.GetParser(ParserFactory, Context.Definitions); - parsed = parser.Parse((string)value, ref succeeded, ref errorText); + parsed = parser.Parse(DiagContext, value, ref succeeded, ref errorText); } else { diff --git a/LSLib/LS/Stats/StatValueParsers.cs b/LSLib/LS/Stats/StatValueParsers.cs index 43e5a679..e0d78872 100644 --- a/LSLib/LS/Stats/StatValueParsers.cs +++ b/LSLib/LS/Stats/StatValueParsers.cs @@ -8,9 +8,24 @@ namespace LSLib.LS.Stats; +public class DiagnosticContext +{ + public bool IgnoreMissingReferences = false; +} + public interface IStatValueParser { - object Parse(string value, ref bool succeeded, ref string errorText); + object Parse(DiagnosticContext ctx, object value, ref bool succeeded, ref string errorText); +} + +abstract public class StatStringParser : IStatValueParser +{ + abstract public object Parse(DiagnosticContext ctx, string value, ref bool succeeded, ref string errorText); + + public object Parse(DiagnosticContext ctx, object value, ref bool succeeded, ref string errorText) + { + return Parse(ctx, (string)value, ref succeeded, ref errorText); + } } public class StatReferenceConstraint @@ -24,9 +39,9 @@ public interface IStatReferenceValidator bool IsValidGuidResource(string name, string resourceType); } -public class BooleanParser : IStatValueParser +public class BooleanParser : StatStringParser { - public object Parse(string value, ref bool succeeded, ref string errorText) + public override object Parse(DiagnosticContext ctx, string value, ref bool succeeded, ref string errorText) { if (value == "true" || value == "false" || value == "") { @@ -42,9 +57,9 @@ public object Parse(string value, ref bool succeeded, ref string errorText) } } -public class Int32Parser : IStatValueParser +public class Int32Parser : StatStringParser { - public object Parse(string value, ref bool succeeded, ref string errorText) + public override object Parse(DiagnosticContext ctx, string value, ref bool succeeded, ref string errorText) { if (value == "") { @@ -65,9 +80,9 @@ public object Parse(string value, ref bool succeeded, ref string errorText) } } -public class FloatParser : IStatValueParser +public class FloatParser : StatStringParser { - public object Parse(string value, ref bool succeeded, ref string errorText) + public override object Parse(DiagnosticContext ctx, string value, ref bool succeeded, ref string errorText) { if (value == "") { @@ -88,11 +103,11 @@ public object Parse(string value, ref bool succeeded, ref string errorText) } } -public class EnumParser(StatEnumeration enumeration) : IStatValueParser +public class EnumParser(StatEnumeration enumeration) : StatStringParser { private readonly StatEnumeration Enumeration = enumeration ?? 
throw new ArgumentNullException(); - public object Parse(string value, ref bool succeeded, ref string errorText) + public override object Parse(DiagnosticContext ctx, string value, ref bool succeeded, ref string errorText) { if (value == null || value == "") { @@ -120,11 +135,11 @@ public object Parse(string value, ref bool succeeded, ref string errorText) } } -public class MultiValueEnumParser(StatEnumeration enumeration) : IStatValueParser +public class MultiValueEnumParser(StatEnumeration enumeration) : StatStringParser { private readonly EnumParser Parser = new(enumeration); - public object Parse(string value, ref bool succeeded, ref string errorText) + public override object Parse(DiagnosticContext ctx, string value, ref bool succeeded, ref string errorText) { succeeded = true; @@ -135,7 +150,7 @@ public object Parse(string value, ref bool succeeded, ref string errorText) foreach (var item in value.Split([';'])) { - Parser.Parse(item.Trim([' ']), ref succeeded, ref errorText); + Parser.Parse(ctx, item.Trim([' ']), ref succeeded, ref errorText); if (!succeeded) { errorText = $"Value '{item}' not supported; {errorText}"; @@ -147,9 +162,9 @@ public object Parse(string value, ref bool succeeded, ref string errorText) } } -public class StringParser : IStatValueParser +public class StringParser : StatStringParser { - public object Parse(string value, ref bool succeeded, ref string errorText) + public override object Parse(DiagnosticContext ctx, string value, ref bool succeeded, ref string errorText) { if (value.Length > 2048) { @@ -166,9 +181,9 @@ public object Parse(string value, ref bool succeeded, ref string errorText) } } -public class UUIDParser : IStatValueParser +public class UUIDParser : StatStringParser { - public object Parse(string value, ref bool succeeded, ref string errorText) + public override object Parse(DiagnosticContext ctx, string value, ref bool succeeded, ref string errorText) { if (value == "") { @@ -189,11 +204,11 @@ public object Parse(string value, ref bool succeeded, ref string errorText) } } -public class StatReferenceParser(IStatReferenceValidator validator, List constraints) : IStatValueParser +public class StatReferenceParser(IStatReferenceValidator validator, List constraints) : StatStringParser { - public object Parse(string value, ref bool succeeded, ref string errorText) + public override object Parse(DiagnosticContext ctx, string value, ref bool succeeded, ref string errorText) { - if (value == "") + if (ctx.IgnoreMissingReferences || value == "") { succeeded = true; return value; @@ -215,11 +230,11 @@ public object Parse(string value, ref bool succeeded, ref string errorText) } } -public class MultiValueStatReferenceParser(IStatReferenceValidator validator, List constraints) : IStatValueParser +public class MultiValueStatReferenceParser(IStatReferenceValidator validator, List constraints) : StatStringParser { private readonly StatReferenceParser Parser = new(validator, constraints); - public object Parse(string value, ref bool succeeded, ref string errorText) + public override object Parse(DiagnosticContext ctx, string value, ref bool succeeded, ref string errorText) { succeeded = true; @@ -228,7 +243,7 @@ public object Parse(string value, ref bool succeeded, ref string errorText) var trimmed = item.Trim([' ']); if (trimmed.Length > 0) { - Parser.Parse(trimmed, ref succeeded, ref errorText); + Parser.Parse(ctx, trimmed, ref succeeded, ref errorText); if (!succeeded) { return null; @@ -248,9 +263,9 @@ public enum ExpressionType }; public class 
ExpressionParser(String validatorType, StatDefinitionRepository definitions, - StatValueParserFactory parserFactory, ExpressionType type) : IStatValueParser + StatValueParserFactory parserFactory, ExpressionType type) : StatStringParser { - public virtual object Parse(string value, ref bool succeeded, ref string errorText) + public override object Parse(DiagnosticContext ctx, string value, ref bool succeeded, ref string errorText) { var valueBytes = Encoding.UTF8.GetBytes("__TYPE_" + validatorType + "__ " + value.TrimEnd()); using var buf = new MemoryStream(valueBytes); @@ -258,7 +273,7 @@ public virtual object Parse(string value, ref bool succeeded, ref string errorTe var scanner = new StatPropertyScanner(); scanner.SetSource(buf); - var parser = new StatPropertyParser(scanner, definitions, parserFactory, valueBytes, type); + var parser = new StatPropertyParser(scanner, definitions, ctx, parserFactory, valueBytes, type); parser.OnError += (string message) => errorTexts.Add(message); succeeded = parser.Parse(); if (!succeeded) @@ -282,9 +297,9 @@ public virtual object Parse(string value, ref bool succeeded, ref string errorTe } } -public class LuaExpressionParser : IStatValueParser +public class LuaExpressionParser : StatStringParser { - public virtual object Parse(string value, ref bool succeeded, ref string errorText) + public override object Parse(DiagnosticContext ctx, string value, ref bool succeeded, ref string errorText) { value = "BHAALS_BOON_SLAYER.Duration-1"; var valueBytes = Encoding.UTF8.GetBytes(value); @@ -307,9 +322,9 @@ public virtual object Parse(string value, ref bool succeeded, ref string errorTe } } -public class UseCostsParser(IStatReferenceValidator validator) : IStatValueParser +public class UseCostsParser(IStatReferenceValidator validator) : StatStringParser { - public virtual object Parse(string value, ref bool succeeded, ref string errorText) + public override object Parse(DiagnosticContext ctx, string value, ref bool succeeded, ref string errorText) { if (value.Length == 0) return value; @@ -325,7 +340,7 @@ public virtual object Parse(string value, ref bool succeeded, ref string errorTe return null; } - if (!validator.IsValidGuidResource(parts[0], "ActionResource") && !validator.IsValidGuidResource(parts[0], "ActionResourceGroup")) + if (!ctx.IgnoreMissingReferences && !validator.IsValidGuidResource(parts[0], "ActionResource") && !validator.IsValidGuidResource(parts[0], "ActionResourceGroup")) { errorText = $"Nonexistent action resource or action resource group: {parts[0]}"; return null; @@ -365,9 +380,9 @@ public virtual object Parse(string value, ref bool succeeded, ref string errorTe } } -public class DiceRollParser : IStatValueParser +public class DiceRollParser : StatStringParser { - public virtual object Parse(string value, ref bool succeeded, ref string errorText) + public override object Parse(DiagnosticContext ctx, string value, ref bool succeeded, ref string errorText) { if (value.Length == 0) return value; @@ -395,14 +410,14 @@ public class AnyParser(IEnumerable parsers, string message = n { private readonly List Parsers = parsers.ToList(); - public object Parse(string value, ref bool succeeded, ref string errorText) + public object Parse(DiagnosticContext ctx, object value, ref bool succeeded, ref string errorText) { List errors = []; foreach (var parser in Parsers) { succeeded = false; string error = null; - var result = parser.Parse(value, ref succeeded, ref error); + var result = parser.Parse(ctx, value, ref succeeded, ref error); if (succeeded) { 
return result; From def049512eed443ce7a84c5e54febf8fc3b7926e Mon Sep 17 00:00:00 2001 From: Norbyte Date: Tue, 9 Jan 2024 21:29:46 +0100 Subject: [PATCH 075/139] Silence warnings from gppg --- .editorconfig | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 .editorconfig diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..5be8a60b --- /dev/null +++ b/.editorconfig @@ -0,0 +1,4 @@ +root = true + +[*.cs] +dotnet_diagnostic.SYSLIB0051.severity=silent From 3fbdf87c3aef0ac5d5062d90d351ea3c4a0ead6b Mon Sep 17 00:00:00 2001 From: Norbyte Date: Tue, 9 Jan 2024 21:32:35 +0100 Subject: [PATCH 076/139] Write debug output to debug stream --- LSLib/Granny/ColladaSchema.cs | 11 +--- LSLib/Granny/GR2/Format.cs | 6 +- LSLib/Granny/GR2/Reader.cs | 86 ++++++++++++++--------------- LSLib/LS/Resources/LSF/LSFReader.cs | 24 ++++---- 4 files changed, 60 insertions(+), 67 deletions(-) diff --git a/LSLib/Granny/ColladaSchema.cs b/LSLib/Granny/ColladaSchema.cs index 84570078..e07ee0a6 100644 --- a/LSLib/Granny/ColladaSchema.cs +++ b/LSLib/Granny/ColladaSchema.cs @@ -9910,15 +9910,8 @@ internal static double[] ConvertDoubleArray(string arrayStr) { string[] elements = regex.Split(arrayStr.Trim()); double[] ret = new double[elements.Length]; - try - { - for (int i = 0; i < ret.Length; i++) - ret[i] = double.Parse(elements[i], NumberStyles.Float, CultureInfo.InvariantCulture); - } - catch (Exception ex) - { - Console.WriteLine(ex); - } + for (int i = 0; i < ret.Length; i++) + ret[i] = double.Parse(elements[i], NumberStyles.Float, CultureInfo.InvariantCulture); return ret; } diff --git a/LSLib/Granny/GR2/Format.cs b/LSLib/Granny/GR2/Format.cs index 6913b148..04f437f5 100644 --- a/LSLib/Granny/GR2/Format.cs +++ b/LSLib/Granny/GR2/Format.cs @@ -665,7 +665,7 @@ public StructDefinition Resolve(GR2Reader gr2) { // We haven't seen this type before, read its definition from the file and cache it #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" ===== Struct definition at {0:X8} ===== ", Offset)); + Debug.WriteLine(String.Format(" ===== Struct definition at {0:X8} ===== ", Offset)); #endif var originalPos = gr2.Stream.Position; gr2.Seek(this); @@ -734,7 +734,7 @@ public List Resolve(GR2Reader gr2) if (Items == null) { #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" (Reference list at {0:X8})", Offset)); + Debug.WriteLine(String.Format(" (Reference list at {0:X8})", Offset)); #endif var originalPos = gr2.Stream.Position; gr2.Seek(this); @@ -743,7 +743,7 @@ public List Resolve(GR2Reader gr2) { Items.Add(gr2.ReadReference()); #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" {0:X8}", r.Offset)); + Debug.WriteLine(String.Format(" {0:X8}", r.Offset)); #endif } gr2.Stream.Seek(originalPos, SeekOrigin.Begin); diff --git a/LSLib/Granny/GR2/Reader.cs b/LSLib/Granny/GR2/Reader.cs index ff474ecb..b1324e5c 100644 --- a/LSLib/Granny/GR2/Reader.cs +++ b/LSLib/Granny/GR2/Reader.cs @@ -106,10 +106,10 @@ private Magic ReadMagic() Debug.Assert(magic.reserved2 == 0); #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(" ===== GR2 Magic ===== "); - System.Console.WriteLine(String.Format("Format: {0}", magic.format)); - System.Console.WriteLine(String.Format("Headers size: {0:X8}, format: ", magic.headersSize, magic.headerFormat)); - System.Console.WriteLine(String.Format("Reserved1-2: {0:X8} {1:X8}", magic.reserved1, magic.reserved2)); + Debug.WriteLine(" ===== GR2 Magic ===== "); + Debug.WriteLine(String.Format("Format: {0}", magic.format)); 
+ Debug.WriteLine(String.Format("Headers size: {0:X8}, format: ", magic.headersSize, magic.headerFormat)); + Debug.WriteLine(String.Format("Reserved1-2: {0:X8} {1:X8}", magic.reserved1, magic.reserved2)); #endif return magic; } @@ -156,14 +156,14 @@ private Header ReadHeader() Debug.Assert(header.reserved3 == 0); #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(" ===== GR2 Header ===== "); - System.Console.WriteLine(String.Format("Version {0}, Size {1}, CRC {2:X8}", header.version, header.fileSize, header.crc)); - System.Console.WriteLine(String.Format("Offset of sections: {0}, num sections: {1}", header.sectionsOffset, header.numSections)); - System.Console.WriteLine(String.Format("Root type section {0}, Root type offset {1:X8}", header.rootType.Section, header.rootType.Offset)); - System.Console.WriteLine(String.Format("Root node section {0} {1:X8}", header.rootNode.Section, header.rootNode.Offset)); - System.Console.WriteLine(String.Format("Tag: {0:X8}, Strings CRC: {1:X8}", header.tag, header.stringTableCrc)); - System.Console.WriteLine(String.Format("Extra tags: {0:X8} {1:X8} {2:X8} {3:X8}", header.extraTags[0], header.extraTags[1], header.extraTags[2], header.extraTags[3])); - System.Console.WriteLine(String.Format("Reserved: {0:X8} {1:X8} {2:X8}", new object[] { header.reserved1, header.reserved2, header.reserved3 })); + Debug.WriteLine(" ===== GR2 Header ===== "); + Debug.WriteLine(String.Format("Version {0}, Size {1}, CRC {2:X8}", header.version, header.fileSize, header.crc)); + Debug.WriteLine(String.Format("Offset of sections: {0}, num sections: {1}", header.sectionsOffset, header.numSections)); + Debug.WriteLine(String.Format("Root type section {0}, Root type offset {1:X8}", header.rootType.Section, header.rootType.Offset)); + Debug.WriteLine(String.Format("Root node section {0} {1:X8}", header.rootNode.Section, header.rootNode.Offset)); + Debug.WriteLine(String.Format("Tag: {0:X8}, Strings CRC: {1:X8}", header.tag, header.stringTableCrc)); + Debug.WriteLine(String.Format("Extra tags: {0:X8} {1:X8} {2:X8} {3:X8}", header.extraTags[0], header.extraTags[1], header.extraTags[2], header.extraTags[3])); + Debug.WriteLine(String.Format("Reserved: {0:X8} {1:X8} {2:X8}", new object[] { header.reserved1, header.reserved2, header.reserved3 })); #endif return header; @@ -199,13 +199,13 @@ private SectionHeader ReadSectionHeader() } #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(" ===== Section Header ===== "); - System.Console.WriteLine(String.Format("Compression: {0}", header.compression)); - System.Console.WriteLine(String.Format("Offset {0:X8} Comp/UncompSize {1:X8}/{2:X8}", header.offsetInFile, header.compressedSize, header.uncompressedSize)); - System.Console.WriteLine(String.Format("Alignment {0}", header.alignment)); - System.Console.WriteLine(String.Format("First 16/8bit: {0:X8}/{1:X8}", header.first16bit, header.first8bit)); - System.Console.WriteLine(String.Format("Relocations: {0:X8} count {1}", header.relocationsOffset, header.numRelocations)); - System.Console.WriteLine(String.Format("Marshalling data: {0:X8} count {1}", header.mixedMarshallingDataOffset, header.numMixedMarshallingData)); + Debug.WriteLine(" ===== Section Header ===== "); + Debug.WriteLine(String.Format("Compression: {0}", header.compression)); + Debug.WriteLine(String.Format("Offset {0:X8} Comp/UncompSize {1:X8}/{2:X8}", header.offsetInFile, header.compressedSize, header.uncompressedSize)); + Debug.WriteLine(String.Format("Alignment {0}", header.alignment)); + 
Debug.WriteLine(String.Format("First 16/8bit: {0:X8}/{1:X8}", header.first16bit, header.first8bit)); + Debug.WriteLine(String.Format("Relocations: {0:X8} count {1}", header.relocationsOffset, header.numRelocations)); + Debug.WriteLine(String.Format("Marshalling data: {0:X8} count {1}", header.mixedMarshallingDataOffset, header.numMixedMarshallingData)); #endif return header; } @@ -213,7 +213,7 @@ private SectionHeader ReadSectionHeader() private void UncompressStream() { #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" ===== Repacking sections ===== ")); + Debug.WriteLine(String.Format(" ===== Repacking sections ===== ")); #endif uint totalSize = 0; @@ -260,7 +260,7 @@ private void UncompressStream() } #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" {0}: {1:X8} ({2}) --> {3:X8} ({4})", i, originalOffset, hdr.compressedSize, hdr.offsetInFile, hdr.uncompressedSize)); + Debug.WriteLine(String.Format(" {0}: {1:X8} ({2}) --> {3:X8} ({4})", i, originalOffset, hdr.compressedSize, hdr.offsetInFile, hdr.uncompressedSize)); #endif } } @@ -268,7 +268,7 @@ private void UncompressStream() private void ReadSectionRelocationsInternal(Section section, Stream relocationsStream) { #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" ===== Relocations for section at {0:X8} ===== ", section.Header.offsetInFile)); + Debug.WriteLine(String.Format(" ===== Relocations for section at {0:X8} ===== ", section.Header.offsetInFile)); #endif using var relocationsReader = new BinaryReader(relocationsStream, Encoding.Default, true); @@ -283,8 +283,8 @@ private void ReadSectionRelocationsInternal(Section section, Stream relocationsS Stream.Write(BitConverter.GetBytes(fixupAddress), 0, 4); #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" LOCAL {0:X8} --> {1}:{2:X8}", offsetInSection, (SectionType)reference.Section, reference.Offset)); - System.Console.WriteLine(String.Format(" GLOBAL {0:X8} --> {1:X8}", + Debug.WriteLine(String.Format(" LOCAL {0:X8} --> {1}:{2:X8}", offsetInSection, (SectionType)reference.Section, reference.Offset)); + Debug.WriteLine(String.Format(" GLOBAL {0:X8} --> {1:X8}", offsetInSection + section.Header.offsetInFile, reference.Offset + Sections[(int)reference.Section].Header.offsetInFile)); #endif @@ -352,7 +352,7 @@ private void MixedMarshal(UInt32 count, StructDefinition definition) private void ReadSectionMixedMarshallingRelocationsInternal(Section section, Stream relocationsStream) { #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" ===== Mixed marshalling relocations for section at {0:X8} ===== ", section.Header.offsetInFile)); + Debug.WriteLine(String.Format(" ===== Mixed marshalling relocations for section at {0:X8} ===== ", section.Header.offsetInFile)); #endif using var relocationsReader = new BinaryReader(relocationsStream, Encoding.Default, true); @@ -371,7 +371,7 @@ private void ReadSectionMixedMarshallingRelocationsInternal(Section section, Str MixedMarshal(count, typeDefn.Resolve(this)); #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" {0:X8} [{1}] --> {2}:{3:X8}", offsetInSection, count, (SectionType)type.Section, type.Offset)); + Debug.WriteLine(String.Format(" {0:X8} [{1}] --> {2}:{3:X8}", offsetInSection, count, (SectionType)type.Section, type.Offset)); #endif } } @@ -537,9 +537,9 @@ public MemberDefinition ReadMemberDefinition() if (!DebugPendingResolve.Contains(defn.Definition)) { DebugPendingResolve.Add(defn.Definition); - 
System.Console.WriteLine(String.Format(" ===== Debug resolve for {0:X8} ===== ", defn.Definition.Offset)); + Debug.WriteLine(String.Format(" ===== Debug resolve for {0:X8} ===== ", defn.Definition.Offset)); defn.Definition.Resolve(this); - System.Console.WriteLine(String.Format(" ===== End debug resolve for {0:X8} ===== ", defn.Definition.Offset)); + Debug.WriteLine(String.Format(" ===== End debug resolve for {0:X8} ===== ", defn.Definition.Offset)); } description += String.Format(" ", defn.Definition.Offset); } @@ -552,7 +552,7 @@ public MemberDefinition ReadMemberDefinition() description = String.Format(" : {0}", defn.Type.ToString()); } - System.Console.WriteLine(description); + Debug.WriteLine(description); #endif return defn; } @@ -579,7 +579,7 @@ internal object ReadStruct(StructDefinition definition, MemberType memberType, o if (memberType != MemberType.Inline && CachedStructs.TryGetValue(offset, out cachedNode)) { #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format("Skipped cached struct {1} at {0:X8}", offset, node.ToString())); + Debug.WriteLine(String.Format("Skipped cached struct {1} at {0:X8}", offset, node.ToString())); #endif Stream.Position += definition.Size(this); return cachedNode; @@ -765,14 +765,14 @@ private object ReadElement(MemberDefinition definition, object node, Type proper case MemberType.Inline: Debug.Assert(definition.Definition.IsValid); #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" === Inline Struct {0} === ", definition.Name)); + Debug.WriteLine(String.Format(" === Inline Struct {0} === ", definition.Name)); #endif if (kind == SerializationKind.UserElement || kind == SerializationKind.UserMember) node = definition.Serializer.Read(this, definition.Definition.Resolve(this), definition, 0, parent); else node = ReadStruct(definition.Definition.Resolve(this), definition.Type, node, parent); #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(" === End Struct === "); + Debug.WriteLine(" === End Struct === "); #endif break; @@ -786,14 +786,14 @@ private object ReadElement(MemberDefinition definition, object node, Type proper var originalPos = Stream.Position; Seek(r); #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" === Struct <{0}> at {1:X8} === ", definition.Name, Stream.Position)); + Debug.WriteLine(String.Format(" === Struct <{0}> at {1:X8} === ", definition.Name, Stream.Position)); #endif if (kind == SerializationKind.UserElement || kind == SerializationKind.UserMember) node = definition.Serializer.Read(this, definition.Definition.Resolve(this), definition, 0, parent); else node = ReadStruct(definition.Definition.Resolve(this), definition.Type, node, parent); #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(" === End Struct === "); + Debug.WriteLine(" === End Struct === "); #endif Stream.Seek(originalPos, SeekOrigin.Begin); } @@ -820,14 +820,14 @@ private object ReadElement(MemberDefinition definition, object node, Type proper var originalPos = Stream.Position; Seek(r); #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" === Variant Struct <{0}> at {1:X8} === ", definition.Name, Stream.Position)); + Debug.WriteLine(String.Format(" === Variant Struct <{0}> at {1:X8} === ", definition.Name, Stream.Position)); #endif if (kind == SerializationKind.UserElement || kind == SerializationKind.UserMember) node = definition.Serializer.Read(this, structDefn, definition, 0, parent); else node = ReadStruct(structRef.Resolve(this), definition.Type, node, parent); #if 
DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(" === End Struct === "); + Debug.WriteLine(" === End Struct === "); #endif Stream.Seek(originalPos, SeekOrigin.Begin); } @@ -844,7 +844,7 @@ private object ReadElement(MemberDefinition definition, object node, Type proper Debug.Assert(definition.Definition.IsValid); var indices = ReadArrayIndicesReference(); #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" Array of references at [{0:X8}]", indices.Offset)); + Debug.WriteLine(String.Format(" Array of references at [{0:X8}]", indices.Offset)); #endif if (Header.version >= 7) @@ -863,7 +863,7 @@ private object ReadElement(MemberDefinition definition, object node, Type proper { Seek(refs[i]); #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" === Struct <{0}> at {1:X8} === ", definition.Name, Stream.Position)); + Debug.WriteLine(String.Format(" === Struct <{0}> at {1:X8} === ", definition.Name, Stream.Position)); #endif if (kind == SerializationKind.UserElement) { @@ -879,7 +879,7 @@ private object ReadElement(MemberDefinition definition, object node, Type proper } #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(" === End Struct === "); + Debug.WriteLine(" === End Struct === "); #endif } @@ -934,7 +934,7 @@ private object ReadElement(MemberDefinition definition, object node, Type proper for (int i = 0; i < itemsRef.Size; i++) { #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(String.Format(" === Struct <{0}> at {1:X8} === ", definition.Name, Stream.Position)); + Debug.WriteLine(String.Format(" === Struct <{0}> at {1:X8} === ", definition.Name, Stream.Position)); #endif if (kind == SerializationKind.UserElement) { @@ -948,7 +948,7 @@ private object ReadElement(MemberDefinition definition, object node, Type proper items.Add(element); } #if DEBUG_GR2_SERIALIZATION - System.Console.WriteLine(" === End Struct === "); + Debug.WriteLine(" === End Struct === "); #endif } } @@ -1030,9 +1030,9 @@ private object ReadElement(MemberDefinition definition, object node, Type proper #if DEBUG_GR2_SERIALIZATION if (node != null) - System.Console.WriteLine(String.Format(" [{0:X8}] {1}: {2}", offsetInFile, definition.Name, node.ToString())); + Debug.WriteLine(String.Format(" [{0:X8}] {1}: {2}", offsetInFile, definition.Name, node.ToString())); else - System.Console.WriteLine(String.Format(" [{0:X8}] {1}: ", offsetInFile, definition.Name)); + Debug.WriteLine(String.Format(" [{0:X8}] {1}: ", offsetInFile, definition.Name)); #endif return node; diff --git a/LSLib/LS/Resources/LSF/LSFReader.cs b/LSLib/LS/Resources/LSF/LSFReader.cs index 1f79d2bc..adea33af 100644 --- a/LSLib/LS/Resources/LSF/LSFReader.cs +++ b/LSLib/LS/Resources/LSF/LSFReader.cs @@ -61,7 +61,7 @@ public void Dispose() private void ReadNames(Stream s) { #if DEBUG_LSF_SERIALIZATION - Console.WriteLine(" ----- DUMP OF NAME TABLE -----"); + Debug.WriteLine(" ----- DUMP OF NAME TABLE -----"); #endif // Format: @@ -85,7 +85,7 @@ private void ReadNames(Stream s) var name = System.Text.Encoding.UTF8.GetString(bytes); hash.Add(name); #if DEBUG_LSF_SERIALIZATION - Console.WriteLine(String.Format("{0,3:X}/{1}: {2}", Names.Count - 1, hash.Count - 1, name)); + Debug.WriteLine(String.Format("{0,3:X}/{1}: {2}", Names.Count - 1, hash.Count - 1, name)); #endif } } @@ -99,7 +99,7 @@ private void ReadNames(Stream s) private void ReadNodes(Stream s, bool longNodes) { #if DEBUG_LSF_SERIALIZATION - Console.WriteLine(" ----- DUMP OF NODE TABLE -----"); + Debug.WriteLine(" ----- DUMP OF NODE TABLE -----"); #endif using 
var reader = new BinaryReader(s); @@ -129,7 +129,7 @@ private void ReadNodes(Stream s, bool longNodes) } #if DEBUG_LSF_SERIALIZATION - Console.WriteLine(String.Format( + Debug.WriteLine(String.Format( "{0}: {1} @ {2:X} (parent {3}, firstAttribute {4})", index, Names[resolved.NameIndex][resolved.NameOffset], pos, resolved.ParentIndex, resolved.FirstAttributeIndex @@ -199,14 +199,14 @@ private void ReadAttributesV2(Stream s) } #if DEBUG_LSF_SERIALIZATION - Console.WriteLine(" ----- DUMP OF ATTRIBUTE REFERENCES -----"); + Debug.WriteLine(" ----- DUMP OF ATTRIBUTE REFERENCES -----"); for (int i = 0; i < prevAttributeRefs.Count; i++) { - Console.WriteLine(String.Format("Node {0}: last attribute {1}", i, prevAttributeRefs[i])); + Debug.WriteLine(String.Format("Node {0}: last attribute {1}", i, prevAttributeRefs[i])); } - Console.WriteLine(" ----- DUMP OF V2 ATTRIBUTE TABLE -----"); + Debug.WriteLine(" ----- DUMP OF V2 ATTRIBUTE TABLE -----"); for (int i = 0; i < Attributes.Count; i++) { var resolved = Attributes[i]; @@ -217,7 +217,7 @@ private void ReadAttributesV2(Stream s) i, Names[resolved.NameIndex][resolved.NameOffset], resolved.DataOffset, resolved.TypeId, resolved.NextAttributeIndex, attribute.NodeIndex ); - Console.WriteLine(debug); + Debug.WriteLine(debug); } #endif } @@ -247,7 +247,7 @@ private void ReadAttributesV3(Stream s) } #if DEBUG_LSF_SERIALIZATION - Console.WriteLine(" ----- DUMP OF V3 ATTRIBUTE TABLE -----"); + Debug.WriteLine(" ----- DUMP OF V3 ATTRIBUTE TABLE -----"); for (int i = 0; i < Attributes.Count; i++) { var resolved = Attributes[i]; @@ -257,7 +257,7 @@ private void ReadAttributesV3(Stream s) i, Names[resolved.NameIndex][resolved.NameOffset], resolved.DataOffset, resolved.TypeId, resolved.NextAttributeIndex ); - Console.WriteLine(debug); + Debug.WriteLine(debug); } #endif } @@ -443,7 +443,7 @@ private void ReadNode(LSFNodeInfo defn, Node node, BinaryReader attributeReader) node.Name = Names[defn.NameIndex][defn.NameOffset]; #if DEBUG_LSF_SERIALIZATION - Console.WriteLine(String.Format("Begin node {0}", node.Name)); + Debug.WriteLine(String.Format("Begin node {0}", node.Name)); #endif if (defn.FirstAttributeIndex != -1) @@ -456,7 +456,7 @@ private void ReadNode(LSFNodeInfo defn, Node node, BinaryReader attributeReader) node.Attributes[Names[attribute.NameIndex][attribute.NameOffset]] = value; #if DEBUG_LSF_SERIALIZATION - Console.WriteLine(String.Format(" {0:X}: {1} ({2})", attribute.DataOffset, Names[attribute.NameIndex][attribute.NameOffset], value)); + Debug.WriteLine(String.Format(" {0:X}: {1} ({2})", attribute.DataOffset, Names[attribute.NameIndex][attribute.NameOffset], value)); #endif if (attribute.NextAttributeIndex == -1) From 8fe3e4388530921e855563c93aa638864fb9538b Mon Sep 17 00:00:00 2001 From: Norbyte Date: Tue, 9 Jan 2024 21:34:11 +0100 Subject: [PATCH 077/139] Fix QTangent normal reflections being encoded incorrectly --- LSLib/Granny/Model/VertexSerialization.cs | 9 ++- LSLib/LS/Common.cs | 92 +++++++++++------------ 2 files changed, 51 insertions(+), 50 deletions(-) diff --git a/LSLib/Granny/Model/VertexSerialization.cs b/LSLib/Granny/Model/VertexSerialization.cs index 8f118345..5f7e3659 100644 --- a/LSLib/Granny/Model/VertexSerialization.cs +++ b/LSLib/Granny/Model/VertexSerialization.cs @@ -139,11 +139,13 @@ private static Quaternion MatrixToQTangent(Matrix3 mm, bool reflect) var quat = Quaternion.FromMatrix(m); quat.Normalize(); - quat.Conjugate(); if (quat.W < 0.0f) + { + quat.W = -quat.W; + } + else { quat.Conjugate(); - quat.Invert(); } // Make 
sure we don't end up with 0 as w component @@ -159,8 +161,7 @@ private static Quaternion MatrixToQTangent(Matrix3 mm, bool reflect) // if Y axis needs to be flipped, positive otherwise if (reflect) { - quat.Conjugate(); - quat.Invert(); + quat = new Quaternion(-quat.Xyz, -quat.W); } return quat; diff --git a/LSLib/LS/Common.cs b/LSLib/LS/Common.cs index 3134e332..f03f3743 100644 --- a/LSLib/LS/Common.cs +++ b/LSLib/LS/Common.cs @@ -1,46 +1,46 @@ -using System; -using System.Text.RegularExpressions; - -namespace LSLib.LS; - -public static class Common -{ - public const int MajorVersion = 1; - - public const int MinorVersion = 19; - - public const int PatchVersion = 2; - - // Version of LSTools profile data in generated DAE files - public const int ColladaMetadataVersion = 3; - - /// - /// Returns the version number of the LSLib library - /// - public static string LibraryVersion() - { - return String.Format("{0}.{1}.{2}", MajorVersion, MinorVersion, PatchVersion); - } - - /// - /// Compares the string against a given pattern. - /// - /// The string - /// The pattern to match, where "*" means any sequence of characters, and "?" means any single character - /// true if the string matches the given pattern; otherwise false. - public static bool Like(this string str, string pattern) - { - return new Regex("^" + Regex.Escape(pattern).Replace(@"\*", ".*").Replace(@"\?", ".") + "$", RegexOptions.Singleline).IsMatch(str); - } - - /// - /// Compares the string against a given pattern. - /// - /// The string - /// The pattern to match as a RegEx object - /// true if the string matches the given pattern; otherwise false. - public static bool Like(this string str, Regex pattern) - { - return pattern.IsMatch(str); - } -} +using System; +using System.Text.RegularExpressions; + +namespace LSLib.LS; + +public static class Common +{ + public const int MajorVersion = 1; + + public const int MinorVersion = 19; + + public const int PatchVersion = 3; + + // Version of LSTools profile data in generated DAE files + public const int ColladaMetadataVersion = 3; + + /// + /// Returns the version number of the LSLib library + /// + public static string LibraryVersion() + { + return String.Format("{0}.{1}.{2}", MajorVersion, MinorVersion, PatchVersion); + } + + /// + /// Compares the string against a given pattern. + /// + /// The string + /// The pattern to match, where "*" means any sequence of characters, and "?" means any single character + /// true if the string matches the given pattern; otherwise false. + public static bool Like(this string str, string pattern) + { + return new Regex("^" + Regex.Escape(pattern).Replace(@"\*", ".*").Replace(@"\?", ".") + "$", RegexOptions.Singleline).IsMatch(str); + } + + /// + /// Compares the string against a given pattern. + /// + /// The string + /// The pattern to match as a RegEx object + /// true if the string matches the given pattern; otherwise false. 
+ public static bool Like(this string str, Regex pattern) + { + return pattern.IsMatch(str); + } +} From 6180de4bcd75656961a21bc643f9adb3cf9ae63f Mon Sep 17 00:00:00 2001 From: Norbyte Date: Tue, 9 Jan 2024 21:34:26 +0100 Subject: [PATCH 078/139] Add TreasureTable.txt to parsed stats files --- LSLib/LS/Mods/ModResources.cs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/LSLib/LS/Mods/ModResources.cs b/LSLib/LS/Mods/ModResources.cs index 2d5ecf01..d491591a 100644 --- a/LSLib/LS/Mods/ModResources.cs +++ b/LSLib/LS/Mods/ModResources.cs @@ -110,6 +110,12 @@ private void DiscoverModStats(ModInfo mod) { mod.Stats.Add(statFile); } + + var treasurePath = Path.Join(mod.PublicPath, @"Stats/Generated/TreasureTable.txt"); + if (FS.FileExists(treasurePath)) + { + mod.Stats.Add(treasurePath); + } } private void DiscoverModStatsStructure(ModInfo mod) From 4fd318c8bb783c87d9c47b5fa4a737c8a5aa7085 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Tue, 9 Jan 2024 21:35:32 +0100 Subject: [PATCH 079/139] Improve stat checker diagnostics --- LSLib/LS/Stats/Parser/PropertyDefinitions.cs | 5 +- LSLib/LS/Stats/Parser/Stat.lex | 3 +- LSLib/LS/Stats/Parser/Stat.yy | 45 +- LSLib/LS/Stats/Parser/StatLua.lex | 1 + LSLib/LS/Stats/Parser/StatLua.yy | 3 + LSLib/LS/Stats/Parser/StatNodes.cs | 9 +- LSLib/LS/Stats/Parser/StatParser.cs | 84 +-- LSLib/LS/Stats/Parser/StatProperty.lex | 3 - LSLib/LS/Stats/Parser/StatProperty.yy | 33 +- LSLib/LS/Stats/Parser/StatPropertyParser.cs | 163 ++---- LSLib/LS/Stats/StatDefinitions.cs | 37 +- LSLib/LS/Stats/StatFileParser.cs | 181 +++--- LSLib/LS/Stats/StatValueParsers.cs | 565 ++++++++++--------- LSLib/LSLib.csproj | 1 + LSTools.sln | 50 ++ StatParser/StatChecker.cs | 10 +- 16 files changed, 588 insertions(+), 605 deletions(-) diff --git a/LSLib/LS/Stats/Parser/PropertyDefinitions.cs b/LSLib/LS/Stats/Parser/PropertyDefinitions.cs index 913e742f..1f1cc488 100644 --- a/LSLib/LS/Stats/Parser/PropertyDefinitions.cs +++ b/LSLib/LS/Stats/Parser/PropertyDefinitions.cs @@ -1,4 +1,5 @@ -using System; +using LSLib.LS.Story.GoalParser; +using System; using System.Collections.Generic; namespace LSLib.LS.Stats.Properties; @@ -27,6 +28,8 @@ public class PropertyAction { public string Action; public List Arguments; + public int StartPos; + public int EndPos; } public enum ConditionOperator diff --git a/LSLib/LS/Stats/Parser/Stat.lex b/LSLib/LS/Stats/Parser/Stat.lex index ac117c1f..071561fb 100644 --- a/LSLib/LS/Stats/Parser/Stat.lex +++ b/LSLib/LS/Stats/Parser/Stat.lex @@ -82,8 +82,9 @@ data([ ]+)\"([^\"]+)\"([ ]+)\"(.*)\" { yylval = MakeDataProperty(tokLin, tokCol, "EndLevel" return (int)StatTokens.END_LEVEL; "MinLevel" return (int)StatTokens.MIN_LEVEL; "MaxLevel" return (int)StatTokens.MAX_LEVEL; +"CanMerge" return (int)StatTokens.CAN_MERGE; "IgnoreLevelDiff" return (int)StatTokens.IGNORE_LEVEL_DIFF; -"UseTreasureGroups" return (int)StatTokens.USE_TREASURE_GROUPS; +"UseTreasureGroupCounters" return (int)StatTokens.USE_TREASURE_GROUPS; /* Special characters */ "," return (int)','; diff --git a/LSLib/LS/Stats/Parser/Stat.yy b/LSLib/LS/Stats/Parser/Stat.yy index 902a35d7..93768282 100644 --- a/LSLib/LS/Stats/Parser/Stat.yy +++ b/LSLib/LS/Stats/Parser/Stat.yy @@ -76,6 +76,7 @@ %token END_LEVEL %token MIN_LEVEL %token MAX_LEVEL +%token CAN_MERGE %token IGNORE_LEVEL_DIFF %token USE_TREASURE_GROUPS @@ -245,6 +246,7 @@ EntryProperty : EntryType | TreasureSubtable | TreasureTableMinLevel | TreasureTableMaxLevel + | TreasureTableCanMerge | TreasureTableIgnoreLevelDiff | TreasureTableUseTreasureGroups ; @@ 
-297,7 +299,7 @@ DeltaModifierBoost : NEW_BOOST STRING ',' INTEGER {"Multiplier", Unwrap($4)}, }); }; -EquipmentGroup : ADD_EQUIPMENTGROUP EquipmentEntries { $$ = MakeElement("EquipmentGroups", $2); }; +EquipmentGroup : ADD_EQUIPMENTGROUP EquipmentEntries { $$ = MakeElement("EquipmentGroups", $2, @2); }; EquipmentEntries : /* empty */ { $$ = MakeCollection(); } | EquipmentEntries EquipmentEntry { $$ = AddElement($1, $2); } @@ -305,28 +307,28 @@ EquipmentEntries : /* empty */ { $$ = MakeCollection(); } EquipmentEntry : ADD_EQUIPMENT_ENTRY STRING { $$ = $2; }; -ItemComboPropertyEntry: NEW_ITEMCOMBOPROPERTYENTRY EntrySubProperties { $$ = MakeElement("Entries", $2); }; +ItemComboPropertyEntry: NEW_ITEMCOMBOPROPERTYENTRY EntrySubProperties { $$ = MakeElement("Entries", $2, @2); }; EntrySubProperties : /* empty */ { $$ = MakeDeclaration(); } | EntrySubProperties EntryData { $$ = AddProperty($1, $2); } ; -SkillSetSkill : ADD SKILL STRING { $$ = MakeElement("NameGroups", $3); }; +SkillSetSkill : ADD SKILL STRING { $$ = MakeElement("NameGroups", $3, @3); }; TreasureGroupWeaponCounter : WEAPON_COUNTER STRING ',' STRING - { $$ = MakeDeclaration(new [] { + { $$ = MakeDeclaration(@$, new [] { MakeProperty(@2, "WeaponTreasureGroup", $2), MakeProperty(@4, "WeaponDefaultCounter", $4) }); }; TreasureGroupSkillbookCounter : SKILLBOOK_COUNTER STRING ',' STRING - { $$ = MakeDeclaration(new [] { + { $$ = MakeDeclaration(@$, new [] { MakeProperty(@2, "SkillbookTreasureGroup", $2), MakeProperty(@4, "SkillbookDefaultCounter", $4) }); }; TreasureGroupArmorCounter : ARMOR_COUNTER STRING ',' STRING - { $$ = MakeDeclaration(new [] { + { $$ = MakeDeclaration(@$, new [] { MakeProperty(@2, "ArmorTreasureGroup", $2), MakeProperty(@4, "ArmorDefaultCounter", $4) }); }; @@ -334,18 +336,21 @@ TreasureGroupArmorCounter : ARMOR_COUNTER STRING ',' STRING TreasureSubtable : NEW_SUBTABLE STRING TreasureTableObjects { $$ = MakeElement("Subtables", AddProperty( - MakeDeclaration(new [] { MakeProperty(@2, "DropCount", $2) }), + MakeDeclaration(@$, new [] { MakeProperty(@2, "DropCount", $2) }), $3 - )); + ), @$); }; TreasureTableMinLevel : MIN_LEVEL STRING { $$ = MakeProperty(@$, "MinLevel", $2); }; TreasureTableMaxLevel : MAX_LEVEL STRING { $$ = MakeProperty(@$, "MaxLevel", $2); }; +TreasureTableCanMerge : CAN_MERGE STRING { $$ = MakeProperty(@$, "CanMerge", $2); } + | CAN_MERGE INTEGER { $$ = MakeProperty(@$, "CanMerge", $2); }; + TreasureTableIgnoreLevelDiff : IGNORE_LEVEL_DIFF INTEGER { $$ = MakeProperty(@$, "IgnoreLevelDiff", $2); }; -TreasureTableUseTreasureGroups : USE_TREASURE_GROUPS INTEGER { $$ = MakeProperty(@$, "UseTreasureGroups", $2); }; +TreasureTableUseTreasureGroups : USE_TREASURE_GROUPS INTEGER { $$ = MakeProperty(@$, "UseTreasureGroupCounters", $2); }; TreasureTableObjects : /* empty */ { $$ = MakeDeclaration(); } | TreasureTableObjects TreasureTableEntry { $$ = AddProperty($1, $2); } @@ -361,15 +366,15 @@ TreasureTableObjectStartLevel : START_LEVEL STRING { $$ = MakeProperty(@$, "Star TreasureTableObjectEndLevel : END_LEVEL STRING { $$ = MakeProperty(@$, "EndLevel", $2); }; TreasureTableObject : OBJECT_CATEGORY STRING ',' INTEGER ',' INTEGER ',' INTEGER ',' INTEGER ',' INTEGER ',' INTEGER ',' INTEGER ',' INTEGER - { $$ = MakeElement("Objects", MakeDeclaration(new [] { - MakeProperty("ObjectCategory", $2), - MakeProperty("Frequency", $4), - MakeProperty("Common", $6), - MakeProperty("Uncommon", $8), - MakeProperty("Rare", $10), - MakeProperty("Epic", $12), - MakeProperty("Legendary", $14), - MakeProperty("Divine", 
$16), - MakeProperty("Unique", $18), - })); + { $$ = MakeElement("Objects", MakeDeclaration(@$, new [] { + MakeProperty(@2, "Drop", $2), + MakeProperty(@4, "Frequency", $4), + MakeProperty(@6, "Common", $6), + MakeProperty(@8, "Uncommon", $8), + MakeProperty(@10, "Rare", $10), + MakeProperty(@12, "Epic", $12), + MakeProperty(@14, "Legendary", $14), + MakeProperty(@16, "Divine", $16), + MakeProperty(@18, "Unique", $18), + }), @$); }; \ No newline at end of file diff --git a/LSLib/LS/Stats/Parser/StatLua.lex b/LSLib/LS/Stats/Parser/StatLua.lex index c0be153d..0d9d4d3a 100644 --- a/LSLib/LS/Stats/Parser/StatLua.lex +++ b/LSLib/LS/Stats/Parser/StatLua.lex @@ -59,6 +59,7 @@ nonseparator [^,;:()\[\]!+*/^&%~|><=.# ] '[^']*' { yylval = yytext; return (int)StatLuaTokens.LITERAL_STRING; } {letter}({namechar})+ { yylval = yytext; return (int)StatLuaTokens.NAME; } {digit}({digit})* { yylval = yytext; return (int)StatLuaTokens.INTEGER; } +{digit}({digit})*\.{digit}({digit})* { yylval = yytext; return (int)StatLuaTokens.FLOAT; } {digit}{digit}*d{digit}{digit}* { yylval = yytext; return (int)StatLuaTokens.DICE_ROLL; } . return ((int)StatLuaTokens.BAD); diff --git a/LSLib/LS/Stats/Parser/StatLua.yy b/LSLib/LS/Stats/Parser/StatLua.yy index ce15f3c8..cea2f874 100644 --- a/LSLib/LS/Stats/Parser/StatLua.yy +++ b/LSLib/LS/Stats/Parser/StatLua.yy @@ -12,6 +12,8 @@ /* Integer literal */ %token INTEGER +/* Floating point literal */ +%token FLOAT /* Text-like (unquoted) literal */ %token NAME /* eg. 1d10 */ @@ -53,6 +55,7 @@ LExp : LExpNoUnOp LExpNoUnOp : LUA_RESERVED_VAL | INTEGER + | FLOAT | LITERAL_STRING | DICE_ROLL | LPrefixExp diff --git a/LSLib/LS/Stats/Parser/StatNodes.cs b/LSLib/LS/Stats/Parser/StatNodes.cs index d8e75e9e..5b342942 100644 --- a/LSLib/LS/Stats/Parser/StatNodes.cs +++ b/LSLib/LS/Stats/Parser/StatNodes.cs @@ -10,9 +10,8 @@ namespace LSLib.LS.Stats.StatParser; public class StatDeclaration { public CodeLocation Location; - public Dictionary Properties = new Dictionary(); - public Dictionary PropertyLocations = new Dictionary(); - public bool WasInstantiated = false; + public Dictionary Properties = []; + public bool WasValidated = false; } /// @@ -20,9 +19,10 @@ public class StatDeclaration /// public class StatProperty { - public CodeLocation Location; public String Key; public object Value; + public CodeLocation Location; + public CodeLocation ValueLocation; } /// @@ -32,4 +32,5 @@ public class StatElement { public String Collection; public object Value; + public CodeLocation Location; } diff --git a/LSLib/LS/Stats/Parser/StatParser.cs b/LSLib/LS/Stats/Parser/StatParser.cs index b6fec8c7..87306e19 100644 --- a/LSLib/LS/Stats/Parser/StatParser.cs +++ b/LSLib/LS/Stats/Parser/StatParser.cs @@ -45,7 +45,8 @@ protected StatProperty MakeDataProperty(int startLine, int startCol, int endLine { Key = matches.Groups[1].Value, Value = matches.Groups[2].Value, - Location = new CodeLocation(null, startLine, startCol, endLine, endCol) + Location = new CodeLocation(null, startLine, startCol, endLine, endCol), + ValueLocation = new CodeLocation(null, startLine, startCol + matches.Groups[2].Index, endLine, startCol + matches.Groups[2].Index + matches.Groups[2].Value.Length) }; } } @@ -127,39 +128,31 @@ private StatDeclaration MergeItemCombo(object comboNode, object resultNode) private StatDeclaration AddProperty(object declaration, object property) { var decl = (StatDeclaration)declaration; - if (property is StatProperty) + if (property is StatProperty prop) { - var prop = (StatProperty)property; - 
decl.Properties[prop.Key] = prop.Value; - if (prop.Location != null) - { - decl.PropertyLocations[prop.Key] = prop.Location; - } + decl.Properties[prop.Key] = prop; } - else if (property is StatElement) + else if (property is StatElement ele) { - var ele = (StatElement)property; - object cont; - if (!decl.Properties.TryGetValue(ele.Collection, out cont)) + if (!decl.Properties.TryGetValue(ele.Collection, out prop)) { - cont = new List(); - decl.Properties[ele.Collection] = cont; + prop = new StatProperty + { + Key = ele.Collection, + Value = new StatCollection(), + Location = ele.Location + }; + decl.Properties[ele.Collection] = prop; } - (cont as List).Add(ele.Value); + (prop.Value as StatCollection).Add(ele.Value); } - else if (property is StatDeclaration) + else if (property is StatDeclaration otherDecl) { - var otherDecl = (StatDeclaration)property; foreach (var kv in otherDecl.Properties) { decl.Properties[kv.Key] = kv.Value; } - - foreach (var kv in otherDecl.PropertyLocations) - { - decl.PropertyLocations[kv.Key] = kv.Value; - } } else { @@ -210,42 +203,21 @@ private StatDeclaration AddProperty(object declaration, object property) private StatElement MakeElement(String key, object value) { - if (value is string) - { - return new StatElement() - { - Collection = key, - Value = (string)value - }; - } - else if (value is StatCollection) - { - return new StatElement() - { - Collection = key, - Value = (StatCollection)value - }; - } - else if (value is Dictionary) + return new StatElement() { - return new StatElement() - { - Collection = key, - Value = (Dictionary)value - }; - } - else if (value is StatDeclaration) - { - return new StatElement() - { - Collection = key, - Value = ((StatDeclaration)value).Properties - }; - } - else + Collection = key, + Value = value + }; + } + + private StatElement MakeElement(String key, object value, CodeLocation location) + { + return new StatElement() { - throw new Exception("Unknown stat element type"); - } + Location = location, + Collection = key, + Value = value + }; } private StatCollection MakeCollection() => new List(); diff --git a/LSLib/LS/Stats/Parser/StatProperty.lex b/LSLib/LS/Stats/Parser/StatProperty.lex index 40739323..769ba8c5 100644 --- a/LSLib/LS/Stats/Parser/StatProperty.lex +++ b/LSLib/LS/Stats/Parser/StatProperty.lex @@ -14,7 +14,6 @@ nonseparator [^,;:()\[\]! ] /* Special trigger words to determine expression type */ "__TYPE_Properties__" return (int)StatPropertyTokens.EXPR_PROPERTIES; "__TYPE_DescriptionParams__" return (int)StatPropertyTokens.EXPR_DESCRIPTION_PARAMS; -"__TYPE_Requirements__" return (int)StatPropertyTokens.EXPR_REQUIREMENTS; /* Reserved words */ "IF" return (int)StatPropertyTokens.IF; @@ -81,8 +80,6 @@ nonseparator [^,;:()\[\]! ] "." 
return (int)'.'; [ ] ; -"Tag" { yylval = MakeLiteral(yytext); return (int)StatPropertyTokens.REQUIREMENT_TAG; } - {letter}({namechar})+ { yylval = MakeLiteral(yytext); return (int)StatPropertyTokens.NAME; } (-)?{digit}({digit})* { yylval = MakeLiteral(yytext); return (int)StatPropertyTokens.INTEGER; } {digit}{digit}*d{digit}{digit}* { yylval = yytext; return (int)StatPropertyTokens.DICE_ROLL; } diff --git a/LSLib/LS/Stats/Parser/StatProperty.yy b/LSLib/LS/Stats/Parser/StatProperty.yy index 392b3ea7..3ef5939b 100644 --- a/LSLib/LS/Stats/Parser/StatProperty.yy +++ b/LSLib/LS/Stats/Parser/StatProperty.yy @@ -10,10 +10,6 @@ /* Trigger Lexemes */ %token EXPR_PROPERTIES %token EXPR_DESCRIPTION_PARAMS -%token EXPR_REQUIREMENTS - -/* Requirements */ -%token REQUIREMENT_TAG /* Reserved words */ %token IF @@ -36,33 +32,8 @@ /* A special "trigger word" is prepended to support parsing multiple types from the same lexer/parser */ Root : EXPR_PROPERTIES Properties { $$ = $2; } | EXPR_DESCRIPTION_PARAMS OptionalFunctorArgs { $$ = $2; } - | EXPR_REQUIREMENTS Requirements { $$ = $2; } ; - -/****************************************************************** - * - * REQUIREMENTS PARSING - * - ******************************************************************/ - -Requirements : /* empty */ { $$ = MakeRequirements(); } - | UnaryRequirement { $$ = AddRequirement(MakeRequirements(), $1); } - | Requirements ';' - | Requirements ';' UnaryRequirement { $$ = AddRequirement($1, $3); } - ; - -UnaryRequirement : Requirement - | '!' Requirement { $$ = MakeNotRequirement($2); } - ; - -Requirement : NAME { $$ = MakeRequirement($1); } - | NAME INTEGER { $$ = MakeIntRequirement($1, $2); } - | REQUIREMENT_TAG TEXT { $$ = MakeTagRequirement($1, $2); } - | REQUIREMENT_TAG NAME { $$ = MakeTagRequirement($1, $2); } - ; - - /****************************************************************** * * PROPERTY PARSING @@ -97,9 +68,7 @@ PropCondition : /* empty */ FunctorCall : FunctorName OptionalFunctorArgList { $$ = MakeAction($1, $2); }; -FunctorName : NAME - | REQUIREMENT_TAG - ; +FunctorName : NAME { $$ = $1; MarkActionStart(); }; OptionalFunctorArgList : /* empty */ { $$ = MakeArgumentList(); } | '(' OptionalFunctorArgs ')' { $$ = $2; } diff --git a/LSLib/LS/Stats/Parser/StatPropertyParser.cs b/LSLib/LS/Stats/Parser/StatPropertyParser.cs index 1ec955ba..d18eb4a6 100644 --- a/LSLib/LS/Stats/Parser/StatPropertyParser.cs +++ b/LSLib/LS/Stats/Parser/StatPropertyParser.cs @@ -1,10 +1,9 @@ -using LSLib.Granny; +using LSLib.LS.Story.GoalParser; using QUT.Gppg; using System; using System.Collections.Generic; using System.Linq; using System.Text; -using static LSLib.Granny.Model.CurveData.AnimationCurveData; namespace LSLib.LS.Stats.Properties; @@ -35,23 +34,20 @@ public abstract class StatPropertyScanBase : AbstractScanner functors = null; switch (ExprType) @@ -65,7 +61,7 @@ public void Validate(PropertyAction action) { if (ExprType != ExpressionType.DescriptionParams) { - OnError($"'{action.Action}' is not a valid {ExprType}"); + errors.Add($"'{action.Action}' is not a valid {ExprType}"); } return; @@ -95,29 +91,28 @@ public void Validate(PropertyAction action) if (args.Count > functor.Args.Count) { - OnError($"Too many arguments to '{action.Action}'; {args.Count} passed, expected at most {functor.Args.Count}"); - return; + errors.Add($"Too many arguments to '{action.Action}'; {args.Count} passed, expected at most {functor.Args.Count}"); } if (args.Count < functor.RequiredArgs) { - OnError($"Not enough arguments to '{action.Action}'; 
{args.Count} passed, expected at least {functor.RequiredArgs}"); - return; + errors.Add($"Not enough arguments to '{action.Action}'; {args.Count} passed, expected at least {functor.RequiredArgs}"); } + var argErrors = new PropertyDiagnosticContainer(); for (var i = 0; i < Math.Min(args.Count, functor.Args.Count); i++) { - bool succeeded = false; - string errorText = null; - var arg = functor.Args[i]; if (arg.Type.Length > 0) { - var parser = ParserFactory.CreateParser(arg.Type, null, null, Definitions); - parser.Parse(Context, args[i], ref succeeded, ref errorText); - if (!succeeded) + var validator = ValidatorFactory.CreateValidator(arg.Type, null, null, Definitions); + // FIXME pass codelocation + validator.Validate(Context, null, args[i], argErrors); + if (!argErrors.Empty) { - OnError($"'{action.Action}' argument {i + 1}: {errorText}"); + argErrors.AddContext(PropertyDiagnosticContextType.Argument, $"argument {i + 1} ({arg.Name})"); + argErrors.MergeInto(errors); + argErrors.Clear(); } } } @@ -126,26 +121,28 @@ public void Validate(PropertyAction action) public partial class StatPropertyParser { - private IStatValueParser RequirementParser; - private StatEnumeration RequirementsWithArgument; - private DiagnosticContext Context; - private int LiteralStart; - private StatActionValidator ActionValidator; - private byte[] Source; - - public delegate void ErrorReportingDelegate(string message); - public event ErrorReportingDelegate OnError; + private readonly DiagnosticContext Context; + private readonly StatActionValidator ActionValidator; + private readonly byte[] Source; + private readonly PropertyDiagnosticContainer Errors; + private readonly CodeLocation RootLocation; + private readonly StatPropertyScanner StatScanner; + private readonly int TokenOffset; - private StatPropertyScanner StatScanner; + private int LiteralStart; + private int ActionStart; public StatPropertyParser(StatPropertyScanner scnr, StatDefinitionRepository definitions, - DiagnosticContext ctx, StatValueParserFactory parserFactory, byte[] source, ExpressionType type) : base(scnr) + DiagnosticContext ctx, StatValueValidatorFactory validatorFactory, byte[] source, ExpressionType type, + PropertyDiagnosticContainer errors, CodeLocation rootLocation, int tokenOffset) : base(scnr) { Context = ctx; StatScanner = scnr; Source = source; - ActionValidator = new StatActionValidator(definitions, ctx, parserFactory, type); - ActionValidator.OnError += (message) => { OnError(message); }; + ActionValidator = new StatActionValidator(definitions, ctx, validatorFactory, type); + Errors = errors; + RootLocation = rootLocation; + TokenOffset = tokenOffset; } public object GetParsedObject() @@ -153,65 +150,6 @@ public object GetParsedObject() return CurrentSemanticValue; } - private List MakeRequirements() => new List(); - - private List AddRequirement(object requirements, object requirement) - { - var req = requirements as List; - req.Add(requirement as Requirement); - return req; - } - - private Requirement MakeNotRequirement(object requirement) - { - var req = requirement as Requirement; - req.Not = true; - return req; - } - - private Requirement MakeRequirement(object name) - { - Validate(RequirementParser, name as string); - - return new Requirement - { - Not = false, - RequirementName = name as string, - IntParam = 0, - TagParam = "" - }; - } - - private Requirement MakeIntRequirement(object name, object intArg) - { - var reqmtName = name as string; - Validate(RequirementParser, reqmtName); - - if 
(!RequirementsWithArgument.ValueToIndexMap.ContainsKey(reqmtName)) - { - OnError?.Invoke($"Requirement '{reqmtName}' doesn't need any arguments"); - } - - return new Requirement - { - Not = false, - RequirementName = reqmtName, - IntParam = Int32.Parse(intArg as string), - TagParam = "" - }; - } - - private Requirement MakeTagRequirement(object name, object tag) - { - return new Requirement - { - Not = false, - RequirementName = name as string, - IntParam = 0, - TagParam = tag as string - }; - } - private List MakePropertyList() => new List(); private List SetTextKey(object properties, object textKey) @@ -255,30 +193,35 @@ private List AddArgument(object arguments, object arg) return args; } + private object MarkActionStart() + { + ActionStart = StatScanner.TokenStartPos(); + return null; + } + private PropertyAction MakeAction(object action, object arguments) { + var callErrors = new PropertyDiagnosticContainer(); var act = new PropertyAction { Action = action as string, - Arguments = arguments as List + Arguments = arguments as List, + StartPos = ActionStart, + EndPos = StatScanner.TokenEndPos() }; - ActionValidator.Validate(act); - return act; - } + ActionValidator.Validate(act, callErrors); - private void Validate(IStatValueParser parser, string value) - { - if (parser != null) + CodeLocation location = null; + if (RootLocation != null) { - bool succeeded = false; - string errorText = null; - parser.Parse(Context, value, ref succeeded, ref errorText); - if (!succeeded) - { - errorText = $"'{value}': {errorText}"; - OnError?.Invoke(errorText); - } + location = new CodeLocation(RootLocation.FileName, + RootLocation.StartLine, RootLocation.StartColumn + act.StartPos - TokenOffset, + RootLocation.StartLine, RootLocation.StartColumn + act.EndPos - TokenOffset); } + + callErrors.AddContext(PropertyDiagnosticContextType.Call, act.Action, location); + callErrors.MergeInto(Errors); + return act; } private object InitLiteral() diff --git a/LSLib/LS/Stats/StatDefinitions.cs b/LSLib/LS/Stats/StatDefinitions.cs index d0f068c7..f4afccc5 100644 --- a/LSLib/LS/Stats/StatDefinitions.cs +++ b/LSLib/LS/Stats/StatDefinitions.cs @@ -37,12 +37,12 @@ public class StatField public StatEnumeration EnumType; public List ReferenceTypes; - private IStatValueParser parser; + private IStatValueValidator Validator; - public IStatValueParser GetParser(StatValueParserFactory factory, StatDefinitionRepository definitions) + public IStatValueValidator GetValidator(StatValueValidatorFactory factory, StatDefinitionRepository definitions) { - parser ??= factory.CreateParser(this, definitions); - return parser; + Validator ??= factory.CreateValidator(this, definitions); + return Validator; } } @@ -214,6 +214,35 @@ public void LoadDefinitions(Stream stream) AddField(dataType, "Key", "FixedString"); AddField(dataType, "Value", "FixedString"); + var treasureTableType = new StatEntryType("TreasureTable", "Name", null); + Types.Add(treasureTableType.Name, treasureTableType); + AddField(treasureTableType, "Name", "FixedString"); + AddField(treasureTableType, "MinLevel", "ConstantInt"); + AddField(treasureTableType, "MaxLevel", "ConstantInt"); + AddField(treasureTableType, "CanMerge", "ConstantInt"); + AddField(treasureTableType, "IgnoreLevelDiff", "ConstantInt"); + AddField(treasureTableType, "UseTreasureGroupCounters", "ConstantInt"); + AddField(treasureTableType, "Subtables", "TreasureSubtables"); + + var treasureSubtableType = new StatEntryType("TreasureSubtable", null, null); + Types.Add(treasureSubtableType.Name, 
treasureSubtableType); + AddField(treasureSubtableType, "DropCount", "FixedString"); // FIXME validate + AddField(treasureSubtableType, "StartLevel", "ConstantInt"); + AddField(treasureSubtableType, "EndLevel", "ConstantInt"); + AddField(treasureSubtableType, "Objects", "TreasureSubtableObject"); + + var treasureObjectType = new StatEntryType("TreasureSubtableObject", null, null); + Types.Add(treasureObjectType.Name, treasureObjectType); + AddField(treasureObjectType, "Drop", "TreasureDrop"); // FIXME validate + AddField(treasureObjectType, "Frequency", "ConstantInt"); + AddField(treasureObjectType, "Common", "ConstantInt"); + AddField(treasureObjectType, "Uncommon", "ConstantInt"); + AddField(treasureObjectType, "Rare", "ConstantInt"); + AddField(treasureObjectType, "Epic", "ConstantInt"); + AddField(treasureObjectType, "Legendary", "ConstantInt"); + AddField(treasureObjectType, "Divine", "ConstantInt"); + AddField(treasureObjectType, "Unique", "ConstantInt"); + AddEnumeration("ResurrectType", [ "Living", diff --git a/LSLib/LS/Stats/StatFileParser.cs b/LSLib/LS/Stats/StatFileParser.cs index e299f95a..632f37a8 100644 --- a/LSLib/LS/Stats/StatFileParser.cs +++ b/LSLib/LS/Stats/StatFileParser.cs @@ -1,7 +1,9 @@ using LSLib.LS.Stats.StatParser; +using LSLib.LS.Story; using LSLib.LS.Story.GoalParser; using System; using System.Collections.Generic; +using System.ComponentModel.DataAnnotations; using System.Data; using System.IO; using System.Linq; @@ -63,9 +65,8 @@ public class StatLoadingError { public string Code; public string Message; - public string Path; - public Int32 Line; - public string StatObjectName; + public CodeLocation Location; + public List Contexts; } public class StatLoadingContext @@ -75,16 +76,17 @@ public class StatLoadingContext public Dictionary> DeclarationsByType = []; public Dictionary> ResolvedDeclarationsByType = []; public Dictionary> GuidResources = []; + public readonly HashSet ObjectCategories = []; - public void LogError(string code, string message, string path = null, int line = 0, string statObjectName = null) + public void LogError(string code, string message, CodeLocation location = null, + List contexts = null) { Errors.Add(new StatLoadingError { Code = code, Message = message, - Path = path, - Line = line, - StatObjectName = statObjectName + Location = location, + Contexts = contexts }); } } @@ -105,10 +107,10 @@ public bool ResolveUsageRef( out StatDeclaration basedOn) { var props = declaration.Properties; - var name = (string)props[type.NameProperty]; - if (type.BasedOnProperty != null && props.TryGetValue(type.BasedOnProperty, out object value)) + var name = (string)props[type.NameProperty].Value; + if (type.BasedOnProperty != null && props.TryGetValue(type.BasedOnProperty, out StatProperty prop)) { - var baseClass = (string)value; + var baseClass = (string)prop.Value; if (declarations.TryGetValue(baseClass, out StatDeclaration baseDeclaration)) { @@ -118,7 +120,7 @@ public bool ResolveUsageRef( else { context.LogError(DiagnosticCode.StatBaseClassNotKnown, $"Stats entry '{name}' references nonexistent base '{baseClass}'", - declaration.Location.FileName, declaration.Location.StartLine, name); + declaration.Location); basedOn = null; return false; } @@ -132,10 +134,11 @@ private void PropagateInheritedProperties(StatDeclaration parent, StatDeclaratio { foreach (var prop in parent.Properties) { - if (!descendant.Properties.ContainsKey(prop.Key)) + if (!descendant.Properties.ContainsKey(prop.Key) + // Only propagate types that are required to determine 
properties of stats entry subtypes + && (prop.Key == "SpellType" || prop.Key == "StatusType")) { descendant.Properties[prop.Key] = prop.Value; - descendant.PropertyLocations[prop.Key] = parent.PropertyLocations[prop.Key]; } } } @@ -151,6 +154,20 @@ private void PropagateInheritedProperties(List mappings) } } + private void ResolveObjectCategories(StatDeclaration declaration) + { + if (declaration.Properties.TryGetValue("ObjectCategory", out var prop)) + { + foreach (var category in ((string)prop.Value).Split(';')) + { + if (category.Length > 0) + { + context.ObjectCategories.Add(category); + } + } + } + } + public Dictionary ResolveUsageRefs(StatEntryType type, Dictionary declarations) { var mappings = new List(); @@ -158,7 +175,7 @@ public Dictionary ResolveUsageRefs(StatEntryType type, foreach (var declaration in declarations) { - if (declaration.Value.WasInstantiated) continue; + if (declaration.Value.WasValidated) continue; var succeeded = ResolveUsageRef(type, declaration.Value, declarations, out StatDeclaration baseClass); if (succeeded && baseClass != null) @@ -173,6 +190,7 @@ public Dictionary ResolveUsageRefs(StatEntryType type, if (succeeded || AllowMappingErrors) { resolved.Add(declaration.Key, declaration.Value); + ResolveObjectCategories(declaration.Value); } } @@ -186,7 +204,11 @@ class StatLoaderReferenceValidator(StatLoadingContext ctx) : IStatReferenceValid { public bool IsValidReference(string reference, string statType) { - if (ctx.DeclarationsByType.TryGetValue(statType, out var stats)) + if (statType == "ObjectCategory") + { + return ctx.ObjectCategories.Contains(reference); + } + else if (ctx.DeclarationsByType.TryGetValue(statType, out var stats)) { return stats.TryGetValue(reference, out _); } @@ -205,10 +227,15 @@ public bool IsValidGuidResource(string name, string resourceType) } } -public class StatLoader +public interface IPropertyValidator +{ + public void ValidateEntry(StatEntryType type, string declarationName, StatDeclaration declaration, PropertyDiagnosticContainer errors); +} + +public class StatLoader : IPropertyValidator { private readonly StatLoadingContext Context; - private readonly StatValueParserFactory ParserFactory; + private readonly StatValueValidatorFactory ValidatorFactory; private readonly StatLoaderReferenceValidator ReferenceValidator; public readonly DiagnosticContext DiagContext; @@ -216,7 +243,7 @@ public StatLoader(StatLoadingContext ctx) { Context = ctx; ReferenceValidator = new(ctx); - ParserFactory = new(ReferenceValidator); + ValidatorFactory = new(ReferenceValidator, this); DiagContext = new(); } @@ -229,7 +256,7 @@ private List ParseStatStream(string path, Stream stream) if (!parsed) { var location = scanner.LastLocation(); - Context.LogError(DiagnosticCode.StatSyntaxError, $"Syntax error at or near line {location.StartLine}, column {location.StartColumn}", path, location.StartLine); + Context.LogError(DiagnosticCode.StatSyntaxError, $"Syntax error at or near line {location.StartLine}, column {location.StartColumn}", location); } return parsed ? 
parser.GetDeclarations() : null; @@ -242,21 +269,21 @@ private void AddDeclarations(List declarations) // Fixup type if (!declaration.Properties.ContainsKey("EntityType")) { - Context.LogError(DiagnosticCode.StatEntityTypeUnknown, "Unable to determine type of stat declaration", declaration.Location.FileName, declaration.Location.StartLine); + Context.LogError(DiagnosticCode.StatEntityTypeUnknown, "Unable to determine type of stat declaration", declaration.Location); continue; } - var statType = declaration.Properties["EntityType"].ToString(); + var statType = declaration.Properties["EntityType"].Value.ToString(); if (!Context.Definitions.Types.TryGetValue(statType, out StatEntryType type)) { - Context.LogError(DiagnosticCode.StatEntityTypeUnknown, $"No definition exists for stat type '{statType}'", declaration.Location.FileName, declaration.Location.StartLine); + Context.LogError(DiagnosticCode.StatEntityTypeUnknown, $"No definition exists for stat type '{statType}'", declaration.Location); continue; } if (!declaration.Properties.ContainsKey(type.NameProperty)) { - Context.LogError(DiagnosticCode.StatNameMissing, $"Stat entry has no '{type.NameProperty}' property", declaration.Location.FileName, declaration.Location.StartLine); + Context.LogError(DiagnosticCode.StatNameMissing, $"Stat entry has no '{type.NameProperty}' property", declaration.Location); continue; } @@ -267,7 +294,7 @@ private void AddDeclarations(List declarations) } // TODO - duplicate declaration check? - var name = declaration.Properties[type.NameProperty].ToString(); + var name = declaration.Properties[type.NameProperty].Value.ToString(); declarationsByType[name] = declaration; } } @@ -291,109 +318,69 @@ public void ResolveUsageRef() } } - private object ParseProperty(StatEntryType type, string propertyName, object value, CodeLocation location, - string declarationName) + public void ValidateProperty(StatEntryType type, StatProperty property, + string declarationName, PropertyDiagnosticContainer errors) { - if (!type.Fields.TryGetValue(propertyName, out StatField field)) + if (!type.Fields.TryGetValue(property.Key, out StatField field)) { - Context.LogError(DiagnosticCode.StatPropertyUnsupported, $"Property '{propertyName}' is not supported on {type.Name} '{declarationName}'", - location?.FileName, location?.StartLine ?? 0, declarationName); - return null; + errors.Add($"Property '{property.Key}' is not supported on type {type.Name}"); + return; } - bool succeeded = false; - string errorText = null; - object parsed; - - if (value is String && propertyName.Length + ((string)value).Length > 4085) + if (property.Value is String && property.Key.Length + ((string)property.Value).Length > 4085) { - parsed = null; - Context.LogError(DiagnosticCode.StatPropertyValueInvalid, $"{type.Name} '{declarationName}' has invalid {propertyName}: Line cannot be longer than 4095 characters", - location?.FileName, location?.StartLine ?? 0, declarationName); + errors.Add("Line cannot be longer than 4095 characters"); } else if (field.Type != "Passthrough") { - var parser = field.GetParser(ParserFactory, Context.Definitions); - parsed = parser.Parse(DiagContext, value, ref succeeded, ref errorText); - } - else - { - parsed = value; - succeeded = true; - } - - if (errorText != null) - { - if (value is string v && v.Length > 500) - { - Context.LogError(DiagnosticCode.StatPropertyValueInvalid, $"{type.Name} '{declarationName}' has invalid {propertyName}: {errorText}", - location?.FileName, location?.StartLine ?? 
0, declarationName); - } - else - { - Context.LogError(DiagnosticCode.StatPropertyValueInvalid, $"{type.Name} '{declarationName}' has invalid {propertyName}: '{value}' ({errorText})", - location?.FileName, location?.StartLine ?? 0, declarationName); - } - } - - if (succeeded) - { - return parsed; - } - else - { - return null; + var validator = field.GetValidator(ValidatorFactory, Context.Definitions); + validator.Validate(DiagContext, property.ValueLocation, property.Value, errors); } } - private StatEntry InstantiateEntry(StatEntryType type, string declarationName, StatDeclaration declaration) + public void ValidateEntry(StatEntryType type, string declarationName, StatDeclaration declaration, PropertyDiagnosticContainer entryErrors) { - return InstantiateEntryInternal(type, declarationName, declaration.Location, - declaration.Properties, declaration.PropertyLocations); - } - - private StatEntry InstantiateEntryInternal(StatEntryType type, string declarationName, - CodeLocation location, Dictionary properties, Dictionary propertyLocations) - { - var entity = new StatEntry - { - Name = declarationName, - Type = type, - BasedOn = null, // FIXME - Location = location, - Properties = [], - PropertyLocations = propertyLocations - }; - - foreach (var property in properties) + var errors = new PropertyDiagnosticContainer(); + foreach (var property in declaration.Properties) { if (property.Key == "EntityType") { continue; } - propertyLocations.TryGetValue(property.Key, out CodeLocation propLocation); - var parsed = ParseProperty(type, property.Key, property.Value, propLocation, declarationName); - if (parsed != null) + var lastPropertySpan = DiagContext.PropertyValueSpan; + DiagContext.PropertyValueSpan = property.Value.ValueLocation; + ValidateProperty(type, property.Value, declarationName, errors); + DiagContext.PropertyValueSpan = lastPropertySpan; + + if (!errors.Empty) { - entity.Properties.Add(property.Key, parsed); + errors.AddContext(PropertyDiagnosticContextType.Property, property.Key, property.Value.ValueLocation ?? 
property.Value.Location); + errors.MergeInto(entryErrors); + errors.Clear(); } } - - return entity; } - public void InstantiateEntries() + public void ValidateEntries() { + var errors = new PropertyDiagnosticContainer(); foreach (var type in Context.ResolvedDeclarationsByType) { var typeDefn = Context.Definitions.Types[type.Key]; foreach (var declaration in type.Value) { - if (!declaration.Value.WasInstantiated) + if (!declaration.Value.WasValidated) { - InstantiateEntry(typeDefn, declaration.Key, declaration.Value); - declaration.Value.WasInstantiated = true; + ValidateEntry(typeDefn, declaration.Key, declaration.Value, errors); + declaration.Value.WasValidated = true; + + if (!errors.Empty) + { + errors.AddContext(PropertyDiagnosticContextType.Entry, declaration.Key, declaration.Value.Location); + errors.MergeInto(Context, declaration.Key); + errors.Clear(); + } } } } diff --git a/LSLib/LS/Stats/StatValueParsers.cs b/LSLib/LS/Stats/StatValueParsers.cs index e0d78872..2760a6f4 100644 --- a/LSLib/LS/Stats/StatValueParsers.cs +++ b/LSLib/LS/Stats/StatValueParsers.cs @@ -1,4 +1,7 @@ using LSLib.LS.Stats.Properties; +using LSLib.LS.Stats.StatParser; +using LSLib.LS.Story; +using LSLib.LS.Story.GoalParser; using System; using System.Collections.Generic; using System.Globalization; @@ -11,20 +14,112 @@ namespace LSLib.LS.Stats; public class DiagnosticContext { public bool IgnoreMissingReferences = false; + public StatDeclaration CurrentDeclaration; + public CodeLocation PropertyValueSpan; } -public interface IStatValueParser +public enum PropertyDiagnosticContextType { - object Parse(DiagnosticContext ctx, object value, ref bool succeeded, ref string errorText); + Argument, + Call, + Property, + Entry } -abstract public class StatStringParser : IStatValueParser +public struct PropertyDiagnosticContext { - abstract public object Parse(DiagnosticContext ctx, string value, ref bool succeeded, ref string errorText); + public PropertyDiagnosticContextType Type; + public string Context; + public CodeLocation Location; +} + +public class PropertyDiagnostic +{ + public string Message; + public CodeLocation Location; + public List Contexts; +} + +public class PropertyDiagnosticContainer +{ + public List Messages; + + public bool Empty + { + get { return Messages == null || Messages.Count == 0; } + } + + public void AddContext(PropertyDiagnosticContextType type, string name, CodeLocation location = null) + { + if (Empty) return; + + var context = new PropertyDiagnosticContext + { + Type = type, + Context = name, + Location = location + }; + + foreach (var msg in Messages) + { + msg.Contexts ??= []; + msg.Contexts.Add(context); + } + } + + public void Add(string message, CodeLocation location = null) + { + Messages ??= []; + Messages.Add(new PropertyDiagnostic + { + Message = message, + Location = location + }); + } + + public void MergeInto(PropertyDiagnosticContainer container) + { + if (Empty) return; + + container.Messages ??= []; + container.Messages.AddRange(Messages); + } + + public void MergeInto(StatLoadingContext context, string declarationName) + { + if (Empty) return; + + foreach (var message in Messages) + { + var location = message.Location; + foreach (var ctx in message.Contexts) + { + location ??= ctx.Location; + } + + context.LogError(DiagnosticCode.StatPropertyValueInvalid, message.Message, + location, message.Contexts); + } + } + + public void Clear() + { + Messages?.Clear(); + } +} + +public interface IStatValueValidator +{ + void Validate(DiagnosticContext ctx, CodeLocation location, 
object value, PropertyDiagnosticContainer errors); +} + +abstract public class StatStringValidator : IStatValueValidator +{ + abstract public void Validate(DiagnosticContext ctx, string value, PropertyDiagnosticContainer errors); - public object Parse(DiagnosticContext ctx, object value, ref bool succeeded, ref string errorText) + public void Validate(DiagnosticContext ctx, CodeLocation location, object value, PropertyDiagnosticContainer errors) { - return Parse(ctx, (string)value, ref succeeded, ref errorText); + Validate(ctx, (string)value, errors); } } @@ -39,219 +134,130 @@ public interface IStatReferenceValidator bool IsValidGuidResource(string name, string resourceType); } -public class BooleanParser : StatStringParser +public class BooleanValidator : StatStringValidator { - public override object Parse(DiagnosticContext ctx, string value, ref bool succeeded, ref string errorText) + public override void Validate(DiagnosticContext ctx, string value, PropertyDiagnosticContainer errors) { - if (value == "true" || value == "false" || value == "") + if (value != "true" && value != "false" && value != "") { - succeeded = true; - return (value == "true"); - } - else - { - succeeded = false; - errorText = "expected boolean value 'true' or 'false'"; - return null; + errors.Add("expected boolean value 'true' or 'false'"); } } } -public class Int32Parser : StatStringParser +public class Int32Validator : StatStringValidator { - public override object Parse(DiagnosticContext ctx, string value, ref bool succeeded, ref string errorText) + public override void Validate(DiagnosticContext ctx, string value, PropertyDiagnosticContainer errors) { - if (value == "") - { - succeeded = true; - return 0; - } - else if (Int32.TryParse(value, out int intval)) + if (value != "" && !Int32.TryParse(value, out int intval)) { - succeeded = true; - return intval; - } - else - { - succeeded = false; - errorText = "expected an integer value"; - return null; + errors.Add("expected an integer value"); } } } -public class FloatParser : StatStringParser +public class FloatValidator : StatStringValidator { - public override object Parse(DiagnosticContext ctx, string value, ref bool succeeded, ref string errorText) + public override void Validate(DiagnosticContext ctx, string value, PropertyDiagnosticContainer errors) { - if (value == "") - { - succeeded = true; - return 0.0f; - } - else if (Single.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out float floatval)) + if (value != "" && !Single.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out float floatval)) { - succeeded = true; - return floatval; - } - else - { - succeeded = false; - errorText = "expected a float value"; - return null; + errors.Add("expected a float value"); } } } -public class EnumParser(StatEnumeration enumeration) : StatStringParser +public class EnumValidator(StatEnumeration enumeration) : StatStringValidator { private readonly StatEnumeration Enumeration = enumeration ?? 
throw new ArgumentNullException(); - public override object Parse(DiagnosticContext ctx, string value, ref bool succeeded, ref string errorText) + public override void Validate(DiagnosticContext ctx, string value, PropertyDiagnosticContainer errors) { - if (value == null || value == "") + if (value != "" && !Enumeration.ValueToIndexMap.ContainsKey(value)) { - value = Enumeration.Values[0]; - } - - if (Enumeration.ValueToIndexMap.ContainsKey(value)) - { - succeeded = true; - return value; - } - else - { - succeeded = false; - if (Enumeration.Values.Count > 4) + if (Enumeration.Values.Count > 20) { - errorText = "expected one of: " + String.Join(", ", Enumeration.Values.Take(4)) + ", ..."; + errors.Add("expected one of: " + String.Join(", ", Enumeration.Values.Take(20)) + ", ..."); } else { - errorText = "expected one of: " + String.Join(", ", Enumeration.Values); + errors.Add("expected one of: " + String.Join(", ", Enumeration.Values)); } - return null; } } } -public class MultiValueEnumParser(StatEnumeration enumeration) : StatStringParser +public class MultiValueEnumValidator(StatEnumeration enumeration) : StatStringValidator { - private readonly EnumParser Parser = new(enumeration); + private readonly EnumValidator Validator = new(enumeration); - public override object Parse(DiagnosticContext ctx, string value, ref bool succeeded, ref string errorText) + public override void Validate(DiagnosticContext ctx, string value, PropertyDiagnosticContainer errors) { - succeeded = true; - - if (value.Length == 0) - { - return true; - } + if (value.Length == 0) return; foreach (var item in value.Split([';'])) { - Parser.Parse(ctx, item.Trim([' ']), ref succeeded, ref errorText); - if (!succeeded) - { - errorText = $"Value '{item}' not supported; {errorText}"; - return null; - } + Validator.Validate(ctx, item.Trim([' ']), errors); } - - return value; } } -public class StringParser : StatStringParser +public class StringValidator : StatStringValidator { - public override object Parse(DiagnosticContext ctx, string value, ref bool succeeded, ref string errorText) + public override void Validate(DiagnosticContext ctx, string value, PropertyDiagnosticContainer errors) { - if (value.Length > 2048) + if (value.Length > 2047) { - errorText = "Value cannot be longer than 2048 characters"; - succeeded = false; - return null; - } - else - { - errorText = null; - succeeded = true; - return value; + // FixedString constructors crash over 2047 chars as there is no pool for that string size + errors.Add("Value cannot be longer than 2047 characters"); } } } -public class UUIDParser : StatStringParser +public class UUIDValidator : StatStringValidator { - public override object Parse(DiagnosticContext ctx, string value, ref bool succeeded, ref string errorText) + public override void Validate(DiagnosticContext ctx, string value, PropertyDiagnosticContainer errors) { - if (value == "") - { - succeeded = true; - return Guid.Empty; - } - else if (Guid.TryParseExact(value, "D", out Guid parsed)) + if (value != "" && !Guid.TryParseExact(value, "D", out Guid parsed)) { - succeeded = true; - return parsed; - } - else - { - errorText = $"'{value}' is not a valid UUID"; - succeeded = false; - return null; + errors.Add($"'{value}' is not a valid UUID"); } } } -public class StatReferenceParser(IStatReferenceValidator validator, List constraints) : StatStringParser +public class StatReferenceValidator(IStatReferenceValidator validator, List constraints) : StatStringValidator { - public override object Parse(DiagnosticContext 
ctx, string value, ref bool succeeded, ref string errorText) + public override void Validate(DiagnosticContext ctx, string value, PropertyDiagnosticContainer errors) { - if (ctx.IgnoreMissingReferences || value == "") - { - succeeded = true; - return value; - } + if (ctx.IgnoreMissingReferences || value == "") return; foreach (var constraint in constraints) { if (validator.IsValidReference(value, constraint.StatType)) { - succeeded = true; - return value; + return; } } var refTypes = String.Join("/", constraints.Select(c => c.StatType)); - errorText = $"'{value}' is not a valid {refTypes} reference"; - succeeded = false; - return null; + errors.Add($"'{value}' is not a valid {refTypes} reference"); } } -public class MultiValueStatReferenceParser(IStatReferenceValidator validator, List constraints) : StatStringParser +public class MultiValueStatReferenceValidator(IStatReferenceValidator validator, List constraints) : StatStringValidator { - private readonly StatReferenceParser Parser = new(validator, constraints); + private readonly StatReferenceValidator Validator = new(validator, constraints); - public override object Parse(DiagnosticContext ctx, string value, ref bool succeeded, ref string errorText) + public override void Validate(DiagnosticContext ctx, string value, PropertyDiagnosticContainer errors) { - succeeded = true; - foreach (var item in value.Split([';'])) { var trimmed = item.Trim([' ']); if (trimmed.Length > 0) { - Parser.Parse(ctx, trimmed, ref succeeded, ref errorText); - if (!succeeded) - { - return null; - } + Validator.Validate(ctx, trimmed, errors); } } - - return value; } } @@ -262,71 +268,60 @@ public enum ExpressionType DescriptionParams }; -public class ExpressionParser(String validatorType, StatDefinitionRepository definitions, - StatValueParserFactory parserFactory, ExpressionType type) : StatStringParser +public class ExpressionValidator(String validatorType, StatDefinitionRepository definitions, + StatValueValidatorFactory validatorFactory, ExpressionType type) : StatStringValidator { - public override object Parse(DiagnosticContext ctx, string value, ref bool succeeded, ref string errorText) + public override void Validate(DiagnosticContext ctx, string value, PropertyDiagnosticContainer errors) { + var typeLen = 10 + validatorType.Length; var valueBytes = Encoding.UTF8.GetBytes("__TYPE_" + validatorType + "__ " + value.TrimEnd()); using var buf = new MemoryStream(valueBytes); - List errorTexts = []; var scanner = new StatPropertyScanner(); scanner.SetSource(buf); - var parser = new StatPropertyParser(scanner, definitions, ctx, parserFactory, valueBytes, type); - parser.OnError += (string message) => errorTexts.Add(message); - succeeded = parser.Parse(); + var parser = new StatPropertyParser(scanner, definitions, ctx, validatorFactory, valueBytes, type, errors, ctx.PropertyValueSpan, typeLen); + var succeeded = parser.Parse(); if (!succeeded) { + // FIXME pass location to error container var location = scanner.LastLocation(); - var column = location.StartColumn - 10 - validatorType.Length + 1; - errorText = $"Syntax error at or near character {column}"; - return null; - } - else if (errorTexts.Count > 0) - { - succeeded = false; - errorText = String.Join("; ", errorTexts); - return null; - } - else - { - succeeded = true; - return parser.GetParsedObject(); + var column = location.StartColumn - typeLen; + errors.Add($"Syntax error at or near character {column}"); } } } -public class LuaExpressionParser : StatStringParser +public class LuaExpressionValidator : 
StatStringValidator { - public override object Parse(DiagnosticContext ctx, string value, ref bool succeeded, ref string errorText) + public override void Validate(DiagnosticContext ctx, string value, PropertyDiagnosticContainer errors) { - value = "BHAALS_BOON_SLAYER.Duration-1"; var valueBytes = Encoding.UTF8.GetBytes(value); using var buf = new MemoryStream(valueBytes); var scanner = new Lua.StatLuaScanner(); scanner.SetSource(buf); var parser = new Lua.StatLuaParser(scanner); - succeeded = parser.Parse(); + var succeeded = parser.Parse(); if (!succeeded) { + // FIXME pass location to error container var location = scanner.LastLocation(); - errorText = $"Syntax error at or near character {location.StartColumn}"; - return null; - } - else - { - succeeded = true; - return null; + if (location.StartColumn != -1) + { + errors.Add($"Syntax error at or near character {location.StartColumn}"); + } + else + { + errors.Add($"Syntax error"); + } } } } -public class UseCostsParser(IStatReferenceValidator validator) : StatStringParser +public class UseCostsValidator(IStatReferenceValidator validator) : StatStringValidator { - public override object Parse(DiagnosticContext ctx, string value, ref bool succeeded, ref string errorText) + public override void Validate(DiagnosticContext ctx, string value, PropertyDiagnosticContainer errors) { - if (value.Length == 0) return value; + if (value.Length == 0) return; foreach (var resource in value.Split(';')) { @@ -336,14 +331,13 @@ public override object Parse(DiagnosticContext ctx, string value, ref bool succe var parts = res.Split(':'); if (parts.Length < 2 || parts.Length > 4) { - errorText = $"Malformed use costs"; - return null; + errors.Add($"Malformed use costs"); + return; } if (!ctx.IgnoreMissingReferences && !validator.IsValidGuidResource(parts[0], "ActionResource") && !validator.IsValidGuidResource(parts[0], "ActionResourceGroup")) { - errorText = $"Nonexistent action resource or action resource group: {parts[0]}"; - return null; + errors.Add($"Nonexistent action resource or action resource group: {parts[0]}"); } var distanceExpr = parts[1].Split('*'); @@ -351,93 +345,114 @@ public override object Parse(DiagnosticContext ctx, string value, ref bool succe { if (distanceExpr.Length > 1 && !Single.TryParse(distanceExpr[1], NumberStyles.Float, CultureInfo.InvariantCulture, out float floatval)) { - errorText = $"Malformed distance multiplier: {distanceExpr[1]}"; - return null; + errors.Add($"Malformed distance multiplier: {distanceExpr[1]}"); + continue; } } else if (!Single.TryParse(parts[1], NumberStyles.Float, CultureInfo.InvariantCulture, out float floatval)) { - errorText = $"Malformed resource amount: {parts[1]}"; - return null; + errors.Add($"Malformed resource amount: {parts[1]}"); + continue; } if (parts.Length == 3 && !Int32.TryParse(parts[2], NumberStyles.Integer, CultureInfo.InvariantCulture, out int intval)) { - errorText = $"Malformed level: {parts[2]}"; - return null; + errors.Add($"Malformed level: {parts[2]}"); + continue; } if (parts.Length == 4 && !Int32.TryParse(parts[3], NumberStyles.Integer, CultureInfo.InvariantCulture, out intval)) { - errorText = $"Malformed level: {parts[3]}"; - return null; + errors.Add($"Malformed level: {parts[3]}"); + continue; } } - - succeeded = true; - return value; } } -public class DiceRollParser : StatStringParser +public class DiceRollValidator : StatStringValidator { - public override object Parse(DiagnosticContext ctx, string value, ref bool succeeded, ref string errorText) + public override void 
Validate(DiagnosticContext ctx, string value, PropertyDiagnosticContainer errors) { - if (value.Length == 0) return value; + if (value.Length == 0) return; var parts = value.Split('d'); if (parts.Length != 2 || !Int32.TryParse(parts[0], NumberStyles.Integer, CultureInfo.InvariantCulture, out int numDice) || !Int32.TryParse(parts[1], NumberStyles.Integer, CultureInfo.InvariantCulture, out int dieSize)) { - errorText = $"Malformed dice roll"; - return null; + errors.Add($"Malformed dice roll"); + return; } if (dieSize != 4 && dieSize != 6 && dieSize != 8 && dieSize != 10 && dieSize != 12 && dieSize != 20 && dieSize != 100) { - errorText = $"Invalid die size: {dieSize}"; - return null; + errors.Add($"Invalid die size: {dieSize}"); + return; } - - succeeded = true; - return value; } } -public class AnyParser(IEnumerable parsers, string message = null) : IStatValueParser +public class TreasureDropValidator(IStatReferenceValidator validator) : StatStringValidator { - private readonly List Parsers = parsers.ToList(); - - public object Parse(DiagnosticContext ctx, object value, ref bool succeeded, ref string errorText) + public override void Validate(DiagnosticContext ctx, string value, PropertyDiagnosticContainer errors) { - List errors = []; - foreach (var parser in Parsers) + if (value.Length > 2 && value[0] == 'I' && value[1] == '_') { - succeeded = false; - string error = null; - var result = parser.Parse(ctx, value, ref succeeded, ref error); - if (succeeded) + var item = value.Substring(2); + if (!validator.IsValidReference(item, "Object") + && !validator.IsValidReference(item, "Armor") + && !validator.IsValidReference(item, "Weapon")) { - return result; + errors.Add($"Nonexistent object, armor or weapon: {item}"); } - else + } + else if (value.Length > 2 && value[0] == 'T' && value[1] == '_') + { + var treasureTable = value.Substring(2); + if (!validator.IsValidReference(treasureTable, "TreasureTable")) { - errors.Add(error); + errors.Add($"Nonexistent treasure table: {treasureTable}"); } } + else if (!validator.IsValidReference(value, "ObjectCategory")) + { + errors.Add($"Nonexistent object category: {value}"); + } + } +} - if (message != null && message.Length > 0) +public class ObjectListValidator(IPropertyValidator PropertyValidator, StatEntryType ObjectType) : IStatValueValidator +{ + public void Validate(DiagnosticContext ctx, CodeLocation location, object value, PropertyDiagnosticContainer errors) + { + var objs = (IEnumerable)value; + foreach (var subobject in objs) { - errorText = $"'{value}': {message}"; + // FIXME - pass declaration name from ctx + PropertyValidator.ValidateEntry(ObjectType, "", (StatDeclaration)subobject, errors); } - else + } +} + +public class AnyParser(IEnumerable validators, string message = null) : IStatValueValidator +{ + private readonly List Validators = validators.ToList(); + + public void Validate(DiagnosticContext ctx, CodeLocation location, object value, PropertyDiagnosticContainer errors) + { + foreach (var validator in Validators) { - errorText = String.Join("; ", errors); + errors.Messages?.Clear(); + validator.Validate(ctx, location, value, errors); + if (errors.Messages == null || errors.Messages.Count == 0) return; } - return null; + if (message != null) + { + errors.Add(message); + } } } @@ -447,14 +462,14 @@ public class AnyType public string Message; } -public class StatValueParserFactory(IStatReferenceValidator referenceValidator) +public class StatValueValidatorFactory(IStatReferenceValidator ReferenceValidator, IPropertyValidator 
PropertyValidator) { - public IStatValueParser CreateReferenceParser(List constraints) + public IStatValueValidator CreateReferenceValidator(List constraints) { - return new StatReferenceParser(referenceValidator, constraints); + return new StatReferenceValidator(ReferenceValidator, constraints); } - public IStatValueParser CreateParser(StatField field, StatDefinitionRepository definitions) + public IStatValueValidator CreateValidator(StatField field, StatDefinitionRepository definitions) { switch (field.Name) { @@ -462,38 +477,38 @@ public IStatValueParser CreateParser(StatField field, StatDefinitionRepository d case "DefaultBoosts": case "BoostsOnEquipMainHand": case "BoostsOnEquipOffHand": - return new ExpressionParser("Properties", definitions, this, ExpressionType.Boost); + return new ExpressionValidator("Properties", definitions, this, ExpressionType.Boost); case "TooltipDamage": case "TooltipDamageList": case "TooltipStatusApply": case "TooltipConditionalDamage": - return new ExpressionParser("Properties", definitions, this, ExpressionType.DescriptionParams); + return new ExpressionValidator("Properties", definitions, this, ExpressionType.DescriptionParams); case "DescriptionParams": case "ExtraDescriptionParams": case "ShortDescriptionParams": case "TooltipUpcastDescriptionParams": - return new ExpressionParser("DescriptionParams", definitions, this, ExpressionType.DescriptionParams); + return new ExpressionValidator("DescriptionParams", definitions, this, ExpressionType.DescriptionParams); case "ConcentrationSpellID": case "CombatAIOverrideSpell": case "SpellContainerID": case "FollowUpOriginalSpell": case "RootSpellID": - return new StatReferenceParser(referenceValidator, + return new StatReferenceValidator(ReferenceValidator, [ new StatReferenceConstraint{ StatType = "SpellData" } ]); case "ContainerSpells": - return new MultiValueStatReferenceParser(referenceValidator, + return new MultiValueStatReferenceValidator(ReferenceValidator, [ new StatReferenceConstraint{ StatType = "SpellData" } ]); case "InterruptPrototype": - return new StatReferenceParser(referenceValidator, + return new StatReferenceValidator(ReferenceValidator, [ new StatReferenceConstraint{ StatType = "InterruptData" } ]); @@ -502,14 +517,14 @@ public IStatValueParser CreateParser(StatField field, StatDefinitionRepository d case "PassivesOnEquip": case "PassivesMainHand": case "PassivesOffHand": - return new MultiValueStatReferenceParser(referenceValidator, + return new MultiValueStatReferenceValidator(ReferenceValidator, [ new StatReferenceConstraint{ StatType = "PassiveData" } ]); case "StatusOnEquip": case "StatusInInventory": - return new MultiValueStatReferenceParser(referenceValidator, + return new MultiValueStatReferenceValidator(ReferenceValidator, [ new StatReferenceConstraint{ StatType = "StatusData" } ]); @@ -521,12 +536,12 @@ public IStatValueParser CreateParser(StatField field, StatDefinitionRepository d case "TooltipUseCosts": case "RitualCosts": case "HitCosts": - return new UseCostsParser(referenceValidator); + return new UseCostsValidator(ReferenceValidator); case "Damage": case "VersatileDamage": case "StableRoll": - return new DiceRollParser(); + return new DiceRollValidator(); case "Template": case "StatusEffectOverride": @@ -544,16 +559,16 @@ public IStatValueParser CreateParser(StatField field, StatDefinitionRepository d case "CastEffect": case "PrepareEffect": case "TooltipOnSave": - return new UUIDParser(); + return new UUIDValidator(); case "AmountOfTargets": - return new 
LuaExpressionParser(); + return new LuaExpressionValidator(); } - return CreateParser(field.Type, field.EnumType, field.ReferenceTypes, definitions); + return CreateValidator(field.Type, field.EnumType, field.ReferenceTypes, definitions); } - public IStatValueParser CreateParser(string type, StatEnumeration enumType, List constraints, StatDefinitionRepository definitions) + public IStatValueValidator CreateValidator(string type, StatEnumeration enumType, List constraints, StatDefinitionRepository definitions) { if (enumType == null && definitions.Enumerations.TryGetValue(type, out StatEnumeration enumInfo) && enumInfo.Values.Count > 0) { @@ -581,73 +596,79 @@ public IStatValueParser CreateParser(string type, StatEnumeration enumType, List || type == "StatusGroupFlags" || type == "StatsFunctorContext") { - return new MultiValueEnumParser(enumType); + return new MultiValueEnumValidator(enumType); } else { - return new EnumParser(enumType); + return new EnumValidator(enumType); } } return type switch { - "Boolean" => new BooleanParser(), - "ConstantInt" or "Int" => new Int32Parser(), - "ConstantFloat" or "Float" => new FloatParser(), - "String" or "FixedString" or "TranslatedString" => new StringParser(), - "Guid" => new UUIDParser(), - "Requirements" => new ExpressionParser("Requirements", definitions, this, ExpressionType.Functor), - "StatsFunctors" => new ExpressionParser("Properties", definitions, this, ExpressionType.Functor), - "Lua" or "RollConditions" or "TargetConditions" or "Conditions" => new LuaExpressionParser(), - "UseCosts" => new UseCostsParser(referenceValidator), - "StatReference" => new StatReferenceParser(referenceValidator, constraints), - "StatusId" => new AnyParser(new List { - new EnumParser(definitions.Enumerations["EngineStatusType"]), - new StatReferenceParser(referenceValidator, + "Boolean" => new BooleanValidator(), + "ConstantInt" or "Int" => new Int32Validator(), + "ConstantFloat" or "Float" => new FloatValidator(), + "String" or "FixedString" or "TranslatedString" => new StringValidator(), + "Guid" => new UUIDValidator(), + "Requirements" => new ExpressionValidator("Requirements", definitions, this, ExpressionType.Functor), + "StatsFunctors" => new ExpressionValidator("Properties", definitions, this, ExpressionType.Functor), + "Lua" or "RollConditions" or "TargetConditions" or "Conditions" => new LuaExpressionValidator(), + "UseCosts" => new UseCostsValidator(ReferenceValidator), + "StatReference" => new StatReferenceValidator(ReferenceValidator, constraints), + "StatusId" => new AnyParser(new List { + new EnumValidator(definitions.Enumerations["EngineStatusType"]), + new StatReferenceValidator(ReferenceValidator, [ new StatReferenceConstraint{ StatType = "StatusData" } ]) }, "Expected a status name"), - "ResurrectTypes" => new MultiValueEnumParser(definitions.Enumerations["ResurrectType"]), - "StatusIdOrGroup" => new AnyParser(new List { - new EnumParser(definitions.Enumerations["StatusGroupFlags"]), - new EnumParser(definitions.Enumerations["EngineStatusType"]), - new StatReferenceParser(referenceValidator, + "ResurrectTypes" => new MultiValueEnumValidator(definitions.Enumerations["ResurrectType"]), + "StatusIdOrGroup" => new AnyParser(new List { + new EnumValidator(definitions.Enumerations["StatusGroupFlags"]), + new EnumValidator(definitions.Enumerations["EngineStatusType"]), + new StatReferenceValidator(ReferenceValidator, [ new StatReferenceConstraint{ StatType = "StatusData" } ]) }, "Expected a status or StatusGroup name"), - "SummonDurationOrInt" => new 
AnyParser(new List { - new EnumParser(definitions.Enumerations["SummonDuration"]), - new Int32Parser() + "SummonDurationOrInt" => new AnyParser(new List { + new EnumValidator(definitions.Enumerations["SummonDuration"]), + new Int32Validator() }), - "AllOrDamageType" => new AnyParser(new List { - new EnumParser(definitions.Enumerations["AllEnum"]), - new EnumParser(definitions.Enumerations["Damage Type"]), + "AllOrDamageType" => new AnyParser(new List { + new EnumValidator(definitions.Enumerations["AllEnum"]), + new EnumValidator(definitions.Enumerations["Damage Type"]), }), - "RollAdjustmentTypeOrDamageType" => new AnyParser(new List { - new EnumParser(definitions.Enumerations["RollAdjustmentType"]), - new EnumParser(definitions.Enumerations["Damage Type"]), + "RollAdjustmentTypeOrDamageType" => new AnyParser(new List { + new EnumValidator(definitions.Enumerations["RollAdjustmentType"]), + new EnumValidator(definitions.Enumerations["Damage Type"]), }), - "AbilityOrAttackRollAbility" => new AnyParser(new List { - new EnumParser(definitions.Enumerations["Ability"]), - new EnumParser(definitions.Enumerations["AttackRollAbility"]), + "AbilityOrAttackRollAbility" => new AnyParser(new List { + new EnumValidator(definitions.Enumerations["Ability"]), + new EnumValidator(definitions.Enumerations["AttackRollAbility"]), }), - "DamageTypeOrDealDamageWeaponDamageType" => new AnyParser(new List { - new EnumParser(definitions.Enumerations["Damage Type"]), - new EnumParser(definitions.Enumerations["DealDamageWeaponDamageType"]), + "DamageTypeOrDealDamageWeaponDamageType" => new AnyParser(new List { + new EnumValidator(definitions.Enumerations["Damage Type"]), + new EnumValidator(definitions.Enumerations["DealDamageWeaponDamageType"]), }), - "SpellId" => new StatReferenceParser(referenceValidator, + "SpellId" => new StatReferenceValidator(ReferenceValidator, [ new StatReferenceConstraint{ StatType = "SpellData" } ]), - "Interrupt" => new StatReferenceParser(referenceValidator, + "Interrupt" => new StatReferenceValidator(ReferenceValidator, [ new StatReferenceConstraint{ StatType = "InterruptData" } ]), - // THESE NEED TO BE FIXED! 
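                // Usage sketch (assumed names, not part of CreateValidator): AnyParser accepts a value as soon as any
                // of its composed validators reports no diagnostics, which is how mixed-type fields such as
                // "StatusId" or "StatusIdOrGroup" are handled. With `definitions`, `referenceValidator` and a
                // DiagnosticContext `ctx` assumed to be in scope:
                //
                //   IStatValueValidator statusId = new AnyParser(new List<IStatValueValidator> {
                //       new EnumValidator(definitions.Enumerations["EngineStatusType"]),
                //       new StatReferenceValidator(referenceValidator,
                //           [ new StatReferenceConstraint { StatType = "StatusData" } ])
                //   }, "Expected a status name");
                //
                //   var errors = new PropertyDiagnosticContainer();
                //   statusId.Validate(ctx, null, "BURNING", errors);   // "BURNING" is a hypothetical status name; location may be null
                //   bool ok = errors.Empty;                            // true if either the enum or the StatusData branch matched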
- "StatusIDs" => new StringParser(), - _ => throw new ArgumentException($"Could not create parser for type '{type}'"), + "TreasureSubtables" => new ObjectListValidator(PropertyValidator, definitions.Types["TreasureSubtable"]), + "TreasureSubtableObject" => new ObjectListValidator(PropertyValidator, definitions.Types["TreasureSubtableObject"]), + "TreasureDrop" => new TreasureDropValidator(ReferenceValidator), + "StatusIDs" => + new MultiValueStatReferenceValidator(ReferenceValidator, + [ + new StatReferenceConstraint { StatType = "StatusData" } + ]), + _ => throw new ArgumentException($"Could not create parser for type '{type}'"), }; } } diff --git a/LSLib/LSLib.csproj b/LSLib/LSLib.csproj index 5a71b1a4..64702a01 100644 --- a/LSLib/LSLib.csproj +++ b/LSLib/LSLib.csproj @@ -55,5 +55,6 @@ Copyright © Norbyte 2012-2023 1.18.5.0 1.18.5.0 + x64 \ No newline at end of file diff --git a/LSTools.sln b/LSTools.sln index ab953a61..ce091c87 100644 --- a/LSTools.sln +++ b/LSTools.sln @@ -31,6 +31,20 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "VTexTool", "VTexTool\VTexTo {D8B26B12-E45C-47EA-88F7-56628EB2CCD1} = {D8B26B12-E45C-47EA-88F7-56628EB2CCD1} EndProjectSection EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LSLibSearch", "LSLibSearch\LSLibSearch.csproj", "{0DD93214-DCD4-4588-A33F-E4AF3100361A}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "StatFastParser", "StatFastParser\StatFastParser.csproj", "{072FD9B6-F2C5-48BC-9209-F2A8F7500345}" + ProjectSection(ProjectDependencies) = postProject + {46372C50-4288-4B8E-AF21-C934560600E0} = {46372C50-4288-4B8E-AF21-C934560600E0} + EndProjectSection +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LSLibSearchIndexer", "LSLibSearchIndexer\LSLibSearchIndexer.csproj", "{2E23150D-244A-4950-B50C-135225924374}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{6DF990BA-71FD-4DAE-9BAD-53B4B9097C13}" + ProjectSection(SolutionItems) = preProject + .editorconfig = .editorconfig + EndProjectSection +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -158,6 +172,42 @@ Global {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.RelWithDebInfo|x64.Build.0 = Release|Any CPU + {0DD93214-DCD4-4588-A33F-E4AF3100361A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0DD93214-DCD4-4588-A33F-E4AF3100361A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0DD93214-DCD4-4588-A33F-E4AF3100361A}.Debug|x64.ActiveCfg = Debug|Any CPU + {0DD93214-DCD4-4588-A33F-E4AF3100361A}.Debug|x64.Build.0 = Debug|Any CPU + {0DD93214-DCD4-4588-A33F-E4AF3100361A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0DD93214-DCD4-4588-A33F-E4AF3100361A}.Release|Any CPU.Build.0 = Release|Any CPU + {0DD93214-DCD4-4588-A33F-E4AF3100361A}.Release|x64.ActiveCfg = Release|Any CPU + {0DD93214-DCD4-4588-A33F-E4AF3100361A}.Release|x64.Build.0 = Release|Any CPU + {0DD93214-DCD4-4588-A33F-E4AF3100361A}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any CPU + {0DD93214-DCD4-4588-A33F-E4AF3100361A}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU + {0DD93214-DCD4-4588-A33F-E4AF3100361A}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU + {0DD93214-DCD4-4588-A33F-E4AF3100361A}.RelWithDebInfo|x64.Build.0 = Release|Any CPU + {072FD9B6-F2C5-48BC-9209-F2A8F7500345}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{072FD9B6-F2C5-48BC-9209-F2A8F7500345}.Debug|Any CPU.Build.0 = Debug|Any CPU + {072FD9B6-F2C5-48BC-9209-F2A8F7500345}.Debug|x64.ActiveCfg = Debug|Any CPU + {072FD9B6-F2C5-48BC-9209-F2A8F7500345}.Debug|x64.Build.0 = Debug|Any CPU + {072FD9B6-F2C5-48BC-9209-F2A8F7500345}.Release|Any CPU.ActiveCfg = Release|Any CPU + {072FD9B6-F2C5-48BC-9209-F2A8F7500345}.Release|Any CPU.Build.0 = Release|Any CPU + {072FD9B6-F2C5-48BC-9209-F2A8F7500345}.Release|x64.ActiveCfg = Release|Any CPU + {072FD9B6-F2C5-48BC-9209-F2A8F7500345}.Release|x64.Build.0 = Release|Any CPU + {072FD9B6-F2C5-48BC-9209-F2A8F7500345}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any CPU + {072FD9B6-F2C5-48BC-9209-F2A8F7500345}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU + {072FD9B6-F2C5-48BC-9209-F2A8F7500345}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU + {072FD9B6-F2C5-48BC-9209-F2A8F7500345}.RelWithDebInfo|x64.Build.0 = Release|Any CPU + {2E23150D-244A-4950-B50C-135225924374}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2E23150D-244A-4950-B50C-135225924374}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2E23150D-244A-4950-B50C-135225924374}.Debug|x64.ActiveCfg = Debug|Any CPU + {2E23150D-244A-4950-B50C-135225924374}.Debug|x64.Build.0 = Debug|Any CPU + {2E23150D-244A-4950-B50C-135225924374}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2E23150D-244A-4950-B50C-135225924374}.Release|Any CPU.Build.0 = Release|Any CPU + {2E23150D-244A-4950-B50C-135225924374}.Release|x64.ActiveCfg = Release|Any CPU + {2E23150D-244A-4950-B50C-135225924374}.Release|x64.Build.0 = Release|Any CPU + {2E23150D-244A-4950-B50C-135225924374}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any CPU + {2E23150D-244A-4950-B50C-135225924374}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU + {2E23150D-244A-4950-B50C-135225924374}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU + {2E23150D-244A-4950-B50C-135225924374}.RelWithDebInfo|x64.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/StatParser/StatChecker.cs b/StatParser/StatChecker.cs index 6bd80207..79565001 100644 --- a/StatParser/StatChecker.cs +++ b/StatParser/StatChecker.cs @@ -96,10 +96,10 @@ private void CompilationDiagnostic(StatLoadingError message) Console.Write("WARN "); } - if (message.Path != null) + if (message.Location != null) { - var baseName = Path.GetFileName(message.Path); - Console.Write($"{baseName}:{message.Line}: "); + var baseName = Path.GetFileName(message.Location.FileName); + Console.Write($"{baseName}:{message.Location.StartLine}: "); } Console.WriteLine("[{0}] {1}", message.Code, message.Message); @@ -141,7 +141,7 @@ public void Check(List mods, List dependencies, List pac } Loader.ResolveUsageRef(); - Loader.InstantiateEntries(); + Loader.ValidateEntries(); Context.Errors.Clear(); @@ -151,7 +151,7 @@ public void Check(List mods, List dependencies, List pac } Loader.ResolveUsageRef(); - Loader.InstantiateEntries(); + Loader.ValidateEntries(); foreach (var message in Context.Errors) { From 6ba2a1cb203b7a523d600d7f10b2fb2516aa17f1 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Wed, 10 Jan 2024 23:38:01 +0100 Subject: [PATCH 080/139] Fix stitching logic for partial tiles --- LSLib/VirtualTextures/Build.cs | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/LSLib/VirtualTextures/Build.cs b/LSLib/VirtualTextures/Build.cs index 8043613b..535bd98a 100644 --- a/LSLib/VirtualTextures/Build.cs +++ b/LSLib/VirtualTextures/Build.cs @@ -631,21 +631,24 @@ private void StitchTiles(BuildLevel level, 
int layer, int x, int y, BC5Image mip
int sourceY = 0;
for (var tileY = firstTileY; tileY <= lastTileY; tileY++)
{
+ var tileYPixelsMin = tileY * BuildData.RawTileHeight;
+ var tileYPixelsMax = tileYPixelsMin + BuildData.RawTileHeight;
+
+ var stitchYMin = Clamp(y, tileYPixelsMin, tileYPixelsMax);
+ var stitchYMax = Clamp(y + mip.Height, tileYPixelsMin, tileYPixelsMax);
+
+ var stitchH = stitchYMax - stitchYMin;
+
int sourceX = 0;
for (var tileX = firstTileX; tileX <= lastTileX; tileX++)
{
var tileXPixelsMin = tileX * BuildData.RawTileWidth;
- var tileYPixelsMin = tileY * BuildData.RawTileHeight;
var tileXPixelsMax = tileXPixelsMin + BuildData.RawTileWidth;
- var tileYPixelsMax = tileYPixelsMin + BuildData.RawTileHeight;
var stitchXMin = Clamp(x, tileXPixelsMin, tileXPixelsMax);
- var stitchYMin = Clamp(y, tileYPixelsMin, tileYPixelsMax);
var stitchXMax = Clamp(x + mip.Width, tileXPixelsMin, tileXPixelsMax);
- var stitchYMax = Clamp(y + mip.Height, tileYPixelsMin, tileYPixelsMax);
var stitchW = stitchXMax - stitchXMin;
- var stitchH = stitchYMax - stitchYMin;
// GIGA JANK
if (stitchW >= 4 && stitchH >= 4)
@@ -660,10 +663,10 @@ private void StitchTiles(BuildLevel level, int layer, int x, int y, BC5Image mip
);
}
- sourceX += BuildData.RawTileWidth;
+ sourceX += stitchW;
}
- sourceY += BuildData.RawTileHeight;
+ sourceY += stitchH;
}
}
From dd24b90089cde38268d10d53806623b71cce74c6 Mon Sep 17 00:00:00 2001
From: Norbyte
Date: Sat, 20 Jan 2024 13:29:20 +0100
Subject: [PATCH 081/139] Add Zstd compression
---
ConverterApp/PackagePane.Designer.cs | 6 +++-
ConverterApp/PackagePane.cs | 18 ++++++++++++
LSLib/LS/BinUtils.cs | 43 +++++++++++++++++++++++-----
LSLib/LS/Enums/Compression.cs | 6 +++-
LSLib/LSLib.csproj | 1 +
5 files changed, 65 insertions(+), 9 deletions(-)
diff --git a/ConverterApp/PackagePane.Designer.cs b/ConverterApp/PackagePane.Designer.cs
index 5c213f96..fb5912fb 100644
--- a/ConverterApp/PackagePane.Designer.cs
+++ b/ConverterApp/PackagePane.Designer.cs
@@ -349,7 +349,11 @@ private void InitializeComponent()
"Zlib Fast",
"Zlib Optimal",
"LZ4",
- "LZ4 HC"});
+ "LZ4 HC",
+ "ZStd Fast",
+ "ZStd Optimal",
+ "ZStd Max"
+ });
this.compressionMethod.Location = new System.Drawing.Point(353, 156);
this.compressionMethod.Margin = new System.Windows.Forms.Padding(4);
this.compressionMethod.Name = "compressionMethod";
diff --git a/ConverterApp/PackagePane.cs b/ConverterApp/PackagePane.cs
index 6e227b88..c16dd276 100644
--- a/ConverterApp/PackagePane.cs
+++ b/ConverterApp/PackagePane.cs
@@ -146,6 +146,24 @@ private void createPackageBtn_Click(object sender, EventArgs e)
build.Compression = CompressionMethod.LZ4;
break;
}
+ case 5:
+ {
+ build.Compression = CompressionMethod.Zstd;
+ build.CompressionLevel = LSCompressionLevel.Fast;
+ break;
+ }
+ case 6:
+ {
+ build.Compression = CompressionMethod.Zstd;
+ build.CompressionLevel = LSCompressionLevel.Default;
+ break;
+ }
+ case 7:
+ {
+ build.Compression = CompressionMethod.Zstd;
+ build.CompressionLevel = LSCompressionLevel.Max;
+ break;
+ }
}
// Fallback to Zlib, if the package version doesn't support LZ4
diff --git a/LSLib/LS/BinUtils.cs b/LSLib/LS/BinUtils.cs
index 34da3a41..74e86037 100644
--- a/LSLib/LS/BinUtils.cs
+++ b/LSLib/LS/BinUtils.cs
@@ -438,13 +438,7 @@ public static byte[] Decompress(byte[] compressed, int decompressedSize, Compres
using (var decompressedStream = new MemoryStream())
using (var stream = new ZLibStream(compressedStream, CompressionMode.Decompress))
{
- byte[] buf = new byte[0x10000];
- int length = 0;
- while ((length = stream.Read(buf, 0, buf.Length)) > 0)
- {
- decompressedStream.Write(buf, 0, length);
- }
-
+ stream.CopyTo(decompressedStream);
return decompressedStream.ToArray();
}
}
@@ -467,6 +461,17 @@ public static byte[] Decompress(byte[] compressed, int decompressedSize, Compres
return decompressed;
}
+ case CompressionMethod.Zstd:
+ {
+ using (var compressedStream = new MemoryStream(compressed))
+ using (var decompressedStream = new MemoryStream())
+ using (var stream = new ZstdSharp.DecompressionStream(compressedStream))
+ {
+ stream.CopyTo(decompressedStream);
+ return decompressedStream.ToArray();
+ }
+ }
+
default:
throw new InvalidDataException($"No decompressor found for this format: {compression}");
}
@@ -493,6 +498,10 @@ public static Stream Decompress(MemoryMappedFile file, MemoryMappedViewAccessor
case CompressionMethod.LZ4:
return new LZ4DecompressionStream(view, sourceOffset, sourceSize, decompressedSize);
+ case CompressionMethod.Zstd:
+ var zstdStream = file.CreateViewStream(sourceOffset, sourceSize, MemoryMappedFileAccess.Read);
+ return new ZstdSharp.DecompressionStream(zstdStream);
+
default:
throw new InvalidDataException($"No decompressor found for this format: {compression}");
}
@@ -510,6 +519,7 @@ public static byte[] Compress(byte[] uncompressed, CompressionMethod method, LSC
CompressionMethod.None => uncompressed,
CompressionMethod.Zlib => CompressZlib(uncompressed, level),
CompressionMethod.LZ4 => CompressLZ4(uncompressed, level, chunked),
+ CompressionMethod.Zstd => CompressZstd(uncompressed, level),
_ => throw new ArgumentException("Invalid compression method specified")
};
}
@@ -549,4 +559,23 @@ public static byte[] CompressLZ4(byte[] uncompressed, LSCompressionLevel compres
return LZ4Codec.EncodeHC(uncompressed, 0, uncompressed.Length);
}
}
+
+ public static byte[] CompressZstd(byte[] uncompressed, LSCompressionLevel level)
+ {
+ var zLevel = level switch
+ {
+ LSCompressionLevel.Fast => 3,
+ LSCompressionLevel.Default => 9,
+ LSCompressionLevel.Max => 22,
+ _ => throw new ArgumentException()
+ };
+
+ using var outputStream = new MemoryStream();
+ using (var compressor = new ZstdSharp.CompressionStream(outputStream, zLevel, 0, true))
+ {
+ compressor.Write(uncompressed, 0, uncompressed.Length);
+ }
+
+ return outputStream.ToArray();
+ }
}
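The BinUtils.cs hunks above route the new CompressionMethod.Zstd value through ZstdSharp streams: Decompress gains a Zstd branch built on ZstdSharp.DecompressionStream, and CompressZstd maps LSCompressionLevel Fast/Default/Max to Zstd levels 3/9/22 before writing through a ZstdSharp.CompressionStream. A minimal round-trip sketch of that path follows; it is not part of the patch, it assumes the ZstdSharp.Port package (the exact package reference line added to LSLib.csproj is not visible here), and it calls ZstdSharp directly rather than the LSLib entry points.

    // Hedged sketch: the same ZstdSharp calls as CompressZstd and the Zstd branch
    // of Decompress above; level 9 mirrors the LSCompressionLevel.Default mapping.
    using System;
    using System.IO;

    static class ZstdRoundTripSketch
    {
        static byte[] CompressZstd(byte[] uncompressed, int zLevel = 9)
        {
            using var outputStream = new MemoryStream();
            // bufferSize = 0 and leaveOpen = true, as in the patch
            using (var compressor = new ZstdSharp.CompressionStream(outputStream, zLevel, 0, true))
            {
                compressor.Write(uncompressed, 0, uncompressed.Length);
            }
            return outputStream.ToArray();
        }

        static byte[] DecompressZstd(byte[] compressed)
        {
            using var compressedStream = new MemoryStream(compressed);
            using var decompressedStream = new MemoryStream();
            using (var stream = new ZstdSharp.DecompressionStream(compressedStream))
            {
                stream.CopyTo(decompressedStream);
            }
            return decompressedStream.ToArray();
        }

        static void Main()
        {
            var payload = new byte[64 * 1024];
            for (var i = 0; i < payload.Length; i++) payload[i] = (byte)(i % 16); // compressible test data

            var packed = CompressZstd(payload);
            var unpacked = DecompressZstd(packed);
            Console.WriteLine($"match={payload.AsSpan().SequenceEqual(unpacked)}, {payload.Length} -> {packed.Length} bytes");
        }
    }

The Fast/Default/Max level mapping corresponds to the three "ZStd" entries added to the ConverterApp compression drop-down above (Fast, Optimal, Max).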
diff --git a/LSLib/LS/Enums/Compression.cs b/LSLib/LS/Enums/Compression.cs
index aa8884f6..95b7cff2 100644
--- a/LSLib/LS/Enums/Compression.cs
+++ b/LSLib/LS/Enums/Compression.cs
@@ -6,7 +6,8 @@ public enum CompressionMethod
{
None,
Zlib,
- LZ4
+ LZ4,
+ Zstd
};
public enum LSCompressionLevel
@@ -21,6 +22,7 @@ public enum CompressionFlags : byte
MethodNone = 0,
MethodZlib = 1,
MethodLZ4 = 2,
+ MethodZstd = 3,
FastCompress = 0x10,
DefaultCompress = 0x20,
MaxCompress = 0x40
@@ -35,6 +37,7 @@ public static CompressionMethod Method(this CompressionFlags f)
CompressionFlags.MethodNone => CompressionMethod.None,
CompressionFlags.MethodZlib => CompressionMethod.Zlib,
CompressionFlags.MethodLZ4 => CompressionMethod.LZ4,
+ CompressionFlags.MethodZstd => CompressionMethod.Zstd,
_ => throw new NotSupportedException($"Unsupported compression method: {(byte)f & 0x0F}")
};
}
@@ -58,6 +61,7 @@ public static CompressionFlags ToFlags(this CompressionMethod method)
CompressionMethod.None => CompressionFlags.MethodNone,
CompressionMethod.Zlib => CompressionFlags.MethodZlib,
CompressionMethod.LZ4 => CompressionFlags.MethodLZ4,
+ CompressionMethod.Zstd => CompressionFlags.MethodZstd,
_ => throw new NotSupportedException($"Unsupported compression
method: {method}") }; } diff --git a/LSLib/LSLib.csproj b/LSLib/LSLib.csproj index 64702a01..5ebfef5a 100644 --- a/LSLib/LSLib.csproj +++ b/LSLib/LSLib.csproj @@ -32,6 +32,7 @@ + From 7c88ba5aa3030a438ec88177716be68f424279db Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sun, 21 Jan 2024 12:53:48 +0100 Subject: [PATCH 082/139] Simplify imports --- LSLib/Granny/Collada.cs | 5 +- LSLib/Granny/ColladaAnimation.cs | 5 +- LSLib/Granny/GR2/Format.cs | 7 +- LSLib/Granny/GR2/Helpers.cs | 4 +- LSLib/Granny/GR2/Reader.cs | 5 - LSLib/Granny/GR2/Writer.cs | 8 +- LSLib/Granny/GR2Utils.cs | 6 +- LSLib/Granny/Model/Animation.cs | 5 +- LSLib/Granny/Model/ColladaExporter.cs | 7 +- LSLib/Granny/Model/ColladaHelpers.cs | 2 - LSLib/Granny/Model/ColladaImporter.cs | 6 +- LSLib/Granny/Model/ColladaMesh.cs | 5 +- .../Model/CurveData/AnimationCurveData.cs | 4 +- LSLib/Granny/Model/CurveData/D3Constant32f.cs | 4 +- LSLib/Granny/Model/CurveData/D3I1K16uC16u.cs | 4 +- LSLib/Granny/Model/CurveData/D3I1K32fC32f.cs | 4 +- LSLib/Granny/Model/CurveData/D3I1K8uC8u.cs | 4 +- LSLib/Granny/Model/CurveData/D3K16uC16u.cs | 4 +- LSLib/Granny/Model/CurveData/D3K8uC8u.cs | 4 +- LSLib/Granny/Model/CurveData/D4Constant32f.cs | 4 +- LSLib/Granny/Model/CurveData/D4nK16uC15u.cs | 4 +- LSLib/Granny/Model/CurveData/D4nK8uC7u.cs | 4 +- LSLib/Granny/Model/CurveData/D9I1K16uC16u.cs | 4 +- LSLib/Granny/Model/CurveData/D9I1K8uC8u.cs | 4 +- LSLib/Granny/Model/CurveData/D9I3K16uC16u.cs | 4 +- LSLib/Granny/Model/CurveData/D9I3K8uC8u.cs | 4 +- LSLib/Granny/Model/CurveData/DaConstant32f.cs | 4 +- LSLib/Granny/Model/CurveData/DaIdentity.cs | 4 +- LSLib/Granny/Model/CurveData/DaK16uC16u.cs | 4 +- LSLib/Granny/Model/CurveData/DaK32fC32f.cs | 5 +- LSLib/Granny/Model/CurveData/DaK8uC8u.cs | 4 +- .../Granny/Model/CurveData/DaKeyframes32f.cs | 5 +- LSLib/Granny/Model/DivinityMesh.cs | 2 - LSLib/Granny/Model/Exporter.cs | 4 - LSLib/Granny/Model/Mesh.cs | 6 +- LSLib/Granny/Model/Metadata.cs | 3 +- LSLib/Granny/Model/Model.cs | 3 +- LSLib/Granny/Model/Root.cs | 5 +- LSLib/Granny/Model/Skeleton.cs | 5 +- LSLib/Granny/Model/Vertex.cs | 2 - LSLib/Granny/Model/VertexSerialization.cs | 2 - LSLib/Granny/Utils.cs | 1 - LSLib/LS/BinUtils.cs | 7 -- LSLib/LS/Common.cs | 99 ++++++++++--------- LSLib/LS/Enums/Compression.cs | 4 +- LSLib/LS/Enums/Game.cs | 1 - LSLib/LS/Enums/PackageVersion.cs | 4 +- LSLib/LS/FileManager.cs | 5 +- LSLib/LS/Localization.cs | 7 +- LSLib/LS/Matrix.cs | 1 - LSLib/LS/Mods/ModResources.cs | 27 ++++- LSLib/LS/PackageCommon.cs | 7 +- LSLib/LS/PackageFormat.cs | 6 +- LSLib/LS/PackageReader.cs | 11 +-- LSLib/LS/PackageWriter.cs | 8 +- LSLib/LS/Resource.cs | 7 +- LSLib/LS/ResourceUtils.cs | 5 +- LSLib/LS/Resources/LSB/LSBReader.cs | 6 +- LSLib/LS/Resources/LSB/LSBWriter.cs | 6 +- LSLib/LS/Resources/LSF/LSFCommon.cs | 5 +- LSLib/LS/Resources/LSF/LSFReader.cs | 4 - LSLib/LS/Resources/LSF/LSFWriter.cs | 6 +- LSLib/LS/Resources/LSJ/LSJReader.cs | 4 +- .../LS/Resources/LSJ/LSJResourceConverter.cs | 5 +- LSLib/LS/Resources/LSJ/LSJWriter.cs | 3 +- LSLib/LS/Resources/LSX/LSXReader.cs | 5 - LSLib/LS/Resources/LSX/LSXWriter.cs | 2 - LSLib/LS/Save/SavegameHelpers.cs | 4 - LSLib/LS/Save/VariableManager.cs | 5 - LSLib/LS/Stats/Parser/PropertyDefinitions.cs | 6 +- LSLib/LS/Stats/Parser/StatNodes.cs | 2 - LSLib/LS/Stats/Parser/StatParser.cs | 2 - LSLib/LS/Stats/Parser/StatPropertyParser.cs | 4 - LSLib/LS/Stats/ShiftReduceParser.cs | 4 - LSLib/LS/Stats/StatDefinitions.cs | 7 +- LSLib/LS/Stats/StatFileParser.cs | 7 -- LSLib/LS/Stats/StatValueParsers.cs | 6 -- 
LSLib/LS/Story/Adapter.cs | 6 +- LSLib/LS/Story/Call.cs | 6 +- LSLib/LS/Story/Common.cs | 9 +- LSLib/LS/Story/Compiler/CompilationContext.cs | 2 - LSLib/LS/Story/Compiler/Compiler.cs | 5 +- LSLib/LS/Story/Compiler/DebugInfo.cs | 7 +- LSLib/LS/Story/Compiler/HeaderLoader.cs | 6 -- LSLib/LS/Story/Compiler/IR.cs | 5 - LSLib/LS/Story/Compiler/IRGenerator.cs | 6 -- LSLib/LS/Story/Compiler/Preprocessor.cs | 8 +- LSLib/LS/Story/Compiler/StoryEmitter.cs | 6 -- LSLib/LS/Story/DataNode.cs | 5 +- LSLib/LS/Story/Database.cs | 5 +- LSLib/LS/Story/DatabaseNode.cs | 4 +- LSLib/LS/Story/DebugExport.cs | 6 +- LSLib/LS/Story/Function.cs | 6 +- LSLib/LS/Story/Goal.cs | 6 +- LSLib/LS/Story/GoalParser/ASTNodes.cs | 2 - LSLib/LS/Story/GoalParser/GoalParser.cs | 3 - LSLib/LS/Story/HeaderParser/ASTNodes.cs | 2 - LSLib/LS/Story/HeaderParser/HeaderParser.cs | 1 - LSLib/LS/Story/Join.cs | 4 +- LSLib/LS/Story/Node.cs | 5 +- LSLib/LS/Story/Proc.cs | 4 +- LSLib/LS/Story/Query.cs | 4 +- LSLib/LS/Story/Reference.cs | 5 +- LSLib/LS/Story/Rel.cs | 4 +- LSLib/LS/Story/RelOp.cs | 5 +- LSLib/LS/Story/Rule.cs | 6 +- LSLib/LS/Story/Story.cs | 7 +- LSLib/LS/Story/Value.cs | 7 +- LSLib/LS/VFS.cs | 9 +- LSLib/VirtualTextures/BC5Image.cs | 6 +- LSLib/VirtualTextures/Build.cs | 7 +- LSLib/VirtualTextures/Compression.cs | 4 +- LSLib/VirtualTextures/Geometry.cs | 5 +- LSLib/VirtualTextures/PageFile.cs | 3 - LSLib/VirtualTextures/PageFileBuild.cs | 5 - LSLib/VirtualTextures/VirtualTexture.cs | 4 - .../VirtualTextures/VirtualTextureFormats.cs | 7 +- 117 files changed, 158 insertions(+), 521 deletions(-) diff --git a/LSLib/Granny/Collada.cs b/LSLib/Granny/Collada.cs index dccccc04..1f188aa0 100644 --- a/LSLib/Granny/Collada.cs +++ b/LSLib/Granny/Collada.cs @@ -1,7 +1,4 @@ -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Security.Cryptography; +using System.Security.Cryptography; namespace LSLib.Granny; diff --git a/LSLib/Granny/ColladaAnimation.cs b/LSLib/Granny/ColladaAnimation.cs index e91f5f37..fa2b9d75 100644 --- a/LSLib/Granny/ColladaAnimation.cs +++ b/LSLib/Granny/ColladaAnimation.cs @@ -1,7 +1,4 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using LSLib.Granny.GR2; +using LSLib.Granny.GR2; using LSLib.Granny.Model; using OpenTK.Mathematics; diff --git a/LSLib/Granny/GR2/Format.cs b/LSLib/Granny/GR2/Format.cs index 04f437f5..62efd541 100644 --- a/LSLib/Granny/GR2/Format.cs +++ b/LSLib/Granny/GR2/Format.cs @@ -1,10 +1,5 @@ -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.Linq; -using System.Text; +using System.Diagnostics; using OpenTK.Mathematics; -using System.IO; using System.Reflection; using System.IO.Hashing; diff --git a/LSLib/Granny/GR2/Helpers.cs b/LSLib/Granny/GR2/Helpers.cs index 3f2cc86d..cfeceb60 100644 --- a/LSLib/Granny/GR2/Helpers.cs +++ b/LSLib/Granny/GR2/Helpers.cs @@ -1,6 +1,4 @@ -using System; -using System.Collections.Generic; -using System.Linq.Expressions; +using System.Linq.Expressions; namespace LSLib.Granny.GR2; diff --git a/LSLib/Granny/GR2/Reader.cs b/LSLib/Granny/GR2/Reader.cs index b1324e5c..d93e7c2a 100644 --- a/LSLib/Granny/GR2/Reader.cs +++ b/LSLib/Granny/GR2/Reader.cs @@ -1,12 +1,7 @@ // #define DEBUG_GR2_SERIALIZATION // #define DEBUG_GR2_FORMAT_DIFFERENCES -using System; -using System.IO; -using System.Collections.Generic; using System.Diagnostics; -using System.Linq; -using System.Text; using LSLib.Native; namespace LSLib.Granny.GR2; diff --git a/LSLib/Granny/GR2/Writer.cs 
b/LSLib/Granny/GR2/Writer.cs index 2c1d207d..874e05b8 100644 --- a/LSLib/Granny/GR2/Writer.cs +++ b/LSLib/Granny/GR2/Writer.cs @@ -1,10 +1,4 @@ -using System; -using System.IO; -using System.Collections.Generic; -using System.Diagnostics; -using System.Linq; -using System.Text; -using OpenTK; +using System.Diagnostics; namespace LSLib.Granny.GR2; diff --git a/LSLib/Granny/GR2Utils.cs b/LSLib/Granny/GR2Utils.cs index 03c5db71..532630e6 100644 --- a/LSLib/Granny/GR2Utils.cs +++ b/LSLib/Granny/GR2Utils.cs @@ -1,8 +1,4 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using LSLib.Granny.GR2; +using LSLib.Granny.GR2; using LSLib.Granny.Model; using LSLib.LS; diff --git a/LSLib/Granny/Model/Animation.cs b/LSLib/Granny/Model/Animation.cs index 4c6c2d7d..1e0e38fa 100644 --- a/LSLib/Granny/Model/Animation.cs +++ b/LSLib/Granny/Model/Animation.cs @@ -1,7 +1,4 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using OpenTK.Mathematics; +using OpenTK.Mathematics; using LSLib.Granny.Model.CurveData; using LSLib.Granny.GR2; diff --git a/LSLib/Granny/Model/ColladaExporter.cs b/LSLib/Granny/Model/ColladaExporter.cs index c5cb3887..52583a0f 100644 --- a/LSLib/Granny/Model/ColladaExporter.cs +++ b/LSLib/Granny/Model/ColladaExporter.cs @@ -1,12 +1,7 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using LSLib.Granny.GR2; +using LSLib.Granny.GR2; using LSLib.LS; using System.Xml; -using System.Xml.Linq; using LSLib.LS.Enums; -using System.IO; namespace LSLib.Granny.Model; diff --git a/LSLib/Granny/Model/ColladaHelpers.cs b/LSLib/Granny/Model/ColladaHelpers.cs index c477662c..ab133b12 100644 --- a/LSLib/Granny/Model/ColladaHelpers.cs +++ b/LSLib/Granny/Model/ColladaHelpers.cs @@ -1,7 +1,5 @@ using LSLib.Granny.GR2; using OpenTK.Mathematics; -using System; -using System.Collections.Generic; namespace LSLib.Granny.Model; diff --git a/LSLib/Granny/Model/ColladaImporter.cs b/LSLib/Granny/Model/ColladaImporter.cs index 9424be80..5127c370 100644 --- a/LSLib/Granny/Model/ColladaImporter.cs +++ b/LSLib/Granny/Model/ColladaImporter.cs @@ -1,8 +1,4 @@ -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.IO; -using System.Linq; +using System.Diagnostics; using LSLib.Granny.GR2; using LSLib.LS; using OpenTK.Mathematics; diff --git a/LSLib/Granny/Model/ColladaMesh.cs b/LSLib/Granny/Model/ColladaMesh.cs index 30f5692b..bf4873e7 100644 --- a/LSLib/Granny/Model/ColladaMesh.cs +++ b/LSLib/Granny/Model/ColladaMesh.cs @@ -1,7 +1,4 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using OpenTK.Mathematics; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model; diff --git a/LSLib/Granny/Model/CurveData/AnimationCurveData.cs b/LSLib/Granny/Model/CurveData/AnimationCurveData.cs index efed85e2..51230234 100644 --- a/LSLib/Granny/Model/CurveData/AnimationCurveData.cs +++ b/LSLib/Granny/Model/CurveData/AnimationCurveData.cs @@ -1,6 +1,4 @@ -using System; -using System.Collections.Generic; -using OpenTK.Mathematics; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData; diff --git a/LSLib/Granny/Model/CurveData/D3Constant32f.cs b/LSLib/Granny/Model/CurveData/D3Constant32f.cs index 005d4fad..21c888cc 100644 --- a/LSLib/Granny/Model/CurveData/D3Constant32f.cs +++ b/LSLib/Granny/Model/CurveData/D3Constant32f.cs @@ -1,6 +1,4 @@ -using System; -using System.Collections.Generic; -using OpenTK.Mathematics; +using OpenTK.Mathematics; 
using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData; diff --git a/LSLib/Granny/Model/CurveData/D3I1K16uC16u.cs b/LSLib/Granny/Model/CurveData/D3I1K16uC16u.cs index 582365b6..924c3e8f 100644 --- a/LSLib/Granny/Model/CurveData/D3I1K16uC16u.cs +++ b/LSLib/Granny/Model/CurveData/D3I1K16uC16u.cs @@ -1,6 +1,4 @@ -using System; -using System.Collections.Generic; -using OpenTK.Mathematics; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData; diff --git a/LSLib/Granny/Model/CurveData/D3I1K32fC32f.cs b/LSLib/Granny/Model/CurveData/D3I1K32fC32f.cs index c6f8d09f..2472ebab 100644 --- a/LSLib/Granny/Model/CurveData/D3I1K32fC32f.cs +++ b/LSLib/Granny/Model/CurveData/D3I1K32fC32f.cs @@ -1,6 +1,4 @@ -using System; -using System.Collections.Generic; -using OpenTK.Mathematics; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData; diff --git a/LSLib/Granny/Model/CurveData/D3I1K8uC8u.cs b/LSLib/Granny/Model/CurveData/D3I1K8uC8u.cs index e9abb44d..0d9d1c47 100644 --- a/LSLib/Granny/Model/CurveData/D3I1K8uC8u.cs +++ b/LSLib/Granny/Model/CurveData/D3I1K8uC8u.cs @@ -1,6 +1,4 @@ -using System; -using System.Collections.Generic; -using OpenTK.Mathematics; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData; diff --git a/LSLib/Granny/Model/CurveData/D3K16uC16u.cs b/LSLib/Granny/Model/CurveData/D3K16uC16u.cs index d8e86537..27906a06 100644 --- a/LSLib/Granny/Model/CurveData/D3K16uC16u.cs +++ b/LSLib/Granny/Model/CurveData/D3K16uC16u.cs @@ -1,6 +1,4 @@ -using System; -using System.Collections.Generic; -using OpenTK.Mathematics; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData; diff --git a/LSLib/Granny/Model/CurveData/D3K8uC8u.cs b/LSLib/Granny/Model/CurveData/D3K8uC8u.cs index 0f4c933f..db04a381 100644 --- a/LSLib/Granny/Model/CurveData/D3K8uC8u.cs +++ b/LSLib/Granny/Model/CurveData/D3K8uC8u.cs @@ -1,6 +1,4 @@ -using System; -using System.Collections.Generic; -using OpenTK.Mathematics; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData; diff --git a/LSLib/Granny/Model/CurveData/D4Constant32f.cs b/LSLib/Granny/Model/CurveData/D4Constant32f.cs index 5fb949ae..ea40c05f 100644 --- a/LSLib/Granny/Model/CurveData/D4Constant32f.cs +++ b/LSLib/Granny/Model/CurveData/D4Constant32f.cs @@ -1,6 +1,4 @@ -using System; -using System.Collections.Generic; -using OpenTK.Mathematics; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData; diff --git a/LSLib/Granny/Model/CurveData/D4nK16uC15u.cs b/LSLib/Granny/Model/CurveData/D4nK16uC15u.cs index 1b41e41a..2726b9cf 100644 --- a/LSLib/Granny/Model/CurveData/D4nK16uC15u.cs +++ b/LSLib/Granny/Model/CurveData/D4nK16uC15u.cs @@ -1,6 +1,4 @@ -using System; -using System.Collections.Generic; -using OpenTK.Mathematics; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData; diff --git a/LSLib/Granny/Model/CurveData/D4nK8uC7u.cs b/LSLib/Granny/Model/CurveData/D4nK8uC7u.cs index 47b91590..621ee1ff 100644 --- a/LSLib/Granny/Model/CurveData/D4nK8uC7u.cs +++ b/LSLib/Granny/Model/CurveData/D4nK8uC7u.cs @@ -1,6 +1,4 @@ -using System; -using System.Collections.Generic; -using OpenTK.Mathematics; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData; diff --git a/LSLib/Granny/Model/CurveData/D9I1K16uC16u.cs b/LSLib/Granny/Model/CurveData/D9I1K16uC16u.cs index 63ad5fc5..062cf827 100644 --- 
a/LSLib/Granny/Model/CurveData/D9I1K16uC16u.cs +++ b/LSLib/Granny/Model/CurveData/D9I1K16uC16u.cs @@ -1,6 +1,4 @@ -using System; -using System.Collections.Generic; -using OpenTK.Mathematics; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData; diff --git a/LSLib/Granny/Model/CurveData/D9I1K8uC8u.cs b/LSLib/Granny/Model/CurveData/D9I1K8uC8u.cs index bff9f53d..c9f4729e 100644 --- a/LSLib/Granny/Model/CurveData/D9I1K8uC8u.cs +++ b/LSLib/Granny/Model/CurveData/D9I1K8uC8u.cs @@ -1,6 +1,4 @@ -using System; -using System.Collections.Generic; -using OpenTK.Mathematics; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData; diff --git a/LSLib/Granny/Model/CurveData/D9I3K16uC16u.cs b/LSLib/Granny/Model/CurveData/D9I3K16uC16u.cs index fa776453..e3e1f651 100644 --- a/LSLib/Granny/Model/CurveData/D9I3K16uC16u.cs +++ b/LSLib/Granny/Model/CurveData/D9I3K16uC16u.cs @@ -1,6 +1,4 @@ -using System; -using System.Collections.Generic; -using OpenTK.Mathematics; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData; diff --git a/LSLib/Granny/Model/CurveData/D9I3K8uC8u.cs b/LSLib/Granny/Model/CurveData/D9I3K8uC8u.cs index 22aeb973..7b6f7ba4 100644 --- a/LSLib/Granny/Model/CurveData/D9I3K8uC8u.cs +++ b/LSLib/Granny/Model/CurveData/D9I3K8uC8u.cs @@ -1,6 +1,4 @@ -using System; -using System.Collections.Generic; -using OpenTK.Mathematics; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData; diff --git a/LSLib/Granny/Model/CurveData/DaConstant32f.cs b/LSLib/Granny/Model/CurveData/DaConstant32f.cs index 991b954c..2d8352d9 100644 --- a/LSLib/Granny/Model/CurveData/DaConstant32f.cs +++ b/LSLib/Granny/Model/CurveData/DaConstant32f.cs @@ -1,6 +1,4 @@ -using System; -using System.Collections.Generic; -using OpenTK.Mathematics; +using OpenTK.Mathematics; using LSLib.Granny.GR2; using System.Diagnostics; diff --git a/LSLib/Granny/Model/CurveData/DaIdentity.cs b/LSLib/Granny/Model/CurveData/DaIdentity.cs index 65170715..acd151e5 100644 --- a/LSLib/Granny/Model/CurveData/DaIdentity.cs +++ b/LSLib/Granny/Model/CurveData/DaIdentity.cs @@ -1,6 +1,4 @@ -using System; -using System.Collections.Generic; -using OpenTK.Mathematics; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData; diff --git a/LSLib/Granny/Model/CurveData/DaK16uC16u.cs b/LSLib/Granny/Model/CurveData/DaK16uC16u.cs index d4048cbc..741fe25b 100644 --- a/LSLib/Granny/Model/CurveData/DaK16uC16u.cs +++ b/LSLib/Granny/Model/CurveData/DaK16uC16u.cs @@ -1,6 +1,4 @@ -using System; -using System.Collections.Generic; -using OpenTK.Mathematics; +using OpenTK.Mathematics; using System.Diagnostics; using LSLib.Granny.GR2; diff --git a/LSLib/Granny/Model/CurveData/DaK32fC32f.cs b/LSLib/Granny/Model/CurveData/DaK32fC32f.cs index c615bcfc..094f1ad8 100644 --- a/LSLib/Granny/Model/CurveData/DaK32fC32f.cs +++ b/LSLib/Granny/Model/CurveData/DaK32fC32f.cs @@ -1,7 +1,4 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using OpenTK.Mathematics; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData; diff --git a/LSLib/Granny/Model/CurveData/DaK8uC8u.cs b/LSLib/Granny/Model/CurveData/DaK8uC8u.cs index 0f78619d..deb3e33b 100644 --- a/LSLib/Granny/Model/CurveData/DaK8uC8u.cs +++ b/LSLib/Granny/Model/CurveData/DaK8uC8u.cs @@ -1,6 +1,4 @@ -using System; -using System.Collections.Generic; -using OpenTK.Mathematics; +using OpenTK.Mathematics; using 
System.Diagnostics; using LSLib.Granny.GR2; diff --git a/LSLib/Granny/Model/CurveData/DaKeyframes32f.cs b/LSLib/Granny/Model/CurveData/DaKeyframes32f.cs index 0195a179..22b006d2 100644 --- a/LSLib/Granny/Model/CurveData/DaKeyframes32f.cs +++ b/LSLib/Granny/Model/CurveData/DaKeyframes32f.cs @@ -1,7 +1,4 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using OpenTK.Mathematics; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model.CurveData; diff --git a/LSLib/Granny/Model/DivinityMesh.cs b/LSLib/Granny/Model/DivinityMesh.cs index 8ffa3590..3c15e006 100644 --- a/LSLib/Granny/Model/DivinityMesh.cs +++ b/LSLib/Granny/Model/DivinityMesh.cs @@ -1,6 +1,4 @@ using LSLib.Granny.GR2; -using System; -using System.Collections.Generic; namespace LSLib.Granny.Model; diff --git a/LSLib/Granny/Model/Exporter.cs b/LSLib/Granny/Model/Exporter.cs index bcebc546..560b508a 100644 --- a/LSLib/Granny/Model/Exporter.cs +++ b/LSLib/Granny/Model/Exporter.cs @@ -1,8 +1,4 @@ using LSLib.Granny.GR2; -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; using LSLib.LS; using LSLib.LS.Enums; diff --git a/LSLib/Granny/Model/Mesh.cs b/LSLib/Granny/Model/Mesh.cs index 89fb09a9..323a3cc0 100644 --- a/LSLib/Granny/Model/Mesh.cs +++ b/LSLib/Granny/Model/Mesh.cs @@ -1,8 +1,4 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using OpenTK.Mathematics; +using OpenTK.Mathematics; using LSLib.Granny.GR2; namespace LSLib.Granny.Model; diff --git a/LSLib/Granny/Model/Metadata.cs b/LSLib/Granny/Model/Metadata.cs index 345152af..c30f5efe 100644 --- a/LSLib/Granny/Model/Metadata.cs +++ b/LSLib/Granny/Model/Metadata.cs @@ -1,5 +1,4 @@ -using System; -using LSLib.Granny.GR2; +using LSLib.Granny.GR2; namespace LSLib.Granny.Model; diff --git a/LSLib/Granny/Model/Model.cs b/LSLib/Granny/Model/Model.cs index f3578f60..f64ad8c5 100644 --- a/LSLib/Granny/Model/Model.cs +++ b/LSLib/Granny/Model/Model.cs @@ -1,5 +1,4 @@ -using System.Collections.Generic; -using LSLib.Granny.GR2; +using LSLib.Granny.GR2; namespace LSLib.Granny.Model; diff --git a/LSLib/Granny/Model/Root.cs b/LSLib/Granny/Model/Root.cs index c54e819a..2ad246e2 100644 --- a/LSLib/Granny/Model/Root.cs +++ b/LSLib/Granny/Model/Root.cs @@ -1,7 +1,4 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using LSLib.Granny.GR2; +using LSLib.Granny.GR2; using OpenTK.Mathematics; namespace LSLib.Granny.Model; diff --git a/LSLib/Granny/Model/Skeleton.cs b/LSLib/Granny/Model/Skeleton.cs index 1b7de565..b26153fc 100644 --- a/LSLib/Granny/Model/Skeleton.cs +++ b/LSLib/Granny/Model/Skeleton.cs @@ -1,7 +1,4 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using OpenTK.Mathematics; +using OpenTK.Mathematics; using LSLib.Granny.GR2; using System.Xml; diff --git a/LSLib/Granny/Model/Vertex.cs b/LSLib/Granny/Model/Vertex.cs index b7fe53c9..18440bb3 100644 --- a/LSLib/Granny/Model/Vertex.cs +++ b/LSLib/Granny/Model/Vertex.cs @@ -1,7 +1,5 @@ using LSLib.Granny.GR2; using OpenTK.Mathematics; -using System; -using System.Collections.Generic; namespace LSLib.Granny.Model; diff --git a/LSLib/Granny/Model/VertexSerialization.cs b/LSLib/Granny/Model/VertexSerialization.cs index 5f7e3659..e3ab1e17 100644 --- a/LSLib/Granny/Model/VertexSerialization.cs +++ b/LSLib/Granny/Model/VertexSerialization.cs @@ -1,7 +1,5 @@ using LSLib.Granny.GR2; using OpenTK.Mathematics; -using System; -using System.Collections.Generic; using System.Reflection; 
using System.Reflection.Emit; diff --git a/LSLib/Granny/Utils.cs b/LSLib/Granny/Utils.cs index f6d42d7e..d33b6fe8 100644 --- a/LSLib/Granny/Utils.cs +++ b/LSLib/Granny/Utils.cs @@ -1,5 +1,4 @@ namespace LSLib.Granny; - abstract class Utils { public static void Warn(string message) diff --git a/LSLib/LS/BinUtils.cs b/LSLib/LS/BinUtils.cs index 74e86037..52735f4d 100644 --- a/LSLib/LS/BinUtils.cs +++ b/LSLib/LS/BinUtils.cs @@ -1,12 +1,5 @@ using LZ4; -using System; -using System.IO; -using System.Runtime.InteropServices; -using LSLib.LS.Enums; using System.IO.Compression; -using System.Text; -using System.Threading.Tasks; -using System.Threading; using System.IO.MemoryMappedFiles; namespace LSLib.LS; diff --git a/LSLib/LS/Common.cs b/LSLib/LS/Common.cs index f03f3743..3b8a6b63 100644 --- a/LSLib/LS/Common.cs +++ b/LSLib/LS/Common.cs @@ -1,46 +1,53 @@ -using System; -using System.Text.RegularExpressions; - -namespace LSLib.LS; - -public static class Common -{ - public const int MajorVersion = 1; - - public const int MinorVersion = 19; - - public const int PatchVersion = 3; - - // Version of LSTools profile data in generated DAE files - public const int ColladaMetadataVersion = 3; - - /// - /// Returns the version number of the LSLib library - /// - public static string LibraryVersion() - { - return String.Format("{0}.{1}.{2}", MajorVersion, MinorVersion, PatchVersion); - } - - /// - /// Compares the string against a given pattern. - /// - /// The string - /// The pattern to match, where "*" means any sequence of characters, and "?" means any single character - /// true if the string matches the given pattern; otherwise false. - public static bool Like(this string str, string pattern) - { - return new Regex("^" + Regex.Escape(pattern).Replace(@"\*", ".*").Replace(@"\?", ".") + "$", RegexOptions.Singleline).IsMatch(str); - } - - /// - /// Compares the string against a given pattern. - /// - /// The string - /// The pattern to match as a RegEx object - /// true if the string matches the given pattern; otherwise false. - public static bool Like(this string str, Regex pattern) - { - return pattern.IsMatch(str); - } -} +global using System; +global using System.Collections.Generic; +global using System.IO; +global using System.Linq; +global using System.Runtime.InteropServices; +global using System.Text; +global using System.Threading.Tasks; +global using System.Threading; +using System.Text.RegularExpressions; + +namespace LSLib.LS; + +public static class Common +{ + public const int MajorVersion = 1; + + public const int MinorVersion = 19; + + public const int PatchVersion = 3; + + // Version of LSTools profile data in generated DAE files + public const int ColladaMetadataVersion = 3; + + /// + /// Returns the version number of the LSLib library + /// + public static string LibraryVersion() + { + return String.Format("{0}.{1}.{2}", MajorVersion, MinorVersion, PatchVersion); + } + + /// + /// Compares the string against a given pattern. + /// + /// The string + /// The pattern to match, where "*" means any sequence of characters, and "?" means any single character + /// true if the string matches the given pattern; otherwise false. + public static bool Like(this string str, string pattern) + { + return new Regex("^" + Regex.Escape(pattern).Replace(@"\*", ".*").Replace(@"\?", ".") + "$", RegexOptions.Singleline).IsMatch(str); + } + + /// + /// Compares the string against a given pattern. 
+ /// + /// The string + /// The pattern to match as a RegEx object + /// true if the string matches the given pattern; otherwise false. + public static bool Like(this string str, Regex pattern) + { + return pattern.IsMatch(str); + } +} diff --git a/LSLib/LS/Enums/Compression.cs b/LSLib/LS/Enums/Compression.cs index 95b7cff2..1e857846 100644 --- a/LSLib/LS/Enums/Compression.cs +++ b/LSLib/LS/Enums/Compression.cs @@ -1,6 +1,4 @@ -using System; - -namespace LSLib.LS; +namespace LSLib.LS; public enum CompressionMethod { diff --git a/LSLib/LS/Enums/Game.cs b/LSLib/LS/Enums/Game.cs index ae65b037..f5ddbf27 100644 --- a/LSLib/LS/Enums/Game.cs +++ b/LSLib/LS/Enums/Game.cs @@ -1,5 +1,4 @@ namespace LSLib.LS.Enums; - public enum Game { DivinityOriginalSin = 0, diff --git a/LSLib/LS/Enums/PackageVersion.cs b/LSLib/LS/Enums/PackageVersion.cs index 6ebddeba..64343744 100644 --- a/LSLib/LS/Enums/PackageVersion.cs +++ b/LSLib/LS/Enums/PackageVersion.cs @@ -1,6 +1,4 @@ -using System; - -namespace LSLib.LS.Enums; +namespace LSLib.LS.Enums; public enum PackageVersion { diff --git a/LSLib/LS/FileManager.cs b/LSLib/LS/FileManager.cs index f8172811..c236f89a 100644 --- a/LSLib/LS/FileManager.cs +++ b/LSLib/LS/FileManager.cs @@ -1,7 +1,4 @@ -using System; -using System.IO; - -namespace LSLib.LS; +namespace LSLib.LS; public class FileManager { diff --git a/LSLib/LS/Localization.cs b/LSLib/LS/Localization.cs index 6e028b4c..6488f59e 100644 --- a/LSLib/LS/Localization.cs +++ b/LSLib/LS/Localization.cs @@ -1,9 +1,4 @@ -using System; -using System.IO; -using System.Runtime.InteropServices; -using System.Text; -using System.Collections.Generic; -using System.Xml; +using System.Xml; namespace LSLib.LS; diff --git a/LSLib/LS/Matrix.cs b/LSLib/LS/Matrix.cs index fbcea8ac..f7204bf8 100644 --- a/LSLib/LS/Matrix.cs +++ b/LSLib/LS/Matrix.cs @@ -33,7 +33,6 @@ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT OTHER DEALINGS IN THE SOFTWARE. 
*/ -using System; using System.Text.RegularExpressions; namespace LSLib.LS; diff --git a/LSLib/LS/Mods/ModResources.cs b/LSLib/LS/Mods/ModResources.cs index d491591a..6c000edb 100644 --- a/LSLib/LS/Mods/ModResources.cs +++ b/LSLib/LS/Mods/ModResources.cs @@ -1,8 +1,4 @@ using LSLib.LS.Story.Compiler; -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; using System.Text.RegularExpressions; namespace LSLib.LS; @@ -262,3 +258,26 @@ public void Discover() [GeneratedRegex("^(.*)_[0-9]+\\.pak$", RegexOptions.IgnoreCase | RegexOptions.Compiled | RegexOptions.CultureInvariant)] private static partial Regex ArchivePartRegex(); } + +public class GameDataContext +{ + public VFS FS; + public ModResources Resources; + + public GameDataContext(string path, TargetGame game = TargetGame.BG3, bool excludeAssets = true) + { + FS = new VFS(); + FS.AttachGameDirectory(path, excludeAssets); + FS.FinishBuild(); + + Resources = new ModResources(); + var visitor = new ModPathVisitor(Resources, FS) + { + Game = game, + CollectStoryGoals = true, + CollectGlobals = false, + CollectLevels = false + }; + visitor.Discover(); + } +} diff --git a/LSLib/LS/PackageCommon.cs b/LSLib/LS/PackageCommon.cs index b14932f6..a8725827 100644 --- a/LSLib/LS/PackageCommon.cs +++ b/LSLib/LS/PackageCommon.cs @@ -1,9 +1,4 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.IO.MemoryMappedFiles; -using System.Linq; -using System.Threading.Tasks; +using System.IO.MemoryMappedFiles; using LSLib.LS.Enums; namespace LSLib.LS; diff --git a/LSLib/LS/PackageFormat.cs b/LSLib/LS/PackageFormat.cs index 9051982a..e2b7dd90 100644 --- a/LSLib/LS/PackageFormat.cs +++ b/LSLib/LS/PackageFormat.cs @@ -1,8 +1,4 @@ -using LSLib.Granny.GR2; -using LSLib.LS.Enums; -using System; -using System.Reflection.PortableExecutable; -using System.Runtime.InteropServices; +using LSLib.LS.Enums; namespace LSLib.LS; diff --git a/LSLib/LS/PackageReader.cs b/LSLib/LS/PackageReader.cs index 81fca9d9..c7f4dea0 100644 --- a/LSLib/LS/PackageReader.cs +++ b/LSLib/LS/PackageReader.cs @@ -1,14 +1,5 @@ -using System; -using System.IO; -using System.Linq; -using System.Runtime.InteropServices; -using System.Text; -using LZ4; -using LSLib.LS.Enums; +using LSLib.LS.Enums; using System.IO.MemoryMappedFiles; -using System.Reflection; -using LSLib.VirtualTextures; -using System.Collections.Generic; namespace LSLib.LS; diff --git a/LSLib/LS/PackageWriter.cs b/LSLib/LS/PackageWriter.cs index f8f9d4d6..904f10f2 100644 --- a/LSLib/LS/PackageWriter.cs +++ b/LSLib/LS/PackageWriter.cs @@ -1,11 +1,5 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.IO.Hashing; -using System.Linq; -using System.Runtime.InteropServices; +using System.IO.Hashing; using System.Security.Cryptography; -using System.Text; using LSLib.LS.Enums; using LZ4; diff --git a/LSLib/LS/Resource.cs b/LSLib/LS/Resource.cs index aa585fcc..52fd0177 100644 --- a/LSLib/LS/Resource.cs +++ b/LSLib/LS/Resource.cs @@ -1,9 +1,4 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Runtime.InteropServices; - -namespace LSLib.LS; +namespace LSLib.LS; public class InvalidFormatException(string message) : Exception(message) { diff --git a/LSLib/LS/ResourceUtils.cs b/LSLib/LS/ResourceUtils.cs index 08c564f9..7be8464a 100644 --- a/LSLib/LS/ResourceUtils.cs +++ b/LSLib/LS/ResourceUtils.cs @@ -1,7 +1,4 @@ -using System; -using System.Collections.Generic; -using System.IO; -using LSLib.LS.Enums; +using 
LSLib.LS.Enums; namespace LSLib.LS; diff --git a/LSLib/LS/Resources/LSB/LSBReader.cs b/LSLib/LS/Resources/LSB/LSBReader.cs index 41965b7a..28ecc311 100644 --- a/LSLib/LS/Resources/LSB/LSBReader.cs +++ b/LSLib/LS/Resources/LSB/LSBReader.cs @@ -1,8 +1,4 @@ -using System; -using System.Collections.Generic; -using System.IO; - -namespace LSLib.LS; +namespace LSLib.LS; public class LSBReader(Stream stream) : IDisposable { diff --git a/LSLib/LS/Resources/LSB/LSBWriter.cs b/LSLib/LS/Resources/LSB/LSBWriter.cs index 84254992..6df93c9f 100644 --- a/LSLib/LS/Resources/LSB/LSBWriter.cs +++ b/LSLib/LS/Resources/LSB/LSBWriter.cs @@ -1,8 +1,4 @@ -using System; -using System.Collections.Generic; -using System.IO; - -namespace LSLib.LS; +namespace LSLib.LS; public class LSBWriter(Stream stream) { diff --git a/LSLib/LS/Resources/LSF/LSFCommon.cs b/LSLib/LS/Resources/LSF/LSFCommon.cs index ab6c5ea0..a970d742 100644 --- a/LSLib/LS/Resources/LSF/LSFCommon.cs +++ b/LSLib/LS/Resources/LSF/LSFCommon.cs @@ -1,7 +1,4 @@ -using System; -using System.Runtime.InteropServices; - -namespace LSLib.LS; +namespace LSLib.LS; [StructLayout(LayoutKind.Sequential, Pack = 1)] internal struct LSFMagic diff --git a/LSLib/LS/Resources/LSF/LSFReader.cs b/LSLib/LS/Resources/LSF/LSFReader.cs index adea33af..b929f4e0 100644 --- a/LSLib/LS/Resources/LSF/LSFReader.cs +++ b/LSLib/LS/Resources/LSF/LSFReader.cs @@ -1,10 +1,6 @@ // #define DEBUG_LSF_SERIALIZATION // #define DUMP_LSF_SERIALIZATION -using System; -using System.Collections.Generic; -using System.IO; -using System.Text; using LSLib.LS.Enums; namespace LSLib.LS; diff --git a/LSLib/LS/Resources/LSF/LSFWriter.cs b/LSLib/LS/Resources/LSF/LSFWriter.cs index 0982cb5d..795c3eef 100644 --- a/LSLib/LS/Resources/LSF/LSFWriter.cs +++ b/LSLib/LS/Resources/LSF/LSFWriter.cs @@ -1,8 +1,4 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Text; -using LSLib.LS.Enums; +using LSLib.LS.Enums; namespace LSLib.LS; diff --git a/LSLib/LS/Resources/LSJ/LSJReader.cs b/LSLib/LS/Resources/LSJ/LSJReader.cs index bb420db4..5e741f21 100644 --- a/LSLib/LS/Resources/LSJ/LSJReader.cs +++ b/LSLib/LS/Resources/LSJ/LSJReader.cs @@ -1,6 +1,4 @@ -using System; -using System.IO; -using Newtonsoft.Json; +using Newtonsoft.Json; namespace LSLib.LS; diff --git a/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs b/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs index dfd91949..637bbd56 100644 --- a/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs +++ b/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs @@ -1,9 +1,6 @@ -using System; -using Newtonsoft.Json; -using System.IO; +using Newtonsoft.Json; using System.Text.RegularExpressions; using System.Numerics; -using System.Collections.Generic; namespace LSLib.LS; diff --git a/LSLib/LS/Resources/LSJ/LSJWriter.cs b/LSLib/LS/Resources/LSJ/LSJWriter.cs index bf5ef244..3c600f4b 100644 --- a/LSLib/LS/Resources/LSJ/LSJWriter.cs +++ b/LSLib/LS/Resources/LSJ/LSJWriter.cs @@ -1,5 +1,4 @@ -using System.IO; -using Newtonsoft.Json; +using Newtonsoft.Json; namespace LSLib.LS; diff --git a/LSLib/LS/Resources/LSX/LSXReader.cs b/LSLib/LS/Resources/LSX/LSXReader.cs index 9516dd87..15c797ee 100644 --- a/LSLib/LS/Resources/LSX/LSXReader.cs +++ b/LSLib/LS/Resources/LSX/LSXReader.cs @@ -1,10 +1,5 @@ using LSLib.LS.Enums; -using OpenTK.Mathematics; -using System; -using System.Collections.Generic; using System.Diagnostics; -using System.IO; -using System.Linq; using System.Xml; namespace LSLib.LS; diff --git a/LSLib/LS/Resources/LSX/LSXWriter.cs 
b/LSLib/LS/Resources/LSX/LSXWriter.cs index a618063b..728a5236 100644 --- a/LSLib/LS/Resources/LSX/LSXWriter.cs +++ b/LSLib/LS/Resources/LSX/LSXWriter.cs @@ -1,6 +1,4 @@ using LSLib.LS.Enums; -using System.Collections.Generic; -using System.IO; using System.Xml; namespace LSLib.LS; diff --git a/LSLib/LS/Save/SavegameHelpers.cs b/LSLib/LS/Save/SavegameHelpers.cs index 7910bbf7..3ab47d28 100644 --- a/LSLib/LS/Save/SavegameHelpers.cs +++ b/LSLib/LS/Save/SavegameHelpers.cs @@ -1,9 +1,5 @@ using LSLib.LS.Enums; using LSLib.LS.Story; -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; namespace LSLib.LS.Save; diff --git a/LSLib/LS/Save/VariableManager.cs b/LSLib/LS/Save/VariableManager.cs index beb6a00e..4ff58935 100644 --- a/LSLib/LS/Save/VariableManager.cs +++ b/LSLib/LS/Save/VariableManager.cs @@ -1,9 +1,4 @@ using OpenTK.Mathematics; -using System; -using System.Collections.Generic; -using System.IO; -using System.Runtime.InteropServices; -using System.Text; namespace LSLib.LS.Save; diff --git a/LSLib/LS/Stats/Parser/PropertyDefinitions.cs b/LSLib/LS/Stats/Parser/PropertyDefinitions.cs index 1f1cc488..855fde16 100644 --- a/LSLib/LS/Stats/Parser/PropertyDefinitions.cs +++ b/LSLib/LS/Stats/Parser/PropertyDefinitions.cs @@ -1,8 +1,4 @@ -using LSLib.LS.Story.GoalParser; -using System; -using System.Collections.Generic; - -namespace LSLib.LS.Stats.Properties; +namespace LSLib.LS.Stats.Properties; public class Requirement { diff --git a/LSLib/LS/Stats/Parser/StatNodes.cs b/LSLib/LS/Stats/Parser/StatNodes.cs index 5b342942..a0ee3c12 100644 --- a/LSLib/LS/Stats/Parser/StatNodes.cs +++ b/LSLib/LS/Stats/Parser/StatNodes.cs @@ -1,6 +1,4 @@ using LSLib.LS.Story.GoalParser; -using System; -using System.Collections.Generic; namespace LSLib.LS.Stats.StatParser; diff --git a/LSLib/LS/Stats/Parser/StatParser.cs b/LSLib/LS/Stats/Parser/StatParser.cs index 87306e19..b930d2e3 100644 --- a/LSLib/LS/Stats/Parser/StatParser.cs +++ b/LSLib/LS/Stats/Parser/StatParser.cs @@ -1,7 +1,5 @@ using LSLib.LS.Story.GoalParser; using QUT.Gppg; -using System; -using System.Collections.Generic; using System.Text.RegularExpressions; namespace LSLib.LS.Stats.StatParser; diff --git a/LSLib/LS/Stats/Parser/StatPropertyParser.cs b/LSLib/LS/Stats/Parser/StatPropertyParser.cs index d18eb4a6..d1bf45dc 100644 --- a/LSLib/LS/Stats/Parser/StatPropertyParser.cs +++ b/LSLib/LS/Stats/Parser/StatPropertyParser.cs @@ -1,9 +1,5 @@ using LSLib.LS.Story.GoalParser; using QUT.Gppg; -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; namespace LSLib.LS.Stats.Properties; diff --git a/LSLib/LS/Stats/ShiftReduceParser.cs b/LSLib/LS/Stats/ShiftReduceParser.cs index f6aa09cf..e8bf96c5 100644 --- a/LSLib/LS/Stats/ShiftReduceParser.cs +++ b/LSLib/LS/Stats/ShiftReduceParser.cs @@ -2,11 +2,7 @@ // Copyright (c) Wayne Kelly, QUT 2005-2014 // (see accompanying GPPGcopyright.rtf) -using System; -using System.Text; using System.Globalization; -using System.Collections.Generic; -using System.Runtime.Serialization; using System.Diagnostics.CodeAnalysis; namespace QUT.Gppg diff --git a/LSLib/LS/Stats/StatDefinitions.cs b/LSLib/LS/Stats/StatDefinitions.cs index f4afccc5..a1b50fac 100644 --- a/LSLib/LS/Stats/StatDefinitions.cs +++ b/LSLib/LS/Stats/StatDefinitions.cs @@ -1,9 +1,4 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; - -namespace LSLib.LS.Stats; +namespace LSLib.LS.Stats; public class StatEnumeration(string name) { diff --git 
a/LSLib/LS/Stats/StatFileParser.cs b/LSLib/LS/Stats/StatFileParser.cs index 632f37a8..5cc97d8f 100644 --- a/LSLib/LS/Stats/StatFileParser.cs +++ b/LSLib/LS/Stats/StatFileParser.cs @@ -1,12 +1,5 @@ using LSLib.LS.Stats.StatParser; -using LSLib.LS.Story; using LSLib.LS.Story.GoalParser; -using System; -using System.Collections.Generic; -using System.ComponentModel.DataAnnotations; -using System.Data; -using System.IO; -using System.Linq; using System.Xml; namespace LSLib.LS.Stats; diff --git a/LSLib/LS/Stats/StatValueParsers.cs b/LSLib/LS/Stats/StatValueParsers.cs index 2760a6f4..e3262fa3 100644 --- a/LSLib/LS/Stats/StatValueParsers.cs +++ b/LSLib/LS/Stats/StatValueParsers.cs @@ -1,13 +1,7 @@ using LSLib.LS.Stats.Properties; using LSLib.LS.Stats.StatParser; -using LSLib.LS.Story; using LSLib.LS.Story.GoalParser; -using System; -using System.Collections.Generic; using System.Globalization; -using System.IO; -using System.Linq; -using System.Text; namespace LSLib.LS.Stats; diff --git a/LSLib/LS/Story/Adapter.cs b/LSLib/LS/Story/Adapter.cs index d08154a8..93c337d7 100644 --- a/LSLib/LS/Story/Adapter.cs +++ b/LSLib/LS/Story/Adapter.cs @@ -1,8 +1,4 @@ -using System; -using System.Collections.Generic; -using System.IO; - -namespace LSLib.LS.Story; +namespace LSLib.LS.Story; public class Adapter : OsirisSerializable { diff --git a/LSLib/LS/Story/Call.cs b/LSLib/LS/Story/Call.cs index 9c28a722..02fa698d 100644 --- a/LSLib/LS/Story/Call.cs +++ b/LSLib/LS/Story/Call.cs @@ -1,8 +1,4 @@ -using System; -using System.Collections.Generic; -using System.IO; - -namespace LSLib.LS.Story; +namespace LSLib.LS.Story; public class Call : OsirisSerializable { diff --git a/LSLib/LS/Story/Common.cs b/LSLib/LS/Story/Common.cs index c6c1d93b..7e55d33e 100644 --- a/LSLib/LS/Story/Common.cs +++ b/LSLib/LS/Story/Common.cs @@ -1,11 +1,4 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Reflection; -using System.Text; - -namespace LSLib.LS.Story; +namespace LSLib.LS.Story; public interface OsirisSerializable { diff --git a/LSLib/LS/Story/Compiler/CompilationContext.cs b/LSLib/LS/Story/Compiler/CompilationContext.cs index dc1f54fb..f5304035 100644 --- a/LSLib/LS/Story/Compiler/CompilationContext.cs +++ b/LSLib/LS/Story/Compiler/CompilationContext.cs @@ -1,6 +1,4 @@ using LSLib.LS.Story.GoalParser; -using System; -using System.Collections.Generic; namespace LSLib.LS.Story.Compiler; diff --git a/LSLib/LS/Story/Compiler/Compiler.cs b/LSLib/LS/Story/Compiler/Compiler.cs index 109694a2..2272908a 100644 --- a/LSLib/LS/Story/Compiler/Compiler.cs +++ b/LSLib/LS/Story/Compiler/Compiler.cs @@ -1,7 +1,4 @@ -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.Linq; +using System.Diagnostics; namespace LSLib.LS.Story.Compiler; diff --git a/LSLib/LS/Story/Compiler/DebugInfo.cs b/LSLib/LS/Story/Compiler/DebugInfo.cs index 65d5cb23..89106e43 100644 --- a/LSLib/LS/Story/Compiler/DebugInfo.cs +++ b/LSLib/LS/Story/Compiler/DebugInfo.cs @@ -1,9 +1,4 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Threading.Tasks; - + namespace LSLib.LS.Story.Compiler; public class DatabaseDebugInfo diff --git a/LSLib/LS/Story/Compiler/HeaderLoader.cs b/LSLib/LS/Story/Compiler/HeaderLoader.cs index 7b63f5c0..05e0acb6 100644 --- a/LSLib/LS/Story/Compiler/HeaderLoader.cs +++ b/LSLib/LS/Story/Compiler/HeaderLoader.cs @@ -1,10 +1,4 @@ using LSLib.LS.Story.HeaderParser; -using System; -using 
System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Text; -using System.Threading.Tasks; namespace LSLib.LS.Story.Compiler; diff --git a/LSLib/LS/Story/Compiler/IR.cs b/LSLib/LS/Story/Compiler/IR.cs index d6fb4bc1..c1db11da 100644 --- a/LSLib/LS/Story/Compiler/IR.cs +++ b/LSLib/LS/Story/Compiler/IR.cs @@ -1,9 +1,4 @@ using LSLib.LS.Story.GoalParser; -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Threading.Tasks; namespace LSLib.LS.Story.Compiler; diff --git a/LSLib/LS/Story/Compiler/IRGenerator.cs b/LSLib/LS/Story/Compiler/IRGenerator.cs index b3922eb1..20db2cac 100644 --- a/LSLib/LS/Story/Compiler/IRGenerator.cs +++ b/LSLib/LS/Story/Compiler/IRGenerator.cs @@ -1,10 +1,4 @@ using LSLib.LS.Story.GoalParser; -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Text; -using System.Threading.Tasks; namespace LSLib.LS.Story.Compiler; diff --git a/LSLib/LS/Story/Compiler/Preprocessor.cs b/LSLib/LS/Story/Compiler/Preprocessor.cs index d90fc5e1..9d194b4b 100644 --- a/LSLib/LS/Story/Compiler/Preprocessor.cs +++ b/LSLib/LS/Story/Compiler/Preprocessor.cs @@ -1,10 +1,4 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Threading.Tasks; - -namespace LSLib.LS.Story.Compiler; +namespace LSLib.LS.Story.Compiler; public class Preprocessor { diff --git a/LSLib/LS/Story/Compiler/StoryEmitter.cs b/LSLib/LS/Story/Compiler/StoryEmitter.cs index c1c8d42b..33aaa25d 100644 --- a/LSLib/LS/Story/Compiler/StoryEmitter.cs +++ b/LSLib/LS/Story/Compiler/StoryEmitter.cs @@ -1,11 +1,5 @@ using LSLib.LS.Story.GoalParser; -using System; -using System.Collections.Generic; using System.Diagnostics; -using System.IO; -using System.Linq; -using System.Text; -using System.Threading.Tasks; namespace LSLib.LS.Story.Compiler; diff --git a/LSLib/LS/Story/DataNode.cs b/LSLib/LS/Story/DataNode.cs index ff31500d..825e16bb 100644 --- a/LSLib/LS/Story/DataNode.cs +++ b/LSLib/LS/Story/DataNode.cs @@ -1,7 +1,4 @@ -using System.Collections.Generic; -using System.IO; - -namespace LSLib.LS.Story; +namespace LSLib.LS.Story; public abstract class DataNode : Node { diff --git a/LSLib/LS/Story/Database.cs b/LSLib/LS/Story/Database.cs index 538fb194..b23ce7f0 100644 --- a/LSLib/LS/Story/Database.cs +++ b/LSLib/LS/Story/Database.cs @@ -1,7 +1,4 @@ -using System; -using System.Collections.Generic; -using System.ComponentModel; -using System.IO; +using System.ComponentModel; namespace LSLib.LS.Story; diff --git a/LSLib/LS/Story/DatabaseNode.cs b/LSLib/LS/Story/DatabaseNode.cs index 9e04b1ff..0de265c6 100644 --- a/LSLib/LS/Story/DatabaseNode.cs +++ b/LSLib/LS/Story/DatabaseNode.cs @@ -1,6 +1,4 @@ -using System.IO; - -namespace LSLib.LS.Story; +namespace LSLib.LS.Story; public class DatabaseNode : DataNode { diff --git a/LSLib/LS/Story/DebugExport.cs b/LSLib/LS/Story/DebugExport.cs index 0fbdf83d..13ed3a39 100644 --- a/LSLib/LS/Story/DebugExport.cs +++ b/LSLib/LS/Story/DebugExport.cs @@ -1,8 +1,4 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using Newtonsoft.Json; +using Newtonsoft.Json; namespace LSLib.LS.Story; diff --git a/LSLib/LS/Story/Function.cs b/LSLib/LS/Story/Function.cs index 0058aeaf..3a7491ef 100644 --- a/LSLib/LS/Story/Function.cs +++ b/LSLib/LS/Story/Function.cs @@ -1,8 +1,4 @@ -using System; -using System.Collections.Generic; -using System.IO; - -namespace LSLib.LS.Story; +namespace LSLib.LS.Story; public 
class FunctionSignature : OsirisSerializable { diff --git a/LSLib/LS/Story/Goal.cs b/LSLib/LS/Story/Goal.cs index a26d9c11..d75d4b7c 100644 --- a/LSLib/LS/Story/Goal.cs +++ b/LSLib/LS/Story/Goal.cs @@ -1,8 +1,4 @@ -using System; -using System.Collections.Generic; -using System.IO; - -namespace LSLib.LS.Story; +namespace LSLib.LS.Story; public class Goal : OsirisSerializable { diff --git a/LSLib/LS/Story/GoalParser/ASTNodes.cs b/LSLib/LS/Story/GoalParser/ASTNodes.cs index 5089736d..370efc63 100644 --- a/LSLib/LS/Story/GoalParser/ASTNodes.cs +++ b/LSLib/LS/Story/GoalParser/ASTNodes.cs @@ -1,6 +1,4 @@ using LSLib.LS.Story.Compiler; -using System; -using System.Collections.Generic; namespace LSLib.LS.Story.GoalParser; diff --git a/LSLib/LS/Story/GoalParser/GoalParser.cs b/LSLib/LS/Story/GoalParser/GoalParser.cs index f9d9a9f4..d4838bba 100644 --- a/LSLib/LS/Story/GoalParser/GoalParser.cs +++ b/LSLib/LS/Story/GoalParser/GoalParser.cs @@ -1,10 +1,7 @@ using LSLib.LS.Story.Compiler; -using System; using System.Globalization; -using System.IO; using System.Text.RegularExpressions; using QUT.Gppg; -using System.Collections.Generic; namespace LSLib.LS.Story.GoalParser; diff --git a/LSLib/LS/Story/HeaderParser/ASTNodes.cs b/LSLib/LS/Story/HeaderParser/ASTNodes.cs index 4bc27a44..57509184 100644 --- a/LSLib/LS/Story/HeaderParser/ASTNodes.cs +++ b/LSLib/LS/Story/HeaderParser/ASTNodes.cs @@ -1,6 +1,4 @@ using LSLib.LS.Story.Compiler; -using System; -using System.Collections.Generic; namespace LSLib.LS.Story.HeaderParser; diff --git a/LSLib/LS/Story/HeaderParser/HeaderParser.cs b/LSLib/LS/Story/HeaderParser/HeaderParser.cs index e93a3cac..2253d2a0 100644 --- a/LSLib/LS/Story/HeaderParser/HeaderParser.cs +++ b/LSLib/LS/Story/HeaderParser/HeaderParser.cs @@ -1,6 +1,5 @@ using LSLib.LS.Story.Compiler; using QUT.Gppg; -using System; using System.Text.RegularExpressions; namespace LSLib.LS.Story.HeaderParser; diff --git a/LSLib/LS/Story/Join.cs b/LSLib/LS/Story/Join.cs index 10bc7283..fdafbc33 100644 --- a/LSLib/LS/Story/Join.cs +++ b/LSLib/LS/Story/Join.cs @@ -1,6 +1,4 @@ -using System.IO; - -namespace LSLib.LS.Story; +namespace LSLib.LS.Story; public abstract class JoinNode : TreeNode { diff --git a/LSLib/LS/Story/Node.cs b/LSLib/LS/Story/Node.cs index 358bfbc1..abe8411c 100644 --- a/LSLib/LS/Story/Node.cs +++ b/LSLib/LS/Story/Node.cs @@ -1,7 +1,4 @@ -using System; -using System.IO; - -namespace LSLib.LS.Story; +namespace LSLib.LS.Story; public abstract class Node : OsirisSerializable { diff --git a/LSLib/LS/Story/Proc.cs b/LSLib/LS/Story/Proc.cs index e2f780d3..f4c63ae1 100644 --- a/LSLib/LS/Story/Proc.cs +++ b/LSLib/LS/Story/Proc.cs @@ -1,6 +1,4 @@ -using System.IO; - -namespace LSLib.LS.Story; +namespace LSLib.LS.Story; public class ProcNode : DataNode { diff --git a/LSLib/LS/Story/Query.cs b/LSLib/LS/Story/Query.cs index 0286088f..aa89c3dc 100644 --- a/LSLib/LS/Story/Query.cs +++ b/LSLib/LS/Story/Query.cs @@ -1,6 +1,4 @@ -using System.IO; - -namespace LSLib.LS.Story; +namespace LSLib.LS.Story; public abstract class QueryNode : Node { diff --git a/LSLib/LS/Story/Reference.cs b/LSLib/LS/Story/Reference.cs index 29a1f8ff..b472f60d 100644 --- a/LSLib/LS/Story/Reference.cs +++ b/LSLib/LS/Story/Reference.cs @@ -1,7 +1,4 @@ -using System; -using System.IO; - -namespace LSLib.LS.Story; +namespace LSLib.LS.Story; public abstract class OsiReference : OsirisSerializable { diff --git a/LSLib/LS/Story/Rel.cs b/LSLib/LS/Story/Rel.cs index f2e7efc2..27a5c4b9 100644 --- a/LSLib/LS/Story/Rel.cs +++ 
b/LSLib/LS/Story/Rel.cs @@ -1,6 +1,4 @@ -using System.IO; - -namespace LSLib.LS.Story; +namespace LSLib.LS.Story; public abstract class RelNode : TreeNode { diff --git a/LSLib/LS/Story/RelOp.cs b/LSLib/LS/Story/RelOp.cs index 56044555..21f6bc25 100644 --- a/LSLib/LS/Story/RelOp.cs +++ b/LSLib/LS/Story/RelOp.cs @@ -1,7 +1,4 @@ -using System; -using System.IO; - -namespace LSLib.LS.Story; +namespace LSLib.LS.Story; public enum RelOpType : byte { diff --git a/LSLib/LS/Story/Rule.cs b/LSLib/LS/Story/Rule.cs index cea2c30a..644d8d3f 100644 --- a/LSLib/LS/Story/Rule.cs +++ b/LSLib/LS/Story/Rule.cs @@ -1,8 +1,4 @@ -using System; -using System.Collections.Generic; -using System.IO; - -namespace LSLib.LS.Story; +namespace LSLib.LS.Story; public enum RuleType { diff --git a/LSLib/LS/Story/Story.cs b/LSLib/LS/Story/Story.cs index 6805c373..aa8212ee 100644 --- a/LSLib/LS/Story/Story.cs +++ b/LSLib/LS/Story/Story.cs @@ -1,9 +1,4 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; - -namespace LSLib.LS.Story; +namespace LSLib.LS.Story; public class Story { diff --git a/LSLib/LS/Story/Value.cs b/LSLib/LS/Story/Value.cs index a854d77f..59707ccd 100644 --- a/LSLib/LS/Story/Value.cs +++ b/LSLib/LS/Story/Value.cs @@ -1,9 +1,4 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; - -namespace LSLib.LS.Story; +namespace LSLib.LS.Story; public class Value : OsirisSerializable { diff --git a/LSLib/LS/VFS.cs b/LSLib/LS/VFS.cs index 08d34b4c..41bd4fad 100644 --- a/LSLib/LS/VFS.cs +++ b/LSLib/LS/VFS.cs @@ -1,11 +1,4 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Text; -using System.Threading.Tasks; - -namespace LSLib.LS; +namespace LSLib.LS; public class VFSDirectory { diff --git a/LSLib/VirtualTextures/BC5Image.cs b/LSLib/VirtualTextures/BC5Image.cs index ffc3b8f1..6a164f04 100644 --- a/LSLib/VirtualTextures/BC5Image.cs +++ b/LSLib/VirtualTextures/BC5Image.cs @@ -1,8 +1,4 @@ -using LSLib.Granny; -using LSLib.LS; -using System; -using System.Collections.Generic; -using System.IO; +using LSLib.LS; namespace LSLib.VirtualTextures; diff --git a/LSLib/VirtualTextures/Build.cs b/LSLib/VirtualTextures/Build.cs index 535bd98a..a9b31ad7 100644 --- a/LSLib/VirtualTextures/Build.cs +++ b/LSLib/VirtualTextures/Build.cs @@ -1,9 +1,4 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Runtime.InteropServices; -using System.Xml; +using System.Xml; namespace LSLib.VirtualTextures; diff --git a/LSLib/VirtualTextures/Compression.cs b/LSLib/VirtualTextures/Compression.cs index b473b8c5..10d10f92 100644 --- a/LSLib/VirtualTextures/Compression.cs +++ b/LSLib/VirtualTextures/Compression.cs @@ -1,6 +1,4 @@ -using System; -using System.IO; -using LZ4; +using LZ4; namespace LSLib.VirtualTextures; diff --git a/LSLib/VirtualTextures/Geometry.cs b/LSLib/VirtualTextures/Geometry.cs index 8f684949..21106e04 100644 --- a/LSLib/VirtualTextures/Geometry.cs +++ b/LSLib/VirtualTextures/Geometry.cs @@ -1,7 +1,4 @@ -using System; -using System.Collections.Generic; - -namespace LSLib.VirtualTextures; +namespace LSLib.VirtualTextures; public class TileSetGeometryCalculator { diff --git a/LSLib/VirtualTextures/PageFile.cs b/LSLib/VirtualTextures/PageFile.cs index b76ad953..e7324ba9 100644 --- a/LSLib/VirtualTextures/PageFile.cs +++ b/LSLib/VirtualTextures/PageFile.cs @@ -1,7 +1,4 @@ using LSLib.LS; -using System; -using System.Collections.Generic; -using System.IO; 
namespace LSLib.VirtualTextures; diff --git a/LSLib/VirtualTextures/PageFileBuild.cs b/LSLib/VirtualTextures/PageFileBuild.cs index 31ebeecc..512ea43f 100644 --- a/LSLib/VirtualTextures/PageFileBuild.cs +++ b/LSLib/VirtualTextures/PageFileBuild.cs @@ -1,9 +1,4 @@ using LSLib.LS; -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Runtime.InteropServices; using System.Security.Cryptography; namespace LSLib.VirtualTextures; diff --git a/LSLib/VirtualTextures/VirtualTexture.cs b/LSLib/VirtualTextures/VirtualTexture.cs index fadce1cf..c95d5137 100644 --- a/LSLib/VirtualTextures/VirtualTexture.cs +++ b/LSLib/VirtualTextures/VirtualTexture.cs @@ -1,9 +1,5 @@ using LSLib.LS; -using System; -using System.Collections.Generic; using System.Diagnostics; -using System.IO; -using System.Text; namespace LSLib.VirtualTextures; diff --git a/LSLib/VirtualTextures/VirtualTextureFormats.cs b/LSLib/VirtualTextures/VirtualTextureFormats.cs index 93d318b0..b82b0d24 100644 --- a/LSLib/VirtualTextures/VirtualTextureFormats.cs +++ b/LSLib/VirtualTextures/VirtualTextureFormats.cs @@ -1,9 +1,4 @@ -using LSLib.Granny; -using System; -using System.Runtime.InteropServices; -using System.Text; - -namespace LSLib.VirtualTextures; +namespace LSLib.VirtualTextures; [StructLayout(LayoutKind.Sequential, Pack = 1)] public struct DDSHeader From 79323f6f0fd6311292ee6b6c81506c7d96b27e63 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Fri, 26 Jan 2024 17:49:29 +0100 Subject: [PATCH 083/139] Clean up attribute type logic --- LSLib/LS/BinUtils.cs | 110 ++--- LSLib/LS/NodeAttribute.cs | 441 ++++++++++-------- LSLib/LS/PackageWriter.cs | 6 +- LSLib/LS/Resource.cs | 141 +++--- LSLib/LS/Resources/LSB/LSBReader.cs | 22 +- LSLib/LS/Resources/LSB/LSBWriter.cs | 16 +- LSLib/LS/Resources/LSF/LSFReader.cs | 22 +- LSLib/LS/Resources/LSF/LSFWriter.cs | 18 +- .../LS/Resources/LSJ/LSJResourceConverter.cs | 154 +++--- LSLib/LS/Resources/LSX/LSXReader.cs | 36 +- LSLib/LS/Resources/LSX/LSXWriter.cs | 4 +- 11 files changed, 505 insertions(+), 465 deletions(-) diff --git a/LSLib/LS/BinUtils.cs b/LSLib/LS/BinUtils.cs index 52735f4d..22f712d7 100644 --- a/LSLib/LS/BinUtils.cs +++ b/LSLib/LS/BinUtils.cs @@ -206,47 +206,47 @@ public static byte[] StringToNullTerminatedBytes(string s, int length) return b; } - public static NodeAttribute ReadAttribute(NodeAttribute.DataType type, BinaryReader reader) + public static NodeAttribute ReadAttribute(AttributeType type, BinaryReader reader) { var attr = new NodeAttribute(type); switch (type) { - case NodeAttribute.DataType.DT_None: + case AttributeType.None: break; - case NodeAttribute.DataType.DT_Byte: + case AttributeType.Byte: attr.Value = reader.ReadByte(); break; - case NodeAttribute.DataType.DT_Short: + case AttributeType.Short: attr.Value = reader.ReadInt16(); break; - case NodeAttribute.DataType.DT_UShort: + case AttributeType.UShort: attr.Value = reader.ReadUInt16(); break; - case NodeAttribute.DataType.DT_Int: + case AttributeType.Int: attr.Value = reader.ReadInt32(); break; - case NodeAttribute.DataType.DT_UInt: + case AttributeType.UInt: attr.Value = reader.ReadUInt32(); break; - case NodeAttribute.DataType.DT_Float: + case AttributeType.Float: attr.Value = reader.ReadSingle(); break; - case NodeAttribute.DataType.DT_Double: + case AttributeType.Double: attr.Value = reader.ReadDouble(); break; - case NodeAttribute.DataType.DT_IVec2: - case NodeAttribute.DataType.DT_IVec3: - case NodeAttribute.DataType.DT_IVec4: + case AttributeType.IVec2: + case 
AttributeType.IVec3: + case AttributeType.IVec4: { - int columns = attr.GetColumns(); + int columns = attr.Type.GetColumns(); var vec = new int[columns]; for (int i = 0; i < columns; i++) vec[i] = reader.ReadInt32(); @@ -254,11 +254,11 @@ public static NodeAttribute ReadAttribute(NodeAttribute.DataType type, BinaryRea break; } - case NodeAttribute.DataType.DT_Vec2: - case NodeAttribute.DataType.DT_Vec3: - case NodeAttribute.DataType.DT_Vec4: + case AttributeType.Vec2: + case AttributeType.Vec3: + case AttributeType.Vec4: { - int columns = attr.GetColumns(); + int columns = attr.Type.GetColumns(); var vec = new float[columns]; for (int i = 0; i < columns; i++) vec[i] = reader.ReadSingle(); @@ -266,14 +266,14 @@ public static NodeAttribute ReadAttribute(NodeAttribute.DataType type, BinaryRea break; } - case NodeAttribute.DataType.DT_Mat2: - case NodeAttribute.DataType.DT_Mat3: - case NodeAttribute.DataType.DT_Mat3x4: - case NodeAttribute.DataType.DT_Mat4x3: - case NodeAttribute.DataType.DT_Mat4: + case AttributeType.Mat2: + case AttributeType.Mat3: + case AttributeType.Mat3x4: + case AttributeType.Mat4x3: + case AttributeType.Mat4: { - int columns = attr.GetColumns(); - int rows = attr.GetRows(); + int columns = attr.Type.GetColumns(); + int rows = attr.Type.GetRows(); var mat = new Matrix(rows, columns); attr.Value = mat; @@ -287,24 +287,24 @@ public static NodeAttribute ReadAttribute(NodeAttribute.DataType type, BinaryRea break; } - case NodeAttribute.DataType.DT_Bool: + case AttributeType.Bool: attr.Value = reader.ReadByte() != 0; break; - case NodeAttribute.DataType.DT_ULongLong: + case AttributeType.ULongLong: attr.Value = reader.ReadUInt64(); break; - case NodeAttribute.DataType.DT_Long: - case NodeAttribute.DataType.DT_Int64: + case AttributeType.Long: + case AttributeType.Int64: attr.Value = reader.ReadInt64(); break; - case NodeAttribute.DataType.DT_Int8: + case AttributeType.Int8: attr.Value = reader.ReadSByte(); break; - case NodeAttribute.DataType.DT_UUID: + case AttributeType.UUID: attr.Value = new Guid(reader.ReadBytes(16)); break; @@ -321,60 +321,60 @@ public static void WriteAttribute(BinaryWriter writer, NodeAttribute attr) { switch (attr.Type) { - case NodeAttribute.DataType.DT_None: + case AttributeType.None: break; - case NodeAttribute.DataType.DT_Byte: + case AttributeType.Byte: writer.Write((Byte)attr.Value); break; - case NodeAttribute.DataType.DT_Short: + case AttributeType.Short: writer.Write((Int16)attr.Value); break; - case NodeAttribute.DataType.DT_UShort: + case AttributeType.UShort: writer.Write((UInt16)attr.Value); break; - case NodeAttribute.DataType.DT_Int: + case AttributeType.Int: writer.Write((Int32)attr.Value); break; - case NodeAttribute.DataType.DT_UInt: + case AttributeType.UInt: writer.Write((UInt32)attr.Value); break; - case NodeAttribute.DataType.DT_Float: + case AttributeType.Float: writer.Write((float)attr.Value); break; - case NodeAttribute.DataType.DT_Double: + case AttributeType.Double: writer.Write((Double)attr.Value); break; - case NodeAttribute.DataType.DT_IVec2: - case NodeAttribute.DataType.DT_IVec3: - case NodeAttribute.DataType.DT_IVec4: + case AttributeType.IVec2: + case AttributeType.IVec3: + case AttributeType.IVec4: foreach (var item in (int[])attr.Value) { writer.Write(item); } break; - case NodeAttribute.DataType.DT_Vec2: - case NodeAttribute.DataType.DT_Vec3: - case NodeAttribute.DataType.DT_Vec4: + case AttributeType.Vec2: + case AttributeType.Vec3: + case AttributeType.Vec4: foreach (var item in (float[])attr.Value) { 
writer.Write(item); } break; - case NodeAttribute.DataType.DT_Mat2: - case NodeAttribute.DataType.DT_Mat3: - case NodeAttribute.DataType.DT_Mat3x4: - case NodeAttribute.DataType.DT_Mat4x3: - case NodeAttribute.DataType.DT_Mat4: + case AttributeType.Mat2: + case AttributeType.Mat3: + case AttributeType.Mat3x4: + case AttributeType.Mat4x3: + case AttributeType.Mat4: { var mat = (Matrix)attr.Value; for (int col = 0; col < mat.cols; col++) @@ -387,24 +387,24 @@ public static void WriteAttribute(BinaryWriter writer, NodeAttribute attr) break; } - case NodeAttribute.DataType.DT_Bool: + case AttributeType.Bool: writer.Write((Byte)((Boolean)attr.Value ? 1 : 0)); break; - case NodeAttribute.DataType.DT_ULongLong: + case AttributeType.ULongLong: writer.Write((UInt64)attr.Value); break; - case NodeAttribute.DataType.DT_Long: - case NodeAttribute.DataType.DT_Int64: + case AttributeType.Long: + case AttributeType.Int64: writer.Write((Int64)attr.Value); break; - case NodeAttribute.DataType.DT_Int8: + case AttributeType.Int8: writer.Write((SByte)attr.Value); break; - case NodeAttribute.DataType.DT_UUID: + case AttributeType.UUID: writer.Write(((Guid)attr.Value).ToByteArray()); break; diff --git a/LSLib/LS/NodeAttribute.cs b/LSLib/LS/NodeAttribute.cs index a66bcb23..f4a2cc86 100644 --- a/LSLib/LS/NodeAttribute.cs +++ b/LSLib/LS/NodeAttribute.cs @@ -65,54 +65,125 @@ public string BuildMeta() return String.Join(",", tags); } } - -public class NodeAttribute(NodeAttribute.DataType type) +public enum AttributeType +{ + None = 0, + Byte = 1, + Short = 2, + UShort = 3, + Int = 4, + UInt = 5, + Float = 6, + Double = 7, + IVec2 = 8, + IVec3 = 9, + IVec4 = 10, + Vec2 = 11, + Vec3 = 12, + Vec4 = 13, + Mat2 = 14, + Mat3 = 15, + Mat3x4 = 16, + Mat4x3 = 17, + Mat4 = 18, + Bool = 19, + String = 20, + Path = 21, + FixedString = 22, + LSString = 23, + ULongLong = 24, + ScratchBuffer = 25, + // Seems to be unused? + Long = 26, + Int8 = 27, + TranslatedString = 28, + WString = 29, + LSWString = 30, + UUID = 31, + Int64 = 32, + TranslatedFSString = 33, + // Last supported datatype, always keep this one at the end + Max = TranslatedFSString +}; + +public static class AttributeTypeExtensions { - public enum DataType + public static int GetRows(this AttributeType type) { - DT_None = 0, - DT_Byte = 1, - DT_Short = 2, - DT_UShort = 3, - DT_Int = 4, - DT_UInt = 5, - DT_Float = 6, - DT_Double = 7, - DT_IVec2 = 8, - DT_IVec3 = 9, - DT_IVec4 = 10, - DT_Vec2 = 11, - DT_Vec3 = 12, - DT_Vec4 = 13, - DT_Mat2 = 14, - DT_Mat3 = 15, - DT_Mat3x4 = 16, - DT_Mat4x3 = 17, - DT_Mat4 = 18, - DT_Bool = 19, - DT_String = 20, - DT_Path = 21, - DT_FixedString = 22, - DT_LSString = 23, - DT_ULongLong = 24, - DT_ScratchBuffer = 25, - // Seems to be unused? 
- DT_Long = 26, - DT_Int8 = 27, - DT_TranslatedString = 28, - DT_WString = 29, - DT_LSWString = 30, - DT_UUID = 31, - DT_Int64 = 32, - DT_TranslatedFSString = 33, - // Last supported datatype, always keep this one at the end - DT_Max = DT_TranslatedFSString - }; - - private readonly DataType type = type; + switch (type) + { + case AttributeType.IVec2: + case AttributeType.IVec3: + case AttributeType.IVec4: + case AttributeType.Vec2: + case AttributeType.Vec3: + case AttributeType.Vec4: + return 1; + + case AttributeType.Mat2: + return 2; + + case AttributeType.Mat3: + case AttributeType.Mat3x4: + return 3; + + case AttributeType.Mat4x3: + case AttributeType.Mat4: + return 4; + + default: + throw new NotSupportedException("Data type does not have rows"); + } + } + + public static int GetColumns(this AttributeType type) + { + switch (type) + { + case AttributeType.IVec2: + case AttributeType.Vec2: + case AttributeType.Mat2: + return 2; + + case AttributeType.IVec3: + case AttributeType.Vec3: + case AttributeType.Mat3: + case AttributeType.Mat4x3: + return 3; + + case AttributeType.IVec4: + case AttributeType.Vec4: + case AttributeType.Mat3x4: + case AttributeType.Mat4: + return 4; + + default: + throw new NotSupportedException("Data type does not have columns"); + } + } + + public static bool IsNumeric(this AttributeType type) + { + return type == AttributeType.Byte + || type == AttributeType.Short + || type == AttributeType.Short + || type == AttributeType.Int + || type == AttributeType.UInt + || type == AttributeType.Float + || type == AttributeType.Double + || type == AttributeType.ULongLong + || type == AttributeType.Long + || type == AttributeType.Int8; + } +} + +public class NodeAttribute(AttributeType type) +{ + private readonly AttributeType type = type; private object value; + public int? 
Line = null; - public DataType Type + public AttributeType Type { get { return type; } } @@ -143,21 +214,21 @@ public string AsString(NodeSerializationSettings settings) { switch (type) { - case DataType.DT_ScratchBuffer: + case AttributeType.ScratchBuffer: // ScratchBuffer is a special case, as its stored as byte[] and ToString() doesn't really do what we want return Convert.ToBase64String((byte[])value); - case DataType.DT_IVec2: - case DataType.DT_IVec3: - case DataType.DT_IVec4: + case AttributeType.IVec2: + case AttributeType.IVec3: + case AttributeType.IVec4: return String.Join(" ", new List((int[])value).ConvertAll(i => i.ToString()).ToArray()); - case DataType.DT_Vec2: - case DataType.DT_Vec3: - case DataType.DT_Vec4: + case AttributeType.Vec2: + case AttributeType.Vec3: + case AttributeType.Vec4: return String.Join(" ", new List((float[])value).ConvertAll(i => i.ToString()).ToArray()); - case DataType.DT_UUID: + case AttributeType.UUID: if (settings.ByteSwapGuids) { return ByteSwapGuid((Guid)value).ToString(); @@ -172,77 +243,48 @@ public string AsString(NodeSerializationSettings settings) } } - public int GetRows() + public Guid AsGuid(NodeSerializationSettings settings) { - switch (this.type) - { - case DataType.DT_IVec2: - case DataType.DT_IVec3: - case DataType.DT_IVec4: - case DataType.DT_Vec2: - case DataType.DT_Vec3: - case DataType.DT_Vec4: - return 1; - - case DataType.DT_Mat2: - return 2; - - case DataType.DT_Mat3: - case DataType.DT_Mat3x4: - return 3; - - case DataType.DT_Mat4x3: - case DataType.DT_Mat4: - return 4; + return AsGuid(settings.ByteSwapGuids); + } - default: - throw new NotSupportedException("Data type does not have rows"); - } + public Guid AsGuid() + { + return AsGuid(true); } - public int GetColumns() + public Guid AsGuid(bool byteSwapGuids) { - switch (this.type) + switch (type) { - case DataType.DT_IVec2: - case DataType.DT_Vec2: - case DataType.DT_Mat2: - return 2; + case AttributeType.UUID: + return (Guid)value; - case DataType.DT_IVec3: - case DataType.DT_Vec3: - case DataType.DT_Mat3: - case DataType.DT_Mat4x3: - return 3; - - case DataType.DT_IVec4: - case DataType.DT_Vec4: - case DataType.DT_Mat3x4: - case DataType.DT_Mat4: - return 4; + case AttributeType.String: + case AttributeType.FixedString: + case AttributeType.LSString: + if (byteSwapGuids) + { + return ByteSwapGuid(Guid.Parse((string)value)); + } + else + { + return Guid.Parse((string)value); + } default: - throw new NotSupportedException("Data type does not have columns"); + throw new NotSupportedException("Type not convertible to GUID"); } } - public bool IsNumeric() + public void FromString(string str, NodeSerializationSettings settings) { - return this.type == DataType.DT_Byte - || this.type == DataType.DT_Short - || this.type == DataType.DT_Short - || this.type == DataType.DT_Int - || this.type == DataType.DT_UInt - || this.type == DataType.DT_Float - || this.type == DataType.DT_Double - || this.type == DataType.DT_ULongLong - || this.type == DataType.DT_Long - || this.type == DataType.DT_Int8; + value = ParseFromString(str, type, settings); } - public void FromString(string str, NodeSerializationSettings settings) + public static object ParseFromString(string str, AttributeType type, NodeSerializationSettings settings) { - if (IsNumeric()) + if (type.IsNumeric()) { // Workaround: Some XML files use empty strings, instead of "0" for zero values. 
if (str == "") @@ -256,46 +298,39 @@ public void FromString(string str, NodeSerializationSettings settings) } } - switch (this.type) + switch (type) { - case DataType.DT_None: + case AttributeType.None: // This is a null type, cannot have a value - break; + return null; - case DataType.DT_Byte: - value = Convert.ToByte(str); - break; + case AttributeType.Byte: + return Convert.ToByte(str); - case DataType.DT_Short: - value = Convert.ToInt16(str); - break; + case AttributeType.Short: + return Convert.ToInt16(str); - case DataType.DT_UShort: - value = Convert.ToUInt16(str); - break; + case AttributeType.UShort: + return Convert.ToUInt16(str); - case DataType.DT_Int: - value = Convert.ToInt32(str); - break; + case AttributeType.Int: + return Convert.ToInt32(str); - case DataType.DT_UInt: - value = Convert.ToUInt32(str); - break; + case AttributeType.UInt: + return Convert.ToUInt32(str); - case DataType.DT_Float: - value = Convert.ToSingle(str); - break; + case AttributeType.Float: + return Convert.ToSingle(str); - case DataType.DT_Double: - value = Convert.ToDouble(str); - break; + case AttributeType.Double: + return Convert.ToDouble(str); - case DataType.DT_IVec2: - case DataType.DT_IVec3: - case DataType.DT_IVec4: + case AttributeType.IVec2: + case AttributeType.IVec3: + case AttributeType.IVec4: { string[] nums = str.Split(' '); - int length = GetColumns(); + int length = type.GetColumns(); if (length != nums.Length) throw new FormatException(String.Format("A vector of length {0} was expected, got {1}", length, nums.Length)); @@ -303,16 +338,15 @@ public void FromString(string str, NodeSerializationSettings settings) for (int i = 0; i < length; i++) vec[i] = int.Parse(nums[i]); - value = vec; - break; + return vec; } - case DataType.DT_Vec2: - case DataType.DT_Vec3: - case DataType.DT_Vec4: + case AttributeType.Vec2: + case AttributeType.Vec3: + case AttributeType.Vec4: { string[] nums = str.Split(' '); - int length = GetColumns(); + int length = type.GetColumns(); if (length != nums.Length) throw new FormatException(String.Format("A vector of length {0} was expected, got {1}", length, nums.Length)); @@ -320,83 +354,80 @@ public void FromString(string str, NodeSerializationSettings settings) for (int i = 0; i < length; i++) vec[i] = float.Parse(nums[i]); - value = vec; - break; + return vec; } - case DataType.DT_Mat2: - case DataType.DT_Mat3: - case DataType.DT_Mat3x4: - case DataType.DT_Mat4x3: - case DataType.DT_Mat4: + case AttributeType.Mat2: + case AttributeType.Mat3: + case AttributeType.Mat3x4: + case AttributeType.Mat4x3: + case AttributeType.Mat4: var mat = Matrix.Parse(str); - if (mat.cols != GetColumns() || mat.rows != GetRows()) + if (mat.cols != type.GetColumns() || mat.rows != type.GetRows()) throw new FormatException("Invalid column/row count for matrix"); - value = mat; - break; - - case DataType.DT_Bool: - if (str == "0") value = false; - else if (str == "1") value = true; - else value = Convert.ToBoolean(str); - break; - - case DataType.DT_String: - case DataType.DT_Path: - case DataType.DT_FixedString: - case DataType.DT_LSString: - case DataType.DT_WString: - case DataType.DT_LSWString: - value = str; - break; - - case DataType.DT_TranslatedString: - // We'll only set the value part of the translated string, not the TranslatedStringKey / Handle part - // That can be changed separately via attribute.Value.Handle - value ??= new TranslatedString(); - - ((TranslatedString)value).Value = str; - break; - - case DataType.DT_TranslatedFSString: - // We'll only set the value 
part of the translated string, not the TranslatedStringKey / Handle part - // That can be changed separately via attribute.Value.Handle - value ??= new TranslatedFSString(); - - ((TranslatedFSString)value).Value = str; - break; - - case DataType.DT_ULongLong: - value = Convert.ToUInt64(str); - break; - - case DataType.DT_ScratchBuffer: - value = Convert.FromBase64String(str); - break; - - case DataType.DT_Long: - case DataType.DT_Int64: - value = Convert.ToInt64(str); - break; - - case DataType.DT_Int8: - value = Convert.ToSByte(str); - break; - - case DataType.DT_UUID: + return mat; + + case AttributeType.Bool: + if (str == "0") return false; + else if (str == "1") return true; + else return Convert.ToBoolean(str); + + case AttributeType.String: + case AttributeType.Path: + case AttributeType.FixedString: + case AttributeType.LSString: + case AttributeType.WString: + case AttributeType.LSWString: + return str; + + case AttributeType.TranslatedString: + { + // We'll only set the value part of the translated string, not the TranslatedStringKey / Handle part + // That can be changed separately via attribute.Value.Handle + var value = new TranslatedString + { + Value = str + }; + return value; + } + + case AttributeType.TranslatedFSString: + { + // We'll only set the value part of the translated string, not the TranslatedStringKey / Handle part + // That can be changed separately via attribute.Value.Handle + var value = new TranslatedFSString + { + Value = str + }; + return value; + } + + case AttributeType.ULongLong: + return Convert.ToUInt64(str); + + case AttributeType.ScratchBuffer: + return Convert.FromBase64String(str); + + case AttributeType.Long: + case AttributeType.Int64: + return Convert.ToInt64(str); + + case AttributeType.Int8: + return Convert.ToSByte(str); + + case AttributeType.UUID: if (settings.ByteSwapGuids) { - value = ByteSwapGuid(new Guid(str)); + return ByteSwapGuid(new Guid(str)); } else { - value = new Guid(str); + return new Guid(str); } - break; default: // This should not happen! 
- throw new NotImplementedException(String.Format("FromString() not implemented for type {0}", this.type)); + throw new NotImplementedException(String.Format("FromString() not implemented for type {0}", type)); } } } diff --git a/LSLib/LS/PackageWriter.cs b/LSLib/LS/PackageWriter.cs index 904f10f2..0ff61f65 100644 --- a/LSLib/LS/PackageWriter.cs +++ b/LSLib/LS/PackageWriter.cs @@ -32,7 +32,11 @@ private PackageBuildTransientFile WriteFile(PackageBuildInputFile input) var compression = Build.Compression; var compressionLevel = Build.CompressionLevel; - if (input.Path.EndsWith(".gts") || input.Path.EndsWith(".gtp") || inputStream.Length == 0) + if (input.Path.EndsWith(".gts") + || input.Path.EndsWith(".gtp") + || input.Path.EndsWith(".wem") + || input.Path.EndsWith(".bnk") + || inputStream.Length == 0) { compression = CompressionMethod.None; compressionLevel = LSCompressionLevel.Fast; diff --git a/LSLib/LS/Resource.cs b/LSLib/LS/Resource.cs index 52fd0177..e57e316e 100644 --- a/LSLib/LS/Resource.cs +++ b/LSLib/LS/Resource.cs @@ -84,80 +84,80 @@ public struct LSBHeader public static class AttributeTypeMaps { - public readonly static Dictionary TypeToId = new() + public readonly static Dictionary TypeToId = new() { - { "None", NodeAttribute.DataType.DT_None }, - { "uint8", NodeAttribute.DataType.DT_Byte }, - { "int16", NodeAttribute.DataType.DT_Short }, - { "uint16", NodeAttribute.DataType.DT_UShort }, - { "int32", NodeAttribute.DataType.DT_Int }, - { "uint32", NodeAttribute.DataType.DT_UInt }, - { "float", NodeAttribute.DataType.DT_Float }, - { "double", NodeAttribute.DataType.DT_Double }, - { "ivec2", NodeAttribute.DataType.DT_IVec2 }, - { "ivec3", NodeAttribute.DataType.DT_IVec3 }, - { "ivec4", NodeAttribute.DataType.DT_IVec4 }, - { "fvec2", NodeAttribute.DataType.DT_Vec2 }, - { "fvec3", NodeAttribute.DataType.DT_Vec3 }, - { "fvec4", NodeAttribute.DataType.DT_Vec4 }, - { "mat2x2", NodeAttribute.DataType.DT_Mat2 }, - { "mat3x3", NodeAttribute.DataType.DT_Mat3 }, - { "mat3x4", NodeAttribute.DataType.DT_Mat3x4 }, - { "mat4x3", NodeAttribute.DataType.DT_Mat4x3 }, - { "mat4x4", NodeAttribute.DataType.DT_Mat4 }, - { "bool", NodeAttribute.DataType.DT_Bool }, - { "string", NodeAttribute.DataType.DT_String }, - { "path", NodeAttribute.DataType.DT_Path }, - { "FixedString", NodeAttribute.DataType.DT_FixedString }, - { "LSString", NodeAttribute.DataType.DT_LSString }, - { "uint64", NodeAttribute.DataType.DT_ULongLong }, - { "ScratchBuffer", NodeAttribute.DataType.DT_ScratchBuffer }, - { "old_int64", NodeAttribute.DataType.DT_Long }, - { "int8", NodeAttribute.DataType.DT_Int8 }, - { "TranslatedString", NodeAttribute.DataType.DT_TranslatedString }, - { "WString", NodeAttribute.DataType.DT_WString }, - { "LSWString", NodeAttribute.DataType.DT_LSWString }, - { "guid", NodeAttribute.DataType.DT_UUID }, - { "int64", NodeAttribute.DataType.DT_Int64 }, - { "TranslatedFSString", NodeAttribute.DataType.DT_TranslatedFSString }, + { "None", AttributeType.None }, + { "uint8", AttributeType.Byte }, + { "int16", AttributeType.Short }, + { "uint16", AttributeType.UShort }, + { "int32", AttributeType.Int }, + { "uint32", AttributeType.UInt }, + { "float", AttributeType.Float }, + { "double", AttributeType.Double }, + { "ivec2", AttributeType.IVec2 }, + { "ivec3", AttributeType.IVec3 }, + { "ivec4", AttributeType.IVec4 }, + { "fvec2", AttributeType.Vec2 }, + { "fvec3", AttributeType.Vec3 }, + { "fvec4", AttributeType.Vec4 }, + { "mat2x2", AttributeType.Mat2 }, + { "mat3x3", AttributeType.Mat3 }, + { "mat3x4", 
AttributeType.Mat3x4 }, + { "mat4x3", AttributeType.Mat4x3 }, + { "mat4x4", AttributeType.Mat4 }, + { "bool", AttributeType.Bool }, + { "string", AttributeType.String }, + { "path", AttributeType.Path }, + { "FixedString", AttributeType.FixedString }, + { "LSString", AttributeType.LSString }, + { "uint64", AttributeType.ULongLong }, + { "ScratchBuffer", AttributeType.ScratchBuffer }, + { "old_int64", AttributeType.Long }, + { "int8", AttributeType.Int8 }, + { "TranslatedString", AttributeType.TranslatedString }, + { "WString", AttributeType.WString }, + { "LSWString", AttributeType.LSWString }, + { "guid", AttributeType.UUID }, + { "int64", AttributeType.Int64 }, + { "TranslatedFSString", AttributeType.TranslatedFSString }, }; - public readonly static Dictionary IdToType = new() + public readonly static Dictionary IdToType = new() { - { NodeAttribute.DataType.DT_None, "None" }, - { NodeAttribute.DataType.DT_Byte, "uint8" }, - { NodeAttribute.DataType.DT_Short, "int16" }, - { NodeAttribute.DataType.DT_UShort, "uint16" }, - { NodeAttribute.DataType.DT_Int, "int32" }, - { NodeAttribute.DataType.DT_UInt, "uint32" }, - { NodeAttribute.DataType.DT_Float, "float" }, - { NodeAttribute.DataType.DT_Double, "double" }, - { NodeAttribute.DataType.DT_IVec2, "ivec2" }, - { NodeAttribute.DataType.DT_IVec3, "ivec3" }, - { NodeAttribute.DataType.DT_IVec4, "ivec4" }, - { NodeAttribute.DataType.DT_Vec2, "fvec2" }, - { NodeAttribute.DataType.DT_Vec3, "fvec3" }, - { NodeAttribute.DataType.DT_Vec4, "fvec4" }, - { NodeAttribute.DataType.DT_Mat2, "mat2x2" }, - { NodeAttribute.DataType.DT_Mat3, "mat3x3" }, - { NodeAttribute.DataType.DT_Mat3x4, "mat3x4" }, - { NodeAttribute.DataType.DT_Mat4x3, "mat4x3" }, - { NodeAttribute.DataType.DT_Mat4, "mat4x4" }, - { NodeAttribute.DataType.DT_Bool, "bool" }, - { NodeAttribute.DataType.DT_String, "string" }, - { NodeAttribute.DataType.DT_Path, "path" }, - { NodeAttribute.DataType.DT_FixedString, "FixedString" }, - { NodeAttribute.DataType.DT_LSString, "LSString" }, - { NodeAttribute.DataType.DT_ULongLong, "uint64" }, - { NodeAttribute.DataType.DT_ScratchBuffer, "ScratchBuffer" }, - { NodeAttribute.DataType.DT_Long, "old_int64" }, - { NodeAttribute.DataType.DT_Int8, "int8" }, - { NodeAttribute.DataType.DT_TranslatedString, "TranslatedString" }, - { NodeAttribute.DataType.DT_WString, "WString" }, - { NodeAttribute.DataType.DT_LSWString, "LSWString" }, - { NodeAttribute.DataType.DT_UUID, "guid" }, - { NodeAttribute.DataType.DT_Int64, "int64" }, - { NodeAttribute.DataType.DT_TranslatedFSString, "TranslatedFSString" }, + { AttributeType.None, "None" }, + { AttributeType.Byte, "uint8" }, + { AttributeType.Short, "int16" }, + { AttributeType.UShort, "uint16" }, + { AttributeType.Int, "int32" }, + { AttributeType.UInt, "uint32" }, + { AttributeType.Float, "float" }, + { AttributeType.Double, "double" }, + { AttributeType.IVec2, "ivec2" }, + { AttributeType.IVec3, "ivec3" }, + { AttributeType.IVec4, "ivec4" }, + { AttributeType.Vec2, "fvec2" }, + { AttributeType.Vec3, "fvec3" }, + { AttributeType.Vec4, "fvec4" }, + { AttributeType.Mat2, "mat2x2" }, + { AttributeType.Mat3, "mat3x3" }, + { AttributeType.Mat3x4, "mat3x4" }, + { AttributeType.Mat4x3, "mat4x3" }, + { AttributeType.Mat4, "mat4x4" }, + { AttributeType.Bool, "bool" }, + { AttributeType.String, "string" }, + { AttributeType.Path, "path" }, + { AttributeType.FixedString, "FixedString" }, + { AttributeType.LSString, "LSString" }, + { AttributeType.ULongLong, "uint64" }, + { AttributeType.ScratchBuffer, "ScratchBuffer" }, + { 
AttributeType.Long, "old_int64" }, + { AttributeType.Int8, "int8" }, + { AttributeType.TranslatedString, "TranslatedString" }, + { AttributeType.WString, "WString" }, + { AttributeType.LSWString, "LSWString" }, + { AttributeType.UUID, "guid" }, + { AttributeType.Int64, "int64" }, + { AttributeType.TranslatedFSString, "TranslatedFSString" }, }; } @@ -183,6 +183,7 @@ public class Node public Node Parent; public Dictionary Attributes = []; public Dictionary> Children = []; + public int? Line = null; public int ChildCount { diff --git a/LSLib/LS/Resources/LSB/LSBReader.cs b/LSLib/LS/Resources/LSB/LSBReader.cs index 28ecc311..9feda7f6 100644 --- a/LSLib/LS/Resources/LSB/LSBReader.cs +++ b/LSLib/LS/Resources/LSB/LSBReader.cs @@ -71,10 +71,10 @@ private void ReadNode(Node node) { UInt32 attrNameId = reader.ReadUInt32(); UInt32 attrTypeId = reader.ReadUInt32(); - if (attrTypeId > (int)NodeAttribute.DataType.DT_Max) + if (attrTypeId > (int)AttributeType.Max) throw new InvalidFormatException(String.Format("Unsupported attribute data type: {0}", attrTypeId)); - node.Attributes[staticStrings[attrNameId]] = ReadAttribute((NodeAttribute.DataType)attrTypeId); + node.Attributes[staticStrings[attrNameId]] = ReadAttribute((AttributeType)attrTypeId); } for (UInt32 i = 0; i < childCount; i++) @@ -88,14 +88,14 @@ private void ReadNode(Node node) } } - private NodeAttribute ReadAttribute(NodeAttribute.DataType type) + private NodeAttribute ReadAttribute(AttributeType type) { switch (type) { - case NodeAttribute.DataType.DT_String: - case NodeAttribute.DataType.DT_Path: - case NodeAttribute.DataType.DT_FixedString: - case NodeAttribute.DataType.DT_LSString: + case AttributeType.String: + case AttributeType.Path: + case AttributeType.FixedString: + case AttributeType.LSString: { var attr = new NodeAttribute(type) { @@ -104,8 +104,8 @@ private NodeAttribute ReadAttribute(NodeAttribute.DataType type) return attr; } - case NodeAttribute.DataType.DT_WString: - case NodeAttribute.DataType.DT_LSWString: + case AttributeType.WString: + case AttributeType.LSWString: { var attr = new NodeAttribute(type) { @@ -114,7 +114,7 @@ private NodeAttribute ReadAttribute(NodeAttribute.DataType type) return attr; } - case NodeAttribute.DataType.DT_TranslatedString: + case AttributeType.TranslatedString: { var attr = new NodeAttribute(type); var str = new TranslatedString(); @@ -149,7 +149,7 @@ private NodeAttribute ReadAttribute(NodeAttribute.DataType type) return attr; } - case NodeAttribute.DataType.DT_ScratchBuffer: + case AttributeType.ScratchBuffer: { var attr = new NodeAttribute(type); var bufferLength = reader.ReadInt32(); diff --git a/LSLib/LS/Resources/LSB/LSBWriter.cs b/LSLib/LS/Resources/LSB/LSBWriter.cs index 6df93c9f..0f7493a1 100644 --- a/LSLib/LS/Resources/LSB/LSBWriter.cs +++ b/LSLib/LS/Resources/LSB/LSBWriter.cs @@ -94,19 +94,19 @@ private void WriteAttribute(NodeAttribute attr) { switch (attr.Type) { - case NodeAttribute.DataType.DT_String: - case NodeAttribute.DataType.DT_Path: - case NodeAttribute.DataType.DT_FixedString: - case NodeAttribute.DataType.DT_LSString: + case AttributeType.String: + case AttributeType.Path: + case AttributeType.FixedString: + case AttributeType.LSString: WriteString((string)attr.Value, true); break; - case NodeAttribute.DataType.DT_WString: - case NodeAttribute.DataType.DT_LSWString: + case AttributeType.WString: + case AttributeType.LSWString: WriteWideString((string)attr.Value, true); break; - case NodeAttribute.DataType.DT_TranslatedString: + case AttributeType.TranslatedString: { 
var str = (TranslatedString)attr.Value; if (Version >= 4 && str.Value == null) @@ -122,7 +122,7 @@ private void WriteAttribute(NodeAttribute attr) break; } - case NodeAttribute.DataType.DT_ScratchBuffer: + case AttributeType.ScratchBuffer: { var buffer = (byte[])attr.Value; writer.Write((UInt32)buffer.Length); diff --git a/LSLib/LS/Resources/LSF/LSFReader.cs b/LSLib/LS/Resources/LSF/LSFReader.cs index b929f4e0..0c133911 100644 --- a/LSLib/LS/Resources/LSF/LSFReader.cs +++ b/LSLib/LS/Resources/LSF/LSFReader.cs @@ -448,7 +448,7 @@ private void ReadNode(LSFNodeInfo defn, Node node, BinaryReader attributeReader) while (true) { Values.Position = attribute.DataOffset; - var value = ReadAttribute((NodeAttribute.DataType)attribute.TypeId, attributeReader, attribute.Length); + var value = ReadAttribute((AttributeType)attribute.TypeId, attributeReader, attribute.Length); node.Attributes[Names[attribute.NameIndex][attribute.NameOffset]] = value; #if DEBUG_LSF_SERIALIZATION @@ -467,19 +467,19 @@ private void ReadNode(LSFNodeInfo defn, Node node, BinaryReader attributeReader) } } - private NodeAttribute ReadAttribute(NodeAttribute.DataType type, BinaryReader reader, uint length) + private NodeAttribute ReadAttribute(AttributeType type, BinaryReader reader, uint length) { // LSF and LSB serialize the buffer types differently, so specialized // code is added to the LSB and LSf serializers, and the common code is // available in BinUtils.ReadAttribute() switch (type) { - case NodeAttribute.DataType.DT_String: - case NodeAttribute.DataType.DT_Path: - case NodeAttribute.DataType.DT_FixedString: - case NodeAttribute.DataType.DT_LSString: - case NodeAttribute.DataType.DT_WString: - case NodeAttribute.DataType.DT_LSWString: + case AttributeType.String: + case AttributeType.Path: + case AttributeType.FixedString: + case AttributeType.LSString: + case AttributeType.WString: + case AttributeType.LSWString: { var attr = new NodeAttribute(type) { @@ -488,7 +488,7 @@ private NodeAttribute ReadAttribute(NodeAttribute.DataType type, BinaryReader re return attr; } - case NodeAttribute.DataType.DT_TranslatedString: + case AttributeType.TranslatedString: { var attr = new NodeAttribute(type); var str = new TranslatedString(); @@ -514,7 +514,7 @@ private NodeAttribute ReadAttribute(NodeAttribute.DataType type, BinaryReader re return attr; } - case NodeAttribute.DataType.DT_TranslatedFSString: + case AttributeType.TranslatedFSString: { var attr = new NodeAttribute(type) { @@ -523,7 +523,7 @@ private NodeAttribute ReadAttribute(NodeAttribute.DataType type, BinaryReader re return attr; } - case NodeAttribute.DataType.DT_ScratchBuffer: + case AttributeType.ScratchBuffer: { var attr = new NodeAttribute(type) { diff --git a/LSLib/LS/Resources/LSF/LSFWriter.cs b/LSLib/LS/Resources/LSF/LSFWriter.cs index 795c3eef..57a6cea2 100644 --- a/LSLib/LS/Resources/LSF/LSFWriter.cs +++ b/LSLib/LS/Resources/LSF/LSFWriter.cs @@ -408,16 +408,16 @@ private void WriteAttributeValue(BinaryWriter writer, NodeAttribute attr) { switch (attr.Type) { - case NodeAttribute.DataType.DT_String: - case NodeAttribute.DataType.DT_Path: - case NodeAttribute.DataType.DT_FixedString: - case NodeAttribute.DataType.DT_LSString: - case NodeAttribute.DataType.DT_WString: - case NodeAttribute.DataType.DT_LSWString: + case AttributeType.String: + case AttributeType.Path: + case AttributeType.FixedString: + case AttributeType.LSString: + case AttributeType.WString: + case AttributeType.LSWString: WriteString(writer, (string)attr.Value); break; - case 
NodeAttribute.DataType.DT_TranslatedString: + case AttributeType.TranslatedString: { var ts = (TranslatedString)attr.Value; if (Version >= LSFVersion.VerBG3) @@ -433,14 +433,14 @@ private void WriteAttributeValue(BinaryWriter writer, NodeAttribute attr) break; } - case NodeAttribute.DataType.DT_TranslatedFSString: + case AttributeType.TranslatedFSString: { var fs = (TranslatedFSString)attr.Value; WriteTranslatedFSString(writer, fs); break; } - case NodeAttribute.DataType.DT_ScratchBuffer: + case AttributeType.ScratchBuffer: { var buffer = (byte[])attr.Value; writer.Write(buffer); diff --git a/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs b/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs index 637bbd56..906fa891 100644 --- a/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs +++ b/LSLib/LS/Resources/LSJ/LSJResourceConverter.cs @@ -158,15 +158,15 @@ private NodeAttribute ReadAttribute(JsonReader reader) type = (uint)AttributeTypeMaps.TypeToId[(string)reader.Value]; } - attribute = new NodeAttribute((NodeAttribute.DataType)type); - if (type == (uint)NodeAttribute.DataType.DT_TranslatedString) + attribute = new NodeAttribute((AttributeType)type); + if (type == (uint)AttributeType.TranslatedString) { attribute.Value = new TranslatedString { Handle = handle }; } - else if (type == (uint)NodeAttribute.DataType.DT_TranslatedFSString) + else if (type == (uint)AttributeType.TranslatedFSString) { attribute.Value = new TranslatedFSString { @@ -179,48 +179,48 @@ private NodeAttribute ReadAttribute(JsonReader reader) { switch (attribute.Type) { - case NodeAttribute.DataType.DT_Byte: + case AttributeType.Byte: attribute.Value = Convert.ToByte(reader.Value); break; - case NodeAttribute.DataType.DT_Short: + case AttributeType.Short: attribute.Value = Convert.ToInt16(reader.Value); break; - case NodeAttribute.DataType.DT_UShort: + case AttributeType.UShort: attribute.Value = Convert.ToUInt16(reader.Value); break; - case NodeAttribute.DataType.DT_Int: + case AttributeType.Int: attribute.Value = Convert.ToInt32(reader.Value); break; - case NodeAttribute.DataType.DT_UInt: + case AttributeType.UInt: attribute.Value = Convert.ToUInt32(reader.Value); break; - case NodeAttribute.DataType.DT_Float: + case AttributeType.Float: attribute.Value = Convert.ToSingle(reader.Value); break; - case NodeAttribute.DataType.DT_Double: + case AttributeType.Double: attribute.Value = Convert.ToDouble(reader.Value); break; - case NodeAttribute.DataType.DT_Bool: + case AttributeType.Bool: attribute.Value = Convert.ToBoolean(reader.Value); break; - case NodeAttribute.DataType.DT_String: - case NodeAttribute.DataType.DT_Path: - case NodeAttribute.DataType.DT_FixedString: - case NodeAttribute.DataType.DT_LSString: - case NodeAttribute.DataType.DT_WString: - case NodeAttribute.DataType.DT_LSWString: + case AttributeType.String: + case AttributeType.Path: + case AttributeType.FixedString: + case AttributeType.LSString: + case AttributeType.WString: + case AttributeType.LSWString: attribute.Value = reader.Value.ToString(); break; - case NodeAttribute.DataType.DT_ULongLong: + case AttributeType.ULongLong: if (reader.Value.GetType() == typeof(System.Int64)) attribute.Value = Convert.ToUInt64((long)reader.Value); else if (reader.Value.GetType() == typeof(BigInteger)) @@ -230,20 +230,20 @@ private NodeAttribute ReadAttribute(JsonReader reader) break; // TODO: Not sure if this is the correct format - case NodeAttribute.DataType.DT_ScratchBuffer: + case AttributeType.ScratchBuffer: attribute.Value = 
Convert.FromBase64String(reader.Value.ToString()); break; - case NodeAttribute.DataType.DT_Long: - case NodeAttribute.DataType.DT_Int64: + case AttributeType.Long: + case AttributeType.Int64: attribute.Value = Convert.ToInt64(reader.Value); break; - case NodeAttribute.DataType.DT_Int8: + case AttributeType.Int8: attribute.Value = Convert.ToSByte(reader.Value); break; - case NodeAttribute.DataType.DT_TranslatedString: + case AttributeType.TranslatedString: { attribute.Value ??= new TranslatedString(); @@ -253,7 +253,7 @@ private NodeAttribute ReadAttribute(JsonReader reader) break; } - case NodeAttribute.DataType.DT_TranslatedFSString: + case AttributeType.TranslatedFSString: { attribute.Value ??= new TranslatedFSString(); @@ -265,7 +265,7 @@ private NodeAttribute ReadAttribute(JsonReader reader) break; } - case NodeAttribute.DataType.DT_UUID: + case AttributeType.UUID: if (SerializationSettings.ByteSwapGuids) { attribute.Value = NodeAttribute.ByteSwapGuid(new Guid(reader.Value.ToString())); @@ -276,12 +276,12 @@ private NodeAttribute ReadAttribute(JsonReader reader) } break; - case NodeAttribute.DataType.DT_IVec2: - case NodeAttribute.DataType.DT_IVec3: - case NodeAttribute.DataType.DT_IVec4: + case AttributeType.IVec2: + case AttributeType.IVec3: + case AttributeType.IVec4: { string[] nums = reader.Value.ToString().Split(' '); - int length = attribute.GetColumns(); + int length = attribute.Type.GetColumns(); if (length != nums.Length) throw new FormatException(String.Format("A vector of length {0} was expected, got {1}", length, nums.Length)); @@ -293,12 +293,12 @@ private NodeAttribute ReadAttribute(JsonReader reader) break; } - case NodeAttribute.DataType.DT_Vec2: - case NodeAttribute.DataType.DT_Vec3: - case NodeAttribute.DataType.DT_Vec4: + case AttributeType.Vec2: + case AttributeType.Vec3: + case AttributeType.Vec4: { string[] nums = reader.Value.ToString().Split(' '); - int length = attribute.GetColumns(); + int length = attribute.Type.GetColumns(); if (length != nums.Length) throw new FormatException(String.Format("A vector of length {0} was expected, got {1}", length, nums.Length)); @@ -310,18 +310,18 @@ private NodeAttribute ReadAttribute(JsonReader reader) break; } - case NodeAttribute.DataType.DT_Mat2: - case NodeAttribute.DataType.DT_Mat3: - case NodeAttribute.DataType.DT_Mat3x4: - case NodeAttribute.DataType.DT_Mat4x3: - case NodeAttribute.DataType.DT_Mat4: + case AttributeType.Mat2: + case AttributeType.Mat3: + case AttributeType.Mat3x4: + case AttributeType.Mat4x3: + case AttributeType.Mat4: var mat = Matrix.Parse(reader.Value.ToString()); - if (mat.cols != attribute.GetColumns() || mat.rows != attribute.GetRows()) + if (mat.cols != attribute.Type.GetColumns() || mat.rows != attribute.Type.GetRows()) throw new FormatException("Invalid column/row count for matrix"); attribute.Value = mat; break; - case NodeAttribute.DataType.DT_None: + case AttributeType.None: default: throw new NotImplementedException("Don't know how to unserialize type " + attribute.Type.ToString()); } @@ -330,13 +330,13 @@ private NodeAttribute ReadAttribute(JsonReader reader) { if (attribute != null) { - if (attribute.Type == NodeAttribute.DataType.DT_TranslatedString) + if (attribute.Type == AttributeType.TranslatedString) { attribute.Value ??= new TranslatedString(); ((TranslatedString)attribute.Value).Handle = reader.Value.ToString(); } - else if (attribute.Type == NodeAttribute.DataType.DT_TranslatedFSString) + else if (attribute.Type == AttributeType.TranslatedFSString) { attribute.Value ??= new 
TranslatedFSString(); @@ -638,73 +638,73 @@ private void WriteNode(JsonWriter writer, Node node, JsonSerializer serializer) writer.WriteValue((int)attribute.Value.Type); } - if (attribute.Value.Type != NodeAttribute.DataType.DT_TranslatedString) + if (attribute.Value.Type != AttributeType.TranslatedString) { writer.WritePropertyName("value"); } switch (attribute.Value.Type) { - case NodeAttribute.DataType.DT_Byte: + case AttributeType.Byte: writer.WriteValue(Convert.ToByte(attribute.Value.Value)); break; - case NodeAttribute.DataType.DT_Short: + case AttributeType.Short: writer.WriteValue(Convert.ToInt16(attribute.Value.Value)); break; - case NodeAttribute.DataType.DT_UShort: + case AttributeType.UShort: writer.WriteValue(Convert.ToUInt16(attribute.Value.Value)); break; - case NodeAttribute.DataType.DT_Int: + case AttributeType.Int: writer.WriteValue(Convert.ToInt32(attribute.Value.Value)); break; - case NodeAttribute.DataType.DT_UInt: + case AttributeType.UInt: writer.WriteValue(Convert.ToUInt32(attribute.Value.Value)); break; - case NodeAttribute.DataType.DT_Float: + case AttributeType.Float: writer.WriteValue(Convert.ToSingle(attribute.Value.Value)); break; - case NodeAttribute.DataType.DT_Double: + case AttributeType.Double: writer.WriteValue(Convert.ToDouble(attribute.Value.Value)); break; - case NodeAttribute.DataType.DT_Bool: + case AttributeType.Bool: writer.WriteValue(Convert.ToBoolean(attribute.Value.Value)); break; - case NodeAttribute.DataType.DT_String: - case NodeAttribute.DataType.DT_Path: - case NodeAttribute.DataType.DT_FixedString: - case NodeAttribute.DataType.DT_LSString: - case NodeAttribute.DataType.DT_WString: - case NodeAttribute.DataType.DT_LSWString: + case AttributeType.String: + case AttributeType.Path: + case AttributeType.FixedString: + case AttributeType.LSString: + case AttributeType.WString: + case AttributeType.LSWString: writer.WriteValue(attribute.Value.AsString(SerializationSettings)); break; - case NodeAttribute.DataType.DT_ULongLong: + case AttributeType.ULongLong: writer.WriteValue(Convert.ToUInt64(attribute.Value.Value)); break; // TODO: Not sure if this is the correct format - case NodeAttribute.DataType.DT_ScratchBuffer: + case AttributeType.ScratchBuffer: writer.WriteValue(Convert.ToBase64String((byte[])attribute.Value.Value)); break; - case NodeAttribute.DataType.DT_Long: - case NodeAttribute.DataType.DT_Int64: + case AttributeType.Long: + case AttributeType.Int64: writer.WriteValue(Convert.ToInt64(attribute.Value.Value)); break; - case NodeAttribute.DataType.DT_Int8: + case AttributeType.Int8: writer.WriteValue(Convert.ToSByte(attribute.Value.Value)); break; - case NodeAttribute.DataType.DT_TranslatedString: + case AttributeType.TranslatedString: { var ts = (TranslatedString)attribute.Value.Value; @@ -725,14 +725,14 @@ private void WriteNode(JsonWriter writer, Node node, JsonSerializer serializer) break; } - case NodeAttribute.DataType.DT_TranslatedFSString: + case AttributeType.TranslatedFSString: { var fs = (TranslatedFSString)attribute.Value.Value; WriteTranslatedFSStringInner(writer, fs); break; } - case NodeAttribute.DataType.DT_UUID: + case AttributeType.UUID: if (SerializationSettings.ByteSwapGuids) { writer.WriteValue((NodeAttribute.ByteSwapGuid((Guid)attribute.Value.Value)).ToString()); @@ -744,29 +744,29 @@ private void WriteNode(JsonWriter writer, Node node, JsonSerializer serializer) break; // TODO: haven't seen any vectors/matrices in D:OS JSON files so far - case NodeAttribute.DataType.DT_Vec2: - case 
NodeAttribute.DataType.DT_Vec3: - case NodeAttribute.DataType.DT_Vec4: + case AttributeType.Vec2: + case AttributeType.Vec3: + case AttributeType.Vec4: { var vec = (float[])attribute.Value.Value; writer.WriteValue(String.Join(" ", vec)); break; } - case NodeAttribute.DataType.DT_IVec2: - case NodeAttribute.DataType.DT_IVec3: - case NodeAttribute.DataType.DT_IVec4: + case AttributeType.IVec2: + case AttributeType.IVec3: + case AttributeType.IVec4: { var ivec = (int[])attribute.Value.Value; writer.WriteValue(String.Join(" ", ivec)); break; } - case NodeAttribute.DataType.DT_Mat2: - case NodeAttribute.DataType.DT_Mat3: - case NodeAttribute.DataType.DT_Mat3x4: - case NodeAttribute.DataType.DT_Mat4x3: - case NodeAttribute.DataType.DT_Mat4: + case AttributeType.Mat2: + case AttributeType.Mat3: + case AttributeType.Mat3x4: + case AttributeType.Mat4x3: + case AttributeType.Mat4: { var mat = (Matrix)attribute.Value.Value; var str = ""; @@ -781,7 +781,7 @@ private void WriteNode(JsonWriter writer, Node node, JsonSerializer serializer) break; } - case NodeAttribute.DataType.DT_None: + case AttributeType.None: default: throw new NotImplementedException("Don't know how to serialize type " + attribute.Value.Type.ToString()); } diff --git a/LSLib/LS/Resources/LSX/LSXReader.cs b/LSLib/LS/Resources/LSX/LSXReader.cs index 15c797ee..5f3cfeb6 100644 --- a/LSLib/LS/Resources/LSX/LSXReader.cs +++ b/LSLib/LS/Resources/LSX/LSXReader.cs @@ -11,7 +11,7 @@ public class LSXReader(Stream stream) : IDisposable private Resource resource; private Region currentRegion; private List stack; - private int lastLine, lastColumn; + public int lastLine, lastColumn; private LSXVersion Version = LSXVersion.V3; public NodeSerializationSettings SerializationSettings = new(); private NodeAttribute LastAttribute = null; @@ -130,7 +130,8 @@ private void ReadElement() // New node under the current parent node = new Node { - Parent = stack.Last() + Parent = stack.Last(), + Line = ((IXmlLineInfo)reader).LineNumber }; } @@ -150,11 +151,14 @@ private void ReadElement() } var attrName = reader["id"]; - if (attrTypeId > (int)NodeAttribute.DataType.DT_Max) + if (attrTypeId > (int)AttributeType.Max) throw new InvalidFormatException(String.Format("Unsupported attribute data type: {0}", attrTypeId)); Debug.Assert(attrName != null); - var attr = new NodeAttribute((NodeAttribute.DataType)attrTypeId); + var attr = new NodeAttribute((AttributeType)attrTypeId) + { + Line = ((IXmlLineInfo)reader).LineNumber + }; var attrValue = reader["value"]; if (attrValue != null) @@ -166,16 +170,16 @@ private void ReadElement() // Preallocate value for vector/matrix types switch (attr.Type) { - case NodeAttribute.DataType.DT_Vec2: attr.Value = new float[2]; break; - case NodeAttribute.DataType.DT_Vec3: attr.Value = new float[3]; break; - case NodeAttribute.DataType.DT_Vec4: attr.Value = new float[4]; break; - case NodeAttribute.DataType.DT_Mat2: attr.Value = new float[2*2]; break; - case NodeAttribute.DataType.DT_Mat3: attr.Value = new float[3*3]; break; - case NodeAttribute.DataType.DT_Mat3x4: attr.Value = new float[3*4]; break; - case NodeAttribute.DataType.DT_Mat4: attr.Value = new float[4*4]; break; - case NodeAttribute.DataType.DT_Mat4x3: attr.Value = new float[4*3]; break; - case NodeAttribute.DataType.DT_TranslatedString: break; - case NodeAttribute.DataType.DT_TranslatedFSString: break; + case AttributeType.Vec2: attr.Value = new float[2]; break; + case AttributeType.Vec3: attr.Value = new float[3]; break; + case AttributeType.Vec4: attr.Value = new 
float[4]; break; + case AttributeType.Mat2: attr.Value = new float[2*2]; break; + case AttributeType.Mat3: attr.Value = new float[3*3]; break; + case AttributeType.Mat3x4: attr.Value = new float[3*4]; break; + case AttributeType.Mat4: attr.Value = new float[4*4]; break; + case AttributeType.Mat4x3: attr.Value = new float[4*3]; break; + case AttributeType.TranslatedString: break; + case AttributeType.TranslatedFSString: break; default: throw new Exception($"Attribute of type {attr.Type} should have an inline value!"); } @@ -183,7 +187,7 @@ private void ReadElement() LastAttribute = attr; } - if (attr.Type == NodeAttribute.DataType.DT_TranslatedString) + if (attr.Type == AttributeType.TranslatedString) { attr.Value ??= new TranslatedString(); @@ -196,7 +200,7 @@ private void ReadElement() ts.Version = UInt16.Parse(reader["version"]); } } - else if (attr.Type == NodeAttribute.DataType.DT_TranslatedFSString) + else if (attr.Type == AttributeType.TranslatedFSString) { var fs = ((TranslatedFSString)attr.Value); ReadTranslatedFSString(fs); diff --git a/LSLib/LS/Resources/LSX/LSXWriter.cs b/LSLib/LS/Resources/LSX/LSXWriter.cs index 728a5236..76b0f83a 100644 --- a/LSLib/LS/Resources/LSX/LSXWriter.cs +++ b/LSLib/LS/Resources/LSX/LSXWriter.cs @@ -115,7 +115,7 @@ private void WriteNode(Node node) writer.WriteAttributeString("type", ((int)attribute.Value.Type).ToString()); } - if (attribute.Value.Type == NodeAttribute.DataType.DT_TranslatedString) + if (attribute.Value.Type == AttributeType.TranslatedString) { var ts = ((TranslatedString)attribute.Value.Value); writer.WriteAttributeString("handle", ts.Handle); @@ -128,7 +128,7 @@ private void WriteNode(Node node) writer.WriteAttributeString("version", ts.Version.ToString()); } } - else if (attribute.Value.Type == NodeAttribute.DataType.DT_TranslatedFSString) + else if (attribute.Value.Type == AttributeType.TranslatedFSString) { var fs = ((TranslatedFSString)attribute.Value.Value); writer.WriteAttributeString("value", fs.Value); From 76963d3ee2c2c58b4d190c6b760cd4f7feca6009 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sat, 24 Feb 2024 10:58:58 +0100 Subject: [PATCH 084/139] Remove references to dead projects --- LSTools.sln | 45 --------------------------------------------- 1 file changed, 45 deletions(-) diff --git a/LSTools.sln b/LSTools.sln index ce091c87..174e0718 100644 --- a/LSTools.sln +++ b/LSTools.sln @@ -31,15 +31,6 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "VTexTool", "VTexTool\VTexTo {D8B26B12-E45C-47EA-88F7-56628EB2CCD1} = {D8B26B12-E45C-47EA-88F7-56628EB2CCD1} EndProjectSection EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LSLibSearch", "LSLibSearch\LSLibSearch.csproj", "{0DD93214-DCD4-4588-A33F-E4AF3100361A}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "StatFastParser", "StatFastParser\StatFastParser.csproj", "{072FD9B6-F2C5-48BC-9209-F2A8F7500345}" - ProjectSection(ProjectDependencies) = postProject - {46372C50-4288-4B8E-AF21-C934560600E0} = {46372C50-4288-4B8E-AF21-C934560600E0} - EndProjectSection -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LSLibSearchIndexer", "LSLibSearchIndexer\LSLibSearchIndexer.csproj", "{2E23150D-244A-4950-B50C-135225924374}" -EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{6DF990BA-71FD-4DAE-9BAD-53B4B9097C13}" ProjectSection(SolutionItems) = preProject .editorconfig = .editorconfig @@ -172,42 +163,6 @@ Global {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.RelWithDebInfo|Any CPU.Build.0 = 
Release|Any CPU {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.RelWithDebInfo|x64.Build.0 = Release|Any CPU - {0DD93214-DCD4-4588-A33F-E4AF3100361A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {0DD93214-DCD4-4588-A33F-E4AF3100361A}.Debug|Any CPU.Build.0 = Debug|Any CPU - {0DD93214-DCD4-4588-A33F-E4AF3100361A}.Debug|x64.ActiveCfg = Debug|Any CPU - {0DD93214-DCD4-4588-A33F-E4AF3100361A}.Debug|x64.Build.0 = Debug|Any CPU - {0DD93214-DCD4-4588-A33F-E4AF3100361A}.Release|Any CPU.ActiveCfg = Release|Any CPU - {0DD93214-DCD4-4588-A33F-E4AF3100361A}.Release|Any CPU.Build.0 = Release|Any CPU - {0DD93214-DCD4-4588-A33F-E4AF3100361A}.Release|x64.ActiveCfg = Release|Any CPU - {0DD93214-DCD4-4588-A33F-E4AF3100361A}.Release|x64.Build.0 = Release|Any CPU - {0DD93214-DCD4-4588-A33F-E4AF3100361A}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any CPU - {0DD93214-DCD4-4588-A33F-E4AF3100361A}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU - {0DD93214-DCD4-4588-A33F-E4AF3100361A}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU - {0DD93214-DCD4-4588-A33F-E4AF3100361A}.RelWithDebInfo|x64.Build.0 = Release|Any CPU - {072FD9B6-F2C5-48BC-9209-F2A8F7500345}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {072FD9B6-F2C5-48BC-9209-F2A8F7500345}.Debug|Any CPU.Build.0 = Debug|Any CPU - {072FD9B6-F2C5-48BC-9209-F2A8F7500345}.Debug|x64.ActiveCfg = Debug|Any CPU - {072FD9B6-F2C5-48BC-9209-F2A8F7500345}.Debug|x64.Build.0 = Debug|Any CPU - {072FD9B6-F2C5-48BC-9209-F2A8F7500345}.Release|Any CPU.ActiveCfg = Release|Any CPU - {072FD9B6-F2C5-48BC-9209-F2A8F7500345}.Release|Any CPU.Build.0 = Release|Any CPU - {072FD9B6-F2C5-48BC-9209-F2A8F7500345}.Release|x64.ActiveCfg = Release|Any CPU - {072FD9B6-F2C5-48BC-9209-F2A8F7500345}.Release|x64.Build.0 = Release|Any CPU - {072FD9B6-F2C5-48BC-9209-F2A8F7500345}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any CPU - {072FD9B6-F2C5-48BC-9209-F2A8F7500345}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU - {072FD9B6-F2C5-48BC-9209-F2A8F7500345}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU - {072FD9B6-F2C5-48BC-9209-F2A8F7500345}.RelWithDebInfo|x64.Build.0 = Release|Any CPU - {2E23150D-244A-4950-B50C-135225924374}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {2E23150D-244A-4950-B50C-135225924374}.Debug|Any CPU.Build.0 = Debug|Any CPU - {2E23150D-244A-4950-B50C-135225924374}.Debug|x64.ActiveCfg = Debug|Any CPU - {2E23150D-244A-4950-B50C-135225924374}.Debug|x64.Build.0 = Debug|Any CPU - {2E23150D-244A-4950-B50C-135225924374}.Release|Any CPU.ActiveCfg = Release|Any CPU - {2E23150D-244A-4950-B50C-135225924374}.Release|Any CPU.Build.0 = Release|Any CPU - {2E23150D-244A-4950-B50C-135225924374}.Release|x64.ActiveCfg = Release|Any CPU - {2E23150D-244A-4950-B50C-135225924374}.Release|x64.Build.0 = Release|Any CPU - {2E23150D-244A-4950-B50C-135225924374}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any CPU - {2E23150D-244A-4950-B50C-135225924374}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU - {2E23150D-244A-4950-B50C-135225924374}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU - {2E23150D-244A-4950-B50C-135225924374}.RelWithDebInfo|x64.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE From 3def3c6629c886190e68c6fccf43a88ff43a59e2 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Thu, 7 Mar 2024 21:36:37 +0100 Subject: [PATCH 085/139] Remove legacy physics code --- LSLibNative/LSLibNative.vcxproj | 6 +- LSLibNative/LSLibNative.vcxproj.filters | 6 - LSLibNative/physics.cpp | 
173 ------------------------ LSLibNative/physics.h | 91 ------------- 4 files changed, 2 insertions(+), 274 deletions(-) delete mode 100644 LSLibNative/physics.cpp delete mode 100644 LSLibNative/physics.h diff --git a/LSLibNative/LSLibNative.vcxproj b/LSLibNative/LSLibNative.vcxproj index 2d072f91..a265ce6a 100644 --- a/LSLibNative/LSLibNative.vcxproj +++ b/LSLibNative/LSLibNative.vcxproj @@ -72,6 +72,7 @@ WIN32;_DEBUG;%(PreprocessorDefinitions) NotUsing ../external/bullet-2.77/src;../external/bullet-2.77/Extras/Serialize + stdcpp20 true @@ -86,6 +87,7 @@ WIN32;NDEBUG;%(PreprocessorDefinitions) NotUsing ../external/bullet-2.77/src;../external/bullet-2.77/Extras/Serialize + stdcpp20 true @@ -122,7 +124,6 @@ - @@ -152,9 +153,6 @@ false false - - - \ No newline at end of file diff --git a/LSLibNative/LSLibNative.vcxproj.filters b/LSLibNative/LSLibNative.vcxproj.filters index 8e60d1e1..5e964d14 100644 --- a/LSLibNative/LSLibNative.vcxproj.filters +++ b/LSLibNative/LSLibNative.vcxproj.filters @@ -21,9 +21,6 @@ - - Header Files - Header Files @@ -53,9 +50,6 @@ Source Files - - Source Files - Source Files\lz4 diff --git a/LSLibNative/physics.cpp b/LSLibNative/physics.cpp deleted file mode 100644 index b279ba67..00000000 --- a/LSLibNative/physics.cpp +++ /dev/null @@ -1,173 +0,0 @@ -#include "physics.h" - -#if defined(HAS_BULLET) -#pragma managed(push, off) -#include -#include -#include -#include "BulletWorldImporter/btBulletWorldImporter.h" -#include "BulletCollision/CollisionShapes/btShapeHull.h" -#pragma comment(lib, "BulletCollision.lib") -#pragma comment(lib, "BulletDynamics.lib") -#pragma comment(lib, "BulletWorldImporter.lib") -#pragma comment(lib, "LinearMath.lib") -#pragma managed(pop) - -namespace LSLib { - namespace Native { - - PhysicsAssetExporter::PhysicsAssetExporter() - : vertices_(nullptr) - { - } - - void PhysicsAssetExporter::exportBullet(ExporterOptions ^ options) - { - pin_ptr inertia(&options->Inertia[options->Inertia->GetLowerBound(0)]); - btCollisionShape * shape = nullptr; - - MeshShapeOptions ^ meshOpts = dynamic_cast(options); - if (meshOpts) - { - pin_ptr vertices(&meshOpts->Vertices[meshOpts->Vertices->GetLowerBound(0)]); - pin_ptr indices(&meshOpts->Indices[meshOpts->Indices->GetLowerBound(0)]); - vertices_ = new btTriangleIndexVertexArray( - meshOpts->Indices->Length / 3, indices, sizeof(int)* 3, - meshOpts->Vertices->Length, reinterpret_cast(vertices), sizeof(float)* 3 - ); - - switch (meshOpts->Type) - { - case MESH_CONCAVE: - shape = new btBvhTriangleMeshShape(vertices_, true, true); - break; - - case MESH_CONVEX_HULL: - { - auto * hull = new btConvexHullShape(); - shape = hull; - - for (int i = 0; i < meshOpts->Vertices->Length; i++) - hull->addPoint(*reinterpret_cast(&vertices[i * 3])); - break; - } - - case MESH_SIMPLIFIED_CONVEX_HULL: - { - auto triShape = new btConvexTriangleMeshShape(vertices_); - btShapeHull * hull = new btShapeHull(triShape); - btScalar margin = triShape->getMargin(); - hull->buildHull(margin); - shape = new btConvexHullShape((const btScalar *)hull->getVertexPointer(), hull->numVertices()); - break; - } - - default: - throw gcnew Exception("Unsupported physics mesh format"); - - } - } - else - { - SphereShapeOptions ^ sphereOpts = dynamic_cast(options); - BoxShapeOptions ^ boxOpts = dynamic_cast(options); - if (sphereOpts) - { - shape = new btSphereShape(sphereOpts->Radius); - } - else if (boxOpts) - { - pin_ptr extents(&boxOpts->Extents[boxOpts->Extents->GetLowerBound(0)]); - shape = createBoxShape(extents); - } - } - - if (!shape) - 
throw gcnew Exception("Invalid physics shape specification"); - - shape->setMargin(options->Shape->Margin); - - pin_ptr translation(&options->Translation[options->Translation->GetLowerBound(0)]); - btRigidBody * rb = createRigidBody(options->Mass, shape, translation, options->AngularDamping, options->LinearDamping, options->Friction, options->Restitution, inertia); - - int collisionFlags = 0; - if (options->Flags & EF_STATIC_OBJECT) - collisionFlags |= btCollisionObject::CF_STATIC_OBJECT; - if (options->Flags & EF_KINEMATIC_OBJECT) - collisionFlags |= btCollisionObject::CF_KINEMATIC_OBJECT; - if (options->Flags & EF_NO_CONTACT_RESPONSE) - collisionFlags |= btCollisionObject::CF_NO_CONTACT_RESPONSE; - if (options->Flags & EF_CHARACTER_OBJECT) - collisionFlags |= btCollisionObject::CF_CHARACTER_OBJECT; - rb->setCollisionFlags(collisionFlags); - - int rbFlags = 0; - if (options->Flags & EF_DISABLE_WORLD_GRAVITY) - rbFlags |= BT_DISABLE_WORLD_GRAVITY; - rb->setFlags(rbFlags); - - auto outStr = options->OutputPath; - auto path = msclr::interop::marshal_as(outStr); - exportWorld(rb, path, (options->Flags & EF_STEP_SIMULATION) == EF_STEP_SIMULATION); - - delete shape; - delete rb; - delete vertices_; - vertices_ = nullptr; - } - - // C++/CLR doesn't really work well with SSE-optimized code / aligned Bullet objects, so we need to move - // those calls into separate native functions -#pragma managed(push, off) - void PhysicsAssetExporter::exportWorld(btRigidBody * rb, std::string const & path, bool stepSimulation) - { - btDefaultCollisionConfiguration collisionConfiguration; - btCollisionDispatcher dispatcher(&collisionConfiguration); - btDbvtBroadphase broadphase; - btSequentialImpulseConstraintSolver solver; - btDiscreteDynamicsWorld world(&dispatcher, &broadphase, &solver, &collisionConfiguration); - - world.addCollisionObject(rb); - if (stepSimulation) - world.stepSimulation(0.25f); - - btDefaultSerializer serializer(1024 * 1024); - world.serialize(&serializer); - - std::ofstream bulletFile; - bulletFile.open(path.c_str(), std::iostream::binary | std::iostream::out); - if (bulletFile.fail()) - throw std::exception("Failed to open output file"); - - int bufferSize = serializer.getCurrentBufferSize(); - const char * buffer = reinterpret_cast(serializer.getBufferPointer()); - bulletFile.write(buffer, bufferSize); - bulletFile.close(); - } - - btRigidBody * PhysicsAssetExporter::createRigidBody(float mass, btCollisionShape * shape, float * translation, float angularDamping, - float linearDamping, float friction, float restitution, float * inertia) - { - btVector3 btInertia(inertia[0], inertia[1], inertia[2]); - btRigidBody::btRigidBodyConstructionInfo rbInfo(mass, nullptr, shape, btInertia); - rbInfo.m_angularDamping = angularDamping; - rbInfo.m_linearDamping = linearDamping; - rbInfo.m_friction = friction; - rbInfo.m_restitution = restitution; - btRigidBody * rb = new btRigidBody(rbInfo); - - btVector3 btTranslation(translation[0], translation[1], translation[2]); - rb->translate(btTranslation); - return rb; - } - - btCollisionShape * PhysicsAssetExporter::createBoxShape(float * extents) - { - btVector3 btExtents(extents[0], extents[1], extents[2]); - return new btBoxShape(btExtents); - } -#pragma managed(pop) - - } -} -#endif \ No newline at end of file diff --git a/LSLibNative/physics.h b/LSLibNative/physics.h deleted file mode 100644 index 68ee3fca..00000000 --- a/LSLibNative/physics.h +++ /dev/null @@ -1,91 +0,0 @@ -#pragma once - -#if defined(HAS_BULLET) -#include -#pragma managed(push, off) 
-#include -#pragma managed(pop) - -using namespace System; -using namespace System::Collections::Generic; - -namespace LSLib { - namespace Native { - - enum ExporterFlags - { - EF_STATIC_OBJECT = 1, - EF_KINEMATIC_OBJECT = 2, - EF_NO_CONTACT_RESPONSE = 4, - EF_CHARACTER_OBJECT = 8, - EF_DISABLE_WORLD_GRAVITY = 16, - EF_STEP_SIMULATION = 32 - }; - - enum MeshType - { - MESH_CONCAVE, - MESH_CONVEX_HULL, - MESH_SIMPLIFIED_CONVEX_HULL - }; - - public ref class ShapeOptions - { - public: - float Margin; - }; - - public ref class SphereShapeOptions : public ShapeOptions - { - public: - float Radius; - }; - - public ref class BoxShapeOptions : public ShapeOptions - { - public: - array ^ Extents; - }; - - public ref class MeshShapeOptions : public ShapeOptions - { - public: - MeshType Type; - array ^ Vertices; - array ^ Indices; - }; - - public ref class ExporterOptions - { - public: - ExporterFlags Flags; - String ^ OutputPath; - array ^ Translation; - array ^ Inertia; - float Mass; - float LinearDamping; - float AngularDamping; - float Friction; - float Restitution; - ShapeOptions ^ Shape; - }; - - public class PhysicsAssetExporter - { - public: - PhysicsAssetExporter(); - void exportBullet(ExporterOptions ^ options); - - private: - btTriangleIndexVertexArray * vertices_; - - void exportWorld(btRigidBody * rb, std::string const & path, bool stepSimulation); - btRigidBody * createRigidBody(float mass, btCollisionShape * shape, float * translation, float angularDamping, - float linearDamping, float friction, float restitution, float * inertia); - // class btCollisionShape * createCollisionShape(ShapeOptions ^ options); - btCollisionShape * createBoxShape(float * extents); - }; - - } -} -#endif \ No newline at end of file From cb4293fb1a5c3478e00c16f113728291f33f162c Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sun, 10 Mar 2024 11:19:42 +0100 Subject: [PATCH 086/139] Reshuffle shared parser code --- LSLib/LS/NodeAttribute.cs | 6 +- LSLib/LS/ParserCommon.cs | 69 +++++++++++++++++++ LSLib/LS/Stats/Parser/Stat.lex | 2 +- LSLib/LS/Stats/Parser/Stat.yy | 2 +- LSLib/LS/Stats/Parser/StatNodes.cs | 2 +- LSLib/LS/Stats/Parser/StatParser.cs | 2 +- LSLib/LS/Stats/Parser/StatPropertyParser.cs | 1 + LSLib/LS/Stats/StatFileParser.cs | 2 +- LSLib/LS/Stats/StatValueParsers.cs | 2 +- LSLib/LS/Story/Compiler/CompilationContext.cs | 2 +- LSLib/LS/Story/Compiler/IR.cs | 1 + LSLib/LS/Story/Compiler/IRGenerator.cs | 1 + LSLib/LS/Story/Compiler/StoryEmitter.cs | 2 +- LSLib/LS/Story/GoalParser/ASTNodes.cs | 1 + LSLib/LS/Story/GoalParser/Goal.lex | 2 +- LSLib/LS/Story/GoalParser/Goal.yy | 2 +- LSLib/LS/Story/GoalParser/GoalParser.cs | 68 +----------------- LSLib/LSLib.csproj | 22 +++--- 18 files changed, 99 insertions(+), 90 deletions(-) create mode 100644 LSLib/LS/ParserCommon.cs diff --git a/LSLib/LS/NodeAttribute.cs b/LSLib/LS/NodeAttribute.cs index f4a2cc86..fc4230af 100644 --- a/LSLib/LS/NodeAttribute.cs +++ b/LSLib/LS/NodeAttribute.cs @@ -279,10 +279,10 @@ public Guid AsGuid(bool byteSwapGuids) public void FromString(string str, NodeSerializationSettings settings) { - value = ParseFromString(str, type, settings); + value = ParseFromString(str, type, settings.ByteSwapGuids); } - public static object ParseFromString(string str, AttributeType type, NodeSerializationSettings settings) + public static object ParseFromString(string str, AttributeType type, bool byteSwapGuids) { if (type.IsNumeric()) { @@ -416,7 +416,7 @@ public static object ParseFromString(string str, AttributeType type, NodeSeriali return 
Convert.ToSByte(str); case AttributeType.UUID: - if (settings.ByteSwapGuids) + if (byteSwapGuids) { return ByteSwapGuid(new Guid(str)); } diff --git a/LSLib/LS/ParserCommon.cs b/LSLib/LS/ParserCommon.cs new file mode 100644 index 00000000..9629161c --- /dev/null +++ b/LSLib/LS/ParserCommon.cs @@ -0,0 +1,69 @@ +using QUT.Gppg; + +namespace LSLib.Parser; +public class CodeLocation : IMerge +{ + private string fileName; + private int startLine; // start line + private int startColumn; // start column + private int endLine; // end line + private int endColumn; // end column + + /// + /// The line at which the text span starts. + /// + public string FileName { get { return fileName; } } + + /// + /// The line at which the text span starts. + /// + public int StartLine { get { return startLine; } } + + /// + /// The column at which the text span starts. + /// + public int StartColumn { get { return startColumn; } } + + /// + /// The line on which the text span ends. + /// + public int EndLine { get { return endLine; } } + + /// + /// The column of the first character + /// beyond the end of the text span. + /// + public int EndColumn { get { return endColumn; } } + + /// + /// Default no-arg constructor. + /// + public CodeLocation() { } + + /// + /// Constructor for text-span with given start and end. + /// + /// start line + /// start column + /// end line + /// end column + public CodeLocation(string fl, int sl, int sc, int el, int ec) + { + fileName = fl; + startLine = sl; + startColumn = sc; + endLine = el; + endColumn = ec; + } + + /// + /// Create a text location which spans from the + /// start of "this" to the end of the argument "last" + /// + /// The last location in the result span + /// The merged span + public CodeLocation Merge(CodeLocation last) + { + return new CodeLocation(this.fileName, this.startLine, this.startColumn, last.endLine, last.endColumn); + } +} diff --git a/LSLib/LS/Stats/Parser/Stat.lex b/LSLib/LS/Stats/Parser/Stat.lex index 071561fb..be5707d6 100644 --- a/LSLib/LS/Stats/Parser/Stat.lex +++ b/LSLib/LS/Stats/Parser/Stat.lex @@ -100,5 +100,5 @@ L?\"(\\.|[^\\"])*\" { yylval = MakeString(yytext); return (int)StatTokens.STRING . 
return ((int)StatTokens.BAD); %{ - yylloc = new LSLib.LS.Story.GoalParser.CodeLocation(fileName, tokLin, tokCol, tokELin, tokECol); + yylloc = new LSLib.Parser.CodeLocation(fileName, tokLin, tokCol, tokELin, tokECol); %} diff --git a/LSLib/LS/Stats/Parser/Stat.yy b/LSLib/LS/Stats/Parser/Stat.yy index 93768282..e28bcfcd 100644 --- a/LSLib/LS/Stats/Parser/Stat.yy +++ b/LSLib/LS/Stats/Parser/Stat.yy @@ -4,7 +4,7 @@ %parsertype StatParser %tokentype StatTokens %YYSTYPE System.Object -%YYLTYPE LSLib.LS.Story.GoalParser.CodeLocation +%YYLTYPE LSLib.Parser.CodeLocation %start StatFile diff --git a/LSLib/LS/Stats/Parser/StatNodes.cs b/LSLib/LS/Stats/Parser/StatNodes.cs index a0ee3c12..f254ea03 100644 --- a/LSLib/LS/Stats/Parser/StatNodes.cs +++ b/LSLib/LS/Stats/Parser/StatNodes.cs @@ -1,4 +1,4 @@ -using LSLib.LS.Story.GoalParser; +using LSLib.Parser; namespace LSLib.LS.Stats.StatParser; diff --git a/LSLib/LS/Stats/Parser/StatParser.cs b/LSLib/LS/Stats/Parser/StatParser.cs index b930d2e3..45be77db 100644 --- a/LSLib/LS/Stats/Parser/StatParser.cs +++ b/LSLib/LS/Stats/Parser/StatParser.cs @@ -1,4 +1,4 @@ -using LSLib.LS.Story.GoalParser; +using LSLib.Parser; using QUT.Gppg; using System.Text.RegularExpressions; diff --git a/LSLib/LS/Stats/Parser/StatPropertyParser.cs b/LSLib/LS/Stats/Parser/StatPropertyParser.cs index d1bf45dc..9d0f30dc 100644 --- a/LSLib/LS/Stats/Parser/StatPropertyParser.cs +++ b/LSLib/LS/Stats/Parser/StatPropertyParser.cs @@ -1,4 +1,5 @@ using LSLib.LS.Story.GoalParser; +using LSLib.Parser; using QUT.Gppg; namespace LSLib.LS.Stats.Properties; diff --git a/LSLib/LS/Stats/StatFileParser.cs b/LSLib/LS/Stats/StatFileParser.cs index 5cc97d8f..6a915e1b 100644 --- a/LSLib/LS/Stats/StatFileParser.cs +++ b/LSLib/LS/Stats/StatFileParser.cs @@ -1,5 +1,5 @@ using LSLib.LS.Stats.StatParser; -using LSLib.LS.Story.GoalParser; +using LSLib.Parser; using System.Xml; namespace LSLib.LS.Stats; diff --git a/LSLib/LS/Stats/StatValueParsers.cs b/LSLib/LS/Stats/StatValueParsers.cs index e3262fa3..cc6f613f 100644 --- a/LSLib/LS/Stats/StatValueParsers.cs +++ b/LSLib/LS/Stats/StatValueParsers.cs @@ -1,6 +1,6 @@ using LSLib.LS.Stats.Properties; using LSLib.LS.Stats.StatParser; -using LSLib.LS.Story.GoalParser; +using LSLib.Parser; using System.Globalization; namespace LSLib.LS.Stats; diff --git a/LSLib/LS/Story/Compiler/CompilationContext.cs b/LSLib/LS/Story/Compiler/CompilationContext.cs index f5304035..985e2279 100644 --- a/LSLib/LS/Story/Compiler/CompilationContext.cs +++ b/LSLib/LS/Story/Compiler/CompilationContext.cs @@ -1,4 +1,4 @@ -using LSLib.LS.Story.GoalParser; +using LSLib.Parser; namespace LSLib.LS.Story.Compiler; diff --git a/LSLib/LS/Story/Compiler/IR.cs b/LSLib/LS/Story/Compiler/IR.cs index c1db11da..72b0cc5c 100644 --- a/LSLib/LS/Story/Compiler/IR.cs +++ b/LSLib/LS/Story/Compiler/IR.cs @@ -1,4 +1,5 @@ using LSLib.LS.Story.GoalParser; +using LSLib.Parser; namespace LSLib.LS.Story.Compiler; diff --git a/LSLib/LS/Story/Compiler/IRGenerator.cs b/LSLib/LS/Story/Compiler/IRGenerator.cs index 20db2cac..1c5443e2 100644 --- a/LSLib/LS/Story/Compiler/IRGenerator.cs +++ b/LSLib/LS/Story/Compiler/IRGenerator.cs @@ -1,4 +1,5 @@ using LSLib.LS.Story.GoalParser; +using LSLib.Parser; namespace LSLib.LS.Story.Compiler; diff --git a/LSLib/LS/Story/Compiler/StoryEmitter.cs b/LSLib/LS/Story/Compiler/StoryEmitter.cs index 33aaa25d..a404da7c 100644 --- a/LSLib/LS/Story/Compiler/StoryEmitter.cs +++ b/LSLib/LS/Story/Compiler/StoryEmitter.cs @@ -1,4 +1,4 @@ -using LSLib.LS.Story.GoalParser; +using LSLib.Parser; 
using System.Diagnostics; namespace LSLib.LS.Story.Compiler; diff --git a/LSLib/LS/Story/GoalParser/ASTNodes.cs b/LSLib/LS/Story/GoalParser/ASTNodes.cs index 370efc63..c922a987 100644 --- a/LSLib/LS/Story/GoalParser/ASTNodes.cs +++ b/LSLib/LS/Story/GoalParser/ASTNodes.cs @@ -1,4 +1,5 @@ using LSLib.LS.Story.Compiler; +using LSLib.Parser; namespace LSLib.LS.Story.GoalParser; diff --git a/LSLib/LS/Story/GoalParser/Goal.lex b/LSLib/LS/Story/GoalParser/Goal.lex index dbc4573d..93c917af 100644 --- a/LSLib/LS/Story/GoalParser/Goal.lex +++ b/LSLib/LS/Story/GoalParser/Goal.lex @@ -69,5 +69,5 @@ L?\"(\\.|[^\\"])*\" { yylval = MakeString(yytext); return (int)GoalT . return ((int)GoalTokens.BAD); %{ - yylloc = new CodeLocation(fileName, tokLin, tokCol, tokELin, tokECol/*, tokPos, tokEPos, buffer*/); + yylloc = new LSLib.Parser.CodeLocation(fileName, tokLin, tokCol, tokELin, tokECol/*, tokPos, tokEPos, buffer*/); %} diff --git a/LSLib/LS/Story/GoalParser/Goal.yy b/LSLib/LS/Story/GoalParser/Goal.yy index 2769b987..79b436e6 100644 --- a/LSLib/LS/Story/GoalParser/Goal.yy +++ b/LSLib/LS/Story/GoalParser/Goal.yy @@ -4,7 +4,7 @@ %parsertype GoalParser %tokentype GoalTokens %YYSTYPE System.Object -%YYLTYPE LSLib.LS.Story.GoalParser.CodeLocation +%YYLTYPE LSLib.Parser.CodeLocation %start GoalFile diff --git a/LSLib/LS/Story/GoalParser/GoalParser.cs b/LSLib/LS/Story/GoalParser/GoalParser.cs index d4838bba..807b1a6b 100644 --- a/LSLib/LS/Story/GoalParser/GoalParser.cs +++ b/LSLib/LS/Story/GoalParser/GoalParser.cs @@ -2,6 +2,7 @@ using System.Globalization; using System.Text.RegularExpressions; using QUT.Gppg; +using LSLib.Parser; namespace LSLib.LS.Story.GoalParser; @@ -59,73 +60,6 @@ internal class ParserConstants public static CultureInfo ParserCulture = new CultureInfo("en-US"); } -public class CodeLocation : IMerge -{ - private string fileName; - private int startLine; // start line - private int startColumn; // start column - private int endLine; // end line - private int endColumn; // end column - - /// - /// The line at which the text span starts. - /// - public string FileName { get { return fileName; } } - - /// - /// The line at which the text span starts. - /// - public int StartLine { get { return startLine; } } - - /// - /// The column at which the text span starts. - /// - public int StartColumn { get { return startColumn; } } - - /// - /// The line on which the text span ends. - /// - public int EndLine { get { return endLine; } } - - /// - /// The column of the first character - /// beyond the end of the text span. - /// - public int EndColumn { get { return endColumn; } } - - /// - /// Default no-arg constructor. - /// - public CodeLocation() { } - - /// - /// Constructor for text-span with given start and end. 
- /// - /// start line - /// start column - /// end line - /// end column - public CodeLocation(string fl, int sl, int sc, int el, int ec) - { - fileName = fl; - startLine = sl; - startColumn = sc; - endLine = el; - endColumn = ec; - } - - /// - /// Create a text location which spans from the - /// start of "this" to the end of the argument "last" - /// - /// The last location in the result span - /// The merged span - public CodeLocation Merge(CodeLocation last) - { - return new CodeLocation(this.fileName, this.startLine, this.startColumn, last.endLine, last.endColumn); - } -} - public abstract class GoalScanBase : AbstractScanner { protected String fileName; diff --git a/LSLib/LSLib.csproj b/LSLib/LSLib.csproj index 5ebfef5a..05a6f63e 100644 --- a/LSLib/LSLib.csproj +++ b/LSLib/LSLib.csproj @@ -36,20 +36,22 @@ - "$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(SolutionDir)\LSLib\LS\Story\GoalParser\Goal.lex.cs" "$(SolutionDir)\LSLib\LS\Story\GoalParser\Goal.lex" -"$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(SolutionDir)\LSLib\LS\Story\GoalParser\Goal.yy.cs" "$(SolutionDir)\LSLib\LS\Story\GoalParser\Goal.yy" + "$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(ProjectDir)\LS\Story\GoalParser\Goal.lex.cs" "$(ProjectDir)\LS\Story\GoalParser\Goal.lex" -"$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(SolutionDir)\LSLib\LS\Story\HeaderParser\StoryHeader.lex.cs" "$(SolutionDir)\LSLib\LS\Story\HeaderParser\StoryHeader.lex" -"$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(SolutionDir)\LSLib\LS\Story\HeaderParser\StoryHeader.yy.cs" "$(SolutionDir)\LSLib\LS\Story\HeaderParser\StoryHeader.yy" +"$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(MSBuildProjectDirectory)\LS\Story\GoalParser\Goal.lex.cs" "$(MSBuildProjectDirectory)\LS\Story\GoalParser\Goal.lex" +"$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(MSBuildProjectDirectory)\LS\Story\GoalParser\Goal.yy.cs" "$(MSBuildProjectDirectory)\LS\Story\GoalParser\Goal.yy" -"$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(SolutionDir)\LSLib\LS\Stats\Parser\Stat.lex.cs" "$(SolutionDir)\LSLib\LS\Stats\Parser\Stat.lex" -"$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(SolutionDir)\LSLib\LS\Stats\Parser\Stat.yy.cs" "$(SolutionDir)\LSLib\LS\Stats\Parser\Stat.yy" +"$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(MSBuildProjectDirectory)\LS\Story\HeaderParser\StoryHeader.lex.cs" "$(MSBuildProjectDirectory)\LS\Story\HeaderParser\StoryHeader.lex" +"$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(MSBuildProjectDirectory)\LS\Story\HeaderParser\StoryHeader.yy.cs" "$(MSBuildProjectDirectory)\LS\Story\HeaderParser\StoryHeader.yy" -"$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(SolutionDir)\LSLib\LS\Stats\Parser\StatProperty.lex.cs" "$(SolutionDir)\LSLib\LS\Stats\Parser\StatProperty.lex" -"$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(SolutionDir)\LSLib\LS\Stats\Parser\StatProperty.yy.cs" "$(SolutionDir)\LSLib\LS\Stats\Parser\StatProperty.yy" +"$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(MSBuildProjectDirectory)\LS\Stats\Parser\Stat.lex.cs" "$(MSBuildProjectDirectory)\LS\Stats\Parser\Stat.lex" +"$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(MSBuildProjectDirectory)\LS\Stats\Parser\Stat.yy.cs" "$(MSBuildProjectDirectory)\LS\Stats\Parser\Stat.yy" -"$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(SolutionDir)\LSLib\LS\Stats\Parser\StatLua.lex.cs" "$(SolutionDir)\LSLib\LS\Stats\Parser\StatLua.lex" -"$(SolutionDir)\external\gppg\binaries\Gppg" 
/out:"$(SolutionDir)\LSLib\LS\Stats\Parser\StatLua.yy.cs" "$(SolutionDir)\LSLib\LS\Stats\Parser\StatLua.yy" +"$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(MSBuildProjectDirectory)\LS\Stats\Parser\StatProperty.lex.cs" "$(MSBuildProjectDirectory)\LS\Stats\Parser\StatProperty.lex" +"$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(MSBuildProjectDirectory)\LS\Stats\Parser\StatProperty.yy.cs" "$(MSBuildProjectDirectory)\LS\Stats\Parser\StatProperty.yy" + +"$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(MSBuildProjectDirectory)\LS\Stats\Parser\StatLua.lex.cs" "$(MSBuildProjectDirectory)\LS\Stats\Parser\StatLua.lex" +"$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(MSBuildProjectDirectory)\LS\Stats\Parser\StatLua.yy.cs" "$(MSBuildProjectDirectory)\LS\Stats\Parser\StatLua.yy" LSLib LSLib From f5023653b1efb5218279a071c4eac7695d994bca Mon Sep 17 00:00:00 2001 From: Norbyte Date: Mon, 11 Mar 2024 19:01:34 +0100 Subject: [PATCH 087/139] Add physics blob converter tool --- LSTools.sln | 83 +++ PhysicsTool/PhysicsTool.cpp | 169 +++++++ PhysicsTool/PhysicsTool.h | 37 ++ PhysicsTool/PhysicsTool.vcxproj | 162 ++++++ PhysicsTool/PhysicsTool.vcxproj.filters | 36 ++ PhysicsTool/PxEncoder.cpp | 572 +++++++++++++++++++++ PhysicsTool/PxLoader.cpp | 641 ++++++++++++++++++++++++ PhysicsTool/packages.config | 5 + 8 files changed, 1705 insertions(+) create mode 100644 PhysicsTool/PhysicsTool.cpp create mode 100644 PhysicsTool/PhysicsTool.h create mode 100644 PhysicsTool/PhysicsTool.vcxproj create mode 100644 PhysicsTool/PhysicsTool.vcxproj.filters create mode 100644 PhysicsTool/PxEncoder.cpp create mode 100644 PhysicsTool/PxLoader.cpp create mode 100644 PhysicsTool/packages.config diff --git a/LSTools.sln b/LSTools.sln index 174e0718..d017e0b2 100644 --- a/LSTools.sln +++ b/LSTools.sln @@ -36,133 +36,216 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution .editorconfig = .editorconfig EndProjectSection EndProject +Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "PhysicsTool", "PhysicsTool\PhysicsTool.vcxproj", "{043514DF-5822-41A0-A5CE-CBC349B1398B}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 Release|Any CPU = Release|Any CPU Release|x64 = Release|x64 + Release|x86 = Release|x86 RelWithDebInfo|Any CPU = RelWithDebInfo|Any CPU RelWithDebInfo|x64 = RelWithDebInfo|x64 + RelWithDebInfo|x86 = RelWithDebInfo|x86 EndGlobalSection GlobalSection(ProjectConfigurationPlatforms) = postSolution {46372C50-4288-4B8E-AF21-C934560600E0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {46372C50-4288-4B8E-AF21-C934560600E0}.Debug|Any CPU.Build.0 = Debug|Any CPU {46372C50-4288-4B8E-AF21-C934560600E0}.Debug|x64.ActiveCfg = Debug|Any CPU {46372C50-4288-4B8E-AF21-C934560600E0}.Debug|x64.Build.0 = Debug|Any CPU + {46372C50-4288-4B8E-AF21-C934560600E0}.Debug|x86.ActiveCfg = Debug|Any CPU + {46372C50-4288-4B8E-AF21-C934560600E0}.Debug|x86.Build.0 = Debug|Any CPU {46372C50-4288-4B8E-AF21-C934560600E0}.Release|Any CPU.ActiveCfg = Release|Any CPU {46372C50-4288-4B8E-AF21-C934560600E0}.Release|Any CPU.Build.0 = Release|Any CPU {46372C50-4288-4B8E-AF21-C934560600E0}.Release|x64.ActiveCfg = Release|Any CPU {46372C50-4288-4B8E-AF21-C934560600E0}.Release|x64.Build.0 = Release|Any CPU + {46372C50-4288-4B8E-AF21-C934560600E0}.Release|x86.ActiveCfg = Release|Any CPU + {46372C50-4288-4B8E-AF21-C934560600E0}.Release|x86.Build.0 = Release|Any CPU 
{46372C50-4288-4B8E-AF21-C934560600E0}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any CPU {46372C50-4288-4B8E-AF21-C934560600E0}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU {46372C50-4288-4B8E-AF21-C934560600E0}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU {46372C50-4288-4B8E-AF21-C934560600E0}.RelWithDebInfo|x64.Build.0 = Release|Any CPU + {46372C50-4288-4B8E-AF21-C934560600E0}.RelWithDebInfo|x86.ActiveCfg = Release|Any CPU + {46372C50-4288-4B8E-AF21-C934560600E0}.RelWithDebInfo|x86.Build.0 = Release|Any CPU {D8B26B12-E45C-47EA-88F7-56628EB2CCD1}.Debug|Any CPU.ActiveCfg = Debug|x64 {D8B26B12-E45C-47EA-88F7-56628EB2CCD1}.Debug|Any CPU.Build.0 = Debug|x64 {D8B26B12-E45C-47EA-88F7-56628EB2CCD1}.Debug|x64.ActiveCfg = Debug|x64 + {D8B26B12-E45C-47EA-88F7-56628EB2CCD1}.Debug|x86.ActiveCfg = Debug|x64 + {D8B26B12-E45C-47EA-88F7-56628EB2CCD1}.Debug|x86.Build.0 = Debug|x64 {D8B26B12-E45C-47EA-88F7-56628EB2CCD1}.Release|Any CPU.ActiveCfg = Release|x64 {D8B26B12-E45C-47EA-88F7-56628EB2CCD1}.Release|Any CPU.Build.0 = Release|x64 {D8B26B12-E45C-47EA-88F7-56628EB2CCD1}.Release|x64.ActiveCfg = Release|x64 + {D8B26B12-E45C-47EA-88F7-56628EB2CCD1}.Release|x86.ActiveCfg = Release|x64 + {D8B26B12-E45C-47EA-88F7-56628EB2CCD1}.Release|x86.Build.0 = Release|x64 {D8B26B12-E45C-47EA-88F7-56628EB2CCD1}.RelWithDebInfo|Any CPU.ActiveCfg = Release|x64 {D8B26B12-E45C-47EA-88F7-56628EB2CCD1}.RelWithDebInfo|Any CPU.Build.0 = Release|x64 {D8B26B12-E45C-47EA-88F7-56628EB2CCD1}.RelWithDebInfo|x64.ActiveCfg = Release|x64 + {D8B26B12-E45C-47EA-88F7-56628EB2CCD1}.RelWithDebInfo|x86.ActiveCfg = Release|x64 + {D8B26B12-E45C-47EA-88F7-56628EB2CCD1}.RelWithDebInfo|x86.Build.0 = Release|x64 {FAD67294-6223-47E0-8838-E4E7FBC53ED2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {FAD67294-6223-47E0-8838-E4E7FBC53ED2}.Debug|Any CPU.Build.0 = Debug|Any CPU {FAD67294-6223-47E0-8838-E4E7FBC53ED2}.Debug|x64.ActiveCfg = Debug|Any CPU {FAD67294-6223-47E0-8838-E4E7FBC53ED2}.Debug|x64.Build.0 = Debug|Any CPU + {FAD67294-6223-47E0-8838-E4E7FBC53ED2}.Debug|x86.ActiveCfg = Debug|Any CPU + {FAD67294-6223-47E0-8838-E4E7FBC53ED2}.Debug|x86.Build.0 = Debug|Any CPU {FAD67294-6223-47E0-8838-E4E7FBC53ED2}.Release|Any CPU.ActiveCfg = Release|Any CPU {FAD67294-6223-47E0-8838-E4E7FBC53ED2}.Release|Any CPU.Build.0 = Release|Any CPU {FAD67294-6223-47E0-8838-E4E7FBC53ED2}.Release|x64.ActiveCfg = Release|Any CPU {FAD67294-6223-47E0-8838-E4E7FBC53ED2}.Release|x64.Build.0 = Release|Any CPU + {FAD67294-6223-47E0-8838-E4E7FBC53ED2}.Release|x86.ActiveCfg = Release|Any CPU + {FAD67294-6223-47E0-8838-E4E7FBC53ED2}.Release|x86.Build.0 = Release|Any CPU {FAD67294-6223-47E0-8838-E4E7FBC53ED2}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any CPU {FAD67294-6223-47E0-8838-E4E7FBC53ED2}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU {FAD67294-6223-47E0-8838-E4E7FBC53ED2}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU {FAD67294-6223-47E0-8838-E4E7FBC53ED2}.RelWithDebInfo|x64.Build.0 = Release|Any CPU + {FAD67294-6223-47E0-8838-E4E7FBC53ED2}.RelWithDebInfo|x86.ActiveCfg = Release|Any CPU + {FAD67294-6223-47E0-8838-E4E7FBC53ED2}.RelWithDebInfo|x86.Build.0 = Release|Any CPU {CBFEE38F-5F12-4D6F-B4FB-267FB68A6BEA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {CBFEE38F-5F12-4D6F-B4FB-267FB68A6BEA}.Debug|Any CPU.Build.0 = Debug|Any CPU {CBFEE38F-5F12-4D6F-B4FB-267FB68A6BEA}.Debug|x64.ActiveCfg = Debug|Any CPU {CBFEE38F-5F12-4D6F-B4FB-267FB68A6BEA}.Debug|x64.Build.0 = Debug|Any CPU + {CBFEE38F-5F12-4D6F-B4FB-267FB68A6BEA}.Debug|x86.ActiveCfg = Debug|Any CPU + 
{CBFEE38F-5F12-4D6F-B4FB-267FB68A6BEA}.Debug|x86.Build.0 = Debug|Any CPU {CBFEE38F-5F12-4D6F-B4FB-267FB68A6BEA}.Release|Any CPU.ActiveCfg = Release|Any CPU {CBFEE38F-5F12-4D6F-B4FB-267FB68A6BEA}.Release|Any CPU.Build.0 = Release|Any CPU {CBFEE38F-5F12-4D6F-B4FB-267FB68A6BEA}.Release|x64.ActiveCfg = Release|Any CPU {CBFEE38F-5F12-4D6F-B4FB-267FB68A6BEA}.Release|x64.Build.0 = Release|Any CPU + {CBFEE38F-5F12-4D6F-B4FB-267FB68A6BEA}.Release|x86.ActiveCfg = Release|Any CPU + {CBFEE38F-5F12-4D6F-B4FB-267FB68A6BEA}.Release|x86.Build.0 = Release|Any CPU {CBFEE38F-5F12-4D6F-B4FB-267FB68A6BEA}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any CPU {CBFEE38F-5F12-4D6F-B4FB-267FB68A6BEA}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU {CBFEE38F-5F12-4D6F-B4FB-267FB68A6BEA}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU {CBFEE38F-5F12-4D6F-B4FB-267FB68A6BEA}.RelWithDebInfo|x64.Build.0 = Release|Any CPU + {CBFEE38F-5F12-4D6F-B4FB-267FB68A6BEA}.RelWithDebInfo|x86.ActiveCfg = Release|Any CPU + {CBFEE38F-5F12-4D6F-B4FB-267FB68A6BEA}.RelWithDebInfo|x86.Build.0 = Release|Any CPU {EF82C289-53D6-41C8-B5C3-72B37655C7F3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {EF82C289-53D6-41C8-B5C3-72B37655C7F3}.Debug|Any CPU.Build.0 = Debug|Any CPU {EF82C289-53D6-41C8-B5C3-72B37655C7F3}.Debug|x64.ActiveCfg = Debug|Any CPU {EF82C289-53D6-41C8-B5C3-72B37655C7F3}.Debug|x64.Build.0 = Debug|Any CPU + {EF82C289-53D6-41C8-B5C3-72B37655C7F3}.Debug|x86.ActiveCfg = Debug|Any CPU + {EF82C289-53D6-41C8-B5C3-72B37655C7F3}.Debug|x86.Build.0 = Debug|Any CPU {EF82C289-53D6-41C8-B5C3-72B37655C7F3}.Release|Any CPU.ActiveCfg = Release|Any CPU {EF82C289-53D6-41C8-B5C3-72B37655C7F3}.Release|Any CPU.Build.0 = Release|Any CPU {EF82C289-53D6-41C8-B5C3-72B37655C7F3}.Release|x64.ActiveCfg = Release|Any CPU {EF82C289-53D6-41C8-B5C3-72B37655C7F3}.Release|x64.Build.0 = Release|Any CPU + {EF82C289-53D6-41C8-B5C3-72B37655C7F3}.Release|x86.ActiveCfg = Release|Any CPU + {EF82C289-53D6-41C8-B5C3-72B37655C7F3}.Release|x86.Build.0 = Release|Any CPU {EF82C289-53D6-41C8-B5C3-72B37655C7F3}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any CPU {EF82C289-53D6-41C8-B5C3-72B37655C7F3}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU {EF82C289-53D6-41C8-B5C3-72B37655C7F3}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU {EF82C289-53D6-41C8-B5C3-72B37655C7F3}.RelWithDebInfo|x64.Build.0 = Release|Any CPU + {EF82C289-53D6-41C8-B5C3-72B37655C7F3}.RelWithDebInfo|x86.ActiveCfg = Release|Any CPU + {EF82C289-53D6-41C8-B5C3-72B37655C7F3}.RelWithDebInfo|x86.Build.0 = Release|Any CPU {32F08B9A-F50B-4C2E-AB56-533FED066DDE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {32F08B9A-F50B-4C2E-AB56-533FED066DDE}.Debug|Any CPU.Build.0 = Debug|Any CPU {32F08B9A-F50B-4C2E-AB56-533FED066DDE}.Debug|x64.ActiveCfg = Debug|Any CPU {32F08B9A-F50B-4C2E-AB56-533FED066DDE}.Debug|x64.Build.0 = Debug|Any CPU + {32F08B9A-F50B-4C2E-AB56-533FED066DDE}.Debug|x86.ActiveCfg = Debug|Any CPU + {32F08B9A-F50B-4C2E-AB56-533FED066DDE}.Debug|x86.Build.0 = Debug|Any CPU {32F08B9A-F50B-4C2E-AB56-533FED066DDE}.Release|Any CPU.ActiveCfg = Release|Any CPU {32F08B9A-F50B-4C2E-AB56-533FED066DDE}.Release|Any CPU.Build.0 = Release|Any CPU {32F08B9A-F50B-4C2E-AB56-533FED066DDE}.Release|x64.ActiveCfg = Release|Any CPU {32F08B9A-F50B-4C2E-AB56-533FED066DDE}.Release|x64.Build.0 = Release|Any CPU + {32F08B9A-F50B-4C2E-AB56-533FED066DDE}.Release|x86.ActiveCfg = Release|Any CPU + {32F08B9A-F50B-4C2E-AB56-533FED066DDE}.Release|x86.Build.0 = Release|Any CPU {32F08B9A-F50B-4C2E-AB56-533FED066DDE}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any 
CPU {32F08B9A-F50B-4C2E-AB56-533FED066DDE}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU {32F08B9A-F50B-4C2E-AB56-533FED066DDE}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU {32F08B9A-F50B-4C2E-AB56-533FED066DDE}.RelWithDebInfo|x64.Build.0 = Release|Any CPU + {32F08B9A-F50B-4C2E-AB56-533FED066DDE}.RelWithDebInfo|x86.ActiveCfg = Release|Any CPU + {32F08B9A-F50B-4C2E-AB56-533FED066DDE}.RelWithDebInfo|x86.Build.0 = Release|Any CPU {31E71543-CBCF-43BB-AF77-D210D548118E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {31E71543-CBCF-43BB-AF77-D210D548118E}.Debug|Any CPU.Build.0 = Debug|Any CPU {31E71543-CBCF-43BB-AF77-D210D548118E}.Debug|x64.ActiveCfg = Debug|Any CPU {31E71543-CBCF-43BB-AF77-D210D548118E}.Debug|x64.Build.0 = Debug|Any CPU + {31E71543-CBCF-43BB-AF77-D210D548118E}.Debug|x86.ActiveCfg = Debug|Any CPU + {31E71543-CBCF-43BB-AF77-D210D548118E}.Debug|x86.Build.0 = Debug|Any CPU {31E71543-CBCF-43BB-AF77-D210D548118E}.Release|Any CPU.ActiveCfg = Release|Any CPU {31E71543-CBCF-43BB-AF77-D210D548118E}.Release|Any CPU.Build.0 = Release|Any CPU {31E71543-CBCF-43BB-AF77-D210D548118E}.Release|x64.ActiveCfg = Release|Any CPU {31E71543-CBCF-43BB-AF77-D210D548118E}.Release|x64.Build.0 = Release|Any CPU + {31E71543-CBCF-43BB-AF77-D210D548118E}.Release|x86.ActiveCfg = Release|Any CPU + {31E71543-CBCF-43BB-AF77-D210D548118E}.Release|x86.Build.0 = Release|Any CPU {31E71543-CBCF-43BB-AF77-D210D548118E}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any CPU {31E71543-CBCF-43BB-AF77-D210D548118E}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU {31E71543-CBCF-43BB-AF77-D210D548118E}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU {31E71543-CBCF-43BB-AF77-D210D548118E}.RelWithDebInfo|x64.Build.0 = Release|Any CPU + {31E71543-CBCF-43BB-AF77-D210D548118E}.RelWithDebInfo|x86.ActiveCfg = Release|Any CPU + {31E71543-CBCF-43BB-AF77-D210D548118E}.RelWithDebInfo|x86.Build.0 = Release|Any CPU {E4B4F95E-F027-44D7-AB93-B96EF2E661B6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {E4B4F95E-F027-44D7-AB93-B96EF2E661B6}.Debug|Any CPU.Build.0 = Debug|Any CPU {E4B4F95E-F027-44D7-AB93-B96EF2E661B6}.Debug|x64.ActiveCfg = Debug|Any CPU {E4B4F95E-F027-44D7-AB93-B96EF2E661B6}.Debug|x64.Build.0 = Debug|Any CPU + {E4B4F95E-F027-44D7-AB93-B96EF2E661B6}.Debug|x86.ActiveCfg = Debug|Any CPU + {E4B4F95E-F027-44D7-AB93-B96EF2E661B6}.Debug|x86.Build.0 = Debug|Any CPU {E4B4F95E-F027-44D7-AB93-B96EF2E661B6}.Release|Any CPU.ActiveCfg = Release|Any CPU {E4B4F95E-F027-44D7-AB93-B96EF2E661B6}.Release|Any CPU.Build.0 = Release|Any CPU {E4B4F95E-F027-44D7-AB93-B96EF2E661B6}.Release|x64.ActiveCfg = Release|Any CPU {E4B4F95E-F027-44D7-AB93-B96EF2E661B6}.Release|x64.Build.0 = Release|Any CPU + {E4B4F95E-F027-44D7-AB93-B96EF2E661B6}.Release|x86.ActiveCfg = Release|Any CPU + {E4B4F95E-F027-44D7-AB93-B96EF2E661B6}.Release|x86.Build.0 = Release|Any CPU {E4B4F95E-F027-44D7-AB93-B96EF2E661B6}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any CPU {E4B4F95E-F027-44D7-AB93-B96EF2E661B6}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU {E4B4F95E-F027-44D7-AB93-B96EF2E661B6}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU {E4B4F95E-F027-44D7-AB93-B96EF2E661B6}.RelWithDebInfo|x64.Build.0 = Release|Any CPU + {E4B4F95E-F027-44D7-AB93-B96EF2E661B6}.RelWithDebInfo|x86.ActiveCfg = Release|Any CPU + {E4B4F95E-F027-44D7-AB93-B96EF2E661B6}.RelWithDebInfo|x86.Build.0 = Release|Any CPU {94D900D1-EC77-4170-8942-56E3736E44DE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {94D900D1-EC77-4170-8942-56E3736E44DE}.Debug|Any CPU.Build.0 = Debug|Any CPU 
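PxEncoder.cpp below drives its XML output through a small set of helper macros (P, PR, P_BOUNDED, PFLAG) so that, unless ExportAllProperties is set, properties still holding their PhysX default values are omitted from the output. Roughly, a use such as PR(Mass, o.getMass(), 1.0f) expands to the block sketched here (names taken from the macro definitions at the top of that file):

// Approximate expansion of PR(Mass, o.getMass(), 1.0f) inside an export method:
{
    auto _v = o.getMass();                       // evaluate the property once
    if (ExportAllProperties || !(_v == 1.0f))    // emit only when it differs from the default
    {
        ExportProperty(ele, "Mass", _v);         // writes a <Mass> element under the current node
    }
}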
{94D900D1-EC77-4170-8942-56E3736E44DE}.Debug|x64.ActiveCfg = Debug|Any CPU {94D900D1-EC77-4170-8942-56E3736E44DE}.Debug|x64.Build.0 = Debug|Any CPU + {94D900D1-EC77-4170-8942-56E3736E44DE}.Debug|x86.ActiveCfg = Debug|Any CPU + {94D900D1-EC77-4170-8942-56E3736E44DE}.Debug|x86.Build.0 = Debug|Any CPU {94D900D1-EC77-4170-8942-56E3736E44DE}.Release|Any CPU.ActiveCfg = Release|Any CPU {94D900D1-EC77-4170-8942-56E3736E44DE}.Release|Any CPU.Build.0 = Release|Any CPU {94D900D1-EC77-4170-8942-56E3736E44DE}.Release|x64.ActiveCfg = Release|Any CPU {94D900D1-EC77-4170-8942-56E3736E44DE}.Release|x64.Build.0 = Release|Any CPU + {94D900D1-EC77-4170-8942-56E3736E44DE}.Release|x86.ActiveCfg = Release|Any CPU + {94D900D1-EC77-4170-8942-56E3736E44DE}.Release|x86.Build.0 = Release|Any CPU {94D900D1-EC77-4170-8942-56E3736E44DE}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any CPU {94D900D1-EC77-4170-8942-56E3736E44DE}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU {94D900D1-EC77-4170-8942-56E3736E44DE}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU {94D900D1-EC77-4170-8942-56E3736E44DE}.RelWithDebInfo|x64.Build.0 = Release|Any CPU + {94D900D1-EC77-4170-8942-56E3736E44DE}.RelWithDebInfo|x86.ActiveCfg = Release|Any CPU + {94D900D1-EC77-4170-8942-56E3736E44DE}.RelWithDebInfo|x86.Build.0 = Release|Any CPU {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.Debug|Any CPU.Build.0 = Debug|Any CPU {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.Debug|x64.ActiveCfg = Debug|Any CPU {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.Debug|x64.Build.0 = Debug|Any CPU + {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.Debug|x86.ActiveCfg = Debug|Any CPU + {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.Debug|x86.Build.0 = Debug|Any CPU {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.Release|Any CPU.ActiveCfg = Release|Any CPU {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.Release|Any CPU.Build.0 = Release|Any CPU {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.Release|x64.ActiveCfg = Release|Any CPU {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.Release|x64.Build.0 = Release|Any CPU + {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.Release|x86.ActiveCfg = Release|Any CPU + {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.Release|x86.Build.0 = Release|Any CPU {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any CPU {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.RelWithDebInfo|x64.Build.0 = Release|Any CPU + {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.RelWithDebInfo|x86.ActiveCfg = Release|Any CPU + {67E646C2-3C3C-4327-A0B4-40C1DB32579F}.RelWithDebInfo|x86.Build.0 = Release|Any CPU + {043514DF-5822-41A0-A5CE-CBC349B1398B}.Debug|Any CPU.ActiveCfg = Debug|x64 + {043514DF-5822-41A0-A5CE-CBC349B1398B}.Debug|Any CPU.Build.0 = Debug|x64 + {043514DF-5822-41A0-A5CE-CBC349B1398B}.Debug|x64.ActiveCfg = Debug|x64 + {043514DF-5822-41A0-A5CE-CBC349B1398B}.Debug|x64.Build.0 = Debug|x64 + {043514DF-5822-41A0-A5CE-CBC349B1398B}.Debug|x86.ActiveCfg = Debug|Win32 + {043514DF-5822-41A0-A5CE-CBC349B1398B}.Debug|x86.Build.0 = Debug|Win32 + {043514DF-5822-41A0-A5CE-CBC349B1398B}.Release|Any CPU.ActiveCfg = Release|x64 + {043514DF-5822-41A0-A5CE-CBC349B1398B}.Release|Any CPU.Build.0 = Release|x64 + {043514DF-5822-41A0-A5CE-CBC349B1398B}.Release|x64.ActiveCfg = Release|x64 + {043514DF-5822-41A0-A5CE-CBC349B1398B}.Release|x64.Build.0 = Release|x64 + 
{043514DF-5822-41A0-A5CE-CBC349B1398B}.Release|x86.ActiveCfg = Release|Win32 + {043514DF-5822-41A0-A5CE-CBC349B1398B}.Release|x86.Build.0 = Release|Win32 + {043514DF-5822-41A0-A5CE-CBC349B1398B}.RelWithDebInfo|Any CPU.ActiveCfg = Release|x64 + {043514DF-5822-41A0-A5CE-CBC349B1398B}.RelWithDebInfo|Any CPU.Build.0 = Release|x64 + {043514DF-5822-41A0-A5CE-CBC349B1398B}.RelWithDebInfo|x64.ActiveCfg = Release|x64 + {043514DF-5822-41A0-A5CE-CBC349B1398B}.RelWithDebInfo|x64.Build.0 = Release|x64 + {043514DF-5822-41A0-A5CE-CBC349B1398B}.RelWithDebInfo|x86.ActiveCfg = Release|Win32 + {043514DF-5822-41A0-A5CE-CBC349B1398B}.RelWithDebInfo|x86.Build.0 = Release|Win32 EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/PhysicsTool/PhysicsTool.cpp b/PhysicsTool/PhysicsTool.cpp new file mode 100644 index 00000000..82024888 --- /dev/null +++ b/PhysicsTool/PhysicsTool.cpp @@ -0,0 +1,169 @@ +#include "PhysicsTool.h" +#include + + +class PhysXExporterAllocator : public PxAllocatorCallback +{ +public: + void* allocate(size_t size, const char*, const char*, int) + { + void* ptr = _aligned_malloc(size, 16); + memset(ptr, 0, size); + return ptr; + } + + void deallocate(void* ptr) + { + _aligned_free(ptr); + } +}; + + +PxDefaultErrorCallback gPxErrorCallback; +PhysXExporterAllocator gPxAllocator; + +bool PhysXConverter::InitPhysX() +{ + foundation_ = PxCreateFoundation(PX_PHYSICS_VERSION, gPxAllocator, + gPxErrorCallback); + if (!foundation_) return false; + + physics_ = PxCreatePhysics(PX_PHYSICS_VERSION, *foundation_, + PxTolerancesScale(), false, nullptr); + if (!physics_) return false; + + cooking_ = PxCreateCooking(PX_PHYSICS_VERSION, *foundation_, PxCookingParams(PxTolerancesScale())); + if (!cooking_) return false; + + if (!PxInitExtensions(*physics_, nullptr)) return false; + + registry_ = PxSerialization::createSerializationRegistry(PxGetPhysics()); + + return true; +} + + +void PhysXConverter::ShutdownPhysX() +{ + if (cooking_ == nullptr) return; + + registry_->release(); + registry_ = nullptr; + + cooking_->release(); + cooking_ = nullptr; + + physics_->release(); + physics_ = nullptr; + + foundation_->release(); + foundation_ = nullptr; +} + + +PxCollection* PhysXConverter::LoadCollectionFromBinary(std::span const& bin) +{ + auto binSize = bin.size(); + auto binInput = new uint8_t[binSize + PX_SERIAL_FILE_ALIGN]; + // TODO - release memory block after use + void* memory128 = (void*)((uintptr_t(binInput) + PX_SERIAL_FILE_ALIGN) & ~(PX_SERIAL_FILE_ALIGN - 1)); + memcpy(memory128, bin.data(), binSize); + + return PxSerialization::createCollectionFromBinary(memory128, *registry_); +} + + +class KazMemoryOutputStream : public PxOutputStream +{ +public: + uint32_t write(const void* src, uint32_t count) override + { + auto p = (const uint8_t*)src; + std::copy(p, p + count, std::back_inserter(buf_)); + return count; + } + + inline std::vector const& contents() const + { + return buf_; + } + +private: + std::vector buf_; +}; + + +std::vector PhysXConverter::SaveCollectionToBinary(PxCollection& collection) +{ + KazMemoryOutputStream outStream; + if (!PxSerialization::serializeCollectionToBinaryDeterministic(outStream, collection, *registry_, nullptr, true)) return {}; + + return outStream.contents(); +} + + +std::vector LoadFile(std::string const& path) +{ + std::vector bin; + std::ifstream f(path.c_str(), std::ios::binary | std::ios::in); + if (!f.good()) throw std::runtime_error(std::string("Failed to open file: ") + path); + + f.seekg(0, std::ios::end); + 
auto size = (std::streamoff)f.tellg(); + f.seekg(0, std::ios::beg); + bin.resize(size); + + f.read(reinterpret_cast(bin.data()), bin.size()); + return bin; +} + + +void WriteFile(std::string const& path, std::vector const& contents) +{ + std::ofstream f(path.c_str(), std::ios::binary | std::ios::out); + if (!f.good()) throw std::runtime_error(std::string("Failed to open file for writing: ") + path); + + f.write(reinterpret_cast(contents.data()), contents.size()); +} + + +int main(int argc, char** argv) +{ + if (argc != 3) { + std::cout << "Usage: PhysicsTool " << std::endl; + return 1; + } + + std::string inputPath = argv[1]; + std::string outputPath = argv[2]; + + std::string inputExt = inputPath.length() > 4 ? inputPath.substr(inputPath.size() - 4) : ""; + std::string outputExt = outputPath.length() > 4 ? outputPath.substr(outputPath.size() - 4) : ""; + + try { + if (inputExt != ".bin" && inputExt != ".xml") throw std::runtime_error("Input file must be a .bin or .xml file"); + if (outputExt != ".bin" && outputExt != ".xml") throw std::runtime_error("Output file must be a .bin or .xml file"); + + bool inputIsXml = (inputExt == ".xml"); + bool outputIsXml = (outputExt == ".xml"); + + PhysXConverter converter; + if (!converter.InitPhysX()) { + std::cout << "Failed to initialize PhysX runtime" << std::endl; + return 1; + } + + auto input = LoadFile(inputPath); + + auto collection = inputIsXml ? converter.LoadCollectionFromXml(input) : converter.LoadCollectionFromBinary(input); + if (!collection) throw std::runtime_error("Unable to load resource collection from source file"); + + auto output = outputIsXml ? converter.SaveCollectionToXml(*collection) : converter.SaveCollectionToBinary(*collection); + WriteFile(outputPath, output); + } catch (std::exception& e) { + std::cout << e.what() << std::endl; + return 1; + } + + return 0; +} diff --git a/PhysicsTool/PhysicsTool.h b/PhysicsTool/PhysicsTool.h new file mode 100644 index 00000000..eb49874e --- /dev/null +++ b/PhysicsTool/PhysicsTool.h @@ -0,0 +1,37 @@ +#pragma once + +#define _USE_MATH_DEFINES + +#include +#include +#include +#include + +#define WIN32_LEAN_AND_MEAN +#include + + +#include +#include + +using namespace physx; + +class PhysXConverter +{ +public: + bool InitPhysX(); + void ShutdownPhysX(); + + PxCollection* LoadCollectionFromXml(std::span const& xml); + PxCollection* LoadCollectionFromBinary(std::span const& bin); + + std::vector SaveCollectionToXml(PxCollection& collection); + std::vector SaveCollectionToBinary(PxCollection& collection); + +private: + PxFoundation* foundation_{ nullptr }; + PxPhysics* physics_{ nullptr }; + PxCooking* cooking_{ nullptr }; + PxSerializationRegistry* registry_{ nullptr }; +}; + diff --git a/PhysicsTool/PhysicsTool.vcxproj b/PhysicsTool/PhysicsTool.vcxproj new file mode 100644 index 00000000..7d1fcab2 --- /dev/null +++ b/PhysicsTool/PhysicsTool.vcxproj @@ -0,0 +1,162 @@ + + + + + + Debug + Win32 + + + Release + Win32 + + + Debug + x64 + + + Release + x64 + + + + 17.0 + Win32Proj + {043514df-5822-41a0-a5ce-cbc349b1398b} + PhysicsTool + 10.0 + + + + Application + true + v143 + Unicode + + + Application + false + v143 + true + Unicode + + + Application + true + v143 + Unicode + + + Application + false + v143 + true + Unicode + + + + + + + + + + + + + + + + + + + + + + Level3 + true + WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions) + true + + + Console + true + + + + + Level3 + true + true + true + WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions) + true + + + Console + true + true + true + + + + + 
Level3 + true + _DEBUG;_CONSOLE;%(PreprocessorDefinitions) + true + MultiThreadedDebugDLL + stdcpp20 + + + Console + true + tinyxml.lib;$(CoreLibraryDependencies);%(AdditionalDependencies) + $(SolutionDir)\packages\tinyxml.2.8.2.3\build\native\lib\v141\x64-mt-debug;%(AdditionalLibraryDirectories) + + + + + Level3 + true + true + true + NDEBUG;_CONSOLE;%(PreprocessorDefinitions) + true + MultiThreadedDLL + stdcpp20 + + + Console + true + true + true + tinyxml.lib;$(CoreLibraryDependencies);%(AdditionalDependencies) + $(SolutionDir)\packages\tinyxml.2.8.2.3\build\native\lib\v141\x64-mt;%(AdditionalLibraryDirectories) + + + + + + + + + + + + + + + + + + + + + This project references NuGet package(s) that are missing on this computer. Use NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}. + + + + + + \ No newline at end of file diff --git a/PhysicsTool/PhysicsTool.vcxproj.filters b/PhysicsTool/PhysicsTool.vcxproj.filters new file mode 100644 index 00000000..3e20249a --- /dev/null +++ b/PhysicsTool/PhysicsTool.vcxproj.filters @@ -0,0 +1,36 @@ + + + + + {4FC737F1-C7A5-4376-A066-2A32D752A2FF} + cpp;c;cc;cxx;c++;cppm;ixx;def;odl;idl;hpj;bat;asm;asmx + + + {93995380-89BD-4b04-88EB-625FBE52EBFB} + h;hh;hpp;hxx;h++;hm;inl;inc;ipp;xsd + + + {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} + rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms + + + + + Source Files + + + Source Files + + + Source Files + + + + + + + + Header Files + + + \ No newline at end of file diff --git a/PhysicsTool/PxEncoder.cpp b/PhysicsTool/PxEncoder.cpp new file mode 100644 index 00000000..baeeaeaa --- /dev/null +++ b/PhysicsTool/PxEncoder.cpp @@ -0,0 +1,572 @@ +#include "PhysicsTool.h" +#include + +#define PR(name, expr, def) {auto _v = (expr); if (ExportAllProperties || !(_v == (def))) { ExportProperty(ele, #name, _v); }} +#define P(name, expr) ExportProperty(ele, #name, (expr)) +#define P_BOUNDED(name, expr, bound) {auto _v = (expr); if (ExportAllProperties || _v >= (bound)) { if (_v < bound) ExportProperty(ele, #name, _v); else ExportProperty(ele, #name, "Unbounded"); }} +#define PFLAG(name, expr, flag) {auto _v = (expr); if (ExportAllProperties || (_v & flag) == flag) { ExportProperty(ele, #name, (_v & flag) == flag); }} + +class PhysXExporter +{ +public: + bool ExportAllProperties{ true }; + std::unordered_map materials_; + + TiXmlDocument* Export(PxCollection& collection) + { + auto doc = new TiXmlDocument(); + doc->InsertEndChild(TiXmlDeclaration("1.0", "UTF-8", "")); + auto root = doc->InsertEndChild(TiXmlElement("BG3Physics")); + + for (uint32_t i = 0; i < collection.getNbObjects(); i++) { + auto& obj = collection.getObject(i); + ExportTopLevel(*root, obj); + } + + return doc; + } + + template + void ExportProperty(TiXmlNode& ele, char const* name, T value) + { + auto& prop = *ele.InsertEndChild(TiXmlElement(name)); + prop.InsertEndChild(TiXmlText(std::to_string(value))); + } + + template <> + void ExportProperty(TiXmlNode& ele, char const* name, bool obj) + { + auto& prop = *ele.InsertEndChild(TiXmlElement(name)); + prop.InsertEndChild(TiXmlText(obj ? 
"true" : "false")); + } + + template <> + void ExportProperty(TiXmlNode& ele, char const* name, char const* obj) + { + auto& prop = *ele.InsertEndChild(TiXmlElement(name)); + prop.InsertEndChild(TiXmlText(obj)); + } + + template <> + void ExportProperty(TiXmlNode& parent, char const* name, PxVec3 obj) + { + auto& ele = *parent.InsertEndChild(TiXmlElement(name)); + PR(X, obj.x, 0.0f); + PR(Y, obj.y, 0.0f); + PR(Z, obj.z, 0.0f); + } + + template <> + void ExportProperty(TiXmlNode& parent, char const* name, PxQuat obj) + { + auto& ele = *parent.InsertEndChild(TiXmlElement(name)); + PR(X, obj.x, 0.0f); + PR(Y, obj.y, 0.0f); + PR(Z, obj.z, 0.0f); + PR(W, obj.w, 1.0f); + } + + template <> + void ExportProperty(TiXmlNode& parent, char const* name, PxTransform obj) + { + auto& ele = *parent.InsertEndChild(TiXmlElement(name)); + PR(Position, obj.p, PxVec3()); + PR(Rotation, obj.q, PxQuat()); + } + + template <> + void ExportProperty(TiXmlNode& parent, char const* name, PxMeshScale obj) + { + auto& ele = *parent.InsertEndChild(TiXmlElement(name)); + PR(Scale, obj.scale, PxVec3()); + PR(Rotation, obj.rotation, PxQuat()); + } + + template <> + void ExportProperty(TiXmlNode& parent, char const* name, PxJointLinearLimit obj) + { + auto& ele = *parent.InsertEndChild(TiXmlElement(name)); + P_BOUNDED(Value, obj.value, 3.4e+37f); // PX_MAX_F32 + PR(Restitution, obj.restitution, 0.0f); + PR(BounceThreshold, obj.bounceThreshold, 0.0f); + PR(Stiffness, obj.stiffness, 0.0f); + PR(Damping, obj.damping, 0.0f); + PR(ContactDistance, obj.contactDistance, 0.0f); + } + + template <> + void ExportProperty(TiXmlNode& parent, char const* name, PxD6Motion::Enum obj) + { + constexpr char const* kNames[] = {"Locked", "Limited", "Free"}; + + auto& prop = *parent.InsertEndChild(TiXmlElement(name)); + prop.InsertEndChild(TiXmlText(kNames[(uint32_t)obj])); + } + + template <> + void ExportProperty(TiXmlNode& parent, char const* name, PxJointLinearLimitPair obj) + { + auto& ele = *parent.InsertEndChild(TiXmlElement(name)); + PR(Lower, obj.lower, -PX_MAX_F32/3); + PR(Upper, obj.upper, PX_MAX_F32/3); + PR(Restitution, obj.restitution, 0.0f); + PR(BounceThreshold, obj.bounceThreshold, 0.0f); + PR(Stiffness, obj.stiffness, 0.0f); + PR(Damping, obj.damping, 0.0f); + PR(ContactDistance, obj.contactDistance, 0.0f); + } + + template <> + void ExportProperty(TiXmlNode& parent, char const* name, PxJointAngularLimitPair obj) + { + auto& ele = *parent.InsertEndChild(TiXmlElement(name)); + PR(Lower, obj.lower, -(float)M_PI/2.0f); + PR(Upper, obj.upper, (float)M_PI/2.0f); + PR(Restitution, obj.restitution, 0.0f); + PR(BounceThreshold, obj.bounceThreshold, 0.0f); + PR(Stiffness, obj.stiffness, 0.0f); + PR(Damping, obj.damping, 0.0f); + PR(ContactDistance, obj.contactDistance, 0.0f); + } + + template <> + void ExportProperty(TiXmlNode& parent, char const* name, PxJointLimitCone obj) + { + auto& ele = *parent.InsertEndChild(TiXmlElement(name)); + PR(YAngle, obj.yAngle, (float)M_PI/2.0f); + PR(ZAngle, obj.zAngle, (float)M_PI/2.0f); + PR(Restitution, obj.restitution, 0.0f); + PR(BounceThreshold, obj.bounceThreshold, 0.0f); + PR(Stiffness, obj.stiffness, 0.0f); + PR(Damping, obj.damping, 0.0f); + PR(ContactDistance, obj.contactDistance, 0.0f); + } + + template <> + void ExportProperty(TiXmlNode& parent, char const* name, PxJointLimitPyramid obj) + { + auto& ele = *parent.InsertEndChild(TiXmlElement(name)); + PR(YAngleMin, obj.yAngleMin, -(float)M_PI/2.0f); + PR(YAngleMax, obj.yAngleMax, (float)M_PI/2.0f); + PR(ZAngleMin, obj.zAngleMin, 
-(float)M_PI/2.0f); + PR(ZAngleMax, obj.zAngleMax, (float)M_PI/2.0f); + PR(Restitution, obj.restitution, 0.0f); + PR(BounceThreshold, obj.bounceThreshold, 0.0f); + PR(Stiffness, obj.stiffness, 0.0f); + PR(Damping, obj.damping, 0.0f); + PR(ContactDistance, obj.contactDistance, 0.0f); + } + + template <> + void ExportProperty(TiXmlNode& parent, char const* name, PxD6JointDrive obj) + { + auto& ele = *parent.InsertEndChild(TiXmlElement(name)); + P_BOUNDED(ForceLimit, obj.forceLimit, 3.4e+37f); // PX_MAX_F32 + PFLAG(IsAcceleration, obj.flags, PxD6JointDriveFlag::eACCELERATION); + PR(Stiffness, obj.stiffness, 0.0f); + PR(Damping, obj.damping, 0.0f); + } + + void Export(TiXmlNode& parent, PxMaterial& obj) + { + auto it = materials_.find(&obj); + if (it != materials_.end()) return; + + auto index = (PxU32)materials_.size(); + materials_.insert(std::make_pair(&obj, index)); + + auto& ele = *parent.InsertEndChild(TiXmlElement("Material")); + + P(Index, index); + PR(StaticFriction, obj.getStaticFriction(), 1.0f); + PR(DynamicFriction, obj.getDynamicFriction(), 1.0f); + PR(Restitution, obj.getRestitution(), 0.0f); + } + + void ExportProperties(TiXmlNode& ele, PxSphereGeometry& o) + { + ExportProperty(ele, "Type", "Sphere"); + ExportProperty(ele, "Radius", o.radius); + } + + void ExportProperties(TiXmlNode& ele, PxCapsuleGeometry& o) + { + ExportProperty(ele, "Type", "Capsule"); + ExportProperty(ele, "Radius", o.radius); + ExportProperty(ele, "HalfHeight", o.halfHeight); + } + + void ExportProperties(TiXmlNode& ele, PxBoxGeometry& o) + { + ExportProperty(ele, "Type", "Box"); + ExportProperty(ele, "HalfExtents", o.halfExtents); + } + + void ExportProperties(TiXmlNode& ele, PxConvexMeshGeometry& o) + { + ExportProperty(ele, "Type", "ConvexMesh"); + ExportProperty(ele, "Scale", o.scale); + // s << "\t" "MeshFlags: " << (uint32_t)o.meshFlags << std::endl; - Always 0 + + Export(ele, *o.convexMesh); + } + + void ExportProperties(TiXmlNode& ele, PxTriangleMeshGeometry& o) + { + ExportProperty(ele, "Type", "TriangleMesh"); + ExportProperty(ele, "Scale", o.scale); + // s << "\t" "MeshFlags: " << (uint32_t)o.meshFlags << std::endl; - Always 0 + + Export(ele, *o.triangleMesh); + } + + void Export(TiXmlNode& parent, PxGeometry& o) + { + auto& ele = *parent.InsertEndChild(TiXmlElement("Geometry")); + + switch (o.getType()) + { + case PxGeometryType::eSPHERE: ExportProperties(ele, static_cast(o)); break; + case PxGeometryType::eCAPSULE: ExportProperties(ele, static_cast(o)); break; + case PxGeometryType::eBOX: ExportProperties(ele, static_cast(o)); break; + case PxGeometryType::eCONVEXMESH: ExportProperties(ele, static_cast(o)); break; + case PxGeometryType::eTRIANGLEMESH: ExportProperties(ele, static_cast(o)); break; + + case PxGeometryType::ePLANE: + case PxGeometryType::eHEIGHTFIELD: + default: + std::cout << "WARNING: Unsupported geometry type: " << o.getType() << std::endl; + break; + } + } + + void Export(TiXmlNode& parent, PxShape& o) + { + auto& ele = *parent.InsertEndChild(TiXmlElement("Shape")); + + if (o.getNbMaterials() != 1) { + throw std::runtime_error("Only 1 material per shape is supported"); + } + + PxMaterial* material; + o.getMaterials(&material, 1, 0); + auto matIt = materials_.find(material); + if (matIt == materials_.end()) throw std::runtime_error("Shape references unknown material object"); + + ExportProperty(ele, "Name", o.getName()); + P(MaterialIndex, matIt->second); + PR(LocalPose, o.getLocalPose(), PxTransform()); + PR(ContactOffset, o.getContactOffset(), 0.02f); + PR(RestOffset, 
o.getRestOffset(), 0.0f); + PR(TorsionalPatchRadius, o.getTorsionalPatchRadius(), 0.0f); + PR(MinTorsionalPatchRadius, o.getMinTorsionalPatchRadius(), 0.0f); + + Export(ele, o.getGeometry().any()); + } + + void ExportProperties(TiXmlNode& ele, PxRigidActor& o) + { + ExportProperty(ele, "Name", o.getName()); + + PR(GlobalPose, o.getGlobalPose(), PxTransform()); + PFLAG(DisableGravity, o.getActorFlags(), PxActorFlag::eDISABLE_GRAVITY); + PR(DominanceGroup, (uint32_t)o.getDominanceGroup(), 0); + + std::vector shapes; + shapes.resize(o.getNbShapes()); + o.getShapes(shapes.data(), (uint32_t)shapes.size(), 0); + if (!shapes.empty()) { + auto& shapesEle = *ele.InsertEndChild(TiXmlElement("Shapes")); + for (auto shape : shapes) { + Export(shapesEle, *shape); + } + } + + // These should be handled by the joint, not the rigidbody + /* + if (o.getNbConstraints() > 0) { + s << "\t" "Constraints: "; + PxConstraint* constraints[128]; + o.getConstraints(constraints, o.getNbConstraints(), 0); + for (uint32_t i = 0; i < o.getNbConstraints(); i++) { + s << constraints[i]->getConcreteTypeName() << " "; + } + s << std::endl; + }*/ + } + + void Export(TiXmlNode& parent, PxRigidStatic& o) + { + auto& ele = *parent.InsertEndChild(TiXmlElement("RigidStatic")); + ExportProperties(ele, static_cast(o)); + } + + void ExportProperties(TiXmlNode& ele, PxRigidBody& o) + { + ExportProperties(ele, static_cast(o)); + + PR(CMassLocalPose, o.getCMassLocalPose(), PxTransform()); + PR(Mass, o.getMass(), 1.0f); + // PR(InvMass, o.getInvMass(), 1.0f); - Calculated by px + PR(MassSpaceInertiaTensor, o.getMassSpaceInertiaTensor(), PxVec3(1.0f, 1.0f, 1.0f)); + // PR(MassSpaceInvInertiaTensor, o.getMassSpaceInvInertiaTensor(), PxVec3(1.0f, 1.0f, 1.0f)); - Calculated by px + PR(LinearDamping, o.getLinearDamping(), 0.0f); + PR(AngularDamping, o.getAngularDamping(), 0.05f); + P_BOUNDED(MaxLinearVelocity, o.getMaxLinearVelocity(), 1e+15f); // 1e+16f + PR(MaxAngularVelocity, o.getMaxAngularVelocity(), 100.0f); + + PFLAG(Kinematic, o.getRigidBodyFlags(), PxRigidBodyFlag::eKINEMATIC); + PFLAG(EnableCCD, o.getRigidBodyFlags(), PxRigidBodyFlag::eENABLE_CCD); + PFLAG(EnableCCDFriction, o.getRigidBodyFlags(), PxRigidBodyFlag::eENABLE_CCD_FRICTION); + PFLAG(EnableSpeculativeCCD, o.getRigidBodyFlags(), PxRigidBodyFlag::eENABLE_SPECULATIVE_CCD); + PFLAG(EnableCCDMaxContactImpulse, o.getRigidBodyFlags(), PxRigidBodyFlag::eENABLE_CCD_MAX_CONTACT_IMPULSE); + PFLAG(RetainAccelerations, o.getRigidBodyFlags(), PxRigidBodyFlag::eRETAIN_ACCELERATIONS); + + PR(MinCCDAdvanceCoefficient, o.getMinCCDAdvanceCoefficient(), 0.15f); + P_BOUNDED(MaxDepenetrationVelocity, o.getMaxDepenetrationVelocity(), 1e+31f); // 1e+32f + P_BOUNDED(MaxContactImpulse, o.getMaxContactImpulse(), 1e+31f); // 1e+32f + } + + void Export(TiXmlNode& parent, PxRigidDynamic& o) + { + auto& ele = *parent.InsertEndChild(TiXmlElement("RigidDynamic")); + ExportProperties(ele, static_cast(o)); + + PxU32 minPositionIters, minVelocityIters; + o.getSolverIterationCounts(minPositionIters, minVelocityIters); + + PR(SleepThreshold, o.getSleepThreshold(), 0.005f); + PR(StabilizationThreshold, o.getStabilizationThreshold(), 0.0025f); + + PFLAG(LockLinearX, o.getRigidDynamicLockFlags(), PxRigidDynamicLockFlag::eLOCK_LINEAR_X); + PFLAG(LockLinearY, o.getRigidDynamicLockFlags(), PxRigidDynamicLockFlag::eLOCK_LINEAR_Y); + PFLAG(LockLinearZ, o.getRigidDynamicLockFlags(), PxRigidDynamicLockFlag::eLOCK_LINEAR_Z); + PFLAG(LockAngularX, o.getRigidDynamicLockFlags(), PxRigidDynamicLockFlag::eLOCK_ANGULAR_X); + 
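// Convention used by the exporter macros, as read from their call sites: P() always emits an
// element, PR(Name, expr, default) presumably skips the element when expr equals the PhysX
// default passed as the third argument so the XML stays sparse, PFLAG() extracts a single bit
// from a flags value, and P_BOUNDED() evidently writes "Unbounded" at the sentinel bound
// (e.g. PX_MAX_F32), which LoadBoundedProperty() in PxLoader.cpp maps back to the bound on import.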
PFLAG(LockAngularY, o.getRigidDynamicLockFlags(), PxRigidDynamicLockFlag::eLOCK_ANGULAR_Y); + PFLAG(LockAngularZ, o.getRigidDynamicLockFlags(), PxRigidDynamicLockFlag::eLOCK_ANGULAR_Z); + + PR(WakeCounter, o.getWakeCounter(), 0); + P_BOUNDED(ContactReportThreshold, o.getContactReportThreshold(), 3.40282e+37f); // PX_MAX_F32 + PR(MinPositionIters, minPositionIters, 4); + PR(MinVelocityIters, minVelocityIters, 1); + } + + void Export(TiXmlNode& parent, PxD6Joint& o) + { + auto& ele = *parent.InsertEndChild(TiXmlElement("D6Joint")); + ExportProperties(ele, static_cast(o)); + + PR(MotionX, o.getMotion(PxD6Axis::eX), PxD6Motion::eFREE); + PR(MotionY, o.getMotion(PxD6Axis::eY), PxD6Motion::eFREE); + PR(MotionZ, o.getMotion(PxD6Axis::eZ), PxD6Motion::eFREE); + PR(MotionTwist, o.getMotion(PxD6Axis::eTWIST), PxD6Motion::eFREE); + PR(MotionSwing1, o.getMotion(PxD6Axis::eSWING1), PxD6Motion::eFREE); + PR(MotionSwing2, o.getMotion(PxD6Axis::eSWING2), PxD6Motion::eFREE); + + P(DistanceLimit, o.getDistanceLimit()); + P(LinearLimitX, o.getLinearLimit(PxD6Axis::eX)); + P(LinearLimitY, o.getLinearLimit(PxD6Axis::eY)); + P(LinearLimitZ, o.getLinearLimit(PxD6Axis::eZ)); + P(TwistLimit, o.getTwistLimit()); + P(SwingLimit, o.getSwingLimit()); + P(PyramidSwingLimit, o.getPyramidSwingLimit()); + + P(DriveX, o.getDrive(PxD6Drive::eX)); + P(DriveY, o.getDrive(PxD6Drive::eY)); + P(DriveZ, o.getDrive(PxD6Drive::eZ)); + P(DriveSwing, o.getDrive(PxD6Drive::eSWING)); + P(DriveTwist, o.getDrive(PxD6Drive::eTWIST)); + P(DriveSlerp, o.getDrive(PxD6Drive::eSLERP)); + + PR(ProjectionLinearTolerance, o.getProjectionLinearTolerance(), 1e+10f); + PR(ProjectionAngularTolerance, o.getProjectionAngularTolerance(), 3.14159f); + } + + void ExportProperties(TiXmlNode& ele, PxJoint& o) + { + ExportProperty(ele, "Name", o.getName()); + + PxRigidActor* actor0 = nullptr, * actor1 = nullptr; + o.getActors(actor0, actor1); + + if (actor0) ExportProperty(ele, "Actor0", actor0->getName()); + if (actor1) ExportProperty(ele, "Actor1", actor1->getName()); + + PR(Actor0LocalPose, o.getLocalPose(PxJointActorIndex::eACTOR0), PxTransform()); + PR(Actor1LocalPose, o.getLocalPose(PxJointActorIndex::eACTOR1), PxTransform()); + + PxReal force, torque; + o.getBreakForce(force, torque); + P_BOUNDED(BreakForce, force, 3.40282e+37f); // PX_MAX_F32 + P_BOUNDED(BreakTorque, torque, 3.40282e+37f); // PX_MAX_F32 + + PFLAG(ProjectToActor0, o.getConstraintFlags(), PxConstraintFlag::ePROJECT_TO_ACTOR0); + PFLAG(ProjectToActor1, o.getConstraintFlags(), PxConstraintFlag::ePROJECT_TO_ACTOR1); + PFLAG(CollisionEnabled, o.getConstraintFlags(), PxConstraintFlag::eCOLLISION_ENABLED); + PFLAG(DriveLimitsAreForces, o.getConstraintFlags(), PxConstraintFlag::eDRIVE_LIMITS_ARE_FORCES); + + PR(InvMassScale0, o.getInvMassScale0(), 1.0f); + PR(InvInertiaScale0, o.getInvInertiaScale0(), 1.0f); + PR(InvMassScale1, o.getInvMassScale1(), 1.0f); + PR(InvInertiaScale1, o.getInvInertiaScale1(), 1.0f); + } + + void Export(TiXmlNode& parent, PxArticulationJoint& o) + { + auto& ele = *parent.InsertEndChild(TiXmlElement("Joint")); + ExportProperties(ele, static_cast(o)); + + PR(Stiffness, o.getStiffness(), 0.0f); + PR(Damping, o.getDamping(), 0.0f); + PR(InternalCompliance, o.getInternalCompliance(), 0.0f); + PR(ExternalCompliance, o.getExternalCompliance(), 0.0f); + + PxReal zLimit, yLimit; + o.getSwingLimit(zLimit, yLimit); + PR(SwingLimitZ, zLimit, (float)M_PI / 4); + PR(SwingLimitY, yLimit, (float)M_PI / 4); + + PR(TangentialStiffness, o.getTangentialStiffness(), 0.0f); + 
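// PxArticulationJoint reports its swing (z, y) and twist (lower, upper) limits through
// out-parameters, so they are flattened above into individual scalar elements (with the PI/4
// defaults) and re-paired on import by LoadArticulationJoint via setSwingLimit/setTwistLimit.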
PR(TangentialDamping, o.getTangentialDamping(), 0.0f); + PR(SwingLimitContactDistance, o.getSwingLimitContactDistance(), 0.05f); + PR(SwingLimitEnabled, o.getSwingLimitEnabled(), false); + + PxReal lower, upper; + o.getTwistLimit(lower, upper); + PR(TwistLimitLower, lower, -(float)M_PI / 4); + PR(TwistLimitUpper, upper, (float)M_PI / 4); + + PR(TwistLimitContactDistance, o.getTwistLimitContactDistance(), 0.05f); + PR(TwistLimitEnabled, o.getTwistLimitEnabled(), false); + } + + void ExportProperties(TiXmlNode& ele, PxArticulationJointBase& o) + { + PR(ParentPose, o.getParentPose(), PxTransform()); + PR(ChildPose, o.getChildPose(), PxTransform()); + } + + void Export(TiXmlNode& parent, PxArticulationLink& o) + { + auto& ele = *parent.InsertEndChild(TiXmlElement("Link")); + ExportProperties(ele, static_cast(o)); + + auto joint = o.getInboundJoint(); + if (joint != nullptr) { + PR(InboundJointDof, o.getInboundJointDof(), 0); + Export(ele, static_cast(*joint)); + } + + if (o.getNbChildren() > 0) { + std::vector children; + children.resize(o.getNbChildren()); + o.getChildren(children.data(), (PxU32)children.size(), 0); + + auto& childrenEle = *ele.InsertEndChild(TiXmlElement("Links")); + + for (auto child : children) { + Export(childrenEle, *child); + } + } + } + + void Export(TiXmlNode& parent, PxArticulation& o) + { + auto& ele = *parent.InsertEndChild(TiXmlElement("Articulation")); + ExportProperties(ele, static_cast(o)); + + PR(MaxProjectionIterations, o.getMaxProjectionIterations(), 4); + PR(SeparationTolerance, o.getSeparationTolerance(), 0.01f); + PR(InternalDriveIterations, o.getInternalDriveIterations(), 4); + PR(ExternalDriveIterations, o.getExternalDriveIterations(), 4); + } + + void ExportProperties(TiXmlNode& ele, PxArticulationBase& o) + { + PR(SleepThreshold, o.getSleepThreshold(), 0.005f); + PR(StabilizationThreshold, o.getStabilizationThreshold(), 0.0025f); + PR(WakeCounter, o.getWakeCounter(), 0.4f); + + if (o.getNbLinks() > 0) { + std::vector children; + children.resize(o.getNbLinks()); + o.getLinks(children.data(), (PxU32)children.size(), 0); + + auto& childrenEle = *ele.InsertEndChild(TiXmlElement("Links")); + + for (auto child : children) { + if (child->getInboundJoint() == nullptr) { + Export(childrenEle, *child); + } + } + } + } + + void Export(TiXmlNode& parent, PxConvexMesh& o) + { + auto& ele = *parent.InsertEndChild(TiXmlElement("ConvexMesh")); + + auto verts = o.getVertices(); + auto inds = o.getIndexBuffer(); + auto polys = o.getNbPolygons(); + + for (PxU32 i = 0; i < polys; i++) { + PxHullPolygon poly; + o.getPolygonData(i, poly); + + auto& polyEle = *ele.InsertEndChild(TiXmlElement("Polygon")); + + for (PxU32 v = 0; v < poly.mNbVerts; v++) { + auto vert = verts[inds[poly.mIndexBase + v]]; + ExportProperty(polyEle, "Vertex", vert); + } + } + } + + void Export(TiXmlNode& parent, PxTriangleMesh& o) + { + auto& ele = *parent.InsertEndChild(TiXmlElement("TriangleMesh")); + + auto verts = o.getVertices(); + auto inds = (PxU16*)o.getTriangles(); + auto tris = o.getNbTriangles(); + + for (PxU32 i = 0; i < tris*3; i++) { + auto vert = verts[inds[i]]; + ExportProperty(ele, "Vertex", vert); + } + } + + void ExportTopLevel(TiXmlNode& parent, PxBase& obj) + { + switch (obj.getConcreteType()) { + case PxTypeInfo::eFastTypeId: return Export(parent, static_cast(obj)); + case PxTypeInfo::eFastTypeId: Export(parent, static_cast(obj)); break; + case PxTypeInfo::eFastTypeId: Export(parent, static_cast(obj)); break; + case PxTypeInfo::eFastTypeId: Export(parent, static_cast(obj)); 
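// The PxTypeInfo<...>::eFastTypeId template arguments do not survive in this text dump of the
// patch; judging from the Export() overloads above and LoadTopLevel() in PxLoader.cpp, the
// dispatching cases are presumably PxMaterial, PxRigidStatic, PxRigidDynamic, PxD6Joint and
// PxArticulation, while the cases listed afterwards (shapes, meshes, articulation links/joints,
// constraints) intentionally export nothing because they are written as children of their owners.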
break; + case PxTypeInfo::eFastTypeId: Export(parent, static_cast(obj)); break; + + // These are child nodes of other types and will be exported alongside them + case PxTypeInfo::eFastTypeId: + case PxTypeInfo::eFastTypeId: + case PxTypeInfo::eFastTypeId: + case PxTypeInfo::eFastTypeId: + case PxTypeInfo::eFastTypeId: + case PxTypeInfo::eFastTypeId: + return; + + default: + std::cout << "WARNING: Unknown element in PxCollection: " << obj.getConcreteTypeName() << std::endl; + return; + } + } +}; + +std::vector PhysXConverter::SaveCollectionToXml(PxCollection& collection) +{ + PhysXExporter exporter; + auto xml = exporter.Export(collection); + TiXmlPrinter printer; + xml->Accept(&printer); + return std::vector((uint8_t const*)printer.Str().data(), (uint8_t const*)printer.Str().data() + printer.Str().size());; +} diff --git a/PhysicsTool/PxLoader.cpp b/PhysicsTool/PxLoader.cpp new file mode 100644 index 00000000..e7cd9975 --- /dev/null +++ b/PhysicsTool/PxLoader.cpp @@ -0,0 +1,641 @@ +#include "PhysicsTool.h" +#include + +#define PR(name, type, def) LoadProperty(ele, #name, def) +#define P(name, type) LoadProperty(ele, #name) +#define PFLAG(name) LoadProperty(ele, #name, false) +#define P_BOUNDED(name, expr, bound) LoadBoundedProperty(ele, #name, bound) + +#define SET_PR(name, type, def) o->set##name(LoadProperty(ele, #name, def)) +#define SET_P(name, type) o->set##name(LoadProperty(ele, #name)) +#define SET_FLAG(name, prop, enumlbl) o->set##prop(enumlbl, LoadProperty(ele, #name, false)) +#define SET_PB(name, bound) o->set##name(LoadBoundedProperty(ele, #name, bound)) + +class PhysXLoader +{ +public: + PxPhysics* physics_; + PxCooking* cooking_; + + PxCollection* collection_{ nullptr }; + + std::unordered_map materials_; + std::unordered_map actors_; + + PxCollection* Load(TiXmlElement& doc) + { + collection_ = PxCreateCollection(); + + for (auto child = doc.FirstChildElement(); child; child = child->NextSiblingElement()) { + LoadTopLevel(*child); + } + + return collection_; + } + + template + T LoadProperty(TiXmlElement& ele, char const* name, T defaultVal); + + PxReal LoadBoundedProperty(TiXmlElement& ele, char const* name, PxReal bound) + { + auto attr = ele.FirstChildElement(name); + if (attr == nullptr) return bound; + if (strcmp(attr->GetText(), "Unbounded") == 0) return bound; + return std::stof(attr->GetText()); + } + + template <> + PxReal LoadProperty(TiXmlElement& ele, char const* name, PxReal defaultVal) + { + auto attr = ele.FirstChildElement(name); + if (attr == nullptr) return defaultVal; + return std::stof(attr->GetText()); + } + + template <> + PxU32 LoadProperty(TiXmlElement& ele, char const* name, PxU32 defaultVal) + { + auto attr = ele.FirstChildElement(name); + if (attr == nullptr) return defaultVal; + return (PxU32)std::stoi(attr->GetText()); + } + + template <> + bool LoadProperty(TiXmlElement& ele, char const* name, bool defaultVal) + { + auto attr = ele.FirstChildElement(name); + if (attr == nullptr) return defaultVal; + return _stricmp(attr->GetText(), "true") == 0; + } + + template <> + std::string LoadProperty(TiXmlElement& ele, char const* name, std::string defaultVal) + { + auto attr = ele.FirstChildElement(name); + if (attr == nullptr) return defaultVal; + return attr->GetText(); + } + + template + T LoadProperty(TiXmlElement& ele, char const* name); + + template <> + std::string LoadProperty(TiXmlElement& ele, char const* name) + { + auto attr = ele.FirstChildElement(name); + if (attr == nullptr) throw std::runtime_error(std::string("Missing property: ") + 
name); + return attr->GetText(); + } + + template <> + PxD6Motion::Enum LoadProperty(TiXmlElement& ele, char const* name) + { + auto attr = ele.FirstChildElement(name); + if (attr == nullptr) throw std::runtime_error(std::string("Missing property: ") + name); + + if (strcmp(attr->GetText(), "Locked") == 0) return PxD6Motion::eLOCKED; + if (strcmp(attr->GetText(), "Limited") == 0) return PxD6Motion::eLIMITED; + return PxD6Motion::eFREE; + } + + template <> + PxTransform LoadProperty(TiXmlElement& ele, char const* name) + { + auto attr = ele.FirstChildElement(name); + PxTransform tr; + if (attr == nullptr) return tr; + + tr.p = LoadProperty(*attr, "Position"); + tr.q = LoadProperty(*attr, "Rotation"); + return tr; + } + + template <> + PxMeshScale LoadProperty(TiXmlElement& ele, char const* name) + { + auto attr = ele.FirstChildElement(name); + PxMeshScale tr; + if (attr == nullptr) return tr; + + tr.scale = LoadProperty(*attr, "Scale"); + tr.rotation = LoadProperty(*attr, "Rotation"); + return tr; + } + + template <> + PxVec3 LoadProperty(TiXmlElement& ele, char const* name) + { + auto attr = ele.FirstChildElement(name); + PxVec3 v; + if (attr == nullptr) return v; + + v.x = LoadProperty(*attr, "X", 0.0f); + v.y = LoadProperty(*attr, "Y", 0.0f); + v.z = LoadProperty(*attr, "Z", 0.0f); + return v; + } + + template <> + PxVec3 LoadProperty(TiXmlElement& ele, char const* name, PxVec3 def) + { + auto attr = ele.FirstChildElement(name); + if (attr == nullptr) return def; + + PxVec3 v; + v.x = LoadProperty(*attr, "X", 0.0f); + v.y = LoadProperty(*attr, "Y", 0.0f); + v.z = LoadProperty(*attr, "Z", 0.0f); + return v; + } + + template <> + PxQuat LoadProperty(TiXmlElement& ele, char const* name) + { + auto attr = ele.FirstChildElement(name); + PxQuat v; + if (attr == nullptr) return v; + + v.x = LoadProperty(*attr, "X", 0.0f); + v.y = LoadProperty(*attr, "Y", 0.0f); + v.z = LoadProperty(*attr, "Z", 0.0f); + v.w = LoadProperty(*attr, "W", 0.0f); + return v; + } + + template <> + PxJointLinearLimit LoadProperty(TiXmlElement& ele, char const* name) + { + auto attr = ele.FirstChildElement(name); + PxJointLinearLimit v(PxTolerancesScale(), PX_MAX_F32); + if (attr == nullptr) return v; + + v.value = LoadBoundedProperty(*attr, "Value", PX_MAX_F32); + v.restitution = LoadProperty(*attr, "Restitution", 0.0f); + v.bounceThreshold = LoadProperty(*attr, "BounceThreshold", 0.0f); + v.stiffness = LoadProperty(*attr, "Stiffness", 0.0f); + v.damping = LoadProperty(*attr, "Damping", 0.0f); + v.contactDistance = LoadProperty(*attr, "ContactDistance", 0.0f); + return v; + } + + template <> + PxJointLinearLimitPair LoadProperty(TiXmlElement& ele, char const* name) + { + auto attr = ele.FirstChildElement(name); + PxJointLinearLimitPair v(PxTolerancesScale(), -PX_MAX_F32/3, PX_MAX_F32/3); + if (attr == nullptr) return v; + + v.lower = LoadProperty(*attr, "Lower", -PX_MAX_F32/3); + v.upper = LoadProperty(*attr, "Upper", PX_MAX_F32/3); + v.restitution = LoadProperty(*attr, "Restitution", 0.0f); + v.bounceThreshold = LoadProperty(*attr, "BounceThreshold", 0.0f); + v.stiffness = LoadProperty(*attr, "Stiffness", 0.0f); + v.damping = LoadProperty(*attr, "Damping", 0.0f); + v.contactDistance = LoadProperty(*attr, "ContactDistance", 0.0f); + return v; + } + + template <> + PxJointAngularLimitPair LoadProperty(TiXmlElement& ele, char const* name) + { + auto attr = ele.FirstChildElement(name); + PxJointAngularLimitPair v(-(float)M_PI /2, (float)M_PI /2); + if (attr == nullptr) return v; + + v.lower = LoadProperty(*attr, "Lower", 
-(float)M_PI / 2); + v.upper = LoadProperty(*attr, "Upper", (float)M_PI / 2); + v.restitution = LoadProperty(*attr, "Restitution", 0.0f); + v.bounceThreshold = LoadProperty(*attr, "BounceThreshold", 0.0f); + v.stiffness = LoadProperty(*attr, "Stiffness", 0.0f); + v.damping = LoadProperty(*attr, "Damping", 0.0f); + v.contactDistance = LoadProperty(*attr, "ContactDistance", 0.0f); + return v; + } + + template <> + PxJointLimitCone LoadProperty(TiXmlElement& ele, char const* name) + { + auto attr = ele.FirstChildElement(name); + PxJointLimitCone v((float)M_PI /2, (float)M_PI /2); + if (attr == nullptr) return v; + + v.yAngle = LoadProperty(*attr, "YAngle", (float)M_PI / 2); + v.zAngle = LoadProperty(*attr, "ZAngle", (float)M_PI / 2); + v.restitution = LoadProperty(*attr, "Restitution", 0.0f); + v.bounceThreshold = LoadProperty(*attr, "BounceThreshold", 0.0f); + v.stiffness = LoadProperty(*attr, "Stiffness", 0.0f); + v.damping = LoadProperty(*attr, "Damping", 0.0f); + v.contactDistance = LoadProperty(*attr, "ContactDistance", 0.0f); + return v; + } + + template <> + PxJointLimitPyramid LoadProperty(TiXmlElement& ele, char const* name) + { + auto attr = ele.FirstChildElement(name); + PxJointLimitPyramid v(-(float)M_PI / 2, (float)M_PI / 2, -(float)M_PI / 2, (float)M_PI / 2); + if (attr == nullptr) return v; + + v.yAngleMin = LoadProperty(*attr, "YAngleMin", -(float)M_PI / 2); + v.yAngleMax = LoadProperty(*attr, "YAngleMax", (float)M_PI / 2); + v.zAngleMin = LoadProperty(*attr, "ZAngleMin", -(float)M_PI / 2); + v.zAngleMax = LoadProperty(*attr, "ZAngleMax", (float)M_PI / 2); + v.restitution = LoadProperty(*attr, "Restitution", 0.0f); + v.bounceThreshold = LoadProperty(*attr, "BounceThreshold", 0.0f); + v.stiffness = LoadProperty(*attr, "Stiffness", 0.0f); + v.damping = LoadProperty(*attr, "Damping", 0.0f); + v.contactDistance = LoadProperty(*attr, "ContactDistance", 0.0f); + return v; + } + + template <> + PxD6JointDrive LoadProperty(TiXmlElement& ele, char const* name) + { + auto attr = ele.FirstChildElement(name); + PxD6JointDrive v; + if (attr == nullptr) return v; + + v.forceLimit = LoadBoundedProperty(*attr, "ForceLimit", PX_MAX_F32); + v.flags = LoadProperty(*attr, "IsAcceleration", false) ? 
PxD6JointDriveFlag::eACCELERATION : (PxD6JointDriveFlag::Enum)0; + v.stiffness = LoadProperty(*attr, "Stiffness", 0.0f); + v.damping = LoadProperty(*attr, "Damping", 0.0f); + return v; + } + + PxBase* LoadMaterial(TiXmlElement& ele) + { + auto index = PR(Index, PxU32, 0); + if (materials_.find(index) != materials_.end()) throw std::runtime_error("Duplicate material index"); + + auto mat = physics_->createMaterial( + PR(StaticFriction, PxReal, 1.0f), + PR(DynamicFriction, PxReal, 1.0f), + PR(Restitution, PxReal, 0.0f) + ); + + collection_->add(*mat); + materials_.insert(std::make_pair(index, mat)); + return mat; + } + + void LoadRigidActor(TiXmlElement& ele, PxRigidActor* o) + { + o->setName(_strdup(PR(Name, std::string, "").c_str())); + o->setActorFlag(PxActorFlag::eDISABLE_GRAVITY, PFLAG(DisableGravity)); + o->setDominanceGroup((PxDominanceGroup)PR(DominanceGroup, PxU32, 0)); + + auto shapes = ele.FirstChildElement("Shapes"); + if (shapes) { + for (auto shapeEle = shapes->FirstChildElement("Shape"); shapeEle; shapeEle = shapeEle->NextSiblingElement("Shape")) { + o->attachShape(*LoadShape(*shapeEle)); + } + } + + actors_.insert(std::make_pair(o->getName(), o)); + } + + PxBase* LoadRigidStatic(TiXmlElement& ele) + { + auto o = physics_->createRigidStatic( + P(GlobalPose, PxTransform) + ); + + LoadRigidActor(ele, o); + + collection_->add(*o); + return o; + } + + void LoadRigidBody(TiXmlElement& ele, PxRigidBody* o) + { + LoadRigidActor(ele, o); + + SET_P(CMassLocalPose, PxTransform); + SET_PR(Mass, PxReal, 1.0f); + SET_PR(MassSpaceInertiaTensor, PxVec3, PxVec3(1.0f, 1.0f, 1.0f)); + SET_PR(LinearDamping, PxReal, 0.0f); + SET_PR(AngularDamping, PxReal, 0.5f); + SET_PB(MaxLinearVelocity, 1e+16f); + SET_PR(MaxAngularVelocity, PxReal, 100.0f); + + SET_FLAG(Kinematic, RigidBodyFlag, PxRigidBodyFlag::eKINEMATIC); + SET_FLAG(EnableCCD, RigidBodyFlag, PxRigidBodyFlag::eENABLE_CCD); + SET_FLAG(EnableCCDFriction, RigidBodyFlag, PxRigidBodyFlag::eENABLE_CCD_FRICTION); + SET_FLAG(EnableSpeculativeCCD, RigidBodyFlag, PxRigidBodyFlag::eENABLE_SPECULATIVE_CCD); + SET_FLAG(EnableCCDMaxContactImpulse, RigidBodyFlag, PxRigidBodyFlag::eENABLE_CCD_MAX_CONTACT_IMPULSE); + SET_FLAG(RetainAccelerations, RigidBodyFlag, PxRigidBodyFlag::eRETAIN_ACCELERATIONS); + + SET_PR(MinCCDAdvanceCoefficient, PxReal, 0.15f); + SET_PB(MaxDepenetrationVelocity, 1e+32f); + SET_PB(MaxContactImpulse, 1e+32f); + } + + PxBase* LoadRigidDynamic(TiXmlElement& ele) + { + auto o = physics_->createRigidDynamic( + P(GlobalPose, PxTransform) + ); + + auto minPositionIters = PR(MinPositionIters, PxU32, 4); + auto minVelocityIters = PR(MinVelocityIters, PxU32, 1); + o->setSolverIterationCounts(minPositionIters, minVelocityIters); + + SET_PR(SleepThreshold, PxReal, 0.005f); + SET_PR(StabilizationThreshold, PxReal, 0.0025f); + + SET_FLAG(LockLinearX, RigidDynamicLockFlag, PxRigidDynamicLockFlag::eLOCK_LINEAR_X); + SET_FLAG(LockLinearY, RigidDynamicLockFlag, PxRigidDynamicLockFlag::eLOCK_LINEAR_Y); + SET_FLAG(LockLinearZ, RigidDynamicLockFlag, PxRigidDynamicLockFlag::eLOCK_LINEAR_Z); + SET_FLAG(LockAngularX, RigidDynamicLockFlag, PxRigidDynamicLockFlag::eLOCK_ANGULAR_X); + SET_FLAG(LockAngularY, RigidDynamicLockFlag, PxRigidDynamicLockFlag::eLOCK_ANGULAR_Y); + SET_FLAG(LockAngularZ, RigidDynamicLockFlag, PxRigidDynamicLockFlag::eLOCK_ANGULAR_Z); + + SET_PR(WakeCounter, PxReal, 0.0f); + SET_PB(ContactReportThreshold, PX_MAX_F32); + + LoadRigidBody(ele, o); + + collection_->add(*o); + return o; + } + + PxShape* LoadShape(TiXmlElement& ele) + { + auto 
matIndex = PR(MaterialIndex, PxU32, 0); + auto mat = materials_.find(matIndex); + if (mat == materials_.end()) throw std::runtime_error("Shape references unknown material index"); + + auto geomEle = ele.FirstChildElement("Geometry"); + if (!geomEle) throw std::runtime_error("Shape has no geometry"); + + auto geom = LoadGeometry(*geomEle); + + auto o = physics_->createShape(*geom, *mat->second, true); + o->setName(_strdup(PR(Name, std::string, "").c_str())); + o->setLocalPose(P(LocalPose, PxTransform)); + o->setContactOffset(PR(ContactOffset, PxReal, 0.02f)); + o->setRestOffset(PR(RestOffset, PxReal, 0.0f)); + o->setTorsionalPatchRadius(PR(TorsionalPatchRadius, PxReal, 0.0f)); + o->setMinTorsionalPatchRadius(PR(MinTorsionalPatchRadius, PxReal, 0.0f)); + + collection_->add(*o); + return o; + } + + PxGeometry* LoadSphere(TiXmlElement& ele) + { + return new PxSphereGeometry( + PR(Radius, PxReal, 1.0f) + ); + } + + PxGeometry* LoadCapsule(TiXmlElement& ele) + { + return new PxCapsuleGeometry( + PR(Radius, PxReal, 1.0f), + PR(HalfHeight, PxReal, 1.0f) + ); + } + + PxGeometry* LoadBox(TiXmlElement& ele) + { + return new PxBoxGeometry( + P(HalExtents, PxVec3) + ); + } + + PxGeometry* LoadConvexMeshGeometry(TiXmlElement& ele) + { + auto meshEle = ele.FirstChildElement("ConvexMesh"); + if (!meshEle) throw std::runtime_error("Geometry has no ConvexMesh"); + auto mesh = LoadConvexMesh(ele); + + return new PxConvexMeshGeometry( + mesh, + P(Scale, PxMeshScale) + ); + } + + PxGeometry* LoadTriangleMeshGeometry(TiXmlElement& ele) + { + auto meshEle = ele.FirstChildElement("TriangleMesh"); + if (!meshEle) throw std::runtime_error("Geometry has no TriangleMesh"); + auto mesh = LoadTriangleMesh(ele); + + return new PxTriangleMeshGeometry( + mesh, + P(Scale, PxMeshScale) + ); + } + + PxConvexMesh* LoadConvexMesh(TiXmlElement& ele) + { + throw new std::runtime_error("LoadConvexMesh: Dont know how to do this yet"); + } + + PxTriangleMesh* LoadTriangleMesh(TiXmlElement& ele) + { + throw new std::runtime_error("LoadTriangleMesh: Dont know how to do this yet"); + } + + PxGeometry* LoadGeometry(TiXmlElement& ele) + { + auto type = PR(Type, std::string, ""); + + if (type == "Sphere") { + return LoadSphere(ele); + } else if (type == "Capsule") { + return LoadCapsule(ele); + } else if (type == "Box") { + return LoadBox(ele); + } else if (type == "ConvexMesh") { + return LoadConvexMeshGeometry(ele); + } else if (type == "TriangleMesh") { + return LoadTriangleMeshGeometry(ele); + } else { + throw std::runtime_error("Unknown geometry type"); + } + } + + void LoadJoint(PxJoint* o, TiXmlElement& ele) + { + o->setName(_strdup(PR(Name, std::string, "").c_str())); + + auto force = P_BOUNDED(BreakForce, PxReal, PX_MAX_F32); + auto torque = P_BOUNDED(BreakTorque, PxReal, PX_MAX_F32); + o->setBreakForce(force, torque); + + SET_FLAG(ProjectToActor0, ConstraintFlag, PxConstraintFlag::ePROJECT_TO_ACTOR0); + SET_FLAG(ProjectToActor1, ConstraintFlag, PxConstraintFlag::ePROJECT_TO_ACTOR1); + SET_FLAG(CollisionEnabled, ConstraintFlag, PxConstraintFlag::eCOLLISION_ENABLED); + SET_FLAG(DriveLimitsAreForces, ConstraintFlag, PxConstraintFlag::eDRIVE_LIMITS_ARE_FORCES); + + SET_PR(InvMassScale0, PxReal, 1.0f); + SET_PR(InvInertiaScale0, PxReal, 1.0f); + SET_PR(InvMassScale1, PxReal, 1.0f); + SET_PR(InvInertiaScale1, PxReal, 1.0f); + } + + PxBase* LoadD6Joint(TiXmlElement& ele) + { + auto actor0Name = P(Actor0, std::string); + auto actor1Name = P(Actor1, std::string); + + auto actor0It = actors_.find(actor0Name); + if (actor0It == 
actors_.end()) throw std::runtime_error("Actor0 has invalid name"); + + auto actor1It = actors_.find(actor1Name); + if (actor1It == actors_.end()) throw std::runtime_error("Actor1 has invalid name"); + + auto pose0 = P(Actor0LocalPose, PxTransform); + auto pose1 = P(Actor1LocalPose, PxTransform); + + auto o = PxD6JointCreate(*physics_, actor0It->second, pose0, actor1It->second, pose1); + LoadJoint(o, ele); + + o->setMotion(PxD6Axis::eX, P(MotionX, PxD6Motion::Enum)); + o->setMotion(PxD6Axis::eY, P(MotionY, PxD6Motion::Enum)); + o->setMotion(PxD6Axis::eZ, P(MotionZ, PxD6Motion::Enum)); + o->setMotion(PxD6Axis::eTWIST, P(MotionTwist, PxD6Motion::Enum)); + o->setMotion(PxD6Axis::eSWING1, P(MotionSwing1, PxD6Motion::Enum)); + o->setMotion(PxD6Axis::eSWING2, P(MotionSwing2, PxD6Motion::Enum)); + + SET_P(DistanceLimit, PxJointLinearLimit); + o->setLinearLimit(PxD6Axis::eX, P(LinearLimitX, PxJointLinearLimitPair)); + o->setLinearLimit(PxD6Axis::eY, P(LinearLimitY, PxJointLinearLimitPair)); + o->setLinearLimit(PxD6Axis::eZ, P(LinearLimitZ, PxJointLinearLimitPair)); + SET_P(TwistLimit, PxJointAngularLimitPair); + SET_P(SwingLimit, PxJointLimitCone); + SET_P(PyramidSwingLimit, PxJointLimitPyramid); + + o->setDrive(PxD6Drive::eX, P(DriveX, PxD6JointDrive)); + o->setDrive(PxD6Drive::eY, P(DriveY, PxD6JointDrive)); + o->setDrive(PxD6Drive::eZ, P(DriveZ, PxD6JointDrive)); + o->setDrive(PxD6Drive::eSWING, P(DriveSwing, PxD6JointDrive)); + o->setDrive(PxD6Drive::eTWIST, P(DriveTwist, PxD6JointDrive)); + o->setDrive(PxD6Drive::eSLERP, P(DriveSlerp, PxD6JointDrive)); + + SET_PR(ProjectionLinearTolerance, PxReal, 1e+10f); + SET_PR(ProjectionAngularTolerance, PxReal, 3.14159f); + + collection_->add(*o->getConstraint()); + collection_->add(*o); + return o; + } + + PxBase* LoadArticulationJoint(TiXmlElement& ele, PxArticulationJoint* o) + { + SET_P(ParentPose, PxTransform); + SET_P(ChildPose, PxTransform); + + SET_PR(Stiffness, PxReal, 0.0f); + SET_PR(Damping, PxReal, 0.0f); + SET_PR(InternalCompliance, PxReal, 0.0f); + SET_PR(ExternalCompliance, PxReal, 0.0f); + + o->setSwingLimit(PR(SwingLimitZ, PxReal, (float)M_PI / 4), PR(SwingLimitY, PxReal, (float)M_PI / 4)); + + SET_PR(TangentialStiffness, PxReal, 0.0f); + SET_PR(TangentialDamping, PxReal, 0.0f); + SET_PR(SwingLimitContactDistance, PxReal, 0.05f); + SET_PR(SwingLimitEnabled, bool, false); + + o->setTwistLimit(PR(TwistLimitLower, PxReal, -(float)M_PI / 4), PR(TwistLimitUpper, PxReal, (float)M_PI / 4)); + + SET_PR(TwistLimitContactDistance, PxReal, 0.05f); + SET_PR(TwistLimitEnabled, bool, false); + + collection_->add(*o); + return o; + } + + PxBase* LoadArticulationLink(TiXmlElement& ele, PxArticulation& articulation, PxArticulationLink* parent) + { + auto o = articulation.createLink( + parent, P(GlobalPose, PxTransform) + ); + + LoadRigidBody(ele, o); + + if (parent != nullptr) { + auto jointNode = ele.FirstChildElement("Joint"); + if (jointNode == nullptr) throw std::runtime_error("Joint missing on articulation link"); + LoadArticulationJoint(*jointNode, static_cast(o->getInboundJoint())); + } + + collection_->add(*o); + + auto linksNode = ele.FirstChildElement("Links"); + if (linksNode) { + for (auto linkNode = linksNode->FirstChildElement("Link"); linkNode; linkNode = linkNode->NextSiblingElement("Link")) { + LoadArticulationLink(*linkNode, articulation, o); + } + } + + return o; + } + + PxBase* LoadArticulation(TiXmlElement& ele) + { + auto o = physics_->createArticulation(); + + SET_PR(SleepThreshold, PxReal, 0.005f); + 
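// The SET_* macros mirror the exporter's P*/PR/PFLAG family: SET_PR(Name, type, def) pastes the
// name onto a setter and reads the child element with a default, e.g. SET_PR(WakeCounter, PxReal, 0.4f)
// becomes o->setWakeCounter(LoadProperty<PxReal>(ele, "WakeCounter", 0.4f)) per the #define at the
// top of this file (the <type> template argument appears stripped in this text dump). SET_FLAG
// toggles one flag bit, and SET_PB goes through LoadBoundedProperty so an "Unbounded" element
// restores the engine's sentinel bound.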
SET_PR(StabilizationThreshold, PxReal, 0.0025f); + SET_PR(WakeCounter, PxReal, 0.4f); + + SET_PR(MaxProjectionIterations, PxU32, 4); + SET_PR(SeparationTolerance, PxReal, 0.01f); + SET_PR(InternalDriveIterations, PxU32, 4); + SET_PR(ExternalDriveIterations, PxU32, 4); + + auto linksNode = ele.FirstChildElement("Links"); + if (linksNode) { + for (auto linkNode = linksNode->FirstChildElement("Link"); linkNode; linkNode = linkNode->NextSiblingElement("Link")) { + LoadArticulationLink(*linkNode, *o, nullptr); + + } + } + + collection_->add(*o); + return o; + } + + PxBase* LoadTopLevel(TiXmlElement& ele) + { + auto type = ele.ValueStr(); + if (type == "Material") { + return LoadMaterial(ele); + } else if (type == "RigidStatic") { + return LoadRigidStatic(ele); + } else if (type == "RigidDynamic") { + return LoadRigidDynamic(ele); + } else if (type == "D6Joint") { + return LoadD6Joint(ele); + } else if (type == "Articulation") { + return LoadArticulation(ele); + } else { + std::cout << "WARNING: Don't know how to load object " << type << std::endl; + return nullptr; + } + } +}; + + +PxCollection* PhysXConverter::LoadCollectionFromXml(std::span const& xml) +{ + PhysXLoader loader; + loader.physics_ = physics_; + loader.cooking_ = cooking_; + + // Ensure string is null-terminated + std::string s((char const*)xml.data(), xml.size()); + + TiXmlDocument doc; + doc.Parse(s.c_str(), 0, TIXML_ENCODING_UTF8); + if (doc.Error()) throw std::runtime_error(doc.ErrorDesc()); + + auto root = doc.FirstChildElement(); + if (root == nullptr || strcmp(root->Value(), "BG3Physics") != 0) throw std::runtime_error("Expected a BG3Physics XML document"); + + return loader.Load(*root); +} diff --git a/PhysicsTool/packages.config b/PhysicsTool/packages.config new file mode 100644 index 00000000..61a4c5f2 --- /dev/null +++ b/PhysicsTool/packages.config @@ -0,0 +1,5 @@ + + + + + \ No newline at end of file From 5d6de4eb2a711563d1be5f889235b4113638ed57 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Fri, 15 Mar 2024 01:25:24 +0100 Subject: [PATCH 088/139] Improve float precision in physics exports --- PhysicsTool/PxEncoder.cpp | 9 +++++++++ PhysicsTool/PxLoader.cpp | 26 +++++++++++++------------- make-release.bat | 3 +++ 3 files changed, 25 insertions(+), 13 deletions(-) diff --git a/PhysicsTool/PxEncoder.cpp b/PhysicsTool/PxEncoder.cpp index baeeaeaa..067e81db 100644 --- a/PhysicsTool/PxEncoder.cpp +++ b/PhysicsTool/PxEncoder.cpp @@ -40,6 +40,15 @@ class PhysXExporter prop.InsertEndChild(TiXmlText(obj ? 
"true" : "false")); } + template <> + void ExportProperty(TiXmlNode& ele, char const* name, PxReal obj) + { + auto& prop = *ele.InsertEndChild(TiXmlElement(name)); + char val[32]; + std::snprintf(val, sizeof(val), "%.8f", obj); + prop.InsertEndChild(TiXmlText(val)); + } + template <> void ExportProperty(TiXmlNode& ele, char const* name, char const* obj) { diff --git a/PhysicsTool/PxLoader.cpp b/PhysicsTool/PxLoader.cpp index e7cd9975..e63e28b8 100644 --- a/PhysicsTool/PxLoader.cpp +++ b/PhysicsTool/PxLoader.cpp @@ -199,11 +199,11 @@ class PhysXLoader PxJointAngularLimitPair LoadProperty(TiXmlElement& ele, char const* name) { auto attr = ele.FirstChildElement(name); - PxJointAngularLimitPair v(-(float)M_PI /2, (float)M_PI /2); + PxJointAngularLimitPair v(-PxPi / 2, PxPi / 2); if (attr == nullptr) return v; - v.lower = LoadProperty(*attr, "Lower", -(float)M_PI / 2); - v.upper = LoadProperty(*attr, "Upper", (float)M_PI / 2); + v.lower = LoadProperty(*attr, "Lower", -PxPi / 2); + v.upper = LoadProperty(*attr, "Upper", PxPi / 2); v.restitution = LoadProperty(*attr, "Restitution", 0.0f); v.bounceThreshold = LoadProperty(*attr, "BounceThreshold", 0.0f); v.stiffness = LoadProperty(*attr, "Stiffness", 0.0f); @@ -216,11 +216,11 @@ class PhysXLoader PxJointLimitCone LoadProperty(TiXmlElement& ele, char const* name) { auto attr = ele.FirstChildElement(name); - PxJointLimitCone v((float)M_PI /2, (float)M_PI /2); + PxJointLimitCone v(PxPi / 2, PxPi / 2); if (attr == nullptr) return v; - v.yAngle = LoadProperty(*attr, "YAngle", (float)M_PI / 2); - v.zAngle = LoadProperty(*attr, "ZAngle", (float)M_PI / 2); + v.yAngle = LoadProperty(*attr, "YAngle", PxPi / 2); + v.zAngle = LoadProperty(*attr, "ZAngle", PxPi / 2); v.restitution = LoadProperty(*attr, "Restitution", 0.0f); v.bounceThreshold = LoadProperty(*attr, "BounceThreshold", 0.0f); v.stiffness = LoadProperty(*attr, "Stiffness", 0.0f); @@ -233,13 +233,13 @@ class PhysXLoader PxJointLimitPyramid LoadProperty(TiXmlElement& ele, char const* name) { auto attr = ele.FirstChildElement(name); - PxJointLimitPyramid v(-(float)M_PI / 2, (float)M_PI / 2, -(float)M_PI / 2, (float)M_PI / 2); + PxJointLimitPyramid v(-PxPi / 2, PxPi / 2, -PxPi / 2, PxPi / 2); if (attr == nullptr) return v; - v.yAngleMin = LoadProperty(*attr, "YAngleMin", -(float)M_PI / 2); - v.yAngleMax = LoadProperty(*attr, "YAngleMax", (float)M_PI / 2); - v.zAngleMin = LoadProperty(*attr, "ZAngleMin", -(float)M_PI / 2); - v.zAngleMax = LoadProperty(*attr, "ZAngleMax", (float)M_PI / 2); + v.yAngleMin = LoadProperty(*attr, "YAngleMin", -PxPi / 2); + v.yAngleMax = LoadProperty(*attr, "YAngleMax", PxPi / 2); + v.zAngleMin = LoadProperty(*attr, "ZAngleMin", -PxPi / 2); + v.zAngleMax = LoadProperty(*attr, "ZAngleMax", PxPi / 2); v.restitution = LoadProperty(*attr, "Restitution", 0.0f); v.bounceThreshold = LoadProperty(*attr, "BounceThreshold", 0.0f); v.stiffness = LoadProperty(*attr, "Stiffness", 0.0f); @@ -533,14 +533,14 @@ class PhysXLoader SET_PR(InternalCompliance, PxReal, 0.0f); SET_PR(ExternalCompliance, PxReal, 0.0f); - o->setSwingLimit(PR(SwingLimitZ, PxReal, (float)M_PI / 4), PR(SwingLimitY, PxReal, (float)M_PI / 4)); + o->setSwingLimit(PR(SwingLimitZ, PxReal, PxPi / 4), PR(SwingLimitY, PxReal, PxPi / 4)); SET_PR(TangentialStiffness, PxReal, 0.0f); SET_PR(TangentialDamping, PxReal, 0.0f); SET_PR(SwingLimitContactDistance, PxReal, 0.05f); SET_PR(SwingLimitEnabled, bool, false); - o->setTwistLimit(PR(TwistLimitLower, PxReal, -(float)M_PI / 4), PR(TwistLimitUpper, PxReal, (float)M_PI / 4)); + 
o->setTwistLimit(PR(TwistLimitLower, PxReal, -PxPi / 4), PR(TwistLimitUpper, PxReal, PxPi / 4)); SET_PR(TwistLimitContactDistance, PxReal, 0.05f); SET_PR(TwistLimitEnabled, bool, false); diff --git a/make-release.bat b/make-release.bat index 90249ffc..7b8eca78 100644 --- a/make-release.bat +++ b/make-release.bat @@ -1,6 +1,9 @@ mkdir Release\Packed mkdir Release\Packed\Tools +copy x64\Release\*.dll Release\Packed\Tools\ +copy x64\Release\*.exe Release\Packed\Tools\ + copy RconClient\bin\Release\net8.0\*.config Release\Packed\Tools\ copy RconClient\bin\Release\net8.0\*.runtimeconfig.json Release\Packed\Tools\ copy RconClient\bin\Release\net8.0\*.dll Release\Packed\Tools\ From 2c5de79e75441a4f8a5f185affed26f5d55a15ab Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sun, 17 Mar 2024 12:50:14 +0100 Subject: [PATCH 089/139] Fix build rule typo --- LSLib/LSLib.csproj | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/LSLib/LSLib.csproj b/LSLib/LSLib.csproj index 05a6f63e..817ba64f 100644 --- a/LSLib/LSLib.csproj +++ b/LSLib/LSLib.csproj @@ -36,9 +36,7 @@ - "$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(ProjectDir)\LS\Story\GoalParser\Goal.lex.cs" "$(ProjectDir)\LS\Story\GoalParser\Goal.lex" - -"$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(MSBuildProjectDirectory)\LS\Story\GoalParser\Goal.lex.cs" "$(MSBuildProjectDirectory)\LS\Story\GoalParser\Goal.lex" + "$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(MSBuildProjectDirectory)\LS\Story\GoalParser\Goal.lex.cs" "$(MSBuildProjectDirectory)\LS\Story\GoalParser\Goal.lex" "$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(MSBuildProjectDirectory)\LS\Story\GoalParser\Goal.yy.cs" "$(MSBuildProjectDirectory)\LS\Story\GoalParser\Goal.yy" "$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(MSBuildProjectDirectory)\LS\Story\HeaderParser\StoryHeader.lex.cs" "$(MSBuildProjectDirectory)\LS\Story\HeaderParser\StoryHeader.lex" From 13558946ba4d0a6be5bd3d12596731b51200024b Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sun, 17 Mar 2024 13:13:22 +0100 Subject: [PATCH 090/139] Package writer code cleanup --- LSLib/LS/PackageCommon.cs | 2 +- LSLib/LS/PackageWriter.cs | 357 +++++++++++++++++-------------- LSLib/LS/Save/SavegameHelpers.cs | 2 +- 3 files changed, 204 insertions(+), 157 deletions(-) diff --git a/LSLib/LS/PackageCommon.cs b/LSLib/LS/PackageCommon.cs index a8725827..93116682 100644 --- a/LSLib/LS/PackageCommon.cs +++ b/LSLib/LS/PackageCommon.cs @@ -195,7 +195,7 @@ public async Task CreatePackage(string packagePath, string inputPath, PackageBui AddFilesFromPath(build, inputPath); ProgressUpdate("Creating archive ...", 0, 1); - using var writer = new PackageWriter(build, packagePath); + using var writer = PackageWriterFactory.Create(build, packagePath); writer.WriteProgress += WriteProgressUpdate; writer.Write(); } diff --git a/LSLib/LS/PackageWriter.cs b/LSLib/LS/PackageWriter.cs index 0ff61f65..48fa9bd3 100644 --- a/LSLib/LS/PackageWriter.cs +++ b/LSLib/LS/PackageWriter.cs @@ -1,4 +1,5 @@ -using System.IO.Hashing; +using System.IO; +using System.IO.Hashing; using System.Security.Cryptography; using LSLib.LS.Enums; using LZ4; @@ -9,14 +10,32 @@ public class PackageBuildTransientFile : PackagedFileInfoCommon { } -public class PackageWriter(PackageBuildData Build, string PackagePath) : IDisposable +abstract public class PackageWriter : IDisposable { public delegate void WriteProgressDelegate(PackageBuildInputFile file, long numerator, long denominator); - private readonly PackageHeaderCommon Metadata = new(); - private 
readonly List Streams = []; + protected readonly PackageHeaderCommon Metadata = new(); + protected readonly List Streams = []; + protected readonly PackageBuildData Build; + protected readonly string PackagePath; + protected readonly Stream MainStream; public WriteProgressDelegate WriteProgress = delegate { }; + public PackageWriter(PackageBuildData build, string packagePath) + { + Build = build; + PackagePath = packagePath; + + MainStream = File.Open(PackagePath, FileMode.Create, FileAccess.Write); + Streams.Add(MainStream); + + Metadata.Version = (UInt32)Build.Version; + Metadata.Flags = Build.Flags; + Metadata.Priority = Build.Priority; + Metadata.Md5 = new byte[16]; + } + + public void Dispose() { foreach (Stream stream in Streams) @@ -25,6 +44,40 @@ public void Dispose() } } + private bool CanCompressFile(PackageBuildInputFile file, Stream inputStream) + { + var extension = Path.GetExtension(file.Path).ToLowerInvariant(); + return extension != ".gts" + && extension != ".gtp" + && extension != ".wem" + && extension != ".bnk" + && inputStream.Length > 0; + } + + private void WritePadding(Stream stream) + { + int padLength = Build.Version.PaddingSize(); + long alignTo; + if (Build.Version >= PackageVersion.V16) + { + alignTo = stream.Position - Marshal.SizeOf(typeof(LSPKHeader16)) - 4; + } + else + { + alignTo = stream.Position; + } + + // Pad the file to a multiple of 64 bytes + var padBytes = (padLength - alignTo % padLength) % padLength; + var pad = new byte[padBytes]; + for (var i = 0; i < pad.Length; i++) + { + pad[i] = 0xAD; + } + + stream.Write(pad, 0, pad.Length); + } + private PackageBuildTransientFile WriteFile(PackageBuildInputFile input) { using var inputStream = input.MakeInputStream(); @@ -32,11 +85,7 @@ private PackageBuildTransientFile WriteFile(PackageBuildInputFile input) var compression = Build.Compression; var compressionLevel = Build.CompressionLevel; - if (input.Path.EndsWith(".gts") - || input.Path.EndsWith(".gtp") - || input.Path.EndsWith(".wem") - || input.Path.EndsWith(".bnk") - || inputStream.Length == 0) + if (!CanCompressFile(input, inputStream)) { compression = CompressionMethod.None; compressionLevel = LSCompressionLevel.Fast; @@ -78,102 +127,13 @@ private PackageBuildTransientFile WriteFile(PackageBuildInputFile input) if (!Build.Flags.HasFlag(PackageFlags.Solid)) { - int padLength = Build.Version.PaddingSize(); - long alignTo; - if (Build.Version >= PackageVersion.V16) - { - alignTo = stream.Position - Marshal.SizeOf(typeof(LSPKHeader16)) - 4; - } - else - { - alignTo = stream.Position; - } - - // Pad the file to a multiple of 64 bytes - var padBytes = (padLength - alignTo % padLength) % padLength; - var pad = new byte[padBytes]; - for (var i = 0; i < pad.Length; i++) - { - pad[i] = 0xAD; - } - - stream.Write(pad, 0, pad.Length); + WritePadding(stream); } return packaged; } - private void PackV7(FileStream mainStream) - where THeader : ILSPKHeader - where TFile : ILSPKFile - { - // <= v9 packages don't support LZ4 - if ((Build.Version == PackageVersion.V7 || Build.Version == PackageVersion.V9) && Build.Compression == CompressionMethod.LZ4) - { - Build.Compression = CompressionMethod.Zlib; - } - - Metadata.NumFiles = (uint)Build.Files.Count; - Metadata.FileListSize = (UInt32)(Marshal.SizeOf(typeof(TFile)) * Build.Files.Count); - - using var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true); - - Metadata.DataOffset = (UInt32)Marshal.SizeOf(typeof(THeader)) + Metadata.FileListSize; - if (Metadata.Version >= 10) - { - Metadata.DataOffset += 4; - 
} - - int paddingLength = Build.Version.PaddingSize(); - if (Metadata.DataOffset % paddingLength > 0) - { - Metadata.DataOffset += (UInt32)(paddingLength - Metadata.DataOffset % paddingLength); - } - - // Write a placeholder instead of the actual headers; we'll write them after we - // compressed and flushed all files to disk - var placeholder = new byte[Metadata.DataOffset]; - writer.Write(placeholder); - - var writtenFiles = PackFiles(); - - mainStream.Seek(0, SeekOrigin.Begin); - if (Metadata.Version >= 10) - { - writer.Write(PackageHeaderCommon.Signature); - } - Metadata.NumParts = (UInt16)Streams.Count; - Metadata.Md5 = ComputeArchiveHash(); - - var header = (THeader)THeader.FromCommonHeader(Metadata); - BinUtils.WriteStruct(writer, ref header); - - WriteFileList(writer, writtenFiles); - } - - private void PackV13(FileStream mainStream) - where THeader : ILSPKHeader - where TFile : ILSPKFile - { - var writtenFiles = PackFiles(); - - using var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true); - - Metadata.FileListOffset = (UInt64)mainStream.Position; - WriteCompressedFileList(writer, writtenFiles); - - Metadata.FileListSize = (UInt32)(mainStream.Position - (long)Metadata.FileListOffset); - Metadata.Md5 = ComputeArchiveHash(); - Metadata.NumParts = (UInt16)Streams.Count; - - var header = (THeader)THeader.FromCommonHeader(Metadata); - BinUtils.WriteStruct(writer, ref header); - - writer.Write((UInt32)(8 + Marshal.SizeOf(typeof(THeader)))); - writer.Write(PackageHeaderCommon.Signature); - } - - private List PackFiles() + protected List PackFiles() { long totalSize = Build.Files.Sum(p => (long)p.Size()); long currentSize = 0; @@ -189,7 +149,7 @@ private List PackFiles() return writtenFiles; } - private void WriteFileList(BinaryWriter metadataWriter, List files) + internal void WriteFileList(BinaryWriter metadataWriter, List files) where TFile : ILSPKFile { foreach (var file in files) @@ -207,7 +167,7 @@ private void WriteFileList(BinaryWriter metadataWriter, List(BinaryWriter metadataWriter, List files) + internal void WriteCompressedFileList(BinaryWriter metadataWriter, List files) where TFile : ILSPKFile { byte[] fileListBuf; @@ -239,43 +199,7 @@ private void WriteCompressedFileList(BinaryWriter metadataWriter, List(FileStream mainStream) - where THeader : ILSPKHeader - where TFile : ILSPKFile - { - using (var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true)) - { - writer.Write(PackageHeaderCommon.Signature); - var header = (THeader)THeader.FromCommonHeader(Metadata); - BinUtils.WriteStruct(writer, ref header); - } - - var writtenFiles = PackFiles(); - - using (var writer = new BinaryWriter(mainStream, new UTF8Encoding(), true)) - { - Metadata.FileListOffset = (UInt64)mainStream.Position; - WriteCompressedFileList(writer, writtenFiles); - - Metadata.FileListSize = (UInt32)(mainStream.Position - (long)Metadata.FileListOffset); - if (Build.Hash) - { - Metadata.Md5 = ComputeArchiveHash(); - } - else - { - Metadata.Md5 = new byte[0x10]; - } - - Metadata.NumParts = (UInt16)Streams.Count; - - mainStream.Seek(4, SeekOrigin.Begin); - var header = (THeader)THeader.FromCommonHeader(Metadata); - BinUtils.WriteStruct(writer, ref header); - } - } - - public byte[] ComputeArchiveHash() + protected byte[] ComputeArchiveHash() { // MD5 is computed over the contents of all files in an alphabetically sorted order var orderedFileList = Build.Files.Select(item => item).ToList(); @@ -306,26 +230,149 @@ public byte[] ComputeArchiveHash() return hash; } - public void Write() + 
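// Shape of this refactor: the version-specific PackV7/PackV13/PackV15 methods move out of the
// single writer into the PackageWriter_V7/_V13/_V15 subclasses below, each overriding this
// abstract Write(); shared plumbing (stream setup, PackFiles, padding, file-list serialization,
// archive MD5) stays in the base class, and PackageWriterFactory.Create() picks the subclass
// from the requested PackageVersion.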
abstract public void Write(); +} + + +internal class PackageWriter_V7 : PackageWriter + where THeader : ILSPKHeader + where TFile : ILSPKFile +{ + public PackageWriter_V7(PackageBuildData build, string packagePath) : base(build, packagePath) + { } + + public override void Write() { - var mainStream = File.Open(PackagePath, FileMode.Create, FileAccess.Write); - Streams.Add(mainStream); + // <= v9 packages don't support LZ4 + if ((Build.Version == PackageVersion.V7 || Build.Version == PackageVersion.V9) && Build.Compression == CompressionMethod.LZ4) + { + Build.Compression = CompressionMethod.Zlib; + } - Metadata.Version = (UInt32)Build.Version; - Metadata.Flags = Build.Flags; - Metadata.Priority = Build.Priority; - Metadata.Md5 = new byte[16]; + Metadata.NumFiles = (uint)Build.Files.Count; + Metadata.FileListSize = (UInt32)(Marshal.SizeOf(typeof(TFile)) * Build.Files.Count); + + using var writer = new BinaryWriter(MainStream, new UTF8Encoding(), true); + + Metadata.DataOffset = (UInt32)Marshal.SizeOf(typeof(THeader)) + Metadata.FileListSize; + if (Metadata.Version >= 10) + { + Metadata.DataOffset += 4; + } + + int paddingLength = Build.Version.PaddingSize(); + if (Metadata.DataOffset % paddingLength > 0) + { + Metadata.DataOffset += (UInt32)(paddingLength - Metadata.DataOffset % paddingLength); + } + + // Write a placeholder instead of the actual headers; we'll write them after we + // compressed and flushed all files to disk + var placeholder = new byte[Metadata.DataOffset]; + writer.Write(placeholder); + + var writtenFiles = PackFiles(); + + MainStream.Seek(0, SeekOrigin.Begin); + if (Metadata.Version >= 10) + { + writer.Write(PackageHeaderCommon.Signature); + } + Metadata.NumParts = (UInt16)Streams.Count; + Metadata.Md5 = ComputeArchiveHash(); + + var header = (THeader)THeader.FromCommonHeader(Metadata); + BinUtils.WriteStruct(writer, ref header); + + WriteFileList(writer, writtenFiles); + } +} + + +internal class PackageWriter_V13 : PackageWriter + where THeader : ILSPKHeader + where TFile : ILSPKFile +{ + public PackageWriter_V13(PackageBuildData build, string packagePath) : base(build, packagePath) + { } + + public override void Write() + { + var writtenFiles = PackFiles(); + + using var writer = new BinaryWriter(MainStream, new UTF8Encoding(), true); + + Metadata.FileListOffset = (UInt64)MainStream.Position; + WriteCompressedFileList(writer, writtenFiles); + + Metadata.FileListSize = (UInt32)(MainStream.Position - (long)Metadata.FileListOffset); + Metadata.Md5 = ComputeArchiveHash(); + Metadata.NumParts = (UInt16)Streams.Count; - switch (Build.Version) + var header = (THeader)THeader.FromCommonHeader(Metadata); + BinUtils.WriteStruct(writer, ref header); + + writer.Write((UInt32)(8 + Marshal.SizeOf(typeof(THeader)))); + writer.Write(PackageHeaderCommon.Signature); + } +} + + +internal class PackageWriter_V15 : PackageWriter + where THeader : ILSPKHeader + where TFile : ILSPKFile +{ + public PackageWriter_V15(PackageBuildData build, string packagePath) : base(build, packagePath) + { } + + public override void Write() + { + using (var writer = new BinaryWriter(MainStream, new UTF8Encoding(), true)) { - case PackageVersion.V18: PackV15(mainStream); break; - case PackageVersion.V16: PackV15(mainStream); break; - case PackageVersion.V15: PackV15(mainStream); break; - case PackageVersion.V13: PackV13(mainStream); break; - case PackageVersion.V10: PackV7(mainStream); break; - case PackageVersion.V9: - case PackageVersion.V7: PackV7(mainStream); break; - default: throw new 
ArgumentException($"Cannot write version {Build.Version} packages"); + writer.Write(PackageHeaderCommon.Signature); + var header = (THeader)THeader.FromCommonHeader(Metadata); + BinUtils.WriteStruct(writer, ref header); + } + + var writtenFiles = PackFiles(); + + using (var writer = new BinaryWriter(MainStream, new UTF8Encoding(), true)) + { + Metadata.FileListOffset = (UInt64)MainStream.Position; + WriteCompressedFileList(writer, writtenFiles); + + Metadata.FileListSize = (UInt32)(MainStream.Position - (long)Metadata.FileListOffset); + if (Build.Hash) + { + Metadata.Md5 = ComputeArchiveHash(); + } + else + { + Metadata.Md5 = new byte[0x10]; + } + + Metadata.NumParts = (UInt16)Streams.Count; + + MainStream.Seek(4, SeekOrigin.Begin); + var header = (THeader)THeader.FromCommonHeader(Metadata); + BinUtils.WriteStruct(writer, ref header); } } } + +public static class PackageWriterFactory +{ + public static PackageWriter Create(PackageBuildData build, string packagePath) + { + return build.Version switch + { + PackageVersion.V18 => new PackageWriter_V15(build, packagePath), + PackageVersion.V16 => new PackageWriter_V15(build, packagePath), + PackageVersion.V15 => new PackageWriter_V15(build, packagePath), + PackageVersion.V13 => new PackageWriter_V13(build, packagePath), + PackageVersion.V10 => new PackageWriter_V7(build, packagePath), + PackageVersion.V9 or PackageVersion.V7 => new PackageWriter_V7(build, packagePath), + _ => throw new ArgumentException($"Cannot write version {build.Version} packages"), + }; + } +} diff --git a/LSLib/LS/Save/SavegameHelpers.cs b/LSLib/LS/Save/SavegameHelpers.cs index 3ab47d28..3046c9b4 100644 --- a/LSLib/LS/Save/SavegameHelpers.cs +++ b/LSLib/LS/Save/SavegameHelpers.cs @@ -131,7 +131,7 @@ public void ResaveStory(Story.Story story, Game game, string path) } } - using (var packageWriter = new PackageWriter(build, path)) + using (var packageWriter = PackageWriterFactory.Create(build, path)) { packageWriter.Write(); } From 0d2fb8c8131e48a166f2ace590a0f873dd9560df Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sun, 17 Mar 2024 16:03:53 +0100 Subject: [PATCH 091/139] Split off stats to a separate lib --- .gitignore | 1 + LSLib/LS/Stats/Parser/PropertyDefinitions.cs | 53 - LSLib/LS/Stats/Parser/StatNodes.cs | 34 - LSLib/LS/Stats/Parser/StatProperty.lex | 90 -- LSLib/LS/Stats/Parser/StatProperty.yy | 122 -- LSLib/LS/Stats/StatDefinitions.cs | 1345 ----------------- LSLib/LSLib.csproj | 8 - LSLibStats/LSLibStats.csproj | 17 + .../Parser => LSLibStats/Stats/File}/Stat.lex | 2 +- .../Parser => LSLibStats/Stats/File}/Stat.yy | 2 +- LSLibStats/Stats/File/StatNodes.cs | 34 + .../Stats/File}/StatParser.cs | 84 +- LSLibStats/Stats/Functor/Functor.lex | 90 ++ LSLibStats/Stats/Functor/Functor.yy | 122 ++ .../Stats/Functor/FunctorParser.cs | 78 +- .../Stats/Functor/FunctorParserDefinitions.cs | 29 + .../Stats/Functor/Lua.lex | 2 +- .../Stats/Functor/Lua.yy | 2 +- .../Stats/Functor/LuaParser.cs | 2 +- LSLibStats/Stats/StatDefinitions.cs | 250 +++ .../LS => LSLibStats}/Stats/StatFileParser.cs | 76 +- .../Stats/StatValueParsers.cs | 73 +- LSTools.sln | 20 + StatParser/StatChecker.cs | 10 +- StatParser/StatParser.csproj | 1 + 25 files changed, 692 insertions(+), 1855 deletions(-) delete mode 100644 LSLib/LS/Stats/Parser/PropertyDefinitions.cs delete mode 100644 LSLib/LS/Stats/Parser/StatNodes.cs delete mode 100644 LSLib/LS/Stats/Parser/StatProperty.lex delete mode 100644 LSLib/LS/Stats/Parser/StatProperty.yy delete mode 100644 LSLib/LS/Stats/StatDefinitions.cs create mode 100644 
LSLibStats/LSLibStats.csproj rename {LSLib/LS/Stats/Parser => LSLibStats/Stats/File}/Stat.lex (98%) rename {LSLib/LS/Stats/Parser => LSLibStats/Stats/File}/Stat.yy (99%) create mode 100644 LSLibStats/Stats/File/StatNodes.cs rename {LSLib/LS/Stats/Parser => LSLibStats/Stats/File}/StatParser.cs (66%) create mode 100644 LSLibStats/Stats/Functor/Functor.lex create mode 100644 LSLibStats/Stats/Functor/Functor.yy rename LSLib/LS/Stats/Parser/StatPropertyParser.cs => LSLibStats/Stats/Functor/FunctorParser.cs (70%) create mode 100644 LSLibStats/Stats/Functor/FunctorParserDefinitions.cs rename LSLib/LS/Stats/Parser/StatLua.lex => LSLibStats/Stats/Functor/Lua.lex (98%) rename LSLib/LS/Stats/Parser/StatLua.yy => LSLibStats/Stats/Functor/Lua.yy (98%) rename LSLib/LS/Stats/Parser/StatLuaParser.cs => LSLibStats/Stats/Functor/LuaParser.cs (93%) create mode 100644 LSLibStats/Stats/StatDefinitions.cs rename {LSLib/LS => LSLibStats}/Stats/StatFileParser.cs (87%) rename {LSLib/LS => LSLibStats}/Stats/StatValueParsers.cs (91%) diff --git a/.gitignore b/.gitignore index 71a86311..9a444c6e 100644 --- a/.gitignore +++ b/.gitignore @@ -73,3 +73,4 @@ DebuggerFrontend/DbgProtocol.cs StatFastParser LSLibSearch LSLibSearchIndexer +*.lst diff --git a/LSLib/LS/Stats/Parser/PropertyDefinitions.cs b/LSLib/LS/Stats/Parser/PropertyDefinitions.cs deleted file mode 100644 index 855fde16..00000000 --- a/LSLib/LS/Stats/Parser/PropertyDefinitions.cs +++ /dev/null @@ -1,53 +0,0 @@ -namespace LSLib.LS.Stats.Properties; - -public class Requirement -{ - // Requirement negation ("Immobile" vs. "!Immobile"). - public bool Not; - // Textual name of requirement - public string RequirementName; - // Integer requirement parameter - public int IntParam; - // Tag name parameter ("Tag" requirement only) - public string TagParam; -} - -public class Property -{ - public string TextKey; - public string Context; - public object Condition; - public PropertyAction Action; -} - -public class PropertyAction -{ - public string Action; - public List Arguments; - public int StartPos; - public int EndPos; -} - -public enum ConditionOperator -{ - And, - Or -}; - -public class Condition -{ - public bool Not; -} - -public class UnaryCondition : Condition -{ - public string ConditionType; - public string Argument; -} - -public class BinaryCondition : Condition -{ - public Condition Left; - public Condition Right; - public ConditionOperator Operator; -} diff --git a/LSLib/LS/Stats/Parser/StatNodes.cs b/LSLib/LS/Stats/Parser/StatNodes.cs deleted file mode 100644 index f254ea03..00000000 --- a/LSLib/LS/Stats/Parser/StatNodes.cs +++ /dev/null @@ -1,34 +0,0 @@ -using LSLib.Parser; - -namespace LSLib.LS.Stats.StatParser; - -/// -/// List of stat properties -/// -public class StatDeclaration -{ - public CodeLocation Location; - public Dictionary Properties = []; - public bool WasValidated = false; -} - -/// -/// A string property of a stat entry (Key/value pair) -/// -public class StatProperty -{ - public String Key; - public object Value; - public CodeLocation Location; - public CodeLocation ValueLocation; -} - -/// -/// An element of collection of a stat entry (Key/value pair) -/// -public class StatElement -{ - public String Collection; - public object Value; - public CodeLocation Location; -} diff --git a/LSLib/LS/Stats/Parser/StatProperty.lex b/LSLib/LS/Stats/Parser/StatProperty.lex deleted file mode 100644 index 769ba8c5..00000000 --- a/LSLib/LS/Stats/Parser/StatProperty.lex +++ /dev/null @@ -1,90 +0,0 @@ -%namespace LSLib.LS.Stats.Properties -%visibility 
public -%scannertype StatPropertyScanner -%scanbasetype StatPropertyScanBase -%tokentype StatPropertyTokens - -letter [a-zA-Z_] -digit [0-9] -namechar [a-zA-Z0-9_] -nonseparator [^,;:()\[\]! ] - -%% - -/* Special trigger words to determine expression type */ -"__TYPE_Properties__" return (int)StatPropertyTokens.EXPR_PROPERTIES; -"__TYPE_DescriptionParams__" return (int)StatPropertyTokens.EXPR_DESCRIPTION_PARAMS; - -/* Reserved words */ -"IF" return (int)StatPropertyTokens.IF; - -/* Text keys */ -"CastOffhand" return (int)StatPropertyTokens.TEXT_KEY; -"Cast2" return (int)StatPropertyTokens.TEXT_KEY; -"Cast3" return (int)StatPropertyTokens.TEXT_KEY; - -/* Stats contexts */ -"ABILITY_CHECK" return (int)StatPropertyTokens.CONTEXT; -"ACTION_RESOURCES_CHANGED" return (int)StatPropertyTokens.CONTEXT; -"AI_IGNORE" return (int)StatPropertyTokens.CONTEXT; -"AI_ONLY" return (int)StatPropertyTokens.CONTEXT; -"AOE" return (int)StatPropertyTokens.CONTEXT; -"ATTACK" return (int)StatPropertyTokens.CONTEXT; -"ATTACKED" return (int)StatPropertyTokens.CONTEXT; -"ATTACKED_IN_MELEE_RANGE" return (int)StatPropertyTokens.CONTEXT; -"ATTACKING_IN_MELEE_RANGE" return (int)StatPropertyTokens.CONTEXT; -"CAST" return (int)StatPropertyTokens.CONTEXT; -"CAST_RESOLVED" return (int)StatPropertyTokens.CONTEXT; -"COMBAT_ENDED" return (int)StatPropertyTokens.CONTEXT; -"CREATE_2" return (int)StatPropertyTokens.CONTEXT; -"DAMAGE" return (int)StatPropertyTokens.CONTEXT; -"DAMAGED" return (int)StatPropertyTokens.CONTEXT; -"DAMAGE_PREVENTED" return (int)StatPropertyTokens.CONTEXT; -"DAMAGED_PREVENTED" return (int)StatPropertyTokens.CONTEXT; -"ENTER_ATTACK_RANGE" return (int)StatPropertyTokens.CONTEXT; -"EQUIP" return (int)StatPropertyTokens.CONTEXT; -"LOCKPICKING_SUCCEEDED" return (int)StatPropertyTokens.CONTEXT; -"GROUND" return (int)StatPropertyTokens.CONTEXT; -"HEAL" return (int)StatPropertyTokens.CONTEXT; -"HEALED" return (int)StatPropertyTokens.CONTEXT; -"INTERRUPT_USED" return (int)StatPropertyTokens.CONTEXT; -"INVENTORY_CHANGED" return (int)StatPropertyTokens.CONTEXT; -"LEAVE_ATTACK_RANGE" return (int)StatPropertyTokens.CONTEXT; -"LONG_REST" return (int)StatPropertyTokens.CONTEXT; -"MOVED_DISTANCE" return (int)StatPropertyTokens.CONTEXT; -"OBSCURITY_CHANGED" return (int)StatPropertyTokens.CONTEXT; -"PROFICIENCY_CHANGED" return (int)StatPropertyTokens.CONTEXT; -"PROJECTILE" return (int)StatPropertyTokens.CONTEXT; -"PUSH" return (int)StatPropertyTokens.CONTEXT; -"PUSHED" return (int)StatPropertyTokens.CONTEXT; -"SELF" return (int)StatPropertyTokens.CONTEXT; -"SHORT_REST" return (int)StatPropertyTokens.CONTEXT; -"STATUS_APPLIED" return (int)StatPropertyTokens.CONTEXT; -"STATUS_APPLY" return (int)StatPropertyTokens.CONTEXT; -"STATUS_REMOVE" return (int)StatPropertyTokens.CONTEXT; -"STATUS_REMOVED" return (int)StatPropertyTokens.CONTEXT; -"SURFACE_ENTER" return (int)StatPropertyTokens.CONTEXT; -"TARGET" return (int)StatPropertyTokens.CONTEXT; -"TURN" return (int)StatPropertyTokens.CONTEXT; - -/* Special characters */ -":" return (int)':'; -"(" return (int)'('; -")" return (int)')'; -"[" return (int)'['; -"]" return (int)']'; -"," return (int)','; -";" return (int)';'; -"!" return (int)'!'; -"-" return (int)'-'; -"." 
return (int)'.'; -[ ] ; - -{letter}({namechar})+ { yylval = MakeLiteral(yytext); return (int)StatPropertyTokens.NAME; } -(-)?{digit}({digit})* { yylval = MakeLiteral(yytext); return (int)StatPropertyTokens.INTEGER; } -{digit}{digit}*d{digit}{digit}* { yylval = yytext; return (int)StatPropertyTokens.DICE_ROLL; } -({nonseparator})+ { yylval = MakeLiteral(yytext); return (int)StatPropertyTokens.TEXT; } - -%{ - yylloc = new QUT.Gppg.LexLocation(tokLin, tokCol, tokELin, tokECol); -%} diff --git a/LSLib/LS/Stats/Parser/StatProperty.yy b/LSLib/LS/Stats/Parser/StatProperty.yy deleted file mode 100644 index 3ef5939b..00000000 --- a/LSLib/LS/Stats/Parser/StatProperty.yy +++ /dev/null @@ -1,122 +0,0 @@ -%namespace LSLib.LS.Stats.Properties -%partial -%visibility public -%parsertype StatPropertyParser -%tokentype StatPropertyTokens -%YYSTYPE System.Object - -%start Root - -/* Trigger Lexemes */ -%token EXPR_PROPERTIES -%token EXPR_DESCRIPTION_PARAMS - -/* Reserved words */ -%token IF - -/* Functor Context */ -%token CONTEXT -/* Status/Tag name */ -%token NAME -/* Known text keys */ -%token TEXT_KEY -/* Integer literal */ -%token INTEGER -/* Text-like (unquoted) literal */ -%token TEXT -/* eg. 1d10 */ -%token DICE_ROLL - -%% - -/* A special "trigger word" is prepended to support parsing multiple types from the same lexer/parser */ -Root : EXPR_PROPERTIES Properties { $$ = $2; } - | EXPR_DESCRIPTION_PARAMS OptionalFunctorArgs { $$ = $2; } - ; - -/****************************************************************** - * - * PROPERTY PARSING - * - ******************************************************************/ - -Properties : /* empty */ { $$ = MakePropertyList(); } - | Property { $$ = AddProperty(MakePropertyList(), $1); } - | Properties ';' - | Properties ';' Property { $$ = AddProperty($1, $3); } - ; - -TextKeyProperties : TEXT_KEY '[' Properties ']' { $$ = SetTextKey($3, $1); }; - -Property : PropContexts PropCondition FunctorCall { $$ = MakeProperty($1, $2, $3); } - | TextKeyProperties - ; - -PropContexts : /* empty */ - | PropContextList { $$ = $1; } - ; - -PropContextList : PropContext { $$ = $1; } - | PropContextList PropContext { $$ = $1; } - ; - -PropContext : CONTEXT ':' { $$ = $1; }; - -PropCondition : /* empty */ - | IF '(' NonEmptyFunctorArg ')' ':' { $$ = $3; } - ; - -FunctorCall : FunctorName OptionalFunctorArgList { $$ = MakeAction($1, $2); }; - -FunctorName : NAME { $$ = $1; MarkActionStart(); }; - -OptionalFunctorArgList : /* empty */ { $$ = MakeArgumentList(); } - | '(' OptionalFunctorArgs ')' { $$ = $2; } - ; - -OptionalFunctorArgs : /* empty */ { $$ = MakeArgumentList(); } - | FunctorArgs - ; - -FunctorArgs : NonEmptyFunctorArg { $$ = AddArgument(MakeArgumentList(), $1); } - | FunctorArgs ',' FunctorArg { $$ = AddArgument($1, $3); } - ; - -FunctorArg : /* empty */ - | NonEmptyFunctorArg - ; - -NonEmptyFunctorArg : FunctorArgStart LuaRoot FunctorArgEnd { $$ = $3; }; - -FunctorArgStart : /* empty */ { InitLiteral(); }; - -FunctorArgEnd : /* empty */ { $$ = MakeLiteral(); }; - -LuaRoot : LuaRootSymbol - | LuaRoot LuaRootSymbol - | LuaRoot '(' LuaExpr ')' - | LuaRoot '(' ')' - | '(' LuaExpr ')' - ; - -LuaExpr : LuaSymbol - | LuaExpr LuaSymbol - | LuaExpr '(' LuaExpr ')' - | '(' LuaExpr ')' - | LuaExpr '(' ')' - ; - -LuaRootSymbol : NAME - | INTEGER - | TEXT - | CONTEXT - | DICE_ROLL - | ':' - | '!' 
- | ';' - | '-' - ; - -LuaSymbol : LuaRootSymbol - | ',' - ; diff --git a/LSLib/LS/Stats/StatDefinitions.cs b/LSLib/LS/Stats/StatDefinitions.cs deleted file mode 100644 index a1b50fac..00000000 --- a/LSLib/LS/Stats/StatDefinitions.cs +++ /dev/null @@ -1,1345 +0,0 @@ -namespace LSLib.LS.Stats; - -public class StatEnumeration(string name) -{ - public readonly string Name = name; - public readonly List Values = []; - public readonly Dictionary ValueToIndexMap = []; - - public void AddItem(int index, string value) - { - if (Values.Count != index) - { - throw new Exception("Enumeration items must be added in order."); - } - - Values.Add(value); - - // Some vanilla enums are bogus and contain names multiple times - ValueToIndexMap.TryAdd(value, index); - } - - public void AddItem(string value) - { - AddItem(Values.Count, value); - } -} - -public class StatField -{ - public string Name; - public string Type; - public StatEnumeration EnumType; - public List ReferenceTypes; - - private IStatValueValidator Validator; - - public IStatValueValidator GetValidator(StatValueValidatorFactory factory, StatDefinitionRepository definitions) - { - Validator ??= factory.CreateValidator(this, definitions); - return Validator; - } -} - -public class StatEntryType(string name, string nameProperty, string basedOnProperty) -{ - public readonly string Name = name; - public readonly string NameProperty = nameProperty; - public readonly string BasedOnProperty = basedOnProperty; - public readonly Dictionary Fields = []; -} - -public class StatFunctorArgumentType -{ - public string Name; - public string Type; -} - -public class StatFunctorType -{ - public string Name; - public int RequiredArgs; - public List Args; -} - -public class StatDefinitionRepository -{ - // Version of modified Enumerations.xml and StatObjectDefinitions.sod we expect - public const string CustomizationsVersion = "1"; - - public readonly Dictionary Enumerations = []; - public readonly Dictionary Types = []; - public readonly Dictionary Functors = []; - public readonly Dictionary Boosts = []; - public readonly Dictionary DescriptionParams = []; - - private StatField AddField(StatEntryType defn, string name, string typeName) - { - var field = new StatField - { - Name = name, - Type = typeName - }; - - if (Enumerations.TryGetValue(typeName, out StatEnumeration enumType) && enumType.Values.Count > 0) - { - field.EnumType = enumType; - } - - defn.Fields.Add(name, field); - return field; - } - - private void AddEnumeration(string name, List labels) - { - var enumType = new StatEnumeration(name); - foreach (var label in labels) - { - enumType.AddItem(label); - } - Enumerations.Add(name, enumType); - } - - private StatFunctorArgumentType MakeFunctorArg(string name, string type) - { - return new StatFunctorArgumentType - { - Name = name, - Type = type - }; - } - - public void AddBoost(string name, int requiredArgs, List args) - { - AddFunctor(Boosts, name, requiredArgs, args); - } - - public void AddFunctor(string name, int requiredArgs, List args) - { - AddFunctor(Functors, name, requiredArgs, args); - } - - public void AddDescriptionParams(string name, int requiredArgs, List args) - { - AddFunctor(DescriptionParams, name, requiredArgs, args); - } - - public void AddFunctor(Dictionary dict, string name, int requiredArgs, List argDescs) - { - var args = new List(); - for (int i = 0; i < argDescs.Count; i += 2) - { - args.Add(MakeFunctorArg(argDescs[i], argDescs[i + 1])); - } - - AddFunctor(dict, name, requiredArgs, args); - } - - public void 
AddFunctor(Dictionary dict, string name, int requiredArgs, IEnumerable args) - { - var functor = new StatFunctorType - { - Name = name, - RequiredArgs = requiredArgs, - Args = args.ToList() - }; - - dict.Add(name, functor); - } - - public void LoadDefinitions(Stream stream) - { - StatEntryType defn = null; - string line; - - using (var reader = new StreamReader(stream)) - while ((line = reader.ReadLine()) != null) - { - var trimmed = line.Trim(); - if (trimmed.Length > 0) - { - if (trimmed.StartsWith("modifier type ")) - { - var name = trimmed[15..^1]; - defn = new StatEntryType(name, "Name", "Using"); - Types.Add(defn.Name, defn); - AddField(defn, "Name", "FixedString"); - var usingRef = AddField(defn, "Using", "StatReference"); - usingRef.ReferenceTypes = - [ - new StatReferenceConstraint - { - StatType = name - } - ]; - } - else if (trimmed.StartsWith("modifier \"")) - { - var nameEnd = trimmed.IndexOf('"', 10); - var name = trimmed[10..nameEnd]; - var typeName = trimmed.Substring(nameEnd + 3, trimmed.Length - nameEnd - 4); - AddField(defn, name, typeName); - } - } - } - - // Add builtins - var itemColor = new StatEntryType("ItemColor", "ItemColorName", null); - Types.Add(itemColor.Name, itemColor); - AddField(itemColor, "ItemColorName", "FixedString"); - AddField(itemColor, "Primary Color", "FixedString"); - AddField(itemColor, "Secondary Color", "FixedString"); - AddField(itemColor, "Tertiary Color", "FixedString"); - - var itemProgressionName = new StatEntryType("ItemProgressionNames", "Name", null); - Types.Add(itemProgressionName.Name, itemProgressionName); - AddField(itemProgressionName, "Name", "FixedString"); - AddField(itemProgressionName, "Names", "Passthrough"); - - var itemProgressionVisual = new StatEntryType("ItemProgressionVisuals", "Name", null); - Types.Add(itemProgressionVisual.Name, itemProgressionVisual); - AddField(itemProgressionVisual, "Name", "FixedString"); - // FIXME - AddField(itemProgressionVisual, "LevelGroups", "Passthrough"); - AddField(itemProgressionVisual, "NameGroups", "Passthrough"); - AddField(itemProgressionVisual, "RootGroups", "Passthrough"); - - var dataType = new StatEntryType("Data", "Key", null); - Types.Add(dataType.Name, dataType); - AddField(dataType, "Key", "FixedString"); - AddField(dataType, "Value", "FixedString"); - - var treasureTableType = new StatEntryType("TreasureTable", "Name", null); - Types.Add(treasureTableType.Name, treasureTableType); - AddField(treasureTableType, "Name", "FixedString"); - AddField(treasureTableType, "MinLevel", "ConstantInt"); - AddField(treasureTableType, "MaxLevel", "ConstantInt"); - AddField(treasureTableType, "CanMerge", "ConstantInt"); - AddField(treasureTableType, "IgnoreLevelDiff", "ConstantInt"); - AddField(treasureTableType, "UseTreasureGroupCounters", "ConstantInt"); - AddField(treasureTableType, "Subtables", "TreasureSubtables"); - - var treasureSubtableType = new StatEntryType("TreasureSubtable", null, null); - Types.Add(treasureSubtableType.Name, treasureSubtableType); - AddField(treasureSubtableType, "DropCount", "FixedString"); // FIXME validate - AddField(treasureSubtableType, "StartLevel", "ConstantInt"); - AddField(treasureSubtableType, "EndLevel", "ConstantInt"); - AddField(treasureSubtableType, "Objects", "TreasureSubtableObject"); - - var treasureObjectType = new StatEntryType("TreasureSubtableObject", null, null); - Types.Add(treasureObjectType.Name, treasureObjectType); - AddField(treasureObjectType, "Drop", "TreasureDrop"); // FIXME validate - AddField(treasureObjectType, 
"Frequency", "ConstantInt"); - AddField(treasureObjectType, "Common", "ConstantInt"); - AddField(treasureObjectType, "Uncommon", "ConstantInt"); - AddField(treasureObjectType, "Rare", "ConstantInt"); - AddField(treasureObjectType, "Epic", "ConstantInt"); - AddField(treasureObjectType, "Legendary", "ConstantInt"); - AddField(treasureObjectType, "Divine", "ConstantInt"); - AddField(treasureObjectType, "Unique", "ConstantInt"); - - AddEnumeration("ResurrectType", - [ - "Living", - "Guaranteed", - "Construct", - "Undead" - ]); - - AddEnumeration("SetStatusDurationType", - [ - "SetMinimum", - "ForceSet", - "Add", - "Multiply" - ]); - - AddEnumeration("ExecuteWeaponFunctorsType", - [ - "MainHand", - "OffHand", - "BothHands" - ]); - - AddEnumeration("SpellCooldownType", - [ - "Default", - "OncePerTurn", - "OncePerCombat", - "UntilRest", - "OncePerTurnNoRealtime", - "UntilShortRest", - "UntilPerRestPerItem", - "OncePerShortRestPerItem" - ]); - - AddEnumeration("SummonDuration", - [ - "UntilLongRest", - "Permanent" - ]); - - AddEnumeration("ForceFunctorOrigin", - [ - "OriginToEntity", - "OriginToTarget", - "TargetToEntity" - ]); - - AddEnumeration("ForceFunctorAggression", - [ - "Aggressive", - "Friendly", - "Neutral" - ]); - - AddEnumeration("StatItemSlot", - [ - "Helmet", - "Breast", - "Cloak", - "MeleeMainHand", - "MeleeOffHand", - "RangedMainHand", - "RangedOffHand", - "Ring", - "Underwear", - "Boots", - "Gloves", - "Amulet", - "Ring2", - "Wings", - "Horns", - "Overhead", - "MusicalInstrument", - "VanityBody", - "VanityBoots", - "MainHand", - "OffHand" - ]); - - AddEnumeration("Magical", - [ - "Magical", - "Nonmagical" - ]); - - AddEnumeration("Nonlethal", - [ - "Lethal", - "Nonlethal" - ]); - - AddEnumeration("AllEnum", - [ - "All" - ]); - - AddEnumeration("ZoneShape", - [ - "Cone", - "Square", - ]); - - AddEnumeration("SurfaceLayer", - [ - "Ground", - "Cloud", - ]); - - AddEnumeration("RollAdjustmentType", - [ - "All", - "Distribute", - ]); - - AddEnumeration("StatsRollType", - [ - "Attack", - "MeleeWeaponAttack", - "RangedWeaponAttack", - "MeleeSpellAttack", - "RangedSpellAttack", - "MeleeUnarmedAttack", - "RangedUnarmedAttack", - "SkillCheck", - "SavingThrow", - "RawAbility", - "Damage", - "MeleeOffHandWeaponAttack", - "RangedOffHandWeaponAttack", - "DeathSavingThrow", - "MeleeWeaponDamage", - "RangedWeaponDamage", - "MeleeSpellDamage", - "RangedSpellDamage", - "MeleeUnarmedDamage", - "RangedUnarmedDamage", - ]); - - AddEnumeration("AdvantageType", - [ - "AttackRoll", - "AttackTarget", - "SavingThrow", - "AllSavingThrows", - "Ability", - "AllAbilities", - "Skill", - "AllSkills", - "SourceDialogue", - "DeathSavingThrow", - "Concentration", - ]); - - AddEnumeration("SkillType", - [ - "Deception", - "Intimidation", - "Performance", - "Persuasion", - "Acrobatics", - "SleightOfHand", - "Stealth", - "Arcana", - "History", - "Investigation", - "Nature", - "Religion", - "Athletics", - "AnimalHandling", - "Insight", - "Medicine", - "Perception", - "Survival", - ]); - - AddEnumeration("CriticalHitType", - [ - "AttackTarget", - "AttackRoll" - ]); - - AddEnumeration("Result", - [ - "Success", - "Failure" - ]); - - AddEnumeration("CriticalHitResult", - [ - "Success", - "Failure" - ]); - - AddEnumeration("CriticalHitWhen", - [ - "Never", - "Always", - "ForcedAlways" - ]); - - AddEnumeration("MovementSpeedType", - [ - "Stroll", - "Walk", - "Run", - "Sprint", - ]); - - AddEnumeration("DamageReductionType", - [ - "Half", - "Flat", - "Threshold" - ]); - - AddEnumeration("AttackRollAbility", - [ - 
"SpellCastingAbility", - "UnarmedMeleeAbility", - "AttackAbility" - ]); - - AddEnumeration("HealingDirection", - [ - "Incoming", - "Outgoing" - ]); - - AddEnumeration("ResistanceBoostFlags", - [ - "None", - "Resistant", - "Immune", - "Vulnerable", - "BelowDamageThreshold", - "ResistantToMagical", - "ImmuneToMagical", - "VulnerableToMagical", - "ResistantToNonMagical", - "ImmuneToNonMagical", - "VulnerableToNonMagical", - ]); - - AddEnumeration("UnlockSpellType", - [ - "Singular", - "AddChildren", - "MostPowerful" - ]); - - AddEnumeration("ProficiencyBonusBoostType", - [ - "AttackRoll", - "AttackTarget", - "SavingThrow", - "AllSavingThrows", - "Ability", - "AllAbilities", - "Skill", - "AllSkills", - "SourceDialogue", - "WeaponActionDC" - ]); - - AddEnumeration("ResourceReplenishType", - [ - "Never", - "Default", - "Combat", - "Rest", - "ShortRest", - "FullRest", - "ExhaustedRest" - ]); - - AddEnumeration("AttackType", - [ - "DirectHit", - "MeleeWeaponAttack", - "RangedWeaponAttack", - "MeleeOffHandWeaponAttack", - "RangedOffHandWeaponAttack", - "MeleeSpellAttack", - "RangedSpellAttack", - "MeleeUnarmedAttack", - "RangedUnarmedAttack" - ]); - - AddEnumeration("DealDamageWeaponDamageType", - [ - "MainWeaponDamageType", - "OffhandWeaponDamageType", - "MainMeleeWeaponDamageType", - "OffhandMeleeWeaponDamageType", - "MainRangedWeaponDamageType", - "OffhandRangedWeaponDamageType", - "SourceWeaponDamageType", - "ThrownWeaponDamageType", - ]); - - AddEnumeration("EngineStatusType", - [ - "DYING", - "HEAL", - "KNOCKED_DOWN", - "TELEPORT_FALLING", - "BOOST", - "REACTION", - "STORY_FROZEN", - "SNEAKING", - "UNLOCK", - "FEAR", - "SMELLY", - "INVISIBLE", - "ROTATE", - "MATERIAL", - "CLIMBING", - "INCAPACITATED", - "INSURFACE", - "POLYMORPHED", - "EFFECT", - "DEACTIVATED", - "DOWNED", - ]); - - - // Add functors - AddFunctor("ApplyStatus", 1, [ - "StatusId", "StatusId", - "Chance", "Int", - "Duration", "Lua", - "StatusSpecificParam1", "String", - "StatusSpecificParam2", "Int", - "StatusSpecificParam3", "Int", - "StatsConditions", "Conditions", - "RequiresConcentration", "Boolean" - ]); - AddFunctor("SurfaceChange", 1, [ - "SurfaceChange", "Surface Change", - "Chance", "Float", - "Arg3", "Float", - "Arg4", "Float", - "Arg5", "Float" - ]); - AddFunctor("Resurrect", 0, [ - "Chance", "Float", - "HealthPercentage", "Float", - "Type", "ResurrectType" - ]); - AddFunctor("Sabotage", 0, [ - "Amount", "Int" - ]); - AddFunctor("Summon", 1, [ - "Template", "Guid", // Root template GUID - "Duration", "SummonDurationOrInt", - "AIHelper", "SpellId", - "Arg4", "Boolean", - "StackId", "String", - "StatusToApply1", "StatusId", - "StatusToApply2", "StatusId", - "StatusToApply3", "StatusId", - "StatusToApply4", "StatusId", - "Arg10", "Boolean", - ]); - AddFunctor("Force", 1, [ - "Distance", "Lua", - "Origin", "ForceFunctorOrigin", - "Aggression", "ForceFunctorAggression", - "Arg4", "Boolean", - "Arg5", "Boolean", - ]); - AddFunctor("Douse", 0, [ - "Arg1", "Float", - "Arg2", "Float" - ]); - AddFunctor("SwapPlaces", 0, [ - "Animation", "String", - "Arg2", "Boolean", - "Arg3", "Boolean" - ]); - AddFunctor("Pickup", 0, [ - "Arg1", "String" - ]); - AddFunctor("CreateSurface", 3, [ - "Radius", "Float", - "Duration", "Float", - "SurfaceType", "Surface Type", - "IsControlledByConcentration", "Boolean", - "Arg5", "Float", - "Arg6", "Boolean" - ]); - AddFunctor("CreateConeSurface", 3, [ - "Radius", "Float", - "Duration", "Float", - "SurfaceType", "Surface Type", - "IsControlledByConcentration", "Boolean", - "Arg5", "Float", - "Arg6", 
"Boolean" - ]); - AddFunctor("RemoveStatus", 1, [ - "StatusId", "StatusIdOrGroup" - ]); - AddFunctor("DealDamage", 1, [ - "Damage", "Lua", - "DamageType", "DamageTypeOrDealDamageWeaponDamageType", - "Magical", "Magical", - "Nonlethal", "Nonlethal", - "CoinMultiplier", "Int", - "Tooltip", "Guid", - "Arg7", "Boolean", - "Arg8", "Boolean", - "Arg9", "Boolean", - "Arg10", "Boolean", - ]); - AddFunctor("ExecuteWeaponFunctors", 0, [ - "WeaponType", "ExecuteWeaponFunctorsType" - ]); - AddFunctor("RegainHitPoints", 1, [ - "HitPoints", "Lua", - "Type", "ResurrectType" - ]); - AddFunctor("TeleportSource", 0, [ - "Arg1", "Boolean", - "Arg2", "Boolean", - ]); - AddFunctor("SetStatusDuration", 2, [ - "StatusId", "StatusId", - "Duration", "Float", - "ChangeType", "SetStatusDurationType", - ]); - AddFunctor("UseSpell", 1, [ - "SpellId", "SpellId", - "IgnoreHasSpell", "Boolean", - "IgnoreChecks", "Boolean", - "Arg4", "Boolean", - "SpellCastGuid", "Guid", - ]); - AddFunctor("UseActionResource", 1, [ - "ActionResource", "String", // Action resource name - "Amount", "String", // Float or percentage - "Level", "Int", - "Arg4", "Boolean" - ]); - AddFunctor("UseAttack", 0, [ - "IgnoreChecks", "Boolean" - ]); - AddFunctor("CreateExplosion", 0, [ - "SpellId", "SpellId" - ]); - AddFunctor("BreakConcentration", 0, []); - AddFunctor("ApplyEquipmentStatus", 2, [ - "ItemSlot", "StatItemSlot", - "StatusId", "StatusId", - "Chance", "Int", - "Duration", "Lua", - "StatusSpecificParam1", "String", - "StatusSpecificParam2", "Int", - "StatusSpecificParam3", "Int", - "StatsConditions", "Conditions", - "RequiresConcentration", "Boolean" - ]); - AddFunctor("RestoreResource", 2, [ - "ActionResource", "String", // Action resource name - "Amount", "Lua", // or percentage? - "Level", "Int" - ]); - AddFunctor("Spawn", 1, [ - "TemplateId", "Guid", // Root template Guid - "AiHelper", "String", // Should be SpellId, but seemingly defunct? - "StatusToApply1", "StatusId", - "StatusToApply2", "StatusId", - "StatusToApply3", "StatusId", - "StatusToApply4", "StatusId", - "Arg7", "Boolean" - ]); - AddFunctor("Stabilize", 0, []); - AddFunctor("Unlock", 0, []); - AddFunctor("ResetCombatTurn", 0, []); - AddFunctor("RemoveAuraByChildStatus", 1, [ - "StatusId", "StatusId" - ]); - AddFunctor("SummonInInventory", 1, [ - "TemplateId", "Guid", // Root template Guid - "Duration", "SummonDurationOrInt", - "Arg3", "Int", - "Arg4", "Boolean", - "Arg5", "Boolean", - "Arg6", "Boolean", - "Arg7", "Boolean", - "Arg8", "String", - "Arg9", "String", - "Arg10", "String", - "Arg11", "String", // etc. - ]); - AddFunctor("SpawnInInventory", 1, [ - "TemplateId", "Guid", // Root template Guid - "Arg2", "Int", - "Arg3", "Boolean", - "Arg4", "Boolean", - "Arg5", "Boolean", - "Arg6", "String", - "Arg7", "String", - "Arg8", "String", // etc. 
- ]); - AddFunctor("RemoveUniqueStatus", 1, [ - "StatusId", "StatusId" - ]); - AddFunctor("DisarmWeapon", 0, []); - AddFunctor("DisarmAndStealWeapon", 0, []); - AddFunctor("SwitchDeathType", 1, [ - "DeathType", "Death Type" - ]); - AddFunctor("TriggerRandomCast", 2, [ - "Arg1", "Int", - "Arg2", "Float", - "Arg3", "String", // RandomCastOutcomesID resource - "Arg4", "String", // RandomCastOutcomesID resource - "Arg5", "String", // RandomCastOutcomesID resource - "Arg6", "String", // RandomCastOutcomesID resource - ]); - AddFunctor("GainTemporaryHitPoints", 1, [ - "Amount", "Lua" - ]); - AddFunctor("FireProjectile", 1, [ - "Arg1", "String" - ]); - AddFunctor("ShortRest", 0, []); - AddFunctor("CreateZone", 0, [ - "Shape", "ZoneShape", - "Arg2", "Float", - "Duration", "Float", - "Arg4", "String", - "Arg5", "Boolean", - ]); - AddFunctor("DoTeleport", 0, [ - "Arg1", "Float" - ]); - AddFunctor("RegainTemporaryHitPoints", 1, [ - "Amount", "Lua" - ]); - AddFunctor("RemoveStatusByLevel", 1, [ - "StatusId", "StatusIdOrGroup", - "Arg2", "Int", - "Arg3", "Ability" - ]); - AddFunctor("SurfaceClearLayer", 0, [ - "Layer1", "SurfaceLayer", - "Layer2", "SurfaceLayer", - ]); - AddFunctor("Unsummon", 0, []); - AddFunctor("CreateWall", 0, []); - AddFunctor("Counterspell", 0, []); - AddFunctor("AdjustRoll", 1, [ - "Amount", "Lua", - "Type", "RollAdjustmentType", - "DamageType", "Damage Type", - ]); - AddFunctor("SpawnExtraProjectiles", 0, [ - "Arg1", "String", // ProjectileTypeId - ]); - AddFunctor("Kill", 0, []); - AddFunctor("TutorialEvent", 0, [ - "Event", "Guid", - ]); - AddFunctor("Drop", 0, [ - "Arg1", "String", - ]); - AddFunctor("ResetCooldowns", 1, [ - "Type", "SpellCooldownType", - ]); - AddFunctor("SetRoll", 1, [ - "Roll", "Int", - "DistributionOrDamageType", "RollAdjustmentTypeOrDamageType" - ]); - AddFunctor("SetDamageResistance", 1, [ - "DamageType", "Damage Type", - ]); - AddFunctor("SetReroll", 0, [ - "Roll", "Int", - "Arg2", "Boolean" - ]); - AddFunctor("SetAdvantage", 0, []); - AddFunctor("SetDisadvantage", 0, []); - AddFunctor("MaximizeRoll", 1, [ - "DamageType", "Damage Type" - ]); - AddFunctor("CameraWait", 0, [ - "Arg1", "Float" - ]); - - - - AddDescriptionParams("DealDamage", 1, [ - "Damage", "Lua", - "DamageType", "DamageTypeOrDealDamageWeaponDamageType", - "Magical", "Magical", - "Nonlethal", "Nonlethal", - "Arg5", "Int", - "Tooltip", "Guid", - ]); - AddDescriptionParams("RegainHitPoints", 1, [ - "HitPoints", "Lua", - "Tooltip", "Guid", - ]); - AddDescriptionParams("Distance", 1, [ - "Distance", "Float" - ]); - AddDescriptionParams("GainTemporaryHitPoints", 1, [ - "Amount", "Lua" - ]); - AddDescriptionParams("LevelMapValue", 1, [ - "LevelMap", "String" - ]); - AddDescriptionParams("ApplyStatus", 1, [ - "StatusId", "StatusId", - "Chance", "Int", - "Duration", "Lua", - "StatusSpecificParam1", "String", - "StatusSpecificParam2", "Int", - "StatusSpecificParam3", "Int", - "StatsConditions", "Conditions", - "RequiresConcentration", "Boolean" - ]); - - - - AddBoost("AC", 1, [ - "AC", "Int" - ]); - AddBoost("Ability", 2, [ - "Ability", "Ability", - "Amount", "Int", - "Arg3", "Int", - ]); - AddBoost("RollBonus", 2, [ - "RollType", "StatsRollType", - "Bonus", "Lua", - "Arg3", "String", - ]); - AddBoost("Advantage", 1, [ - "Type", "AdvantageType", - "Arg2", "String", // Depends on type - "Tag1", "String", // TagManager resource - "Tag2", "String", // TagManager resource - "Tag3", "String", // TagManager resource - ]); - AddBoost("Disadvantage", 1, [ - "Type", "AdvantageType", - "Arg2", "String", 
// Depends on type - "Tag1", "String", // TagManager resource - "Tag2", "String", // TagManager resource - "Tag3", "String", // TagManager resource - ]); - AddBoost("ActionResource", 2, [ - "Resource", "String", // Action resource name - "Amount", "Float", - "Level", "Int", - "DieType", "DieType", - ]); - AddBoost("CriticalHit", 3, [ - "Type", "CriticalHitType", - "Result", "CriticalHitResult", - "When", "CriticalHitWhen", - "Arg4", "Float", - ]); - AddBoost("AbilityFailedSavingThrow", 1, [ - "Ability", "Ability" - ]); - AddBoost("Resistance", 2, [ - "DamageType", "AllOrDamageType", - "ResistanceBoostFlags", "ResistanceBoostFlags" - ]); - AddBoost("WeaponDamageResistance", 1, [ - "DamageType1", "Damage Type", - "DamageType2", "Damage Type", - "DamageType3", "Damage Type", - ]); - AddBoost("ProficiencyBonusOverride", 1, [ - "Bonus", "Lua" - ]); - AddBoost("ActionResourceOverride", 2, [ - "Resource", "String", // Action resource name - "Amount", "Float", - "Level", "Int", - "DieType", "DieType", - ]); - AddBoost("AddProficiencyToAC", 0, []); - AddBoost("JumpMaxDistanceMultiplier", 1, [ - "Multiplier", "Float" - ]); - AddBoost("AddProficiencyToDamage", 0, []); - AddBoost("ActionResourceConsumeMultiplier", 3, [ - "Resource", "String", // Action resource name - "Multiplier", "Float", - "Level", "Int", - ]); - AddBoost("BlockVerbalComponent", 0, []); - AddBoost("BlockSomaticComponent", 0, []); - AddBoost("HalveWeaponDamage", 1, [ - "Ability", "Ability" - ]); - AddBoost("UnlockSpell", 1, [ - "SpellId", "SpellId", - "Type", "UnlockSpellType", - "SpellGuid", "String", // "None" or GUID or "" - "Cooldown", "SpellCooldownType", - "Ability", "Ability" - ]); - AddBoost("SourceAdvantageOnAttack", 0, [ - "Arg1", "Float" - ]); - AddBoost("ProficiencyBonus", 1, [ - "Type", "ProficiencyBonusBoostType", - "Arg2", "String" - ]); - AddBoost("BlockSpellCast", 0, [ - "Arg1", "Float" - ]); - AddBoost("Proficiency", 1, [ - "Arg1", "ProficiencyGroupFlags", - "Arg2", "ProficiencyGroupFlags", - "Arg3", "ProficiencyGroupFlags", - ]); - AddBoost("SourceAllyAdvantageOnAttack", 0, []); - AddBoost("IncreaseMaxHP", 1, [ - "Amount", "String" // Lua or % - ]); - AddBoost("ActionResourceBlock", 1, [ - "Resource", "String", // Action resource name - "Level", "Int", - ]); - AddBoost("StatusImmunity", 1, [ - "StatusId", "StatusIdOrGroup", - "Tag1", "String", // Tag resource name - "Tag2", "String", // Tag resource name - "Tag3", "String", // Tag resource name - "Tag4", "String", // Tag resource name - "Tag5", "String", // Tag resource name - ]); - AddBoost("UseBoosts", 1, [ - "Arg1", "StatsFunctors" - ]); - AddBoost("CannotHarmCauseEntity", 1, [ - "Arg1", "String" - ]); - AddBoost("TemporaryHP", 1, [ - "Amount", "Lua" - ]); - AddBoost("Weight", 1, [ - "Weight", "Float" - ]); - AddBoost("WeightCategory", 1, [ - "Category", "Int" - ]); - AddBoost("FactionOverride", 1, [ - "Faction", "String" // Faction resource GUID or "Source" - ]); - AddBoost("ActionResourceMultiplier", 2, [ - "Resource", "String", // Action resource name - "Multiplier", "Int", - "Level", "Int", - ]); - AddBoost("BlockRegainHP", 0, [ - "Type", "ResurrectTypes" - ]); - AddBoost("Initiative", 1, [ - "Initiative", "Int" - ]); - AddBoost("DarkvisionRange", 1, [ - "Range", "Float" - ]); - AddBoost("DarkvisionRangeMin", 1, [ - "Range", "Float" - ]); - AddBoost("DarkvisionRangeOverride", 1, [ - "Range", "Float" - ]); - AddBoost("Tag", 1, [ - "Arg1", "String" // Tag resource name - ]); - AddBoost("IgnoreDamageThreshold", 2, [ - "DamageType", "AllOrDamageType", - 
"Threshold", "Int" - ]); - AddBoost("Skill", 2, [ - "Skill", "SkillType", - "Amount", "Lua" - ]); - AddBoost("WeaponDamage", 2, [ - "Amount", "Lua", - "DamageType", "Damage Type", - "Arg3", "Boolean" - ]); - AddBoost("NullifyAbilityScore", 1, [ - "Ability", "Ability" - ]); - AddBoost("IgnoreFallDamage", 0, []); - AddBoost("Reroll", 3, [ - "RollType", "StatsRollType", - "RollBelow", "Int", - "Arg3", "Boolean" - ]); - AddBoost("DownedStatus", 1, [ - "StatusId", "StatusId", - "Arg2", "Int" - ]); - AddBoost("Invulnerable", 0, []); - AddBoost("WeaponEnchantment", 1, [ - "Enchantment", "Int" - ]); - AddBoost("GuaranteedChanceRollOutcome", 1, [ - "Arg1", "Boolean" - ]); - AddBoost("Attribute", 1, [ - "Flags", "AttributeFlags" - ]); - AddBoost("IgnoreLeaveAttackRange", 0, []); - AddBoost("GameplayLight", 2, [ - "Arg1", "Float", - "Arg2", "Boolean", - "Arg3", "Float", - "Arg4", "Boolean" - ]); - AddBoost("DialogueBlock", 0, []); - AddBoost("DualWielding", 1, [ - "DW", "Boolean" - ]); - AddBoost("Savant", 1, [ - "SpellSchool", "SpellSchool" - ]); - AddBoost("MinimumRollResult", 2, [ - "RollType", "StatsRollType", - "MinResult", "Int" - ]); - AddBoost("Lootable", 0, []); - AddBoost("CharacterWeaponDamage", 1, [ - "Amount", "Lua", - "DamageType", "Damage Type" - ]); - AddBoost("ProjectileDeflect", 0, [ - "Type1", "String", - "Type2", "String", - ]); - AddBoost("AbilityOverrideMinimum", 2, [ - "Ability", "Ability", - "Minimum", "Int" - ]); - AddBoost("ACOverrideFormula", 2, [ - "AC", "Int", - "Arg2", "Boolean", - "Ability1", "Ability", - "Ability2", "Ability", - "Ability3", "Ability", - ]); - AddBoost("FallDamageMultiplier", 1, [ - "Multiplier", "Float" - ]); - AddBoost("ActiveCharacterLight", 1, [ - "Light", "String" - ]); - AddBoost("Invisibility", 0, []); - AddBoost("TwoWeaponFighting", 0, []); - AddBoost("WeaponAttackTypeOverride", 1, [ - "Type", "AttackType" - ]); - AddBoost("WeaponDamageDieOverride", 1, [ - "DamageDie", "String", // die, eg. 1d10 - ]); - AddBoost("CarryCapacityMultiplier", 1, [ - "Multiplier", "Float" - ]); - AddBoost("WeaponProperty", 1, [ - "Flags1", "WeaponFlags" - ]); - AddBoost("WeaponAttackRollAbilityOverride", 1, [ - "Ability", "AbilityOrAttackRollAbility" - ]); - AddBoost("BlockTravel", 0, []); - AddBoost("BlockGatherAtCamp", 0, []); - AddBoost("BlockAbilityModifierDamageBonus", 0, []); - AddBoost("VoicebarkBlock", 0, []); - AddBoost("HiddenDuringCinematic", 0, []); - AddBoost("SightRangeAdditive", 1, [ - "Range", "Float" - ]); - AddBoost("SightRangeMinimum", 1, [ - "Range", "Float" - ]); - AddBoost("SightRangeMaximum", 1, [ - "Range", "Float" - ]); - AddBoost("SightRangeOverride", 1, [ - "Range", "Float" - ]); - AddBoost("CannotBeDisarmed", 0, []); - AddBoost("MovementSpeedLimit", 1, [ - "Type", "MovementSpeedType" - ]); - AddBoost("NonLethal", 0, []); - AddBoost("UnlockSpellVariant", 1, [ - "Modification1", "Lua", // TODO - add Modification parser? 
- "Modification2", "Lua", - "Modification3", "Lua", - "Modification4", "Lua", - "Modification5", "Lua", - "Modification6", "Lua", - "Modification7", "Lua", - "Modification8", "Lua", - "Modification9", "Lua", - "Modification10", "Lua", - "Modification11", "Lua", - "Modification12", "Lua", - "Modification13", "Lua", - "Modification14", "Lua", - "Modification15", "Lua" - ]); - AddBoost("DetectDisturbancesBlock", 1, [ - "Arg1", "Boolean" - ]); - AddBoost("BlockAbilityModifierFromAC", 1, [ - "Ability", "Ability" - ]); - AddBoost("ScaleMultiplier", 0, [ - "Multiplier", "Float" - ]); - AddBoost("CriticalDamageOnHit", 0, []); - AddBoost("DamageReduction", 2, [ - "DamageType", "AllOrDamageType", - "ReductionType", "DamageReductionType", - "Amount", "Lua" - ]); - AddBoost("ReduceCriticalAttackThreshold", 1, [ - "Threshold", "Int", - "StatusId", "StatusIdOrGroup" - ]); - AddBoost("PhysicalForceRangeBonus", 1, [ - "Arg1", "String" - ]); - AddBoost("ObjectSize", 1, [ - "Size", "Int" - ]); - AddBoost("ObjectSizeOverride", 1, [ - "Size", "String" - ]); - AddBoost("ItemReturnToOwner", 0, []); - AddBoost("AiArchetypeOverride", 1, [ - "Archetype", "String", - "Arg2", "Int" - ]); - AddBoost("ExpertiseBonus", 1, [ - "Skill", "SkillType" - ]); - AddBoost("EntityThrowDamage", 1, [ - "Die", "String", - "DamageType", "Damage Type" - ]); - AddBoost("WeaponDamageTypeOverride", 1, [ - "DamageType", "Damage Type" - ]); - AddBoost("MaximizeHealing", 1, [ - "Direction", "HealingDirection", - "Type", "ResurrectType" - ]); - AddBoost("IgnoreEnterAttackRange", 0, []); - AddBoost("DamageBonus", 1, [ - "Amount", "Lua", - "DamageType", "Damage Type", - "Arg3", "Boolean" - ]); - AddBoost("Detach", 0, []); - AddBoost("ConsumeItemBlock", 0, []); - AddBoost("AdvanceSpells", 1, [ - "SpellId", "SpellId", - "Arg2", "Int" - ]); - AddBoost("SpellResistance", 1, [ - "Resistance", "ResistanceBoostFlags" - ]); - AddBoost("WeaponAttackRollBonus", 1, [ - "Amount", "Lua" - ]); - AddBoost("SpellSaveDC", 1, [ - "DC", "Int" - ]); - AddBoost("RedirectDamage", 1, [ - "Arg1", "Float", - "DamageType", "Damage Type", - "DamageType2", "Damage Type", - "Arg4", "Boolean" - ]); - AddBoost("CanSeeThrough", 1, [ - "CanSeeThrough", "Boolean" - ]); - AddBoost("CanShootThrough", 1, [ - "CanShootThrough", "Boolean" - ]); - AddBoost("CanWalkThrough", 1, [ - "CanWalkThrough", "Boolean" - ]); - AddBoost("MonkWeaponAttackOverride", 0, []); - AddBoost("MonkWeaponDamageDiceOverride", 1, [ - "Arg1", "Lua" - ]); - AddBoost("IntrinsicSummonerProficiency", 0, []); - AddBoost("HorizontalFOVOverride", 1, [ - "FOV", "Float" - ]); - AddBoost("CharacterUnarmedDamage", 1, [ - "Damage", "Lua", - "DamageType", "Damage Type" - ]); - AddBoost("UnarmedMagicalProperty", 0, []); - AddBoost("ActionResourceReplenishTypeOverride", 2, [ - "ActionResource", "String", // Action resource name - "ReplenishType", "ResourceReplenishType" - ]); - AddBoost("AreaDamageEvade", 0, []); - AddBoost("ActionResourcePreventReduction", 1, [ - "ActionResource", "String", // Action resource name - "Level", "Int" - ]); - AddBoost("AttackSpellOverride", 1, [ - "AttackSpell", "SpellId", - "OriginalSpell", "SpellId" - ]); - AddBoost("Lock", 0, [ - "DC", "Guid" - ]); - AddBoost("NoAOEDamageOnLand", 0, []); - AddBoost("IgnorePointBlankDisadvantage", 1, [ - "Flags", "WeaponFlags" - ]); - AddBoost("CriticalHitExtraDice", 1, [ - "ExtraDice", "Int", - "AttackType", "AttackType" - ]); - AddBoost("DodgeAttackRoll", 2, [ - "Arg1", "Int", - "Arg2", "Int", - "Status", "StatusIdOrGroup" - ]); - 
AddBoost("GameplayObscurity", 1, [ - "Obscurity", "Float" - ]); - AddBoost("MaximumRollResult", 2, [ - "RollType", "StatsRollType", - "MinResult", "Int" - ]); - AddBoost("UnlockInterrupt", 1, [ - "Interrupt", "Interrupt" - ]); - AddBoost("IntrinsicSourceProficiency", 0, []); - AddBoost("JumpMaxDistanceBonus", 1, [ - "Bonus", "Float" - ]); - AddBoost("ArmorAbilityModifierCapOverride", 2, [ - "ArmorType", "ArmorType", - "Cap", "Int" - ]); - AddBoost("IgnoreResistance", 2, [ - "DamageType", "Damage Type", - "Flags", "ResistanceBoostFlags" - ]); - AddBoost("ConcentrationIgnoreDamage", 1, [ - "SpellSchool", "SpellSchool" - ]); - AddBoost("LeaveTriggers", 0, []); - AddBoost("IgnoreLowGroundPenalty", 1, [ - "RollType", "StatsRollType" - ]); - AddBoost("IgnoreSurfaceCover", 1, [ - "SurfaceType", "String" // Surface type - ]); - AddBoost("EnableBasicItemInteractions", 0, []); - AddBoost("SoundsBlocked", 0, []); - } - - public void LoadEnumerations(Stream stream) - { - StatEnumeration curEnum = null; - - string line; - - using var reader = new StreamReader(stream); - while ((line = reader.ReadLine()) != null) - { - var trimmed = line.Trim(); - if (trimmed.Length > 0) - { - if (trimmed.StartsWith("valuelist ")) - { - var name = trimmed[11..^1]; - curEnum = new StatEnumeration(name); - Enumerations.Add(curEnum.Name, curEnum); - } - else if (trimmed.StartsWith("value ")) - { - var label = trimmed[7..^1]; - curEnum.AddItem(label); - } - } - } - } -} diff --git a/LSLib/LSLib.csproj b/LSLib/LSLib.csproj index 817ba64f..5cf39a0e 100644 --- a/LSLib/LSLib.csproj +++ b/LSLib/LSLib.csproj @@ -42,14 +42,6 @@ "$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(MSBuildProjectDirectory)\LS\Story\HeaderParser\StoryHeader.lex.cs" "$(MSBuildProjectDirectory)\LS\Story\HeaderParser\StoryHeader.lex" "$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(MSBuildProjectDirectory)\LS\Story\HeaderParser\StoryHeader.yy.cs" "$(MSBuildProjectDirectory)\LS\Story\HeaderParser\StoryHeader.yy" -"$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(MSBuildProjectDirectory)\LS\Stats\Parser\Stat.lex.cs" "$(MSBuildProjectDirectory)\LS\Stats\Parser\Stat.lex" -"$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(MSBuildProjectDirectory)\LS\Stats\Parser\Stat.yy.cs" "$(MSBuildProjectDirectory)\LS\Stats\Parser\Stat.yy" - -"$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(MSBuildProjectDirectory)\LS\Stats\Parser\StatProperty.lex.cs" "$(MSBuildProjectDirectory)\LS\Stats\Parser\StatProperty.lex" -"$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(MSBuildProjectDirectory)\LS\Stats\Parser\StatProperty.yy.cs" "$(MSBuildProjectDirectory)\LS\Stats\Parser\StatProperty.yy" - -"$(SolutionDir)\external\gppg\binaries\GpLex" /out:"$(MSBuildProjectDirectory)\LS\Stats\Parser\StatLua.lex.cs" "$(MSBuildProjectDirectory)\LS\Stats\Parser\StatLua.lex" -"$(SolutionDir)\external\gppg\binaries\Gppg" /out:"$(MSBuildProjectDirectory)\LS\Stats\Parser\StatLua.yy.cs" "$(MSBuildProjectDirectory)\LS\Stats\Parser\StatLua.yy" LSLib LSLib diff --git a/LSLibStats/LSLibStats.csproj b/LSLibStats/LSLibStats.csproj new file mode 100644 index 00000000..3ef0fab5 --- /dev/null +++ b/LSLibStats/LSLibStats.csproj @@ -0,0 +1,17 @@ + + + + net8.0 + enable + annotations + + + + + + + + + + + diff --git a/LSLib/LS/Stats/Parser/Stat.lex b/LSLibStats/Stats/File/Stat.lex similarity index 98% rename from LSLib/LS/Stats/Parser/Stat.lex rename to LSLibStats/Stats/File/Stat.lex index be5707d6..ea41131f 100644 --- a/LSLib/LS/Stats/Parser/Stat.lex +++ b/LSLibStats/Stats/File/Stat.lex @@ 
-1,4 +1,4 @@ -%namespace LSLib.LS.Stats.StatParser +%namespace LSLib.Stats.StatParser %visibility public %scannertype StatScanner %scanbasetype StatScanBase diff --git a/LSLib/LS/Stats/Parser/Stat.yy b/LSLibStats/Stats/File/Stat.yy similarity index 99% rename from LSLib/LS/Stats/Parser/Stat.yy rename to LSLibStats/Stats/File/Stat.yy index e28bcfcd..9d8486f9 100644 --- a/LSLib/LS/Stats/Parser/Stat.yy +++ b/LSLibStats/Stats/File/Stat.yy @@ -1,4 +1,4 @@ -%namespace LSLib.LS.Stats.StatParser +%namespace LSLib.Stats.StatParser %partial %visibility public %parsertype StatParser diff --git a/LSLibStats/Stats/File/StatNodes.cs b/LSLibStats/Stats/File/StatNodes.cs new file mode 100644 index 00000000..61d8bddc --- /dev/null +++ b/LSLibStats/Stats/File/StatNodes.cs @@ -0,0 +1,34 @@ +using LSLib.Parser; + +namespace LSLib.Stats.StatParser; + +/// +/// List of stat properties +/// +public class StatDeclaration +{ + public CodeLocation? Location = null; + public Dictionary Properties = []; + public bool WasValidated = false; +} + +/// +/// A string property of a stat entry (Key/value pair) +/// +public class StatProperty(string key, object value, CodeLocation? location = null, CodeLocation? valueLocation = null) +{ + public string Key = key; + public object Value = value; + public CodeLocation? Location = location; + public CodeLocation? ValueLocation = valueLocation; +} + +/// +/// An element of collection of a stat entry (Key/value pair) +/// +public class StatElement(string collection, object value, CodeLocation? location = null) +{ + public string Collection = collection; + public object Value = value; + public CodeLocation? Location = location; +} diff --git a/LSLib/LS/Stats/Parser/StatParser.cs b/LSLibStats/Stats/File/StatParser.cs similarity index 66% rename from LSLib/LS/Stats/Parser/StatParser.cs rename to LSLibStats/Stats/File/StatParser.cs index 45be77db..dd6eee9c 100644 --- a/LSLib/LS/Stats/Parser/StatParser.cs +++ b/LSLibStats/Stats/File/StatParser.cs @@ -2,7 +2,7 @@ using QUT.Gppg; using System.Text.RegularExpressions; -namespace LSLib.LS.Stats.StatParser; +namespace LSLib.Stats.StatParser; /// /// A collection of sub-stats. @@ -17,9 +17,9 @@ namespace LSLib.LS.Stats.StatParser; public abstract class StatScanBase : AbstractScanner { - protected String fileName; + protected string? fileName = null; - public override CodeLocation yylloc { get; set; } + //public override CodeLocation yylloc { get; set; } protected virtual bool yywrap() { return true; } @@ -39,19 +39,18 @@ protected StatProperty MakeDataProperty(int startLine, int startCol, int endLine throw new Exception("Stat data entry match error"); } - return new StatProperty - { - Key = matches.Groups[1].Value, - Value = matches.Groups[2].Value, - Location = new CodeLocation(null, startLine, startCol, endLine, endCol), - ValueLocation = new CodeLocation(null, startLine, startCol + matches.Groups[2].Index, endLine, startCol + matches.Groups[2].Index + matches.Groups[2].Value.Length) - }; + return new StatProperty( + matches.Groups[1].Value, + matches.Groups[2].Value, + new CodeLocation(null, startLine, startCol, endLine, endCol), + new CodeLocation(null, startLine, startCol + matches.Groups[2].Index, endLine, startCol + matches.Groups[2].Index + matches.Groups[2].Value.Length) + ); } } public partial class StatScanner { - public StatScanner(String fileName) + public StatScanner(string? 
fileName) { this.fileName = fileName; } @@ -89,7 +88,7 @@ private StatDeclarations AddDeclaration(object declarations, object declaration) Location = location }; - private StatDeclaration MakeDeclaration(CodeLocation location, StatProperty[] properties) + private StatDeclaration MakeDeclaration(CodeLocation? location, StatProperty[] properties) { var decl = new StatDeclaration() { @@ -134,16 +133,11 @@ private StatDeclaration AddProperty(object declaration, object property) { if (!decl.Properties.TryGetValue(ele.Collection, out prop)) { - prop = new StatProperty - { - Key = ele.Collection, - Value = new StatCollection(), - Location = ele.Location - }; + prop = new StatProperty(ele.Collection, new StatCollection(), ele.Location, null); decl.Properties[ele.Collection] = prop; } - (prop.Value as StatCollection).Add(ele.Value); + ((StatCollection)prop.Value).Add(ele.Value); } else if (property is StatDeclaration otherDecl) { @@ -160,62 +154,18 @@ private StatDeclaration AddProperty(object declaration, object property) return decl; } - private StatProperty MakeProperty(object key, object value) => new StatProperty() - { - Key = (string)key, - Value = (string)value - }; - - private StatProperty MakeProperty(String key, object value) => new StatProperty() - { - Key = key, - Value = (string)value - }; - - private StatProperty MakeProperty(String key, String value) => new StatProperty() - { - Key = key, - Value = value - }; + private StatProperty MakeProperty(object key, object value) => new StatProperty((string)key, (string)value); - private StatProperty MakeProperty(CodeLocation location, object key, object value) => new StatProperty() + private StatProperty MakeProperty(CodeLocation location, object key, object value) => new StatProperty((string)key, (string)value, location) { Key = (string)key, Value = (string)value, Location = location }; - private StatProperty MakeProperty(CodeLocation location, String key, object value) => new StatProperty() + private StatElement MakeElement(String key, object value, CodeLocation? location = null) { - Key = key, - Value = (string)value, - Location = location - }; - - private StatProperty MakeProperty(CodeLocation location, String key, String value) => new StatProperty() - { - Key = key, - Value = value, - Location = location - }; - - private StatElement MakeElement(String key, object value) - { - return new StatElement() - { - Collection = key, - Value = value - }; - } - - private StatElement MakeElement(String key, object value, CodeLocation location) - { - return new StatElement() - { - Location = location, - Collection = key, - Value = value - }; + return new StatElement(key, value, location); } private StatCollection MakeCollection() => new List(); diff --git a/LSLibStats/Stats/Functor/Functor.lex b/LSLibStats/Stats/Functor/Functor.lex new file mode 100644 index 00000000..d973d429 --- /dev/null +++ b/LSLibStats/Stats/Functor/Functor.lex @@ -0,0 +1,90 @@ +%namespace LSLib.Stats.Functors +%visibility public +%scannertype FunctorScanner +%scanbasetype FunctorScanBase +%tokentype FunctorTokens + +letter [a-zA-Z_] +digit [0-9] +namechar [a-zA-Z0-9_] +nonseparator [^,;:()\[\]! 
] + +%% + +/* Special trigger words to determine expression type */ +"__TYPE_Functors__" return (int)FunctorTokens.EXPR_FUNCTORS; +"__TYPE_DescriptionParams__" return (int)FunctorTokens.EXPR_DESCRIPTION_PARAMS; + +/* Reserved words */ +"IF" return (int)FunctorTokens.IF; + +/* Text keys */ +"CastOffhand" return (int)FunctorTokens.TEXT_KEY; +"Cast2" return (int)FunctorTokens.TEXT_KEY; +"Cast3" return (int)FunctorTokens.TEXT_KEY; + +/* Stats contexts */ +"ABILITY_CHECK" return (int)FunctorTokens.CONTEXT; +"ACTION_RESOURCES_CHANGED" return (int)FunctorTokens.CONTEXT; +"AI_IGNORE" return (int)FunctorTokens.CONTEXT; +"AI_ONLY" return (int)FunctorTokens.CONTEXT; +"AOE" return (int)FunctorTokens.CONTEXT; +"ATTACK" return (int)FunctorTokens.CONTEXT; +"ATTACKED" return (int)FunctorTokens.CONTEXT; +"ATTACKED_IN_MELEE_RANGE" return (int)FunctorTokens.CONTEXT; +"ATTACKING_IN_MELEE_RANGE" return (int)FunctorTokens.CONTEXT; +"CAST" return (int)FunctorTokens.CONTEXT; +"CAST_RESOLVED" return (int)FunctorTokens.CONTEXT; +"COMBAT_ENDED" return (int)FunctorTokens.CONTEXT; +"CREATE_2" return (int)FunctorTokens.CONTEXT; +"DAMAGE" return (int)FunctorTokens.CONTEXT; +"DAMAGED" return (int)FunctorTokens.CONTEXT; +"DAMAGE_PREVENTED" return (int)FunctorTokens.CONTEXT; +"DAMAGED_PREVENTED" return (int)FunctorTokens.CONTEXT; +"ENTER_ATTACK_RANGE" return (int)FunctorTokens.CONTEXT; +"EQUIP" return (int)FunctorTokens.CONTEXT; +"LOCKPICKING_SUCCEEDED" return (int)FunctorTokens.CONTEXT; +"GROUND" return (int)FunctorTokens.CONTEXT; +"HEAL" return (int)FunctorTokens.CONTEXT; +"HEALED" return (int)FunctorTokens.CONTEXT; +"INTERRUPT_USED" return (int)FunctorTokens.CONTEXT; +"INVENTORY_CHANGED" return (int)FunctorTokens.CONTEXT; +"LEAVE_ATTACK_RANGE" return (int)FunctorTokens.CONTEXT; +"LONG_REST" return (int)FunctorTokens.CONTEXT; +"MOVED_DISTANCE" return (int)FunctorTokens.CONTEXT; +"OBSCURITY_CHANGED" return (int)FunctorTokens.CONTEXT; +"PROFICIENCY_CHANGED" return (int)FunctorTokens.CONTEXT; +"PROJECTILE" return (int)FunctorTokens.CONTEXT; +"PUSH" return (int)FunctorTokens.CONTEXT; +"PUSHED" return (int)FunctorTokens.CONTEXT; +"SELF" return (int)FunctorTokens.CONTEXT; +"SHORT_REST" return (int)FunctorTokens.CONTEXT; +"STATUS_APPLIED" return (int)FunctorTokens.CONTEXT; +"STATUS_APPLY" return (int)FunctorTokens.CONTEXT; +"STATUS_REMOVE" return (int)FunctorTokens.CONTEXT; +"STATUS_REMOVED" return (int)FunctorTokens.CONTEXT; +"SURFACE_ENTER" return (int)FunctorTokens.CONTEXT; +"TARGET" return (int)FunctorTokens.CONTEXT; +"TURN" return (int)FunctorTokens.CONTEXT; + +/* Special characters */ +":" return (int)':'; +"(" return (int)'('; +")" return (int)')'; +"[" return (int)'['; +"]" return (int)']'; +"," return (int)','; +";" return (int)';'; +"!" return (int)'!'; +"-" return (int)'-'; +"." 
return (int)'.'; +[ ] ; + +{letter}({namechar})+ { yylval = MakeLiteral(yytext); return (int)FunctorTokens.NAME; } +(-)?{digit}({digit})* { yylval = MakeLiteral(yytext); return (int)FunctorTokens.INTEGER; } +{digit}{digit}*d{digit}{digit}* { yylval = yytext; return (int)FunctorTokens.DICE_ROLL; } +({nonseparator})+ { yylval = MakeLiteral(yytext); return (int)FunctorTokens.TEXT; } + +%{ + yylloc = new QUT.Gppg.LexLocation(tokLin, tokCol, tokELin, tokECol); +%} diff --git a/LSLibStats/Stats/Functor/Functor.yy b/LSLibStats/Stats/Functor/Functor.yy new file mode 100644 index 00000000..8dd4961a --- /dev/null +++ b/LSLibStats/Stats/Functor/Functor.yy @@ -0,0 +1,122 @@ +%namespace LSLib.Stats.Functors +%partial +%visibility public +%parsertype FunctorParser +%tokentype FunctorTokens +%YYSTYPE System.Object + +%start Root + +/* Trigger Lexemes */ +%token EXPR_FUNCTORS +%token EXPR_DESCRIPTION_PARAMS + +/* Reserved words */ +%token IF + +/* Functor Context */ +%token CONTEXT +/* Status/Tag name */ +%token NAME +/* Known text keys */ +%token TEXT_KEY +/* Integer literal */ +%token INTEGER +/* Text-like (unquoted) literal */ +%token TEXT +/* eg. 1d10 */ +%token DICE_ROLL + +%% + +/* A special "trigger word" is prepended to support parsing multiple types from the same lexer/parser */ +Root : EXPR_FUNCTORS Functors { $$ = $2; } + | EXPR_DESCRIPTION_PARAMS OptionalArgs { $$ = $2; } + ; + +/****************************************************************** + * + * FUNCTOR PARSING + * + ******************************************************************/ + +Functors : /* empty */ { $$ = MakeFunctorList(); } + | Functor { $$ = AddFunctor(MakeFunctorList(), $1); } + | Functors ';' + | Functors ';' Functor { $$ = AddFunctor($1, $3); } + ; + +TextKeyFunctors : TEXT_KEY '[' Functors ']' { $$ = SetTextKey($3, $1); }; + +Functor : Contexts Condition Call { $$ = MakeFunctor($1, $2, $3); } + | TextKeyFunctors + ; + +Contexts : /* empty */ + | ContextList { $$ = $1; } + ; + +ContextList : Context { $$ = $1; } + | ContextList Context { $$ = $1; } + ; + +Context : CONTEXT ':' { $$ = $1; }; + +Condition : /* empty */ + | IF '(' NonEmptyArg ')' ':' { $$ = $3; } + ; + +Call : FunctorName OptionalArgList { $$ = MakeAction($1, $2); }; + +FunctorName : NAME { $$ = $1; MarkActionStart(); }; + +OptionalArgList : /* empty */ { $$ = MakeArgumentList(); } + | '(' OptionalArgs ')' { $$ = $2; } + ; + +OptionalArgs : /* empty */ { $$ = MakeArgumentList(); } + | Args + ; + +Args : NonEmptyArg { $$ = AddArgument(MakeArgumentList(), $1); } + | Args ',' Arg { $$ = AddArgument($1, $3); } + ; + +Arg : /* empty */ + | NonEmptyArg + ; + +NonEmptyArg : ArgStart LuaRoot ArgEnd { $$ = $3; }; + +ArgStart : /* empty */ { InitLiteral(); }; + +ArgEnd : /* empty */ { $$ = MakeLiteral(); }; + +LuaRoot : LuaRootSymbol + | LuaRoot LuaRootSymbol + | LuaRoot '(' LuaExpr ')' + | LuaRoot '(' ')' + | '(' LuaExpr ')' + ; + +LuaExpr : LuaSymbol + | LuaExpr LuaSymbol + | LuaExpr '(' LuaExpr ')' + | '(' LuaExpr ')' + | LuaExpr '(' ')' + ; + +LuaRootSymbol : NAME + | INTEGER + | TEXT + | CONTEXT + | DICE_ROLL + | ':' + | '!' 
+ | ';' + | '-' + ; + +LuaSymbol : LuaRootSymbol + | ',' + ; diff --git a/LSLib/LS/Stats/Parser/StatPropertyParser.cs b/LSLibStats/Stats/Functor/FunctorParser.cs similarity index 70% rename from LSLib/LS/Stats/Parser/StatPropertyParser.cs rename to LSLibStats/Stats/Functor/FunctorParser.cs index 9d0f30dc..d7395999 100644 --- a/LSLib/LS/Stats/Parser/StatPropertyParser.cs +++ b/LSLibStats/Stats/Functor/FunctorParser.cs @@ -1,10 +1,12 @@ -using LSLib.LS.Story.GoalParser; -using LSLib.Parser; +using LSLib.Parser; +using LSLib.Stats; +using LSLib.Stats.Functors; using QUT.Gppg; +using System.Text; -namespace LSLib.LS.Stats.Properties; +namespace LSLib.Stats.Functors; -public partial class StatPropertyScanner +public partial class FunctorScanner { public LexLocation LastLocation() { @@ -24,19 +26,19 @@ public int TokenEndPos() private object MakeLiteral(string s) => s; } -public abstract class StatPropertyScanBase : AbstractScanner +public abstract class FunctorScanBase : AbstractScanner { protected virtual bool yywrap() { return true; } } -public class StatActionValidator +public class FunctorActionValidator { private readonly StatDefinitionRepository Definitions; private readonly DiagnosticContext Context; private readonly StatValueValidatorFactory ValidatorFactory; private readonly ExpressionType ExprType; - public StatActionValidator(StatDefinitionRepository definitions, DiagnosticContext ctx, StatValueValidatorFactory validatorFactory, ExpressionType type) + public FunctorActionValidator(StatDefinitionRepository definitions, DiagnosticContext ctx, StatValueValidatorFactory validatorFactory, ExpressionType type) { Definitions = definitions; Context = ctx; @@ -44,17 +46,17 @@ public StatActionValidator(StatDefinitionRepository definitions, DiagnosticConte ExprType = type; } - public void Validate(PropertyAction action, PropertyDiagnosticContainer errors) + public void Validate(FunctorAction action, PropertyDiagnosticContainer errors) { - Dictionary functors = null; - switch (ExprType) + var functors = (ExprType) switch { - case ExpressionType.Boost: functors = Definitions.Boosts; break; - case ExpressionType.Functor: functors = Definitions.Functors; break; - case ExpressionType.DescriptionParams: functors = Definitions.DescriptionParams; break; - } + ExpressionType.Boost => Definitions.Boosts, + ExpressionType.Functor => Definitions.Functors, + ExpressionType.DescriptionParams => Definitions.DescriptionParams, + _ => throw new NotImplementedException("Cannot validate expressions of this type") + }; - if (!functors.TryGetValue(action.Action, out StatFunctorType functor)) + if (!functors.TryGetValue(action.Action, out StatFunctorType? 
functor)) { if (ExprType != ExpressionType.DescriptionParams) { @@ -116,27 +118,27 @@ public void Validate(PropertyAction action, PropertyDiagnosticContainer errors) } } -public partial class StatPropertyParser +public partial class FunctorParser { private readonly DiagnosticContext Context; - private readonly StatActionValidator ActionValidator; + private readonly FunctorActionValidator ActionValidator; private readonly byte[] Source; private readonly PropertyDiagnosticContainer Errors; private readonly CodeLocation RootLocation; - private readonly StatPropertyScanner StatScanner; + private readonly FunctorScanner StatScanner; private readonly int TokenOffset; private int LiteralStart; private int ActionStart; - public StatPropertyParser(StatPropertyScanner scnr, StatDefinitionRepository definitions, + public FunctorParser(FunctorScanner scnr, StatDefinitionRepository definitions, DiagnosticContext ctx, StatValueValidatorFactory validatorFactory, byte[] source, ExpressionType type, PropertyDiagnosticContainer errors, CodeLocation rootLocation, int tokenOffset) : base(scnr) { Context = ctx; StatScanner = scnr; Source = source; - ActionValidator = new StatActionValidator(definitions, ctx, validatorFactory, type); + ActionValidator = new FunctorActionValidator(definitions, ctx, validatorFactory, type); Errors = errors; RootLocation = rootLocation; TokenOffset = tokenOffset; @@ -147,11 +149,11 @@ public object GetParsedObject() return CurrentSemanticValue; } - private List MakePropertyList() => new List(); + private List MakeFunctorList() => new List(); - private List SetTextKey(object properties, object textKey) + private List SetTextKey(object functors, object textKey) { - var props = properties as List; + var props = functors as List; var tk = (string)textKey; foreach (var property in props) { @@ -160,28 +162,28 @@ private List SetTextKey(object properties, object textKey) return props; } - private List MergeProperties(object properties, object properties2) + private List MergeFunctors(object functors, object functors2) { - var props = properties as List; - props.Concat(properties2 as List); + var props = functors as List; + props.Concat(functors2 as List); return props; } - private List AddProperty(object properties, object property) + private List AddFunctor(object functorss, object functors) { - var props = properties as List; - props.Add(property as Property); + var props = functorss as List; + props.Add(functors as Functor); return props; } - private Property MakeProperty(object context, object condition, object action) => new Property + private Functor MakeFunctor(object context, object condition, object action) => new Functor { Context = (string)context, Condition = condition as object, - Action = action as PropertyAction + Action = action as FunctorAction }; - private List MakeArgumentList() => new List(); + private List MakeArgumentList() => new(); private List AddArgument(object arguments, object arg) { @@ -196,19 +198,19 @@ private object MarkActionStart() return null; } - private PropertyAction MakeAction(object action, object arguments) + private FunctorAction MakeAction(object action, object arguments) { var callErrors = new PropertyDiagnosticContainer(); - var act = new PropertyAction + var act = new FunctorAction { - Action = action as string, - Arguments = arguments as List, + Action = (string)action, + Arguments = (List)arguments, StartPos = ActionStart, EndPos = StatScanner.TokenEndPos() }; ActionValidator.Validate(act, callErrors); - CodeLocation location = null; + 
CodeLocation? location = null; if (RootLocation != null) { location = new CodeLocation(RootLocation.FileName, @@ -232,4 +234,4 @@ private string MakeLiteral() var val = Encoding.UTF8.GetString(Source, LiteralStart, StatScanner.TokenStartPos() - LiteralStart); return val; } -} \ No newline at end of file +} diff --git a/LSLibStats/Stats/Functor/FunctorParserDefinitions.cs b/LSLibStats/Stats/Functor/FunctorParserDefinitions.cs new file mode 100644 index 00000000..b9390756 --- /dev/null +++ b/LSLibStats/Stats/Functor/FunctorParserDefinitions.cs @@ -0,0 +1,29 @@ +namespace LSLib.Stats.Functors; + +public class Requirement +{ + // Requirement negation ("Immobile" vs. "!Immobile"). + public bool Not = false; + // Textual name of requirement + public string RequirementName = string.Empty; + // Integer requirement parameter + public int IntParam = 0; + // Tag name parameter ("Tag" requirement only) + public string? TagParam = null; +} + +public class Functor +{ + public string? TextKey = null; + public string? Context = null; + public object Condition; + public FunctorAction Action; +} + +public class FunctorAction +{ + public string Action = string.Empty; + public List Arguments = []; + public int StartPos = 0; + public int EndPos = 0; +} diff --git a/LSLib/LS/Stats/Parser/StatLua.lex b/LSLibStats/Stats/Functor/Lua.lex similarity index 98% rename from LSLib/LS/Stats/Parser/StatLua.lex rename to LSLibStats/Stats/Functor/Lua.lex index 0d9d4d3a..abefaa93 100644 --- a/LSLib/LS/Stats/Parser/StatLua.lex +++ b/LSLibStats/Stats/Functor/Lua.lex @@ -1,4 +1,4 @@ -%namespace LSLib.LS.Stats.Lua +%namespace LSLib.Stats.Lua %visibility public %scannertype StatLuaScanner %scanbasetype StatLuaScanBase diff --git a/LSLib/LS/Stats/Parser/StatLua.yy b/LSLibStats/Stats/Functor/Lua.yy similarity index 98% rename from LSLib/LS/Stats/Parser/StatLua.yy rename to LSLibStats/Stats/Functor/Lua.yy index cea2f874..e769f99e 100644 --- a/LSLib/LS/Stats/Parser/StatLua.yy +++ b/LSLibStats/Stats/Functor/Lua.yy @@ -1,4 +1,4 @@ -%namespace LSLib.LS.Stats.Lua +%namespace LSLib.Stats.Lua %partial %visibility public %parsertype StatLuaParser diff --git a/LSLib/LS/Stats/Parser/StatLuaParser.cs b/LSLibStats/Stats/Functor/LuaParser.cs similarity index 93% rename from LSLib/LS/Stats/Parser/StatLuaParser.cs rename to LSLibStats/Stats/Functor/LuaParser.cs index f742a92d..23601a36 100644 --- a/LSLib/LS/Stats/Parser/StatLuaParser.cs +++ b/LSLibStats/Stats/Functor/LuaParser.cs @@ -1,6 +1,6 @@ using QUT.Gppg; -namespace LSLib.LS.Stats.Lua; +namespace LSLib.Stats.Lua; public partial class StatLuaScanner { diff --git a/LSLibStats/Stats/StatDefinitions.cs b/LSLibStats/Stats/StatDefinitions.cs new file mode 100644 index 00000000..0e337c31 --- /dev/null +++ b/LSLibStats/Stats/StatDefinitions.cs @@ -0,0 +1,250 @@ +using LSLib.Stats.Functors; +using System.IO; +using System.Xml; +using System.Xml.Linq; + +namespace LSLib.Stats; + +public class StatEnumeration(string name) +{ + public readonly string Name = name; + public readonly List Values = []; + public readonly Dictionary ValueToIndexMap = []; + + public void AddItem(int index, string value) + { + if (Values.Count != index) + { + throw new Exception("Enumeration items must be added in order."); + } + + Values.Add(value); + + // Some vanilla enums are bogus and contain names multiple times + ValueToIndexMap.TryAdd(value, index); + } + + public void AddItem(string value) + { + AddItem(Values.Count, value); + } +} + +public class StatField(string name, string type) +{ + public string Name = name; + 
public string Type = type; + public StatEnumeration? EnumType = null; + public List? ReferenceTypes = null; + + private IStatValueValidator? Validator = null; + + public IStatValueValidator GetValidator(StatValueValidatorFactory factory, StatDefinitionRepository definitions) + { + Validator ??= factory.CreateValidator(this, definitions); + return Validator; + } +} + +public class StatEntryType(string name, string nameProperty, string? basedOnProperty) +{ + public readonly string Name = name; + public readonly string NameProperty = nameProperty; + public readonly string? BasedOnProperty = basedOnProperty; + public readonly Dictionary Fields = []; +} + +public class StatFunctorArgumentType(string name, string type) +{ + public string Name = name; + public string Type = type; +} + +public class StatFunctorType(string name, int requiredArgs, List args) +{ + public string Name = name; + public int RequiredArgs = requiredArgs; + public List Args = args; +} + +public class StatDefinitionRepository +{ + public readonly Dictionary Enumerations = []; + public readonly Dictionary Types = []; + public readonly Dictionary Functors = []; + public readonly Dictionary Boosts = []; + public readonly Dictionary DescriptionParams = []; + + private StatField AddField(StatEntryType defn, string name, string typeName) + { + var field = new StatField(name, typeName); + + if (Enumerations.TryGetValue(typeName, out var enumType) && enumType.Values.Count > 0) + { + field.EnumType = enumType; + } + + defn.Fields.Add(name, field); + return field; + } + + private void AddEnumeration(string name, List labels) + { + var enumType = new StatEnumeration(name); + foreach (var label in labels) + { + enumType.AddItem(label); + } + Enumerations.Add(name, enumType); + } + + public void AddFunctor(Dictionary dict, string name, int requiredArgs, List argDescs) + { + var args = new List(); + for (int i = 0; i < argDescs.Count; i += 2) + { + args.Add(new StatFunctorArgumentType(argDescs[i], argDescs[i + 1])); + } + + AddFunctor(dict, name, requiredArgs, args); + } + + public void AddFunctor(Dictionary dict, string name, int requiredArgs, IEnumerable args) + { + var functor = new StatFunctorType(name, requiredArgs, args.ToList()); + dict.Add(name, functor); + } + + public void LoadCustomStatEntryType(XmlElement ele) + { + var entry = new StatEntryType(ele.GetAttribute("Name"), ele.GetAttribute("NameProperty"), null); + Types.Add(entry.Name, entry); + + foreach (var field in ele.GetElementsByTagName("Field")) + { + var e = (XmlElement)field; + AddField(entry, e.GetAttribute("Name"), e.GetAttribute("Type")); + } + } + + public void LoadCustomEnumeration(XmlElement ele) + { + var name = ele.GetAttribute("Name"); + var labels = new List(); + + foreach (var field in ele.GetElementsByTagName("Label")) + { + labels.Add(((XmlElement)field).InnerText); + } + + AddEnumeration(name, labels); + } + + public void LoadCustomFunction(XmlElement ele) + { + var name = ele.GetAttribute("Name"); + var type = ele.GetAttribute("Type"); + var requiredArgsStr = ele.GetAttribute("RequiredArgs"); + var requiredArgs = (requiredArgsStr == "") ? 
0 : Int32.Parse(requiredArgsStr); + var args = new List(); + + foreach (var arg in ele.GetElementsByTagName("Arg")) + { + var e = (XmlElement)arg; + args.Add(e.GetAttribute("Name")); + args.Add(e.GetAttribute("Type")); + } + + switch (type) + { + case "Boost": AddFunctor(Boosts, name, requiredArgs, args); break; + case "Functor": AddFunctor(Functors, name, requiredArgs, args); break; + case "DescriptionParams": AddFunctor(DescriptionParams, name, requiredArgs, args); break; + default: throw new InvalidDataException($"Unknown function type in definition file: {type}"); + } + } + + public void LoadLSLibDefinitions(Stream stream) + { + var doc = new XmlDocument(); + doc.Load(stream); + + foreach (var node in doc.DocumentElement!.ChildNodes) + { + if (node is XmlElement element) + { + switch (element.Name) + { + case "EntryType": LoadCustomStatEntryType(element); break; + case "Enumeration": LoadCustomEnumeration(element); break; + case "Function": LoadCustomFunction(element); break; + default: throw new InvalidDataException($"Unknown entry type in definition file: {element.Name}"); + } + } + } + } + + public void LoadDefinitions(Stream stream) + { + StatEntryType? defn = null; + string? line; + + using var reader = new StreamReader(stream); + + while ((line = reader.ReadLine()) != null) + { + var trimmed = line.Trim(); + if (trimmed.Length > 0) + { + if (trimmed.StartsWith("modifier type ")) + { + var name = trimmed[15..^1]; + defn = new StatEntryType(name, "Name", "Using"); + Types.Add(defn.Name, defn); + AddField(defn, "Name", "FixedString"); + var usingRef = AddField(defn, "Using", "StatReference"); + usingRef.ReferenceTypes = + [ + new StatReferenceConstraint + { + StatType = name + } + ]; + } + else if (trimmed.StartsWith("modifier \"")) + { + var nameEnd = trimmed.IndexOf('"', 10); + var name = trimmed[10..nameEnd]; + var typeName = trimmed.Substring(nameEnd + 3, trimmed.Length - nameEnd - 4); + AddField(defn!, name, typeName); + } + } + } + } + + public void LoadEnumerations(Stream stream) + { + StatEnumeration? curEnum = null; + string? line; + + using var reader = new StreamReader(stream); + while ((line = reader.ReadLine()) != null) + { + var trimmed = line.Trim(); + if (trimmed.Length > 0) + { + if (trimmed.StartsWith("valuelist ")) + { + var name = trimmed[11..^1]; + curEnum = new StatEnumeration(name); + Enumerations.Add(curEnum.Name, curEnum); + } + else if (trimmed.StartsWith("value ")) + { + var label = trimmed[7..^1]; + curEnum!.AddItem(label); + } + } + } + } +} diff --git a/LSLib/LS/Stats/StatFileParser.cs b/LSLibStats/Stats/StatFileParser.cs similarity index 87% rename from LSLib/LS/Stats/StatFileParser.cs rename to LSLibStats/Stats/StatFileParser.cs index 6a915e1b..d8a5b605 100644 --- a/LSLib/LS/Stats/StatFileParser.cs +++ b/LSLibStats/Stats/StatFileParser.cs @@ -1,18 +1,8 @@ -using LSLib.LS.Stats.StatParser; -using LSLib.Parser; +using LSLib.Parser; +using LSLib.Stats.StatParser; using System.Xml; -namespace LSLib.LS.Stats; - -public class StatEntry -{ - public string Name; - public StatEntryType Type; - public StatEntry BasedOn; - public CodeLocation Location; - public Dictionary Properties = []; - public Dictionary PropertyLocations = []; -} +namespace LSLib.Stats; /// /// Holder for stat loader diagnostic codes. @@ -54,33 +44,27 @@ public class DiagnosticCode public const string StatNameMissing = "S07"; } -public class StatLoadingError +public class StatLoadingError(string code, string message, CodeLocation? location, List? 
contexts) { - public string Code; - public string Message; - public CodeLocation Location; - public List Contexts; + public string Code = code; + public string Message = message; + public CodeLocation? Location = location; + public List? Contexts = contexts; } -public class StatLoadingContext +public class StatLoadingContext(StatDefinitionRepository definitions) { - public StatDefinitionRepository Definitions; + public StatDefinitionRepository Definitions = definitions; public List Errors = []; public Dictionary> DeclarationsByType = []; public Dictionary> ResolvedDeclarationsByType = []; public Dictionary> GuidResources = []; public readonly HashSet ObjectCategories = []; - public void LogError(string code, string message, CodeLocation location = null, - List contexts = null) + public void LogError(string code, string message, CodeLocation? location = null, + List? contexts = null) { - Errors.Add(new StatLoadingError - { - Code = code, - Message = message, - Location = location, - Contexts = contexts - }); + Errors.Add(new StatLoadingError(code, message, location, contexts)); } } @@ -97,15 +81,15 @@ private class BaseClassMapping public bool ResolveUsageRef( StatEntryType type,StatDeclaration declaration, Dictionary declarations, - out StatDeclaration basedOn) + out StatDeclaration? basedOn) { var props = declaration.Properties; var name = (string)props[type.NameProperty].Value; - if (type.BasedOnProperty != null && props.TryGetValue(type.BasedOnProperty, out StatProperty prop)) + if (type.BasedOnProperty != null && props.TryGetValue(type.BasedOnProperty, out var prop)) { var baseClass = (string)prop.Value; - if (declarations.TryGetValue(baseClass, out StatDeclaration baseDeclaration)) + if (declarations.TryGetValue(baseClass, out var baseDeclaration)) { basedOn = baseDeclaration; return true; @@ -170,7 +154,7 @@ public Dictionary ResolveUsageRefs(StatEntryType type, { if (declaration.Value.WasValidated) continue; - var succeeded = ResolveUsageRef(type, declaration.Value, declarations, out StatDeclaration baseClass); + var succeeded = ResolveUsageRef(type, declaration.Value, declarations, out var baseClass); if (succeeded && baseClass != null) { mappings.Add(new BaseClassMapping @@ -240,7 +224,7 @@ public StatLoader(StatLoadingContext ctx) DiagContext = new(); } - private List ParseStatStream(string path, Stream stream) + private List? ParseStatStream(string path, Stream stream) { var scanner = new StatScanner(path); scanner.SetSource(stream); @@ -266,9 +250,9 @@ private void AddDeclarations(List declarations) continue; } - var statType = declaration.Properties["EntityType"].Value.ToString(); + var statType = declaration.Properties["EntityType"].Value.ToString()!; - if (!Context.Definitions.Types.TryGetValue(statType, out StatEntryType type)) + if (!Context.Definitions.Types.TryGetValue(statType, out var type)) { Context.LogError(DiagnosticCode.StatEntityTypeUnknown, $"No definition exists for stat type '{statType}'", declaration.Location); continue; @@ -280,14 +264,14 @@ private void AddDeclarations(List declarations) continue; } - if (!Context.DeclarationsByType.TryGetValue(statType, out Dictionary declarationsByType)) + if (!Context.DeclarationsByType.TryGetValue(statType, out var declarationsByType)) { declarationsByType = []; Context.DeclarationsByType[statType] = declarationsByType; } // TODO - duplicate declaration check? 
- var name = declaration.Properties[type.NameProperty].Value.ToString(); + var name = declaration.Properties[type.NameProperty].Value.ToString()!; declarationsByType[name] = declaration; } } @@ -314,7 +298,7 @@ public void ResolveUsageRef() public void ValidateProperty(StatEntryType type, StatProperty property, string declarationName, PropertyDiagnosticContainer errors) { - if (!type.Fields.TryGetValue(property.Key, out StatField field)) + if (!type.Fields.TryGetValue(property.Key, out var field)) { errors.Add($"Property '{property.Key}' is not supported on type {type.Name}"); return; @@ -371,7 +355,7 @@ public void ValidateEntries() if (!errors.Empty) { errors.AddContext(PropertyDiagnosticContextType.Entry, declaration.Key, declaration.Value.Location); - errors.MergeInto(Context, declaration.Key); + errors.MergeInto(Context); errors.Clear(); } } @@ -383,7 +367,7 @@ private void LoadGuidResources(Dictionary guidResources, XmlNode { foreach (var node in nodes) { - var attributes = (node as XmlElement).GetElementsByTagName("attribute"); + var attributes = ((XmlElement)node).GetElementsByTagName("attribute"); foreach (var attribute in attributes) { var attr = attribute as XmlElement; @@ -405,18 +389,18 @@ public void LoadGuidResources(XmlDocument doc, string typeName, string regionNam Context.GuidResources[typeName] = guidResources; } - var regions = doc.DocumentElement.GetElementsByTagName("region"); + var regions = doc.DocumentElement!.GetElementsByTagName("region"); foreach (var region in regions) { - if ((region as XmlElement).GetAttribute("id") == regionName) + if (((XmlElement)region).GetAttribute("id") == regionName) { - var root = (region as XmlElement).GetElementsByTagName("node"); + var root = ((XmlElement)region).GetElementsByTagName("node")!; if (root.Count > 0) { - var children = (root[0] as XmlElement).GetElementsByTagName("children"); + var children = ((XmlElement)root[0]).GetElementsByTagName("children"); if (children.Count > 0) { - var resources = (children[0] as XmlElement).GetElementsByTagName("node"); + var resources = ((XmlElement)children[0]).GetElementsByTagName("node"); LoadGuidResources(guidResources, resources); } } diff --git a/LSLib/LS/Stats/StatValueParsers.cs b/LSLibStats/Stats/StatValueParsers.cs similarity index 91% rename from LSLib/LS/Stats/StatValueParsers.cs rename to LSLibStats/Stats/StatValueParsers.cs index cc6f613f..5a5d6cf5 100644 --- a/LSLib/LS/Stats/StatValueParsers.cs +++ b/LSLibStats/Stats/StatValueParsers.cs @@ -1,15 +1,16 @@ -using LSLib.LS.Stats.Properties; -using LSLib.LS.Stats.StatParser; -using LSLib.Parser; +using LSLib.Parser; +using LSLib.Stats.Functors; +using LSLib.Stats.StatParser; using System.Globalization; +using System.Text; -namespace LSLib.LS.Stats; +namespace LSLib.Stats; public class DiagnosticContext { public bool IgnoreMissingReferences = false; - public StatDeclaration CurrentDeclaration; - public CodeLocation PropertyValueSpan; + public StatDeclaration? CurrentDeclaration; + public CodeLocation? PropertyValueSpan; } public enum PropertyDiagnosticContextType @@ -27,23 +28,23 @@ public struct PropertyDiagnosticContext public CodeLocation Location; } -public class PropertyDiagnostic +public class PropertyDiagnostic(string message, CodeLocation? location = null, List? contexts = null) { - public string Message; - public CodeLocation Location; - public List Contexts; + public string Message = message; + public CodeLocation? Location = location; + public List? 
Contexts = contexts; } public class PropertyDiagnosticContainer { - public List Messages; + public List? Messages; public bool Empty { - get { return Messages == null || Messages.Count == 0; } + get { return Messages == null || Messages.Count == 0; } } - public void AddContext(PropertyDiagnosticContextType type, string name, CodeLocation location = null) + public void AddContext(PropertyDiagnosticContextType type, string name, CodeLocation? location = null) { if (Empty) return; @@ -61,14 +62,10 @@ public void AddContext(PropertyDiagnosticContextType type, string name, CodeLoca } } - public void Add(string message, CodeLocation location = null) + public void Add(string message, CodeLocation? location = null) { Messages ??= []; - Messages.Add(new PropertyDiagnostic - { - Message = message, - Location = location - }); + Messages.Add(new PropertyDiagnostic(message, location)); } public void MergeInto(PropertyDiagnosticContainer container) @@ -76,17 +73,17 @@ public void MergeInto(PropertyDiagnosticContainer container) if (Empty) return; container.Messages ??= []; - container.Messages.AddRange(Messages); + container.Messages.AddRange(Messages ?? []); } - public void MergeInto(StatLoadingContext context, string declarationName) + public void MergeInto(StatLoadingContext context) { if (Empty) return; - foreach (var message in Messages) + foreach (var message in Messages ?? []) { var location = message.Location; - foreach (var ctx in message.Contexts) + foreach (var ctx in message.Contexts ?? []) { location ??= ctx.Location; } @@ -104,14 +101,14 @@ public void Clear() public interface IStatValueValidator { - void Validate(DiagnosticContext ctx, CodeLocation location, object value, PropertyDiagnosticContainer errors); + void Validate(DiagnosticContext ctx, CodeLocation? location, object value, PropertyDiagnosticContainer errors); } abstract public class StatStringValidator : IStatValueValidator { abstract public void Validate(DiagnosticContext ctx, string value, PropertyDiagnosticContainer errors); - public void Validate(DiagnosticContext ctx, CodeLocation location, object value, PropertyDiagnosticContainer errors) + public void Validate(DiagnosticContext ctx, CodeLocation? location, object value, PropertyDiagnosticContainer errors) { Validate(ctx, (string)value, errors); } @@ -271,9 +268,9 @@ public override void Validate(DiagnosticContext ctx, string value, PropertyDiagn var valueBytes = Encoding.UTF8.GetBytes("__TYPE_" + validatorType + "__ " + value.TrimEnd()); using var buf = new MemoryStream(valueBytes); - var scanner = new StatPropertyScanner(); + var scanner = new FunctorScanner(); scanner.SetSource(buf); - var parser = new StatPropertyParser(scanner, definitions, ctx, validatorFactory, valueBytes, type, errors, ctx.PropertyValueSpan, typeLen); + var parser = new FunctorParser(scanner, definitions, ctx, validatorFactory, valueBytes, type, errors, ctx.PropertyValueSpan, typeLen); var succeeded = parser.Parse(); if (!succeeded) { @@ -419,7 +416,7 @@ public override void Validate(DiagnosticContext ctx, string value, PropertyDiagn public class ObjectListValidator(IPropertyValidator PropertyValidator, StatEntryType ObjectType) : IStatValueValidator { - public void Validate(DiagnosticContext ctx, CodeLocation location, object value, PropertyDiagnosticContainer errors) + public void Validate(DiagnosticContext ctx, CodeLocation? 
location, object value, PropertyDiagnosticContainer errors) { var objs = (IEnumerable)value; foreach (var subobject in objs) @@ -430,11 +427,11 @@ public void Validate(DiagnosticContext ctx, CodeLocation location, object value, } } -public class AnyParser(IEnumerable validators, string message = null) : IStatValueValidator +public class AnyParser(IEnumerable validators, string? defaultMessage = null) : IStatValueValidator { private readonly List Validators = validators.ToList(); - public void Validate(DiagnosticContext ctx, CodeLocation location, object value, PropertyDiagnosticContainer errors) + public void Validate(DiagnosticContext ctx, CodeLocation? location, object value, PropertyDiagnosticContainer errors) { foreach (var validator in Validators) { @@ -443,19 +440,13 @@ public void Validate(DiagnosticContext ctx, CodeLocation location, object value, if (errors.Messages == null || errors.Messages.Count == 0) return; } - if (message != null) + if (defaultMessage != null) { - errors.Add(message); + errors.Add(defaultMessage); } } } -public class AnyType -{ - public List Types; - public string Message; -} - public class StatValueValidatorFactory(IStatReferenceValidator ReferenceValidator, IPropertyValidator PropertyValidator) { public IStatValueValidator CreateReferenceValidator(List constraints) @@ -562,9 +553,9 @@ public IStatValueValidator CreateValidator(StatField field, StatDefinitionReposi return CreateValidator(field.Type, field.EnumType, field.ReferenceTypes, definitions); } - public IStatValueValidator CreateValidator(string type, StatEnumeration enumType, List constraints, StatDefinitionRepository definitions) + public IStatValueValidator CreateValidator(string type, StatEnumeration? enumType, List? constraints, StatDefinitionRepository definitions) { - if (enumType == null && definitions.Enumerations.TryGetValue(type, out StatEnumeration enumInfo) && enumInfo.Values.Count > 0) + if (enumType == null && definitions.Enumerations.TryGetValue(type, out var enumInfo) && enumInfo.Values.Count > 0) { enumType = enumInfo; } @@ -609,7 +600,7 @@ public IStatValueValidator CreateValidator(string type, StatEnumeration enumType "StatsFunctors" => new ExpressionValidator("Properties", definitions, this, ExpressionType.Functor), "Lua" or "RollConditions" or "TargetConditions" or "Conditions" => new LuaExpressionValidator(), "UseCosts" => new UseCostsValidator(ReferenceValidator), - "StatReference" => new StatReferenceValidator(ReferenceValidator, constraints), + "StatReference" => new StatReferenceValidator(ReferenceValidator, constraints!), "StatusId" => new AnyParser(new List { new EnumValidator(definitions.Enumerations["EngineStatusType"]), new StatReferenceValidator(ReferenceValidator, diff --git a/LSTools.sln b/LSTools.sln index d017e0b2..ed269297 100644 --- a/LSTools.sln +++ b/LSTools.sln @@ -38,6 +38,8 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution EndProject Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "PhysicsTool", "PhysicsTool\PhysicsTool.vcxproj", "{043514DF-5822-41A0-A5CE-CBC349B1398B}" EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "LSLibStats", "LSLibStats\LSLibStats.csproj", "{A721CE1D-F76D-476B-86E7-C8B2D85D7E73}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -246,6 +248,24 @@ Global {043514DF-5822-41A0-A5CE-CBC349B1398B}.RelWithDebInfo|x64.Build.0 = Release|x64 {043514DF-5822-41A0-A5CE-CBC349B1398B}.RelWithDebInfo|x86.ActiveCfg = Release|Win32 
{043514DF-5822-41A0-A5CE-CBC349B1398B}.RelWithDebInfo|x86.Build.0 = Release|Win32 + {A721CE1D-F76D-476B-86E7-C8B2D85D7E73}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A721CE1D-F76D-476B-86E7-C8B2D85D7E73}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A721CE1D-F76D-476B-86E7-C8B2D85D7E73}.Debug|x64.ActiveCfg = Debug|Any CPU + {A721CE1D-F76D-476B-86E7-C8B2D85D7E73}.Debug|x64.Build.0 = Debug|Any CPU + {A721CE1D-F76D-476B-86E7-C8B2D85D7E73}.Debug|x86.ActiveCfg = Debug|Any CPU + {A721CE1D-F76D-476B-86E7-C8B2D85D7E73}.Debug|x86.Build.0 = Debug|Any CPU + {A721CE1D-F76D-476B-86E7-C8B2D85D7E73}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A721CE1D-F76D-476B-86E7-C8B2D85D7E73}.Release|Any CPU.Build.0 = Release|Any CPU + {A721CE1D-F76D-476B-86E7-C8B2D85D7E73}.Release|x64.ActiveCfg = Release|Any CPU + {A721CE1D-F76D-476B-86E7-C8B2D85D7E73}.Release|x64.Build.0 = Release|Any CPU + {A721CE1D-F76D-476B-86E7-C8B2D85D7E73}.Release|x86.ActiveCfg = Release|Any CPU + {A721CE1D-F76D-476B-86E7-C8B2D85D7E73}.Release|x86.Build.0 = Release|Any CPU + {A721CE1D-F76D-476B-86E7-C8B2D85D7E73}.RelWithDebInfo|Any CPU.ActiveCfg = Release|Any CPU + {A721CE1D-F76D-476B-86E7-C8B2D85D7E73}.RelWithDebInfo|Any CPU.Build.0 = Release|Any CPU + {A721CE1D-F76D-476B-86E7-C8B2D85D7E73}.RelWithDebInfo|x64.ActiveCfg = Release|Any CPU + {A721CE1D-F76D-476B-86E7-C8B2D85D7E73}.RelWithDebInfo|x64.Build.0 = Release|Any CPU + {A721CE1D-F76D-476B-86E7-C8B2D85D7E73}.RelWithDebInfo|x86.ActiveCfg = Release|Any CPU + {A721CE1D-F76D-476B-86E7-C8B2D85D7E73}.RelWithDebInfo|x86.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/StatParser/StatChecker.cs b/StatParser/StatChecker.cs index 79565001..e0c15af1 100644 --- a/StatParser/StatChecker.cs +++ b/StatParser/StatChecker.cs @@ -1,5 +1,5 @@ using LSLib.LS; -using LSLib.LS.Stats; +using LSLib.Stats; using System; using System.Collections.Generic; using System.IO; @@ -108,10 +108,6 @@ private void CompilationDiagnostic(StatLoadingError message) public void Check(List mods, List dependencies, List packagePaths) { - Context = new StatLoadingContext(); - - Loader = new StatLoader(Context); - FS = new VFS(); if (LoadPackages) { @@ -133,7 +129,9 @@ public void Check(List mods, List dependencies, List pac visitor.Discover(); LoadStatDefinitions(visitor.Resources); - Context.Definitions = Definitions; + + Context = new StatLoadingContext(Definitions); + Loader = new StatLoader(Context); foreach (var modName in dependencies) { diff --git a/StatParser/StatParser.csproj b/StatParser/StatParser.csproj index f0bb54f1..27d53095 100644 --- a/StatParser/StatParser.csproj +++ b/StatParser/StatParser.csproj @@ -11,6 +11,7 @@ 1.0.0.0 + From 22e730669f017548bf262013366219463bae8c94 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sun, 17 Mar 2024 19:12:25 +0100 Subject: [PATCH 092/139] Compression code reorganization --- DebuggerFrontend/DebugInfoLoader.cs | 4 +- LSLib/LS/BinUtils.cs | 218 -------------------------- LSLib/LS/Compression.cs | 230 ++++++++++++++++++++++++++++ LSLib/LS/PackageCommon.cs | 2 +- LSLib/LS/PackageFormat.cs | 2 +- LSLib/LS/PackageReader.cs | 2 +- LSLib/LS/PackageWriter.cs | 4 +- LSLib/LS/Resources/LSF/LSFReader.cs | 2 +- LSLib/LS/Resources/LSF/LSFWriter.cs | 8 +- StoryCompiler/DebugInfoSaver.cs | 4 +- 10 files changed, 244 insertions(+), 232 deletions(-) create mode 100644 LSLib/LS/Compression.cs diff --git a/DebuggerFrontend/DebugInfoLoader.cs b/DebuggerFrontend/DebugInfoLoader.cs index c6f97109..4a4a0c54 100644 --- 
a/DebuggerFrontend/DebugInfoLoader.cs +++ b/DebuggerFrontend/DebugInfoLoader.cs @@ -203,8 +203,8 @@ public StoryDebugInfo Load(byte[] msgPayload) var compressed = new byte[msgPayload.Length - 4]; Array.Copy(msgPayload, 0, compressed, 0, msgPayload.Length - 4); - var flags = BinUtils.MakeCompressionFlags(CompressionMethod.LZ4, LSCompressionLevel.Fast); - byte[] decompressed = BinUtils.Decompress(compressed, (int)decompressedSize, flags); + var flags = CompressionHelpers.MakeCompressionFlags(CompressionMethod.LZ4, LSCompressionLevel.Fast); + byte[] decompressed = CompressionHelpers.Decompress(compressed, (int)decompressedSize, flags); var msg = StoryDebugInfoMsg.Parser.ParseFrom(decompressed); var debugInfo = FromProtobuf(msg); return debugInfo; diff --git a/LSLib/LS/BinUtils.cs b/LSLib/LS/BinUtils.cs index 22f712d7..ff63f8f0 100644 --- a/LSLib/LS/BinUtils.cs +++ b/LSLib/LS/BinUtils.cs @@ -62,65 +62,6 @@ public override void Flush() { } } -public class LZ4DecompressionStream : Stream -{ - private readonly MemoryMappedViewAccessor View; - private readonly long Offset; - private readonly int Size; - private readonly int DecompressedSize; - private MemoryStream Decompressed; - - public LZ4DecompressionStream(MemoryMappedViewAccessor view, long offset, int size, int decompressedSize) - { - View = view; - Offset = offset; - Size = size; - DecompressedSize = decompressedSize; - } - - private void DoDecompression() - { - var compressed = new byte[Size]; - View.ReadArray(Offset, compressed, 0, Size); - - var decompressed = new byte[DecompressedSize]; - LZ4Codec.Decode(compressed, 0, compressed.Length, decompressed, 0, DecompressedSize, true); - Decompressed = new MemoryStream(decompressed); - } - - public override bool CanRead { get { return true; } } - public override bool CanSeek { get { return false; } } - - public override int Read(byte[] buffer, int offset, int count) - { - if (Decompressed == null) - { - DoDecompression(); - } - - return Decompressed.Read(buffer, offset, count); - } - - public override long Seek(long offset, SeekOrigin origin) - { - throw new NotSupportedException(); - } - - - public override long Position - { - get { return Decompressed?.Position ?? 
0; } - set { throw new NotSupportedException(); } - } - - public override bool CanTimeout { get { return false; } } - public override bool CanWrite { get { return false; } } - public override long Length { get { return DecompressedSize; } } - public override void SetLength(long value) { throw new NotSupportedException(); } - public override void Write(byte[] buffer, int offset, int count) { throw new NotSupportedException(); } - public override void Flush() { } -} - public static class BinUtils { public static T ReadStruct(BinaryReader reader) @@ -412,163 +353,4 @@ public static void WriteAttribute(BinaryWriter writer, NodeAttribute attr) throw new InvalidFormatException(String.Format("WriteAttribute() not implemented for type {0}", attr.Type)); } } - - public static CompressionFlags MakeCompressionFlags(CompressionMethod method, LSCompressionLevel level) - { - return method.ToFlags() | level.ToFlags(); - } - - public static byte[] Decompress(byte[] compressed, int decompressedSize, CompressionFlags compression, bool chunked = false) - { - switch (compression.Method()) - { - case CompressionMethod.None: - return compressed; - - case CompressionMethod.Zlib: - { - using (var compressedStream = new MemoryStream(compressed)) - using (var decompressedStream = new MemoryStream()) - using (var stream = new ZLibStream(compressedStream, CompressionMode.Decompress)) - { - stream.CopyTo(decompressedStream); - return decompressedStream.ToArray(); - } - } - - case CompressionMethod.LZ4: - if (chunked) - { - var decompressed = Native.LZ4FrameCompressor.Decompress(compressed); - return decompressed; - } - else - { - var decompressed = new byte[decompressedSize]; - var resultSize = LZ4Codec.Decode(compressed, 0, compressed.Length, decompressed, 0, decompressedSize, true); - if (resultSize != decompressedSize) - { - string msg = $"LZ4 compressor disagrees about the size of compressed buffer; expected {decompressedSize}, got {resultSize}"; - throw new InvalidDataException(msg); - } - return decompressed; - } - - case CompressionMethod.Zstd: - { - using (var compressedStream = new MemoryStream(compressed)) - using (var decompressedStream = new MemoryStream()) - using (var stream = new ZstdSharp.DecompressionStream(compressedStream)) - { - stream.CopyTo(decompressedStream); - return decompressedStream.ToArray(); - } - } - - default: - throw new InvalidDataException($"No decompressor found for this format: {compression}"); - } - } - - public static Stream Decompress(MemoryMappedFile file, MemoryMappedViewAccessor view, long sourceOffset, - int sourceSize, int decompressedSize, CompressionFlags compression) - { - // MemoryMappedView considers a size of 0 to mean "entire stream" - if (sourceSize == 0) - { - return new MemoryStream(); - } - - switch (compression.Method()) - { - case CompressionMethod.None: - return file.CreateViewStream(sourceOffset, sourceSize, MemoryMappedFileAccess.Read); - - case CompressionMethod.Zlib: - var sourceStream = file.CreateViewStream(sourceOffset, sourceSize, MemoryMappedFileAccess.Read); - return new ZLibStream(sourceStream, CompressionMode.Decompress); - - case CompressionMethod.LZ4: - return new LZ4DecompressionStream(view, sourceOffset, sourceSize, decompressedSize); - - case CompressionMethod.Zstd: - var zstdStream = file.CreateViewStream(sourceOffset, sourceSize, MemoryMappedFileAccess.Read); - return new ZstdSharp.DecompressionStream(zstdStream); - - default: - throw new InvalidDataException($"No decompressor found for this format: {compression}"); - } - } - - public static 
byte[] Compress(byte[] uncompressed, CompressionFlags compression) - { - return Compress(uncompressed, compression.Method(), compression.Level()); - } - - public static byte[] Compress(byte[] uncompressed, CompressionMethod method, LSCompressionLevel level, bool chunked = false) - { - return method switch - { - CompressionMethod.None => uncompressed, - CompressionMethod.Zlib => CompressZlib(uncompressed, level), - CompressionMethod.LZ4 => CompressLZ4(uncompressed, level, chunked), - CompressionMethod.Zstd => CompressZstd(uncompressed, level), - _ => throw new ArgumentException("Invalid compression method specified") - }; - } - - public static byte[] CompressZlib(byte[] uncompressed, LSCompressionLevel level) - { - var zLevel = level switch - { - LSCompressionLevel.Fast => CompressionLevel.Fastest, - LSCompressionLevel.Default => CompressionLevel.Optimal, - LSCompressionLevel.Max => CompressionLevel.SmallestSize, - _ => throw new ArgumentException() - }; - - using var outputStream = new MemoryStream(); - using (var compressor = new ZLibStream(outputStream, zLevel, true)) - { - compressor.Write(uncompressed, 0, uncompressed.Length); - } - - - return outputStream.ToArray(); - } - - public static byte[] CompressLZ4(byte[] uncompressed, LSCompressionLevel compressionLevel, bool chunked = false) - { - if (chunked) - { - return Native.LZ4FrameCompressor.Compress(uncompressed); - } - else if (compressionLevel == LSCompressionLevel.Fast) - { - return LZ4Codec.Encode(uncompressed, 0, uncompressed.Length); - } - else - { - return LZ4Codec.EncodeHC(uncompressed, 0, uncompressed.Length); - } - } - - public static byte[] CompressZstd(byte[] uncompressed, LSCompressionLevel level) - { - var zLevel = level switch - { - LSCompressionLevel.Fast => 3, - LSCompressionLevel.Default => 9, - LSCompressionLevel.Max => 22, - _ => throw new ArgumentException() - }; - - using var outputStream = new MemoryStream(); - using (var compressor = new ZstdSharp.CompressionStream(outputStream, zLevel, 0, true)) - { - compressor.Write(uncompressed, 0, uncompressed.Length); - } - - return outputStream.ToArray(); - } } diff --git a/LSLib/LS/Compression.cs b/LSLib/LS/Compression.cs new file mode 100644 index 00000000..c6706a0c --- /dev/null +++ b/LSLib/LS/Compression.cs @@ -0,0 +1,230 @@ +using LZ4; +using System.IO.Compression; +using System.IO.MemoryMappedFiles; + +namespace LSLib.LS; + + +public class LZ4DecompressionStream : Stream +{ + private readonly MemoryMappedViewAccessor View; + private readonly long Offset; + private readonly int Size; + private readonly int DecompressedSize; + private MemoryStream Decompressed; + + public LZ4DecompressionStream(MemoryMappedViewAccessor view, long offset, int size, int decompressedSize) + { + View = view; + Offset = offset; + Size = size; + DecompressedSize = decompressedSize; + } + + private void DoDecompression() + { + var compressed = new byte[Size]; + View.ReadArray(Offset, compressed, 0, Size); + + var decompressed = new byte[DecompressedSize]; + LZ4Codec.Decode(compressed, 0, compressed.Length, decompressed, 0, DecompressedSize, true); + Decompressed = new MemoryStream(decompressed); + } + + public override bool CanRead { get { return true; } } + public override bool CanSeek { get { return false; } } + + public override int Read(byte[] buffer, int offset, int count) + { + if (Decompressed == null) + { + DoDecompression(); + } + + return Decompressed.Read(buffer, offset, count); + } + + public override long Seek(long offset, SeekOrigin origin) + { + throw new 
NotSupportedException(); + } + + + public override long Position + { + get { return Decompressed?.Position ?? 0; } + set { throw new NotSupportedException(); } + } + + public override bool CanTimeout { get { return false; } } + public override bool CanWrite { get { return false; } } + public override long Length { get { return DecompressedSize; } } + public override void SetLength(long value) { throw new NotSupportedException(); } + public override void Write(byte[] buffer, int offset, int count) { throw new NotSupportedException(); } + public override void Flush() { } +} + +public static class CompressionHelpers +{ + public static CompressionFlags MakeCompressionFlags(CompressionMethod method, LSCompressionLevel level) + { + // Avoid setting compression level bits if there is no compression + if (method == CompressionMethod.None) return 0; + + return method.ToFlags() | level.ToFlags(); + } + + public static byte[] Decompress(byte[] compressed, int decompressedSize, CompressionFlags compression, bool chunked = false) + { + switch (compression.Method()) + { + case CompressionMethod.None: + return compressed; + + case CompressionMethod.Zlib: + { + using (var compressedStream = new MemoryStream(compressed)) + using (var decompressedStream = new MemoryStream()) + using (var stream = new ZLibStream(compressedStream, CompressionMode.Decompress)) + { + stream.CopyTo(decompressedStream); + return decompressedStream.ToArray(); + } + } + + case CompressionMethod.LZ4: + if (chunked) + { + var decompressed = Native.LZ4FrameCompressor.Decompress(compressed); + return decompressed; + } + else + { + var decompressed = new byte[decompressedSize]; + var resultSize = LZ4Codec.Decode(compressed, 0, compressed.Length, decompressed, 0, decompressedSize, true); + if (resultSize != decompressedSize) + { + string msg = $"LZ4 compressor disagrees about the size of compressed buffer; expected {decompressedSize}, got {resultSize}"; + throw new InvalidDataException(msg); + } + return decompressed; + } + + case CompressionMethod.Zstd: + { + using (var compressedStream = new MemoryStream(compressed)) + using (var decompressedStream = new MemoryStream()) + using (var stream = new ZstdSharp.DecompressionStream(compressedStream)) + { + stream.CopyTo(decompressedStream); + return decompressedStream.ToArray(); + } + } + + default: + throw new InvalidDataException($"No decompressor found for this format: {compression}"); + } + } + + public static Stream Decompress(MemoryMappedFile file, MemoryMappedViewAccessor view, long sourceOffset, + int sourceSize, int decompressedSize, CompressionFlags compression) + { + // MemoryMappedView considers a size of 0 to mean "entire stream" + if (sourceSize == 0) + { + return new MemoryStream(); + } + + switch (compression.Method()) + { + case CompressionMethod.None: + return file.CreateViewStream(sourceOffset, sourceSize, MemoryMappedFileAccess.Read); + + case CompressionMethod.Zlib: + var sourceStream = file.CreateViewStream(sourceOffset, sourceSize, MemoryMappedFileAccess.Read); + return new ZLibStream(sourceStream, CompressionMode.Decompress); + + case CompressionMethod.LZ4: + return new LZ4DecompressionStream(view, sourceOffset, sourceSize, decompressedSize); + + case CompressionMethod.Zstd: + var zstdStream = file.CreateViewStream(sourceOffset, sourceSize, MemoryMappedFileAccess.Read); + return new ZstdSharp.DecompressionStream(zstdStream); + + default: + throw new InvalidDataException($"No decompressor found for this format: {compression}"); + } + } + + public static byte[] 
Compress(byte[] uncompressed, CompressionFlags compression) + { + return Compress(uncompressed, compression.Method(), compression.Level()); + } + + public static byte[] Compress(byte[] uncompressed, CompressionMethod method, LSCompressionLevel level, bool chunked = false) + { + return method switch + { + CompressionMethod.None => uncompressed, + CompressionMethod.Zlib => CompressZlib(uncompressed, level), + CompressionMethod.LZ4 => CompressLZ4(uncompressed, level, chunked), + CompressionMethod.Zstd => CompressZstd(uncompressed, level), + _ => throw new ArgumentException("Invalid compression method specified") + }; + } + + public static byte[] CompressZlib(byte[] uncompressed, LSCompressionLevel level) + { + var zLevel = level switch + { + LSCompressionLevel.Fast => CompressionLevel.Fastest, + LSCompressionLevel.Default => CompressionLevel.Optimal, + LSCompressionLevel.Max => CompressionLevel.SmallestSize, + _ => throw new ArgumentException() + }; + + using var outputStream = new MemoryStream(); + using (var compressor = new ZLibStream(outputStream, zLevel, true)) + { + compressor.Write(uncompressed, 0, uncompressed.Length); + } + + + return outputStream.ToArray(); + } + + public static byte[] CompressLZ4(byte[] uncompressed, LSCompressionLevel compressionLevel, bool chunked = false) + { + if (chunked) + { + return Native.LZ4FrameCompressor.Compress(uncompressed); + } + else if (compressionLevel == LSCompressionLevel.Fast) + { + return LZ4Codec.Encode(uncompressed, 0, uncompressed.Length); + } + else + { + return LZ4Codec.EncodeHC(uncompressed, 0, uncompressed.Length); + } + } + + public static byte[] CompressZstd(byte[] uncompressed, LSCompressionLevel level) + { + var zLevel = level switch + { + LSCompressionLevel.Fast => 3, + LSCompressionLevel.Default => 9, + LSCompressionLevel.Max => 22, + _ => throw new ArgumentException() + }; + + using var outputStream = new MemoryStream(); + using (var compressor = new ZstdSharp.CompressionStream(outputStream, zLevel, 0, true)) + { + compressor.Write(uncompressed, 0, uncompressed.Length); + } + + return outputStream.ToArray(); + } +} diff --git a/LSLib/LS/PackageCommon.cs b/LSLib/LS/PackageCommon.cs index 93116682..2e484778 100644 --- a/LSLib/LS/PackageCommon.cs +++ b/LSLib/LS/PackageCommon.cs @@ -28,7 +28,7 @@ public Stream CreateContentReader() } else { - return BinUtils.Decompress(PackageFile, PackageView, (long)OffsetInFile, (int)SizeOnDisk, (int)UncompressedSize, Flags); + return CompressionHelpers.Decompress(PackageFile, PackageView, (long)OffsetInFile, (int)SizeOnDisk, (int)UncompressedSize, Flags); } } diff --git a/LSLib/LS/PackageFormat.cs b/LSLib/LS/PackageFormat.cs index e2b7dd90..cf5453ab 100644 --- a/LSLib/LS/PackageFormat.cs +++ b/LSLib/LS/PackageFormat.cs @@ -311,7 +311,7 @@ public readonly void ToCommon(PackagedFileInfoCommon info) info.Name = BinUtils.NullTerminatedBytesToString(Name); info.ArchivePart = ArchivePart; info.Crc = 0; - info.Flags = UncompressedSize > 0 ? BinUtils.MakeCompressionFlags(CompressionMethod.Zlib, LSCompressionLevel.Default) : 0; + info.Flags = UncompressedSize > 0 ? 
CompressionHelpers.MakeCompressionFlags(CompressionMethod.Zlib, LSCompressionLevel.Default) : 0; info.OffsetInFile = OffsetInFile; info.SizeOnDisk = SizeOnDisk; info.UncompressedSize = UncompressedSize; diff --git a/LSLib/LS/PackageReader.cs b/LSLib/LS/PackageReader.cs index c7f4dea0..e4b0c607 100644 --- a/LSLib/LS/PackageReader.cs +++ b/LSLib/LS/PackageReader.cs @@ -114,7 +114,7 @@ private void ReadCompressedFileList(MemoryMappedViewAccessor view, long o } int fileBufferSize = Marshal.SizeOf(typeof(TFile)) * numFiles; - var fileBuf = BinUtils.Decompress(compressed, fileBufferSize, CompressionFlags.MethodLZ4); + var fileBuf = CompressionHelpers.Decompress(compressed, fileBufferSize, CompressionFlags.MethodLZ4); using var ms = new MemoryStream(fileBuf); using var msr = new BinaryReader(ms); diff --git a/LSLib/LS/PackageWriter.cs b/LSLib/LS/PackageWriter.cs index 48fa9bd3..8ff51556 100644 --- a/LSLib/LS/PackageWriter.cs +++ b/LSLib/LS/PackageWriter.cs @@ -93,7 +93,7 @@ private PackageBuildTransientFile WriteFile(PackageBuildInputFile input) var uncompressed = new byte[inputStream.Length]; inputStream.ReadExactly(uncompressed, 0, uncompressed.Length); - var compressed = BinUtils.Compress(uncompressed, compression, compressionLevel); + var compressed = CompressionHelpers.Compress(uncompressed, compression, compressionLevel); if (Streams.Last().Position + compressed.Length > Build.Version.MaxPackageSize()) { @@ -111,7 +111,7 @@ private PackageBuildTransientFile WriteFile(PackageBuildInputFile input) SizeOnDisk = (ulong)compressed.Length, ArchivePart = (UInt32)(Streams.Count - 1), OffsetInFile = (ulong)stream.Position, - Flags = BinUtils.MakeCompressionFlags(compression, compressionLevel) + Flags = CompressionHelpers.MakeCompressionFlags(compression, compressionLevel) }; stream.Write(compressed, 0, compressed.Length); diff --git a/LSLib/LS/Resources/LSF/LSFReader.cs b/LSLib/LS/Resources/LSF/LSFReader.cs index 0c133911..a7b2e3d1 100644 --- a/LSLib/LS/Resources/LSF/LSFReader.cs +++ b/LSLib/LS/Resources/LSF/LSFReader.cs @@ -283,7 +283,7 @@ private MemoryStream Decompress(BinaryReader reader, uint sizeOnDisk, uint uncom bool isCompressed = Metadata.CompressionFlags.Method() != CompressionMethod.None; uint compressedSize = isCompressed ? 
sizeOnDisk : uncompressedSize; byte[] compressed = reader.ReadBytes((int)compressedSize); - var uncompressed = BinUtils.Decompress(compressed, (int)uncompressedSize, Metadata.CompressionFlags, chunked); + var uncompressed = CompressionHelpers.Decompress(compressed, (int)uncompressedSize, Metadata.CompressionFlags, chunked); #if DUMP_LSF_SERIALIZATION using (var nodesFile = new FileStream(debugDumpTo, FileMode.Create, FileAccess.Write)) diff --git a/LSLib/LS/Resources/LSF/LSFWriter.cs b/LSLib/LS/Resources/LSF/LSFWriter.cs index 57a6cea2..a5787bab 100644 --- a/LSLib/LS/Resources/LSF/LSFWriter.cs +++ b/LSLib/LS/Resources/LSF/LSFWriter.cs @@ -110,10 +110,10 @@ public void Write(Resource resource) } bool chunked = Version >= LSFVersion.VerChunkedCompress; - byte[] stringsCompressed = BinUtils.Compress(stringBuffer, Compression, CompressionLevel); - byte[] nodesCompressed = BinUtils.Compress(nodeBuffer, Compression, CompressionLevel, chunked); - byte[] attributesCompressed = BinUtils.Compress(attributeBuffer, Compression, CompressionLevel, chunked); - byte[] valuesCompressed = BinUtils.Compress(valueBuffer, Compression, CompressionLevel, chunked); + byte[] stringsCompressed = CompressionHelpers.Compress(stringBuffer, Compression, CompressionLevel); + byte[] nodesCompressed = CompressionHelpers.Compress(nodeBuffer, Compression, CompressionLevel, chunked); + byte[] attributesCompressed = CompressionHelpers.Compress(attributeBuffer, Compression, CompressionLevel, chunked); + byte[] valuesCompressed = CompressionHelpers.Compress(valueBuffer, Compression, CompressionLevel, chunked); if (Version < LSFVersion.VerBG3AdditionalBlob) { diff --git a/StoryCompiler/DebugInfoSaver.cs b/StoryCompiler/DebugInfoSaver.cs index 4ab88409..9bb9104d 100644 --- a/StoryCompiler/DebugInfoSaver.cs +++ b/StoryCompiler/DebugInfoSaver.cs @@ -192,8 +192,8 @@ public void Save(Stream stream, StoryDebugInfo debugInfo) codedStream.Flush(); byte[] proto = ms.ToArray(); - var flags = BinUtils.MakeCompressionFlags(CompressionMethod.LZ4, LSCompressionLevel.Fast); - byte[] compressed = BinUtils.Compress(proto, flags); + var flags = CompressionHelpers.MakeCompressionFlags(CompressionMethod.LZ4, LSCompressionLevel.Fast); + byte[] compressed = CompressionHelpers.Compress(proto, flags); stream.Write(compressed, 0, compressed.Length); using (var writer = new BinaryWriter(stream, Encoding.UTF8, true)) From b322504a2912297d62f13fb1b045c40084f01b01 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Fri, 22 Mar 2024 18:00:59 +0100 Subject: [PATCH 093/139] Fix incorrect V15 package TOC --- LSLib/LS/PackageWriter.cs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/LSLib/LS/PackageWriter.cs b/LSLib/LS/PackageWriter.cs index 8ff51556..39ed4a46 100644 --- a/LSLib/LS/PackageWriter.cs +++ b/LSLib/LS/PackageWriter.cs @@ -44,7 +44,7 @@ public void Dispose() } } - private bool CanCompressFile(PackageBuildInputFile file, Stream inputStream) + protected bool CanCompressFile(PackageBuildInputFile file, Stream inputStream) { var extension = Path.GetExtension(file.Path).ToLowerInvariant(); return extension != ".gts" @@ -54,7 +54,7 @@ private bool CanCompressFile(PackageBuildInputFile file, Stream inputStream) && inputStream.Length > 0; } - private void WritePadding(Stream stream) + protected void WritePadding(Stream stream) { int padLength = Build.Version.PaddingSize(); long alignTo; @@ -78,7 +78,7 @@ private void WritePadding(Stream stream) stream.Write(pad, 0, pad.Length); } - private PackageBuildTransientFile 
WriteFile(PackageBuildInputFile input) + protected PackageBuildTransientFile WriteFile(PackageBuildInputFile input) { using var inputStream = input.MakeInputStream(); From 229de923b6555893d154b627e4d672cc1d756d08 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Fri, 22 Mar 2024 18:01:16 +0100 Subject: [PATCH 094/139] Fix issues when loading savegames --- LSLib/LS/Save/SavegameHelpers.cs | 14 +++++++------- LSLib/LS/Story/Compiler/StoryEmitter.cs | 6 ++---- LSLib/LS/Story/Database.cs | 3 --- 3 files changed, 9 insertions(+), 14 deletions(-) diff --git a/LSLib/LS/Save/SavegameHelpers.cs b/LSLib/LS/Save/SavegameHelpers.cs index 3046c9b4..de7e0a3b 100644 --- a/LSLib/LS/Save/SavegameHelpers.cs +++ b/LSLib/LS/Save/SavegameHelpers.cs @@ -74,7 +74,7 @@ public byte[] ResaveStoryToGlobals(Story.Story story, ResourceConversionParamete var rsrcWriter = new LSFWriter(rewrittenStream) { Version = conversionParams.LSF, - EncodeSiblingData = false + MetadataFormat = LSFMetadataFormat.None }; rsrcWriter.Write(globals); rewrittenStream.Seek(0, SeekOrigin.Begin); @@ -105,10 +105,10 @@ public void ResaveStory(Story.Story story, Game game, string path) foreach (var file in Package.Files.Where(x => x.Name.ToLowerInvariant() != "globals.lsf")) { using var stream = file.CreateContentReader(); - var contents = new byte[stream.Length]; - stream.ReadExactly(contents, 0, contents.Length); + using var unpacked = new MemoryStream(); + stream.CopyTo(unpacked); - build.Files.Add(PackageBuildInputFile.CreateFromBlob(contents, file.Name)); + build.Files.Add(PackageBuildInputFile.CreateFromBlob(unpacked.ToArray(), file.Name)); } } else @@ -124,10 +124,10 @@ public void ResaveStory(Story.Story story, Game game, string path) foreach (var file in Package.Files.Where(x => x.Name.ToLowerInvariant() != "StorySave.bin")) { using var stream = file.CreateContentReader(); - var contents = new byte[stream.Length]; - stream.ReadExactly(contents, 0, contents.Length); + using var unpacked = new MemoryStream(); + stream.CopyTo(unpacked); - build.Files.Add(PackageBuildInputFile.CreateFromBlob(contents, file.Name)); + build.Files.Add(PackageBuildInputFile.CreateFromBlob(unpacked.ToArray(), file.Name)); } } diff --git a/LSLib/LS/Story/Compiler/StoryEmitter.cs b/LSLib/LS/Story/Compiler/StoryEmitter.cs index a404da7c..ce7123de 100644 --- a/LSLib/LS/Story/Compiler/StoryEmitter.cs +++ b/LSLib/LS/Story/Compiler/StoryEmitter.cs @@ -389,8 +389,7 @@ private DatabaseNode EmitDatabase(FunctionSignature signature) { Types = new List(signature.Params.Count) }, - OwnerNode = null, - FactsPosition = 0 + OwnerNode = null }; foreach (var param in signature.Params) @@ -457,8 +456,7 @@ private Database EmitIntermediateDatabase(IRRule rule, int tupleSize, Node owner { Types = paramTypes }, - OwnerNode = ownerNode, - FactsPosition = 0 + OwnerNode = ownerNode }; osiDb.Facts = new FactCollection(osiDb, Story); diff --git a/LSLib/LS/Story/Database.cs b/LSLib/LS/Story/Database.cs index b23ce7f0..71859882 100644 --- a/LSLib/LS/Story/Database.cs +++ b/LSLib/LS/Story/Database.cs @@ -195,7 +195,6 @@ public class Database : OsirisSerializable public ParameterList Parameters; public FactCollection Facts; public Node OwnerNode; - public long FactsPosition; public void Read(OsiReader reader) { @@ -203,7 +202,6 @@ public void Read(OsiReader reader) Parameters = new ParameterList(); Parameters.Read(reader); - FactsPosition = reader.BaseStream.Position; Facts = new FactCollection(this, reader.Story); reader.ReadList(Facts); } @@ -229,7 +227,6 @@ public void DebugDump(TextWriter 
writer, Story story) writer.Write("(Not owned)"); } - writer.Write(" @ {0:X}: ", FactsPosition); Parameters.DebugDump(writer, story); writer.WriteLine(""); From da5fe7a3b7c5153f9acab3c77f831cf935b35e8b Mon Sep 17 00:00:00 2001 From: Norbyte Date: Wed, 27 Mar 2024 18:19:43 +0100 Subject: [PATCH 095/139] Fix export not working when the import GR2 contains duplicate mesh names --- LSLib/Granny/Model/ColladaExporter.cs | 79 +++++++++++++++++---------- 1 file changed, 49 insertions(+), 30 deletions(-) diff --git a/LSLib/Granny/Model/ColladaExporter.cs b/LSLib/Granny/Model/ColladaExporter.cs index 52583a0f..2dfe3dfb 100644 --- a/LSLib/Granny/Model/ColladaExporter.cs +++ b/LSLib/Granny/Model/ColladaExporter.cs @@ -5,9 +5,10 @@ namespace LSLib.Granny.Model; -public class ColladaMeshExporter(Mesh mesh, ExporterOptions options) +public class ColladaMeshExporter(Mesh mesh, string exportedId, ExporterOptions options) { private Mesh ExportedMesh = mesh; + private string ExportedId = exportedId; private ExporterOptions Options = options; private List Sources; private List Inputs; @@ -57,7 +58,7 @@ private void DetermineInputsFromComponentNames(List componentNames) { case "Position": { - var positions = ExportedMesh.PrimaryVertexData.MakeColladaPositions(ExportedMesh.Name); + var positions = ExportedMesh.PrimaryVertexData.MakeColladaPositions(ExportedId); AddInput(positions, "POSITION", "VERTEX"); break; } @@ -66,7 +67,7 @@ private void DetermineInputsFromComponentNames(List componentNames) { if (Options.ExportNormals) { - var normals = ExportedMesh.PrimaryVertexData.MakeColladaNormals(ExportedMesh.Name); + var normals = ExportedMesh.PrimaryVertexData.MakeColladaNormals(ExportedId); AddInput(normals, "NORMAL"); } break; @@ -76,7 +77,7 @@ private void DetermineInputsFromComponentNames(List componentNames) { if (Options.ExportTangents) { - var tangents = ExportedMesh.PrimaryVertexData.MakeColladaTangents(ExportedMesh.Name); + var tangents = ExportedMesh.PrimaryVertexData.MakeColladaTangents(ExportedId); AddInput(tangents, "TEXTANGENT"); } break; @@ -86,7 +87,7 @@ private void DetermineInputsFromComponentNames(List componentNames) { if (Options.ExportTangents) { - var binormals = ExportedMesh.PrimaryVertexData.MakeColladaBinormals(ExportedMesh.Name); + var binormals = ExportedMesh.PrimaryVertexData.MakeColladaBinormals(ExportedId); AddInput(binormals, "TEXBINORMAL"); } break; @@ -102,7 +103,7 @@ private void DetermineInputsFromComponentNames(List componentNames) if (Options.ExportUVs) { int uvIndex = Int32.Parse(component[^1..]); - var uvs = ExportedMesh.PrimaryVertexData.MakeColladaUVs(ExportedMesh.Name, uvIndex, Options.FlipUVs); + var uvs = ExportedMesh.PrimaryVertexData.MakeColladaUVs(ExportedId, uvIndex, Options.FlipUVs); AddInput(uvs, null, "TEXCOORD", (ulong)uvIndex); } break; @@ -118,7 +119,7 @@ private void DetermineInputsFromComponentNames(List componentNames) if (Options.ExportUVs) { int uvIndex = Int32.Parse(component[^1..]) - 1; - var uvs = ExportedMesh.PrimaryVertexData.MakeColladaUVs(ExportedMesh.Name, uvIndex, Options.FlipUVs); + var uvs = ExportedMesh.PrimaryVertexData.MakeColladaUVs(ExportedId, uvIndex, Options.FlipUVs); AddInput(uvs, null, "TEXCOORD", (ulong)uvIndex); } break; @@ -133,7 +134,7 @@ private void DetermineInputsFromComponentNames(List componentNames) { if (Options.ExportColors) { - var colors = ExportedMesh.PrimaryVertexData.MakeColladaColors(ExportedMesh.Name, 0); + var colors = ExportedMesh.PrimaryVertexData.MakeColladaColors(ExportedId, 0); AddInput(colors, null, "COLOR", 
0); } break; @@ -154,27 +155,27 @@ private void DetermineInputsFromVertex(Vertex vertex) } // Vertex positions - var positions = ExportedMesh.PrimaryVertexData.MakeColladaPositions(ExportedMesh.Name); + var positions = ExportedMesh.PrimaryVertexData.MakeColladaPositions(ExportedId); AddInput(positions, "POSITION", "VERTEX"); // Normals if (desc.NormalType != NormalType.None && Options.ExportNormals) { - var normals = ExportedMesh.PrimaryVertexData.MakeColladaNormals(ExportedMesh.Name); + var normals = ExportedMesh.PrimaryVertexData.MakeColladaNormals(ExportedId); AddInput(normals, null, "NORMAL"); } // Tangents if (desc.TangentType != NormalType.None && Options.ExportTangents) { - var normals = ExportedMesh.PrimaryVertexData.MakeColladaTangents(ExportedMesh.Name); + var normals = ExportedMesh.PrimaryVertexData.MakeColladaTangents(ExportedId); AddInput(normals, null, "TEXTANGENT"); } // Binormals if (desc.BinormalType != NormalType.None && Options.ExportTangents) { - var normals = ExportedMesh.PrimaryVertexData.MakeColladaBinormals(ExportedMesh.Name); + var normals = ExportedMesh.PrimaryVertexData.MakeColladaBinormals(ExportedId); AddInput(normals, null, "TEXBINORMAL"); } @@ -183,7 +184,7 @@ private void DetermineInputsFromVertex(Vertex vertex) { for (var uvIndex = 0; uvIndex < desc.TextureCoordinates; uvIndex++) { - var uvs = ExportedMesh.PrimaryVertexData.MakeColladaUVs(ExportedMesh.Name, uvIndex, Options.FlipUVs); + var uvs = ExportedMesh.PrimaryVertexData.MakeColladaUVs(ExportedId, uvIndex, Options.FlipUVs); AddInput(uvs, null, "TEXCOORD", (ulong)uvIndex); } } @@ -193,7 +194,7 @@ private void DetermineInputsFromVertex(Vertex vertex) { for (var colorIndex = 0; colorIndex < desc.ColorMaps; colorIndex++) { - var colors = ExportedMesh.PrimaryVertexData.MakeColladaColors(ExportedMesh.Name, colorIndex); + var colors = ExportedMesh.PrimaryVertexData.MakeColladaColors(ExportedId, colorIndex); AddInput(colors, null, "COLOR", (ulong)colorIndex); } } @@ -332,7 +333,7 @@ public mesh Export() { vertices = new vertices { - id = ExportedMesh.Name + "-vertices", + id = ExportedId + "-vertices", input = Inputs.ToArray() }, source = Sources.ToArray(), @@ -359,15 +360,35 @@ public class ColladaExporter [Serialization(Kind = SerializationKind.None)] public ExporterOptions Options = new(); + private Dictionary MeshIds = new(); private XmlDocument Xml = new(); + private void GenerateUniqueMeshIds(List meshes) + { + HashSet namesInUse = []; + foreach (var mesh in meshes) + { + var name = mesh.Name; + var nameNum = 1; + while (namesInUse.Contains(name)) + { + name = mesh.Name + "_" + nameNum.ToString(); + nameNum++; + } + + namesInUse.Add(name); + MeshIds[mesh] = name; + } + } + private void ExportMeshBinding(Model model, string skelRef, MeshBinding meshBinding, List geometries, List controllers, List geomNodes) { - var exporter = new ColladaMeshExporter(meshBinding.Mesh, Options); + var meshId = MeshIds[meshBinding.Mesh]; + var exporter = new ColladaMeshExporter(meshBinding.Mesh, meshId, Options); var mesh = exporter.Export(); var geom = new geometry { - id = meshBinding.Mesh.Name + "-geom", + id = meshId + "-geom", name = meshBinding.Mesh.Name, Item = mesh }; @@ -387,7 +408,7 @@ private void ExportMeshBinding(Model model, string skelRef, MeshBinding meshBind skin = ExportSkin(meshBinding.Mesh, model.Skeleton.Bones, boneNames, geom.id); ctrl = new controller { - id = meshBinding.Mesh.Name + "-skin", + id = meshId + "-skin", name = meshBinding.Mesh.Name + "_Skin", Item = skin }; @@ -397,7 +418,7 @@ private 
void ExportMeshBinding(Model model, string skelRef, MeshBinding meshBind var geomNode = new node { id = geom.name + "-node", - name = geom.name, + name = meshBinding.Mesh.Name, type = NodeType.NODE }; @@ -424,6 +445,7 @@ private void ExportMeshBinding(Model model, string skelRef, MeshBinding meshBind private skin ExportSkin(Mesh mesh, List bones, Dictionary nameMaps, string geometryId) { + var meshIds = MeshIds[mesh]; var sources = new List(); var joints = new List(); var poses = new List(); @@ -465,9 +487,9 @@ private skin ExportSkin(Mesh mesh, List bones, Dictionary na }); } - var jointSource = ColladaUtils.MakeNameSource(mesh.Name, "joints", ["JOINT"], joints.ToArray()); - var poseSource = ColladaUtils.MakeFloatSource(mesh.Name, "poses", ["TRANSFORM"], poses.ToArray(), 16, "float4x4"); - var weightsSource = mesh.PrimaryVertexData.MakeBoneWeights(mesh.Name); + var jointSource = ColladaUtils.MakeNameSource(meshIds, "joints", ["JOINT"], joints.ToArray()); + var poseSource = ColladaUtils.MakeFloatSource(meshIds, "poses", ["TRANSFORM"], poses.ToArray(), 16, "float4x4"); + var weightsSource = mesh.PrimaryVertexData.MakeBoneWeights(meshIds); var vertices = mesh.PrimaryVertexData.Deduplicator.Vertices.Uniques; var vertexInfluenceCounts = new List(vertices.Count); @@ -587,17 +609,12 @@ public node ExportSkeleton(Skeleton skeleton, string name) private void ExportModels(Root root, List geometries, List controllers, List geomNodes) { - if (root.Models == null) - { - return; - } - - foreach(var model in root.Models) - { + foreach (var model in root.Models ?? []) + { string skelRef = null; if (model.Skeleton != null && !model.Skeleton.IsDummy && model.Skeleton.Bones.Count > 1 && root.Skeletons.Any(s => s.Name == model.Skeleton.Name)) { - Utils.Info($"Exporting model {model.Name} with skeleton {model.Skeleton.Name}"); + Utils.Info($"Exporting model {model.Name} with skeleton {model.Skeleton.Name}"); var skeleton = ExportSkeleton(model.Skeleton, model.Name); geomNodes.Add(skeleton); skelRef = skeleton.id; @@ -882,6 +899,8 @@ public void Export(Root root, string outputPath) else asset.unit.meter = 1; + GenerateUniqueMeshIds(root.Meshes ?? 
[]); + + var geometries = new List(); var controllers = new List(); var geomNodes = new List(); From bfeb3d5954e6afa27d572e3f7dcfa43ba08f37a7 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Wed, 3 Apr 2024 16:44:44 +0200 Subject: [PATCH 096/139] Fix export crash with non-standard mesh names --- LSLib/Granny/Model/ColladaExporter.cs | 6 ++++++ LSLib/LS/Common.cs | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/LSLib/Granny/Model/ColladaExporter.cs b/LSLib/Granny/Model/ColladaExporter.cs index 2dfe3dfb..28e5774a 100644 --- a/LSLib/Granny/Model/ColladaExporter.cs +++ b/LSLib/Granny/Model/ColladaExporter.cs @@ -2,6 +2,8 @@ using LSLib.LS; using System.Xml; using LSLib.LS.Enums; +using System.Text.RegularExpressions; +using System.Xml.Linq; namespace LSLib.Granny.Model; @@ -366,9 +368,13 @@ public class ColladaExporter private void GenerateUniqueMeshIds(List meshes) { HashSet namesInUse = []; + var charRe = new Regex("[^a-zA-Z0-9_.-]", RegexOptions.CultureInvariant); foreach (var mesh in meshes) { + // Sanitize name to make sure it satisfies Collada xsd:NCName requirements + mesh.Name = charRe.Replace(mesh.Name, "_"); var name = mesh.Name; + var nameNum = 1; while (namesInUse.Contains(name)) { diff --git a/LSLib/LS/Common.cs b/LSLib/LS/Common.cs index 3b8a6b63..ac6134b2 100644 --- a/LSLib/LS/Common.cs +++ b/LSLib/LS/Common.cs @@ -16,7 +16,7 @@ public static class Common public const int MinorVersion = 19; - public const int PatchVersion = 3; + public const int PatchVersion = 5; // Version of LSTools profile data in generated DAE files public const int ColladaMetadataVersion = 3; From fdd5e510093bb67a8ee988dfc7938c5f1f4acd87 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Wed, 3 Apr 2024 16:44:53 +0200 Subject: [PATCH 097/139] Add missing stat definitions --- LSLibDefinitions.xml | 1099 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 1099 insertions(+) create mode 100644 LSLibDefinitions.xml diff --git a/LSLibDefinitions.xml b/LSLibDefinitions.xml new file mode 100644 index 00000000..e5da3c1e --- /dev/null +++ b/LSLibDefinitions.xml @@ -0,0 +1,1099 @@ + [1099 lines of stat definition XML omitted] \ No newline at end of file
From 354d5b45b5bb5968c3bde61a4f4f77ccee7586bd Mon Sep 17 00:00:00 2001 From: Norbyte Date: Tue, 9 Apr 2024 16:05:26 +0200 Subject: [PATCH 098/139] Hopefully re-add missing stuff --- LSLib/LS/Enums/Game.cs | 4 +- LSLib/LS/Enums/LSFVersion.cs | 6 +-- LSLib/LS/NodeAttribute.cs | 22 ++++++++++ LSLib/LS/PackageReader.cs | 2 +- LSLib/LS/PackageWriter.cs | 2 +- LSLib/LS/Resource.cs | 2 + LSLib/LS/ResourceUtils.cs | 19 ++++++--- LSLib/LS/Resources/LSF/LSFCommon.cs | 58 +++++++++++++++++++++++-- LSLib/LS/Resources/LSF/LSFReader.cs | 65 ++++++++++++++++++++++++----- LSLib/LS/Resources/LSF/LSFWriter.cs | 54 +++++++++++++++++++----- LSLib/LS/Resources/LSX/LSXReader.cs | 3 ++ LSLib/LS/Resources/LSX/LSXWriter.cs | 5 +++ 12 files changed, 203 insertions(+), 39 deletions(-) diff --git a/LSLib/LS/Enums/Game.cs b/LSLib/LS/Enums/Game.cs index f5ddbf27..78b98021 100644 --- a/LSLib/LS/Enums/Game.cs +++ b/LSLib/LS/Enums/Game.cs @@ -38,8 +38,8 @@ public static LSFVersion LSFVersion(this Game game) case Game.DivinityOriginalSinEE: return Enums.LSFVersion.VerChunkedCompress; case Game.DivinityOriginalSin2: return Enums.LSFVersion.VerExtendedNodes; case Game.DivinityOriginalSin2DE: return Enums.LSFVersion.VerExtendedNodes; - case Game.BaldursGate3: return Enums.LSFVersion.VerBG3AdditionalBlob; - default: return Enums.LSFVersion.VerBG3AdditionalBlob; + case Game.BaldursGate3: return Enums.LSFVersion.VerBG3Patch3; + default: return Enums.LSFVersion.VerBG3Patch3; } } diff --git a/LSLib/LS/Enums/LSFVersion.cs b/LSLib/LS/Enums/LSFVersion.cs index 790e35ab..5e269f8a 100644 --- a/LSLib/LS/Enums/LSFVersion.cs +++ b/LSLib/LS/Enums/LSFVersion.cs @@ -28,9 +28,9 @@ public enum LSFVersion VerBG3ExtendedHeader = 0x05, /// - /// BG3 version with unknown additions + /// BG3 version with node key names /// - VerBG3AdditionalBlob = 0x06, + VerBG3NodeKeys = 0x06, /// /// BG3 Patch 3 version with unknown additions /// @@ -45,7 +45,7 @@ public enum LSFVersion /// /// Latest output version supported by this library /// - MaxWriteVersion = 0x06 + MaxWriteVersion = 0x07 } public enum LSXVersion diff --git a/LSLib/LS/NodeAttribute.cs b/LSLib/LS/NodeAttribute.cs index fc4230af..83780091 100644 --- a/LSLib/LS/NodeAttribute.cs +++ b/LSLib/LS/NodeAttribute.cs @@ -39,6 +39,7 @@ public class NodeSerializationSettings { public bool DefaultByteSwapGuids = true; public bool ByteSwapGuids = true; + public LSFMetadataFormat LSFMetadata = LSFMetadataFormat.None; public void InitFromMeta(string meta) { @@ -46,11 +47,22 @@ public void InitFromMeta(string meta) { // No metadata available, use defaults ByteSwapGuids = DefaultByteSwapGuids; + LSFMetadata = LSFMetadataFormat.None; } else { var tags = meta.Split(','); ByteSwapGuids = tags.Contains("bswap_guids"); + + LSFMetadata =
LSFMetadataFormat.None; + if (tags.Contains("lsf_adjacency")) + { + LSFMetadata = LSFMetadataFormat.None2; + } + else if (tags.Contains("lsf_keys_adjacency")) + { + LSFMetadata = LSFMetadataFormat.KeysAndAdjacency; + } } } @@ -61,6 +73,16 @@ public string BuildMeta() { tags.Add("bswap_guids"); } + + if (LSFMetadata == LSFMetadataFormat.None2) + { + tags.Add("lsf_adjacency"); + } + + if (LSFMetadata == LSFMetadataFormat.KeysAndAdjacency) + { + tags.Add("lsf_keys_adjacency"); + } return String.Join(",", tags); } diff --git a/LSLib/LS/PackageReader.cs b/LSLib/LS/PackageReader.cs index e4b0c607..8305d7b1 100644 --- a/LSLib/LS/PackageReader.cs +++ b/LSLib/LS/PackageReader.cs @@ -39,7 +39,7 @@ public void OpenPart(int index, string path) { var file = File.OpenRead(path); Parts[index] = MemoryMappedFile.CreateFromFile(file, null, file.Length, MemoryMappedFileAccess.Read, HandleInheritability.None, false); - Views[index] = MetadataFile.CreateViewAccessor(0, 0, MemoryMappedFileAccess.Read); + Views[index] = Parts[index].CreateViewAccessor(0, file.Length, MemoryMappedFileAccess.Read); } public void OpenStreams(int numParts) diff --git a/LSLib/LS/PackageWriter.cs b/LSLib/LS/PackageWriter.cs index 39ed4a46..b3e1482f 100644 --- a/LSLib/LS/PackageWriter.cs +++ b/LSLib/LS/PackageWriter.cs @@ -368,7 +368,7 @@ public static PackageWriter Create(PackageBuildData build, string packagePath) { PackageVersion.V18 => new PackageWriter_V15(build, packagePath), PackageVersion.V16 => new PackageWriter_V15(build, packagePath), - PackageVersion.V15 => new PackageWriter_V15(build, packagePath), + PackageVersion.V15 => new PackageWriter_V15(build, packagePath), PackageVersion.V13 => new PackageWriter_V13(build, packagePath), PackageVersion.V10 => new PackageWriter_V7(build, packagePath), PackageVersion.V9 or PackageVersion.V7 => new PackageWriter_V7(build, packagePath), diff --git a/LSLib/LS/Resource.cs b/LSLib/LS/Resource.cs index e57e316e..6342682c 100644 --- a/LSLib/LS/Resource.cs +++ b/LSLib/LS/Resource.cs @@ -164,6 +164,7 @@ public static class AttributeTypeMaps public class Resource { public LSMetadata Metadata; + public LSFMetadataFormat? MetadataFormat = null; public Dictionary Regions = []; public Resource() @@ -184,6 +185,7 @@ public class Node public Dictionary Attributes = []; public Dictionary> Children = []; public int? Line = null; + public string KeyAttribute = null; public int ChildCount { diff --git a/LSLib/LS/ResourceUtils.cs b/LSLib/LS/ResourceUtils.cs index 7be8464a..719396c8 100644 --- a/LSLib/LS/ResourceUtils.cs +++ b/LSLib/LS/ResourceUtils.cs @@ -35,9 +35,10 @@ public class ResourceConversionParameters public LSFVersion LSF = LSFVersion.MaxWriteVersion; /// - /// Store sibling/neighbour node data in LSF files (usually done by savegames only) + /// Store sibling/neighbour node data in LSF files (usually done by savegames and dictionary-like files) + /// (null = auto-detect based on input resource file) /// - public bool LSFEncodeSiblingData = false; + public LSFMetadataFormat? MetadataFormat = null; /// /// Format of generated LSX files @@ -52,7 +53,7 @@ public class ResourceConversionParameters /// /// LSF/LSB compression method /// - public CompressionMethod Compression = CompressionMethod.LZ4; + public CompressionMethod Compression = CompressionMethod.None; /// /// LSF/LSB compression level (i.e. 
size/compression time tradeoff) @@ -78,6 +79,12 @@ public void ToSerializationSettings(NodeSerializationSettings settings) { settings.DefaultByteSwapGuids = ByteSwapGuids; } + + public void ToSerializationSettings(NodeSerializationSettings settings, Resource res) + { + settings.DefaultByteSwapGuids = ByteSwapGuids; + settings.LSFMetadata = res.MetadataFormat ?? settings.LSFMetadata; + } } public class ResourceUtils @@ -167,7 +174,7 @@ public static void SaveResource(Resource resource, string outputPath, ResourceFo Version = conversionParams.LSX, PrettyPrint = conversionParams.PrettyPrint }; - conversionParams.ToSerializationSettings(writer.SerializationSettings); + conversionParams.ToSerializationSettings(writer.SerializationSettings, resource); writer.Write(resource); break; } @@ -184,7 +191,7 @@ public static void SaveResource(Resource resource, string outputPath, ResourceFo var writer = new LSFWriter(file) { Version = conversionParams.LSF, - EncodeSiblingData = conversionParams.LSFEncodeSiblingData, + MetadataFormat = conversionParams.MetadataFormat ?? resource.MetadataFormat ?? LSFMetadataFormat.None, Compression = conversionParams.Compression, CompressionLevel = conversionParams.CompressionLevel }; @@ -198,7 +205,7 @@ public static void SaveResource(Resource resource, string outputPath, ResourceFo { PrettyPrint = conversionParams.PrettyPrint }; - conversionParams.ToSerializationSettings(writer.SerializationSettings); + conversionParams.ToSerializationSettings(writer.SerializationSettings, resource); writer.Write(resource); break; } diff --git a/LSLib/LS/Resources/LSF/LSFCommon.cs b/LSLib/LS/Resources/LSF/LSFCommon.cs index a970d742..4ac5d95b 100644 --- a/LSLib/LS/Resources/LSF/LSFCommon.cs +++ b/LSLib/LS/Resources/LSF/LSFCommon.cs @@ -85,9 +85,18 @@ internal struct LSFMetadataV5 /// /// Extended node/attribute format indicator, 0 for V2, 0/1 for V3 /// - public UInt32 HasSiblingData; + public LSFMetadataFormat MetadataFormat; } + +public enum LSFMetadataFormat : UInt32 +{ + None = 0, + KeysAndAdjacency = 1, + None2 = 2 // Behaves same way as None +}; + + [StructLayout(LayoutKind.Sequential, Pack = 1)] internal struct LSFMetadataV6 { @@ -99,7 +108,14 @@ internal struct LSFMetadataV6 /// Compressed size of the string hash table /// public UInt32 StringsSizeOnDisk; - public UInt64 Unknown; + /// + /// Total uncompressed size of the node key attribute table + /// + public UInt32 KeysUncompressedSize; + /// + /// Compressed size of the node key attribute table + /// + public UInt32 KeysSizeOnDisk; /// /// Total uncompressed size of the node list /// @@ -135,9 +151,9 @@ internal struct LSFMetadataV6 public Byte Unknown2; public UInt16 Unknown3; /// - /// Extended node/attribute format indicator, 0 for V2, 0/1 for V3 + /// Extended node/attribute format indicator /// - public UInt32 HasSiblingData; + public LSFMetadataFormat MetadataFormat; } /// @@ -223,6 +239,39 @@ public int NameOffset } }; +/// +/// Key attribute name definition for a specific node in the LSF file +/// +[StructLayout(LayoutKind.Sequential, Pack = 1)] +internal struct LSFKeyEntry +{ + /// + /// Index of the node + /// + public UInt32 NodeIndex; + /// + /// Name of key attribute + /// (16-bit MSB: index into name hash table, 16-bit LSB: offset in hash chain) + /// + public UInt32 KeyName; + + /// + /// Index into name hash table + /// + public int KeyNameIndex + { + get { return (int)(KeyName >> 16); } + } + + /// + /// Offset in hash chain + /// + public int KeyNameOffset + { + get { return (int)(KeyName & 0xffff); } + } 
+}; + /// /// Processed node information for a node in the LSF file /// @@ -246,6 +295,7 @@ internal class LSFNodeInfo /// (-1: node has no attributes) /// public int FirstAttributeIndex; + public string KeyAttribute = null; }; /// diff --git a/LSLib/LS/Resources/LSF/LSFReader.cs b/LSLib/LS/Resources/LSF/LSFReader.cs index a7b2e3d1..22d05b29 100644 --- a/LSLib/LS/Resources/LSF/LSFReader.cs +++ b/LSLib/LS/Resources/LSF/LSFReader.cs @@ -2,6 +2,7 @@ // #define DUMP_LSF_SERIALIZATION using LSLib.LS.Enums; +using System.Diagnostics; namespace LSLib.LS; @@ -249,15 +250,44 @@ private void ReadAttributesV3(Stream s) var resolved = Attributes[i]; var debug = String.Format( - "{0}: {1} (offset {2:X}, typeId {3}, nextAttribute {4})", + "{0}: {1} (offset {2:X}, typeId {3}, length {4}, nextAttribute {5})", i, Names[resolved.NameIndex][resolved.NameOffset], resolved.DataOffset, - resolved.TypeId, resolved.NextAttributeIndex + resolved.TypeId, resolved.Length, resolved.NextAttributeIndex ); Debug.WriteLine(debug); } #endif } + /// + /// Reads the V3 attribute headers for the LSOF resource + /// + /// Stream to read the attribute headers from + private void ReadKeys(Stream s) + { + using var reader = new BinaryReader(s); + +#if DEBUG_LSF_SERIALIZATION + Debug.WriteLine(" ----- DUMP OF KEY TABLE -----"); +#endif + + while (s.Position < s.Length) + { + var key = BinUtils.ReadStruct(reader); + var KeyAttribute = Names[key.KeyNameIndex][key.KeyNameOffset]; + var node = Nodes[(int)key.NodeIndex]; + node.KeyAttribute = KeyAttribute; + +#if DEBUG_LSF_SERIALIZATION + var debug = String.Format( + "{0} ({1}): {2}", + key.NodeIndex, Names[node.NameIndex][node.NameOffset], KeyAttribute + ); + Debug.WriteLine(debug); +#endif + } + } + private MemoryStream Decompress(BinaryReader reader, uint sizeOnDisk, uint uncompressedSize, string debugDumpTo, bool allowChunked) { if (sizeOnDisk == 0 && uncompressedSize != 0) // data is not compressed @@ -335,7 +365,7 @@ private void ReadHeaders(BinaryReader reader) GameVersion = PackedVersion.FromInt32(hdr.EngineVersion); } - if (Version < LSFVersion.VerBG3AdditionalBlob) + if (Version < LSFVersion.VerBG3NodeKeys) { var meta = BinUtils.ReadStruct(reader); Metadata = new LSFMetadataV6 @@ -349,7 +379,7 @@ private void ReadHeaders(BinaryReader reader) ValuesUncompressedSize = meta.ValuesUncompressedSize, ValuesSizeOnDisk = meta.ValuesSizeOnDisk, CompressionFlags = meta.CompressionFlags, - HasSiblingData = meta.HasSiblingData + MetadataFormat = meta.MetadataFormat }; } else @@ -374,18 +404,18 @@ public Resource Read() var nodesStream = Decompress(reader, Metadata.NodesSizeOnDisk, Metadata.NodesUncompressedSize, "nodes.bin", true); using (nodesStream) { - var longNodes = Version >= LSFVersion.VerExtendedNodes - && Metadata.HasSiblingData == 1; - ReadNodes(nodesStream, longNodes); + var hasAdjacencyData = Version >= LSFVersion.VerExtendedNodes + && Metadata.MetadataFormat == LSFMetadataFormat.KeysAndAdjacency; + ReadNodes(nodesStream, hasAdjacencyData); } Attributes = []; var attributesStream = Decompress(reader, Metadata.AttributesSizeOnDisk, Metadata.AttributesUncompressedSize, "attributes.bin", true); using (attributesStream) { - var hasSiblingData = Version >= LSFVersion.VerExtendedNodes - && Metadata.HasSiblingData == 1; - if (hasSiblingData) + var hasAdjacencyData = Version >= LSFVersion.VerExtendedNodes + && Metadata.MetadataFormat == LSFMetadataFormat.KeysAndAdjacency; + if (hasAdjacencyData) { ReadAttributesV3(attributesStream); } @@ -397,7 +427,17 @@ public Resource Read() 
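// Sketch (annotation, not part of the original patch): the key table added by this patch packs two values
// into the 32-bit LSFKeyEntry.KeyName field - the upper 16 bits index the name hash table and the lower
// 16 bits are the offset within that hash chain, matching the KeyNameIndex / KeyNameOffset accessors above.
// A hypothetical helper for building such a packed field could look like:
// static UInt32 PackKeyName(int nameIndex, int nameOffset)
// {
//     return ((UInt32)nameIndex << 16) | ((UInt32)nameOffset & 0xffff);
// }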
this.Values = Decompress(reader, Metadata.ValuesSizeOnDisk, Metadata.ValuesUncompressedSize, "values.bin", true); + if (Metadata.MetadataFormat == LSFMetadataFormat.KeysAndAdjacency) + { + var keysStream = Decompress(reader, Metadata.KeysSizeOnDisk, Metadata.KeysUncompressedSize, "keys.bin", true); + using (keysStream) + { + ReadKeys(keysStream); + } + } + Resource resource = new(); + resource.MetadataFormat = Metadata.MetadataFormat; ReadRegions(resource); resource.Metadata.MajorVersion = GameVersion.Major; @@ -419,6 +459,7 @@ private void ReadRegions(Resource resource) { var region = new Region(); ReadNode(defn, region, attrReader); + region.KeyAttribute = defn.KeyAttribute; NodeInstances.Add(region); region.RegionName = region.Name; resource.Regions[region.Name] = region; @@ -427,6 +468,7 @@ private void ReadRegions(Resource resource) { var node = new Node(); ReadNode(defn, node, attrReader); + node.KeyAttribute = defn.KeyAttribute; node.Parent = NodeInstances[defn.ParentIndex]; NodeInstances.Add(node); NodeInstances[defn.ParentIndex].AppendChild(node); @@ -440,6 +482,7 @@ private void ReadNode(LSFNodeInfo defn, Node node, BinaryReader attributeReader) #if DEBUG_LSF_SERIALIZATION Debug.WriteLine(String.Format("Begin node {0}", node.Name)); + var debugSerializationSettings = new NodeSerializationSettings(); #endif if (defn.FirstAttributeIndex != -1) @@ -452,7 +495,7 @@ private void ReadNode(LSFNodeInfo defn, Node node, BinaryReader attributeReader) node.Attributes[Names[attribute.NameIndex][attribute.NameOffset]] = value; #if DEBUG_LSF_SERIALIZATION - Debug.WriteLine(String.Format(" {0:X}: {1} ({2})", attribute.DataOffset, Names[attribute.NameIndex][attribute.NameOffset], value)); + Debug.WriteLine(String.Format(" {0:X}: {1} ({2})", attribute.DataOffset, Names[attribute.NameIndex][attribute.NameOffset], value.AsString(debugSerializationSettings))); #endif if (attribute.NextAttributeIndex == -1) diff --git a/LSLib/LS/Resources/LSF/LSFWriter.cs b/LSLib/LS/Resources/LSF/LSFWriter.cs index a5787bab..0fc78fa0 100644 --- a/LSLib/LS/Resources/LSF/LSFWriter.cs +++ b/LSLib/LS/Resources/LSF/LSFWriter.cs @@ -22,12 +22,15 @@ public class LSFWriter(Stream stream) private MemoryStream ValueStream; private BinaryWriter ValueWriter; + private MemoryStream KeyStream; + private BinaryWriter KeyWriter; + private List> StringHashMap; private List NextSiblingIndices; public LSFVersion Version = LSFVersion.MaxWriteVersion; - public bool EncodeSiblingData = false; - public CompressionMethod Compression = CompressionMethod.LZ4; + public LSFMetadataFormat MetadataFormat = LSFMetadataFormat.None; + public CompressionMethod Compression = CompressionMethod.None; public LSCompressionLevel CompressionLevel = LSCompressionLevel.Default; public void Write(Resource resource) @@ -47,6 +50,8 @@ public void Write(Resource resource) using (this.AttributeWriter = new BinaryWriter(AttributeStream)) using (this.ValueStream = new MemoryStream()) using (this.ValueWriter = new BinaryWriter(ValueStream)) + using (this.KeyStream = new MemoryStream()) + using (this.KeyWriter = new BinaryWriter(KeyStream)) { NextNodeIndex = 0; NextAttributeIndex = 0; @@ -58,7 +63,7 @@ public void Write(Resource resource) StringHashMap.Add([]); } - if (EncodeSiblingData) + if (MetadataFormat != LSFMetadataFormat.None) { ComputeSiblingIndices(resource); } @@ -76,6 +81,7 @@ public void Write(Resource resource) var nodeBuffer = NodeStream.ToArray(); var attributeBuffer = AttributeStream.ToArray(); var valueBuffer = ValueStream.ToArray(); + var keyBuffer 
= KeyStream.ToArray(); var magic = new LSFMagic { @@ -114,8 +120,19 @@ public void Write(Resource resource) byte[] nodesCompressed = CompressionHelpers.Compress(nodeBuffer, Compression, CompressionLevel, chunked); byte[] attributesCompressed = CompressionHelpers.Compress(attributeBuffer, Compression, CompressionLevel, chunked); byte[] valuesCompressed = CompressionHelpers.Compress(valueBuffer, Compression, CompressionLevel, chunked); + byte[] keysCompressed; + + if (MetadataFormat == LSFMetadataFormat.KeysAndAdjacency) + { + keysCompressed = CompressionHelpers.Compress(keyBuffer, Compression, CompressionLevel, chunked); + } + else + { + // Avoid generating a key blob with compression headers if key data should not be written at all + keysCompressed = new byte[0]; + } - if (Version < LSFVersion.VerBG3AdditionalBlob) + if (Version < LSFVersion.VerBG3NodeKeys) { var meta = new LSFMetadataV5 { @@ -140,10 +157,10 @@ public void Write(Resource resource) meta.ValuesSizeOnDisk = (UInt32)valuesCompressed.Length; } - meta.CompressionFlags = BinUtils.MakeCompressionFlags(Compression, CompressionLevel); + meta.CompressionFlags = CompressionHelpers.MakeCompressionFlags(Compression, CompressionLevel); meta.Unknown2 = 0; meta.Unknown3 = 0; - meta.HasSiblingData = EncodeSiblingData ? 1u : 0u; + meta.MetadataFormat = MetadataFormat; BinUtils.WriteStruct(Writer, ref meta); } @@ -152,6 +169,7 @@ public void Write(Resource resource) var meta = new LSFMetadataV6 { StringsUncompressedSize = (UInt32)stringBuffer.Length, + KeysUncompressedSize = (UInt32)keyBuffer.Length, NodesUncompressedSize = (UInt32)nodeBuffer.Length, AttributesUncompressedSize = (UInt32)attributeBuffer.Length, ValuesUncompressedSize = (UInt32)valueBuffer.Length @@ -160,6 +178,7 @@ public void Write(Resource resource) if (Compression == CompressionMethod.None) { meta.StringsSizeOnDisk = 0; + meta.KeysSizeOnDisk = 0; meta.NodesSizeOnDisk = 0; meta.AttributesSizeOnDisk = 0; meta.ValuesSizeOnDisk = 0; @@ -167,16 +186,16 @@ public void Write(Resource resource) else { meta.StringsSizeOnDisk = (UInt32)stringsCompressed.Length; + meta.KeysSizeOnDisk = (UInt32)keysCompressed.Length; meta.NodesSizeOnDisk = (UInt32)nodesCompressed.Length; meta.AttributesSizeOnDisk = (UInt32)attributesCompressed.Length; meta.ValuesSizeOnDisk = (UInt32)valuesCompressed.Length; } - meta.Unknown = 0; - meta.CompressionFlags = BinUtils.MakeCompressionFlags(Compression, CompressionLevel); + meta.CompressionFlags = CompressionHelpers.MakeCompressionFlags(Compression, CompressionLevel); meta.Unknown2 = 0; meta.Unknown3 = 0; - meta.HasSiblingData = EncodeSiblingData ? 
1u : 0u; + meta.MetadataFormat = MetadataFormat; BinUtils.WriteStruct(Writer, ref meta); } @@ -185,6 +204,7 @@ public void Write(Resource resource) Writer.Write(nodesCompressed, 0, nodesCompressed.Length); Writer.Write(attributesCompressed, 0, attributesCompressed.Length); Writer.Write(valuesCompressed, 0, valuesCompressed.Length); + Writer.Write(keysCompressed, 0, keysCompressed.Length); } } @@ -236,7 +256,7 @@ private void WriteRegions(Resource resource) foreach (var region in resource.Regions) { if (Version >= LSFVersion.VerExtendedNodes - && EncodeSiblingData) + && MetadataFormat == LSFMetadataFormat.KeysAndAdjacency) { WriteNodeV3(region.Value); } @@ -302,7 +322,8 @@ private void WriteNodeChildren(Node node) { foreach (var child in children.Value) { - if (Version >= LSFVersion.VerExtendedNodes && EncodeSiblingData) + if (Version >= LSFVersion.VerExtendedNodes + && MetadataFormat == LSFMetadataFormat.KeysAndAdjacency) { WriteNodeV3(child); } @@ -373,6 +394,17 @@ private void WriteNodeV3(Node node) } BinUtils.WriteStruct(NodeWriter, ref nodeInfo); + + if (node.KeyAttribute != null && MetadataFormat == LSFMetadataFormat.KeysAndAdjacency) + { + var keyInfo = new LSFKeyEntry + { + NodeIndex = (UInt32)NextNodeIndex, + KeyName = AddStaticString(node.KeyAttribute) + }; + BinUtils.WriteStruct(KeyWriter, ref keyInfo); + } + NodeIndices[node] = NextNodeIndex; NextNodeIndex++; diff --git a/LSLib/LS/Resources/LSX/LSXReader.cs b/LSLib/LS/Resources/LSX/LSXReader.cs index 5f3cfeb6..1b7ed4f4 100644 --- a/LSLib/LS/Resources/LSX/LSXReader.cs +++ b/LSLib/LS/Resources/LSX/LSXReader.cs @@ -101,6 +101,7 @@ private void ReadElement() Version = (resource.Metadata.MajorVersion >= 4) ? LSXVersion.V4 : LSXVersion.V3; var lslibMeta = reader["lslib_meta"]; SerializationSettings.InitFromMeta(lslibMeta ?? 
""); + resource.MetadataFormat = SerializationSettings.LSFMetadata; break; case "region": @@ -139,6 +140,8 @@ private void ReadElement() Debug.Assert(node.Name != null); node.Parent?.AppendChild(node); + node.KeyAttribute = reader["key"]; + if (!reader.IsEmptyElement) stack.Add(node); break; diff --git a/LSLib/LS/Resources/LSX/LSXWriter.cs b/LSLib/LS/Resources/LSX/LSXWriter.cs index 76b0f83a..25274295 100644 --- a/LSLib/LS/Resources/LSX/LSXWriter.cs +++ b/LSLib/LS/Resources/LSX/LSXWriter.cs @@ -102,6 +102,11 @@ private void WriteNode(Node node) writer.WriteStartElement("node"); writer.WriteAttributeString("id", node.Name); + if (node.KeyAttribute != null) + { + writer.WriteAttributeString("key", node.KeyAttribute); + } + foreach (var attribute in node.Attributes) { writer.WriteStartElement("attribute"); From 88e576d3ec03391631906f4def5c936716e7a205 Mon Sep 17 00:00:00 2001 From: Nicolas Gnyra Date: Fri, 26 Apr 2024 21:51:44 -0400 Subject: [PATCH 099/139] Fix GR2 export weirdness when vertex weight > 255 --- LSLib/Granny/Model/ColladaImporter.cs | 47 ++++++++++++++++----------- 1 file changed, 28 insertions(+), 19 deletions(-) diff --git a/LSLib/Granny/Model/ColladaImporter.cs b/LSLib/Granny/Model/ColladaImporter.cs index 5127c370..08a7da18 100644 --- a/LSLib/Granny/Model/ColladaImporter.cs +++ b/LSLib/Granny/Model/ColladaImporter.cs @@ -1,5 +1,4 @@ -using System.Diagnostics; -using LSLib.Granny.GR2; +using LSLib.Granny.GR2; using LSLib.LS; using OpenTK.Mathematics; @@ -700,40 +699,50 @@ private void ImportSkin(Root root, skin skin) influenceSum += weights[weightIndex]; } - byte totalEncoded = 0; + ushort totalEncoded = 0; for (var i = 0; i < influenceCount; i++) { var weightIndex = influences[offset + i * stride + weightInputIndex]; var weight = weights[weightIndex] / influenceSum * 255.0f; var encodedWeight = (byte)Math.Round(weight); totalEncoded += encodedWeight; - vertErrors[i] = Math.Abs(encodedWeight - weight); + vertErrors[i] = encodedWeight - weight; vertWeights[i] = encodedWeight; } - while (totalEncoded != 0 && totalEncoded < 255) + while (totalEncoded != 0 && totalEncoded != 255) { - float firstHighest = 0.0f; - int errorIndex = -1; - for (var i = 0; i < influenceCount; i++) + int errorIndex = 0; + if (totalEncoded < 255) { - if (vertErrors[i] > firstHighest) + for (var i = 1; i < influenceCount; i++) { - firstHighest = vertErrors[i]; - errorIndex = i; + if (vertErrors[i] < vertErrors[errorIndex]) + { + errorIndex = i; + } } - } - var weightIndex = influences[offset + errorIndex * stride + weightInputIndex]; - var weight = weights[weightIndex] / influenceSum * 255.0f; + vertWeights[errorIndex]++; + vertErrors[errorIndex]++; + totalEncoded++; + } + else + { + for (var i = 1; i < influenceCount; i++) + { + if (vertErrors[i] > vertErrors[errorIndex]) + { + errorIndex = i; + } + } - vertWeights[errorIndex]++; - vertErrors[errorIndex] = Math.Abs(vertWeights[errorIndex] - weight); - totalEncoded++; + vertWeights[errorIndex]--; + vertErrors[errorIndex]--; + totalEncoded--; + } } - Debug.Assert(totalEncoded == 0 || totalEncoded == 255); - for (var i = 0; i < influenceCount; i++) { // Not all vertices are actually used in triangles, we may have unused verts in the From c46b43df87107f76d64442b1d600305b054994d1 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Wed, 15 May 2024 20:58:02 +0200 Subject: [PATCH 100/139] Fix unnecessary GR2 reader seeks --- LSLib/Granny/GR2/Reader.cs | 63 ++++++++++++++++++++++++++------------ 1 file changed, 44 insertions(+), 19 deletions(-) diff --git 
a/LSLib/Granny/GR2/Reader.cs b/LSLib/Granny/GR2/Reader.cs index d93e7c2a..fc100235 100644 --- a/LSLib/Granny/GR2/Reader.cs +++ b/LSLib/Granny/GR2/Reader.cs @@ -10,7 +10,7 @@ public class ParsingException(string message) : Exception(message) { } -public class GR2Reader(Stream stream) +public class GR2Reader(Stream stream) : IDisposable { internal Stream InputStream = stream; internal BinaryReader InputReader; @@ -32,6 +32,7 @@ public UInt32 Tag public void Dispose() { + Reader?.Dispose(); Stream?.Dispose(); } @@ -56,29 +57,37 @@ public void Read(object root) Debug.Assert(InputStream.Position == Magic.headersSize); - UncompressStream(); - - foreach (var section in Sections) + try { - ReadSectionRelocations(section); - } + UncompressStream(); - if (Magic.IsLittleEndian != BitConverter.IsLittleEndian) - { - // TODO: This should be done before applying relocations? foreach (var section in Sections) { - ReadSectionMixedMarshallingRelocations(section); + ReadSectionRelocations(section); } - } - var rootStruct = new StructReference - { - Offset = Sections[(int)Header.rootType.Section].Header.offsetInFile + Header.rootType.Offset - }; + if (Magic.IsLittleEndian != BitConverter.IsLittleEndian) + { + // TODO: This should be done before applying relocations? + foreach (var section in Sections) + { + ReadSectionMixedMarshallingRelocations(section); + } + } + + var rootStruct = new StructReference + { + Offset = Sections[(int)Header.rootType.Section].Header.offsetInFile + Header.rootType.Offset + }; - Seek(Header.rootNode); - ReadStruct(rootStruct.Resolve(this), MemberType.Inline, root, null); + Seek(Header.rootNode); + ReadStruct(rootStruct.Resolve(this), MemberType.Inline, root, null); + } + finally + { + Reader?.Dispose(); + Stream?.Dispose(); + } } } @@ -227,7 +236,11 @@ private void UncompressStream() var section = Sections[i]; var hdr = section.Header; byte[] sectionContents = new byte[hdr.compressedSize]; - InputStream.Position = hdr.offsetInFile; + if (InputStream.Position != hdr.offsetInFile) + { + InputStream.Position = hdr.offsetInFile; + } + InputStream.Read(sectionContents, 0, (int)hdr.compressedSize); var originalOffset = hdr.offsetInFile; @@ -290,7 +303,19 @@ private void ReadSectionRelocations(Section section) { if (section.Header.numRelocations == 0) return; - InputStream.Seek(section.Header.relocationsOffset, SeekOrigin.Begin); + if (InputStream.Position != section.Header.relocationsOffset) + { + if (InputStream.Position < section.Header.relocationsOffset) + { + var dummy = new byte[section.Header.relocationsOffset - InputStream.Position]; + InputStream.Read(dummy); + } + else + { + InputStream.Seek(section.Header.relocationsOffset, SeekOrigin.Begin); + } + } + if (section.Header.compression == 4) { using var reader = new BinaryReader(InputStream, Encoding.Default, true); From dc5a5b52301b6f1b74badb3441bb6d7df3053058 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Wed, 15 May 2024 23:15:30 +0200 Subject: [PATCH 101/139] Fix GR2 reader struct skip logic --- LSLib/Granny/GR2/Format.cs | 19 ++++++++++++++----- LSLib/Granny/GR2/Reader.cs | 8 ++++---- 2 files changed, 18 insertions(+), 9 deletions(-) diff --git a/LSLib/Granny/GR2/Format.cs b/LSLib/Granny/GR2/Format.cs index 62efd541..57e50df5 100644 --- a/LSLib/Granny/GR2/Format.cs +++ b/LSLib/Granny/GR2/Format.cs @@ -802,7 +802,7 @@ public bool IsScalar get { return Type > MemberType.ReferenceToVariantArray; } } - public UInt32 Size(GR2Reader gr2) + public UInt32 ElementSize(GR2Reader gr2) { return Type switch { @@ -820,8 +820,8 @@ public 
UInt32 Size(GR2Reader gr2) MemberType.Real16 => 2, MemberType.Reference => gr2.Magic.Is32Bit ? 4u : 8, + MemberType.String => gr2.Magic.Is32Bit ? 4u : 8, - MemberType.String => 4, MemberType.Real32 => 4, MemberType.Int32 => 4, MemberType.UInt32 => 4, @@ -837,7 +837,12 @@ public UInt32 Size(GR2Reader gr2) }; } - public UInt32 MarshallingSize() + public UInt32 TotalSize(GR2Reader gr2) + { + return (ArraySize == 0 ? 1 : ArraySize) * ElementSize(gr2); + } + + public UInt32 ElementMarshallingSize() { switch (Type) { @@ -860,7 +865,6 @@ public UInt32 MarshallingSize() case MemberType.Real16: return 2; - case MemberType.String: case MemberType.Transform: case MemberType.Real32: case MemberType.Int32: @@ -875,6 +879,11 @@ public UInt32 MarshallingSize() } } + public UInt32 TotalMarshallingSize() + { + return (ArraySize == 0 ? 1 : ArraySize) * ElementMarshallingSize(); + } + public bool ShouldSerialize(UInt32 version) { return ((MinVersion == 0 || MinVersion <= version) && @@ -1020,7 +1029,7 @@ public UInt32 Size(GR2Reader gr2) { UInt32 size = 0; foreach (var member in Members) - size += member.Size(gr2); + size += member.TotalSize(gr2); return size; } diff --git a/LSLib/Granny/GR2/Reader.cs b/LSLib/Granny/GR2/Reader.cs index fc100235..08c0a0f3 100644 --- a/LSLib/Granny/GR2/Reader.cs +++ b/LSLib/Granny/GR2/Reader.cs @@ -150,7 +150,7 @@ private Header ReadHeader() // throw new ParsingException(String.Format("Incorrect header tag; expected {0:X8}, got {1:X8}", Header.Tag, header.tag)); Debug.Assert(header.fileSize <= InputStream.Length); - Debug.Assert(header.CalculateCRC(InputStream) == header.crc); + //Debug.Assert(header.CalculateCRC(InputStream) == header.crc); Debug.Assert(header.sectionsOffset == header.Size()); Debug.Assert(header.rootType.Section < header.numSections); // TODO: check rootTypeOffset after serialization @@ -338,14 +338,14 @@ private void MixedMarshal(UInt32 count, StructDefinition definition) { foreach (var member in definition.Members) { - var size = member.Size(this); + var size = member.TotalSize(this); if (member.Type == MemberType.Inline) { MixedMarshal(member.ArraySize == 0 ? 1 : member.ArraySize, member.Definition.Resolve(this)); } - else if (member.MarshallingSize() > 1) + else if (member.TotalMarshallingSize() > 1) { - var marshalSize = member.MarshallingSize(); + var marshalSize = member.TotalMarshallingSize(); byte[] data = new byte[size]; Stream.Read(data, 0, (int)size); for (var j = 0; j < size / marshalSize; j++) From 310b62ac9bbd2f22d0098a938b128d9aa090bf52 Mon Sep 17 00:00:00 2001 From: khbsd Date: Sun, 2 Jun 2024 12:56:45 -0500 Subject: [PATCH 102/139] added excludeHidden variable to AddFilesFromPath and CreatePackage to let user have control lets users of lslib choose whether or not to include/exclude files and folders with a . 
in front of their name --- LSLib/LS/PackageCommon.cs | 24 +++++++++++++++++++----- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/LSLib/LS/PackageCommon.cs b/LSLib/LS/PackageCommon.cs index 2e484778..233c23de 100644 --- a/LSLib/LS/PackageCommon.cs +++ b/LSLib/LS/PackageCommon.cs @@ -173,7 +173,8 @@ public void UncompressPackage(string packagePath, string outputPath, Func element.StartsWith("."))) + { + var name = Path.GetRelativePath(path, file); + build.Files.Add(PackageBuildInputFile.CreateFromFilesystem(file, name)); + } + } + else if (!excludeHidden) + { + var name = Path.GetRelativePath(path, file); + build.Files.Add(PackageBuildInputFile.CreateFromFilesystem(file, name)); + } } } - public async Task CreatePackage(string packagePath, string inputPath, PackageBuildData build) + public async Task CreatePackage(string packagePath, string inputPath, PackageBuildData build, bool excludeHidden = true) { FileManager.TryToCreateDirectory(packagePath); ProgressUpdate("Enumerating files ...", 0, 1); - AddFilesFromPath(build, inputPath); + AddFilesFromPath(build, inputPath, excludeHidden); ProgressUpdate("Creating archive ...", 0, 1); using var writer = PackageWriterFactory.Create(build, packagePath); From b73129db6f680653c596676e3d482ca5a013df6c Mon Sep 17 00:00:00 2001 From: khbsd Date: Tue, 4 Jun 2024 14:37:26 -0500 Subject: [PATCH 103/139] refactored exclusion logic, and added get and set methods for Priority and Exclusion. refactored exclusion logic, and added get and set methods for Priority and ExcludeHidden. built and tested. --- LSLib/LS/PackageCommon.cs | 52 ++++++++++++++++++++++++++------------- 1 file changed, 35 insertions(+), 17 deletions(-) diff --git a/LSLib/LS/PackageCommon.cs b/LSLib/LS/PackageCommon.cs index 233c23de..d4b9431e 100644 --- a/LSLib/LS/PackageCommon.cs +++ b/LSLib/LS/PackageCommon.cs @@ -1,4 +1,6 @@ -using System.IO.MemoryMappedFiles; +using System.IO; +using System.IO.MemoryMappedFiles; +using System.Xml.Linq; using LSLib.LS.Enums; namespace LSLib.LS; @@ -114,10 +116,15 @@ public class PackageBuildData public CompressionMethod Compression = CompressionMethod.None; public LSCompressionLevel CompressionLevel = LSCompressionLevel.Default; public PackageFlags Flags = 0; - public byte Priority = 0; // Calculate full archive checksum? 
public bool Hash = false; public List Files = []; + + public bool ExcludeHidden + { get; set; } = true; + public byte Priority + { get; set; } = 0; + } public class Packager @@ -173,8 +180,28 @@ public void UncompressPackage(string packagePath, string outputPath, Func element.StartsWith("."))) + { + return true; + } + return false; + } + else if (!build.ExcludeHidden) + { + return true; + } + return false; + } + + + private static void AddFilesFromPath(PackageBuildData build, string path) { if (!path.EndsWith(Path.DirectorySeparatorChar.ToString(), StringComparison.InvariantCultureIgnoreCase)) { @@ -183,30 +210,21 @@ private static void AddFilesFromPath(PackageBuildData build, string path, bool e foreach (var file in Directory.EnumerateFiles(path, "*.*", SearchOption.AllDirectories)) { - string[] fileElements = file.Split(Path.DirectorySeparatorChar); + var name = Path.GetRelativePath(path, file); - if (excludeHidden) - { - if (!Array.Exists(fileElements, element => element.StartsWith("."))) - { - var name = Path.GetRelativePath(path, file); - build.Files.Add(PackageBuildInputFile.CreateFromFilesystem(file, name)); - } - } - else if (!excludeHidden) + if (ShouldInclude(file, build)) { - var name = Path.GetRelativePath(path, file); build.Files.Add(PackageBuildInputFile.CreateFromFilesystem(file, name)); } } } - public async Task CreatePackage(string packagePath, string inputPath, PackageBuildData build, bool excludeHidden = true) + public async Task CreatePackage(string packagePath, string inputPath, PackageBuildData build) { FileManager.TryToCreateDirectory(packagePath); ProgressUpdate("Enumerating files ...", 0, 1); - AddFilesFromPath(build, inputPath, excludeHidden); + AddFilesFromPath(build, inputPath); ProgressUpdate("Creating archive ...", 0, 1); using var writer = PackageWriterFactory.Create(build, packagePath); From 2020d4a4e3ad79efc8918c1c1b43ad0892129a00 Mon Sep 17 00:00:00 2001 From: khbsd Date: Tue, 4 Jun 2024 15:14:00 -0500 Subject: [PATCH 104/139] removed unnecessary return statement --- LSLib/LS/PackageCommon.cs | 1 - 1 file changed, 1 deletion(-) diff --git a/LSLib/LS/PackageCommon.cs b/LSLib/LS/PackageCommon.cs index d4b9431e..b19d8907 100644 --- a/LSLib/LS/PackageCommon.cs +++ b/LSLib/LS/PackageCommon.cs @@ -191,7 +191,6 @@ public static bool ShouldInclude(string file, PackageBuildData build) { return true; } - return false; } else if (!build.ExcludeHidden) { From e9d00dac1f690e5441eb817bf71ee0f3c730ac68 Mon Sep 17 00:00:00 2001 From: khbsd Date: Mon, 10 Jun 2024 12:30:00 -0500 Subject: [PATCH 105/139] cleanup and simplified code, ty norlbo baggins simplified conditional and include/exclude logic --- LSLib/LS/PackageCommon.cs | 21 +++++---------------- 1 file changed, 5 insertions(+), 16 deletions(-) diff --git a/LSLib/LS/PackageCommon.cs b/LSLib/LS/PackageCommon.cs index b19d8907..ac70690b 100644 --- a/LSLib/LS/PackageCommon.cs +++ b/LSLib/LS/PackageCommon.cs @@ -120,10 +120,8 @@ public class PackageBuildData public bool Hash = false; public List Files = []; - public bool ExcludeHidden - { get; set; } = true; - public byte Priority - { get; set; } = 0; + public bool ExcludeHidden { get; set; } = true; + public byte Priority { get; set; } = 0; } @@ -180,26 +178,17 @@ public void UncompressPackage(string packagePath, string outputPath, Func element.StartsWith("."))) - { - return true; - } - } - else if (!build.ExcludeHidden) - { - return true; + var fileElements = file.Split(Path.DirectorySeparatorChar); + + return Array.Exists(fileElements, element => 
element.StartsWith('.')); } return false; } - private static void AddFilesFromPath(PackageBuildData build, string path) { if (!path.EndsWith(Path.DirectorySeparatorChar.ToString(), StringComparison.InvariantCultureIgnoreCase)) From ce002b730ea352182975860361648798b54cfb6d Mon Sep 17 00:00:00 2001 From: khbsd Date: Tue, 18 Jun 2024 12:42:09 -0500 Subject: [PATCH 106/139] make path checking posix-compatible --- LSLib/LS/FileManager.cs | 15 ++------------- 1 file changed, 2 insertions(+), 13 deletions(-) diff --git a/LSLib/LS/FileManager.cs b/LSLib/LS/FileManager.cs index c236f89a..1e72518a 100644 --- a/LSLib/LS/FileManager.cs +++ b/LSLib/LS/FileManager.cs @@ -1,4 +1,4 @@ -namespace LSLib.LS; +namespace LSLib.LS; public class FileManager { @@ -12,24 +12,13 @@ public static void TryToCreateDirectory(string path) } // throw exception if path is relative - Uri uri; - try - { - Uri.TryCreate(outputPath, UriKind.RelativeOrAbsolute, out uri); - } - catch (InvalidOperationException) - { - throw new ArgumentException("Cannot create directory without absolute path", nameof(path)); - } - - if (!Path.IsPathRooted(outputPath) || !uri.IsFile) + if (!Path.IsPathFullyQualified(outputPath)) { throw new ArgumentException("Cannot create directory without absolute path", nameof(path)); } // validate path outputPath = Path.GetFullPath(path); - outputPath = Path.GetDirectoryName(outputPath); if (outputPath == null) From e062312111417107449a43241cada00bfd82a9b7 Mon Sep 17 00:00:00 2001 From: khbsd Date: Tue, 18 Jun 2024 12:43:21 -0500 Subject: [PATCH 107/139] make build package more consistent --- LSLib/LS/PackageCommon.cs | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/LSLib/LS/PackageCommon.cs b/LSLib/LS/PackageCommon.cs index ac70690b..0e1ee791 100644 --- a/LSLib/LS/PackageCommon.cs +++ b/LSLib/LS/PackageCommon.cs @@ -112,14 +112,13 @@ public static PackageBuildInputFile CreateFromFilesystem(string filesystemPath, public class PackageBuildData { - public PackageVersion Version = PackageHeaderCommon.CurrentVersion; - public CompressionMethod Compression = CompressionMethod.None; - public LSCompressionLevel CompressionLevel = LSCompressionLevel.Default; - public PackageFlags Flags = 0; + public PackageVersion Version { get; set; } = PackageHeaderCommon.CurrentVersion; + public CompressionMethod Compression { get; set; } = CompressionMethod.None; + public LSCompressionLevel CompressionLevel { get; set; } = LSCompressionLevel.Default; + public PackageFlags Flags { get; set; } = 0; // Calculate full archive checksum? - public bool Hash = false; + public bool Hash { get; set; } = false; public List Files = []; - public bool ExcludeHidden { get; set; } = true; public byte Priority { get; set; } = 0; From e4985eb7548e039fba540e1af9b4c0fff96a35f5 Mon Sep 17 00:00:00 2001 From: khbsd Date: Tue, 18 Jun 2024 12:44:56 -0500 Subject: [PATCH 108/139] forgot one --- LSLib/LS/PackageCommon.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/LSLib/LS/PackageCommon.cs b/LSLib/LS/PackageCommon.cs index 0e1ee791..981b8139 100644 --- a/LSLib/LS/PackageCommon.cs +++ b/LSLib/LS/PackageCommon.cs @@ -118,7 +118,7 @@ public class PackageBuildData public PackageFlags Flags { get; set; } = 0; // Calculate full archive checksum? 
public bool Hash { get; set; } = false; - public List Files = []; + public List Files { get; set; } = []; public bool ExcludeHidden { get; set; } = true; public byte Priority { get; set; } = 0; From 09eb180e1c9d93455b4d549a8a36e15a956e06c8 Mon Sep 17 00:00:00 2001 From: juumeijin <144704019+juumeijin@users.noreply.github.com> Date: Sun, 7 Jul 2024 19:57:06 +0200 Subject: [PATCH 109/139] Update LSLibDefinitions.xml Small --- LSLibDefinitions.xml | 52 ++++++++++++++++++++++---------------------- 1 file changed, 26 insertions(+), 26 deletions(-) diff --git a/LSLibDefinitions.xml b/LSLibDefinitions.xml index e5da3c1e..c68c2353 100644 --- a/LSLibDefinitions.xml +++ b/LSLibDefinitions.xml @@ -377,7 +377,7 @@ - + @@ -389,8 +389,8 @@ - - + + @@ -430,10 +430,10 @@ - - - - + + + + @@ -462,7 +462,7 @@ - + @@ -505,9 +505,9 @@ - - - + + + @@ -653,12 +653,12 @@ - + - + @@ -725,7 +725,7 @@ - + @@ -784,7 +784,7 @@ - + @@ -821,11 +821,11 @@ - + - + @@ -839,10 +839,10 @@ - + - + @@ -870,7 +870,7 @@ - + @@ -974,7 +974,7 @@ - + @@ -1006,10 +1006,10 @@ - - - - + + + + @@ -1043,7 +1043,7 @@ - + From 455aab5127bbc81010c0948db24457261b85fee5 Mon Sep 17 00:00:00 2001 From: khbsd Date: Sun, 7 Jul 2024 14:09:00 -0500 Subject: [PATCH 110/139] beans'd it smoge --- LSLib/LS/PackageCommon.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/LSLib/LS/PackageCommon.cs b/LSLib/LS/PackageCommon.cs index 981b8139..f7c48c4f 100644 --- a/LSLib/LS/PackageCommon.cs +++ b/LSLib/LS/PackageCommon.cs @@ -183,7 +183,7 @@ public static bool ShouldInclude(string file, PackageBuildData build) { var fileElements = file.Split(Path.DirectorySeparatorChar); - return Array.Exists(fileElements, element => element.StartsWith('.')); + return !Array.Exists(fileElements, element => element.StartsWith('.')); } return false; } From 2e7c2652d30a232df13abc24add643cf8b32c48c Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sun, 6 Oct 2024 14:28:34 +0200 Subject: [PATCH 111/139] Various diagnostic location tracking fixes --- LSLib/LS/Story/GoalParser/GoalParser.cs | 2 +- LSLibStats/Stats/File/Stat.lex | 2 +- LSLibStats/Stats/File/StatParser.cs | 10 +++++----- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/LSLib/LS/Story/GoalParser/GoalParser.cs b/LSLib/LS/Story/GoalParser/GoalParser.cs index 807b1a6b..d94a1b28 100644 --- a/LSLib/LS/Story/GoalParser/GoalParser.cs +++ b/LSLib/LS/Story/GoalParser/GoalParser.cs @@ -85,7 +85,7 @@ public GoalScanner(String fileName) public CodeLocation LastLocation() { - return new CodeLocation(null, tokLin, tokCol, tokELin, tokECol); + return new CodeLocation(fileName, tokLin, tokCol, tokELin, tokECol); } } diff --git a/LSLibStats/Stats/File/Stat.lex b/LSLibStats/Stats/File/Stat.lex index ea41131f..4f0e4979 100644 --- a/LSLibStats/Stats/File/Stat.lex +++ b/LSLibStats/Stats/File/Stat.lex @@ -10,7 +10,7 @@ namechar [a-zA-Z_] %% -data([ ]+)\"([^\"]+)\"([ ]+)\"(.*)\" { yylval = MakeDataProperty(tokLin, tokCol, tokELin, tokECol, yytext); return (int)StatTokens.DATA_ITEM; } +data([ ]+)\"([^\"]+)\"([ ]+)\"(.*)\" { yylval = MakeDataProperty(fileName, tokLin, tokCol, tokELin, tokECol, yytext); return (int)StatTokens.DATA_ITEM; } /* Reserved words */ "new" return (int)StatTokens.NEW; diff --git a/LSLibStats/Stats/File/StatParser.cs b/LSLibStats/Stats/File/StatParser.cs index dd6eee9c..bc49dd2a 100644 --- a/LSLibStats/Stats/File/StatParser.cs +++ b/LSLibStats/Stats/File/StatParser.cs @@ -19,7 +19,7 @@ public abstract class StatScanBase : AbstractScanner { protected string? 
fileName = null; - //public override CodeLocation yylloc { get; set; } + public override CodeLocation yylloc { get; set; } protected virtual bool yywrap() { return true; } @@ -30,7 +30,7 @@ protected string MakeString(string lit) return MakeLiteral(Regex.Unescape(lit.Substring(1, lit.Length - 2))); } - protected StatProperty MakeDataProperty(int startLine, int startCol, int endLine, int endCol, string lit) + protected StatProperty MakeDataProperty(string fileName, int startLine, int startCol, int endLine, int endCol, string lit) { var re = new Regex(@"data\s+""([^""]+)""\s+""(.*)""\s*", RegexOptions.CultureInvariant); var matches = re.Match(lit); @@ -42,8 +42,8 @@ protected StatProperty MakeDataProperty(int startLine, int startCol, int endLine return new StatProperty( matches.Groups[1].Value, matches.Groups[2].Value, - new CodeLocation(null, startLine, startCol, endLine, endCol), - new CodeLocation(null, startLine, startCol + matches.Groups[2].Index, endLine, startCol + matches.Groups[2].Index + matches.Groups[2].Value.Length) + new CodeLocation(fileName, startLine, startCol, endLine, endCol), + new CodeLocation(fileName, startLine, startCol + matches.Groups[2].Index, endLine, startCol + matches.Groups[2].Index + matches.Groups[2].Value.Length) ); } } @@ -57,7 +57,7 @@ public StatScanner(string? fileName) public CodeLocation LastLocation() { - return new CodeLocation(null, tokLin, tokCol, tokELin, tokECol); + return new CodeLocation(fileName, tokLin, tokCol, tokELin, tokECol); } } From 9c05c6a36e8f97ca5d0953dd3b28eb8cdb3fe703 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sun, 6 Oct 2024 14:29:26 +0200 Subject: [PATCH 112/139] Split requirements validator --- LSLibStats/Stats/Functor/RollConditions.lex | 23 +++++++ LSLibStats/Stats/Functor/RollConditions.yy | 38 ++++++++++ .../Stats/Functor/RollConditionsParser.cs | 69 +++++++++++++++++++ LSLibStats/Stats/StatValueParsers.cs | 30 +++++++- 4 files changed, 159 insertions(+), 1 deletion(-) create mode 100644 LSLibStats/Stats/Functor/RollConditions.lex create mode 100644 LSLibStats/Stats/Functor/RollConditions.yy create mode 100644 LSLibStats/Stats/Functor/RollConditionsParser.cs diff --git a/LSLibStats/Stats/Functor/RollConditions.lex b/LSLibStats/Stats/Functor/RollConditions.lex new file mode 100644 index 00000000..d19e5a41 --- /dev/null +++ b/LSLibStats/Stats/Functor/RollConditions.lex @@ -0,0 +1,23 @@ +%namespace LSLib.Stats.RollConditions +%visibility public +%scannertype RollConditionScanner +%scanbasetype RollConditionScanBase +%tokentype RollConditionTokens + +namechar [a-zA-Z0-9_] +nonseparator [^;\[\] ] + +%% + +/* Special characters */ +";" return (int)';'; +"[" return (int)'['; +"]" return (int)']'; +[ \t] ; + +({namechar})+ { yylval = yytext; return (int)RollConditionTokens.NAME; } +({nonseparator})+ { yylval = yytext; return (int)RollConditionTokens.TEXT; } + +%{ + yylloc = new QUT.Gppg.LexLocation(tokLin, tokCol, tokELin, tokECol); +%} diff --git a/LSLibStats/Stats/Functor/RollConditions.yy b/LSLibStats/Stats/Functor/RollConditions.yy new file mode 100644 index 00000000..b6585878 --- /dev/null +++ b/LSLibStats/Stats/Functor/RollConditions.yy @@ -0,0 +1,38 @@ +%namespace LSLib.Stats.RollConditions +%partial +%visibility public +%parsertype RollConditionParser +%tokentype RollConditionTokens +%YYSTYPE System.Object + +%start Root + +/* Name-like expression */ +%token NAME +/* Text-like (unquoted) literal */ +%token TEXT + +%% + +Root : RollConditions; + +RollConditions : RollConditionOrEmpty { $$ = AddCondition(MakeConditions(), 
$1); } + | RollConditions ';' RollConditionOrEmpty { $$ = AddCondition($1, $3); } + ; + +RollConditionOrEmpty : /* empty */ + | RollCondition + ; + +RollCondition : NAME '[' Expression ']' { $$ = MakeCondition($1, $3); } + | NAME + | TEXT + | NAME Expression { $$ = ConcatExpression($1, $2); } + | TEXT Expression { $$ = ConcatExpression($1, $2); } + ; + +Expression : NAME + | TEXT + | Expression NAME { $$ = ConcatExpression($1, $2); } + | Expression TEXT { $$ = ConcatExpression($1, $2); } + ; diff --git a/LSLibStats/Stats/Functor/RollConditionsParser.cs b/LSLibStats/Stats/Functor/RollConditionsParser.cs new file mode 100644 index 00000000..1da76242 --- /dev/null +++ b/LSLibStats/Stats/Functor/RollConditionsParser.cs @@ -0,0 +1,69 @@ +using QUT.Gppg; + +namespace LSLib.Stats.RollConditions; + +public class RollCondition +{ + public string TextKey; + public string Expression; +} + +public partial class RollConditionScanner +{ + public LexLocation LastLocation() + { + return new LexLocation(tokLin, tokCol, tokELin, tokECol); + } +} + +public abstract class RollConditionScanBase : AbstractScanner +{ + protected virtual bool yywrap() { return true; } +} + +public partial class RollConditionParser +{ + private readonly IStatValueValidator ExpressionValidator; + private readonly DiagnosticContext Ctx; + private readonly PropertyDiagnosticContainer Errors; + + public RollConditionParser(RollConditionScanner scnr, IStatValueValidator expressionValidator, + DiagnosticContext ctx, PropertyDiagnosticContainer errors) : base(scnr) + { + ExpressionValidator = expressionValidator; + Ctx = ctx; + Errors = errors; + } + + private string ConcatExpression(object a, object b) + { + return (string)a + " " + (string)b; + } + + private List MakeConditions() => new List(); + + private List AddCondition(object conditions, object condition) + { + var conds = conditions as List; + if (condition is string) + { + conds.Add(MakeCondition("", condition)); + } + else + { + conds.Add((RollCondition)condition); + } + return conds; + } + + private RollCondition MakeCondition(object textKey, object expression) + { + ExpressionValidator.Validate(Ctx, null, expression, Errors); + + return new RollCondition + { + TextKey = (string)textKey, + Expression = (string)expression + }; + } +} diff --git a/LSLibStats/Stats/StatValueParsers.cs b/LSLibStats/Stats/StatValueParsers.cs index 5a5d6cf5..6d853fa4 100644 --- a/LSLibStats/Stats/StatValueParsers.cs +++ b/LSLibStats/Stats/StatValueParsers.cs @@ -282,10 +282,38 @@ public override void Validate(DiagnosticContext ctx, string value, PropertyDiagn } } -public class LuaExpressionValidator : StatStringValidator +public class RequirementsValidator : StatStringValidator { public override void Validate(DiagnosticContext ctx, string value, PropertyDiagnosticContainer errors) { + var valueBytes = Encoding.UTF8.GetBytes(value); + using var buf = new MemoryStream(valueBytes); + var scanner = new Requirements.RequirementScanner(); + scanner.SetSource(buf); + var parser = new Requirements.RequirementParser(scanner); + var succeeded = parser.Parse(); + if (!succeeded) + { + // FIXME pass location to error container + var location = scanner.LastLocation(); + if (location.StartColumn != -1) + { + errors.Add($"Syntax error at or near character {location.StartColumn}"); + } + else + { + errors.Add($"Syntax error"); + } + } + } +} + +public class LuaExpressionValidator(bool allowEmpty = false) : StatStringValidator +{ + public override void Validate(DiagnosticContext ctx, string value, 
PropertyDiagnosticContainer errors) + { + if (allowEmpty && value.Trim().Length == 0) return; + var valueBytes = Encoding.UTF8.GetBytes(value); using var buf = new MemoryStream(valueBytes); var scanner = new Lua.StatLuaScanner(); From 0f605251ae4ddb176ba41d5a549d0daf3551ec46 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sun, 6 Oct 2024 14:30:23 +0200 Subject: [PATCH 113/139] Split validators, part 2 --- LSLibStats/Stats/Functor/Requirement.lex | 25 +++++++ LSLibStats/Stats/Functor/Requirement.yy | 39 +++++++++++ LSLibStats/Stats/Functor/RequirementParser.cs | 69 +++++++++++++++++++ LSLibStats/Stats/StatValueParsers.cs | 38 +++++++++- 4 files changed, 168 insertions(+), 3 deletions(-) create mode 100644 LSLibStats/Stats/Functor/Requirement.lex create mode 100644 LSLibStats/Stats/Functor/Requirement.yy create mode 100644 LSLibStats/Stats/Functor/RequirementParser.cs diff --git a/LSLibStats/Stats/Functor/Requirement.lex b/LSLibStats/Stats/Functor/Requirement.lex new file mode 100644 index 00000000..69da8172 --- /dev/null +++ b/LSLibStats/Stats/Functor/Requirement.lex @@ -0,0 +1,25 @@ +%namespace LSLib.Stats.Requirements +%visibility public +%scannertype RequirementScanner +%scanbasetype RequirementScanBase +%tokentype RequirementTokens + +letter [a-zA-Z_] +digit [0-9] +namechar [a-zA-Z0-9_] +nonseparator [^,;:()\[\]! ] + +%% + +/* Special characters */ +";" return (int)';'; +"!" return (int)'!'; +[ \t] ; + +{letter}({namechar})+ { yylval = yytext; return (int)RequirementTokens.NAME; } +(-)?{digit}({digit})* { yylval = yytext; return (int)RequirementTokens.INTEGER; } +({nonseparator})+ { yylval = yytext; return (int)RequirementTokens.TEXT; } + +%{ + yylloc = new QUT.Gppg.LexLocation(tokLin, tokCol, tokELin, tokECol); +%} diff --git a/LSLibStats/Stats/Functor/Requirement.yy b/LSLibStats/Stats/Functor/Requirement.yy new file mode 100644 index 00000000..1a846b14 --- /dev/null +++ b/LSLibStats/Stats/Functor/Requirement.yy @@ -0,0 +1,39 @@ +%namespace LSLib.Stats.Requirements +%partial +%visibility public +%parsertype RequirementParser +%tokentype RequirementTokens +%YYSTYPE System.Object + +%start Root + +/* Status/Tag name */ +%token NAME +/* Integer literal */ +%token INTEGER +/* Text-like (unquoted) literal */ +%token TEXT + +%% + +/****************************************************************** + * + * REQUIREMENTS PARSING + * + ******************************************************************/ + +Root : Requirements; + +Requirements : /* empty */ { $$ = MakeRequirements(); } + | UnaryRequirement { $$ = AddRequirement(MakeRequirements(), $1); } + | Requirements ';' + | Requirements ';' UnaryRequirement { $$ = AddRequirement($1, $3); } + ; + +UnaryRequirement : Requirement + | '!' 
Requirement { $$ = MakeNotRequirement($2); } + ; + +Requirement : NAME { $$ = MakeRequirement($1); } + | NAME INTEGER { $$ = MakeIntRequirement($1, $2); } + ; diff --git a/LSLibStats/Stats/Functor/RequirementParser.cs b/LSLibStats/Stats/Functor/RequirementParser.cs new file mode 100644 index 00000000..af4595c5 --- /dev/null +++ b/LSLibStats/Stats/Functor/RequirementParser.cs @@ -0,0 +1,69 @@ +using LSLib.Stats.Functors; +using QUT.Gppg; + +namespace LSLib.Stats.Requirements; + +public partial class RequirementScanner +{ + public LexLocation LastLocation() + { + return new LexLocation(tokLin, tokCol, tokELin, tokECol); + } +} + +public abstract class RequirementScanBase : AbstractScanner +{ + protected virtual bool yywrap() { return true; } +} + +public partial class RequirementParser +{ + public RequirementParser(RequirementScanner scnr) : base(scnr) + { + } + + private List MakeRequirements() => new List(); + + private List AddRequirement(object requirements, object requirement) + { + var req = requirements as List; + req.Add(requirement as Requirement); + return req; + } + + private Requirement MakeNotRequirement(object requirement) + { + var req = requirement as Requirement; + req.Not = true; + return req; + } + + private Requirement MakeRequirement(object name) + { + return new Requirement + { + Not = false, + RequirementName = name as string, + IntParam = 0, + TagParam = "" + }; + } + + private Requirement MakeIntRequirement(object name, object intArg) + { + var reqmtName = name as string; + + /*if (!RequirementsWithArgument.ValueToIndexMap.ContainsKey(reqmtName)) + { + OnError?.Invoke($"Requirement '{reqmtName}' doesn't need any arguments"); + }*/ + + return new Requirement + { + Not = false, + RequirementName = reqmtName, + IntParam = Int32.Parse(intArg as string), + TagParam = "" + }; + } +} diff --git a/LSLibStats/Stats/StatValueParsers.cs b/LSLibStats/Stats/StatValueParsers.cs index 6d853fa4..e80df891 100644 --- a/LSLibStats/Stats/StatValueParsers.cs +++ b/LSLibStats/Stats/StatValueParsers.cs @@ -336,6 +336,36 @@ public override void Validate(DiagnosticContext ctx, string value, PropertyDiagn } } +public class RollConditionsValidator : StatStringValidator +{ + private LuaExpressionValidator LuaValidator = new(); + + public override void Validate(DiagnosticContext ctx, string value, PropertyDiagnosticContainer errors) + { + if (value.Trim().Length == 0) return; + + var valueBytes = Encoding.UTF8.GetBytes(value); + using var buf = new MemoryStream(valueBytes); + var scanner = new RollConditionScanner(); + scanner.SetSource(buf); + var parser = new RollConditionParser(scanner, LuaValidator, ctx, errors); + var succeeded = parser.Parse(); + if (!succeeded) + { + // FIXME pass location to error container + var location = scanner.LastLocation(); + if (location.StartColumn != -1) + { + errors.Add($"Syntax error at or near character {location.StartColumn}"); + } + else + { + errors.Add($"Syntax error"); + } + } + } +} + public class UseCostsValidator(IStatReferenceValidator validator) : StatStringValidator { public override void Validate(DiagnosticContext ctx, string value, PropertyDiagnosticContainer errors) @@ -624,9 +654,11 @@ public IStatValueValidator CreateValidator(string type, StatEnumeration? 
enumTyp "ConstantFloat" or "Float" => new FloatValidator(), "String" or "FixedString" or "TranslatedString" => new StringValidator(), "Guid" => new UUIDValidator(), - "Requirements" => new ExpressionValidator("Requirements", definitions, this, ExpressionType.Functor), - "StatsFunctors" => new ExpressionValidator("Properties", definitions, this, ExpressionType.Functor), - "Lua" or "RollConditions" or "TargetConditions" or "Conditions" => new LuaExpressionValidator(), + "Requirements" => new RequirementsValidator(), + "StatsFunctors" => new ExpressionValidator("Functors", definitions, this, ExpressionType.Functor), + "Lua" => new LuaExpressionValidator(), + "TargetConditions" or "Conditions" => new LuaExpressionValidator(true), + "RollConditions" => new RollConditionsValidator(), "UseCosts" => new UseCostsValidator(ReferenceValidator), "StatReference" => new StatReferenceValidator(ReferenceValidator, constraints!), "StatusId" => new AnyParser(new List { From 84d93a96c78ff8f6a7b83e88d522829476bd5422 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sun, 6 Oct 2024 14:30:36 +0200 Subject: [PATCH 114/139] Fix Lua array parsing logic --- LSLibStats/Stats/Functor/Lua.lex | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/LSLibStats/Stats/Functor/Lua.lex b/LSLibStats/Stats/Functor/Lua.lex index abefaa93..b204fade 100644 --- a/LSLibStats/Stats/Functor/Lua.lex +++ b/LSLibStats/Stats/Functor/Lua.lex @@ -7,7 +7,7 @@ letter [a-zA-Z_] digit [0-9] namechar [a-zA-Z0-9_] -nonseparator [^,;:()\[\]!+*/^&%~|><=.# ] +nonseparator [^,;:()\[\]{}!+*/^&%~|><=.# ] %% @@ -20,7 +20,9 @@ nonseparator [^,;:()\[\]!+*/^&%~|><=.# ] "," return (int)','; ";" return (int)';'; "." return (int)'.'; -[ ] ; +"{" return (int)'{'; +"}" return (int)'}'; +[ \t] ; "nil" return (int)StatLuaTokens.LUA_RESERVED_VAL; From 06cf49f6f34855fffb8884cae3a900ccdcc5a56f Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sun, 6 Oct 2024 14:30:57 +0200 Subject: [PATCH 115/139] Fix parsing of text key functors --- LSLibStats/Stats/Functor/Functor.lex | 7 +------ LSLibStats/Stats/Functor/Functor.yy | 22 ++++++++++++++-------- LSLibStats/Stats/Functor/FunctorParser.cs | 12 ++++++++++++ LSLibStats/Stats/StatDefinitions.cs | 4 ++-- 4 files changed, 29 insertions(+), 16 deletions(-) diff --git a/LSLibStats/Stats/Functor/Functor.lex b/LSLibStats/Stats/Functor/Functor.lex index d973d429..d0e0c813 100644 --- a/LSLibStats/Stats/Functor/Functor.lex +++ b/LSLibStats/Stats/Functor/Functor.lex @@ -18,11 +18,6 @@ nonseparator [^,;:()\[\]! ] /* Reserved words */ "IF" return (int)FunctorTokens.IF; -/* Text keys */ -"CastOffhand" return (int)FunctorTokens.TEXT_KEY; -"Cast2" return (int)FunctorTokens.TEXT_KEY; -"Cast3" return (int)FunctorTokens.TEXT_KEY; - /* Stats contexts */ "ABILITY_CHECK" return (int)FunctorTokens.CONTEXT; "ACTION_RESOURCES_CHANGED" return (int)FunctorTokens.CONTEXT; @@ -78,7 +73,7 @@ nonseparator [^,;:()\[\]! ] "!" return (int)'!'; "-" return (int)'-'; "." 
return (int)'.'; -[ ] ; +[ \t] ; {letter}({namechar})+ { yylval = MakeLiteral(yytext); return (int)FunctorTokens.NAME; } (-)?{digit}({digit})* { yylval = MakeLiteral(yytext); return (int)FunctorTokens.INTEGER; } diff --git a/LSLibStats/Stats/Functor/Functor.yy b/LSLibStats/Stats/Functor/Functor.yy index 8dd4961a..80b44e78 100644 --- a/LSLibStats/Stats/Functor/Functor.yy +++ b/LSLibStats/Stats/Functor/Functor.yy @@ -18,8 +18,6 @@ %token CONTEXT /* Status/Tag name */ %token NAME -/* Known text keys */ -%token TEXT_KEY /* Integer literal */ %token INTEGER /* Text-like (unquoted) literal */ @@ -30,7 +28,7 @@ %% /* A special "trigger word" is prepended to support parsing multiple types from the same lexer/parser */ -Root : EXPR_FUNCTORS Functors { $$ = $2; } +Root : EXPR_FUNCTORS TopLevelFunctors { $$ = $2; } | EXPR_DESCRIPTION_PARAMS OptionalArgs { $$ = $2; } ; @@ -40,17 +38,25 @@ Root : EXPR_FUNCTORS Functors { $$ = $2; } * ******************************************************************/ +TopLevelFunctors : /* empty */ { $$ = MakeFunctorList(); } + | TopLevelFunctor { $$ = AddFunctor(MakeFunctorList(), $1); } + | TopLevelFunctors ';' + | TopLevelFunctors ';' TopLevelFunctor { $$ = AddFunctor($1, $3); } + ; + +TopLevelFunctor : Contexts Condition CallOrTextKeyFunctor { $$ = MakeFunctorOrTextKeyFunctors($1, $2, $3); }; + +CallOrTextKeyFunctor : NAME '[' Functors ']' { $$ = SetTextKey($3, $1); } + | Call + ; + Functors : /* empty */ { $$ = MakeFunctorList(); } | Functor { $$ = AddFunctor(MakeFunctorList(), $1); } | Functors ';' | Functors ';' Functor { $$ = AddFunctor($1, $3); } ; -TextKeyFunctors : TEXT_KEY '[' Functors ']' { $$ = SetTextKey($3, $1); }; - -Functor : Contexts Condition Call { $$ = MakeFunctor($1, $2, $3); } - | TextKeyFunctors - ; +Functor : Contexts Condition Call { $$ = MakeFunctor($1, $2, $3); }; Contexts : /* empty */ | ContextList { $$ = $1; } diff --git a/LSLibStats/Stats/Functor/FunctorParser.cs b/LSLibStats/Stats/Functor/FunctorParser.cs index d7395999..05513bb1 100644 --- a/LSLibStats/Stats/Functor/FunctorParser.cs +++ b/LSLibStats/Stats/Functor/FunctorParser.cs @@ -183,6 +183,18 @@ private List AddFunctor(object functorss, object functors) Action = action as FunctorAction }; + private object MakeFunctorOrTextKeyFunctors(object context, object condition, object action) + { + if (action is FunctorAction) + { + return MakeFunctor(context, condition, action); + } + else + { + return action; + } + } + private List MakeArgumentList() => new(); private List AddArgument(object arguments, object arg) diff --git a/LSLibStats/Stats/StatDefinitions.cs b/LSLibStats/Stats/StatDefinitions.cs index 0e337c31..3ea8ddb2 100644 --- a/LSLibStats/Stats/StatDefinitions.cs +++ b/LSLibStats/Stats/StatDefinitions.cs @@ -24,9 +24,9 @@ public void AddItem(int index, string value) ValueToIndexMap.TryAdd(value, index); } - public void AddItem(string value) + public void AddItem(string label) { - AddItem(Values.Count, value); + AddItem(Values.Count, label); } } From c130b177a7a9f2750a7e40332c239cae7096a0cc Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sun, 6 Oct 2024 14:31:13 +0200 Subject: [PATCH 116/139] Add support for patch 7 ValueLists.txt format --- LSLibStats/Stats/StatDefinitions.cs | 24 ++++++++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/LSLibStats/Stats/StatDefinitions.cs b/LSLibStats/Stats/StatDefinitions.cs index 3ea8ddb2..24fbb2a4 100644 --- a/LSLibStats/Stats/StatDefinitions.cs +++ b/LSLibStats/Stats/StatDefinitions.cs @@ -1,5 +1,6 @@ using 
LSLib.Stats.Functors; using System.IO; +using System.Text.RegularExpressions; using System.Xml; using System.Xml.Linq; @@ -67,7 +68,7 @@ public class StatFunctorType(string name, int requiredArgs, List Args = args; } -public class StatDefinitionRepository +public partial class StatDefinitionRepository { public readonly Dictionary Enumerations = []; public readonly Dictionary Types = []; @@ -224,6 +225,7 @@ public void LoadDefinitions(Stream stream) public void LoadEnumerations(Stream stream) { + var valueRe = EnumerationValueRegEx(); StatEnumeration? curEnum = null; string? line; @@ -239,12 +241,26 @@ public void LoadEnumerations(Stream stream) curEnum = new StatEnumeration(name); Enumerations.Add(curEnum.Name, curEnum); } - else if (trimmed.StartsWith("value ")) + else { - var label = trimmed[7..^1]; - curEnum!.AddItem(label); + var match = valueRe.Match(trimmed); + if (match.Success) + { + var value = match.Groups["value"].Value; + if (value != null) + { + curEnum!.AddItem(Int32.Parse(value), match.Groups["label"].Value); + } + else + { + curEnum!.AddItem(match.Groups["label"].Value); + } + } } } } } + + [GeneratedRegex("^value \"(? private void InitializeComponent() { - this.gr2ModeTabControl = new System.Windows.Forms.TabControl(); - this.gr2SingleFileTab = new System.Windows.Forms.TabPage(); - this.lblOutputPath = new System.Windows.Forms.Label(); - this.saveOutputBtn = new System.Windows.Forms.Button(); - this.inputPath = new System.Windows.Forms.TextBox(); - this.lblSrcPath = new System.Windows.Forms.Label(); - this.inputFileBrowseBtn = new System.Windows.Forms.Button(); - this.loadInputBtn = new System.Windows.Forms.Button(); - this.outputPath = new System.Windows.Forms.TextBox(); - this.outputFileBrowserBtn = new System.Windows.Forms.Button(); - this.gr2BatchTab = new System.Windows.Forms.TabPage(); - this.gr2BatchProgressLabel = new System.Windows.Forms.Label(); - this.gr2BatchInputBrowseBtn = new System.Windows.Forms.Button(); - this.gr2BatchOutputBrowseBtn = new System.Windows.Forms.Button(); - this.gr2BatchProgressBar = new System.Windows.Forms.ProgressBar(); - this.label23 = new System.Windows.Forms.Label(); - this.gr2BatchInputFormat = new System.Windows.Forms.ComboBox(); - this.label22 = new System.Windows.Forms.Label(); - this.gr2BatchOutputFormat = new System.Windows.Forms.ComboBox(); - this.label21 = new System.Windows.Forms.Label(); - this.label19 = new System.Windows.Forms.Label(); - this.gr2BatchConvertBtn = new System.Windows.Forms.Button(); - this.gr2BatchInputDir = new System.Windows.Forms.TextBox(); - this.label20 = new System.Windows.Forms.Label(); - this.gr2BatchOutputDir = new System.Windows.Forms.TextBox(); - this.groupBox2 = new System.Windows.Forms.GroupBox(); - this.flipMeshes = new System.Windows.Forms.CheckBox(); - this.flipSkeletons = new System.Windows.Forms.CheckBox(); - this.flipUVs = new System.Windows.Forms.CheckBox(); - this.label2 = new System.Windows.Forms.Label(); - this.exportableObjects = new System.Windows.Forms.ListView(); - this.exportableName = ((System.Windows.Forms.ColumnHeader)(new System.Windows.Forms.ColumnHeader())); - this.exportableType = ((System.Windows.Forms.ColumnHeader)(new System.Windows.Forms.ColumnHeader())); - this.filterUVs = new System.Windows.Forms.CheckBox(); - this.groupBox1 = new System.Windows.Forms.GroupBox(); - this.conformCopySkeletons = new System.Windows.Forms.CheckBox(); - this.meshProxy = new System.Windows.Forms.CheckBox(); - this.meshCloth = new System.Windows.Forms.CheckBox(); - this.meshRigid = new 
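
Note on the patch 7 ValueLists.txt change: the GeneratedRegex pattern is truncated above, so the snippet below is only a minimal sketch of the idea, not the actual LSLib pattern. It assumes a named-group regex exposing "label" and "value" groups (the group names read by the LoadEnumerations hunk) and guesses a comma-separated numeric suffix as the syntax for an explicit index; the ValueListLineSketch class, the ParseValueLine helper and the separator are illustrative inventions.

using System.Text.RegularExpressions;

static class ValueListLineSketch
{
    // Assumed line shapes: value "Label"   and   value "Label",5   (the comma separator is a guess).
    private static readonly Regex ValueLine =
        new Regex("^value \"(?<label>[^\"]+)\"(\\s*,\\s*(?<value>[0-9]+))?$");

    // Returns the label plus an optional explicit index, or null when the line is not a value line.
    public static (string Label, int? Index)? ParseValueLine(string trimmed)
    {
        var match = ValueLine.Match(trimmed);
        if (!match.Success)
            return null;

        var label = match.Groups["label"].Value;
        // Groups["value"].Success (rather than a null check) distinguishes the two forms,
        // since an unmatched optional group yields an empty string, never null.
        if (match.Groups["value"].Success)
            return (label, int.Parse(match.Groups["value"].Value));

        return (label, null);
    }
}

In LoadEnumerations terms, a null Index would correspond to the sequential AddItem(label) overload and a non-null one to AddItem(index, label), i.e. the two overloads shown in the StatDefinitions.cs hunk of the previous commit.
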
System.Windows.Forms.CheckBox(); - this.applyBasisTransforms = new System.Windows.Forms.CheckBox(); - this.conformantGR2BrowseBtn = new System.Windows.Forms.Button(); - this.conformantGR2Path = new System.Windows.Forms.TextBox(); - this.conformToOriginal = new System.Windows.Forms.CheckBox(); - this.buildDummySkeleton = new System.Windows.Forms.CheckBox(); - this.label1 = new System.Windows.Forms.Label(); - this.gr2OutputDirDlg = new System.Windows.Forms.FolderBrowserDialog(); - this.gr2InputDirDlg = new System.Windows.Forms.FolderBrowserDialog(); - this.conformSkeletonFileDlg = new System.Windows.Forms.OpenFileDialog(); - this.outputFileDlg = new System.Windows.Forms.SaveFileDialog(); - this.inputFileDlg = new System.Windows.Forms.OpenFileDialog(); - this.resourceFormats = new ConverterApp.ExportItemSelection(); - this.gr2ModeTabControl.SuspendLayout(); - this.gr2SingleFileTab.SuspendLayout(); - this.gr2BatchTab.SuspendLayout(); - this.groupBox2.SuspendLayout(); - this.groupBox1.SuspendLayout(); - this.SuspendLayout(); + gr2ModeTabControl = new System.Windows.Forms.TabControl(); + gr2SingleFileTab = new System.Windows.Forms.TabPage(); + lblOutputPath = new System.Windows.Forms.Label(); + saveOutputBtn = new System.Windows.Forms.Button(); + inputPath = new System.Windows.Forms.TextBox(); + lblSrcPath = new System.Windows.Forms.Label(); + inputFileBrowseBtn = new System.Windows.Forms.Button(); + loadInputBtn = new System.Windows.Forms.Button(); + outputPath = new System.Windows.Forms.TextBox(); + outputFileBrowserBtn = new System.Windows.Forms.Button(); + gr2BatchTab = new System.Windows.Forms.TabPage(); + gr2BatchProgressLabel = new System.Windows.Forms.Label(); + gr2BatchInputBrowseBtn = new System.Windows.Forms.Button(); + gr2BatchOutputBrowseBtn = new System.Windows.Forms.Button(); + gr2BatchProgressBar = new System.Windows.Forms.ProgressBar(); + label23 = new System.Windows.Forms.Label(); + gr2BatchInputFormat = new System.Windows.Forms.ComboBox(); + label22 = new System.Windows.Forms.Label(); + gr2BatchOutputFormat = new System.Windows.Forms.ComboBox(); + label21 = new System.Windows.Forms.Label(); + label19 = new System.Windows.Forms.Label(); + gr2BatchConvertBtn = new System.Windows.Forms.Button(); + gr2BatchInputDir = new System.Windows.Forms.TextBox(); + label20 = new System.Windows.Forms.Label(); + gr2BatchOutputDir = new System.Windows.Forms.TextBox(); + groupBox2 = new System.Windows.Forms.GroupBox(); + flipMeshes = new System.Windows.Forms.CheckBox(); + flipSkeletons = new System.Windows.Forms.CheckBox(); + flipUVs = new System.Windows.Forms.CheckBox(); + label2 = new System.Windows.Forms.Label(); + exportableObjects = new System.Windows.Forms.ListView(); + exportableName = new System.Windows.Forms.ColumnHeader(); + exportableType = new System.Windows.Forms.ColumnHeader(); + filterUVs = new System.Windows.Forms.CheckBox(); + groupBox1 = new System.Windows.Forms.GroupBox(); + conformCopySkeletons = new System.Windows.Forms.CheckBox(); + meshProxy = new System.Windows.Forms.CheckBox(); + meshCloth = new System.Windows.Forms.CheckBox(); + meshRigid = new System.Windows.Forms.CheckBox(); + applyBasisTransforms = new System.Windows.Forms.CheckBox(); + conformantGR2BrowseBtn = new System.Windows.Forms.Button(); + conformantGR2Path = new System.Windows.Forms.TextBox(); + conformToOriginal = new System.Windows.Forms.CheckBox(); + buildDummySkeleton = new System.Windows.Forms.CheckBox(); + resourceFormats = new ExportItemSelection(); + label1 = new System.Windows.Forms.Label(); + 
gr2OutputDirDlg = new System.Windows.Forms.FolderBrowserDialog(); + gr2InputDirDlg = new System.Windows.Forms.FolderBrowserDialog(); + conformSkeletonFileDlg = new System.Windows.Forms.OpenFileDialog(); + outputFileDlg = new System.Windows.Forms.SaveFileDialog(); + inputFileDlg = new System.Windows.Forms.OpenFileDialog(); + gr2ModeTabControl.SuspendLayout(); + gr2SingleFileTab.SuspendLayout(); + gr2BatchTab.SuspendLayout(); + groupBox2.SuspendLayout(); + groupBox1.SuspendLayout(); + SuspendLayout(); // // gr2ModeTabControl // - this.gr2ModeTabControl.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) - | System.Windows.Forms.AnchorStyles.Right))); - this.gr2ModeTabControl.Controls.Add(this.gr2SingleFileTab); - this.gr2ModeTabControl.Controls.Add(this.gr2BatchTab); - this.gr2ModeTabControl.Location = new System.Drawing.Point(7, 9); - this.gr2ModeTabControl.Name = "gr2ModeTabControl"; - this.gr2ModeTabControl.SelectedIndex = 0; - this.gr2ModeTabControl.Size = new System.Drawing.Size(887, 159); - this.gr2ModeTabControl.TabIndex = 38; + gr2ModeTabControl.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left | System.Windows.Forms.AnchorStyles.Right; + gr2ModeTabControl.Controls.Add(gr2SingleFileTab); + gr2ModeTabControl.Controls.Add(gr2BatchTab); + gr2ModeTabControl.Location = new System.Drawing.Point(9, 14); + gr2ModeTabControl.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + gr2ModeTabControl.Name = "gr2ModeTabControl"; + gr2ModeTabControl.SelectedIndex = 0; + gr2ModeTabControl.Size = new System.Drawing.Size(1183, 245); + gr2ModeTabControl.TabIndex = 38; // // gr2SingleFileTab // - this.gr2SingleFileTab.Controls.Add(this.lblOutputPath); - this.gr2SingleFileTab.Controls.Add(this.saveOutputBtn); - this.gr2SingleFileTab.Controls.Add(this.inputPath); - this.gr2SingleFileTab.Controls.Add(this.lblSrcPath); - this.gr2SingleFileTab.Controls.Add(this.inputFileBrowseBtn); - this.gr2SingleFileTab.Controls.Add(this.loadInputBtn); - this.gr2SingleFileTab.Controls.Add(this.outputPath); - this.gr2SingleFileTab.Controls.Add(this.outputFileBrowserBtn); - this.gr2SingleFileTab.Location = new System.Drawing.Point(4, 22); - this.gr2SingleFileTab.Name = "gr2SingleFileTab"; - this.gr2SingleFileTab.Padding = new System.Windows.Forms.Padding(3); - this.gr2SingleFileTab.Size = new System.Drawing.Size(879, 133); - this.gr2SingleFileTab.TabIndex = 0; - this.gr2SingleFileTab.Text = "Single File"; - this.gr2SingleFileTab.UseVisualStyleBackColor = true; + gr2SingleFileTab.Controls.Add(lblOutputPath); + gr2SingleFileTab.Controls.Add(saveOutputBtn); + gr2SingleFileTab.Controls.Add(inputPath); + gr2SingleFileTab.Controls.Add(lblSrcPath); + gr2SingleFileTab.Controls.Add(inputFileBrowseBtn); + gr2SingleFileTab.Controls.Add(loadInputBtn); + gr2SingleFileTab.Controls.Add(outputPath); + gr2SingleFileTab.Controls.Add(outputFileBrowserBtn); + gr2SingleFileTab.Location = new System.Drawing.Point(4, 29); + gr2SingleFileTab.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + gr2SingleFileTab.Name = "gr2SingleFileTab"; + gr2SingleFileTab.Padding = new System.Windows.Forms.Padding(4, 5, 4, 5); + gr2SingleFileTab.Size = new System.Drawing.Size(1175, 212); + gr2SingleFileTab.TabIndex = 0; + gr2SingleFileTab.Text = "Single File"; + gr2SingleFileTab.UseVisualStyleBackColor = true; // // lblOutputPath // - this.lblOutputPath.AutoSize = true; - this.lblOutputPath.Location = new System.Drawing.Point(6, 46); - 
this.lblOutputPath.Name = "lblOutputPath"; - this.lblOutputPath.Size = new System.Drawing.Size(82, 13); - this.lblOutputPath.TabIndex = 29; - this.lblOutputPath.Text = "Output file path:"; + lblOutputPath.AutoSize = true; + lblOutputPath.Location = new System.Drawing.Point(8, 71); + lblOutputPath.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); + lblOutputPath.Name = "lblOutputPath"; + lblOutputPath.Size = new System.Drawing.Size(117, 20); + lblOutputPath.TabIndex = 29; + lblOutputPath.Text = "Output file path:"; // // saveOutputBtn // - this.saveOutputBtn.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right))); - this.saveOutputBtn.Enabled = false; - this.saveOutputBtn.Location = new System.Drawing.Point(721, 61); - this.saveOutputBtn.Name = "saveOutputBtn"; - this.saveOutputBtn.Size = new System.Drawing.Size(141, 23); - this.saveOutputBtn.TabIndex = 34; - this.saveOutputBtn.Text = "Export"; - this.saveOutputBtn.UseVisualStyleBackColor = true; - this.saveOutputBtn.Click += new System.EventHandler(this.saveOutputBtn_Click); + saveOutputBtn.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right; + saveOutputBtn.Enabled = false; + saveOutputBtn.Location = new System.Drawing.Point(961, 94); + saveOutputBtn.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + saveOutputBtn.Name = "saveOutputBtn"; + saveOutputBtn.Size = new System.Drawing.Size(188, 35); + saveOutputBtn.TabIndex = 34; + saveOutputBtn.Text = "Export"; + saveOutputBtn.UseVisualStyleBackColor = true; + saveOutputBtn.Click += saveOutputBtn_Click; // // inputPath // - this.inputPath.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) - | System.Windows.Forms.AnchorStyles.Right))); - this.inputPath.Location = new System.Drawing.Point(9, 19); - this.inputPath.Name = "inputPath"; - this.inputPath.Size = new System.Drawing.Size(659, 20); - this.inputPath.TabIndex = 25; + inputPath.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left | System.Windows.Forms.AnchorStyles.Right; + inputPath.Location = new System.Drawing.Point(12, 29); + inputPath.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + inputPath.Name = "inputPath"; + inputPath.Size = new System.Drawing.Size(877, 27); + inputPath.TabIndex = 25; // // lblSrcPath // - this.lblSrcPath.AutoSize = true; - this.lblSrcPath.Location = new System.Drawing.Point(6, 3); - this.lblSrcPath.Name = "lblSrcPath"; - this.lblSrcPath.Size = new System.Drawing.Size(74, 13); - this.lblSrcPath.TabIndex = 26; - this.lblSrcPath.Text = "Input file path:"; + lblSrcPath.AutoSize = true; + lblSrcPath.Location = new System.Drawing.Point(8, 5); + lblSrcPath.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); + lblSrcPath.Name = "lblSrcPath"; + lblSrcPath.Size = new System.Drawing.Size(105, 20); + lblSrcPath.TabIndex = 26; + lblSrcPath.Text = "Input file path:"; // // inputFileBrowseBtn // - this.inputFileBrowseBtn.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right))); - this.inputFileBrowseBtn.Location = new System.Drawing.Point(666, 18); - this.inputFileBrowseBtn.Name = "inputFileBrowseBtn"; - this.inputFileBrowseBtn.Size = new System.Drawing.Size(41, 22); - this.inputFileBrowseBtn.TabIndex = 27; - this.inputFileBrowseBtn.Text = "..."; - this.inputFileBrowseBtn.UseVisualStyleBackColor = true; - 
this.inputFileBrowseBtn.Click += new System.EventHandler(this.inputFileBrowseBtn_Click); + inputFileBrowseBtn.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right; + inputFileBrowseBtn.Location = new System.Drawing.Point(888, 28); + inputFileBrowseBtn.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + inputFileBrowseBtn.Name = "inputFileBrowseBtn"; + inputFileBrowseBtn.Size = new System.Drawing.Size(55, 34); + inputFileBrowseBtn.TabIndex = 27; + inputFileBrowseBtn.Text = "..."; + inputFileBrowseBtn.UseVisualStyleBackColor = true; + inputFileBrowseBtn.Click += inputFileBrowseBtn_Click; // // loadInputBtn // - this.loadInputBtn.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right))); - this.loadInputBtn.Location = new System.Drawing.Point(721, 18); - this.loadInputBtn.Name = "loadInputBtn"; - this.loadInputBtn.Size = new System.Drawing.Size(141, 23); - this.loadInputBtn.TabIndex = 31; - this.loadInputBtn.Text = "Import"; - this.loadInputBtn.UseVisualStyleBackColor = true; - this.loadInputBtn.Click += new System.EventHandler(this.loadInputBtn_Click); + loadInputBtn.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right; + loadInputBtn.Location = new System.Drawing.Point(961, 28); + loadInputBtn.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + loadInputBtn.Name = "loadInputBtn"; + loadInputBtn.Size = new System.Drawing.Size(188, 35); + loadInputBtn.TabIndex = 31; + loadInputBtn.Text = "Import"; + loadInputBtn.UseVisualStyleBackColor = true; + loadInputBtn.Click += loadInputBtn_Click; // // outputPath // - this.outputPath.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) - | System.Windows.Forms.AnchorStyles.Right))); - this.outputPath.Location = new System.Drawing.Point(9, 62); - this.outputPath.Name = "outputPath"; - this.outputPath.Size = new System.Drawing.Size(659, 20); - this.outputPath.TabIndex = 28; + outputPath.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left | System.Windows.Forms.AnchorStyles.Right; + outputPath.Location = new System.Drawing.Point(12, 95); + outputPath.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + outputPath.Name = "outputPath"; + outputPath.Size = new System.Drawing.Size(877, 27); + outputPath.TabIndex = 28; // // outputFileBrowserBtn // - this.outputFileBrowserBtn.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right))); - this.outputFileBrowserBtn.Location = new System.Drawing.Point(666, 61); - this.outputFileBrowserBtn.Name = "outputFileBrowserBtn"; - this.outputFileBrowserBtn.Size = new System.Drawing.Size(41, 22); - this.outputFileBrowserBtn.TabIndex = 30; - this.outputFileBrowserBtn.Text = "..."; - this.outputFileBrowserBtn.UseVisualStyleBackColor = true; - this.outputFileBrowserBtn.Click += new System.EventHandler(this.outputFileBrowserBtn_Click); + outputFileBrowserBtn.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right; + outputFileBrowserBtn.Location = new System.Drawing.Point(888, 94); + outputFileBrowserBtn.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + outputFileBrowserBtn.Name = "outputFileBrowserBtn"; + outputFileBrowserBtn.Size = new System.Drawing.Size(55, 34); + outputFileBrowserBtn.TabIndex = 30; + outputFileBrowserBtn.Text = "..."; + 
outputFileBrowserBtn.UseVisualStyleBackColor = true; + outputFileBrowserBtn.Click += outputFileBrowserBtn_Click; // // gr2BatchTab // - this.gr2BatchTab.Controls.Add(this.gr2BatchProgressLabel); - this.gr2BatchTab.Controls.Add(this.gr2BatchInputBrowseBtn); - this.gr2BatchTab.Controls.Add(this.gr2BatchOutputBrowseBtn); - this.gr2BatchTab.Controls.Add(this.gr2BatchProgressBar); - this.gr2BatchTab.Controls.Add(this.label23); - this.gr2BatchTab.Controls.Add(this.gr2BatchInputFormat); - this.gr2BatchTab.Controls.Add(this.label22); - this.gr2BatchTab.Controls.Add(this.gr2BatchOutputFormat); - this.gr2BatchTab.Controls.Add(this.label21); - this.gr2BatchTab.Controls.Add(this.label19); - this.gr2BatchTab.Controls.Add(this.gr2BatchConvertBtn); - this.gr2BatchTab.Controls.Add(this.gr2BatchInputDir); - this.gr2BatchTab.Controls.Add(this.label20); - this.gr2BatchTab.Controls.Add(this.gr2BatchOutputDir); - this.gr2BatchTab.Location = new System.Drawing.Point(4, 22); - this.gr2BatchTab.Name = "gr2BatchTab"; - this.gr2BatchTab.Padding = new System.Windows.Forms.Padding(3); - this.gr2BatchTab.Size = new System.Drawing.Size(879, 133); - this.gr2BatchTab.TabIndex = 1; - this.gr2BatchTab.Text = "Batch"; - this.gr2BatchTab.UseVisualStyleBackColor = true; + gr2BatchTab.Controls.Add(gr2BatchProgressLabel); + gr2BatchTab.Controls.Add(gr2BatchInputBrowseBtn); + gr2BatchTab.Controls.Add(gr2BatchOutputBrowseBtn); + gr2BatchTab.Controls.Add(gr2BatchProgressBar); + gr2BatchTab.Controls.Add(label23); + gr2BatchTab.Controls.Add(gr2BatchInputFormat); + gr2BatchTab.Controls.Add(label22); + gr2BatchTab.Controls.Add(gr2BatchOutputFormat); + gr2BatchTab.Controls.Add(label21); + gr2BatchTab.Controls.Add(label19); + gr2BatchTab.Controls.Add(gr2BatchConvertBtn); + gr2BatchTab.Controls.Add(gr2BatchInputDir); + gr2BatchTab.Controls.Add(label20); + gr2BatchTab.Controls.Add(gr2BatchOutputDir); + gr2BatchTab.Location = new System.Drawing.Point(4, 29); + gr2BatchTab.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + gr2BatchTab.Name = "gr2BatchTab"; + gr2BatchTab.Padding = new System.Windows.Forms.Padding(4, 5, 4, 5); + gr2BatchTab.Size = new System.Drawing.Size(1175, 212); + gr2BatchTab.TabIndex = 1; + gr2BatchTab.Text = "Batch"; + gr2BatchTab.UseVisualStyleBackColor = true; // // gr2BatchProgressLabel // - this.gr2BatchProgressLabel.AutoSize = true; - this.gr2BatchProgressLabel.Location = new System.Drawing.Point(82, 88); - this.gr2BatchProgressLabel.Name = "gr2BatchProgressLabel"; - this.gr2BatchProgressLabel.Size = new System.Drawing.Size(0, 13); - this.gr2BatchProgressLabel.TabIndex = 49; + gr2BatchProgressLabel.AutoSize = true; + gr2BatchProgressLabel.Location = new System.Drawing.Point(109, 135); + gr2BatchProgressLabel.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); + gr2BatchProgressLabel.Name = "gr2BatchProgressLabel"; + gr2BatchProgressLabel.Size = new System.Drawing.Size(0, 20); + gr2BatchProgressLabel.TabIndex = 49; // // gr2BatchInputBrowseBtn // - this.gr2BatchInputBrowseBtn.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right))); - this.gr2BatchInputBrowseBtn.Location = new System.Drawing.Point(822, 17); - this.gr2BatchInputBrowseBtn.Name = "gr2BatchInputBrowseBtn"; - this.gr2BatchInputBrowseBtn.Size = new System.Drawing.Size(41, 23); - this.gr2BatchInputBrowseBtn.TabIndex = 37; - this.gr2BatchInputBrowseBtn.Text = "..."; - this.gr2BatchInputBrowseBtn.UseVisualStyleBackColor = true; - this.gr2BatchInputBrowseBtn.Click += new 
System.EventHandler(this.GR2BatchInputBrowseBtn_Click); + gr2BatchInputBrowseBtn.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right; + gr2BatchInputBrowseBtn.Location = new System.Drawing.Point(1096, 26); + gr2BatchInputBrowseBtn.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + gr2BatchInputBrowseBtn.Name = "gr2BatchInputBrowseBtn"; + gr2BatchInputBrowseBtn.Size = new System.Drawing.Size(55, 35); + gr2BatchInputBrowseBtn.TabIndex = 37; + gr2BatchInputBrowseBtn.Text = "..."; + gr2BatchInputBrowseBtn.UseVisualStyleBackColor = true; + gr2BatchInputBrowseBtn.Click += GR2BatchInputBrowseBtn_Click; // // gr2BatchOutputBrowseBtn // - this.gr2BatchOutputBrowseBtn.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right))); - this.gr2BatchOutputBrowseBtn.Location = new System.Drawing.Point(822, 60); - this.gr2BatchOutputBrowseBtn.Name = "gr2BatchOutputBrowseBtn"; - this.gr2BatchOutputBrowseBtn.Size = new System.Drawing.Size(41, 23); - this.gr2BatchOutputBrowseBtn.TabIndex = 40; - this.gr2BatchOutputBrowseBtn.Text = "..."; - this.gr2BatchOutputBrowseBtn.UseVisualStyleBackColor = true; - this.gr2BatchOutputBrowseBtn.Click += new System.EventHandler(this.GR2BatchOutputBrowseBtn_Click); + gr2BatchOutputBrowseBtn.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right; + gr2BatchOutputBrowseBtn.Location = new System.Drawing.Point(1096, 92); + gr2BatchOutputBrowseBtn.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + gr2BatchOutputBrowseBtn.Name = "gr2BatchOutputBrowseBtn"; + gr2BatchOutputBrowseBtn.Size = new System.Drawing.Size(55, 35); + gr2BatchOutputBrowseBtn.TabIndex = 40; + gr2BatchOutputBrowseBtn.Text = "..."; + gr2BatchOutputBrowseBtn.UseVisualStyleBackColor = true; + gr2BatchOutputBrowseBtn.Click += GR2BatchOutputBrowseBtn_Click; // // gr2BatchProgressBar // - this.gr2BatchProgressBar.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) - | System.Windows.Forms.AnchorStyles.Right))); - this.gr2BatchProgressBar.Location = new System.Drawing.Point(9, 104); - this.gr2BatchProgressBar.Name = "gr2BatchProgressBar"; - this.gr2BatchProgressBar.Size = new System.Drawing.Size(700, 23); - this.gr2BatchProgressBar.TabIndex = 47; + gr2BatchProgressBar.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left | System.Windows.Forms.AnchorStyles.Right; + gr2BatchProgressBar.Location = new System.Drawing.Point(12, 160); + gr2BatchProgressBar.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + gr2BatchProgressBar.Name = "gr2BatchProgressBar"; + gr2BatchProgressBar.Size = new System.Drawing.Size(933, 35); + gr2BatchProgressBar.TabIndex = 47; // // label23 // - this.label23.AutoSize = true; - this.label23.Location = new System.Drawing.Point(6, 88); - this.label23.Name = "label23"; - this.label23.Size = new System.Drawing.Size(51, 13); - this.label23.TabIndex = 48; - this.label23.Text = "Progress:"; + label23.AutoSize = true; + label23.Location = new System.Drawing.Point(8, 135); + label23.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); + label23.Name = "label23"; + label23.Size = new System.Drawing.Size(68, 20); + label23.TabIndex = 48; + label23.Text = "Progress:"; // // gr2BatchInputFormat // - this.gr2BatchInputFormat.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList; - this.gr2BatchInputFormat.FormattingEnabled = true; - 
this.gr2BatchInputFormat.Items.AddRange(new object[] { - "GR2", - "DAE"}); - this.gr2BatchInputFormat.Location = new System.Drawing.Point(9, 19); - this.gr2BatchInputFormat.Name = "gr2BatchInputFormat"; - this.gr2BatchInputFormat.Size = new System.Drawing.Size(67, 21); - this.gr2BatchInputFormat.TabIndex = 46; + gr2BatchInputFormat.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList; + gr2BatchInputFormat.FormattingEnabled = true; + gr2BatchInputFormat.Items.AddRange(new object[] { "GR2", "DAE", "GLTF", "GLB" }); + gr2BatchInputFormat.Location = new System.Drawing.Point(12, 29); + gr2BatchInputFormat.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + gr2BatchInputFormat.Name = "gr2BatchInputFormat"; + gr2BatchInputFormat.Size = new System.Drawing.Size(88, 28); + gr2BatchInputFormat.TabIndex = 46; // // label22 // - this.label22.AutoSize = true; - this.label22.Location = new System.Drawing.Point(6, 3); - this.label22.Name = "label22"; - this.label22.Size = new System.Drawing.Size(66, 13); - this.label22.TabIndex = 45; - this.label22.Text = "Input format:"; + label22.AutoSize = true; + label22.Location = new System.Drawing.Point(8, 5); + label22.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); + label22.Name = "label22"; + label22.Size = new System.Drawing.Size(95, 20); + label22.TabIndex = 45; + label22.Text = "Input format:"; // // gr2BatchOutputFormat // - this.gr2BatchOutputFormat.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList; - this.gr2BatchOutputFormat.FormattingEnabled = true; - this.gr2BatchOutputFormat.Items.AddRange(new object[] { - "GR2", - "DAE"}); - this.gr2BatchOutputFormat.Location = new System.Drawing.Point(9, 62); - this.gr2BatchOutputFormat.Name = "gr2BatchOutputFormat"; - this.gr2BatchOutputFormat.Size = new System.Drawing.Size(67, 21); - this.gr2BatchOutputFormat.TabIndex = 44; + gr2BatchOutputFormat.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList; + gr2BatchOutputFormat.FormattingEnabled = true; + gr2BatchOutputFormat.Items.AddRange(new object[] { "GR2", "DAE", "GLTF", "GLB" }); + gr2BatchOutputFormat.Location = new System.Drawing.Point(12, 95); + gr2BatchOutputFormat.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + gr2BatchOutputFormat.Name = "gr2BatchOutputFormat"; + gr2BatchOutputFormat.Size = new System.Drawing.Size(88, 28); + gr2BatchOutputFormat.TabIndex = 44; // // label21 // - this.label21.AutoSize = true; - this.label21.Location = new System.Drawing.Point(6, 46); - this.label21.Name = "label21"; - this.label21.Size = new System.Drawing.Size(74, 13); - this.label21.TabIndex = 43; - this.label21.Text = "Output format:"; + label21.AutoSize = true; + label21.Location = new System.Drawing.Point(8, 71); + label21.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); + label21.Name = "label21"; + label21.Size = new System.Drawing.Size(107, 20); + label21.TabIndex = 43; + label21.Text = "Output format:"; // // label19 // - this.label19.AutoSize = true; - this.label19.Location = new System.Drawing.Point(79, 46); - this.label19.Name = "label19"; - this.label19.Size = new System.Drawing.Size(85, 13); - this.label19.TabIndex = 39; - this.label19.Text = "Output directory:"; + label19.AutoSize = true; + label19.Location = new System.Drawing.Point(105, 71); + label19.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); + label19.Name = "label19"; + label19.Size = new System.Drawing.Size(121, 20); + label19.TabIndex = 39; + label19.Text = "Output directory:"; // // gr2BatchConvertBtn // - 
this.gr2BatchConvertBtn.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right))); - this.gr2BatchConvertBtn.Location = new System.Drawing.Point(723, 104); - this.gr2BatchConvertBtn.Name = "gr2BatchConvertBtn"; - this.gr2BatchConvertBtn.Size = new System.Drawing.Size(141, 23); - this.gr2BatchConvertBtn.TabIndex = 42; - this.gr2BatchConvertBtn.Text = "Convert"; - this.gr2BatchConvertBtn.UseVisualStyleBackColor = true; - this.gr2BatchConvertBtn.Click += new System.EventHandler(this.GR2BatchConvertBtn_Click); + gr2BatchConvertBtn.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right; + gr2BatchConvertBtn.Location = new System.Drawing.Point(964, 160); + gr2BatchConvertBtn.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + gr2BatchConvertBtn.Name = "gr2BatchConvertBtn"; + gr2BatchConvertBtn.Size = new System.Drawing.Size(188, 35); + gr2BatchConvertBtn.TabIndex = 42; + gr2BatchConvertBtn.Text = "Convert"; + gr2BatchConvertBtn.UseVisualStyleBackColor = true; + gr2BatchConvertBtn.Click += GR2BatchConvertBtn_Click; // // gr2BatchInputDir // - this.gr2BatchInputDir.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) - | System.Windows.Forms.AnchorStyles.Right))); - this.gr2BatchInputDir.Location = new System.Drawing.Point(82, 19); - this.gr2BatchInputDir.Name = "gr2BatchInputDir"; - this.gr2BatchInputDir.Size = new System.Drawing.Size(742, 20); - this.gr2BatchInputDir.TabIndex = 35; + gr2BatchInputDir.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left | System.Windows.Forms.AnchorStyles.Right; + gr2BatchInputDir.Location = new System.Drawing.Point(109, 29); + gr2BatchInputDir.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + gr2BatchInputDir.Name = "gr2BatchInputDir"; + gr2BatchInputDir.Size = new System.Drawing.Size(988, 27); + gr2BatchInputDir.TabIndex = 35; // // label20 // - this.label20.AutoSize = true; - this.label20.Location = new System.Drawing.Point(79, 3); - this.label20.Name = "label20"; - this.label20.Size = new System.Drawing.Size(77, 13); - this.label20.TabIndex = 36; - this.label20.Text = "Input directory:"; + label20.AutoSize = true; + label20.Location = new System.Drawing.Point(105, 5); + label20.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); + label20.Name = "label20"; + label20.Size = new System.Drawing.Size(109, 20); + label20.TabIndex = 36; + label20.Text = "Input directory:"; // // gr2BatchOutputDir // - this.gr2BatchOutputDir.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) - | System.Windows.Forms.AnchorStyles.Right))); - this.gr2BatchOutputDir.Location = new System.Drawing.Point(82, 62); - this.gr2BatchOutputDir.Name = "gr2BatchOutputDir"; - this.gr2BatchOutputDir.Size = new System.Drawing.Size(742, 20); - this.gr2BatchOutputDir.TabIndex = 38; + gr2BatchOutputDir.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left | System.Windows.Forms.AnchorStyles.Right; + gr2BatchOutputDir.Location = new System.Drawing.Point(109, 95); + gr2BatchOutputDir.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + gr2BatchOutputDir.Name = "gr2BatchOutputDir"; + gr2BatchOutputDir.Size = new System.Drawing.Size(988, 27); + gr2BatchOutputDir.TabIndex = 38; // // groupBox2 // - this.groupBox2.Anchor = 
((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom) - | System.Windows.Forms.AnchorStyles.Left))); - this.groupBox2.Controls.Add(this.flipMeshes); - this.groupBox2.Controls.Add(this.flipSkeletons); - this.groupBox2.Controls.Add(this.flipUVs); - this.groupBox2.Controls.Add(this.label2); - this.groupBox2.Controls.Add(this.exportableObjects); - this.groupBox2.Controls.Add(this.filterUVs); - this.groupBox2.Location = new System.Drawing.Point(7, 174); - this.groupBox2.Name = "groupBox2"; - this.groupBox2.Size = new System.Drawing.Size(395, 448); - this.groupBox2.TabIndex = 37; - this.groupBox2.TabStop = false; - this.groupBox2.Text = "Export Options"; + groupBox2.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left; + groupBox2.Controls.Add(flipMeshes); + groupBox2.Controls.Add(flipSkeletons); + groupBox2.Controls.Add(flipUVs); + groupBox2.Controls.Add(label2); + groupBox2.Controls.Add(exportableObjects); + groupBox2.Controls.Add(filterUVs); + groupBox2.Location = new System.Drawing.Point(9, 268); + groupBox2.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + groupBox2.Name = "groupBox2"; + groupBox2.Padding = new System.Windows.Forms.Padding(4, 5, 4, 5); + groupBox2.Size = new System.Drawing.Size(527, 689); + groupBox2.TabIndex = 37; + groupBox2.TabStop = false; + groupBox2.Text = "Export Options"; // // flipMeshes // - this.flipMeshes.AutoSize = true; - this.flipMeshes.Checked = true; - this.flipMeshes.CheckState = System.Windows.Forms.CheckState.Checked; - this.flipMeshes.Location = new System.Drawing.Point(189, 45); - this.flipMeshes.Name = "flipMeshes"; - this.flipMeshes.Size = new System.Drawing.Size(132, 17); - this.flipMeshes.TabIndex = 26; - this.flipMeshes.Text = "X-flip meshes (D:OS 2)"; - this.flipMeshes.UseVisualStyleBackColor = true; + flipMeshes.AutoSize = true; + flipMeshes.Checked = true; + flipMeshes.CheckState = System.Windows.Forms.CheckState.Checked; + flipMeshes.Location = new System.Drawing.Point(252, 69); + flipMeshes.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + flipMeshes.Name = "flipMeshes"; + flipMeshes.Size = new System.Drawing.Size(180, 24); + flipMeshes.TabIndex = 26; + flipMeshes.Text = "X-flip meshes (D:OS 2)"; + flipMeshes.UseVisualStyleBackColor = true; // // flipSkeletons // - this.flipSkeletons.AutoSize = true; - this.flipSkeletons.Location = new System.Drawing.Point(189, 22); - this.flipSkeletons.Name = "flipSkeletons"; - this.flipSkeletons.Size = new System.Drawing.Size(141, 17); - this.flipSkeletons.TabIndex = 25; - this.flipSkeletons.Text = "X-flip skeletons (D:OS 2)"; - this.flipSkeletons.UseVisualStyleBackColor = true; + flipSkeletons.AutoSize = true; + flipSkeletons.Location = new System.Drawing.Point(252, 34); + flipSkeletons.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + flipSkeletons.Name = "flipSkeletons"; + flipSkeletons.Size = new System.Drawing.Size(192, 24); + flipSkeletons.TabIndex = 25; + flipSkeletons.Text = "X-flip skeletons (D:OS 2)"; + flipSkeletons.UseVisualStyleBackColor = true; // // flipUVs // - this.flipUVs.AutoSize = true; - this.flipUVs.Checked = true; - this.flipUVs.CheckState = System.Windows.Forms.CheckState.Checked; - this.flipUVs.Location = new System.Drawing.Point(11, 22); - this.flipUVs.Name = "flipUVs"; - this.flipUVs.Size = new System.Drawing.Size(65, 17); - this.flipUVs.TabIndex = 23; - this.flipUVs.Text = "Flip UVs"; - 
this.flipUVs.UseVisualStyleBackColor = true; + flipUVs.AutoSize = true; + flipUVs.Checked = true; + flipUVs.CheckState = System.Windows.Forms.CheckState.Checked; + flipUVs.Location = new System.Drawing.Point(15, 34); + flipUVs.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + flipUVs.Name = "flipUVs"; + flipUVs.Size = new System.Drawing.Size(84, 24); + flipUVs.TabIndex = 23; + flipUVs.Text = "Flip UVs"; + flipUVs.UseVisualStyleBackColor = true; // // label2 // - this.label2.AutoSize = true; - this.label2.Location = new System.Drawing.Point(10, 73); - this.label2.Name = "label2"; - this.label2.Size = new System.Drawing.Size(141, 13); - this.label2.TabIndex = 22; - this.label2.Text = "Select subobjects for export:"; + label2.AutoSize = true; + label2.Location = new System.Drawing.Point(13, 112); + label2.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); + label2.Name = "label2"; + label2.Size = new System.Drawing.Size(197, 20); + label2.TabIndex = 22; + label2.Text = "Select subobjects for export:"; // // exportableObjects // - this.exportableObjects.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom) - | System.Windows.Forms.AnchorStyles.Left) - | System.Windows.Forms.AnchorStyles.Right))); - this.exportableObjects.CheckBoxes = true; - this.exportableObjects.Columns.AddRange(new System.Windows.Forms.ColumnHeader[] { - this.exportableName, - this.exportableType}); - this.exportableObjects.Enabled = false; - this.exportableObjects.FullRowSelect = true; - this.exportableObjects.Location = new System.Drawing.Point(11, 91); - this.exportableObjects.Name = "exportableObjects"; - this.exportableObjects.Size = new System.Drawing.Size(373, 340); - this.exportableObjects.TabIndex = 21; - this.exportableObjects.UseCompatibleStateImageBehavior = false; - this.exportableObjects.View = System.Windows.Forms.View.Details; + exportableObjects.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left | System.Windows.Forms.AnchorStyles.Right; + exportableObjects.CheckBoxes = true; + exportableObjects.Columns.AddRange(new System.Windows.Forms.ColumnHeader[] { exportableName, exportableType }); + exportableObjects.Enabled = false; + exportableObjects.FullRowSelect = true; + exportableObjects.Location = new System.Drawing.Point(15, 140); + exportableObjects.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + exportableObjects.Name = "exportableObjects"; + exportableObjects.Size = new System.Drawing.Size(496, 521); + exportableObjects.TabIndex = 21; + exportableObjects.UseCompatibleStateImageBehavior = false; + exportableObjects.View = System.Windows.Forms.View.Details; // // exportableName // - this.exportableName.Text = "Name"; - this.exportableName.Width = 230; + exportableName.Text = "Name"; + exportableName.Width = 230; // // exportableType // - this.exportableType.Text = "Type"; - this.exportableType.Width = 130; + exportableType.Text = "Type"; + exportableType.Width = 130; // // filterUVs // - this.filterUVs.AutoSize = true; - this.filterUVs.Location = new System.Drawing.Point(11, 45); - this.filterUVs.Name = "filterUVs"; - this.filterUVs.Size = new System.Drawing.Size(71, 17); - this.filterUVs.TabIndex = 16; - this.filterUVs.Text = "Filter UVs"; - this.filterUVs.UseVisualStyleBackColor = true; + filterUVs.AutoSize = true; + filterUVs.Location = new System.Drawing.Point(15, 69); + filterUVs.Margin = new System.Windows.Forms.Padding(4, 5, 
4, 5); + filterUVs.Name = "filterUVs"; + filterUVs.Size = new System.Drawing.Size(93, 24); + filterUVs.TabIndex = 16; + filterUVs.Text = "Filter UVs"; + filterUVs.UseVisualStyleBackColor = true; // // groupBox1 // - this.groupBox1.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom) - | System.Windows.Forms.AnchorStyles.Left) - | System.Windows.Forms.AnchorStyles.Right))); - this.groupBox1.Controls.Add(this.conformCopySkeletons); - this.groupBox1.Controls.Add(this.meshProxy); - this.groupBox1.Controls.Add(this.meshCloth); - this.groupBox1.Controls.Add(this.meshRigid); - this.groupBox1.Controls.Add(this.applyBasisTransforms); - this.groupBox1.Controls.Add(this.conformantGR2BrowseBtn); - this.groupBox1.Controls.Add(this.conformantGR2Path); - this.groupBox1.Controls.Add(this.conformToOriginal); - this.groupBox1.Controls.Add(this.buildDummySkeleton); - this.groupBox1.Controls.Add(this.resourceFormats); - this.groupBox1.Controls.Add(this.label1); - this.groupBox1.Location = new System.Drawing.Point(418, 174); - this.groupBox1.Name = "groupBox1"; - this.groupBox1.Size = new System.Drawing.Size(476, 448); - this.groupBox1.TabIndex = 36; - this.groupBox1.TabStop = false; - this.groupBox1.Text = "GR2 Export Options"; + groupBox1.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left | System.Windows.Forms.AnchorStyles.Right; + groupBox1.Controls.Add(conformCopySkeletons); + groupBox1.Controls.Add(meshProxy); + groupBox1.Controls.Add(meshCloth); + groupBox1.Controls.Add(meshRigid); + groupBox1.Controls.Add(applyBasisTransforms); + groupBox1.Controls.Add(conformantGR2BrowseBtn); + groupBox1.Controls.Add(conformantGR2Path); + groupBox1.Controls.Add(conformToOriginal); + groupBox1.Controls.Add(buildDummySkeleton); + groupBox1.Controls.Add(resourceFormats); + groupBox1.Controls.Add(label1); + groupBox1.Location = new System.Drawing.Point(557, 268); + groupBox1.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + groupBox1.Name = "groupBox1"; + groupBox1.Padding = new System.Windows.Forms.Padding(4, 5, 4, 5); + groupBox1.Size = new System.Drawing.Size(635, 689); + groupBox1.TabIndex = 36; + groupBox1.TabStop = false; + groupBox1.Text = "GR2 Export Options"; // // conformCopySkeletons // - this.conformCopySkeletons.AutoSize = true; - this.conformCopySkeletons.Enabled = false; - this.conformCopySkeletons.Location = new System.Drawing.Point(243, 91); - this.conformCopySkeletons.Name = "conformCopySkeletons"; - this.conformCopySkeletons.Size = new System.Drawing.Size(95, 17); - this.conformCopySkeletons.TabIndex = 29; - this.conformCopySkeletons.Text = "Copy Skeleton"; - this.conformCopySkeletons.UseVisualStyleBackColor = true; + conformCopySkeletons.AutoSize = true; + conformCopySkeletons.Enabled = false; + conformCopySkeletons.Location = new System.Drawing.Point(324, 140); + conformCopySkeletons.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + conformCopySkeletons.Name = "conformCopySkeletons"; + conformCopySkeletons.Size = new System.Drawing.Size(126, 24); + conformCopySkeletons.TabIndex = 29; + conformCopySkeletons.Text = "Copy Skeleton"; + conformCopySkeletons.UseVisualStyleBackColor = true; // // meshProxy // - this.meshProxy.AutoSize = true; - this.meshProxy.Location = new System.Drawing.Point(243, 68); - this.meshProxy.Name = "meshProxy"; - this.meshProxy.Size = new System.Drawing.Size(125, 17); - this.meshProxy.TabIndex = 29; - 
this.meshProxy.Text = "(D:OS 2) Mesh Proxy"; - this.meshProxy.UseVisualStyleBackColor = true; + meshProxy.AutoSize = true; + meshProxy.Location = new System.Drawing.Point(324, 105); + meshProxy.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + meshProxy.Name = "meshProxy"; + meshProxy.Size = new System.Drawing.Size(165, 24); + meshProxy.TabIndex = 29; + meshProxy.Text = "(D:OS 2) Mesh Proxy"; + meshProxy.UseVisualStyleBackColor = true; // // meshCloth // - this.meshCloth.AutoSize = true; - this.meshCloth.Location = new System.Drawing.Point(243, 45); - this.meshCloth.Name = "meshCloth"; - this.meshCloth.Size = new System.Drawing.Size(94, 17); - this.meshCloth.TabIndex = 28; - this.meshCloth.Text = "(D:OS 2) Cloth"; - this.meshCloth.UseVisualStyleBackColor = true; + meshCloth.AutoSize = true; + meshCloth.Location = new System.Drawing.Point(324, 69); + meshCloth.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + meshCloth.Name = "meshCloth"; + meshCloth.Size = new System.Drawing.Size(125, 24); + meshCloth.TabIndex = 28; + meshCloth.Text = "(D:OS 2) Cloth"; + meshCloth.UseVisualStyleBackColor = true; // // meshRigid // - this.meshRigid.AutoSize = true; - this.meshRigid.Location = new System.Drawing.Point(243, 22); - this.meshRigid.Name = "meshRigid"; - this.meshRigid.Size = new System.Drawing.Size(94, 17); - this.meshRigid.TabIndex = 27; - this.meshRigid.Text = "(D:OS 2) Rigid"; - this.meshRigid.UseVisualStyleBackColor = true; + meshRigid.AutoSize = true; + meshRigid.Location = new System.Drawing.Point(324, 34); + meshRigid.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + meshRigid.Name = "meshRigid"; + meshRigid.Size = new System.Drawing.Size(125, 24); + meshRigid.TabIndex = 27; + meshRigid.Text = "(D:OS 2) Rigid"; + meshRigid.UseVisualStyleBackColor = true; // // applyBasisTransforms // - this.applyBasisTransforms.AutoSize = true; - this.applyBasisTransforms.Checked = true; - this.applyBasisTransforms.CheckState = System.Windows.Forms.CheckState.Checked; - this.applyBasisTransforms.Location = new System.Drawing.Point(16, 22); - this.applyBasisTransforms.Name = "applyBasisTransforms"; - this.applyBasisTransforms.Size = new System.Drawing.Size(100, 17); - this.applyBasisTransforms.TabIndex = 26; - this.applyBasisTransforms.Text = "Convert to Y-up"; - this.applyBasisTransforms.UseVisualStyleBackColor = true; + applyBasisTransforms.AutoSize = true; + applyBasisTransforms.Checked = true; + applyBasisTransforms.CheckState = System.Windows.Forms.CheckState.Checked; + applyBasisTransforms.Location = new System.Drawing.Point(21, 34); + applyBasisTransforms.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + applyBasisTransforms.Name = "applyBasisTransforms"; + applyBasisTransforms.Size = new System.Drawing.Size(135, 24); + applyBasisTransforms.TabIndex = 26; + applyBasisTransforms.Text = "Convert to Y-up"; + applyBasisTransforms.UseVisualStyleBackColor = true; // // conformantGR2BrowseBtn // - this.conformantGR2BrowseBtn.Enabled = false; - this.conformantGR2BrowseBtn.Location = new System.Drawing.Point(419, 109); - this.conformantGR2BrowseBtn.Name = "conformantGR2BrowseBtn"; - this.conformantGR2BrowseBtn.Size = new System.Drawing.Size(41, 23); - this.conformantGR2BrowseBtn.TabIndex = 25; - this.conformantGR2BrowseBtn.Text = "..."; - this.conformantGR2BrowseBtn.UseVisualStyleBackColor = true; - this.conformantGR2BrowseBtn.Click += new System.EventHandler(this.conformantSkeletonBrowseBtn_Click); + conformantGR2BrowseBtn.Enabled = false; + conformantGR2BrowseBtn.Location = new 
System.Drawing.Point(559, 168); + conformantGR2BrowseBtn.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + conformantGR2BrowseBtn.Name = "conformantGR2BrowseBtn"; + conformantGR2BrowseBtn.Size = new System.Drawing.Size(55, 35); + conformantGR2BrowseBtn.TabIndex = 25; + conformantGR2BrowseBtn.Text = "..."; + conformantGR2BrowseBtn.UseVisualStyleBackColor = true; + conformantGR2BrowseBtn.Click += conformantSkeletonBrowseBtn_Click; // // conformantGR2Path // - this.conformantGR2Path.Enabled = false; - this.conformantGR2Path.Location = new System.Drawing.Point(15, 111); - this.conformantGR2Path.Name = "conformantGR2Path"; - this.conformantGR2Path.Size = new System.Drawing.Size(405, 20); - this.conformantGR2Path.TabIndex = 24; + conformantGR2Path.Enabled = false; + conformantGR2Path.Location = new System.Drawing.Point(20, 171); + conformantGR2Path.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + conformantGR2Path.Name = "conformantGR2Path"; + conformantGR2Path.Size = new System.Drawing.Size(539, 27); + conformantGR2Path.TabIndex = 24; // // conformToOriginal // - this.conformToOriginal.AutoSize = true; - this.conformToOriginal.Enabled = false; - this.conformToOriginal.Location = new System.Drawing.Point(15, 91); - this.conformToOriginal.Name = "conformToOriginal"; - this.conformToOriginal.Size = new System.Drawing.Size(141, 17); - this.conformToOriginal.TabIndex = 23; - this.conformToOriginal.Text = "Conform to original GR2:"; - this.conformToOriginal.UseVisualStyleBackColor = true; - this.conformToOriginal.Click += new System.EventHandler(this.conformToSkeleton_CheckedChanged); + conformToOriginal.AutoSize = true; + conformToOriginal.Enabled = false; + conformToOriginal.Location = new System.Drawing.Point(20, 140); + conformToOriginal.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + conformToOriginal.Name = "conformToOriginal"; + conformToOriginal.Size = new System.Drawing.Size(196, 24); + conformToOriginal.TabIndex = 23; + conformToOriginal.Text = "Conform to original GR2:"; + conformToOriginal.UseVisualStyleBackColor = true; + conformToOriginal.Click += conformToSkeleton_CheckedChanged; // // buildDummySkeleton // - this.buildDummySkeleton.AutoSize = true; - this.buildDummySkeleton.Checked = true; - this.buildDummySkeleton.CheckState = System.Windows.Forms.CheckState.Checked; - this.buildDummySkeleton.Location = new System.Drawing.Point(15, 68); - this.buildDummySkeleton.Name = "buildDummySkeleton"; - this.buildDummySkeleton.Size = new System.Drawing.Size(136, 17); - this.buildDummySkeleton.TabIndex = 22; - this.buildDummySkeleton.Text = "Create dummy skeleton"; - this.buildDummySkeleton.UseVisualStyleBackColor = true; + buildDummySkeleton.AutoSize = true; + buildDummySkeleton.Checked = true; + buildDummySkeleton.CheckState = System.Windows.Forms.CheckState.Checked; + buildDummySkeleton.Location = new System.Drawing.Point(20, 105); + buildDummySkeleton.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + buildDummySkeleton.Name = "buildDummySkeleton"; + buildDummySkeleton.Size = new System.Drawing.Size(187, 24); + buildDummySkeleton.TabIndex = 22; + buildDummySkeleton.Text = "Create dummy skeleton"; + buildDummySkeleton.UseVisualStyleBackColor = true; + // + // resourceFormats + // + resourceFormats.Anchor = System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left | System.Windows.Forms.AnchorStyles.Right; + resourceFormats.FullRowSelect = true; + resourceFormats.Location = new 
System.Drawing.Point(20, 246); + resourceFormats.Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + resourceFormats.Name = "resourceFormats"; + resourceFormats.Size = new System.Drawing.Size(592, 415); + resourceFormats.TabIndex = 16; + resourceFormats.UseCompatibleStateImageBehavior = false; + resourceFormats.View = System.Windows.Forms.View.Details; // // label1 // - this.label1.AutoSize = true; - this.label1.Location = new System.Drawing.Point(13, 139); - this.label1.Name = "label1"; - this.label1.Size = new System.Drawing.Size(139, 13); - this.label1.TabIndex = 15; - this.label1.Text = "Customize resource formats:"; + label1.AutoSize = true; + label1.Location = new System.Drawing.Point(17, 214); + label1.Margin = new System.Windows.Forms.Padding(4, 0, 4, 0); + label1.Name = "label1"; + label1.Size = new System.Drawing.Size(196, 20); + label1.TabIndex = 15; + label1.Text = "Customize resource formats:"; // // conformSkeletonFileDlg // - this.conformSkeletonFileDlg.Filter = "Granny GR2|*.gr2;*.lsm"; - this.conformSkeletonFileDlg.Title = "Select Conforming Skeleton File"; + conformSkeletonFileDlg.Filter = "Granny GR2|*.gr2;*.lsm"; + conformSkeletonFileDlg.Title = "Select Conforming Skeleton File"; // // outputFileDlg // - this.outputFileDlg.Filter = "COLLADA/GR2 files|*.dae;*.gr2;*.lsm"; - this.outputFileDlg.Title = "Select Output File"; + outputFileDlg.Filter = "glTF/COLLADA/GR2 files|*.dae;*.gr2;*.lsm;*.gltf;*.glb"; + outputFileDlg.Title = "Select Output File"; // // inputFileDlg // - this.inputFileDlg.Filter = "COLLADA/GR2 files|*.dae;*.gr2;*.lsm"; - this.inputFileDlg.Title = "Select Input File"; - // - // resourceFormats - // - this.resourceFormats.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom) - | System.Windows.Forms.AnchorStyles.Left) - | System.Windows.Forms.AnchorStyles.Right))); - this.resourceFormats.FullRowSelect = true; - this.resourceFormats.Location = new System.Drawing.Point(15, 160); - this.resourceFormats.Name = "resourceFormats"; - this.resourceFormats.Size = new System.Drawing.Size(445, 271); - this.resourceFormats.TabIndex = 16; - this.resourceFormats.UseCompatibleStateImageBehavior = false; - this.resourceFormats.View = System.Windows.Forms.View.Details; + inputFileDlg.Filter = "glTF/COLLADA/GR2 files|*.dae;*.gr2;*.lsm;*.gltf;*.glb"; + inputFileDlg.Title = "Select Input File"; // // GR2Pane // - this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F); - this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font; - this.Controls.Add(this.gr2ModeTabControl); - this.Controls.Add(this.groupBox2); - this.Controls.Add(this.groupBox1); - this.Name = "GR2Pane"; - this.Size = new System.Drawing.Size(901, 630); - this.gr2ModeTabControl.ResumeLayout(false); - this.gr2SingleFileTab.ResumeLayout(false); - this.gr2SingleFileTab.PerformLayout(); - this.gr2BatchTab.ResumeLayout(false); - this.gr2BatchTab.PerformLayout(); - this.groupBox2.ResumeLayout(false); - this.groupBox2.PerformLayout(); - this.groupBox1.ResumeLayout(false); - this.groupBox1.PerformLayout(); - this.ResumeLayout(false); - + AutoScaleDimensions = new System.Drawing.SizeF(8F, 20F); + AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font; + Controls.Add(gr2ModeTabControl); + Controls.Add(groupBox2); + Controls.Add(groupBox1); + Margin = new System.Windows.Forms.Padding(4, 5, 4, 5); + Name = "GR2Pane"; + Size = new System.Drawing.Size(1201, 969); + gr2ModeTabControl.ResumeLayout(false); + 
gr2SingleFileTab.ResumeLayout(false); + gr2SingleFileTab.PerformLayout(); + gr2BatchTab.ResumeLayout(false); + gr2BatchTab.PerformLayout(); + groupBox2.ResumeLayout(false); + groupBox2.PerformLayout(); + groupBox1.ResumeLayout(false); + groupBox1.PerformLayout(); + ResumeLayout(false); } #endregion diff --git a/ConverterApp/GR2Pane.cs b/ConverterApp/GR2Pane.cs index d8ef555a..9e8f0e6b 100644 --- a/ConverterApp/GR2Pane.cs +++ b/ConverterApp/GR2Pane.cs @@ -227,14 +227,10 @@ private void UpdateExporterSettings(ExporterOptions settings) UpdateCommonExporterSettings(settings); settings.InputPath = inputPath.Text; - var inputExtension = Path.GetExtension(settings.InputPath)?.ToLower(); - bool inputIsGr2 = inputExtension == ".gr2" || inputExtension == ".lsm"; - settings.InputFormat = inputIsGr2 ? ExportFormat.GR2 : ExportFormat.DAE; + settings.InputFormat = GR2Utils.PathExtensionToModelFormat(settings.InputPath); settings.OutputPath = outputPath.Text; - var outputExtension = Path.GetExtension(settings.OutputPath)?.ToLower(); - bool outputIsGr2 = outputExtension == ".gr2" || outputExtension == ".lsm"; - settings.OutputFormat = outputIsGr2 ? ExportFormat.GR2 : ExportFormat.DAE; + settings.OutputFormat = GR2Utils.PathExtensionToModelFormat(settings.OutputPath); foreach (ListViewItem item in exportableObjects.Items) { @@ -315,11 +311,14 @@ private void inputFileBrowseBtn_Click(object sender, EventArgs e) private void loadInputBtn_Click(object sender, EventArgs e) { +#if !DEBUG string nl = Environment.NewLine; try { - LoadFile(inputPath.Text); +#endif + LoadFile(inputPath.Text); +#if !DEBUG } catch (ParsingException exc) { @@ -329,6 +328,7 @@ private void loadInputBtn_Click(object sender, EventArgs e) { MessageBox.Show($"Internal error!{nl}{nl}{exc}", "Import Failed", MessageBoxButtons.OK, MessageBoxIcon.Error); } +#endif } private void conformToSkeleton_CheckedChanged(object sender, EventArgs e) @@ -420,14 +420,26 @@ private static void GR2ConversionError(string inputPath, string outputPath, Exce } } + private ExportFormat IndexToExportFormat(int index) + { + return index switch + { + 0 => ExportFormat.GR2, + 1 => ExportFormat.DAE, + 2 => ExportFormat.GLTF, + 3 => ExportFormat.GLB, + _ => throw new InvalidDataException() + }; + } + private void GR2BatchConvertBtn_Click(object sender, EventArgs e) { gr2BatchConvertBtn.Enabled = false; var exporter = new Exporter(); UpdateCommonExporterSettings(exporter.Options); - exporter.Options.InputFormat = gr2BatchInputFormat.SelectedIndex == 0 ? ExportFormat.GR2 : ExportFormat.DAE; - exporter.Options.OutputFormat = gr2BatchOutputFormat.SelectedIndex == 0 ? 
ExportFormat.GR2 : ExportFormat.DAE; + exporter.Options.InputFormat = IndexToExportFormat(gr2BatchInputFormat.SelectedIndex); + exporter.Options.OutputFormat = IndexToExportFormat(gr2BatchOutputFormat.SelectedIndex); var batchConverter = new GR2Utils { diff --git a/ConverterApp/GR2Pane.resx b/ConverterApp/GR2Pane.resx index 539aaed4..bd7d55cc 100644 --- a/ConverterApp/GR2Pane.resx +++ b/ConverterApp/GR2Pane.resx @@ -1,17 +1,17 @@  - diff --git a/Divine/CLI/CommandLineArguments.cs b/Divine/CLI/CommandLineArguments.cs index cf1ee673..aa38f4ed 100644 --- a/Divine/CLI/CommandLineArguments.cs +++ b/Divine/CLI/CommandLineArguments.cs @@ -62,7 +62,7 @@ public class CommandLineArguments [EnumeratedValueArgument(typeof(string), 'i', "input-format", Description = "Set input format for batch operations", DefaultValue = null, - AllowedValues = "dae;gr2;lsv;pak;lsj;lsx;lsb;lsf", + AllowedValues = "dae;glb;gltf;gr2;lsv;pak;lsj;lsx;lsb;lsf", ValueOptional = false, Optional = true )] @@ -72,7 +72,7 @@ public class CommandLineArguments [EnumeratedValueArgument(typeof(string), 'o', "output-format", Description = "Set output format for batch operations", DefaultValue = null, - AllowedValues = "dae;gr2;lsv;pak;lsj;lsx;lsb;lsf", + AllowedValues = "dae;glb;gltf;gr2;lsv;pak;lsj;lsx;lsb;lsf", ValueOptional = false, Optional = true )] @@ -236,36 +236,6 @@ public static Game GetGameByString(string game) } } - public static ExportFormat GetModelFormatByString(string format) - { - switch (format.ToLower()) - { - case "gr2": - { - return ExportFormat.GR2; - } - case "dae": - { - return ExportFormat.DAE; - } - default: - { - throw new ArgumentException($"Unknown model format: {format}"); - } - } - } - - public static ExportFormat GetModelFormatByPath(string path) - { - string extension = Path.GetExtension(path); - if (extension != null) - { - return GetModelFormatByString(extension.Substring(1)); - } - - throw new ArgumentException($"Could not determine model format from filename: {path}"); - } - // ReSharper disable once RedundantCaseLabel public static ResourceFormat GetResourceFormatByString(string resourceFormat) { diff --git a/Divine/CLI/CommandLineGR2Processor.cs b/Divine/CLI/CommandLineGR2Processor.cs index f1f7d3d9..ea1342b9 100644 --- a/Divine/CLI/CommandLineGR2Processor.cs +++ b/Divine/CLI/CommandLineGR2Processor.cs @@ -1,6 +1,7 @@ using System; using System.Collections.Generic; using System.IO; +using LSLib.Granny; using LSLib.Granny.GR2; using LSLib.Granny.Model; using LSLib.LS.Enums; @@ -27,8 +28,8 @@ public static ExporterOptions UpdateExporterSettings() { InputPath = CommandLineActions.SourcePath, OutputPath = CommandLineActions.DestinationPath, - InputFormat = Program.argv.InputFormat != null ? CommandLineArguments.GetModelFormatByString(Program.argv.InputFormat) : CommandLineArguments.GetModelFormatByPath(CommandLineActions.SourcePath), - OutputFormat = Program.argv.OutputFormat != null ? CommandLineArguments.GetModelFormatByString(Program.argv.OutputFormat) : CommandLineArguments.GetModelFormatByPath(CommandLineActions.DestinationPath), + InputFormat = Program.argv.InputFormat != null ? GR2Utils.FileExtensionToModelFormat("." + Program.argv.InputFormat) : GR2Utils.PathExtensionToModelFormat(CommandLineActions.SourcePath), + OutputFormat = Program.argv.OutputFormat != null ? GR2Utils.FileExtensionToModelFormat("." 
+ Program.argv.OutputFormat) : GR2Utils.PathExtensionToModelFormat(CommandLineActions.DestinationPath), ExportNormals = GR2Options["export-normals"], ExportTangents = GR2Options["export-tangents"], ExportUVs = GR2Options["export-uvs"], diff --git a/LSLib/Granny/GR2/Format.cs b/LSLib/Granny/GR2/Format.cs index 57e50df5..be94a0bf 100644 --- a/LSLib/Granny/GR2/Format.cs +++ b/LSLib/Granny/GR2/Format.cs @@ -2,6 +2,7 @@ using OpenTK.Mathematics; using System.Reflection; using System.IO.Hashing; +using SharpGLTF.Transforms; namespace LSLib.Granny.GR2; @@ -122,6 +123,15 @@ public static Transform FromMatrix4(Matrix4 mat) return transform; } + public static Transform FromGLTF(AffineTransform t) + { + var transform = new Transform(); + transform.SetTranslation(new Vector3(t.Translation.X, t.Translation.Y, t.Translation.Z)); + transform.SetRotation(new Quaternion(t.Rotation.X, t.Rotation.Y, t.Rotation.Z, t.Rotation.W)); + transform.SetScale(new Vector3(t.Scale.X, t.Scale.Y, t.Scale.Z)); + return transform; + } + public Matrix4 ToMatrix4Composite() { Matrix3 transform3 = Matrix3.CreateFromQuaternion(Rotation); diff --git a/LSLib/Granny/GR2Utils.cs b/LSLib/Granny/GR2Utils.cs index 532630e6..330ea0dd 100644 --- a/LSLib/Granny/GR2Utils.cs +++ b/LSLib/Granny/GR2Utils.cs @@ -13,23 +13,29 @@ public class GR2Utils public ConversionErrorDelegate ConversionError = delegate { }; public ProgressUpdateDelegate ProgressUpdate = delegate { }; - public static ExportFormat ExtensionToModelFormat(string path) + public static ExportFormat FileExtensionToModelFormat(string extension) { - string extension = Path.GetExtension(path)?.ToLower(); - - return extension switch + return extension.ToLower() switch { ".gr2" or ".lsm" => ExportFormat.GR2, ".dae" => ExportFormat.DAE, + ".gltf" => ExportFormat.GLTF, + ".glb" => ExportFormat.GLB, _ => throw new ArgumentException($"Unrecognized model file extension: {extension}"), }; } + public static ExportFormat PathExtensionToModelFormat(string path) + { + string extension = Path.GetExtension(path); + return FileExtensionToModelFormat(extension); + } + public static Root LoadModel(string inputPath) { var options = new ExporterOptions { - InputFormat = ExtensionToModelFormat(inputPath) + InputFormat = PathExtensionToModelFormat(inputPath) }; return LoadModel(inputPath, options); } @@ -57,6 +63,16 @@ public static Root LoadModel(string inputPath, ExporterOptions options) return importer.Import(inputPath); } + case ExportFormat.GLTF: + case ExportFormat.GLB: + { + var importer = new GLTFImporter + { + Options = options + }; + return importer.Import(inputPath); + } + default: throw new ArgumentException("Invalid model format"); } diff --git a/LSLib/Granny/Model/ColladaImporter.cs b/LSLib/Granny/Model/ColladaImporter.cs index 08a7da18..7e423309 100644 --- a/LSLib/Granny/Model/ColladaImporter.cs +++ b/LSLib/Granny/Model/ColladaImporter.cs @@ -455,11 +455,11 @@ private void LoadColladaLSLibProfileData(COLLADA collada) } } - private Mesh ImportMesh(geometry geom, mesh mesh, VertexDescriptor vertexFormat) + private Mesh ImportMesh(geometry geom, mesh mesh) { var collada = new ColladaMesh(); bool isSkinned = SkinnedMeshes.Contains(geom.id); - collada.ImportFromCollada(mesh, vertexFormat, isSkinned, Options); + collada.ImportFromCollada(mesh, isSkinned, Options); var m = new Mesh { @@ -508,9 +508,9 @@ private Mesh ImportMesh(geometry geom, mesh mesh, VertexDescriptor vertexFormat) return m; } - private Mesh ImportMesh(Root root, string name, geometry geom, mesh mesh, VertexDescriptor 
vertexFormat) + private Mesh ImportMesh(Root root, string name, geometry geom, mesh mesh) { - var m = ImportMesh(geom, mesh, vertexFormat); + var m = ImportMesh(geom, mesh); m.Name = name; root.VertexDatas.Add(m.PrimaryVertexData); root.TriTopologies.Add(m.PrimaryTopology); @@ -790,71 +790,9 @@ private void ImportSkin(Root root, skin skin) if (Options.RecalculateOBBs) { - UpdateOBBs(root.Skeletons.Single(), mesh); + VertexHelpers.UpdateOBBs(root.Skeletons.Single(), mesh); } } - - class OBB - { - public Vector3 Min, Max; - public int NumVerts = 0; - } - - private void UpdateOBBs(Skeleton skeleton, Mesh mesh) - { - if (mesh.BoneBindings == null || mesh.BoneBindings.Count == 0) return; - - var obbs = new List(mesh.BoneBindings.Count); - for (var i = 0; i < mesh.BoneBindings.Count; i++) - { - obbs.Add(new OBB - { - Min = new Vector3(1000.0f, 1000.0f, 1000.0f), - Max = new Vector3(-1000.0f, -1000.0f, -1000.0f), - }); - } - - foreach (var vert in mesh.PrimaryVertexData.Vertices) - { - for (var i = 0; i < Vertex.MaxBoneInfluences; i++) - { - if (vert.BoneWeights[i] > 0) - { - var bi = vert.BoneIndices[i]; - var obb = obbs[bi]; - obb.NumVerts++; - - var bone = skeleton.GetBoneByName(mesh.BoneBindings[bi].BoneName); - var invWorldTransform = ColladaHelpers.FloatsToMatrix(bone.InverseWorldTransform); - var transformed = Vector3.TransformPosition(vert.Position, invWorldTransform); - - obb.Min.X = Math.Min(obb.Min.X, transformed.X); - obb.Min.Y = Math.Min(obb.Min.Y, transformed.Y); - obb.Min.Z = Math.Min(obb.Min.Z, transformed.Z); - - obb.Max.X = Math.Max(obb.Max.X, transformed.X); - obb.Max.Y = Math.Max(obb.Max.Y, transformed.Y); - obb.Max.Z = Math.Max(obb.Max.Z, transformed.Z); - } - } - } - - for (var i = 0; i < obbs.Count; i++) - { - var obb = obbs[i]; - if (obb.NumVerts > 0) - { - mesh.BoneBindings[i].OBBMin = [obb.Min.X, obb.Min.Y, obb.Min.Z]; - mesh.BoneBindings[i].OBBMax = [obb.Max.X, obb.Max.Y, obb.Max.Z]; - } - else - { - mesh.BoneBindings[i].OBBMin = [0.0f, 0.0f, 0.0f]; - mesh.BoneBindings[i].OBBMax = [0.0f, 0.0f, 0.0f]; - } - } - } - private void LoadColladaLSLibProfileData(animation anim, TrackGroup loaded) { var technique = FindExporterExtraData(anim.extra); @@ -954,21 +892,10 @@ public Root Import(string inputPath) LoadColladaLSLibProfileData(collada); - var root = new Root - { - ArtToolInfo = ImportArtToolInfo(collada), - ExporterInfo = Options.StripMetadata ? null : ImportExporterInfo(collada), - - FromFileName = inputPath, - - Skeletons = [], - VertexDatas = [], - TriTopologies = [], - Meshes = [], - Models = [], - TrackGroups = [], - Animations = [] - }; + var root = Root.CreateEmpty(); + root.ArtToolInfo = ImportArtToolInfo(collada); + root.ExporterInfo = Options.StripMetadata ? 
null : ImportExporterInfo(collada); + root.FromFileName = inputPath; ColladaGeometries = []; SkinnedMeshes = []; @@ -1063,9 +990,7 @@ public Root Import(string inputPath) foreach (var geometry in collGeometries) { - // Use the override vertex format, if one was specified - Options.VertexFormats.TryGetValue(geometry.name, out VertexDescriptor vertexFormat); - var mesh = ImportMesh(root, geometry.name, geometry, geometry.Item as mesh, vertexFormat); + var mesh = ImportMesh(root, geometry.name, geometry, geometry.Item as mesh); ColladaGeometries.Add(geometry.id, mesh); } diff --git a/LSLib/Granny/Model/ColladaMesh.cs b/LSLib/Granny/Model/ColladaMesh.cs index bf4873e7..493c92a4 100644 --- a/LSLib/Granny/Model/ColladaMesh.cs +++ b/LSLib/Granny/Model/ColladaMesh.cs @@ -71,112 +71,6 @@ public int GetHashCode(int[] obj) } } - void computeTangents() - { - // Check if the vertex format has at least one UV set - if (ConsolidatedVertices.Count > 0) - { - var v = ConsolidatedVertices[0]; - if (v.Format.TextureCoordinates == 0) - { - throw new InvalidOperationException("At least one UV set is required to recompute tangents"); - } - } - - foreach (var v in ConsolidatedVertices) - { - v.Tangent = Vector3.Zero; - v.Binormal = Vector3.Zero; - } - - for (int i = 0; i < TriangleCount; i++) - { - var i1 = ConsolidatedIndices[i * 3 + 0]; - var i2 = ConsolidatedIndices[i * 3 + 1]; - var i3 = ConsolidatedIndices[i * 3 + 2]; - - var vert1 = ConsolidatedVertices[i1]; - var vert2 = ConsolidatedVertices[i2]; - var vert3 = ConsolidatedVertices[i3]; - - var v1 = vert1.Position; - var v2 = vert2.Position; - var v3 = vert3.Position; - - var w1 = vert1.TextureCoordinates0; - var w2 = vert2.TextureCoordinates0; - var w3 = vert3.TextureCoordinates0; - - float x1 = v2.X - v1.X; - float x2 = v3.X - v1.X; - float y1 = v2.Y - v1.Y; - float y2 = v3.Y - v1.Y; - float z1 = v2.Z - v1.Z; - float z2 = v3.Z - v1.Z; - - float s1 = w2.X - w1.X; - float s2 = w3.X - w1.X; - float t1 = w2.Y - w1.Y; - float t2 = w3.Y - w1.Y; - - float r = 1.0F / (s1 * t2 - s2 * t1); - - if ((Single.IsNaN(r) || Single.IsInfinity(r)) && !Options.IgnoreUVNaN) - { - throw new Exception($"Couldn't calculate tangents; the mesh most likely contains non-manifold geometry.{Environment.NewLine}" - + $"UV1: {w1}{Environment.NewLine}UV2: {w2}{Environment.NewLine}UV3: {w3}"); - } - - var sdir = new Vector3( - (t2 * x1 - t1 * x2) * r, - (t2 * y1 - t1 * y2) * r, - (t2 * z1 - t1 * z2) * r - ); - var tdir = new Vector3( - (s1 * x2 - s2 * x1) * r, - (s1 * y2 - s2 * y1) * r, - (s1 * z2 - s2 * z1) * r - ); - - vert1.Tangent += sdir; - vert2.Tangent += sdir; - vert3.Tangent += sdir; - - vert1.Binormal += tdir; - vert2.Binormal += tdir; - vert3.Binormal += tdir; - } - - foreach (var v in ConsolidatedVertices) - { - var n = v.Normal; - var t = v.Tangent; - var b = v.Binormal; - - // Gram-Schmidt orthogonalize - var tangent = (t - n * Vector3.Dot(n, t)).Normalized(); - - // Calculate handedness - var w = (Vector3.Dot(Vector3.Cross(n, t), b) < 0.0F) ? 
1.0F : -1.0F; - var binormal = (Vector3.Cross(n, t) * w).Normalized(); - - v.Tangent = tangent; - v.Binormal = binormal; - } - } - - private Vector3 triangleNormalFromVertex(int[] indices, int vertexIndex) - { - // This assumes that A->B->C is a counter-clockwise ordering - var a = Vertices[indices[vertexIndex]].Position; - var b = Vertices[indices[(vertexIndex + 1) % 3]].Position; - var c = Vertices[indices[(vertexIndex + 2) % 3]].Position; - - var N = Vector3.Cross(b - a, c - a); - float sin_alpha = N.Length / ((b - a).Length * (c - a).Length); - return N.Normalized() * (float)Math.Asin(sin_alpha); - } - private int VertexIndexCount() { return Indices.Count / InputOffsetCount; @@ -203,7 +97,7 @@ private void computeNormals() VertexIndex(baseIdx + 1), VertexIndex(baseIdx + 2) }; - N += triangleNormalFromVertex(indices, triVertIdx - baseIdx); + N += VertexHelpers.TriangleNormalFromVertex(Vertices, indices, triVertIdx - baseIdx); } } @@ -488,14 +382,14 @@ private VertexDescriptor FindVertexFormat(bool isSkinned) return desc; } - public void ImportFromCollada(mesh mesh, VertexDescriptor vertexFormat, bool isSkinned, ExporterOptions options) + public void ImportFromCollada(mesh mesh, bool isSkinned, ExporterOptions options) { Options = options; Mesh = mesh; ImportSources(); ImportFaces(); - vertexFormat ??= FindVertexFormat(isSkinned); + var vertexFormat = FindVertexFormat(isSkinned); InputVertexType = vertexFormat; OutputVertexType = new VertexDescriptor @@ -611,7 +505,7 @@ public void ImportFromCollada(mesh mesh, VertexDescriptor vertexFormat, bool isS OutputVertexType.TangentType = NormalType.Float3; OutputVertexType.BinormalType = NormalType.Float3; HasTangents = true; - computeTangents(); + VertexHelpers.ComputeTangents(ConsolidatedVertices, ConsolidatedIndices, Options.IgnoreUVNaN); } // Use optimized tangent, texture map and color map format when exporting for D:OS 2 diff --git a/LSLib/Granny/Model/Exporter.cs b/LSLib/Granny/Model/Exporter.cs index 560b508a..765e933f 100644 --- a/LSLib/Granny/Model/Exporter.cs +++ b/LSLib/Granny/Model/Exporter.cs @@ -11,7 +11,9 @@ public class ExportException(string message) : Exception(message) public enum ExportFormat { GR2, - DAE + DAE, + GLTF, + GLB }; public enum DivinityModelInfoFormat @@ -82,7 +84,6 @@ public class ExporterOptions public bool ConformAnimations = true; public bool ConformMeshBoneBindings = true; public bool ConformModels = true; - public Dictionary VertexFormats = []; // Extended model info format to use when exporting to D:OS public DivinityModelInfoFormat ModelInfoFormat = DivinityModelInfoFormat.None; // Model flags to use when exporting @@ -176,6 +177,15 @@ private Root LoadDAE(string inPath) return importer.Import(inPath); } + private Root LoadGLTF(string inPath) + { + var importer = new GLTFImporter + { + Options = Options + }; + return importer.Import(inPath); + } + private Root Load(string inPath, ExportFormat format) { switch (format) @@ -186,6 +196,10 @@ private Root Load(string inPath, ExportFormat format) case ExportFormat.DAE: return LoadDAE(inPath); + case ExportFormat.GLTF: + case ExportFormat.GLB: + return LoadGLTF(inPath); + default: throw new NotImplementedException("Unsupported input format"); } @@ -225,6 +239,15 @@ private void SaveDAE(Root root, ExporterOptions options) exporter.Export(root, options.OutputPath); } + private void SaveGLTF(Root root, ExporterOptions options) + { + var exporter = new GLTFExporter + { + Options = options + }; + exporter.Export(root, options.OutputPath); + } + private void 
Save(Root root, ExporterOptions options) { switch (options.OutputFormat) @@ -238,6 +261,11 @@ private void Save(Root root, ExporterOptions options) SaveDAE(root, options); break; + case ExportFormat.GLTF: + case ExportFormat.GLB: + SaveGLTF(root, options); + break; + default: throw new NotImplementedException("Unsupported output format"); } diff --git a/LSLib/Granny/Model/GLTFExporter.cs b/LSLib/Granny/Model/GLTFExporter.cs new file mode 100644 index 00000000..7cd7d7e5 --- /dev/null +++ b/LSLib/Granny/Model/GLTFExporter.cs @@ -0,0 +1,210 @@ +using LSLib.Granny.GR2; +using System.Text.RegularExpressions; +using SharpGLTF.Transforms; +using System.Numerics; +using SharpGLTF.Scenes; +using LSLib.LS; +using SharpGLTF.Schema2; + +namespace LSLib.Granny.Model; + +public class GLTFExporter +{ + [Serialization(Kind = SerializationKind.None)] + public ExporterOptions Options = new(); + + private Dictionary MeshIds = new(); + private Dictionary> Skeletons = new(); + + private void GenerateUniqueMeshIds(List meshes) + { + HashSet namesInUse = []; + var charRe = new Regex("[^a-zA-Z0-9_.-]", RegexOptions.CultureInvariant); + foreach (var mesh in meshes) + { + // Sanitize name to make sure it satisfies Collada xsd:NCName requirements + mesh.Name = charRe.Replace(mesh.Name, "_"); + var name = mesh.Name; + + var nameNum = 1; + while (namesInUse.Contains(name)) + { + name = mesh.Name + "_" + nameNum.ToString(); + nameNum++; + } + + namesInUse.Add(name); + MeshIds[mesh] = name; + } + } + + private void ExportMeshBinding(Model model, Skeleton skeleton, MeshBinding meshBinding, SceneBuilder scene) + { + var meshId = MeshIds[meshBinding.Mesh]; + var exporter = new GLTFMeshExporter(meshBinding.Mesh, meshId, Options); + var mesh = exporter.Export(); + + if (skeleton == null || !meshBinding.Mesh.VertexFormat.HasBoneWeights) + { + scene.AddRigidMesh(mesh, new AffineTransform(Matrix4x4.Identity)); + } + else + { + scene.AddSkinnedMesh(mesh, Matrix4x4.Identity, Skeletons[skeleton].ToArray()); + } + } + + private List ExportSkeleton(NodeBuilder root, Skeleton skeleton) + { + var joints = new List(); + foreach (var joint in skeleton.Bones) + { + NodeBuilder node; + if (joint.ParentIndex == -1) + { + // FIXME - parent to dummy root proxy? 
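+        // Root joints are currently created as free-standing NodeBuilders rather than being
+        // parented under the `root` node passed in (which is why the caller passes null and the
+        // parameter is unused); the skin references the joint list directly via Skeletons[skeleton]
+        // in ExportMeshBinding. A possible sketch if dummy-root parenting is enabled later,
+        // reusing only calls already present in this file (illustrative only):
+        //   var skelRoot = new NodeBuilder(skeleton.Name);
+        //   node = skelRoot.CreateNode(joint.Name);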
+ // node = root.CreateNode(joint.Name); + node = new NodeBuilder(joint.Name); + } + else + { + node = joints[joint.ParentIndex].CreateNode(joint.Name); + } + + node.LocalTransform = ToGLTFTransform(joint.Transform); + joints.Add(node); + } + + return joints; + } + + private void ExportSceneExtensions(Root root, GLTFSceneExtensions ext) + { + ext.MetadataVersion = Common.GLTFMetadataVersion; + ext.LSLibMajor = Common.MajorVersion; + ext.LSLibMinor = Common.MinorVersion; + ext.LSLibPatch = Common.PatchVersion; + } + + private void ExportSkeletonExtensions(Skeleton skeleton, GLTFSceneExtensions ext) + { + ext.BoneOrder = []; + foreach (var joint in skeleton.Bones) + { + ext.BoneOrder[joint.Name] = joint.ExportIndex + 1; + } + } + + private void ExportMeshExtensions(Mesh mesh, GLTFMeshExtensions ext) + { + var extd = mesh.ExtendedData; + var user = extd.UserMeshProperties; + ext.Rigid = user.MeshFlags.IsRigid() || extd.Rigid == 1; + ext.Cloth = user.MeshFlags.IsCloth() || extd.Cloth == 1; + ext.MeshProxy = user.MeshFlags.IsMeshProxy() || extd.MeshProxy == 1; + ext.ProxyGeometry = user.MeshFlags.HasProxyGeometry(); + ext.Spring = user.MeshFlags.IsSpring() || extd.Spring == 1; + ext.Occluder = user.MeshFlags.IsOccluder() || extd.Occluder == 1; + ext.ClothPhysics = user.ClothFlags.HasClothPhysics(); + ext.Cloth01 = user.ClothFlags.HasClothFlag01(); + ext.Cloth02 = user.ClothFlags.HasClothFlag02(); + ext.Cloth04 = user.ClothFlags.HasClothFlag04(); + ext.Impostor = user.IsImpostor[0] == 1; + ext.ExportOrder = mesh.ExportOrder; + ext.LOD = (user.Lod[0] >= 0) ? user.Lod[0] : 0; + ext.LODDistance = (user.LodDistance[0] < 100000000.0f) ? user.LodDistance[0] : 0.0f; + } + + private void ExportExtensions(Root root, ModelRoot modelRoot) + { + var sceneExt = modelRoot.LogicalScenes.First().UseExtension(); + ExportSceneExtensions(root, sceneExt); + + foreach (var mesh in modelRoot.LogicalMeshes) + { + foreach (var grMesh in root.Meshes) + { + if (mesh.Name == grMesh.Name) + { + var meshExt = mesh.UseExtension(); + ExportMeshExtensions(grMesh, meshExt); + break; + } + } + } + + if (modelRoot.LogicalSkins.Count > 0) + { + ExportSkeletonExtensions(root.Skeletons[0], sceneExt); + } + } + + private AffineTransform ToGLTFTransform(Transform t) + { + return new AffineTransform( + new Vector3(t.ScaleShear[0,0], t.ScaleShear[1,1], t.ScaleShear[2,2]), + new Quaternion(t.Rotation.X, t.Rotation.Y, t.Rotation.Z, t.Rotation.W), + new Vector3(t.Translation.X, t.Translation.Y, t.Translation.Z) + ); + } + + private void ExportModel(Root root, Model model, SceneBuilder scene) + { + Skeleton skel = null; + if (model.Skeleton != null && !model.Skeleton.IsDummy && model.Skeleton.Bones.Count > 1 && root.Skeletons.Any(s => s.Name == model.Skeleton.Name)) + { + skel = model.Skeleton; + } + + foreach (var meshBinding in model.MeshBindings ?? []) + { + ExportMeshBinding(model, skel, meshBinding, scene); + } + } + + + private SceneBuilder ExportScene(Root root) + { + var scene = new SceneBuilder(); + GenerateUniqueMeshIds(root.Meshes ?? []); + + foreach (var skeleton in root.Skeletons ?? []) + { + //var skelRoot = new NodeBuilder(); + //skelRoot.Name = skeleton.Name; + //scene.AddNode(skelRoot); + + var joints = ExportSkeleton(null, skeleton); + Skeletons.Add(skeleton, joints); + } + + foreach (var model in root.Models ?? 
[]) + { + ExportModel(root, model, scene); + } + + return scene; + } + + + public void Export(Root root, string outputPath) + { + GLTFExtensions.RegisterExtensions(); + var scene = ExportScene(root); + var modelRoot = scene.ToGltf2(); + + ExportExtensions(root, modelRoot); + + switch (Options.OutputFormat) + { + case ExportFormat.GLTF: + modelRoot.SaveGLTF(Options.OutputPath); + break; + case ExportFormat.GLB: + modelRoot.SaveGLB(Options.OutputPath); + break; + default: + throw new NotImplementedException(); + } + } +} diff --git a/LSLib/Granny/Model/GLTFExtensions.cs b/LSLib/Granny/Model/GLTFExtensions.cs new file mode 100644 index 00000000..a9d9a029 --- /dev/null +++ b/LSLib/Granny/Model/GLTFExtensions.cs @@ -0,0 +1,187 @@ +using SharpGLTF.Schema2; +using System.Text.Json; + +namespace LSLib.Granny.Model; + + +partial class GLTFSceneExtensions : ExtraProperties +{ + internal GLTFSceneExtensions() { } + + public Int32 MetadataVersion = 0; + public Int32 LSLibMajor = 0; + public Int32 LSLibMinor = 0; + public Int32 LSLibPatch = 0; + + public Dictionary BoneOrder = []; + + protected override void SerializeProperties(Utf8JsonWriter writer) + { + base.SerializeProperties(writer); + + SerializeProperty(writer, "MetadataVersion", MetadataVersion); + SerializeProperty(writer, "LSLibMajor", LSLibMajor); + SerializeProperty(writer, "LSLibMinor", LSLibMinor); + SerializeProperty(writer, "LSLibPatch", LSLibPatch); + + SerializeProperty(writer, "BoneOrder", BoneOrder); + } + + protected override void DeserializeProperty(string jsonPropertyName, ref Utf8JsonReader reader) + { + switch (jsonPropertyName) + { + case "MetadataVersion": MetadataVersion = DeserializePropertyValue(ref reader); break; + case "LSLibMajor": LSLibMajor = DeserializePropertyValue(ref reader); break; + case "LSLibMinor": LSLibMinor = DeserializePropertyValue(ref reader); break; + case "LSLibPatch": LSLibPatch = DeserializePropertyValue(ref reader); break; + + case "BoneOrder": DeserializePropertyDictionary(ref reader, BoneOrder); break; + + default: base.DeserializeProperty(jsonPropertyName, ref reader); break; + } + } +} + +partial class GLTFMeshExtensions : ExtraProperties +{ + internal GLTFMeshExtensions() { } + + public bool Rigid = false; + public bool Cloth = false; + public bool MeshProxy = false; + public bool ProxyGeometry = false; + public bool Spring = false; + public bool Occluder = false; + public bool ClothPhysics = false; + public bool Cloth01 = false; + public bool Cloth02 = false; + public bool Cloth04 = false; + public bool Impostor = false; + public Int32 ExportOrder = 0; + public Int32 LOD = 0; + public Single LODDistance = 0; + + protected override void SerializeProperties(Utf8JsonWriter writer) + { + base.SerializeProperties(writer); + SerializeProperty(writer, "Rigid", Rigid); + SerializeProperty(writer, "Cloth", Cloth); + SerializeProperty(writer, "MeshProxy", MeshProxy); + SerializeProperty(writer, "ProxyGeometry", ProxyGeometry); + SerializeProperty(writer, "Spring", Spring); + SerializeProperty(writer, "Occluder", Occluder); + SerializeProperty(writer, "ClothPhysics", ClothPhysics); + SerializeProperty(writer, "Cloth01", Cloth01); + SerializeProperty(writer, "Cloth02", Cloth02); + SerializeProperty(writer, "Cloth04", Cloth04); + SerializeProperty(writer, "Impostor", Impostor); + SerializeProperty(writer, "ExportOrder", ExportOrder); + SerializeProperty(writer, "LOD", LOD); + SerializeProperty(writer, "LODDistance", LODDistance); + } + + protected override void DeserializeProperty(string 
jsonPropertyName, ref Utf8JsonReader reader) + { + switch (jsonPropertyName) + { + case "Rigid": Rigid = DeserializePropertyValue(ref reader); break; + case "Cloth": Cloth = DeserializePropertyValue(ref reader); break; + case "MeshProxy": MeshProxy = DeserializePropertyValue(ref reader); break; + case "ProxyGeometry": ProxyGeometry = DeserializePropertyValue(ref reader); break; + case "Spring": Spring = DeserializePropertyValue(ref reader); break; + case "Occluder": Occluder = DeserializePropertyValue(ref reader); break; + case "ClothPhysics": ClothPhysics = DeserializePropertyValue(ref reader); break; + case "Cloth01": Cloth01 = DeserializePropertyValue(ref reader); break; + case "Cloth02": Cloth02 = DeserializePropertyValue(ref reader); break; + case "Cloth04": Cloth04 = DeserializePropertyValue(ref reader); break; + case "Impostor": Impostor = DeserializePropertyValue(ref reader); break; + case "ExportOrder": ExportOrder = DeserializePropertyValue(ref reader); break; + case "LOD": LOD = DeserializePropertyValue(ref reader); break; + case "LODDistance": LODDistance = DeserializePropertyValue(ref reader); break; + default: base.DeserializeProperty(jsonPropertyName, ref reader); break; + } + } + + public void Apply(Mesh mesh, DivinityMeshExtendedData data) + { + if (Cloth) + { + data.UserMeshProperties.MeshFlags |= DivinityModelFlag.Cloth; + data.Cloth = 1; + } + + if (Rigid) + { + data.UserMeshProperties.MeshFlags |= DivinityModelFlag.Rigid; + data.Rigid = 1; + } + + if (MeshProxy) + { + data.UserMeshProperties.MeshFlags |= DivinityModelFlag.MeshProxy; + data.MeshProxy = 1; + } + + if (ProxyGeometry) + { + data.UserMeshProperties.MeshFlags |= DivinityModelFlag.HasProxyGeometry; + } + + if (Spring) + { + data.UserMeshProperties.MeshFlags |= DivinityModelFlag.Spring; + data.Spring = 1; + } + + if (Occluder) + { + data.UserMeshProperties.MeshFlags |= DivinityModelFlag.Occluder; + data.Occluder = 1; + } + + if (Cloth01) data.UserMeshProperties.ClothFlags |= DivinityClothFlag.Cloth01; + if (Cloth02) data.UserMeshProperties.ClothFlags |= DivinityClothFlag.Cloth02; + if (Cloth04) data.UserMeshProperties.ClothFlags |= DivinityClothFlag.Cloth04; + if (ClothPhysics) data.UserMeshProperties.ClothFlags |= DivinityClothFlag.ClothPhysics; + + data.UserMeshProperties.IsImpostor[0] = Impostor ? 
1 : 0; + mesh.ExportOrder = ExportOrder; + + if (LOD <= 0) + { + data.LOD = 0; + data.UserMeshProperties.Lod[0] = -1; + } + else + { + data.LOD = LOD; + data.UserMeshProperties.Lod[0] = LOD; + } + + if (LODDistance <= 0) + { + data.UserMeshProperties.LodDistance[0] = 3.40282347E+38f; + } + else + { + data.UserMeshProperties.LodDistance[0] = LODDistance; + } + } +} + + +public static partial class GLTFExtensions +{ + private static bool Registered; + + public static void RegisterExtensions() + { + if (Registered) return; + + Registered = true; + + ExtensionsFactory.RegisterExtension("EXT_lslib_profile", p => new GLTFSceneExtensions()); + ExtensionsFactory.RegisterExtension("EXT_lslib_profile", p => new GLTFMeshExtensions()); + } +} diff --git a/LSLib/Granny/Model/GLTFImporter.cs b/LSLib/Granny/Model/GLTFImporter.cs new file mode 100644 index 00000000..8e96fef3 --- /dev/null +++ b/LSLib/Granny/Model/GLTFImporter.cs @@ -0,0 +1,501 @@ +using LSLib.Granny.GR2; +using LSLib.LS; +using OpenTK.Mathematics; +using SharpGLTF.Scenes; +using SharpGLTF.Schema2; + +namespace LSLib.Granny.Model; + +public class GLTFImporter +{ + public ExporterOptions Options = new(); + public List ImportedMeshes; + + private ExporterInfo MakeExporterInfo() + { + return new ExporterInfo + { + ExporterName = $"LSLib GR2 Exporter v{Common.LibraryVersion()}", + ExporterMajorRevision = Common.MajorVersion, + ExporterMinorRevision = Common.MinorVersion, + ExporterBuildNumber = 0, + ExporterCustomization = Common.PatchVersion + }; + } + + private DivinityModelFlag DetermineSkeletonModelFlagsFromModels(Root root, Skeleton skeleton, DivinityModelFlag meshFlagOverrides) + { + DivinityModelFlag accumulatedFlags = 0; + foreach (var model in root.Models ?? Enumerable.Empty()) + { + if (model.Skeleton == skeleton && model.MeshBindings != null) + { + foreach (var meshBinding in model.MeshBindings) + { + accumulatedFlags |= meshBinding.Mesh?.ExtendedData?.UserMeshProperties?.MeshFlags ?? meshFlagOverrides; + } + } + } + + return accumulatedFlags; + } + + private void BuildExtendedData(Root root) + { + if (Options.ModelInfoFormat == DivinityModelInfoFormat.None) + { + return; + } + + var modelFlagOverrides = Options.ModelType; + + foreach (var mesh in root.Meshes ?? Enumerable.Empty()) + { + DivinityModelFlag modelFlags = modelFlagOverrides; + if (modelFlags == 0 && mesh.ExtendedData != null) + { + modelFlags = mesh.ExtendedData.UserMeshProperties.MeshFlags; + } + + mesh.ExtendedData ??= DivinityMeshExtendedData.Make(); + mesh.ExtendedData.UserMeshProperties.MeshFlags = modelFlags; + mesh.ExtendedData.UpdateFromModelInfo(mesh, Options.ModelInfoFormat); + } + + foreach (var skeleton in root.Skeletons ?? Enumerable.Empty()) + { + if (Options.ModelInfoFormat == DivinityModelInfoFormat.None || Options.ModelInfoFormat == DivinityModelInfoFormat.LSMv3) + { + foreach (var bone in skeleton.Bones ?? Enumerable.Empty()) + { + bone.ExtendedData = null; + } + } + else + { + var accumulatedFlags = DetermineSkeletonModelFlagsFromModels(root, skeleton, modelFlagOverrides); + + foreach (var bone in skeleton.Bones ?? Enumerable.Empty()) + { + bone.ExtendedData ??= new DivinityBoneExtendedData(); + var userDefinedProperties = UserDefinedPropertiesHelpers.MeshFlagsToUserDefinedProperties(accumulatedFlags); + bone.ExtendedData.UserDefinedProperties = userDefinedProperties; + bone.ExtendedData.IsRigid = (accumulatedFlags.IsRigid()) ? 
1 : 0; + } + } + } + } + + private void FindRootBones(List parents, node node, List rootBones) + { + if (node.type == NodeType.JOINT) + { + var root = new RootBoneInfo + { + Bone = node, + Parents = parents.Select(a => a).ToList() + }; + rootBones.Add(root); + } + else if (node.type == NodeType.NODE) + { + if (node.node1 != null) + { + parents.Add(node); + foreach (var child in node.node1) + { + FindRootBones(parents, child, rootBones); + } + parents.RemoveAt(parents.Count - 1); + } + } + } + + public static technique FindExporterExtraData(extra[] extras) + { + foreach (var extra in extras ?? Enumerable.Empty()) + { + foreach (var technique in extra.technique ?? Enumerable.Empty()) + { + if (technique.profile == "LSTools") + { + return technique; + } + } + } + + return null; + } + + private void MakeExtendedData(ContentTransformer content, GLTFMeshExtensions ext, Mesh loaded) + { + var modelFlagOverrides = Options.ModelType; + + DivinityModelFlag modelFlags = modelFlagOverrides; + if (modelFlags == 0 && loaded.ExtendedData != null) + { + modelFlags = loaded.ExtendedData.UserMeshProperties.MeshFlags; + } + + loaded.ExtendedData = DivinityMeshExtendedData.Make(); + loaded.ExtendedData.UserMeshProperties.MeshFlags = modelFlags; + loaded.ExtendedData.UpdateFromModelInfo(loaded, Options.ModelInfoFormat); + + if (ext != null) + { + ext.Apply(loaded, loaded.ExtendedData); + } + } + + private static GLTFMeshExtensions FindMeshExtension(ModelRoot root, string name) + { + foreach (var mesh in root.LogicalMeshes) + { + if (mesh.Name == name) + { + return mesh.GetExtension(); + } + } + + return null; + } + + private Mesh ImportMesh(ModelRoot modelRoot, ContentTransformer content, string name) + { + var collada = new GLTFMesh(); + collada.ImportFromGLTF(content, Options); + + var m = new Mesh + { + VertexFormat = collada.InternalVertexType, + Name = name, + + PrimaryVertexData = new VertexData + { + Vertices = collada.Vertices + }, + + PrimaryTopology = new TriTopology + { + Indices = collada.Indices, + Groups = [ + new TriTopologyGroup + { + MaterialIndex = 0, + TriFirst = 0, + TriCount = collada.TriangleCount + } + ] + }, + + MaterialBindings = [new MaterialBinding()] + }; + + if (!Options.StripMetadata) + { + var components = m.VertexFormat.ComponentNames().Select(s => new GrannyString(s)).ToList(); + m.PrimaryVertexData.VertexComponentNames = components; + } + else + { + m.PrimaryVertexData.VertexComponentNames = null; + } + + var ext = FindMeshExtension(modelRoot, name); + MakeExtendedData(content, ext, m); + + Utils.Info(String.Format("Imported {0} mesh ({1} tri groups, {2} tris)", + (m.VertexFormat.HasBoneWeights ? 
"skinned" : "rigid"), + m.PrimaryTopology.Groups.Count, + collada.TriangleCount)); + + return m; + } + + private void AddMeshToRoot(Root root, Mesh mesh) + { + root.VertexDatas.Add(mesh.PrimaryVertexData); + root.TriTopologies.Add(mesh.PrimaryTopology); + root.Meshes.Add(mesh); + root.Models[0].MeshBindings.Add(new MeshBinding + { + Mesh = mesh + }); + } + + private void LoadColladaLSLibProfileData(animation anim, TrackGroup loaded) + { + var technique = FindExporterExtraData(anim.extra); + if (technique == null || technique.Any == null) return; + + foreach (var setting in technique.Any) + { + switch (setting.LocalName) + { + case "SkeletonResourceID": + loaded.ExtendedData = new BG3TrackGroupExtendedData + { + SkeletonResourceID = setting.InnerText.Trim() + }; + break; + + default: + Utils.Warn($"Unrecognized LSLib animation profile attribute: {setting.LocalName}"); + break; + } + } + } + + public void ImportAnimations(IEnumerable anims, Root root, Skeleton skeleton) + { + var trackGroup = new TrackGroup + { + Name = (skeleton != null) ? skeleton.Name : "Dummy_Root", + TransformTracks = [], + InitialPlacement = new Transform(), + AccumulationFlags = 2, + LoopTranslation = [0, 0, 0] + }; + + var animation = new Animation + { + Name = "Default", + TimeStep = 0.016667f, // 60 FPS + Oversampling = 1, + DefaultLoopCount = 1, + Flags = 1, + Duration = .0f, + TrackGroups = [trackGroup] + }; + + foreach (var colladaTrack in anims) + { + ImportAnimation(colladaTrack, animation, trackGroup, skeleton); + } + + if (trackGroup.TransformTracks.Count > 0) + { + // Reorder transform tracks in lexicographic order + // This is needed by Granny; otherwise it'll fail to find animation tracks + trackGroup.TransformTracks.Sort((t1, t2) => String.Compare(t1.Name, t2.Name, StringComparison.Ordinal)); + + root.TrackGroups.Add(trackGroup); + root.Animations.Add(animation); + } + } + + public void ImportAnimation(animation colladaAnim, Animation animation, TrackGroup trackGroup, Skeleton skeleton) + { + var childAnims = 0; + foreach (var item in colladaAnim.Items) + { + if (item is animation) + { + ImportAnimation(item as animation, animation, trackGroup, skeleton); + childAnims++; + } + } + + var duration = .0f; + if (childAnims < colladaAnim.Items.Length) + { + ColladaAnimation importAnim = new(); + if (importAnim.ImportFromCollada(colladaAnim, skeleton)) + { + duration = Math.Max(duration, importAnim.Duration); + var track = importAnim.MakeTrack(Options.RemoveTrivialAnimationKeys); + trackGroup.TransformTracks.Add(track); + LoadColladaLSLibProfileData(colladaAnim, trackGroup); + } + } + + animation.Duration = Math.Max(animation.Duration, duration); + } + + private int ImportBone(Skeleton skeleton, int parentIndex, NodeBuilder node, GLTFSceneExtensions ext) + { + var transform = node.LocalTransform; + var tm = transform.Matrix; + var myIndex = skeleton.Bones.Count; + + var bone = new Bone + { + ParentIndex = parentIndex, + Name = node.Name, + LODError = 0, // TODO + OriginalTransform = new Matrix4( + tm.M11, tm.M12, tm.M13, tm.M14, + tm.M21, tm.M22, tm.M23, tm.M24, + tm.M31, tm.M32, tm.M33, tm.M34, + tm.M41, tm.M42, tm.M43, tm.M44 + ), + Transform = Transform.FromGLTF(transform) + }; + + skeleton.Bones.Add(bone); + + bone.UpdateWorldTransforms(skeleton.Bones); + + if (ext != null && ext.BoneOrder.TryGetValue(bone.Name, out var order) && order > 0) + { + bone.ExportIndex = order - 1; + } + + return myIndex; + } + + private void ImportBoneTree(Skeleton skeleton, int parentIndex, NodeBuilder node, 
GLTFSceneExtensions ext) + { + if (ext != null && !ext.BoneOrder.ContainsKey(node.Name)) return; + + var boneIndex = ImportBone(skeleton, parentIndex, node, ext); + + foreach (var child in node.VisualChildren) + { + ImportBoneTree(skeleton, boneIndex, child, ext); + } + } + + private Skeleton ImportSkeleton(string name, NodeBuilder root, GLTFSceneExtensions ext) + { + var skeleton = Skeleton.CreateEmpty(name); + + if (ext != null && ext.BoneOrder.Count > 0) + { + // Try to figure out what the real root bone is + if (!ext.BoneOrder.ContainsKey(root.Name)) + { + // Find real root among 1st-level children + var roots = root.VisualChildren.Where(n => ext.BoneOrder.ContainsKey(n.Name)).ToList(); + if (roots.Count == 1) + { + ImportBoneTree(skeleton, -1, roots[0], ext); + return skeleton; + } + else + { + throw new ParsingException("Unable to determine real root bone of skeleton."); + } + } + } + + ImportBoneTree(skeleton, -1, root, ext); + return skeleton; + } + + public Root Import(string inputPath) + { + GLTFExtensions.RegisterExtensions(); + ModelRoot modelRoot = ModelRoot.Load(inputPath); + + if (modelRoot.LogicalScenes.Count != 1) + { + throw new ParsingException($"GLTF file is expected to have a single scene, got {modelRoot.LogicalScenes.Count}"); + } + + if (modelRoot.LogicalSkins.Count > 1) + { + throw new ParsingException("GLTF files containing multiple skeletons are not supported"); + } + + var sceneExt = modelRoot.DefaultScene.GetExtension(); + if (sceneExt != null) + { + if (sceneExt.MetadataVersion > Common.GLTFMetadataVersion) + { + throw new ParsingException( + $"GLTF file is using a newer LSLib metadata format than this LSLib version supports, please upgrade.\r\n" + + $"File version: {sceneExt.MetadataVersion}, exporter version: {Common.GLTFMetadataVersion}"); + } + } + + var scene = SceneBuilder.CreateFrom(modelRoot).First(); + + var root = Root.CreateEmpty(); + root.ArtToolInfo = ArtToolInfo.CreateDefault(); + root.ArtToolInfo.SetYUp(); + root.ExporterInfo = Options.StripMetadata ? null : MakeExporterInfo(); + root.FromFileName = inputPath; + + ImportedMeshes = []; + + foreach (var geometry in scene.Instances) + { + if (geometry.Content?.HasRenderableContent == true) + { + var content = geometry.Content; + var name = geometry.Name ?? content.Name ?? content.GetGeometryAsset().Name; + var mesh = ImportMesh(modelRoot, content, name); + ImportedMeshes.Add(mesh); + + if (content is SkinnedTransformer skin) + { + var joints = skin.GetJointBindings(); + mesh.BoneBindings = []; + if (joints.Length > 0) + { + foreach (var (joint, inverseBindMatrix) in joints) + { + var binding = new BoneBinding + { + BoneName = joint.Name, + OBBMin = [-0.1f, -0.1f, -0.1f], + OBBMax = [0.1f, 0.1f, 0.1f] + }; + mesh.BoneBindings.Add(binding); + } + } + + if (Options.RecalculateOBBs) + { + // FIXME! 
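+                        // OBB recalculation is left disabled here for now: skeleton instances are
+                        // imported in the same scene loop (see the else branch below), so when a
+                        // skinned mesh is visited first, root.Skeletons may still be empty and
+                        // Single() would throw; the bindings keep the placeholder OBB extents
+                        // assigned above instead. Once a skeleton is guaranteed to be present,
+                        // the same helper the COLLADA path uses could be called: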
VertexHelpers.UpdateOBBs(root.Skeletons.Single(), mesh); + } + } + } + else + { + var skeletonRoot = geometry.Content?.GetArmatureRoot(); + if (skeletonRoot != null && skeletonRoot == ((RigidTransformer)geometry.Content).Transform) + { + var skel = ImportSkeleton(geometry.Name, skeletonRoot, sceneExt); + root.Skeletons.Add(skel); + } + } + } + + var rootModel = new Model + { + Name = "Unnamed", // TODO + InitialPlacement = new Transform(), + MeshBindings = new List() + }; + + if (root.Skeletons.Count > 0) + { + rootModel.Skeleton = root.Skeletons[0]; + rootModel.Name = rootModel.Skeleton.Bones[0].Name; + } + + root.Models.Add(rootModel); + + // Reorder meshes based on their ExportOrder + if (ImportedMeshes.Any(m => m.ExportOrder > 0)) + { + ImportedMeshes.Sort((a, b) => a.ExportOrder - b.ExportOrder); + } + + foreach (var mesh in ImportedMeshes) + { + AddMeshToRoot(root, mesh); + } + + // TODO: make this an option! + if (root.Skeletons.Count > 0) + root.Skeletons[0].UpdateWorldTransforms(); + root.PostLoad(GR2.Header.DefaultTag); + + BuildExtendedData(root); + + return root; + } +} diff --git a/LSLib/Granny/Model/GLTFMesh.cs b/LSLib/Granny/Model/GLTFMesh.cs new file mode 100644 index 00000000..0831abfa --- /dev/null +++ b/LSLib/Granny/Model/GLTFMesh.cs @@ -0,0 +1,241 @@ +using LSLib.Granny.GR2; +using SharpGLTF.Scenes; +using SharpGLTF.Geometry.VertexTypes; +using SharpGLTF.Geometry; +using SharpGLTF.Materials; + +namespace LSLib.Granny.Model; + +public class GLTFMesh +{ + private VertexDescriptor InputVertexType; + private VertexDescriptor OutputVertexType; + private GLTFVertexBuildHelper BuildHelper; + private bool HasNormals = false; + private bool HasTangents = false; + + public int TriangleCount; + public List Vertices; + public List Indices; + private ExporterOptions Options; + + public VertexDescriptor InternalVertexType + { + get { return OutputVertexType; } + } + + private void ImportTriangles(IPrimitiveReader primitives) + { + if (primitives.Points.Count > 0 || + primitives.Lines.Count > 0 || + primitives.VerticesPerPrimitive != 3) + { + throw new ParsingException("Non-triangle"); // FIXME + } + + TriangleCount = primitives.Triangles.Count; + Indices = new List(TriangleCount * 3); + foreach (var (A, B, C) in primitives.Triangles) + { + Indices.Add(A); + Indices.Add(B); + Indices.Add(C); + } + } + + private void ImportVertices(IPrimitiveReader primitives) + { + BuildHelper = new GLTFVertexBuildHelper("", OutputVertexType, Options); + + Vertices = new List(primitives.Vertices.Count); + foreach (var vert in primitives.Vertices) + { + var vertex = BuildHelper.FromGLTF(vert); + Vertices.Add(vertex); + } + + HasNormals = (InputVertexType.NormalType != NormalType.None); + HasTangents = (InputVertexType.TangentType != NormalType.None); + } + + private VertexDescriptor FindVertexFormat(Type type) + { + var desc = new VertexDescriptor + { + PositionType = PositionType.Float3 + }; + + foreach (var field in type.GetFields()) + { + if (field.Name == "Geometry") + { + if (field.FieldType == typeof(VertexPosition)) + { + // No normals available + } + else if (field.FieldType == typeof(VertexPositionNormal)) + { + desc.NormalType = NormalType.Float3; + } + else if (field.FieldType == typeof(VertexPositionNormalTangent)) + { + desc.NormalType = NormalType.Float3; + desc.TangentType = NormalType.Float3; + desc.BinormalType = NormalType.Float3; + } + else + { + throw new InvalidDataException($"Unsupported geometry data format: {field.FieldType}"); + } + } + else if (field.Name == "Material") + { + 
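+                // The SharpGLTF "Material" vertex fragment type encodes how many UV sets and
+                // vertex color sets the source mesh carries; the checks below translate that into
+                // the GR2 vertex descriptor (VertexTextureN => N Float2 UV channels,
+                // VertexColorMTextureN => M Float4 color maps plus N UV channels), e.g.
+                // VertexColor1Texture2 maps to ColorMaps = 1, TextureCoordinates = 2.
+                // Any other fragment type is rejected as unsupported.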
if (field.FieldType == typeof(VertexEmpty)) + { + // No texture data available + } + else if (field.FieldType == typeof(VertexTexture1)) + { + desc.TextureCoordinateType = TextureCoordinateType.Float2; + desc.TextureCoordinates = 1; + } + else if (field.FieldType == typeof(VertexTexture2)) + { + desc.TextureCoordinateType = TextureCoordinateType.Float2; + desc.TextureCoordinates = 2; + } + else if (field.FieldType == typeof(VertexTexture3)) + { + desc.TextureCoordinateType = TextureCoordinateType.Float2; + desc.TextureCoordinates = 3; + } + else if (field.FieldType == typeof(VertexTexture4)) + { + desc.TextureCoordinateType = TextureCoordinateType.Float2; + desc.TextureCoordinates = 4; + } + else if (field.FieldType == typeof(VertexColor1Texture1)) + { + desc.TextureCoordinateType = TextureCoordinateType.Float2; + desc.TextureCoordinates = 1; + desc.ColorMapType = ColorMapType.Float4; + desc.ColorMaps = 1; + } + else if (field.FieldType == typeof(VertexColor1Texture2)) + { + desc.TextureCoordinateType = TextureCoordinateType.Float2; + desc.TextureCoordinates = 2; + desc.ColorMapType = ColorMapType.Float4; + desc.ColorMaps = 1; + } + else if (field.FieldType == typeof(VertexColor2Texture1)) + { + desc.TextureCoordinateType = TextureCoordinateType.Float2; + desc.TextureCoordinates = 1; + desc.ColorMapType = ColorMapType.Float4; + desc.ColorMaps = 2; + } + else if (field.FieldType == typeof(VertexColor2Texture2)) + { + desc.TextureCoordinateType = TextureCoordinateType.Float2; + desc.TextureCoordinates = 2; + desc.ColorMapType = ColorMapType.Float4; + desc.ColorMaps = 2; + } + else + { + throw new InvalidDataException($"Unsupported material data format: {field.FieldType}"); + } + } + else if (field.Name == "Skinning") + { + if (field.FieldType == typeof(VertexEmpty)) + { + // No skinning data available + } + else if (field.FieldType == typeof(VertexJoints4)) + { + desc.HasBoneWeights = true; + } + else + { + throw new InvalidDataException($"Unsupported skinning data format: {field.FieldType}"); + } + } + } + + return desc; + } + + public void ImportFromGLTF(ContentTransformer content, ExporterOptions options) + { + var geometry = content.GetGeometryAsset(); + var primitives = geometry.Primitives.First(); + + Options = options; + + var vertexFormat = FindVertexFormat(primitives.VertexType); + InputVertexType = vertexFormat; + OutputVertexType = new VertexDescriptor + { + HasBoneWeights = InputVertexType.HasBoneWeights, + NumBoneInfluences = InputVertexType.NumBoneInfluences, + PositionType = InputVertexType.PositionType, + NormalType = InputVertexType.NormalType, + TangentType = InputVertexType.TangentType, + BinormalType = InputVertexType.BinormalType, + ColorMapType = InputVertexType.ColorMapType, + ColorMaps = InputVertexType.ColorMaps, + TextureCoordinateType = InputVertexType.TextureCoordinateType, + TextureCoordinates = InputVertexType.TextureCoordinates + }; + + ImportTriangles(primitives); + ImportVertices(primitives); + + if (!HasNormals || Options.RecalculateNormals) + { + HasNormals = true; + OutputVertexType.NormalType = NormalType.Float3; + VertexHelpers.ComputeNormals(Vertices, Indices); + } + + if ((InputVertexType.TangentType == NormalType.None + || InputVertexType.BinormalType == NormalType.None) + && ((!HasTangents && InputVertexType.TextureCoordinates > 0) || Options.RecalculateTangents)) + { + OutputVertexType.TangentType = NormalType.Float3; + OutputVertexType.BinormalType = NormalType.Float3; + HasTangents = true; + VertexHelpers.ComputeTangents(Vertices, Indices, 
Options.IgnoreUVNaN); + } + + if (!HasNormals || !HasTangents) + { + throw new InvalidDataException($"Import needs geometry with normal and tangent data"); + } + + // Use optimized tangent, texture map and color map format when exporting for D:OS 2 + if ((Options.ModelInfoFormat == DivinityModelInfoFormat.LSMv0 + || Options.ModelInfoFormat == DivinityModelInfoFormat.LSMv1 + || Options.ModelInfoFormat == DivinityModelInfoFormat.LSMv3) + && Options.EnableQTangents + && HasNormals + && HasTangents) + { + OutputVertexType.NormalType = NormalType.QTangent; + OutputVertexType.TangentType = NormalType.QTangent; + OutputVertexType.BinormalType = NormalType.QTangent; + + if (OutputVertexType.TextureCoordinateType == TextureCoordinateType.Float2) + { + OutputVertexType.TextureCoordinateType = TextureCoordinateType.Half2; + } + + if (OutputVertexType.ColorMapType == ColorMapType.Float4) + { + OutputVertexType.ColorMapType = ColorMapType.Byte4; + } + } + } +} diff --git a/LSLib/Granny/Model/GLTFVertex.cs b/LSLib/Granny/Model/GLTFVertex.cs new file mode 100644 index 00000000..8954fa1a --- /dev/null +++ b/LSLib/Granny/Model/GLTFVertex.cs @@ -0,0 +1,583 @@ +using SharpGLTF.Geometry; +using SharpGLTF.Materials; +using SharpGLTF.Geometry.VertexTypes; +using System.Numerics; +using TKVec2 = OpenTK.Mathematics.Vector2; +using TKVec3 = OpenTK.Mathematics.Vector3; +using TKVec4 = OpenTK.Mathematics.Vector4; +using System.Reflection; + +namespace LSLib.Granny.Model; + +public interface GLTFVertexBuilder +{ + public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert); + public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert); +} + +public class GLTFVertexNoneBuilder : GLTFVertexBuilder +{ + public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + } + + public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + } +} + +public class GLTFVertexGeometryBuilderPosition : GLTFVertexBuilder +{ + public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + var pos = gr2Vert.Position; + var v = new VertexPosition(pos.X, pos.Y, pos.Z); + gltfVert.SetGeometry(v); + } + + public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + var geom = (VertexPositionNormal)gltfVert.GetGeometry(); + var pos = geom.Position; + gr2Vert.Position = new TKVec3(pos.X, pos.Y, pos.Z); + } +} + +public class GLTFVertexGeometryBuilderPositionNormal : GLTFVertexBuilder +{ + public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + var pos = gr2Vert.Position; + var n = gr2Vert.Normal; + var v = new VertexPositionNormal( + pos.X, pos.Y, pos.Z, + n.X, n.Y, n.Z + ); + gltfVert.SetGeometry(v); + } + + public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + var geom = (VertexPositionNormal)gltfVert.GetGeometry(); + var pos = geom.Position; + var n = geom.Normal; + gr2Vert.Position = new TKVec3(pos.X, pos.Y, pos.Z); + gr2Vert.Normal = new TKVec3(n.X, n.Y, n.Z); + } +} + +public class GLTFVertexGeometryBuilderPositionNormalTangent : GLTFVertexBuilder +{ + public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + var pos = gr2Vert.Position; + var n = gr2Vert.Normal; + var t = gr2Vert.Tangent; + var b = gr2Vert.Binormal; + var w = (TKVec3.Dot(TKVec3.Cross(n, t), b) < 0.0F) ? 
-1.0F : 1.0F; + + var v = new VertexPositionNormalTangent( + new Vector3(pos.X, pos.Y, pos.Z), + new Vector3(n.X, n.Y, n.Z), + new Vector4(t.X, t.Y, t.Z, w) + ); + gltfVert.SetGeometry(v); + } + + public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + var geom = (VertexPositionNormalTangent)gltfVert.GetGeometry(); + var pos = geom.Position; + var n = geom.Normal; + var t = geom.Tangent; + gr2Vert.Position = new TKVec3(pos.X, pos.Y, pos.Z); + gr2Vert.Normal = new TKVec3(n.X, n.Y, n.Z); + gr2Vert.Tangent = new TKVec3(t.X, t.Y, t.Z); + gr2Vert.Binormal = (TKVec3.Cross(gr2Vert.Normal, gr2Vert.Tangent) * t.W).Normalized(); + } +} + +public class GLTFVertexBuilderNOTIMPL : GLTFVertexBuilder +{ + public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + throw new Exception("Not implemented yet"); + } + + public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + throw new Exception("Not implemented yet"); + } +} + +public class GLTFVertexMaterialBuilderTexture1 : GLTFVertexBuilder +{ + public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + var uv0 = gr2Vert.TextureCoordinates0; + var v = new VertexTexture1( + new Vector2(uv0.X, uv0.Y) + ); + gltfVert.SetMaterial(v); + } + + public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + var geom = (VertexTexture1)gltfVert.GetMaterial(); + var uv0 = geom.TexCoord; + gr2Vert.TextureCoordinates0 = new TKVec2(uv0.X, uv0.Y); + } +} + +public class GLTFVertexMaterialBuilderTexture2 : GLTFVertexBuilder +{ + public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + var uv0 = gr2Vert.TextureCoordinates0; + var uv1 = gr2Vert.TextureCoordinates1; + var v = new VertexTexture2( + new Vector2(uv0.X, uv0.Y), + new Vector2(uv1.X, uv1.Y) + ); + gltfVert.SetMaterial(v); + } + + public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + var geom = (VertexTexture2)gltfVert.GetMaterial(); + var uv0 = geom.TexCoord0; + var uv1 = geom.TexCoord1; + gr2Vert.TextureCoordinates0 = new TKVec2(uv0.X, uv0.Y); + gr2Vert.TextureCoordinates1 = new TKVec2(uv1.X, uv1.Y); + } +} + +public class GLTFVertexMaterialBuilderTexture3 : GLTFVertexBuilder +{ + public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + var uv0 = gr2Vert.TextureCoordinates0; + var uv1 = gr2Vert.TextureCoordinates1; + var uv2 = gr2Vert.TextureCoordinates2; + var v = new VertexTexture3( + new Vector2(uv0.X, uv0.Y), + new Vector2(uv1.X, uv1.Y), + new Vector2(uv2.X, uv2.Y) + ); + gltfVert.SetMaterial(v); + } + + public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + var geom = (VertexTexture3)gltfVert.GetMaterial(); + var uv0 = geom.TexCoord0; + var uv1 = geom.TexCoord1; + var uv2 = geom.TexCoord2; + gr2Vert.TextureCoordinates0 = new TKVec2(uv0.X, uv0.Y); + gr2Vert.TextureCoordinates1 = new TKVec2(uv1.X, uv1.Y); + gr2Vert.TextureCoordinates2 = new TKVec2(uv2.X, uv2.Y); + } +} + +public class GLTFVertexMaterialBuilderTexture4 : GLTFVertexBuilder +{ + public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + var uv0 = gr2Vert.TextureCoordinates0; + var uv1 = gr2Vert.TextureCoordinates1; + var uv2 = gr2Vert.TextureCoordinates2; + var uv3 = gr2Vert.TextureCoordinates3; + var v = new VertexTexture4( + new Vector2(uv0.X, uv0.Y), + new Vector2(uv1.X, uv1.Y), + new Vector2(uv2.X, uv2.Y), + new Vector2(uv3.X, uv3.Y) + ); + gltfVert.SetMaterial(v); + } + + public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + var geom = (VertexTexture4)gltfVert.GetMaterial(); + var uv0 = geom.TexCoord0; + var uv1 = geom.TexCoord1; + var 
uv2 = geom.TexCoord2; + var uv3 = geom.TexCoord3; + gr2Vert.TextureCoordinates0 = new TKVec2(uv0.X, uv0.Y); + gr2Vert.TextureCoordinates1 = new TKVec2(uv1.X, uv1.Y); + gr2Vert.TextureCoordinates2 = new TKVec2(uv2.X, uv2.Y); + gr2Vert.TextureCoordinates3 = new TKVec2(uv3.X, uv3.Y); + } +} + +public class GLTFVertexMaterialBuilderColor1Texture1 : GLTFVertexBuilder +{ + public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + var c0 = gr2Vert.Color0; + var uv0 = gr2Vert.TextureCoordinates0; + var v = new VertexColor1Texture1( + new Vector4(c0.X, c0.Y, c0.Z, c0.W), + new Vector2(uv0.X, uv0.Y) + ); + gltfVert.SetMaterial(v); + } + + public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + var geom = (VertexColor1Texture1)gltfVert.GetMaterial(); + var uv0 = geom.TexCoord; + var c0 = geom.Color; + gr2Vert.TextureCoordinates0 = new TKVec2(uv0.X, uv0.Y); + gr2Vert.Color0 = new TKVec4(c0.X, c0.Y, c0.Z, c0.W); + } +} + +public class GLTFVertexMaterialBuilderColor1Texture2 : GLTFVertexBuilder +{ + public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + var c0 = gr2Vert.Color0; + var uv0 = gr2Vert.TextureCoordinates0; + var uv1 = gr2Vert.TextureCoordinates1; + var v = new VertexColor1Texture2( + new Vector4(c0.X, c0.Y, c0.Z, c0.W), + new Vector2(uv0.X, uv0.Y), + new Vector2(uv1.X, uv1.Y) + ); + gltfVert.SetMaterial(v); + } + + public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + var geom = (VertexColor1Texture2)gltfVert.GetMaterial(); + var uv0 = geom.TexCoord0; + var uv1 = geom.TexCoord1; + var c0 = geom.Color; + gr2Vert.TextureCoordinates0 = new TKVec2(uv0.X, uv0.Y); + gr2Vert.TextureCoordinates1 = new TKVec2(uv1.X, uv1.Y); + gr2Vert.Color0 = new TKVec4(c0.X, c0.Y, c0.Z, c0.W); + } +} + +public class GLTFVertexMaterialBuilderColor2Texture1 : GLTFVertexBuilder +{ + public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + var c0 = gr2Vert.Color0; + var c1 = gr2Vert.Color1; + var uv0 = gr2Vert.TextureCoordinates0; + var v = new VertexColor2Texture1( + new Vector4(c0.X, c0.Y, c0.Z, c0.W), + new Vector4(c1.X, c1.Y, c1.Z, c1.W), + new Vector2(uv0.X, uv0.Y) + ); + gltfVert.SetMaterial(v); + } + + public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + var geom = (VertexColor2Texture1)gltfVert.GetMaterial(); + var uv0 = geom.TexCoord; + var c0 = geom.Color0; + var c1 = geom.Color1; + gr2Vert.TextureCoordinates0 = new TKVec2(uv0.X, uv0.Y); + gr2Vert.Color0 = new TKVec4(c0.X, c0.Y, c0.Z, c0.W); + gr2Vert.Color1 = new TKVec4(c1.X, c1.Y, c1.Z, c1.W); + } +} + +public class GLTFVertexMaterialBuilderColor2Texture2 : GLTFVertexBuilder +{ + public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + var c0 = gr2Vert.Color0; + var c1 = gr2Vert.Color1; + var uv0 = gr2Vert.TextureCoordinates0; + var uv1 = gr2Vert.TextureCoordinates1; + var v = new VertexColor2Texture2( + new Vector4(c0.X, c0.Y, c0.Z, c0.W), + new Vector4(c1.X, c1.Y, c1.Z, c1.W), + new Vector2(uv0.X, uv0.Y), + new Vector2(uv1.X, uv1.Y) + ); + gltfVert.SetMaterial(v); + } + + public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + var geom = (VertexColor2Texture2)gltfVert.GetMaterial(); + var uv0 = geom.TexCoord0; + var uv1 = geom.TexCoord1; + var c0 = geom.Color0; + var c1 = geom.Color1; + gr2Vert.TextureCoordinates0 = new TKVec2(uv0.X, uv0.Y); + gr2Vert.TextureCoordinates1 = new TKVec2(uv1.X, uv1.Y); + gr2Vert.Color0 = new TKVec4(c0.X, c0.Y, c0.Z, c0.W); + gr2Vert.Color1 = new TKVec4(c1.X, c1.Y, c1.Z, c1.W); + } +} + +public class GLTFVertexSkinningBuilder : 
GLTFVertexBuilder +{ + public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + var v = new VertexJoints4( + (gr2Vert.BoneIndices.A, gr2Vert.BoneWeights.A / 255.0f), + (gr2Vert.BoneIndices.B, gr2Vert.BoneWeights.B / 255.0f), + (gr2Vert.BoneIndices.C, gr2Vert.BoneWeights.C / 255.0f), + (gr2Vert.BoneIndices.D, gr2Vert.BoneWeights.D / 255.0f) + ); + gltfVert.SetSkinning(v); + } + + public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + { + var skin = (VertexJoints4)gltfVert.GetSkinning(); + Span weights = stackalloc byte[4]; + VertexHelpers.CompressBoneWeights([skin.Weights.X, skin.Weights.Y, skin.Weights.Z, skin.Weights.W], weights); + + gr2Vert.BoneIndices.A = (byte)skin.Joints[0]; + gr2Vert.BoneIndices.B = (byte)skin.Joints[1]; + gr2Vert.BoneIndices.C = (byte)skin.Joints[2]; + gr2Vert.BoneIndices.D = (byte)skin.Joints[3]; + + gr2Vert.BoneWeights.A = weights[0]; + gr2Vert.BoneWeights.B = weights[1]; + gr2Vert.BoneWeights.C = weights[2]; + gr2Vert.BoneWeights.D = weights[3]; + + gr2Vert.FinalizeInfluences(); + } +} + +public interface IGLTFMeshBuildWrapper +{ + public IMeshBuilder Build(Mesh m); +} + +public class GLTFMeshBuildWrapper : IGLTFMeshBuildWrapper + where TvG : struct, IVertexGeometry + where TvM : struct, IVertexMaterial + where TvS : struct, IVertexSkinning +{ + private GLTFVertexBuildHelper BuildHelper; + private MeshBuilder Builder; + private PrimitiveBuilder Primitives; + private List Vertices; + private List<(int A, int B, int C)> TriIndices; + + public GLTFMeshBuildWrapper(GLTFVertexBuildHelper helper, string exportedId) + { + BuildHelper = helper; + Builder = new MeshBuilder(exportedId); + var material = new MaterialBuilder("Dummy"); + Primitives = Builder.UsePrimitive(material); + + var triInds = Primitives.GetType().GetField("_TriIndices", BindingFlags.Instance | BindingFlags.NonPublic); + TriIndices = (List<(int A, int B, int C)>)triInds.GetValue(Primitives); + } + + private void BuildVertices(Mesh mesh) + { + Vertices = new List(mesh.PrimaryVertexData.Vertices.Count); + + foreach (var v in mesh.PrimaryVertexData.Vertices) + { + var vert = Primitives.VertexFactory(); + BuildHelper.ToGLTF(vert, v); + Vertices.Add(vert); + } + } + + public IMeshBuilder Build(Mesh m) + { + BuildVertices(m); + + var useVert = Primitives.GetType().BaseType.GetMethod("UseVertex", BindingFlags.Instance | BindingFlags.NonPublic); + // var fun = useVert.CreateDelegate, Int32>>(Primitives); + foreach (var vertex in Vertices) + { + //fun((VertexBuilder)vertex); + useVert.Invoke(Primitives, [vertex]); + } + + var inds = m.PrimaryTopology.Indices; + for (var i = 0; i < inds.Count; i += 3) + { + // Primitives.AddTriangle(Vertices[inds[i]], Vertices[inds[i + 1]], Vertices[inds[i + 2]]); + TriIndices.Add((inds[i], inds[i+1], inds[i+2])); + } + + return Builder; + } +} + +public class GLTFVertexBuildHelper +{ + private readonly string ExportedId; + private readonly VertexDescriptor VertexFormat; + private readonly ExporterOptions Options; + + private GLTFVertexBuilder GeometryBuilder; + private GLTFVertexBuilder MaterialBuilder; + private GLTFVertexBuilder SkinningBuilder; + + private Type GeometryDataType; + private Type MaterialDataType; + private Type SkinningDataType; + + private int UVs; + private int ColorMaps; + private bool HasNormals; + private bool HasTangents; + + public GLTFVertexBuildHelper(string exportedId, VertexDescriptor vertexFormat, ExporterOptions options) + { + ExportedId = exportedId; + VertexFormat = vertexFormat; + Options = options; + + HasNormals = 
Options.ExportNormals && VertexFormat.NormalType != NormalType.None; + HasTangents = Options.ExportTangents && VertexFormat.TangentType != NormalType.None; + UVs = Options.ExportUVs ? VertexFormat.TextureCoordinates : 0; + ColorMaps = Options.ExportColors ? VertexFormat.ColorMaps : 0; + + SelectGeometryBuilder(); + SelectMaterialBuilder(); + SelectSkinningBuilder(); + } + + private void SelectGeometryBuilder() + { + if (HasNormals) + { + if (HasTangents) + { + GeometryDataType = typeof(VertexPositionNormalTangent); + GeometryBuilder = new GLTFVertexGeometryBuilderPositionNormalTangent(); + } + else + { + GeometryDataType = typeof(VertexPositionNormal); + GeometryBuilder = new GLTFVertexGeometryBuilderPositionNormal(); + } + } + else + { + GeometryDataType = typeof(VertexPosition); + GeometryBuilder = new GLTFVertexGeometryBuilderPosition(); + } + } + + private void SelectMaterialBuilder() + { + if (UVs == 0 && ColorMaps == 0) + { + MaterialDataType = typeof(VertexEmpty); + MaterialBuilder = new GLTFVertexNoneBuilder(); + } + else if (UVs == 1 && ColorMaps == 0) + { + MaterialDataType = typeof(VertexTexture1); + MaterialBuilder = new GLTFVertexMaterialBuilderTexture1(); + } + else if (UVs == 2 && ColorMaps == 0) + { + MaterialDataType = typeof(VertexTexture2); + MaterialBuilder = new GLTFVertexMaterialBuilderTexture2(); + } + else if (UVs == 3 && ColorMaps == 0) + { + MaterialDataType = typeof(VertexTexture3); + MaterialBuilder = new GLTFVertexMaterialBuilderTexture3(); + } + else if (UVs == 4 && ColorMaps == 0) + { + MaterialDataType = typeof(VertexTexture4); + MaterialBuilder = new GLTFVertexMaterialBuilderTexture4(); + } + else if (UVs == 1 && ColorMaps == 1) + { + MaterialDataType = typeof(VertexColor1Texture1); + MaterialBuilder = new GLTFVertexMaterialBuilderColor1Texture1(); + } + else if (UVs == 2 && ColorMaps == 1) + { + MaterialDataType = typeof(VertexColor1Texture2); + MaterialBuilder = new GLTFVertexMaterialBuilderColor1Texture2(); + } + else if (UVs == 1 && ColorMaps == 2) + { + MaterialDataType = typeof(VertexColor2Texture1); + MaterialBuilder = new GLTFVertexMaterialBuilderColor2Texture1(); + } + else if (UVs == 2 && ColorMaps == 2) + { + MaterialDataType = typeof(VertexColor2Texture2); + MaterialBuilder = new GLTFVertexMaterialBuilderColor2Texture2(); + } + else + { + throw new InvalidDataException($"Unsupported vertex format for glTF export: UVs {UVs}, Color maps {ColorMaps}"); + } + } + + private void SelectSkinningBuilder() + { + if (VertexFormat.HasBoneWeights) + { + SkinningDataType = typeof(VertexJoints4); + SkinningBuilder = new GLTFVertexSkinningBuilder(); + } + else + { + SkinningDataType = typeof(VertexEmpty); + SkinningBuilder = new GLTFVertexNoneBuilder(); + } + } + + public IGLTFMeshBuildWrapper InternalCreateBuilder() + where TvG : struct, IVertexGeometry + where TvM : struct, IVertexMaterial + where TvS : struct, IVertexSkinning + { + return new GLTFMeshBuildWrapper(this, ExportedId); + } + + public IGLTFMeshBuildWrapper CreateBuilder() + { + return (IGLTFMeshBuildWrapper)GetType() + .GetMethod("InternalCreateBuilder") + .MakeGenericMethod([GeometryDataType, MaterialDataType, SkinningDataType]) + .Invoke(this, []); + } + + public void ToGLTF(IVertexBuilder gltf, Vertex gr) + { + GeometryBuilder.ToGLTF(gltf, gr); + MaterialBuilder.ToGLTF(gltf, gr); + SkinningBuilder.ToGLTF(gltf, gr); + } + + public Vertex FromGLTF(IVertexBuilder gltf) + { + var gr = VertexFormat.CreateInstance(); + GeometryBuilder.FromGLTF(gltf, gr); + MaterialBuilder.FromGLTF(gltf, gr); + 
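// NOTE (editorial annotation, not part of the applied change): the geometry and material
// fragments were copied into the GR2 vertex above; the call below copies the skinning
// fragment (joint indices + weights) when the descriptor has bone weights, and is routed
// to the no-op builder otherwise. A hedged usage sketch of this helper as wired up by
// GLTFMeshExporter in this patch ("gltfVert" is an illustrative name only):
//
//   var helper = new GLTFVertexBuildHelper(exportedId, mesh.VertexFormat, options);
//   Vertex gr2Vert = helper.FromGLTF(gltfVert);   // one Granny vertex per glTF vertex
//   helper.ToGLTF(gltfVert, gr2Vert);             // inverse direction, used on export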
SkinningBuilder.FromGLTF(gltf, gr); + return gr; + } +} + +public class GLTFMeshExporter(Mesh mesh, string exportedId, ExporterOptions options) +{ + private Mesh ExportedMesh = mesh; + private GLTFVertexBuildHelper BuildHelper = new(exportedId, mesh.VertexFormat, options); + + public IMeshBuilder Export() + { + var builder = BuildHelper.CreateBuilder(); + return builder.Build(ExportedMesh); + } +} diff --git a/LSLib/Granny/Model/Metadata.cs b/LSLib/Granny/Model/Metadata.cs index c30f5efe..2887911c 100644 --- a/LSLib/Granny/Model/Metadata.cs +++ b/LSLib/Granny/Model/Metadata.cs @@ -21,18 +21,31 @@ public class ArtToolInfo [Serialization(Type = MemberType.VariantReference, MinVersion = 0x80000011)] public object ExtendedData; + public static ArtToolInfo CreateDefault() + { + return new ArtToolInfo + { + FromArtToolName = "", + ArtToolMajorRevision = 1, + ArtToolMinorRevision = 0, + ArtToolPointerSize = 64, + UnitsPerMeter = 1, + Origin = [0, 0, 0] + }; + } + public void SetYUp() { - RightVector = new float[] { 1, 0, 0 }; - UpVector = new float[] { 0, 1, 0 }; - BackVector = new float[] { 0, 0, -1 }; + RightVector = [1, 0, 0]; + UpVector = [0, 1, 0]; + BackVector = [0, 0, -1]; } public void SetZUp() { - RightVector = new float[] { 1, 0, 0 }; - UpVector = new float[] { 0, 0, 1 }; - BackVector = new float[] { 0, 1, 0 }; + RightVector = [1, 0, 0]; + UpVector = [0, 0, 1]; + BackVector = [0, 1, 0]; } } diff --git a/LSLib/Granny/Model/Root.cs b/LSLib/Granny/Model/Root.cs index 2ad246e2..0739dbea 100644 --- a/LSLib/Granny/Model/Root.cs +++ b/LSLib/Granny/Model/Root.cs @@ -34,6 +34,19 @@ public class Root [Serialization(Kind = SerializationKind.None)] public UInt32 GR2Tag; + public static Root CreateEmpty() + { + return new Root + { + Skeletons = [], + VertexDatas = [], + TriTopologies = [], + Meshes = [], + Models = [], + TrackGroups = [], + Animations = [] + }; + } public void TransformVertices(Matrix4 transformation) { diff --git a/LSLib/Granny/Model/Skeleton.cs b/LSLib/Granny/Model/Skeleton.cs index b26153fc..6abb5a7f 100644 --- a/LSLib/Granny/Model/Skeleton.cs +++ b/LSLib/Granny/Model/Skeleton.cs @@ -185,6 +185,18 @@ public class Skeleton [Serialization(Kind = SerializationKind.None)] public bool IsDummy = false; + public static Skeleton CreateEmpty(string name) + { + return new Skeleton + { + Bones = [], + LODType = 1, + Name = name, + BonesBySID = [], + BonesByID = [] + }; + } + public static Skeleton FromCollada(node root) { var skeleton = new Skeleton diff --git a/LSLib/Granny/Model/VertexHelpers.cs b/LSLib/Granny/Model/VertexHelpers.cs new file mode 100644 index 00000000..e88ad698 --- /dev/null +++ b/LSLib/Granny/Model/VertexHelpers.cs @@ -0,0 +1,249 @@ +using OpenTK.Mathematics; + +namespace LSLib.Granny.Model; + +public class VertexHelpers +{ + public static void CompressBoneWeights(Span weights, Span compressedWeights) + { + Span errors = stackalloc float[weights.Length]; + + var influenceCount = weights.Length; + float influenceSum = 0.0f; + foreach (var w in weights) + { + influenceSum += w; + } + + ushort totalEncoded = 0; + for (var i = 0; i < influenceCount; i++) + { + var weight = weights[i] / influenceSum * 255.0f; + var encodedWeight = (byte)Math.Round(weight); + totalEncoded += encodedWeight; + errors[i] = encodedWeight - weight; + compressedWeights[i] = encodedWeight; + } + + while (totalEncoded != 0 && totalEncoded != 255) + { + int errorIndex = 0; + if (totalEncoded < 255) + { + for (var i = 1; i < influenceCount; i++) + { + if (errors[i] < errors[errorIndex]) + { + errorIndex 
= i; + } + } + + compressedWeights[errorIndex]++; + errors[errorIndex]++; + totalEncoded++; + } + else + { + for (var i = 1; i < influenceCount; i++) + { + if (errors[i] > errors[errorIndex]) + { + errorIndex = i; + } + } + + compressedWeights[errorIndex]--; + errors[errorIndex]--; + totalEncoded--; + } + } + } + + public static void ComputeTangents(IList vertices, IList indices, bool ignoreNaNUV) + { + // Check if the vertex format has at least one UV set + if (vertices.Count > 0) + { + var v = vertices[0]; + if (v.Format.TextureCoordinates == 0) + { + throw new InvalidOperationException("At least one UV set is required to recompute tangents"); + } + } + + foreach (var v in vertices) + { + v.Tangent = Vector3.Zero; + v.Binormal = Vector3.Zero; + } + + for (int i = 0; i < indices.Count/3; i++) + { + var i1 = indices[i * 3 + 0]; + var i2 = indices[i * 3 + 1]; + var i3 = indices[i * 3 + 2]; + + var vert1 = vertices[i1]; + var vert2 = vertices[i2]; + var vert3 = vertices[i3]; + + var v1 = vert1.Position; + var v2 = vert2.Position; + var v3 = vert3.Position; + + var w1 = vert1.TextureCoordinates0; + var w2 = vert2.TextureCoordinates0; + var w3 = vert3.TextureCoordinates0; + + float x1 = v2.X - v1.X; + float x2 = v3.X - v1.X; + float y1 = v2.Y - v1.Y; + float y2 = v3.Y - v1.Y; + float z1 = v2.Z - v1.Z; + float z2 = v3.Z - v1.Z; + + float s1 = w2.X - w1.X; + float s2 = w3.X - w1.X; + float t1 = w2.Y - w1.Y; + float t2 = w3.Y - w1.Y; + + float r = 1.0F / (s1 * t2 - s2 * t1); + + if ((Single.IsNaN(r) || Single.IsInfinity(r)) && !ignoreNaNUV) + { + throw new Exception($"Couldn't calculate tangents; the mesh most likely contains non-manifold geometry.{Environment.NewLine}" + + $"UV1: {w1}{Environment.NewLine}UV2: {w2}{Environment.NewLine}UV3: {w3}"); + } + + var sdir = new Vector3( + (t2 * x1 - t1 * x2) * r, + (t2 * y1 - t1 * y2) * r, + (t2 * z1 - t1 * z2) * r + ); + var tdir = new Vector3( + (s1 * x2 - s2 * x1) * r, + (s1 * y2 - s2 * y1) * r, + (s1 * z2 - s2 * z1) * r + ); + + vert1.Tangent += sdir; + vert2.Tangent += sdir; + vert3.Tangent += sdir; + + vert1.Binormal += tdir; + vert2.Binormal += tdir; + vert3.Binormal += tdir; + } + + foreach (var v in vertices) + { + var n = v.Normal; + var t = v.Tangent; + var b = v.Binormal; + + // Gram-Schmidt orthogonalize + var tangent = (t - n * Vector3.Dot(n, t)).Normalized(); + + // Calculate handedness + var w = (Vector3.Dot(Vector3.Cross(n, t), b) < 0.0F) ? 
1.0F : -1.0F; + var binormal = (Vector3.Cross(n, t) * w).Normalized(); + + v.Tangent = tangent; + v.Binormal = binormal; + } + } + + public static Vector3 TriangleNormalFromVertex(IList vertices, IList indices, int vertexIndex) + { + // This assumes that A->B->C is a counter-clockwise ordering + var a = vertices[indices[vertexIndex]].Position; + var b = vertices[indices[(vertexIndex + 1) % 3]].Position; + var c = vertices[indices[(vertexIndex + 2) % 3]].Position; + + var N = Vector3.Cross(b - a, c - a); + float sin_alpha = N.Length / ((b - a).Length * (c - a).Length); + return N.Normalized() * (float)Math.Asin(sin_alpha); + } + + public static void ComputeNormals(IList vertices, IList indices) + { + for (var vertexIdx = 0; vertexIdx < vertices.Count; vertexIdx++) + { + Vector3 N = new(0, 0, 0); + var numIndices = indices.Count; + for (int triVertIdx = 0; triVertIdx < numIndices; triVertIdx++) + { + if (indices[triVertIdx] == vertexIdx) + { + int baseIdx = ((int)(triVertIdx / 3)) * 3; + N += TriangleNormalFromVertex(vertices, indices, baseIdx); + } + } + + N.Normalize(); + vertices[vertexIdx].Normal = N; + } + } + + struct OBB + { + public Vector3 Min, Max; + public int NumVerts; + } + + public static void UpdateOBBs(Skeleton skeleton, Mesh mesh) + { + if (mesh.BoneBindings == null || mesh.BoneBindings.Count == 0) return; + + var obbs = new List(mesh.BoneBindings.Count); + for (var i = 0; i < mesh.BoneBindings.Count; i++) + { + obbs.Add(new OBB + { + Min = new Vector3(1000.0f, 1000.0f, 1000.0f), + Max = new Vector3(-1000.0f, -1000.0f, -1000.0f), + NumVerts = 0 + }); + } + + foreach (var vert in mesh.PrimaryVertexData.Vertices) + { + for (var i = 0; i < Vertex.MaxBoneInfluences; i++) + { + if (vert.BoneWeights[i] > 0) + { + var bi = vert.BoneIndices[i]; + var obb = obbs[bi]; + obb.NumVerts++; + + var bone = skeleton.GetBoneByName(mesh.BoneBindings[bi].BoneName); + var invWorldTransform = ColladaHelpers.FloatsToMatrix(bone.InverseWorldTransform); + var transformed = Vector3.TransformPosition(vert.Position, invWorldTransform); + + obb.Min.X = Math.Min(obb.Min.X, transformed.X); + obb.Min.Y = Math.Min(obb.Min.Y, transformed.Y); + obb.Min.Z = Math.Min(obb.Min.Z, transformed.Z); + + obb.Max.X = Math.Max(obb.Max.X, transformed.X); + obb.Max.Y = Math.Max(obb.Max.Y, transformed.Y); + obb.Max.Z = Math.Max(obb.Max.Z, transformed.Z); + } + } + } + + for (var i = 0; i < obbs.Count; i++) + { + var obb = obbs[i]; + if (obb.NumVerts > 0) + { + mesh.BoneBindings[i].OBBMin = [obb.Min.X, obb.Min.Y, obb.Min.Z]; + mesh.BoneBindings[i].OBBMax = [obb.Max.X, obb.Max.Y, obb.Max.Z]; + } + else + { + mesh.BoneBindings[i].OBBMin = [0.0f, 0.0f, 0.0f]; + mesh.BoneBindings[i].OBBMax = [0.0f, 0.0f, 0.0f]; + } + } + } +} diff --git a/LSLib/LS/Common.cs b/LSLib/LS/Common.cs index ac6134b2..de231144 100644 --- a/LSLib/LS/Common.cs +++ b/LSLib/LS/Common.cs @@ -14,13 +14,16 @@ public static class Common { public const int MajorVersion = 1; - public const int MinorVersion = 19; + public const int MinorVersion = 20; - public const int PatchVersion = 5; + public const int PatchVersion = 0; // Version of LSTools profile data in generated DAE files public const int ColladaMetadataVersion = 3; + // Version of LSTools profile data in generated GLTF files + public const int GLTFMetadataVersion = 3; + /// /// Returns the version number of the LSLib library /// diff --git a/LSLib/LSLib.csproj b/LSLib/LSLib.csproj index 5cf39a0e..b6b56890 100644 --- a/LSLib/LSLib.csproj +++ b/LSLib/LSLib.csproj @@ -26,13 +26,15 @@ - + - - + + + + From 
fffa6e7c06942a1010dd256ce82210d568b34c5c Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sat, 22 Mar 2025 20:12:17 +0100 Subject: [PATCH 120/139] Remove useless exporter options --- ConverterApp/GR2Pane.cs | 1 - Divine/CLI/CommandLineArguments.cs | 20 ++++---- Divine/CLI/CommandLineGR2Processor.cs | 8 --- LSLib/Granny/Model/ColladaExporter.cs | 70 +++++++++------------------ LSLib/Granny/Model/ColladaImporter.cs | 15 ++---- LSLib/Granny/Model/ColladaMesh.cs | 11 ++--- LSLib/Granny/Model/Exporter.cs | 27 ----------- LSLib/Granny/Model/GLTFExporter.cs | 2 +- LSLib/Granny/Model/GLTFImporter.cs | 25 ++-------- LSLib/Granny/Model/GLTFMesh.cs | 7 +-- LSLib/Granny/Model/GLTFVertex.cs | 18 +++---- LSLib/Granny/Model/Metadata.cs | 13 +++++ 12 files changed, 72 insertions(+), 145 deletions(-) diff --git a/ConverterApp/GR2Pane.cs b/ConverterApp/GR2Pane.cs index 9e8f0e6b..4379cb25 100644 --- a/ConverterApp/GR2Pane.cs +++ b/ConverterApp/GR2Pane.cs @@ -274,7 +274,6 @@ private void UpdateCommonExporterSettings(ExporterOptions settings) settings.FlipUVs = flipUVs.Checked; settings.BuildDummySkeleton = buildDummySkeleton.Checked; - settings.DeduplicateUVs = filterUVs.Checked; settings.ApplyBasisTransforms = applyBasisTransforms.Checked; settings.FlipMesh = flipMeshes.Checked; settings.FlipSkeleton = flipSkeletons.Checked; diff --git a/Divine/CLI/CommandLineArguments.cs b/Divine/CLI/CommandLineArguments.cs index aa38f4ed..2947c33f 100644 --- a/Divine/CLI/CommandLineArguments.cs +++ b/Divine/CLI/CommandLineArguments.cs @@ -329,15 +329,7 @@ public static Dictionary GetGR2Options(string[] options) { var results = new Dictionary { - { "export-normals", true }, - { "export-tangents", true }, - { "export-uvs", true }, - { "export-colors", true }, { "deduplicate-vertices", true }, - { "deduplicate-uvs", true }, - { "recalculate-normals", false }, - { "recalculate-tangents", false }, - { "recalculate-iwt", false }, { "flip-uvs", true }, { "ignore-uv-nan", true }, { "disable-qtangents", false }, @@ -349,7 +341,17 @@ public static Dictionary GetGR2Options(string[] options) { "x-flip-skeletons", false }, { "x-flip-meshes", false }, { "conform", false }, - { "conform-copy", false } + { "conform-copy", false }, + + // Deprecated options, no longer in use + { "export-normals", true }, + { "export-tangents", true }, + { "export-uvs", true }, + { "export-colors", true }, + { "recalculate-normals", false }, + { "recalculate-tangents", false }, + { "recalculate-iwt", false }, + { "deduplicate-uvs", true } }; if (options == null) diff --git a/Divine/CLI/CommandLineGR2Processor.cs b/Divine/CLI/CommandLineGR2Processor.cs index ea1342b9..e56dabfa 100644 --- a/Divine/CLI/CommandLineGR2Processor.cs +++ b/Divine/CLI/CommandLineGR2Processor.cs @@ -30,18 +30,10 @@ public static ExporterOptions UpdateExporterSettings() OutputPath = CommandLineActions.DestinationPath, InputFormat = Program.argv.InputFormat != null ? GR2Utils.FileExtensionToModelFormat("." + Program.argv.InputFormat) : GR2Utils.PathExtensionToModelFormat(CommandLineActions.SourcePath), OutputFormat = Program.argv.OutputFormat != null ? GR2Utils.FileExtensionToModelFormat("." 
+ Program.argv.OutputFormat) : GR2Utils.PathExtensionToModelFormat(CommandLineActions.DestinationPath), - ExportNormals = GR2Options["export-normals"], - ExportTangents = GR2Options["export-tangents"], - ExportUVs = GR2Options["export-uvs"], - ExportColors = GR2Options["export-colors"], FlipUVs = GR2Options["flip-uvs"], - RecalculateNormals = GR2Options["recalculate-normals"], - RecalculateTangents = GR2Options["recalculate-tangents"], - RecalculateIWT = GR2Options["recalculate-iwt"], BuildDummySkeleton = GR2Options["build-dummy-skeleton"], CompactIndices = GR2Options["compact-tris"], DeduplicateVertices = GR2Options["deduplicate-vertices"], - DeduplicateUVs = GR2Options["deduplicate-uvs"], ApplyBasisTransforms = GR2Options["apply-basis-transforms"], UseObsoleteVersionTag = GR2Options["force-legacy-version"], ConformGR2Path = GR2Options["conform"] && !string.IsNullOrEmpty(CommandLineActions.ConformPath) ? CommandLineActions.ConformPath : null, diff --git a/LSLib/Granny/Model/ColladaExporter.cs b/LSLib/Granny/Model/ColladaExporter.cs index 28e5774a..57a18832 100644 --- a/LSLib/Granny/Model/ColladaExporter.cs +++ b/LSLib/Granny/Model/ColladaExporter.cs @@ -67,31 +67,22 @@ private void DetermineInputsFromComponentNames(List componentNames) case "Normal": { - if (Options.ExportNormals) - { - var normals = ExportedMesh.PrimaryVertexData.MakeColladaNormals(ExportedId); - AddInput(normals, "NORMAL"); - } + var normals = ExportedMesh.PrimaryVertexData.MakeColladaNormals(ExportedId); + AddInput(normals, "NORMAL"); break; } case "Tangent": { - if (Options.ExportTangents) - { - var tangents = ExportedMesh.PrimaryVertexData.MakeColladaTangents(ExportedId); - AddInput(tangents, "TEXTANGENT"); - } + var tangents = ExportedMesh.PrimaryVertexData.MakeColladaTangents(ExportedId); + AddInput(tangents, "TEXTANGENT"); break; } case "Binormal": { - if (Options.ExportTangents) - { - var binormals = ExportedMesh.PrimaryVertexData.MakeColladaBinormals(ExportedId); - AddInput(binormals, "TEXBINORMAL"); - } + var binormals = ExportedMesh.PrimaryVertexData.MakeColladaBinormals(ExportedId); + AddInput(binormals, "TEXBINORMAL"); break; } @@ -102,12 +93,9 @@ private void DetermineInputsFromComponentNames(List componentNames) case "TextureCoordinates4": case "TextureCoordinates5": { - if (Options.ExportUVs) - { - int uvIndex = Int32.Parse(component[^1..]); - var uvs = ExportedMesh.PrimaryVertexData.MakeColladaUVs(ExportedId, uvIndex, Options.FlipUVs); - AddInput(uvs, null, "TEXCOORD", (ulong)uvIndex); - } + int uvIndex = Int32.Parse(component[^1..]); + var uvs = ExportedMesh.PrimaryVertexData.MakeColladaUVs(ExportedId, uvIndex, Options.FlipUVs); + AddInput(uvs, null, "TEXCOORD", (ulong)uvIndex); break; } @@ -118,12 +106,9 @@ private void DetermineInputsFromComponentNames(List componentNames) case "UVChannel_2": case "map1": { - if (Options.ExportUVs) - { - int uvIndex = Int32.Parse(component[^1..]) - 1; - var uvs = ExportedMesh.PrimaryVertexData.MakeColladaUVs(ExportedId, uvIndex, Options.FlipUVs); - AddInput(uvs, null, "TEXCOORD", (ulong)uvIndex); - } + int uvIndex = Int32.Parse(component[^1..]) - 1; + var uvs = ExportedMesh.PrimaryVertexData.MakeColladaUVs(ExportedId, uvIndex, Options.FlipUVs); + AddInput(uvs, null, "TEXCOORD", (ulong)uvIndex); break; } @@ -134,11 +119,8 @@ private void DetermineInputsFromComponentNames(List componentNames) case "DiffuseColor0": { - if (Options.ExportColors) - { - var colors = ExportedMesh.PrimaryVertexData.MakeColladaColors(ExportedId, 0); - AddInput(colors, null, "COLOR", 0); - } + 
var colors = ExportedMesh.PrimaryVertexData.MakeColladaColors(ExportedId, 0); + AddInput(colors, null, "COLOR", 0); break; } @@ -161,44 +143,38 @@ private void DetermineInputsFromVertex(Vertex vertex) AddInput(positions, "POSITION", "VERTEX"); // Normals - if (desc.NormalType != NormalType.None && Options.ExportNormals) + if (desc.NormalType != NormalType.None) { var normals = ExportedMesh.PrimaryVertexData.MakeColladaNormals(ExportedId); AddInput(normals, null, "NORMAL"); } // Tangents - if (desc.TangentType != NormalType.None && Options.ExportTangents) + if (desc.TangentType != NormalType.None) { var normals = ExportedMesh.PrimaryVertexData.MakeColladaTangents(ExportedId); AddInput(normals, null, "TEXTANGENT"); } // Binormals - if (desc.BinormalType != NormalType.None && Options.ExportTangents) + if (desc.BinormalType != NormalType.None) { var normals = ExportedMesh.PrimaryVertexData.MakeColladaBinormals(ExportedId); AddInput(normals, null, "TEXBINORMAL"); } // Texture coordinates - if (Options.ExportUVs) + for (var uvIndex = 0; uvIndex < desc.TextureCoordinates; uvIndex++) { - for (var uvIndex = 0; uvIndex < desc.TextureCoordinates; uvIndex++) - { - var uvs = ExportedMesh.PrimaryVertexData.MakeColladaUVs(ExportedId, uvIndex, Options.FlipUVs); - AddInput(uvs, null, "TEXCOORD", (ulong)uvIndex); - } + var uvs = ExportedMesh.PrimaryVertexData.MakeColladaUVs(ExportedId, uvIndex, Options.FlipUVs); + AddInput(uvs, null, "TEXCOORD", (ulong)uvIndex); } // Vertex colors - if (Options.ExportColors) + for (var colorIndex = 0; colorIndex < desc.ColorMaps; colorIndex++) { - for (var colorIndex = 0; colorIndex < desc.ColorMaps; colorIndex++) - { - var colors = ExportedMesh.PrimaryVertexData.MakeColladaColors(ExportedId, colorIndex); - AddInput(colors, null, "COLOR", (ulong)colorIndex); - } + var colors = ExportedMesh.PrimaryVertexData.MakeColladaColors(ExportedId, colorIndex); + AddInput(colors, null, "COLOR", (ulong)colorIndex); } // BoneWeights and BoneIndices are handled in ExportSkin() diff --git a/LSLib/Granny/Model/ColladaImporter.cs b/LSLib/Granny/Model/ColladaImporter.cs index 7e423309..90ff466e 100644 --- a/LSLib/Granny/Model/ColladaImporter.cs +++ b/LSLib/Granny/Model/ColladaImporter.cs @@ -488,15 +488,8 @@ private Mesh ImportMesh(geometry geom, mesh mesh) OriginalToConsolidatedVertexIndexMap = collada.OriginalToConsolidatedVertexIndexMap }; - if (!Options.StripMetadata) - { - var components = m.VertexFormat.ComponentNames().Select(s => new GrannyString(s)).ToList(); - m.PrimaryVertexData.VertexComponentNames = components; - } - else - { - m.PrimaryVertexData.VertexComponentNames = null; - } + var components = m.VertexFormat.ComponentNames().Select(s => new GrannyString(s)).ToList(); + m.PrimaryVertexData.VertexComponentNames = components; MakeExtendedData(mesh, m); @@ -894,8 +887,8 @@ public Root Import(string inputPath) var root = Root.CreateEmpty(); root.ArtToolInfo = ImportArtToolInfo(collada); - root.ExporterInfo = Options.StripMetadata ? null : ImportExporterInfo(collada); - root.FromFileName = inputPath; + root.ExporterInfo = ImportExporterInfo(collada); + root.FromFileName = ""; ColladaGeometries = []; SkinnedMeshes = []; diff --git a/LSLib/Granny/Model/ColladaMesh.cs b/LSLib/Granny/Model/ColladaMesh.cs index 493c92a4..2d33a255 100644 --- a/LSLib/Granny/Model/ColladaMesh.cs +++ b/LSLib/Granny/Model/ColladaMesh.cs @@ -410,10 +410,9 @@ public void ImportFromCollada(mesh mesh, bool isSkinned, ExporterOptions options // TODO: This should be done before deduplication! 
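// NOTE (editorial annotation, not part of the applied change): with the RecalculateNormals /
// RecalculateTangents exporter options removed in this commit, the Collada importer rebuilds
// a channel only when the source mesh does not provide it. A minimal sketch of the resulting
// policy, assuming a VertexDescriptor-shaped "desc" and a UV list (names illustrative):
//
//   bool rebuildNormals  = desc.NormalType == NormalType.None;
//   bool rebuildTangents = (desc.TangentType == NormalType.None
//                           || desc.BinormalType == NormalType.None)
//                          && uvs.Count > 0;   // a tangent basis needs at least one UV set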
// TODO: Move this to somewhere else ... ? - if (!HasNormals || Options.RecalculateNormals) + if (!HasNormals) { - if (!HasNormals) - Utils.Info(String.Format("Channel 'NORMAL' not found, will rebuild vertex normals after import.")); + Utils.Info(String.Format("Channel 'NORMAL' not found, will rebuild vertex normals after import.")); HasNormals = true; OutputVertexType.NormalType = NormalType.Float3; @@ -497,10 +496,10 @@ public void ImportFromCollada(mesh mesh, bool isSkinned, ExporterOptions options if ((InputVertexType.TangentType == NormalType.None || InputVertexType.BinormalType == NormalType.None) - && ((!HasTangents && UVs.Count > 0) || Options.RecalculateTangents)) + && !HasTangents + && UVs.Count > 0) { - if (!HasTangents) - Utils.Info(String.Format("Channel 'TANGENT'/'BINROMAL' not found, will rebuild vertex tangents after import.")); + Utils.Info(String.Format("Channel 'TANGENT'/'BINROMAL' not found, will rebuild vertex tangents after import.")); OutputVertexType.TangentType = NormalType.Float3; OutputVertexType.BinormalType = NormalType.Float3; diff --git a/LSLib/Granny/Model/Exporter.cs b/LSLib/Granny/Model/Exporter.cs index 765e933f..7887990e 100644 --- a/LSLib/Granny/Model/Exporter.cs +++ b/LSLib/Granny/Model/Exporter.cs @@ -50,31 +50,14 @@ public class ExporterOptions // have to 1:1 match the GR2 structs for that version, as it won't just // memcpy the struct from the GR2 file directly. public UInt32 VersionTag = GR2.Header.DefaultTag; - // Export vertex normals to DAE/GR2 file - public bool ExportNormals = true; - // Export tangents/binormals to DAE/GR2 file - public bool ExportTangents = true; - // Export UV-s to DAE/GR2 file - public bool ExportUVs = true; - // Export vertex colors to DAE/GR2 file - public bool ExportColors = true; // Flip the V coord of UV-s (GR2 stores them in flipped format) public bool FlipUVs = true; - // Recalculate normals, even if they're available in the source mesh - // (They'll be recalculated automatically if unavailable) - public bool RecalculateNormals = false; - // Recalculate tangents/binormals, even if they're available in the source mesh - // (They'll be recalculated automatically if unavailable) - public bool RecalculateTangents = false; - // Recalculate bone inverse world transforms - public bool RecalculateIWT = false; // Create a dummy skeleton if none exists in the mesh // Some games will crash if they encounter a mesh without a skeleton public bool BuildDummySkeleton = false; // Save 16-bit vertex indices, if possible public bool CompactIndices = true; public bool DeduplicateVertices = true; // TODO: Add Collada conforming vert. 
handling as well - public bool DeduplicateUVs = true; // TODO: UNHANDLED public bool ApplyBasisTransforms = true; // Use an obsolete version tag to prevent Granny from memory mapping the structs public bool UseObsoleteVersionTag = false; @@ -88,8 +71,6 @@ public class ExporterOptions public DivinityModelInfoFormat ModelInfoFormat = DivinityModelInfoFormat.None; // Model flags to use when exporting public DivinityModelFlag ModelType = 0; - // Remove unused metadata from the GR2 file - public bool StripMetadata = true; // Flip mesh on X axis public bool FlipMesh = false; // Flip skeleton on X axis @@ -762,14 +743,6 @@ public void Export() Root.ConvertToYUp(Options.TransformSkeletons); } - if (Options.RecalculateIWT && Root.Skeletons != null) - { - foreach (var skeleton in Root.Skeletons) - { - skeleton.UpdateWorldTransforms(); - } - } - // TODO: DeduplicateUVs if (Options.ConformGR2Path != null) diff --git a/LSLib/Granny/Model/GLTFExporter.cs b/LSLib/Granny/Model/GLTFExporter.cs index 7cd7d7e5..8ca31ef8 100644 --- a/LSLib/Granny/Model/GLTFExporter.cs +++ b/LSLib/Granny/Model/GLTFExporter.cs @@ -41,7 +41,7 @@ private void GenerateUniqueMeshIds(List meshes) private void ExportMeshBinding(Model model, Skeleton skeleton, MeshBinding meshBinding, SceneBuilder scene) { var meshId = MeshIds[meshBinding.Mesh]; - var exporter = new GLTFMeshExporter(meshBinding.Mesh, meshId, Options); + var exporter = new GLTFMeshExporter(meshBinding.Mesh, meshId); var mesh = exporter.Export(); if (skeleton == null || !meshBinding.Mesh.VertexFormat.HasBoneWeights) diff --git a/LSLib/Granny/Model/GLTFImporter.cs b/LSLib/Granny/Model/GLTFImporter.cs index 8e96fef3..5e1984c3 100644 --- a/LSLib/Granny/Model/GLTFImporter.cs +++ b/LSLib/Granny/Model/GLTFImporter.cs @@ -11,18 +11,6 @@ public class GLTFImporter public ExporterOptions Options = new(); public List ImportedMeshes; - private ExporterInfo MakeExporterInfo() - { - return new ExporterInfo - { - ExporterName = $"LSLib GR2 Exporter v{Common.LibraryVersion()}", - ExporterMajorRevision = Common.MajorVersion, - ExporterMinorRevision = Common.MinorVersion, - ExporterBuildNumber = 0, - ExporterCustomization = Common.PatchVersion - }; - } - private DivinityModelFlag DetermineSkeletonModelFlagsFromModels(Root root, Skeleton skeleton, DivinityModelFlag meshFlagOverrides) { DivinityModelFlag accumulatedFlags = 0; @@ -191,15 +179,8 @@ private Mesh ImportMesh(ModelRoot modelRoot, ContentTransformer content, string MaterialBindings = [new MaterialBinding()] }; - if (!Options.StripMetadata) - { - var components = m.VertexFormat.ComponentNames().Select(s => new GrannyString(s)).ToList(); - m.PrimaryVertexData.VertexComponentNames = components; - } - else - { - m.PrimaryVertexData.VertexComponentNames = null; - } + var components = m.VertexFormat.ComponentNames().Select(s => new GrannyString(s)).ToList(); + m.PrimaryVertexData.VertexComponentNames = components; var ext = FindMeshExtension(modelRoot, name); MakeExtendedData(content, ext, m); @@ -414,7 +395,7 @@ public Root Import(string inputPath) var root = Root.CreateEmpty(); root.ArtToolInfo = ArtToolInfo.CreateDefault(); root.ArtToolInfo.SetYUp(); - root.ExporterInfo = Options.StripMetadata ? 
null : MakeExporterInfo(); + root.ExporterInfo = ExporterInfo.MakeCurrent(); root.FromFileName = inputPath; ImportedMeshes = []; diff --git a/LSLib/Granny/Model/GLTFMesh.cs b/LSLib/Granny/Model/GLTFMesh.cs index 0831abfa..ffe9bd48 100644 --- a/LSLib/Granny/Model/GLTFMesh.cs +++ b/LSLib/Granny/Model/GLTFMesh.cs @@ -45,7 +45,7 @@ private void ImportTriangles(IPrimitiveReader primitives) private void ImportVertices(IPrimitiveReader primitives) { - BuildHelper = new GLTFVertexBuildHelper("", OutputVertexType, Options); + BuildHelper = new GLTFVertexBuildHelper("", OutputVertexType); Vertices = new List(primitives.Vertices.Count); foreach (var vert in primitives.Vertices) @@ -193,7 +193,7 @@ public void ImportFromGLTF(ContentTransformer content, ExporterOptions options) ImportTriangles(primitives); ImportVertices(primitives); - if (!HasNormals || Options.RecalculateNormals) + if (!HasNormals) { HasNormals = true; OutputVertexType.NormalType = NormalType.Float3; @@ -202,7 +202,8 @@ public void ImportFromGLTF(ContentTransformer content, ExporterOptions options) if ((InputVertexType.TangentType == NormalType.None || InputVertexType.BinormalType == NormalType.None) - && ((!HasTangents && InputVertexType.TextureCoordinates > 0) || Options.RecalculateTangents)) + && !HasTangents + && InputVertexType.TextureCoordinates > 0) { OutputVertexType.TangentType = NormalType.Float3; OutputVertexType.BinormalType = NormalType.Float3; diff --git a/LSLib/Granny/Model/GLTFVertex.cs b/LSLib/Granny/Model/GLTFVertex.cs index 8954fa1a..b4f1747a 100644 --- a/LSLib/Granny/Model/GLTFVertex.cs +++ b/LSLib/Granny/Model/GLTFVertex.cs @@ -417,7 +417,6 @@ public class GLTFVertexBuildHelper { private readonly string ExportedId; private readonly VertexDescriptor VertexFormat; - private readonly ExporterOptions Options; private GLTFVertexBuilder GeometryBuilder; private GLTFVertexBuilder MaterialBuilder; @@ -432,16 +431,15 @@ public class GLTFVertexBuildHelper private bool HasNormals; private bool HasTangents; - public GLTFVertexBuildHelper(string exportedId, VertexDescriptor vertexFormat, ExporterOptions options) + public GLTFVertexBuildHelper(string exportedId, VertexDescriptor vertexFormat) { ExportedId = exportedId; VertexFormat = vertexFormat; - Options = options; - HasNormals = Options.ExportNormals && VertexFormat.NormalType != NormalType.None; - HasTangents = Options.ExportTangents && VertexFormat.TangentType != NormalType.None; - UVs = Options.ExportUVs ? VertexFormat.TextureCoordinates : 0; - ColorMaps = Options.ExportColors ? 
VertexFormat.ColorMaps : 0; + HasNormals = VertexFormat.NormalType != NormalType.None; + HasTangents = VertexFormat.TangentType != NormalType.None; + UVs = VertexFormat.TextureCoordinates; + ColorMaps = VertexFormat.ColorMaps; SelectGeometryBuilder(); SelectMaterialBuilder(); @@ -570,10 +568,10 @@ public Vertex FromGLTF(IVertexBuilder gltf) } } -public class GLTFMeshExporter(Mesh mesh, string exportedId, ExporterOptions options) +public class GLTFMeshExporter(Mesh mesh, string exportedId) { - private Mesh ExportedMesh = mesh; - private GLTFVertexBuildHelper BuildHelper = new(exportedId, mesh.VertexFormat, options); + private readonly Mesh ExportedMesh = mesh; + private readonly GLTFVertexBuildHelper BuildHelper = new(exportedId, mesh.VertexFormat); public IMeshBuilder Export() { diff --git a/LSLib/Granny/Model/Metadata.cs b/LSLib/Granny/Model/Metadata.cs index 2887911c..2c14c809 100644 --- a/LSLib/Granny/Model/Metadata.cs +++ b/LSLib/Granny/Model/Metadata.cs @@ -1,4 +1,5 @@ using LSLib.Granny.GR2; +using LSLib.LS; namespace LSLib.Granny.Model; @@ -58,4 +59,16 @@ public class ExporterInfo public Int32 ExporterBuildNumber; [Serialization(Type = MemberType.VariantReference, MinVersion = 0x80000011)] public object ExtendedData; + + public static ExporterInfo MakeCurrent() + { + return new ExporterInfo + { + ExporterName = $"LSLib GR2 Exporter v{Common.LibraryVersion()}", + ExporterMajorRevision = Common.MajorVersion, + ExporterMinorRevision = Common.MinorVersion, + ExporterBuildNumber = 0, + ExporterCustomization = Common.PatchVersion + }; + } } From 875da9e5e56665585f6dc7750d7a0e5a3c5e7f1b Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sun, 23 Mar 2025 11:28:46 +0100 Subject: [PATCH 121/139] Fix no cli exit code when parameter parsing fails --- Divine/Program.cs | 1 + 1 file changed, 1 insertion(+) diff --git a/Divine/Program.cs b/Divine/Program.cs index 7c128459..691583f4 100644 --- a/Divine/Program.cs +++ b/Divine/Program.cs @@ -34,6 +34,7 @@ private static void Main(string[] args) catch (Exception e) { Console.WriteLine($"[FATAL] {e.Message}"); + Environment.Exit(1); } #endif From ca8a6bbd143c4e273ae898b3b236a4ac0ec689ee Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sat, 29 Mar 2025 14:57:20 +0100 Subject: [PATCH 122/139] Update functor definitions --- LSLibDefinitions.xml | 93 +++++++++++++++++++++++--------------------- 1 file changed, 48 insertions(+), 45 deletions(-) diff --git a/LSLibDefinitions.xml b/LSLibDefinitions.xml index b0db6df2..18837c47 100644 --- a/LSLibDefinitions.xml +++ b/LSLibDefinitions.xml @@ -361,9 +361,9 @@ - + - + @@ -376,14 +376,15 @@ - - + + - + + @@ -393,7 +394,7 @@ - + @@ -402,7 +403,7 @@ - + @@ -415,8 +416,6 @@ - - @@ -441,8 +440,8 @@ - - + + @@ -455,7 +454,7 @@ - + @@ -465,7 +464,7 @@ - + @@ -494,7 +493,7 @@ - + @@ -506,24 +505,28 @@ - - - - - - - - + + + + + + + + + + - - - - - - - + + + + + + + + + @@ -534,18 +537,18 @@ - - - - - - + + + + + + - + @@ -563,8 +566,8 @@ - - + + @@ -579,14 +582,14 @@ - + - + @@ -600,7 +603,7 @@ - + @@ -618,7 +621,7 @@ - + @@ -762,10 +765,10 @@ - + - + @@ -800,7 +803,7 @@ - + From b27113f900ed9946b927fa62716f81f76ebc475d Mon Sep 17 00:00:00 2001 From: Norbyte Date: Fri, 4 Apr 2025 17:55:18 +0200 Subject: [PATCH 123/139] Support for skeleton-only export --- LSLib/Granny/Model/GLTFExporter.cs | 100 +++++++++++++++++++++++++---- LSLib/Granny/Model/GLTFMesh.cs | 3 +- 2 files changed, 91 insertions(+), 12 deletions(-) diff --git a/LSLib/Granny/Model/GLTFExporter.cs b/LSLib/Granny/Model/GLTFExporter.cs index 8ca31ef8..59a34fff 100644 --- 
a/LSLib/Granny/Model/GLTFExporter.cs +++ b/LSLib/Granny/Model/GLTFExporter.cs @@ -8,13 +8,20 @@ namespace LSLib.Granny.Model; +internal class GLTFSkeletonExportData +{ + public NodeBuilder Root; + public List<(NodeBuilder, Matrix4x4)> Joints; + public bool UsedForSkinning; +} + public class GLTFExporter { [Serialization(Kind = SerializationKind.None)] public ExporterOptions Options = new(); private Dictionary MeshIds = new(); - private Dictionary> Skeletons = new(); + private Dictionary Skeletons = new(); private void GenerateUniqueMeshIds(List meshes) { @@ -50,13 +57,14 @@ private void ExportMeshBinding(Model model, Skeleton skeleton, MeshBinding meshB } else { - scene.AddSkinnedMesh(mesh, Matrix4x4.Identity, Skeletons[skeleton].ToArray()); + Skeletons[skeleton].UsedForSkinning = true; + scene.AddSkinnedMesh(mesh, Skeletons[skeleton].Joints.ToArray()); } } - private List ExportSkeleton(NodeBuilder root, Skeleton skeleton) + private GLTFSkeletonExportData ExportSkeleton(NodeBuilder root, Skeleton skeleton) { - var joints = new List(); + var joints = new List<(NodeBuilder, Matrix4x4)>(); foreach (var joint in skeleton.Bones) { NodeBuilder node; @@ -68,14 +76,25 @@ private List ExportSkeleton(NodeBuilder root, Skeleton skeleton) } else { - node = joints[joint.ParentIndex].CreateNode(joint.Name); + node = joints[joint.ParentIndex].Item1.CreateNode(joint.Name); } node.LocalTransform = ToGLTFTransform(joint.Transform); - joints.Add(node); + var t = joint.InverseWorldTransform; + var iwt = new Matrix4x4( + t[0], t[1], t[2], t[3], + t[4], t[5], t[6], t[7], + t[8], t[9], t[10], t[11], + t[12], t[13], t[14], t[15] + ); + joints.Add((node, iwt)); } - return joints; + return new GLTFSkeletonExportData { + Joints = joints, + Root = joints[0].Item1, + UsedForSkinning = false + }; } private void ExportSceneExtensions(Root root, GLTFSceneExtensions ext) @@ -170,10 +189,6 @@ private SceneBuilder ExportScene(Root root) foreach (var skeleton in root.Skeletons ?? 
[]) { - //var skelRoot = new NodeBuilder(); - //skelRoot.Name = skeleton.Name; - //scene.AddNode(skelRoot); - var joints = ExportSkeleton(null, skeleton); Skeletons.Add(skeleton, joints); } @@ -183,9 +198,63 @@ private SceneBuilder ExportScene(Root root) ExportModel(root, model, scene); } + foreach (var skeleton in Skeletons) + { + if (!skeleton.Value.UsedForSkinning) + { + scene.AddNode(skeleton.Value.Root); + } + } + return scene; } + private SharpGLTF.Schema2.Node FindRoot(ModelRoot root, NodeBuilder node) + { + foreach (var n in root.LogicalNodes) + { + if (node.Name == n.Name && n.VisualParent == null) + { + return n; + } + } + + return null; + } + + private SharpGLTF.Schema2.Node FindNode(ModelRoot root, NodeBuilder node) + { + foreach (var n in root.LogicalNodes) + { + if (node.Name == n.Name && n.VisualParent?.Name == node.Parent?.Name) + { + return n; + } + } + + return null; + } + + private void ExportSkin(ModelRoot root, GLTFSkeletonExportData skeleton) + { + var skelRoot = FindRoot(root, skeleton.Root); + + List<(SharpGLTF.Schema2.Node Joint, Matrix4x4 InverseBindMatrix)> joints = []; + foreach (var (joint, bindMat) in skeleton.Joints) + { + var mapped = FindNode(root, joint); + if (mapped == null) + { + throw new ParsingException($"Unable to find bone {joint.Name} in gltf node tree"); + } + + joints.Add((mapped, bindMat)); + } + + var skin = skelRoot.LogicalParent.CreateSkin(); + skin.BindJoints(joints); + } + public void Export(Root root, string outputPath) { @@ -193,6 +262,15 @@ public void Export(Root root, string outputPath) var scene = ExportScene(root); var modelRoot = scene.ToGltf2(); + // Add skins for skeletons that were not used for skinning any mesh + foreach (var skeleton in Skeletons) + { + if (!skeleton.Value.UsedForSkinning) + { + ExportSkin(modelRoot, skeleton.Value); + } + } + ExportExtensions(root, modelRoot); switch (Options.OutputFormat) diff --git a/LSLib/Granny/Model/GLTFMesh.cs b/LSLib/Granny/Model/GLTFMesh.cs index ffe9bd48..f966e02e 100644 --- a/LSLib/Granny/Model/GLTFMesh.cs +++ b/LSLib/Granny/Model/GLTFMesh.cs @@ -30,7 +30,8 @@ private void ImportTriangles(IPrimitiveReader primitives) primitives.Lines.Count > 0 || primitives.VerticesPerPrimitive != 3) { - throw new ParsingException("Non-triangle"); // FIXME + throw new ParsingException($"glTF mesh needs to be triangulated; " + + $"got {primitives.Points.Count} points, {primitives.Lines.Count} lines, {primitives.VerticesPerPrimitive} verts per primitive"); } TriangleCount = primitives.Triangles.Count; From 4728671559052e6db5c4d0fe2308e1784497956d Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sun, 6 Apr 2025 12:52:56 +0200 Subject: [PATCH 124/139] Improve logic for detecting dummy skeletons --- LSLib/Granny/Model/GLTFExporter.cs | 7 +++-- LSLib/Granny/Model/Skeleton.cs | 50 ++++++++++++++++++++++++++++-- 2 files changed, 53 insertions(+), 4 deletions(-) diff --git a/LSLib/Granny/Model/GLTFExporter.cs b/LSLib/Granny/Model/GLTFExporter.cs index 59a34fff..74cfffab 100644 --- a/LSLib/Granny/Model/GLTFExporter.cs +++ b/LSLib/Granny/Model/GLTFExporter.cs @@ -189,8 +189,11 @@ private SceneBuilder ExportScene(Root root) foreach (var skeleton in root.Skeletons ?? []) { - var joints = ExportSkeleton(null, skeleton); - Skeletons.Add(skeleton, joints); + if (!skeleton.IsDummy) + { + var joints = ExportSkeleton(null, skeleton); + Skeletons.Add(skeleton, joints); + } } foreach (var model in root.Models ?? 
[]) diff --git a/LSLib/Granny/Model/Skeleton.cs b/LSLib/Granny/Model/Skeleton.cs index 6abb5a7f..5a9d6696 100644 --- a/LSLib/Granny/Model/Skeleton.cs +++ b/LSLib/Granny/Model/Skeleton.cs @@ -270,10 +270,56 @@ public void ReorderBones() } } + private bool CheckIsDummy(Root root) + { + // If we have any skinned meshes, the skeleton cannot be dummy + var hasSkinnedMeshes = root.Models != null + && root.Models.Any((model) => model.Skeleton == this) // We have a binding for this skeleton + && root.Meshes != null + && root.Meshes.Any((mesh) => mesh.IsSkinned()); // ... and the mesh has bone weights + if (hasSkinnedMeshes) return false; + + // If we have animations (that have skeleton bindings), the skeleton cannot be dummy + if (root.Animations != null && root.Animations.Count > 0) return false; + + // If we don't have any meshes (i.e. only exporting the skeleton resource), always include + // the skeleton even if it's a dummy skel + if (root.Meshes == null || root.Meshes.Count == 0) return false; + + // Check if the skeleton conforms to one of the dummy patterns: + // 1) A single dummy root bone + if (Bones.Count == 1) return true; + + // 2) A bone for each mesh parented to a dummy root bone + if (Bones.Count == 1 + root.Meshes.Count) + { + foreach (var bone in Bones) + { + if (!bone.IsRoot && bone.ParentIndex != 0) return false; + } + + HashSet marked = []; + foreach (var mesh in root.Meshes) + { + if (mesh.BoneBindings == null + || mesh.BoneBindings.Count != 1) + { + return false; + } + + if (marked.Contains(mesh.BoneBindings[0].BoneName)) return false; + marked.Add(mesh.BoneBindings[0].BoneName); + } + + return true; + } + + return false; + } + public void PostLoad(Root root) { - var hasSkinnedMeshes = root.Models.Any((model) => model.Skeleton == this); - if (!hasSkinnedMeshes || Bones.Count == 1) + if (CheckIsDummy(root)) { IsDummy = true; Utils.Info(String.Format("Skeleton '{0}' marked as dummy", this.Name)); From 935eeb28dd20f3d7c1c4a517827ec8a48a2dd155 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sun, 6 Apr 2025 13:36:26 +0200 Subject: [PATCH 125/139] Add support for exporting animations to glTF --- Divine/CLI/CommandLineActions.cs | 6 ++- Divine/CLI/CommandLineGR2Processor.cs | 2 +- LSLib/Granny/Model/GLTFExporter.cs | 65 +++++++++++++++++++++++++++ 3 files changed, 71 insertions(+), 2 deletions(-) diff --git a/Divine/CLI/CommandLineActions.cs b/Divine/CLI/CommandLineActions.cs index 533be3b7..cf67eb0b 100644 --- a/Divine/CLI/CommandLineActions.cs +++ b/Divine/CLI/CommandLineActions.cs @@ -106,9 +106,13 @@ private static void SetUpAndValidate(CommandLineArguments args) } - if (GR2Options["conform"]) + if (args.ConformPath != null && args.ConformPath != "") { ConformPath = TryToValidatePath(args.ConformPath); + if (!Path.Exists(ConformPath)) + { + CommandLineLogger.LogFatal($"Skeleton source GR2 does not exist: {args.ConformPath}", 1); + } } } diff --git a/Divine/CLI/CommandLineGR2Processor.cs b/Divine/CLI/CommandLineGR2Processor.cs index e56dabfa..a0cbdf47 100644 --- a/Divine/CLI/CommandLineGR2Processor.cs +++ b/Divine/CLI/CommandLineGR2Processor.cs @@ -36,7 +36,7 @@ public static ExporterOptions UpdateExporterSettings() DeduplicateVertices = GR2Options["deduplicate-vertices"], ApplyBasisTransforms = GR2Options["apply-basis-transforms"], UseObsoleteVersionTag = GR2Options["force-legacy-version"], - ConformGR2Path = GR2Options["conform"] && !string.IsNullOrEmpty(CommandLineActions.ConformPath) ? 
CommandLineActions.ConformPath : null, + ConformGR2Path = !string.IsNullOrEmpty(CommandLineActions.ConformPath) ? CommandLineActions.ConformPath : null, FlipSkeleton = GR2Options["x-flip-skeletons"], FlipMesh = GR2Options["x-flip-meshes"], TransformSkeletons = GR2Options["y-up-skeletons"], diff --git a/LSLib/Granny/Model/GLTFExporter.cs b/LSLib/Granny/Model/GLTFExporter.cs index 74cfffab..d7a15679 100644 --- a/LSLib/Granny/Model/GLTFExporter.cs +++ b/LSLib/Granny/Model/GLTFExporter.cs @@ -5,6 +5,7 @@ using SharpGLTF.Scenes; using LSLib.LS; using SharpGLTF.Schema2; +using SharpGLTF.Animations; namespace LSLib.Granny.Model; @@ -12,6 +13,7 @@ internal class GLTFSkeletonExportData { public NodeBuilder Root; public List<(NodeBuilder, Matrix4x4)> Joints; + public Dictionary Names; public bool UsedForSkinning; } @@ -65,6 +67,7 @@ private void ExportMeshBinding(Model model, Skeleton skeleton, MeshBinding meshB private GLTFSkeletonExportData ExportSkeleton(NodeBuilder root, Skeleton skeleton) { var joints = new List<(NodeBuilder, Matrix4x4)>(); + var names = new Dictionary(); foreach (var joint in skeleton.Bones) { NodeBuilder node; @@ -88,10 +91,12 @@ private GLTFSkeletonExportData ExportSkeleton(NodeBuilder root, Skeleton skeleto t[12], t[13], t[14], t[15] ); joints.Add((node, iwt)); + names.Add(joint.Name, node); } return new GLTFSkeletonExportData { Joints = joints, + Names = names, Root = joints[0].Item1, UsedForSkinning = false }; @@ -181,6 +186,56 @@ private void ExportModel(Root root, Model model, SceneBuilder scene) } } + private void ExportAnimationTrack(TransformTrack track, NodeBuilder joint, string animName) + { + var keyframes = track.ToKeyframes(); + + var translate = joint.UseTranslation().UseTrackBuilder(animName); + var rotation = joint.UseRotation().UseTrackBuilder(animName); + var scale = joint.UseScale().UseTrackBuilder(animName); + + foreach (var (time, frame) in keyframes.Keyframes) + { + if (frame.HasTranslation) + { + var v = frame.Translation; + translate.SetPoint(time, new Vector3(v.X, v.Y, v.Z), true); + } + + if (frame.HasRotation) + { + var q = frame.Rotation; + rotation.SetPoint(time, new Quaternion(q.X, q.Y, q.Z, q.W), true); + } + + if (frame.HasScaleShear) + { + var m = frame.ScaleShear; + scale.SetPoint(time, new Vector3(m[0,0], m[1,1], m[2,2]), true); + } + } + } + + private void ExportAnimation(Animation anim) + { + if (Skeletons.Count != 1) + { + throw new ParsingException("Exporting .GR2 animations without skeleton data is not supported"); + } + + if (anim.TrackGroups.Count != 1) + { + throw new ParsingException("Exporting .GR2 animations with multiple track groups is not supported"); + } + + var group = anim.TrackGroups[0]; + foreach (var track in group.TransformTracks) + { + var joint = Skeletons.First().Value.Names[track.Name]; + ExportAnimationTrack(track, joint, anim.Name); + } + } + private SceneBuilder ExportScene(Root root) { @@ -201,6 +256,16 @@ private SceneBuilder ExportScene(Root root) ExportModel(root, model, scene); } + if (root.Animations != null && root.Animations.Count > 1) + { + throw new ParsingException("Exporting .GR2 files with multiple animations is not supported"); + } + + foreach (var animation in root.Animations ?? 
[])
+        {
+            ExportAnimation(animation);
+        }
+
         foreach (var skeleton in Skeletons)
         {
             if (!skeleton.Value.UsedForSkinning)

From 73083a25fdf5e02b8d263be7363aab17e061cd46 Mon Sep 17 00:00:00 2001
From: Norbyte
Date: Sun, 6 Apr 2025 14:34:33 +0200
Subject: [PATCH 126/139] Add support for importing animations

---
 LSLib/Granny/Model/GLTFExporter.cs   |   8 ++
 LSLib/Granny/Model/GLTFExtensions.cs |   3 +
 LSLib/Granny/Model/GLTFImporter.cs   | 188 ++++++++++++++++-----------
 3 files changed, 123 insertions(+), 76 deletions(-)

diff --git a/LSLib/Granny/Model/GLTFExporter.cs b/LSLib/Granny/Model/GLTFExporter.cs
index d7a15679..4c9e4be4 100644
--- a/LSLib/Granny/Model/GLTFExporter.cs
+++ b/LSLib/Granny/Model/GLTFExporter.cs
@@ -108,6 +108,14 @@ private void ExportSceneExtensions(Root root, GLTFSceneExtensions ext)
         ext.LSLibMajor = Common.MajorVersion;
         ext.LSLibMinor = Common.MinorVersion;
         ext.LSLibPatch = Common.PatchVersion;
+
+        foreach (var group in root.TrackGroups ?? [])
+        {
+            if (group.ExtendedData != null)
+            {
+                ext.SkeletonResourceID = group.ExtendedData.SkeletonResourceID;
+            }
+        }
     }
 
     private void ExportSkeletonExtensions(Skeleton skeleton, GLTFSceneExtensions ext)
diff --git a/LSLib/Granny/Model/GLTFExtensions.cs b/LSLib/Granny/Model/GLTFExtensions.cs
index a9d9a029..c5ca0d92 100644
--- a/LSLib/Granny/Model/GLTFExtensions.cs
+++ b/LSLib/Granny/Model/GLTFExtensions.cs
@@ -14,6 +14,7 @@ internal GLTFSceneExtensions() { }
     public Int32 LSLibPatch = 0;
 
     public Dictionary<string, int> BoneOrder = [];
+    public string SkeletonResourceID;
 
     protected override void SerializeProperties(Utf8JsonWriter writer)
     {
@@ -25,6 +26,7 @@ protected override void SerializeProperties(Utf8JsonWriter writer)
         SerializeProperty(writer, "LSLibPatch", LSLibPatch);
 
         SerializeProperty(writer, "BoneOrder", BoneOrder);
+        SerializeProperty(writer, "SkeletonResourceID", SkeletonResourceID);
     }
 
     protected override void DeserializeProperty(string jsonPropertyName, ref Utf8JsonReader reader)
@@ -37,6 +39,7 @@ protected override void DeserializeProperty(string jsonPropertyName, ref Utf8Jso
             case "LSLibPatch": LSLibPatch = DeserializePropertyValue<Int32>(ref reader); break;
 
             case "BoneOrder": DeserializePropertyDictionary(ref reader, BoneOrder); break;
+            case "SkeletonResourceID": SkeletonResourceID = DeserializePropertyValue<string>(ref reader); break;
 
             default: base.DeserializeProperty(jsonPropertyName, ref reader); break;
         }
diff --git a/LSLib/Granny/Model/GLTFImporter.cs b/LSLib/Granny/Model/GLTFImporter.cs
index 5e1984c3..cba6b2e5 100644
--- a/LSLib/Granny/Model/GLTFImporter.cs
+++ b/LSLib/Granny/Model/GLTFImporter.cs
@@ -1,15 +1,23 @@
 using LSLib.Granny.GR2;
 using LSLib.LS;
-using OpenTK.Mathematics;
+using SharpGLTF.Animations;
 using SharpGLTF.Scenes;
 using SharpGLTF.Schema2;
+using System.Numerics;
 
 namespace LSLib.Granny.Model;
 
+class GLTFImportedSkeleton
+{
+    public Dictionary<string, NodeBuilder> Joints = [];
+}
+
 public class GLTFImporter
{
     public ExporterOptions Options = new();
     public List<Mesh> ImportedMeshes;
+    private HashSet<string> AnimationNames = [];
+    private Dictionary<Skeleton, GLTFImportedSkeleton> Skeletons = [];
 
     private DivinityModelFlag DetermineSkeletonModelFlagsFromModels(Root root, Skeleton skeleton, DivinityModelFlag meshFlagOverrides)
     {
@@ -99,22 +107,6 @@ private void FindRootBones(List parents, node node, List roo
         }
     }
 
-    public static technique FindExporterExtraData(extra[] extras)
-    {
-        foreach (var extra in extras ?? Enumerable.Empty<extra>())
-        {
-            foreach (var technique in extra.technique ?? 
Enumerable.Empty()) - { - if (technique.profile == "LSTools") - { - return technique; - } - } - } - - return null; - } - private void MakeExtendedData(ContentTransformer content, GLTFMeshExtensions ext, Mesh loaded) { var modelFlagOverrides = Options.ModelType; @@ -204,96 +196,118 @@ private void AddMeshToRoot(Root root, Mesh mesh) }); } - private void LoadColladaLSLibProfileData(animation anim, TrackGroup loaded) + private TrackGroup ImportTrackGroup(Animation anim, GLTFImportedSkeleton skeleton, string name, GLTFSceneExtensions ext) { - var technique = FindExporterExtraData(anim.extra); - if (technique == null || technique.Any == null) return; - - foreach (var setting in technique.Any) + var trackGroup = new TrackGroup { - switch (setting.LocalName) + Name = name, + TransformTracks = [], + InitialPlacement = new Transform(), + AccumulationFlags = 2, + LoopTranslation = [0, 0, 0], + ExtendedData = new BG3TrackGroupExtendedData { - case "SkeletonResourceID": - loaded.ExtendedData = new BG3TrackGroupExtendedData - { - SkeletonResourceID = setting.InnerText.Trim() - }; - break; + SkeletonResourceID = ext?.SkeletonResourceID ?? "" + } + }; - default: - Utils.Warn($"Unrecognized LSLib animation profile attribute: {setting.LocalName}"); - break; + foreach (var (jointName, joint) in skeleton.Joints) + { + var track = ImportTrack(anim, joint, name); + if (track != null) + { + track.Name = jointName; + trackGroup.TransformTracks.Add(track); } } + + // Reorder transform tracks in lexicographic order + // This is needed by Granny; otherwise it'll fail to find animation tracks + trackGroup.TransformTracks.Sort((t1, t2) => String.Compare(t1.Name, t2.Name, StringComparison.Ordinal)); + + return trackGroup; } - public void ImportAnimations(IEnumerable anims, Root root, Skeleton skeleton) + private void ImportAnimations(Root root, Skeleton skeleton, GLTFSceneExtensions ext) { - var trackGroup = new TrackGroup - { - Name = (skeleton != null) ? 
skeleton.Name : "Dummy_Root", - TransformTracks = [], - InitialPlacement = new Transform(), - AccumulationFlags = 2, - LoopTranslation = [0, 0, 0] - }; + var gltfSkel = Skeletons[skeleton]; var animation = new Animation { - Name = "Default", + Name = skeleton.Name, TimeStep = 0.016667f, // 60 FPS Oversampling = 1, DefaultLoopCount = 1, Flags = 1, Duration = .0f, - TrackGroups = [trackGroup] + TrackGroups = [] }; - foreach (var colladaTrack in anims) - { - ImportAnimation(colladaTrack, animation, trackGroup, skeleton); - } - - if (trackGroup.TransformTracks.Count > 0) + foreach (var animName in AnimationNames) { - // Reorder transform tracks in lexicographic order - // This is needed by Granny; otherwise it'll fail to find animation tracks - trackGroup.TransformTracks.Sort((t1, t2) => String.Compare(t1.Name, t2.Name, StringComparison.Ordinal)); - + var trackGroup = ImportTrackGroup(animation, gltfSkel, animName, ext); + animation.TrackGroups.Add(trackGroup); root.TrackGroups.Add(trackGroup); - root.Animations.Add(animation); } + + root.Animations.Add(animation); } - public void ImportAnimation(animation colladaAnim, Animation animation, TrackGroup trackGroup, Skeleton skeleton) + private TransformTrack ImportTrack(Animation anim, NodeBuilder joint, string animName) { - var childAnims = 0; - foreach (var item in colladaAnim.Items) + if (!joint.HasAnimations) return null; + + var translate = joint.Translation?.Tracks.GetValueOrDefault(animName); + var rotate = joint.Rotation?.Tracks.GetValueOrDefault(animName); + var scale = joint.Scale?.Tracks.GetValueOrDefault(animName); + + if (translate == null && rotate == null && scale == null) return null; + + var keyframes = new KeyframeTrack(); + + if (translate != null) { - if (item is animation) + var curve = (CurveBuilder)translate; + foreach (var key in curve.Keys) { - ImportAnimation(item as animation, animation, trackGroup, skeleton); - childAnims++; + var t = curve.GetPoint(key); + keyframes.AddTranslation(key, new OpenTK.Mathematics.Vector3(t.X, t.Y, t.Z)); } } - var duration = .0f; - if (childAnims < colladaAnim.Items.Length) + if (rotate != null) { - ColladaAnimation importAnim = new(); - if (importAnim.ImportFromCollada(colladaAnim, skeleton)) + var curve = (CurveBuilder)rotate; + foreach (var key in curve.Keys) { - duration = Math.Max(duration, importAnim.Duration); - var track = importAnim.MakeTrack(Options.RemoveTrivialAnimationKeys); - trackGroup.TransformTracks.Add(track); - LoadColladaLSLibProfileData(colladaAnim, trackGroup); + var q = curve.GetPoint(key); + keyframes.AddRotation(key, new OpenTK.Mathematics.Quaternion(q.X, q.Y, q.Z, q.W)); + } + } + + if (scale != null) + { + var curve = (CurveBuilder)scale; + foreach (var key in curve.Keys) + { + var s = curve.GetPoint(key); + var m = new OpenTK.Mathematics.Matrix3( + s.X, 0.0f, 0.0f, + 0.0f, s.Y, 0.0f, + 0.0f, 0.0f, s.Z + ); + keyframes.AddScaleShear(key, m); } } - animation.Duration = Math.Max(animation.Duration, duration); + var track = TransformTrack.FromKeyframes(keyframes); + track.Flags = 0; + anim.Duration = Math.Max(anim.Duration, keyframes.Keyframes.Last().Key); + + return track; } - private int ImportBone(Skeleton skeleton, int parentIndex, NodeBuilder node, GLTFSceneExtensions ext) + private int ImportBone(Skeleton skeleton, int parentIndex, NodeBuilder node, GLTFSceneExtensions ext, GLTFImportedSkeleton imported) { var transform = node.LocalTransform; var tm = transform.Matrix; @@ -304,7 +318,7 @@ private int ImportBone(Skeleton skeleton, int parentIndex, NodeBuilder 
node, GLT ParentIndex = parentIndex, Name = node.Name, LODError = 0, // TODO - OriginalTransform = new Matrix4( + OriginalTransform = new OpenTK.Mathematics.Matrix4( tm.M11, tm.M12, tm.M13, tm.M14, tm.M21, tm.M22, tm.M23, tm.M24, tm.M31, tm.M32, tm.M33, tm.M34, @@ -322,24 +336,34 @@ private int ImportBone(Skeleton skeleton, int parentIndex, NodeBuilder node, GLT bone.ExportIndex = order - 1; } + if (node.HasAnimations) + { + foreach (var anim in node.AnimationTracksNames) + { + AnimationNames.Add(anim); + } + } + + imported.Joints.Add(node.Name, node); return myIndex; } - private void ImportBoneTree(Skeleton skeleton, int parentIndex, NodeBuilder node, GLTFSceneExtensions ext) + private void ImportBoneTree(Skeleton skeleton, int parentIndex, NodeBuilder node, GLTFSceneExtensions ext, GLTFImportedSkeleton imported) { if (ext != null && !ext.BoneOrder.ContainsKey(node.Name)) return; - var boneIndex = ImportBone(skeleton, parentIndex, node, ext); + var boneIndex = ImportBone(skeleton, parentIndex, node, ext, imported); foreach (var child in node.VisualChildren) { - ImportBoneTree(skeleton, boneIndex, child, ext); + ImportBoneTree(skeleton, boneIndex, child, ext, imported); } } private Skeleton ImportSkeleton(string name, NodeBuilder root, GLTFSceneExtensions ext) { var skeleton = Skeleton.CreateEmpty(name); + var imported = new GLTFImportedSkeleton(); if (ext != null && ext.BoneOrder.Count > 0) { @@ -350,7 +374,7 @@ private Skeleton ImportSkeleton(string name, NodeBuilder root, GLTFSceneExtensio var roots = root.VisualChildren.Where(n => ext.BoneOrder.ContainsKey(n.Name)).ToList(); if (roots.Count == 1) { - ImportBoneTree(skeleton, -1, roots[0], ext); + ImportBoneTree(skeleton, -1, roots[0], ext, imported); return skeleton; } else @@ -360,7 +384,9 @@ private Skeleton ImportSkeleton(string name, NodeBuilder root, GLTFSceneExtensio } } - ImportBoneTree(skeleton, -1, root, ext); + ImportBoneTree(skeleton, -1, root, ext, imported); + + Skeletons[skeleton] = imported; return skeleton; } @@ -470,6 +496,16 @@ public Root Import(string inputPath) AddMeshToRoot(root, mesh); } + if (AnimationNames.Count > 0) + { + if (root.Skeletons.Count != 1) + { + throw new ParsingException("GLTF file must contain exactly one skeleton for animation import"); + } + + ImportAnimations(root, root.Skeletons.FirstOrDefault(), sceneExt); + } + // TODO: make this an option! if (root.Skeletons.Count > 0) root.Skeletons[0].UpdateWorldTransforms(); From 513ceaefcaa75e80a98ef542db832e56fd66d78d Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sun, 6 Apr 2025 15:48:43 +0200 Subject: [PATCH 127/139] Use builtin Half conversion logic --- LSLib/Granny/Model/HalfHelpers.cs | 167 ---------------------- LSLib/Granny/Model/VertexSerialization.cs | 48 +++---- 2 files changed, 24 insertions(+), 191 deletions(-) delete mode 100644 LSLib/Granny/Model/HalfHelpers.cs diff --git a/LSLib/Granny/Model/HalfHelpers.cs b/LSLib/Granny/Model/HalfHelpers.cs deleted file mode 100644 index 81d7ef24..00000000 --- a/LSLib/Granny/Model/HalfHelpers.cs +++ /dev/null @@ -1,167 +0,0 @@ -namespace LSLib.Granny.Model; - -/// -/// Helper class for Half conversions and some low level operations. -/// This class is internally used in the Half class. 
-/// -/// -/// References: -/// - Fast Half Float Conversions, Jeroen van der Zijp, link: http://www.fox-toolkit.org/ftp/fasthalffloatconversion.pdf -/// -internal static class HalfHelpers -{ - private static uint[] mantissaTable = GenerateMantissaTable(); - private static uint[] exponentTable = GenerateExponentTable(); - private static ushort[] offsetTable = GenerateOffsetTable(); - private static ushort[] baseTable = GenerateBaseTable(); - private static sbyte[] shiftTable = GenerateShiftTable(); - - // Transforms the subnormal representation to a normalized one. - private static uint ConvertMantissa(int i) - { - uint m = (uint)(i << 13); // Zero pad mantissa bits - uint e = 0; // Zero exponent - - // While not normalized - while ((m & 0x00800000) == 0) - { - e -= 0x00800000; // Decrement exponent (1<<23) - m <<= 1; // Shift mantissa - } - m &= unchecked((uint)~0x00800000); // Clear leading 1 bit - e += 0x38800000; // Adjust bias ((127-14)<<23) - return m | e; // Return combined number - } - - private static uint[] GenerateMantissaTable() - { - uint[] mantissaTable = new uint[2048]; - mantissaTable[0] = 0; - for (int i = 1; i < 1024; i++) - { - mantissaTable[i] = ConvertMantissa(i); - } - for (int i = 1024; i < 2048; i++) - { - mantissaTable[i] = (uint)(0x38000000 + ((i - 1024) << 13)); - } - - return mantissaTable; - } - private static uint[] GenerateExponentTable() - { - uint[] exponentTable = new uint[64]; - exponentTable[0] = 0; - for (int i = 1; i < 31; i++) - { - exponentTable[i] = (uint)(i << 23); - } - exponentTable[31] = 0x47800000; - exponentTable[32] = 0x80000000; - for (int i = 33; i < 63; i++) - { - exponentTable[i] = (uint)(0x80000000 + ((i - 32) << 23)); - } - exponentTable[63] = 0xc7800000; - - return exponentTable; - } - private static ushort[] GenerateOffsetTable() - { - ushort[] offsetTable = new ushort[64]; - offsetTable[0] = 0; - for (int i = 1; i < 32; i++) - { - offsetTable[i] = 1024; - } - offsetTable[32] = 0; - for (int i = 33; i < 64; i++) - { - offsetTable[i] = 1024; - } - - return offsetTable; - } - private static ushort[] GenerateBaseTable() - { - ushort[] baseTable = new ushort[512]; - for (int i = 0; i < 256; ++i) - { - sbyte e = (sbyte)(127 - i); - if (e > 24) - { // Very small numbers map to zero - baseTable[i | 0x000] = 0x0000; - baseTable[i | 0x100] = 0x8000; - } - else if (e > 14) - { // Small numbers map to denorms - baseTable[i | 0x000] = (ushort)(0x0400 >> (18 + e)); - baseTable[i | 0x100] = (ushort)((0x0400 >> (18 + e)) | 0x8000); - } - else if (e >= -15) - { // Normal numbers just lose precision - baseTable[i | 0x000] = (ushort)((15 - e) << 10); - baseTable[i | 0x100] = (ushort)(((15 - e) << 10) | 0x8000); - } - else if (e > -128) - { // Large numbers map to Infinity - baseTable[i | 0x000] = 0x7c00; - baseTable[i | 0x100] = 0xfc00; - } - else - { // Infinity and NaN's stay Infinity and NaN's - baseTable[i | 0x000] = 0x7c00; - baseTable[i | 0x100] = 0xfc00; - } - } - - return baseTable; - } - private static sbyte[] GenerateShiftTable() - { - sbyte[] shiftTable = new sbyte[512]; - for (int i = 0; i < 256; ++i) - { - sbyte e = (sbyte)(127 - i); - if (e > 24) - { // Very small numbers map to zero - shiftTable[i | 0x000] = 24; - shiftTable[i | 0x100] = 24; - } - else if (e > 14) - { // Small numbers map to denorms - shiftTable[i | 0x000] = (sbyte)(e - 1); - shiftTable[i | 0x100] = (sbyte)(e - 1); - } - else if (e >= -15) - { // Normal numbers just lose precision - shiftTable[i | 0x000] = 13; - shiftTable[i | 0x100] = 13; - } - else if (e > -128) - { // 
Large numbers map to Infinity - shiftTable[i | 0x000] = 24; - shiftTable[i | 0x100] = 24; - } - else - { // Infinity and NaN's stay Infinity and NaN's - shiftTable[i | 0x000] = 13; - shiftTable[i | 0x100] = 13; - } - } - - return shiftTable; - } - - public static unsafe float HalfToSingle(ushort half) - { - uint result = mantissaTable[offsetTable[half >> 10] + (half & 0x3ff)] + exponentTable[half >> 10]; - return *((float*)&result); - } - public static unsafe ushort SingleToHalf(float single) - { - uint value = *((uint*)&single); - - return (ushort)(baseTable[(value >> 23) & 0x1ff] + ((value & 0x007fffff) >> shiftTable[value >> 23])); - } -} diff --git a/LSLib/Granny/Model/VertexSerialization.cs b/LSLib/Granny/Model/VertexSerialization.cs index e3ab1e17..97f22e3f 100644 --- a/LSLib/Granny/Model/VertexSerialization.cs +++ b/LSLib/Granny/Model/VertexSerialization.cs @@ -18,8 +18,8 @@ public static Vector2 ReadVector2(GR2Reader reader) public static Vector2 ReadHalfVector2(GR2Reader reader) { Vector2 v; - v.X = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); - v.Y = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); + v.X = (float)reader.Reader.ReadHalf(); + v.Y = (float)reader.Reader.ReadHalf(); return v; } @@ -35,18 +35,18 @@ public static Vector3 ReadVector3(GR2Reader reader) public static Vector3 ReadHalfVector3(GR2Reader reader) { Vector3 v; - v.X = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); - v.Y = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); - v.Z = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); + v.X = (float)reader.Reader.ReadHalf(); + v.Y = (float)reader.Reader.ReadHalf(); + v.Z = (float)reader.Reader.ReadHalf(); return v; } public static Vector3 ReadHalfVector4As3(GR2Reader reader) { Vector3 v; - v.X = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); - v.Y = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); - v.Z = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); + v.X = (float)reader.Reader.ReadHalf(); + v.Y = (float)reader.Reader.ReadHalf(); + v.Z = (float)reader.Reader.ReadHalf(); reader.Reader.ReadUInt16(); return v; } @@ -75,10 +75,10 @@ public static Vector4 ReadVector4(GR2Reader reader) public static Vector4 ReadHalfVector4(GR2Reader reader) { Vector4 v; - v.X = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); - v.Y = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); - v.Z = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); - v.W = HalfHelpers.HalfToSingle(reader.Reader.ReadUInt16()); + v.X = (float)reader.Reader.ReadHalf(); + v.Y = (float)reader.Reader.ReadHalf(); + v.Z = (float)reader.Reader.ReadHalf(); + v.W = (float)reader.Reader.ReadHalf(); return v; } @@ -205,8 +205,8 @@ public static void WriteVector2(WritableSection section, Vector2 v) public static void WriteHalfVector2(WritableSection section, Vector2 v) { - section.Writer.Write(HalfHelpers.SingleToHalf(v.X)); - section.Writer.Write(HalfHelpers.SingleToHalf(v.Y)); + section.Writer.Write((System.Half)v.X); + section.Writer.Write((System.Half)v.Y); } public static void WriteVector3(WritableSection section, Vector3 v) @@ -218,16 +218,16 @@ public static void WriteVector3(WritableSection section, Vector3 v) public static void WriteHalfVector3(WritableSection section, Vector3 v) { - section.Writer.Write(HalfHelpers.SingleToHalf(v.X)); - section.Writer.Write(HalfHelpers.SingleToHalf(v.Y)); - section.Writer.Write(HalfHelpers.SingleToHalf(v.Z)); + section.Writer.Write((System.Half)v.X); + section.Writer.Write((System.Half)v.Y); + 
section.Writer.Write((System.Half)v.Z); } public static void WriteHalfVector3As4(WritableSection section, Vector3 v) { - section.Writer.Write(HalfHelpers.SingleToHalf(v.X)); - section.Writer.Write(HalfHelpers.SingleToHalf(v.Y)); - section.Writer.Write(HalfHelpers.SingleToHalf(v.Z)); + section.Writer.Write((System.Half)v.X); + section.Writer.Write((System.Half)v.Y); + section.Writer.Write((System.Half)v.Z); section.Writer.Write((ushort)0); } @@ -249,10 +249,10 @@ public static void WriteVector4(WritableSection section, Vector4 v) public static void WriteHalfVector4(WritableSection section, Vector4 v) { - section.Writer.Write(HalfHelpers.SingleToHalf(v.X)); - section.Writer.Write(HalfHelpers.SingleToHalf(v.Y)); - section.Writer.Write(HalfHelpers.SingleToHalf(v.Z)); - section.Writer.Write(HalfHelpers.SingleToHalf(v.W)); + section.Writer.Write((System.Half)v.X); + section.Writer.Write((System.Half)v.Y); + section.Writer.Write((System.Half)v.Z); + section.Writer.Write((System.Half)v.W); } public static void WriteNormalByteVector4(WritableSection section, Vector4 v) From 134d55e27c764fc5d4f9167f00df861139dc3a77 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sun, 6 Apr 2025 20:00:53 +0200 Subject: [PATCH 128/139] Simplify vector format conversions --- LSLib/Granny/GR2/Format.cs | 7 +- LSLib/Granny/Model/GLTFExporter.cs | 8 +- LSLib/Granny/Model/GLTFImporter.cs | 4 +- LSLib/Granny/Model/GLTFVertex.cs | 188 ++++++++++++++--------------- 4 files changed, 103 insertions(+), 104 deletions(-) diff --git a/LSLib/Granny/GR2/Format.cs b/LSLib/Granny/GR2/Format.cs index be94a0bf..f9113f2d 100644 --- a/LSLib/Granny/GR2/Format.cs +++ b/LSLib/Granny/GR2/Format.cs @@ -3,6 +3,7 @@ using System.Reflection; using System.IO.Hashing; using SharpGLTF.Transforms; +using LSLib.Granny.Model; namespace LSLib.Granny.GR2; @@ -126,9 +127,9 @@ public static Transform FromMatrix4(Matrix4 mat) public static Transform FromGLTF(AffineTransform t) { var transform = new Transform(); - transform.SetTranslation(new Vector3(t.Translation.X, t.Translation.Y, t.Translation.Z)); - transform.SetRotation(new Quaternion(t.Rotation.X, t.Rotation.Y, t.Rotation.Z, t.Rotation.W)); - transform.SetScale(new Vector3(t.Scale.X, t.Scale.Y, t.Scale.Z)); + transform.SetTranslation(t.Translation.ToOpenTK()); + transform.SetRotation(t.Rotation.ToOpenTK()); + transform.SetScale(t.Scale.ToOpenTK()); return transform; } diff --git a/LSLib/Granny/Model/GLTFExporter.cs b/LSLib/Granny/Model/GLTFExporter.cs index 4c9e4be4..23ca5564 100644 --- a/LSLib/Granny/Model/GLTFExporter.cs +++ b/LSLib/Granny/Model/GLTFExporter.cs @@ -175,8 +175,8 @@ private AffineTransform ToGLTFTransform(Transform t) { return new AffineTransform( new Vector3(t.ScaleShear[0,0], t.ScaleShear[1,1], t.ScaleShear[2,2]), - new Quaternion(t.Rotation.X, t.Rotation.Y, t.Rotation.Z, t.Rotation.W), - new Vector3(t.Translation.X, t.Translation.Y, t.Translation.Z) + t.Rotation.ToNumerics(), + t.Translation.ToNumerics() ); } @@ -207,13 +207,13 @@ private void ExportAnimationTrack(TransformTrack track, NodeBuilder joint, strin if (frame.HasTranslation) { var v = frame.Translation; - translate.SetPoint(time, new Vector3(v.X, v.Y, v.Z), true); + translate.SetPoint(time, v.ToNumerics(), true); } if (frame.HasRotation) { var q = frame.Rotation; - rotation.SetPoint(time, new Quaternion(q.X, q.Y, q.Z, q.W), true); + rotation.SetPoint(time, q.ToNumerics(), true); } if (frame.HasScaleShear) diff --git a/LSLib/Granny/Model/GLTFImporter.cs b/LSLib/Granny/Model/GLTFImporter.cs index cba6b2e5..57ea2253 100644 --- 
a/LSLib/Granny/Model/GLTFImporter.cs +++ b/LSLib/Granny/Model/GLTFImporter.cs @@ -271,7 +271,7 @@ private TransformTrack ImportTrack(Animation anim, NodeBuilder joint, string ani foreach (var key in curve.Keys) { var t = curve.GetPoint(key); - keyframes.AddTranslation(key, new OpenTK.Mathematics.Vector3(t.X, t.Y, t.Z)); + keyframes.AddTranslation(key, t.ToOpenTK()); } } @@ -281,7 +281,7 @@ private TransformTrack ImportTrack(Animation anim, NodeBuilder joint, string ani foreach (var key in curve.Keys) { var q = curve.GetPoint(key); - keyframes.AddRotation(key, new OpenTK.Mathematics.Quaternion(q.X, q.Y, q.Z, q.W)); + keyframes.AddRotation(key, q.ToOpenTK()); } } diff --git a/LSLib/Granny/Model/GLTFVertex.cs b/LSLib/Granny/Model/GLTFVertex.cs index b4f1747a..25244701 100644 --- a/LSLib/Granny/Model/GLTFVertex.cs +++ b/LSLib/Granny/Model/GLTFVertex.cs @@ -5,10 +5,54 @@ using TKVec2 = OpenTK.Mathematics.Vector2; using TKVec3 = OpenTK.Mathematics.Vector3; using TKVec4 = OpenTK.Mathematics.Vector4; +using TKQuat = OpenTK.Mathematics.Quaternion; using System.Reflection; +using OpenTK.Mathematics; namespace LSLib.Granny.Model; +static class GLTFConversionHelpers +{ + public static TKVec2 ToOpenTK(this System.Numerics.Vector2 v) + { + return new TKVec2(v.X, v.Y); + } + + public static TKVec3 ToOpenTK(this System.Numerics.Vector3 v) + { + return new TKVec3(v.X, v.Y, v.Z); + } + + public static TKVec4 ToOpenTK(this System.Numerics.Vector4 v) + { + return new TKVec4(v.X, v.Y, v.Z, v.W); + } + + public static TKQuat ToOpenTK(this System.Numerics.Quaternion v) + { + return new TKQuat(v.X, v.Y, v.Z, v.W); + } + + public static System.Numerics.Vector2 ToNumerics(this TKVec2 v) + { + return new System.Numerics.Vector2(v.X, v.Y); + } + + public static System.Numerics.Vector3 ToNumerics(this TKVec3 v) + { + return new System.Numerics.Vector3(v.X, v.Y, v.Z); + } + + public static System.Numerics.Vector4 ToNumerics(this TKVec4 v) + { + return new System.Numerics.Vector4(v.X, v.Y, v.Z, v.W); + } + + public static System.Numerics.Quaternion ToNumerics(this TKQuat v) + { + return new System.Numerics.Quaternion(v.X, v.Y, v.Z, v.W); + } +} public interface GLTFVertexBuilder { public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert); @@ -77,9 +121,9 @@ public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) var w = (TKVec3.Dot(TKVec3.Cross(n, t), b) < 0.0F) ? 
-1.0F : 1.0F; var v = new VertexPositionNormalTangent( - new Vector3(pos.X, pos.Y, pos.Z), - new Vector3(n.X, n.Y, n.Z), - new Vector4(t.X, t.Y, t.Z, w) + pos.ToNumerics(), + n.ToNumerics(), + new System.Numerics.Vector4(t.X, t.Y, t.Z, w) ); gltfVert.SetGeometry(v); } @@ -87,11 +131,9 @@ public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) { var geom = (VertexPositionNormalTangent)gltfVert.GetGeometry(); - var pos = geom.Position; - var n = geom.Normal; var t = geom.Tangent; - gr2Vert.Position = new TKVec3(pos.X, pos.Y, pos.Z); - gr2Vert.Normal = new TKVec3(n.X, n.Y, n.Z); + gr2Vert.Position = geom.Position.ToOpenTK(); + gr2Vert.Normal = geom.Normal.ToOpenTK(); gr2Vert.Tangent = new TKVec3(t.X, t.Y, t.Z); gr2Vert.Binormal = (TKVec3.Cross(gr2Vert.Normal, gr2Vert.Tangent) * t.W).Normalized(); } @@ -114,9 +156,8 @@ public class GLTFVertexMaterialBuilderTexture1 : GLTFVertexBuilder { public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) { - var uv0 = gr2Vert.TextureCoordinates0; var v = new VertexTexture1( - new Vector2(uv0.X, uv0.Y) + gr2Vert.TextureCoordinates0.ToNumerics() ); gltfVert.SetMaterial(v); } @@ -124,8 +165,7 @@ public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) { var geom = (VertexTexture1)gltfVert.GetMaterial(); - var uv0 = geom.TexCoord; - gr2Vert.TextureCoordinates0 = new TKVec2(uv0.X, uv0.Y); + gr2Vert.TextureCoordinates0 = geom.TexCoord.ToOpenTK(); } } @@ -133,11 +173,9 @@ public class GLTFVertexMaterialBuilderTexture2 : GLTFVertexBuilder { public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) { - var uv0 = gr2Vert.TextureCoordinates0; - var uv1 = gr2Vert.TextureCoordinates1; var v = new VertexTexture2( - new Vector2(uv0.X, uv0.Y), - new Vector2(uv1.X, uv1.Y) + gr2Vert.TextureCoordinates0.ToNumerics(), + gr2Vert.TextureCoordinates1.ToNumerics() ); gltfVert.SetMaterial(v); } @@ -145,10 +183,8 @@ public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) { var geom = (VertexTexture2)gltfVert.GetMaterial(); - var uv0 = geom.TexCoord0; - var uv1 = geom.TexCoord1; - gr2Vert.TextureCoordinates0 = new TKVec2(uv0.X, uv0.Y); - gr2Vert.TextureCoordinates1 = new TKVec2(uv1.X, uv1.Y); + gr2Vert.TextureCoordinates0 = geom.TexCoord0.ToOpenTK(); + gr2Vert.TextureCoordinates1 = geom.TexCoord1.ToOpenTK(); } } @@ -156,13 +192,10 @@ public class GLTFVertexMaterialBuilderTexture3 : GLTFVertexBuilder { public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) { - var uv0 = gr2Vert.TextureCoordinates0; - var uv1 = gr2Vert.TextureCoordinates1; - var uv2 = gr2Vert.TextureCoordinates2; var v = new VertexTexture3( - new Vector2(uv0.X, uv0.Y), - new Vector2(uv1.X, uv1.Y), - new Vector2(uv2.X, uv2.Y) + gr2Vert.TextureCoordinates0.ToNumerics(), + gr2Vert.TextureCoordinates1.ToNumerics(), + gr2Vert.TextureCoordinates2.ToNumerics() ); gltfVert.SetMaterial(v); } @@ -170,12 +203,9 @@ public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) { var geom = (VertexTexture3)gltfVert.GetMaterial(); - var uv0 = geom.TexCoord0; - var uv1 = geom.TexCoord1; - var uv2 = geom.TexCoord2; - gr2Vert.TextureCoordinates0 = new TKVec2(uv0.X, uv0.Y); - gr2Vert.TextureCoordinates1 = new TKVec2(uv1.X, uv1.Y); - gr2Vert.TextureCoordinates2 = new TKVec2(uv2.X, uv2.Y); + gr2Vert.TextureCoordinates0 = geom.TexCoord0.ToOpenTK(); + 
gr2Vert.TextureCoordinates1 = geom.TexCoord1.ToOpenTK(); + gr2Vert.TextureCoordinates2 = geom.TexCoord2.ToOpenTK(); } } @@ -183,15 +213,11 @@ public class GLTFVertexMaterialBuilderTexture4 : GLTFVertexBuilder { public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) { - var uv0 = gr2Vert.TextureCoordinates0; - var uv1 = gr2Vert.TextureCoordinates1; - var uv2 = gr2Vert.TextureCoordinates2; - var uv3 = gr2Vert.TextureCoordinates3; var v = new VertexTexture4( - new Vector2(uv0.X, uv0.Y), - new Vector2(uv1.X, uv1.Y), - new Vector2(uv2.X, uv2.Y), - new Vector2(uv3.X, uv3.Y) + gr2Vert.TextureCoordinates0.ToNumerics(), + gr2Vert.TextureCoordinates1.ToNumerics(), + gr2Vert.TextureCoordinates2.ToNumerics(), + gr2Vert.TextureCoordinates3.ToNumerics() ); gltfVert.SetMaterial(v); } @@ -199,14 +225,10 @@ public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) { var geom = (VertexTexture4)gltfVert.GetMaterial(); - var uv0 = geom.TexCoord0; - var uv1 = geom.TexCoord1; - var uv2 = geom.TexCoord2; - var uv3 = geom.TexCoord3; - gr2Vert.TextureCoordinates0 = new TKVec2(uv0.X, uv0.Y); - gr2Vert.TextureCoordinates1 = new TKVec2(uv1.X, uv1.Y); - gr2Vert.TextureCoordinates2 = new TKVec2(uv2.X, uv2.Y); - gr2Vert.TextureCoordinates3 = new TKVec2(uv3.X, uv3.Y); + gr2Vert.TextureCoordinates0 = geom.TexCoord0.ToOpenTK(); + gr2Vert.TextureCoordinates1 = geom.TexCoord1.ToOpenTK(); + gr2Vert.TextureCoordinates2 = geom.TexCoord2.ToOpenTK(); + gr2Vert.TextureCoordinates3 = geom.TexCoord3.ToOpenTK(); } } @@ -214,11 +236,9 @@ public class GLTFVertexMaterialBuilderColor1Texture1 : GLTFVertexBuilder { public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) { - var c0 = gr2Vert.Color0; - var uv0 = gr2Vert.TextureCoordinates0; var v = new VertexColor1Texture1( - new Vector4(c0.X, c0.Y, c0.Z, c0.W), - new Vector2(uv0.X, uv0.Y) + gr2Vert.Color0.ToNumerics(), + gr2Vert.TextureCoordinates0.ToNumerics() ); gltfVert.SetMaterial(v); } @@ -226,10 +246,8 @@ public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) { var geom = (VertexColor1Texture1)gltfVert.GetMaterial(); - var uv0 = geom.TexCoord; - var c0 = geom.Color; - gr2Vert.TextureCoordinates0 = new TKVec2(uv0.X, uv0.Y); - gr2Vert.Color0 = new TKVec4(c0.X, c0.Y, c0.Z, c0.W); + gr2Vert.TextureCoordinates0 = geom.TexCoord.ToOpenTK(); + gr2Vert.Color0 = geom.Color.ToOpenTK(); } } @@ -237,13 +255,10 @@ public class GLTFVertexMaterialBuilderColor1Texture2 : GLTFVertexBuilder { public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) { - var c0 = gr2Vert.Color0; - var uv0 = gr2Vert.TextureCoordinates0; - var uv1 = gr2Vert.TextureCoordinates1; var v = new VertexColor1Texture2( - new Vector4(c0.X, c0.Y, c0.Z, c0.W), - new Vector2(uv0.X, uv0.Y), - new Vector2(uv1.X, uv1.Y) + gr2Vert.Color0.ToNumerics(), + gr2Vert.TextureCoordinates0.ToNumerics(), + gr2Vert.TextureCoordinates1.ToNumerics() ); gltfVert.SetMaterial(v); } @@ -251,12 +266,9 @@ public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) { var geom = (VertexColor1Texture2)gltfVert.GetMaterial(); - var uv0 = geom.TexCoord0; - var uv1 = geom.TexCoord1; - var c0 = geom.Color; - gr2Vert.TextureCoordinates0 = new TKVec2(uv0.X, uv0.Y); - gr2Vert.TextureCoordinates1 = new TKVec2(uv1.X, uv1.Y); - gr2Vert.Color0 = new TKVec4(c0.X, c0.Y, c0.Z, c0.W); + gr2Vert.TextureCoordinates0 = geom.TexCoord0.ToOpenTK(); + gr2Vert.TextureCoordinates1 = 
geom.TexCoord1.ToOpenTK(); + gr2Vert.Color0 = geom.Color.ToOpenTK(); } } @@ -264,13 +276,10 @@ public class GLTFVertexMaterialBuilderColor2Texture1 : GLTFVertexBuilder { public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) { - var c0 = gr2Vert.Color0; - var c1 = gr2Vert.Color1; - var uv0 = gr2Vert.TextureCoordinates0; var v = new VertexColor2Texture1( - new Vector4(c0.X, c0.Y, c0.Z, c0.W), - new Vector4(c1.X, c1.Y, c1.Z, c1.W), - new Vector2(uv0.X, uv0.Y) + gr2Vert.Color0.ToNumerics(), + gr2Vert.Color1.ToNumerics(), + gr2Vert.TextureCoordinates0.ToNumerics() ); gltfVert.SetMaterial(v); } @@ -278,12 +287,9 @@ public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) { var geom = (VertexColor2Texture1)gltfVert.GetMaterial(); - var uv0 = geom.TexCoord; - var c0 = geom.Color0; - var c1 = geom.Color1; - gr2Vert.TextureCoordinates0 = new TKVec2(uv0.X, uv0.Y); - gr2Vert.Color0 = new TKVec4(c0.X, c0.Y, c0.Z, c0.W); - gr2Vert.Color1 = new TKVec4(c1.X, c1.Y, c1.Z, c1.W); + gr2Vert.TextureCoordinates0 = geom.TexCoord.ToOpenTK(); + gr2Vert.Color0 = geom.Color0.ToOpenTK(); + gr2Vert.Color1 = geom.Color1.ToOpenTK(); } } @@ -291,15 +297,11 @@ public class GLTFVertexMaterialBuilderColor2Texture2 : GLTFVertexBuilder { public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) { - var c0 = gr2Vert.Color0; - var c1 = gr2Vert.Color1; - var uv0 = gr2Vert.TextureCoordinates0; - var uv1 = gr2Vert.TextureCoordinates1; var v = new VertexColor2Texture2( - new Vector4(c0.X, c0.Y, c0.Z, c0.W), - new Vector4(c1.X, c1.Y, c1.Z, c1.W), - new Vector2(uv0.X, uv0.Y), - new Vector2(uv1.X, uv1.Y) + gr2Vert.Color0.ToNumerics(), + gr2Vert.Color1.ToNumerics(), + gr2Vert.TextureCoordinates0.ToNumerics(), + gr2Vert.TextureCoordinates1.ToNumerics() ); gltfVert.SetMaterial(v); } @@ -307,14 +309,10 @@ public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) { var geom = (VertexColor2Texture2)gltfVert.GetMaterial(); - var uv0 = geom.TexCoord0; - var uv1 = geom.TexCoord1; - var c0 = geom.Color0; - var c1 = geom.Color1; - gr2Vert.TextureCoordinates0 = new TKVec2(uv0.X, uv0.Y); - gr2Vert.TextureCoordinates1 = new TKVec2(uv1.X, uv1.Y); - gr2Vert.Color0 = new TKVec4(c0.X, c0.Y, c0.Z, c0.W); - gr2Vert.Color1 = new TKVec4(c1.X, c1.Y, c1.Z, c1.W); + gr2Vert.TextureCoordinates0 = geom.TexCoord0.ToOpenTK(); + gr2Vert.TextureCoordinates1 = geom.TexCoord1.ToOpenTK(); + gr2Vert.Color0 = geom.Color0.ToOpenTK(); + gr2Vert.Color1 = geom.Color1.ToOpenTK(); } } From 435b91763e9da2600af712cd612eb3a0a624cf32 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Wed, 16 Apr 2025 17:56:53 +0200 Subject: [PATCH 129/139] Update README.md --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 65eb20ea..063fb56d 100644 --- a/README.md +++ b/README.md @@ -14,6 +14,6 @@ Requirements To build the tools you'll need to get the following dependencies: - - Download GPLex 1.2.2 [from here](https://s3.eu-central-1.amazonaws.com/nb-stor/dos/ExportTool/gplex-distro-1_2_2.zip) and extract it to the `External\gplex\` directory - - Download GPPG 1.5.2 [from here](https://s3.eu-central-1.amazonaws.com/nb-stor/dos/ExportTool/gppg-distro-1_5_2.zip) and extract it to the `External\gppg\` directory + - Download GPLex 1.2.2 [from here](https://s3.eu-central-1.amazonaws.com/nb-stor/dos-legacy/ExportTool/gplex-distro-1_2_2.zip) and extract it to the `External\gplex\` directory + - Download GPPG 1.5.2 
[from here](https://s3.eu-central-1.amazonaws.com/nb-stor/dos-legacy/ExportTool/gppg-distro-1_5_2.zip) and extract it to the `External\gppg\` directory - Protocol Buffers 3.6.1 compiler [from here](https://github.com/protocolbuffers/protobuf/releases/download/v3.6.1/protoc-3.6.1-win32.zip) and extract it to the `External\protoc\` directory From afd3240d8021c00ca76bc4d7b4d3e1487acb8bcb Mon Sep 17 00:00:00 2001 From: Norbyte Date: Wed, 16 Apr 2025 19:27:41 +0200 Subject: [PATCH 130/139] Add support for Osi format v1.14 --- LSLib/LS/Story/Call.cs | 18 +++-- LSLib/LS/Story/Common.cs | 7 +- LSLib/LS/Story/Rule.cs | 12 +++- LSLib/LS/Story/Story.cs | 2 +- LSLib/LS/Story/Value.cs | 138 ++++++++++++++++++++++++++++++++------- 5 files changed, 144 insertions(+), 33 deletions(-) diff --git a/LSLib/LS/Story/Call.cs b/LSLib/LS/Story/Call.cs index 02fa698d..55086e98 100644 --- a/LSLib/LS/Story/Call.cs +++ b/LSLib/LS/Story/Call.cs @@ -20,11 +20,18 @@ public void Read(OsiReader reader) while (numParams-- > 0) { TypedValue param; - var type = reader.ReadByte(); - if (type == 1) + if (reader.Ver >= OsiVersion.VerValueFlags) + { param = new Variable(); + } else - param = new TypedValue(); + { + var type = reader.ReadByte(); + if (type == 1) + param = new Variable(); + else + param = new TypedValue(); + } param.Read(reader); Parameters.Add(param); } @@ -47,7 +54,10 @@ public void Write(OsiWriter writer) writer.Write((byte)Parameters.Count); foreach (var param in Parameters) { - writer.Write(param is Variable); + if (writer.Ver < OsiVersion.VerValueFlags) + { + writer.Write(param is Variable); + } param.Write(writer); } } diff --git a/LSLib/LS/Story/Common.cs b/LSLib/LS/Story/Common.cs index 7e55d33e..0801a1c6 100644 --- a/LSLib/LS/Story/Common.cs +++ b/LSLib/LS/Story/Common.cs @@ -71,10 +71,15 @@ public static class OsiVersion /// public const uint VerEnums = 0x010d; + /// + /// Changed values to store flags/indices in a more compact way + /// + public const uint VerValueFlags = 0x010e; + /// /// Last supported Osi version /// - public const uint VerLastSupported = VerEnums; + public const uint VerLastSupported = VerValueFlags; } public class OsiReader : BinaryReader diff --git a/LSLib/LS/Story/Rule.cs b/LSLib/LS/Story/Rule.cs index 644d8d3f..86d298e4 100644 --- a/LSLib/LS/Story/Rule.cs +++ b/LSLib/LS/Story/Rule.cs @@ -24,8 +24,11 @@ public override void Read(OsiReader reader) var variables = reader.ReadByte(); while (variables-- > 0) { - var type = reader.ReadByte(); - if (type != 1) throw new InvalidDataException("Illegal value type in rule variable list"); + if (reader.Ver < OsiVersion.VerValueFlags) + { + var type = reader.ReadByte(); + if (type != 1) throw new InvalidDataException("Illegal value type in rule variable list"); + } var variable = new Variable(); variable.Read(reader); if (variable.Adapted) @@ -52,7 +55,10 @@ public override void Write(OsiWriter writer) writer.Write((byte)Variables.Count); foreach (var variable in Variables) { - writer.Write((byte)1); + if (writer.Ver < OsiVersion.VerValueFlags) + { + writer.Write((byte)1); + } variable.Write(writer); } diff --git a/LSLib/LS/Story/Story.cs b/LSLib/LS/Story/Story.cs index aa8212ee..3471c3f1 100644 --- a/LSLib/LS/Story/Story.cs +++ b/LSLib/LS/Story/Story.cs @@ -301,7 +301,7 @@ public Story Read(Stream stream) if (reader.Ver > OsiVersion.VerLastSupported) { var msg = String.Format( - "Osiris version v{0}.{1} unsupported; this tool supports loading up to version 1.12.", + "Osiris version v{0}.{1} unsupported; this tool supports loading up to 
version 1.14.", reader.MajorVersion, reader.MinorVersion ); throw new InvalidDataException(msg); diff --git a/LSLib/LS/Story/Value.cs b/LSLib/LS/Story/Value.cs index 59707ccd..115781d3 100644 --- a/LSLib/LS/Story/Value.cs +++ b/LSLib/LS/Story/Value.cs @@ -22,12 +22,37 @@ public enum Type_OS1 : uint String = 3 } + // Format of flags after v1.14 + [Flags] + protected enum ValueFlags : byte + { + NoneType = 0, + SimpleValue = 0x01, + TypedValue = 0x02, + Variable = 0x03, + IsValid = 0x08, + OutParam = 0x10, + IsAType = 0x20, + Unused = 0x40, + Adapted = 0x80, + } + public UInt32 TypeId; public Int32 IntValue; public Int64 Int64Value; public Single FloatValue; public String StringValue; + // for TypedVal + public bool IsValid; + public bool OutParam; + public bool IsAType; + + // for Value + public sbyte Index; + public bool Unused; + public bool Adapted; + public override string ToString() { switch ((Type)TypeId) @@ -104,6 +129,20 @@ public Type GetBuiltinTypeId(Story story) public virtual void Read(OsiReader reader) { + if (reader.Ver >= OsiVersion.VerValueFlags) + { + Index = reader.ReadSByte(); + + var flags = (ValueFlags)reader.ReadByte(); + if ((flags & ValueFlags.IsValid) == ValueFlags.IsValid) IsValid = true; + if ((flags & ValueFlags.OutParam) == ValueFlags.OutParam) OutParam = true; + if ((flags & ValueFlags.IsAType) == ValueFlags.IsAType) IsAType = true; + if ((flags & ValueFlags.Unused) == ValueFlags.Unused) Unused = true; + if ((flags & ValueFlags.Adapted) == ValueFlags.Adapted) Adapted = true; + + if ((flags & ValueFlags.IsValid) != ValueFlags.IsValid) return; + } + // possibly isReference? var wtf = reader.ReadByte(); if (wtf == '1') @@ -207,8 +246,29 @@ public virtual void Read(OsiReader reader) } } + protected virtual ValueFlags GetTypeFlags() + { + return ValueFlags.SimpleValue; + } + public virtual void Write(OsiWriter writer) { + if (writer.Ver >= OsiVersion.VerValueFlags) + { + writer.Write(Index); + + var flags = GetTypeFlags(); + + if (IsValid) flags |= ValueFlags.IsValid; + if (OutParam) flags |= ValueFlags.OutParam; + if (IsAType) flags |= ValueFlags.IsAType; + if (Unused) flags |= ValueFlags.Unused; + if (Adapted) flags |= ValueFlags.Adapted; + writer.Write((byte)flags); + + if (!IsValid) return; + } + if (writer.Enums.ContainsKey(TypeId)) { writer.Write((byte)'e'); @@ -360,24 +420,32 @@ public virtual void MakeScript(TextWriter writer, Story story, Tuple tuple, bool public class TypedValue : Value { - public bool IsValid; - public bool OutParam; - public bool IsAType; public override void Read(OsiReader reader) { base.Read(reader); - IsValid = reader.ReadBoolean(); - OutParam = reader.ReadBoolean(); - IsAType = reader.ReadBoolean(); + if (reader.Ver < OsiVersion.VerValueFlags) + { + IsValid = reader.ReadBoolean(); + OutParam = reader.ReadBoolean(); + IsAType = reader.ReadBoolean(); + } } public override void Write(OsiWriter writer) { base.Write(writer); - writer.Write(IsValid); - writer.Write(OutParam); - writer.Write(IsAType); + if (writer.Ver < OsiVersion.VerValueFlags) + { + writer.Write(IsValid); + writer.Write(OutParam); + writer.Write(IsAType); + } + } + + protected override ValueFlags GetTypeFlags() + { + return ValueFlags.TypedValue; } public override void DebugDump(TextWriter writer, Story story) @@ -399,25 +467,33 @@ public override void DebugDump(TextWriter writer, Story story) public class Variable : TypedValue { - public sbyte Index; - public bool Unused; - public bool Adapted; public string VariableName; public override void Read(OsiReader reader) { 
base.Read(reader); - Index = reader.ReadSByte(); - Unused = reader.ReadBoolean(); - Adapted = reader.ReadBoolean(); + if (reader.Ver < OsiVersion.VerValueFlags) + { + Index = reader.ReadSByte(); + Unused = reader.ReadBoolean(); + Adapted = reader.ReadBoolean(); + } } public override void Write(OsiWriter writer) { base.Write(writer); - writer.Write(Index); - writer.Write(Unused); - writer.Write(Adapted); + if (writer.Ver < OsiVersion.VerValueFlags) + { + writer.Write(Index); + writer.Write(Unused); + writer.Write(Adapted); + } + } + + protected override ValueFlags GetTypeFlags() + { + return ValueFlags.Variable; } public override void DebugDump(TextWriter writer, Story story) @@ -476,12 +552,23 @@ public void Read(OsiReader reader) var count = reader.ReadByte(); while (count-- > 0) { - var index = reader.ReadByte(); - var value = new Value(); - value.Read(reader); + if (reader.Ver >= OsiVersion.VerValueFlags) + { + var value = new Value(); + value.Read(reader); - Physical.Add(value); - Logical.Add(index, value); + Physical.Add(value); + Logical.Add(value.Index, value); + } + else + { + var index = reader.ReadByte(); + var value = new Value(); + value.Read(reader); + + Physical.Add(value); + Logical.Add(index, value); + } } } @@ -490,7 +577,10 @@ public void Write(OsiWriter writer) writer.Write((byte)Logical.Count); foreach (var logical in Logical) { - writer.Write((byte)logical.Key); + if (writer.Ver < OsiVersion.VerValueFlags) + { + writer.Write((byte)logical.Key); + } logical.Value.Write(writer); } } From ff9b1e804ae3f8d49eeeb5c916af6e89695aee0e Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sun, 4 May 2025 14:33:59 +0200 Subject: [PATCH 131/139] Make loading unpacked files optional --- LSLib/LS/VFS.cs | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/LSLib/LS/VFS.cs b/LSLib/LS/VFS.cs index 42ce2608..89fa272d 100644 --- a/LSLib/LS/VFS.cs +++ b/LSLib/LS/VFS.cs @@ -74,9 +74,12 @@ public void DetachRoot() RootDir = null; } - public void AttachGameDirectory(string gameDataPath, bool excludeAssets = true) + public void AttachGameDirectory(string gameDataPath, bool excludeAssets = true, bool loadUnpackedFiles = true) { - AttachRoot(gameDataPath); + if (loadUnpackedFiles) + { + AttachRoot(gameDataPath); + } // List of packages we won't ever load // These packages don't contain any mod resources, but have a large From a1ebfdb1f97d461df57dad119564564d1f08a76c Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sun, 4 May 2025 14:34:36 +0200 Subject: [PATCH 132/139] Export model name to glTF --- LSLib/Granny/Model/GLTFExporter.cs | 22 ++++++++++++++++++++-- LSLib/Granny/Model/GLTFExtensions.cs | 3 +++ LSLib/Granny/Model/GLTFImporter.cs | 8 ++++++-- LSLib/LS/Mods/ModResources.cs | 4 ++-- 4 files changed, 31 insertions(+), 6 deletions(-) diff --git a/LSLib/Granny/Model/GLTFExporter.cs b/LSLib/Granny/Model/GLTFExporter.cs index 23ca5564..9c743e3d 100644 --- a/LSLib/Granny/Model/GLTFExporter.cs +++ b/LSLib/Granny/Model/GLTFExporter.cs @@ -5,7 +5,6 @@ using SharpGLTF.Scenes; using LSLib.LS; using SharpGLTF.Schema2; -using SharpGLTF.Animations; namespace LSLib.Granny.Model; @@ -109,9 +108,28 @@ private void ExportSceneExtensions(Root root, GLTFSceneExtensions ext) ext.LSLibMinor = Common.MinorVersion; ext.LSLibPatch = Common.PatchVersion; + foreach (var model in root.Models ?? []) + { + if (model.Name != "") + { + ext.ModelName = model.Name; + } + } + + if (ext.ModelName == "") + { + foreach (var skeleton in root.Skeletons ?? 
[]) + { + if (skeleton.Name != "") + { + ext.ModelName = skeleton.Name; + } + } + } + foreach (var group in root.TrackGroups ?? []) { - if (group.ExtendedData != null) + if (group.ExtendedData != null && group.ExtendedData.SkeletonResourceID != "") { ext.SkeletonResourceID = group.ExtendedData.SkeletonResourceID; } diff --git a/LSLib/Granny/Model/GLTFExtensions.cs b/LSLib/Granny/Model/GLTFExtensions.cs index c5ca0d92..07085900 100644 --- a/LSLib/Granny/Model/GLTFExtensions.cs +++ b/LSLib/Granny/Model/GLTFExtensions.cs @@ -15,6 +15,7 @@ internal GLTFSceneExtensions() { } public Dictionary BoneOrder = []; public string SkeletonResourceID; + public string ModelName; protected override void SerializeProperties(Utf8JsonWriter writer) { @@ -27,6 +28,7 @@ protected override void SerializeProperties(Utf8JsonWriter writer) SerializeProperty(writer, "BoneOrder", BoneOrder); SerializeProperty(writer, "SkeletonResourceID", SkeletonResourceID); + SerializeProperty(writer, "ModelName", ModelName); } protected override void DeserializeProperty(string jsonPropertyName, ref Utf8JsonReader reader) @@ -40,6 +42,7 @@ protected override void DeserializeProperty(string jsonPropertyName, ref Utf8Jso case "BoneOrder": DeserializePropertyDictionary(ref reader, BoneOrder); break; case "SkeletonResourceID": SkeletonResourceID = DeserializePropertyValue(ref reader); break; + case "ModelName": ModelName = DeserializePropertyValue(ref reader); break; default: base.DeserializeProperty(jsonPropertyName, ref reader); break; } diff --git a/LSLib/Granny/Model/GLTFImporter.cs b/LSLib/Granny/Model/GLTFImporter.cs index 57ea2253..b51a869a 100644 --- a/LSLib/Granny/Model/GLTFImporter.cs +++ b/LSLib/Granny/Model/GLTFImporter.cs @@ -470,9 +470,10 @@ public Root Import(string inputPath) } } + bool hasNameOverride = sceneExt != null && sceneExt.ModelName != ""; var rootModel = new Model { - Name = "Unnamed", // TODO + Name = hasNameOverride ? 
sceneExt.ModelName : Path.GetFileNameWithoutExtension(inputPath), InitialPlacement = new Transform(), MeshBindings = new List() }; @@ -480,7 +481,10 @@ public Root Import(string inputPath) if (root.Skeletons.Count > 0) { rootModel.Skeleton = root.Skeletons[0]; - rootModel.Name = rootModel.Skeleton.Bones[0].Name; + if (!hasNameOverride) + { + rootModel.Name = rootModel.Skeleton.Bones[0].Name; + } } root.Models.Add(rootModel); diff --git a/LSLib/LS/Mods/ModResources.cs b/LSLib/LS/Mods/ModResources.cs index 6c000edb..59795410 100644 --- a/LSLib/LS/Mods/ModResources.cs +++ b/LSLib/LS/Mods/ModResources.cs @@ -264,10 +264,10 @@ public class GameDataContext public VFS FS; public ModResources Resources; - public GameDataContext(string path, TargetGame game = TargetGame.BG3, bool excludeAssets = true) + public GameDataContext(string path, TargetGame game = TargetGame.BG3, bool excludeAssets = true, bool loadUnpackedFiles = true) { FS = new VFS(); - FS.AttachGameDirectory(path, excludeAssets); + FS.AttachGameDirectory(path, excludeAssets, loadUnpackedFiles); FS.FinishBuild(); Resources = new ModResources(); From 84d0f80b7817ccb1483793cfc657ec3ddc8f1c64 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Sun, 4 May 2025 14:34:46 +0200 Subject: [PATCH 133/139] Compute per-bone OBBs --- LSLib/Granny/Model/Exporter.cs | 31 ++++++++++++++++++++----------- LSLib/Granny/Model/Mesh.cs | 23 +++++++++++++++++++++++ 2 files changed, 43 insertions(+), 11 deletions(-) diff --git a/LSLib/Granny/Model/Exporter.cs b/LSLib/Granny/Model/Exporter.cs index 7887990e..308f9bb6 100644 --- a/LSLib/Granny/Model/Exporter.cs +++ b/LSLib/Granny/Model/Exporter.cs @@ -1,6 +1,7 @@ using LSLib.Granny.GR2; using LSLib.LS; using LSLib.LS.Enums; +using OpenTK.Mathematics; namespace LSLib.Granny.Model; @@ -259,7 +260,7 @@ private void GenerateDummySkeleton(Root root) if (model.Skeleton == null) { Utils.Info($"Generating dummy skeleton for model '{model.Name}'"); - var bone = new Bone + var rootBone = new Bone { Name = model.Name, ParentIndex = -1, @@ -271,14 +272,10 @@ private void GenerateDummySkeleton(Root root) Name = model.Name, LODType = 1, IsDummy = true, - Bones = [bone] + Bones = [rootBone] }; root.Skeletons.Add(skeleton); - // TODO: Transform / IWT is not always identity on dummy bones! - skeleton.UpdateWorldTransforms(); - model.Skeleton = skeleton; - foreach (var mesh in model.MeshBindings) { if (mesh.Mesh.BoneBindings != null && mesh.Mesh.BoneBindings.Count > 0) @@ -286,17 +283,29 @@ private void GenerateDummySkeleton(Root root) throw new ParsingException("Failed to generate dummy skeleton: Mesh already has bone bindings."); } + var bone = new Bone + { + Name = mesh.Mesh.Name, + ParentIndex = 0, + Transform = new Transform() + }; + skeleton.Bones.Add(bone); + (Vector3 min, Vector3 max) = mesh.Mesh.CalculateOBB(); + var binding = new BoneBinding { BoneName = bone.Name, - // TODO: Calculate bounding box! - // Use small bounding box values, as it interferes with object placement - // in D:OS 2 (after the Gift Bag 2 update) - OBBMin = [-0.1f, -0.1f, -0.1f], - OBBMax = [0.1f, 0.1f, 0.1f] + // TODO: Use oriented bounding box instead of AABB + // (AABB should be fine here, as there are no transforms on the bones) + OBBMin = [min.X, min.Y, min.Z], + OBBMax = [max.X, max.Y, max.Z] }; mesh.Mesh.BoneBindings = [binding]; } + + // TODO: Transform / IWT is not always identity on dummy bones! 
+ skeleton.UpdateWorldTransforms(); + model.Skeleton = skeleton; } } } diff --git a/LSLib/Granny/Model/Mesh.cs b/LSLib/Granny/Model/Mesh.cs index 323a3cc0..99548beb 100644 --- a/LSLib/Granny/Model/Mesh.cs +++ b/LSLib/Granny/Model/Mesh.cs @@ -741,4 +741,27 @@ public bool IsSkinned() return hasWeights && hasIndices; } + + public Tuple CalculateOBB() + { + if (PrimaryVertexData.Vertices.Count == 0) + { + throw new ParsingException("Cannot calculate OBB for mesh with no vertices!"); + } + + var min = new Vector3(9999999.0f, 9999999.0f, 9999999.0f); + var max = new Vector3(-9999999.0f, -9999999.0f, -9999999.0f); + + foreach (var vert in PrimaryVertexData.Vertices) + { + min.X = Math.Min(vert.Position.X, min.X); + max.X = Math.Max(vert.Position.X, max.X); + min.Y = Math.Min(vert.Position.Y, min.Y); + max.Y = Math.Max(vert.Position.Y, max.Y); + min.Z = Math.Min(vert.Position.Z, min.Z); + max.Z = Math.Max(vert.Position.Z, max.Z); + } + + return new Tuple(min, max); + } } From 376d7b6b8f3b316a1ee0070138c1036c1f344ea8 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Tue, 6 May 2025 23:31:51 +0200 Subject: [PATCH 134/139] Fix OBB calculation being broken --- LSLib/Granny/Model/VertexHelpers.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/LSLib/Granny/Model/VertexHelpers.cs b/LSLib/Granny/Model/VertexHelpers.cs index e88ad698..d82c988b 100644 --- a/LSLib/Granny/Model/VertexHelpers.cs +++ b/LSLib/Granny/Model/VertexHelpers.cs @@ -185,7 +185,7 @@ public static void ComputeNormals(IList vertices, IList indices) } } - struct OBB + class OBB { public Vector3 Min, Max; public int NumVerts; From b9a62fa539c3abe1d930395d8f5abfbdd72fe956 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Tue, 6 May 2025 23:32:39 +0200 Subject: [PATCH 135/139] Fix rigid flag being erroneously set on imported meshes --- LSLib/Granny/Model/DivinityMesh.cs | 5 ++++- LSLib/Granny/Model/Mesh.cs | 5 ++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/LSLib/Granny/Model/DivinityMesh.cs b/LSLib/Granny/Model/DivinityMesh.cs index 3c15e006..8d94c1d7 100644 --- a/LSLib/Granny/Model/DivinityMesh.cs +++ b/LSLib/Granny/Model/DivinityMesh.cs @@ -236,6 +236,8 @@ public class DivinityMeshProperties public float[] LodDistance; [Serialization(ArraySize = 1)] public Int32[] IsImpostor; + [Serialization(Kind = SerializationKind.None)] + public bool NewlyAdded = false; public DivinityModelFlag MeshFlags { @@ -281,7 +283,8 @@ public static DivinityMeshExtendedData Make() FormatDescs = null, ExtendedData = null, LodDistance = [3.40282347E+38f], - IsImpostor = [0] + IsImpostor = [0], + NewlyAdded = true }, LSMVersion = CurrentLSMVersion }; diff --git a/LSLib/Granny/Model/Mesh.cs b/LSLib/Granny/Model/Mesh.cs index 99548beb..c62b5811 100644 --- a/LSLib/Granny/Model/Mesh.cs +++ b/LSLib/Granny/Model/Mesh.cs @@ -659,7 +659,10 @@ public void PostLoad() VertexFormat = PrimaryVertexData.Vertices[0].Format; } - if (ExtendedData != null && ExtendedData.UserMeshProperties.MeshFlags == 0) + if (ExtendedData != null + && ExtendedData.UserMeshProperties != null + && ExtendedData.UserMeshProperties.Flags[0] == 0 + && ExtendedData.UserMeshProperties.NewlyAdded) { ExtendedData.UserMeshProperties.MeshFlags = AutodetectMeshFlags(); } From a34756eea74974ac33bad7758782c07d704917d2 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Tue, 6 May 2025 23:33:11 +0200 Subject: [PATCH 136/139] Set skeleton LOD type appropriately --- LSLib/Granny/Model/Exporter.cs | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git 
a/LSLib/Granny/Model/Exporter.cs b/LSLib/Granny/Model/Exporter.cs index 308f9bb6..019616b0 100644 --- a/LSLib/Granny/Model/Exporter.cs +++ b/LSLib/Granny/Model/Exporter.cs @@ -705,6 +705,30 @@ private void Conform(string inPath) } } + + private void UpdateSkeletonLODType(Skeleton skeleton) + { + bool hasSkinnedVerts = false; + + foreach (var mesh in Root.Meshes ?? []) + { + if (mesh.IsSkinned()) + { + hasSkinnedVerts = true; + } + } + + foreach (var track in Root.TrackGroups ?? []) + { + if (track.TransformTracks.Count > 0) + { + hasSkinnedVerts = true; + } + } + + skeleton.LODType = hasSkinnedVerts ? 1 : 0; + } + public void Export() { if (Options.InputPath != null) @@ -776,6 +800,11 @@ public void Export() Root.Flip(Options.FlipMesh, Options.FlipSkeleton); } + foreach (var skeleton in Root.Skeletons ?? []) + { + UpdateSkeletonLODType(skeleton); + } + // This option should be handled after everything else, as it converts Indices // into Indices16 and breaks every other operation that manipulates tri topologies. if (Options.OutputFormat == ExportFormat.GR2 && Options.CompactIndices) From 06f31b40104f75be5781a068de7754ac3caaf8ad Mon Sep 17 00:00:00 2001 From: Norbyte Date: Tue, 6 May 2025 23:34:37 +0200 Subject: [PATCH 137/139] Auto calculate OBB for glTF imports --- LSLib/Granny/Model/Exporter.cs | 2 +- LSLib/Granny/Model/GLTFImporter.cs | 12 ++++++++++++ LSLib/Granny/Model/Skeleton.cs | 1 - 3 files changed, 13 insertions(+), 2 deletions(-) diff --git a/LSLib/Granny/Model/Exporter.cs b/LSLib/Granny/Model/Exporter.cs index 019616b0..ec10d817 100644 --- a/LSLib/Granny/Model/Exporter.cs +++ b/LSLib/Granny/Model/Exporter.cs @@ -84,7 +84,7 @@ public class ExporterOptions // Disabled by default, as D:OS doesn't support sparse knot values in anim curves. public bool RemoveTrivialAnimationKeys = false; // Recalculate mesh bone binding OBBs - public bool RecalculateOBBs = false; + public bool RecalculateOBBs = true; // Allow encoding tangents/binormals as QTangents // See: Spherical Skinning with Dual-Quaternions and QTangents, Crytek R&D public bool EnableQTangents = true; diff --git a/LSLib/Granny/Model/GLTFImporter.cs b/LSLib/Granny/Model/GLTFImporter.cs index b51a869a..21ac412f 100644 --- a/LSLib/Granny/Model/GLTFImporter.cs +++ b/LSLib/Granny/Model/GLTFImporter.cs @@ -485,6 +485,18 @@ public Root Import(string inputPath) { rootModel.Name = rootModel.Skeleton.Bones[0].Name; } + + + if (Options.RecalculateOBBs) + { + foreach (var mesh in ImportedMeshes) + { + if (mesh.BoneBindings != null && mesh.BoneBindings.Count > 0) + { + VertexHelpers.UpdateOBBs(rootModel.Skeleton, mesh); + } + } + } } root.Models.Add(rootModel); diff --git a/LSLib/Granny/Model/Skeleton.cs b/LSLib/Granny/Model/Skeleton.cs index 5a9d6696..ad957d56 100644 --- a/LSLib/Granny/Model/Skeleton.cs +++ b/LSLib/Granny/Model/Skeleton.cs @@ -322,7 +322,6 @@ public void PostLoad(Root root) if (CheckIsDummy(root)) { IsDummy = true; - Utils.Info(String.Format("Skeleton '{0}' marked as dummy", this.Name)); } for (var i = 0; i < Bones.Count; i++) From ad9d0b37393fa999ed8471cc526d9ab35f0de136 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Tue, 6 May 2025 23:36:37 +0200 Subject: [PATCH 138/139] Various bone remapping fixes --- LSLib/Granny/Model/GLTFExporter.cs | 24 +++- LSLib/Granny/Model/GLTFImporter.cs | 183 +++++++++++++++++++++-------- LSLib/Granny/Model/GLTFMesh.cs | 10 +- LSLib/Granny/Model/GLTFVertex.cs | 56 ++++++--- LSLib/Granny/Model/Mesh.cs | 52 ++++++++ 5 files changed, 249 insertions(+), 76 deletions(-) diff --git 
a/LSLib/Granny/Model/GLTFExporter.cs b/LSLib/Granny/Model/GLTFExporter.cs index 9c743e3d..067df836 100644 --- a/LSLib/Granny/Model/GLTFExporter.cs +++ b/LSLib/Granny/Model/GLTFExporter.cs @@ -49,17 +49,29 @@ private void GenerateUniqueMeshIds(List meshes) private void ExportMeshBinding(Model model, Skeleton skeleton, MeshBinding meshBinding, SceneBuilder scene) { var meshId = MeshIds[meshBinding.Mesh]; - var exporter = new GLTFMeshExporter(meshBinding.Mesh, meshId); - var mesh = exporter.Export(); - if (skeleton == null || !meshBinding.Mesh.VertexFormat.HasBoneWeights) + if (skeleton != null && meshBinding.Mesh.VertexFormat.HasBoneWeights) { - scene.AddRigidMesh(mesh, new AffineTransform(Matrix4x4.Identity)); + var joints = meshBinding.Mesh.GetInfluencingJoints(skeleton); + + var exporter = new GLTFMeshExporter(meshBinding.Mesh, meshId, joints.BindRemaps); + var mesh = exporter.Export(); + + Skeletons[skeleton].UsedForSkinning = true; + + List<(NodeBuilder, Matrix4x4)> bindings = []; + foreach (var jointIndex in joints.SkeletonJoints) + { + bindings.Add(Skeletons[skeleton].Joints[jointIndex]); + } + + scene.AddSkinnedMesh(mesh, bindings.ToArray()); } else { - Skeletons[skeleton].UsedForSkinning = true; - scene.AddSkinnedMesh(mesh, Skeletons[skeleton].Joints.ToArray()); + var exporter = new GLTFMeshExporter(meshBinding.Mesh, meshId, null); + var mesh = exporter.Export(); + scene.AddRigidMesh(mesh, new AffineTransform(Matrix4x4.Identity)); } } diff --git a/LSLib/Granny/Model/GLTFImporter.cs b/LSLib/Granny/Model/GLTFImporter.cs index 21ac412f..e7985bb1 100644 --- a/LSLib/Granny/Model/GLTFImporter.cs +++ b/LSLib/Granny/Model/GLTFImporter.cs @@ -1,6 +1,7 @@ using LSLib.Granny.GR2; using LSLib.LS; using SharpGLTF.Animations; +using SharpGLTF.Geometry.VertexTypes; using SharpGLTF.Scenes; using SharpGLTF.Schema2; using System.Numerics; @@ -140,30 +141,93 @@ private static GLTFMeshExtensions FindMeshExtension(ModelRoot root, string name) return null; } - private Mesh ImportMesh(ModelRoot modelRoot, ContentTransformer content, string name) + private static InfluencingJoints GetInfluencingJoints(SkinnedTransformer skin, Skeleton skeleton) { - var collada = new GLTFMesh(); - collada.ImportFromGLTF(content, Options); + var joints = new HashSet(); + var verts = skin.GetGeometryAsset().Primitives.First().Vertices; + foreach (var vert in verts) + { + var s = (VertexJoints4)vert.GetSkinning(); + if (s.Weights[0] > 0) joints.Add((int)s.Joints[0]); + if (s.Weights[1] > 0) joints.Add((int)s.Joints[1]); + if (s.Weights[2] > 0) joints.Add((int)s.Joints[2]); + if (s.Weights[3] > 0) joints.Add((int)s.Joints[3]); + } + + var ij = new InfluencingJoints(); + ij.BindJoints = joints.Order().ToList(); + ij.SkeletonJoints = []; + + var bindJoints = skin.GetJointBindings(); + foreach (var bindIndex in ij.BindJoints) + { + var binding = bindJoints[bindIndex].Joint.Name; + var jointIndex = skeleton.Bones.FindIndex((bone) => bone.Name == binding); + if (jointIndex == -1) + { + throw new ParsingException($"Couldn't find bind bone {binding} in parent skeleton."); + } + + ij.SkeletonJoints.Add(jointIndex); + } + + ij.BindRemaps = InfluencingJoints.BindJointsToRemaps(ij.BindJoints); + return ij; + } + + private (Mesh, GLTFMesh) ImportMesh(ModelRoot modelRoot, Skeleton skeleton, ContentTransformer content, string name) + { + var ext = FindMeshExtension(modelRoot, name); + + InfluencingJoints influencingJoints = null; + if (content is SkinnedTransformer skin) + { + if (skeleton == null) + { + throw new ParsingException($"Trying 
to export skinned mesh '{name}', but the glTF file contains no skeleton"); + } + + influencingJoints = GetInfluencingJoints(skin, skeleton); + } + else if (ext != null && ext.ParentBone != "") + { + if (skeleton == null) + { + throw new ParsingException($"Mesh '{name}' has a parent bone set ({ext.ParentBone}) but the glTF file contains no skeleton"); + } + + var parentBone = skeleton.Bones.FindIndex((bone) => bone.Name == ext.ParentBone); + if (parentBone == -1) + { + throw new ParsingException($"Mesh '{name}' has a parent bone ({ext.ParentBone}) that does not exist in the skeleton"); + } + + influencingJoints = new(); + influencingJoints.SkeletonJoints = [parentBone]; + } + + var converted = new GLTFMesh(); + converted.ImportFromGLTF(content, influencingJoints, Options); var m = new Mesh { - VertexFormat = collada.InternalVertexType, + VertexFormat = converted.InternalVertexType, Name = name, PrimaryVertexData = new VertexData { - Vertices = collada.Vertices + Vertices = converted.Vertices }, PrimaryTopology = new TriTopology { - Indices = collada.Indices, + Indices = converted.Indices, Groups = [ new TriTopologyGroup { MaterialIndex = 0, TriFirst = 0, - TriCount = collada.TriangleCount + TriCount = converted.TriangleCount } ] }, @@ -174,15 +238,14 @@ private Mesh ImportMesh(ModelRoot modelRoot, ContentTransformer content, string var components = m.VertexFormat.ComponentNames().Select(s => new GrannyString(s)).ToList(); m.PrimaryVertexData.VertexComponentNames = components; - var ext = FindMeshExtension(modelRoot, name); MakeExtendedData(content, ext, m); Utils.Info(String.Format("Imported {0} mesh ({1} tri groups, {2} tris)", (m.VertexFormat.HasBoneWeights ? "skinned" : "rigid"), - m.PrimaryTopology.Groups.Count, - collada.TriangleCount)); + m.PrimaryTopology.Groups.Count, + converted.TriangleCount)); - return m; + return (m, converted); } private void AddMeshToRoot(Root root, Mesh mesh) @@ -390,21 +453,61 @@ private Skeleton ImportSkeleton(string name, NodeBuilder root, GLTFSceneExtensio return skeleton; } + private void ImportSkinBinding(Mesh mesh, InfluencingJoints influences, SkinnedTransformer skin) + { + var joints = skin.GetJointBindings(); + mesh.BoneBindings = []; + foreach (var jointIndex in influences.BindJoints) + { + var (joint, _) = joints[jointIndex]; + var binding = new BoneBinding + { + BoneName = joint.Name, + OBBMin = [-0.1f, -0.1f, -0.1f], + OBBMax = [0.1f, 0.1f, 0.1f] + }; + mesh.BoneBindings.Add(binding); + } + } + + private void ImportGeometry(ModelRoot modelRoot, Skeleton skeleton, InstanceBuilder geometry) + { + var content = geometry.Content; + var name = geometry.Name ?? content.Name ?? 
content.GetGeometryAsset().Name; + var (mesh, gltfMesh) = ImportMesh(modelRoot, skeleton, content, name); + ImportedMeshes.Add(mesh); + + if (content is SkinnedTransformer skin) + { + ImportSkinBinding(mesh, gltfMesh.InfluencingJoints, skin); + } + } + + private Skeleton TryImportSkin(Root root, InstanceBuilder geometry, GLTFSceneExtensions sceneExt) + { + var skeletonRoot = geometry.Content?.GetArmatureRoot(); + if (skeletonRoot != null && skeletonRoot == ((RigidTransformer)geometry.Content).Transform) + { + var skel = ImportSkeleton(geometry.Name, skeletonRoot, sceneExt); + root.Skeletons.Add(skel); + return skel; + } + else + { + return null; + } + } + public Root Import(string inputPath) { GLTFExtensions.RegisterExtensions(); - ModelRoot modelRoot = ModelRoot.Load(inputPath); + var modelRoot = ModelRoot.Load(inputPath); if (modelRoot.LogicalScenes.Count != 1) { throw new ParsingException($"GLTF file is expected to have a single scene, got {modelRoot.LogicalScenes.Count}"); } - if (modelRoot.LogicalSkins.Count > 1) - { - throw new ParsingException("GLTF files containing multiple skeletons are not supported"); - } - var sceneExt = modelRoot.DefaultScene.GetExtension(); if (sceneExt != null) { @@ -425,48 +528,32 @@ public Root Import(string inputPath) root.FromFileName = inputPath; ImportedMeshes = []; + Skeleton skeleton = null; + // Import skins needed for geometry processing foreach (var geometry in scene.Instances) { - if (geometry.Content?.HasRenderableContent == true) + if (geometry.Content?.HasRenderableContent != true) { - var content = geometry.Content; - var name = geometry.Name ?? content.Name ?? content.GetGeometryAsset().Name; - var mesh = ImportMesh(modelRoot, content, name); - ImportedMeshes.Add(mesh); - - if (content is SkinnedTransformer skin) + var skel = TryImportSkin(root, geometry, sceneExt); + if (skel != null) { - var joints = skin.GetJointBindings(); - mesh.BoneBindings = []; - if (joints.Length > 0) + if (skeleton != null) { - foreach (var (joint, inverseBindMatrix) in joints) - { - var binding = new BoneBinding - { - BoneName = joint.Name, - OBBMin = [-0.1f, -0.1f, -0.1f], - OBBMax = [0.1f, 0.1f, 0.1f] - }; - mesh.BoneBindings.Add(binding); - } + throw new ParsingException("GLTF files containing multiple skins are not supported"); } - if (Options.RecalculateOBBs) - { - // FIXME! 
VertexHelpers.UpdateOBBs(root.Skeletons.Single(), mesh); - } + skeleton = skel; } } - else + } + + // Import non-skin geometries + foreach (var geometry in scene.Instances) + { + if (geometry.Content?.HasRenderableContent == true) { - var skeletonRoot = geometry.Content?.GetArmatureRoot(); - if (skeletonRoot != null && skeletonRoot == ((RigidTransformer)geometry.Content).Transform) - { - var skel = ImportSkeleton(geometry.Name, skeletonRoot, sceneExt); - root.Skeletons.Add(skel); - } + ImportGeometry(modelRoot, skeleton, geometry); } } diff --git a/LSLib/Granny/Model/GLTFMesh.cs b/LSLib/Granny/Model/GLTFMesh.cs index f966e02e..c0301bf6 100644 --- a/LSLib/Granny/Model/GLTFMesh.cs +++ b/LSLib/Granny/Model/GLTFMesh.cs @@ -14,6 +14,7 @@ public class GLTFMesh private bool HasNormals = false; private bool HasTangents = false; + public InfluencingJoints InfluencingJoints; public int TriangleCount; public List Vertices; public List Indices; @@ -44,9 +45,9 @@ private void ImportTriangles(IPrimitiveReader primitives) } } - private void ImportVertices(IPrimitiveReader primitives) + private void ImportVertices(IPrimitiveReader primitives, int[] jointRemaps) { - BuildHelper = new GLTFVertexBuildHelper("", OutputVertexType); + BuildHelper = new GLTFVertexBuildHelper("", OutputVertexType, jointRemaps); Vertices = new List(primitives.Vertices.Count); foreach (var vert in primitives.Vertices) @@ -168,12 +169,13 @@ private VertexDescriptor FindVertexFormat(Type type) return desc; } - public void ImportFromGLTF(ContentTransformer content, ExporterOptions options) + public void ImportFromGLTF(ContentTransformer content, InfluencingJoints influencingJoints, ExporterOptions options) { var geometry = content.GetGeometryAsset(); var primitives = geometry.Primitives.First(); Options = options; + InfluencingJoints = influencingJoints; var vertexFormat = FindVertexFormat(primitives.VertexType); InputVertexType = vertexFormat; @@ -192,7 +194,7 @@ public void ImportFromGLTF(ContentTransformer content, ExporterOptions options) }; ImportTriangles(primitives); - ImportVertices(primitives); + ImportVertices(primitives, influencingJoints?.BindRemaps); if (!HasNormals) { diff --git a/LSLib/Granny/Model/GLTFVertex.cs b/LSLib/Granny/Model/GLTFVertex.cs index 25244701..ea7ed05c 100644 --- a/LSLib/Granny/Model/GLTFVertex.cs +++ b/LSLib/Granny/Model/GLTFVertex.cs @@ -53,12 +53,19 @@ public static System.Numerics.Quaternion ToNumerics(this TKQuat v) return new System.Numerics.Quaternion(v.X, v.Y, v.Z, v.W); } } + public interface GLTFVertexBuilder { public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert); public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert); } +public interface GLTFVertexSkinBuilder +{ + public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert, int[] remaps); + public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert, int[] remaps); +} + public class GLTFVertexNoneBuilder : GLTFVertexBuilder { public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) @@ -316,29 +323,40 @@ public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) } } -public class GLTFVertexSkinningBuilder : GLTFVertexBuilder +public class GLTFVertexNoneSkinBuilder : GLTFVertexSkinBuilder { - public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + public void ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert, int[] remaps) + { + } + + public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert, int[] remaps) + { + } +} + +public class GLTFVertexSkinningBuilder : GLTFVertexSkinBuilder +{ + public void 
ToGLTF(IVertexBuilder gltfVert, Vertex gr2Vert, int[] remaps) { var v = new VertexJoints4( - (gr2Vert.BoneIndices.A, gr2Vert.BoneWeights.A / 255.0f), - (gr2Vert.BoneIndices.B, gr2Vert.BoneWeights.B / 255.0f), - (gr2Vert.BoneIndices.C, gr2Vert.BoneWeights.C / 255.0f), - (gr2Vert.BoneIndices.D, gr2Vert.BoneWeights.D / 255.0f) + (remaps[gr2Vert.BoneIndices.A], gr2Vert.BoneWeights.A / 255.0f), + (remaps[gr2Vert.BoneIndices.B], gr2Vert.BoneWeights.B / 255.0f), + (remaps[gr2Vert.BoneIndices.C], gr2Vert.BoneWeights.C / 255.0f), + (remaps[gr2Vert.BoneIndices.D], gr2Vert.BoneWeights.D / 255.0f) ); gltfVert.SetSkinning(v); } - public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert) + public void FromGLTF(IVertexBuilder gltfVert, Vertex gr2Vert, int[] remaps) { var skin = (VertexJoints4)gltfVert.GetSkinning(); Span weights = stackalloc byte[4]; VertexHelpers.CompressBoneWeights([skin.Weights.X, skin.Weights.Y, skin.Weights.Z, skin.Weights.W], weights); - gr2Vert.BoneIndices.A = (byte)skin.Joints[0]; - gr2Vert.BoneIndices.B = (byte)skin.Joints[1]; - gr2Vert.BoneIndices.C = (byte)skin.Joints[2]; - gr2Vert.BoneIndices.D = (byte)skin.Joints[3]; + gr2Vert.BoneIndices.A = (byte)remaps[(byte)skin.Joints[0]]; + gr2Vert.BoneIndices.B = (byte)remaps[(byte)skin.Joints[1]]; + gr2Vert.BoneIndices.C = (byte)remaps[(byte)skin.Joints[2]]; + gr2Vert.BoneIndices.D = (byte)remaps[(byte)skin.Joints[3]]; gr2Vert.BoneWeights.A = weights[0]; gr2Vert.BoneWeights.B = weights[1]; @@ -415,10 +433,11 @@ public class GLTFVertexBuildHelper { private readonly string ExportedId; private readonly VertexDescriptor VertexFormat; + private readonly int[] JointRemaps; private GLTFVertexBuilder GeometryBuilder; private GLTFVertexBuilder MaterialBuilder; - private GLTFVertexBuilder SkinningBuilder; + private GLTFVertexSkinBuilder SkinningBuilder; private Type GeometryDataType; private Type MaterialDataType; @@ -429,10 +448,11 @@ public class GLTFVertexBuildHelper private bool HasNormals; private bool HasTangents; - public GLTFVertexBuildHelper(string exportedId, VertexDescriptor vertexFormat) + public GLTFVertexBuildHelper(string exportedId, VertexDescriptor vertexFormat, int[] jointRemaps) { ExportedId = exportedId; VertexFormat = vertexFormat; + JointRemaps = jointRemaps; HasNormals = VertexFormat.NormalType != NormalType.None; HasTangents = VertexFormat.TangentType != NormalType.None; @@ -529,7 +549,7 @@ private void SelectSkinningBuilder() else { SkinningDataType = typeof(VertexEmpty); - SkinningBuilder = new GLTFVertexNoneBuilder(); + SkinningBuilder = new GLTFVertexNoneSkinBuilder(); } } @@ -553,7 +573,7 @@ public void ToGLTF(IVertexBuilder gltf, Vertex gr) { GeometryBuilder.ToGLTF(gltf, gr); MaterialBuilder.ToGLTF(gltf, gr); - SkinningBuilder.ToGLTF(gltf, gr); + SkinningBuilder.ToGLTF(gltf, gr, JointRemaps); } public Vertex FromGLTF(IVertexBuilder gltf) @@ -561,15 +581,15 @@ public Vertex FromGLTF(IVertexBuilder gltf) var gr = VertexFormat.CreateInstance(); GeometryBuilder.FromGLTF(gltf, gr); MaterialBuilder.FromGLTF(gltf, gr); - SkinningBuilder.FromGLTF(gltf, gr); + SkinningBuilder.FromGLTF(gltf, gr, JointRemaps); return gr; } } -public class GLTFMeshExporter(Mesh mesh, string exportedId) +public class GLTFMeshExporter(Mesh mesh, string exportedId, int[] jointRemaps) { private readonly Mesh ExportedMesh = mesh; - private readonly GLTFVertexBuildHelper BuildHelper = new(exportedId, mesh.VertexFormat); + private readonly GLTFVertexBuildHelper BuildHelper = new(exportedId, mesh.VertexFormat, jointRemaps); public IMeshBuilder 
Export() { diff --git a/LSLib/Granny/Model/Mesh.cs b/LSLib/Granny/Model/Mesh.cs index c62b5811..ddbb4e45 100644 --- a/LSLib/Granny/Model/Mesh.cs +++ b/LSLib/Granny/Model/Mesh.cs @@ -629,6 +629,27 @@ public class MorphTarget public Int32 DataIsDeltas; } +public class InfluencingJoints +{ + public List BindJoints; + public List SkeletonJoints; + public int[] BindRemaps; + + public static int[] BindJointsToRemaps(List joints) + { + var maxJoint = joints.Max(); + var remaps = new int[maxJoint + 1]; + var i = 0; + + foreach (var joint in joints) + { + remaps[joint] = i++; + } + + return remaps; + } +} + public class Mesh { public string Name; @@ -745,6 +766,37 @@ public bool IsSkinned() return hasWeights && hasIndices; } + public InfluencingJoints GetInfluencingJoints(Skeleton skeleton) + { + HashSet joints = []; + + foreach (var vert in PrimaryVertexData.Vertices) + { + if (vert.BoneWeights.A > 0) joints.Add(vert.BoneIndices.A); + if (vert.BoneWeights.B > 0) joints.Add(vert.BoneIndices.B); + if (vert.BoneWeights.C > 0) joints.Add(vert.BoneIndices.C); + if (vert.BoneWeights.D > 0) joints.Add(vert.BoneIndices.D); + } + + var ij = new InfluencingJoints(); + ij.BindJoints = joints.Order().ToList(); + ij.SkeletonJoints = []; + foreach (var bindIndex in ij.BindJoints) + { + var binding = BoneBindings[bindIndex].BoneName; + var jointIndex = skeleton.Bones.FindIndex((bone) => bone.Name == binding); + if (jointIndex == -1) + { + throw new ParsingException($"Couldn't find bind bone {binding} in parent skeleton."); + } + + ij.SkeletonJoints.Add(jointIndex); + } + + ij.BindRemaps = InfluencingJoints.BindJointsToRemaps(ij.BindJoints); + return ij; + } + public Tuple CalculateOBB() { if (PrimaryVertexData.Vertices.Count == 0) From 9b367d72a6847a3c4f3676fd0ad09fea782e28b8 Mon Sep 17 00:00:00 2001 From: Norbyte Date: Tue, 6 May 2025 23:37:08 +0200 Subject: [PATCH 139/139] Support for reexporting rigid bone attachments --- LSLib/Granny/Model/GLTFExporter.cs | 4 ++++ LSLib/Granny/Model/GLTFExtensions.cs | 3 +++ LSLib/Granny/Model/GLTFImporter.cs | 20 ++++++++++++++++++++ 3 files changed, 27 insertions(+) diff --git a/LSLib/Granny/Model/GLTFExporter.cs b/LSLib/Granny/Model/GLTFExporter.cs index 067df836..8e57fe97 100644 --- a/LSLib/Granny/Model/GLTFExporter.cs +++ b/LSLib/Granny/Model/GLTFExporter.cs @@ -175,6 +175,10 @@ private void ExportMeshExtensions(Mesh mesh, GLTFMeshExtensions ext) ext.ExportOrder = mesh.ExportOrder; ext.LOD = (user.Lod[0] >= 0) ? user.Lod[0] : 0; ext.LODDistance = (user.LodDistance[0] < 100000000.0f) ? 
user.LodDistance[0] : 0.0f; + if (!mesh.IsSkinned() && mesh.BoneBindings != null && mesh.BoneBindings.Count == 1) + { + ext.ParentBone = mesh.BoneBindings[0].BoneName; + } } private void ExportExtensions(Root root, ModelRoot modelRoot) diff --git a/LSLib/Granny/Model/GLTFExtensions.cs b/LSLib/Granny/Model/GLTFExtensions.cs index 07085900..4a7f99ce 100644 --- a/LSLib/Granny/Model/GLTFExtensions.cs +++ b/LSLib/Granny/Model/GLTFExtensions.cs @@ -67,6 +67,7 @@ internal GLTFMeshExtensions() { } public Int32 ExportOrder = 0; public Int32 LOD = 0; public Single LODDistance = 0; + public String ParentBone = ""; protected override void SerializeProperties(Utf8JsonWriter writer) { @@ -85,6 +86,7 @@ protected override void SerializeProperties(Utf8JsonWriter writer) SerializeProperty(writer, "ExportOrder", ExportOrder); SerializeProperty(writer, "LOD", LOD); SerializeProperty(writer, "LODDistance", LODDistance); + SerializeProperty(writer, "ParentBone", ParentBone); } protected override void DeserializeProperty(string jsonPropertyName, ref Utf8JsonReader reader) @@ -105,6 +107,7 @@ protected override void DeserializeProperty(string jsonPropertyName, ref Utf8Jso case "ExportOrder": ExportOrder = DeserializePropertyValue(ref reader); break; case "LOD": LOD = DeserializePropertyValue(ref reader); break; case "LODDistance": LODDistance = DeserializePropertyValue(ref reader); break; + case "ParentBone": ParentBone = DeserializePropertyValue(ref reader); break; default: base.DeserializeProperty(jsonPropertyName, ref reader); break; } } diff --git a/LSLib/Granny/Model/GLTFImporter.cs b/LSLib/Granny/Model/GLTFImporter.cs index e7985bb1..edef8e4e 100644 --- a/LSLib/Granny/Model/GLTFImporter.cs +++ b/LSLib/Granny/Model/GLTFImporter.cs @@ -470,6 +470,22 @@ private void ImportSkinBinding(Mesh mesh, InfluencingJoints influences, SkinnedT } } + private void ImportRigidSkinBinding(Mesh mesh, InfluencingJoints influences, Skeleton skeleton) + { + mesh.BoneBindings = []; + foreach (var jointIndex in influences.SkeletonJoints) + { + var bone = skeleton.Bones[jointIndex]; + var binding = new BoneBinding + { + BoneName = bone.Name, + OBBMin = [-0.1f, -0.1f, -0.1f], + OBBMax = [0.1f, 0.1f, 0.1f] + }; + mesh.BoneBindings.Add(binding); + } + } + private void ImportGeometry(ModelRoot modelRoot, Skeleton skeleton, InstanceBuilder geometry) { var content = geometry.Content; @@ -481,6 +497,10 @@ private void ImportGeometry(ModelRoot modelRoot, Skeleton skeleton, InstanceBuil { ImportSkinBinding(mesh, gltfMesh.InfluencingJoints, skin); } + else if (gltfMesh.InfluencingJoints != null) + { + ImportRigidSkinBinding(mesh, gltfMesh.InfluencingJoints, skeleton); + } } private Skeleton TryImportSkin(Root root, InstanceBuilder geometry, GLTFSceneExtensions sceneExt)
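    // Editor's note (illustrative sketch, not part of this patch series): with the ParentBone
    // extension exported above, a rigid bone attachment is resolved back here into a single-bone
    // BoneBinding, so it survives a GR2 -> glTF -> GR2 round trip. The skinned import path next
    // to it relies on InfluencingJoints.BindJointsToRemaps (added in the previous patch) to pack
    // the referenced joints into dense palette indices, e.g.:
    //
    //     var remaps = InfluencingJoints.BindJointsToRemaps([2, 5, 9]);
    //     // remaps[2] == 0, remaps[5] == 1, remaps[9] == 2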