diff --git a/license.txt b/LICENSE.txt similarity index 100% rename from license.txt rename to LICENSE.txt diff --git a/docs/bfast.md b/docs/bfast.md index caaa6cec..318a6450 100644 --- a/docs/bfast.md +++ b/docs/bfast.md @@ -31,7 +31,6 @@ containers for other data. * Easy to implement efficient and conformant encoders and decoders in different languages * Fast random access to any point in the data format with a minimum of disk accesses * Format and endianess easily identified through a magic number at the front of the file -* Data arrays are 64 byte aligned to facilitate casting to SIMD data types (eg. AVX-512) * Array offsets are encoded using 64-bit integers to supports large data sets * Positions of data buffers are encoded in the beginning of the file * Quick and easy to validate that a block is a valid BFAST encoding of data @@ -68,7 +67,7 @@ The file format consists of three sections: * Header - Fixed size descriptor (32 bytes) describing the file contents * Ranges - An array of offset pairs indicating the begin and end of each buffer (relative to file begin) -* Data - 64-byte aligned data buffers +* Data - Data buffers ## Header Section @@ -103,7 +102,7 @@ offsets relative to the beginning of the file. ## Data Section -The data section starts at the first 64 byte aligned address immediately following the last `Range` value. +The data section starts just after the last `Range` value. This value is stored for validation purposes in the header as `DataStart`. ### Names Buffer @@ -113,12 +112,3 @@ strings separated by null characters. Names may be zero-length and are not guara A name may contain any Utf-8 encoded character except the null character. There must be N-1 names where N is the number of ranges (i.e. the `NumArrays` value in header). - -# Implementations - -The official reference implementation of BFAST is written in C# and targets .NET Standard 2.0. The C# -test suite uses NUnit and targets .NET Core 2.1. At VIM AEC we are using BFAST in production code that -targets Unity 2019.1 and .NET Framework 4.7.1. - -There is currently a C++ encoder and a JavaScript decoder implementation under development, but they -are not tested and supported yet. \ No newline at end of file diff --git a/src/cs/.editorconfig b/src/cs/.editorconfig new file mode 100644 index 00000000..7f629237 --- /dev/null +++ b/src/cs/.editorconfig @@ -0,0 +1,215 @@ +# Remove the line below if you want to inherit .editorconfig settings from higher directories +root = true + +# C# files +[*.cs] + +# Default severity for analyzer diagnostics with category 'Style' (escalated to build warnings) +# This causes the build to fail for the cases below that are suffixed with :error +dotnet_analyzer_diagnostic.category-Style.severity = default + +#### Core EditorConfig Options #### + +# Indentation and spacing +indent_size = 4 +indent_style = space +tab_width = 4 + +# New line preferences +end_of_line = crlf +insert_final_newline = true + +#### .NET Coding Conventions #### + +# Organize usings +dotnet_separate_import_directive_groups = false +dotnet_sort_system_directives_first = false +file_header_template = unset + +# this. and Me. 
preferences +dotnet_style_qualification_for_event = false:error +dotnet_style_qualification_for_field = false:error +dotnet_style_qualification_for_method = false:error +dotnet_style_qualification_for_property = false:error + +# Language keywords vs BCL types preferences +dotnet_style_predefined_type_for_locals_parameters_members = true +dotnet_style_predefined_type_for_member_access = true + +# Parentheses preferences +dotnet_style_parentheses_in_arithmetic_binary_operators = always_for_clarity +dotnet_style_parentheses_in_other_binary_operators = always_for_clarity +dotnet_style_parentheses_in_other_operators = never_if_unnecessary +dotnet_style_parentheses_in_relational_binary_operators = always_for_clarity + +# Modifier preferences +dotnet_style_require_accessibility_modifiers = for_non_interface_members + +# Expression-level preferences +dotnet_style_coalesce_expression = true +dotnet_style_collection_initializer = true +dotnet_style_explicit_tuple_names = true +dotnet_style_null_propagation = true +dotnet_style_object_initializer = true +dotnet_style_operator_placement_when_wrapping = beginning_of_line +dotnet_style_prefer_auto_properties = true:error +dotnet_style_prefer_compound_assignment = true +dotnet_style_prefer_conditional_expression_over_assignment = true +dotnet_style_prefer_conditional_expression_over_return = true +dotnet_style_prefer_inferred_anonymous_type_member_names = true +dotnet_style_prefer_inferred_tuple_names = true +dotnet_style_prefer_is_null_check_over_reference_equality_method = true +dotnet_style_prefer_simplified_boolean_expressions = true +dotnet_style_prefer_simplified_interpolation = true + +# Field preferences +dotnet_style_readonly_field = true + +# Parameter preferences +dotnet_code_quality_unused_parameters = all + +# Suppression preferences +dotnet_remove_unnecessary_suppression_exclusions = none + +#### C# Coding Conventions #### + +# var preferences +csharp_style_var_elsewhere = true:error +csharp_style_var_for_built_in_types = true:error +csharp_style_var_when_type_is_apparent = true:error + +# Expression-bodied members +csharp_style_expression_bodied_accessors = true:silent +csharp_style_expression_bodied_constructors = true:silent +csharp_style_expression_bodied_indexers = true:silent +csharp_style_expression_bodied_lambdas = true:silent +csharp_style_expression_bodied_local_functions = true:silent +csharp_style_expression_bodied_methods = true:silent +csharp_style_expression_bodied_operators = true:silent +csharp_style_expression_bodied_properties = true:silent + +# Pattern matching preferences +csharp_style_pattern_matching_over_as_with_null_check = true +csharp_style_pattern_matching_over_is_with_cast_check = true +csharp_style_prefer_not_pattern = true +csharp_style_prefer_pattern_matching = true +csharp_style_prefer_switch_expression = true + +# Null-checking preferences +csharp_style_conditional_delegate_call = true + +# Modifier preferences +csharp_prefer_static_local_function = true +csharp_preferred_modifier_order = public,private,protected,internal,static,extern,new,virtual,abstract,sealed,override,readonly,unsafe,volatile,async + +# Code-block preferences +csharp_prefer_braces = when_multiline:error +csharp_prefer_simple_using_statement = true + +# Expression-level preferences +csharp_prefer_simple_default_expression = true +csharp_style_deconstructed_variable_declaration = true +csharp_style_implicit_object_creation_when_type_is_apparent = true +csharp_style_inlined_variable_declaration = true 
+csharp_style_pattern_local_over_anonymous_function = true +csharp_style_prefer_index_operator = true +csharp_style_prefer_range_operator = true +csharp_style_throw_expression = true +csharp_style_unused_value_assignment_preference = discard_variable +csharp_style_unused_value_expression_statement_preference = discard_variable + +# 'using' directive preferences +csharp_using_directive_placement = outside_namespace + +#### C# Formatting Rules #### + +# New line preferences +csharp_new_line_before_catch = true +csharp_new_line_before_else = true +csharp_new_line_before_finally = true +csharp_new_line_before_members_in_anonymous_types = true +csharp_new_line_before_members_in_object_initializers = true +csharp_new_line_before_open_brace = all +csharp_new_line_between_query_expression_clauses = true + +# Indentation preferences +csharp_indent_block_contents = true +csharp_indent_braces = false +csharp_indent_case_contents = true +csharp_indent_case_contents_when_block = true +csharp_indent_labels = one_less_than_current +csharp_indent_switch_labels = true + +# Space preferences +csharp_space_after_cast = false +csharp_space_after_colon_in_inheritance_clause = true +csharp_space_after_comma = true +csharp_space_after_dot = false +csharp_space_after_keywords_in_control_flow_statements = true +csharp_space_after_semicolon_in_for_statement = true +csharp_space_around_binary_operators = before_and_after +csharp_space_around_declaration_statements = false +csharp_space_before_colon_in_inheritance_clause = true +csharp_space_before_comma = false +csharp_space_before_dot = false +csharp_space_before_open_square_brackets = false +csharp_space_before_semicolon_in_for_statement = false +csharp_space_between_empty_square_brackets = false +csharp_space_between_method_call_empty_parameter_list_parentheses = false +csharp_space_between_method_call_name_and_opening_parenthesis = false +csharp_space_between_method_call_parameter_list_parentheses = false +csharp_space_between_method_declaration_empty_parameter_list_parentheses = false +csharp_space_between_method_declaration_name_and_open_parenthesis = false +csharp_space_between_method_declaration_parameter_list_parentheses = false +csharp_space_between_parentheses = false +csharp_space_between_square_brackets = false + +# Wrapping preferences +csharp_preserve_single_line_blocks = true +csharp_preserve_single_line_statements = true + +#### Naming styles #### + +# Naming rules + +dotnet_naming_rule.interface_should_be_begins_with_i.severity = suggestion +dotnet_naming_rule.interface_should_be_begins_with_i.symbols = interface +dotnet_naming_rule.interface_should_be_begins_with_i.style = begins_with_i + +dotnet_naming_rule.types_should_be_pascal_case.severity = suggestion +dotnet_naming_rule.types_should_be_pascal_case.symbols = types +dotnet_naming_rule.types_should_be_pascal_case.style = pascal_case + +dotnet_naming_rule.non_field_members_should_be_pascal_case.severity = suggestion +dotnet_naming_rule.non_field_members_should_be_pascal_case.symbols = non_field_members +dotnet_naming_rule.non_field_members_should_be_pascal_case.style = pascal_case + +# Symbol specifications + +dotnet_naming_symbols.interface.applicable_kinds = interface +dotnet_naming_symbols.interface.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected +dotnet_naming_symbols.interface.required_modifiers = + +dotnet_naming_symbols.types.applicable_kinds = class, struct, interface, enum +dotnet_naming_symbols.types.applicable_accessibilities = 
public, internal, private, protected, protected_internal, private_protected +dotnet_naming_symbols.types.required_modifiers = + +dotnet_naming_symbols.non_field_members.applicable_kinds = property, event, method +dotnet_naming_symbols.non_field_members.applicable_accessibilities = public, internal, private, protected, protected_internal, private_protected +dotnet_naming_symbols.non_field_members.required_modifiers = + +# Naming styles + +dotnet_naming_style.pascal_case.required_prefix = +dotnet_naming_style.pascal_case.required_suffix = +dotnet_naming_style.pascal_case.word_separator = +dotnet_naming_style.pascal_case.capitalization = pascal_case + +dotnet_naming_style.begins_with_i.required_prefix = I +dotnet_naming_style.begins_with_i.required_suffix = +dotnet_naming_style.begins_with_i.word_separator = +dotnet_naming_style.begins_with_i.capitalization = pascal_case + +# Web development files use 2 spaces +[*.{js,jsx,ts,tsx,py,css,json}] +indent_size = 2 diff --git a/src/cs/Vim.G3dNext.Tests/G3dNextTestUtils.cs b/src/cs/Vim.G3dNext.Tests/G3dNextTestUtils.cs new file mode 100644 index 00000000..118167fb --- /dev/null +++ b/src/cs/Vim.G3dNext.Tests/G3dNextTestUtils.cs @@ -0,0 +1,33 @@ +using Vim.Math3d; + +namespace Vim.G3dNext.Tests +{ + public static class G3dNextTestUtils + { + public static G3dVim CreateTestG3d() + { + var g3d = new G3dVim( + instanceTransforms: new Matrix4x4[] { Matrix4x4.Identity }, + instanceMeshes: new int[] { 0 }, + instanceParents: new int[] { -1 }, + instanceFlags: null, + meshSubmeshOffsets: new int[] { 0 }, + submeshIndexOffsets: new int[] { 0, 3, 6 }, + submeshMaterials: new int[] { 0 }, + indices: new int[] { 0, 1, 2, 0, 3, 2, 1, 3, 2 }, + positions: new Vector3[] { Vector3.Zero, Vector3.UnitX, Vector3.UnitY, Vector3.UnitZ }, + materialColors: new Vector4[] { new Vector4(0.25f, 0.5f, 0.75f, 1) }, + materialGlossiness: new float[] { 0.95f }, + materialSmoothness: new float[] { 0.5f }, + shapeColors: null, + shapeVertexOffsets: null, + shapeVertices: null, + shapeWidths: null + ); + g3d.Validate(); + + + return g3d; + } + } +} diff --git a/src/cs/Vim.G3dNext.Tests/Vim.G3dNext.Tests.csproj b/src/cs/Vim.G3dNext.Tests/Vim.G3dNext.Tests.csproj new file mode 100644 index 00000000..032e77f2 --- /dev/null +++ b/src/cs/Vim.G3dNext.Tests/Vim.G3dNext.Tests.csproj @@ -0,0 +1,30 @@ + + + + net6.0 + enable + enable + + false + true + + + + + + + + + + + + + + + + True + + + + + diff --git a/src/cs/Vim.G3dNext.Tests/VimG3dNextTests.cs b/src/cs/Vim.G3dNext.Tests/VimG3dNextTests.cs new file mode 100644 index 00000000..2d357abd --- /dev/null +++ b/src/cs/Vim.G3dNext.Tests/VimG3dNextTests.cs @@ -0,0 +1,71 @@ +using NUnit.Framework; +using NUnit.Framework.Internal; +using Vim.BFastLib; +using Vim.Util.Tests; + +namespace Vim.G3dNext.Tests +{ + [TestFixture] + public static class VimG3dNextTests + { + [Test] + public static void Can_Read_G3d_From_Vim() + { + var g3d = G3dVim.FromVim(TestUtils.ResidencePath); + Assert.IsNotNull(g3d); + } + + [Test] + public static void Can_Ignore_Extra_Attributes() + { + // Both G3dVim and G3dMaterial share 3 attributes + // G3dVim contains many more attributes + // We create a g3dMaterial from the bytes of a g3dVim + // Shows that extra attributes are ignored as they should. 
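            // (The shared attributes are the material colors, glossiness and smoothness buffers,
            // as the assertions below verify.)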
+ + var g3d = G3dNextTestUtils.CreateTestG3d(); + var g3dMats = new G3dMaterials(g3d.ToBFast()); + + Assert.IsNotNull(g3dMats); + Assert.AreEqual(g3d.MaterialColors, g3dMats.MaterialColors); + Assert.AreEqual(g3d.MaterialGlossiness, g3dMats.MaterialGlossiness); + Assert.AreEqual(g3d.MaterialSmoothness, g3dMats.MaterialSmoothness); + } + + [Test] + public static void Can_Write_And_Read() + { + var expected = G3dNextTestUtils.CreateTestG3d(); + var g3d = new G3dVim(expected.ToBFast()); + Assert.IsTrue(g3d.Equals(expected)); + } + + [Test] + public static void Can_Merge_two_g3d() + { + var g3d = G3dNextTestUtils.CreateTestG3d(); + var merged = g3d.Merge(g3d); + + var expected = new G3dVim( + instanceTransforms: g3d.InstanceTransforms.Concat(g3d.InstanceTransforms).ToArray(), + instanceMeshes: g3d.InstanceMeshes.Concat(g3d.InstanceMeshes.Select(i => i + g3d.GetMeshCount())).ToArray(), + instanceParents: g3d.InstanceParents.Concat(g3d.InstanceParents).ToArray(), + instanceFlags: null, + meshSubmeshOffsets: g3d.MeshSubmeshOffsets.Concat(g3d.MeshSubmeshOffsets.Select(i => g3d.GetSubmeshCount())).ToArray(), + submeshIndexOffsets: g3d.SubmeshIndexOffsets.Concat(g3d.SubmeshIndexOffsets.Select(i => i + g3d.GetIndexCount())).ToArray(), + submeshMaterials: g3d.SubmeshMaterials.Concat(g3d.SubmeshMaterials.Select(i => i + g3d.GetMaterialCount())).ToArray(), + indices: g3d.Indices.Concat(g3d.Indices.Select(i => i + g3d.Positions.Length)).ToArray(), + positions: g3d.Positions.Concat(g3d.Positions).ToArray(), + materialColors: g3d.MaterialColors.Concat(g3d.MaterialColors).ToArray(), + materialGlossiness: g3d.MaterialGlossiness.Concat(g3d.MaterialGlossiness).ToArray(), + materialSmoothness: g3d.MaterialSmoothness.Concat(g3d.MaterialSmoothness).ToArray(), + shapeColors: null, + shapeWidths: null, + shapeVertices: null, + shapeVertexOffsets: null + ); + Assert.IsTrue(merged.Equals(expected)); + } + } +} + diff --git a/src/cs/bfast/Vim.BFast.Tests/BFastTestProgram.cs b/src/cs/bfast/Vim.BFast.Tests/BFastTestProgram.cs deleted file mode 100644 index f4b3cab8..00000000 --- a/src/cs/bfast/Vim.BFast.Tests/BFastTestProgram.cs +++ /dev/null @@ -1,198 +0,0 @@ -/* - BFAST - Binary Format for Array Streaming and Transmission - Copyright 2019, VIMaec LLC - Copyright 2018, Ara 3D, Inc. 
- Usage licensed under terms of MIT License - https://github.com/vimaec/bfast -*/ - -using NUnit.Framework; -using System; -using System.Diagnostics; -using System.IO; -using System.Linq; -using System.Numerics; - -namespace Vim.BFast.Tests -{ - public static class BFastTests - { - public static int Mb = 1000 * 1000; - public static int Gb = 1000 * Mb; - - static byte[] ByteArray(int numBytes) => - Enumerable.Range(0, numBytes).Select(i => (byte)i).ToArray(); - - static readonly byte[] Array1MB - = ByteArray(Mb); - - static readonly double[] Array1GB - = Enumerable.Range(0, Gb / 8).Select(i => (double)i).ToArray(); - - public static (string, byte[])[] ZeroBuffers - = Enumerable.Empty<(string, byte[])>().ToArray(); - - public static (string, byte[])[] Ten1MBBuffers - = Enumerable.Range(0, 10).Select(i => (i.ToString(), Array1MB)).ToArray(); - - public static (string, double[])[] One1GBBuffer - => Enumerable.Range(0, 1).Select(i => (i.ToString(), Array1GB)).ToArray(); - - public static void TestBFastBytes(byte[] bytes) - { - Console.WriteLine($"Size of buffer = {bytes.Length}"); - Console.WriteLine($"First 8 bytes = {string.Join(", ", bytes.Take(8))}"); - } - - public class DisposableTimer : IDisposable - { - Stopwatch Stopwatch = Stopwatch.StartNew(); - - public void Dispose() - => Console.WriteLine($"Elapsed = {Stopwatch.ElapsedMilliseconds / 1000}s {Stopwatch.ElapsedMilliseconds % 1000}ms"); - } - - public static DisposableTimer CreateTimer(string message = null) - { - Console.WriteLine($"Starting timer {message ?? string.Empty}"); - return new DisposableTimer(); - } - - [Test] - public static void TestStringPacking() - { - var noStrings = new string[0]; - var oneStrings = new[] { "" }; - var twoStrings = new[] { "", "ab" }; - var threeStrings = new[] { "a", "b", "" }; - var noPacked = BFast.PackStrings(noStrings); - var onePacked = BFast.PackStrings(oneStrings); - var twoPacked = BFast.PackStrings(twoStrings); - var threePacked = BFast.PackStrings(threeStrings); - Assert.AreEqual(0, noPacked.Length); - Assert.AreEqual(1, onePacked.Length); - Assert.AreEqual(4, twoPacked.Length); - Assert.AreEqual(5, threePacked.Length); - Assert.AreEqual(noStrings, BFast.UnpackStrings(noPacked)); - Assert.AreEqual(oneStrings, BFast.UnpackStrings(onePacked)); - Assert.AreEqual(twoStrings, BFast.UnpackStrings(twoPacked)); - Assert.AreEqual(threeStrings, BFast.UnpackStrings(threePacked)); - } - - [Test] - public static void BasicTests() - { - using (CreateTimer("ZeroBuffers")) - { - var bytes = BFast.WriteBFastToBytes(ZeroBuffers); - TestBFastBytes(bytes); - var tmp = BFast.ReadBFast(bytes).ToArray(); - Assert.AreEqual(0, tmp.Length); - } - using (CreateTimer("Ten1MBBuffers")) - { - var bytes = BFast.WriteBFastToBytes(Ten1MBBuffers); - TestBFastBytes(bytes); - var tmp = BFast.ReadBFast(bytes).ToArray(); - Assert.AreEqual(10, tmp.Length); - Assert.AreEqual(tmp.Select(x => x.Name).ToArray(), Enumerable.Range(0, 10).Select(x => x.ToString()).ToArray()); - Assert.AreEqual(tmp.Select(x => (int)x.NumBytes()).ToArray(), Enumerable.Repeat(Mb, 10).ToArray()); - - for (var i = 0; i < 10; ++i) - Assert.AreEqual(Ten1MBBuffers[i].Item2, tmp[i].ToBytes(), $"Buffer {i} are different"); - } - using (CreateTimer("OneGBBuffer")) - { - //Enumerable.Range(0, Gb).Select(i => (double)i).ToArray() - var bytes = BFast.WriteBFastToBytes(One1GBBuffer); - TestBFastBytes(bytes); - var tmp = BFast.ReadBFast(bytes).ToArray(); - Assert.AreEqual(1, tmp.Length); - Assert.AreEqual(tmp.Select(x => x.Name).ToArray(), new[] { "0" }); - 
Assert.AreEqual(tmp.Select(x => x.NumBytes()).ToArray(), Enumerable.Repeat((long)Gb, 1).ToArray()); - } - } - - public static BFastBuilder BFastWithSubs(int numBuffers, int numLevels, Func numBytes) - => Enumerable.Range(0, numBuffers).Aggregate(new BFastBuilder(), - (bld, i) => bld.Add(i.ToString(), - numLevels > 0 - ? BFastWithSubs(numBuffers, numLevels - 1, numBytes) - : BFastRoot(numBuffers, numBytes)) - ); - - public static BFastBuilder BFastRoot(int numBuffers, Func numBytes) - => Enumerable.Range(0, numBuffers).Aggregate(new BFastBuilder(), (bld, i) => bld.Add(i.ToString(), ByteArray(numBytes()).ToBuffer())); - - public static void ValidateBFast(byte[] buffer, BFastBuilder srcBuilder) - { - var bfast = BFast.ReadBFast(buffer).ToArray(); - - var names = srcBuilder.BufferNames().ToArray(); - var sizes = srcBuilder.BufferSizes().ToArray(); - var numBuffers = names.Count(); - // We should have the same number of buffers - AssertEquals(bfast.Length, numBuffers); - for (var i = 0; i < numBuffers; i++) - { - // Of equal size - AssertEquals(bfast[i].Name, names[i]); - AssertEquals(bfast[i].Data.Length, sizes[i]); - // And they might be sub-buffers - if (srcBuilder.Children[i].Item2 is BFastBuilder childBuilder) - ValidateBFast(bfast[i].ToBytes(), childBuilder); - } - } - - [Test] - public static void TestNestedBFast() - { - var random = new Random(1234567); - // Create a nested BFast structure 3 layers deep with randomly-sized buffers between 1 & 256 bytes size - var builder = BFastWithSubs(3, 3, () => random.Next(1, 256)); - // Create a buffer to recieve this structure; - var buffer = new byte[builder.GetSize()]; - var stream = new MemoryStream(buffer, true); - builder.Write(stream); - - // Now, lets try and deserialize these buffers: - ValidateBFast(buffer, builder); - } - - public static void AssertEquals(T x, T y) - { - if (!x.Equals(y)) - throw new Exception($"Expected value {x} but instead got {y}"); - } - - /// - /// This test cannot be run from the test runner, because the App.Config option - /// has to be enabled from within the host program. 
- /// - public static void ReallyBigTest() - { - var xs = new Vector3[500 * 1000 * 1000]; - for (var i = 0; i < xs.Length; ++i) - xs[i] = new Vector3(i, i, i); - var filePath = Path.Combine(Path.GetTempPath(), "really_big_test.bfast"); - using (var stream = File.OpenWrite(filePath)) - stream.WriteBFast(new[] { ("buffer", xs) }); - - var name = ""; - Vector3[] ys; - using (var stream = File.OpenRead(filePath)) - { - var buffers = BFast.ReadBFast(stream).ToArray(); - if (buffers.Length != 1) - throw new Exception($"Expected exactly one buffer, not {buffers.Length}"); - (name, ys) = (buffers[0].Name, buffers[1].AsArray()); - } - if (name != "buffer") - throw new Exception($"Expected name of buffer to be buffer not {name}"); - AssertEquals(xs.Length, ys.Length); - AssertEquals(xs[0], ys[0]); - AssertEquals(xs[1], ys[1]); - AssertEquals(xs[xs.Length - 1], ys[ys.Length - 1]); - } - } -} diff --git a/src/cs/bfast/Vim.BFast.Tests/BFastTests.cs b/src/cs/bfast/Vim.BFast.Tests/BFastTests.cs new file mode 100644 index 00000000..7e4a9742 --- /dev/null +++ b/src/cs/bfast/Vim.BFast.Tests/BFastTests.cs @@ -0,0 +1,576 @@ +using NUnit.Framework; +using NUnit.Framework.Constraints; +using System.Data; +using Vim.BFastLib.Core; +using Vim.Util.Tests; + +namespace Vim.BFastLib.Tests +{ + public class BFastTests + { + public static string ResultPath = Path.Combine(VimFormatRepoPaths.OutDir, "input.bfast"); + public static string ResultPath2 = Path.Combine(VimFormatRepoPaths.OutDir, "input.bfast"); + public static string ResidencePath = VimFormatRepoPaths.GetLatestWolfordResidenceVim(); + + BFast bfast; + + [SetUp] + public void Setup() + { + bfast = new BFast(); + + if (!Directory.Exists(VimFormatRepoPaths.OutDir)) + { + Directory.CreateDirectory(VimFormatRepoPaths.OutDir); + } + if (File.Exists(ResultPath)) + { + File.Delete(ResultPath); + } + if (File.Exists(ResultPath2)) + { + File.Delete(ResultPath2); + } + } + + private void TestBeforeAfter(Action method) + { + method(bfast); + + // Test that it also works after write/read + var next = new BFast(bfast.ToMemoryStream()); + method(next); + } + + private void TestBeforeAfter(Func method, IResolveConstraint constraint) + { + Assert.That(method(bfast), constraint); + + // Test that it also works after write/read + var next = new BFast(bfast.ToMemoryStream()); + Assert.That(method(next), constraint); + } + + private void TestBeforeAfterFile(Func method, IResolveConstraint constraint) + { + Assert.That(method(bfast), constraint); + using (var file = File.Open(ResultPath, FileMode.CreateNew)) + { + bfast.Write(file); + file.Seek(0, SeekOrigin.Begin); + + // Test that it also works after write/read + var next = new BFast(file); + Assert.That(method(next), constraint); + } + } + + #region empty + + [Test] + public void EmptyBFast_Has_No_Entries() + { + var bfast = new BFast(); + Assert.That(bfast.Entries.Count(), Is.EqualTo(0)); + } + + [Test] + public void EmptyBFast_GetArray_Returns_Null() + { + var bfast = new BFast(); + Assert.IsNull(bfast.GetArray("missing")); + } + + [Test] + public void EmptyBFast_GetBfast_Returns_Null() + { + var bfast = new BFast(); + Assert.IsNull(bfast.GetBFast("missing")); + } + + [Test] + public void EmptyBFast_GetEnumerable_Returns_Null() + { + var bfast = new BFast(); + Assert.IsNull(bfast.GetEnumerable("missing")); + } + + [Test] + public void EmptyBFast_Remove_Does_Nothing() + { + var bfast = new BFast(); + bfast.Remove("missing"); + } + + + [Test] + public void EmptyBFast_Writes_Header() + { + var bfast = new BFast(); + var stream 
= new MemoryStream(); + bfast.Write(stream); + + stream.Seek(0, SeekOrigin.Begin); + var raw = BFastHeader.FromStream(stream); + + Assert.That(raw.Ranges.Count, Is.EqualTo(0)); + } + #endregion + + #region enumerable + + [Test] + public void SetEnumerable_Adds_Entry() + { + bfast.SetEnumerable("A", () => new int[3] { 0, 1, 2 }); + TestBeforeAfter(b => b.Entries.Count(), Is.EqualTo(1)); + } + + [Test] + public void SetEnumerable_Then_GetEnumerable() + { + var expected = new int[3] { 0, 1, 2 }; + bfast.SetEnumerable("A", () => expected); + TestBeforeAfter(b => b.GetEnumerable("A"), Is.EqualTo(expected)); + } + + [Test] + public void SetEnumerable_Then_GetEnumerable_Bytes() + { + bfast.SetEnumerable("A", () => new int[3] { 0, 1, 2 }); + var expected = (new int[3] { 0, 1, 2 }).SelectMany(i => BitConverter.GetBytes(i)); + TestBeforeAfter(b => b.GetEnumerable("A"), Is.EqualTo(expected)); + } + + [Test] + public void SetEnumerable_Then_GetEnumerable_Float() + { + bfast.SetEnumerable("A", () => new int[3] { 0, 1, 2 }); + var expected = (new int[3] { 0, 1, 2 }).Select(i => BitConverter.Int32BitsToSingle(i)); + TestBeforeAfter(b => b.GetEnumerable("A"), Is.EqualTo(expected)); + } + + [Test] + public void SetEnumerable_Then_GetArray() + { + bfast.SetEnumerable("A", () => new int[3] { 0, 1, 2 }); + var expected = new int[3] { 0, 1, 2 }; + TestBeforeAfter(b => b.GetArray("A"), Is.EqualTo(expected)); + } + + [Test] + public void SetEnumerable_Then_GetArray_Bytes() + { + bfast.SetEnumerable("A", () => new int[3] { 0, 1, 2 }); + var expected = (new int[3] { 0, 1, 2 }).SelectMany(i => BitConverter.GetBytes(i)); + TestBeforeAfter(b => b.GetArray("A"), Is.EqualTo(expected)); + } + + [Test] + public void SetEnumerable_Then_GetArray_Float() + { + bfast.SetEnumerable("A", () => new int[3] { 0, 1, 2 }); + var expected = (new int[3] { 0, 1, 2 }).Select(i => BitConverter.Int32BitsToSingle(i)); + // MemoryStream can't handle such size. 
+ TestBeforeAfter(b => b.GetArray("A"), Is.EqualTo(expected)); + + } + + [Test] + public void SetEnumerable_Then_GetBFast_Throws() + { + bfast.SetEnumerable("A", () => new int[3] { 0, 1, 2 }); + TestBeforeAfter(b => { + Assert.That(() => b.GetBFast("A"), Throws.Exception); + }); + } + + [Test] + public void SetEnumerable_Then_GetBFast_ValidBytes() + { + var sub = new BFast(); + bfast.SetBFast("A", sub); + var bytes = bfast.GetArray("A"); + bfast.SetEnumerable("A",() => bytes); + + TestBeforeAfter(b => b.GetBFast("A"), Is.EqualTo(sub)); + } + + [Test, Explicit] + public void SetEnumerable_Then_GetEnumerable_Lots() + { + IEnumerable GetLots() + { + return Enumerable.Range(0, int.MaxValue).Concat(Enumerable.Range(0, 10)); + } + bfast.SetEnumerable("A", GetLots); + + TestBeforeAfterFile(b => b.GetEnumerable("A"), Is.EqualTo(GetLots())); + } + + #endregion + + #region array + [Test] + public void SetArray_Adds_Entry() + { + bfast.SetArray("A", new int[3] { 0, 1, 2 }); + TestBeforeAfter(b => b.Entries.Count(), Is.EqualTo(1)); + } + + [Test] + public void SetArray_Then_GetArray() + { + var array = new int[3] { 0, 1, 2 }; + bfast.SetArray("A", array); + TestBeforeAfter(b => b.GetArray("A"), Is.EqualTo(array)); + } + + [Test] + public void SetArray_Then_GetArray_Bytes() + { + var array = new int[3] { 0, 1, 2 }; + var expected = array.SelectMany(i => BitConverter.GetBytes(i)); + + bfast.SetArray("A", array); + TestBeforeAfter(b => bfast.GetArray("A"), Is.EqualTo(expected)); + } + + [Test] + public void SetArray_Then_GetArray_Float() + { + var array = new int[3] { 0, 1, 2 }; + var expected = array.Select(i => BitConverter.Int32BitsToSingle(i)); + + bfast.SetArray("A", array); + TestBeforeAfter(b => bfast.GetArray("A"), Is.EqualTo(expected)); + } + + [Test] + public void SetArray_Then_GetEnumerable() + { + var array = new int[3] { 0, 1, 2 }; + bfast.SetArray("A", array); + TestBeforeAfter(b => b.GetEnumerable("A"), Is.EqualTo(array)); + } + + [Test] + public void SetArray_Then_GetEnumerable_Bytes() + { + var array = new int[3] { 0, 1, 2 }; + var expected = array.SelectMany(i => BitConverter.GetBytes(i)); + + bfast.SetArray("A", array); + TestBeforeAfter(b => b.GetEnumerable("A"), Is.EqualTo(expected)); + } + + [Test] + public void SetArray_Then_GetEnumerable_Float() + { + var array = new int[3] { 0, 1, 2 }; + var expected = array.Select(i => BitConverter.Int32BitsToSingle(i)); + + bfast.SetArray("A", array); + var result = bfast.GetEnumerable("A"); + } + + [Test] + public void SetArray_Then_GetBFast_Throws() + { + bfast.SetArray("A", new int[3] { 0, 1, 2 }); + TestBeforeAfter(b => { + Assert.That(() => b.GetBFast("A"), Throws.Exception); + }); + } + + [Test] + public void SetArray_Then_SetArray_Replaces() + { + var ints = new int[3] { 0, 1, 2 }; + var floats = new float[3] { 0.1f, 0.2f, 0.3f }; + bfast.SetArray("A", ints); + bfast.SetArray("A", floats); + TestBeforeAfter(b => { + Assert.That(b.GetArray("A"), Is.EqualTo(floats)); + Assert.That(b.GetArray("A"), Is.Not.EqualTo(ints)); + }); + } + + [Test] + public void SetArray_Then_SetBFast_Replaces() + { + bfast.SetArray("A", new int[3] { 0, 1, 2 }); + bfast.SetBFast("A", new BFast()); + TestBeforeAfter(b => + { + // That's the bfast read as an ints. 
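                // (An empty nested BFast still serializes to at least its 32-byte header described in
                // docs/bfast.md, so read back as ints it yields more than the 3 values the array held,
                // hence the GreaterThan(3) assertion below.)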
+ Assert.That(b.GetArray("A").Length, Is.GreaterThan(3)); + Assert.That(b.GetBFast("A"), Is.EqualTo(new BFast())); + }); + } + #endregion + + [Test] + public void SetBFast_Adds_Entry() + { + bfast.SetBFast("A", new BFast()); + TestBeforeAfter(b => b.Entries.Count(), Is.EqualTo(1)); + } + + [Test] + public void SetBFast_Then_GetBFast_Returns_Same() + { + var expected = new BFast(); + bfast.SetBFast("A", expected); + TestBeforeAfter(b => b.GetBFast("A"), Is.EqualTo(expected)); + } + + [Test] + public void SetBFast_Then_GetBFast_Nested() + { + using (var file = File.Open(ResidencePath, FileMode.Open)) + { + var (b1, b2) = (new BFast(), new BFast()); + b1.SetBFast("b2", b2); + bfast.SetBFast("b1", b1); + + var mem = bfast.ToMemoryStream(); + var r = new BFast(mem); + var r1 = r.GetBFast("b1"); + var r2 = r1.GetBFast("b2"); + + Assert.NotNull(r); + Assert.NotNull(r1); + Assert.NotNull(r2); + } + } + + #region compress + [Test] + public void Compression_Decompress_Uncompressed_Returns_Throws() + { + var expected = new BFast(); + bfast.SetBFast("A", expected); + TestBeforeAfter(b => + { + Assert.That(() => b.GetBFast("A", decompress: true), Throws.Exception); + }); + } + + [Test] + public void Compression_Get_Compressed_Returns_Null() + { + var expected = new BFast(); + bfast.SetBFast("A", expected, compress: true); + TestBeforeAfter(b => + { + Assert.That(() => b.GetBFast("A"), Throws.Exception); + }); + } + + [Test] + public void Compression_Get_Uncompressed_Works() + { + // This is tested by the bfast tests. + } + + [Test] + public void Compression_Decompress_Compressed_Works() + { + var ints = new int[3] { 0, 1, 2 }; + + var bfastA = new BFast(); + bfastA.SetArray("B", ints); + bfast.SetBFast("A", bfastA, compress: true); + + TestBeforeAfter((b) => + { + var result = b.GetBFast("A", decompress: true); + var b2 = result.GetArray("B"); + + Assert.That(result.Entries.Count(), Is.EqualTo(1)); + Assert.That(b2, Is.EqualTo(ints)); + }); + } + #endregion + + #region bfast + + [Test] + public void SetBFast_Then_SetBFast_Replaces() + { + var bfastA = new BFast(); + bfast.SetBFast("A", bfastA); + + var bfastB = new BFast(); + bfastB.SetArray("A", new int[] { 1, 2, 3 }); + bfast.SetBFast("A", bfastB); + + TestBeforeAfter((b) => + { + var result = b.GetBFast("A"); + Assert.That(bfastA, Is.Not.EqualTo(bfastB)); + Assert.That(result, Is.Not.EqualTo(bfastA)); + Assert.That(result, Is.EqualTo(bfastB)); + }); + } + + [Test] + public void SetBFast_Then_SetArray_Replaces() + { + var ints = new int[3] { 0, 1, 2 }; + bfast.SetBFast("A", new BFast()); + bfast.SetArray("A", ints); + TestBeforeAfter((b) => + { + Assert.That(() => b.GetBFast("A"), Throws.Exception); + Assert.That(b.GetArray("A"), Is.EqualTo(ints)); + }); + + } + #endregion + + [Test] + public void Remove_Missing_DoesNothing() + { + TestBeforeAfter((b) => + { + b.Remove("A"); + Assert.That(b.Entries.Count() == 0); + }); + } + + [Test] + public void Remove_Array() + { + bfast.SetArray("A", new int[3] { 0, 1, 2 }); + bfast.Remove("A"); + TestBeforeAfter((b) => + { + Assert.IsNull(b.GetArray("A")); + Assert.That(b.Entries.Count() == 0); + }); + } + + [Test] + public void Remove_BFast() + { + bfast.SetBFast("A", new BFast()); + bfast.Remove("A"); + + TestBeforeAfter((b) => + { + Assert.IsNull(bfast.GetBFast("A")); + Assert.That(bfast.Entries.Count() == 0); + }); + } + + [Test] + public void Removed_InChild_Not_Written() + { + using (var residence = File.OpenRead(ResidencePath)) + { + var input = new BFast(residence); + var geometry = 
input.GetBFast("geometry"); + geometry.Remove("g3d:vertex:position:0:float32:3"); + input.SetBFast("geometry", geometry); + input.Write(ResultPath); + } + + using (var stream = File.OpenRead(ResultPath)) + { + var bfast = new BFast(stream); + var geometry = bfast.GetBFast("geometry"); + Assert.That(bfast.Entries.Count() == 5); + Assert.That(geometry.Entries.Count() == 16); + Assert.IsNull(geometry.GetArray("g3d:vertex:position:0:float32:3")); + } + } + + [Test] + public void Write_Then_Read_NestedBFast() + { + var bfast = new BFast(); + var child = new BFast(); + var grandChild = new BFast(); + + bfast.SetBFast("child", child); + child.SetBFast("grandChild", grandChild); + bfast.Write(ResultPath); + + using (var stream = File.OpenRead(ResultPath)) + { + var other = new BFast(stream); + var child2 = other.GetBFast("child"); + var grandChild2 = child2.GetBFast("grandChild"); + + Assert.That(other.Entries.Count() == 1); + Assert.That(child2.Entries.Count() == 1); + Assert.That(grandChild2.Entries.Count() == 0); + } + } + + [Test] + public void Write_Then_Read_NestedBFast_WithArray() + { + var bfast = new BFast(); + var child = new BFast(); + var grandChild = new BFast(); + + bfast.SetBFast("child", child); + child.SetBFast("grandChild", grandChild); + grandChild.SetArray("A", new int[3] { 0, 1, 2 }); + + + bfast.Write(ResultPath); + using (var stream = File.OpenRead(ResultPath)) + { + var other = new BFast(stream); + var child2 = other.GetBFast("child"); + var grandChild2 = child2.GetBFast("grandChild"); + var result = grandChild2.GetArray("A"); + + Assert.That(other.Entries.Count() == 1); + Assert.That(child2.Entries.Count() == 1); + Assert.That(grandChild2.Entries.Count() == 1); + Assert.That(result, Is.EqualTo(new int[3] { 0, 1, 2 })); + } + } + + [Test] + public void Write_Then_Read_Mixed_Sources() + { + var basic = new BFast(); + var dummy = new MemoryStream(); + basic.SetArray("ints", new int[1] { 1 }); + basic.SetArray("floats", new float[1] { 2.0f }); + basic.Write(dummy); + + using (var residence = File.OpenRead(ResidencePath)) + { + dummy.Seek(0, SeekOrigin.Begin); + var input = new BFast(dummy); + + var inputResidence = new BFast(residence); + var output = new BFast(); + + output.SetBFast("input", input); + output.SetBFast("residence", inputResidence); + output.Write(ResultPath2); + } + + using (var stream = File.OpenRead(ResultPath2)) + { + var bfast = new BFast(stream); + var input = bfast.GetBFast("input"); + var residence = bfast.GetBFast("residence"); + var geometry = residence.GetBFast("geometry"); + + Assert.That(bfast.Entries.Count() == 2); + Assert.That(input.Entries.Count() == 2); + Assert.That(residence.Entries.Count() == 5); + Assert.That(geometry.Entries.Count() == 17); + } + } + } +} diff --git a/src/cs/bfast/Vim.BFast.Tests/Vim.BFast.Tests.csproj b/src/cs/bfast/Vim.BFast.Tests/Vim.BFast.Tests.csproj index 3ce7dc39..c7ddf233 100644 --- a/src/cs/bfast/Vim.BFast.Tests/Vim.BFast.Tests.csproj +++ b/src/cs/bfast/Vim.BFast.Tests/Vim.BFast.Tests.csproj @@ -1,18 +1,29 @@  - - net6.0 - false - + + net6.0 + enable + enable - - - - - + false + true + + + + True + + + - - - + + + + + + + + + + diff --git a/src/cs/bfast/Vim.BFast/BFast.cs b/src/cs/bfast/Vim.BFast/BFast.cs deleted file mode 100644 index 885fe582..00000000 --- a/src/cs/bfast/Vim.BFast/BFast.cs +++ /dev/null @@ -1,439 +0,0 @@ -/* - BFAST - Binary Format for Array Streaming and Transmission - Copyright 2019, VIMaec LLC - Copyright 2018, Ara 3D, Inc. 
- Usage licensed under terms of MIT License - https://github.com/vimaec/bfast - - The BFAST format is a simple, generic, and efficient representation of - buffers (arrays of binary data) with optional names. - - It can be used in place of a zip when compression is not required, or when a simple protocol - is required for transmitting data to/from disk, between processes, or over a network. -*/ - -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.IO; -using System.Linq; -using System.Text; - -namespace Vim.BFast -{ - /// - /// Callback function allows clients to control writing the data to the output stream - /// - public delegate long BFastWriterFn(Stream writingStream, int bufferIdx, string bufferName, long bytesToWrite); - - /// - /// Wraps an array of byte buffers encoding a BFast structure and provides validation and safe access to the memory. - /// The BFAST file/data format is structured as follows: - /// * File header - Fixed size file descriptor - /// * Ranges - An array of pairs of offsets that point to the begin and end of each data arrays - /// * Array data - All of the array data is contained in this section. - /// - public static class BFast - { - /// - /// Given a position in the stream, tells us where the the next aligned position will be, if it the current position is not aligned. - /// - public static long ComputeNextAlignment(long n) - => IsAligned(n) ? n : n + Constants.ALIGNMENT - (n % Constants.ALIGNMENT); - - /// - /// Given a position in the stream, computes how much padding is required to bring the value to an aligned point. - /// - public static long ComputePadding(long n) - => ComputeNextAlignment(n) - n; - - /// - /// Computes the padding requires after the array of BFastRanges are written out. - /// - /// - /// - public static long ComputePadding(BFastRange[] ranges) - => ComputePadding(BFastPreamble.Size + ranges.Length * BFastRange.Size); - - /// - /// Given a position in the stream, tells us whether the position is aligned. - /// - public static bool IsAligned(long n) - => n % Constants.ALIGNMENT == 0; - - /// - /// Writes n zero bytes. - /// - public static void WriteZeroBytes(this BinaryWriter bw, long n) - { - for (var i = 0L; i < n; ++i) - bw.Write((byte)0); - } - - /// - /// Checks that the stream (if seekable) is well aligned - /// - public static void CheckAlignment(Stream stream) - { - if (!stream.CanSeek) - return; - // TODO: Check with CD: Should we bail out here? This means that any - // alignment checks for a currently-writing stream are effectively ignored. 
- if (stream.Position == stream.Length) - return; - if (!IsAligned(stream.Position)) - throw new Exception($"Stream position {stream.Position} is not well aligned"); - } - - /// - /// Converts a collection of strings, into a null-separated byte[] array - /// - public static byte[] PackStrings(this IEnumerable strings) - { - var r = new List(); - foreach (var name in strings) - { - var bytes = Encoding.UTF8.GetBytes(name); - r.AddRange(bytes); - r.Add(0); - } - return r.ToArray(); - } - - /// - /// Converts a byte[] array encoding a collection of strings separate by NULL into an array of string - /// - public static string[] UnpackStrings(this byte[] bytes) - { - var r = new List(); - if (bytes.Length == 0) - return r.ToArray(); - var prev = 0; - for (var i = 0; i < bytes.Length; ++i) - { - if (bytes[i] == 0) - { - r.Add(Encoding.UTF8.GetString(bytes, prev, i - prev)); - prev = i + 1; - } - } - if (prev < bytes.Length) - r.Add(Encoding.UTF8.GetString(bytes, prev, bytes.Length - prev)); - return r.ToArray(); - } - - /// - /// Creates a BFAST structure, without any actual data buffers, from a list of sizes of buffers (not counting the name buffer). - /// Used as an intermediate step to create a BFAST. - /// - public static BFastHeader CreateBFastHeader(this long[] bufferSizes, string[] bufferNames) - { - if (bufferNames.Length != bufferSizes.Length) - throw new Exception($"The number of buffer sizes {bufferSizes.Length} is not equal to the number of buffer names {bufferNames.Length}"); - - var header = new BFastHeader - { - Names = bufferNames - }; - header.Preamble.Magic = Constants.Magic; - header.Preamble.NumArrays = bufferSizes.Length + 1; - - // Allocate the data for the ranges - header.Ranges = new BFastRange[header.Preamble.NumArrays]; - header.Preamble.DataStart = ComputeNextAlignment(header.Preamble.RangesEnd); - - var nameBufferLength = PackStrings(bufferNames).LongLength; - var sizes = (new[] { nameBufferLength }).Concat(bufferSizes).ToArray(); - - // Compute the offsets for the data buffers - var curIndex = header.Preamble.DataStart; - var i = 0; - foreach (var size in sizes) - { - curIndex = ComputeNextAlignment(curIndex); - Debug.Assert(IsAligned(curIndex)); - - header.Ranges[i].Begin = curIndex; - curIndex += size; - - header.Ranges[i].End = curIndex; - i++; - } - - // Finish with the header - // Each buffer we contain is padded to ensure the next one - // starts on alignment, so we pad our DataEnd to reflect this reality - header.Preamble.DataEnd = ComputeNextAlignment(curIndex); - - // Check that everything adds up - return header.Validate(); - } - - /// - /// Checks that the header values are sensible, and throws an exception otherwise. 
- /// - public static BFastPreamble Validate(this BFastPreamble preamble) - { - if (preamble.Magic != Constants.SameEndian && preamble.Magic != Constants.SwappedEndian) - throw new Exception($"Invalid magic number {preamble.Magic}"); - - if (preamble.DataStart < BFastPreamble.Size) - throw new Exception($"Data start {preamble.DataStart} cannot be before the file header size {BFastPreamble.Size}"); - - if (preamble.DataStart > preamble.DataEnd) - throw new Exception($"Data start {preamble.DataStart} cannot be after the data end {preamble.DataEnd}"); - - if (!IsAligned(preamble.DataEnd)) - throw new Exception($"Data end {preamble.DataEnd} should be aligned"); - - if (preamble.NumArrays < 0) - throw new Exception($"Number of arrays {preamble.NumArrays} is not a positive number"); - - if (preamble.NumArrays > preamble.DataEnd) - throw new Exception($"Number of arrays {preamble.NumArrays} can't be more than the total size"); - - if (preamble.RangesEnd > preamble.DataStart) - throw new Exception($"End of range {preamble.RangesEnd} can't be after data-start {preamble.DataStart}"); - - return preamble; - } - - /// - /// Checks that the header values are sensible, and throws an exception otherwise. - /// - public static BFastHeader Validate(this BFastHeader header) - { - var preamble = header.Preamble.Validate(); - var ranges = header.Ranges; - var names = header.Names; - - if (preamble.RangesEnd > preamble.DataStart) - throw new Exception($"Computed arrays ranges end must be less than the start of data {preamble.DataStart}"); - - if (ranges == null) - throw new Exception("Ranges must not be null"); - - var min = preamble.DataStart; - var max = preamble.DataEnd; - - for (var i = 0; i < ranges.Length; ++i) - { - var begin = ranges[i].Begin; - if (!IsAligned(begin)) - throw new Exception($"The beginning of the range is not well aligned {begin}"); - var end = ranges[i].End; - if (begin < min || begin > max) - throw new Exception($"Array offset begin {begin} is not in valid span of {min} to {max}"); - if (i > 0) - { - if (begin < ranges[i - 1].End) - throw new Exception($"Array offset begin {begin} is overlapping with previous array {ranges[i - 1].End}"); - } - - if (end < begin || end > max) - throw new Exception($"Array offset end {end} is not in valid span of {begin} to {max}"); - } - - if (names.Length < ranges.Length - 1) - throw new Exception($"Number of buffer names {names.Length} is not one less than the number of ranges {ranges.Length}"); - - return header; - } - - /// - /// Reads a BFAST from a file as a collection of named buffers. - /// - public static INamedBuffer[] Read(string filePath) - { - using (var stream = File.OpenRead(filePath)) - return Read(stream); - } - - /// - /// Reads a BFAST from a stream as a collection of named buffers. - /// - public static INamedBuffer[] Read(Stream stream) - => stream.ReadBFast().ToArray(); - - /// - /// Reads a BFAST buffer from a stream as a collection of named buffers. - /// This call limits the buffers to 2GB. - /// - public static IEnumerable ReadBFast(this Stream stream) - { - foreach (var br in stream.GetBFastBufferReaders()) - { - var s = br.Seek(); - yield return s.ReadArray((int)br.Size).ToNamedBuffer(br.Name); - } - } - - /// - /// Reads a BFAST from a stream as a collection of named buffers. - /// This call limits the buffers to 2GB. 
- /// - public static unsafe IEnumerable> ReadBFast(this Stream stream) where T : unmanaged - => stream.ReadBFast>((s, bufferName, bufferLength) - => s.ReadArray((int)(bufferLength / sizeof(T))).ToNamedBuffer(bufferName)) - .Select(item => item.Item2); - - /// - /// Reads a BFAST from a byte array as a collection of named buffers. - /// This call limits the buffers to 2GB. - /// - public static INamedBuffer[] ReadBFast(this byte[] bytes) - { - using (var stream = new MemoryStream(bytes)) - return ReadBFast(stream).ToArray(); - } - - /// - /// The total size required to put a BFAST in the header. - /// - public static long ComputeSize(long[] bufferSizes, string[] bufferNames) - => CreateBFastHeader(bufferSizes, bufferNames).Preamble.DataEnd; - - /// - /// Writes the BFAST header and name buffer to stream using the provided BinaryWriter. The BinaryWriter will be properly aligned by padding zeros - /// - public static BinaryWriter WriteBFastHeader(this Stream stream, BFastHeader header) - { - if (header.Ranges.Length != header.Names.Length + 1) - throw new Exception($"The number of ranges {header.Ranges.Length} must be equal to one more than the number of names {header.Names.Length}"); - var bw = new BinaryWriter(stream); - bw.Write(header.Preamble.Magic); - bw.Write(header.Preamble.DataStart); - bw.Write(header.Preamble.DataEnd); - bw.Write(header.Preamble.NumArrays); - foreach (var r in header.Ranges) - { - bw.Write(r.Begin); - bw.Write(r.End); - } - WriteZeroBytes(bw, ComputePadding(header.Ranges)); - - CheckAlignment(stream); - var nameBuffer = PackStrings(header.Names); - bw.Write(nameBuffer); - WriteZeroBytes(bw, ComputePadding(nameBuffer.LongLength)); - - CheckAlignment(stream); - return bw; - } - - /// - /// Enables a user to write a BFAST from an array of names, sizes, and a custom writing function. - /// The function will receive a BinaryWriter, the index of the buffer, and is expected to return the number of bytes written. - /// Simplifies the process of creating custom BinaryWriters, or writing extremely large arrays if necessary. - /// - public static void WriteBFast(this Stream stream, string[] bufferNames, long[] bufferSizes, BFastWriterFn onBuffer) - { - if (bufferSizes.Any(sz => sz < 0)) - throw new Exception("All buffer sizes must be zero or greater than zero"); - - if (bufferNames.Length != bufferSizes.Length) - throw new Exception($"The number of buffer names {bufferNames.Length} is not equal to the number of buffer sizes {bufferSizes}"); - - var header = CreateBFastHeader(bufferSizes, bufferNames); - stream.WriteBFast(header, bufferNames, bufferSizes, onBuffer); - } - - /// - /// Enables a user to write a BFAST from an array of names, sizes, and a custom writing function. - /// This is useful when the header is already computed. - /// - public static void WriteBFast(this Stream stream, BFastHeader header, string[] bufferNames, long[] bufferSizes, BFastWriterFn onBuffer) - { - stream.WriteBFastHeader(header); - CheckAlignment(stream); - stream.WriteBFastBody(header, bufferNames, bufferSizes, onBuffer); - } - - /// - /// Must be called after "WriteBFastHeader" - /// Enables a user to write the contents of a BFAST from an array of names, sizes, and a custom writing function. - /// The function will receive a BinaryWriter, the index of the buffer, and is expected to return the number of bytes written. - /// Simplifies the process of creating custom BinaryWriters, or writing extremely large arrays if necessary. 
- /// - public static void WriteBFastBody(this Stream stream, BFastHeader header, string[] bufferNames, long[] bufferSizes, BFastWriterFn onBuffer) - { - CheckAlignment(stream); - - if (bufferSizes.Any(sz => sz < 0)) - throw new Exception("All buffer sizes must be zero or greater than zero"); - - if (bufferNames.Length != bufferSizes.Length) - throw new Exception($"The number of buffer names {bufferNames.Length} is not equal to the number of buffer sizes {bufferSizes}"); - - // Then passes the binary writer for each buffer: checking that the correct amount of data was written. - for (var i = 0; i < bufferNames.Length; ++i) - { - CheckAlignment(stream); - var nBytes = bufferSizes[i]; - var pos = stream.CanSeek ? stream.Position : 0; - var nWrittenBytes = onBuffer(stream, i, bufferNames[i], nBytes); - if (stream.CanSeek) - { - if (stream.Position - pos != nWrittenBytes) - throw new NotImplementedException($"Buffer:{bufferNames[i]}. Stream movement {stream.Position - pos} does not reflect number of bytes claimed to be written {nWrittenBytes}"); - } - - if (nBytes != nWrittenBytes) - throw new Exception($"Number of bytes written {nWrittenBytes} not equal to expected bytes{nBytes}"); - var padding = ComputePadding(nBytes); - for (var j = 0; j < padding; ++j) - stream.WriteByte(0); - CheckAlignment(stream); - } - } - - public static unsafe long ByteSize(this T[] self) where T : unmanaged - => self.LongLength * sizeof(T); - - public static unsafe void WriteBFast(this Stream stream, IEnumerable<(string, T[])> buffers) where T : unmanaged - { - var xs = buffers.ToArray(); - BFastWriterFn writerFn = (writer, index, name, size) => - { - var initPosition = writer.Position; - writer.Write(xs[index].Item2); - return writer.Position - initPosition; - }; - - stream.WriteBFast( - xs.Select(b => b.Item1), - xs.Select(b => b.Item2.ByteSize()), - writerFn); - } - - public static void WriteBFast(this Stream stream, IEnumerable bufferNames, IEnumerable bufferSizes, BFastWriterFn onBuffer) - => WriteBFast(stream, bufferNames.ToArray(), bufferSizes.ToArray(), onBuffer); - - public static byte[] WriteBFastToBytes(IEnumerable bufferNames, IEnumerable bufferSizes, BFastWriterFn onBuffer) - { - // NOTE: we can't call "WriteBFast(Stream ...)" directly because it disposes the stream before we can convert it to an array - using (var stream = new MemoryStream()) - { - WriteBFast(stream, bufferNames.ToArray(), bufferSizes.ToArray(), onBuffer); - return stream.ToArray(); - } - } - - public static void WriteBFastToFile(string filePath, IEnumerable bufferNames, IEnumerable bufferSizes, BFastWriterFn onBuffer) - => File.OpenWrite(filePath).WriteBFast(bufferNames, bufferSizes, onBuffer); - - public static unsafe byte[] WriteBFastToBytes(this (string Name, T[] Data)[] buffers) where T : unmanaged - => WriteBFastToBytes( - buffers.Select(b => b.Name), - buffers.Select(b => b.Data.LongLength * sizeof(T)), - (writer, index, name, size) => - { - var initPosition = writer.Position; - writer.Write(buffers[index].Data); - return writer.Position - initPosition; - }); - - public static BFastBuilder ToBFastBuilder(this IEnumerable buffers) - => new BFastBuilder().Add(buffers); - } -} diff --git a/src/cs/bfast/Vim.BFast/BFast/BFast.cs b/src/cs/bfast/Vim.BFast/BFast/BFast.cs new file mode 100644 index 00000000..880251b3 --- /dev/null +++ b/src/cs/bfast/Vim.BFast/BFast/BFast.cs @@ -0,0 +1,197 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using Vim.BFastLib.Core; + +namespace Vim.BFastLib +{ + 
/// + /// Main API to read and write bfast content. + /// + public class BFast : IBFastNode + { + private readonly Dictionary _children = new Dictionary(); + + /// + /// Returns all buffer names in this bfast. + /// + public IEnumerable Entries => _children.Keys; + private IEnumerable<(string name, IWritable buffer)> Writables => _children.Select(kvp => (kvp.Key, kvp.Value as IWritable)); + + public BFast() { } + public BFast(Stream stream) + { + var nodes = GetBFastNodes(stream); + _children = nodes.ToDictionary(c => c.name, c => new CompressibleNode(c.value)); + } + + /// + /// Sets or overrides a bfast value at given name. + /// + public void SetBFast(string name, BFast bfast, bool compress = false) + { + if (bfast == null) + { + _children.Remove(name); + return; + } + + _children[name] = new CompressibleNode(bfast, compress); + } + + /// + /// Sets or overrides an enumerable value at given name. + /// + public void SetEnumerable(string name, Func> enumerable) where T : unmanaged + { + if (enumerable == null) + { + _children.Remove(name); + return; + } + _children[name] = new CompressibleNode(new BFastEnumerableNode(enumerable)); + } + + /// + /// Sets or overrides an array value at given name. + /// + public void SetArray(string name, T[] array) where T : unmanaged + { + if (array == null) + { + _children.Remove(name); + return; + } + _children[name] = new CompressibleNode(new BFastArrayNode(array)); + } + + /// + /// Tries to interpret the data at given name as a BFast and returns it. + /// Will throw if the data is not a bfast or if decompress doesnt match compression. + /// + public BFast GetBFast(string name, bool decompress = false) + { + var node = GetNode(name); + if (node == null) return null; + var n = node.GetNode(decompress); + return n.AsBFast(); + } + + /// + /// Tries to cast the data at given name as an enumerable of type T. + /// Will throw if the data cannot be cast. + /// + public IEnumerable GetEnumerable(string name) where T : unmanaged + { + if (!_children.ContainsKey(name)) return null; + return _children[name].GetNode().AsEnumerable(); + } + + /// + /// Tries to cast the data at given name as an array of type T. + /// Will throw if the data cannot be cast. + /// + public T[] GetArray(string name) where T : unmanaged + { + if (!_children.ContainsKey(name)) return null; + return _children[name].GetNode().AsArray(); + } + + private CompressibleNode GetNode(string name) + => _children.TryGetValue(name, out var value) ? value : null; + + /// + /// Remove the value at name so it won't be written. + /// + public void Remove(string name) + => _children.Remove(name); + + /// + /// Writes the current state to a stream using bfast format. + /// + public void Write(Stream stream) + { + var list = Writables.OrderBy(kvp => kvp.name).ToList(); + var strings = list.Select(n => n.name).ToArray(); + var buffers = list.Select(n => n.buffer).ToArray(); + var writer = new BFastWriter(strings, buffers); + writer.Write(stream); + } + + /// + /// Writes the current state to a new file using bfast format. 
+ /// + public void Write(string path) + { + using (var file = new FileStream(path, FileMode.Create)) + { + Write(file); + } + } + + BFast IBFastNode.AsBFast() + { + return this; + } + + T[] IBFastNode.AsArray() + { + using (var mem = ToMemoryStream()) + { + return mem.ReadArray(); + } + } + + IEnumerable IBFastNode.AsEnumerable() + { + return (this as IBFastNode).AsArray(); + } + + private static IEnumerable<(string name, BFastStreamNode value)> GetBFastNodes(Stream stream) + { + var offset = stream.Position; + var raw = BFastHeader.FromStream(stream); + foreach (var kvp in raw.Ranges) + { + var node = new BFastStreamNode( + stream, + kvp.Value.OffsetBy(offset) + ); + + yield return (kvp.Key, node); + } + } + + /// + /// Writes the current bfast to a new memory streams + /// The stream is returned at position 0. + /// + public MemoryStream ToMemoryStream() + { + var stream = new MemoryStream(); + Write(stream); + stream.Seek(0, SeekOrigin.Begin); + return stream; + } + + public override bool Equals(object obj) + { + if (obj is BFast) + { + return Equals((BFast)obj); + } + return false; + } + + public bool Equals(BFast other) + { + var a = (this as IBFastNode).AsEnumerable(); + var b = (other as IBFastNode).AsEnumerable(); + return a.SequenceEqual(b); + } + + public override int GetHashCode() => (this as IBFastNode).AsEnumerable().GetHashCode(); + } +} + diff --git a/src/cs/bfast/Vim.BFast/BFast/BFastArrayNode.cs b/src/cs/bfast/Vim.BFast/BFast/BFastArrayNode.cs new file mode 100644 index 00000000..cc6a06da --- /dev/null +++ b/src/cs/bfast/Vim.BFast/BFast/BFastArrayNode.cs @@ -0,0 +1,47 @@ +using System; +using System.Collections.Generic; +using System.IO; +using Vim.BFastLib.Core; + +namespace Vim.BFastLib +{ + public class BFastArrayNode : IBFastNode where TData : unmanaged + { + private readonly TData[] _array; + + public BFastArrayNode(TData[] array) + { + _array = array; + } + + public T[] AsArray() where T : unmanaged + { + if (typeof(T) == typeof(TData)) + { + return _array as T[]; + } + return _array.Cast(); + } + + public BFast AsBFast() + { + try + { + return new BFast(_array.ToMemoryStream()); + } + catch (Exception e) + { + throw new Exception("Array data is not a valid BFast.", e); + } + } + + public IEnumerable AsEnumerable() where T: unmanaged { + return (_array as IEnumerable).Cast(); + } + + public void Write(Stream stream) + { + stream.Write(_array); + } + } +} diff --git a/src/cs/bfast/Vim.BFast/BFast/BFastEnumerableNode.cs b/src/cs/bfast/Vim.BFast/BFast/BFastEnumerableNode.cs new file mode 100644 index 00000000..aa6f4eb7 --- /dev/null +++ b/src/cs/bfast/Vim.BFast/BFast/BFastEnumerableNode.cs @@ -0,0 +1,59 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using Vim.BFastLib.Core; + +namespace Vim.BFastLib +{ + public class BFastEnumerableNode : IBFastNode where TNode : unmanaged + { + // We use a Func to prevent cases where the given IEnumerable can't iterated twice. 
+ private readonly Func> _source; + + public BFastEnumerableNode(Func> source) + { + _source = source; + } + + public T[] AsArray() where T : unmanaged + { + if (typeof(T) == typeof(TNode)) + { + return _source().Cast().ToArray(); + } + else + { + return _source().Cast().ToArray(); + } + } + + public BFast AsBFast() + { + try + { + return new BFast(_source().ToMemoryStream()); + } + catch (Exception e) + { + throw new Exception("Enumerable data is not a valid BFast", e); + } + } + public IEnumerable AsEnumerable() where T : unmanaged + { + if (typeof(T) == typeof(TNode)) + { + return _source().Cast(); + } + else + { + return _source().Cast(); + } + } + + public void Write(Stream stream) + { + stream.Write(_source()); + } + } +} diff --git a/src/cs/bfast/Vim.BFast/BFast/BFastHelpers.cs b/src/cs/bfast/Vim.BFast/BFast/BFastHelpers.cs new file mode 100644 index 00000000..0aa910a0 --- /dev/null +++ b/src/cs/bfast/Vim.BFast/BFast/BFastHelpers.cs @@ -0,0 +1,46 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; + +namespace Vim.BFastLib +{ + public static class BFastHelpers + { + /// + /// Opens a file as a BFast, applies func to it and closes the file. + /// + public static T Read(string path, Func func) + { + using (var file = new FileStream(path, FileMode.Open)) + { + var bfast = new BFast(file); + return func(bfast); + } + } + } + + public static class BFastExtensions + { + /// + /// Returns an enumerable of all nodes of the BFast as NamedBuffers. + /// + public static IEnumerable ToNamedBuffers(this BFast bfast) + { + return bfast.Entries.Select(name => bfast.GetArray(name).ToNamedBuffer(name)); + } + + /// + /// Writes the current bfast to a new memory streams + /// The stream is returned at position 0. + /// + public static MemoryStream ToMemoryStream(this IBFastNode bfast) + { + var stream = new MemoryStream(); + bfast.Write(stream); + stream.Seek(0, SeekOrigin.Begin); + return stream; + } + } +} + diff --git a/src/cs/bfast/Vim.BFast/BFast/BFastStreamNode.cs b/src/cs/bfast/Vim.BFast/BFast/BFastStreamNode.cs new file mode 100644 index 00000000..457b8470 --- /dev/null +++ b/src/cs/bfast/Vim.BFast/BFast/BFastStreamNode.cs @@ -0,0 +1,50 @@ +using System; +using System.Collections.Generic; +using System.IO; +using Vim.BFastLib.Core; + +namespace Vim.BFastLib +{ + public class BFastStreamNode : IBFastNode + { + private readonly Stream _stream; + private readonly BFastRange _range; + + public BFastStreamNode(Stream stream, BFastRange range) + { + _stream = stream; + _range = range; + } + + public BFast AsBFast() + { + _stream.Seek(_range.Begin, SeekOrigin.Begin); + try + { + return new BFast(_stream); + } + catch (Exception e) + { + throw new Exception("Requested data is not a valid BFast or is compressed and needs decompression.", e); + } + } + + public T[] AsArray() where T : unmanaged + { + _stream.Seek(_range.Begin, SeekOrigin.Begin); + return _stream.ReadArrayBytes(_range.Count); + } + + public IEnumerable AsEnumerable() where T : unmanaged + { + _stream.Seek(_range.Begin, SeekOrigin.Begin); + return _stream.ReadEnumerableByte(_range.Count); + } + + public void Write(Stream stream) + { + _stream.Seek(_range.Begin, SeekOrigin.Begin); + _stream.CopySome(stream, (int)_range.Count); + } + } +} diff --git a/src/cs/bfast/Vim.BFast/BFast/CompressibleNode.cs b/src/cs/bfast/Vim.BFast/BFast/CompressibleNode.cs new file mode 100644 index 00000000..7a85eeba --- /dev/null +++ b/src/cs/bfast/Vim.BFast/BFast/CompressibleNode.cs @@ -0,0 +1,83 @@ +using System.IO; 
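A minimal usage sketch of the BFast container introduced above (assuming the Vim.BFastLib namespace declared in this patch and the generic SetArray/GetArray signatures implied by their where T : unmanaged constraints; buffer names and the file path are illustrative only):

    using System;
    using Vim.BFastLib;

    public static class BFastRoundTripExample
    {
        public static void Main()
        {
            // Build a container with two named buffers.
            var bfast = new BFast();
            bfast.SetArray("indices", new[] { 0, 1, 2 });
            bfast.SetArray("positions", new[] { 0f, 0f, 0f, 1f, 0f, 0f, 0f, 1f, 0f });

            // Write it to disk, then read one buffer back through BFastHelpers.
            bfast.Write("triangle.bfast");
            var indices = BFastHelpers.Read("triangle.bfast", b => b.GetArray<int>("indices"));
            Console.WriteLine(string.Join(",", indices)); // 0,1,2
        }
    }
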
+using System.IO.Compression; +using Vim.BFastLib.Core; + +namespace Vim.BFastLib +{ + /// + /// A wrapper around a IBFastNode that manages writing and reading using compression. + /// + public class CompressibleNode : IWritable + { + private readonly IBFastNode _node; + private readonly bool _compress; + + public CompressibleNode(IBFastNode node, bool compress = false) + { + _node = node; + _compress = compress; + } + + public void Write(Stream stream) + { + if (_compress) + { + WriteCompress(stream); + } + else + { + _node.Write(stream); + } + } + + /// + /// Returns the node after it is decompressed if needed. + /// Will throw if decompress argument doesnt match compression state. + /// + public IBFastNode GetNode(bool decompress = false) + { + if (decompress) + { + if (_node is BFastStreamNode) + { + return Decompress(); + } + if (!_compress) + { + throw new System.Exception("Cannot uncompress non-compressed data."); + } + return _node; + } + if(_compress) + { + throw new System.Exception("Compressed data needs to be decompressed."); + } + return _node; + } + + private IBFastNode Decompress() + { + // This memory stream is not disposed. But it's ok. + // It really is just an array under the hood. + // https://stackoverflow.com/questions/4274590/memorystream-close-or-memorystream-dispose + var output = new MemoryStream(); + + using (var input = _node.ToMemoryStream()) + using (var compress = new DeflateStream(input, CompressionMode.Decompress, true)) + { + compress.CopyTo(output); + output.Seek(0, SeekOrigin.Begin); + return new BFastStreamNode(output, output.FullRange()); + } + } + + private void WriteCompress(Stream stream) + { + using (var input = _node.ToMemoryStream()) + using (var compress = new DeflateStream(stream, CompressionMode.Compress, true)) + { + input.CopyTo(compress); + } + } + } +} diff --git a/src/cs/bfast/Vim.BFast/BFast/IBFastNode.cs b/src/cs/bfast/Vim.BFast/BFast/IBFastNode.cs new file mode 100644 index 00000000..5d27c59e --- /dev/null +++ b/src/cs/bfast/Vim.BFast/BFast/IBFastNode.cs @@ -0,0 +1,31 @@ +using System.Collections.Generic; +using System.IO; + +namespace Vim.BFastLib +{ + public interface IWritable + { + /// + /// Writes the current data to the given stream. + /// + void Write(Stream stream); + } + + public interface IBFastNode : IWritable + { + /// + /// Tries to cast node data as an array of T. + /// + T[] AsArray() where T : unmanaged; + + /// + /// Tries to cast node data as an enumerable of T. + /// AsEnumerable() where T : unmanaged; + + /// + /// Tries to interpret node data as a BFast. + /// - /// Represents a BFAST buffer whose stream can be read after calling Seek(). - /// - public class BFastBufferReader - { - /// - /// The seekable stream from which the buffer can be read. - /// - private readonly Stream _stream; - - /// - /// The start position of the buffer in the stream. - /// - private readonly long _startPosition; - - /// - /// The size in bytes of the buffer. - /// - public readonly long Size; - - /// - /// The buffer name. - /// - public readonly string Name; - - /// - /// Deconstruct operator - /// - public void Deconstruct(out string name, out long size) - => (name, size) = (Name, Size); - - /// - /// Constructor. - /// - public BFastBufferReader(Stream stream, string name, long startPosition, long size) - { - _stream = stream; - _startPosition = startPosition; - Size = size; - Name = name; - } - - /// - /// Seeks to the start of the BFAST buffer and returns the stream. 
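The compression path is explicit on both sides: a child written with compress: true must be read back with decompress: true, otherwise the node lookup or the subsequent BFast parse throws. A hedged sketch of that call pattern, with illustrative buffer names:

    using Vim.BFastLib;

    public static class CompressionExample
    {
        public static void Main()
        {
            // A child bfast whose bytes are deflate-compressed inside the parent.
            var child = new BFast();
            child.SetArray("payload", new byte[] { 1, 2, 3, 4 });

            var parent = new BFast();
            parent.SetBFast("child", child, compress: true);
            parent.Write("container.bfast");

            // The decompress flag must match how the node was written.
            var payload = BFastHelpers.Read("container.bfast",
                b => b.GetBFast("child", decompress: true).GetArray<byte>("payload"));
        }
    }
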
- /// - public Stream Seek() - { - _stream.Seek(_startPosition, SeekOrigin.Begin); - BFast.CheckAlignment(_stream); - return _stream; - } - } - - public static class BFastBufferReaderExtensions - { - /// - /// Reads the preamble, the ranges, and the names of the rest of the buffers. - /// - public static BFastHeader ReadBFastHeader(this Stream stream) - { - var r = new BFastHeader(); - var br = new BinaryReader(stream); - - if (stream.Length - stream.Position < sizeof(long) * 4) - throw new Exception("Stream too short"); - - r.Preamble = new BFastPreamble - { - Magic = br.ReadInt64(), - DataStart = br.ReadInt64(), - DataEnd = br.ReadInt64(), - NumArrays = br.ReadInt64(), - }.Validate(); - - r.Ranges = stream.ReadArray((int)r.Preamble.NumArrays); - - var padding = BFast.ComputePadding(r.Ranges); - br.ReadBytes((int)padding); - BFast.CheckAlignment(br.BaseStream); - - var nameBytes = br.ReadBytes((int)r.Ranges[0].Count); - r.Names = nameBytes.UnpackStrings(); - - padding = BFast.ComputePadding(r.Ranges[0].End); - br.ReadBytes((int)padding); - BFast.CheckAlignment(br.BaseStream); - - return r.Validate(); - } - - /// - /// Returns a list of BFAST buffer readers in the stream. - /// Assumes the stream's current position designates a BFAST header. - /// - public static IReadOnlyList GetBFastBufferReaders( - this Stream stream, - Func filterFn = null) - { - var result = new List(); - - using (var seekContext = new SeekContext(stream)) - { - // Read the header - var header = stream.ReadBFastHeader(); - BFast.CheckAlignment(stream); - - // Create a BFastBufferReader for each range. - for (var i = 1; i < header.Ranges.Length; ++i) - { - var range = header.Ranges[i]; - var name = header.Names[i - 1]; - - var startSeekPosition = seekContext.OriginalSeekPosition + range.Begin; - var size = range.End - range.Begin; - - var bfastBufferReader = new BFastBufferReader(seekContext.Stream, name, startSeekPosition, size); - - if (filterFn?.Invoke(bfastBufferReader) ?? true) - { - result.Add(bfastBufferReader); - } - } - } - - return result; - } - - /// - /// Returns a BFAST buffer reader corresponding to the given buffer name. - /// Returns null if the given buffer name was not found or if the buffer name is null or empty. - /// - public static BFastBufferReader GetBFastBufferReader(this Stream stream, string bufferName) - => string.IsNullOrEmpty(bufferName) - ? null - : stream.GetBFastBufferReaders(br => br.Name == bufferName).FirstOrDefault(); - - /// - /// Reads a BFAST stream and returns a list of labeled results. - /// - public static List<(string Label, T Result)> ReadBFast( - this Stream stream, - Func onBuffer) - { - var result = new List<(string, T)>(); - - foreach (var br in stream.GetBFastBufferReaders()) - { - var name = br.Name; - var s = br.Seek(); - result.Add((name, onBuffer(s, name, br.Size))); - } - - return result; - } - - /// - /// Returns a named buffer corresponding to the given bufferName. Returns null if no buffer name is found. - /// This call limits the buffers to 2GB. 
- /// - public static NamedBuffer ReadBFastBuffer(this Stream stream, string bufferName) where T : unmanaged - { - var br = stream.GetBFastBufferReader(bufferName); - if (br == null) - return null; - - var s = br.Seek(); - return s.ReadArray((int)br.Size).ToNamedBuffer(br.Name); - } - } -} diff --git a/src/cs/bfast/Vim.BFast/BFastBuilder.cs b/src/cs/bfast/Vim.BFast/BFastBuilder.cs deleted file mode 100644 index 7c39221b..00000000 --- a/src/cs/bfast/Vim.BFast/BFastBuilder.cs +++ /dev/null @@ -1,93 +0,0 @@ -using System.Collections.Generic; -using System.Diagnostics; -using System.IO; -using System.Linq; - -namespace Vim.BFast -{ - /// - /// Anything that can be added to a BFAST must have a size and write to a stream. - /// - public interface IBFastComponent - { - long GetSize(); - void Write(Stream stream); - } - - /// - /// A wrapper around a buffer so that it can be used as a BFAST component - /// - public class BufferAsBFastComponent : IBFastComponent - { - public BufferAsBFastComponent(IBuffer buffer) - => Buffer = buffer; - public IBuffer Buffer { get; } - public void Write(Stream stream) => stream.Write(Buffer); - public long GetSize() => Buffer.NumBytes(); - } - - /// - /// Used to build BFASTs incrementally that contain named buffers and/or other BFASTs. - /// - public class BFastBuilder : IBFastComponent - { - public BFastHeader Header { get; private set; } - public long GetSize() => GetOrComputeHeader().Preamble.DataEnd; - - public List<(string, IBFastComponent)> Children { get; } = new List<(string, IBFastComponent)>(); - - public void Write(Stream stream) - => stream.WriteBFast(GetOrComputeHeader(), - BufferNames().ToArray(), - BufferSizes().ToArray(), - OnBuffer); - - public void Write(string filePath) - { - using (var stream = File.OpenWrite(filePath)) - Write(stream); - } - - public long OnBuffer(Stream stream, int index, string name, long size) - { - var (bufferName, x) = Children[index]; - Debug.Assert(name == bufferName); - Debug.Assert(size != GetSize()); - Debug.Assert(size == x.GetSize()); - x.Write(stream); - return size; - } - - public BFastHeader GetOrComputeHeader() - => Header ?? (Header = BFast.CreateBFastHeader( - BufferSizes().ToArray(), BufferNames().ToArray())); - - private BFastBuilder _add(string name, IBFastComponent component) - { - Header = null; - Children.Add((name, component)); - return this; - } - - public BFastBuilder Add(string name, IBFastComponent component) - => _add(name, component); - - public BFastBuilder Add(string name, IBuffer buffer) - => _add(name, new BufferAsBFastComponent(buffer)); - - public BFastBuilder Add(INamedBuffer buffer) - => Add(buffer.Name, buffer); - - public BFastBuilder Add(IEnumerable buffers) - => buffers.Aggregate(this, (x, y) => x.Add(y)); - - public BFastBuilder Add(string name, IEnumerable buffers) - => Add(name, new BFastBuilder().Add(buffers)); - - public IEnumerable BufferNames() - => Children.Select(x => x.Item1); - - public IEnumerable BufferSizes() - => Children.Select(x => x.Item2.GetSize()); - } -} diff --git a/src/cs/bfast/Vim.BFast/BFastStructs.cs b/src/cs/bfast/Vim.BFast/BFastStructs.cs deleted file mode 100644 index 46cb0dde..00000000 --- a/src/cs/bfast/Vim.BFast/BFastStructs.cs +++ /dev/null @@ -1,113 +0,0 @@ -/* - BFAST - Binary Format for Array Streaming and Transmission - Copyright 2019, VIMaec LLC - Copyright 2018, Ara 3D, Inc. 
- Usage licensed under terms of MIT License - https://github.com/vimaec/bfast - - The BFAST format is a simple, generic, and efficient representation of - buffers (arrays of binary data) with optional names. - - It can be used in place of a zip when compression is not required, or when a simple protocol - is required for transmitting data to/from disk, between processes, or over a network. -*/ - -using System.Linq; -using System.Runtime.InteropServices; - -namespace Vim.BFast -{ - /// - /// This contains the BFAST data loaded or written from disk. - /// - public class BFastHeader - { - public BFastPreamble Preamble = new BFastPreamble(); - public BFastRange[] Ranges; - public string[] Names; - - public override bool Equals(object o) - => o is BFastHeader other && Equals(other); - - public bool Equals(BFastHeader other) - => Preamble.Equals(other.Preamble) && - Ranges.Length == other.Ranges.Length && - Ranges.Zip(other.Ranges, (x, y) => x.Equals(y)).All(x => x) && - Names.Zip(other.Names, (x, y) => x.Equals(y)).All(x => x); - } - - /// - /// Constants. - /// - public static class Constants - { - public const long Magic = 0xBFA5; - - // https://en.wikipedia.org/wiki/Endianness - public const long SameEndian = Magic; - public const long SwappedEndian = 0xA5BFL << 48; - - /// - /// Data arrays are aligned to 64 bytes, so that they can be cast directly to AVX-512 registers. - /// This is useful for efficiently working with floating point data. - /// - public const long ALIGNMENT = 64; - } - - /// - /// This tells us where a particular array begins and ends in relation to the beginning of a file. - /// * Begin must be less than or equal to End. - /// * Begin must be greater than or equal to DataStart - /// * End must be less than or equal to DataEnd - /// - [StructLayout(LayoutKind.Explicit, Pack = 8, Size = 16)] - public struct BFastRange - { - [FieldOffset(0)] public long Begin; - [FieldOffset(8)] public long End; - - public long Count => End - Begin; - public static long Size = 16; - - public override bool Equals(object x) - => x is BFastRange other && Equals(other); - - public bool Equals(BFastRange other) - => Begin == other.Begin && End == other.End; - } - - /// - /// The header contains a magic number, the begin and end indices of data, and the number of arrays. - /// - [StructLayout(LayoutKind.Explicit, Pack = 8, Size = 32)] - public struct BFastPreamble - { - [FieldOffset(0)] public long Magic; // Either Constants.SameEndian or Constants.SwappedEndian depending on endianess of writer compared to reader. - [FieldOffset(8)] public long DataStart; // <= file size and >= ArrayRangesEnd and >= FileHeader.ByteCount - [FieldOffset(16)] public long DataEnd; // >= DataStart and <= file size - [FieldOffset(24)] public long NumArrays; // number of arrays - - /// - /// This is where the array ranges are finished. - /// Must be less than or equal to DataStart. 
- /// Must be greater than or equal to FileHeader.ByteCount - /// - public long RangesEnd => Size + NumArrays * 16; - - /// - /// The size of the FileHeader structure - /// - public static long Size = 32; - - /// - /// Returns true if the producer of the BFast file has the same endianness as the current library - /// - public bool SameEndian => Magic == Constants.SameEndian; - - public override bool Equals(object x) - => x is BFastPreamble other && Equals(other); - - public bool Equals(BFastPreamble other) - => Magic == other.Magic && DataStart == other.DataStart && DataEnd == other.DataEnd && NumArrays == other.NumArrays; - }; -} diff --git a/src/cs/bfast/Vim.BFast/BufferExtensions.cs b/src/cs/bfast/Vim.BFast/Buffers/BufferExtensions.cs similarity index 97% rename from src/cs/bfast/Vim.BFast/BufferExtensions.cs rename to src/cs/bfast/Vim.BFast/Buffers/BufferExtensions.cs index d07b8eb5..6260c22b 100644 --- a/src/cs/bfast/Vim.BFast/BufferExtensions.cs +++ b/src/cs/bfast/Vim.BFast/Buffers/BufferExtensions.cs @@ -2,8 +2,9 @@ using System.Collections.Generic; using System.IO; using System.Linq; +using Vim.BFastLib.Core; -namespace Vim.BFast +namespace Vim.BFastLib { /// /// Helper functions for working with buffers @@ -75,7 +76,7 @@ public static long NumBytes(this IBuffer buffer) => (long)buffer.NumElements() * buffer.ElementSize; public static Buffer ReadBufferFromNumberOfBytes(this Stream stream, long numBytes) where T : unmanaged - => stream.ReadArrayFromNumberOfBytes(numBytes).ToBuffer(); + => stream.ReadArrayBytes(numBytes).ToBuffer(); public static Buffer ReadBuffer(this Stream stream, int numElements) where T : unmanaged => stream.ReadArray(numElements).ToBuffer(); diff --git a/src/cs/bfast/Vim.BFast/Buffers.cs b/src/cs/bfast/Vim.BFast/Buffers/Buffers.cs similarity index 97% rename from src/cs/bfast/Vim.BFast/Buffers.cs rename to src/cs/bfast/Vim.BFast/Buffers/Buffers.cs index 8039f273..9f4205d1 100644 --- a/src/cs/bfast/Vim.BFast/Buffers.cs +++ b/src/cs/bfast/Vim.BFast/Buffers/Buffers.cs @@ -1,7 +1,8 @@ using System; using System.IO; +using Vim.BFastLib.Core; -namespace Vim.BFast +namespace Vim.BFastLib { /// /// Provides an interface to an object that manages a potentially large array of elements all of the same unmanaged type. diff --git a/src/cs/bfast/Vim.BFast/Core/BFastConstants.cs b/src/cs/bfast/Vim.BFast/Core/BFastConstants.cs new file mode 100644 index 00000000..c6934caf --- /dev/null +++ b/src/cs/bfast/Vim.BFast/Core/BFastConstants.cs @@ -0,0 +1,15 @@ +namespace Vim.BFastLib.Core +{ + /// + /// Constants. + /// + public static class BFastConstants + { + public const long Magic = 0xBFA5; + + // https://en.wikipedia.org/wiki/Endianness + public const long SameEndian = Magic; + public const long SwappedEndian = 0xA5BFL << 48; + + } +} diff --git a/src/cs/bfast/Vim.BFast/Core/BFastHeader.cs b/src/cs/bfast/Vim.BFast/Core/BFastHeader.cs new file mode 100644 index 00000000..bc0e62c4 --- /dev/null +++ b/src/cs/bfast/Vim.BFast/Core/BFastHeader.cs @@ -0,0 +1,85 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; + +namespace Vim.BFastLib.Core +{ + public class BFastHeader + { + public readonly BFastPreamble Preamble; + public IReadOnlyDictionary Ranges => _ranges; + private readonly Dictionary _ranges; + + public BFastHeader(BFastPreamble preamble, Dictionary ranges) + { + Preamble = preamble; + _ranges = ranges; + } + + /// + /// Reads the preamble, the ranges, and the names of the rest of the buffers. 
+ /// + public static BFastHeader FromStream(Stream stream) + { + if (stream.Length - stream.Position < sizeof(long) * 4) + throw new Exception("Stream too short"); + + var offset = stream.Position; + + var preamble = stream.ReadValue(); + var ranges = stream.ReadArray((int)preamble.NumArrays); + + // In a lot of existing vim there is padding before the first buffer. + stream.Seek(offset + ranges[0].Begin, SeekOrigin.Begin); + var nameBytes = stream.ReadArray((int)ranges[0].Count); + var names = BFastStrings.Unpack(nameBytes); + + // Some old vim have duplicated buffers + // It is wrong but such is life. + MakeNamesUnique(names); + + var map = names + .Zip(ranges.Skip(1), (n, r) => (n, r)) + .ToDictionary(p => p.n, p => p.r); + + return new BFastHeader(preamble, map).Validate(); + } + + private static void MakeNamesUnique(string[] names) + { + var nameSet = new Dictionary(); + for (var i = 0; i < names.Length; i++) + { + if (nameSet.ContainsKey(names[i])) + { + var count = nameSet[names[i]]; + names[i] = names[i] + "_" + count; + Debug.WriteLine($"Duplicated Name {names[i]} in BFAST. Making name unique. This can result in unexpected behaviour."); + } + if (!nameSet.ContainsKey(names[i])) + { + nameSet.Add(names[i], i); + } + } + } + + public BFastHeader Validate() + { + Preamble.Validate(); + foreach (var range in _ranges.Values) + { + if (range.Begin < Preamble.DataStart) + { + throw new Exception("range.Begin must be larger than Data Start"); + } + if (range.End > Preamble.DataEnd) + { + throw new Exception("range.End must be smaller than Data End"); + } + } + return this; + } + } +} diff --git a/src/cs/bfast/Vim.BFast/Core/BFastPreamble.cs b/src/cs/bfast/Vim.BFast/Core/BFastPreamble.cs new file mode 100644 index 00000000..9aac30f8 --- /dev/null +++ b/src/cs/bfast/Vim.BFast/Core/BFastPreamble.cs @@ -0,0 +1,68 @@ +using System; +using System.Runtime.InteropServices; + +namespace Vim.BFastLib.Core +{ + /// + /// The header contains a magic number, the begin and end indices of data, and the number of arrays. + /// + [StructLayout(LayoutKind.Explicit, Pack = 8, Size = 32)] + public struct BFastPreamble + { + [FieldOffset(0)] public long Magic; // Either Constants.SameEndian or Constants.SwappedEndian depending on endianess of writer compared to reader. + [FieldOffset(8)] public long DataStart; // <= file size and >= ArrayRangesEnd and >= FileHeader.ByteCount + [FieldOffset(16)] public long DataEnd; // >= DataStart and <= file size + [FieldOffset(24)] public long NumArrays; // number of arrays + + /// + /// The size of the FileHeader structure + /// + public static long Size = 32; + + /// + /// Returns true if the producer of the BFast file has the same endianness as the current library + /// + public bool SameEndian => Magic == BFastConstants.SameEndian; + + public override bool Equals(object x) + => x is BFastPreamble other && Equals(other); + + public bool Equals(BFastPreamble other) + => Magic == other.Magic && DataStart == other.DataStart && DataEnd == other.DataEnd && NumArrays == other.NumArrays; + + + /// + /// Checks that the header values are sensible, and throws an exception otherwise. 
+ /// + public BFastPreamble Validate() + { + if (Magic != BFastConstants.SameEndian && Magic != BFastConstants.SwappedEndian) + throw new Exception($"Invalid magic number {Magic}"); + + if (DataStart < BFastPreamble.Size) + throw new Exception($"Data start {DataStart} cannot be before the file header size {BFastPreamble.Size}"); + + if (DataStart > DataEnd) + throw new Exception($"Data start {DataStart} cannot be after the data end {DataEnd}"); + + if (NumArrays < 0) + throw new Exception($"Number of arrays {NumArrays} should be at least one"); + + if (NumArrays > DataEnd) + throw new Exception($"Number of arrays {NumArrays} can't be more than the total size"); + + return this; + } + + public override int GetHashCode() + { + var hashCode = 275654494; + hashCode = hashCode * -1521134295 + Magic.GetHashCode(); + hashCode = hashCode * -1521134295 + DataStart.GetHashCode(); + hashCode = hashCode * -1521134295 + DataEnd.GetHashCode(); + hashCode = hashCode * -1521134295 + NumArrays.GetHashCode(); + hashCode = hashCode * -1521134295 + SameEndian.GetHashCode(); + return hashCode; + } + } +} diff --git a/src/cs/bfast/Vim.BFast/Core/BFastRange.cs b/src/cs/bfast/Vim.BFast/Core/BFastRange.cs new file mode 100644 index 00000000..e16c6d35 --- /dev/null +++ b/src/cs/bfast/Vim.BFast/Core/BFastRange.cs @@ -0,0 +1,44 @@ +using System.IO; +using System.Runtime.InteropServices; + +namespace Vim.BFastLib.Core +{ + /// + /// This tells us where a particular array begins and ends in relation to the beginning of a file. + /// * Begin must be less than or equal to End. + /// * Begin must be greater than or equal to DataStart + /// * End must be less than or equal to DataEnd + /// + [StructLayout(LayoutKind.Explicit, Pack = 8, Size = 16)] + public struct BFastRange + { + [FieldOffset(0)] public long Begin; + [FieldOffset(8)] public long End; + + public long Count => End - Begin; + public static long Size = 16; + + public override bool Equals(object x) + => x is BFastRange other && Equals(other); + + public bool Equals(BFastRange other) + => Begin == other.Begin && End == other.End; + + public BFastRange OffsetBy(long offset) + => new BFastRange() + { + Begin = Begin + offset, + End = End + offset + }; + } + + public static class BFastRangeExtensions + { + public static BFastRange FullRange(this Stream stream) + => new BFastRange() + { + Begin = 0, + End = stream.Length + }; + } +} diff --git a/src/cs/bfast/Vim.BFast/Core/BFastSection.cs b/src/cs/bfast/Vim.BFast/Core/BFastSection.cs new file mode 100644 index 00000000..fbf40b5c --- /dev/null +++ b/src/cs/bfast/Vim.BFast/Core/BFastSection.cs @@ -0,0 +1,103 @@ +using System; +using System.Diagnostics; +using System.IO; + +namespace Vim.BFastLib.Core +{ + /// + /// Represents a section of the bfast that will be written to at some point. + /// + public class BFastSection + { + public readonly long Origin; + public readonly long LocalStart; + public readonly long Length; + public long AbsoluteStart => LocalStart + Origin; + + public long AbsoluteEnd => AbsoluteStart + Length; + public long LocalEnd => LocalStart + Length; + public long End => AbsoluteStart + Length; + public BFastRange LocalRange => new BFastRange() + { + Begin = LocalStart, + End = LocalEnd + }; + + public BFastSection(long start, long length, long origin = 0) + { + LocalStart = start; + Length = length; + Origin = origin; + } + + /// + /// Returns a new range offset by given amount. 
+ /// + public BFastSection Offset(long offset) + { + return new BFastSection(AbsoluteStart, Length, offset); + } + + /// + /// Returns an equivalent section but with given origin. + /// + public BFastSection Rebase(long origin) + { + return new BFastSection(LocalStart - origin, Length, origin); + } + + /// + /// Returns a new range Starting where this one ends. + /// + /// Byte length of the section + public BFastSection Next(long length) + { + return new BFastSection(LocalEnd, length, Origin); + } + + /// + /// Writes 0 bytes over the whole section. + /// + public void Clear(Stream stream) + { + stream.Seek(AbsoluteStart, SeekOrigin.Begin); + for (var i = 0; i < Length; i++) + { + stream.WriteByte(0); + } + } + + /// + /// Writes given bytes in the section. Throws if bytes don't match section length. + /// + public void Write(Stream stream, byte[] bytes) + { + if (bytes.Length != Length) + throw new Exception("Data length not matching section length"); + + stream.Seek(AbsoluteStart, SeekOrigin.Begin); + stream.Write(bytes); + } + + /// + /// Writes given value in the section. Throws if value don't match section length. + /// + unsafe public void Write(Stream stream, T value) where T : unmanaged + { + Debug.Assert(sizeof(T) == Length); + stream.Seek(AbsoluteStart, SeekOrigin.Begin); + stream.WriteValue(value); + } + + /// + /// Writes given buffer and returns resulting section. + /// + public static BFastSection Write(Stream stream, IWritable buffer) + { + var start = stream.Position; + buffer.Write(stream); + return new BFastSection(start, stream.Position - start); + } + } +} + diff --git a/src/cs/bfast/Vim.BFast/Core/BFastStrings.cs b/src/cs/bfast/Vim.BFast/Core/BFastStrings.cs new file mode 100644 index 00000000..f2bc1e67 --- /dev/null +++ b/src/cs/bfast/Vim.BFast/Core/BFastStrings.cs @@ -0,0 +1,46 @@ +using System.Collections.Generic; +using System.Text; + +namespace Vim.BFastLib.Core +{ + public static class BFastStrings + { + /// + /// Converts a collection of strings, into a null-separated byte[] array + /// + public static byte[] Pack(IEnumerable strings) + { + var r = new List(); + foreach (var name in strings) + { + var bytes = Encoding.UTF8.GetBytes(name); + r.AddRange(bytes); + r.Add(0); + } + return r.ToArray(); + } + + + /// + /// Converts a byte[] array encoding a collection of strings separate by NULL into an array of string + /// + public static string[] Unpack(byte[] bytes) + { + var r = new List(); + if (bytes.Length == 0) + return r.ToArray(); + var prev = 0; + for (var i = 0; i < bytes.Length; ++i) + { + if (bytes[i] == 0) + { + r.Add(Encoding.UTF8.GetString(bytes, prev, i - prev)); + prev = i + 1; + } + } + if (prev < bytes.Length) + r.Add(Encoding.UTF8.GetString(bytes, prev, bytes.Length - prev)); + return r.ToArray(); + } + } +} diff --git a/src/cs/bfast/Vim.BFast/Core/BFastWriter.cs b/src/cs/bfast/Vim.BFast/Core/BFastWriter.cs new file mode 100644 index 00000000..f6b8455c --- /dev/null +++ b/src/cs/bfast/Vim.BFast/Core/BFastWriter.cs @@ -0,0 +1,100 @@ +using System.IO; + +namespace Vim.BFastLib.Core +{ + /// + /// Provide methods to write a buffer collection to a stream. 
+ /// + public class BFastWriter + { + private readonly string[] _bufferNames; + private readonly IWritable[] _buffers; + private readonly byte[] _packedNames; + + private readonly BFastSection _preamble; + private readonly BFastSection _ranges; + private readonly BFastSection _names; + + public long Start => _preamble.AbsoluteStart; + + public BFastWriter(string[] names, IWritable[] buffers, long offset = 0) + { + if(names.Length != buffers.Length) + { + throw new System.Exception("Names and buffer length must match"); + } + + _bufferNames = names; + _buffers = buffers; + _packedNames = BFastStrings.Pack(names); + + _preamble = new BFastSection(0, 32).Offset(offset); + _ranges = _preamble.Next((buffers.Length + 1) * 16); + _names = _ranges.Next(_packedNames.Length); + } + + /// + /// Writes to given stream, which may or may not be at Position 0. + /// + public unsafe void Write(Stream stream) + { + var offset = stream.Position; + if (Start != stream.Position) + { + // Offset sections if stream not at 0 + Offset(stream.Position).Write(stream); + return; + } + + // Leave space for preamble + _preamble.Clear(stream); + + // Leave space for ranges and write Names range. + _ranges.Clear(stream); + WriteRange(stream, _ranges.AbsoluteStart, 0, _names.LocalRange); + + // Write Names + _names.Write(stream, _packedNames); + + // Write each buffer and go back to write its Range. + var dataPointer = _names.End; + for (var i = 0; i < _buffers.Length; i++) + { + var section = WriteBuffer(stream, dataPointer, _buffers[i]).Rebase(offset); + WriteRange(stream, _ranges.AbsoluteStart, i + 1, section.LocalRange); + dataPointer = section.End; + } + + // Finally go back to write the preamble. + var preamble = new BFastPreamble() + { + Magic = BFastConstants.Magic, + NumArrays = _buffers.Length + 1, + DataStart = _ranges.End - offset, + DataEnd = dataPointer - offset, + }; + _preamble.Write(stream, preamble); + + // Move pointer back to end as the caller would expect + stream.Seek(dataPointer, SeekOrigin.Begin); + } + + private BFastWriter Offset(long offset) + { + return new BFastWriter(_bufferNames, _buffers, offset); + } + + private void WriteRange(Stream stream, long start, int index, BFastRange range) + { + stream.Seek(start + index * 16, SeekOrigin.Begin); + stream.WriteValue(range); + } + + private BFastSection WriteBuffer(Stream stream, long start, IWritable buffer) + { + stream.Seek(start, SeekOrigin.Begin); + return BFastSection.Write(stream, buffer); + } + } +} + diff --git a/src/cs/bfast/Vim.BFast/SeekContext.cs b/src/cs/bfast/Vim.BFast/SeekContext.cs deleted file mode 100644 index eea7102e..00000000 --- a/src/cs/bfast/Vim.BFast/SeekContext.cs +++ /dev/null @@ -1,41 +0,0 @@ -using System; -using System.IO; - -namespace Vim.BFast -{ - /// - /// Manages a Stream's seek pointer within a given `using` scope. - /// When the stream context is disposed, the seek position is reset - /// to the original position when the object was created. - /// - public sealed class SeekContext : IDisposable - { - /// - /// The seekable stream. - /// - public readonly Stream Stream; - - /// - /// The original stream seek position when the object was created. - /// - public readonly long OriginalSeekPosition; - - /// - /// Constructor. - /// - public SeekContext(Stream stream) - { - if (!stream.CanSeek) - throw new ArgumentException("Stream must be seekable."); - - Stream = stream; - OriginalSeekPosition = stream.Position; - } - - /// - /// Disposer. 
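BFastWriter lays a file out as a 32-byte preamble, then (N + 1) 16-byte ranges (the extra range points at the packed names buffer), then the names, then each data buffer in order. A small sketch driving it directly with the node types from this patch; BFast.Write is the normal entry point, so this is illustrative only:

    using System.IO;
    using Vim.BFastLib;
    using Vim.BFastLib.Core;

    public static class WriterLayoutExample
    {
        public static void Main()
        {
            var names = new[] { "a", "b" };
            var buffers = new IWritable[]
            {
                new BFastArrayNode<int>(new[] { 1, 2, 3 }),
                new BFastArrayNode<float>(new[] { 1f, 2f }),
            };

            using (var stream = new MemoryStream())
            {
                // Layout: preamble (32 B) + 3 ranges (48 B) + packed names "a\0b\0" (4 B),
                // followed by the int buffer (12 B) and the float buffer (8 B).
                new BFastWriter(names, buffers).Write(stream);
            }
        }
    }
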
- /// - public void Dispose() - => Stream.Seek(OriginalSeekPosition, SeekOrigin.Begin); - } -} diff --git a/src/cs/bfast/Vim.BFast/Unsafe/MemStreamHelpers.cs b/src/cs/bfast/Vim.BFast/Unsafe/MemStreamHelpers.cs new file mode 100644 index 00000000..40c43e76 --- /dev/null +++ b/src/cs/bfast/Vim.BFast/Unsafe/MemStreamHelpers.cs @@ -0,0 +1,38 @@ +using System.Collections.Generic; +using System.IO; + +namespace Vim.BFastLib.Core +{ + /// + /// This class would benefit from being in a generic utilities class, however, having it here allows BFAST to be a standalone without dependencies. + /// + public static class MemStreamHelpers + { + /// + /// Creates and fills a new Memory Stream from the given array. + /// The stream is returned at Position 0. + /// + public static unsafe MemoryStream ToMemoryStream(this T[] array) where T : unmanaged + { + var mem = new MemoryStream(); + mem.Write(array); + mem.Seek(0, SeekOrigin.Begin); + return mem; + } + + /// + /// Creates and fills a new Memory Stream from the given array. + /// The stream is returned at Position 0. + /// + public static unsafe MemoryStream ToMemoryStream(this IEnumerable enumerable) where T : unmanaged + { + var mem = new MemoryStream(); + foreach(var e in enumerable) + { + mem.WriteValue(e); + } + mem.Seek(0, SeekOrigin.Begin); + return mem; + } + } +} diff --git a/src/cs/bfast/Vim.BFast/Unsafe/UnsafeCast.cs b/src/cs/bfast/Vim.BFast/Unsafe/UnsafeCast.cs new file mode 100644 index 00000000..8ea2e41b --- /dev/null +++ b/src/cs/bfast/Vim.BFast/Unsafe/UnsafeCast.cs @@ -0,0 +1,66 @@ +using System.Collections.Generic; +using System.IO; + +namespace Vim.BFastLib.Core +{ + public static class UnsafeCast + { + /// + /// Cast an array of type TInput to an array of type TOutput + /// This is not a Cast but an actual byte level conversion. + /// + public static unsafe TResult[] Cast(this TInput[] array) + where TInput : unmanaged + where TResult : unmanaged + { + var count = array.Length * (sizeof(TInput) / sizeof(TResult)); + using (var mem = array.ToMemoryStream()) + { + return mem.ReadArray(count); + } + } + + /// + /// Converts an enumerable of type TInput to an enumerable of type TOutput + /// This is not a Cast but an actual byte level conversion. 
+ /// + public static IEnumerable Cast(this IEnumerable input, int chunksize = 1048576) + where TInput : unmanaged + where TResult : unmanaged + { + var stream = new MemoryStream(); + var array = new TResult[chunksize]; + var chunks = UnsafeHelpers.Chunkify(input, chunksize); + while (chunks.MoveNext()) + { + (var chunk, var size) = chunks.Current; + stream.Seek(0, SeekOrigin.Begin); + stream.Write(chunk, size); + var count = ReadArray(stream, array); + + if (count > 0) + { + for (var i = 0; i < count; i++) + { + yield return array[i]; + } + } + } + } + + // Function is extracted because unsafe code cannot appear in generator + private static unsafe int ReadArray(MemoryStream stream, T[] array) where T : unmanaged + { + var length = (int)stream.Position; + if (length < sizeof(T)) + { + return 0; + } + + var count = length / sizeof(T); + stream.Seek(0, SeekOrigin.Begin); + stream.ReadArray(array, count); + return count; + } + } +} diff --git a/src/cs/bfast/Vim.BFast/Unsafe/UnsafeHelpers.cs b/src/cs/bfast/Vim.BFast/Unsafe/UnsafeHelpers.cs new file mode 100644 index 00000000..dd3d3521 --- /dev/null +++ b/src/cs/bfast/Vim.BFast/Unsafe/UnsafeHelpers.cs @@ -0,0 +1,49 @@ +using System; +using System.Collections.Generic; +using System.IO; + +namespace Vim.BFastLib.Core +{ + public static class UnsafeHelpers + { + /// + /// Returns an enumeration of chunks of the given size from the given enumeration. + /// + public static IEnumerator<(T[], int)> Chunkify(IEnumerable source, int chunkSize = 1048576) + { + var chunk = new T[chunkSize]; + var index = 0; + + foreach (var item in source) + { + chunk[index++] = item; + + if (index == chunkSize) + { + yield return (chunk, index); + index = 0; + } + } + + if (index > 0) + { + yield return (chunk, index); + } + } + + /// + /// Copies given number of bytes from input stream to output stream. Optional buffer size. + /// + public static void CopySome(this Stream input, Stream output, int bytes, int bufferSize = 32768) + { + var buffer = new byte[bufferSize]; + int read; + while (bytes > 0 && + (read = input.Read(buffer, 0, Math.Min(buffer.Length, bytes))) > 0) + { + output.Write(buffer, 0, read); + bytes -= read; + } + } + } +} diff --git a/src/cs/bfast/Vim.BFast/Unsafe/UnsafeReadArray.cs b/src/cs/bfast/Vim.BFast/Unsafe/UnsafeReadArray.cs new file mode 100644 index 00000000..5d3cac1d --- /dev/null +++ b/src/cs/bfast/Vim.BFast/Unsafe/UnsafeReadArray.cs @@ -0,0 +1,93 @@ +using System; +using System.IO; + +namespace Vim.BFastLib.Core +{ + public static class UnsafeReadArray + { + /// + /// Reads and converts the next value of the stream. + /// + public static unsafe T ReadValue(this Stream stream) where T : unmanaged + { + T r; + stream.ReadBytesBuffered((byte*)&r, sizeof(T)); + return r; + } + + /// + /// Reads bytes until the end of the stream and converts them to T. + /// + public static unsafe T[] ReadArray(this Stream stream) where T : unmanaged + { + return ReadArrayBytes(stream, stream.Length - stream.Position); + } + + /// + /// Reads and converts the next ByteCount bytes from the stream and returns the result as a new array. + /// Will throw if ByteCount is not a multiple of sizeof T. 
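The Cast helpers reinterpret raw bytes rather than converting values, so the element count scales by the ratio of the two element sizes. A short illustration, assuming the Cast<TInput, TResult> generic signature implied by the constraints above:

    using Vim.BFastLib.Core;

    public static class UnsafeCastExample
    {
        public static void Main()
        {
            // Two ints (8 bytes) become eight bytes; the result is a little-endian byte view,
            // not a per-element numeric conversion.
            var ints = new[] { 1, 256 };
            var bytes = ints.Cast<int, byte>(); // 01 00 00 00 00 01 00 00
        }
    }

Note that the array overload computes its count as Length * (sizeof(TInput) / sizeof(TResult)), so a widening reinterpretation such as byte[] to int[] runs into integer division there.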
+ /// + public static unsafe T[] ReadArrayBytes(this Stream stream, long byteCount) where T : unmanaged + { + var count = byteCount / sizeof(T); + if (byteCount % sizeof(T) != 0) + throw new Exception($"The number of bytes {byteCount} is not divisible by the size of the type {sizeof(T)}"); + if (count >= int.MaxValue) + throw new Exception($"{count} exceeds the maximum number of items that can be read into an array {int.MaxValue}"); + return ReadArray(stream, (int)count); + } + + /// + /// Reads and converts the next Count value from the stream and returns the result as a new array. + /// + public static unsafe T[] ReadArray(this Stream stream, int count) where T : unmanaged + { + var r = new T[count]; + fixed (T* pDest = r) + { + var pBytes = (byte*)pDest; + stream.ReadBytesBuffered(pBytes, (long)count * sizeof(T)); + } + return r; + } + + /// + /// Reads and converts the next Count values from the stream and writes the result into the given array. + /// + public static unsafe void ReadArray(this Stream stream, T[] array, int count) where T : unmanaged + { + if (array.Length < count) + throw new Exception("Destination array needs to be larger than count."); + + fixed (T* pDest = array) + { + var pBytes = (byte*)pDest; + stream.ReadBytesBuffered(pBytes, (long)count * sizeof(T)); + } + } + + /// + /// Helper for reading arrays of arbitrary unmanaged types from a Stream, that might be over 2GB of size. + /// That said, in C#, you can never load more int.MaxValue numbers of items. + /// NOTE: Arrays are still limited to 2gb in size unless gcAllowVeryLargeObjects is set to true + /// in the runtime environment. + /// https://docs.microsoft.com/en-us/dotnet/api/system.array?redirectedfrom=MSDN&view=netframework-4.7.2#remarks + /// Alternatively, we could convert to .Net Core + /// + private static unsafe void ReadBytesBuffered(this Stream stream, byte* dest, long count, int bufferSize = 4096) + { + var buffer = new byte[bufferSize]; + int bytesRead; + fixed (byte* pBuffer = buffer) + { + while ((bytesRead = stream.Read(buffer, 0, (int)Math.Min(buffer.Length, count))) > 0) + { + if (dest != null) + Buffer.MemoryCopy(pBuffer, dest, count, bytesRead); + count -= bytesRead; + dest += bytesRead; + } + } + } + } +} diff --git a/src/cs/bfast/Vim.BFast/Unsafe/UnsafeReadEnumerable.cs b/src/cs/bfast/Vim.BFast/Unsafe/UnsafeReadEnumerable.cs new file mode 100644 index 00000000..2a4a1652 --- /dev/null +++ b/src/cs/bfast/Vim.BFast/Unsafe/UnsafeReadEnumerable.cs @@ -0,0 +1,70 @@ +using System; +using System.Collections.Generic; +using System.IO; + +namespace Vim.BFastLib.Core +{ + public static class UnsafeReadEnumerable + { + /// + /// Reads the next byteLength bytes from the stream and return the result as an enumerable of T + /// Throws if byteLength is not a multiple of T size. + /// + public static IEnumerable ReadEnumerableByte(this Stream stream, long byteLength, int bufferSize = 4096) where T : unmanaged + { + var count = GetTCount(byteLength); + return ReadEnumerable(stream, count, bufferSize); + } + + /// + /// Reads the next count values of T from the stream as an enumerable. 
+ /// + public static IEnumerable ReadEnumerable(this Stream stream, long count, int bufferSize = 4096) where T : unmanaged + { + var remaining = count; + var (array, buffer) = AllocBuffers(bufferSize); + + while (remaining > 0) + { + var toRead = (int)Math.Min(bufferSize, remaining); + var read = FillArray(stream, toRead, array, buffer); + + for (var i = 0; i < read; i++) + { + yield return array[i]; + } + remaining -= read; + } + } + + // Function is extracted because unsafe code cannot appear in generator + private static unsafe long GetTCount(long byteLength) where T : unmanaged + { + if (byteLength % sizeof(T) != 0) + { + throw new Exception("Byte length must be a multiple of T size."); + } + return byteLength / sizeof(T); + } + + // Function is extracted because unsafe code cannot appear in generator + private static unsafe (T[], byte[]) AllocBuffers(int count) where T : unmanaged + { + return (new T[count], new byte[count * sizeof(T)]); + } + + // Function is extracted because unsafe code cannot appear in generator + private static unsafe int FillArray(Stream stream, int count, T[] array, byte[] buffer) where T : unmanaged + { + fixed (T* pDestTyped = array) + fixed (byte* pBuffer = buffer) + { + var pDestBytes = (byte*)pDestTyped; + var toRead = Math.Min(buffer.Length, count * sizeof(T)); + var bytesRead = stream.Read(buffer, 0, toRead); + Buffer.MemoryCopy(pBuffer, pDestTyped, array.Length * sizeof(T), bytesRead); + return bytesRead / sizeof(T); + } + } + } +} diff --git a/src/cs/bfast/Vim.BFast/Unsafe/UnsafeWrite.cs b/src/cs/bfast/Vim.BFast/Unsafe/UnsafeWrite.cs new file mode 100644 index 00000000..a3d75978 --- /dev/null +++ b/src/cs/bfast/Vim.BFast/Unsafe/UnsafeWrite.cs @@ -0,0 +1,72 @@ +using System; +using System.Collections.Generic; +using System.IO; + +namespace Vim.BFastLib.Core +{ + public static class UnsafeWrite + { + /// + /// Converts given value to bytes and writes resulting bytes to the stream + /// + public static unsafe void WriteValue(this Stream stream, T x) where T : unmanaged + { + var p = &x; + stream.WriteBytesBuffered((byte*)p, sizeof(T)); + } + + /// + /// Converts values of the given array to bytes and writes the resulting bytes to the stream. + /// + public static unsafe void Write(this Stream stream, T[] xs) where T : unmanaged + { + Write(stream, xs, xs.LongLength); + } + + /// + /// Converts the first Count elements of an array to bytes and writes the resulting bytes to the stream. + /// + public static unsafe void Write(this Stream stream, T[] xs, long count) where T : unmanaged + { + fixed (T* p = xs) + { + stream.WriteBytesBuffered((byte*)p, count * sizeof(T)); + } + } + + /// + /// Converts and writes the elements of values to the given stream. + /// + public static unsafe void Write(this Stream stream, IEnumerable values) where T : unmanaged + { + var chunks = UnsafeHelpers.Chunkify(values); + while (chunks.MoveNext()) + { + var (chunk, size) = chunks.Current; + fixed (T* p = chunk) + { + stream.WriteBytesBuffered((byte*)p, size * sizeof(T)); + } + } + } + + /// + /// Writes an arbitrary large numbers of bytes to the stream. 
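The write and read extensions above move data through bounded intermediate buffers, so arbitrarily long sequences can be written and read without ever building one full-size array. A round-trip sketch over a MemoryStream (counts are illustrative):

    using System.IO;
    using System.Linq;
    using Vim.BFastLib.Core;

    public static class StreamingExample
    {
        public static void Main()
        {
            using (var stream = new MemoryStream())
            {
                // Write a lazily produced sequence through the chunked IEnumerable overload.
                stream.Write(Enumerable.Range(0, 100_000));

                // Read it back as a lazy enumerable; values are buffered 4096 elements at a time by default.
                stream.Seek(0, SeekOrigin.Begin);
                var sum = stream.ReadEnumerable<int>(100_000).Sum();
            }
        }
    }
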
+ /// + private static unsafe void WriteBytesBuffered(this Stream stream, byte* src, long count, int bufferSize = 4096) + { + var buffer = new byte[bufferSize]; + fixed (byte* pBuffer = buffer) + { + while (count > 0) + { + var toWrite = (int)Math.Min(count, buffer.Length); + Buffer.MemoryCopy(src, pBuffer, buffer.Length, toWrite); + stream.Write(buffer, 0, toWrite); + count -= toWrite; + src += toWrite; + } + } + } + } +} diff --git a/src/cs/bfast/Vim.BFast/UnsafeHelpers.cs b/src/cs/bfast/Vim.BFast/UnsafeHelpers.cs deleted file mode 100644 index 7d01a1de..00000000 --- a/src/cs/bfast/Vim.BFast/UnsafeHelpers.cs +++ /dev/null @@ -1,116 +0,0 @@ -using System; -using System.IO; - -namespace Vim.BFast -{ - /// - /// This class would benefit from being in a generic utilities class, however, having it here allows BFAST to be a standalone without dependencies. - /// - public static class UnsafeHelpers - { - /// - /// Helper for reading arbitrary unmanaged types from a Stream. - /// - public static unsafe void ReadBytesBuffered(this Stream stream, byte* dest, long count, int bufferSize = 4096) - { - var buffer = new byte[bufferSize]; - int bytesRead; - fixed (byte* pBuffer = buffer) - { - while ((bytesRead = stream.Read(buffer, 0, (int)Math.Min(buffer.Length, count))) > 0) - { - if (dest != null) - Buffer.MemoryCopy(pBuffer, dest, count, bytesRead); - count -= bytesRead; - dest += bytesRead; - } - } - } - - /// - /// Helper for writing arbitrary large numbers of bytes - /// - public static unsafe void WriteBytesBuffered(this Stream stream, byte* src, long count, int bufferSize = 4096) - { - var buffer = new byte[bufferSize]; - fixed (byte* pBuffer = buffer) - { - while (count > 0) - { - var toWrite = (int)Math.Min(count, buffer.Length); - Buffer.MemoryCopy(src, pBuffer, buffer.Length, toWrite); - stream.Write(buffer, 0, toWrite); - count -= toWrite; - src += toWrite; - } - } - } - - /// - /// Helper for reading arbitrary unmanaged types from a Stream. - /// - public static unsafe void Read(this Stream stream, T* dest) where T : unmanaged - => stream.ReadBytesBuffered((byte*)dest, sizeof(T)); - - /// - /// Helper for reading arrays of arbitrary unmanaged types from a Stream, that might be over 2GB of size. - /// That said, in C#, you can never load more int.MaxValue numbers of items. - /// NOTE: Arrays are still limited to 2gb in size unless gcAllowVeryLargeObjects is set to true - /// in the runtime environment. - /// https://docs.microsoft.com/en-us/dotnet/api/system.array?redirectedfrom=MSDN&view=netframework-4.7.2#remarks - /// Alternatively, we could convert to .Net Core - /// - public static unsafe T[] ReadArray(this Stream stream, int count) where T : unmanaged - { - var r = new T[count]; - fixed (T* pDest = r) - { - - var pBytes = (byte*)pDest; - stream.ReadBytesBuffered(pBytes, (long)count * sizeof(T)); - } - return r; - } - - /// - /// A wrapper for stream.Seek(numBytes, SeekOrigin.Current) to avoid allocating memory for unrecognized buffers. - /// - public static void SkipBytes(this Stream stream, long numBytes) - => stream.Seek(numBytes, SeekOrigin.Current); - - /// - /// Helper for reading arrays of arbitrary unmanaged types from a Stream, that might be over 2GB of size. - /// That said, in C#, you can never load more int.MaxValue numbers of items. 
- /// - public static unsafe T[] ReadArrayFromNumberOfBytes(this Stream stream, long numBytes) where T : unmanaged - { - var count = numBytes / sizeof(T); - if (numBytes % sizeof(T) != 0) - throw new Exception($"The number of bytes {numBytes} is not divisible by the size of the type {sizeof(T)}"); - if (count >= int.MaxValue) - throw new Exception($"{count} exceeds the maximum number of items that can be read into an array {int.MaxValue}"); - return stream.ReadArray((int)count); - } - - /// - /// Helper for writing arbitrary unmanaged types - /// - public static unsafe void WriteValue(this Stream stream, T x) where T : unmanaged - { - var p = &x; - stream.WriteBytesBuffered((byte*)p, sizeof(T)); - } - - - /// - /// Helper for writing arrays of unmanaged types - /// - public static unsafe void Write(this Stream stream, T[] xs) where T : unmanaged - { - fixed (T* p = xs) - { - stream.WriteBytesBuffered((byte*)p, xs.LongLength * sizeof(T)); - } - } - } -} diff --git a/src/cs/bfast/Vim.BFast/Vim.BFast.csproj b/src/cs/bfast/Vim.BFast/Vim.BFast.csproj index 3c6ef62c..16404976 100644 --- a/src/cs/bfast/Vim.BFast/Vim.BFast.csproj +++ b/src/cs/bfast/Vim.BFast/Vim.BFast.csproj @@ -2,36 +2,15 @@ netstandard2.0 - Vim.BFast - https://github.com/vimaec/bfast - https://github.com/vimaec/bfast - GitHub - true - license.txt - BFAST is a library for converting collections of named binary buffers to a single byte array for efficient cross-platform serialization and deserialization. - 1.5.0.0 - 1.5.0.0 - 1.5.0 true - true - true - true - snupkg - - - - - true - - - - + True + diff --git a/src/cs/bfast/Vim.BFast/readme.md b/src/cs/bfast/Vim.BFast/readme.md deleted file mode 100644 index 4525dbd1..00000000 --- a/src/cs/bfast/Vim.BFast/readme.md +++ /dev/null @@ -1,76 +0,0 @@ -# BFAST - -[](https://www.nuget.org/packages/Vim.Bfast) - -BFAST stands for the **B**inary **F**ormat for **A**rray **S**erialization and **T**ransmission. - -## Summary - -BFAST is an extremely efficent and simple alternative to ProtoBuf and FlatBuffers, whe data that follows the form -of a collection of name/value pairs where names are strings, and values are arrays of bytes. - -* Unlike JSON, XML, and YAML: BFAST is binary -* Unlike ProtoBuf and FlatBuffers: BFAST does not require a schema -* Unlike TAR: BFAST is simple and easy to implement -* Unlike ZIP: BFAST is not concerned with compression - -## Details - -BFAST is a data format for simple and efficient serialization and deserialization of -collections of named data buffers in a generic and cross-platform manner. -A BFAST data buffer is a named arrays of binary data (bytes) that is aligned on 64 byte boundaries. - -You would use tshe BFAST structure if you have a binary data to serialize that is mostly in the form of -long arrays. For example a set of files that you want to bundle together without wanting to bring in -the overhead of a compression library or re-implementing TAR. We use BFAST to encode mesh data and as -containers for other data. - -BFAST is intended to be a high-performance implementation that is fast enough to use as a purely -in-memory low-level data format, for representing arbitrary data such as meshes, point-clouds, image data, -collections of files, etc. and to scale to data that must be processed out of core. One of the design goals was to assure -that the format could be easily and efficiently decoded using JavaScript on most modern web-browsers -with very little code. 
- -BFAST is maintained by [VIMaec LLC](https://www.vimaec.com) and is licensed under the terms of -the MIT License. - -## Features - -* Very small implementation overhead -* Easy to implement efficient and conformant encoders and decoders in different languages -* Fast random access to any point in the data format with a minimum of disk accesses -* Format and endianess easily identified through a magic number at the front of the file -* Data arrays are 64 byte aligned to facilitate casting to SIMD data types (eg. AVX-512) -* Array offsets are encoded using 64-bit integers to supports large data sets -* Positions of data buffers are encoded in the beginning of the file -* Quick and easy to validate that a block is a valid BFAST encoding of data - -## Rationale - -Encoding containers of binary data is a deceptively simple problem that is easy to solve -in ways that have are not as efficient of generic as possible, or dependent on a particular platform. -We proposing a standardized solution to the problem in the form of a specification and sample -implementation that can allow software to easily encode low level binary data in a manner -that is both efficient and cross-platform. - -## Related Libraries - -The following is a partial list of commonly used binary data serialization formats: - -* [Protcol Buffers](https://developers.google.com/protocol-buffers/) -* [FlatBuffers](https://github.com/google/flatbuffers) -* [BINN](https://github.com/liteserver/binn/) -* [BSON](http://bsonspec.org/) -* [UBJSON](http://ubjson.org/) -* [MessagePack](https://msgpack.org/) -* [CBOR](https://cbor.io/) -* [TAR](https://www.gnu.org/software/tar/manual/html_node/Standard.html) - -For a more comprehensive list see: - -* https://en.wikipedia.org/wiki/Comparison_of_data-serialization_formats -* https://en.wikipedia.org/wiki/List_of_archive_formats - -# Specification - -See the file [spec.txt](spec.txt) for the official specification. diff --git a/src/cs/bfast/Vim.BFast/spec.txt b/src/cs/bfast/Vim.BFast/spec.txt deleted file mode 100644 index 1e432cf0..00000000 --- a/src/cs/bfast/Vim.BFast/spec.txt +++ /dev/null @@ -1,100 +0,0 @@ -BFAST Specification Proposal -June 23rd, 2020 - -Summary - -BFAST is a simple high-performance general purpose method of packing and unpacking named -arrays of binary data for serialization, deserialization, and transport in a cross-platform -and language agnostic manner. - -Introduction - -Of the myriad standardized formats for binary representation of structured data most -are very general and support nested data and schemas. - -One of the most common use case for structured binary data are collections of key/value -pairs where each key is a string and the value is an array of bytes. -An example of this are archive file formats like TAR and ZIP. Many 3D and image formats -data also follow this format. - -When data conforms to this kind of schema, then most binary formats (e.g CBOR, FlatBuffers, -ProtoBuf, Capnproto, etc.) introduce more overhead than is required, in terms of -memory, performance, and code. 
- -Often developers encountering this scenario will write an ad-hoc serialization/deserialization -protocol, which can lead to some problems: - - * Endianness may or may not be considered - * When memory mapping the structure, O(1) random access to buffers might not possible - * Programs written by other developers cannot easily deduce the layout of the format - * Alignment of the buffers might not be suitable for fast processing of the data arrays - * Testing and validation of the protocol might not be suitably robust - -The BFAST format is intended to provide a simple open standard that developers can use instead of -rolling their own. We have used in production code in a multitude of scenarios on a wide range of -devices and languages, and found it to be quite satsifactory in terms of efficiency and simplicity. - -Like TAR, BFAST is explicitly not a compression format, and can be easily compressed using -any compression algorithm. It is appropriate for use as an archive format, or as a container -format for other more complex file formats that provide additional semantic requirements -on the usage and naming of specific buffers. - -Features - - * The file format can be quickly detected reading the first 8 bytes - * Endianness of the writer can be detected from first 8 bytes, and compensated for by a reader - * Buffers names are stored in the first buffer, and can be quickly retrieved - * Each data-buffer is aligned on 64 bytes (for easy SIMD register alignment) - * Buffer begin/end positions are stored in the beginning of the file for fast seeking to data - * Buffer names can be arbitrarily long sequences of Utf-8 characters - * Buffers can be over 2GB in length - -Header Layout - - Preamble (bytes 0..32) - int64_t Magic; - int64_t DataStart; // The beginning position of the first data buffer, 64 byte aligned - int64_t DataEnd; // The end position of the last data buffer - int64_t Count; // Number of all buffers, including name buffer. There should always be at least 1 buffer. 
- - Ranges (bytes 32..32 + NumArray*16) - int64_t Begin; - int64_t End; - -Requirements - - * The first eight bytes are 0xBFA5 - * The file can be encoded as big-endian or little-endian - * If the endianness of the reader is different then the writer, the eight bytes will apprea as 0xA5BF << 48; - * Each DataBuffer starts on a 64 byte aligned buffer - * Buffer names are stored as null terminated Utf8 strings in the first buffer - * There are always exactly n-1 buffer names, where n is the number of buffers - * Buffer names can be empty (0 length strings) - * Multiple buffers can have the same name - * There is no padding between each range structure - * There is no padding between the header and the range - * The range struct is 16 bytes long - * The header struct is 32 bytes - * A data buffer could be empty, in which case, the begin and end is the same - * If a data buffer is empty, the the next data buffer will point to the data buffer beginning - * The DataStart can be computed by align(64, sizeof(Header) + sizeof(Range) * Header.NumArrays) - * Header.DataStart is equivalent to Range[0].Begin - * Header.DataEnd is equivalent to Range[N-1].End - -Related Information - - * [Zip]https://en.wikipedia.org/wiki/Zip_(file_format) - * [Protcol Buffers](https://developers.google.com/protocol-buffers/) - * [FlatBuffers](https://github.com/google/flatbuffers) - * [BINN](https://github.com/liteserver/binn/) - * [BSON](http://bsonspec.org/) - * [UBJSON](http://ubjson.org/) - * [MessagePack](https://msgpack.org/) - * [CBOR](https://cbor.io/) - * [TAR](https://www.gnu.org/software/tar/manual/html_node/Standard.html) - -For a more comprehensive list see: - - * [Comparison of Data Serialization Formats](https://en.wikipedia.org/wiki/Comparison_of_data-serialization_formats) - * [List of Archive Formats](https://en.wikipedia.org/wiki/List_of_archive_formats) - diff --git a/src/cs/g3d/Vim.G3d.AssimpWrapper/Vim.G3d.AssimpWrapper.csproj b/src/cs/g3d/Vim.G3d.AssimpWrapper/Vim.G3d.AssimpWrapper.csproj index afd6d77d..3b567b9f 100644 --- a/src/cs/g3d/Vim.G3d.AssimpWrapper/Vim.G3d.AssimpWrapper.csproj +++ b/src/cs/g3d/Vim.G3d.AssimpWrapper/Vim.G3d.AssimpWrapper.csproj @@ -12,5 +12,10 @@ - + + + True + + + diff --git a/src/cs/g3d/Vim.G3d.Tests/G3dTestUtils.cs b/src/cs/g3d/Vim.G3d.Tests/G3dTestUtils.cs deleted file mode 100644 index 0a28e014..00000000 --- a/src/cs/g3d/Vim.G3d.Tests/G3dTestUtils.cs +++ /dev/null @@ -1,137 +0,0 @@ -using Assimp; -using NUnit.Framework; -using System; -using System.Diagnostics; -using System.IO; -using System.Linq; -using Vim.G3d.AssimpWrapper; -using Vim.LinqArray; - -namespace Vim.G3d.Tests -{ - public static class G3dTestUtils - { - public static void OutputSceneStats(Scene scene) - => Console.WriteLine( -$@" #animations = {scene.AnimationCount} - #cameras = {scene.CameraCount} - #lights = {scene.LightCount} - #materials = {scene.MaterialCount} - #meshes = {scene.MeshCount} - #nodes = {scene.GetNodes().Count()} - #textures = {scene.TextureCount}"); - - // TODO: merge all of the meshes using the transform. - - public static void OutputMeshStats(Mesh mesh) - => Console.WriteLine( - $@" - mesh {mesh.Name} - #faces = {mesh.FaceCount} - #vertices = {mesh.VertexCount} - #normals = {mesh.Normals?.Count ?? 
0} - #texture coordinate chanels = {mesh.TextureCoordinateChannelCount} - #vertex color chanels = {mesh.VertexColorChannelCount} - #bones = {mesh.BoneCount} - #tangents = {mesh.Tangents?.Count} - #bitangents = {mesh.BiTangents?.Count}"); - - public static T TimeLoadingFile(string fileName, Func func) - { - var sw = new Stopwatch(); - sw.Start(); - try - { - return func(fileName); - } - finally - { - Console.WriteLine($"Time to open {Path.GetFileName(fileName)} is {sw.ElapsedMilliseconds}msec"); - } - } - - public static void OutputStats(G3D g) - { - //Console.WriteLine("Header"); - - Console.WriteLine($"# corners per faces {g.NumCornersPerFace} "); - Console.WriteLine($"# vertices = {g.NumVertices}"); - Console.WriteLine($"# faces = {g.NumFaces}"); - Console.WriteLine($"# subgeos = {g.NumMeshes}"); - Console.WriteLine($"# indices (corners/edges0 = {g.NumCorners}"); - Console.WriteLine($"# instances = {g.NumInstances}"); - Console.WriteLine($"Number of attributes = {g.Attributes.Count}"); - - foreach (var attr in g.Attributes.ToEnumerable()) - Console.WriteLine($"{attr.Name} #items={attr.ElementCount}"); - } - - public static void AssertSame(G3D g1, G3D g2) - { - Assert.AreEqual(g1.NumCornersPerFace, g2.NumCornersPerFace); - Assert.AreEqual(g1.NumFaces, g2.NumFaces); - Assert.AreEqual(g1.NumCorners, g2.NumCorners); - Assert.AreEqual(g1.NumVertices, g2.NumVertices); - Assert.AreEqual(g1.NumInstances, g2.NumInstances); - Assert.AreEqual(g1.NumMeshes, g2.NumMeshes); - Assert.AreEqual(g1.Attributes.Count, g2.Attributes.Count); - for (var i = 0; i < g1.Attributes.Count; ++i) - { - var attr1 = g1.Attributes[i]; - var attr2 = g2.Attributes[i]; - Assert.AreEqual(attr1.Name, attr2.Name); - Assert.AreEqual(attr1.GetByteSize(), attr2.GetByteSize()); - Assert.AreEqual(attr1.ElementCount, attr2.ElementCount); - } - } - - public static void AssertSame(Mesh m, G3D g) - { - Assert.AreEqual(m.FaceCount, g.NumFaces); - Assert.AreEqual(m.GetIndices(), g.Indices.ToArray()); - Assert.AreEqual(m.VertexCount, g.NumVertices); - } - - public static G3D CompareTiming(string fileName, string outputFolder) - { - using (var context = new AssimpContext()) - { - var scene = TimeLoadingFile(fileName, context.ImportFile); - var m = scene.Meshes[0]; - var g3d = m.ToG3D(); - AssertSame(m, g3d); - var outputFile = Path.Combine(outputFolder, Path.GetFileName(fileName) + ".g3d"); - g3d.Write(outputFile); - var r = TimeLoadingFile(outputFile, G3D.Read); - //OutputG3DStats(g3d); - AssertSame(g3d, r); - return r; - } - } - - public static string[] TestFiles = - { - @"models-nonbsd\3DS\jeep1.3ds", - @"models-nonbsd\3DS\mar_rifle.3ds", - @"models-nonbsd\dxf\rifle.dxf", - @"models-nonbsd\FBX\2013_ASCII\duck.fbx", - @"models-nonbsd\FBX\2013_ASCII\jeep1.fbx", - // Binary fails assimp import - //@"models-nonbsd\FBX\2013_BINARY\duck.fbx", - //@"models-nonbsd\FBX\2013_BINARY\jeep1.fbx", - // OBJ files were not checked in to the repo. 
- //@"models-nonbsd\OBJ\rifle.obj", - //@"models-nonbsd\OBJ\segment.obj", - @"models-nonbsd\PLY\ant-half.ply", - @"models\IFC\AC14-FZK-Haus.ifc", - @"models\PLY\Wuson.ply", - @"models\STL\Wuson.stl", - @"models\STL\Spider_ascii.stl", - @"models\STL\Spider_binary.stl", - @"models\glTF\CesiumMilkTruck\CesiumMilkTruck.gltf", - @"models\glTF2\2CylinderEngine-glTF-Binary\2CylinderEngine.glb", - @"models\DXF\wuson.dxf", - @"models\Collada\duck.dae", - }; - } -} diff --git a/src/cs/g3d/Vim.G3d.Tests/G3dTests.cs b/src/cs/g3d/Vim.G3d.Tests/G3dTests.cs index 363a020d..fa9f38ed 100644 --- a/src/cs/g3d/Vim.G3d.Tests/G3dTests.cs +++ b/src/cs/g3d/Vim.G3d.Tests/G3dTests.cs @@ -4,13 +4,14 @@ using System.Collections.Generic; using System.IO; using System.Linq; +using Vim.BFastLib; using Vim.G3d.AssimpWrapper; using Vim.LinqArray; using Vim.Math3d; namespace Vim.G3d.Tests { - [TestFixture, Ignore("Ignored until the new version is ready")] + [TestFixture] public static class G3dTests { public class FileLoadData @@ -37,6 +38,23 @@ public FileLoadData(string filePath) public static string RootFolder = Path.Combine(ProjectFolder, "..", "..", "..", ".."); public static string TestInputFolder = Path.Combine(RootFolder, "data", "g3d-test-data", "models"); public static string TestOutputFolder = Path.Combine(RootFolder, "data", "g3d-test-data", "output"); + + [SetUp] + public static void Setup() + { + if (!Directory.Exists(RootFolder)) + { + Directory.CreateDirectory(RootFolder); + } + if (!Directory.Exists(TestInputFolder)) + { + Directory.CreateDirectory(TestInputFolder); + } + if (!Directory.Exists(TestOutputFolder)) + { + Directory.CreateDirectory(TestOutputFolder); + } + } public static IEnumerable GetInputFiles() => Directory.GetFiles(TestInputFolder, "*.*", SearchOption.AllDirectories); @@ -66,16 +84,6 @@ public static void ValidateSameG3D(G3D g1, G3D g2) } } - [Test, Explicit("Use during debugging")] - public static void ReadG3DFiles() - { - foreach (var f in Directory.GetFiles(TestOutputFolder)) - { - var g3d = G3D.Read(f); - G3dTestUtils.OutputStats(g3d); - } - } - [Test] [Platform(Exclude = "Linux,Unix", Reason = "AssimpNet is failing to load its dependency on 'libdl.so'.")] public static void OpenAndConvertAssimpFiles() @@ -128,7 +136,7 @@ public static void OpenAndConvertAssimpFiles() f.G3DFile = new FileInfo(outputFilePath); f.MSecToSaveG3d = Util.GetMSecElapsed(() => - f.G3d.Write(outputFilePath)); + f.G3d.ToBFast().Write(outputFilePath)); } catch (Exception e) { @@ -215,8 +223,8 @@ public static void TriangleTest() .Add(materialIndices.ToIArray().ToFaceMaterialAttribute()) .ToG3D(); - var bytes = g3d.WriteToBytes(); - var g = G3D.Read(bytes); + var bfast = g3d.ToBFast(); + var g = G3D.Read(bfast); Assert.IsNotNull(g); @@ -233,84 +241,13 @@ public static void TriangleTest() } [Test] - public static void QuadAndCopyTest() + public static void UnexpectedAttributes_Are_Ignored() { - // Serialize a triangle g3d as bytes and read it back. 
- var vertices = new[] - { - new Vector3(0, 0, 0), - new Vector3(0, 1, 0), - new Vector3(0, 1, 1), - new Vector3(1, 1, 1) - }; - - var indices = new[] { 0, 1, 2, 3 }; - var materialIndices = new[] { 5 }; - - var g3d = new G3DBuilder() - .AddVertices(vertices.ToIArray()) - .AddIndices(indices.ToIArray()) - .Add(materialIndices.ToIArray().ToFaceMaterialAttribute()) - .ToG3D(); - - var bytes = g3d.WriteToBytes(); - var g = G3D.Read(bytes); - - Assert.IsNotNull(g); - - Assert.AreEqual(4, g.NumCornersPerFace); - Assert.AreEqual(4, g.NumVertices); - Assert.AreEqual(4, g.NumCorners); - Assert.AreEqual(1, g.NumFaces); - Assert.AreEqual(0, g.NumMeshes); - Assert.AreEqual(0, g.NumInstances); - - Assert.AreEqual(vertices, g.Vertices.ToArray()); - Assert.AreEqual(indices, g.Indices.ToArray()); - Assert.AreEqual(materialIndices, g.FaceMaterials.ToArray()); - - var g2 = g.TriangulateQuadMesh(); - - Assert.AreEqual(3, g2.NumCornersPerFace); - Assert.AreEqual(4, g2.NumVertices); - Assert.AreEqual(6, g2.NumCorners); - Assert.AreEqual(2, g2.NumFaces); - Assert.AreEqual(0, g2.NumMeshes); - Assert.AreEqual(0, g2.NumInstances); - - Assert.AreEqual(vertices, g2.GetAttributeDataPosition().ToArray()); - Assert.AreEqual(new[] { 0, 1, 2, 0, 2, 3 }, g2.GetAttributeDataIndex().ToArray()); - Assert.AreEqual(new[] { 5, 5 }, g2.GetAttributeDataFaceMaterial().ToArray()); - - g2 = g2.CopyFaces(1, 1); - - Assert.AreEqual(3, g2.NumCornersPerFace); - Assert.AreEqual(4, g2.NumVertices); - Assert.AreEqual(3, g2.NumCorners); - Assert.AreEqual(1, g2.NumFaces); - - Assert.AreEqual(vertices, g2.GetAttributeDataPosition().ToArray()); - Assert.AreEqual(new[] { 0, 2, 3 }, g2.GetAttributeDataIndex().ToArray()); - Assert.AreEqual(new[] { 5 }, g2.GetAttributeDataFaceMaterial().ToArray()); - } - - [Test] - public static void UnexpectedAttributeTest() - { - // This test validates that unrecognized g3d attributes are simply ignored in the deserialization process. - // - // "unexpected.g3d" was generated using the following code snippet. Note that the code was temporarily modified such - // that UNKNOWN mapped to a ulong data type value (8 bits): - // - // var g3d = new G3DBuilder() - // .Add(new GeometryAttribute(new int[] { 5 }.ToIArray(), AttributeDescriptor.Parse("g3d:instance:potato:0:int32:1"))) - // .Add(new GeometryAttribute(new ulong[] { 42 }.ToIArray(), AttributeDescriptor.Parse("g3d:instance:beep:0:UNKNOWN:1"))) - // .ToG3D(); - - var g = G3D.Read(Path.Combine(TestInputFolder, "unexpected.g3d")); + var bfast = new BFast(); + bfast.SetArray("g3d:instance:potato:0:int32:1", new int[] { 5 }); + var g = G3D.Read(bfast); var parsedInstanceAttrs = g.Attributes.Where(ga => ga.Descriptor.Association == Association.assoc_instance).ToArray(); - Assert.AreEqual(1, parsedInstanceAttrs.Length); // NOTE: we only expect one attribute (the one with the "potato" semantic) because UNKNOWN is currently ignored as a datatype. 
var parsedPotatoAttr = parsedInstanceAttrs.Single(ga => ga.Descriptor.Semantic == "potato"); Assert.AreEqual(new [] { 5 }, parsedPotatoAttr.AsType().Data.ToArray()); } @@ -332,15 +269,16 @@ public static void BigFileTest() var vertices = nVerts.Select(i => new Vector3(i, i, i)); var bldr = new G3DBuilder(); bldr.AddVertices(vertices); - var g3d = bldr.ToG3D(); - Assert.AreEqual(nVerts, g3d.NumVertices); - var tempFile = Path.Combine(Path.GetTempPath(), "bigfile.g3d"); - g3d.Write(tempFile); - var tmp = G3D.Read(tempFile); - ValidateSameG3D(g3d, tmp); + + var expectedG3d = bldr.ToG3D(); + Assert.AreEqual(nVerts, expectedG3d.NumVertices); + var bfast = expectedG3d.ToBFast(); + var resultG3d = G3D.Read(bfast); + + ValidateSameG3D(expectedG3d, resultG3d); } - [Test] + [Test, Explicit] [Platform(Exclude = "Linux,Unix", Reason = "AssimpNet is failing to load its dependency on 'libdl.so'.")] public static void TestWriters() { diff --git a/src/cs/g3d/Vim.G3d.Tests/Properties/Resources.Designer.cs b/src/cs/g3d/Vim.G3d.Tests/Properties/Resources.Designer.cs index 2f5f4dfd..dba826c3 100644 --- a/src/cs/g3d/Vim.G3d.Tests/Properties/Resources.Designer.cs +++ b/src/cs/g3d/Vim.G3d.Tests/Properties/Resources.Designer.cs @@ -19,7 +19,7 @@ namespace Vim.G3d.Tests.Properties { // class via a tool like ResGen or Visual Studio. // To add or remove a member, edit your .ResX file then rerun ResGen // with the /str option, or rebuild your VS project. - [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "16.0.0.0")] + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "17.0.0.0")] [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()] internal class Resources { @@ -61,7 +61,7 @@ internal Resources() { } /// - /// Looks up a localized string similar to C:\DEV\g3d\csharp\Vim.G3d.Tests\ + /// Looks up a localized string similar to C:\Users\Rober\Desktop\Vim\vim-format\src\cs\g3d\Vim.G3d.Tests\ ///. 
/// internal static string ProjDir { diff --git a/src/cs/g3d/Vim.G3d.Tests/Vim.G3d.Tests.csproj b/src/cs/g3d/Vim.G3d.Tests/Vim.G3d.Tests.csproj index 285c4437..ae2d33e9 100644 --- a/src/cs/g3d/Vim.G3d.Tests/Vim.G3d.Tests.csproj +++ b/src/cs/g3d/Vim.G3d.Tests/Vim.G3d.Tests.csproj @@ -14,6 +14,8 @@ + + @@ -34,4 +36,10 @@ + + + True + + + diff --git a/src/cs/g3d/Vim.G3d/G3D.cs b/src/cs/g3d/Vim.G3d/G3D.cs index 98aed23b..36e90ca7 100644 --- a/src/cs/g3d/Vim.G3d/G3D.cs +++ b/src/cs/g3d/Vim.G3d/G3D.cs @@ -7,11 +7,12 @@ Usage licensed under terms of MIT License using System; using System.Collections.Generic; -using System.Diagnostics; using System.IO; using System.Linq; using Vim.LinqArray; using Vim.Math3d; +using Vim.BFastLib; +using System.Diagnostics; namespace Vim.G3d { @@ -293,22 +294,52 @@ public Vector3 ComputeFaceNormal(int nFace) public static G3D Read(string filePath) { using (var stream = File.OpenRead(filePath)) - return stream.ReadG3d(); + { + var bfast = new BFast(stream); + return Read(bfast); + } } - public static G3D Read(Stream stream) - => stream.ReadG3d(); - public static G3D Read(byte[] bytes) + public static G3D Read(BFast bfast) { - using (var stream = new MemoryStream(bytes)) - return stream.ReadG3d(); + var header = G3dHeader.FromBytesOrDefault(bfast.GetArray("meta")); + var attributes = new List(); + foreach (var name in bfast.Entries) + { + if (name == "meta") continue; + var attribute = GetEmptyAttribute(name); + if (attribute == null) continue; + var a = attribute.Read(bfast); + attributes.Add(a); + } + + return new G3D(attributes, header); + } + private static GeometryAttribute GetEmptyAttribute(string name) + { + if (!AttributeDescriptor.TryParse(name, out var attributeDescriptor)) + { + Debug.WriteLine("G3D Error: Could not parse attribute " + name); + return null; + } + try + { + return attributeDescriptor.ToDefaultAttribute(0); + } + catch + { + Debug.WriteLine("G3D Error: Could not parse attribute " + name); + return null; + } } + public static G3D Create(params GeometryAttribute[] attributes) => new G3D(attributes); public static G3D Create(G3dHeader header, params GeometryAttribute[] attributes) => new G3D(attributes, header); + } } diff --git a/src/cs/g3d/Vim.G3d/G3dSerialization.cs b/src/cs/g3d/Vim.G3d/G3dSerialization.cs index ba7e59a7..4f47b27d 100644 --- a/src/cs/g3d/Vim.G3d/G3dSerialization.cs +++ b/src/cs/g3d/Vim.G3d/G3dSerialization.cs @@ -1,7 +1,8 @@ using System; using System.IO; -using System.Linq; -using Vim.BFast; +using Vim.BFastLib; +using Vim.LinqArray; +using System.Collections.Generic; namespace Vim.G3d { @@ -17,107 +18,18 @@ public static void WriteAttribute(Stream stream, GeometryAttribute attribute, st stream.Write(buffer); } - public static G3dWriter ToG3DWriter(this IGeometryAttributes self, G3dHeader? header = null) - => new G3dWriter(self, header); - - public static void Write(this IGeometryAttributes self, Stream stream, G3dHeader? header = null) - => self.ToG3DWriter(header).Write(stream); - - public static void Write(this IGeometryAttributes self, string filePath, G3dHeader? 
header = null) - { - using (var stream = File.OpenWrite(filePath)) - self.Write(stream, header); - } - - public static byte[] WriteToBytes(this IGeometryAttributes self) - { - using (var memoryStream = new MemoryStream()) - { - self.Write(memoryStream); - return memoryStream.ToArray(); - } - } - - public static bool TryReadHeader(Stream stream, long size, out G3dHeader outHeader) - { - var buffer = stream.ReadArray((int)size); - - if (buffer[0] == G3dHeader.MagicA && buffer[1] == G3dHeader.MagicB) - { - outHeader = G3dHeader.FromBytes(buffer); - return true; - } - else - { - outHeader = default; - return false; - } - } - - public static bool TryReadGeometryAttribute(Stream stream, string name, long size, out GeometryAttribute geometryAttribute) + public static BFast ToBFast(this IGeometryAttributes self, G3dHeader? header = null) { - geometryAttribute = null; - - bool ReadFailure() - { - // Update the seek head to consume the stream and return false. - stream.Seek((int)size, SeekOrigin.Current); - return false; - } - - if (!AttributeDescriptor.TryParse(name, out var attributeDescriptor)) - { - // Skip unknown attribute descriptors. - return ReadFailure(); - } - - // Populate a default attribute with the parsed attribute descriptor. - GeometryAttribute defaultAttribute; - try - { - defaultAttribute = attributeDescriptor.ToDefaultAttribute(0); - } - catch + var bfast = new BFast(); + bfast.SetArray("meta", (header ?? G3dHeader.Default).ToBytes()); + foreach(var attribute in self.Attributes.ToEnumerable()) { - // Eat the exception and return. - return ReadFailure(); + attribute.AddTo(bfast); } - - // Success; consume the stream. - geometryAttribute = defaultAttribute.Read(stream, size); - return true; + return bfast; } - public static G3D ReadG3d(this Stream stream, Func renameFunc = null) - { - var header = G3dHeader.Default; - - GeometryAttribute ReadG3dSegment(Stream s2, string name, long size) - { - name = renameFunc?.Invoke(name) ?? name; - - // Check for the G3dHeader - if (name == "meta" && size == 8) - { - if (TryReadHeader(s2, size, out var outHeader)) - { - // Assign to the header variable in the closure. - header = outHeader; - } - return null; - } - else - { - return TryReadGeometryAttribute(s2, name, size, out var geometryAttribute) - ? geometryAttribute - : null; - } - - } - - var results = stream.ReadBFast(ReadG3dSegment).Select(r => r.Item2); - return new G3D(results.Where(x => x != null), header); - } + } } diff --git a/src/cs/g3d/Vim.G3d/G3dWriter.cs b/src/cs/g3d/Vim.G3d/G3dWriter.cs deleted file mode 100644 index 0d3a9ca8..00000000 --- a/src/cs/g3d/Vim.G3d/G3dWriter.cs +++ /dev/null @@ -1,47 +0,0 @@ -using System.IO; -using System.Linq; -using Vim.BFast; -using Vim.LinqArray; - -namespace Vim.G3d -{ - public static partial class G3DExtension - { - /// - /// This is a helper class for writing G3Ds - /// - public class G3dWriter : IBFastComponent - { - public INamedBuffer Meta { get; } - public string[] Names { get; } - public long[] Sizes { get; } - BFastHeader Header { get; } - IGeometryAttributes Attributes { get; } - - public G3dWriter(IGeometryAttributes g, G3dHeader? header = null) - { - Attributes = g; - Meta = (header ?? 
G3dHeader.Default).ToBytes().ToNamedBuffer("meta"); - Names = new[] { Meta.Name }.Concat(g.Attributes.ToEnumerable().Select(attr => attr.Name)).ToArray(); - Sizes = new[] { Meta.NumBytes() }.Concat(g.Attributes.ToEnumerable().Select(attr => attr.GetByteSize())).ToArray(); - Header = BFast.BFast.CreateBFastHeader(Sizes, Names); - } - - public long GetSize() - => Header.Preamble.DataEnd; - - public void Write(Stream stream) - { - stream.WriteBFastHeader(Header); - stream.WriteBFastBody(Header, Names, Sizes, (_stream, index, name, size) => - { - if (index == 0) - _stream.Write(Meta); - else - WriteAttribute(_stream, Attributes.Attributes[index - 1], name, size); - return size; - }); - } - } - } -} diff --git a/src/cs/g3d/Vim.G3d/GeometryAttribute.cs b/src/cs/g3d/Vim.G3d/GeometryAttribute.cs index 221a576e..9b2030f9 100644 --- a/src/cs/g3d/Vim.G3d/GeometryAttribute.cs +++ b/src/cs/g3d/Vim.G3d/GeometryAttribute.cs @@ -2,9 +2,10 @@ using System.Collections.Generic; using System.IO; using System.Linq; -using Vim.BFast; using Vim.LinqArray; using Vim.Math3d; +using Vim.BFastLib; +using Vim.BFastLib.Core; namespace Vim.G3d { @@ -85,6 +86,13 @@ public GeometryAttribute AsType() where T : unmanaged /// public abstract GeometryAttribute Read(Stream stream, long byteCount); + /// + /// Loads the correct typed data from a BFastNext. + /// + public abstract GeometryAttribute Read(BFast bfast); + + public abstract void AddTo(BFast bfast); + /// /// Creates a new GeometryAttribute with the same data, but with a different index. Useful when constructing attributes /// @@ -220,6 +228,17 @@ public override GeometryAttribute Read(Stream stream, long byteCount) return new GeometryAttribute(data.ToIArray(), Descriptor); } + public override GeometryAttribute Read(BFast bfast) + { + var array = bfast.GetArray(Name); + return new GeometryAttribute(array.ToIArray(), Descriptor); + } + + public override void AddTo(BFast bfast) + { + bfast.SetArray(Name, Data.ToArray()); + } + public override GeometryAttribute SetIndex(int index) => index == Descriptor.Index ? 
this : new GeometryAttribute(Data, Descriptor.SetIndex(index)); } diff --git a/src/cs/g3d/Vim.G3d/Header.cs b/src/cs/g3d/Vim.G3d/Header.cs index c37140ce..60d4bb09 100644 --- a/src/cs/g3d/Vim.G3d/Header.cs +++ b/src/cs/g3d/Vim.G3d/Header.cs @@ -40,6 +40,18 @@ public static G3dHeader FromBytes(byte[] bytes) } .Validate(); + public static G3dHeader FromBytesOrDefault(byte[] bytes) + { + try + { + return FromBytes(bytes).Validate(); + } + catch (Exception) + { + return Default; + } + } + public static G3dHeader Default = new G3dHeader { diff --git a/src/cs/g3d/Vim.G3d/Vim.G3d.csproj b/src/cs/g3d/Vim.G3d/Vim.G3d.csproj index 7f4cdcb4..f3bb78c2 100644 --- a/src/cs/g3d/Vim.G3d/Vim.G3d.csproj +++ b/src/cs/g3d/Vim.G3d/Vim.G3d.csproj @@ -19,8 +19,9 @@ It can be easily and efficiently deserialized and rendered in different language - + True + @@ -48,9 +49,9 @@ It can be easily and efficiently deserialized and rendered in different language - - - + + + diff --git a/src/cs/g3d/Vim.G3dNext.CodeGen/CodeBuilder.cs b/src/cs/g3d/Vim.G3dNext.CodeGen/CodeBuilder.cs new file mode 100644 index 00000000..8dd3283a --- /dev/null +++ b/src/cs/g3d/Vim.G3dNext.CodeGen/CodeBuilder.cs @@ -0,0 +1,73 @@ +using System.Collections.Generic; +using System.Linq; +using System.Text; + +namespace Vim.G3dNext.CodeGen +{ + public class CodeBuilder + { + private int _indentCount; + private StringBuilder _sb = new StringBuilder(); + + public CodeBuilder AppendRaw(string line) + { + _sb.Append(new string(' ', _indentCount * 4)); + _sb.AppendLine(line); + return this; + } + + public CodeBuilder AppendLines(IEnumerable lines) + { + foreach(var l in lines) { + AppendLine(l); + } + return this; + } + + public CodeBuilder AppendLine(string line = "") + { + var openBraces = line.Count(c => c == '{'); + var closeBraces = line.Count(c => c == '}'); + + // Sometimes we have {} on the same line + if (openBraces == closeBraces) + { + openBraces = 0; + closeBraces = 0; + } + + _indentCount -= closeBraces; + _sb.Append(new string(' ', _indentCount * 4)); + _sb.AppendLine(line); + _indentCount += openBraces; + return this; + } + + public void Indent() + { + ++_indentCount; + } + + public void Unindent() + { + _indentCount = System.Math.Max(0, --_indentCount); + } + + public void IndentOneLine(string line) + { + Indent(); + AppendLine(line); + Unindent(); + } + + public void UnindentOneLine(string line) + { + Unindent(); + AppendLine(line); + Indent(); + } + + public override string ToString() + => _sb.ToString(); + } +} diff --git a/src/cs/g3d/Vim.G3dNext.CodeGen/Definitions.cs b/src/cs/g3d/Vim.G3dNext.CodeGen/Definitions.cs new file mode 100644 index 00000000..9eb47307 --- /dev/null +++ b/src/cs/g3d/Vim.G3dNext.CodeGen/Definitions.cs @@ -0,0 +1,62 @@ +using Vim.Math3d; + +namespace Vim.G3dNext.CodeGen +{ + internal class Definitions + { + public static G3dEntity[] GetEntities() + { + return new G3dEntity[] { vim, mesh, materials, scene }; + } + + public static G3dEntity vim = new G3dEntity("G3dVim") + .Index("Indices", "g3d:corner:index:0:int32:1", "Positions") + .Data("Positions", "g3d:vertex:position:0:float32:3") + .Data("InstanceTransforms", "g3d:instance:transform:0:float32:16") + .Index("InstanceParents", "g3d:instance:parent:0:int32:1", "InstanceTransforms") + .Data("InstanceFlags", "g3d:instance:flags:0:uint16:1") + .Index("InstanceMeshes", "g3d:instance:mesh:0:int32:1", "MeshSubmeshOffsets") + .Index("MeshSubmeshOffsets", "g3d:mesh:submeshoffset:0:int32:1", "SubmeshIndexOffsets") + .Index("SubmeshIndexOffsets", 
"g3d:submesh:indexoffset:0:int32:1", "Indices") + .Index("SubmeshMaterials", "g3d:submesh:material:0:int32:1", "MaterialColors") + .Data("MaterialColors", "g3d:material:color:0:float32:4") + .Data("MaterialGlossiness", "g3d:material:glossiness:0:float32:1") + .Data("MaterialSmoothness", "g3d:material:smoothness:0:float32:1") + .Data("ShapeVertices", "g3d:shapevertex:position:0:float32:3") + .Index("ShapeVertexOffsets", "g3d:shape:vertexoffset:0:int32:1", "ShapeVertices") + .Data("ShapeColors", "g3d:shape:color:0:float32:4") + .Data("ShapeWidths", "g3d:shape:width:0:float32:1"); + + public static G3dEntity scene = new G3dEntity("G3dScene") + .Data("ChunkCount", "g3d:chunk:count:0:int32:1") + .Data("InstanceMeshes", "g3d:instance:mesh:0:int32:1") + .Data("InstanceTransformData", "g3d:instance:transform:0:float32:16") + .Data("InstanceNodes", "g3d:instance:node:0:int32:1") + .Data("InstanceGroups", "g3d:instance:group:0:int32:1") + .Data("InstanceTags", "g3d:instance:tag:0:int64:1") + .Data("InstanceFlags", "g3d:instance:flags:0:uint16:1") + .Data("InstanceMins", "g3d:instance:min:0:float32:3") + .Data("InstanceMaxs", "g3d:instance:max:0:float32:3") + .Data("MeshChunks", "g3d:mesh:chunk:0:int32:1") + .Data("MeshChunkIndices", "g3d:mesh:chunkindex:0:int32:1") + .Data("MeshVertexCounts", "g3d:mesh:vertexcount:0:int32:1") + .Data("MeshIndexCounts", "g3d:mesh:indexcount:0:int32:1") + .Data("MeshOpaqueVertexCounts", "g3d:mesh:opaquevertexcount:0:int32:1") + .Data("MeshOpaqueIndexCounts", "g3d:mesh:opaqueindexcount:0:int32:1"); + + public static G3dEntity materials = new G3dEntity("G3dMaterials") + .Data("MaterialColors", "g3d:material:color:0:float32:4") + .Data("MaterialGlossiness", "g3d:material:glossiness:0:float32:1") + .Data("MaterialSmoothness", "g3d:material:smoothness:0:float32:1"); + + + public static G3dEntity mesh = new G3dEntity("G3dChunk") + .Data("MeshOpaqueSubmeshCounts", "g3d:mesh:opaquesubmeshcount:0:int32:1") + .Index("MeshSubmeshOffset", "g3d:mesh:submeshoffset:0:int32:1", "Indices") + .Index("SubmeshIndexOffsets", "g3d:submesh:indexoffset:0:int32:1", "Indices") + .Index("SubmeshVertexOffsets", "g3d:submesh:vertexoffset:0:int32:1", "Indices") + .Index("SubmeshMaterials", "g3d:submesh:material:0:int32:1") + .Data("Positions", "g3d:vertex:position:0:float32:3") + .Index("Indices", "g3d:corner:index:0:int32:1", "Positions"); + } +} diff --git a/src/cs/g3d/Vim.G3dNext.CodeGen/G3dBuffer.cs b/src/cs/g3d/Vim.G3dNext.CodeGen/G3dBuffer.cs new file mode 100644 index 00000000..97cf4ce1 --- /dev/null +++ b/src/cs/g3d/Vim.G3dNext.CodeGen/G3dBuffer.cs @@ -0,0 +1,48 @@ +using System; +using System.Diagnostics; + +namespace Vim.G3dNext.CodeGen +{ + public enum BufferType + { + Singleton, + Data, + Index + } + + /// + /// Holds all necessary data to generate the code for a g3dBuffer. 
+ /// + public class G3dBuffer + { + public readonly string MemberName; + public readonly string BufferName; + public readonly BufferType BufferType; + public readonly Type ValueType; + public readonly string IndexInto; + + public string ArgumentName => LowerFirst(MemberName); + + public G3dBuffer(string name, string bufferName, BufferType bufferType, Type valueType, string indexInto = null) + { + Debug.Assert(bufferName.ToLower() == bufferName, "G3dCodeGen: Expected buffer name to be lowercase."); + + MemberName = name; + BufferName = bufferName; + BufferType = bufferType; + ValueType = valueType; + IndexInto = indexInto; + } + + public static string LowerFirst(string input) + { + if (string.IsNullOrEmpty(input)) + { + return input; + } + + return char.ToLower(input[0]) + input.Substring(1); + } + } +} + diff --git a/src/cs/g3d/Vim.G3dNext.CodeGen/G3dCodeGen.cs b/src/cs/g3d/Vim.G3dNext.CodeGen/G3dCodeGen.cs new file mode 100644 index 00000000..5724b22f --- /dev/null +++ b/src/cs/g3d/Vim.G3dNext.CodeGen/G3dCodeGen.cs @@ -0,0 +1,134 @@ +using System; +using System.IO; +using System.Linq; + +namespace Vim.G3dNext.CodeGen +{ + public static class G3dCodeGen + { + public static void WriteDocument(string filePath) + { + try + { + var cb = new CodeBuilder(); + + cb.AppendLine("// AUTO-GENERATED FILE, DO NOT MODIFY."); + cb.AppendLine("// ReSharper disable All"); + cb.AppendLine("using Vim.BFastLib;"); + cb.AppendLine(); + cb.AppendLine("namespace Vim.G3dNext"); + cb.AppendLine("{"); + WriteEntities(cb); + cb.AppendLine("}"); + var content = cb.ToString(); + File.WriteAllText(filePath, content); + } + catch (Exception e) + { + Console.WriteLine(e); + throw; + } + } + + public static void WriteEntities(CodeBuilder cb) + { + foreach(var entity in Definitions.GetEntities()) + { + cb.AppendLine(EntityToCode(entity)); + } + } + + public static string EntityToCode(G3dEntity entity) + { + return $@"// Please provide an explicit implementation in another partial class file. 
+ public partial class {entity.ClassName} : ISetup + {{ + {string.Join("\n \t\t", entity.Buffers.Select(b => + { + return $"public {b.ValueType}[] {b.MemberName};"; + })).TrimStart()} + + public {entity.ClassName}( + {string.Join(", \n \t\t\t", entity.Buffers.Select(b => + { + return $"{b.ValueType}[] {b.ArgumentName}"; + })).TrimStart()} + ) + {{ + {string.Join("\n \t\t\t", entity.Buffers.Select(b => + { + return $"{b.MemberName} = {b.ArgumentName};"; + })).TrimStart()} + + (this as ISetup).Setup(); + }} + + public {entity.ClassName}(BFast bfast) + {{ + {string.Join("\n \t\t\t", entity.Buffers.Select(b => + { + return $"{b.MemberName} = bfast.GetArray<{b.ValueType}>(\"{b.BufferName}\");"; + })).TrimStart()} + + (this as ISetup).Setup(); + }} + + public BFast ToBFast() + {{ + var bfast = new BFast(); + + {string.Join("\n \t\t\t", entity.Buffers.Select(b => + { + return $"bfast.SetArray<{b.ValueType}>(\"{b.BufferName}\", {b.MemberName});"; + })).TrimStart()} + + return bfast; + }} + + public bool Equals({entity.ClassName} other ) + {{ + return {string.Join(" && \n \t\t\t", entity.Buffers.Select(b => + { + return $"BufferMethods.SafeEquals({b.MemberName}, other.{b.MemberName})"; + }))}; + }} + + public {entity.ClassName} Merge({entity.ClassName} other) + {{ + return new {entity.ClassName}( + {string.Join(", \n \t\t\t\t", entity.Buffers.Select(b => { + + switch (b.BufferType) + { + case BufferType.Singleton: + return $"{b.MemberName}"; + case BufferType.Data: + return $"BufferMethods.MergeData({b.MemberName}, other.{b.MemberName})"; + case BufferType.Index: + return $"BufferMethods.MergeIndex({b.MemberName}, other.{b.MemberName}, {b.IndexInto}?.Length ?? 0)"; + default: + return ""; + } + }))} + ); + }} + + public void Validate() + {{ + // Ensure all the indices are either -1 or within the bounds of the attributes they are indexing into. + {string.Join("\n \t\t\t", entity.Buffers.Select(c => + { + if (c.BufferType == BufferType.Index) + { + return $"BufferMethods.ValidateIndex({c.MemberName}, {c.IndexInto}, \"{c.MemberName}\");"; + } + return null; + }).Where(s => s != null))} + }} + }} +"; + } + } +} + + diff --git a/src/cs/g3d/Vim.G3dNext.CodeGen/G3dEntity.cs b/src/cs/g3d/Vim.G3dNext.CodeGen/G3dEntity.cs new file mode 100644 index 00000000..2be5f9e7 --- /dev/null +++ b/src/cs/g3d/Vim.G3dNext.CodeGen/G3dEntity.cs @@ -0,0 +1,34 @@ +using System.Collections.Generic; + +namespace Vim.G3dNext.CodeGen +{ + /// + /// Holds the data to generate the code for a g3d entity. 
+ /// + public class G3dEntity + { + public readonly string ClassName; + public readonly List Buffers = new List(); + + public G3dEntity(string name) + { + ClassName = name; + } + + public G3dEntity Index(string name, string bufferName, string indexInto = null) + { + if (indexInto == null) + { + return Data(name, bufferName); + } + Buffers.Add(new G3dBuffer(name, bufferName, BufferType.Index, typeof(int), indexInto)); + return this; + } + + public G3dEntity Data(string name, string bufferName, string indexInto = null) + { + Buffers.Add(new G3dBuffer(name, bufferName, BufferType.Data, typeof(T), indexInto)); + return this; + } + } +} diff --git a/src/cs/g3d/Vim.G3dNext.CodeGen/Program.cs b/src/cs/g3d/Vim.G3dNext.CodeGen/Program.cs new file mode 100644 index 00000000..5470fb2f --- /dev/null +++ b/src/cs/g3d/Vim.G3dNext.CodeGen/Program.cs @@ -0,0 +1,11 @@ +namespace Vim.G3dNext.CodeGen +{ + public static class Program + { + public static void Main(string[] args) + { + var file = args[0]; + G3dCodeGen.WriteDocument(file); + } + } +} diff --git a/src/cs/g3d/Vim.G3dNext.CodeGen/Vim.G3dNext.CodeGen.csproj b/src/cs/g3d/Vim.G3dNext.CodeGen/Vim.G3dNext.CodeGen.csproj new file mode 100644 index 00000000..41239fb1 --- /dev/null +++ b/src/cs/g3d/Vim.G3dNext.CodeGen/Vim.G3dNext.CodeGen.csproj @@ -0,0 +1,26 @@ + + + netstandard2.0;net6.0 + OnOutputUpdated + + + + Exe + + Vim.G3dNext.CodeGen.Program + + + + + + + + + + + + True + + + + \ No newline at end of file diff --git a/src/cs/g3d/Vim.G3dNext.Tests/Vim.G3dNext.Tests.csproj b/src/cs/g3d/Vim.G3dNext.Tests/Vim.G3dNext.Tests.csproj new file mode 100644 index 00000000..158c815b --- /dev/null +++ b/src/cs/g3d/Vim.G3dNext.Tests/Vim.G3dNext.Tests.csproj @@ -0,0 +1,37 @@ + + + netstandard2.0 + false + + + + + + + + + + + + + + + + + True + True + Resources.resx + + + + + + ResXFileCodeGenerator + Resources.Designer.cs + + + + + + + diff --git a/src/cs/g3d/Vim.G3dNext/BufferMethods.cs b/src/cs/g3d/Vim.G3dNext/BufferMethods.cs new file mode 100644 index 00000000..e4f74d70 --- /dev/null +++ b/src/cs/g3d/Vim.G3dNext/BufferMethods.cs @@ -0,0 +1,63 @@ +using System; +using System.IO; +using System.Linq; + +namespace Vim.G3dNext +{ + public static class BufferMethods + { + public static bool SafeEquals(T[] a, T[] b) + { + if(a == null && b == null) return true; + if(a == null) return false; + if(b == null) return false; + if(a.Length != b.Length) return false; + for(var i=0; i(T[] a, T[] b) + { + if(a == null && b == null) return null; + if(a == null) { return b.ToArray(); } + if(b == null) { return a.ToArray(); } + var result = new T[a.Length + b.Length]; + Array.Copy(a, result, a.Length); + Array.Copy(b, 0, result, a.Length, b.Length); + return result; + } + + public static int[] MergeIndex(int[] a, int[] b, int offset) + { + if (a == null && b == null) return null; + if (a == null && b == null) return null; + if (a == null) { return b.ToArray(); } + if (b == null) { return a.ToArray(); } + var result = new int[a.Length + b.Length]; + Array.Copy(a, result, a.Length); + for(var i=0; i= 0 + ? offset + b[i] + : -1; + } + return result; + } + + public static void ValidateIndex(int[] array, T[] into, string name) + { + if (array == null) return; + var max = into?.Length -1 ?? 
int.MaxValue; + for(var i=0; i < array.Length; i++) + { + if (array[i] < -1 || array[i] > max) + { + throw new InvalidDataException($"Invalid value {array[i]} in {name} buffer."); + } + } + } + } +} diff --git a/src/cs/g3d/Vim.G3dNext/Constants.cs b/src/cs/g3d/Vim.G3dNext/Constants.cs new file mode 100644 index 00000000..c8cc2282 --- /dev/null +++ b/src/cs/g3d/Vim.G3dNext/Constants.cs @@ -0,0 +1,47 @@ +namespace Vim.G3dNext +{ + /// + /// Defines method for additionnal setup after constructors in generated G3d classes. + /// + public interface ISetup + { + void Setup(); + } + + public enum MeshSection + { + Opaque, + Transparent, + All + } + + + public static class Utils { + public static bool SafeEqual(this T[] a, T[] b) + { + if (a == null && b == null) return true; + if (a == null) return false; + if(b == null) return false; + if(a.Length != b.Length) return false; + for(var i= 0; i < a.Length; i++) + { + if (!a[i].Equals(b[i])) return false; + } + return true; + } + } + + public static class Constants + { + public const string G3dPrefix = "g3d"; + public const string Separator = ":"; + public const char SeparatorChar = ':'; + + public const string MetaHeaderSegmentName = "meta"; + public const long MetaHeaderSegmentNumBytes = 8; // The header is 7 bytes + 1 bytes padding. + public const byte MetaHeaderMagicA = 0x63; + public const byte MetaHeaderMagicB = 0xD0; + + public static readonly string[] MetaHeaderSupportedUnits = { "mm", "cm", "m\0", "km", "in", "ft", "yd", "mi" }; + } +} diff --git a/src/cs/g3d/Vim.G3dNext/G3dChunk.cs b/src/cs/g3d/Vim.G3dNext/G3dChunk.cs new file mode 100644 index 00000000..8ad82bd3 --- /dev/null +++ b/src/cs/g3d/Vim.G3dNext/G3dChunk.cs @@ -0,0 +1,164 @@ +using System; +using Vim.Math3d; + +namespace Vim.G3dNext +{ + public partial class G3dChunk + { + void ISetup.Setup() + { + // empty + } + + public int GetSubmeshCount() => SubmeshIndexOffsets?.Length ?? 0; + + public int getMeshCount() => MeshSubmeshOffset?.Length ?? 0; + + /// + /// The total number of submeshes. 
+ /// + public int GetSubmeshCount(int mesh, MeshSection section) => + GetMeshSubmeshEnd(mesh, section) - GetMeshSubmeshStart(mesh, section); + + public int GetMeshSubmeshStart(int mesh, MeshSection section) + { + if (section == MeshSection.Opaque || section == MeshSection.All) + { + return MeshSubmeshOffset[mesh]; + } + + return MeshSubmeshOffset[mesh] + MeshOpaqueSubmeshCounts[mesh]; + } + + public int GetMeshSubmeshEnd(int mesh, MeshSection section) + { + if (section == MeshSection.Opaque) + { + return MeshSubmeshOffset[mesh] + MeshOpaqueSubmeshCounts[mesh]; + } + if(mesh + 1 >= MeshSubmeshOffset.Length) + { + return SubmeshIndexOffsets.Length; + } + return MeshSubmeshOffset[mesh + 1]; + } + + public int GetMeshIndexStart(int mesh, MeshSection section) + { + var sub = GetMeshSubmeshStart(mesh, section); + return GetSubmeshIndexStart(sub); + } + + public int GetMeshIndexEnd(int mesh, MeshSection section) + { + var sub = GetMeshSubmeshEnd(mesh, section); + return GetSubmeshIndexEnd(sub); + } + + public int GetMeshIndexCount(int mesh, MeshSection section) + { + return GetMeshIndexEnd(mesh, section) - GetMeshIndexStart(mesh, section); + } + + public AABox GetAABox(int mesh, Matrix4x4 matrix) + { + var start = GetMeshVertexStart(mesh, MeshSection.All); + var end = GetMeshVertexEnd(mesh, MeshSection.All); + var min = Positions[start].Transform(matrix); + var max = min; + for (var v = start + 1; v < end; v++) + { + var pos = Positions[v].Transform(matrix); + min = min.Min(pos); + max = max.Max(pos); + } + return new AABox(min, max); + } + + /// + /// The total number of indices. + /// + public int GetIndexCount() => Indices?.Length ?? 0; + + public int GetMeshVertexStart(int mesh, MeshSection section) + { + var sub = GetMeshSubmeshStart(mesh, section); + return GetSubmeshVertexStart(sub); + } + + public int GetMeshVertexEnd(int mesh, MeshSection section) + { + var sub = GetMeshSubmeshEnd(mesh, section) - 1; + return GetSubmeshVertexEnd(sub); + } + + public int GetMeshVertexCount(int mesh, MeshSection section) + { + return GetMeshVertexEnd(mesh, section) - GetMeshVertexStart(mesh, section); + } + + /// + /// The total number of vertices. + /// + public int GetVertexCount() => (Positions?.Length ?? 0); + + public int GetSubmeshIndexStart(int submesh) + { + return SubmeshIndexOffsets[submesh]; + } + + public int GetSubmeshIndexEnd(int submesh) + { + return submesh + 1 < GetSubmeshCount() + ? SubmeshIndexOffsets[submesh + 1] + : GetIndexCount(); + } + + public int GetSubmeshIndexCount(int submesh) + { + return GetSubmeshIndexEnd(submesh) - GetSubmeshIndexStart(submesh); + } + + public int GetSubmeshVertexStart(int submesh) + { + return SubmeshVertexOffsets[submesh]; + } + + public int GetSubmeshVertexEnd(int submesh) + { + return submesh + 1 < GetSubmeshCount() ? 
SubmeshVertexOffsets[submesh + 1] : GetVertexCount(); + } + + public int GetSubmeshVertexCount(int submesh) + { + return GetSubmeshVertexEnd(submesh) - GetSubmeshVertexStart(submesh); + } + + public AABox GetAABB() + { + var box = new AABox(Positions[0], Positions[0]); + for (var p = 1; p < Positions.Length; p++) + { + var pos = Positions[p]; + box = Expand(box, pos); + } + return box; + } + + static AABox Expand(AABox box, Vector3 pos) + { + return new AABox( + new Vector3( + Math.Min(box.Min.X, pos.X), + Math.Min(box.Min.Y, pos.Y), + Math.Min(box.Min.Z, pos.Z) + ), + new Vector3( + Math.Max(box.Max.X, pos.X), + Math.Max(box.Max.Y, pos.Y), + Math.Max(box.Max.Z, pos.Z) + ) + ); + } + } +} diff --git a/src/cs/g3d/Vim.G3dNext/G3dGenerated.g.cs b/src/cs/g3d/Vim.G3dNext/G3dGenerated.g.cs new file mode 100644 index 00000000..5d2a7b8d --- /dev/null +++ b/src/cs/g3d/Vim.G3dNext/G3dGenerated.g.cs @@ -0,0 +1,471 @@ +// AUTO-GENERATED FILE, DO NOT MODIFY. +// ReSharper disable All +using Vim.BFastLib; + +namespace Vim.G3dNext +{ + // Please provide an explicit implementation in another partial class file. + public partial class G3dVim : ISetup + { + public System.Int32[] Indices; + public Vim.Math3d.Vector3[] Positions; + public Vim.Math3d.Matrix4x4[] InstanceTransforms; + public System.Int32[] InstanceParents; + public System.UInt16[] InstanceFlags; + public System.Int32[] InstanceMeshes; + public System.Int32[] MeshSubmeshOffsets; + public System.Int32[] SubmeshIndexOffsets; + public System.Int32[] SubmeshMaterials; + public Vim.Math3d.Vector4[] MaterialColors; + public System.Single[] MaterialGlossiness; + public System.Single[] MaterialSmoothness; + public Vim.Math3d.Vector3[] ShapeVertices; + public System.Int32[] ShapeVertexOffsets; + public Vim.Math3d.Vector4[] ShapeColors; + public System.Single[] ShapeWidths; + + public G3dVim( + System.Int32[] indices, + Vim.Math3d.Vector3[] positions, + Vim.Math3d.Matrix4x4[] instanceTransforms, + System.Int32[] instanceParents, + System.UInt16[] instanceFlags, + System.Int32[] instanceMeshes, + System.Int32[] meshSubmeshOffsets, + System.Int32[] submeshIndexOffsets, + System.Int32[] submeshMaterials, + Vim.Math3d.Vector4[] materialColors, + System.Single[] materialGlossiness, + System.Single[] materialSmoothness, + Vim.Math3d.Vector3[] shapeVertices, + System.Int32[] shapeVertexOffsets, + Vim.Math3d.Vector4[] shapeColors, + System.Single[] shapeWidths + ) + { + Indices = indices; + Positions = positions; + InstanceTransforms = instanceTransforms; + InstanceParents = instanceParents; + InstanceFlags = instanceFlags; + InstanceMeshes = instanceMeshes; + MeshSubmeshOffsets = meshSubmeshOffsets; + SubmeshIndexOffsets = submeshIndexOffsets; + SubmeshMaterials = submeshMaterials; + MaterialColors = materialColors; + MaterialGlossiness = materialGlossiness; + MaterialSmoothness = materialSmoothness; + ShapeVertices = shapeVertices; + ShapeVertexOffsets = shapeVertexOffsets; + ShapeColors = shapeColors; + ShapeWidths = shapeWidths; + + (this as ISetup).Setup(); + } + + public G3dVim(BFast bfast) + { + Indices = bfast.GetArray("g3d:corner:index:0:int32:1"); + Positions = bfast.GetArray("g3d:vertex:position:0:float32:3"); + InstanceTransforms = bfast.GetArray("g3d:instance:transform:0:float32:16"); + InstanceParents = bfast.GetArray("g3d:instance:parent:0:int32:1"); + InstanceFlags = bfast.GetArray("g3d:instance:flags:0:uint16:1"); + InstanceMeshes = bfast.GetArray("g3d:instance:mesh:0:int32:1"); + MeshSubmeshOffsets = 
bfast.GetArray("g3d:mesh:submeshoffset:0:int32:1"); + SubmeshIndexOffsets = bfast.GetArray("g3d:submesh:indexoffset:0:int32:1"); + SubmeshMaterials = bfast.GetArray("g3d:submesh:material:0:int32:1"); + MaterialColors = bfast.GetArray("g3d:material:color:0:float32:4"); + MaterialGlossiness = bfast.GetArray("g3d:material:glossiness:0:float32:1"); + MaterialSmoothness = bfast.GetArray("g3d:material:smoothness:0:float32:1"); + ShapeVertices = bfast.GetArray("g3d:shapevertex:position:0:float32:3"); + ShapeVertexOffsets = bfast.GetArray("g3d:shape:vertexoffset:0:int32:1"); + ShapeColors = bfast.GetArray("g3d:shape:color:0:float32:4"); + ShapeWidths = bfast.GetArray("g3d:shape:width:0:float32:1"); + + (this as ISetup).Setup(); + } + + public BFast ToBFast() + { + var bfast = new BFast(); + + bfast.SetArray("g3d:corner:index:0:int32:1", Indices); + bfast.SetArray("g3d:vertex:position:0:float32:3", Positions); + bfast.SetArray("g3d:instance:transform:0:float32:16", InstanceTransforms); + bfast.SetArray("g3d:instance:parent:0:int32:1", InstanceParents); + bfast.SetArray("g3d:instance:flags:0:uint16:1", InstanceFlags); + bfast.SetArray("g3d:instance:mesh:0:int32:1", InstanceMeshes); + bfast.SetArray("g3d:mesh:submeshoffset:0:int32:1", MeshSubmeshOffsets); + bfast.SetArray("g3d:submesh:indexoffset:0:int32:1", SubmeshIndexOffsets); + bfast.SetArray("g3d:submesh:material:0:int32:1", SubmeshMaterials); + bfast.SetArray("g3d:material:color:0:float32:4", MaterialColors); + bfast.SetArray("g3d:material:glossiness:0:float32:1", MaterialGlossiness); + bfast.SetArray("g3d:material:smoothness:0:float32:1", MaterialSmoothness); + bfast.SetArray("g3d:shapevertex:position:0:float32:3", ShapeVertices); + bfast.SetArray("g3d:shape:vertexoffset:0:int32:1", ShapeVertexOffsets); + bfast.SetArray("g3d:shape:color:0:float32:4", ShapeColors); + bfast.SetArray("g3d:shape:width:0:float32:1", ShapeWidths); + + return bfast; + } + + public bool Equals(G3dVim other ) + { + return BufferMethods.SafeEquals(Indices, other.Indices) && + BufferMethods.SafeEquals(Positions, other.Positions) && + BufferMethods.SafeEquals(InstanceTransforms, other.InstanceTransforms) && + BufferMethods.SafeEquals(InstanceParents, other.InstanceParents) && + BufferMethods.SafeEquals(InstanceFlags, other.InstanceFlags) && + BufferMethods.SafeEquals(InstanceMeshes, other.InstanceMeshes) && + BufferMethods.SafeEquals(MeshSubmeshOffsets, other.MeshSubmeshOffsets) && + BufferMethods.SafeEquals(SubmeshIndexOffsets, other.SubmeshIndexOffsets) && + BufferMethods.SafeEquals(SubmeshMaterials, other.SubmeshMaterials) && + BufferMethods.SafeEquals(MaterialColors, other.MaterialColors) && + BufferMethods.SafeEquals(MaterialGlossiness, other.MaterialGlossiness) && + BufferMethods.SafeEquals(MaterialSmoothness, other.MaterialSmoothness) && + BufferMethods.SafeEquals(ShapeVertices, other.ShapeVertices) && + BufferMethods.SafeEquals(ShapeVertexOffsets, other.ShapeVertexOffsets) && + BufferMethods.SafeEquals(ShapeColors, other.ShapeColors) && + BufferMethods.SafeEquals(ShapeWidths, other.ShapeWidths); + } + + public G3dVim Merge(G3dVim other) + { + return new G3dVim( + BufferMethods.MergeIndex(Indices, other.Indices, Positions?.Length ?? 0), + BufferMethods.MergeData(Positions, other.Positions), + BufferMethods.MergeData(InstanceTransforms, other.InstanceTransforms), + BufferMethods.MergeIndex(InstanceParents, other.InstanceParents, InstanceTransforms?.Length ?? 
0), + BufferMethods.MergeData(InstanceFlags, other.InstanceFlags), + BufferMethods.MergeIndex(InstanceMeshes, other.InstanceMeshes, MeshSubmeshOffsets?.Length ?? 0), + BufferMethods.MergeIndex(MeshSubmeshOffsets, other.MeshSubmeshOffsets, SubmeshIndexOffsets?.Length ?? 0), + BufferMethods.MergeIndex(SubmeshIndexOffsets, other.SubmeshIndexOffsets, Indices?.Length ?? 0), + BufferMethods.MergeIndex(SubmeshMaterials, other.SubmeshMaterials, MaterialColors?.Length ?? 0), + BufferMethods.MergeData(MaterialColors, other.MaterialColors), + BufferMethods.MergeData(MaterialGlossiness, other.MaterialGlossiness), + BufferMethods.MergeData(MaterialSmoothness, other.MaterialSmoothness), + BufferMethods.MergeData(ShapeVertices, other.ShapeVertices), + BufferMethods.MergeIndex(ShapeVertexOffsets, other.ShapeVertexOffsets, ShapeVertices?.Length ?? 0), + BufferMethods.MergeData(ShapeColors, other.ShapeColors), + BufferMethods.MergeData(ShapeWidths, other.ShapeWidths) + ); + } + + public void Validate() + { + // Ensure all the indices are either -1 or within the bounds of the attributes they are indexing into. + BufferMethods.ValidateIndex(Indices, Positions, "Indices"); + BufferMethods.ValidateIndex(InstanceParents, InstanceTransforms, "InstanceParents"); + BufferMethods.ValidateIndex(InstanceMeshes, MeshSubmeshOffsets, "InstanceMeshes"); + BufferMethods.ValidateIndex(MeshSubmeshOffsets, SubmeshIndexOffsets, "MeshSubmeshOffsets"); + BufferMethods.ValidateIndex(SubmeshIndexOffsets, Indices, "SubmeshIndexOffsets"); + BufferMethods.ValidateIndex(SubmeshMaterials, MaterialColors, "SubmeshMaterials"); + BufferMethods.ValidateIndex(ShapeVertexOffsets, ShapeVertices, "ShapeVertexOffsets"); + } + } + + // Please provide an explicit implementation in another partial class file. 
+ public partial class G3dChunk : ISetup + { + public System.Int32[] MeshOpaqueSubmeshCounts; + public System.Int32[] MeshSubmeshOffset; + public System.Int32[] SubmeshIndexOffsets; + public System.Int32[] SubmeshVertexOffsets; + public System.Int32[] SubmeshMaterials; + public Vim.Math3d.Vector3[] Positions; + public System.Int32[] Indices; + + public G3dChunk( + System.Int32[] meshOpaqueSubmeshCounts, + System.Int32[] meshSubmeshOffset, + System.Int32[] submeshIndexOffsets, + System.Int32[] submeshVertexOffsets, + System.Int32[] submeshMaterials, + Vim.Math3d.Vector3[] positions, + System.Int32[] indices + ) + { + MeshOpaqueSubmeshCounts = meshOpaqueSubmeshCounts; + MeshSubmeshOffset = meshSubmeshOffset; + SubmeshIndexOffsets = submeshIndexOffsets; + SubmeshVertexOffsets = submeshVertexOffsets; + SubmeshMaterials = submeshMaterials; + Positions = positions; + Indices = indices; + + (this as ISetup).Setup(); + } + + public G3dChunk(BFast bfast) + { + MeshOpaqueSubmeshCounts = bfast.GetArray("g3d:mesh:opaquesubmeshcount:0:int32:1"); + MeshSubmeshOffset = bfast.GetArray("g3d:mesh:submeshoffset:0:int32:1"); + SubmeshIndexOffsets = bfast.GetArray("g3d:submesh:indexoffset:0:int32:1"); + SubmeshVertexOffsets = bfast.GetArray("g3d:submesh:vertexoffset:0:int32:1"); + SubmeshMaterials = bfast.GetArray("g3d:submesh:material:0:int32:1"); + Positions = bfast.GetArray("g3d:vertex:position:0:float32:3"); + Indices = bfast.GetArray("g3d:corner:index:0:int32:1"); + + (this as ISetup).Setup(); + } + + public BFast ToBFast() + { + var bfast = new BFast(); + + bfast.SetArray("g3d:mesh:opaquesubmeshcount:0:int32:1", MeshOpaqueSubmeshCounts); + bfast.SetArray("g3d:mesh:submeshoffset:0:int32:1", MeshSubmeshOffset); + bfast.SetArray("g3d:submesh:indexoffset:0:int32:1", SubmeshIndexOffsets); + bfast.SetArray("g3d:submesh:vertexoffset:0:int32:1", SubmeshVertexOffsets); + bfast.SetArray("g3d:submesh:material:0:int32:1", SubmeshMaterials); + bfast.SetArray("g3d:vertex:position:0:float32:3", Positions); + bfast.SetArray("g3d:corner:index:0:int32:1", Indices); + + return bfast; + } + + public bool Equals(G3dChunk other ) + { + return BufferMethods.SafeEquals(MeshOpaqueSubmeshCounts, other.MeshOpaqueSubmeshCounts) && + BufferMethods.SafeEquals(MeshSubmeshOffset, other.MeshSubmeshOffset) && + BufferMethods.SafeEquals(SubmeshIndexOffsets, other.SubmeshIndexOffsets) && + BufferMethods.SafeEquals(SubmeshVertexOffsets, other.SubmeshVertexOffsets) && + BufferMethods.SafeEquals(SubmeshMaterials, other.SubmeshMaterials) && + BufferMethods.SafeEquals(Positions, other.Positions) && + BufferMethods.SafeEquals(Indices, other.Indices); + } + + public G3dChunk Merge(G3dChunk other) + { + return new G3dChunk( + BufferMethods.MergeData(MeshOpaqueSubmeshCounts, other.MeshOpaqueSubmeshCounts), + BufferMethods.MergeIndex(MeshSubmeshOffset, other.MeshSubmeshOffset, Indices?.Length ?? 0), + BufferMethods.MergeIndex(SubmeshIndexOffsets, other.SubmeshIndexOffsets, Indices?.Length ?? 0), + BufferMethods.MergeIndex(SubmeshVertexOffsets, other.SubmeshVertexOffsets, Indices?.Length ?? 0), + BufferMethods.MergeData(SubmeshMaterials, other.SubmeshMaterials), + BufferMethods.MergeData(Positions, other.Positions), + BufferMethods.MergeIndex(Indices, other.Indices, Positions?.Length ?? 0) + ); + } + + public void Validate() + { + // Ensure all the indices are either -1 or within the bounds of the attributes they are indexing into. 
+ BufferMethods.ValidateIndex(MeshSubmeshOffset, Indices, "MeshSubmeshOffset"); + BufferMethods.ValidateIndex(SubmeshIndexOffsets, Indices, "SubmeshIndexOffsets"); + BufferMethods.ValidateIndex(SubmeshVertexOffsets, Indices, "SubmeshVertexOffsets"); + BufferMethods.ValidateIndex(Indices, Positions, "Indices"); + } + } + + // Please provide an explicit implementation in another partial class file. + public partial class G3dMaterials : ISetup + { + public Vim.Math3d.Vector4[] MaterialColors; + public System.Single[] MaterialGlossiness; + public System.Single[] MaterialSmoothness; + + public G3dMaterials( + Vim.Math3d.Vector4[] materialColors, + System.Single[] materialGlossiness, + System.Single[] materialSmoothness + ) + { + MaterialColors = materialColors; + MaterialGlossiness = materialGlossiness; + MaterialSmoothness = materialSmoothness; + + (this as ISetup).Setup(); + } + + public G3dMaterials(BFast bfast) + { + MaterialColors = bfast.GetArray("g3d:material:color:0:float32:4"); + MaterialGlossiness = bfast.GetArray("g3d:material:glossiness:0:float32:1"); + MaterialSmoothness = bfast.GetArray("g3d:material:smoothness:0:float32:1"); + + (this as ISetup).Setup(); + } + + public BFast ToBFast() + { + var bfast = new BFast(); + + bfast.SetArray("g3d:material:color:0:float32:4", MaterialColors); + bfast.SetArray("g3d:material:glossiness:0:float32:1", MaterialGlossiness); + bfast.SetArray("g3d:material:smoothness:0:float32:1", MaterialSmoothness); + + return bfast; + } + + public bool Equals(G3dMaterials other ) + { + return BufferMethods.SafeEquals(MaterialColors, other.MaterialColors) && + BufferMethods.SafeEquals(MaterialGlossiness, other.MaterialGlossiness) && + BufferMethods.SafeEquals(MaterialSmoothness, other.MaterialSmoothness); + } + + public G3dMaterials Merge(G3dMaterials other) + { + return new G3dMaterials( + BufferMethods.MergeData(MaterialColors, other.MaterialColors), + BufferMethods.MergeData(MaterialGlossiness, other.MaterialGlossiness), + BufferMethods.MergeData(MaterialSmoothness, other.MaterialSmoothness) + ); + } + + public void Validate() + { + // Ensure all the indices are either -1 or within the bounds of the attributes they are indexing into. + + } + } + + // Please provide an explicit implementation in another partial class file. 
+ public partial class G3dScene : ISetup + { + public System.Int32[] ChunkCount; + public System.Int32[] InstanceMeshes; + public Vim.Math3d.Matrix4x4[] InstanceTransformData; + public System.Int32[] InstanceNodes; + public System.Int32[] InstanceGroups; + public System.Int64[] InstanceTags; + public System.UInt16[] InstanceFlags; + public Vim.Math3d.Vector3[] InstanceMins; + public Vim.Math3d.Vector3[] InstanceMaxs; + public System.Int32[] MeshChunks; + public System.Int32[] MeshChunkIndices; + public System.Int32[] MeshVertexCounts; + public System.Int32[] MeshIndexCounts; + public System.Int32[] MeshOpaqueVertexCounts; + public System.Int32[] MeshOpaqueIndexCounts; + + public G3dScene( + System.Int32[] chunkCount, + System.Int32[] instanceMeshes, + Vim.Math3d.Matrix4x4[] instanceTransformData, + System.Int32[] instanceNodes, + System.Int32[] instanceGroups, + System.Int64[] instanceTags, + System.UInt16[] instanceFlags, + Vim.Math3d.Vector3[] instanceMins, + Vim.Math3d.Vector3[] instanceMaxs, + System.Int32[] meshChunks, + System.Int32[] meshChunkIndices, + System.Int32[] meshVertexCounts, + System.Int32[] meshIndexCounts, + System.Int32[] meshOpaqueVertexCounts, + System.Int32[] meshOpaqueIndexCounts + ) + { + ChunkCount = chunkCount; + InstanceMeshes = instanceMeshes; + InstanceTransformData = instanceTransformData; + InstanceNodes = instanceNodes; + InstanceGroups = instanceGroups; + InstanceTags = instanceTags; + InstanceFlags = instanceFlags; + InstanceMins = instanceMins; + InstanceMaxs = instanceMaxs; + MeshChunks = meshChunks; + MeshChunkIndices = meshChunkIndices; + MeshVertexCounts = meshVertexCounts; + MeshIndexCounts = meshIndexCounts; + MeshOpaqueVertexCounts = meshOpaqueVertexCounts; + MeshOpaqueIndexCounts = meshOpaqueIndexCounts; + + (this as ISetup).Setup(); + } + + public G3dScene(BFast bfast) + { + ChunkCount = bfast.GetArray("g3d:chunk:count:0:int32:1"); + InstanceMeshes = bfast.GetArray("g3d:instance:mesh:0:int32:1"); + InstanceTransformData = bfast.GetArray("g3d:instance:transform:0:float32:16"); + InstanceNodes = bfast.GetArray("g3d:instance:node:0:int32:1"); + InstanceGroups = bfast.GetArray("g3d:instance:group:0:int32:1"); + InstanceTags = bfast.GetArray("g3d:instance:tag:0:int64:1"); + InstanceFlags = bfast.GetArray("g3d:instance:flags:0:uint16:1"); + InstanceMins = bfast.GetArray("g3d:instance:min:0:float32:3"); + InstanceMaxs = bfast.GetArray("g3d:instance:max:0:float32:3"); + MeshChunks = bfast.GetArray("g3d:mesh:chunk:0:int32:1"); + MeshChunkIndices = bfast.GetArray("g3d:mesh:chunkindex:0:int32:1"); + MeshVertexCounts = bfast.GetArray("g3d:mesh:vertexcount:0:int32:1"); + MeshIndexCounts = bfast.GetArray("g3d:mesh:indexcount:0:int32:1"); + MeshOpaqueVertexCounts = bfast.GetArray("g3d:mesh:opaquevertexcount:0:int32:1"); + MeshOpaqueIndexCounts = bfast.GetArray("g3d:mesh:opaqueindexcount:0:int32:1"); + + (this as ISetup).Setup(); + } + + public BFast ToBFast() + { + var bfast = new BFast(); + + bfast.SetArray("g3d:chunk:count:0:int32:1", ChunkCount); + bfast.SetArray("g3d:instance:mesh:0:int32:1", InstanceMeshes); + bfast.SetArray("g3d:instance:transform:0:float32:16", InstanceTransformData); + bfast.SetArray("g3d:instance:node:0:int32:1", InstanceNodes); + bfast.SetArray("g3d:instance:group:0:int32:1", InstanceGroups); + bfast.SetArray("g3d:instance:tag:0:int64:1", InstanceTags); + bfast.SetArray("g3d:instance:flags:0:uint16:1", InstanceFlags); + bfast.SetArray("g3d:instance:min:0:float32:3", InstanceMins); + bfast.SetArray("g3d:instance:max:0:float32:3", 
InstanceMaxs); + bfast.SetArray("g3d:mesh:chunk:0:int32:1", MeshChunks); + bfast.SetArray("g3d:mesh:chunkindex:0:int32:1", MeshChunkIndices); + bfast.SetArray("g3d:mesh:vertexcount:0:int32:1", MeshVertexCounts); + bfast.SetArray("g3d:mesh:indexcount:0:int32:1", MeshIndexCounts); + bfast.SetArray("g3d:mesh:opaquevertexcount:0:int32:1", MeshOpaqueVertexCounts); + bfast.SetArray("g3d:mesh:opaqueindexcount:0:int32:1", MeshOpaqueIndexCounts); + + return bfast; + } + + public bool Equals(G3dScene other ) + { + return BufferMethods.SafeEquals(ChunkCount, other.ChunkCount) && + BufferMethods.SafeEquals(InstanceMeshes, other.InstanceMeshes) && + BufferMethods.SafeEquals(InstanceTransformData, other.InstanceTransformData) && + BufferMethods.SafeEquals(InstanceNodes, other.InstanceNodes) && + BufferMethods.SafeEquals(InstanceGroups, other.InstanceGroups) && + BufferMethods.SafeEquals(InstanceTags, other.InstanceTags) && + BufferMethods.SafeEquals(InstanceFlags, other.InstanceFlags) && + BufferMethods.SafeEquals(InstanceMins, other.InstanceMins) && + BufferMethods.SafeEquals(InstanceMaxs, other.InstanceMaxs) && + BufferMethods.SafeEquals(MeshChunks, other.MeshChunks) && + BufferMethods.SafeEquals(MeshChunkIndices, other.MeshChunkIndices) && + BufferMethods.SafeEquals(MeshVertexCounts, other.MeshVertexCounts) && + BufferMethods.SafeEquals(MeshIndexCounts, other.MeshIndexCounts) && + BufferMethods.SafeEquals(MeshOpaqueVertexCounts, other.MeshOpaqueVertexCounts) && + BufferMethods.SafeEquals(MeshOpaqueIndexCounts, other.MeshOpaqueIndexCounts); + } + + public G3dScene Merge(G3dScene other) + { + return new G3dScene( + BufferMethods.MergeData(ChunkCount, other.ChunkCount), + BufferMethods.MergeData(InstanceMeshes, other.InstanceMeshes), + BufferMethods.MergeData(InstanceTransformData, other.InstanceTransformData), + BufferMethods.MergeData(InstanceNodes, other.InstanceNodes), + BufferMethods.MergeData(InstanceGroups, other.InstanceGroups), + BufferMethods.MergeData(InstanceTags, other.InstanceTags), + BufferMethods.MergeData(InstanceFlags, other.InstanceFlags), + BufferMethods.MergeData(InstanceMins, other.InstanceMins), + BufferMethods.MergeData(InstanceMaxs, other.InstanceMaxs), + BufferMethods.MergeData(MeshChunks, other.MeshChunks), + BufferMethods.MergeData(MeshChunkIndices, other.MeshChunkIndices), + BufferMethods.MergeData(MeshVertexCounts, other.MeshVertexCounts), + BufferMethods.MergeData(MeshIndexCounts, other.MeshIndexCounts), + BufferMethods.MergeData(MeshOpaqueVertexCounts, other.MeshOpaqueVertexCounts), + BufferMethods.MergeData(MeshOpaqueIndexCounts, other.MeshOpaqueIndexCounts) + ); + } + + public void Validate() + { + // Ensure all the indices are either -1 or within the bounds of the attributes they are indexing into. 
+ + } + } + +} diff --git a/src/cs/g3d/Vim.G3dNext/G3dMaterials.cs b/src/cs/g3d/Vim.G3dNext/G3dMaterials.cs new file mode 100644 index 00000000..18c3138a --- /dev/null +++ b/src/cs/g3d/Vim.G3dNext/G3dMaterials.cs @@ -0,0 +1,18 @@ + +namespace Vim.G3dNext +{ + public partial class G3dMaterials + { + void ISetup.Setup() + { + // empty + } + + public G3dMaterials(G3dVim vim) + { + MaterialColors = vim.MaterialColors; + MaterialGlossiness = vim.MaterialGlossiness; + MaterialSmoothness = vim.MaterialSmoothness; + } + } +} diff --git a/src/cs/g3d/Vim.G3dNext/G3dScene.cs b/src/cs/g3d/Vim.G3dNext/G3dScene.cs new file mode 100644 index 00000000..4d6adb03 --- /dev/null +++ b/src/cs/g3d/Vim.G3dNext/G3dScene.cs @@ -0,0 +1,12 @@ +namespace Vim.G3dNext +{ + public partial class G3dScene + { + public int GetChunksCount() => ChunkCount[0]; + public int GetInstanceCount() => InstanceMeshes.Length; + void ISetup.Setup() + { + // empty + } + } +} diff --git a/src/cs/g3d/Vim.G3dNext/G3dVim.cs b/src/cs/g3d/Vim.G3dNext/G3dVim.cs new file mode 100644 index 00000000..3ddb9885 --- /dev/null +++ b/src/cs/g3d/Vim.G3dNext/G3dVim.cs @@ -0,0 +1,185 @@ +using System; +using System.Collections.Generic; +using Vim.BFastLib; + +namespace Vim.G3dNext +{ + public partial class G3dVim + { + // Computed field + public int[] MeshVertexOffsets; + private List[] _meshInstances; + + public IReadOnlyList GetMeshInstances(int mesh) + { + return _meshInstances[mesh]; + } + + public int GetApproxSize(int mesh) + { + return GetMeshVertexCount(mesh) * 12 + GetMeshIndexCount(mesh) * 4; + } + + void ISetup.Setup() + { + MeshVertexOffsets = ComputeMeshVertexOffsets(); + _meshInstances = ComputeMeshInstances(); + } + + public static G3dVim FromVim(string vimPath) + => BFastHelpers.Read(vimPath, b => new G3dVim(b.GetBFast("geometry"))); + + private int[] ComputeMeshVertexOffsets() + { + var result = new int[GetMeshCount()]; + for (var m = 0; m < result.Length; m++) + { + var min = int.MaxValue; + var start = GetMeshIndexStart(m); + var end = GetMeshIndexEnd(m); + for (var i = start; i < end; i++) + { + min = Math.Min(min, Indices[i]); + } + result[m] = min; + } + return result; + } + + private List[] ComputeMeshInstances() + { + var result = new List[GetMeshCount()]; + for (var i = 0; i < result.Length; i++) + { + result[i] = new List(); + } + + for (var i = 0; i < InstanceMeshes.Length; i++) + { + var mesh = InstanceMeshes[i]; + if (mesh >= 0) + { + result[mesh].Add(i); + } + } + + return result; + } + + public int GetTriangleCount() + { + return GetIndexCount() / 3; + } + + /// + /// The total number of instances. + /// + public int GetInstanceCount() => InstanceTransforms?.Length ?? 0; + + #region meshes + /// + /// The total number of meshes. + /// + public int GetMeshCount() => MeshSubmeshOffsets?.Length ?? 0; + + public int GetMeshIndexStart(int mesh) + { + var submesh = GetMeshSubmeshStart(mesh); + return GetSubmeshIndexStart(submesh); + } + + public int GetMeshIndexEnd(int mesh) + { + var submesh = GetMeshSubmeshEnd(mesh) - 1; + return GetSubmeshIndexEnd(submesh); + } + + public int GetMeshIndexCount(int mesh) + { + return GetMeshIndexEnd(mesh) - GetMeshIndexStart(mesh); + } + + public int GetMeshVertexStart(int mesh) + { + return MeshVertexOffsets[mesh]; + } + + public int GetMeshVertexEnd(int mesh) + { + return mesh + 1 < GetMeshCount() ? 
MeshVertexOffsets[mesh + 1] : Positions.Length; + } + + public int GetMeshVertexCount(int mesh) + { + return GetMeshVertexEnd(mesh) - GetMeshVertexStart(mesh); + } + + public int GetMeshSubmeshStart(int mesh) + { + return MeshSubmeshOffsets[mesh]; + } + + public int GetMeshSubmeshEnd(int mesh) + { + return mesh + 1 < GetMeshCount() + ? MeshSubmeshOffsets[mesh + 1] + : GetSubmeshCount(); + } + + public int GetMeshSubmeshCount(int mesh) + { + return GetMeshSubmeshEnd(mesh) - GetMeshSubmeshStart(mesh); + } + + #endregion + + #region submesh + + /// + /// The total number of submeshes. + /// + public int GetSubmeshCount() => SubmeshIndexOffsets?.Length ?? 0; + + public int GetSubmeshIndexStart(int submesh) + { + return SubmeshIndexOffsets[submesh]; + } + + public int GetSubmeshIndexEnd(int submesh) + { + return submesh + 1 < GetSubmeshCount() ? SubmeshIndexOffsets[submesh + 1] : GetIndexCount(); + } + + public int GetSubmeshIndexCount(int submesh) + { + return GetSubmeshIndexEnd(submesh) - GetSubmeshIndexStart(submesh); + } + + #endregion + + /// + /// The total number of indices. + /// + public int GetIndexCount() => Indices?.Length ?? 0; + + /// + /// The total number of vertices. + /// + public int GetVertexCount() => Positions?.Length ?? 0; + + /// + /// The total number of materials. + /// + public int GetMaterialCount() => MaterialColors?.Length ?? 0; + + /// + /// The total number of shapes. + /// + public int GetShapeCount() => ShapeVertexOffsets?.Length ?? 0; + + /// + /// The total number of shape vertices. + /// + public int GetShapeVertexCount() => ShapeVertices?.Length ?? 0; + } +} diff --git a/src/cs/g3d/Vim.G3dNext/MetaHeader.cs b/src/cs/g3d/Vim.G3dNext/MetaHeader.cs new file mode 100644 index 00000000..a58a3a6f --- /dev/null +++ b/src/cs/g3d/Vim.G3dNext/MetaHeader.cs @@ -0,0 +1,63 @@ +using System; +using System.Text; + +namespace Vim.G3dNext +{ + // http://docs.autodesk.com/FBX/2014/ENU/FBX-SDK-Documentation/index.html?url=files/GUID-CC93340E-C4A1-49EE-B048-E898F856CFBF.htm,topicNumber=d30e8478 + // https://twitter.com/FreyaHolmer/status/644881436982575104 + // https://github.com/KhronosGroup/glTF/tree/master/specification/2.0#coordinate-system-and-units + + public struct MetaHeader + { + public byte MagicA; // 0x63 + public byte MagicB; // 0xD0 + public byte UnitA; // with unitB could be: 'ft', 'yd', 'mi', 'km', 'mm', 'in', 'cm', 'm', + public byte UnitB; + public byte UpAxis; // e.g. 1=y or 2=z (could be 0=x, if you hate people) + public byte ForwardVector; // e.g. 
0=x, 1=y, 2=z, 3=-x, 4=-y, 5=-z + public byte Handedness; // 0=left-handed, 1=right-handed + public byte Padding; // 0 + + public string Unit => Encoding.ASCII.GetString(new byte[] { UnitA, UnitB }); + + public byte[] ToBytes() + => new[] { MagicA, MagicB, UnitA, UnitB, UpAxis, ForwardVector, Handedness, Padding }; + + public static MetaHeader FromBytes(byte[] bytes) + => new MetaHeader + { + MagicA = bytes[0], + MagicB = bytes[1], + UnitA = bytes[2], + UnitB = bytes[3], + UpAxis = bytes[4], + ForwardVector = bytes[5], + Handedness = bytes[6], + } + .Validate(); + + public static MetaHeader Default + = new MetaHeader + { + MagicA = Constants.MetaHeaderMagicA, + MagicB = Constants.MetaHeaderMagicB, + UnitA = (byte)'m', + UnitB = 0, + UpAxis = 2, + ForwardVector = 0, + Handedness = 0, + Padding = 0 + }; + + public MetaHeader Validate() + { + if (MagicA != Constants.MetaHeaderMagicA) throw new Exception($"First magic number must be 0x{Constants.MetaHeaderMagicA:X2} not 0x{MagicA:X2}"); + if (MagicB != Constants.MetaHeaderMagicB) throw new Exception($"Second magic number must be 0x{Constants.MetaHeaderMagicB:X2} not 0x{MagicB:X2}"); + if (Array.IndexOf(Constants.MetaHeaderSupportedUnits, Unit) < 0) throw new Exception($"Unit {Unit} is not a supported unit: {string.Join(", ", Constants.MetaHeaderSupportedUnits)}"); + if (UpAxis < 0 || UpAxis > 2) throw new Exception("Up axis must be 0(x), 1(y), or 2(z)"); + if (ForwardVector < 0 || ForwardVector > 5) throw new Exception("Front vector must be 0 (x), 1(y), 2(z), 3(-x), 4(-y), or 5(-z)"); + if (Handedness < 0 || Handedness > 1) throw new Exception("Handedness must be 0 (left) or 1 (right"); + return this; + } + } +} diff --git a/src/cs/g3d/Vim.G3dNext/Vim.G3dNext.csproj b/src/cs/g3d/Vim.G3dNext/Vim.G3dNext.csproj new file mode 100644 index 00000000..b0086596 --- /dev/null +++ b/src/cs/g3d/Vim.G3dNext/Vim.G3dNext.csproj @@ -0,0 +1,22 @@ + + + netstandard2.0 + + + + + + + + + + + + + + True + + + + + diff --git a/src/cs/linqarray/Vim.LinqArray.Tests/Vim.LinqArray.Tests.csproj b/src/cs/linqarray/Vim.LinqArray.Tests/Vim.LinqArray.Tests.csproj index fb957c18..38321c15 100644 --- a/src/cs/linqarray/Vim.LinqArray.Tests/Vim.LinqArray.Tests.csproj +++ b/src/cs/linqarray/Vim.LinqArray.Tests/Vim.LinqArray.Tests.csproj @@ -12,7 +12,13 @@ - + + + + + True + + diff --git a/src/cs/linqarray/Vim.LinqArray/LinqArray.cs b/src/cs/linqarray/Vim.LinqArray/LinqArray.cs index 92cc3b45..cf646593 100644 --- a/src/cs/linqarray/Vim.LinqArray/LinqArray.cs +++ b/src/cs/linqarray/Vim.LinqArray/LinqArray.cs @@ -651,7 +651,7 @@ public static int IndexOf(this IArray self, T item) where T : IEquatable /// Returns the index of the first element matching the given item. 
/// - public static int IndexOf(this IArray self, Func predicate) + public static int IndexOf(this IArray self, Func predicate) { for (var i = 0; i < self.Count; ++i) { @@ -976,7 +976,7 @@ public static IArray PrefixSums(this IArray self) public static IArray PrefixSums(this IArray self) => self.Scan(0.0, (a, b) => a + b); - public static IArray Scan(this IArray self, U init, Func scanFunc) + public static IArray Scan(this IArray self, U init, Func scanFunc) { if (self.Count == 0) return Empty(); @@ -1012,5 +1012,12 @@ public static IArray SetFirstElementWhere(this IArray self, Func(this IArray self, int index, T defaultValue) + { + if(index < 0) return defaultValue; + if(index >= self.Count) return defaultValue; + return self[index]; + } } } diff --git a/src/cs/linqarray/Vim.LinqArray/Vim.LinqArray.csproj b/src/cs/linqarray/Vim.LinqArray/Vim.LinqArray.csproj index 110fabe9..558465f5 100644 --- a/src/cs/linqarray/Vim.LinqArray/Vim.LinqArray.csproj +++ b/src/cs/linqarray/Vim.LinqArray/Vim.LinqArray.csproj @@ -10,7 +10,7 @@ https://github.com/vimaec/linqarray GitHub true - license.txt + LICENSE.txt 1.1.3 true true @@ -27,7 +27,7 @@ - + True diff --git a/src/cs/math3d/Vim.Math3D.Tests/Vim.Math3D.Tests.csproj b/src/cs/math3d/Vim.Math3D.Tests/Vim.Math3D.Tests.csproj index a6f30d42..e4e11fda 100644 --- a/src/cs/math3d/Vim.Math3D.Tests/Vim.Math3D.Tests.csproj +++ b/src/cs/math3d/Vim.Math3D.Tests/Vim.Math3D.Tests.csproj @@ -12,7 +12,13 @@ - + + + + + True + + diff --git a/src/cs/math3d/Vim.Math3D/Vim.Math3D.csproj b/src/cs/math3d/Vim.Math3D/Vim.Math3D.csproj index b2138f57..ac941449 100644 --- a/src/cs/math3d/Vim.Math3D/Vim.Math3D.csproj +++ b/src/cs/math3d/Vim.Math3D/Vim.Math3D.csproj @@ -10,7 +10,7 @@ https://github.com/vimaec/math3d GitHub true - license.txt + LICENSE.txt 1.6.2 true true @@ -60,9 +60,9 @@ Structs.tt - + - + True diff --git a/src/cs/util/Vim.Util.Logging.Serilog/Vim.Util.Logging.Serilog.csproj b/src/cs/util/Vim.Util.Logging.Serilog/Vim.Util.Logging.Serilog.csproj index 31723bd0..ab531508 100644 --- a/src/cs/util/Vim.Util.Logging.Serilog/Vim.Util.Logging.Serilog.csproj +++ b/src/cs/util/Vim.Util.Logging.Serilog/Vim.Util.Logging.Serilog.csproj @@ -12,7 +12,13 @@ - + + + + True + + + diff --git a/src/cs/util/Vim.Util.Tests/Properties/Resources.Designer.cs b/src/cs/util/Vim.Util.Tests/Properties/Resources.Designer.cs index 2d371c8f..d31f3de6 100644 --- a/src/cs/util/Vim.Util.Tests/Properties/Resources.Designer.cs +++ b/src/cs/util/Vim.Util.Tests/Properties/Resources.Designer.cs @@ -61,7 +61,7 @@ internal Resources() { } /// - /// Looks up a localized string similar to C:\DEV\vimaec\src\Vim\vim-format\src\cs\util\Vim.Util.Tests\ + /// Looks up a localized string similar to C:\Users\Rober\Desktop\Vim\vim-format\src\cs\util\Vim.Util.Tests\ ///. /// internal static string ProjDir { diff --git a/src/cs/util/Vim.Util.Tests/TestUtils.cs b/src/cs/util/Vim.Util.Tests/TestUtils.cs new file mode 100644 index 00000000..db4f642a --- /dev/null +++ b/src/cs/util/Vim.Util.Tests/TestUtils.cs @@ -0,0 +1,106 @@ +using System; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Xml.Linq; + +namespace Vim.Util.Tests +{ + public static class TestUtils + { + + public static string ResidencePath = VimFormatRepoPaths.GetLatestWolfordResidenceVim(); + + /// + /// Deletes and/or creates a folder for given test case name. 
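+        /// Returns the path of the newly created, empty test directory.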
+ /// + public static string PrepareTestDir([CallerMemberName] string testName = null) + { + if (testName == null) + throw new ArgumentException(nameof(testName)); + + var testDir = Path.Combine(VimFormatRepoPaths.OutDir, testName); + + // Prepare the test directory + if (Directory.Exists(testDir)) + Directory.Delete(testDir, true); + Directory.CreateDirectory(testDir); + + return testDir; + } + + public static string PrepareOutputPath(string fileName) + { + var outputFolder = PrepareTestDir("Can_Convert_And_Read_Vimx"); + return Path.Combine(outputFolder, fileName); + } + + public static (long, long) GetMemoryConsumptionAndMSecElapsed(Action action) + { + var time = 0L; + var mem = GetMemoryConsumption( + () => time = GetMSecElapsed(action)); + return (mem, time); + } + + public static long GetMSecElapsed(Action action) + { + var sw = Stopwatch.StartNew(); + action(); + return sw.ElapsedMilliseconds; + } + + /// + /// Creates a directory if needed, or clears all of its contents otherwise + /// + public static string CreateAndClearDirectory(string dirPath) + { + if (!Directory.Exists(dirPath)) + Directory.CreateDirectory(dirPath); + else + DeleteFolderContents(dirPath); + return dirPath; + } + + /// + /// Deletes all contents in a folder + /// + /// + /// https://stackoverflow.com/questions/1288718/how-to-delete-all-files-and-folders-in-a-directory + /// + private static void DeleteFolderContents(string folderPath) + { + var di = new DirectoryInfo(folderPath); + foreach (var dir in di.EnumerateDirectories().AsParallel()) + DeleteFolderAndAllContents(dir.FullName); + foreach (var file in di.EnumerateFiles().AsParallel()) + file.Delete(); + } + + /// + /// Deletes everything in a folder and then the folder. + /// + private static void DeleteFolderAndAllContents(string folderPath) + { + if (!Directory.Exists(folderPath)) + return; + + DeleteFolderContents(folderPath); + Directory.Delete(folderPath); + } + + // NOTE: Calling a function generates additional memory + private static long GetMemoryConsumption(Action action) + { + GC.Collect(); + GC.WaitForPendingFinalizers(); + var memBefore = GC.GetTotalMemory(true); + action(); + GC.Collect(); + GC.WaitForPendingFinalizers(); + return GC.GetTotalMemory(true) - memBefore; + } + + } +} diff --git a/src/cs/util/Vim.Util.Tests/Vim.Util.Tests.csproj b/src/cs/util/Vim.Util.Tests/Vim.Util.Tests.csproj index 8b38f884..6d419428 100644 --- a/src/cs/util/Vim.Util.Tests/Vim.Util.Tests.csproj +++ b/src/cs/util/Vim.Util.Tests/Vim.Util.Tests.csproj @@ -15,8 +15,9 @@ - - + + + @@ -37,5 +38,11 @@ + + + True + + + diff --git a/src/cs/util/Vim.Util/Vim.Util.csproj b/src/cs/util/Vim.Util/Vim.Util.csproj index dbdcea46..fc964269 100644 --- a/src/cs/util/Vim.Util/Vim.Util.csproj +++ b/src/cs/util/Vim.Util/Vim.Util.csproj @@ -4,4 +4,10 @@ netstandard2.0 + + + True + + + diff --git a/src/cs/vim-format.sln b/src/cs/vim-format.sln index ba3ae0fe..a75b5a5b 100644 --- a/src/cs/vim-format.sln +++ b/src/cs/vim-format.sln @@ -5,10 +5,6 @@ VisualStudioVersion = 17.0.31903.59 MinimumVisualStudioVersion = 10.0.40219.1 Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "bfast", "bfast", "{F3260C54-834F-4C74-A3B7-EAB622AFA492}" EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Vim.BFast", "bfast\Vim.BFast\Vim.BFast.csproj", "{1059A7DE-95FA-4F54-85E3-84049E1872D5}" -EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Vim.BFast.Tests", "bfast\Vim.BFast.Tests\Vim.BFast.Tests.csproj", "{000ED0CF-EC9C-488C-BE85-14B95FFDF104}" -EndProject 
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "linqarray", "linqarray", "{CF54EDFD-51AB-4B0D-B084-2DF42918BA51}" EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Vim.LinqArray", "linqarray\Vim.LinqArray\Vim.LinqArray.csproj", "{07CA1F46-21DA-4C22-A8CB-52D526D51C94}" @@ -45,7 +41,23 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Vim.Util.Tests", "util\Vim. EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "util", "util", "{A279C2F9-3418-484B-A36E-D1D1C67A0B2D}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Vim.Util.Logging.Serilog", "util\Vim.Util.Logging.Serilog\Vim.Util.Logging.Serilog.csproj", "{6B9E6432-A7BB-4487-905A-0C3117398140}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Vim.Util.Logging.Serilog", "util\Vim.Util.Logging.Serilog\Vim.Util.Logging.Serilog.csproj", "{6B9E6432-A7BB-4487-905A-0C3117398140}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Vim.G3dNext", "g3d\Vim.G3dNext\Vim.G3dNext.csproj", "{7A3255D6-444D-46B7-8BBD-ABED88C99009}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Vim.G3dNext.CodeGen", "g3d\Vim.G3dNext.CodeGen\Vim.G3dNext.CodeGen.csproj", "{F0FFC990-6358-4B17-B878-C6CA087CDF3F}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Vim.Format.Vimx", "vim\Vim.Format.Vimx\Vim.Format.Vimx.csproj", "{B5C8E733-8D3F-45BD-BBBE-09A9F1965545}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Vim.Format.Vimx.Test", "vim\Vim.Vimx.Test\Vim.Format.Vimx.Test.csproj", "{BBF94CBD-CD39-49F5-979A-5C9E52C29330}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Vim.BFast", "bfast\Vim.BFast\Vim.BFast.csproj", "{408884EA-3CE5-4A34-97F6-1F2D64A0E745}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Vim.BFast.Tests", "bfast\Vim.BFast.Tests\Vim.BFast.Tests.csproj", "{FD149D64-5905-4F7D-97A8-9F7DA18A257D}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Vim.Format.Vimx.Conversion", "vim\Vim.Format.Vimx.Conversion\Vim.Format.Vimx.Conversion.csproj", "{4C4F9826-0DEF-4A39-BFC8-A834522694A0}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Vim.G3dNext.Tests", "Vim.G3dNext.Tests\Vim.G3dNext.Tests.csproj", "{AD1A2A9F-18E9-4430-A192-3CD1A17DDA0D}" EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution @@ -53,14 +65,6 @@ Global Release|Any CPU = Release|Any CPU EndGlobalSection GlobalSection(ProjectConfigurationPlatforms) = postSolution - {1059A7DE-95FA-4F54-85E3-84049E1872D5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {1059A7DE-95FA-4F54-85E3-84049E1872D5}.Debug|Any CPU.Build.0 = Debug|Any CPU - {1059A7DE-95FA-4F54-85E3-84049E1872D5}.Release|Any CPU.ActiveCfg = Release|Any CPU - {1059A7DE-95FA-4F54-85E3-84049E1872D5}.Release|Any CPU.Build.0 = Release|Any CPU - {000ED0CF-EC9C-488C-BE85-14B95FFDF104}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {000ED0CF-EC9C-488C-BE85-14B95FFDF104}.Debug|Any CPU.Build.0 = Debug|Any CPU - {000ED0CF-EC9C-488C-BE85-14B95FFDF104}.Release|Any CPU.ActiveCfg = Release|Any CPU - {000ED0CF-EC9C-488C-BE85-14B95FFDF104}.Release|Any CPU.Build.0 = Release|Any CPU {07CA1F46-21DA-4C22-A8CB-52D526D51C94}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {07CA1F46-21DA-4C22-A8CB-52D526D51C94}.Debug|Any CPU.Build.0 = Debug|Any CPU {07CA1F46-21DA-4C22-A8CB-52D526D51C94}.Release|Any CPU.ActiveCfg = Release|Any CPU @@ -117,13 +121,43 @@ Global {6B9E6432-A7BB-4487-905A-0C3117398140}.Debug|Any CPU.Build.0 = Debug|Any CPU 
{6B9E6432-A7BB-4487-905A-0C3117398140}.Release|Any CPU.ActiveCfg = Release|Any CPU {6B9E6432-A7BB-4487-905A-0C3117398140}.Release|Any CPU.Build.0 = Release|Any CPU + {7A3255D6-444D-46B7-8BBD-ABED88C99009}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7A3255D6-444D-46B7-8BBD-ABED88C99009}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7A3255D6-444D-46B7-8BBD-ABED88C99009}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7A3255D6-444D-46B7-8BBD-ABED88C99009}.Release|Any CPU.Build.0 = Release|Any CPU + {F0FFC990-6358-4B17-B878-C6CA087CDF3F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {F0FFC990-6358-4B17-B878-C6CA087CDF3F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {F0FFC990-6358-4B17-B878-C6CA087CDF3F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {F0FFC990-6358-4B17-B878-C6CA087CDF3F}.Release|Any CPU.Build.0 = Release|Any CPU + {B5C8E733-8D3F-45BD-BBBE-09A9F1965545}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B5C8E733-8D3F-45BD-BBBE-09A9F1965545}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B5C8E733-8D3F-45BD-BBBE-09A9F1965545}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B5C8E733-8D3F-45BD-BBBE-09A9F1965545}.Release|Any CPU.Build.0 = Release|Any CPU + {BBF94CBD-CD39-49F5-979A-5C9E52C29330}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {BBF94CBD-CD39-49F5-979A-5C9E52C29330}.Debug|Any CPU.Build.0 = Debug|Any CPU + {BBF94CBD-CD39-49F5-979A-5C9E52C29330}.Release|Any CPU.ActiveCfg = Release|Any CPU + {BBF94CBD-CD39-49F5-979A-5C9E52C29330}.Release|Any CPU.Build.0 = Release|Any CPU + {408884EA-3CE5-4A34-97F6-1F2D64A0E745}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {408884EA-3CE5-4A34-97F6-1F2D64A0E745}.Debug|Any CPU.Build.0 = Debug|Any CPU + {408884EA-3CE5-4A34-97F6-1F2D64A0E745}.Release|Any CPU.ActiveCfg = Release|Any CPU + {408884EA-3CE5-4A34-97F6-1F2D64A0E745}.Release|Any CPU.Build.0 = Release|Any CPU + {FD149D64-5905-4F7D-97A8-9F7DA18A257D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {FD149D64-5905-4F7D-97A8-9F7DA18A257D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {FD149D64-5905-4F7D-97A8-9F7DA18A257D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {FD149D64-5905-4F7D-97A8-9F7DA18A257D}.Release|Any CPU.Build.0 = Release|Any CPU + {4C4F9826-0DEF-4A39-BFC8-A834522694A0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4C4F9826-0DEF-4A39-BFC8-A834522694A0}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4C4F9826-0DEF-4A39-BFC8-A834522694A0}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4C4F9826-0DEF-4A39-BFC8-A834522694A0}.Release|Any CPU.Build.0 = Release|Any CPU + {AD1A2A9F-18E9-4430-A192-3CD1A17DDA0D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {AD1A2A9F-18E9-4430-A192-3CD1A17DDA0D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {AD1A2A9F-18E9-4430-A192-3CD1A17DDA0D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {AD1A2A9F-18E9-4430-A192-3CD1A17DDA0D}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE EndGlobalSection GlobalSection(NestedProjects) = preSolution - {1059A7DE-95FA-4F54-85E3-84049E1872D5} = {F3260C54-834F-4C74-A3B7-EAB622AFA492} - {000ED0CF-EC9C-488C-BE85-14B95FFDF104} = {F3260C54-834F-4C74-A3B7-EAB622AFA492} {07CA1F46-21DA-4C22-A8CB-52D526D51C94} = {CF54EDFD-51AB-4B0D-B084-2DF42918BA51} {461788B0-072C-485F-82C0-F83CCC86F95D} = {CF54EDFD-51AB-4B0D-B084-2DF42918BA51} {1086F24E-32C8-4261-9B06-A364EEE71DEF} = {9BD3CC85-97FD-4093-AA11-58FE405B0F65} @@ -138,6 +172,14 @@ Global {EC00B2EC-3CF0-43C3-A071-320AD3C355CF} = {A279C2F9-3418-484B-A36E-D1D1C67A0B2D} {F7091670-1059-4F4F-AC3A-0B1DE4A724B5} = {A279C2F9-3418-484B-A36E-D1D1C67A0B2D} 
{6B9E6432-A7BB-4487-905A-0C3117398140} = {A279C2F9-3418-484B-A36E-D1D1C67A0B2D} + {7A3255D6-444D-46B7-8BBD-ABED88C99009} = {705B8FB2-D707-4527-91FC-0AADEDBA4D80} + {F0FFC990-6358-4B17-B878-C6CA087CDF3F} = {705B8FB2-D707-4527-91FC-0AADEDBA4D80} + {B5C8E733-8D3F-45BD-BBBE-09A9F1965545} = {D639C635-01D1-48C4-89B5-1FD70F1AFEE9} + {BBF94CBD-CD39-49F5-979A-5C9E52C29330} = {D639C635-01D1-48C4-89B5-1FD70F1AFEE9} + {408884EA-3CE5-4A34-97F6-1F2D64A0E745} = {F3260C54-834F-4C74-A3B7-EAB622AFA492} + {FD149D64-5905-4F7D-97A8-9F7DA18A257D} = {F3260C54-834F-4C74-A3B7-EAB622AFA492} + {4C4F9826-0DEF-4A39-BFC8-A834522694A0} = {D639C635-01D1-48C4-89B5-1FD70F1AFEE9} + {AD1A2A9F-18E9-4430-A192-3CD1A17DDA0D} = {705B8FB2-D707-4527-91FC-0AADEDBA4D80} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {7D1BF641-26E8-4809-B638-029241C51BE2} diff --git a/src/cs/vim/Vim.Format.CodeGen/ObjectModelTypeScriptGenerator.cs b/src/cs/vim/Vim.Format.CodeGen/ObjectModelTypeScriptGenerator.cs index 70e40bdc..b665b612 100644 --- a/src/cs/vim/Vim.Format.CodeGen/ObjectModelTypeScriptGenerator.cs +++ b/src/cs/vim/Vim.Format.CodeGen/ObjectModelTypeScriptGenerator.cs @@ -405,8 +405,8 @@ private static void WriteVimDocument(CodeBuilder cb, Type[] entityTypes) cb.AppendLine("}"); cb.AppendLine(); - cb.AppendLine("static async createFromBfast(bfast: BFast, ignoreStrings: boolean = false): Promise {"); - cb.AppendLine("const loaded = await VimLoader.loadFromBfast(bfast, ignoreStrings)"); + cb.AppendLine("static async createFromBfast(bfast: BFast, download:boolean, ignoreStrings: boolean = false): Promise {"); + cb.AppendLine("const loaded = await VimLoader.loadFromBfast(bfast, download, ignoreStrings)"); cb.AppendLine(); cb.AppendLine("if (loaded[0] === undefined)"); cb.AppendLine(" return undefined"); diff --git a/src/cs/vim/Vim.Format.CodeGen/Vim.Format.CodeGen.csproj b/src/cs/vim/Vim.Format.CodeGen/Vim.Format.CodeGen.csproj index ec673b10..89a08ae0 100644 --- a/src/cs/vim/Vim.Format.CodeGen/Vim.Format.CodeGen.csproj +++ b/src/cs/vim/Vim.Format.CodeGen/Vim.Format.CodeGen.csproj @@ -8,7 +8,7 @@ OnOutputUpdated - + @@ -16,12 +16,18 @@ - - + + + + + True + + + diff --git a/src/cs/vim/Vim.Format.Core/AssetInfo.cs b/src/cs/vim/Vim.Format.Core/AssetInfo.cs index 303c5d27..009d7d37 100644 --- a/src/cs/vim/Vim.Format.Core/AssetInfo.cs +++ b/src/cs/vim/Vim.Format.Core/AssetInfo.cs @@ -1,9 +1,9 @@ using System; using System.Collections.Generic; using System.IO; -using Vim.BFast; using Vim.Util; using Vim.LinqArray; +using Vim.BFastLib; namespace Vim.Format { diff --git a/src/cs/vim/Vim.Format.Core/BigG3dWriter.cs b/src/cs/vim/Vim.Format.Core/BigG3dWriter.cs deleted file mode 100644 index e70ac297..00000000 --- a/src/cs/vim/Vim.Format.Core/BigG3dWriter.cs +++ /dev/null @@ -1,210 +0,0 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using Vim.G3d; -using Vim.BFast; -using System.IO; -using static Vim.Format.DocumentBuilder; - -namespace Vim.Format -{ - /// - /// This is a helper class for writing the really big G3Ds needed in a VIM - /// - public class BigG3dWriter : IBFastComponent - { - public INamedBuffer Meta { get; } - public string[] Names { get; } - public long[] Sizes { get; } - public BFastHeader Header { get; } - public List Meshes { get; } - public List Instances { get; } - public List Shapes { get; } - public List Materials { get; } - - // Computed fields - public int[] MeshVertexOffsets { get; } - public int[] MeshIndexOffsets { get; } - public int[] MeshSubmeshOffset { get; } - 
public int[] SubmeshIndexOffsets { get; } - public int[] ShapeVertexOffsets { get; } - - public BigG3dWriter(List meshes, List instances, List shapes, List materials, G3dHeader? header = null, bool useColors = false) - { - Meshes = meshes; - Instances = instances; - Shapes = shapes; - Materials = materials; - var totalSubmeshCount = meshes.Select(s => s.SubmeshesIndexOffset.Count).Sum(); - - // Compute the Vertex offsets and index offsets - MeshVertexOffsets = new int[meshes.Count]; - MeshIndexOffsets = new int[meshes.Count]; - SubmeshIndexOffsets = new int[totalSubmeshCount]; - MeshSubmeshOffset = new int[meshes.Count]; - - var n = meshes.Count; - - for (var i = 1; i < n; ++i) - { - MeshVertexOffsets[i] = MeshVertexOffsets[i - 1] + meshes[i - 1].Vertices.Count; - MeshIndexOffsets[i] = MeshIndexOffsets[i - 1] + meshes[i - 1].Indices.Count; - MeshSubmeshOffset[i] = MeshSubmeshOffset[i - 1] + meshes[i - 1].SubmeshesIndexOffset.Count; - } - - var subIndex =0; - var previousIndexCount = 0; - foreach(var geo in meshes) - { - foreach(var sub in geo.SubmeshesIndexOffset) - { - SubmeshIndexOffsets[subIndex++] = sub + previousIndexCount; - } - previousIndexCount += geo.Indices.Count; - } - - var submeshCount = meshes.Select(s => s.SubmeshesIndexOffset.Count).Sum(); - - var totalVertices = n == 0 ? 0 : MeshVertexOffsets[n - 1] + meshes[n - 1].Vertices.Count; - var totalIndices = n == 0 ? 0 : MeshIndexOffsets[n - 1] + meshes[n - 1].Indices.Count; - long totalFaces = totalIndices / 3; - - // Compute the shape vertex offsets - var numShapes = shapes.Count; - ShapeVertexOffsets = new int[numShapes]; - for (var i = 1; i < numShapes; ++i) - { - ShapeVertexOffsets[i] = ShapeVertexOffsets[i - 1] + shapes[i - 1].Vertices.Count; - } - var numShapeVertices = numShapes == 0 ? 0 : ShapeVertexOffsets[numShapes - 1] + shapes[numShapes - 1].Vertices.Count; - - Meta = (header ?? 
G3dHeader.Default).ToBytes().ToNamedBuffer("meta"); - - (long size, string name) AttributeSizeAndName(string attributeName, long count) - => (AttributeDescriptor.Parse(attributeName).DataElementSize * count, attributeName); - - var writers = new List<(long size, string attribute)>() - { - (Meta.NumBytes(), Meta.Name), - AttributeSizeAndName(CommonAttributes.Position, totalVertices), - AttributeSizeAndName(CommonAttributes.Index, totalIndices), - - AttributeSizeAndName(CommonAttributes.MeshSubmeshOffset, meshes.Count), - AttributeSizeAndName(CommonAttributes.SubmeshIndexOffset, submeshCount), - AttributeSizeAndName(CommonAttributes.SubmeshMaterial, submeshCount), - - AttributeSizeAndName(CommonAttributes.InstanceTransform, instances.Count), - AttributeSizeAndName(CommonAttributes.InstanceParent, instances.Count), - AttributeSizeAndName(CommonAttributes.InstanceMesh, instances.Count), - AttributeSizeAndName(CommonAttributes.InstanceFlags, instances.Count), - - AttributeSizeAndName(CommonAttributes.ShapeVertex, numShapeVertices), - AttributeSizeAndName(CommonAttributes.ShapeVertexOffset, numShapes), - AttributeSizeAndName(CommonAttributes.ShapeColor, numShapes), - AttributeSizeAndName(CommonAttributes.ShapeWidth, numShapes), - - AttributeSizeAndName(CommonAttributes.MaterialColor, materials.Count), - AttributeSizeAndName(CommonAttributes.MaterialGlossiness, materials.Count), - AttributeSizeAndName(CommonAttributes.MaterialSmoothness, materials.Count), - }; - - if (useColors) - { - writers.Add(AttributeSizeAndName(CommonAttributes.VertexColor, totalVertices)); - } - - Names = writers.Select(w => w.attribute).ToArray(); - Sizes = writers.Select(w => w.size).ToArray(); - Header = BFast.BFast.CreateBFastHeader(Sizes, Names); - } - - public long GetSize() - => BFast.BFast.ComputeNextAlignment(Header.Preamble.DataEnd); - - public void Write(Stream stream) - { - // TODO: validate in debug mode that this is producing the current data model. Look at the schema! 
- - stream.WriteBFastHeader(Header); - stream.WriteBFastBody(Header, Names, Sizes, (_stream, index, name, size) => - { - switch (name) - { - case "meta": - _stream.Write(Meta); - break; - - // Vertices - case CommonAttributes.Position: - Meshes.ForEach(g => stream.Write(g.Vertices.ToArray())); - break; - - // Indices - case CommonAttributes.Index: - for (var i = 0; i < Meshes.Count; ++i) - { - var g = Meshes[i]; - var offset = MeshVertexOffsets[i]; - stream.Write(g.Indices.Select(idx => idx + offset).ToArray()); - } - break; - - // Meshes - case CommonAttributes.MeshSubmeshOffset: - stream.Write(MeshSubmeshOffset); - break; - - // Instances - case CommonAttributes.InstanceMesh: - stream.Write(Instances.Select(i => i.MeshIndex).ToArray()); - break; - case CommonAttributes.InstanceTransform: - stream.Write(Instances.Select(i => i.Transform).ToArray()); - break; - case CommonAttributes.InstanceParent: - stream.Write(Instances.Select(i => i.ParentIndex).ToArray()); - break; - case CommonAttributes.InstanceFlags: - stream.Write(Instances.Select(i => (ushort) i.InstanceFlags).ToArray()); - break; - - // Shapes - case CommonAttributes.ShapeVertex: - stream.Write(Shapes.SelectMany(s => s.Vertices).ToArray()); - break; - case CommonAttributes.ShapeVertexOffset: - stream.Write(ShapeVertexOffsets); - break; - case CommonAttributes.ShapeColor: - stream.Write(Shapes.Select(s => s.Color).ToArray()); - break; - case CommonAttributes.ShapeWidth: - stream.Write(Shapes.Select(s => s.Width).ToArray()); - break; - - // Materials - case CommonAttributes.MaterialColor: - stream.Write(Materials.Select(i => i.Color).ToArray()); - break; - case CommonAttributes.MaterialGlossiness: - stream.Write(Materials.Select(i => i.Glossiness).ToArray()); - break; - case CommonAttributes.MaterialSmoothness: - stream.Write(Materials.Select(i => i.Smoothness).ToArray()); - break; - - // Submeshes - case CommonAttributes.SubmeshIndexOffset: - stream.Write(SubmeshIndexOffsets); - break; - case CommonAttributes.SubmeshMaterial: - stream.Write(Meshes.SelectMany(s => s.SubmeshMaterials).ToArray()); - break; - default: - throw new Exception($"Not a recognized geometry buffer: {name}"); - } - return size; - }); - } - } -} diff --git a/src/cs/vim/Vim.Format.Core/ColumnExtensions.Buffer.cs b/src/cs/vim/Vim.Format.Core/ColumnExtensions.Buffer.cs index 7e1d8ac0..0b213bd8 100644 --- a/src/cs/vim/Vim.Format.Core/ColumnExtensions.Buffer.cs +++ b/src/cs/vim/Vim.Format.Core/ColumnExtensions.Buffer.cs @@ -2,8 +2,8 @@ using System.Collections.Generic; using System.Diagnostics; using System.Linq; -using Vim.BFast; using Vim.LinqArray; +using Vim.BFastLib; namespace Vim.Format { @@ -35,11 +35,11 @@ public static SerializableEntityTable ValidateColumnRowsAreAligned(this Serializ public static string ValidateCanConcatBuffers(this INamedBuffer thisBuffer, INamedBuffer otherBuffer) { - var thisPrefix = thisBuffer.GetTypePrefix(); + var thisPrefix = SerializableEntityTable.GetTypeFromName(thisBuffer.Name); if (string.IsNullOrEmpty(thisPrefix)) throw new Exception("NamedBuffer prefix not found"); - var otherPrefix = otherBuffer.GetTypePrefix(); + var otherPrefix = SerializableEntityTable.GetTypeFromName(otherBuffer.Name); if (string.IsNullOrEmpty(otherPrefix)) throw new Exception("NamedBuffer prefix not found"); @@ -77,7 +77,10 @@ public static object GetDataColumnValue(this IBuffer dataColumn, string typePref } public static object GetDataColumnValue(this INamedBuffer dataColumn, int rowIndex) - => 
dataColumn.GetDataColumnValue(dataColumn.GetTypePrefix(), rowIndex); + { + var prefix = SerializableEntityTable.GetTypeFromName(dataColumn.Name); + return dataColumn.GetDataColumnValue(prefix, rowIndex); + } public static IBuffer CreateDefaultDataColumnBuffer(int length, string typePrefix) { @@ -119,7 +122,7 @@ public static IBuffer CopyDataColumn(this IBuffer dataColumn, string typePrefix, public static INamedBuffer CopyDataColumn(this INamedBuffer dataColumn, List remapping = null) { - var typePrefix = dataColumn.GetTypePrefix(); + var typePrefix = SerializableEntityTable.GetTypeFromName(dataColumn.Name); return new NamedBuffer(dataColumn.CopyDataColumn(typePrefix, remapping), dataColumn.Name); } diff --git a/src/cs/vim/Vim.Format.Core/Document.cs b/src/cs/vim/Vim.Format.Core/Document.cs index 476bd8e7..8465ea7d 100644 --- a/src/cs/vim/Vim.Format.Core/Document.cs +++ b/src/cs/vim/Vim.Format.Core/Document.cs @@ -1,5 +1,5 @@ using Vim.LinqArray; -using Vim.BFast; +using Vim.BFastLib; namespace Vim.Format { @@ -19,7 +19,7 @@ public Document(SerializableDocument document) } public string FileName => _Document.FileName; - private SerializableDocument _Document { get; } + public SerializableDocument _Document { get; } public SerializableHeader Header { get; } public ILookup EntityTables { get; } public ILookup Assets { get; } diff --git a/src/cs/vim/Vim.Format.Core/DocumentBuilder.cs b/src/cs/vim/Vim.Format.Core/DocumentBuilder.cs index 04c2372b..d90c1d6a 100644 --- a/src/cs/vim/Vim.Format.Core/DocumentBuilder.cs +++ b/src/cs/vim/Vim.Format.Core/DocumentBuilder.cs @@ -3,7 +3,7 @@ using System.Diagnostics; using System.Linq; using Vim.Math3d; -using Vim.BFast; +using Vim.BFastLib; using System.IO; using Vim.Util; @@ -14,10 +14,7 @@ public partial class DocumentBuilder public readonly SerializableHeader Header; public readonly Dictionary Tables = new Dictionary(); public readonly Dictionary Assets = new Dictionary(); - public readonly List Meshes = new List(); - public readonly List Instances = new List(); - public readonly List Shapes = new List(); - public readonly List Materials = new List(); + public readonly G3dBuilder Geometry = new G3dBuilder(); public bool UseColors { get; set; } @@ -51,51 +48,64 @@ public DocumentBuilder AddAsset(string name, byte[] asset) return this; } - public DocumentBuilder AddMesh(SubdividedMesh g) + public DocumentBuilder AddMesh(SubdividedMesh mesh) { - Meshes.Add(g); + Geometry.AddMesh(mesh); return this; } - public DocumentBuilder AddMeshes(IEnumerable gb) + public DocumentBuilder AddMeshes(IEnumerable meshes) { - Meshes.AddRange(gb); + foreach (var m in meshes) + { + AddMesh(m); + } return this; } - public DocumentBuilder AddInstances(IEnumerable ib) + public DocumentBuilder AddInstances(IEnumerable instances) { - Instances.AddRange(ib); + foreach (var m in instances) + { + Geometry.AddInstance(m); + } return this; } - public DocumentBuilder AddMaterials(IEnumerable mb) + + public DocumentBuilder AddInstance(Matrix4x4 transform, int meshIndex, int parentIndex = -1) { - Materials.AddRange(mb); + var instance = new Instance() + { + Transform = transform, + MeshIndex = meshIndex, + ParentIndex = parentIndex + }; + Geometry.AddInstance(instance); return this; } - public DocumentBuilder AddShapes(IEnumerable sb) + public DocumentBuilder AddMaterials(IEnumerable materials) { - Shapes.AddRange(sb); + foreach (var material in materials) + { + Geometry.AddMaterial(material); + } + return this; + } + + public DocumentBuilder AddShapes(IEnumerable shapes) + { + foreach 
(var shape in shapes) + { + Geometry.AddShape(shape); + } return this; } public DocumentBuilder AddAsset(INamedBuffer b) => AddAsset(b.Name, b.ToBytes()); - public DocumentBuilder AddInstance(Matrix4x4 transform, int meshIndex, int parentIndex = -1) - { - Instances.Add( - new Instance() - { - Transform = transform, - MeshIndex = meshIndex, - ParentIndex = parentIndex - } - ); - return this; - } public class StringLookupInfo { @@ -134,17 +144,17 @@ public List ComputeEntityTables(IReadOnlyDictionary ComputeEntityTables(IReadOnlyDictionary g.Vertices.Count)); - tb.AddDataColumn("int:FaceCount", Meshes.Select(g => g.Indices.Count / 3)); + tb.AddDataColumn("int:VertexCount", Geometry.GetVertexCounts()); + tb.AddDataColumn("int:FaceCount", Geometry.GetFaceCounts()); } // TODO: add bounding box information to the nodes @@ -180,12 +190,15 @@ public List ComputeEntityTables(IReadOnlyDictionary kv.Value.ToNamedBuffer(kv.Key)) as IEnumerable; Debug.Assert(assets != null, "Asset conversion to IEnumerable failed."); @@ -194,7 +207,18 @@ public void Write(Stream stream) var entityTables = ComputeEntityTables(stringLookupInfo.StringLookup); var stringTable = stringLookupInfo.StringTable; - Serializer.Serialize(stream, Header, assets, stringTable, entityTables, new BigG3dWriter(Meshes, Instances, Shapes, Materials, null, UseColors)); + var doc = new SerializableDocument() + { + Header = Header, + Assets = assets.ToArray(), + StringTable = stringTable.ToArray(), + EntityTables = entityTables + }; + var bfast = doc.ToBFast(); + + bfast.SetBFast(BufferNames.Geometry, Geometry.ToBFast()); + + return bfast; } } } diff --git a/src/cs/vim/Vim.Format.Core/DocumentBuilderExtensions.cs b/src/cs/vim/Vim.Format.Core/DocumentBuilderExtensions.cs index 81e1c2d8..77c0d209 100644 --- a/src/cs/vim/Vim.Format.Core/DocumentBuilderExtensions.cs +++ b/src/cs/vim/Vim.Format.Core/DocumentBuilderExtensions.cs @@ -1,6 +1,6 @@ using System.Collections.Generic; using System.Linq; -using Vim.BFast; +using Vim.BFastLib; using Vim.Format.Geometry; using Vim.G3d; using Vim.LinqArray; diff --git a/src/cs/vim/Vim.Format.Core/DocumentExtensions.cs b/src/cs/vim/Vim.Format.Core/DocumentExtensions.cs index 52e2c5e2..9d7a9cfd 100644 --- a/src/cs/vim/Vim.Format.Core/DocumentExtensions.cs +++ b/src/cs/vim/Vim.Format.Core/DocumentExtensions.cs @@ -1,7 +1,7 @@ using System; using System.Text.RegularExpressions; using Vim.LinqArray; -using Vim.BFast; +using Vim.BFastLib; namespace Vim.Format { diff --git a/src/cs/vim/Vim.Format.Core/EntityTable.cs b/src/cs/vim/Vim.Format.Core/EntityTable.cs index a9e0b050..d35097cc 100644 --- a/src/cs/vim/Vim.Format.Core/EntityTable.cs +++ b/src/cs/vim/Vim.Format.Core/EntityTable.cs @@ -1,7 +1,6 @@ using System; -using System.Diagnostics; -using Vim.BFast; using Vim.LinqArray; +using Vim.BFastLib; namespace Vim.Format { diff --git a/src/cs/vim/Vim.Format.Core/EntityTableBuilder.cs b/src/cs/vim/Vim.Format.Core/EntityTableBuilder.cs index bd1da0fd..08582b99 100644 --- a/src/cs/vim/Vim.Format.Core/EntityTableBuilder.cs +++ b/src/cs/vim/Vim.Format.Core/EntityTableBuilder.cs @@ -1,7 +1,7 @@ using System; using System.Collections.Generic; using System.Linq; -using Vim.BFast; +using Vim.BFastLib; namespace Vim.Format { diff --git a/src/cs/vim/Vim.Format.Core/G3dBuilder.cs b/src/cs/vim/Vim.Format.Core/G3dBuilder.cs new file mode 100644 index 00000000..258e1f52 --- /dev/null +++ b/src/cs/vim/Vim.Format.Core/G3dBuilder.cs @@ -0,0 +1,117 @@ +using System.Collections.Generic; +using System.Linq; +using Vim.G3d; +using 
Vim.BFastLib; +using static Vim.Format.DocumentBuilder; +using Vim.Math3d; + +namespace Vim.Format +{ + public class G3dBuilder + { + private readonly List _meshes = new List(); + private readonly List _instances = new List(); + private readonly List _shapes = new List(); + private readonly List _materials = new List(); + + public void AddMesh(SubdividedMesh mesh) + { + _meshes.Add(mesh); + } + + public void AddInstance(Instance instance) + { + _instances.Add(instance); + } + + public void AddShape(Shape shape) + { + _shapes.Add(shape); + } + + public void AddMaterial(Material material) + { + _materials.Add(material); + } + + public int InstanceCount => _instances.Count; + public int MeshCount => _meshes.Count; + public int MaterialCount => _materials.Count; + public int ShapeCount => _shapes.Count; + + public SubdividedMesh GetMesh(int index) => _meshes[index]; + public AABox GetBox(int meshIndex) + { + return AABox.Create(_meshes[meshIndex].Vertices); + } + + public int[] GetVertexCounts() + { + return _meshes.Select(m => m.Vertices.Count).ToArray(); + } + + public int[] GetFaceCounts() + { + return _meshes.Select(m => m.Indices.Count / 3).ToArray(); + } + + + public BFast ToBFast() + { + var bfast = new BFast(); + var totalSubmeshCount = _meshes.Select(s => s.SubmeshesIndexOffset.Count).Sum(); + + // Compute the Vertex offsets and index offsets + var meshVertexOffsets = new int[_meshes.Count]; + var meshIndexOffsets = new int[_meshes.Count]; + var submeshIndexOffsets = new int[totalSubmeshCount]; + var meshSubmeshOffset = new int[_meshes.Count]; + + var n = _meshes.Count; + + for (var i = 1; i < n; ++i) + { + meshVertexOffsets[i] = meshVertexOffsets[i - 1] + _meshes[i - 1].Vertices.Count; + meshIndexOffsets[i] = meshIndexOffsets[i - 1] + _meshes[i - 1].Indices.Count; + meshSubmeshOffset[i] = meshSubmeshOffset[i - 1] + _meshes[i - 1].SubmeshesIndexOffset.Count; + } + + var subIndex = 0; + var previousIndexCount = 0; + foreach (var geo in _meshes) + { + foreach (var sub in geo.SubmeshesIndexOffset) + { + submeshIndexOffsets[subIndex++] = sub + previousIndexCount; + } + previousIndexCount += geo.Indices.Count; + } + + // Compute the shape vertex offsets + var numShapes = _shapes.Count; + var shapeVertexOffsets = new int[numShapes]; + for (var i = 1; i < numShapes; ++i) + { + shapeVertexOffsets[i] = shapeVertexOffsets[i - 1] + _shapes[i - 1].Vertices.Count; + } + + bfast.SetEnumerable(CommonAttributes.Position, () => _meshes.SelectMany(m => m.Vertices)); + bfast.SetEnumerable(CommonAttributes.Index, () => _meshes.SelectMany(m => m.Indices)); + bfast.SetEnumerable(CommonAttributes.MeshSubmeshOffset, () => meshSubmeshOffset); + bfast.SetEnumerable(CommonAttributes.SubmeshIndexOffset, () => submeshIndexOffsets); + bfast.SetEnumerable(CommonAttributes.SubmeshMaterial, () => _meshes.SelectMany(s => s.SubmeshMaterials)); + bfast.SetEnumerable(CommonAttributes.InstanceFlags, () => _instances.Select(i => (ushort)i.InstanceFlags)); + bfast.SetEnumerable(CommonAttributes.InstanceParent, () => _instances.Select(i => i.ParentIndex)); + bfast.SetEnumerable(CommonAttributes.InstanceMesh, () => _instances.Select(i => i.MeshIndex)); + bfast.SetEnumerable(CommonAttributes.InstanceTransform, () => _instances.Select(i => i.Transform)); + bfast.SetEnumerable(CommonAttributes.ShapeVertex, () => _shapes.SelectMany(s => s.Vertices)); + bfast.SetEnumerable(CommonAttributes.ShapeVertexOffset, () => shapeVertexOffsets); + bfast.SetEnumerable(CommonAttributes.ShapeColor, () => _shapes.Select(s => s.Color)); + 
bfast.SetEnumerable(CommonAttributes.ShapeWidth, () => _shapes.Select(s => s.Width)); + bfast.SetEnumerable(CommonAttributes.MaterialColor, () => _materials.Select(i => i.Color)); + bfast.SetEnumerable(CommonAttributes.MaterialGlossiness, () => _materials.Select(i => i.Glossiness)); + bfast.SetEnumerable(CommonAttributes.MaterialSmoothness, () => _materials.Select(i => i.Smoothness)); + return bfast; + } + } +} diff --git a/src/cs/vim/Vim.Format.Core/Geometry/Serialization.cs b/src/cs/vim/Vim.Format.Core/Geometry/Serialization.cs deleted file mode 100644 index 8a7f15a0..00000000 --- a/src/cs/vim/Vim.Format.Core/Geometry/Serialization.cs +++ /dev/null @@ -1,22 +0,0 @@ -using Vim.G3d; - -namespace Vim.Format.Geometry -{ - public static class Serialization - { - public static IMesh ReadG3D(string filePath) - => G3D.Read(filePath).ToIMesh(); - - public static G3D ToG3d(this IMesh mesh) - => mesh is G3D r ? r : mesh.Attributes.ToG3d(); - - public static void WriteG3d(this IMesh mesh, string filePath) - => mesh.ToG3d().Write(filePath); - - public static void WriteObj(this IMesh mesh, string filePath) - => mesh.ToG3d().WriteObj(filePath); - - public static void WritePly(this IMesh mesh, string filePath) - => mesh.ToG3d().WritePly(filePath); - } -} diff --git a/src/cs/vim/Vim.Format.Core/Geometry/Validation.cs b/src/cs/vim/Vim.Format.Core/Geometry/Validation.cs index 5ccca143..abf23eef 100644 --- a/src/cs/vim/Vim.Format.Core/Geometry/Validation.cs +++ b/src/cs/vim/Vim.Format.Core/Geometry/Validation.cs @@ -1,8 +1,8 @@ using System; using System.Collections.Generic; -using Vim.BFast; using Vim.G3d; using Vim.LinqArray; +using Vim.BFastLib; namespace Vim.Format.Geometry { diff --git a/src/cs/vim/Vim.Format.Core/SerializableDocument.cs b/src/cs/vim/Vim.Format.Core/SerializableDocument.cs index 6b834739..3a9ae487 100644 --- a/src/cs/vim/Vim.Format.Core/SerializableDocument.cs +++ b/src/cs/vim/Vim.Format.Core/SerializableDocument.cs @@ -1,44 +1,14 @@ using System; using System.Collections.Generic; +using System.IO; using System.Linq; -using Vim.BFast; +using System.Text; +using Vim.BFastLib; +using Vim.BFastLib.Core; +using Vim.G3d; namespace Vim.Format { - /// - /// Tracks all of the data for a particular entity type in a conceptual table. - /// A column maybe a relation to another entity table (IndexColumn) - /// a data value stored as a double (DataColumn) or else - /// it is string data, stored as an index into the global lookup table (StringColumn). - /// - public class SerializableEntityTable - { - /// - /// Name of - /// - public string Name; - - /// - /// Relation to another entity table. For example surface to element. 
- /// - public List> IndexColumns = new List>(); - - /// - /// Data encoded as strings in the global string table - /// - public List> StringColumns = new List>(); - - /// - /// Numeric data encoded as byte, int, float, or doubles - /// - public List DataColumns = new List(); - - public IEnumerable ColumnNames - => IndexColumns.Select(c => c.Name) - .Concat(StringColumns.Select(c => c.Name)) - .Concat(DataColumns.Select(c => c.Name)); - } - /// /// Controls what parts of the VIM file are loaded /// @@ -88,5 +58,158 @@ public class SerializableDocument /// The originating file name (if provided) /// public string FileName; + public BFast ToBFast() + { + var bfast = new BFast(); + if(Header != null) + { + bfast.SetArray(BufferNames.Header, Header.ToBytes()); + } + + if(Assets != null) + { + var assets = new BFast(); + foreach (var asset in Assets) + { + assets.SetArray(asset.Name, asset.ToArray()); + } + bfast.SetBFast(BufferNames.Assets, assets); + } + + if(EntityTables != null) + { + var entities = new BFast(); + foreach (var entity in EntityTables) + { + entities.SetBFast(entity.Name, entity.ToBFast()); + } + bfast.SetBFast(BufferNames.Entities, entities); + } + + if (StringTable != null) + { + bfast.SetArray(BufferNames.Strings, BFastStrings.Pack(StringTable)); + } + + if(Geometry != null) + { + bfast.SetBFast(BufferNames.Geometry, Geometry?.ToBFast()); + } + + return bfast; + } + + public static SerializableDocument FromPath(string path, LoadOptions options = null) + { + var doc = BFastHelpers.Read(path, b => FromBFast(b)); + doc.FileName = path; + return doc; + } + + public static SerializableDocument FromBFast(BFast bfast, LoadOptions options = null) + { + var doc = new SerializableDocument(); + doc.Options = options ?? new LoadOptions(); + doc.Header = SerializableHeader.FromBytes(bfast.GetArray(BufferNames.Header)); + if (!doc.Options.SkipAssets) + { + var asset = bfast.GetBFast(BufferNames.Assets); + doc.Assets = asset.ToNamedBuffers().ToArray(); + } + var strs = bfast.GetArray(BufferNames.Strings); + doc.StringTable = Encoding.UTF8.GetString(strs).Split('\0'); + + if (!doc.Options.SkipGeometry) + { + var geo = bfast.GetBFast(BufferNames.Geometry); + doc.Geometry = G3D.Read(geo); + } + + var entities = bfast.GetBFast(BufferNames.Entities); + doc.EntityTables = GetEntityTables(entities, doc.Options.SchemaOnly).ToList(); + return doc; + } + + /// + /// Enumerates the SerializableEntityTables contained in the given entities buffer. + /// + private static IEnumerable GetEntityTables( + BFast bfast, + bool schemaOnly) + { + + foreach (var entry in bfast.Entries) + { + var b = bfast.GetBFast(entry); + var table = ReadEntityTable(b, schemaOnly); + table.Name = entry; + yield return table; + } + } + + /// + /// Returns a SerializableEntityTable based on the given buffer reader. + /// + public static SerializableEntityTable ReadEntityTable( + BFast bfast, + bool schemaOnly + ) + { + var et = new SerializableEntityTable(); + foreach (var entry in bfast.Entries) + { + var typePrefix = SerializableEntityTable.GetTypeFromName(entry); + + switch (typePrefix) + { + case VimConstants.IndexColumnNameTypePrefix: + { + //TODO: replace named buffer with arrays + var col = schemaOnly ? new int[0] : bfast.GetArray(entry); + et.IndexColumns.Add(col.ToNamedBuffer(entry)); + break; + } + case VimConstants.StringColumnNameTypePrefix: + { + var col = schemaOnly ? 
new int[0] : bfast.GetArray(entry); + et.StringColumns.Add(col.ToNamedBuffer(entry)); + break; + } + case VimConstants.IntColumnNameTypePrefix: + { + var col = schemaOnly ? new int[0] : bfast.GetArray(entry); + et.DataColumns.Add(col.ToNamedBuffer(entry)); + break; + } + case VimConstants.LongColumnNameTypePrefix: + { + var col = schemaOnly ? new long[0] : bfast.GetArray(entry); + et.DataColumns.Add(col.ToNamedBuffer(entry)); + break; + } + case VimConstants.DoubleColumnNameTypePrefix: + { + var col = schemaOnly ? new double[0] : bfast.GetArray(entry); + et.DataColumns.Add(col.ToNamedBuffer(entry)); + break; + } + case VimConstants.FloatColumnNameTypePrefix: + { + var col = schemaOnly ? new float[0] : bfast.GetArray(entry); + et.DataColumns.Add(col.ToNamedBuffer(entry)); + break; + } + case VimConstants.ByteColumnNameTypePrefix: + { + var col = schemaOnly ? new byte[0] : bfast.GetArray(entry); + et.DataColumns.Add(col.ToNamedBuffer(entry)); + break; + } + // For flexibility, we ignore the columns which do not contain a recognized prefix. + } + } + + return et; + } } } diff --git a/src/cs/vim/Vim.Format.Core/SerializableEntityTable.cs b/src/cs/vim/Vim.Format.Core/SerializableEntityTable.cs new file mode 100644 index 00000000..dd724bb9 --- /dev/null +++ b/src/cs/vim/Vim.Format.Core/SerializableEntityTable.cs @@ -0,0 +1,69 @@ +using System.Collections.Generic; +using System.Linq; +using System.Text.RegularExpressions; +using Vim.BFastLib; + +namespace Vim.Format +{ + /// + /// Tracks all of the data for a particular entity type in a conceptual table. + /// A column maybe a relation to another entity table (IndexColumn) + /// a data value stored as a double (DataColumn) or else + /// it is string data, stored as an index into the global lookup table (StringColumn). + /// + public class SerializableEntityTable + { + /// + /// Name of + /// + public string Name; + + /// + /// Relation to another entity table. For example surface to element. + /// + public List> IndexColumns = new List>(); + + /// + /// Data encoded as strings in the global string table + /// + public List> StringColumns = new List>(); + + /// + /// Numeric data encoded as byte, int, float, or doubles + /// + public List DataColumns = new List(); + + public IEnumerable ColumnNames + => IndexColumns.Select(c => c.Name) + .Concat(StringColumns.Select(c => c.Name)) + .Concat(DataColumns.Select(c => c.Name)); + + public IEnumerable AllColumns + => IndexColumns + .Concat(StringColumns) + .Concat(DataColumns); + + public static SerializableEntityTable FromBfast(string name, BFast bfast) + { + return null; + } + + private readonly static Regex TypePrefixRegex = new Regex(@"(\w+:).*"); + + public static string GetTypeFromName(string name) + { + var match = TypePrefixRegex.Match(name); + return match.Success ? 
match.Groups[1].Value : ""; + } + + public BFast ToBFast() + { + var bfast = new BFast(); + foreach (var col in AllColumns) + { + bfast.SetArray(col.Name, col.AsArray()); + } + return bfast; + } + } +} diff --git a/src/cs/vim/Vim.Format.Core/SerializableHeader.cs b/src/cs/vim/Vim.Format.Core/SerializableHeader.cs index c1c3a97c..c2e9010b 100644 --- a/src/cs/vim/Vim.Format.Core/SerializableHeader.cs +++ b/src/cs/vim/Vim.Format.Core/SerializableHeader.cs @@ -1,7 +1,10 @@ using System; using System.Collections.Generic; using System.Globalization; +using System.IO; using System.Linq; +using System.Text; +using Vim.BFastLib; using Vim.Util; namespace Vim.Format @@ -9,8 +12,8 @@ namespace Vim.Format public class SerializableHeader { public static readonly SerializableVersion CurrentVimFormatVersion = VimFormatVersion.Current; - - public const string FormatVersionField = "vim"; + + protected const string FormatVersionField = "vim"; public const string IdField = "id"; public const string RevisionField = "revision"; public const string GeneratorField = "generator"; @@ -93,6 +96,30 @@ public SerializableHeader(string generator, SerializableVersion schema, string v AddOptionalValues(values ?? new Dictionary(), versionString)) { } + public static SerializableHeader FromBytes(byte[] input) + { + return FromString(Encoding.UTF8.GetString(input)); + } + + + /// + /// Returns the VIM header from a vim file stream. + /// Will throw if the file is not a valid VIM. + /// + public static SerializableHeader FromStream(Stream stream) + { + var bfast = new BFast(stream); + var bytes = bfast.GetArray(BufferNames.Header); + if (bytes == null) return null; + return SerializableHeader.FromBytes(bytes); + } + + public static SerializableHeader FromPath(string path) + { + using (var file = new FileStream(path, FileMode.OpenOrCreate)) + return FromStream(file); + } + /// /// Parses the input. Throws exceptions if the input does not define a correctly formatted header. /// @@ -100,7 +127,7 @@ public SerializableHeader(string generator, SerializableVersion schema, string v /// /// /// - public static SerializableHeader Parse(string input) + public static SerializableHeader FromString(string input) { var lines = input.Split(EndOfLineChar) .Where(str => !string.IsNullOrEmpty(str)); @@ -119,7 +146,7 @@ public static SerializableHeader Parse(string input) { var tokens = line.Split(Separator); var numTokens = tokens.Length; - + // skip empty lines. 
if (numTokens == 0) continue; @@ -273,5 +300,10 @@ public static string CreateDummyPersistingId() /// public string PersistingId => CreatePersistingId(Id, Revision); + + public byte[] ToBytes() + { + return ToString().ToBytesUtf8(); + } } } diff --git a/src/cs/vim/Vim.Format.Core/Serializer.cs b/src/cs/vim/Vim.Format.Core/Serializer.cs deleted file mode 100644 index 63c9bdaf..00000000 --- a/src/cs/vim/Vim.Format.Core/Serializer.cs +++ /dev/null @@ -1,321 +0,0 @@ -using System.Collections.Generic; -using System.Linq; -using System; -using Vim.BFast; -using System.IO; -using System.Text; -using System.Text.RegularExpressions; -using Vim.G3d; -using Vim.Util; - -namespace Vim.Format -{ - public static class Serializer - { - public static List ToBuffers(this SerializableEntityTable table) - { - var r = new List(); - - r.AddRange(table.DataColumns); - r.AddRange(table.IndexColumns); - r.AddRange(table.StringColumns); - - return r; - } - - public static readonly Regex TypePrefixRegex = new Regex(@"(\w+:).*"); - - public static string GetTypePrefix(this string name) - { - var match = TypePrefixRegex.Match(name); - return match.Success ? match.Groups[1].Value : ""; - } - - /// - /// Returns the named buffer prefix, or null if no prefix was found. - /// - public static string GetTypePrefix(this INamedBuffer namedBuffer) - => namedBuffer.Name.GetTypePrefix(); - - /// - /// Returns a NamedBuffer representing to an entity table column. - /// If schemaOnly is enabled, the column is returned without any of its contained data; - /// this is useful for rapidly querying the schema of the entity table. - /// - public static NamedBuffer ReadEntityTableColumn( - this BFastBufferReader columnBufferReader, - bool schemaOnly) where T : unmanaged - { - var (name, size) = columnBufferReader; - - if (schemaOnly) - return new Buffer(Array.Empty()).ToNamedBuffer(name); - - return columnBufferReader - .Seek() - .ReadBufferFromNumberOfBytes(size) - .ToNamedBuffer(name); - } - - /// - /// Returns a SerializableEntityTable based on the given buffer reader. - /// - public static SerializableEntityTable ReadEntityTable( - this BFastBufferReader entityTableBufferReader, - bool schemaOnly) - { - var et = new SerializableEntityTable { Name = entityTableBufferReader.Name }; - - foreach (var colBr in entityTableBufferReader.Seek().GetBFastBufferReaders()) - { - var name = colBr.Name; - var typePrefix = name.GetTypePrefix(); - - switch (typePrefix) - { - case VimConstants.IndexColumnNameTypePrefix: - { - et.IndexColumns.Add(colBr.ReadEntityTableColumn(schemaOnly)); - break; - } - case VimConstants.StringColumnNameTypePrefix: - { - et.StringColumns.Add(colBr.ReadEntityTableColumn(schemaOnly)); - break; - } - case VimConstants.IntColumnNameTypePrefix: - { - et.DataColumns.Add(colBr.ReadEntityTableColumn(schemaOnly)); - break; - } - case VimConstants.LongColumnNameTypePrefix: - { - et.DataColumns.Add(colBr.ReadEntityTableColumn(schemaOnly)); - break; - } - case VimConstants.DoubleColumnNameTypePrefix: - { - et.DataColumns.Add(colBr.ReadEntityTableColumn(schemaOnly)); - break; - } - case VimConstants.FloatColumnNameTypePrefix: - { - et.DataColumns.Add(colBr.ReadEntityTableColumn(schemaOnly)); - break; - } - case VimConstants.ByteColumnNameTypePrefix: - { - et.DataColumns.Add(colBr.ReadEntityTableColumn(schemaOnly)); - break; - } - // For flexibility, we ignore the columns which do not contain a recognized prefix. - } - } - - return et; - } - - /// - /// Enumerates the SerializableEntityTables contained in the given entities buffer. 
- /// - public static IEnumerable EnumerateEntityTables( - this BFastBufferReader entitiesBufferReader, - bool schemaOnly) - { - foreach (var entityTableBufferReader in entitiesBufferReader.Seek().GetBFastBufferReaders()) - { - yield return entityTableBufferReader.ReadEntityTable(schemaOnly); - } - } - - /// - /// Enumerates the SerializableEntityTables contained in the given VIM file. - /// - public static IEnumerable EnumerateEntityTables(this FileInfo vimFileInfo, bool schemaOnly) - { - using (var stream = vimFileInfo.OpenRead()) - { - var entitiesBufferReader = stream.GetBFastBufferReaders(b => b.Name == BufferNames.Entities).FirstOrDefault(); - if (entitiesBufferReader == null) - yield break; - - foreach (var entityTable in entitiesBufferReader.EnumerateEntityTables(schemaOnly)) - { - yield return entityTable; - } - } - } - - public static BFastBuilder ToBFastBuilder(this IEnumerable entityTables) - { - var bldr = new BFastBuilder(); - foreach (var et in entityTables) - { - bldr.Add(et.Name, et.ToBuffers()); - } - return bldr; - } - - public static BFastBuilder ToBFastBuilder(this SerializableDocument doc) - => CreateBFastBuilder( - doc.Header, - doc.Assets, - doc.StringTable, - doc.EntityTables, - doc.Geometry.ToG3DWriter()); - - public static BFastBuilder CreateBFastBuilder( - SerializableHeader header, - IEnumerable assets, - IEnumerable stringTable, - IEnumerable entityTables, - IBFastComponent geometry) - { - var bfastBuilder = new BFastBuilder(); - bfastBuilder.Add(BufferNames.Header, header.ToString().ToBytesUtf8().ToBuffer()); - bfastBuilder.Add(BufferNames.Assets, assets ?? Array.Empty()); - bfastBuilder.Add(BufferNames.Entities, entityTables.ToBFastBuilder()); - bfastBuilder.Add(BufferNames.Strings, stringTable.PackStrings().ToBuffer()); - bfastBuilder.Add(BufferNames.Geometry, geometry); - return bfastBuilder; - } - - public static void Serialize( - Stream stream, - SerializableHeader header, - IEnumerable assets, - IEnumerable stringTable, - IEnumerable entityTables, - IBFastComponent geometry) - { - CreateBFastBuilder(header, assets, stringTable, entityTables, geometry).Write(stream); - } - - public static void Serialize(this SerializableDocument doc, Stream stream) - => doc.ToBFastBuilder().Write(stream); - - public static void Serialize(this SerializableDocument document, string filePath) - { - using (var stream = File.OpenWrite(filePath)) - document.Serialize(stream); - } - - public static SerializableHeader ToSerializableHeader(this byte[] bytes) - => SerializableHeader.Parse(Encoding.UTF8.GetString(bytes)); - - /// - /// Returns true if the SerializableHeader in the stream is successfully parsed. - /// - public static bool TryParseSerializableHeader(this Stream stream, out SerializableHeader header) - { - using (new SeekContext(stream)) - { - try - { - header = stream.ReadBFastBuffer(BufferNames.Header)?.Array.ToSerializableHeader(); - } - catch - { - header = null; - } - return header != null; - } - } - - /// - /// Returns true if the SerializableHeader in the given VIM file is successfully parsed. - /// - public static bool TryParseSerializableHeader(this FileInfo fileInfo, out SerializableHeader header) - { - using (var fs = fileInfo.OpenRead()) - { - return fs.TryParseSerializableHeader(out header); - } - } - - /// - /// Returns the VIM file's header schema version. Returns null if the header schema is not found. - /// - public static string GetSchemaVersion(this FileInfo fileInfo) - => fileInfo.TryParseSerializableHeader(out var header) - ? 
header.Schema?.ToString() - : null; - - public static void ReadBuffer(this SerializableDocument doc, BFastBufferReader bufferReader) - { - var (name, numBytes) = bufferReader; - var stream = bufferReader.Seek(); - - switch (name) - { - case BufferNames.Header: - { - doc.Header = stream.ReadArray((int)numBytes).ToSerializableHeader(); - break; - } - - case BufferNames.Assets: - { - if (doc.Options?.SkipAssets == true) - break; - - doc.Assets = stream.ReadBFast().ToArray(); - break; - } - - case BufferNames.Strings: - { - doc.StringTable = ReadStrings(stream, numBytes); - break; - } - - case BufferNames.Geometry: - { - if (doc.Options?.SkipGeometry == true) - break; - - doc.Geometry = G3D.Read(stream); - break; - } - - case BufferNames.Entities: - { - doc.EntityTables = - bufferReader - .EnumerateEntityTables(doc.Options?.SchemaOnly ?? false) - .ToList(); - break; - } - } - } - - public static string[] ReadStrings(Stream stream, long numBytes) - { - var stringBytes = stream.ReadArray((int)numBytes); - var joinedStringTable = Encoding.UTF8.GetString(stringBytes); - return joinedStringTable.Split('\0'); - } - - public static SerializableDocument Deserialize(Stream stream, LoadOptions loadOptions = null) - { - var doc = new SerializableDocument { Options = loadOptions }; - - foreach (var buffer in stream.GetBFastBufferReaders()) - { - doc.ReadBuffer(buffer); - } - - return doc; - } - - public static SerializableDocument Deserialize(string filePath, LoadOptions loadOptions = null) - { - using (var stream = File.OpenRead(filePath)) - { - var doc = Deserialize(stream, loadOptions); - doc.SetFileName(filePath); - return doc; - } - } - } -} diff --git a/src/cs/vim/Vim.Format.Core/Validation.cs b/src/cs/vim/Vim.Format.Core/Validation.cs index bb5287ab..5419ef54 100644 --- a/src/cs/vim/Vim.Format.Core/Validation.cs +++ b/src/cs/vim/Vim.Format.Core/Validation.cs @@ -1,6 +1,6 @@ using System; using System.Collections.Generic; -using Vim.BFast; +using Vim.BFastLib; using Vim.G3d; using Vim.LinqArray; diff --git a/src/cs/vim/Vim.Format.Core/Vim.Format.Core.csproj b/src/cs/vim/Vim.Format.Core/Vim.Format.Core.csproj index d845180e..382777aa 100644 --- a/src/cs/vim/Vim.Format.Core/Vim.Format.Core.csproj +++ b/src/cs/vim/Vim.Format.Core/Vim.Format.Core.csproj @@ -7,11 +7,11 @@ - - - - - + + + + + @@ -38,5 +38,11 @@ ArrayOps.tt + + + True + + + diff --git a/src/cs/vim/Vim.Format.Core/VimSchema.cs b/src/cs/vim/Vim.Format.Core/VimSchema.cs index 7790a17b..70b9e025 100644 --- a/src/cs/vim/Vim.Format.Core/VimSchema.cs +++ b/src/cs/vim/Vim.Format.Core/VimSchema.cs @@ -45,9 +45,12 @@ public EntityTableSchema AddEntityTableSchema(string entityTableName) } public static VimSchema Create(string filePath) - => Create(Serializer.Deserialize(filePath).ToDocument()); + => Create(SerializableDocument.FromPath(filePath, new LoadOptions() { SchemaOnly=true}) ); - public static VimSchema Create(Document doc) + public static VimSchema Create(SerializableDocument doc) + => Create(doc.ToDocument()); + + private static VimSchema Create(Document doc) { var vimSchema = new VimSchema(doc.Header); foreach (var entityTable in doc.EntityTables.Values.ToEnumerable()) diff --git a/src/cs/vim/Vim.Format.Tests/FormatTests.cs b/src/cs/vim/Vim.Format.Tests/FormatTests.cs index eb7cb0ed..544469ff 100644 --- a/src/cs/vim/Vim.Format.Tests/FormatTests.cs +++ b/src/cs/vim/Vim.Format.Tests/FormatTests.cs @@ -1,8 +1,8 @@ using System.Collections.Generic; using System.Linq; using NUnit.Framework; -using Vim.BFast; using Vim.LinqArray; +using 
Vim.BFastLib;
 
 namespace Vim.Format.Tests
 {
@@ -131,8 +131,8 @@ public static void AssertIsSupersetOf(EntityTable et1, EntityTable et2)
         /// 
         public static void AssertIsSuperSetOf(Document d1, Document d2, bool skipGeometryAndNodes = true)
         {
-            var schema1 = VimSchema.Create(d1);
-            var schema2 = VimSchema.Create(d2);
+            var schema1 = VimSchema.Create(d1._Document);
+            var schema2 = VimSchema.Create(d2._Document);
             Assert.IsTrue(VimSchema.IsSuperSetOf(schema1, schema2));
 
             var etKeys1 = d1.EntityTables.Keys;
@@ -154,8 +154,8 @@ public static void AssertIsSuperSetOf(Document d1, Document d2, bool skipGeometr
 
         public static void AssertEquals(Document d1, Document d2, bool skipGeometryAndNodes = false)
         {
-            var schema1 = VimSchema.Create(d1);
-            var schema2 = VimSchema.Create(d2);
+            var schema1 = VimSchema.Create(d1._Document);
+            var schema2 = VimSchema.Create(d2._Document);
             Assert.IsTrue(VimSchema.IsSame(schema1, schema2));
 
             var entityTables1 = d1.EntityTables.Keys.ToEnumerable().OrderBy(n => n).ToArray();
diff --git a/src/cs/vim/Vim.Format.Tests/Geometry/GeometryTests.cs b/src/cs/vim/Vim.Format.Tests/Geometry/GeometryTests.cs
index 6cf7f142..62cdac6f 100644
--- a/src/cs/vim/Vim.Format.Tests/Geometry/GeometryTests.cs
+++ b/src/cs/vim/Vim.Format.Tests/Geometry/GeometryTests.cs
@@ -298,8 +298,8 @@ public static void TriangleSerializationTest()
                 .Add(submeshMaterials.ToIArray().ToSubmeshMaterialAttribute())
                 .ToG3D();
 
-            var bfastBytes = g3d.WriteToBytes();
-            var readG3d = G3D.Read(bfastBytes);
+            var bfast = g3d.ToBFast();
+            var readG3d = G3D.Read(bfast);
             Assert.IsNotNull(readG3d);
 
             var mesh = readG3d.ToIMesh();
diff --git a/src/cs/vim/Vim.Format.Tests/SerializableDocumentTests.cs b/src/cs/vim/Vim.Format.Tests/SerializableDocumentTests.cs
new file mode 100644
index 00000000..497180d5
--- /dev/null
+++ b/src/cs/vim/Vim.Format.Tests/SerializableDocumentTests.cs
@@ -0,0 +1,22 @@
+using NUnit.Framework;
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using Vim.Util.Tests;
+
+namespace Vim.Format.Tests;
+
+
+
+[TestFixture]
+public static class SerializableDocumentTests
+{
+    [Test]
+    public static void TestEmpty()
+    {
+        var doc = new SerializableDocument();
+        Assert.DoesNotThrow(() => doc.ToBFast());
+    }
+}
+
diff --git a/src/cs/vim/Vim.Format.Tests/Vim.Format.Tests.csproj b/src/cs/vim/Vim.Format.Tests/Vim.Format.Tests.csproj
index 6a85c322..3f9308b8 100644
--- a/src/cs/vim/Vim.Format.Tests/Vim.Format.Tests.csproj
+++ b/src/cs/vim/Vim.Format.Tests/Vim.Format.Tests.csproj
@@ -16,9 +16,14 @@ - - - + + + + + + True + +
diff --git a/src/cs/vim/Vim.Format.Vimx.Conversion/Chunking.cs b/src/cs/vim/Vim.Format.Vimx.Conversion/Chunking.cs
new file mode 100644
index 00000000..2e42bc8d
--- /dev/null
+++ b/src/cs/vim/Vim.Format.Vimx.Conversion/Chunking.cs
@@ -0,0 +1,199 @@
+using System.Collections.Generic;
+using System.Linq;
+using Vim.LinqArray;
+using Vim.Math3d;
+using Vim.G3dNext;
+
+namespace Vim.Format.VimxLib.Conversion
+{
+    public static class Chunking
+    {
+        public static VimChunks CreateChunks(ChunksDescription description)
+        {
+            var chunks = new G3dChunk[description.ChunkMeshes.Count];
+            for (var i = 0; i < chunks.Length; i++)
+            {
+                var meshes = description.ChunkMeshes[i];
+                chunks[i] = CreateChunk(description.g3d, meshes);
+            }
+            return new VimChunks(description, chunks);
+        }
+
+        public static ChunksDescription ComputeChunks(MeshOrder
ordering)
+        {
+            // Target roughly 2 MB of raw geometry per chunk (about 0.5 MB once compressed).
+            const int ChunkSize = 2000000;
+
+            var meshChunks = new int[ordering.Meshes.Length];
+            var meshIndex = new int[ordering.Meshes.Length];
+
+            var chunks = new List<List<int>>();
+            var chunk = new List<int>();
+            var chunkSize = 0L;
+            for (var i = 0; i < ordering.Meshes.Length; i++)
+            {
+                var mesh = ordering.Meshes[i];
+                chunkSize += ordering.g3d.GetApproxSize(mesh);
+                if (chunkSize > ChunkSize && chunk.Count > 0)
+                {
+                    chunks.Add(chunk);
+                    chunk = new List<int>();
+                    chunkSize = 0;
+                }
+
+                meshChunks[i] = chunks.Count;
+                meshIndex[i] = chunk.Count;
+                chunk.Add(mesh);
+            }
+            if (chunk.Count > 0)
+            {
+                chunks.Add(chunk);
+            }
+
+            return new ChunksDescription(ordering, meshChunks, meshIndex, chunks);
+        }
+
+        public class SubmeshBuffer
+        {
+            int index = 0;
+            public int[] IndexOffsets;
+            public int[] VertexOffsets;
+            public int[] Materials;
+
+            public SubmeshBuffer(int count)
+            {
+                IndexOffsets = new int[count];
+                VertexOffsets = new int[count];
+                Materials = new int[count];
+            }
+
+            public void Add(int indexOffset, int vertexOffset, int material)
+            {
+                IndexOffsets[index] = indexOffset;
+                VertexOffsets[index] = vertexOffset;
+                Materials[index] = material;
+                index++;
+            }
+        }
+
+        public class PointsBuffer
+        {
+            public int[] indices;
+            public List<Vector3> vertices;
+
+            public int IndexCount { get; private set; } = 0;
+            public int VertexCount => vertices.Count;
+
+            public PointsBuffer(int indexCount, int vertexCount)
+            {
+                indices = new int[indexCount];
+                vertices = new List<Vector3>(vertexCount);
+            }
+
+            public void AddIndex(int index)
+            {
+                indices[IndexCount++] = index;
+            }
+
+            public void AddVertex(Vector3 vertex)
+            {
+                vertices.Add(vertex);
+            }
+        }
+
+        public static G3dChunk CreateChunk(G3dVim g3d, List<int> meshes)
+        {
+            var meshSubmeshOffsets = new int[meshes.Count + 1];
+            var meshOpaqueCounts = new int[meshes.Count];
+
+            var submeshCount = meshes.Sum(m => g3d.GetMeshSubmeshCount(m));
+            var submeshBuffer = new SubmeshBuffer(submeshCount);
+
+            var indexCount = meshes.Sum(m => g3d.GetMeshIndexCount(m));
+            var vertexCount = meshes.Sum(m => g3d.GetMeshVertexCount(m));
+            var pointsBuffer = new PointsBuffer(indexCount, vertexCount);
+
+            for (var i = 0; i < meshes.Count; i++)
+            {
+                var mesh = meshes[i];
+
+                var opaqueCount = AppendSubmeshes(
+                    g3d,
+                    mesh,
+                    false,
+                    submeshBuffer,
+                    pointsBuffer
+                );
+
+                var transparentCount = AppendSubmeshes(
+                    g3d,
+                    mesh,
+                    true,
+                    submeshBuffer,
+                    pointsBuffer
+                );
+                meshOpaqueCounts[i] = opaqueCount;
+                meshSubmeshOffsets[i + 1] = meshSubmeshOffsets[i] + opaqueCount + transparentCount;
+            }
+
+            return new G3dChunk(
+                meshSubmeshOffset: meshSubmeshOffsets,
+                meshOpaqueSubmeshCounts: meshOpaqueCounts,
+                submeshIndexOffsets: submeshBuffer.IndexOffsets,
+                submeshVertexOffsets: submeshBuffer.VertexOffsets,
+                submeshMaterials: submeshBuffer.Materials,
+                indices: pointsBuffer.indices,
+                positions: pointsBuffer.vertices.ToArray()
+            );
+        }
+
+        private static int AppendSubmeshes(
+            G3dVim g3d,
+            int mesh,
+            bool transparent,
+            SubmeshBuffer submeshBuffer,
+            PointsBuffer pointsBuffer
+        )
+        {
+            var subStart = g3d.GetMeshSubmeshStart(mesh);
+            var subEnd = g3d.GetMeshSubmeshEnd(mesh);
+            var count = 0;
+            for (var sub = subStart; sub < subEnd; sub++)
+            {
+                var currentMat = g3d.SubmeshMaterials[sub];
+                var color = currentMat > 0 ?
g3d.MaterialColors[currentMat] : Vector4.One; + var accept = color.W < 1 == transparent; + + if (!accept) continue; + count++; + submeshBuffer.Add(pointsBuffer.IndexCount, pointsBuffer.VertexCount, currentMat); + g3d.GetSubmesh(sub, pointsBuffer); + } + return count; + } + + private static void GetSubmesh(this G3dVim g3d, int submesh, PointsBuffer points) + { + var index = points.VertexCount; + var dict = new Dictionary(); + var start = g3d.GetSubmeshIndexStart(submesh); + var end = g3d.GetSubmeshIndexEnd(submesh); + + for (var i = start; i < end; i++) + { + var v = g3d.Indices[i]; + if (dict.ContainsKey(v)) + { + points.AddIndex(dict[v]); + } + else + { + points.AddIndex(index); + points.AddVertex(g3d.Positions[v]); + dict.Add(v, index); + index++; + } + } + } + } +} diff --git a/src/cs/vim/Vim.Format.Vimx.Conversion/Ordering.cs b/src/cs/vim/Vim.Format.Vimx.Conversion/Ordering.cs new file mode 100644 index 00000000..d079bf0b --- /dev/null +++ b/src/cs/vim/Vim.Format.Vimx.Conversion/Ordering.cs @@ -0,0 +1,78 @@ +using System; +using Vim.Format.ObjectModel; +using Vim.G3dNext; + +namespace Vim.Format.VimxLib.Conversion +{ + public static class Ordering + { + public static MeshOrder ComputeOrder(G3dVim g3d, DocumentModel bim) + { + var meshCount = g3d.GetMeshCount(); + var resultCount = 0; + for (var mesh = 0; mesh < meshCount; mesh++) + { + if (g3d.GetMeshInstances(mesh).Count > 0) resultCount++; + } + + var i = 0; + var order = new int[resultCount]; + var instanceCount = 0; + for (var mesh = 0; mesh < meshCount; mesh++) + { + var instances = g3d.GetMeshInstances(mesh); + if (instances.Count > 0) + { + instanceCount += instances.Count; + order[i++] = mesh; + } + } + Array.Sort(order, (a, b) => + { + var prioA = GetPriority(GetMeshName(g3d, bim, a)); + var prioB = GetPriority(GetMeshName(g3d, bim, b)); + return prioA - prioB; + }); + return new MeshOrder(g3d, order, instanceCount); + } + + static string GetMeshName(G3dVim g3d, DocumentModel bim, int mesh) + { + var node = g3d.GetMeshInstances(mesh)[0]; + + if (node < 0 || node >= bim.NodeElementIndex.Count) return ""; + var element = bim.NodeElementIndex[node]; + + if (element < 0 || element >= bim.ElementCategoryIndex.Count) return ""; + var category = bim.ElementCategoryIndex[element]; + + if (category < 0 || category >= bim.CategoryName.Count) return ""; + var name = bim.CategoryName[category]; + + return name; + } + + static int GetPriority(string value) + { + if (string.IsNullOrWhiteSpace(value)) return 0; + + if (value.Contains("Topography")) return 110; + if (value.Contains("Floor")) return 100; + if (value.Contains("Slab")) return 100; + if (value.Contains("Ceiling")) return 90; + if (value.Contains("Roof")) return 90; + + if (value.Contains("Curtain")) return 80; + if (value.Contains("Wall")) return 80; + if (value.Contains("Window")) return 70; + + if (value.Contains("Column")) return 60; + if (value.Contains("Structural")) return 60; + + if (value.Contains("Stair")) return 40; + if (value.Contains("Doors")) return 30; + + return 1; + } + } +} diff --git a/src/cs/vim/Vim.Format.Vimx.Conversion/Vim.Format.Vimx.Conversion.csproj b/src/cs/vim/Vim.Format.Vimx.Conversion/Vim.Format.Vimx.Conversion.csproj new file mode 100644 index 00000000..4e1f53c1 --- /dev/null +++ b/src/cs/vim/Vim.Format.Vimx.Conversion/Vim.Format.Vimx.Conversion.csproj @@ -0,0 +1,20 @@ + + + + netstandard2.0 + + + + + + + + + + + True + + + + + diff --git a/src/cs/vim/Vim.Format.Vimx.Conversion/VimxConverter.cs 
b/src/cs/vim/Vim.Format.Vimx.Conversion/VimxConverter.cs new file mode 100644 index 00000000..31d3e45f --- /dev/null +++ b/src/cs/vim/Vim.Format.Vimx.Conversion/VimxConverter.cs @@ -0,0 +1,227 @@ +using System.Linq; +using Vim.LinqArray; +using Vim.BFastLib; +using Vim.G3dNext; +using Vim.Format.ObjectModel; +using Vim.G3dNext; +using System.Collections.Generic; +using Vim.Math3d; +using System.Diagnostics; +using System; + +namespace Vim.Format.VimxLib.Conversion +{ + public static class VimxConverter + { + public static Vimx FromVimPath(string vimPath) + { + var vim = VimScene.LoadVim(vimPath, new LoadOptions() + { + SkipAssets = true, + SkipGeometry = true, + }); + + var g3d = G3dVim.FromVim(vimPath); + var vimx = ConvertFromVim(g3d, vim.DocumentModel); + + return vimx; + } + + public static Vimx ConvertFromVim(G3dVim g3d, DocumentModel bim) + { + // Split input Vim into chunks. + var chunks = CreateChunks(g3d, bim); + + // Compute the scene definition from chunks. + var scene = CreateScene(chunks, bim); + + // Materials are reused from input g3d. + var materials = new G3dMaterials(g3d); + + var header = VimxHeader.CreateDefault(); + + return new Vimx(header, MetaHeader.Default, scene, materials, chunks.Chunks); + } + + public static VimChunks CreateChunks(G3dVim g3d, DocumentModel bim) + { + // First compute a desirable presentation order. + var ordering = Ordering.ComputeOrder(g3d, bim); + + // Groups meshes up to a certain size. + var groups = Chunking.ComputeChunks(ordering); + // Append and merge geometry from g3d to create the chunks. + + var chunks = Chunking.CreateChunks(groups); + + return chunks; + } + + public static G3dScene CreateScene(VimChunks chunks, DocumentModel bim) + { + var nodeElements = bim.NodeElementIndex.ToArray(); + + var instanceCount = chunks.InstanceCount; + var instanceNodes = new int[instanceCount]; + var instanceMeshes = new int[instanceCount]; + var instanceGroups = new int[instanceCount]; + var instanceTransforms = new Matrix4x4[instanceCount]; + var instanceFlags = new ushort[instanceCount]; + var instanceTags = new long[instanceCount]; + var instanceMins = new Vector3[instanceCount]; + var instanceMaxs = new Vector3[instanceCount]; + + var meshCount = chunks.MeshCount; + var indexCounts = new int[meshCount]; + var vertexCounts = new int[meshCount]; + var opaqueIndexCounts = new int[meshCount]; + var opaqueVertexCounts = new int[meshCount]; + + var sw = Stopwatch.StartNew(); + sw.Stop(); + var instance = 0; + for (var i = 0; i < meshCount; i++) + { + var meshChunk = chunks.MeshChunks[i]; + var meshIndex = chunks.MeshIndex[i]; + var instances = chunks.GetInstances(i); + var chunk = chunks.Chunks[meshChunk]; + for (var j = 0; j < instances.Count; j++) + { + var node = instances[j]; + var element = nodeElements[node]; + var transform = chunks.g3d.InstanceTransforms[node]; + + // geometry + instanceMeshes[instance] = i; + instanceTransforms[instance] = transform; + + // bounding box + sw.Start(); + var box = chunk.GetAABox(meshIndex, transform); + sw.Stop(); + instanceMins[instance] = box.Min; + instanceMaxs[instance] = box.Max; + + // bim + instanceNodes[instance] = node; + instanceGroups[instance] = element; + instanceTags[instance] = bim.ElementId.SafeGet(element, -1); + + instance++; + } + + // geometry counts + indexCounts[i] = chunk.GetMeshIndexCount(meshIndex, MeshSection.All); + vertexCounts[i] = chunk.GetMeshVertexCount(meshIndex, MeshSection.All); + opaqueIndexCounts[i] = chunk.GetMeshIndexCount(meshIndex, MeshSection.Opaque); + 
opaqueVertexCounts[i] = chunk.GetMeshVertexCount(meshIndex, MeshSection.Opaque);
+            }
+
+            // InstanceFlags is not always present.
+            if (chunks.g3d.InstanceFlags != null)
+            {
+                for (var i = 0; i < instanceNodes.Length; i++)
+                {
+                    var node = instanceNodes[i];
+                    instanceFlags[i] = chunks.g3d.InstanceFlags[node];
+                }
+            }
+
+            var scene = new G3dScene(
+
+                chunkCount: new[] { chunks.ChunkCount },
+                instanceMeshes: instanceMeshes,
+                instanceTransformData: instanceTransforms,
+                instanceNodes: instanceNodes,
+                instanceFlags: instanceFlags,
+                instanceGroups: instanceGroups,
+                instanceMaxs: instanceMaxs,
+                instanceMins: instanceMins,
+                instanceTags: instanceTags,
+                meshChunks: chunks.MeshChunks,
+                meshChunkIndices: chunks.MeshIndex,
+                meshIndexCounts: indexCounts,
+                meshVertexCounts: vertexCounts,
+                meshOpaqueIndexCounts: opaqueIndexCounts,
+                meshOpaqueVertexCounts: opaqueVertexCounts
+            );
+            return scene;
+        }
+    }
+
+    /// <summary>
+    /// Initial step of vim->vimx conversion.
+    /// </summary>
+    public class MeshOrder
+    {
+        public readonly G3dVim g3d;
+        public readonly int[] Meshes;
+        public readonly int InstanceCount;
+
+        public MeshOrder(G3dVim g3d, int[] meshes, int instanceCount)
+        {
+            this.g3d = g3d;
+            Meshes = meshes;
+            InstanceCount = instanceCount;
+        }
+    }
+
+    /// <summary>
+    /// Describes how the meshes from the vim will be grouped in the vimx.
+    /// </summary>
+    public class ChunksDescription
+    {
+        public readonly G3dVim g3d;
+        public readonly int[] Meshes;
+        public readonly int[] MeshChunks;
+        public readonly int[] MeshIndex;
+        public readonly List<List<int>> ChunkMeshes;
+        public readonly int InstanceCount;
+
+        public ChunksDescription(MeshOrder ordering, int[] meshChunks, int[] meshIndex, List<List<int>> chunkMeshes)
+        {
+            g3d = ordering.g3d;
+            Meshes = ordering.Meshes;
+            InstanceCount = ordering.InstanceCount;
+            MeshChunks = meshChunks;
+            MeshIndex = meshIndex;
+            ChunkMeshes = chunkMeshes;
+        }
+    }
+
+    ///
+    /// Resulting Chunks of the vim->vimx conversion.
+ /// + public class VimChunks + { + public readonly G3dVim g3d; + public readonly int[] Meshes; + public readonly int[] MeshChunks; + public readonly int[] MeshIndex; + public readonly List> ChunkMeshes; + public readonly G3dChunk[] Chunks; + public readonly int InstanceCount; + + public VimChunks(ChunksDescription description, G3dChunk[] chunks) + { + g3d = description.g3d; + Meshes = description.Meshes; + MeshChunks = description.MeshChunks; + MeshIndex = description.MeshIndex; + ChunkMeshes = description.ChunkMeshes; + InstanceCount = description.InstanceCount; + Chunks = chunks; + } + + public int ChunkCount => Chunks.Length; + + public int MeshCount => Meshes.Length; + + public IReadOnlyList GetInstances(int meshIndex) + { + var m = Meshes[meshIndex]; + return g3d.GetMeshInstances(m); + } + } +} diff --git a/src/cs/vim/Vim.Format.Vimx/Vim.Format.Vimx.csproj b/src/cs/vim/Vim.Format.Vimx/Vim.Format.Vimx.csproj new file mode 100644 index 00000000..78fe3dbf --- /dev/null +++ b/src/cs/vim/Vim.Format.Vimx/Vim.Format.Vimx.csproj @@ -0,0 +1,24 @@ + + + + netstandard2.0 + + + + + + + + + + + + + + + True + + + + + diff --git a/src/cs/vim/Vim.Format.Vimx/Vimx.cs b/src/cs/vim/Vim.Format.Vimx/Vimx.cs new file mode 100644 index 00000000..db973d04 --- /dev/null +++ b/src/cs/vim/Vim.Format.Vimx/Vimx.cs @@ -0,0 +1,77 @@ +using System.Linq; +using Vim.BFastLib; +using Vim.G3dNext; + +namespace Vim.Format.VimxLib +{ + public static class BufferNames + { + public const string Header = "header"; + public const string Meta = "meta"; + public const string Scene = "scene"; + public const string Materials = "materials"; + public static string Chunk(int index) => $"chunk_{index}"; + } + + public static class BufferCompression + { + public const bool Scene = true; + public const bool Materials = true; + public const bool Chunks = true; + } + + public class Vimx + { + public readonly SerializableHeader Header; + public readonly MetaHeader Meta; + public readonly G3dScene Scene; + public readonly G3dMaterials Materials; + public readonly G3dChunk[] Chunks; + + public Vimx(SerializableHeader header, MetaHeader meta, G3dScene scene, G3dMaterials materials, G3dChunk[] chunks) + { + Meta = meta; + Header = header; + Scene = scene; + Materials = materials; + Chunks = chunks; + } + + public Vimx(BFast bfast) + { + Header = VimxHeader.FromBytes(bfast.GetArray(BufferNames.Header)); + + Scene = new G3dScene( + bfast.GetBFast(BufferNames.Scene, BufferCompression.Scene) + ); + + Materials = new G3dMaterials( + bfast.GetBFast(BufferNames.Materials, BufferCompression.Materials) + ); + + Chunks = Enumerable.Range(0, Scene.GetChunksCount()) + .Select(c => bfast.GetBFast(BufferNames.Chunk(c), BufferCompression.Chunks)) + .Select(b => new G3dChunk(b)) + .ToArray(); + } + + public static Vimx FromPath(string path) + => BFastHelpers.Read(path, b => new Vimx(b)); + + public BFast ToBFast() + { + var bfast = new BFast(); + bfast.SetArray(BufferNames.Meta, MetaHeader.Default.ToBytes()); + bfast.SetArray(BufferNames.Header, Header.ToVimxBytes()); + bfast.SetBFast(BufferNames.Scene, Scene.ToBFast(), BufferCompression.Scene); + bfast.SetBFast(BufferNames.Materials, Materials.ToBFast(), BufferCompression.Materials); + + for(var i =0; i < Chunks.Length; i++) + { + bfast.SetBFast(BufferNames.Chunk(i), Chunks[i].ToBFast(), BufferCompression.Chunks); + } + + return bfast; + } + } +} diff --git a/src/cs/vim/Vim.Format.Vimx/VimxHeader.cs b/src/cs/vim/Vim.Format.Vimx/VimxHeader.cs new file mode 100644 index 00000000..a513ae82 --- /dev/null +++ 
b/src/cs/vim/Vim.Format.Vimx/VimxHeader.cs @@ -0,0 +1,35 @@ +using System.Text; +using Vim.Util; + +namespace Vim.Format.VimxLib +{ + public static class VimxHeader + { + static SerializableVersion CurrentVersion = SerializableVersion.Parse("0.1.0"); + public static SerializableHeader FromString(string header) + { + return SerializableHeader.FromString(header.Replace("vimx", "vim")); + } + public static SerializableHeader FromBytes(byte[] header) + { + return FromString(Encoding.UTF8.GetString(header)); + } + + public static string ToVimxString(this SerializableHeader header) + { + return header.ToString().Replace("vim", "vimx"); + } + + public static byte[] ToVimxBytes(this SerializableHeader header) + { + return header.ToVimxString().ToBytesUtf8(); + } + + public static SerializableHeader CreateDefault() + { + return new SerializableHeader( + "Vim.Vimx.Converter", new SerializableVersion(), CurrentVersion.ToString() + ); + } + } +} diff --git a/src/cs/vim/Vim.Format/SceneBuilder/Validation.cs b/src/cs/vim/Vim.Format/SceneBuilder/Validation.cs index 55dc2b94..66365bb4 100644 --- a/src/cs/vim/Vim.Format/SceneBuilder/Validation.cs +++ b/src/cs/vim/Vim.Format/SceneBuilder/Validation.cs @@ -3,7 +3,7 @@ using System.Linq; using System.Reflection; using System.Threading.Tasks; -using Vim.BFast; +using Vim.BFastLib; using Vim.Format.Geometry; using Vim.Format.ObjectModel; using Vim.Util; @@ -153,12 +153,12 @@ public static void ValidateEquality(this DocumentBuilder db, VimScene vim) g.SubmeshMaterials.ToList() )).ToList(); - for (var i = 0; i < db.Meshes.Count; ++i) + for (var i = 0; i < db.Geometry.MeshCount; ++i) { - if (!db.Meshes[i].IsEquivalentTo(vimGeoBuilders[i])) + if (!db.Geometry.GetMesh(i).IsEquivalentTo(vimGeoBuilders[i])) throw new VimValidationException($"{nameof(DocumentBuilder)} mesh {i} is not equivalent to {nameof(VimScene)} mesh {i}"); - if (!db.Meshes[i].ToIMesh().GeometryEquals(vim.Meshes[i])) + if (!db.Geometry.GetMesh(i).ToIMesh().GeometryEquals(vim.Meshes[i])) throw new VimValidationException($"{nameof(DocumentBuilder)} mesh {i} geometry is not equal to {nameof(VimScene)} mesh {i}"); } diff --git a/src/cs/vim/Vim.Format/SceneBuilder/VimScene.cs b/src/cs/vim/Vim.Format/SceneBuilder/VimScene.cs index 0d744a39..a502aaa8 100644 --- a/src/cs/vim/Vim.Format/SceneBuilder/VimScene.cs +++ b/src/cs/vim/Vim.Format/SceneBuilder/VimScene.cs @@ -1,5 +1,4 @@ using System; -using System.Collections.Generic; using System.IO; using System.Linq; using System.Threading.Tasks; @@ -10,8 +9,8 @@ using Vim.G3d; using Vim.LinqArray; using Vim.Math3d; - using IVimSceneProgress = System.IProgress<(string, double)>; +using Vim.BFastLib; namespace Vim { @@ -22,14 +21,38 @@ namespace Vim /// public class VimScene : IScene { + /// + /// Returns the VIM file's header schema version. Returns null if the Vim has no header. + /// + public static SerializableVersion GetSchemaVersion(string path) + { + return GetHeader(path)?.Schema; + } + + /// + /// Returns the VIM file's header. Returns null if the Vim has no header. + /// + public static SerializableHeader GetHeader(string path) + { + return SerializableHeader.FromPath(path); + } + + /// + /// Returns the VIM file's header. Returns null if the Vim has no header. 
+ /// + public static SerializableHeader GetHeader(Stream stream) + { + return SerializableHeader.FromStream(stream); + } + public static VimScene LoadVim(string f, LoadOptions loadOptions, IVimSceneProgress progress = null, bool inParallel = false, int vimIndex = 0) - => new VimScene(Serializer.Deserialize(f, loadOptions), progress, inParallel, vimIndex); + => new VimScene(SerializableDocument.FromPath(f, loadOptions), progress, inParallel, vimIndex); public static VimScene LoadVim(string f, IVimSceneProgress progress = null, bool skipGeometry = false, bool skipAssets = false, bool skipNodes = false, bool inParallel = false) => LoadVim(f, new LoadOptions { SkipGeometry = skipGeometry, SkipAssets = skipAssets}, progress, inParallel); public static VimScene LoadVim(Stream stream, LoadOptions loadOptions, IVimSceneProgress progress = null, bool inParallel = false) - => new VimScene(Serializer.Deserialize(stream, loadOptions), progress, inParallel); + => new VimScene(SerializableDocument.FromBFast(new BFast(stream), loadOptions), progress, inParallel); public static VimScene LoadVim(Stream stream, IVimSceneProgress progress = null, bool skipGeometry = false, bool skipAssets = false, bool skipNodes = false, bool inParallel = false) => LoadVim(stream, new LoadOptions { SkipGeometry = skipGeometry, SkipAssets = skipAssets}, progress, inParallel); @@ -165,6 +188,11 @@ private IStep[] GetInitSteps(bool inParallel) private void CreateMeshes(bool inParallel) { + if (_SerializableDocument.Geometry == null) + { + return; + } + var srcGeo = _SerializableDocument.Geometry; var tmp = srcGeo?.Meshes.Select(ToIMesh); Meshes = (tmp == null) @@ -176,18 +204,33 @@ private void CreateMeshes(bool inParallel) private void CreateShapes(bool inParallel) { + if (_SerializableDocument.Geometry == null) + { + return; + } + var r = _SerializableDocument.Geometry.Shapes.Select((s, i) => new VimShape(this, i)); VimShapes = inParallel ? r.EvaluateInParallel() : r.Evaluate(); } private void CreateScene(bool inParallel) { + if (_SerializableDocument.Geometry == null) + { + return; + } + VimNodes = CreateVimSceneNodes(this, _SerializableDocument.Geometry, inParallel); Nodes = VimNodes.Select(n => n as ISceneNode); } private void CreateMaterials(bool inParallel) { + if (_SerializableDocument.Geometry == null) + { + return; + } + var query = _SerializableDocument.Geometry.Materials.Select(m => new VimMaterial(m) as IMaterial); Materials = inParallel ? 
query.EvaluateInParallel() : query.Evaluate(); } @@ -203,7 +246,7 @@ public static IArray CreateVimSceneNodes(VimScene scene, G3D g3d, } public void Save(string filePath) - => _SerializableDocument.Serialize(filePath); + => _SerializableDocument.ToBFast().Write(filePath); public string FileName => _SerializableDocument.FileName; diff --git a/src/cs/vim/Vim.Format/SceneBuilder/VimSceneHelpers.cs b/src/cs/vim/Vim.Format/SceneBuilder/VimSceneHelpers.cs index 3c6e5ceb..025a2c6c 100644 --- a/src/cs/vim/Vim.Format/SceneBuilder/VimSceneHelpers.cs +++ b/src/cs/vim/Vim.Format/SceneBuilder/VimSceneHelpers.cs @@ -173,6 +173,6 @@ public static ElementInfo GetElementInfo(this VimScene vim, Element element) => vim.DocumentModel.GetElementInfo(element); public static VimSchema GetVimSchema(this VimScene vim) - => VimSchema.Create(vim.Document); + => VimSchema.Create(vim._SerializableDocument); } } diff --git a/src/cs/vim/Vim.Format/Vim.Format.csproj b/src/cs/vim/Vim.Format/Vim.Format.csproj index ca5691f7..8fa44b7a 100644 --- a/src/cs/vim/Vim.Format/Vim.Format.csproj +++ b/src/cs/vim/Vim.Format/Vim.Format.csproj @@ -18,12 +18,11 @@ - - - - - - + + + + +