diff --git a/MBBSEmu.Tests/Assets/MBBSEMU.DB b/MBBSEmu.Tests/Assets/MBBSEMU.DB index 6c8108fc..1a98813b 100644 Binary files a/MBBSEmu.Tests/Assets/MBBSEMU.DB and b/MBBSEmu.Tests/Assets/MBBSEMU.DB differ diff --git a/MBBSEmu.Tests/Btrieve/BtrieveFileProcessor_Tests.cs b/MBBSEmu.Tests/Btrieve/BtrieveFileProcessor_Tests.cs index 2a6fba4c..f5c375eb 100644 --- a/MBBSEmu.Tests/Btrieve/BtrieveFileProcessor_Tests.cs +++ b/MBBSEmu.Tests/Btrieve/BtrieveFileProcessor_Tests.cs @@ -16,11 +16,11 @@ public class BtrieveFileProcessor_Tests : TestBase, IDisposable { const int RECORD_LENGTH = 74; - private const string EXPECTED_METADATA_T_SQL = "CREATE TABLE metadata_t(record_length INTEGER NOT NULL, physical_record_length INTEGER NOT NULL, page_length INTEGER NOT NULL, variable_length_records INTEGER NOT NULL)"; + private const string EXPECTED_METADATA_T_SQL = "CREATE TABLE metadata_t(record_length INTEGER NOT NULL, physical_record_length INTEGER NOT NULL, page_length INTEGER NOT NULL, variable_length_records INTEGER NOT NULL, version INTEGER NOT NULL, acs_name STRING, acs BLOB)"; private const string EXPECTED_KEYS_T_SQL = "CREATE TABLE keys_t(id INTEGER PRIMARY KEY, number INTEGER NOT NULL, segment INTEGER NOT NULL, attributes INTEGER NOT NULL, data_type INTEGER NOT NULL, offset INTEGER NOT NULL, length INTEGER NOT NULL, null_value INTEGER NOT NULL, UNIQUE(number, segment))"; private const string EXPECTED_DATA_T_SQL = "CREATE TABLE data_t(id INTEGER PRIMARY KEY, data BLOB NOT NULL, key_0 TEXT, key_1 INTEGER NOT NULL UNIQUE, key_2 TEXT, key_3 INTEGER NOT NULL UNIQUE)"; - private static readonly Random RANDOM = new Random(); + private static readonly Random RANDOM = new Random(Guid.NewGuid().GetHashCode()); protected readonly string _modulePath = Path.Join(Path.GetTempPath(), $"mbbsemu{RANDOM.Next()}"); @@ -1062,6 +1062,111 @@ public void SeekByKeyLessOrEqualFound() btrieve.PerformOperation(1, BitConverter.GetBytes(-2_000_000_000), EnumBtrieveOperationCodes.QueryLessOrEqual).Should().BeFalse(); } + private const int ACS_RECORD_LENGTH = 128; + + private static byte[] CreateRecord(string username) + { + var usernameBytes = Encoding.ASCII.GetBytes(username); + var record = new byte[ACS_RECORD_LENGTH]; + + Array.Fill(record, (byte)0xFF); + + Array.Copy(usernameBytes, 0, record, 2, usernameBytes.Length); + record[2 + usernameBytes.Length] = 0; + + return record; + } + + private static BtrieveFile CreateACSBtrieveFile() + { + // all upper case acs + var acs = new byte[256]; + for (var i = 0; i < acs.Length; ++i) + acs[i] = (byte)i; + for (var i = 'a'; i <= 'z'; ++i) + acs[i] = (byte)char.ToUpper(i); + + var btrieveFile = new BtrieveFile() + { + RecordLength = ACS_RECORD_LENGTH, + FileName = "TEST.DAT", + RecordCount = 0, + ACSName = "ALLCAPS", + ACS = acs, + }; + + var key = new BtrieveKey(); + key.Segments.Add(new BtrieveKeyDefinition() + { + Number = 0, + Attributes = EnumKeyAttributeMask.NumberedACS | EnumKeyAttributeMask.UseExtendedDataType, + DataType = EnumKeyDataType.Zstring, + Offset = 2, + Length = 30, + Segment = false, + ACS = acs + }); + + btrieveFile.Keys.Add(0, key); + + btrieveFile.Records.Add(new BtrieveRecord(1, CreateRecord("Sysop"))); + btrieveFile.Records.Add(new BtrieveRecord(2, CreateRecord("Paladine"))); + btrieveFile.Records.Add(new BtrieveRecord(3, CreateRecord("Testing"))); + return btrieveFile; + } + + [Fact] + public void CreatesACS() + { + var btrieve = new BtrieveFileProcessor(); + var connectionString = "Data Source=acs.db;Mode=Memory"; + + 
btrieve.CreateSqliteDBWithConnectionString(connectionString, CreateACSBtrieveFile()); + + btrieve.GetRecordCount().Should().Be(3); + btrieve.GetKeyLength(0).Should().Be(30); + + // validate acs + using var cmd = new SqliteCommand("SELECT acs_name, acs, LENGTH(acs) FROM metadata_t", btrieve.Connection); + using var reader = cmd.ExecuteReader(); + reader.Read().Should().BeTrue(); + reader.GetString(0).Should().Be("ALLCAPS"); + reader.GetInt32(2).Should().Be(256); + } + + [Fact] + public void ACSSeekByKey() + { + var btrieve = new BtrieveFileProcessor(); + var connectionString = "Data Source=acs.db;Mode=Memory"; + + btrieve.CreateSqliteDBWithConnectionString(connectionString, CreateACSBtrieveFile()); + + var key = new byte[30]; + Array.Copy(Encoding.ASCII.GetBytes("paladine"), key, 8); + + btrieve.PerformOperation(0, key, EnumBtrieveOperationCodes.QueryEqual).Should().BeTrue(); + var record = btrieve.GetRecord(btrieve.Position); + record.Should().NotBeNull(); + record.Offset.Should().Be(2); + // we searched by paladine but the actual data is Paladine + record.Data[2].Should().Be((byte)'P'); + } + + [Fact] + public void ACSInsertDuplicateFails() + { + var btrieve = new BtrieveFileProcessor(); + var connectionString = "Data Source=acs.db;Mode=Memory"; + + btrieve.CreateSqliteDBWithConnectionString(connectionString, CreateACSBtrieveFile()); + + var record = new byte[ACS_RECORD_LENGTH]; + Array.Copy(Encoding.ASCII.GetBytes("paladine"), 0, record, 2, 8); + + btrieve.Insert(record).Should().Be(0); + } + /// Creates a copy of data shrunk by cutOff bytes at the end private static byte[] MakeSmaller(byte[] data, int cutOff) { diff --git a/MBBSEmu.Tests/Btrieve/BtrieveFile_Tests.cs b/MBBSEmu.Tests/Btrieve/BtrieveFile_Tests.cs index 908546e4..fa54760b 100644 --- a/MBBSEmu.Tests/Btrieve/BtrieveFile_Tests.cs +++ b/MBBSEmu.Tests/Btrieve/BtrieveFile_Tests.cs @@ -12,7 +12,7 @@ namespace MBBSEmu.Tests.Btrieve { public class BtrieveFile_Tests : TestBase, IDisposable { - private static readonly Random RANDOM = new Random(); + private static readonly Random RANDOM = new Random(Guid.NewGuid().GetHashCode()); private readonly string[] _btrieveFiles = { "MBBSEMU.DAT" }; diff --git a/MBBSEmu.Tests/Btrieve/BtrieveKey_Tests.cs b/MBBSEmu.Tests/Btrieve/BtrieveKey_Tests.cs index 73a5eb77..3f8c521d 100644 --- a/MBBSEmu.Tests/Btrieve/BtrieveKey_Tests.cs +++ b/MBBSEmu.Tests/Btrieve/BtrieveKey_Tests.cs @@ -1,12 +1,8 @@ using FluentAssertions; using MBBSEmu.Btrieve; using MBBSEmu.Btrieve.Enums; -using MBBSEmu.DependencyInjection; -using MBBSEmu.Resources; -using NLog; using System; using System.Collections.Generic; -using System.IO; using System.Text; using Xunit; @@ -48,9 +44,9 @@ private static byte[] CreateNullPaddedString(string s, int length) [InlineData(8, EnumKeyDataType.OldBinary, 0xF8F7F6F5F4F3F2F1)] public void NegativeIntegerTypeConversion(ushort length, EnumKeyDataType type, object expected) { - var key = new BtrieveKey() { - Segments = new List() { - new BtrieveKeyDefinition() { + var key = new BtrieveKey { + Segments = new List { + new BtrieveKeyDefinition { Number = 0, Offset = 0, Length = length, @@ -89,9 +85,9 @@ public void NegativeIntegerTypeConversion(ushort length, EnumKeyDataType type, o [InlineData(8, EnumKeyDataType.OldBinary, 0x807060504030201)] public void PositiveIntegerTypeConversion(ushort length, EnumKeyDataType type, object expected) { - var key = new BtrieveKey() { - Segments = new List() { - new BtrieveKeyDefinition() { + var key = new BtrieveKey { + Segments = new List { + new 
BtrieveKeyDefinition { Number = 0, Offset = 0, Length = length, @@ -134,9 +130,9 @@ public void PositiveIntegerTypeConversion(ushort length, EnumKeyDataType type, o [InlineData(1, EnumKeyDataType.OldAscii, "T")] public void StringTypeConversion(ushort length, EnumKeyDataType type, object expected) { - var key = new BtrieveKey() { - Segments = new List() { - new BtrieveKeyDefinition() { + var key = new BtrieveKey { + Segments = new List { + new BtrieveKeyDefinition { Number = 0, Offset = 0, Length = length, @@ -155,9 +151,9 @@ public void StringTypeConversion(ushort length, EnumKeyDataType type, object exp [Fact] public void CompositeKeyConcatentation() { - var key = new BtrieveKey() { - Segments = new List() { - new BtrieveKeyDefinition() { + var key = new BtrieveKey { + Segments = new List { + new BtrieveKeyDefinition { Number = 0, Offset = 2, Length = 8, @@ -167,7 +163,7 @@ public void CompositeKeyConcatentation() SegmentIndex = 0, NullValue = 0, }, - new BtrieveKeyDefinition() { + new BtrieveKeyDefinition { Number = 0, Offset = 20, Length = 4, @@ -201,9 +197,9 @@ public void CompositeKeyConcatentation() [InlineData(EnumKeyDataType.OldBinary)] public void NullValueString(EnumKeyDataType dataType) { - var key = new BtrieveKey() { - Segments = new List() { - new BtrieveKeyDefinition() { + var key = new BtrieveKey { + Segments = new List { + new BtrieveKeyDefinition { Number = 0, Offset = 2, Length = 8, @@ -223,5 +219,137 @@ public void NullValueString(EnumKeyDataType dataType) var sqlLiteObject = key.ExtractKeyInRecordToSqliteObject(record); sqlLiteObject.Should().Be(DBNull.Value); } + + private static byte[] UpperACS() + { + var acs = new byte[256]; + for (var i = 0; i < acs.Length; ++i) + acs[i] = (byte)i; + // make uppercase + for (var i = 'a'; i <= 'z'; ++i) + acs[i] = (byte)char.ToUpper(i); + + return acs; + } + + [Fact] + public void ACSReplacement_SingleKey() + { + var acs = UpperACS(); + + var key = new BtrieveKey { + Segments = new List { + new BtrieveKeyDefinition { + Number = 0, + Offset = 2, + Length = 8, + DataType = EnumKeyDataType.Zstring, + Attributes = EnumKeyAttributeMask.UseExtendedDataType | EnumKeyAttributeMask.NumberedACS, + ACS = acs, + Segment = true, + SegmentIndex = 0, + NullValue = 0, + }} + }; + + var record = new byte[128]; + Array.Fill(record, (byte)0xFF, 0, record.Length); + // first segment is all spaces i.e. 
null + record[2] = (byte)'a'; + record[3] = (byte)'B'; + record[4] = (byte)'t'; + record[5] = (byte)'Z'; + record[6] = (byte)'%'; + record[7] = 0; + + var sqlLiteObject = key.ExtractKeyInRecordToSqliteObject(record); + sqlLiteObject.Should().Be("ABTZ%"); + } + + [Theory] + [InlineData("b", "B")] + [InlineData("test", "TEST")] + [InlineData("1234567890", "1234567890")] + [InlineData("test1234test4321", "TEST1234TEST4321")] + public void ACSReplacement_MultiKey(string input, string expected) + { + var acs = UpperACS(); + + var key = new BtrieveKey { + Segments = new List { + new BtrieveKeyDefinition { + Number = 0, + Offset = 2, + Length = 8, + DataType = EnumKeyDataType.Zstring, + Attributes = EnumKeyAttributeMask.UseExtendedDataType | EnumKeyAttributeMask.NumberedACS, + ACS = acs, + Segment = true, + SegmentIndex = 0, + NullValue = 0, + }, + new BtrieveKeyDefinition { + Number = 0, + Offset = 10, + Length = 8, + DataType = EnumKeyDataType.Zstring, + Attributes = EnumKeyAttributeMask.UseExtendedDataType | EnumKeyAttributeMask.NumberedACS, + ACS = acs, + Segment = false, + SegmentIndex = 1, + NullValue = 0, + }} + }; + + var record = new byte[128]; + Array.Fill(record, (byte)0x0, 0, record.Length); + Array.Copy(Encoding.ASCII.GetBytes(input), 0, record, 2, input.Length); + + var sqlLiteObject = Encoding.ASCII.GetString((byte[])key.ExtractKeyInRecordToSqliteObject(record)).TrimEnd((char)0); + sqlLiteObject.Should().Be(expected); + } + + [Theory] + [InlineData("b", "b")] + [InlineData("test", "test")] + [InlineData("1234567890", "1234567890")] + [InlineData("test1234test4321", "test1234TEST4321")] + public void ACSReplacement_ACSOnlyOnSecondKey(string input, string expected) + { + var acs = UpperACS(); + + var key = new BtrieveKey() + { + Segments = new List { + new BtrieveKeyDefinition { + Number = 0, + Offset = 2, + Length = 8, + DataType = EnumKeyDataType.Zstring, + Attributes = EnumKeyAttributeMask.UseExtendedDataType, + Segment = true, + SegmentIndex = 0, + NullValue = 0, + }, + new BtrieveKeyDefinition { + Number = 0, + Offset = 10, + Length = 8, + DataType = EnumKeyDataType.Zstring, + Attributes = EnumKeyAttributeMask.UseExtendedDataType | EnumKeyAttributeMask.NumberedACS, + ACS = acs, + Segment = false, + SegmentIndex = 1, + NullValue = 0, + }} + }; + + var record = new byte[128]; + Array.Fill(record, (byte)0x0, 0, record.Length); + Array.Copy(Encoding.ASCII.GetBytes(input), 0, record, 2, input.Length); + + var sqlLiteObject = Encoding.ASCII.GetString((byte[])key.ExtractKeyInRecordToSqliteObject(record)).TrimEnd((char)0); + sqlLiteObject.Should().Be(expected); + } } } diff --git a/MBBSEmu.Tests/Btrieve/BtrieveUtil_Test.cs b/MBBSEmu.Tests/Btrieve/BtrieveUtil_Test.cs new file mode 100644 index 00000000..bd6021ca --- /dev/null +++ b/MBBSEmu.Tests/Btrieve/BtrieveUtil_Test.cs @@ -0,0 +1,66 @@ +using FluentAssertions; +using MBBSEmu.Btrieve; +using System.IO; +using System; +using System.Text; +using Xunit; + +namespace MBBSEmu.Tests.Btrieve +{ + public class BtrieveUtil_Tests : TestBase + { + private class FakeMemoryStream : MemoryStream + { + internal int MaxBytesToRead { get; set; } + + public override int Read(byte[] buffer, int offset, int count) + { + return base.Read(buffer, offset, Math.Min(count, MaxBytesToRead)); + } + } + + [Fact] + public void FakeReadsChunksProperly() + { + using var stream = new FakeMemoryStream() { MaxBytesToRead = 2 }; + var bytes = Encoding.ASCII.GetBytes("123456789"); + + stream.Write(bytes); + stream.Flush(); + stream.Seek(0, SeekOrigin.Begin); + + var 
buffer = new byte[9]; + stream.Read(buffer, 0, 8).Should().Be(2); + stream.Read(buffer, 2, 8).Should().Be(2); + stream.Read(buffer, 4, 8).Should().Be(2); + stream.Read(buffer, 6, 8).Should().Be(2); + stream.Read(buffer, 8, 1).Should().Be(1); + + Encoding.ASCII.GetString(buffer).Should().Be("123456789"); + } + + [Theory] + [InlineData("testing", 32)] + [InlineData("testing", 8)] + [InlineData("testing", 7)] + [InlineData("testing", 6)] + [InlineData("testing", 5)] + [InlineData("testing", 4)] + [InlineData("testing", 3)] + [InlineData("testing", 2)] + [InlineData("testing", 1)] + public void ReadEntireStream(string input, int maxBytesToRead) + { + using var stream = new FakeMemoryStream() { MaxBytesToRead = maxBytesToRead }; + var bytes = Encoding.ASCII.GetBytes(input); + + stream.Write(bytes); + stream.Flush(); + stream.Seek(0, SeekOrigin.Begin); + + var read = BtrieveUtil.ReadEntireStream(stream); + + read.AsSpan().SequenceEqual(bytes).Should().BeTrue(); + } + } +} diff --git a/MBBSEmu.Tests/ExportedModules/ExportedModuleTestBase.cs b/MBBSEmu.Tests/ExportedModules/ExportedModuleTestBase.cs index c4c8f701..ceb96be6 100644 --- a/MBBSEmu.Tests/ExportedModules/ExportedModuleTestBase.cs +++ b/MBBSEmu.Tests/ExportedModules/ExportedModuleTestBase.cs @@ -1,11 +1,7 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Text; using MBBSEmu.CPU; using MBBSEmu.Database.Repositories.Account; using MBBSEmu.Database.Repositories.AccountKey; +using MBBSEmu.Database.Session; using MBBSEmu.DependencyInjection; using MBBSEmu.Disassembler.Artifacts; using MBBSEmu.IO; @@ -13,12 +9,18 @@ using MBBSEmu.Module; using MBBSEmu.Session; using NLog; -using NLog.LayoutRenderers.Wrappers; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; +using System; namespace MBBSEmu.Tests.ExportedModules { public abstract class ExportedModuleTestBase : TestBase { + private static readonly Random RANDOM = new Random(Guid.NewGuid().GetHashCode()); + protected const ushort STACK_SEGMENT = 0; protected const ushort CODE_SEGMENT = 1; @@ -29,7 +31,7 @@ public abstract class ExportedModuleTestBase : TestBase protected HostProcess.ExportedModules.Majorbbs majorbbs; protected HostProcess.ExportedModules.Galgsbl galgsbl; protected PointerDictionary testSessions; - protected ServiceResolver _serviceResolver = new ServiceResolver(); + protected ServiceResolver _serviceResolver = new ServiceResolver(SessionBuilder.ForTest($"MBBSDb_{RANDOM.Next()}")); protected ExportedModuleTestBase() : this(Path.GetTempPath()) {} @@ -114,7 +116,7 @@ protected virtual void Reset() _serviceResolver.GetService(), mbbsModule, testSessions); - + } /// diff --git a/MBBSEmu/Assets/ALLCAPS b/MBBSEmu/Assets/ALLCAPS new file mode 100644 index 00000000..8813f6c7 Binary files /dev/null and b/MBBSEmu/Assets/ALLCAPS differ diff --git a/MBBSEmu/Assets/BBSGEN.DB b/MBBSEmu/Assets/BBSGEN.DB index 8e43f665..8448b3db 100644 Binary files a/MBBSEmu/Assets/BBSGEN.DB and b/MBBSEmu/Assets/BBSGEN.DB differ diff --git a/MBBSEmu/Assets/BBSUSR.DB b/MBBSEmu/Assets/BBSUSR.DB index e2378863..8448b3db 100644 Binary files a/MBBSEmu/Assets/BBSUSR.DB and b/MBBSEmu/Assets/BBSUSR.DB differ diff --git a/MBBSEmu/Assets/bbsgen.db.sql b/MBBSEmu/Assets/bbsgen.db.sql index bb1bde18..ed3763de 100644 --- a/MBBSEmu/Assets/bbsgen.db.sql +++ b/MBBSEmu/Assets/bbsgen.db.sql @@ -1,6 +1,9 @@ -CREATE TABLE metadata_t(record_length INTEGER NOT NULL, physical_record_length INTEGER NOT NULL, page_length INTEGER NOT NULL, 
variable_length_records INTEGER NOT NULL); +CREATE TABLE metadata_t(record_length INTEGER NOT NULL, physical_record_length INTEGER NOT NULL, page_length INTEGER NOT NULL, variable_length_records INTEGER NOT NULL, version INTEGER NOT NULL, acs_name STRING, acs BLOB); -INSERT INTO metadata_t(record_length, physical_record_length, page_length, variable_length_records) VALUES(55, 75, 1024, 1); +INSERT INTO metadata_t(record_length, physical_record_length, page_length, variable_length_records, version, acs_name, acs) +VALUES(55, 75, 1024, 1, 1, 'ALLCAPS', +x'000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f604142434445464748494a4b4c4d4e4f505152535455565758595a7b7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d8d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff' +); CREATE TABLE keys_t(id INTEGER PRIMARY KEY, number INTEGER NOT NULL, segment INTEGER NOT NULL, attributes INTEGER NOT NULL, data_type INTEGER NOT NULL, offset INTEGER NOT NULL, length INTEGER NOT NULL, null_value INTEGER NOT NULL, UNIQUE (number, segment)); diff --git a/MBBSEmu/Assets/bbsusr.db.sql b/MBBSEmu/Assets/bbsusr.db.sql index c2b5763f..ab76b85c 100644 --- a/MBBSEmu/Assets/bbsusr.db.sql +++ b/MBBSEmu/Assets/bbsusr.db.sql @@ -1,6 +1,9 @@ -CREATE TABLE metadata_t(record_length INTEGER NOT NULL, physical_record_length INTEGER NOT NULL, page_length INTEGER NOT NULL, variable_length_records INTEGER NOT NULL); +CREATE TABLE metadata_t(record_length INTEGER NOT NULL, physical_record_length INTEGER NOT NULL, page_length INTEGER NOT NULL, variable_length_records INTEGER NOT NULL, version INTEGER NOT NULL); -INSERT INTO metadata_t(record_length, physical_record_length, page_length, variable_length_records) VALUES(338, 338, 1024, 0); +INSERT INTO metadata_t(record_length, physical_record_length, page_length, variable_length_records, version, acs_name, acs) +VALUES(338, 338, 1024, 0, 1, 'ALLCAPS', +x'000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f404142434445464748494a4b4c4d4e4f505152535455565758595a5b5c5d5e5f604142434445464748494a4b4c4d4e4f505152535455565758595a7b7c7d7e7f808182838485868788898a8b8c8d8e8f909192939495969798999a9b9c9d9e9fa0a1a2a3a4a5a6a7a8a9aaabacadaeafb0b1b2b3b4b5b6b7b8b9babbbcbdbebfc0c1c2c3c4c5c6c7c8c9cacbcccdcecfd0d1d2d3d4d5d6d7d8d9dadbdcdddedfe0e1e2e3e4e5e6e7e8e9eaebecedeeeff0f1f2f3f4f5f6f7f8f9fafbfcfdfeff' +); CREATE TABLE keys_t(id INTEGER PRIMARY KEY, number INTEGER NOT NULL, segment INTEGER NOT NULL, attributes INTEGER NOT NULL, data_type INTEGER NOT NULL, offset INTEGER NOT NULL, length INTEGER NOT NULL, null_value INTEGER NOT NULL, UNIQUE (number, segment)); diff --git a/MBBSEmu/Btrieve/BtrieveFile.cs b/MBBSEmu/Btrieve/BtrieveFile.cs index f2c08c08..87400b9b 100644 --- a/MBBSEmu/Btrieve/BtrieveFile.cs +++ b/MBBSEmu/Btrieve/BtrieveFile.cs @@ -4,6 +4,8 @@ using System; using System.Collections.Generic; using System.IO; +using System.Linq; +using System.Text; namespace MBBSEmu.Btrieve { @@ -138,10 +140,21 @@ public ushort KeyCount } } + /// + /// The ACS table name used by the database. null if there is none + /// + /// + public string ACSName { get; set; } + + /// + /// The ACS table for the database. 
null if there is none + /// + public byte[] ACS { get; set; } + /// /// Raw contents of Btrieve File /// - private byte[] Data { get; set; } + public byte[] Data { get; set; } /// /// Btrieve Records @@ -153,8 +166,6 @@ public ushort KeyCount /// public Dictionary Keys { get; set; } - private ILogger _logger; - /// /// Log Key is an internal value used by the Btrieve engine to track unique /// records -- it adds 8 bytes to the end of the record that's not accounted for @@ -202,8 +213,6 @@ public void LoadFile(ILogger logger, string path, string fileName) public void LoadFile(ILogger logger, string fullPath) { - _logger = logger; - var fileName = Path.GetFileName(fullPath); var fileData = File.ReadAllBytes(fullPath); @@ -219,7 +228,9 @@ public void LoadFile(ILogger logger, string fullPath) #endif DeletedRecordOffsets = GetRecordPointerList(GetRecordPointer(0x10)); + LoadACS(logger); LoadBtrieveKeyDefinitions(logger); + //Only load records if there are any present if (RecordCount > 0) LoadBtrieveRecords(logger); @@ -342,6 +353,14 @@ private void LoadBtrieveKeyDefinitions(ILogger logger) NullValue = data[0x1D], }; + if (keyDefinition.RequiresACS) + { + if (ACS == null) + throw new ArgumentException($"Key {keyDefinition.Number} requires ACS, but none was read. This database is likely corrupt: {FileName}"); + + keyDefinition.ACS = ACS; + } + //If it's a segmented key, don't increment so the next key gets added to the same ordinal as an additional segment if (!keyDefinition.Segment) currentKeyNumber++; @@ -383,6 +402,24 @@ private void LoadBtrieveKeyDefinitions(ILogger logger) } } + private readonly byte[] ACS_PAGE_HEADER = { 0, 0, 1, 0, 0, 0, 0xAC }; + + private bool LoadACS(ILogger logger) + { + // ACS page immediately follows FCR (the first) + var offset = PageLength; + var data = Data.AsSpan().Slice(offset); + + var pageHeader = data.Slice(0, ACS_PAGE_HEADER.Length); + if (!pageHeader.SequenceEqual(ACS_PAGE_HEADER)) + return false; + + // read the acs data + ACSName = Encoding.ASCII.GetString(data.Slice(7, 9)).TrimEnd((char)0); + ACS = data.Slice(0xF, 256).ToArray(); + return true; + } + /// /// Loads Btrieve Records from Data Pages /// diff --git a/MBBSEmu/Btrieve/BtrieveFileProcessor.cs b/MBBSEmu/Btrieve/BtrieveFileProcessor.cs index d8359aea..f7341188 100644 --- a/MBBSEmu/Btrieve/BtrieveFileProcessor.cs +++ b/MBBSEmu/Btrieve/BtrieveFileProcessor.cs @@ -26,6 +26,9 @@ namespace MBBSEmu.Btrieve /// public class BtrieveFileProcessor : IDisposable { + const int CURRENT_VERSION = 1; + const int ACS_LENGTH = 256; + protected static readonly Logger _logger = LogManager.GetCurrentClassLogger(typeof(CustomLogger)); private readonly IFileUtility _fileFinder; @@ -53,10 +56,12 @@ public class BtrieveFileProcessor : IDisposable /// public bool VariableLengthRecords { get; set; } + public byte[] ACS { get; set; } + /// /// The active connection to the Sqlite database. 
/// - private SqliteConnection _connection; + public SqliteConnection Connection; /// /// An offset -> BtrieveRecord cache used to speed up record access by reducing Sqlite @@ -94,9 +99,9 @@ public void Dispose() PreviousQuery?.Dispose(); - _connection.Close(); - _connection.Dispose(); - _connection = null; + Connection.Close(); + Connection.Dispose(); + Connection = null; _cache.Clear(); } @@ -162,18 +167,19 @@ private void LoadSqlite(string fullPath) DataSource = fullPath, }.ToString(); - _connection = new SqliteConnection(connectionString); - _connection.Open(); + Connection = new SqliteConnection(connectionString); + Connection.Open(); LoadSqliteMetadata(); + LoadSqliteKeys(); } /// - /// Loads metadata from the loaded Sqlite table, such as RecordLength and all the key metadata. + /// Loads metadata from the loaded Sqlite database into memory /// private void LoadSqliteMetadata() { - using (var cmd = new SqliteCommand("SELECT record_length, page_length, variable_length_records FROM metadata_t;", _connection)) + using (var cmd = new SqliteCommand("SELECT record_length, page_length, variable_length_records, version, acs FROM metadata_t;", Connection)) { using var reader = cmd.ExecuteReader(); try @@ -184,17 +190,36 @@ private void LoadSqliteMetadata() RecordLength = reader.GetInt32(0); PageLength = reader.GetInt32(1); VariableLengthRecords = reader.GetBoolean(2); + + var version = reader.GetInt32(3); + if (version != CURRENT_VERSION) + throw new ArgumentException($"Unable to load database, expected version {CURRENT_VERSION}, found {version}. Please delete the database so it can be regenerated"); + + if (!reader.IsDBNull(4)) + { + using var acsStream = reader.GetStream(4); + if (acsStream.Length != ACS_LENGTH) + throw new ArgumentException($"The ACS length is not 256 in the database. This is corrupt. {FullPath}"); + + ACS = BtrieveUtil.ReadEntireStream(acsStream); + } } finally { reader.Close(); } } + } + /// + /// Loads all the key data from keys_t into memory + /// + private void LoadSqliteKeys() + { using (var cmd = new SqliteCommand( "SELECT number, segment, attributes, data_type, offset, length FROM keys_t ORDER BY number, segment;", - _connection)) + Connection)) { using var reader = cmd.ExecuteReader(); while (reader.Read()) @@ -211,6 +236,14 @@ private void LoadSqliteMetadata() Length = (ushort)reader.GetInt32(5), }; + if (btrieveKeyDefinition.RequiresACS) + { + if (ACS == null) + throw new ArgumentException($"Key {btrieveKeyDefinition.Number} requires ACS, but none was read. This database is likely corrupt: {FullPath}"); + + btrieveKeyDefinition.ACS = ACS; + } + if (!Keys.TryGetValue(btrieveKeyDefinition.Number, out var btrieveKey)) { btrieveKey = new BtrieveKey(); @@ -233,7 +266,7 @@ private void LoadSqliteMetadata() /// public int GetRecordCount() { - using var stmt = new SqliteCommand("SELECT COUNT(*) FROM data_t;", _connection); + using var stmt = new SqliteCommand("SELECT COUNT(*) FROM data_t;", Connection); return (int)(long)stmt.ExecuteScalar(); } @@ -243,7 +276,7 @@ public int GetRecordCount() private bool StepFirst() { // TODO consider grabbing data at the same time and prepopulating the cache - using var cmd = new SqliteCommand("SELECT id FROM data_t ORDER BY id LIMIT 1", _connection); + using var cmd = new SqliteCommand("SELECT id FROM data_t ORDER BY id LIMIT 1", Connection); using var reader = cmd.ExecuteReader(); Position = reader.Read() ? 
(uint)reader.GetInt32(0) : 0; @@ -257,7 +290,7 @@ private bool StepFirst() private bool StepNext() { // TODO consider grabbing data at the same time and prepopulating the cache - using var cmd = new SqliteCommand($"SELECT id FROM data_t WHERE id > {Position} ORDER BY id LIMIT 1;", _connection); + using var cmd = new SqliteCommand($"SELECT id FROM data_t WHERE id > {Position} ORDER BY id LIMIT 1;", Connection); using var reader = cmd.ExecuteReader(); try { @@ -279,7 +312,7 @@ private bool StepNext() private bool StepPrevious() { using var cmd = new SqliteCommand($"SELECT id FROM data_t WHERE id < {Position} ORDER BY id DESC LIMIT 1;", - _connection); + Connection); using var reader = cmd.ExecuteReader(); try { @@ -301,7 +334,7 @@ private bool StepPrevious() private bool StepLast() { // TODO consider grabbing data at the same time and prepopulating the cache - using var cmd = new SqliteCommand("SELECT id FROM data_t ORDER BY id DESC LIMIT 1;", _connection); + using var cmd = new SqliteCommand("SELECT id FROM data_t ORDER BY id DESC LIMIT 1;", Connection); using var reader = cmd.ExecuteReader(); Position = reader.Read() ? (uint)reader.GetInt32(0) : 0; @@ -325,7 +358,7 @@ public BtrieveRecord GetRecord(uint offset) if (_cache.TryGetValue(offset, out var record)) return record; - using var cmd = new SqliteCommand($"SELECT data FROM data_t WHERE id = {offset}", _connection); + using var cmd = new SqliteCommand($"SELECT data FROM data_t WHERE id = {offset}", Connection); using var reader = cmd.ExecuteReader(System.Data.CommandBehavior.KeyInfo); try { @@ -333,8 +366,7 @@ public BtrieveRecord GetRecord(uint offset) return null; using var stream = reader.GetStream(0); - var data = new byte[stream.Length]; - stream.Read(data, 0, data.Length); + var data = BtrieveUtil.ReadEntireStream(stream); record = new BtrieveRecord(offset, data); _cache[offset] = record; @@ -366,10 +398,10 @@ public bool Update(uint offset, byte[] recordData) recordData = ForceSize(recordData, RecordLength); } - using var transaction = _connection.BeginTransaction(); + using var transaction = Connection.BeginTransaction(); using var updateCmd = new SqliteCommand() { - Connection = _connection, + Connection = Connection, Transaction = transaction }; @@ -440,7 +472,7 @@ private bool InsertAutoincrementValues(SqliteTransaction transaction, byte[] rec sb.Append(string.Join(", ", zeroedKeys)); sb.Append(" FROM data_t;"); - using var cmd = new SqliteCommand(sb.ToString(), _connection, transaction); + using var cmd = new SqliteCommand(sb.ToString(), Connection, transaction); using var reader = cmd.ExecuteReader(); try { @@ -486,8 +518,8 @@ public uint Insert(byte[] record) record = ForceSize(record, RecordLength); } - using var transaction = _connection.BeginTransaction(); - using var insertCmd = new SqliteCommand() { Connection = _connection }; + using var transaction = Connection.BeginTransaction(); + using var insertCmd = new SqliteCommand() { Connection = Connection }; insertCmd.Transaction = transaction; if (!InsertAutoincrementValues(transaction, record)) @@ -522,7 +554,7 @@ record = ForceSize(record, RecordLength); return 0; } - var lastInsertRowId = Convert.ToUInt32(new SqliteCommand("SELECT last_insert_rowid()", _connection, transaction).ExecuteScalar()); + var lastInsertRowId = Convert.ToUInt32(new SqliteCommand("SELECT last_insert_rowid()", Connection, transaction).ExecuteScalar()); transaction.Commit(); @@ -540,7 +572,7 @@ public bool Delete() { _cache.Remove(Position); - using var cmd = new SqliteCommand($"DELETE FROM 
data_t WHERE id={Position};", _connection); + using var cmd = new SqliteCommand($"DELETE FROM data_t WHERE id={Position};", Connection); return cmd.ExecuteNonQuery() > 0; } @@ -553,7 +585,7 @@ public bool DeleteAll() Position = 0; - using var cmd = new SqliteCommand($"DELETE FROM data_t;", _connection); + using var cmd = new SqliteCommand($"DELETE FROM data_t;", Connection); return cmd.ExecuteNonQuery() > 0; } @@ -666,12 +698,6 @@ public ReadOnlySpan GetRecordByOffset(uint absolutePosition) /// private bool GetByKeyPrevious(BtrieveQuery query) => NextReader(query, BtrieveQuery.CursorDirection.Reverse); - /// - /// Calls NextReader with an always-true query matcher. - /// - private bool NextReader(BtrieveQuery query, BtrieveQuery.CursorDirection cursorDirection) => - NextReader(query, (query, record) => true, cursorDirection); - /// /// Updates Position based on the value of current Sqlite cursor. /// @@ -679,42 +705,18 @@ private bool NextReader(BtrieveQuery query, BtrieveQuery.CursorDirection cursorD /// Sqlite cursor and continues from there. /// /// Current query - /// Delegate function for verifying results. If this matcher returns - /// false, the query is aborted and returns no more results. /// Which direction to move along the query results /// true if the Sqlite cursor returned a valid item - private bool NextReader(BtrieveQuery query, BtrieveQuery.QueryMatcher matcher, BtrieveQuery.CursorDirection cursorDirection) + private bool NextReader(BtrieveQuery query, BtrieveQuery.CursorDirection cursorDirection) { - var (success, record) = query.Next(matcher, cursorDirection); - - if (success) - Position = query.Position; - - if (record != null) - _cache[query.Position] = record; - - return success; - } + var record = query.Next(cursorDirection); - /// - /// Returns true if the retrievedRecord has equal keyData for the specified key. 
- /// - private static bool RecordMatchesKey(BtrieveRecord retrievedRecord, BtrieveKey key, byte[] keyData) - { - var keyA = key.ExtractKeyDataFromRecord(retrievedRecord.Data); - var keyB = keyData; + if (record == null) + return false; - switch (key.PrimarySegment.DataType) - { - case EnumKeyDataType.String: - case EnumKeyDataType.Lstring: - case EnumKeyDataType.Zstring: - case EnumKeyDataType.OldAscii: - return string.Equals(BtrieveKey.ExtractNullTerminatedString(keyA), - BtrieveKey.ExtractNullTerminatedString(keyB)); - default: - return keyA.SequenceEqual(keyB); - } + Position = query.Position; + _cache[query.Position] = record; + return true; } /// @@ -722,17 +724,15 @@ private static bool RecordMatchesKey(BtrieveRecord retrievedRecord, BtrieveKey k /// private bool GetByKeyEqual(BtrieveQuery query) { - BtrieveQuery.QueryMatcher initialMatcher = (query, record) => RecordMatchesKey(record, query.Key, query.KeyData); - var sqliteObject = query.Key.KeyDataToSqliteObject(query.KeyData); - var command = new SqliteCommand() { Connection = _connection }; + var command = new SqliteCommand() { Connection = Connection }; if (sqliteObject == null) { command.CommandText = $"SELECT id, {query.Key.SqliteKeyName}, data FROM data_t WHERE {query.Key.SqliteKeyName} IS NULL"; } else { - command.CommandText = $"SELECT id, {query.Key.SqliteKeyName}, data FROM data_t WHERE {query.Key.SqliteKeyName} >= @value ORDER BY {query.Key.SqliteKeyName} ASC"; + command.CommandText = $"SELECT id, {query.Key.SqliteKeyName}, data FROM data_t WHERE {query.Key.SqliteKeyName} = @value ORDER BY {query.Key.SqliteKeyName} ASC"; command.Parameters.AddWithValue("@value", query.Key.KeyDataToSqliteObject(query.KeyData)); } @@ -741,7 +741,8 @@ private bool GetByKeyEqual(BtrieveQuery query) DataReader = command.ExecuteReader(System.Data.CommandBehavior.KeyInfo), Command = command }; - return NextReader(query, initialMatcher, BtrieveQuery.CursorDirection.Forward); + query.Direction = BtrieveQuery.CursorDirection.Seek; + return NextReader(query, BtrieveQuery.CursorDirection.Seek); } /// @@ -753,7 +754,7 @@ private bool GetByKeyGreater(BtrieveQuery query, string oprator) { var command = new SqliteCommand( $"SELECT id, {query.Key.SqliteKeyName}, data FROM data_t WHERE {query.Key.SqliteKeyName} {oprator} @value ORDER BY {query.Key.SqliteKeyName} ASC", - _connection); + Connection); command.Parameters.AddWithValue("@value", query.Key.KeyDataToSqliteObject(query.KeyData)); query.Reader = new BtrieveQuery.SqliteReader() @@ -774,7 +775,7 @@ private bool GetByKeyLess(BtrieveQuery query, string oprator) // this query finds the first item less than var command = new SqliteCommand( $"SELECT id, {query.Key.SqliteKeyName}, data FROM data_t WHERE {query.Key.SqliteKeyName} {oprator} @value ORDER BY {query.Key.SqliteKeyName} DESC", - _connection); + Connection); command.Parameters.AddWithValue("@value", query.Key.KeyDataToSqliteObject(query.KeyData)); query.Reader = new BtrieveQuery.SqliteReader() @@ -792,7 +793,7 @@ private bool GetByKeyLess(BtrieveQuery query, string oprator) private bool GetByKeyFirst(BtrieveQuery query) { var command = new SqliteCommand( - $"SELECT id, {query.Key.SqliteKeyName}, data FROM data_t ORDER BY {query.Key.SqliteKeyName} ASC", _connection); + $"SELECT id, {query.Key.SqliteKeyName}, data FROM data_t ORDER BY {query.Key.SqliteKeyName} ASC", Connection); query.Reader = new BtrieveQuery.SqliteReader() { @@ -809,7 +810,7 @@ private bool GetByKeyLast(BtrieveQuery query) { var command = new SqliteCommand( $"SELECT id, 
{query.Key.SqliteKeyName}, data FROM data_t ORDER BY {query.Key.SqliteKeyName} DESC", - _connection); + Connection); query.Reader = new BtrieveQuery.SqliteReader() { @@ -847,7 +848,7 @@ private void CreateSqliteDataIndices(SqliteConnection connection, BtrieveFile bt var possiblyUnique = key.IsUnique ? "UNIQUE" : ""; using var command = new SqliteCommand( $"CREATE {possiblyUnique} INDEX {key.SqliteKeyName}_index on data_t({key.SqliteKeyName})", - _connection); + Connection); command.ExecuteNonQuery(); } } @@ -862,7 +863,7 @@ private void PopulateSqliteDataTable(SqliteConnection connection, BtrieveFile bt { using var insertCmd = new SqliteCommand() { - Connection = _connection, + Connection = Connection, Transaction = transaction }; @@ -891,24 +892,30 @@ private void PopulateSqliteDataTable(SqliteConnection connection, BtrieveFile bt transaction.Commit(); } + private static object SqliteNullable(object o) + => o == null ? DBNull.Value : o; + /// /// Creates the Sqlite metadata_t table. /// private void CreateSqliteMetadataTable(SqliteConnection connection, BtrieveFile btrieveFile) { const string statement = - "CREATE TABLE metadata_t(record_length INTEGER NOT NULL, physical_record_length INTEGER NOT NULL, page_length INTEGER NOT NULL, variable_length_records INTEGER NOT NULL)"; + "CREATE TABLE metadata_t(record_length INTEGER NOT NULL, physical_record_length INTEGER NOT NULL, page_length INTEGER NOT NULL, variable_length_records INTEGER NOT NULL, version INTEGER NOT NULL, acs_name STRING, acs BLOB)"; using var cmd = new SqliteCommand(statement, connection); cmd.ExecuteNonQuery(); using var insertCmd = new SqliteCommand() { Connection = connection }; cmd.CommandText = - "INSERT INTO metadata_t(record_length, physical_record_length, page_length, variable_length_records) VALUES(@record_length, @physical_record_length, @page_length, @variable_length_records)"; + "INSERT INTO metadata_t(record_length, physical_record_length, page_length, variable_length_records, version, acs_name, acs) VALUES(@record_length, @physical_record_length, @page_length, @variable_length_records, @version, @acs_name, @acs)"; cmd.Parameters.AddWithValue("@record_length", btrieveFile.RecordLength); cmd.Parameters.AddWithValue("@physical_record_length", btrieveFile.PhysicalRecordLength); cmd.Parameters.AddWithValue("@page_length", btrieveFile.PageLength); cmd.Parameters.AddWithValue("@variable_length_records", btrieveFile.VariableLengthRecords ? 1 : 0); + cmd.Parameters.AddWithValue("@version", CURRENT_VERSION); + cmd.Parameters.AddWithValue("@acs_name", SqliteNullable(btrieveFile.ACSName)); + cmd.Parameters.AddWithValue("@acs", SqliteNullable(btrieveFile.ACS)); cmd.ExecuteNonQuery(); } @@ -946,7 +953,7 @@ private void CreateSqliteKeysTable(SqliteConnection connection, BtrieveFile btri /// /// Creates the Sqlite database from btrieveFile. 
/// - public void CreateSqliteDB(string fullpath, BtrieveFile btrieveFile) + private void CreateSqliteDB(string fullpath, BtrieveFile btrieveFile) { _logger.Info($"Creating sqlite db {fullpath}"); @@ -958,8 +965,13 @@ public void CreateSqliteDB(string fullpath, BtrieveFile btrieveFile) DataSource = fullpath, }.ToString(); - _connection = new SqliteConnection(connectionString); - _connection.Open(); + CreateSqliteDBWithConnectionString(connectionString, btrieveFile); + } + + public void CreateSqliteDBWithConnectionString(string connectionString, BtrieveFile btrieveFile) + { + Connection = new SqliteConnection(connectionString); + Connection.Open(); RecordLength = btrieveFile.RecordLength; PageLength = btrieveFile.PageLength; @@ -970,11 +982,11 @@ public void CreateSqliteDB(string fullpath, BtrieveFile btrieveFile) AutoincrementedKeys.Add(key.Value.PrimarySegment.Number, key.Value); } - CreateSqliteMetadataTable(_connection, btrieveFile); - CreateSqliteKeysTable(_connection, btrieveFile); - CreateSqliteDataTable(_connection, btrieveFile); - CreateSqliteDataIndices(_connection, btrieveFile); - PopulateSqliteDataTable(_connection, btrieveFile); + CreateSqliteMetadataTable(Connection, btrieveFile); + CreateSqliteKeysTable(Connection, btrieveFile); + CreateSqliteDataTable(Connection, btrieveFile); + CreateSqliteDataIndices(Connection, btrieveFile); + PopulateSqliteDataTable(Connection, btrieveFile); } } } diff --git a/MBBSEmu/Btrieve/BtrieveKey.cs b/MBBSEmu/Btrieve/BtrieveKey.cs index c5b80691..371e41e2 100644 --- a/MBBSEmu/Btrieve/BtrieveKey.cs +++ b/MBBSEmu/Btrieve/BtrieveKey.cs @@ -22,44 +22,45 @@ public class BtrieveKey /// /// Represents the key number, starting from 0. Each database has at least one key. /// - public ushort Number - { - get => PrimarySegment.Number; - } + public ushort Number => PrimarySegment.Number; /// /// The primary segment in a key. Always first in the list of Segments. /// - public BtrieveKeyDefinition PrimarySegment - { - get => Segments[0]; - } + public BtrieveKeyDefinition PrimarySegment => Segments[0]; /// /// Whether the key is a composite key - composed of two or more segments. /// - public bool IsComposite - { - get => Segments.Count > 1; - } + public bool IsComposite => Segments.Count > 1; /// /// Whether the key data in the record can be modified once inserted. /// All segmented keys in a composite key must have the same value. /// - public bool IsModifiable { get => PrimarySegment.IsModifiable; } + public bool IsModifiable => PrimarySegment.IsModifiable; /// /// Whether the key data in the record is unique (no duplicates allowed). /// All segmented keys in a composite key must have the same value. /// - public bool IsUnique { get => PrimarySegment.IsUnique; } + public bool IsUnique => PrimarySegment.IsUnique; /// /// Whether the key data in the record is nullable. /// All segmented keys in a composite key must have the same value. /// - public bool IsNullable { get => PrimarySegment.IsNullable; } + public bool IsNullable => PrimarySegment.IsNullable; + + /// + /// Whether this key requires ACS. + /// + public bool RequiresACS => Segments.Any(segment => segment.RequiresACS); + + /// + /// The ACS table of this key. + /// + public byte[] ACS => Segments.Where(segment => segment.ACS != null).DefaultIfEmpty(null).Select(segment => segment.ACS).First(); /// /// The total length in bytes of the key. 
@@ -119,6 +120,36 @@ private static bool IsAllSameByteValue(ReadOnlySpan data, byte value) /// public object ExtractKeyInRecordToSqliteObject(ReadOnlySpan data) => KeyDataToSqliteObject(ExtractKeyDataFromRecord(data)); + private ReadOnlySpan ApplyACS(ReadOnlySpan keyData) + { + if (!RequiresACS) + return keyData; + + var dst = new byte[Length]; + var offset = 0; + foreach (var segment in Segments) + { + var dstSpan = dst.AsSpan().Slice(offset, segment.Length); + var key = keyData.Slice(offset, segment.Length); + if (segment.RequiresACS) + { + for (var i = 0; i < segment.Length; ++i) + { + dstSpan[i] = segment.ACS[key[i]]; + } + } + else + { + // simple copy + key.CopyTo(dstSpan); + } + + offset += segment.Length; + } + + return dst; + } + /// /// Returns an object that can be used for inserting into the data_t key column based on /// the type of this key from keyData. @@ -130,6 +161,8 @@ public object KeyDataToSqliteObject(ReadOnlySpan keyData) return DBNull.Value; } + keyData = ApplyACS(keyData); + if (IsComposite) return keyData.ToArray(); diff --git a/MBBSEmu/Btrieve/BtrieveKeyDefinition.cs b/MBBSEmu/Btrieve/BtrieveKeyDefinition.cs index aaa51495..924411b4 100644 --- a/MBBSEmu/Btrieve/BtrieveKeyDefinition.cs +++ b/MBBSEmu/Btrieve/BtrieveKeyDefinition.cs @@ -57,6 +57,16 @@ public class BtrieveKeyDefinition /// public ushort Position => (ushort)(Offset + 1); + /// + /// Whether this key uses the ACS table. + /// + public bool RequiresACS => Attributes.HasFlag(EnumKeyAttributeMask.NumberedACS); + + /// + /// The ACS table used by this key. + /// + public byte[] ACS { get; set; } + /// /// Whether the key inside the record can be modified after being inserted. /// diff --git a/MBBSEmu/Btrieve/BtrieveQuery.cs b/MBBSEmu/Btrieve/BtrieveQuery.cs index a453d434..758601da 100644 --- a/MBBSEmu/Btrieve/BtrieveQuery.cs +++ b/MBBSEmu/Btrieve/BtrieveQuery.cs @@ -27,7 +27,9 @@ public void Dispose() } } - public enum CursorDirection { + public enum CursorDirection + { + Seek, Forward, Reverse } @@ -91,14 +93,6 @@ public void Dispose() Reader = null; } - /// - /// A delegate function that returns true if the retrieved record matches the query. - /// - /// Query made - /// The record retrieve from the query - /// true if the record is valid for the query - public delegate bool QueryMatcher(BtrieveQuery query, BtrieveRecord record); - /// /// Moves along the cursor until we hit position /// @@ -131,7 +125,7 @@ private void ChangeDirection(CursorDirection newDirection) command.CommandText += $"<= @value ORDER BY {Key.SqliteKeyName} DESC"; break; default: - throw new ArgumentException("Bad direction"); + throw new ArgumentException($"Bad direction: {newDirection}"); } command.Parameters.AddWithValue("@value", LastKey); @@ -159,10 +153,8 @@ private void ChangeDirection(CursorDirection newDirection) /// Sqlite cursor and continues from there. /// /// Current query - /// Delegate function for verifying results. If this matcher returns - /// false, the query is aborted and returns no more results. 
- /// true if the Sqlite cursor returned a valid item along with the data - public (bool, BtrieveRecord) Next(QueryMatcher matcher, CursorDirection cursorDirection) + /// The found record + public BtrieveRecord Next(CursorDirection cursorDirection) { if (Direction != cursorDirection) { @@ -174,22 +166,16 @@ private void ChangeDirection(CursorDirection newDirection) if (Reader == null || !Reader.Read()) { Reader = null; - return (false, null); + return null; } Position = (uint)Reader.DataReader.GetInt32(0); LastKey = Reader.DataReader.GetValue(1); using var stream = Reader.DataReader.GetStream(2); - var data = new byte[stream.Length]; - stream.Read(data, 0, data.Length); - - var record = new BtrieveRecord(Position, data); - - if (!matcher.Invoke(this, record)) - return (false, record); + var data = BtrieveUtil.ReadEntireStream(stream); - return (true, record); + return new BtrieveRecord(Position, data); } } } diff --git a/MBBSEmu/Btrieve/BtrieveUtil.cs b/MBBSEmu/Btrieve/BtrieveUtil.cs new file mode 100644 index 00000000..bde86f45 --- /dev/null +++ b/MBBSEmu/Btrieve/BtrieveUtil.cs @@ -0,0 +1,26 @@ +using System; +using System.IO; + +namespace MBBSEmu.Btrieve +{ + public class BtrieveUtil + { + public static byte[] ReadEntireStream(Stream s) + { + var totalRead = 0; + var buffer = new byte[s.Length]; + while (totalRead != s.Length) + { + var numRead = s.Read(buffer, totalRead, (int)s.Length - totalRead); + // shouldn't happen, but guard against the infinite loop anyhow + if (numRead == 0) + { + throw new ArgumentException($"Failed to read entire blob stream of length {s.Length}"); + } + totalRead += numRead; + } + return buffer; + } + + } +} diff --git a/MBBSEmu/HostProcess/ExportedModules/Majorbbs.cs b/MBBSEmu/HostProcess/ExportedModules/Majorbbs.cs index 39f309d1..e2f7a878 100644 --- a/MBBSEmu/HostProcess/ExportedModules/Majorbbs.cs +++ b/MBBSEmu/HostProcess/ExportedModules/Majorbbs.cs @@ -50,6 +50,8 @@ private IntPtr16 _currentMcvFile private readonly Stopwatch _highResolutionTimer = new Stopwatch(); + private readonly Random _random = new Random(Guid.NewGuid().GetHashCode()); + private AgentStruct _galacticommClientServerAgent; /// @@ -2081,7 +2083,7 @@ private void hasmkey() /// private void rand() { - var randomValue = new Random(Guid.NewGuid().GetHashCode()).Next(1, short.MaxValue); + var randomValue = _random.Next(1, short.MaxValue); #if DEBUG //_logger.Info($"Generated random number {randomValue} and saved it to AX"); @@ -5919,13 +5921,7 @@ private void genrnd() if (max < min) max = min; - var randomValue = (ushort)new Random(Guid.NewGuid().GetHashCode()).Next(min, max); - -#if DEBUG - _logger.Info($"Generated Random Number: {randomValue}"); -#endif - - Registers.AX = randomValue; + Registers.AX = (ushort)_random.Next(min, max); } /// @@ -6955,7 +6951,7 @@ private void lngrnd() { var min = GetParameterLong(0); var max = GetParameterLong(2); - var randomValue = new Random(Guid.NewGuid().GetHashCode()).Next(min, max); + var randomValue = _random.Next(min, max); Registers.DX = (ushort)(randomValue >> 16); Registers.AX = (ushort)(randomValue & 0xFFFF);
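
For context on the ACS feature added above: an ACS (Alternate Collating Sequence) is a 256-byte remap table applied byte-by-byte to key data before the key is stored or compared, which is how the new tests get case-insensitive key lookups. The standalone sketch below is illustrative only and not part of the patch; it mirrors the UpperACS() test helper and the per-segment transform in BtrieveKey.ApplyACS, and the BuildUpperAcs/FoldKey names are placeholders rather than APIs from the codebase.

using System;
using System.Text;

public static class AcsFoldingSketch
{
    // Identity table for all 256 byte values, with 'a'..'z' folded to 'A'..'Z',
    // matching the table built by UpperACS()/CreateACSBtrieveFile() in the tests.
    private static byte[] BuildUpperAcs()
    {
        var acs = new byte[256];
        for (var i = 0; i < acs.Length; ++i)
            acs[i] = (byte)i;
        for (var c = 'a'; c <= 'z'; ++c)
            acs[c] = (byte)char.ToUpper(c);
        return acs;
    }

    // Remaps every byte of a key segment through the ACS table, analogous to the
    // per-segment work BtrieveKey.ApplyACS does before the key value is written
    // to its Sqlite key column.
    private static byte[] FoldKey(byte[] acs, byte[] keyData)
    {
        var folded = new byte[keyData.Length];
        for (var i = 0; i < keyData.Length; ++i)
            folded[i] = acs[keyData[i]];
        return folded;
    }

    public static void Main()
    {
        var acs = BuildUpperAcs();
        var folded = FoldKey(acs, Encoding.ASCII.GetBytes("paladine"));

        // Prints PALADINE: "paladine" and "Paladine" fold to the same key value,
        // which is why ACSSeekByKey finds the record inserted as "Paladine" and
        // ACSInsertDuplicateFails rejects a second insert with the same folded key.
        Console.WriteLine(Encoding.ASCII.GetString(folded));
    }
}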