diff --git a/MBBSDatabase/MBBSDatabase.csproj b/MBBSDatabase/MBBSDatabase.csproj
new file mode 100644
index 00000000..6b7837a4
--- /dev/null
+++ b/MBBSDatabase/MBBSDatabase.csproj
@@ -0,0 +1,16 @@
+
+
+
+ Exe
+ netcoreapp3.1
+ 8.0
+
+
+
+
+
+
+
+
+
+
diff --git a/MBBSDatabase/Program.cs b/MBBSDatabase/Program.cs
new file mode 100644
index 00000000..792dfc32
--- /dev/null
+++ b/MBBSDatabase/Program.cs
@@ -0,0 +1,53 @@
+using MBBSEmu.Btrieve;
+using MBBSEmu.DependencyInjection;
+using NLog;
+using System.Linq;
+using System.IO;
+using System;
+
+namespace MBBSEmu
+{
+ ///
+ /// An MBBSEmu database (.DB) utility program.
+ ///
+ /// Currently supports two modes of operation, view and convert.
+ /// View mode shows information about the specified DAT file, such as key information.
+ /// Convert mode converts the DAT file into a .DB file.
+ ///
+ public class Program
+ {
+ static void Main(string[] args)
+ {
+ new Program().Run(args);
+ }
+
+ private void Run(string[] args)
+ {
+ var serviceResolver = new ServiceResolver();
+ var logger = serviceResolver.GetService<ILogger>();
+
+ if (args.Length == 0)
+ {
+ Console.WriteLine("Usage: MBBSDatabase [view|convert] [files]");
+ return;
+ }
+
+ var convert = (args[0] == "convert");
+
+ foreach (string s in args.Skip(1))
+ {
+ BtrieveFile file = new BtrieveFile();
+ try
+ {
+ file.LoadFile(logger, s);
+ if (convert)
+ {
+ using var processor = new BtrieveFileProcessor();
+ processor.CreateSqliteDB(Path.ChangeExtension(s, ".DB"), file);
+ }
+ }
+ catch (Exception e)
+ {
+ logger.Error(e, $"Failed to load Btrieve file {s}: {e.Message}\n{e.StackTrace}");
+ }
+ }
+ }
+ }
+}
diff --git a/MBBSEmu.Tests/Assets/MBBSEMU.DB b/MBBSEmu.Tests/Assets/MBBSEMU.DB
index d83fbe82..6c8108fc 100644
Binary files a/MBBSEmu.Tests/Assets/MBBSEMU.DB and b/MBBSEmu.Tests/Assets/MBBSEMU.DB differ
diff --git a/MBBSEmu.Tests/Btrieve/BtrieveFileProcessor_Tests.cs b/MBBSEmu.Tests/Btrieve/BtrieveFileProcessor_Tests.cs
index 01afc8cc..a6b4671b 100644
--- a/MBBSEmu.Tests/Btrieve/BtrieveFileProcessor_Tests.cs
+++ b/MBBSEmu.Tests/Btrieve/BtrieveFileProcessor_Tests.cs
@@ -16,7 +16,7 @@ public class BtrieveFileProcessor_Tests : TestBase, IDisposable
{
const int RECORD_LENGTH = 74;
- private const string EXPECTED_METADATA_T_SQL = "CREATE TABLE metadata_t(record_length INTEGER NOT NULL, physical_record_length INTEGER NOT NULL, page_length INTEGER NOT NULL)";
+ private const string EXPECTED_METADATA_T_SQL = "CREATE TABLE metadata_t(record_length INTEGER NOT NULL, physical_record_length INTEGER NOT NULL, page_length INTEGER NOT NULL, variable_length_records INTEGER NOT NULL)";
private const string EXPECTED_KEYS_T_SQL = "CREATE TABLE keys_t(id INTEGER PRIMARY KEY, number INTEGER NOT NULL, segment INTEGER NOT NULL, attributes INTEGER NOT NULL, data_type INTEGER NOT NULL, offset INTEGER NOT NULL, length INTEGER NOT NULL, null_value INTEGER NOT NULL, UNIQUE(number, segment))";
private const string EXPECTED_DATA_T_SQL = "CREATE TABLE data_t(id INTEGER PRIMARY KEY, data BLOB NOT NULL, key_0 TEXT, key_1 INTEGER NOT NULL UNIQUE, key_2 TEXT, key_3 INTEGER NOT NULL UNIQUE)";
@@ -99,6 +99,7 @@ public void LoadsFileAndConvertsProperly()
btrieve.Keys.Count.Should().Be(4);
btrieve.RecordLength.Should().Be(RECORD_LENGTH);
btrieve.PageLength.Should().Be(512);
+ btrieve.VariableLengthRecords.Should().BeFalse();
btrieve.Keys[0].PrimarySegment.Should().BeEquivalentTo(
new BtrieveKeyDefinition()
@@ -179,12 +180,13 @@ public void LoadsFileProperly()
btrieve.Keys.Count.Should().Be(4);
btrieve.RecordLength.Should().Be(RECORD_LENGTH);
btrieve.PageLength.Should().Be(512);
+ btrieve.VariableLengthRecords.Should().BeFalse();
btrieve.Keys[0].PrimarySegment.Should().BeEquivalentTo(
new BtrieveKeyDefinition()
{
Number = 0,
- Attributes = EnumKeyAttributeMask.Duplicates,
+ Attributes = EnumKeyAttributeMask.Duplicates | EnumKeyAttributeMask.UseExtendedDataType,
DataType = EnumKeyDataType.Zstring,
Offset = 2,
Length = 32,
@@ -194,7 +196,7 @@ public void LoadsFileProperly()
new BtrieveKeyDefinition()
{
Number = 1,
- Attributes = EnumKeyAttributeMask.Modifiable,
+ Attributes = EnumKeyAttributeMask.Modifiable | EnumKeyAttributeMask.UseExtendedDataType,
DataType = EnumKeyDataType.Integer,
Offset = 34,
Length = 4,
@@ -204,7 +206,7 @@ public void LoadsFileProperly()
new BtrieveKeyDefinition()
{
Number = 2,
- Attributes = EnumKeyAttributeMask.Duplicates | EnumKeyAttributeMask.Modifiable,
+ Attributes = EnumKeyAttributeMask.Duplicates | EnumKeyAttributeMask.Modifiable | EnumKeyAttributeMask.UseExtendedDataType,
DataType = EnumKeyDataType.Zstring,
Offset = 38,
Length = 32,
@@ -214,7 +216,7 @@ public void LoadsFileProperly()
new BtrieveKeyDefinition()
{
Number = 3,
- Attributes = 0,
+ Attributes = EnumKeyAttributeMask.UseExtendedDataType,
DataType = EnumKeyDataType.AutoInc,
Offset = 70,
Length = 4,
diff --git a/MBBSEmu.Tests/Btrieve/BtrieveFile_Tests.cs b/MBBSEmu.Tests/Btrieve/BtrieveFile_Tests.cs
index d1bb4dc9..908546e4 100644
--- a/MBBSEmu.Tests/Btrieve/BtrieveFile_Tests.cs
+++ b/MBBSEmu.Tests/Btrieve/BtrieveFile_Tests.cs
@@ -53,6 +53,7 @@ public void LoadsFile()
Assert.Equal(512, btrieve.PageLength);
Assert.Equal(5, btrieve.PageCount);
Assert.False(btrieve.LogKeyPresent);
+ Assert.False(btrieve.VariableLengthRecords);
Assert.Single(btrieve.Keys[0].Segments);
Assert.Single(btrieve.Keys[1].Segments);
diff --git a/MBBSEmu/Assets/BBSGEN.DB b/MBBSEmu/Assets/BBSGEN.DB
index c00955d0..8e43f665 100644
Binary files a/MBBSEmu/Assets/BBSGEN.DB and b/MBBSEmu/Assets/BBSGEN.DB differ
diff --git a/MBBSEmu/Assets/BBSUSR.DB b/MBBSEmu/Assets/BBSUSR.DB
index 29527290..e2378863 100644
Binary files a/MBBSEmu/Assets/BBSUSR.DB and b/MBBSEmu/Assets/BBSUSR.DB differ
diff --git a/MBBSEmu/Assets/bbsgen.db.sql b/MBBSEmu/Assets/bbsgen.db.sql
index d89570d5..bb1bde18 100644
--- a/MBBSEmu/Assets/bbsgen.db.sql
+++ b/MBBSEmu/Assets/bbsgen.db.sql
@@ -1,12 +1,15 @@
-CREATE TABLE metadata_t(record_length INTEGER NOT NULL, physical_record_length INTEGER NOT NULL, page_length INTEGER NOT NULL);
+CREATE TABLE metadata_t(record_length INTEGER NOT NULL, physical_record_length INTEGER NOT NULL, page_length INTEGER NOT NULL, variable_length_records INTEGER NOT NULL);
-INSERT INTO metadata_t(record_length, physical_record_length, page_length) VALUES(55, 55, 1024);
+INSERT INTO metadata_t(record_length, physical_record_length, page_length, variable_length_records) VALUES(55, 75, 1024, 1);
-CREATE TABLE keys_t(id INTEGER PRIMARY KEY, number INTEGER NOT NULL, segment INTEGER NOT NULL, attributes INTEGER NOT NULL, data_type INTEGER NOT NULL, offset INTEGER NOT NULL, length INTEGER NOT NULL, UNIQUE (number, segment));
+CREATE TABLE keys_t(id INTEGER PRIMARY KEY, number INTEGER NOT NULL, segment INTEGER NOT NULL, attributes INTEGER NOT NULL, data_type INTEGER NOT NULL, offset INTEGER NOT NULL, length INTEGER NOT NULL, null_value INTEGER NOT NULL, UNIQUE (number, segment));
-INSERT INTO keys_t(number, segment, attributes, data_type, offset, length) VALUES(0, 0, 51, 11, 0, 30);
-INSERT INTO keys_t(number, segment, attributes, data_type, offset, length) VALUES(0, 1, 35, 11, 30, 25);
+INSERT INTO keys_t(number, segment, attributes, data_type, offset, length, null_value) VALUES(0, 0, 307, 11, 0, 30, 0);
+INSERT INTO keys_t(number, segment, attributes, data_type, offset, length, null_value) VALUES(0, 1, 291, 11, 30, 25, 0);
-INSERT INTO keys_t(number, segment, attributes, data_type, offset, length) VALUES(1, 0, 35, 11, 30, 25);
+INSERT INTO keys_t(number, segment, attributes, data_type, offset, length, null_value) VALUES(1, 0, 291, 11, 30, 25, 0);
CREATE TABLE data_t(id INTEGER PRIMARY KEY, data BLOB NOT NULL, key_0 STRING NOT NULL, key_1 STRING NOT NULL);
+
+CREATE INDEX key_0_index on data_t(key_0);
+CREATE INDEX key_1_index on data_t(key_1);
diff --git a/MBBSEmu/Assets/bbsusr.db.sql b/MBBSEmu/Assets/bbsusr.db.sql
index 9890a783..c2b5763f 100644
--- a/MBBSEmu/Assets/bbsusr.db.sql
+++ b/MBBSEmu/Assets/bbsusr.db.sql
@@ -1,9 +1,11 @@
-CREATE TABLE metadata_t(record_length INTEGER NOT NULL, physical_record_length INTEGER NOT NULL, page_length INTEGER NOT NULL);
+CREATE TABLE metadata_t(record_length INTEGER NOT NULL, physical_record_length INTEGER NOT NULL, page_length INTEGER NOT NULL, variable_length_records INTEGER NOT NULL);
-INSERT INTO metadata_t(record_length, physical_record_length, page_length) VALUES(338, 338, 1024);
+INSERT INTO metadata_t(record_length, physical_record_length, page_length, variable_length_records) VALUES(338, 338, 1024, 0);
-CREATE TABLE keys_t(id INTEGER PRIMARY KEY, number INTEGER NOT NULL, segment INTEGER NOT NULL, attributes INTEGER NOT NULL, data_type INTEGER NOT NULL, offset INTEGER NOT NULL, length INTEGER NOT NULL, UNIQUE (number, segment));
+CREATE TABLE keys_t(id INTEGER PRIMARY KEY, number INTEGER NOT NULL, segment INTEGER NOT NULL, attributes INTEGER NOT NULL, data_type INTEGER NOT NULL, offset INTEGER NOT NULL, length INTEGER NOT NULL, null_value INTEGER NOT NULL, UNIQUE (number, segment));
-INSERT INTO keys_t(number, segment, attributes, data_type, offset, length) VALUES(0, 0, 0, 11, 0, 30);
+INSERT INTO keys_t(number, segment, attributes, data_type, offset, length, null_value) VALUES(0, 0, 288, 11, 0, 30, 0);
CREATE TABLE data_t(id INTEGER PRIMARY KEY, data BLOB NOT NULL, key_0 STRING NOT NULL);
+
+CREATE INDEX key_0_index on data_t(key_0);
diff --git a/MBBSEmu/Btrieve/BtrieveFile.cs b/MBBSEmu/Btrieve/BtrieveFile.cs
index e802c96f..f2c08c08 100644
--- a/MBBSEmu/Btrieve/BtrieveFile.cs
+++ b/MBBSEmu/Btrieve/BtrieveFile.cs
@@ -44,6 +44,11 @@ public ushort RecordCount
}
}
+ ///
+ /// Whether the records are variable length
+ ///
+ public bool VariableLengthRecords { get; set; }
+
private ushort _recordLength;
///
/// Defined Length of the records within the Btrieve File
@@ -148,6 +153,8 @@ public ushort KeyCount
///
public Dictionary Keys { get; set; }
+ private ILogger _logger;
+
///
/// Log Key is an internal value used by the Btrieve engine to track unique
/// records -- it adds 8 bytes to the end of the record that's not accounted for
@@ -157,10 +164,17 @@ public ushort KeyCount
///
public bool LogKeyPresent { get; set; }
+ ///
+ /// Set of absolute file position record offsets that are marked as deleted, and
+ /// therefore not loaded during initial load.
+ ///
+ public HashSet<uint> DeletedRecordOffsets { get; set; }
+
public BtrieveFile()
{
Records = new List<BtrieveRecord>();
Keys = new Dictionary<ushort, BtrieveKey>();
+ DeletedRecordOffsets = new HashSet<uint>();
}
///
@@ -183,47 +197,129 @@ public void LoadFile(ILogger logger, string path, string fileName)
throw new FileNotFoundException($"Unable to locate existing btrieve file {fileName}");
}
- var fullPath = Path.Combine(path, fileName);
- var fileData = File.ReadAllBytes(fullPath);
+ LoadFile(logger, Path.Combine(path, fileName));
+ }
+
+ public void LoadFile(ILogger logger, string fullPath)
+ {
+ _logger = logger;
- if (fileData[0] == 'F' && fileData[1] == 'C' && fileData[2] == 0 && fileData[3] == 0)
- throw new ArgumentException($"Cannot import v6 Btrieve database {fileName} - only v5 databases are supported for now. Please contact your ISV for a downgraded database.");
+ var fileName = Path.GetFileName(fullPath);
+ var fileData = File.ReadAllBytes(fullPath);
FileName = fullPath;
Data = fileData;
+ var (valid, errorMessage) = ValidateDatabase();
+ if (!valid)
+ throw new ArgumentException($"Failed to load database {FileName}: {errorMessage}");
+
#if DEBUG
logger.Info($"Opened {fileName} and read {Data.Length} bytes");
#endif
- //Only Parse Keys if they are defined
- if (KeyCount > 0)
- LoadBtrieveKeyDefinitions(logger);
- else
- throw new ArgumentException("NO KEYS defined in {fileName}");
+ DeletedRecordOffsets = GetRecordPointerList(GetRecordPointer(0x10));
+ LoadBtrieveKeyDefinitions(logger);
//Only load records if there are any present
if (RecordCount > 0)
LoadBtrieveRecords(logger);
}
+ ///
+ /// Validates the Btrieve database being loaded
+ ///
+ /// True if valid. If false, the string is the error message.
+ private (bool, string) ValidateDatabase()
+ {
+ if (Data[0] == 'F' && Data[1] == 'C')
+ return (false, $"Cannot import v6 Btrieve database {FileName} - only v5 databases are supported for now. Please contact your ISV for a downgraded database.");
+ if (Data[0] != 0 || Data[1] != 0 || Data[2] != 0 || Data[3] != 0)
+ return (false, $"Doesn't appear to be a v5 Btrieve database {FileName}");
+
+ var versionCode = Data[6] << 16 | Data[7];
+ switch (versionCode)
+ {
+ case 3:
+ case 4:
+ case 5:
+ break;
+ default:
+ return (false, $"Invalid version code [{versionCode}] in v5 Btrieve database {FileName}");
+ }
+
+ var needsRecovery = (Data[0x22] == 0xFF && Data[0x23] == 0xFF);
+ if (needsRecovery)
+ return (false, $"Cannot import Btrieve database {FileName} since it's marked inconsistent and needs recovery.");
+
+ if (PageLength < 512 || (PageLength & 0x1FF) != 0)
+ return (false, $"Invalid PageLength, must be multiple of 512 {FileName}");
+
+ if (KeyCount <= 0)
+ return (false, $"NO KEYS defined in {FileName}");
+
+ var accelFlags = BitConverter.ToUInt16(Data.AsSpan().Slice(0xA, 2));
+ if (accelFlags != 0)
+ return (false, $"Invalid accel flags, expected 0, got {accelFlags}! {FileName}");
+
+ var usrflgs = BitConverter.ToUInt16(Data.AsSpan().Slice(0x106, 2));
+ if ((usrflgs & 0x8) != 0)
+ return (false, $"Data is compressed, cannot handle {FileName}");
+
+ VariableLengthRecords = ((usrflgs & 0x1) != 0);
+ var recordsContainVariableLength = (Data[0x38] == 0xFF);
+
+ if (VariableLengthRecords ^ recordsContainVariableLength)
+ return (false, "Mismatched variable length fields");
+
+ return (true, "");
+ }
+
+ ///
+ /// Gets a record pointer offset at and then continues to walk
+ /// the chain of pointers until the end, returning all the offsets.
+ ///
+ /// Record pointer offset to start scanning from.
+ HashSet<uint> GetRecordPointerList(uint first)
+ {
+ var ret = new HashSet<uint>();
+ while (first != 0xFFFFFFFF)
+ {
+ ret.Add(first);
+
+ first = GetRecordPointer(first);
+ }
+
+ return ret;
+ }
+
+ ///
+ /// Returns the record pointer located at absolute file offset .
+ ///
+ private uint GetRecordPointer(uint offset) =>
+ GetRecordPointer(Data.AsSpan().Slice((int)offset, 4));
+
+ ///
+ /// Returns the record pointer located within the span starting at offset 0
+ ///
+ private uint GetRecordPointer(ReadOnlySpan<byte> data)
+ {
+ // 2 byte high word -> 2 byte low word
+ return (uint)BitConverter.ToUInt16(data.Slice(0, 2)) << 16 | (uint)BitConverter.ToUInt16(data.Slice(2, 2));
+ }
+
///
/// Loads Btrieve Key Definitions from the Btrieve DAT File Header
///
private void LoadBtrieveKeyDefinitions(ILogger logger)
{
- ushort keyDefinitionBase = 0x110;
+ var keyDefinitionBase = 0x110;
const ushort keyDefinitionLength = 0x1E;
- ReadOnlySpan btrieveFileContentSpan = Data;
+ var btrieveFileContentSpan = Data.AsSpan();
- //Check for Log Key
- if (btrieveFileContentSpan[0x10C] == 1)
- {
- logger.Warn($"Btrieve Log Key Present in {FileName}");
- LogKeyPresent = true;
- }
+ LogKeyPresent = (btrieveFileContentSpan[0x10C] == 1);
- ushort totalKeys = KeyCount;
- ushort currentKeyNumber = 0;
+ var totalKeys = KeyCount;
+ var currentKeyNumber = (ushort)0;
while (currentKeyNumber < totalKeys)
{
var data = btrieveFileContentSpan.Slice(keyDefinitionBase, keyDefinitionLength).ToArray();
@@ -241,12 +337,13 @@ private void LoadBtrieveKeyDefinitions(ILogger logger)
DataType = dataType,
Offset = BitConverter.ToUInt16(data, 0x14),
Length = BitConverter.ToUInt16(data, 0x16),
- Segment = false,
+ Segment = attributes.HasFlag(EnumKeyAttributeMask.SegmentedKey),
+ SegmentOf = attributes.HasFlag(EnumKeyAttributeMask.SegmentedKey) ? currentKeyNumber : (ushort)0,
NullValue = data[0x1D],
};
//If it's a segmented key, don't increment so the next key gets added to the same ordinal as an additional segment
- if (!keyDefinition.Attributes.HasFlag(EnumKeyAttributeMask.SegmentedKey))
+ if (!keyDefinition.Segment)
currentKeyNumber++;
#if DEBUG
@@ -296,24 +393,12 @@ private void LoadBtrieveRecords(ILogger logger)
//Starting at 1, since the first page is the header
for (var i = 1; i <= PageCount; i++)
{
- var pageOffset = (PageLength * i);
- var recordsInPage = (PageLength / PhysicalRecordLength);
-
- //Key Page
- if (BitConverter.ToUInt32(Data, pageOffset + 0x8) == uint.MaxValue)
- continue;
-
- //Key Constraint Page
- if (Data[pageOffset + 0x6] == 0xAC)
- continue;
+ var pageOffset = (uint)(PageLength * i);
+ var recordsInPage = ((PageLength - 6) / PhysicalRecordLength);
- //Verify Data Page
- if (!Data[pageOffset + 0x5].IsNegative())
- {
- logger.Warn(
- $"Skipping Non-Data Page, might have invalid data - Page Start: 0x{pageOffset + 0x5:X4}");
+ //Verify Data Page, high bit set on byte 5 (usage count)
+ if ((Data[pageOffset + 0x5] & 0x80) == 0)
continue;
- }
//Page data starts 6 bytes in
pageOffset += 6;
@@ -322,20 +407,164 @@ private void LoadBtrieveRecords(ILogger logger)
if (recordsLoaded == RecordCount)
break;
+ var recordOffset = (uint)pageOffset + (uint)(PhysicalRecordLength * j);
+ // Marked for deletion? Skip
+ if (DeletedRecordOffsets.Contains(recordOffset))
+ continue;
+
+ var record = Data.AsSpan().Slice((int)recordOffset, PhysicalRecordLength);
+ if (IsUnusedRecord(record))
+ break;
+
var recordArray = new byte[RecordLength];
- Array.Copy(Data, pageOffset + (PhysicalRecordLength * j), recordArray, 0, RecordLength);
+ Array.Copy(Data, recordOffset, recordArray, 0, RecordLength);
- //End of Page 0xFFFFFFFF
- if (BitConverter.ToUInt32(recordArray, 0) == uint.MaxValue)
- continue;
+ if (VariableLengthRecords)
+ {
+ using var stream = new MemoryStream();
+ stream.Write(recordArray);
+
+ Records.Add(new BtrieveRecord(recordOffset, GetVariableLengthData(recordOffset, stream)));
+ }
+ else
+ Records.Add(new BtrieveRecord(recordOffset, recordArray));
- Records.Add(new BtrieveRecord((uint)(pageOffset + (PhysicalRecordLength * j)), recordArray));
recordsLoaded++;
}
}
+
+ if (recordsLoaded != RecordCount)
+ {
+ logger.Warn($"Database {FileName} contains {RecordCount} records but only read {recordsLoaded}!");
+ }
#if DEBUG
logger.Info($"Loaded {recordsLoaded} records from {FileName}. Resetting cursor to 0");
#endif
}
+
+ ///
+ /// Returns true if the fixed record appears to be unused and should be skipped.
+ ///
+ /// Fixed length records are contiguous in the page, and unused records are all zero except
+ /// for the first 4 bytes, which is a record pointer to the next free page.
+ ///
+ private bool IsUnusedRecord(ReadOnlySpan<byte> fixedRecordData)
+ {
+ if (fixedRecordData.Slice(4).ContainsOnly(0))
+ {
+ // additional validation, to ensure the record pointer is valid
+ var offset = GetRecordPointer(fixedRecordData);
+ if (offset < Data.Length)
+ return true;
+ }
+
+ return false;
+ }
+
+ ///
+ /// Gets the complete variable length data from the specified ,
+ /// walking through all data pages and returning the concatenated data.
+ ///
+ /// Fixed record pointer offset of the record from a data page
+ /// MemoryStream containing the fixed record data already read.
+ private byte[] GetVariableLengthData(uint recordOffset, MemoryStream stream) {
+ var variableData = Data.AsSpan().Slice((int)recordOffset + RecordLength, PhysicalRecordLength - RecordLength);
+ var vrecPage = GetPageFromVariableLengthRecordPointer(variableData);
+ var vrecFragment = variableData[3];
+
+ while (true) {
+ // invalid page? abort and return what we have
+ if (vrecPage == 0xFFFFFF && vrecFragment == 0xFF)
+ return stream.ToArray();
+
+ // jump to that page
+ var vpage = Data.AsSpan().Slice((int)vrecPage * PageLength, PageLength);
+ var numFragmentsInPage = BitConverter.ToUInt16(vpage.Slice(0xA, 2));
+ // grab the fragment pointer
+ var (offset, length, nextPointerExists) = GetFragment(vpage, vrecFragment, numFragmentsInPage);
+ // now finally read the data!
+ variableData = vpage.Slice((int)offset, (int)length);
+ if (!nextPointerExists)
+ {
+ // read all the data and reached the end!
+ stream.Write(variableData);
+ return stream.ToArray();
+ }
+
+ // keep going through more pages!
+ vrecPage = GetPageFromVariableLengthRecordPointer(variableData);
+ vrecFragment = variableData[3];
+
+ stream.Write(variableData.Slice(4));
+ }
+ }
+
+ ///
+ /// Returns data about the specified fragment.
+ ///
+ /// The entire page's data, will be PageLength in size
+ /// The fragment to lookup, 0 based
+ /// The maximum number of fragments in the page.
+ /// Three items: 1) the offset within the page where the fragment data resides, 2)
+ /// the length of data contained in the fragment, and 3) a boolean indicating the fragment
+ /// has a "next pointer", meaning the fragment data is prefixed with 4 bytes of another
+ /// data page to continue reading from.
+ ///
+ private (uint, uint, bool) GetFragment(ReadOnlySpan<byte> page, uint fragment, uint numFragments)
+ {
+ var offsetPointer = (uint)PageLength - 2u * (fragment + 1u);
+ var (offset, nextPointerExists) = GetPageOffsetFromFragmentArray(page.Slice((int)offsetPointer, 2));
+
+ // to compute length, keep going until I read the next valid fragment and get its offset
+ // then we subtract the two offets to compute length
+ var nextFragmentOffset = offsetPointer;
+ var nextOffset = 0xFFFFFFFFu;
+ for (var i = fragment + 1; i <= numFragments; ++i)
+ {
+ nextFragmentOffset -= 2; // fragment array is at end of page and grows downward
+ (nextOffset, _) = GetPageOffsetFromFragmentArray(page.Slice((int)nextFragmentOffset, 2));
+ if (nextOffset == 0xFFFF)
+ continue;
+ // valid offset, break now
+ break;
+ }
+
+ // some sanity checks
+ if (nextOffset == 0xFFFFFFFFu)
+ throw new ArgumentException($"Can't find next fragment offset {fragment} numFragments:{numFragments} {FileName}");
+
+ var length = nextOffset - offset;
+ // final sanity check
+ if (offset < 0xC || (offset + length) > (PageLength - 2 * (numFragments + 1)))
+ throw new ArgumentException($"Variable data overflows page {fragment} numFragments:{numFragments} {FileName}");
+
+ return (offset, length, nextPointerExists);
+ }
+
+ ///
+ /// Reads the page offset from the fragment array
+ ///
+ /// Fragment array entry, size of 2 bytes
+ /// The offset and a boolean indicating the offset contains a next pointer
+ private static (uint, bool) GetPageOffsetFromFragmentArray(ReadOnlySpan<byte> arrayEntry)
+ {
+ if (arrayEntry.ContainsOnly(0xFF))
+ return (0xFFFFu, false);
+
+ var offset = (uint)arrayEntry[0] | ((uint)arrayEntry[1] & 0x7F) << 8;
+ var nextPointerExists = (arrayEntry[1] & 0x80) != 0;
+ return (offset, nextPointerExists);
+ }
+
+ ///
+ /// Reads the variable length record pointer, which is contained in the first 4 bytes
+ /// of the footer after each fixed length record, and returns the page it points to.
+ ///
+ /// footer of the fixed record, at least 4 bytes in length
+ /// The page that this variable length record pointer points to
+ private static uint GetPageFromVariableLengthRecordPointer(ReadOnlySpan<byte> data) {
+ // high low mid, yep it's stupid
+ return (uint)data[0] << 16 | (uint)data[1] | (uint)data[2] << 8;
+ }
}
}
diff --git a/MBBSEmu/Btrieve/BtrieveFileProcessor.cs b/MBBSEmu/Btrieve/BtrieveFileProcessor.cs
index 6d63d578..66efe621 100644
--- a/MBBSEmu/Btrieve/BtrieveFileProcessor.cs
+++ b/MBBSEmu/Btrieve/BtrieveFileProcessor.cs
@@ -46,6 +46,13 @@ public class BtrieveFileProcessor : IDisposable
///
public int RecordLength { get; set; }
+ ///
+ /// Whether the database contains variable length records.
+ /// If true, the RecordLength field is the fixed record length portion. Total
+ /// record size is RecordLength + some variable length
+ ///
+ public bool VariableLengthRecords { get; set; }
+
///
/// The active connection to the Sqlite database.
///
@@ -102,6 +109,12 @@ public void Dispose()
_cache.Clear();
}
+ public BtrieveFileProcessor()
+ {
+ Keys = new Dictionary();
+ AutoincrementedKeys = new Dictionary();
+ }
+
///
/// Constructor to load the specified Btrieve File at the given Path
///
@@ -172,7 +185,7 @@ private void LoadSqlite(string fullPath)
///
private void LoadSqliteMetadata()
{
- using (var cmd = new SqliteCommand("SELECT record_length, page_length FROM metadata_t;", _connection))
+ using (var cmd = new SqliteCommand("SELECT record_length, page_length, variable_length_records FROM metadata_t;", _connection))
{
using var reader = cmd.ExecuteReader();
try
@@ -182,6 +195,7 @@ private void LoadSqliteMetadata()
RecordLength = reader.GetInt32(0);
PageLength = reader.GetInt32(1);
+ VariableLengthRecords = reader.GetBoolean(2);
}
finally
{
@@ -354,7 +368,10 @@ public BtrieveRecord GetRecord(uint offset)
///
public bool Update(uint offset, byte[] recordData)
{
- if (recordData.Length != RecordLength)
+ if (VariableLengthRecords && recordData.Length != RecordLength)
+ _logger.Warn($"Updating variable length record of {recordData.Length} bytes into {FullPath}");
+
+ if (!VariableLengthRecords && recordData.Length != RecordLength)
{
_logger.Warn(
$"Btrieve Record Size Mismatch. Expected Length {RecordLength}, Actual Length {recordData.Length}");
@@ -471,7 +488,10 @@ private bool InsertAutoincrementValues(SqliteTransaction transaction, byte[] rec
/// Position of the newly inserted item, or 0 on failure
public uint Insert(byte[] record)
{
- if (record.Length != RecordLength)
+ if (VariableLengthRecords && record.Length != RecordLength)
+ _logger.Warn($"Inserting variable length record of {record.Length} bytes into {FullPath}");
+
+ if (!VariableLengthRecords && record.Length != RecordLength)
{
_logger.Warn(
$"Btrieve Record Size Mismatch TRUNCATING. Expected Length {RecordLength}, Actual Length {record.Length}");
@@ -654,10 +674,12 @@ private bool NextReader(BtrieveQuery query, QueryMatcher matcher)
{
if (query.Reader == null || !query.Reader.Read())
{
+ var hadRows = query?.Reader?.DataReader?.HasRows ?? false;
+
query?.Reader?.Dispose();
query.Reader = null;
- if (query.ContinuationReader == null)
+ if (query.ContinuationReader == null || !hadRows)
return false;
query.Reader = query.ContinuationReader(query);
@@ -901,17 +923,18 @@ private void PopulateSqliteDataTable(SqliteConnection connection, BtrieveFile bt
private void CreateSqliteMetadataTable(SqliteConnection connection, BtrieveFile btrieveFile)
{
const string statement =
- "CREATE TABLE metadata_t(record_length INTEGER NOT NULL, physical_record_length INTEGER NOT NULL, page_length INTEGER NOT NULL)";
+ "CREATE TABLE metadata_t(record_length INTEGER NOT NULL, physical_record_length INTEGER NOT NULL, page_length INTEGER NOT NULL, variable_length_records INTEGER NOT NULL)";
using var cmd = new SqliteCommand(statement, connection);
cmd.ExecuteNonQuery();
using var insertCmd = new SqliteCommand() { Connection = connection };
cmd.CommandText =
- "INSERT INTO metadata_t(record_length, physical_record_length, page_length) VALUES(@record_length, @physical_record_length, @page_length)";
+ "INSERT INTO metadata_t(record_length, physical_record_length, page_length, variable_length_records) VALUES(@record_length, @physical_record_length, @page_length, @variable_length_records)";
cmd.Parameters.AddWithValue("@record_length", btrieveFile.RecordLength);
cmd.Parameters.AddWithValue("@physical_record_length", btrieveFile.PhysicalRecordLength);
cmd.Parameters.AddWithValue("@page_length", btrieveFile.PageLength);
+ cmd.Parameters.AddWithValue("@variable_length_records", btrieveFile.VariableLengthRecords ? 1 : 0);
cmd.ExecuteNonQuery();
}
@@ -949,9 +972,9 @@ private void CreateSqliteKeysTable(SqliteConnection connection, BtrieveFile btri
///
/// Creates the Sqlite database from btrieveFile.
///
- private void CreateSqliteDB(string fullpath, BtrieveFile btrieveFile)
+ public void CreateSqliteDB(string fullpath, BtrieveFile btrieveFile)
{
- _logger.Warn($"Creating sqlite db {fullpath}");
+ _logger.Info($"Creating sqlite db {fullpath}");
FullPath = fullpath;
diff --git a/MBBSEmu/Extensions/ReadOnlySpanExtensions.cs b/MBBSEmu/Extensions/ReadOnlySpanExtensions.cs
index 2b0f80fc..4678142b 100644
--- a/MBBSEmu/Extensions/ReadOnlySpanExtensions.cs
+++ b/MBBSEmu/Extensions/ReadOnlySpanExtensions.cs
@@ -13,5 +13,18 @@ public static ReadOnlySpan ToCharSpan(this ReadOnlySpan readOnlySpan
}
return output;
}
+
+ ///
+ /// Returns true if readOnlySpan contains only value.
+ ///
+ public static bool ContainsOnly(this ReadOnlySpan<byte> readOnlySpan, byte value)
+ {
+ for (var i = 0; i < readOnlySpan.Length; ++i)
+ {
+ if (readOnlySpan[i] != value)
+ return false;
+ }
+ return true;
+ }
}
-}
\ No newline at end of file
+}
diff --git a/MBBSEmu/HostProcess/ExportedModules/Majorbbs.cs b/MBBSEmu/HostProcess/ExportedModules/Majorbbs.cs
index bd9e4e8f..8824950c 100644
--- a/MBBSEmu/HostProcess/ExportedModules/Majorbbs.cs
+++ b/MBBSEmu/HostProcess/ExportedModules/Majorbbs.cs
@@ -693,9 +693,11 @@ public ReadOnlySpan Invoke(ushort ordinal, bool offsetsOnly = false)
register_textvar();
break;
case 997:
- case 444:
obtbtvl();
break;
+ case 444:
+ obtbtv();
+ break;
case 158:
dclvda();
break;
@@ -854,7 +856,7 @@ public ReadOnlySpan Invoke(ushort ordinal, bool offsetsOnly = false)
outmlt();
break;
case 622:
- updvbtv();
+ upvbtv();
break;
case 959:
stlcpy();
@@ -2006,7 +2008,7 @@ private void hasmkey()
///
/// Returns a pseudo-random integral number in the range between 0 and RAND_MAX.
///
- /// Signature: int rand (void)
+ /// Signature: int rand()
/// Returns: AX = 16-bit Random Number
///
///
@@ -2204,7 +2206,6 @@ private void rstmbk()
#if DEBUG
_logger.Warn($"Queue Empty, Ignoring");
#endif
- Registers.AX = 0;
return;
}
@@ -2337,7 +2338,7 @@ private void stpbtv()
///
/// Restores the last Btrieve data block for use
///
- /// Signature: void rstbtv (void)
+ /// Signature: void rstbtv()
///
///
private void rstbtv()
@@ -2365,6 +2366,11 @@ private void rstbtv()
///
///
private void dupdbtv()
+ {
+ Registers.AX = updateBtv() ? (ushort)1 : (ushort)0;
+ }
+
+ private bool updateBtv()
{
var btrieveRecordPointerPointer = GetParameterPointer(0);
@@ -2372,12 +2378,11 @@ private void dupdbtv()
var dataToWrite = Module.Memory.GetArray(btrieveRecordPointerPointer, (ushort) currentBtrieveFile.RecordLength);
- Registers.AX = currentBtrieveFile.Update(dataToWrite.ToArray()) ? (ushort) 1 : (ushort) 0;
-
#if DEBUG
_logger.Info(
$"Updated current Btrieve record ({currentBtrieveFile.Position}) with {dataToWrite.Length} bytes");
#endif
+ return currentBtrieveFile.Update(dataToWrite.ToArray());
}
///
@@ -2387,9 +2392,7 @@ private void dupdbtv()
///
private void insbtv()
{
- dinsbtv();
-
- if (Registers.AX == 0)
+ if (!insertBtv())
throw new SystemException("Failed to insert database record");
}
@@ -2400,18 +2403,22 @@ private void insbtv()
///
///
private void dinsbtv()
+ {
+ Registers.AX = insertBtv() ? (ushort)1 : (ushort)0;
+ }
+
+ private bool insertBtv()
{
var btrieveRecordPointer = GetParameterPointer(0);
var currentBtrieveFile = BtrieveGetProcessor(Module.Memory.GetPointer("BB"));
var dataToWrite = Module.Memory.GetArray(btrieveRecordPointer, (ushort) currentBtrieveFile.RecordLength);
-
- Registers.AX = currentBtrieveFile.Insert(dataToWrite.ToArray()) == 0 ? (ushort) 0 : (ushort) 1;
-
#if DEBUG
_logger.Info(
$"Inserted Btrieve record at {currentBtrieveFile.Position} with {dataToWrite.Length} bytes");
#endif
+
+ return currentBtrieveFile.Insert(dataToWrite.ToArray()) != 0;
}
@@ -2714,6 +2721,18 @@ private void register_textvar()
#endif
}
+ ///
+ /// Does a GetEqual based on the Key -- the record corresponding to the key is returned
+ ///
+ /// Signature: int obtbtv (void *recptr, void *key, int keynum, int obtopt)
+ /// Returns: AX == 0 record not found, 1 record found
+ ///
+ /// true if record found
+ private void obtbtv()
+ {
+ Registers.AX = obtainBtv() ? (ushort)1 : (ushort)0;
+ }
+
///
/// Does a GetEqual based on the Key -- the record corresponding to the key is returned
///
@@ -2721,7 +2740,12 @@ private void register_textvar()
/// Returns: AX == 0 record not found, 1 record found
///
/// true if record found
- private bool obtbtvl()
+ private void obtbtvl()
+ {
+ Registers.AX = obtainBtv() ? (ushort)1 : (ushort)0;
+ }
+
+ private bool obtainBtv()
{
var recordPointer = GetParameterPointer(0);
var keyPointer = GetParameterPointer(2);
@@ -2786,7 +2810,6 @@ private bool obtbtvl()
Module.Memory.SetArray(btvStruct.key, currentBtrieveFile.Keys[keyNum].ExtractKeyDataFromRecord(record));
}
- Registers.AX = result ? (ushort) 1 : (ushort) 0;
return result;
}
@@ -4463,7 +4486,7 @@ private void curusr()
{
#if DEBUG
_logger.Warn($"Invalid Channel: {newUserNumber}");
-#endif
+#endif
return;
}
@@ -4547,7 +4570,6 @@ private void f_lxrsh()
private void byenow()
{
prfmsg();
- Registers.AX = 0;
}
///
@@ -4573,23 +4595,24 @@ private void byenow()
///
/// Update the Btrieve current record with a variable length record
///
- /// Signature: void updvbtv(char *recptr)
+ /// Signature: void upvbtv(char *recptr, int length)
///
///
- private void updvbtv()
+ private void upvbtv()
{
var btrieveRecordPointerPointer = GetParameterPointer(0);
+ var length = GetParameter(2);
var currentBtrieveFile = BtrieveGetProcessor(Module.Memory.GetPointer("BB"));
- var dataToWrite = Module.Memory.GetArray(btrieveRecordPointerPointer, (ushort) currentBtrieveFile.RecordLength);
+ var dataToWrite = Module.Memory.GetArray(btrieveRecordPointerPointer, length);
currentBtrieveFile.Update(dataToWrite.ToArray());
-#if DEBUG
+#if DEBUG
 _logger.Info(
- $"Updated current Btrieve record ({currentBtrieveFile.Position}) with {dataToWrite.Length} bytes");
-#endif
+ $"Updated current Btrieve record ({currentBtrieveFile.Position}) with variable {dataToWrite.Length} bytes");
+#endif
}
///
@@ -4646,8 +4669,6 @@ private void begin_polling()
if (routinePointer == IntPtr16.Empty)
{
ChannelDictionary[channelNumber].PollingRoutine = null;
- Registers.AX = 0;
-
#if DEBUG
_logger.Info($"Unassigned Polling Routine on Channel {channelNumber}");
#endif
@@ -4662,8 +4683,6 @@ private void begin_polling()
_logger.Info(
$"Assigned Polling Routine {ChannelDictionary[channelNumber].PollingRoutine} to Channel {channelNumber}");
#endif
-
- Registers.AX = 0;
}
///
@@ -4676,12 +4695,10 @@ private void stop_polling()
var channelNumber = GetParameter(0);
ChannelDictionary[channelNumber].PollingRoutine = null;
- Registers.AX = 0;
#if DEBUG
_logger.Info($"Unassigned Polling Routine on Channel {channelNumber}");
#endif
- return;
}
///
@@ -5683,12 +5700,8 @@ private void f_ludiv()
///
private void getbtvl()
{
- ushort ax = Registers.AX;
-
- if (!obtbtvl())
+ if (!obtainBtv())
throw new ArgumentException($"No record found in getbtvl, bombing");
-
- Registers.AX = ax;
}
///
@@ -6001,15 +6014,15 @@ private void zonkhl()
var inputStringPointer = GetParameterPointer(0);
var inputString = Module.Memory.GetString(inputStringPointer).ToArray();
var isSpace = true;
-
+
for (var i = 0; i < inputString.Length; i++)
{
if (char.IsUpper((char)inputString[i]))
inputString[i] = (byte)char.ToLower((char)inputString[i]);
-
+
if (inputString[i] == (byte) ' ')
isSpace = true;
-
+
if (char.IsLower((char)inputString[i]) && isSpace)
{
inputString[i] = (byte)char.ToUpper((char)inputString[i]);
@@ -6450,9 +6463,7 @@ private void strncmp()
///
private void updbtv()
{
- dupdbtv();
-
- if (Registers.AX == 0)
+ if (!updateBtv())
throw new SystemException("Unable to update btrieve record");
}
@@ -7393,7 +7404,6 @@ private void cncwrd()
///
private void cntrbtv()
{
- // TODO THIS IS WRONG?
var currentBtrieveFilePointer = Module.Memory.GetPointer("BB");
var currentBtrieveFile = BtrieveGetProcessor(currentBtrieveFilePointer);
diff --git a/MBBSEmu/Memory/IGlobalCache.cs b/MBBSEmu/Memory/IGlobalCache.cs
index 49810fef..81f404d5 100644
--- a/MBBSEmu/Memory/IGlobalCache.cs
+++ b/MBBSEmu/Memory/IGlobalCache.cs
@@ -11,4 +11,4 @@ public interface IGlobalCache : IDisposable
bool ContainsKey(string key);
bool TryGet(string key, out T result);
}
-}
\ No newline at end of file
+}
diff --git a/MBBSEmu/Program.cs b/MBBSEmu/Program.cs
index 6df532ed..85f4acad 100644
--- a/MBBSEmu/Program.cs
+++ b/MBBSEmu/Program.cs
@@ -87,7 +87,7 @@ public class Program
private int _cancellationRequests = 0;
private ServiceResolver _serviceResolver;
-
+
static void Main(string[] args)
{
new Program().Run(args);
@@ -194,7 +194,7 @@ private void Run(string[] args) {
_serviceResolver = new ServiceResolver();
_logger = _serviceResolver.GetService();
-
+
//Setup Generic Database
var resourceManager = _serviceResolver.GetService();
var globalCache = _serviceResolver.GetService();
@@ -392,8 +392,8 @@ private void DatabaseReset()
}
}
- var sysopUserId = acct.InsertAccount("sysop", _newSysopPassword, "sysop@mbbsemu.com");
- var guestUserId = acct.InsertAccount("guest", "guest", "guest@mbbsemu.com");
+ var sysopUserId = acct.InsertAccount("Sysop", _newSysopPassword, "sysop@mbbsemu.com");
+ var guestUserId = acct.InsertAccount("Guest", "guest", "guest@mbbsemu.com");
var keys = _serviceResolver.GetService();