From 7d5a157162703c04f05e2deecaf5ebe1c1483ebf Mon Sep 17 00:00:00 2001
From: fallenoak
Date: Sun, 29 Jan 2023 11:48:18 -0600
Subject: [PATCH] feat(big): add SBigFromUnsigned, SBigNew, and SBigToBinaryBuffer

---
 storm/Big.cpp           | 25 +++++++++++++++++++++
 storm/Big.hpp           | 13 +++++++++++
 storm/big/BigBuffer.cpp | 30 ++++++++++++++++++++++++++
 storm/big/BigBuffer.hpp | 22 +++++++++++++++++++
 storm/big/BigData.cpp   |  9 ++++++++
 storm/big/BigData.hpp   | 20 +++++++++++++++++
 storm/big/BigStack.hpp  | 19 ++++++++++++++++
 storm/big/Ops.cpp       | 20 +++++++++++++++++
 storm/big/Ops.hpp       |  5 +++++
 test/Big.cpp            | 48 +++++++++++++++++++++++++++++++++++++++++
 10 files changed, 211 insertions(+)
 create mode 100644 storm/Big.cpp
 create mode 100644 storm/Big.hpp
 create mode 100644 storm/big/BigBuffer.cpp
 create mode 100644 storm/big/BigBuffer.hpp
 create mode 100644 storm/big/BigData.cpp
 create mode 100644 storm/big/BigData.hpp
 create mode 100644 storm/big/BigStack.hpp

diff --git a/storm/Big.cpp b/storm/Big.cpp
new file mode 100644
index 0000000..12b2d17
--- /dev/null
+++ b/storm/Big.cpp
@@ -0,0 +1,25 @@
+#include "storm/Big.hpp"
+#include "storm/big/Ops.hpp"
+#include "storm/Memory.hpp"
+#include <cstring>
+
+void SBigFromUnsigned(BigData* num, uint32_t val) {
+    FromUnsigned(num->Primary(), val);
+}
+
+void SBigNew(BigData** num) {
+    auto m = SMemAlloc(sizeof(BigData), __FILE__, __LINE__, 0x0);
+    *num = new (m) BigData();
+}
+
+void SBigToBinaryBuffer(BigData* num, uint8_t* data, uint32_t maxBytes, uint32_t* bytes) {
+    auto& output = num->Output();
+    ToBinary(output, num->Primary());
+
+    uint32_t n = output.Count() < maxBytes ? output.Count() : maxBytes;
+    memcpy(data, output.Ptr(), n);
+
+    if (bytes) {
+        *bytes = n;
+    }
+}
diff --git a/storm/Big.hpp b/storm/Big.hpp
new file mode 100644
index 0000000..3759bbd
--- /dev/null
+++ b/storm/Big.hpp
@@ -0,0 +1,13 @@
+#ifndef STORM_BIG_HPP
+#define STORM_BIG_HPP
+
+#include "storm/big/BigData.hpp"
+#include <cstdint>
+
+void SBigFromUnsigned(BigData* num, uint32_t val);
+
+void SBigNew(BigData** num);
+
+void SBigToBinaryBuffer(BigData* num, uint8_t* data, uint32_t maxBytes, uint32_t* bytes);
+
+#endif
diff --git a/storm/big/BigBuffer.cpp b/storm/big/BigBuffer.cpp
new file mode 100644
index 0000000..c2280f2
--- /dev/null
+++ b/storm/big/BigBuffer.cpp
@@ -0,0 +1,30 @@
+#include "storm/big/BigBuffer.hpp"
+
+uint32_t& BigBuffer::operator[](uint32_t index) {
+    this->GrowToFit(index);
+    return this->m_data[this->m_offset + index];
+}
+
+uint32_t BigBuffer::operator[](uint32_t index) const {
+    if (this->IsUsed(index)) {
+        return const_cast<TSGrowableArray<uint32_t>&>(this->m_data)[this->m_offset + index];
+    }
+
+    return 0;
+}
+
+uint32_t BigBuffer::Count() const {
+    return this->m_data.Count() - this->m_offset;
+}
+
+void BigBuffer::GrowToFit(uint32_t index) {
+    this->m_data.GrowToFit(this->m_offset + index, 1);
+}
+
+int32_t BigBuffer::IsUsed(uint32_t index) const {
+    return index + this->m_offset < this->m_data.Count();
+}
+
+void BigBuffer::SetCount(uint32_t count) {
+    this->m_data.SetCount(this->m_offset + count);
+}
diff --git a/storm/big/BigBuffer.hpp b/storm/big/BigBuffer.hpp
new file mode 100644
index 0000000..9f6919c
--- /dev/null
+++ b/storm/big/BigBuffer.hpp
@@ -0,0 +1,22 @@
+#ifndef STORM_BIG_BIG_BUFFER_HPP
+#define STORM_BIG_BIG_BUFFER_HPP
+
+#include "storm/Array.hpp"
+#include <cstdint>
+
+class BigBuffer {
+    public:
+        // Member variables
+        TSGrowableArray<uint32_t> m_data;
+        uint32_t m_offset = 0;
+
+        // Member functions
+        uint32_t& operator[](uint32_t index);
+        uint32_t operator[](uint32_t index) const;
+        uint32_t Count() const;
+        void GrowToFit(uint32_t index);
+        int32_t IsUsed(uint32_t index) const;
+        void SetCount(uint32_t count);
+};
+
+#endif
diff --git a/storm/big/BigData.cpp b/storm/big/BigData.cpp
new file mode 100644
index 0000000..ad19de5
--- /dev/null
+++ b/storm/big/BigData.cpp
@@ -0,0 +1,9 @@
+#include "storm/big/BigData.hpp"
+
+TSGrowableArray<uint8_t>& BigData::Output() const {
+    return const_cast<TSGrowableArray<uint8_t>&>(this->m_output);
+}
+
+BigBuffer& BigData::Primary() {
+    return this->m_primary;
+}
diff --git a/storm/big/BigData.hpp b/storm/big/BigData.hpp
new file mode 100644
index 0000000..baae677
--- /dev/null
+++ b/storm/big/BigData.hpp
@@ -0,0 +1,20 @@
+#ifndef STORM_BIG_BIG_DATA_HPP
+#define STORM_BIG_BIG_DATA_HPP
+
+#include "storm/big/BigBuffer.hpp"
+#include "storm/big/BigStack.hpp"
+#include "storm/Array.hpp"
+
+class BigData {
+    public:
+        // Member variables
+        BigBuffer m_primary;
+        BigStack m_stack;
+        TSGrowableArray<uint8_t> m_output;
+
+        // Member functions
+        TSGrowableArray<uint8_t>& Output() const;
+        BigBuffer& Primary();
+};
+
+#endif
diff --git a/storm/big/BigStack.hpp b/storm/big/BigStack.hpp
new file mode 100644
index 0000000..2192911
--- /dev/null
+++ b/storm/big/BigStack.hpp
@@ -0,0 +1,19 @@
+#ifndef STORM_BIG_BIG_STACK_HPP
+#define STORM_BIG_BIG_STACK_HPP
+
+#include "storm/big/BigBuffer.hpp"
+#include <cstdint>
+
+class BigStack {
+    public:
+        // Static variables
+        const static uint32_t SIZE = 16;
+
+        // Member variables
+        BigBuffer m_buffer[SIZE];
+        uint32_t m_used = 0;
+
+        // Member functions
+};
+
+#endif
diff --git a/storm/big/Ops.cpp b/storm/big/Ops.cpp
index d2200d4..cc15f64 100644
--- a/storm/big/Ops.cpp
+++ b/storm/big/Ops.cpp
@@ -19,6 +19,26 @@ uint32_t ExtractLowPartSx(uint64_t& value) {
     return low;
 }
 
+void FromUnsigned(BigBuffer& buffer, uint32_t value) {
+    buffer[0] = value;
+    buffer.SetCount(1);
+}
+
 uint64_t MakeLarge(uint32_t low, uint32_t high) {
     return low + (static_cast<uint64_t>(high) << 32);
 }
+
+void ToBinaryAppend(TSGrowableArray<uint8_t>& output, const BigBuffer& buffer) {
+    for (uint32_t i = 0; i < buffer.Count() * 4; i++) {
+        auto byte = buffer[i / 4] >> (8 * (i & 3));
+
+        if (byte || (i / 4) + 1 < buffer.Count()) {
+            *output.New() = byte;
+        }
+    }
+}
+
+void ToBinary(TSGrowableArray<uint8_t>& output, const BigBuffer& buffer) {
+    output.SetCount(0);
+    ToBinaryAppend(output, buffer);
+}
diff --git a/storm/big/Ops.hpp b/storm/big/Ops.hpp
index fb0b9e5..096ca48 100644
--- a/storm/big/Ops.hpp
+++ b/storm/big/Ops.hpp
@@ -1,12 +1,17 @@
 #ifndef STORM_BIG_OPS_HPP
 #define STORM_BIG_OPS_HPP
 
+#include "storm/big/BigBuffer.hpp"
 #include <cstdint>
 
 uint32_t ExtractLowPart(uint64_t& value);
 
 uint32_t ExtractLowPartSx(uint64_t& value);
 
+void FromUnsigned(BigBuffer& buffer, uint32_t value);
+
 uint64_t MakeLarge(uint32_t low, uint32_t high);
 
+void ToBinary(TSGrowableArray<uint8_t>& output, const BigBuffer& buffer);
+
 #endif
diff --git a/test/Big.cpp b/test/Big.cpp
index 702a5d9..1886ebc 100644
--- a/test/Big.cpp
+++ b/test/Big.cpp
@@ -1,3 +1,4 @@
+#include "storm/Big.hpp"
 #include "storm/big/Ops.hpp"
 #include "test/Test.hpp"
 
@@ -66,3 +67,50 @@ TEST_CASE("MakeLarge", "[big]") {
         REQUIRE(value == 0x1122334400000000);
     }
 }
+
+TEST_CASE("SBigFromUnsigned", "[big]") {
+    SECTION("creates bigdata from 0") {
+        BigData* num;
+        SBigNew(&num);
+        SBigFromUnsigned(num, 0);
+
+        CHECK(num->Primary().Count() == 1);
+        CHECK(num->Primary()[0] == 0);
+    }
+
+    SECTION("creates bigdata from 0x12345678") {
+        BigData* num;
+        SBigNew(&num);
+        SBigFromUnsigned(num, 0x12345678);
+
+        CHECK(num->Primary().Count() == 1);
+        CHECK(num->Primary()[0] == 0x12345678);
+    }
+}
+
+TEST_CASE("SBigToBinaryBuffer", "[big]") { + SECTION("returns expected buffer for bigdata representing 0") { + BigData* num; + SBigNew(&num); + SBigFromUnsigned(num, 0); + + uint8_t buffer[4]; + uint32_t bytes; + SBigToBinaryBuffer(num, buffer, sizeof(buffer), &bytes); + + REQUIRE(bytes == 0); + } + + SECTION("returns expected buffer for bigdata representing 0x12345678") { + BigData* num; + SBigNew(&num); + SBigFromUnsigned(num, 0x12345678); + + uint8_t buffer[4]; + uint32_t bytes; + SBigToBinaryBuffer(num, buffer, sizeof(buffer), &bytes); + + CHECK(bytes == 4); + CHECK(*reinterpret_cast(buffer) == 0x12345678); + } +}