Skip to content

Commit 77564dd

Browse files
authored
[ML] Switch to using global memory constants (#1620)
We have global constants for memory in KB, MB, etc., but we were not reliably using them. This is a small tidy-up to migrate to using them consistently.
1 parent 79dce85 commit 77564dd

16 files changed

+76
-60
lines changed

include/core/Constants.h

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -42,19 +42,19 @@ constexpr core_t::TTime WEEK{604800};
4242
constexpr core_t::TTime YEAR{31536000};
4343

4444
//! The number of bytes in a kilobyte
45-
const std::size_t BYTES_IN_KILOBYTE{1024ULL};
45+
constexpr std::size_t BYTES_IN_KILOBYTES{1024ULL};
4646

4747
//! The number of bytes in a megabyte
48-
const std::size_t BYTES_IN_MEGABYTE{1024ULL * 1024};
48+
constexpr std::size_t BYTES_IN_MEGABYTES{1024ULL * 1024};
4949

5050
//! The number of bytes in a gigabyte
51-
const std::size_t BYTES_IN_GIGABYTE{1024ULL * 1024 * 1024};
51+
constexpr std::size_t BYTES_IN_GIGABYTES{1024ULL * 1024 * 1024};
5252

5353
//! The number of bytes in a terabyte
54-
const std::size_t BYTES_IN_TERABYTE{1024ULL * 1024 * 1024 * 1024};
54+
constexpr std::size_t BYTES_IN_TERABYTES{1024ULL * 1024 * 1024 * 1024};
5555

5656
//! The number of bytes in a petabyte
57-
const std::size_t BYTES_IN_PETABYTE{1024ULL * 1024 * 1024 * 1024 * 1024};
57+
constexpr std::size_t BYTES_IN_PETABYTES{1024ULL * 1024 * 1024 * 1024 * 1024};
5858

5959
//! Log of min double.
6060
const double LOG_MIN_DOUBLE{std::log(std::numeric_limits<double>::min())};
@@ -72,9 +72,9 @@ constexpr double LOG_TWO{0.693147180559945};
7272
constexpr double LOG_TWO_PI{1.83787706640935};
7373

7474
#ifdef Windows
75-
const char PATH_SEPARATOR = '\\';
75+
constexpr char PATH_SEPARATOR{'\\'};
7676
#else
77-
const char PATH_SEPARATOR = '/';
77+
constexpr char PATH_SEPARATOR{'/'};
7878
#endif
7979
}
8080
}

include/test/CDataFrameAnalyzerTrainingFactory.h

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99

1010
#include <core/CDataFrame.h>
1111
#include <core/CSmallVector.h>
12+
#include <core/Constants.h>
1213

1314
#include <maths/CBoostedTreeFactory.h>
1415
#include <maths/CBoostedTreeLoss.h>
@@ -162,8 +163,8 @@ class TEST_EXPORT CDataFrameAnalyzerTrainingFactory {
162163
treeFactory.featureBagFraction(featureBagFraction);
163164
}
164165

165-
const std::int64_t memoryLimit{1024 * 1024 * 1024}; // 1gb default value
166-
ml::api::CDataFrameTrainBoostedTreeInstrumentation instrumentation("testJob", memoryLimit);
166+
ml::api::CDataFrameTrainBoostedTreeInstrumentation instrumentation(
167+
"testJob", core::constants::BYTES_IN_GIGABYTES);
167168
treeFactory.analysisInstrumentation(instrumentation);
168169

169170
auto tree = treeFactory.buildFor(*frame, weights.size());

lib/api/CAnomalyJobConfig.cc

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -276,13 +276,13 @@ std::size_t CAnomalyJobConfig::CAnalysisLimits::modelMemoryLimitMb(const std::st
276276
std::tie(memoryLimitBytes, std::ignore) = core::CStringUtils::memorySizeStringToBytes(
277277
memoryLimitStr, DEFAULT_MEMORY_LIMIT_BYTES);
278278

279-
std::size_t memoryLimitMb = memoryLimitBytes / core::constants::BYTES_IN_MEGABYTE;
279+
std::size_t memoryLimitMb{memoryLimitBytes / core::constants::BYTES_IN_MEGABYTES};
280280

281281
if (memoryLimitMb == 0) {
282282
LOG_ERROR(<< "Invalid limit value " << memoryLimitStr << ". Limit must have a minimum value of 1mb."
283283
<< " Using default memory limit value "
284-
<< DEFAULT_MEMORY_LIMIT_BYTES / core::constants::BYTES_IN_MEGABYTE);
285-
memoryLimitMb = DEFAULT_MEMORY_LIMIT_BYTES / core::constants::BYTES_IN_MEGABYTE;
284+
<< DEFAULT_MEMORY_LIMIT_BYTES / core::constants::BYTES_IN_MEGABYTES);
285+
memoryLimitMb = DEFAULT_MEMORY_LIMIT_BYTES / core::constants::BYTES_IN_MEGABYTES;
286286
}
287287

288288
return memoryLimitMb;

lib/api/CDataFrameAnalysisInstrumentation.cc

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66
#include <api/CDataFrameAnalysisInstrumentation.h>
77

88
#include <core/CTimeUtils.h>
9+
#include <core/Constants.h>
910

1011
#include <maths/CBoostedTree.h>
1112

@@ -33,6 +34,7 @@ using TStrVec = std::vector<std::string>;
3334
const double MEMORY_LIMIT_INCREMENT{2.0}; // request 100% more memory
3435
const std::size_t MAXIMUM_FRACTIONAL_PROGRESS{std::size_t{1}
3536
<< ((sizeof(std::size_t) - 2) * 8)};
37+
const std::int64_t BYTES_IN_KB{static_cast<std::int64_t>(core::constants::BYTES_IN_KILOBYTES)};
3638

3739
// clang-format off
3840
const std::string CLASSIFICATION_STATS_TAG{"classification_stats"};
@@ -73,25 +75,25 @@ const std::string PHASE{"phase"};
7375
const std::string PROGRESS_PERCENT{"progress_percent"};
7476
// clang-format on
7577

76-
std::string bytesToString(double value) {
78+
std::string bytesToString(std::int64_t value) {
7779
std::ostringstream stream;
7880
stream << std::fixed;
7981
stream << std::setprecision(0);
80-
value = std::ceil(value / 1024);
81-
if (value < 1024) {
82+
value = (value + BYTES_IN_KB - 1) / BYTES_IN_KB;
83+
if (value < BYTES_IN_KB) {
8284
stream << value;
8385
stream << " kb";
8486
} else {
85-
value = std::ceil(value / 1024);
87+
value = (value + BYTES_IN_KB - 1) / BYTES_IN_KB;
8688
stream << value;
8789
stream << " mb";
8890
}
8991

9092
return stream.str();
9193
}
9294

93-
std::string bytesToString(std::int64_t bytes) {
94-
return bytesToString(static_cast<double>(bytes));
95+
std::string bytesToString(double bytes) {
96+
return bytesToString(static_cast<std::int64_t>(bytes));
9597
}
9698
}
9799

lib/api/CDataFrameAnalysisRunner.cc

Lines changed: 12 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@
1010
#include <core/CJsonStatePersistInserter.h>
1111
#include <core/CLogger.h>
1212
#include <core/CStateCompressor.h>
13+
#include <core/Constants.h>
1314

1415
#include <api/CDataFrameAnalysisSpecification.h>
1516
#include <api/CMemoryUsageEstimationResultJsonWriter.h>
@@ -32,8 +33,6 @@ std::size_t maximumNumberPartitions(const CDataFrameAnalysisSpecification& spec)
3233
// user to allocate more resources for the job in this case.
3334
return static_cast<std::size_t>(std::sqrt(static_cast<double>(spec.numberRows())) + 0.5);
3435
}
35-
36-
const std::size_t BYTES_IN_MB{1024 * 1024};
3736
}
3837

3938
CDataFrameAnalysisRunner::CDataFrameAnalysisRunner(const CDataFrameAnalysisSpecification& spec)
@@ -49,15 +48,17 @@ void CDataFrameAnalysisRunner::estimateMemoryUsage(CMemoryUsageEstimationResultJ
4948
std::size_t numberColumns{m_Spec.numberColumns()};
5049
std::size_t maxNumberPartitions{maximumNumberPartitions(m_Spec)};
5150
if (maxNumberPartitions == 0) {
52-
writer.write("0", "0");
51+
writer.write("0mb", "0mb");
5352
return;
5453
}
5554
std::size_t expectedMemoryWithoutDisk{
5655
this->estimateMemoryUsage(numberRows, numberRows, numberColumns)};
5756
std::size_t expectedMemoryWithDisk{this->estimateMemoryUsage(
5857
numberRows, numberRows / maxNumberPartitions, numberColumns)};
5958
auto roundUpToNearestMb = [](std::size_t bytes) {
60-
return std::to_string((bytes + BYTES_IN_MB - 1) / BYTES_IN_MB) + "mb";
59+
return std::to_string((bytes + core::constants::BYTES_IN_MEGABYTES - 1) /
60+
core::constants::BYTES_IN_MEGABYTES) +
61+
"mb";
6162
};
6263
writer.write(roundUpToNearestMb(expectedMemoryWithoutDisk),
6364
roundUpToNearestMb(expectedMemoryWithDisk));
@@ -97,14 +98,14 @@ void CDataFrameAnalysisRunner::computeAndSaveExecutionStrategy() {
9798

9899
if (memoryUsage > memoryLimit) {
99100
auto roundMb = [](std::size_t memory) {
100-
return 0.01 * static_cast<double>((100 * memory) / BYTES_IN_MB);
101+
return 0.01 * static_cast<double>((100 * memory) / core::constants::BYTES_IN_MEGABYTES);
101102
};
102-
// Simply log the limit being configured too low.
103-
// If we exceed the limit during the process, we will fail and the user
104-
// will have to update the limit and attempt to re-run
105-
LOG_DEBUG(<< "Memory limit " << roundMb(memoryLimit) << "MB is configured lower than estimate "
106-
<< std::ceil(roundMb(memoryUsage)) << "MB."
107-
<< "Analytics process may fail due to low memory limit");
103+
// Simply log the limit being configured too low. If we exceed the limit
104+
// during the run, we will fail and the user will have to update the
105+
// limit and attempt to re-run.
106+
LOG_INFO(<< "Memory limit " << roundMb(memoryLimit) << "MB is configured lower"
107+
<< " than the estimate " << std::ceil(roundMb(memoryUsage)) << "MB."
108+
<< "The analytics process may fail due to hitting the memory limit.");
108109
}
109110
if (m_NumberPartitions > 1) {
110111
// The maximum number of rows is found by binary search in the interval

lib/api/CInferenceModelDefinition.cc

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88
#include <core/CBase64Filter.h>
99
#include <core/CPersistUtils.h>
1010
#include <core/CStringUtils.h>
11+
#include <core/Constants.h>
1112

1213
#include <boost/iostreams/filter/gzip.hpp>
1314
#include <boost/iostreams/filtering_stream.hpp>
@@ -84,7 +85,7 @@ const std::string JSON_WEIGHTED_SUM_TAG{"weighted_sum"};
8485
const std::string JSON_WEIGHTS_TAG{"weights"};
8586
// clang-format on
8687

87-
const std::size_t MAX_DOCUMENT_SIZE(16 * 1024 * 1024); // 16MB
88+
const std::size_t MAX_DOCUMENT_SIZE(16 * core::constants::BYTES_IN_MEGABYTES);
8889

8990
auto toRapidjsonValue(std::size_t value) {
9091
return rapidjson::Value{static_cast<std::uint64_t>(value)};
@@ -431,9 +432,10 @@ void CInferenceModelDefinition::addToDocumentCompressed(TRapidJsonWriter& writer
431432
std::streamsize remained{compressedStream.tellg()};
432433
compressedStream.seekg(0, compressedStream.beg);
433434
std::size_t docNum{0};
435+
std::string buffer;
434436
while (remained > 0) {
435437
std::size_t bytesToProcess{std::min(MAX_DOCUMENT_SIZE, static_cast<size_t>(remained))};
436-
std::string buffer;
438+
buffer.clear();
437439
std::copy_n(std::istreambuf_iterator<char>(compressedStream.seekg(processed)),
438440
bytesToProcess, std::back_inserter(buffer));
439441
remained -= bytesToProcess;

lib/api/CSingleStreamDataAdder.cc

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -6,20 +6,21 @@
66
#include <api/CSingleStreamDataAdder.h>
77

88
#include <core/CLogger.h>
9+
#include <core/Constants.h>
910

1011
#include <ostream>
1112

1213
namespace ml {
1314
namespace api {
1415

15-
const size_t CSingleStreamDataAdder::MAX_DOCUMENT_SIZE(16 * 1024 * 1024); // 16MB
16+
const std::size_t CSingleStreamDataAdder::MAX_DOCUMENT_SIZE(16 * core::constants::BYTES_IN_MEGABYTES);
1617

1718
CSingleStreamDataAdder::CSingleStreamDataAdder(const TOStreamP& stream)
1819
: m_Stream(stream) {
1920
}
2021

2122
CSingleStreamDataAdder::TOStreamP CSingleStreamDataAdder::addStreamed(const std::string& id) {
22-
if (m_Stream != nullptr && !m_Stream->bad()) {
23+
if (m_Stream != nullptr && m_Stream->bad() == false) {
2324
// Start with metadata, leaving the index for the receiving code to set
2425
(*m_Stream) << "{\"index\":{\"_id\":\"" << id << "\"}}\n";
2526
}
@@ -33,7 +34,7 @@ bool CSingleStreamDataAdder::streamComplete(TOStreamP& stream, bool force) {
3334
return false;
3435
}
3536

36-
if (stream != nullptr && !stream->bad()) {
37+
if (stream != nullptr && stream->bad() == false) {
3738
// Each Elasticsearch document must be followed by a newline
3839
stream->put('\n');
3940

@@ -46,7 +47,7 @@ bool CSingleStreamDataAdder::streamComplete(TOStreamP& stream, bool force) {
4647
}
4748
}
4849

49-
return stream != nullptr && !stream->bad();
50+
return stream != nullptr && stream->bad() == false;
5051
}
5152

5253
std::size_t CSingleStreamDataAdder::maxDocumentSize() const {

lib/api/unittest/CAnomalyJobLimitTest.cc

Lines changed: 11 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -418,13 +418,15 @@ BOOST_AUTO_TEST_CASE(testModelledEntityCountForFixedMemoryLimit) {
418418
LOG_DEBUG(<< "# partition = " << used.s_PartitionFields);
419419
LOG_DEBUG(<< "Memory status = " << used.s_MemoryStatus);
420420
LOG_DEBUG(<< "Memory usage bytes = " << used.s_Usage);
421-
LOG_DEBUG(<< "Memory limit bytes = " << memoryLimit * 1024 * 1024);
421+
LOG_DEBUG(<< "Memory limit bytes = "
422+
<< memoryLimit * core::constants::BYTES_IN_MEGABYTES);
422423
BOOST_TEST_REQUIRE(used.s_ByFields > testParam.s_ExpectedByFields);
423424
BOOST_TEST_REQUIRE(used.s_ByFields < 800);
424425
BOOST_REQUIRE_EQUAL(std::size_t(2), used.s_PartitionFields);
425426
BOOST_REQUIRE_CLOSE_ABSOLUTE(
426-
memoryLimit * 1024 * 1024 / 2, used.s_Usage,
427-
memoryLimit * 1024 * 1024 / testParam.s_ExpectedByMemoryUsageRelativeErrorDivisor);
427+
memoryLimit * core::constants::BYTES_IN_MEGABYTES / 2, used.s_Usage,
428+
memoryLimit * core::constants::BYTES_IN_MEGABYTES /
429+
testParam.s_ExpectedByMemoryUsageRelativeErrorDivisor);
428430
}
429431

430432
LOG_DEBUG(<< "**** Test partition with bucketLength = " << testParam.s_BucketLength
@@ -477,8 +479,9 @@ BOOST_AUTO_TEST_CASE(testModelledEntityCountForFixedMemoryLimit) {
477479
BOOST_TEST_REQUIRE(static_cast<double>(used.s_ByFields) >
478480
0.96 * static_cast<double>(used.s_PartitionFields));
479481
BOOST_REQUIRE_CLOSE_ABSOLUTE(
480-
memoryLimit * 1024 * 1024 / 2, used.s_Usage,
481-
memoryLimit * 1024 * 1024 / testParam.s_ExpectedPartitionUsageRelativeErrorDivisor);
482+
memoryLimit * core::constants::BYTES_IN_MEGABYTES / 2, used.s_Usage,
483+
memoryLimit * core::constants::BYTES_IN_MEGABYTES /
484+
testParam.s_ExpectedPartitionUsageRelativeErrorDivisor);
482485
}
483486

484487
LOG_DEBUG(<< "**** Test over with bucketLength = " << testParam.s_BucketLength
@@ -527,8 +530,9 @@ BOOST_AUTO_TEST_CASE(testModelledEntityCountForFixedMemoryLimit) {
527530
BOOST_TEST_REQUIRE(used.s_OverFields > testParam.s_ExpectedOverFields);
528531
BOOST_TEST_REQUIRE(used.s_OverFields < 7000);
529532
BOOST_REQUIRE_CLOSE_ABSOLUTE(
530-
memoryLimit * 1024 * 1024 / 2, used.s_Usage,
531-
memoryLimit * 1024 * 1024 / testParam.s_ExpectedOverUsageRelativeErrorDivisor);
533+
memoryLimit * core::constants::BYTES_IN_MEGABYTES / 2, used.s_Usage,
534+
memoryLimit * core::constants::BYTES_IN_MEGABYTES /
535+
testParam.s_ExpectedOverUsageRelativeErrorDivisor);
532536
}
533537
}
534538
}

lib/api/unittest/CDataFrameAnalysisInstrumentationTest.cc

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -101,16 +101,15 @@ void addOutlierTestData(TStrVec fieldNames,
101101

102102
BOOST_AUTO_TEST_CASE(testMemoryState) {
103103
std::string jobId{"testJob"};
104-
std::int64_t memoryLimit{1024 * 1024 * 1024}; //1gb default value
104+
std::size_t memoryLimit{core::constants::BYTES_IN_GIGABYTES};
105105
std::int64_t memoryUsage{500000};
106106
std::int64_t timeBefore{std::chrono::duration_cast<std::chrono::milliseconds>(
107107
std::chrono::system_clock::now().time_since_epoch())
108108
.count()};
109109
std::stringstream outputStream;
110110
{
111-
core::CJsonOutputStreamWrapper streamWrapper(outputStream);
112-
api::CDataFrameTrainBoostedTreeInstrumentation instrumentation{
113-
jobId, static_cast<std::size_t>(memoryLimit)};
111+
core::CJsonOutputStreamWrapper streamWrapper{outputStream};
112+
api::CDataFrameTrainBoostedTreeInstrumentation instrumentation{jobId, memoryLimit};
114113
api::CDataFrameTrainBoostedTreeInstrumentation::CScopeSetOutputStream setStream{
115114
instrumentation, streamWrapper};
116115
instrumentation.updateMemoryUsage(memoryUsage);

lib/api/unittest/CDataFrameAnalysisRunnerTest.cc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -181,7 +181,7 @@ void testEstimateMemoryUsage(std::int64_t numberRows,
181181
}
182182

183183
BOOST_AUTO_TEST_CASE(testEstimateMemoryUsageFor0Rows) {
184-
testEstimateMemoryUsage(0, "0", "0", 1);
184+
testEstimateMemoryUsage(0, "0mb", "0mb", 1);
185185
}
186186

187187
BOOST_AUTO_TEST_CASE(testEstimateMemoryUsageFor1Row) {

lib/core/CDataFrame.cc

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414
#include <core/CPackedBitVector.h>
1515
#include <core/CStringUtils.h>
1616
#include <core/Concurrency.h>
17+
#include <core/Constants.h>
1718

1819
#include <algorithm>
1920
#include <future>
@@ -716,7 +717,8 @@ CDataFrame::CDataFrameRowSliceWriter::finishWritingRows() {
716717
}
717718

718719
std::size_t dataFrameDefaultSliceCapacity(std::size_t numberColumns) {
719-
std::size_t oneMbChunkSize{1024 * 1024 / sizeof(CFloatStorage) / numberColumns};
720+
std::size_t oneMbChunkSize{constants::BYTES_IN_MEGABYTES /
721+
sizeof(CFloatStorage) / numberColumns};
720722
return std::max(oneMbChunkSize, std::size_t{128});
721723
}
722724

lib/core/CStringUtils.cc

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -501,15 +501,15 @@ CStringUtils::memorySizeStringToBytes(const std::string& memorySizeStr, std::siz
501501
if (multiplierStr[0] == BYTES) {
502502
// no-op
503503
} else if (multiplierStr[0] == KILOBYTES) {
504-
size *= constants::BYTES_IN_KILOBYTE;
504+
size *= constants::BYTES_IN_KILOBYTES;
505505
} else if (multiplierStr[0] == MEGABYTES) {
506-
size *= constants::BYTES_IN_MEGABYTE;
506+
size *= constants::BYTES_IN_MEGABYTES;
507507
} else if (multiplierStr[0] == GIGABYTES) {
508-
size *= constants::BYTES_IN_GIGABYTE;
508+
size *= constants::BYTES_IN_GIGABYTES;
509509
} else if (multiplierStr[0] == TERABYTES) {
510-
size *= constants::BYTES_IN_TERABYTE;
510+
size *= constants::BYTES_IN_TERABYTES;
511511
} else if (multiplierStr[0] == PETABYTES) {
512-
size *= constants::BYTES_IN_PETABYTE;
512+
size *= constants::BYTES_IN_PETABYTES;
513513
}
514514

515515
return {size, true};

lib/maths/CBoostedTreeImpl.cc

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
#include <core/CPersistUtils.h>
1414
#include <core/CProgramCounters.h>
1515
#include <core/CStopWatch.h>
16+
#include <core/Constants.h>
1617

1718
#include <maths/CBasicStatisticsPersist.h>
1819
#include <maths/CBayesianOptimisation.h>
@@ -51,7 +52,7 @@ namespace {
5152
const double MINIMUM_SPLIT_REFRESH_INTERVAL{3.0};
5253
const std::string HYPERPARAMETER_OPTIMIZATION_ROUND{"hyperparameter_optimization_round_"};
5354
const std::string TRAIN_FINAL_FOREST{"train_final_forest"};
54-
const int BYTES_IN_MB{1024 * 1024};
55+
const double BYTES_IN_MB{static_cast<double>(core::constants::BYTES_IN_MEGABYTES)};
5556

5657
//! \brief Record the memory used by a supplied object using the RAII idiom.
5758
class CScopeRecordMemoryUsage {

lib/maths/unittest/CBoostedTreeTest.cc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -59,7 +59,7 @@ using TLossFunctionUPtr = maths::CBoostedTreeFactory::TLossFunctionUPtr;
5959
namespace {
6060

6161
const double LARGE_POSITIVE_CONSTANT{300.0};
62-
const int BYTES_IN_MB{1024 * 1024};
62+
const double BYTES_IN_MB{static_cast<double>(core::constants::BYTES_IN_MEGABYTES)};
6363

6464
class CTestInstrumentation : public maths::CDataFrameTrainBoostedTreeInstrumentationStub {
6565
public:

lib/model/CResourceMonitor.cc

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -90,8 +90,8 @@ void CResourceMonitor::updateMemoryLimitsAndPruneThreshold(std::size_t limitMBs)
9090
// more models?", and it causes problems if these calculations overflow.
9191
m_ByteLimitHigh = std::numeric_limits<std::size_t>::max() / 2 + 1;
9292
} else {
93-
m_ByteLimitHigh = static_cast<std::size_t>(
94-
(limitMBs * 1024 * 1024) / this->persistenceMemoryIncreaseFactor());
93+
m_ByteLimitHigh = (limitMBs * core::constants::BYTES_IN_MEGABYTES) /
94+
this->persistenceMemoryIncreaseFactor();
9595
}
9696
m_ByteLimitLow = (m_ByteLimitHigh * 49) / 50;
9797
m_PruneThreshold = (m_ByteLimitHigh * 3) / 5;

0 commit comments

Comments
 (0)