mirror of https://github.com/facebook/zstd.git
largeNbDicts bugfix + improvements
parent f313a773a4
commit e0c4863c5c
@@ -654,7 +654,8 @@ static int benchMem(slice_collection_t dstBlocks,
                     slice_collection_t srcBlocks,
                     ddict_collection_t ddictionaries,
                     cdict_collection_t cdictionaries,
-                    unsigned nbRounds, int benchCompression)
+                    unsigned nbRounds, int benchCompression,
+                    const char* exeName)
 {
     assert(dstBlocks.nbSlices == srcBlocks.nbSlices);
 
@@ -703,6 +704,22 @@ static int benchMem(slice_collection_t dstBlocks,
     }
     DISPLAY("\n");
 
+    char* csvFileName = malloc(strlen(exeName) + 5);
+    strcpy(csvFileName, exeName);
+    strcat(csvFileName, ".csv");
+    FILE* csvFile = fopen(csvFileName, "r");
+    if (!csvFile) {
+        csvFile = fopen(csvFileName, "wt");
+        assert(csvFile);
+        fprintf(csvFile, "%s\n", exeName);
+    } else {
+        csvFile = fopen(csvFileName, "at");
+        assert(csvFile);
+    }
+    fprintf(csvFile, "%.1f\n", bestSpeed);
+    fclose(csvFile);
+    free(csvFileName);
+
     freeDecompressInstructions(di);
     freeCompressInstructions(ci);
     BMK_freeTimedFnState(benchState);
@@ -721,7 +738,8 @@ int bench(const char** fileNameTable, unsigned nbFiles,
           size_t blockSize, int clevel,
           unsigned nbDictMax, unsigned nbBlocks,
           unsigned nbRounds, int benchCompression,
-          ZSTD_dictContentType_e dictContentType, ZSTD_CCtx_params* cctxParams)
+          ZSTD_dictContentType_e dictContentType, ZSTD_CCtx_params* cctxParams,
+          const char* exeName)
 {
     int result = 0;
 
@@ -806,7 +824,7 @@ int bench(const char** fileNameTable, unsigned nbFiles,
     CONTROL(ddictionaries.ddicts != NULL);
 
     if (benchCompression) {
-        size_t const dictMem = ZSTD_estimateCDictSize(dictBuffer.size, DICT_LOAD_METHOD);
+        size_t const dictMem = ZSTD_sizeof_CDict(cdictionaries.cdicts[0]);
         size_t const allDictMem = dictMem * nbDicts;
         DISPLAYLEVEL(3, "generating %u dictionaries, using %.1f MB of memory \n",
                         nbDicts, (double)allDictMem / (1 MB));
@@ -816,7 +834,7 @@ int bench(const char** fileNameTable, unsigned nbFiles,
         buffer_collection_t resultCollection = createBufferCollection_fromSliceCollection(srcSlices);
         CONTROL(resultCollection.buffer.ptr != NULL);
 
-        result = benchMem(dstSlices, resultCollection.slices, ddictionaries, cdictionaries, nbRounds, benchCompression);
+        result = benchMem(dstSlices, resultCollection.slices, ddictionaries, cdictionaries, nbRounds, benchCompression, exeName);
 
         freeBufferCollection(resultCollection);
     } else {
@@ -830,7 +848,7 @@ int bench(const char** fileNameTable, unsigned nbFiles,
         buffer_collection_t resultCollection = createBufferCollection_fromSliceCollectionSizes(srcSlices);
         CONTROL(resultCollection.buffer.ptr != NULL);
 
-        result = benchMem(resultCollection.slices, dstSlices, ddictionaries, cdictionaries, nbRounds, benchCompression);
+        result = benchMem(resultCollection.slices, dstSlices, ddictionaries, cdictionaries, nbRounds, benchCompression, exeName);
 
         freeBufferCollection(resultCollection);
     }
@@ -988,7 +1006,7 @@ int main (int argc, const char** argv)
     ZSTD_CCtxParams_setParameter(cctxParams, ZSTD_c_nbWorkers, 0);
     ZSTD_CCtxParams_setParameter(cctxParams, ZSTD_c_forceAttachDict, dictAttachPref);
 
-    int result = bench(filenameTable->fileNames, (unsigned)filenameTable->tableSize, dictionary, blockSize, cLevel, nbDicts, nbBlocks, nbRounds, benchCompression, dictContentType, cctxParams);
+    int result = bench(filenameTable->fileNames, (unsigned)filenameTable->tableSize, dictionary, blockSize, cLevel, nbDicts, nbBlocks, nbRounds, benchCompression, dictContentType, cctxParams, exeName);
 
     UTIL_freeFileNamesTable(filenameTable);
     free(nameTable);
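The size-reporting change in bench() swaps an upfront estimate (ZSTD_estimateCDictSize) for the measured footprint of a CDict that has already been built (ZSTD_sizeof_CDict). The following is a small sketch of that measurement, assuming libzstd is installed and linked with -lzstd; the dictionary buffer, compression level and dictionary count are placeholders, not values from the benchmark.

#include <stdio.h>
#include <stdlib.h>
#include <zstd.h>

int main(void)
{
    /* Illustrative stand-in for a real dictionary buffer
     * (e.g. one produced by ZDICT_trainFromBuffer()). */
    size_t const dictSize = 16 * 1024;
    void* const dictBuffer = calloc(1, dictSize);
    if (dictBuffer == NULL) return 1;

    /* Build one CDict, then measure what it actually occupies in memory. */
    ZSTD_CDict* const cdict = ZSTD_createCDict(dictBuffer, dictSize, 3 /* compression level */);
    if (cdict == NULL) { free(dictBuffer); return 1; }

    size_t const dictMem = ZSTD_sizeof_CDict(cdict);    /* measured footprint of this CDict */
    unsigned const nbDicts = 1000;                       /* illustrative dictionary count */
    printf("one CDict: %zu bytes, %u CDicts: %.1f MB\n",
           dictMem, nbDicts, (double)(dictMem * nbDicts) / (1 << 20));

    ZSTD_freeCDict(cdict);
    free(dictBuffer);
    return 0;
}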