/* LzFind.c -- Match finder for LZ algorithms
2021-11-29 : Igor Pavlov : Public domain */

#include "Precomp.h"

#include <string.h>
// #include <stdio.h>

#include "CpuArch.h"
#include "LzFind.h"
#include "LzHash.h"

#define kBlockMoveAlign       (1 << 7)  // alignment for memmove()
#define kBlockSizeAlign       (1 << 16) // alignment for block allocation
#define kBlockSizeReserveMin  (1 << 24) // it's 1/256 from 4 GB dictionary

#define kEmptyHashValue 0

#define kMaxValForNormalize ((UInt32)0)
// #define kMaxValForNormalize ((UInt32)(1 << 20) + 0xFFF) // for debug

// #define kNormalizeAlign (1 << 7) // alignment for speculated accesses

#define GET_AVAIL_BYTES(p) \
  Inline_MatchFinder_GetNumAvailableBytes(p)


// #define kFix5HashSize (kHash2Size + kHash3Size + kHash4Size)
#define kFix5HashSize kFix4HashSize

/*
  HASH2_CALC:
  if (hv) match, then cur[0] and cur[1] also match
*/
#define HASH2_CALC hv = GetUi16(cur);

// (crc[0 ... 255] & 0xFF) provides one-to-one correspondence to [0 ... 255]

/*
  HASH3_CALC:
  if (cur[0]) and (h2) match, then cur[1] also match
  if (cur[0]) and (hv) match, then cur[1] and cur[2] also match
*/
#define HASH3_CALC { \
  UInt32 temp = p->crc[cur[0]] ^ cur[1]; \
  h2 = temp & (kHash2Size - 1); \
  hv = (temp ^ ((UInt32)cur[2] << 8)) & p->hashMask; }

#define HASH4_CALC { \
  UInt32 temp = p->crc[cur[0]] ^ cur[1]; \
  h2 = temp & (kHash2Size - 1); \
  temp ^= ((UInt32)cur[2] << 8); \
  h3 = temp & (kHash3Size - 1); \
  hv = (temp ^ (p->crc[cur[3]] << kLzHash_CrcShift_1)) & p->hashMask; }

#define HASH5_CALC { \
  UInt32 temp = p->crc[cur[0]] ^ cur[1]; \
  h2 = temp & (kHash2Size - 1); \
  temp ^= ((UInt32)cur[2] << 8); \
  h3 = temp & (kHash3Size - 1); \
  temp ^= (p->crc[cur[3]] << kLzHash_CrcShift_1); \
  /* h4 = temp & p->hash4Mask; */ /* (kHash4Size - 1); */ \
  hv = (temp ^ (p->crc[cur[4]] << kLzHash_CrcShift_2)) & p->hashMask; }

#define HASH_ZIP_CALC hv = ((cur[2] | ((UInt32)cur[0] << 8)) ^ p->crc[cur[1]]) & 0xFFFF;
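
/* Editor's note: a small hedged example of the byte order assumed above.
   GetUi16() (from CpuArch.h) reads a 16-bit little-endian value, so for
   cur[0] = 0x61, cur[1] = 0x62, HASH2_CALC gives hv = 0x6261 regardless of
   host endianness. The 3/4/5-byte hashes additionally mix bytes through the
   crc[] table so that the low hash bits depend on every hashed byte. */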


static void LzInWindow_Free(CMatchFinder *p, ISzAllocPtr alloc)
{
  if (!p->directInput)
  {
    ISzAlloc_Free(alloc, p->bufferBase);
    p->bufferBase = NULL;
  }
}


static int LzInWindow_Create2(CMatchFinder *p, UInt32 blockSize, ISzAllocPtr alloc)
{
  if (blockSize == 0)
    return 0;
  if (!p->bufferBase || p->blockSize != blockSize)
  {
    // size_t blockSizeT;
    LzInWindow_Free(p, alloc);
    p->blockSize = blockSize;
    // blockSizeT = blockSize;

    // printf("\nblockSize = 0x%x\n", blockSize);
    /*
    #if defined _WIN64
    // we can allocate 4GiB, but still use UInt32 for (p->blockSize)
    // we use UInt32 type for (p->blockSize), because
    // we don't want to wrap over 4 GiB,
    // when we use (p->streamPos - p->pos) that is UInt32.
    if (blockSize >= (UInt32)0 - (UInt32)kBlockSizeAlign)
    {
      blockSizeT = ((size_t)1 << 32);
      printf("\nchanged to blockSizeT = 4GiB\n");
    }
    #endif
    */

    p->bufferBase = (Byte *)ISzAlloc_Alloc(alloc, blockSize);
    // printf("\nbufferBase = %p\n", p->bufferBase);
    // return 0; // for debug
  }
  return (p->bufferBase != NULL);
}

static const Byte *MatchFinder_GetPointerToCurrentPos(CMatchFinder *p) { return p->buffer; }

static UInt32 MatchFinder_GetNumAvailableBytes(CMatchFinder *p) { return GET_AVAIL_BYTES(p); }


MY_NO_INLINE
static void MatchFinder_ReadBlock(CMatchFinder *p)
{
  if (p->streamEndWasReached || p->result != SZ_OK)
    return;

  /* We use (p->streamPos - p->pos) value.
     (p->streamPos < p->pos) is allowed. */

  if (p->directInput)
  {
    UInt32 curSize = 0xFFFFFFFF - GET_AVAIL_BYTES(p);
    if (curSize > p->directInputRem)
      curSize = (UInt32)p->directInputRem;
    p->directInputRem -= curSize;
    p->streamPos += curSize;
    if (p->directInputRem == 0)
      p->streamEndWasReached = 1;
    return;
  }

  for (;;)
  {
    Byte *dest = p->buffer + GET_AVAIL_BYTES(p);
    size_t size = (size_t)(p->bufferBase + p->blockSize - dest);
    if (size == 0)
    {
      /* we call ReadBlock() after NeedMove() and MoveBlock().
         NeedMove() and MoveBlock() provide more than (keepSizeAfter)
         to the end of (blockSize).
         So we don't execute this branch in normal code flow.
         We can go here, if we will call ReadBlock() before NeedMove(), MoveBlock().
      */
      // p->result = SZ_ERROR_FAIL; // we can show error here
      return;
    }

    // #define kRead 3
    // if (size > kRead) size = kRead; // for debug

    p->result = ISeqInStream_Read(p->stream, dest, &size);
    if (p->result != SZ_OK)
      return;
    if (size == 0)
    {
      p->streamEndWasReached = 1;
      return;
    }
    p->streamPos += (UInt32)size;
    if (GET_AVAIL_BYTES(p) > p->keepSizeAfter)
      return;
    /* here and in other (p->keepSizeAfter) checks we keep 1 byte more than was requested by Create() function
       (GET_AVAIL_BYTES(p) >= p->keepSizeAfter) - minimal required size */
  }

  // on exit: (p->result != SZ_OK || p->streamEndWasReached || GET_AVAIL_BYTES(p) > p->keepSizeAfter)
}



MY_NO_INLINE
void MatchFinder_MoveBlock(CMatchFinder *p)
{
  const size_t offset = (size_t)(p->buffer - p->bufferBase) - p->keepSizeBefore;
  const size_t keepBefore = (offset & (kBlockMoveAlign - 1)) + p->keepSizeBefore;
  p->buffer = p->bufferBase + keepBefore;
  memmove(p->bufferBase,
      p->bufferBase + (offset & ~((size_t)kBlockMoveAlign - 1)),
      keepBefore + (size_t)GET_AVAIL_BYTES(p));
}
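
/* Worked example (editor's note, illustrative numbers only):
   with p->keepSizeBefore = 1000 and p->buffer at bufferBase + 5000:
     offset     = 5000 - 1000 = 4000
     keepBefore = (4000 & 127) + 1000 = 32 + 1000 = 1032
   memmove() copies from bufferBase + (4000 & ~127) = bufferBase + 3968,
   so the source stays 128-byte (kBlockMoveAlign) aligned, and the byte that
   was at bufferBase + 5000 lands at the new p->buffer = bufferBase + 1032. */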

/* We call MoveBlock() before ReadBlock().
   So MoveBlock() can be a wasteful operation, if the whole input data
   can fit in the current block even without calling MoveBlock().
   In the important case where (dataSize <= historySize),
     condition (p->blockSize > dataSize + p->keepSizeAfter) is met.
     So there is no MoveBlock() in that case.
*/

int MatchFinder_NeedMove(CMatchFinder *p)
{
  if (p->directInput)
    return 0;
  if (p->streamEndWasReached || p->result != SZ_OK)
    return 0;
  return ((size_t)(p->bufferBase + p->blockSize - p->buffer) <= p->keepSizeAfter);
}

void MatchFinder_ReadIfRequired(CMatchFinder *p)
{
  if (p->keepSizeAfter >= GET_AVAIL_BYTES(p))
    MatchFinder_ReadBlock(p);
}



static void MatchFinder_SetDefaultSettings(CMatchFinder *p)
{
  p->cutValue = 32;
  p->btMode = 1;
  p->numHashBytes = 4;
  p->bigHash = 0;
}

#define kCrcPoly 0xEDB88320

void MatchFinder_Construct(CMatchFinder *p)
{
  unsigned i;
  p->bufferBase = NULL;
  p->directInput = 0;
  p->hash = NULL;
  p->expectedDataSize = (UInt64)(Int64)-1;
  MatchFinder_SetDefaultSettings(p);

  for (i = 0; i < 256; i++)
  {
    UInt32 r = (UInt32)i;
    unsigned j;
    for (j = 0; j < 8; j++)
      r = (r >> 1) ^ (kCrcPoly & ((UInt32)0 - (r & 1)));
    p->crc[i] = r;
  }
}
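
/* Editor's note: the loop above builds the standard reflected CRC-32 table
   for polynomial 0xEDB88320. A quick self-check of the construction:
     i = 1 : after 8 steps r == 0x77073096, the well-known second entry of
             that table, so p->crc[1] == 0x77073096. */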

static void MatchFinder_FreeThisClassMemory(CMatchFinder *p, ISzAllocPtr alloc)
{
  ISzAlloc_Free(alloc, p->hash);
  p->hash = NULL;
}

void MatchFinder_Free(CMatchFinder *p, ISzAllocPtr alloc)
{
  MatchFinder_FreeThisClassMemory(p, alloc);
  LzInWindow_Free(p, alloc);
}

static CLzRef* AllocRefs(size_t num, ISzAllocPtr alloc)
{
  size_t sizeInBytes = (size_t)num * sizeof(CLzRef);
  if (sizeInBytes / sizeof(CLzRef) != num)
    return NULL;
  return (CLzRef *)ISzAlloc_Alloc(alloc, sizeInBytes);
}

#if (kBlockSizeReserveMin < kBlockSizeAlign * 2)
  #error Stop_Compiling_Bad_Reserve
#endif



static UInt32 GetBlockSize(CMatchFinder *p, UInt32 historySize)
{
  UInt32 blockSize = (p->keepSizeBefore + p->keepSizeAfter);
  /*
  if (historySize > kMaxHistorySize)
    return 0;
  */
  // printf("\nhistorySize == 0x%x\n", historySize);

  if (p->keepSizeBefore < historySize || blockSize < p->keepSizeBefore) // if 32-bit overflow
    return 0;

  {
    const UInt32 kBlockSizeMax = (UInt32)0 - (UInt32)kBlockSizeAlign;
    const UInt32 rem = kBlockSizeMax - blockSize;
    const UInt32 reserve = (blockSize >> (blockSize < ((UInt32)1 << 30) ? 1 : 2))
        + (1 << 12) + kBlockMoveAlign + kBlockSizeAlign; // do not overflow 32-bit here
    if (blockSize >= kBlockSizeMax
        || rem < kBlockSizeReserveMin) // we reject settings that will be slow
      return 0;
    if (reserve >= rem)
      blockSize = kBlockSizeMax;
    else
    {
      blockSize += reserve;
      blockSize &= ~(UInt32)(kBlockSizeAlign - 1);
    }
  }
  // printf("\n LzFind_blockSize = %x\n", blockSize);
  // printf("\n LzFind_blockSize = %d\n", blockSize >> 20);
  return blockSize;
}
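
/* Worked example (editor's note, illustrative numbers only):
   suppose keepSizeBefore + keepSizeAfter gives blockSize = 0x08000000
   (128 MiB, below 1 GiB):
     reserve   = (blockSize >> 1) + (1 << 12) + kBlockMoveAlign + kBlockSizeAlign
               = 0x04000000 + 0x1000 + 0x80 + 0x10000 = 0x04011080
     blockSize = 0x08000000 + 0x04011080 = 0x0C011080
   and the result is rounded down to a multiple of kBlockSizeAlign (64 KiB):
     blockSize & ~0xFFFF = 0x0C010000 (about 192 MiB). */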


int MatchFinder_Create(CMatchFinder *p, UInt32 historySize,
    UInt32 keepAddBufferBefore, UInt32 matchMaxLen, UInt32 keepAddBufferAfter,
    ISzAllocPtr alloc)
{
  /* we need one additional byte in (p->keepSizeBefore),
     since we use MoveBlock() after (p->pos++) and before dictionary using */
  // keepAddBufferBefore = (UInt32)0xFFFFFFFF - (1 << 22); // for debug
  p->keepSizeBefore = historySize + keepAddBufferBefore + 1;

  keepAddBufferAfter += matchMaxLen;
  /* we need (p->keepSizeAfter >= p->numHashBytes) */
  if (keepAddBufferAfter < p->numHashBytes)
    keepAddBufferAfter = p->numHashBytes;
  // keepAddBufferAfter -= 2; // for debug
  p->keepSizeAfter = keepAddBufferAfter;

  if (p->directInput)
    p->blockSize = 0;
  if (p->directInput || LzInWindow_Create2(p, GetBlockSize(p, historySize), alloc))
  {
    const UInt32 newCyclicBufferSize = historySize + 1; // do not change it
    UInt32 hs;
    p->matchMaxLen = matchMaxLen;
    {
      // UInt32 hs4;
      p->fixedHashSize = 0;
      hs = (1 << 16) - 1;
      if (p->numHashBytes != 2)
      {
        hs = historySize;
        if (hs > p->expectedDataSize)
          hs = (UInt32)p->expectedDataSize;
        if (hs != 0)
          hs--;
        hs |= (hs >> 1);
        hs |= (hs >> 2);
        hs |= (hs >> 4);
        hs |= (hs >> 8);
        // we propagated 16 bits in (hs). Low 16 bits must be set later
        hs >>= 1;
        if (hs >= (1 << 24))
        {
          if (p->numHashBytes == 3)
            hs = (1 << 24) - 1;
          else
            hs >>= 1;
          /* if (bigHash) mode, GetHeads4b() in LzFindMt.c needs (hs >= ((1 << 24) - 1))) */
        }

        // hs = ((UInt32)1 << 25) - 1; // for test

        // (hash_size >= (1 << 16)) : Required for (numHashBytes > 2)
        hs |= (1 << 16) - 1; /* don't change it! */

        // bt5: we adjust the size with recommended minimum size
        if (p->numHashBytes >= 5)
          hs |= (256 << kLzHash_CrcShift_2) - 1;
      }
      p->hashMask = hs;
      hs++;

      /*
      hs4 = (1 << 20);
      if (hs4 > hs)
        hs4 = hs;
      // hs4 = (1 << 16); // for test
      p->hash4Mask = hs4 - 1;
      */

      if (p->numHashBytes > 2) p->fixedHashSize += kHash2Size;
      if (p->numHashBytes > 3) p->fixedHashSize += kHash3Size;
      // if (p->numHashBytes > 4) p->fixedHashSize += hs4; // kHash4Size;
      hs += p->fixedHashSize;
    }
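
    /* Worked example (editor's note): with the default numHashBytes = 4,
       historySize = (1 << 26) (64 MiB) and no expectedDataSize limit:
         hs = 0x4000000 - 1 = 0x3FFFFFF   (already all-ones after the OR steps)
         hs >>= 1             -> 0x1FFFFFF
         hs >= (1 << 24), so   hs >>= 1   -> 0x0FFFFFF
         hs |= 0xFFFF          (no change)
       giving p->hashMask = 0x00FFFFFF and a main hash table of
       hs + 1 = (1 << 24) entries, plus kHash2Size + kHash3Size fixed slots. */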

    {
      size_t newSize;
      size_t numSons;
      p->historySize = historySize;
      p->hashSizeSum = hs;
      p->cyclicBufferSize = newCyclicBufferSize; // it must be = (historySize + 1)

      numSons = newCyclicBufferSize;
      if (p->btMode)
        numSons <<= 1;
      newSize = hs + numSons;

      // aligned size is not required here, but it can be better for some loops
      #define NUM_REFS_ALIGN_MASK 0xF
      newSize = (newSize + NUM_REFS_ALIGN_MASK) & ~(size_t)NUM_REFS_ALIGN_MASK;

      if (p->hash && p->numRefs == newSize)
        return 1;

      MatchFinder_FreeThisClassMemory(p, alloc);
      p->numRefs = newSize;
      p->hash = AllocRefs(newSize, alloc);

      if (p->hash)
      {
        p->son = p->hash + p->hashSizeSum;
        return 1;
      }
    }
  }

  MatchFinder_Free(p, alloc);
  return 0;
}


static void MatchFinder_SetLimits(CMatchFinder *p)
{
  UInt32 k;
  UInt32 n = kMaxValForNormalize - p->pos;
  if (n == 0)
    n = (UInt32)(Int32)-1; // we allow (pos == 0) at start even with (kMaxValForNormalize == 0)

  k = p->cyclicBufferSize - p->cyclicBufferPos;
  if (k < n)
    n = k;

  k = GET_AVAIL_BYTES(p);
  {
    const UInt32 ksa = p->keepSizeAfter;
    UInt32 mm = p->matchMaxLen;
    if (k > ksa)
      k -= ksa; // we must limit exactly to keepSizeAfter for ReadBlock
    else if (k >= mm)
    {
      // the limitation for (p->lenLimit) update
      k -= mm;  // optimization : to reduce the number of checks
      k++;
      // k = 1; // non-optimized version : for debug
    }
    else
    {
      mm = k;
      if (k != 0)
        k = 1;
    }
    p->lenLimit = mm;
  }
  if (k < n)
    n = k;

  p->posLimit = p->pos + n;
}
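
/* Worked example (editor's note, illustrative numbers only), with
   matchMaxLen = 16 and keepSizeAfter = 40:
     avail = 100 : k = 100 - 40 = 60,      lenLimit = 16
     avail =  30 : k = 30 - 16 + 1 = 15,   lenLimit = 16
     avail =  10 : k = 1,                  lenLimit = 10  (end of stream)
   n = min(n, k) then bounds p->posLimit, so the hot loops never have to
   re-check buffer space or cyclic-buffer wrap between SetLimits() calls. */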


void MatchFinder_Init_LowHash(CMatchFinder *p)
{
  size_t i;
  CLzRef *items = p->hash;
  const size_t numItems = p->fixedHashSize;
  for (i = 0; i < numItems; i++)
    items[i] = kEmptyHashValue;
}


void MatchFinder_Init_HighHash(CMatchFinder *p)
{
  size_t i;
  CLzRef *items = p->hash + p->fixedHashSize;
  const size_t numItems = (size_t)p->hashMask + 1;
  for (i = 0; i < numItems; i++)
    items[i] = kEmptyHashValue;
}


void MatchFinder_Init_4(CMatchFinder *p)
{
  p->buffer = p->bufferBase;
  {
    /* kEmptyHashValue = 0 (Zero) is used in hash tables as NO-VALUE marker.
       the code in CMatchFinderMt expects (pos = 1) */
    p->pos =
    p->streamPos =
        1; // it's smallest optimal value. do not change it
        // 0; // for debug
  }
  p->result = SZ_OK;
  p->streamEndWasReached = 0;
}


// (CYC_TO_POS_OFFSET == 0) is expected by some optimized code
#define CYC_TO_POS_OFFSET 0
// #define CYC_TO_POS_OFFSET 1 // for debug

void MatchFinder_Init(CMatchFinder *p)
{
  MatchFinder_Init_HighHash(p);
  MatchFinder_Init_LowHash(p);
  MatchFinder_Init_4(p);
  // if (readData)
  MatchFinder_ReadBlock(p);

  /* if we init (cyclicBufferPos = pos), then we can use one variable
     instead of both (cyclicBufferPos) and (pos) : only before (cyclicBufferPos) wrapping */
  p->cyclicBufferPos = (p->pos - CYC_TO_POS_OFFSET); // init with relation to (pos)
  // p->cyclicBufferPos = 0; // smallest value
  // p->son[0] = p->son[1] = 0; // unused : we can init skipped record for speculated accesses.
  MatchFinder_SetLimits(p);
}



#ifdef MY_CPU_X86_OR_AMD64
  #if defined(__clang__) && (__clang_major__ >= 8) \
      || defined(__GNUC__) && (__GNUC__ >= 8) \
      || defined(__INTEL_COMPILER) && (__INTEL_COMPILER >= 1900)
    #define USE_SATUR_SUB_128
    #define USE_AVX2
    #define ATTRIB_SSE41 __attribute__((__target__("sse4.1")))
    #define ATTRIB_AVX2 __attribute__((__target__("avx2")))
  #elif defined(_MSC_VER)
    #if (_MSC_VER >= 1600)
      #define USE_SATUR_SUB_128
      #if (_MSC_VER >= 1900)
        #define USE_AVX2
        #include <immintrin.h> // avx
      #endif
    #endif
  #endif

// #elif defined(MY_CPU_ARM_OR_ARM64)
#elif defined(MY_CPU_ARM64)

  #if defined(__clang__) && (__clang_major__ >= 8) \
      || defined(__GNUC__) && (__GNUC__ >= 8)
    #define USE_SATUR_SUB_128
    #ifdef MY_CPU_ARM64
      // #define ATTRIB_SSE41 __attribute__((__target__("")))
    #else
      // #define ATTRIB_SSE41 __attribute__((__target__("fpu=crypto-neon-fp-armv8")))
    #endif

  #elif defined(_MSC_VER)
    #if (_MSC_VER >= 1910)
      #define USE_SATUR_SUB_128
    #endif
  #endif

  #if defined(_MSC_VER) && defined(MY_CPU_ARM64)
    #include <arm64_neon.h>
  #else
    #include <arm_neon.h>
  #endif

#endif

/*
#ifndef ATTRIB_SSE41
  #define ATTRIB_SSE41
#endif
#ifndef ATTRIB_AVX2
  #define ATTRIB_AVX2
#endif
*/

#ifdef USE_SATUR_SUB_128

// #define _SHOW_HW_STATUS

#ifdef _SHOW_HW_STATUS
#include <stdio.h>
#define _PRF(x) x
_PRF(;)
#else
#define _PRF(x)
#endif

#ifdef MY_CPU_ARM_OR_ARM64

#ifdef MY_CPU_ARM64
// #define FORCE_SATUR_SUB_128
#endif

typedef uint32x4_t v128;
#define SASUB_128(i) \
  *(v128 *)(void *)(items + (i) * 4) = \
  vsubq_u32(vmaxq_u32(*(const v128 *)(const void *)(items + (i) * 4), sub2), sub2);

#else

#include <smmintrin.h> // sse4.1

typedef __m128i v128;
#define SASUB_128(i) \
  *(v128 *)(void *)(items + (i) * 4) = \
  _mm_sub_epi32(_mm_max_epu32(*(const v128 *)(const void *)(items + (i) * 4), sub2), sub2); // SSE 4.1

#endif



MY_NO_INLINE
static
#ifdef ATTRIB_SSE41
ATTRIB_SSE41
#endif
void
MY_FAST_CALL
LzFind_SaturSub_128(UInt32 subValue, CLzRef *items, const CLzRef *lim)
{
  v128 sub2 =
      #ifdef MY_CPU_ARM_OR_ARM64
        vdupq_n_u32(subValue);
      #else
        _mm_set_epi32((Int32)subValue, (Int32)subValue, (Int32)subValue, (Int32)subValue);
      #endif
  do
  {
    SASUB_128(0)
    SASUB_128(1)
    SASUB_128(2)
    SASUB_128(3)
    items += 4 * 4;
  }
  while (items != lim);
}



#ifdef USE_AVX2

#include <immintrin.h> // avx

#define SASUB_256(i) *(__m256i *)(void *)(items + (i) * 8) = _mm256_sub_epi32(_mm256_max_epu32(*(const __m256i *)(const void *)(items + (i) * 8), sub2), sub2); // AVX2

MY_NO_INLINE
static
#ifdef ATTRIB_AVX2
ATTRIB_AVX2
#endif
void
MY_FAST_CALL
LzFind_SaturSub_256(UInt32 subValue, CLzRef *items, const CLzRef *lim)
{
  __m256i sub2 = _mm256_set_epi32(
      (Int32)subValue, (Int32)subValue, (Int32)subValue, (Int32)subValue,
      (Int32)subValue, (Int32)subValue, (Int32)subValue, (Int32)subValue);
  do
  {
    SASUB_256(0)
    SASUB_256(1)
    items += 2 * 8;
  }
  while (items != lim);
}
#endif // USE_AVX2

#ifndef FORCE_SATUR_SUB_128
typedef void (MY_FAST_CALL *LZFIND_SATUR_SUB_CODE_FUNC)(
    UInt32 subValue, CLzRef *items, const CLzRef *lim);
static LZFIND_SATUR_SUB_CODE_FUNC g_LzFind_SaturSub;
#endif // FORCE_SATUR_SUB_128

#endif // USE_SATUR_SUB_128


// kEmptyHashValue must be zero
// #define SASUB_32(i) v = items[i]; m = v - subValue; if (v < subValue) m = kEmptyHashValue; items[i] = m;
#define SASUB_32(i) v = items[i]; if (v < subValue) v = subValue; items[i] = v - subValue;

#ifdef FORCE_SATUR_SUB_128

#define DEFAULT_SaturSub LzFind_SaturSub_128

#else

#define DEFAULT_SaturSub LzFind_SaturSub_32

MY_NO_INLINE
static
void
MY_FAST_CALL
LzFind_SaturSub_32(UInt32 subValue, CLzRef *items, const CLzRef *lim)
{
  do
  {
    UInt32 v;
    SASUB_32(0)
    SASUB_32(1)
    SASUB_32(2)
    SASUB_32(3)
    SASUB_32(4)
    SASUB_32(5)
    SASUB_32(6)
    SASUB_32(7)
    items += 8;
  }
  while (items != lim);
}

#endif
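
/* Editor's note: SASUB_32 and the SIMD SASUB_128 / SASUB_256 variants all
   compute the same saturating subtraction
     items[i] = max(items[i], subValue) - subValue,
   so stale references collapse to kEmptyHashValue (0) instead of wrapping.
   For example, with subValue = 8:  items[i] = 100 -> 92,  items[i] = 5 -> 0. */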


MY_NO_INLINE
void MatchFinder_Normalize3(UInt32 subValue, CLzRef *items, size_t numItems)
{
  #define K_NORM_ALIGN_BLOCK_SIZE (1 << 6)

  CLzRef *lim;

  for (; numItems != 0 && ((unsigned)(ptrdiff_t)items & (K_NORM_ALIGN_BLOCK_SIZE - 1)) != 0; numItems--)
  {
    UInt32 v;
    SASUB_32(0);
    items++;
  }

  {
    #define K_NORM_ALIGN_MASK (K_NORM_ALIGN_BLOCK_SIZE / 4 - 1)
    lim = items + (numItems & ~(size_t)K_NORM_ALIGN_MASK);
    numItems &= K_NORM_ALIGN_MASK;
    if (items != lim)
    {
      #if defined(USE_SATUR_SUB_128) && !defined(FORCE_SATUR_SUB_128)
      if (g_LzFind_SaturSub)
        g_LzFind_SaturSub(subValue, items, lim);
      else
      #endif
        DEFAULT_SaturSub(subValue, items, lim);
    }
    items = lim;
  }


  for (; numItems != 0; numItems--)
  {
    UInt32 v;
    SASUB_32(0);
    items++;
  }
}



// call MatchFinder_CheckLimits() only after (p->pos++) update

MY_NO_INLINE
static void MatchFinder_CheckLimits(CMatchFinder *p)
{
  if (// !p->streamEndWasReached && p->result == SZ_OK &&
      p->keepSizeAfter == GET_AVAIL_BYTES(p))
  {
    // we try to read only in exact state (p->keepSizeAfter == GET_AVAIL_BYTES(p))
    if (MatchFinder_NeedMove(p))
      MatchFinder_MoveBlock(p);
    MatchFinder_ReadBlock(p);
  }

  if (p->pos == kMaxValForNormalize)
  if (GET_AVAIL_BYTES(p) >= p->numHashBytes) // optional optimization for last bytes of data.
    /*
       if we disable normalization for last bytes of data, and
       if (data_size == 4 GiB), we don't call wasteful normalization,
       but (pos) will be wrapped over Zero (0) in that case.
       And we cannot resume later to normal operation
    */
  {
    // MatchFinder_Normalize(p);
    /* after normalization we need (p->pos >= p->historySize + 1); */
    /* we can reduce subValue to aligned value, if want to keep alignment
       of (p->pos) and (p->buffer) for speculated accesses. */
    const UInt32 subValue = (p->pos - p->historySize - 1) /* & ~(UInt32)(kNormalizeAlign - 1) */;
    // const UInt32 subValue = (1 << 15); // for debug
    // printf("\nMatchFinder_Normalize() subValue == 0x%x\n", subValue);
    size_t numSonRefs = p->cyclicBufferSize;
    if (p->btMode)
      numSonRefs <<= 1;
    Inline_MatchFinder_ReduceOffsets(p, subValue);
    MatchFinder_Normalize3(subValue, p->hash, (size_t)p->hashSizeSum + numSonRefs);
  }

  if (p->cyclicBufferPos == p->cyclicBufferSize)
    p->cyclicBufferPos = 0;

  MatchFinder_SetLimits(p);
}
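
/* Worked example (editor's note): with kMaxValForNormalize == 0, normalization
   triggers only when (p->pos) wraps around the full 32-bit range to 0.
   Then subValue = 0 - historySize - 1 (mod 2^32), and after
   Inline_MatchFinder_ReduceOffsets() the new position is
     pos - subValue = historySize + 1,
   which satisfies the invariant noted above, (p->pos >= p->historySize + 1). */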


/*
  (lenLimit > maxLen)
*/
MY_FORCE_INLINE
static UInt32 * Hc_GetMatchesSpec(size_t lenLimit, UInt32 curMatch, UInt32 pos, const Byte *cur, CLzRef *son,
    size_t _cyclicBufferPos, UInt32 _cyclicBufferSize, UInt32 cutValue,
    UInt32 *d, unsigned maxLen)
{
  /*
  son[_cyclicBufferPos] = curMatch;
  for (;;)
  {
    UInt32 delta = pos - curMatch;
    if (cutValue-- == 0 || delta >= _cyclicBufferSize)
      return d;
    {
      const Byte *pb = cur - delta;
      curMatch = son[_cyclicBufferPos - delta + ((delta > _cyclicBufferPos) ? _cyclicBufferSize : 0)];
      if (pb[maxLen] == cur[maxLen] && *pb == *cur)
      {
        UInt32 len = 0;
        while (++len != lenLimit)
          if (pb[len] != cur[len])
            break;
        if (maxLen < len)
        {
          maxLen = len;
          *d++ = len;
          *d++ = delta - 1;
          if (len == lenLimit)
            return d;
        }
      }
    }
  }
  */

  const Byte *lim = cur + lenLimit;
  son[_cyclicBufferPos] = curMatch;

  do
  {
    UInt32 delta;

    if (curMatch == 0)
      break;
    // if (curMatch2 >= curMatch) return NULL;
    delta = pos - curMatch;
    if (delta >= _cyclicBufferSize)
      break;
    {
      ptrdiff_t diff;
      curMatch = son[_cyclicBufferPos - delta + ((delta > _cyclicBufferPos) ? _cyclicBufferSize : 0)];
      diff = (ptrdiff_t)0 - (ptrdiff_t)delta;
      if (cur[maxLen] == cur[(ptrdiff_t)maxLen + diff])
      {
        const Byte *c = cur;
        while (*c == c[diff])
        {
          if (++c == lim)
          {
            d[0] = (UInt32)(lim - cur);
            d[1] = delta - 1;
            return d + 2;
          }
        }
        {
          const unsigned len = (unsigned)(c - cur);
          if (maxLen < len)
          {
            maxLen = len;
            d[0] = (UInt32)len;
            d[1] = delta - 1;
            d += 2;
          }
        }
      }
    }
  }
  while (--cutValue);

  return d;
}


MY_FORCE_INLINE
UInt32 * GetMatchesSpec1(UInt32 lenLimit, UInt32 curMatch, UInt32 pos, const Byte *cur, CLzRef *son,
    size_t _cyclicBufferPos, UInt32 _cyclicBufferSize, UInt32 cutValue,
    UInt32 *d, UInt32 maxLen)
{
  CLzRef *ptr0 = son + ((size_t)_cyclicBufferPos << 1) + 1;
  CLzRef *ptr1 = son + ((size_t)_cyclicBufferPos << 1);
  unsigned len0 = 0, len1 = 0;

  UInt32 cmCheck;

  // if (curMatch >= pos) { *ptr0 = *ptr1 = kEmptyHashValue; return NULL; }

  cmCheck = (UInt32)(pos - _cyclicBufferSize);
  if ((UInt32)pos <= _cyclicBufferSize)
    cmCheck = 0;

  if (cmCheck < curMatch)
  do
  {
    const UInt32 delta = pos - curMatch;
    {
      CLzRef *pair = son + ((size_t)(_cyclicBufferPos - delta + ((delta > _cyclicBufferPos) ? _cyclicBufferSize : 0)) << 1);
      const Byte *pb = cur - delta;
      unsigned len = (len0 < len1 ? len0 : len1);
      const UInt32 pair0 = pair[0];
      if (pb[len] == cur[len])
      {
        if (++len != lenLimit && pb[len] == cur[len])
          while (++len != lenLimit)
            if (pb[len] != cur[len])
              break;
        if (maxLen < len)
        {
          maxLen = (UInt32)len;
          *d++ = (UInt32)len;
          *d++ = delta - 1;
          if (len == lenLimit)
          {
            *ptr1 = pair0;
            *ptr0 = pair[1];
            return d;
          }
        }
      }
      if (pb[len] < cur[len])
      {
        *ptr1 = curMatch;
        // const UInt32 curMatch2 = pair[1];
        // if (curMatch2 >= curMatch) { *ptr0 = *ptr1 = kEmptyHashValue; return NULL; }
        // curMatch = curMatch2;
        curMatch = pair[1];
        ptr1 = pair + 1;
        len1 = len;
      }
      else
      {
        *ptr0 = curMatch;
        curMatch = pair[0];
        ptr0 = pair;
        len0 = len;
      }
    }
  }
  while (--cutValue && cmCheck < curMatch);

  *ptr0 = *ptr1 = kEmptyHashValue;
  return d;
}
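
/* Editor's note on the output format shared by Hc_GetMatchesSpec() and
   GetMatchesSpec1(): each reported match appends a (length, distance - 1)
   pair to d, and the lengths are strictly increasing. For example, a caller
   that receives { 3, 6, 5, 40 } has a length-3 match 7 bytes back and a
   length-5 match 41 bytes back; the returned pointer marks the end of the
   pairs. */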


static void SkipMatchesSpec(UInt32 lenLimit, UInt32 curMatch, UInt32 pos, const Byte *cur, CLzRef *son,
    size_t _cyclicBufferPos, UInt32 _cyclicBufferSize, UInt32 cutValue)
{
  CLzRef *ptr0 = son + ((size_t)_cyclicBufferPos << 1) + 1;
  CLzRef *ptr1 = son + ((size_t)_cyclicBufferPos << 1);
  unsigned len0 = 0, len1 = 0;

  UInt32 cmCheck;

  cmCheck = (UInt32)(pos - _cyclicBufferSize);
  if ((UInt32)pos <= _cyclicBufferSize)
    cmCheck = 0;

  if (// curMatch >= pos ||  // failure
      cmCheck < curMatch)
  do
  {
    const UInt32 delta = pos - curMatch;
    {
      CLzRef *pair = son + ((size_t)(_cyclicBufferPos - delta + ((delta > _cyclicBufferPos) ? _cyclicBufferSize : 0)) << 1);
      const Byte *pb = cur - delta;
      unsigned len = (len0 < len1 ? len0 : len1);
      if (pb[len] == cur[len])
      {
        while (++len != lenLimit)
          if (pb[len] != cur[len])
            break;
        {
          if (len == lenLimit)
          {
            *ptr1 = pair[0];
            *ptr0 = pair[1];
            return;
          }
        }
      }
      if (pb[len] < cur[len])
      {
        *ptr1 = curMatch;
        curMatch = pair[1];
        ptr1 = pair + 1;
        len1 = len;
      }
      else
      {
        *ptr0 = curMatch;
        curMatch = pair[0];
        ptr0 = pair;
        len0 = len;
      }
    }
  }
  while (--cutValue && cmCheck < curMatch);

  *ptr0 = *ptr1 = kEmptyHashValue;
  return;
}


#define MOVE_POS \
  ++p->cyclicBufferPos; \
  p->buffer++; \
  { const UInt32 pos1 = p->pos + 1; p->pos = pos1; if (pos1 == p->posLimit) MatchFinder_CheckLimits(p); }

#define MOVE_POS_RET MOVE_POS return distances;

MY_NO_INLINE
static void MatchFinder_MovePos(CMatchFinder *p)
{
  /* we go here at the end of stream data, when (avail < num_hash_bytes)
     We don't update sons[cyclicBufferPos << btMode].
     So (sons) record will contain junk. And we cannot resume match searching
     to normal operation, even if we will provide more input data in buffer.
     p->sons[p->cyclicBufferPos << p->btMode] = 0;  // kEmptyHashValue
     if (p->btMode)
        p->sons[(p->cyclicBufferPos << p->btMode) + 1] = 0;  // kEmptyHashValue
  */
  MOVE_POS;
}

#define GET_MATCHES_HEADER2(minLen, ret_op) \
  unsigned lenLimit; UInt32 hv; Byte *cur; UInt32 curMatch; \
  lenLimit = (unsigned)p->lenLimit; { if (lenLimit < minLen) { MatchFinder_MovePos(p); ret_op; }} \
  cur = p->buffer;

#define GET_MATCHES_HEADER(minLen) GET_MATCHES_HEADER2(minLen, return distances)
#define SKIP_HEADER(minLen)   do { GET_MATCHES_HEADER2(minLen, continue)

#define MF_PARAMS(p)  lenLimit, curMatch, p->pos, p->buffer, p->son, p->cyclicBufferPos, p->cyclicBufferSize, p->cutValue

#define SKIP_FOOTER  SkipMatchesSpec(MF_PARAMS(p)); MOVE_POS; } while (--num);

#define GET_MATCHES_FOOTER_BASE(_maxLen_, func) \
  distances = func(MF_PARAMS(p), \
  distances, (UInt32)_maxLen_); MOVE_POS_RET;

#define GET_MATCHES_FOOTER_BT(_maxLen_) \
  GET_MATCHES_FOOTER_BASE(_maxLen_, GetMatchesSpec1)

#define GET_MATCHES_FOOTER_HC(_maxLen_) \
  GET_MATCHES_FOOTER_BASE(_maxLen_, Hc_GetMatchesSpec)



#define UPDATE_maxLen { \
    const ptrdiff_t diff = (ptrdiff_t)0 - (ptrdiff_t)d2; \
    const Byte *c = cur + maxLen; \
    const Byte *lim = cur + lenLimit; \
    for (; c != lim; c++) if (*(c + diff) != *c) break; \
    maxLen = (unsigned)(c - cur); }
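
/* Editor's note: UPDATE_maxLen extends an already-verified prefix. With
   d2 = 4 and maxLen = 2 it compares cur[2] with cur[-2], cur[3] with cur[-1],
   and so on, stopping at the first mismatch or at lenLimit; maxLen then
   becomes the full length of the match at distance d2. */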

static UInt32* Bt2_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{
  GET_MATCHES_HEADER(2)
  HASH2_CALC;
  curMatch = p->hash[hv];
  p->hash[hv] = p->pos;
  GET_MATCHES_FOOTER_BT(1)
}

UInt32* Bt3Zip_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{
  GET_MATCHES_HEADER(3)
  HASH_ZIP_CALC;
  curMatch = p->hash[hv];
  p->hash[hv] = p->pos;
  GET_MATCHES_FOOTER_BT(2)
}


#define SET_mmm  \
  mmm = p->cyclicBufferSize; \
  if (pos < mmm) \
    mmm = pos;


static UInt32* Bt3_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{
  UInt32 mmm;
  UInt32 h2, d2, pos;
  unsigned maxLen;
  UInt32 *hash;
  GET_MATCHES_HEADER(3)

  HASH3_CALC;

  hash = p->hash;
  pos = p->pos;

  d2 = pos - hash[h2];

  curMatch = (hash + kFix3HashSize)[hv];

  hash[h2] = pos;
  (hash + kFix3HashSize)[hv] = pos;

  SET_mmm

  maxLen = 2;

  if (d2 < mmm && *(cur - d2) == *cur)
  {
    UPDATE_maxLen
    distances[0] = (UInt32)maxLen;
    distances[1] = d2 - 1;
    distances += 2;
    if (maxLen == lenLimit)
    {
      SkipMatchesSpec(MF_PARAMS(p));
      MOVE_POS_RET;
    }
  }

  GET_MATCHES_FOOTER_BT(maxLen)
}


static UInt32* Bt4_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{
  UInt32 mmm;
  UInt32 h2, h3, d2, d3, pos;
  unsigned maxLen;
  UInt32 *hash;
  GET_MATCHES_HEADER(4)

  HASH4_CALC;

  hash = p->hash;
  pos = p->pos;

  d2 = pos - hash                  [h2];
  d3 = pos - (hash + kFix3HashSize)[h3];
  curMatch = (hash + kFix4HashSize)[hv];

  hash                  [h2] = pos;
  (hash + kFix3HashSize)[h3] = pos;
  (hash + kFix4HashSize)[hv] = pos;

  SET_mmm

  maxLen = 3;

  for (;;)
  {
    if (d2 < mmm && *(cur - d2) == *cur)
    {
      distances[0] = 2;
      distances[1] = d2 - 1;
      distances += 2;
      if (*(cur - d2 + 2) == cur[2])
      {
        // distances[-2] = 3;
      }
      else if (d3 < mmm && *(cur - d3) == *cur)
      {
        d2 = d3;
        distances[1] = d3 - 1;
        distances += 2;
      }
      else
        break;
    }
    else if (d3 < mmm && *(cur - d3) == *cur)
    {
      d2 = d3;
      distances[1] = d3 - 1;
      distances += 2;
    }
    else
      break;

    UPDATE_maxLen
    distances[-2] = (UInt32)maxLen;
    if (maxLen == lenLimit)
    {
      SkipMatchesSpec(MF_PARAMS(p));
      MOVE_POS_RET
    }
    break;
  }

  GET_MATCHES_FOOTER_BT(maxLen)
}


static UInt32* Bt5_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{
  UInt32 mmm;
  UInt32 h2, h3, d2, d3, maxLen, pos;
  UInt32 *hash;
  GET_MATCHES_HEADER(5)

  HASH5_CALC;

  hash = p->hash;
  pos = p->pos;

  d2 = pos - hash                  [h2];
  d3 = pos - (hash + kFix3HashSize)[h3];
  // d4 = pos - (hash + kFix4HashSize)[h4];

  curMatch = (hash + kFix5HashSize)[hv];

  hash                  [h2] = pos;
  (hash + kFix3HashSize)[h3] = pos;
  // (hash + kFix4HashSize)[h4] = pos;
  (hash + kFix5HashSize)[hv] = pos;

  SET_mmm

  maxLen = 4;

  for (;;)
  {
    if (d2 < mmm && *(cur - d2) == *cur)
    {
      distances[0] = 2;
      distances[1] = d2 - 1;
      distances += 2;
      if (*(cur - d2 + 2) == cur[2])
      {
      }
      else if (d3 < mmm && *(cur - d3) == *cur)
      {
        distances[1] = d3 - 1;
        distances += 2;
        d2 = d3;
      }
      else
        break;
    }
    else if (d3 < mmm && *(cur - d3) == *cur)
    {
      distances[1] = d3 - 1;
      distances += 2;
      d2 = d3;
    }
    else
      break;

    distances[-2] = 3;
    if (*(cur - d2 + 3) != cur[3])
      break;
    UPDATE_maxLen
    distances[-2] = (UInt32)maxLen;
    if (maxLen == lenLimit)
    {
      SkipMatchesSpec(MF_PARAMS(p));
      MOVE_POS_RET;
    }
    break;
  }

  GET_MATCHES_FOOTER_BT(maxLen)
}


static UInt32* Hc4_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{
  UInt32 mmm;
  UInt32 h2, h3, d2, d3, pos;
  unsigned maxLen;
  UInt32 *hash;
  GET_MATCHES_HEADER(4)

  HASH4_CALC;

  hash = p->hash;
  pos = p->pos;

  d2 = pos - hash                  [h2];
  d3 = pos - (hash + kFix3HashSize)[h3];
  curMatch = (hash + kFix4HashSize)[hv];

  hash                  [h2] = pos;
  (hash + kFix3HashSize)[h3] = pos;
  (hash + kFix4HashSize)[hv] = pos;

  SET_mmm

  maxLen = 3;

  for (;;)
  {
    if (d2 < mmm && *(cur - d2) == *cur)
    {
      distances[0] = 2;
      distances[1] = d2 - 1;
      distances += 2;
      if (*(cur - d2 + 2) == cur[2])
      {
        // distances[-2] = 3;
      }
      else if (d3 < mmm && *(cur - d3) == *cur)
      {
        d2 = d3;
        distances[1] = d3 - 1;
        distances += 2;
      }
      else
        break;
    }
    else if (d3 < mmm && *(cur - d3) == *cur)
    {
      d2 = d3;
      distances[1] = d3 - 1;
      distances += 2;
    }
    else
      break;

    UPDATE_maxLen
    distances[-2] = (UInt32)maxLen;
    if (maxLen == lenLimit)
    {
      p->son[p->cyclicBufferPos] = curMatch;
      MOVE_POS_RET;
    }
    break;
  }

  GET_MATCHES_FOOTER_HC(maxLen);
}


static UInt32 * Hc5_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{
  UInt32 mmm;
  UInt32 h2, h3, d2, d3, maxLen, pos;
  UInt32 *hash;
  GET_MATCHES_HEADER(5)

  HASH5_CALC;

  hash = p->hash;
  pos = p->pos;

  d2 = pos - hash                  [h2];
  d3 = pos - (hash + kFix3HashSize)[h3];
  // d4 = pos - (hash + kFix4HashSize)[h4];

  curMatch = (hash + kFix5HashSize)[hv];

  hash                  [h2] = pos;
  (hash + kFix3HashSize)[h3] = pos;
  // (hash + kFix4HashSize)[h4] = pos;
  (hash + kFix5HashSize)[hv] = pos;

  SET_mmm

  maxLen = 4;

  for (;;)
  {
    if (d2 < mmm && *(cur - d2) == *cur)
    {
      distances[0] = 2;
      distances[1] = d2 - 1;
      distances += 2;
      if (*(cur - d2 + 2) == cur[2])
      {
      }
      else if (d3 < mmm && *(cur - d3) == *cur)
      {
        distances[1] = d3 - 1;
        distances += 2;
        d2 = d3;
      }
      else
        break;
    }
    else if (d3 < mmm && *(cur - d3) == *cur)
    {
      distances[1] = d3 - 1;
      distances += 2;
      d2 = d3;
    }
    else
      break;

    distances[-2] = 3;
    if (*(cur - d2 + 3) != cur[3])
      break;
    UPDATE_maxLen
    distances[-2] = maxLen;
    if (maxLen == lenLimit)
    {
      p->son[p->cyclicBufferPos] = curMatch;
      MOVE_POS_RET;
    }
    break;
  }

  GET_MATCHES_FOOTER_HC(maxLen);
}


UInt32* Hc3Zip_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{
  GET_MATCHES_HEADER(3)
  HASH_ZIP_CALC;
  curMatch = p->hash[hv];
  p->hash[hv] = p->pos;
  GET_MATCHES_FOOTER_HC(2)
}


static void Bt2_MatchFinder_Skip(CMatchFinder *p, UInt32 num)
{
  SKIP_HEADER(2)
  {
    HASH2_CALC;
    curMatch = p->hash[hv];
    p->hash[hv] = p->pos;
  }
  SKIP_FOOTER
}

void Bt3Zip_MatchFinder_Skip(CMatchFinder *p, UInt32 num)
{
  SKIP_HEADER(3)
  {
    HASH_ZIP_CALC;
    curMatch = p->hash[hv];
    p->hash[hv] = p->pos;
  }
  SKIP_FOOTER
}

static void Bt3_MatchFinder_Skip(CMatchFinder *p, UInt32 num)
{
  SKIP_HEADER(3)
  {
    UInt32 h2;
    UInt32 *hash;
    HASH3_CALC;
    hash = p->hash;
    curMatch = (hash + kFix3HashSize)[hv];
    hash[h2] =
    (hash + kFix3HashSize)[hv] = p->pos;
  }
  SKIP_FOOTER
}

static void Bt4_MatchFinder_Skip(CMatchFinder *p, UInt32 num)
{
  SKIP_HEADER(4)
  {
    UInt32 h2, h3;
    UInt32 *hash;
    HASH4_CALC;
    hash = p->hash;
    curMatch = (hash + kFix4HashSize)[hv];
    hash                  [h2] =
    (hash + kFix3HashSize)[h3] =
    (hash + kFix4HashSize)[hv] = p->pos;
  }
  SKIP_FOOTER
}

static void Bt5_MatchFinder_Skip(CMatchFinder *p, UInt32 num)
{
  SKIP_HEADER(5)
  {
    UInt32 h2, h3;
    UInt32 *hash;
    HASH5_CALC;
    hash = p->hash;
    curMatch = (hash + kFix5HashSize)[hv];
    hash                  [h2] =
    (hash + kFix3HashSize)[h3] =
    // (hash + kFix4HashSize)[h4] =
    (hash + kFix5HashSize)[hv] = p->pos;
  }
  SKIP_FOOTER
}


#define HC_SKIP_HEADER(minLen) \
    do { if (p->lenLimit < minLen) { MatchFinder_MovePos(p); num--; continue; } { \
    Byte *cur; \
    UInt32 *hash; \
    UInt32 *son; \
    UInt32 pos = p->pos; \
    UInt32 num2 = num; \
    /* (p->pos == p->posLimit) is not allowed here !!! */ \
    { const UInt32 rem = p->posLimit - pos; if (num2 > rem) num2 = rem; } \
    num -= num2; \
    { const UInt32 cycPos = p->cyclicBufferPos; \
      son = p->son + cycPos; \
      p->cyclicBufferPos = cycPos + num2; } \
    cur = p->buffer; \
    hash = p->hash; \
    do { \
      UInt32 curMatch; \
      UInt32 hv;


#define HC_SKIP_FOOTER \
      cur++;  pos++;  *son++ = curMatch; \
    } while (--num2); \
    p->buffer = cur; \
    p->pos = pos; \
    if (pos == p->posLimit) MatchFinder_CheckLimits(p); \
    }} while(num); \


static void Hc4_MatchFinder_Skip(CMatchFinder *p, UInt32 num)
{
  HC_SKIP_HEADER(4)

    UInt32 h2, h3;
    HASH4_CALC;
    curMatch = (hash + kFix4HashSize)[hv];
    hash                  [h2] =
    (hash + kFix3HashSize)[h3] =
    (hash + kFix4HashSize)[hv] = pos;

  HC_SKIP_FOOTER
}


static void Hc5_MatchFinder_Skip(CMatchFinder *p, UInt32 num)
{
  HC_SKIP_HEADER(5)

    UInt32 h2, h3;
    HASH5_CALC
    curMatch = (hash + kFix5HashSize)[hv];
    hash                  [h2] =
    (hash + kFix3HashSize)[h3] =
    // (hash + kFix4HashSize)[h4] =
    (hash + kFix5HashSize)[hv] = pos;

  HC_SKIP_FOOTER
}


void Hc3Zip_MatchFinder_Skip(CMatchFinder *p, UInt32 num)
{
  HC_SKIP_HEADER(3)

    HASH_ZIP_CALC;
    curMatch = hash[hv];
    hash[hv] = pos;

  HC_SKIP_FOOTER
}


void MatchFinder_CreateVTable(CMatchFinder *p, IMatchFinder2 *vTable)
{
  vTable->Init = (Mf_Init_Func)MatchFinder_Init;
  vTable->GetNumAvailableBytes = (Mf_GetNumAvailableBytes_Func)MatchFinder_GetNumAvailableBytes;
  vTable->GetPointerToCurrentPos = (Mf_GetPointerToCurrentPos_Func)MatchFinder_GetPointerToCurrentPos;
  if (!p->btMode)
  {
    if (p->numHashBytes <= 4)
    {
      vTable->GetMatches = (Mf_GetMatches_Func)Hc4_MatchFinder_GetMatches;
      vTable->Skip = (Mf_Skip_Func)Hc4_MatchFinder_Skip;
    }
    else
    {
      vTable->GetMatches = (Mf_GetMatches_Func)Hc5_MatchFinder_GetMatches;
      vTable->Skip = (Mf_Skip_Func)Hc5_MatchFinder_Skip;
    }
  }
  else if (p->numHashBytes == 2)
  {
    vTable->GetMatches = (Mf_GetMatches_Func)Bt2_MatchFinder_GetMatches;
    vTable->Skip = (Mf_Skip_Func)Bt2_MatchFinder_Skip;
  }
  else if (p->numHashBytes == 3)
  {
    vTable->GetMatches = (Mf_GetMatches_Func)Bt3_MatchFinder_GetMatches;
    vTable->Skip = (Mf_Skip_Func)Bt3_MatchFinder_Skip;
  }
  else if (p->numHashBytes == 4)
  {
    vTable->GetMatches = (Mf_GetMatches_Func)Bt4_MatchFinder_GetMatches;
    vTable->Skip = (Mf_Skip_Func)Bt4_MatchFinder_Skip;
  }
  else
  {
    vTable->GetMatches = (Mf_GetMatches_Func)Bt5_MatchFinder_GetMatches;
    vTable->Skip = (Mf_Skip_Func)Bt5_MatchFinder_Skip;
  }
}



void LzFindPrepare()
{
  #ifndef FORCE_SATUR_SUB_128
  #ifdef USE_SATUR_SUB_128
  LZFIND_SATUR_SUB_CODE_FUNC f = NULL;
  #ifdef MY_CPU_ARM_OR_ARM64
  {
    if (CPU_IsSupported_NEON())
    {
      // #pragma message ("=== LzFind NEON")
      _PRF(printf("\n=== LzFind NEON\n"));
      f = LzFind_SaturSub_128;
    }
    // f = 0; // for debug
  }
  #else // MY_CPU_ARM_OR_ARM64
  if (CPU_IsSupported_SSE41())
  {
    // #pragma message ("=== LzFind SSE41")
    _PRF(printf("\n=== LzFind SSE41\n"));
    f = LzFind_SaturSub_128;

    #ifdef USE_AVX2
    if (CPU_IsSupported_AVX2())
    {
      // #pragma message ("=== LzFind AVX2")
      _PRF(printf("\n=== LzFind AVX2\n"));
      f = LzFind_SaturSub_256;
    }
    #endif
  }
  #endif // MY_CPU_ARM_OR_ARM64
  g_LzFind_SaturSub = f;
  #endif // USE_SATUR_SUB_128
  #endif // FORCE_SATUR_SUB_128
}
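

/* Editor's note: a minimal usage sketch of the match-finder API defined in
   this file, assuming the declarations from LzFind.h and an allocator such
   as &g_Alloc from Alloc.h. Names not defined in this file are assumptions,
   and the parameter values are only illustrative, not recommended settings. */
#if 0
static void Example_UseMatchFinder(ISeqInStream *inStream, ISzAllocPtr alloc)
{
  CMatchFinder mf;
  IMatchFinder2 vt;
  // room for one (len, dist) pair per possible length up to matchMaxLen
  UInt32 distances[273 * 2 + 2];

  LzFindPrepare();                  // optional: selects SSE4.1/AVX2/NEON normalization
  MatchFinder_Construct(&mf);
  mf.stream = inStream;
  // historySize = 1 MiB, no extra buffers, matchMaxLen = 273 (the LZMA maximum)
  if (MatchFinder_Create(&mf, 1 << 20, 0, 273, 0, alloc))
  {
    MatchFinder_CreateVTable(&mf, &vt);
    vt.Init(&mf);
    while (vt.GetNumAvailableBytes(&mf) != 0)
    {
      // pairs of (length, distance - 1); the return value marks the end of the pairs
      const UInt32 *end = vt.GetMatches(&mf, distances);
      (void)end;
      // ... choose a match or emit a literal, then advance with vt.Skip(&mf, n) ...
    }
  }
  MatchFinder_Free(&mf, alloc);
}
#endif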