/root/doris/contrib/faiss/faiss/clone_index.cpp
Line | Count | Source |
1 | | /* |
2 | | * Copyright (c) Meta Platforms, Inc. and affiliates. |
3 | | * |
4 | | * This source code is licensed under the MIT license found in the |
5 | | * LICENSE file in the root directory of this source tree. |
6 | | */ |
7 | | |
8 | | // -*- c++ -*- |
9 | | |
10 | | #include <faiss/clone_index.h> |
11 | | |
12 | | #include <cstdio> |
13 | | #include <cstdlib> |
14 | | |
15 | | #include <faiss/impl/FaissAssert.h> |
16 | | |
17 | | #include <faiss/Index2Layer.h> |
18 | | #include <faiss/IndexAdditiveQuantizer.h> |
19 | | #include <faiss/IndexAdditiveQuantizerFastScan.h> |
20 | | #include <faiss/IndexBinary.h> |
21 | | #include <faiss/IndexBinaryFlat.h> |
22 | | #include <faiss/IndexBinaryHNSW.h> |
23 | | #include <faiss/IndexBinaryIVF.h> |
24 | | #include <faiss/IndexFlat.h> |
25 | | #include <faiss/IndexHNSW.h> |
26 | | #include <faiss/IndexIVF.h> |
27 | | #include <faiss/IndexIVFAdditiveQuantizerFastScan.h> |
28 | | #include <faiss/IndexIVFFlat.h> |
29 | | #include <faiss/IndexIVFPQ.h> |
30 | | #include <faiss/IndexIVFPQFastScan.h> |
31 | | #include <faiss/IndexIVFPQR.h> |
32 | | #include <faiss/IndexIVFSpectralHash.h> |
33 | | #include <faiss/IndexLSH.h> |
34 | | #include <faiss/IndexLattice.h> |
35 | | #include <faiss/IndexNSG.h> |
36 | | #include <faiss/IndexPQ.h> |
37 | | #include <faiss/IndexPQFastScan.h> |
38 | | #include <faiss/IndexPreTransform.h> |
39 | | #include <faiss/IndexRefine.h> |
40 | | #include <faiss/IndexRowwiseMinMax.h> |
41 | | #include <faiss/IndexScalarQuantizer.h> |
42 | | |
43 | | #include <faiss/MetaIndexes.h> |
44 | | #include <faiss/VectorTransform.h> |
45 | | |
46 | | #include <faiss/impl/LocalSearchQuantizer.h> |
47 | | #include <faiss/impl/ProductQuantizer.h> |
48 | | #include <faiss/impl/ResidualQuantizer.h> |
49 | | #include <faiss/impl/ScalarQuantizer.h> |
50 | | #include <faiss/impl/pq4_fast_scan.h> |
51 | | |
52 | | #include <faiss/invlists/BlockInvertedLists.h> |
53 | | |
54 | | namespace faiss { |
55 | | |
56 | | /************************************************************* |
57 | | * cloning functions |
58 | | **************************************************************/ |
59 | | |
60 | 0 | Index* clone_index(const Index* index) { |
61 | 0 | Cloner cl; |
62 | 0 | return cl.clone_Index(index); |
63 | 0 | } |
64 | | |
// Assumes there is a copy constructor ready. Always try from most
// specific to most general. Most indexes don't have complicated
// structs, the default copy constructor often just works.
//
// NOTE: the macro deliberately ends with a dangling `else`, so
// consecutive TRYCLONE invocations chain into one if / else-if ladder:
// the first successful dynamic_cast returns a copy, and the `{ ... }`
// block the caller writes after the last TRYCLONE becomes the final
// else branch (typically a FAISS_THROW_MSG for unsupported types).
#define TRYCLONE(classname, obj) \
    if (const classname* clo##classname = \
                dynamic_cast<const classname*>(obj)) { \
        return new classname(*clo##classname); \
    } else
73 | | |
// Clones a VectorTransform via copy construction. Order matters: the
// most derived known types are tried before the LinearTransform base,
// otherwise the copy would slice to the base class. The final brace
// block is the else-branch of the last TRYCLONE and throws for
// unsupported transform types.
VectorTransform* Cloner::clone_VectorTransform(const VectorTransform* vt) {
    TRYCLONE(RemapDimensionsTransform, vt)
    TRYCLONE(OPQMatrix, vt)
    TRYCLONE(PCAMatrix, vt)
    TRYCLONE(ITQMatrix, vt)
    TRYCLONE(RandomRotationMatrix, vt)
    TRYCLONE(LinearTransform, vt) {
        FAISS_THROW_MSG("clone not supported for this type of VectorTransform");
    }
    // unreachable: every path above either returns or throws; kept to
    // silence missing-return warnings.
    return nullptr;
}
85 | | |
// Clones an IndexIVF by copy-constructing the most derived matching
// type. Order matters: subclasses must be tried before their bases
// (IndexIVFPQR before IndexIVFPQ, IndexIVFFlatDedup before
// IndexIVFFlat, FastScan variants before their plain counterparts),
// otherwise the copy would slice to the base type.
// NOTE: the copy constructors share pointers (invlists, quantizer, aq);
// the caller (clone_Index) is responsible for deep-copying those.
IndexIVF* Cloner::clone_IndexIVF(const IndexIVF* ivf) {
    TRYCLONE(IndexIVFPQR, ivf)
    TRYCLONE(IndexIVFPQ, ivf)

    TRYCLONE(IndexIVFLocalSearchQuantizer, ivf)
    TRYCLONE(IndexIVFProductLocalSearchQuantizer, ivf)
    TRYCLONE(IndexIVFProductResidualQuantizer, ivf)
    TRYCLONE(IndexIVFResidualQuantizer, ivf)

    TRYCLONE(IndexIVFLocalSearchQuantizerFastScan, ivf)
    TRYCLONE(IndexIVFProductLocalSearchQuantizerFastScan, ivf)
    TRYCLONE(IndexIVFProductResidualQuantizerFastScan, ivf)
    TRYCLONE(IndexIVFResidualQuantizerFastScan, ivf)
    TRYCLONE(IndexIVFPQFastScan, ivf)

    TRYCLONE(IndexIVFFlatDedup, ivf)
    TRYCLONE(IndexIVFFlat, ivf)

    TRYCLONE(IndexIVFSpectralHash, ivf)

    TRYCLONE(IndexIVFScalarQuantizer, ivf) {
        FAISS_THROW_MSG("clone not supported for this type of IndexIVF");
    }
    // unreachable: every path above either returns or throws.
    return nullptr;
}
111 | | |
112 | 0 | IndexBinaryIVF* clone_IndexBinaryIVF(const IndexBinaryIVF* ivf) { |
113 | 0 | TRYCLONE(IndexBinaryIVF, ivf) |
114 | 0 | return nullptr; |
115 | 0 | } |
116 | | |
// Clones an IndexRefine (the flat variant is tried first so it is not
// sliced to the base). The TRYCLONE ladder either returns a copy or
// throws, so control never falls off the end of the function.
IndexRefine* clone_IndexRefine(const IndexRefine* ir) {
    TRYCLONE(IndexRefineFlat, ir)
    TRYCLONE(IndexRefine, ir) {
        FAISS_THROW_MSG("clone not supported for this type of IndexRefine");
    }
}
123 | | |
// Clones an IndexIDMap (IndexIDMap2 first, so the derived type is not
// sliced). Either returns a copy or throws; never falls through.
IndexIDMap* clone_IndexIDMap(const IndexIDMap* im) {
    TRYCLONE(IndexIDMap2, im)
    TRYCLONE(IndexIDMap, im) {
        FAISS_THROW_MSG("clone not supported for this type of IndexIDMap");
    }
}
130 | | |
// Clones an IndexHNSW via copy construction; derived variants are tried
// before the IndexHNSW base to avoid slicing. The copied `storage`
// pointer still aliases the source; the caller deep-copies it.
IndexHNSW* clone_IndexHNSW(const IndexHNSW* ihnsw) {
    TRYCLONE(IndexHNSW2Level, ihnsw)
    TRYCLONE(IndexHNSWFlat, ihnsw)
    TRYCLONE(IndexHNSWPQ, ihnsw)
    TRYCLONE(IndexHNSWSQ, ihnsw)
    TRYCLONE(IndexHNSW, ihnsw) {
        FAISS_THROW_MSG("clone not supported for this type of IndexHNSW");
    }
}
140 | | |
141 | 0 | IndexBinaryHNSW* clone_IndexBinaryHNSW(const IndexBinaryHNSW* ihnsw) { |
142 | 0 | TRYCLONE(IndexBinaryHNSW, ihnsw) |
143 | 0 | return nullptr; |
144 | 0 | } |
145 | | |
// Clones an IndexNNDescent (flat variant first to avoid slicing).
// Either returns a copy or throws; never falls through.
IndexNNDescent* clone_IndexNNDescent(const IndexNNDescent* innd) {
    TRYCLONE(IndexNNDescentFlat, innd)
    TRYCLONE(IndexNNDescent, innd) {
        FAISS_THROW_MSG("clone not supported for this type of IndexNNDescent");
    }
}
152 | | |
153 | 0 | IndexNSG* clone_IndexNSG(const IndexNSG* insg) { |
154 | 0 | TRYCLONE(IndexNSGFlat, insg) |
155 | 0 | TRYCLONE(IndexNSGPQ, insg) |
156 | 0 | TRYCLONE(IndexNSGSQ, insg) |
157 | 0 | TRYCLONE(IndexNSG, insg) { |
158 | 0 | FAISS_THROW_MSG("clone not supported for this type of IndexNNDescent"); |
159 | 0 | } |
160 | 0 | } |
161 | | |
// Clones a rowwise min-max wrapper index (FP16 variant first to avoid
// slicing). The copied sub-index pointer still aliases the source; the
// caller deep-copies it. Either returns a copy or throws.
IndexRowwiseMinMaxBase* clone_IndexRowwiseMinMax(
        const IndexRowwiseMinMaxBase* irmmb) {
    TRYCLONE(IndexRowwiseMinMaxFP16, irmmb)
    TRYCLONE(IndexRowwiseMinMax, irmmb) {
        FAISS_THROW_MSG(
                "clone not supported for this type of IndexRowwiseMinMax");
    }
}
170 | | |
// Declares `res` as `index` (in scope at the expansion site) downcast to
// `classname`; intended for use inside an if-condition, so `res` is
// non-null exactly in the taken branch.
#define TRYCAST(classname) classname* res = dynamic_cast<classname*>(index)
172 | | |
// After an additive-quantizer index is copy-constructed, its `aq`
// pointer still refers to the quantizer field embedded in the *source*
// object (the default copy constructor copies the raw pointer). This
// helper re-points `aq` at the clone's own embedded quantizer
// (lsq / rq / plsq / prq); for the product variants it also replaces
// the sub-quantizer pointers, which the copy constructor shared, with
// owned deep copies. Throws for unrecognized index types.
// NOTE: order matters — the IVF FastScan variants are tried first, then
// plain IVF, then flat FastScan, then plain flat, then the coarse
// quantizers, mirroring the class hierarchy.
void reset_AdditiveQuantizerIndex(Index* index) {
    // Replace each shared AdditiveQuantizer* with an owned deep copy.
    auto clone_ProductQuantizers =
            [](std::vector<AdditiveQuantizer*>& quantizers) {
                for (auto& q : quantizers) {
                    q = dynamic_cast<AdditiveQuantizer*>(clone_Quantizer(q));
                }
            };
    if (TRYCAST(IndexIVFLocalSearchQuantizerFastScan)) {
        res->aq = &res->lsq;
    } else if (TRYCAST(IndexIVFResidualQuantizerFastScan)) {
        res->aq = &res->rq;
    } else if (TRYCAST(IndexIVFProductLocalSearchQuantizerFastScan)) {
        res->aq = &res->plsq;
        clone_ProductQuantizers(res->plsq.quantizers);
    } else if (TRYCAST(IndexIVFProductResidualQuantizerFastScan)) {
        res->aq = &res->prq;
        clone_ProductQuantizers(res->prq.quantizers);
    } else if (TRYCAST(IndexIVFLocalSearchQuantizer)) {
        res->aq = &res->lsq;
    } else if (TRYCAST(IndexIVFResidualQuantizer)) {
        res->aq = &res->rq;
    } else if (TRYCAST(IndexIVFProductLocalSearchQuantizer)) {
        res->aq = &res->plsq;
        clone_ProductQuantizers(res->plsq.quantizers);
    } else if (TRYCAST(IndexIVFProductResidualQuantizer)) {
        res->aq = &res->prq;
        clone_ProductQuantizers(res->prq.quantizers);
    } else if (TRYCAST(IndexLocalSearchQuantizerFastScan)) {
        res->aq = &res->lsq;
    } else if (TRYCAST(IndexResidualQuantizerFastScan)) {
        res->aq = &res->rq;
    } else if (TRYCAST(IndexProductLocalSearchQuantizerFastScan)) {
        res->aq = &res->plsq;
        clone_ProductQuantizers(res->plsq.quantizers);
    } else if (TRYCAST(IndexProductResidualQuantizerFastScan)) {
        res->aq = &res->prq;
        clone_ProductQuantizers(res->prq.quantizers);
    } else if (TRYCAST(IndexLocalSearchQuantizer)) {
        res->aq = &res->lsq;
    } else if (TRYCAST(IndexResidualQuantizer)) {
        res->aq = &res->rq;
    } else if (TRYCAST(IndexProductLocalSearchQuantizer)) {
        res->aq = &res->plsq;
        clone_ProductQuantizers(res->plsq.quantizers);
    } else if (TRYCAST(IndexProductResidualQuantizer)) {
        res->aq = &res->prq;
        clone_ProductQuantizers(res->prq.quantizers);
    } else if (TRYCAST(LocalSearchCoarseQuantizer)) {
        res->aq = &res->lsq;
    } else if (TRYCAST(ResidualCoarseQuantizer)) {
        res->aq = &res->rq;
    } else {
        FAISS_THROW_MSG(
                "clone not supported for this type of additive quantizer index");
    }
}
229 | | |
// Copy-constructs the concrete additive-quantizer index type. The
// resulting clone still has its internal `aq` pointer aliasing the
// source; the caller must follow up with reset_AdditiveQuantizerIndex()
// to re-point it at the clone's own quantizer.
Index* clone_AdditiveQuantizerIndex(const Index* index) {
    // IndexAdditiveQuantizer
    TRYCLONE(IndexResidualQuantizer, index)
    TRYCLONE(IndexProductResidualQuantizer, index)
    TRYCLONE(IndexLocalSearchQuantizer, index)
    TRYCLONE(IndexProductLocalSearchQuantizer, index)

    // IndexFastScan
    TRYCLONE(IndexResidualQuantizerFastScan, index)
    TRYCLONE(IndexLocalSearchQuantizerFastScan, index)
    TRYCLONE(IndexProductResidualQuantizerFastScan, index)
    TRYCLONE(IndexProductLocalSearchQuantizerFastScan, index)

    // AdditiveCoarseQuantizer
    TRYCLONE(ResidualCoarseQuantizer, index)
    TRYCLONE(LocalSearchCoarseQuantizer, index) {
        FAISS_THROW_MSG(
                "clone not supported for this type of additive quantizer index");
    }
}
250 | | |
251 | | namespace { |
252 | | |
253 | 0 | InvertedLists* clone_InvertedLists(const InvertedLists* invlists) { |
254 | 0 | if (auto* ails = dynamic_cast<const ArrayInvertedLists*>(invlists)) { |
255 | 0 | return new ArrayInvertedLists(*ails); |
256 | 0 | } |
257 | 0 | if (auto* bils = dynamic_cast<const BlockInvertedLists*>(invlists)) { |
258 | 0 | auto* bils2 = new BlockInvertedLists(*bils); |
259 | 0 | if (bils->packer) { |
260 | 0 | auto* packerPQ4 = dynamic_cast<const CodePackerPQ4*>(bils->packer); |
261 | 0 | FAISS_THROW_IF_NOT(packerPQ4); |
262 | 0 | bils2->packer = new CodePackerPQ4(*packerPQ4); |
263 | 0 | } |
264 | 0 | return bils2; |
265 | 0 | } |
266 | 0 | FAISS_THROW_FMT( |
267 | 0 | "clone not supported for this type of inverted lists %s", |
268 | 0 | typeid(*invlists).name()); |
269 | 0 | } |
270 | | |
271 | | } // anonymous namespace |
272 | | |
273 | 0 | Index* Cloner::clone_Index(const Index* index) { |
274 | 0 | TRYCLONE(IndexPQ, index) |
275 | 0 | TRYCLONE(IndexLSH, index) |
276 | | |
277 | | // IndexFlat |
278 | 0 | TRYCLONE(IndexFlat1D, index) |
279 | 0 | TRYCLONE(IndexFlatL2, index) |
280 | 0 | TRYCLONE(IndexFlatIP, index) |
281 | 0 | TRYCLONE(IndexFlat, index) |
282 | | |
283 | 0 | TRYCLONE(IndexLattice, index) |
284 | 0 | TRYCLONE(IndexRandom, index) |
285 | 0 | TRYCLONE(IndexPQFastScan, index) |
286 | | |
287 | 0 | TRYCLONE(IndexScalarQuantizer, index) |
288 | 0 | TRYCLONE(MultiIndexQuantizer, index) |
289 | | |
290 | 0 | if (const IndexIVF* ivf = dynamic_cast<const IndexIVF*>(index)) { |
291 | 0 | IndexIVF* res = clone_IndexIVF(ivf); |
292 | 0 | if (ivf->invlists == nullptr) { |
293 | 0 | res->invlists = nullptr; |
294 | 0 | } else { |
295 | 0 | res->invlists = clone_InvertedLists(ivf->invlists); |
296 | 0 | res->own_invlists = true; |
297 | 0 | } |
298 | |
|
299 | 0 | res->own_fields = true; |
300 | 0 | res->quantizer = clone_Index(ivf->quantizer); |
301 | |
|
302 | 0 | if (dynamic_cast<const IndexIVFAdditiveQuantizerFastScan*>(res) || |
303 | 0 | dynamic_cast<const IndexIVFAdditiveQuantizer*>(res)) { |
304 | 0 | reset_AdditiveQuantizerIndex(res); |
305 | 0 | } |
306 | 0 | return res; |
307 | 0 | } else if ( |
308 | 0 | const IndexPreTransform* ipt = |
309 | 0 | dynamic_cast<const IndexPreTransform*>(index)) { |
310 | 0 | IndexPreTransform* res = new IndexPreTransform(); |
311 | 0 | res->d = ipt->d; |
312 | 0 | res->ntotal = ipt->ntotal; |
313 | 0 | res->is_trained = ipt->is_trained; |
314 | 0 | res->metric_type = ipt->metric_type; |
315 | 0 | res->metric_arg = ipt->metric_arg; |
316 | |
|
317 | 0 | res->index = clone_Index(ipt->index); |
318 | 0 | for (int i = 0; i < ipt->chain.size(); i++) |
319 | 0 | res->chain.push_back(clone_VectorTransform(ipt->chain[i])); |
320 | 0 | res->own_fields = true; |
321 | 0 | return res; |
322 | 0 | } else if ( |
323 | 0 | const IndexIDMap* idmap = dynamic_cast<const IndexIDMap*>(index)) { |
324 | 0 | IndexIDMap* res = clone_IndexIDMap(idmap); |
325 | 0 | res->own_fields = true; |
326 | 0 | res->index = clone_Index(idmap->index); |
327 | 0 | return res; |
328 | 0 | } else if (const IndexHNSW* ihnsw = dynamic_cast<const IndexHNSW*>(index)) { |
329 | 0 | IndexHNSW* res = clone_IndexHNSW(ihnsw); |
330 | 0 | res->own_fields = true; |
331 | | // make sure we don't get a GPU index here |
332 | 0 | res->storage = Cloner::clone_Index(ihnsw->storage); |
333 | 0 | return res; |
334 | 0 | } else if (const IndexNSG* insg = dynamic_cast<const IndexNSG*>(index)) { |
335 | 0 | IndexNSG* res = clone_IndexNSG(insg); |
336 | | |
337 | | // copy the dynamic allocated graph |
338 | 0 | if (auto& old_graph = insg->nsg.final_graph) { |
339 | 0 | auto& new_graph = res->nsg.final_graph; |
340 | 0 | new_graph = std::make_shared<nsg::Graph<int>>(*old_graph); |
341 | 0 | } |
342 | |
|
343 | 0 | res->own_fields = true; |
344 | 0 | res->storage = clone_Index(insg->storage); |
345 | 0 | return res; |
346 | 0 | } else if ( |
347 | 0 | const IndexNNDescent* innd = |
348 | 0 | dynamic_cast<const IndexNNDescent*>(index)) { |
349 | 0 | IndexNNDescent* res = clone_IndexNNDescent(innd); |
350 | 0 | res->own_fields = true; |
351 | 0 | res->storage = clone_Index(innd->storage); |
352 | 0 | return res; |
353 | 0 | } else if ( |
354 | 0 | const Index2Layer* i2l = dynamic_cast<const Index2Layer*>(index)) { |
355 | 0 | Index2Layer* res = new Index2Layer(*i2l); |
356 | 0 | res->q1.own_fields = true; |
357 | 0 | res->q1.quantizer = clone_Index(i2l->q1.quantizer); |
358 | 0 | return res; |
359 | 0 | } else if ( |
360 | 0 | const IndexRefine* ir = dynamic_cast<const IndexRefine*>(index)) { |
361 | 0 | IndexRefine* res = clone_IndexRefine(ir); |
362 | 0 | res->own_fields = true; |
363 | 0 | res->base_index = clone_Index(ir->base_index); |
364 | 0 | if (ir->refine_index != nullptr) { |
365 | 0 | res->own_refine_index = true; |
366 | 0 | res->refine_index = clone_Index(ir->refine_index); |
367 | 0 | } |
368 | 0 | return res; |
369 | 0 | } else if ( |
370 | 0 | const IndexRowwiseMinMaxBase* irmmb = |
371 | 0 | dynamic_cast<const IndexRowwiseMinMaxBase*>(index)) { |
372 | 0 | IndexRowwiseMinMaxBase* res = clone_IndexRowwiseMinMax(irmmb); |
373 | 0 | res->own_fields = true; |
374 | 0 | res->index = clone_Index(irmmb->index); |
375 | 0 | } else if ( |
376 | 0 | dynamic_cast<const IndexAdditiveQuantizerFastScan*>(index) || |
377 | 0 | dynamic_cast<const IndexAdditiveQuantizer*>(index) || |
378 | 0 | dynamic_cast<const AdditiveCoarseQuantizer*>(index)) { |
379 | 0 | Index* res = clone_AdditiveQuantizerIndex(index); |
380 | 0 | reset_AdditiveQuantizerIndex(res); |
381 | 0 | return res; |
382 | 0 | } else { |
383 | 0 | FAISS_THROW_FMT( |
384 | 0 | "clone not supported for this Index type %s", |
385 | 0 | typeid(*index).name()); |
386 | 0 | } |
387 | 0 | return nullptr; |
388 | 0 | } // namespace |
389 | | |
390 | 0 | Quantizer* clone_Quantizer(const Quantizer* quant) { |
391 | 0 | TRYCLONE(ResidualQuantizer, quant) |
392 | 0 | TRYCLONE(LocalSearchQuantizer, quant) |
393 | 0 | TRYCLONE(ProductQuantizer, quant) |
394 | 0 | TRYCLONE(ScalarQuantizer, quant) |
395 | 0 | FAISS_THROW_MSG("Did not recognize quantizer to clone"); |
396 | 0 | } |
397 | | |
398 | 0 | IndexBinary* clone_binary_index(const IndexBinary* index) { |
399 | 0 | if (auto ii = dynamic_cast<const IndexBinaryFlat*>(index)) { |
400 | 0 | return new IndexBinaryFlat(*ii); |
401 | 0 | } else if ( |
402 | 0 | const IndexBinaryIVF* ivf = |
403 | 0 | dynamic_cast<const IndexBinaryIVF*>(index)) { |
404 | 0 | IndexBinaryIVF* res = clone_IndexBinaryIVF(ivf); |
405 | 0 | if (ivf->invlists == nullptr) { |
406 | 0 | res->invlists = nullptr; |
407 | 0 | } else { |
408 | 0 | res->invlists = clone_InvertedLists(ivf->invlists); |
409 | 0 | res->own_invlists = true; |
410 | 0 | } |
411 | |
|
412 | 0 | res->own_fields = true; |
413 | 0 | res->quantizer = clone_binary_index(ivf->quantizer); |
414 | |
|
415 | 0 | return res; |
416 | 0 | } else if ( |
417 | 0 | const IndexBinaryHNSW* ihnsw = |
418 | 0 | dynamic_cast<const IndexBinaryHNSW*>(index)) { |
419 | 0 | IndexBinaryHNSW* res = clone_IndexBinaryHNSW(ihnsw); |
420 | 0 | res->own_fields = true; |
421 | 0 | res->storage = clone_binary_index(ihnsw->storage); |
422 | 0 | return res; |
423 | 0 | } else { |
424 | 0 | FAISS_THROW_MSG("cannot clone this type of index"); |
425 | 0 | } |
426 | 0 | } |
427 | | |
428 | | } // namespace faiss |