/**
 * Reads blob with specified name without resolving the blobName using the {@link #blobName} method.
 *
 * @param blobContainer blob container
 * @param blobName      blob name
 */
public T readBlob(BlobContainer blobContainer, String blobName) throws IOException {
    try (InputStream inputStream = blobContainer.readBlob(blobName)) {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        Streams.copy(inputStream, out);
        final byte[] bytes = out.toByteArray();
        final String resourceDesc = "ChecksumBlobStoreFormat.readBlob(blob=\"" + blobName + "\")";
        try (ByteArrayIndexInput indexInput = new ByteArrayIndexInput(resourceDesc, bytes)) {
            CodecUtil.checksumEntireFile(indexInput);
            CodecUtil.checkHeader(indexInput, codec, VERSION, VERSION);
            long filePointer = indexInput.getFilePointer();
            long contentSize = indexInput.length() - CodecUtil.footerLength() - filePointer;
            BytesReference bytesReference = new BytesArray(bytes, (int) filePointer, (int) contentSize);
            return read(bytesReference);
        } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) {
            // we trick this into a dedicated exception with the original stacktrace
            throw new CorruptStateException(ex);
        }
    }
}
/**
 * Like {@link #checkHeader(DataInput,String,int,int)} except this
 * version assumes the first int has already been read
 * and validated from the input.
 */
public static int checkHeaderNoMagic(DataInput in, String codec, int minVersion, int maxVersion) throws IOException {
    final String actualCodec = in.readString();
    if (!actualCodec.equals(codec)) {
        throw new CorruptIndexException("codec mismatch: actual codec=" + actualCodec
            + " vs expected codec=" + codec + " (resource: " + in + ")");
    }

    final int actualVersion = in.readInt();
    if (actualVersion < minVersion) {
        throw new IndexFormatTooOldException(in, actualVersion, minVersion, maxVersion);
    }
    if (actualVersion > maxVersion) {
        throw new IndexFormatTooNewException(in, actualVersion, minVersion, maxVersion);
    }

    return actualVersion;
}
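/*
 * Sketch only: the checkHeader(DataInput,String,int,int) method referenced in the Javadoc above presumably
 * reads and validates the leading magic int and then delegates to checkHeaderNoMagic. The CODEC_MAGIC
 * constant and exception wording below are illustrative assumptions, not copied from the library.
 */
public static int checkHeader(DataInput in, String codec, int minVersion, int maxVersion) throws IOException {
    final int actualHeader = in.readInt(); // the "first int" that checkHeaderNoMagic assumes was already read
    if (actualHeader != CODEC_MAGIC) {     // CODEC_MAGIC: assumed constant identifying a codec header
        throw new CorruptIndexException("codec header mismatch: actual header=" + actualHeader
            + " vs expected header=" + CODEC_MAGIC + " (resource: " + in + ")");
    }
    // codec name and version range are validated by the method above
    return checkHeaderNoMagic(in, codec, minVersion, maxVersion);
}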
/**
 * Reads blob with specified name without resolving the blobName using the {@link #blobName} method.
 *
 * @param blobContainer blob container
 * @param blobName      blob name
 */
public T readBlob(BlobContainer blobContainer, String blobName) throws IOException {
    try (InputStream inputStream = blobContainer.readBlob(blobName)) {
        byte[] bytes = ByteStreams.toByteArray(inputStream);
        final String resourceDesc = "ChecksumBlobStoreFormat.readBlob(blob=\"" + blobName + "\")";
        try (ByteArrayIndexInput indexInput = new ByteArrayIndexInput(resourceDesc, bytes)) {
            CodecUtil.checksumEntireFile(indexInput);
            CodecUtil.checkHeader(indexInput, codec, VERSION, VERSION);
            long filePointer = indexInput.getFilePointer();
            long contentSize = indexInput.length() - CodecUtil.footerLength() - filePointer;
            BytesReference bytesReference = new BytesArray(bytes, (int) filePointer, (int) contentSize);
            return read(bytesReference);
        } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) {
            // we trick this into a dedicated exception with the original stacktrace
            throw new CorruptStateException(ex);
        }
    }
}
/**
 * Reads the state from a given file and compares the expected version against the actual version of
 * the state.
 */
public final T read(Path file) throws IOException {
    try (Directory dir = newDirectory(file.getParent())) {
        try (final IndexInput indexInput = dir.openInput(file.getFileName().toString(), IOContext.DEFAULT)) {
            // We checksum the entire file before we even go and parse it. If it's corrupted we barf right here.
            CodecUtil.checksumEntireFile(indexInput);
            CodecUtil.checkHeader(indexInput, STATE_FILE_CODEC, STATE_FILE_VERSION, STATE_FILE_VERSION);
            final XContentType xContentType = XContentType.values()[indexInput.readInt()];
            indexInput.readLong(); // version currently unused
            long filePointer = indexInput.getFilePointer();
            long contentSize = indexInput.length() - CodecUtil.footerLength() - filePointer;
            try (IndexInput slice = indexInput.slice("state_xcontent", filePointer, contentSize)) {
                try (XContentParser parser = XContentFactory.xContent(xContentType)
                        .createParser(new InputStreamIndexInput(slice, contentSize))) {
                    return fromXContent(parser);
                }
            }
        } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) {
            // we trick this into a dedicated exception with the original stacktrace
            throw new CorruptStateException(ex);
        }
    }
}
private void failStoreIfCorrupted(Exception e) {
    if (e instanceof CorruptIndexException || e instanceof IndexFormatTooOldException || e instanceof IndexFormatTooNewException) {
        try {
            store.markStoreCorrupted((IOException) e);
        } catch (IOException inner) {
            inner.addSuppressed(e);
            logger.warn("store cannot be marked as corrupted", inner);
        }
    }
}
/**
 * Restores a file.
 * This is an asynchronous method. Upon completion of the operation the latch is counted down and any failures are
 * added to the {@code failures} list.
 *
 * @param fileInfo file to be restored
 */
private void restoreFile(final BlobStoreIndexShardSnapshot.FileInfo fileInfo, final Store store) throws IOException {
    boolean success = false;
    try (InputStream partSliceStream = new PartSliceStream(blobContainer, fileInfo)) {
        final InputStream stream;
        if (restoreRateLimiter == null) {
            stream = partSliceStream;
        } else {
            stream = new RateLimitingInputStream(partSliceStream, restoreRateLimiter, restoreRateLimitingTimeInNanos::inc);
        }
        try (IndexOutput indexOutput = store.createVerifyingOutput(fileInfo.physicalName(), fileInfo.metadata(), IOContext.DEFAULT)) {
            final byte[] buffer = new byte[BUFFER_SIZE];
            int length;
            while ((length = stream.read(buffer)) > 0) {
                indexOutput.writeBytes(buffer, 0, length);
                recoveryState.getIndex().addRecoveredBytesToFile(fileInfo.name(), length);
            }
            Store.verify(indexOutput);
            indexOutput.close();
            store.directory().sync(Collections.singleton(fileInfo.physicalName()));
            success = true;
        } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) {
            try {
                store.markStoreCorrupted(ex);
            } catch (IOException e) {
                logger.warn("store cannot be marked as corrupted", e);
            }
            throw ex;
        } finally {
            if (success == false) {
                store.deleteQuiet(fileInfo.physicalName());
            }
        }
    }
}
/**
 * Reads the state from a given file and compares the expected version against the actual version of
 * the state.
 */
public final T read(NamedXContentRegistry namedXContentRegistry, Path file) throws IOException {
    try (Directory dir = newDirectory(file.getParent())) {
        try (IndexInput indexInput = dir.openInput(file.getFileName().toString(), IOContext.DEFAULT)) {
            // We checksum the entire file before we even go and parse it. If it's corrupted we barf right here.
            CodecUtil.checksumEntireFile(indexInput);
            final int fileVersion = CodecUtil.checkHeader(indexInput, STATE_FILE_CODEC, MIN_COMPATIBLE_STATE_FILE_VERSION,
                STATE_FILE_VERSION);
            final XContentType xContentType = XContentType.values()[indexInput.readInt()];
            if (fileVersion == STATE_FILE_VERSION_ES_2X_AND_BELOW) {
                // format version 0, wrote a version that always came from the content state file and was never used
                indexInput.readLong(); // version currently unused
            }
            long filePointer = indexInput.getFilePointer();
            long contentSize = indexInput.length() - CodecUtil.footerLength() - filePointer;
            try (IndexInput slice = indexInput.slice("state_xcontent", filePointer, contentSize)) {
                try (XContentParser parser = XContentFactory.xContent(xContentType)
                        .createParser(namedXContentRegistry, new InputStreamIndexInput(slice, contentSize))) {
                    return fromXContent(parser);
                }
            }
        } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) {
            // we trick this into a dedicated exception with the original stacktrace
            throw new CorruptStateException(ex);
        }
    }
}
public void testVerifyingIndexOutputOnEmptyFile() throws IOException {
    Directory dir = newDirectory();
    IndexOutput verifyingOutput = new Store.LuceneVerifyingIndexOutput(new StoreFileMetaData("foo.bar", 0, Store.digestToString(0)),
        dir.createOutput("foo1.bar", IOContext.DEFAULT));
    try {
        Store.verify(verifyingOutput);
        fail("should be a corrupted index");
    } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) {
        // ok
    }
    IOUtils.close(verifyingOutput, dir);
}
public void testVerifyingIndexOutputWithBogusInput() throws IOException {
    Directory dir = newDirectory();
    int length = scaledRandomIntBetween(10, 1024);
    IndexOutput verifyingOutput = new Store.LuceneVerifyingIndexOutput(new StoreFileMetaData("foo1.bar", length, ""),
        dir.createOutput("foo1.bar", IOContext.DEFAULT));
    try {
        while (length > 0) {
            verifyingOutput.writeByte((byte) random().nextInt());
            length--;
        }
        fail("should be a corrupted index");
    } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) {
        // ok
    }
    IOUtils.close(verifyingOutput, dir);
}
public void testVerifyingIndexInput() throws IOException {
    Directory dir = newDirectory();
    IndexOutput output = dir.createOutput("foo.bar", IOContext.DEFAULT);
    int iters = scaledRandomIntBetween(10, 100);
    for (int i = 0; i < iters; i++) {
        BytesRef bytesRef = new BytesRef(TestUtil.randomRealisticUnicodeString(random(), 10, 1024));
        output.writeBytes(bytesRef.bytes, bytesRef.offset, bytesRef.length);
    }
    CodecUtil.writeFooter(output);
    output.close();

    // Check file
    IndexInput indexInput = dir.openInput("foo.bar", IOContext.DEFAULT);
    long checksum = CodecUtil.retrieveChecksum(indexInput);
    indexInput.seek(0);
    IndexInput verifyingIndexInput = new Store.VerifyingIndexInput(dir.openInput("foo.bar", IOContext.DEFAULT));
    readIndexInputFullyWithRandomSeeks(verifyingIndexInput);
    Store.verify(verifyingIndexInput);
    assertThat(checksum, equalTo(((ChecksumIndexInput) verifyingIndexInput).getChecksum()));
    IOUtils.close(indexInput, verifyingIndexInput);

    // Corrupt file and check again
    corruptFile(dir, "foo.bar", "foo1.bar");
    verifyingIndexInput = new Store.VerifyingIndexInput(dir.openInput("foo1.bar", IOContext.DEFAULT));
    readIndexInputFullyWithRandomSeeks(verifyingIndexInput);
    try {
        Store.verify(verifyingIndexInput);
        fail("should be a corrupted index");
    } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) {
        // ok
    }
    IOUtils.close(verifyingIndexInput);
    IOUtils.close(dir);
}
private int checkValidFormat(IndexInput in) throws CorruptIndexException, IOException {
    int format = in.readInt();
    if (format < FORMAT_MINIMUM) {
        throw new IndexFormatTooOldException(in, format, FORMAT_MINIMUM, FORMAT_CURRENT);
    }
    if (format > FORMAT_CURRENT) {
        throw new IndexFormatTooNewException(in, format, FORMAT_MINIMUM, FORMAT_CURRENT);
    }
    return format;
}
/** Verifies that the code version which wrote the segment is supported. */
public static void checkCodeVersion(Directory dir, String segment) throws IOException {
    final String indexStreamFN = IndexFileNames.segmentFileName(segment, "", FIELDS_INDEX_EXTENSION);
    IndexInput idxStream = dir.openInput(indexStreamFN, IOContext.DEFAULT);
    try {
        int format = idxStream.readInt();
        if (format < FORMAT_MINIMUM) {
            throw new IndexFormatTooOldException(idxStream, format, FORMAT_MINIMUM, FORMAT_CURRENT);
        }
        if (format > FORMAT_CURRENT) {
            throw new IndexFormatTooNewException(idxStream, format, FORMAT_MINIMUM, FORMAT_CURRENT);
        }
    } finally {
        idxStream.close();
    }
}
private void failStoreIfCorrupted(Throwable t) {
    if (t instanceof CorruptIndexException || t instanceof IndexFormatTooOldException || t instanceof IndexFormatTooNewException) {
        try {
            store.markStoreCorrupted((IOException) t);
        } catch (IOException e) {
            logger.warn("store cannot be marked as corrupted", e);
        }
    }
}
public static IOException unwrapCorruption(Throwable t) {
    return (IOException) unwrap(t, CorruptIndexException.class,
        IndexFormatTooOldException.class,
        IndexFormatTooNewException.class);
}
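/*
 * Sketch only: unwrapCorruption above relies on an unwrap(Throwable, Class...) helper that walks the cause
 * chain and returns the first throwable matching one of the given types (or null). The real helper in the
 * codebase may differ, e.g. in how it guards against cyclic causes; this illustrative version bounds the depth.
 */
private static Throwable unwrap(Throwable t, Class<?>... clazzes) {
    if (t != null) {
        int iterations = 0;
        do {
            for (Class<?> clazz : clazzes) {
                if (clazz.isInstance(t)) {
                    return t; // first match in the cause chain wins
                }
            }
        } while ((t = t.getCause()) != null && ++iterations < 10); // depth bound avoids cyclic cause chains
    }
    return null;
}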
public void testWriteThrowable() throws IOException {
    final QueryShardException queryShardException = new QueryShardException(new Index("foo", "_na_"), "foobar", null);
    final UnknownException unknownException = new UnknownException("this exception is unknown", queryShardException);

    final Exception[] causes = new Exception[]{
        new IllegalStateException("foobar"),
        new IllegalArgumentException("alalaal"),
        new NullPointerException("boom"),
        new EOFException("dadada"),
        new ElasticsearchSecurityException("nono!"),
        new NumberFormatException("not a number"),
        new CorruptIndexException("baaaam booom", "this is my resource"),
        new IndexFormatTooNewException("tooo new", 1, 2, 3),
        new IndexFormatTooOldException("tooo new", 1, 2, 3),
        new IndexFormatTooOldException("tooo new", "very old version"),
        new ArrayIndexOutOfBoundsException("booom"),
        new StringIndexOutOfBoundsException("booom"),
        new FileNotFoundException("booom"),
        new NoSuchFileException("booom"),
        new AlreadyClosedException("closed!!", new NullPointerException()),
        new LockObtainFailedException("can't lock directory", new NullPointerException()),
        unknownException};
    for (final Exception cause : causes) {
        ElasticsearchException ex = new ElasticsearchException("topLevel", cause);
        ElasticsearchException deserialized = serialize(ex);
        assertEquals(deserialized.getMessage(), ex.getMessage());
        assertTrue("Expected: " + deserialized.getCause().getMessage() + " to contain: "
                + ex.getCause().getClass().getName() + " but it didn't",
            deserialized.getCause().getMessage().contains(ex.getCause().getMessage()));
        if (ex.getCause().getClass() != UnknownException.class) { // unknown exception is not directly mapped
            assertEquals(deserialized.getCause().getClass(), ex.getCause().getClass());
        } else {
            assertEquals(deserialized.getCause().getClass(), NotSerializableExceptionWrapper.class);
        }
        assertArrayEquals(deserialized.getStackTrace(), ex.getStackTrace());
        assertTrue(deserialized.getStackTrace().length > 1);
        assertVersionSerializable(VersionUtils.randomVersion(random()), cause);
        assertVersionSerializable(VersionUtils.randomVersion(random()), ex);
        assertVersionSerializable(VersionUtils.randomVersion(random()), deserialized);
    }
}
@Override
public FieldInfos read(Directory directory, String segmentName, String segmentSuffix, IOContext iocontext) throws IOException {
    final String fileName = IndexFileNames.segmentFileName(segmentName, "", FIELD_INFOS_EXTENSION);
    IndexInput input = directory.openInput(fileName, iocontext);
    boolean success = false;
    try {
        final int format = input.readVInt();

        if (format > FORMAT_MINIMUM) {
            throw new IndexFormatTooOldException(input, format, FORMAT_MINIMUM, FORMAT_CURRENT);
        }
        if (format < FORMAT_CURRENT) {
            throw new IndexFormatTooNewException(input, format, FORMAT_MINIMUM, FORMAT_CURRENT);
        }

        final int size = input.readVInt(); //read in the size
        FieldInfo infos[] = new FieldInfo[size];

        for (int i = 0; i < size; i++) {
            String name = input.readString();
            final int fieldNumber = i;
            byte bits = input.readByte();
            boolean isIndexed = (bits & IS_INDEXED) != 0;
            boolean storeTermVector = (bits & STORE_TERMVECTOR) != 0;
            boolean omitNorms = (bits & OMIT_NORMS) != 0;
            boolean storePayloads = (bits & STORE_PAYLOADS) != 0;
            final IndexOptions indexOptions;
            if (!isIndexed) {
                indexOptions = null;
            } else if ((bits & OMIT_TERM_FREQ_AND_POSITIONS) != 0) {
                indexOptions = IndexOptions.DOCS_ONLY;
            } else if ((bits & OMIT_POSITIONS) != 0) {
                if (format <= FORMAT_OMIT_POSITIONS) {
                    indexOptions = IndexOptions.DOCS_AND_FREQS;
                } else {
                    throw new CorruptIndexException("Corrupt fieldinfos, OMIT_POSITIONS set but format=" + format
                        + " (resource: " + input + ")");
                }
            } else {
                indexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
            }

            // LUCENE-3027: past indices were able to write
            // storePayloads=true when omitTFAP is also true,
            // which is invalid. We correct that, here:
            if (indexOptions != IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) {
                storePayloads = false;
            }
            infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector, omitNorms, storePayloads,
                indexOptions, null, isIndexed && !omitNorms ? DocValuesType.NUMERIC : null, -1,
                Collections.<String,String>emptyMap());
        }

        if (input.getFilePointer() != input.length()) {
            throw new CorruptIndexException("did not read all bytes from file \"" + fileName + "\": read " + input.getFilePointer()
                + " vs size " + input.length() + " (resource: " + input + ")");
        }
        FieldInfos fieldInfos = new FieldInfos(infos);
        success = true;
        return fieldInfos;
    } finally {
        if (success) {
            input.close();
        } else {
            IOUtils.closeWhileHandlingException(input);
        }
    }
}
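/*
 * Sketch only: in this legacy FieldInfos reader (and the pre-flex readers below) the version comparisons look
 * inverted compared to checkHeaderNoMagic. The format constants in these pre-4.0 style codecs are presumably
 * negative and become more negative as the format evolves, so a value greater than FORMAT_MINIMUM is too old
 * and a value less than FORMAT_CURRENT is too new. The constant values here are assumed, illustrative only.
 */
final class LegacyFormatCheckSketch {
    static final int FORMAT_MINIMUM = -2; // oldest format still readable (assumed value)
    static final int FORMAT_CURRENT = -3; // newest format written; more negative = newer (assumed value)

    static void checkFormat(int format) {
        if (format > FORMAT_MINIMUM) {
            throw new IllegalStateException("format too old: " + format); // e.g. 0 or -1
        }
        if (format < FORMAT_CURRENT) {
            throw new IllegalStateException("format too new: " + format); // e.g. -4
        }
    }
}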
/**
 * Restores a file.
 * This is an asynchronous method. Upon completion of the operation the latch is counted down and any failures are
 * added to the {@code failures} list.
 *
 * @param fileInfo file to be restored
 */
private void restoreFile(final FileInfo fileInfo) throws IOException {
    boolean success = false;
    try (InputStream partSliceStream = new PartSliceStream(blobContainer, fileInfo)) {
        final InputStream stream;
        if (restoreRateLimiter == null) {
            stream = partSliceStream;
        } else {
            stream = new RateLimitingInputStream(partSliceStream, restoreRateLimiter, restoreThrottleListener);
        }
        try (final IndexOutput indexOutput = store.createVerifyingOutput(fileInfo.physicalName(), fileInfo.metadata(), IOContext.DEFAULT)) {
            final byte[] buffer = new byte[BUFFER_SIZE];
            int length;
            while ((length = stream.read(buffer)) > 0) {
                indexOutput.writeBytes(buffer, 0, length);
                recoveryState.getIndex().addRecoveredBytesToFile(fileInfo.name(), length);
            }
            Store.verify(indexOutput);
            indexOutput.close();
            // write the checksum
            if (fileInfo.metadata().hasLegacyChecksum()) {
                Store.LegacyChecksums legacyChecksums = new Store.LegacyChecksums();
                legacyChecksums.add(fileInfo.metadata());
                legacyChecksums.write(store);
            }
            store.directory().sync(Collections.singleton(fileInfo.physicalName()));
            success = true;
        } catch (CorruptIndexException | IndexFormatTooOldException | IndexFormatTooNewException ex) {
            try {
                store.markStoreCorrupted(ex);
            } catch (IOException e) {
                logger.warn("store cannot be marked as corrupted", e);
            }
            throw ex;
        } finally {
            if (success == false) {
                store.deleteQuiet(fileInfo.physicalName());
            }
        }
    }
}
@Override
public FieldInfos read(Directory directory, String segmentName, String segmentSuffix, IOContext iocontext) throws IOException {
    final String fileName = IndexFileNames.segmentFileName(segmentName, "", PreFlexRWFieldInfosWriter.FIELD_INFOS_EXTENSION);
    IndexInput input = directory.openInput(fileName, iocontext);

    try {
        final int format = input.readVInt();

        if (format > FORMAT_MINIMUM) {
            throw new IndexFormatTooOldException(input, format, FORMAT_MINIMUM, PreFlexRWFieldInfosWriter.FORMAT_CURRENT);
        }
        if (format < PreFlexRWFieldInfosWriter.FORMAT_CURRENT && format != PreFlexRWFieldInfosWriter.FORMAT_PREFLEX_RW) {
            throw new IndexFormatTooNewException(input, format, FORMAT_MINIMUM, PreFlexRWFieldInfosWriter.FORMAT_CURRENT);
        }

        final int size = input.readVInt(); //read in the size
        FieldInfo infos[] = new FieldInfo[size];

        for (int i = 0; i < size; i++) {
            String name = input.readString();
            final int fieldNumber = format == PreFlexRWFieldInfosWriter.FORMAT_PREFLEX_RW ? input.readInt() : i;
            byte bits = input.readByte();
            boolean isIndexed = (bits & PreFlexRWFieldInfosWriter.IS_INDEXED) != 0;
            boolean storeTermVector = (bits & PreFlexRWFieldInfosWriter.STORE_TERMVECTOR) != 0;
            boolean omitNorms = (bits & PreFlexRWFieldInfosWriter.OMIT_NORMS) != 0;
            boolean storePayloads = (bits & PreFlexRWFieldInfosWriter.STORE_PAYLOADS) != 0;
            final IndexOptions indexOptions;
            if (!isIndexed) {
                indexOptions = null;
            } else if ((bits & PreFlexRWFieldInfosWriter.OMIT_TERM_FREQ_AND_POSITIONS) != 0) {
                indexOptions = IndexOptions.DOCS_ONLY;
            } else if ((bits & PreFlexRWFieldInfosWriter.OMIT_POSITIONS) != 0) {
                if (format <= PreFlexRWFieldInfosWriter.FORMAT_OMIT_POSITIONS) {
                    indexOptions = IndexOptions.DOCS_AND_FREQS;
                } else {
                    throw new CorruptIndexException("Corrupt fieldinfos, OMIT_POSITIONS set but format=" + format
                        + " (resource: " + input + ")");
                }
            } else {
                indexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
            }

            // LUCENE-3027: past indices were able to write
            // storePayloads=true when omitTFAP is also true,
            // which is invalid. We correct that, here:
            if (indexOptions != IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) {
                storePayloads = false;
            }
            DocValuesType normType = isIndexed && !omitNorms ? DocValuesType.NUMERIC : null;
            if (format == PreFlexRWFieldInfosWriter.FORMAT_PREFLEX_RW && normType != null) {
                // RW can have norms but doesn't write them
                normType = input.readByte() != 0 ? DocValuesType.NUMERIC : null;
            }
            infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector, omitNorms, storePayloads,
                indexOptions, null, normType, -1, null);
        }

        if (input.getFilePointer() != input.length()) {
            throw new CorruptIndexException("did not read all bytes from file \"" + fileName + "\": read " + input.getFilePointer()
                + " vs size " + input.length() + " (resource: " + input + ")");
        }
        return new FieldInfos(infos);
    } finally {
        input.close();
    }
}
@Override
public FieldInfos read(Directory directory, String segmentName, IOContext iocontext) throws IOException {
    final String fileName = IndexFileNames.segmentFileName(segmentName, "", PreFlexRWFieldInfosWriter.FIELD_INFOS_EXTENSION);
    IndexInput input = directory.openInput(fileName, iocontext);

    try {
        final int format = input.readVInt();

        if (format > FORMAT_MINIMUM) {
            throw new IndexFormatTooOldException(input, format, FORMAT_MINIMUM, PreFlexRWFieldInfosWriter.FORMAT_CURRENT);
        }
        if (format < PreFlexRWFieldInfosWriter.FORMAT_CURRENT && format != PreFlexRWFieldInfosWriter.FORMAT_PREFLEX_RW) {
            throw new IndexFormatTooNewException(input, format, FORMAT_MINIMUM, PreFlexRWFieldInfosWriter.FORMAT_CURRENT);
        }

        final int size = input.readVInt(); //read in the size
        FieldInfo infos[] = new FieldInfo[size];

        for (int i = 0; i < size; i++) {
            String name = input.readString();
            final int fieldNumber = format == PreFlexRWFieldInfosWriter.FORMAT_PREFLEX_RW ? input.readInt() : i;
            byte bits = input.readByte();
            boolean isIndexed = (bits & PreFlexRWFieldInfosWriter.IS_INDEXED) != 0;
            boolean storeTermVector = (bits & PreFlexRWFieldInfosWriter.STORE_TERMVECTOR) != 0;
            boolean omitNorms = (bits & PreFlexRWFieldInfosWriter.OMIT_NORMS) != 0;
            boolean storePayloads = (bits & PreFlexRWFieldInfosWriter.STORE_PAYLOADS) != 0;
            final IndexOptions indexOptions;
            if (!isIndexed) {
                indexOptions = null;
            } else if ((bits & PreFlexRWFieldInfosWriter.OMIT_TERM_FREQ_AND_POSITIONS) != 0) {
                indexOptions = IndexOptions.DOCS_ONLY;
            } else if ((bits & PreFlexRWFieldInfosWriter.OMIT_POSITIONS) != 0) {
                if (format <= PreFlexRWFieldInfosWriter.FORMAT_OMIT_POSITIONS) {
                    indexOptions = IndexOptions.DOCS_AND_FREQS;
                } else {
                    throw new CorruptIndexException("Corrupt fieldinfos, OMIT_POSITIONS set but format=" + format
                        + " (resource: " + input + ")");
                }
            } else {
                indexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
            }

            // LUCENE-3027: past indices were able to write
            // storePayloads=true when omitTFAP is also true,
            // which is invalid. We correct that, here:
            if (indexOptions != IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) {
                storePayloads = false;
            }
            DocValuesType normType = isIndexed && !omitNorms ? DocValuesType.NUMERIC : null;
            if (format == PreFlexRWFieldInfosWriter.FORMAT_PREFLEX_RW && normType != null) {
                // RW can have norms but doesn't write them
                normType = input.readByte() != 0 ? DocValuesType.NUMERIC : null;
            }
            infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector, omitNorms, storePayloads,
                indexOptions, null, normType, null);
        }

        if (input.getFilePointer() != input.length()) {
            throw new CorruptIndexException("did not read all bytes from file \"" + fileName + "\": read " + input.getFilePointer()
                + " vs size " + input.length() + " (resource: " + input + ")");
        }
        return new FieldInfos(infos);
    } finally {
        input.close();
    }
}
@Override
public FieldInfos read(Directory directory, String segmentName, IOContext iocontext) throws IOException {
    final String fileName = IndexFileNames.segmentFileName(segmentName, "", FIELD_INFOS_EXTENSION);
    IndexInput input = directory.openInput(fileName, iocontext);
    boolean success = false;
    try {
        final int format = input.readVInt();

        if (format > FORMAT_MINIMUM) {
            throw new IndexFormatTooOldException(input, format, FORMAT_MINIMUM, FORMAT_CURRENT);
        }
        if (format < FORMAT_CURRENT) {
            throw new IndexFormatTooNewException(input, format, FORMAT_MINIMUM, FORMAT_CURRENT);
        }

        final int size = input.readVInt(); //read in the size
        FieldInfo infos[] = new FieldInfo[size];

        for (int i = 0; i < size; i++) {
            String name = input.readString();
            final int fieldNumber = i;
            byte bits = input.readByte();
            boolean isIndexed = (bits & IS_INDEXED) != 0;
            boolean storeTermVector = (bits & STORE_TERMVECTOR) != 0;
            boolean omitNorms = (bits & OMIT_NORMS) != 0;
            boolean storePayloads = (bits & STORE_PAYLOADS) != 0;
            final IndexOptions indexOptions;
            if (!isIndexed) {
                indexOptions = null;
            } else if ((bits & OMIT_TERM_FREQ_AND_POSITIONS) != 0) {
                indexOptions = IndexOptions.DOCS_ONLY;
            } else if ((bits & OMIT_POSITIONS) != 0) {
                if (format <= FORMAT_OMIT_POSITIONS) {
                    indexOptions = IndexOptions.DOCS_AND_FREQS;
                } else {
                    throw new CorruptIndexException("Corrupt fieldinfos, OMIT_POSITIONS set but format=" + format
                        + " (resource: " + input + ")");
                }
            } else {
                indexOptions = IndexOptions.DOCS_AND_FREQS_AND_POSITIONS;
            }

            // LUCENE-3027: past indices were able to write
            // storePayloads=true when omitTFAP is also true,
            // which is invalid. We correct that, here:
            if (indexOptions != IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) {
                storePayloads = false;
            }
            infos[i] = new FieldInfo(name, isIndexed, fieldNumber, storeTermVector, omitNorms, storePayloads,
                indexOptions, null, isIndexed && !omitNorms ? DocValuesType.NUMERIC : null,
                Collections.<String,String>emptyMap());
        }

        if (input.getFilePointer() != input.length()) {
            throw new CorruptIndexException("did not read all bytes from file \"" + fileName + "\": read " + input.getFilePointer()
                + " vs size " + input.length() + " (resource: " + input + ")");
        }
        FieldInfos fieldInfos = new FieldInfos(infos);
        success = true;
        return fieldInfos;
    } finally {
        if (success) {
            input.close();
        } else {
            IOUtils.closeWhileHandlingException(input);
        }
    }
}