/**
 * This method removes all Lucene files from the given directory. It will first try to delete all commit points /
 * segments files to ensure broken commits or corrupted indices will not be opened in the future. If any of the
 * segments files can't be deleted, this operation fails.
 */
public static void cleanLuceneIndex(Directory directory) throws IOException {
  try (Lock writeLock = directory.obtainLock(IndexWriter.WRITE_LOCK_NAME)) {
    for (final String file : directory.listAll()) {
      if (file.startsWith(IndexFileNames.SEGMENTS) || file.equals(IndexFileNames.OLD_SEGMENTS_GEN)) {
        directory.deleteFile(file); // remove all segments_N files
      }
    }
  }
  try (IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)
      .setMergePolicy(NoMergePolicy.INSTANCE) // no merges
      .setCommitOnClose(false) // no commits
      .setOpenMode(IndexWriterConfig.OpenMode.CREATE))) // force creation - don't append...
  {
    // do nothing and close; this will kick off IndexFileDeleter, which will remove all pending files
  }
}
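// Usage sketch (not part of the source above): how a caller might invoke cleanLuceneIndex to wipe
// a broken shard directory before re-allocating it. `indexPath` is a hypothetical
// java.nio.file.Path; FSDirectory.open is standard Lucene API.
try (Directory dir = FSDirectory.open(indexPath)) {
  cleanLuceneIndex(dir); // segments_N files are deleted first, then IndexWriter clears the rest on close
}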
/** expert: Creates a new writer */
public Lucene45DocValuesConsumer(SegmentWriteState state, String dataCodec, String dataExtension,
                                 String metaCodec, String metaExtension) throws IOException {
  boolean success = false;
  try {
    String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
    data = state.directory.createOutput(dataName, state.context);
    CodecUtil.writeHeader(data, dataCodec, Lucene45DocValuesFormat.VERSION_CURRENT);
    String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
    meta = state.directory.createOutput(metaName, state.context);
    CodecUtil.writeHeader(meta, metaCodec, Lucene45DocValuesFormat.VERSION_CURRENT);
    maxDoc = state.segmentInfo.getDocCount();
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(this);
    }
  }
}
Lucene49NormsConsumer(SegmentWriteState state, String dataCodec, String dataExtension,
                      String metaCodec, String metaExtension) throws IOException {
  maxDoc = state.segmentInfo.getDocCount();
  boolean success = false;
  try {
    String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
    data = state.directory.createOutput(dataName, state.context);
    CodecUtil.writeHeader(data, dataCodec, VERSION_CURRENT);
    String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
    meta = state.directory.createOutput(metaName, state.context);
    CodecUtil.writeHeader(meta, metaCodec, VERSION_CURRENT);
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(this);
    }
  }
}
/** expert: Creates a new writer */
public Lucene49DocValuesConsumer(SegmentWriteState state, String dataCodec, String dataExtension,
                                 String metaCodec, String metaExtension) throws IOException {
  boolean success = false;
  try {
    String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
    data = state.directory.createOutput(dataName, state.context);
    CodecUtil.writeHeader(data, dataCodec, Lucene49DocValuesFormat.VERSION_CURRENT);
    String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
    meta = state.directory.createOutput(metaName, state.context);
    CodecUtil.writeHeader(meta, metaCodec, Lucene49DocValuesFormat.VERSION_CURRENT);
    maxDoc = state.segmentInfo.getDocCount();
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(this);
    }
  }
}
@Override
public SegmentInfo read(Directory directory, String segmentName, IOContext context) throws IOException {
  // NOTE: this is NOT how 3.x is really written...
  String fileName = IndexFileNames.segmentFileName(segmentName, "", Lucene3xSegmentInfoFormat.UPGRADED_SI_EXTENSION);
  boolean success = false;
  IndexInput input = directory.openInput(fileName, context);
  try {
    SegmentInfo si = readUpgradedSegmentInfo(segmentName, directory, input);
    success = true;
    return si;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(input);
    } else {
      input.close();
    }
  }
}
/** Returns file names for shared doc stores, if any, else null. */
public static Set<String> getDocStoreFiles(SegmentInfo info) {
  if (Lucene3xSegmentInfoFormat.getDocStoreOffset(info) != -1) {
    final String dsName = Lucene3xSegmentInfoFormat.getDocStoreSegment(info);
    Set<String> files = new HashSet<>();
    if (Lucene3xSegmentInfoFormat.getDocStoreIsCompoundFile(info)) {
      files.add(IndexFileNames.segmentFileName(dsName, "", COMPOUND_FILE_STORE_EXTENSION));
    } else {
      files.add(IndexFileNames.segmentFileName(dsName, "", Lucene3xStoredFieldsReader.FIELDS_INDEX_EXTENSION));
      files.add(IndexFileNames.segmentFileName(dsName, "", Lucene3xStoredFieldsReader.FIELDS_EXTENSION));
      files.add(IndexFileNames.segmentFileName(dsName, "", Lucene3xTermVectorsReader.VECTORS_INDEX_EXTENSION));
      files.add(IndexFileNames.segmentFileName(dsName, "", Lucene3xTermVectorsReader.VECTORS_FIELDS_EXTENSION));
      files.add(IndexFileNames.segmentFileName(dsName, "", Lucene3xTermVectorsReader.VECTORS_DOCUMENTS_EXTENSION));
    }
    return files;
  } else {
    return null;
  }
}
/** expert: Creates a new writer */
public Lucene410DocValuesConsumer(SegmentWriteState state, String dataCodec, String dataExtension,
                                  String metaCodec, String metaExtension) throws IOException {
  boolean success = false;
  try {
    String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
    data = state.directory.createOutput(dataName, state.context);
    CodecUtil.writeHeader(data, dataCodec, Lucene410DocValuesFormat.VERSION_CURRENT);
    String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
    meta = state.directory.createOutput(metaName, state.context);
    CodecUtil.writeHeader(meta, metaCodec, Lucene410DocValuesFormat.VERSION_CURRENT);
    maxDoc = state.segmentInfo.getDocCount();
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(this);
    }
  }
}
/** Returns an array of strings, one for each file in the directory. */
@Override
public String[] listAll() {
  ensureOpen();
  String[] res;
  if (writer != null) {
    res = writer.listAll();
  } else {
    res = entries.keySet().toArray(new String[entries.size()]);
    // Add the segment name
    String seg = IndexFileNames.parseSegmentName(fileName);
    for (int i = 0; i < res.length; i++) {
      res[i] = seg + res[i];
    }
  }
  return res;
}
public CompletionFieldsConsumer(SegmentWriteState state) throws IOException {
  this.delegatesFieldsConsumer = delegatePostingsFormat.fieldsConsumer(state);
  String suggestFSTFile = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, EXTENSION);
  IndexOutput output = null;
  boolean success = false;
  try {
    output = state.directory.createOutput(suggestFSTFile, state.context);
    CodecUtil.writeHeader(output, CODEC_NAME, SUGGEST_VERSION_CURRENT);
    /*
     * we write the delegate postings format name so we can load it
     * without getting an instance in the ctor
     */
    output.writeString(delegatePostingsFormat.getName());
    output.writeString(writeProvider.getName());
    this.suggestFieldsConsumer = writeProvider.consumer(output);
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(output);
    }
  }
}
/**
 * Verifies that the last file is segments_N and fails otherwise. It also
 * removes and returns the file from the list, because it needs to be handled
 * last, after all other files. This is important in order to guarantee that if a
 * reader sees the new segments_N, all other segment files are already on
 * stable storage.
 * <p>
 * The reason the code fails instead of moving the segments_N file to the end is
 * that an out-of-order file indicates an error in the Revision implementation.
 */
public static String getSegmentsFile(List<String> files, boolean allowEmpty) {
  if (files.isEmpty()) {
    if (allowEmpty) {
      return null;
    } else {
      throw new IllegalStateException("empty list of files not allowed");
    }
  }
  String segmentsFile = files.remove(files.size() - 1);
  if (!segmentsFile.startsWith(IndexFileNames.SEGMENTS) || segmentsFile.equals(IndexFileNames.SEGMENTS_GEN)) {
    throw new IllegalStateException("last file to copy+sync must be segments_N but got " + segmentsFile
        + "; check your Revision implementation!");
  }
  return segmentsFile;
}
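// Usage sketch (not part of the source above): a hypothetical replication client copying a
// revision. All segment files are copied first; segments_N is copied and synced last, so a
// reader can never observe a commit that references missing files. `revisionFileNames`,
// `copyFile`, and `syncDestination` are placeholder names, not real Replicator API.
List<String> files = new ArrayList<>(revisionFileNames);
String segmentsFile = getSegmentsFile(files, false); // also removes segments_N from the list
for (String file : files) {
  copyFile(file);
}
copyFile(segmentsFile);
syncDestination();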
@Test
public void testSegmentsFileLast() throws Exception {
  Directory indexDir = newDirectory();
  IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, null);
  conf.setIndexDeletionPolicy(new SnapshotDeletionPolicy(conf.getIndexDeletionPolicy()));
  IndexWriter indexWriter = new IndexWriter(indexDir, conf);
  Directory taxoDir = newDirectory();
  SnapshotDirectoryTaxonomyWriter taxoWriter = new SnapshotDirectoryTaxonomyWriter(taxoDir);
  try {
    indexWriter.addDocument(newDocument(taxoWriter));
    indexWriter.commit();
    taxoWriter.commit();
    Revision rev = new IndexAndTaxonomyRevision(indexWriter, taxoWriter);
    Map<String, List<RevisionFile>> sourceFiles = rev.getSourceFiles();
    assertEquals(2, sourceFiles.size());
    for (List<RevisionFile> files : sourceFiles.values()) {
      String lastFile = files.get(files.size() - 1).fileName;
      assertTrue(lastFile.startsWith(IndexFileNames.SEGMENTS) && !lastFile.equals(IndexFileNames.SEGMENTS_GEN));
    }
  } finally {
    IOUtils.close(indexWriter, taxoWriter, taxoDir, indexDir);
  }
}
@Test
public void testRevisionRelease() throws Exception {
  // we look to see that certain files are deleted:
  if (sourceDir instanceof MockDirectoryWrapper) {
    ((MockDirectoryWrapper) sourceDir).setEnableVirusScanner(false);
  }
  try {
    replicator.publish(createRevision(1));
    assertTrue(slowFileExists(sourceDir, IndexFileNames.SEGMENTS + "_1"));
    replicator.publish(createRevision(2));
    // now the files of revision 1 can be deleted
    assertTrue(slowFileExists(sourceDir, IndexFileNames.SEGMENTS + "_2"));
    assertFalse("segments_1 should not be found in index directory after revision is released",
        slowFileExists(sourceDir, IndexFileNames.SEGMENTS + "_1"));
  } finally {
    if (sourceDir instanceof MockDirectoryWrapper) {
      // set it back on for other tests
      ((MockDirectoryWrapper) sourceDir).setEnableVirusScanner(true);
    }
  }
}
@Test
public void testSegmentsFileLast() throws Exception {
  Directory dir = newDirectory();
  IndexWriterConfig conf = new IndexWriterConfig(TEST_VERSION_CURRENT, null);
  conf.setIndexDeletionPolicy(new SnapshotDeletionPolicy(conf.getIndexDeletionPolicy()));
  IndexWriter writer = new IndexWriter(dir, conf);
  try {
    writer.addDocument(new Document());
    writer.commit();
    Revision rev = new IndexRevision(writer);
    @SuppressWarnings("unchecked")
    Map<String, List<RevisionFile>> sourceFiles = rev.getSourceFiles();
    assertEquals(1, sourceFiles.size());
    List<RevisionFile> files = sourceFiles.values().iterator().next();
    String lastFile = files.get(files.size() - 1).fileName;
    assertTrue(lastFile.startsWith(IndexFileNames.SEGMENTS) && !lastFile.equals(IndexFileNames.SEGMENTS_GEN));
  } finally {
    IOUtils.close(writer, dir);
  }
}
public FSTTermsWriter(SegmentWriteState state, PostingsWriterBase postingsWriter) throws IOException {
  final String termsFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, TERMS_EXTENSION);
  this.postingsWriter = postingsWriter;
  this.fieldInfos = state.fieldInfos;
  this.out = state.directory.createOutput(termsFileName, state.context);
  boolean success = false;
  try {
    writeHeader(out);
    this.postingsWriter.init(out);
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(out);
    }
  }
}
DirectDocValuesConsumer(SegmentWriteState state, String dataCodec, String dataExtension,
                        String metaCodec, String metaExtension) throws IOException {
  maxDoc = state.segmentInfo.getDocCount();
  boolean success = false;
  try {
    String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
    data = state.directory.createOutput(dataName, state.context);
    CodecUtil.writeHeader(data, dataCodec, VERSION_CURRENT);
    String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
    meta = state.directory.createOutput(metaName, state.context);
    CodecUtil.writeHeader(meta, metaCodec, VERSION_CURRENT);
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(this);
    }
  }
}
MemoryDocValuesConsumer(SegmentWriteState state, String dataCodec, String dataExtension,
                        String metaCodec, String metaExtension, float acceptableOverheadRatio) throws IOException {
  this.acceptableOverheadRatio = acceptableOverheadRatio;
  maxDoc = state.segmentInfo.getDocCount();
  boolean success = false;
  try {
    String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
    data = state.directory.createOutput(dataName, state.context);
    CodecUtil.writeHeader(data, dataCodec, VERSION_CURRENT);
    String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
    meta = state.directory.createOutput(metaName, state.context);
    CodecUtil.writeHeader(meta, metaCodec, VERSION_CURRENT);
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(this);
    }
  }
}
public FSTOrdTermsWriter(SegmentWriteState state, PostingsWriterBase postingsWriter) throws IOException {
  final String termsIndexFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, TERMS_INDEX_EXTENSION);
  final String termsBlockFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, TERMS_BLOCK_EXTENSION);
  this.postingsWriter = postingsWriter;
  this.fieldInfos = state.fieldInfos;
  boolean success = false;
  try {
    this.indexOut = state.directory.createOutput(termsIndexFileName, state.context);
    this.blockOut = state.directory.createOutput(termsBlockFileName, state.context);
    writeHeader(indexOut);
    writeHeader(blockOut);
    this.postingsWriter.init(blockOut);
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(indexOut, blockOut);
    }
  }
}
public FixedGapTermsIndexWriter(SegmentWriteState state) throws IOException {
  final String indexFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, TERMS_INDEX_EXTENSION);
  termIndexInterval = state.termIndexInterval;
  out = state.directory.createOutput(indexFileName, state.context);
  boolean success = false;
  try {
    fieldInfos = state.fieldInfos;
    writeHeader(out);
    out.writeInt(termIndexInterval);
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(out);
    }
  }
}
public BlockTermsWriter(TermsIndexWriterBase termsIndexWriter, SegmentWriteState state,
                        PostingsWriterBase postingsWriter) throws IOException {
  final String termsFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, TERMS_EXTENSION);
  this.termsIndexWriter = termsIndexWriter;
  out = state.directory.createOutput(termsFileName, state.context);
  boolean success = false;
  try {
    fieldInfos = state.fieldInfos;
    writeHeader(out);
    currentField = null;
    this.postingsWriter = postingsWriter;
    // segment = state.segmentName;
    //System.out.println("BTW.init seg=" + state.segmentName);
    postingsWriter.init(out); // have consumer write its format/header
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(out);
    }
  }
}
@Override
public void close() throws IOException {
  wrappedPostingsWriter.close();
  if (wrappedPostingsWriter instanceof PulsingPostingsWriter || VERSION_CURRENT < VERSION_META_ARRAY) {
    return;
  }
  String summaryFileName = IndexFileNames.segmentFileName(segmentState.segmentInfo.name, segmentState.segmentSuffix, SUMMARY_EXTENSION);
  IndexOutput out = null;
  try {
    out = segmentState.directory.createOutput(summaryFileName, segmentState.context);
    CodecUtil.writeHeader(out, CODEC, VERSION_CURRENT);
    out.writeVInt(fields.size());
    for (FieldMetaData field : fields) {
      out.writeVInt(field.fieldNumber);
      out.writeVInt(field.longsSize);
    }
    out.close();
  } finally {
    IOUtils.closeWhileHandlingException(out);
  }
}
@Override
public FieldsProducer fieldsProducer(SegmentReadState readState) throws IOException {
  // Load our ID:
  final String idFileName = IndexFileNames.segmentFileName(readState.segmentInfo.name, readState.segmentSuffix, ID_EXTENSION);
  IndexInput in = readState.directory.openInput(idFileName, readState.context);
  boolean success = false;
  final int id;
  try {
    CodecUtil.checkHeader(in, RAM_ONLY_NAME, VERSION_START, VERSION_LATEST);
    id = in.readVInt();
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(in);
    } else {
      IOUtils.close(in);
    }
  }
  synchronized (state) {
    return state.get(id);
  }
}
TermInfosWriter(Directory directory, String segment, FieldInfos fis, int interval) throws IOException {
  initialize(directory, segment, fis, interval, false);
  boolean success = false;
  try {
    other = new TermInfosWriter(directory, segment, fis, interval, true);
    other.other = this;
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(output);
      try {
        directory.deleteFile(IndexFileNames.segmentFileName(segment, "",
            (isIndex ? Lucene3xPostingsFormat.TERMS_INDEX_EXTENSION : Lucene3xPostingsFormat.TERMS_EXTENSION)));
      } catch (IOException ignored) {
      }
    }
  }
}
public PreFlexRWTermVectorsWriter(Directory directory, String segment, IOContext context) throws IOException {
  this.directory = directory;
  this.segment = segment;
  boolean success = false;
  try {
    // Open files for TermVector storage
    tvx = directory.createOutput(IndexFileNames.segmentFileName(segment, "", Lucene3xTermVectorsReader.VECTORS_INDEX_EXTENSION), context);
    tvx.writeInt(Lucene3xTermVectorsReader.FORMAT_CURRENT);
    tvd = directory.createOutput(IndexFileNames.segmentFileName(segment, "", Lucene3xTermVectorsReader.VECTORS_DOCUMENTS_EXTENSION), context);
    tvd.writeInt(Lucene3xTermVectorsReader.FORMAT_CURRENT);
    tvf = directory.createOutput(IndexFileNames.segmentFileName(segment, "", Lucene3xTermVectorsReader.VECTORS_FIELDS_EXTENSION), context);
    tvf.writeInt(Lucene3xTermVectorsReader.FORMAT_CURRENT);
    success = true;
  } finally {
    if (!success) {
      abort();
    }
  }
}
Lucene42NormsConsumer(SegmentWriteState state, String dataCodec, String dataExtension,
                      String metaCodec, String metaExtension, float acceptableOverheadRatio) throws IOException {
  this.acceptableOverheadRatio = acceptableOverheadRatio;
  maxDoc = state.segmentInfo.getDocCount();
  boolean success = false;
  try {
    String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
    data = state.directory.createOutput(dataName, state.context);
    CodecUtil.writeHeader(data, dataCodec, VERSION_CURRENT);
    String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
    meta = state.directory.createOutput(metaName, state.context);
    CodecUtil.writeHeader(meta, metaCodec, VERSION_CURRENT);
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(this);
    }
  }
}
Lucene42DocValuesConsumer(SegmentWriteState state, String dataCodec, String dataExtension,
                          String metaCodec, String metaExtension, float acceptableOverheadRatio) throws IOException {
  this.acceptableOverheadRatio = acceptableOverheadRatio;
  maxDoc = state.segmentInfo.getDocCount();
  boolean success = false;
  try {
    String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension);
    data = state.directory.createOutput(dataName, state.context);
    // this writer writes the format 4.2 did!
    CodecUtil.writeHeader(data, dataCodec, VERSION_GCD_COMPRESSION);
    String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension);
    meta = state.directory.createOutput(metaName, state.context);
    CodecUtil.writeHeader(meta, metaCodec, VERSION_GCD_COMPRESSION);
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(this);
    }
  }
}
/**
 * This method deletes every file in this store that is not contained in the given source metadata and is not a
 * legacy checksum file. After the delete, it pulls the latest metadata snapshot from the store and compares it
 * to the given snapshot. If the snapshots are inconsistent, an IllegalStateException is thrown.
 *
 * @param reason the reason for this cleanup operation, logged for each deleted file
 * @param sourceMetaData the metadata used for cleanup; all files in this metadata should be kept around
 * @throws IOException if an IOException occurs
 * @throws IllegalStateException if the latest snapshot in this store differs from the given one after the cleanup
 */
public void cleanupAndVerify(String reason, MetadataSnapshot sourceMetaData) throws IOException {
  metadataLock.writeLock().lock();
  try (Lock writeLock = directory.obtainLock(IndexWriter.WRITE_LOCK_NAME)) {
    final StoreDirectory dir = directory;
    for (String existingFile : dir.listAll()) {
      if (Store.isAutogenerated(existingFile) || sourceMetaData.contains(existingFile)) {
        // don't delete snapshot files or the checksums file
        // (note: this is extra protection, since the Store won't delete checksums anyway)
        continue;
      }
      try {
        dir.deleteFile(reason, existingFile); // FNF should not happen since we hold a write lock
      } catch (IOException ex) {
        if (existingFile.startsWith(IndexFileNames.SEGMENTS) || existingFile.equals(IndexFileNames.OLD_SEGMENTS_GEN)) {
          // TODO do we need to also fail this if we can't delete the pending commit file?
          // if one of those files can't be deleted we better fail the cleanup,
          // otherwise we might leave an old commit point around
          throw new IllegalStateException("Can't delete " + existingFile + " - cleanup failed", ex);
        }
        // ignore; we don't really care, the file will get deleted later on
        logger.debug((Supplier<?>) () -> new ParameterizedMessage("failed to delete file [{}]", existingFile), ex);
      }
    }
    final Store.MetadataSnapshot metadataOrEmpty = getMetadata(null);
    verifyAfterCleanup(sourceMetaData, metadataOrEmpty);
  } finally {
    metadataLock.writeLock().unlock();
  }
}
private int numSegmentFiles() { // only for asserts
  int count = 0;
  for (StoreFileMetaData file : this) {
    if (file.name().startsWith(IndexFileNames.SEGMENTS)) {
      count++;
    }
  }
  return count;
}
/**
 * Reads the segments infos from the given commit, failing if they can't be loaded
 */
public static SegmentInfos readSegmentInfos(IndexCommit commit) throws IOException {
  // Using commit.getSegmentsFileName() does NOT work here; we have to
  // build the segments file name manually
  String filename = IndexFileNames.fileNameFromGeneration(IndexFileNames.SEGMENTS, "", commit.getGeneration());
  return SegmentInfos.readCommit(commit.getDirectory(), filename);
}
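// Usage sketch (not part of the source above): reading the SegmentInfos of every commit point
// in a directory via the helper above. DirectoryReader.listCommits is standard Lucene API;
// `directory` and the printout are illustrative only.
for (IndexCommit commit : DirectoryReader.listCommits(directory)) {
  SegmentInfos infos = readSegmentInfos(commit);
  System.out.println("generation=" + infos.getGeneration() + ", segments=" + infos.size());
}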
public static void assertConsistent(Store store, Store.MetadataSnapshot metadata) throws IOException {
  for (String file : store.directory().listAll()) {
    if (!IndexWriter.WRITE_LOCK_NAME.equals(file) && !IndexFileNames.OLD_SEGMENTS_GEN.equals(file)
        && file.startsWith("extra") == false) {
      assertTrue(file + " is not in the map: " + metadata.asMap().size() + " vs. "
          + store.directory().listAll().length, metadata.asMap().containsKey(file));
    } else {
      assertFalse(file + " is in the map: " + metadata.asMap().size() + " vs. "
          + store.directory().listAll().length, metadata.asMap().containsKey(file));
    }
  }
}
/**
 * Prunes the list of index files such that only the latest delete-generation (.liv) file
 * per segment is kept.
 */
private void pruneOldDeleteGenerations(Set<Path> files) {
  final TreeSet<Path> delFiles = new TreeSet<>();
  for (Path file : files) {
    if (file.getFileName().toString().endsWith(".liv")) {
      delFiles.add(file);
    }
  }
  Path last = null;
  for (Path current : delFiles) {
    if (last != null) {
      final String newSegmentName = IndexFileNames.parseSegmentName(current.getFileName().toString());
      final String oldSegmentName = IndexFileNames.parseSegmentName(last.getFileName().toString());
      if (newSegmentName.equals(oldSegmentName)) {
        int oldGen = Integer.parseInt(IndexFileNames.stripExtension(
            IndexFileNames.stripSegmentName(last.getFileName().toString())).replace("_", ""), Character.MAX_RADIX);
        int newGen = Integer.parseInt(IndexFileNames.stripExtension(
            IndexFileNames.stripSegmentName(current.getFileName().toString())).replace("_", ""), Character.MAX_RADIX);
        if (newGen > oldGen) {
          files.remove(last);
        } else {
          files.remove(current);
          continue;
        }
      }
    }
    last = current;
  }
}
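// Illustration (not part of the source above) of the naming convention the pruning relies on:
// a live-docs file is named _<segment>_<generation>.liv, with the generation encoded in
// base 36 (Character.MAX_RADIX). The file name below is an example; the expected values in
// the comments assume the standard IndexFileNames parsing behavior.
String name = "_0_2.liv";
String segment = IndexFileNames.parseSegmentName(name); // "_0"
String genPart = IndexFileNames.stripExtension(
    IndexFileNames.stripSegmentName(name)).replace("_", ""); // "2"
int generation = Integer.parseInt(genPart, Character.MAX_RADIX); // 2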
/** Save a single segment's info. */
@Override
public void write(Directory dir, SegmentInfo si, FieldInfos fis, IOContext ioContext) throws IOException {
  final String fileName = IndexFileNames.segmentFileName(si.name, "", Lucene40SegmentInfoFormat.SI_EXTENSION);
  si.addFile(fileName);
  final IndexOutput output = dir.createOutput(fileName, ioContext);
  boolean success = false;
  try {
    CodecUtil.writeHeader(output, Lucene40SegmentInfoFormat.CODEC_NAME, Lucene40SegmentInfoFormat.VERSION_CURRENT);
    // Write the Lucene version that created this segment, since 3.1
    output.writeString(si.getVersion().toString());
    output.writeInt(si.getDocCount());
    output.writeByte((byte) (si.getUseCompoundFile() ? SegmentInfo.YES : SegmentInfo.NO));
    output.writeStringStringMap(si.getDiagnostics());
    output.writeStringStringMap(Collections.<String, String>emptyMap());
    output.writeStringSet(si.files());
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(output);
      // TODO: why must we do this? do we not get tracking dir wrapper?
      IOUtils.deleteFilesIgnoringExceptions(si.dir, fileName);
    } else {
      output.close();
    }
  }
}
@Override
public DocValuesProducer normsProducer(SegmentReadState state) throws IOException {
  String filename = IndexFileNames.segmentFileName(state.segmentInfo.name, "nrm", IndexFileNames.COMPOUND_FILE_EXTENSION);
  return new Lucene40DocValuesReader(state, filename, Lucene40FieldInfosReader.LEGACY_NORM_TYPE_KEY);
}
public void abort() {
  try {
    close();
  } catch (Throwable ignored) {
  }
  IOUtils.deleteFilesIgnoringExceptions(directory,
      IndexFileNames.segmentFileName(segment, "", FIELDS_EXTENSION),
      IndexFileNames.segmentFileName(segment, "", FIELDS_INDEX_EXTENSION));
}
/** Sole constructor. */
public Lucene40PostingsReader(Directory dir, FieldInfos fieldInfos, SegmentInfo segmentInfo,
                              IOContext ioContext, String segmentSuffix) throws IOException {
  boolean success = false;
  IndexInput freqIn = null;
  IndexInput proxIn = null;
  try {
    freqIn = dir.openInput(IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, Lucene40PostingsFormat.FREQ_EXTENSION), ioContext);
    CodecUtil.checkHeader(freqIn, FRQ_CODEC, VERSION_START, VERSION_CURRENT);
    // TODO: hasProx should (somehow!) become codec private,
    // but it's tricky because 1) FIS.hasProx is global (it
    // could be all fields that have prox are written by a
    // different codec), 2) the field may have had prox in
    // the past but all docs w/ that field were deleted.
    // Really we'd need to init prxOut lazily on write, and
    // then somewhere record that we actually wrote it so we
    // know whether to open on read:
    if (fieldInfos.hasProx()) {
      proxIn = dir.openInput(IndexFileNames.segmentFileName(segmentInfo.name, segmentSuffix, Lucene40PostingsFormat.PROX_EXTENSION), ioContext);
      CodecUtil.checkHeader(proxIn, PRX_CODEC, VERSION_START, VERSION_CURRENT);
    } else {
      proxIn = null;
    }
    this.freqIn = freqIn;
    this.proxIn = proxIn;
    success = true;
  } finally {
    if (!success) {
      IOUtils.closeWhileHandlingException(freqIn, proxIn);
    }
  }
}