/**
 * Creates a JAR file under the project directory at {@code relativePath}, fills it with the given
 * (content, entry-name) pairs, and returns the JAR root as a {@link VirtualFile} via {@link JarFileSystem}.
 *
 * @param relativePath   path relative to the project root; must end with the "jar" extension
 * @param contentEntries entry payloads paired with their in-JAR names
 * @return the root of the created JAR inside the jar file system (never null)
 * @throws IOException if the file or any entry cannot be written
 */
@NotNull
protected VirtualFile createProjectJarSubFile(String relativePath, Pair<ByteSequence, String>... contentEntries) throws IOException {
  assertTrue("Use 'jar' extension for JAR files: '" + relativePath + "'", FileUtilRt.extensionEquals(relativePath, "jar"));
  File f = new File(getProjectPath(), relativePath);
  FileUtil.ensureExists(f.getParentFile());
  FileUtil.ensureCanCreateFile(f);
  final boolean created = f.createNewFile();
  if (!created) {
    throw new AssertionError("Unable to create the project sub file: " + f.getAbsolutePath());
  }

  Manifest manifest = new Manifest();
  manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0");
  // try-with-resources: the original leaked the stream (keeping the JAR locked on Windows)
  // if addJarEntry threw before the explicit close() was reached.
  try (JarOutputStream target = new JarOutputStream(new FileOutputStream(f), manifest)) {
    for (Pair<ByteSequence, String> contentEntry : contentEntries) {
      addJarEntry(contentEntry.first.getBytes(), contentEntry.second, target);
    }
  }

  final VirtualFile virtualFile = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(f);
  assertNotNull(virtualFile);
  final VirtualFile jarFile = JarFileSystem.getInstance().getJarRootForLocalFile(virtualFile);
  assertNotNull(jarFile);
  return jarFile;
}
/**
 * Queues a compressed write of {@code bytes} for {@code record}, or performs it synchronously.
 * When zipping is disabled the write is delegated to the plain storage. Otherwise the request is
 * submitted to a background executor unless the total size of pending requests already exceeds
 * MAX_PENDING_WRITE_SIZE, in which case the write happens inline to apply back-pressure.
 */
@Override
public void writeBytes(final int record, final ByteSequence bytes, final boolean fixedSize) throws IOException {
  if (myDoNotZipCaches) {
    // Compression disabled: write straight through.
    super.writeBytes(record, bytes, fixedSize);
    return;
  }
  // Ensure any previously scheduled write for the same record has completed,
  // so writes to one record are applied in submission order.
  waitForPendingWriteForRecord(record);
  synchronized (myLock) {
    // Accounted here; zipAndWrite subtracts the same amount when the request completes.
    myPendingWriteRequestsSize += bytes.getLength();
    if (myPendingWriteRequestsSize > MAX_PENDING_WRITE_SIZE) {
      // Too much data in flight: do the compression + write on the caller's thread.
      zipAndWrite(bytes, record, fixedSize);
    } else {
      // Schedule asynchronously; the Future is kept so waitForPendingWriteForRecord can join it.
      myPendingWriteRequests.put(record, myPendingWriteRequestsExecutor.submit(new Callable<Object>() {
        @Override
        public Object call() throws IOException {
          zipAndWrite(bytes, record, fixedSize);
          return null;
        }
      }));
    }
  }
}
/**
 * Compresses {@code bytes} with DEFLATE and stores the result for {@code record},
 * then retires the corresponding pending-write bookkeeping.
 */
private void zipAndWrite(ByteSequence bytes, int record, boolean fixedSize) throws IOException {
  // Deflate into an in-memory buffer first, outside of the storage lock.
  BufferExposingByteArrayOutputStream compressed = new BufferExposingByteArrayOutputStream();
  try (DeflaterOutputStream deflater = new DeflaterOutputStream(compressed)) {
    deflater.write(bytes.getBytes(), bytes.getOffset(), bytes.getLength());
  }
  // Publish the compressed record and drop the pending request under the lock.
  synchronized (myLock) {
    doWrite(record, fixedSize, compressed);
    myPendingWriteRequestsSize -= bytes.getLength();
    myPendingWriteRequests.remove(record);
  }
}
/**
 * Appends {@code bytes} to the end of {@code record}'s payload. If the record's allocated
 * capacity is large enough the data is written in place; otherwise the record is rewritten
 * with old and new content concatenated.
 */
protected void appendBytes(int record, ByteSequence bytes) throws IOException {
  final int delta = bytes.getLength();
  if (delta == 0) return;
  synchronized (myLock) {
    final int capacity = myRecordsTable.getCapacity(record);
    final int oldSize = myRecordsTable.getSize(record);
    final int newSize = oldSize + delta;
    if (newSize <= capacity) {
      // Fits into the already-allocated space: write directly after the existing payload.
      final long appendAddress = myRecordsTable.getAddress(record) + oldSize;
      myDataTable.writeBytes(appendAddress, bytes.getBytes(), bytes.getOffset(), bytes.getLength());
      myRecordsTable.setSize(record, newSize);
      return;
    }
    if (oldSize == 0) {
      // Nothing stored yet: a plain write allocates the needed space for us.
      writeBytes(record, bytes, false);
      return;
    }
    // Must grow: concatenate the existing content with the new bytes and rewrite the record.
    final byte[] merged = new byte[newSize];
    System.arraycopy(readBytes(record), 0, merged, 0, oldSize);
    System.arraycopy(bytes.getBytes(), bytes.getOffset(), merged, oldSize, delta);
    writeBytes(record, new ByteSequence(merged), false);
  }
}
/**
 * Detects Unity3D asset files: assets whose decoded text starts with the "%YAML" directive are
 * YAML assets; any other asset (including undecodable content) is treated as a binary asset.
 *
 * @return the asset file type, or null when the extension is not a known asset extension
 */
@Nullable
@Override
public FileType detect(@NotNull VirtualFile file, @NotNull ByteSequence firstBytes, @Nullable CharSequence firstCharsIfText) {
  if (ArrayUtil.contains(file.getExtension(), ourAssetExtensions)) {
    if (firstCharsIfText == null) {
      // Content is not decodable as text -> serialized (binary) asset.
      return Unity3dBinaryAssetFileType.INSTANCE;
    }
    // Fix: use >= 5 so content of exactly five characters ("%YAML") is recognized too;
    // the original '> 5' off-by-one classified such files as binary.
    if (firstCharsIfText.length() >= 5) {
      CharSequence sequence = firstCharsIfText.subSequence(0, 5);
      if (StringUtil.equals("%YAML", sequence)) {
        return Unity3dYMLAssetFileType.INSTANCE;
      }
    }
    return Unity3dBinaryAssetFileType.INSTANCE;
  }
  return null;
}
/**
 * Flushes the buffered attribute content to the attributes storage when this stream is closed.
 * The write is serialized per attribute via {@code myAttributeId}; the page lookup happens under
 * the global write lock {@code w}, while the actual byte write occurs outside it.
 */
@Override
public void close() throws IOException {
  super.close();
  try {
    // Serialize concurrent writers of the same attribute (myAttributeId is a shared interned object).
    synchronized (myAttributeId) {
      // 'out' is known to be a BufferExposingByteArrayOutputStream, so its internal
      // buffer can be handed to the storage without copying.
      final BufferExposingByteArrayOutputStream _out = (BufferExposingByteArrayOutputStream)out;
      final int page;
      try {
        w.lock();
        incModCount(myFileId);
        // true -> create the attribute page if the file does not have one yet.
        page = findAttributePage(myFileId, myAttributeId, true);
      }
      finally {
        w.unlock();
      }
      getAttributesStorage().writeBytes(page, new ByteSequence(_out.getInternalBuffer(), 0, _out.size()), myFixedSize);
    }
  }
  catch (Throwable e) {
    // Route all failures through the connection-level handler (may mark the VFS as corrupted).
    throw DbConnection.handleError(e);
  }
}
/**
 * Opens (or initializes) the value storage file backing a PersistentHashMap.
 * A brand-new file gets a header record appended so that offset 0 is never a valid value offset.
 *
 * @param path path of the storage file
 * @throws IOException if the header record cannot be written
 */
public PersistentHashMapValueStorage(String path) throws IOException {
  myPath = path;
  myFile = new File(path);
  mySize = myFile.length();  // volatile write
  if (mySize == 0) {
    appendBytes(new ByteSequence("Header Record For PersistentHashMapValueStorage".getBytes()), 0);

    // avoid corruption issue when disk fails to write first record synchronously, code depends on correct value of mySize (IDEA-106306)
    // Force the cached appender stream (if any) to sync so the length check below sees real bytes on disk.
    CacheValue<DataOutputStream> streamCacheValue = ourAppendersCache.getIfCached(myPath);
    if (streamCacheValue != null) {
      try {
        IOUtil.syncStream(streamCacheValue.get());
      }
      catch (IOException e) {
        throw new RuntimeException(e);
      }
      finally {
        streamCacheValue.release();
      }
    }

    // If the on-disk length diverged from the in-memory size, trust the file.
    long currentLength = myFile.length();
    if (currentLength != mySize) Logger.getInstance(getClass().getName()).info("Avoided PSHM corruption due to write failure");
    mySize = currentLength;  // volatile write
  }
}
private boolean processFirstBytes(@Nonnull final InputStream stream, final int length, @Nonnull Processor<ByteSequence> processor) throws IOException { final byte[] bytes = FileUtilRt.getThreadLocalBuffer(); assert bytes.length >= length : "Cannot process more than " + bytes.length + " in one call, requested:" + length; int n = stream.read(bytes, 0, length); if (n <= 0) { // maybe locked because someone else is writing to it // repeat inside read action to guarantee all writes are finished if (toLog()) { log("F: processFirstBytes(): inputStream.read() returned " + n + "; retrying with read action. stream=" + streamInfo(stream)); } n = ApplicationManager.getApplication().runReadAction((ThrowableComputable<Integer, IOException>)() -> stream.read(bytes, 0, length)); if (toLog()) { log("F: processFirstBytes(): under read action inputStream.read() returned " + n + "; stream=" + streamInfo(stream)); } if (n <= 0) { return false; } } return processor.process(new ByteSequence(bytes, 0, n)); }
/**
 * Loads the serialized forward-index content for {@code hashId}, preferring shared-index data
 * when enabled. In DO_CHECKS mode the shared copy is cross-validated against the local map and
 * repaired (local copy wins) when they disagree.
 */
private ByteSequence readContents(Integer hashId) throws IOException {
  if (SharedIndicesData.ourFileSharedIndicesEnabled) {
    if (SharedIndicesData.DO_CHECKS) {
      synchronized (myContents) {
        ByteSequence contentBytes = SharedIndicesData.recallContentData(hashId, myIndexId, ByteSequenceDataExternalizer.INSTANCE);
        ByteSequence contentBytesFromContents = myContents.get(hashId);
        // Mismatch between the shared store and the local map: re-associate the local data.
        if ((contentBytes == null && contentBytesFromContents != null) ||
            !Comparing.equal(contentBytesFromContents, contentBytes)) {
          SharedIndicesData.associateContentData(hashId, myIndexId, contentBytesFromContents, ByteSequenceDataExternalizer.INSTANCE);
          if (contentBytes != null) {
            // Shared data existed but differed -- this indicates an indexing inconsistency.
            LOG.error("Unexpected indexing diff with hashid " + myIndexId + "," + hashId);
          }
          contentBytes = contentBytesFromContents;
        }
        return contentBytes;
      }
    } else {
      return SharedIndicesData.recallContentData(hashId, myIndexId, ByteSequenceDataExternalizer.INSTANCE);
    }
  }
  // Shared indices disabled: read from the local persistent map only.
  return myContents.get(hashId);
}
private boolean processFirstBytes(@NotNull final InputStream stream, final int length, @NotNull Processor<ByteSequence> processor) throws IOException { final byte[] bytes = FileUtilRt.getThreadLocalBuffer(); assert bytes.length >= length : "Cannot process more than " + bytes.length + " in one call, requested:" + length; int n = stream.read(bytes, 0, length); if (n <= 0) { // maybe locked because someone else is writing to it // repeat inside read action to guarantee all writes are finished if (toLog()) { log("F: inputStream.read() returned "+n+"; retrying with read action. stream="+ streamInfo(stream)); } n = ApplicationManager.getApplication().runReadAction(new ThrowableComputable<Integer, IOException>() { @Override public Integer compute() throws IOException { return stream.read(bytes, 0, length); } }); if (toLog()) { log("F: under read action inputStream.read() returned "+n+"; stream="+ streamInfo(stream)); } if (n <= 0) { return false; } } return processor.process(new ByteSequence(bytes, 0, n)); }
private static boolean guessIfText(@NotNull VirtualFile file, @NotNull ByteSequence byteSequence) { byte[] bytes = byteSequence.getBytes(); Trinity<Charset, CharsetToolkit.GuessedEncoding, byte[]> guessed = LoadTextUtil.guessFromContent(file, bytes, byteSequence.getLength()); if (guessed == null) return false; file.setBOM(guessed.third); if (guessed.first != null) { // charset was detected unambiguously return true; } // use wild guess CharsetToolkit.GuessedEncoding guess = guessed.second; return guess != null && (guess == CharsetToolkit.GuessedEncoding.VALID_UTF8 || guess == CharsetToolkit.GuessedEncoding.SEVEN_BIT); }
/**
 * Writes the given content bytes for {@code fileId} into the content storage.
 * Any failure is funneled through the connection-level error handler.
 */
public static void writeContent(int fileId, ByteSequence bytes, boolean readOnly) throws IOException {
  try {
    final ContentOutputStream contentStream = new ContentOutputStream(fileId, readOnly);
    contentStream.writeBytes(bytes);
  }
  catch (Throwable e) {
    throw DbConnection.handleError(e);
  }
}
/**
 * Flushes the buffered content to the storage when this stream is closed.
 * Failures are routed through the connection-level error handler.
 */
@Override
public void close() throws IOException {
  super.close();
  try {
    // 'out' is known to be a BufferExposingByteArrayOutputStream, so the accumulated
    // bytes can be handed over without copying the backing array.
    final BufferExposingByteArrayOutputStream buffer = (BufferExposingByteArrayOutputStream)out;
    final ByteSequence content = new ByteSequence(buffer.getInternalBuffer(), 0, buffer.size());
    writeBytes(content);
  }
  catch (Throwable e) {
    throw DbConnection.handleError(e);
  }
}
/**
 * Stores {@code bytes} as the full content of {@code record}. Reuses the record's current
 * allocation when it is large enough; otherwise reclaims the old space and allocates a new
 * block, sized exactly for fixed-size records or via the capacity-allocation policy otherwise.
 */
public void writeBytes(int record, ByteSequence bytes, boolean fixedSize) throws IOException {
  synchronized (myLock) {
    final int requiredLength = bytes.getLength();
    final int currentCapacity = myRecordsTable.getCapacity(record);
    final int currentSize = myRecordsTable.getSize(record);
    assert currentSize >= 0;

    // Writing nothing over nothing is a no-op.
    if (requiredLength == 0 && currentSize == 0) return;

    final long address;
    if (currentCapacity >= requiredLength) {
      // Existing allocation is big enough: overwrite in place.
      address = myRecordsTable.getAddress(record);
    }
    else {
      // Too small: release the old block and allocate a new one.
      myDataTable.reclaimSpace(currentCapacity);

      // Fixed-size records get an exact allocation; variable-size records over-allocate
      // per policy so future growth can happen in place.
      int newCapacity = fixedSize ? requiredLength:myCapacityAllocationPolicy.calculateCapacity(requiredLength);
      if (newCapacity < requiredLength) newCapacity = requiredLength;
      address = myDataTable.allocateSpace(newCapacity);

      myRecordsTable.setAddress(record, address);
      myRecordsTable.setCapacity(record, newCapacity);
    }

    myDataTable.writeBytes(address, bytes.getBytes(), bytes.getOffset(), bytes.getLength());
    myRecordsTable.setSize(record, requiredLength);
  }
}
/**
 * Overwrites part of {@code record}'s payload, starting at {@code offset}, with {@code bytes}.
 * The replaced region must lie entirely within the record's current size; the record's size
 * and allocation are left untouched.
 */
public void replaceBytes(int record, int offset, ByteSequence bytes) throws IOException {
  synchronized (myLock) {
    final int replacedLength = bytes.getLength();
    final int currentSize = myRecordsTable.getSize(record);
    assert currentSize >= 0;
    // The write must stay inside the existing payload.
    assert offset + bytes.getLength() <= currentSize;
    if (replacedLength == 0) return;

    final long base = myRecordsTable.getAddress(record);
    myDataTable.writeBytes(base + offset, bytes.getBytes(), bytes.getOffset(), bytes.getLength());
  }
}
/**
 * Opens (or initializes) the value storage file backing a PersistentHashMap, optionally wrapped
 * in a compressed appendable file. A brand-new storage gets a header record appended so that
 * offset 0 is never a valid value offset.
 *
 * @param path path of the storage file
 * @throws IOException if the header record cannot be written
 */
public PersistentHashMapValueStorage(String path) throws IOException {
  myExceptionalIOCancellationCallback = CreationTimeOptions.EXCEPTIONAL_IO_CANCELLATION.get();
  myPath = path;
  myFile = new File(path);
  myCompressedAppendableFile = COMPRESSION_ENABLED ? new MyCompressedAppendableFile() : null;
  if (myCompressedAppendableFile != null) {
    mySize = myCompressedAppendableFile.length();  // volatile write
  }
  else {
    mySize = myFile.length();  // volatile write
  }

  if (mySize == 0) {
    appendBytes(new ByteSequence("Header Record For PersistentHashMapValueStorage".getBytes()), 0);

    // avoid corruption issue when disk fails to write first record synchronously or unexpected first write file increase (IDEA-106306),
    // code depends on correct value of mySize
    // Sync the cached appender stream (if any) so the length check below sees real on-disk bytes.
    FileAccessorCache.Handle<DataOutputStream> streamCacheValue = ourAppendersCache.getIfCached(myPath);
    if (streamCacheValue != null) {
      try {
        IOUtil.syncStream(streamCacheValue.get());
      }
      catch (IOException e) {
        throw new RuntimeException(e);
      }
      finally {
        streamCacheValue.release();
      }
    }

    long currentLength = myFile.length();
    if (currentLength > mySize) {  // if real file length (unexpectedly) increases
      Logger.getInstance(getClass().getName()).info("Avoided PSHM corruption due to write failure");
      mySize = currentLength;  // volatile write
    }
  }
}
/**
 * Opens the persistent (hashId -> serialized keys/values) "values" map for this index,
 * or returns null when snapshot mapping is not used for it.
 *
 * @return the opened map, or null when snapshot mapping is disabled or the index has no id
 * @throws IOException if the map exists but cannot be opened; its files are deleted first
 *                     so a rebuild can recreate it from scratch
 */
private PersistentHashMap<Integer, ByteSequence> createContentsIndex() throws IOException {
  final File saved = myHasSnapshotMapping && myIndexId != null
                     ? new File(IndexInfrastructure.getPersistentIndexRootDir(myIndexId), "values")
                     : null;
  if (saved == null) return null;
  try {
    // Diamond operator: type arguments are inferable from the field/return type.
    return new PersistentHashMap<>(saved, EnumeratorIntegerDescriptor.INSTANCE, ByteSequenceDataExternalizer.INSTANCE);
  }
  catch (IOException ex) {
    // A map that fails to open is unusable; remove its files so it can be rebuilt.
    IOUtil.deleteAllFilesStartingWith(saved);
    throw ex;
  }
}
/**
 * Decodes the serialized forward-index data back into a key/value map. The stream stores a pair
 * count followed by (value, keys-that-map-to-it) groups, which are read until the buffer drains.
 */
private Map<Key, Value> deserializeSavedPersistentData(ByteSequence bytes) throws IOException {
  final DataInputStream in = new DataInputStream(new UnsyncByteArrayInputStream(bytes.getBytes(), bytes.getOffset(), bytes.getLength()));
  final int pairs = DataInputOutputUtil.readINT(in);
  if (pairs == 0) return Collections.emptyMap();

  final Map<Key, Value> result = new THashMap<Key, Value>(pairs);
  while (in.available() > 0) {
    final Value value = myValueExternalizer.read(in);
    for (Key key : mySnapshotIndexExternalizer.read(in)) {
      result.put(key, value);
    }
  }
  return result;
}
/**
 * Detects extensionless Groovy scripts by their "#!...groovy" shebang line.
 *
 * @return the Groovy file type, or null when the content has no matching shebang
 */
@Nullable
@Override
public FileType detect(@NotNull VirtualFile file, @NotNull ByteSequence firstBytes, @Nullable CharSequence firstCharsIfText) {
  return FileUtil.isHashBangLine(firstCharsIfText, "groovy") ? GroovyFileType.GROOVY_FILE_TYPE : null;
}
/**
 * Detects extensionless Node.js scripts by their "#!/usr/bin/env node" shebang line.
 *
 * @return the JavaScript file type, or null when the content has no matching shebang
 */
@Nullable
@Override
public FileType detect(@NotNull VirtualFile file, @NotNull ByteSequence byteSequence, @Nullable CharSequence charSequence) {
  return FileUtil.isHashBangLine(charSequence, "/usr/bin/env node") ? JavaScriptFileType.INSTANCE : null;
}
private static boolean guessIfText(VirtualFile file, ByteSequence byteSequence) { byte[] bytes = byteSequence.getBytes(); Trinity<Charset, CharsetToolkit.GuessedEncoding, byte[]> guessed = LoadTextUtil.guessFromContent(file, bytes, byteSequence.getLength()); if (guessed == null) return false; file.setBOM(guessed.third); if (guessed.first != null) { // charset was detected unambiguously return true; } // use wild guess CharsetToolkit.GuessedEncoding guess = guessed.second; return guess != null && guess != CharsetToolkit.GuessedEncoding.INVALID_UTF8; }
/**
 * Stores this file's content bytes in the ref-counting content storage. With content hashes
 * enabled, identical content is deduplicated: findOrCreateContentRecord returns a positive page
 * for an already-stored record (nothing more to write) and a negative page for a freshly created
 * one. Without hashes, a record shared with another file (refcount > 1) is replaced by a new one
 * (copy-on-write). The actual byte write happens outside the global write lock.
 */
public void writeBytes(ByteSequence bytes) throws IOException {
  int page;
  RefCountingStorage contentStorage = getContentStorage();
  final boolean fixedSize;
  try {
    w.lock();
    incModCount(myFileId);
    checkFileIsValid(myFileId);

    if (weHaveContentHashes) {
      // Deduplicating path: sign of 'page' encodes existing (+) vs newly created (-) record.
      page = findOrCreateContentRecord(bytes.getBytes(), bytes.getOffset(), bytes.getLength());

      incModCount(myFileId);
      checkFileIsValid(myFileId);

      setContentRecordId(myFileId, page > 0 ? page : -page);

      // Positive page: identical content already stored, nothing to write.
      if (page > 0) return;
      page = -page;
      fixedSize = true;
    }
    else {
      page = getContentRecordId(myFileId);
      if (page == 0 || contentStorage.getRefCount(page) > 1) {
        // No record yet, or it is shared with another file: allocate a private one.
        page = contentStorage.acquireNewRecord();
        setContentRecordId(myFileId, page);
      }
      fixedSize = myFixedSize;
    }
  }
  finally {
    w.unlock();
  }

  // Write outside the lock; 'page' is already published as this file's content record.
  contentStorage.writeBytes(page, bytes, fixedSize);
}
private static boolean guessIfText(@Nonnull VirtualFile file, @Nonnull ByteSequence byteSequence) { byte[] bytes = byteSequence.getBytes(); Trinity<Charset, CharsetToolkit.GuessedEncoding, byte[]> guessed = LoadTextUtil.guessFromContent(file, bytes, byteSequence.getLength()); if (guessed == null) return false; file.setBOM(guessed.third); if (guessed.first != null) { // charset was detected unambiguously return true; } // use wild guess CharsetToolkit.GuessedEncoding guess = guessed.second; return guess != null && (guess == CharsetToolkit.GuessedEncoding.VALID_UTF8 || guess == CharsetToolkit.GuessedEncoding.SEVEN_BIT); }
/**
 * Opens (or initializes) the value storage file backing a PersistentHashMap, honoring the
 * creation-time options (cancellation callback, read-only mode, chunk-compaction strategy) and
 * optionally wrapping the file in a compressed appendable file. A brand-new writable storage
 * gets a header record appended so that offset 0 is never a valid value offset.
 *
 * @param path path of the storage file
 * @throws IOException if the header record cannot be written
 */
public PersistentHashMapValueStorage(String path) throws IOException {
  myExceptionalIOCancellationCallback = CreationTimeOptions.EXCEPTIONAL_IO_CANCELLATION.get();
  myReadOnly = CreationTimeOptions.READONLY.get() == Boolean.TRUE;
  myCompactChunksWithValueDeserialization = CreationTimeOptions.COMPACT_CHUNKS_WITH_VALUE_DESERIALIZATION.get() == Boolean.TRUE;
  myPath = path;
  myFile = new File(path);
  myCompressedAppendableFile = COMPRESSION_ENABLED ? new MyCompressedAppendableFile() : null;
  if (myCompressedAppendableFile != null) {
    mySize = myCompressedAppendableFile.length();  // volatile write
  }
  else {
    mySize = myFile.length();  // volatile write
  }

  // Only a writable, empty storage gets the header record.
  if (mySize == 0 && !myReadOnly) {
    appendBytes(new ByteSequence("Header Record For PersistentHashMapValueStorage".getBytes()), 0);

    // avoid corruption issue when disk fails to write first record synchronously or unexpected first write file increase (IDEA-106306),
    // code depends on correct value of mySize
    // Sync the cached appender stream (if any) so the length check below sees real on-disk bytes.
    FileAccessorCache.Handle<DataOutputStream> streamCacheValue = ourAppendersCache.getIfCached(myPath);
    if (streamCacheValue != null) {
      try {
        IOUtil.syncStream(streamCacheValue.get());
      }
      catch (IOException e) {
        throw new RuntimeException(e);
      }
      finally {
        streamCacheValue.release();
      }
    }

    long currentLength = myFile.length();
    if (currentLength > mySize) {  // if real file length (unexpectedly) increases
      Logger.getInstance(getClass().getName()).info("Avoided PSHM corruption due to write failure");
      mySize = currentLength;  // volatile write
    }
  }
}
/**
 * Rewrites a chunked value as a single contiguous chunk during compaction and returns the new
 * value offset. Depending on configuration the chunks are either deserialized/re-serialized
 * through the appender (canonicalizing the bytes) or concatenated raw. Maintains chunk-removal
 * statistics and optionally reports them every 1000 processed chunks.
 */
long compactChunks(PersistentHashMap.ValueDataAppender appender, ReadResult result) throws IOException {
  // Gives the creation-time cancellation callback a chance to abort long compactions.
  checkCancellation();
  long startedTime = ourDumpChunkRemovalTime ? System.nanoTime() : 0;
  long newValueOffset;

  if (myCompactChunksWithValueDeserialization) {
    // Re-serialize through the appender: produces canonical bytes, possibly smaller than the raw chunks.
    final BufferExposingByteArrayOutputStream stream = new BufferExposingByteArrayOutputStream(result.buffer.length);
    DataOutputStream testStream = new DataOutputStream(stream);
    appender.append(testStream);
    newValueOffset = appendBytes(stream.getInternalBuffer(), 0, stream.size(), 0);
    myChunksBytesAfterRemoval += stream.size();
  }
  else {
    // Fast path: append the already-concatenated chunk bytes verbatim.
    newValueOffset = appendBytes(new ByteSequence(result.buffer), 0);
    myChunksBytesAfterRemoval += result.buffer.length;
  }

  if (ourDumpChunkRemovalTime) {
    myChunksRemovalTime += System.nanoTime() - startedTime;

    // Report compaction progress at most once per 1000 chunks.
    if (myChunks - myLastReportedChunksCount > 1000) {
      myLastReportedChunksCount = myChunks;
      System.out.println(myChunks + " chunks were read " + (myChunksReadingTime / 1000000) +
                         "ms, bytes: " + myChunksOriginalBytes +
                         (myChunksOriginalBytes != myChunksBytesAfterRemoval ? "->" + myChunksBytesAfterRemoval : "") +
                         " compaction:" + (myChunksRemovalTime / 1000000) + "ms in " + myPath);
    }
  }

  return newValueOffset;
}
/**
 * Returns the key/value data previously indexed for {@code inputId}, resolved through the
 * input -> content-hash -> serialized-data chain. A miss at any step yields an empty map.
 */
@Nonnull
public Map<Key, Value> readInputKeys(int inputId) throws IOException {
  final Integer hashId = readInputHashId(inputId);
  if (hashId == null) return Collections.emptyMap();

  final ByteSequence savedData = readContents(hashId);
  if (savedData == null) return Collections.emptyMap();

  return deserializeSavedPersistentData(savedData);
}
/**
 * Opens the persistent (hashId -> serialized keys/values) "values" map for this index.
 * If opening fails, the map's files are deleted so a rebuild can recreate it, and the
 * original exception is rethrown.
 */
private PersistentHashMap<Integer, ByteSequence> createContentsIndex() throws IOException {
  final File valuesFile = new File(IndexInfrastructure.getPersistentIndexRootDir(myIndexId), "values");
  try {
    return new PersistentHashMap<>(valuesFile, EnumeratorIntegerDescriptor.INSTANCE, ByteSequenceDataExternalizer.INSTANCE);
  }
  catch (IOException e) {
    // Unreadable map: remove its files so the index can be rebuilt from scratch.
    IOUtil.deleteAllFilesStartingWith(valuesFile);
    throw e;
  }
}
/**
 * Decodes the serialized forward-index data back into a key/value map. The stream stores a pair
 * count followed by (value, keys-that-map-to-it) groups, which are read until the buffer drains.
 */
private Map<Key, Value> deserializeSavedPersistentData(ByteSequence bytes) throws IOException {
  final DataInputStream in = new DataInputStream(new UnsyncByteArrayInputStream(bytes.getBytes(), bytes.getOffset(), bytes.getLength()));
  final int pairCount = DataInputOutputUtil.readINT(in);
  if (pairCount == 0) return Collections.emptyMap();

  final Map<Key, Value> result = new THashMap<>(pairCount);
  while (in.available() > 0) {
    final Value value = myIndexExtension.getValueExternalizer().read(in);
    final Collection<Key> mappedKeys = mySnapshotIndexExternalizer.read(in);
    for (Key key : mappedKeys) {
      result.put(key, value);
    }
  }
  return result;
}