/**
 * Inserts a publication annex document.
 */
public void insertAnnexDocument(BinaryFile bf, String dateString) {
    try {
        GridFS gfs = new GridFS(db, MongoCollectionsInterface.PUB_ANNEXES);
        // Remove any previous copy of this annex before inserting the new one.
        BasicDBObject whereQuery = new BasicDBObject();
        whereQuery.put("repositoryDocId", bf.getRepositoryDocId());
        whereQuery.put("filename", bf.getFileName());
        gfs.remove(whereQuery); //version ?
        GridFSInputFile gfsFile = gfs.createFile(bf.getStream(), true);
        gfsFile.put("uploadDate", Utilities.parseStringDate(dateString));
        gfsFile.setFilename(bf.getFileName());
        gfsFile.put("source", bf.getSource());
        gfsFile.put("version", bf.getRepositoryDocVersion());
        gfsFile.put("repositoryDocId", bf.getRepositoryDocId());
        gfsFile.put("anhalyticsId", bf.getAnhalyticsId());
        gfsFile.save();
    } catch (ParseException e) {
        logger.error(e.getMessage(), e.getCause());
    }
}
public DataAddress uploadData(String data, DataAddress dataAddress) throws UnknownHostException {
    ServerAddress server = new ServerAddress(dataAddress.hostname, dataAddress.port);
    GridFS database = connectToDatabase(server);
    logger.info("Database connected");
    GridFSInputFile file = database.createFile(data.getBytes());
    int newID = getNextId(database);
    logger.info("Got new id for uploaded file: " + newID);
    file.setFilename(String.valueOf(newID));
    file.put("_id", newID);
    file.save();
    logger.info("File saved");
    return new DataAddress(dataAddress.hostname, dataAddress.port, newID);
}
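The getNextId helper is not shown in this snippet; below is a minimal sketch of what it might look like, assuming every stored file uses an integer _id as uploadData does. The scan-for-max strategy is an illustration, not the original implementation.

// Hypothetical helper: derive the next integer id by scanning existing files.
// Not safe under concurrent writers; a dedicated counters collection would be.
private int getNextId(GridFS database) {
    int max = 0;
    for (GridFSDBFile f : database.find(new BasicDBObject())) {
        Object id = f.getId();
        if (id instanceof Integer) {
            max = Math.max(max, (Integer) id);
        }
    }
    return max + 1;
}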
@Override
public Boolean storeBlob(CallingContext context, String docPath, InputStream newContent, Boolean append) {
    GridFS gridFS = getGridFS();
    GridFSInputFile file;
    if (!append) {
        gridFS.remove(docPath);
        file = createNewFile(docPath, newContent);
    } else {
        GridFSDBFile existing = gridFS.findOne(docPath);
        if (existing != null) {
            try {
                file = updateExisting(context, docPath, newContent, gridFS, existing);
            } catch (IOException e) {
                file = null;
                log.error(String.format("Error while appending to docPath %s: %s", docPath,
                        ExceptionToString.format(e)));
            }
        } else {
            file = createNewFile(docPath, newContent);
        }
    }
    return file != null;
}
@Override
public void process(CAS aCAS) throws AnalysisEngineProcessException {
    Type documentIdType = aCAS.getTypeSystem()
            .getType("edu.umn.biomedicus.uima.type1_5.DocumentId");
    Feature docIdFeat = documentIdType.getFeatureByBaseName("documentId");
    String documentId = aCAS.getIndexRepository()
            .getAllIndexedFS(documentIdType)
            .get()
            .getStringValue(docIdFeat);
    if (documentId == null) {
        documentId = UUID.randomUUID().toString();
    }
    GridFSInputFile file = gridFS.createFile(documentId + ".xmi");
    try (OutputStream outputStream = file.getOutputStream()) {
        XmiCasSerializer.serialize(aCAS, outputStream);
    } catch (IOException | SAXException e) {
        throw new AnalysisEngineProcessException(e);
    }
}
@Override
public void storeAttachment(AttachmentId attachmentId, InputStream data) throws IOException {
    GridFSInputFile file = getAttachmentGrid().createFile(data, attachmentId.serialise());
    try {
        file.save();
    } catch (MongoException e) {
        // Unfortunately, file.save() wraps any IOException thrown in a
        // 'MongoException'. Since the interface explicitly throws IOExceptions,
        // we unwrap any IOExceptions thrown.
        Throwable innerException = e.getCause();
        if (innerException instanceof IOException) {
            throw (IOException) innerException;
        } else {
            throw e;
        }
    }
}
public String saveFile(InputStream is, String contentType) throws IOException {
    GridFSInputFile file = getNewFile();
    String id = file.getId().toString();
    OutputStream os = getFileOutputStream(id, contentType);
    if (os != null) {
        try {
            byte[] data = new byte[4096];
            int len;
            while ((len = is.read(data, 0, data.length)) > 0) {
                os.write(data, 0, len);
            }
            return id;
        } finally {
            os.close();
        }
    }
    return null;
}
/**
 * Inserts a Grobid TEI document using GridFS.
 */
public void insertGrobidTei(String teiString, String repositoryDocId, String anhalyticsId,
        String version, String source, String type, String date) {
    try {
        GridFS gfs = new GridFS(db, MongoCollectionsInterface.GROBID_TEIS);
        gfs.remove(repositoryDocId + ".tei.xml");
        GridFSInputFile gfsFile = gfs.createFile(new ByteArrayInputStream(teiString.getBytes()), true);
        gfsFile.put("uploadDate", Utilities.parseStringDate(date));
        gfsFile.setFilename(repositoryDocId + ".tei.xml");
        gfsFile.put("repositoryDocId", repositoryDocId);
        gfsFile.put("anhalyticsId", anhalyticsId);
        gfsFile.put("source", source);
        gfsFile.put("version", version);
        gfsFile.put("documentType", type);
        gfsFile.save();
    } catch (ParseException e) {
        logger.error(e.getMessage(), e.getCause());
    }
}
/**
 * Inserts a TEI metadata document into GridFS.
 */
public void insertMetadataTei(String tei, String doi, String pdfUrl, String source,
        String repositoryDocId, String version, String type, String date) {
    try {
        GridFS gfs = new GridFS(db, MongoCollectionsInterface.METADATAS_TEIS);
        gfs.remove(repositoryDocId + ".tei.xml");
        GridFSInputFile gfsFile = gfs.createFile(new ByteArrayInputStream(tei.getBytes()), true);
        gfsFile.put("uploadDate", Utilities.parseStringDate(date));
        gfsFile.setFilename(repositoryDocId + ".tei.xml");
        gfsFile.put("repositoryDocId", repositoryDocId);
        gfsFile.put("anhalyticsId", generateAnhalyticsId(repositoryDocId, doi, pdfUrl));
        gfsFile.put("source", source);
        gfsFile.put("version", version);
        gfsFile.put("documentType", type);
        gfsFile.save();
    } catch (ParseException e) {
        logger.error(e.getMessage(), e.getCause());
    }
}
/**
 * Inserts a PDF binary document into GridFS.
 */
public void insertBinaryDocument(BinaryFile bf, String date) {
    try {
        GridFS gfs = new GridFS(db, MongoCollectionsInterface.BINARIES);
        gfs.remove(bf.getFileName());
        GridFSInputFile gfsFile = gfs.createFile(bf.getStream(), true);
        gfsFile.put("uploadDate", Utilities.parseStringDate(date));
        gfsFile.setFilename(bf.getFileName());
        gfsFile.put("repositoryDocId", bf.getRepositoryDocId());
        gfsFile.put("anhalyticsId", bf.getAnhalyticsId());
        gfsFile.put("source", bf.getSource());
        gfsFile.put("version", bf.getRepositoryDocVersion());
        gfsFile.put("documentType", bf.getDocumentType());
        gfsFile.setContentType(bf.getFileType());
        gfsFile.save();
    } catch (ParseException e) {
        logger.error(e.getMessage(), e.getCause());
    }
}
/**
 * Replaces an already existing TEI with a new, more enriched one (e.g. with fulltext).
 */
public void updateTei(String newTei, String repositoryDocId, String collection) {
    try {
        GridFS gfs = new GridFS(db, collection);
        GridFSDBFile gdf = gfs.findOne(repositoryDocId + ".tei.xml");
        if (gdf == null) {
            logger.error("No existing TEI found for " + repositoryDocId);
            return;
        }
        GridFSInputFile gfsNew = gfs.createFile(new ByteArrayInputStream(newTei.getBytes()), true);
        gfsNew.put("uploadDate", gdf.getUploadDate());
        gfsNew.setFilename(gdf.get("repositoryDocId") + ".tei.xml");
        gfsNew.put("repositoryDocId", gdf.get("repositoryDocId"));
        gfsNew.put("documentType", gdf.get("documentType"));
        gfsNew.put("anhalyticsId", gdf.get("anhalyticsId"));
        gfsNew.put("source", gdf.get("source"));
        gfsNew.save();
        gfs.remove(gdf);
    } catch (Exception e) {
        logger.error(e.getMessage(), e.getCause());
    }
}
/**
 * Inserts an arXiv/Istex TEI document into GridFS.
 */
public void insertExternalTeiDocument(InputStream file, String identifier, String repository,
        String namespace, String dateString) {
    try {
        GridFS gfs = new GridFS(db, namespace);
        GridFSInputFile gfsFile = gfs.createFile(file, true);
        gfs.remove(identifier + ".pdf");
        gfsFile.put("uploadDate", Utilities.parseStringDate(dateString));
        gfsFile.setFilename(identifier + ".tei.xml");
        gfsFile.put("identifier", identifier);
        gfsFile.put("repository", repository);
        gfsFile.setContentType("application/tei+xml");
        gfsFile.save();
    } catch (ParseException e) {
        logger.error(e.getMessage(), e.getCause());
    }
}
@Override
public void writeBlob(final String blobName, final InputStream is, final long sizeInBytes,
        final WriterListener listener) {
    blobStore.executor().execute(new Runnable() {
        @Override
        public void run() {
            try {
                // Remove the old file first if it already exists.
                blobStore.gridFS().remove(buildKey(blobName));
                GridFSInputFile file = blobStore.gridFS().createFile(is, buildKey(blobName));
                file.save();
                listener.onCompleted();
            } catch (Exception e) {
                listener.onFailure(e);
            }
        }
    });
}
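A minimal caller sketch for the asynchronous write above. The callback names come from the snippet itself, but blobContainer and the exact WriterListener signature are assumptions for illustration.

// Hypothetical usage: write a blob and react to the async outcome.
blobContainer.writeBlob("segment-1", inputStream, length, new WriterListener() {
    @Override
    public void onCompleted() {
        logger.info("blob written");
    }

    @Override
    public void onFailure(Throwable t) { // parameter type assumed
        logger.error("blob write failed", t);
    }
});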
@Override
public OutputStream createOutputStreamToWrite() {
    checkState(State.NEW);
    storingOutputStream = new FilterOutputStream(((GridFSInputFile) dbFile).getOutputStream()) {
        @Override
        public void close() throws IOException {
            putMetadataInGridFS(false);
            super.close();
            refreshAttributesOnClose();
        }
    };
    return storingOutputStream;
}
/**
 * Saves a file into MongoDB.
 * @param file the file to store
 * @param id custom value for the _id field
 * @param metaData metadata key/value pairs
 * @return true if the file was stored, false if a file with this id already exists
 */
public boolean concatGridFile(File file, Object id, DBObject metaData) {
    GridFSInputFile gridFSInputFile;
    DBObject query = new BasicDBObject("_id", id);
    GridFSDBFile gridFSDBFile = myFS.findOne(query);
    if (gridFSDBFile != null) {
        return false;
    }
    try {
        gridFSInputFile = myFS.createFile(file);
        gridFSInputFile.put("_id", id);
        gridFSInputFile.setFilename(file.getName());
        gridFSInputFile.setMetaData(metaData);
        // Note: stores the file extension (including the dot) as the content type.
        gridFSInputFile.setContentType(file.getName().substring(file.getName().lastIndexOf(".")));
        gridFSInputFile.save();
    } catch (Exception e) {
        e.printStackTrace();
        return false;
    }
    return true;
}
@Override
public void run() {
    try {
        File localPath = new File(localRoot, file.getFilename());
        log.info("Save to local file:" + localPath.getAbsolutePath());
        File dirName = localPath.getParentFile();
        if (!dirName.exists()) {
            dirName.mkdirs();
        }
        file.writeTo(localPath);
        // Replace the dumped file with a small stub that records its local length.
        GridFSInputFile newFile = fs.createFile(new byte[]{0, 0});
        newFile.setMetaData(file.getMetaData());
        newFile.setFilename(file.getFilename());
        newFile.put("localLength", file.getLength());
        newFile.save(10);
        //log.info("remove:%s" + file.getId() + ", fn:" + file.getFilename());
        fs.remove((ObjectId) file.getId());
    } catch (Throwable e) {
        log.error("Failed to dump file to local fs, error:" + e.toString(), e);
    }
}
@Override
public void saveBlob(final MD5 md5, final InputStream data, final boolean sorted)
        throws BlobStoreCommunicationException {
    if (data == null || md5 == null) {
        throw new NullPointerException("Arguments cannot be null");
    }
    if (getFile(md5) != null) {
        return; // already exists
    }
    final GridFSInputFile gif = gfs.createFile(data, true);
    gif.setId(md5.getMD5());
    gif.setFilename(md5.getMD5());
    gif.put(Fields.GFS_SORTED, sorted);
    try {
        gif.save();
    } catch (DuplicateKeyException dk) {
        // already here, done
    } catch (MongoException me) {
        throw new BlobStoreCommunicationException(
                "Could not write to the mongo database", me);
    }
}
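The getFile existence check above is not shown; a plausible sketch follows, assuming blobs are keyed by their MD5 string in _id exactly as saveBlob stores them. The helper itself is hypothetical.

// Hypothetical helper: look a blob up by the MD5 string used as its _id.
private GridFSDBFile getFile(final MD5 md5) {
    return gfs.findOne(new BasicDBObject("_id", md5.getMD5()));
}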
@Test
public void dataWithoutSortMarker() throws Exception {
    String s = "pootypoot";
    final GridFSInputFile gif = gfs.createFile(s.getBytes("UTF-8"));
    MD5 md5 = new MD5(a32);
    gif.setId(md5.getMD5());
    gif.setFilename(md5.getMD5());
    gif.save();
    ByteArrayFileCache d = gfsb.getBlob(md5,
            new ByteArrayFileCacheManager(16000000, 2000000000L, tfm));
    assertThat("data returned marked as unsorted", d.isSorted(), is(false));
    String returned = IOUtils.toString(d.getJSON());
    assertThat("Didn't get same data back from store", returned, is(s));
    gfsb.removeBlob(md5);
}
@Override
public String putBlob(String container, Blob blob, PutOptions options) {
    if (options != null && !options.isMultipart()) {
        throw new IllegalArgumentException("only multipart is supported by this provider");
    }
    Payload payload = checkNotNull(blob.getPayload());
    BlobMetadata metadata = blob.getMetadata();
    ContentMetadata contentMetadata = metadata.getContentMetadata();
    GridFS gridFS = parseGridFSIdentifier(container).connect(mongo);
    GridFSInputFile inputFile = gridFS.createFile(payload.getInput(), metadata.getName(), true);
    inputFile.setContentType(contentMetadata.getContentType());
    DBObject fileMetadata = new BasicDBObject();
    fileMetadata.putAll(metadata.getUserMetadata());
    inputFile.setMetaData(fileMetadata);
    inputFile.save();
    return inputFile.getMD5();
}
public Optional<FileEntry> save(final String name, final String mime, final String creator,
        final boolean privateFile, final String description, InputStream in) {
    GridFS gf = gridFS;
    GridFSInputFile f = gf.createFile(in);
    f.setFilename(name);
    f.setContentType(mime);

    DBObject metadata = f.getMetaData();
    if (metadata == null) {
        metadata = new BasicDBObject();
        f.setMetaData(metadata);
    }
    metadata.put("creator", creator);
    metadata.put("private", privateFile);
    metadata.put("description", description);

    f.save();
    return this.loadFileEntry((ObjectId) f.getId());
}
@Override
public synchronized OutputStream openOutputStream(final byte[] key) {
    return new ByteArrayOutputStream(256) {
        @Override
        public void close() throws IOException {
            super.close();
            byte[] keyString = adoptKey(key);
            byte[] byteArray = toByteArray();
            if (byteArray.length < MAX_BSON_SIZE) {
                // Small values are stored inline in the collection as BSON.
                collection.update(new BasicDBObject(KEY, keyString),
                        new BasicDBObject(KEY, keyString).append(TYPE, TYPE_BSON).append(VALUE, byteArray),
                        true, false);
            } else {
                // Values too large for a single BSON document go to GridFS.
                GridFSInputFile gridFsFile = gridFs.createFile(byteArray);
                String fileName = URIUtils.encode(key);
                gridFsFile.setFilename(fileName);
                gridFsFile.save();
                collection.update(new BasicDBObject(KEY, keyString),
                        new BasicDBObject(KEY, keyString).append(TYPE, TYPE_GRID_FS).append(FILE_NAME, fileName),
                        true, false);
            }
        }
    };
}
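A usage sketch for the size-based routing above; store and largePayload are hypothetical names.

// Hypothetical usage: the stream buffers in memory and, on close(), lands
// inline as BSON when small or in GridFS when it exceeds MAX_BSON_SIZE.
byte[] key = "example-key".getBytes(java.nio.charset.StandardCharsets.UTF_8);
try (OutputStream out = store.openOutputStream(key)) {
    out.write(largePayload);
}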
@Override
public void execute() throws Exception {
    MongoClient mdb = MongoFactory.getInst().getMongo(sName);
    if (mdb == null) {
        throw new Exception("no server selected");
    }
    if (sDb == null) {
        throw new Exception("no database selected");
    }
    MongoFactory.getInst().setActiveDB(sDb);
    DB db = mdb.getDB(sDb);
    GridFS gfs = new GridFS(db, sColl.substring(0, sColl.lastIndexOf(".")));
    GridFSInputFile gridFSInputFile = gfs.createFile(getFile);
    gridFSInputFile.setContentType(MimetypesFileTypeMap.getDefaultFileTypeMap().getContentType(getFile));
    gridFSInputFile.save();
    setMessage("fileLoaded=" + getFile + "; size=" + getFile.length());
}
public DataAddress prefetchData(DataAddress givenAddress, ServerAddress destAddress) throws IOException {
    ServerAddress givenServer = new ServerAddress(givenAddress.hostname, givenAddress.port);
    GridFS givenDatabase = connectToDatabase(givenServer);
    logger.info("Connected to source database");
    GridFSDBFile givenPackage = givenDatabase.findOne(new BasicDBObject("_id", givenAddress.ID));
    ByteArrayOutputStream baos = new ByteArrayOutputStream((int) givenPackage.getLength());
    givenPackage.writeTo(baos);
    logger.info("Prefetched");
    GridFS destDatabase = connectToDatabase(destAddress);
    GridFSInputFile destPackage = destDatabase.createFile(baos.toByteArray());
    int newID = getNextId(destDatabase);
    logger.info("Got new id for prefetched package: " + newID);
    destPackage.put("_id", newID);
    destPackage.save();
    logger.info("Prefetched package saved");
    DataAddress ret = new DataAddress();
    ret.hostname = destAddress.getHost();
    ret.port = destAddress.getPort();
    ret.ID = newID;
    return ret;
}
/**
 * @param inputStream the file stream
 * @param format file format such as "pdf" or "png", without the "." suffix
 * @param uid optional owner id
 * @return the stored file name concatenated with its format
 */
public String saveFile(InputStream inputStream, String format, String uid) {
    try {
        GridFS gridFS = getInstance();
        // Generate a random file name, retrying as often as needed:
        // if the name is already taken, generate a new one.
        String filename = this.randomFileName();
        while (true) {
            GridFSDBFile _current = gridFS.findOne(filename);
            if (_current == null) {
                break; // the file does not exist yet, so the name is free
            }
            filename = this.randomFileName();
        }
        GridFSInputFile file = gridFS.createFile(inputStream, filename);
        if (format != null) {
            file.put("format", format);
        }
        if (uid != null) {
            file.put("uid", uid);
        }
        file.put("content-type", "application/octet-stream");
        file.save();
        return concat(filename, format);
    } catch (Exception e) {
        throw new RuntimeException(e);
    } finally {
        try {
            inputStream.close();
        } catch (Exception ex) {
            // ignore
        }
    }
}
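randomFileName is assumed by this snippet; one plausible sketch is below. Any collision-resistant generator works, since the loop above retries on collisions; the helper is hypothetical.

// Hypothetical helper: a compact random name; collisions are handled by the
// retry loop in saveFile.
private String randomFileName() {
    return UUID.randomUUID().toString().replace("-", "");
}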
@Override
public boolean create(String filename, byte[] data) {
    this.delete(filename);
    GridFSInputFile file = getGridFS().createFile(data);
    file.setFilename(filename);
    file.save();
    return true;
}
@Override
public void put(String keyText, T obj) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ObjectOutputStream objectOutput = new ObjectOutputStream(baos);
    objectOutput.writeObject(obj);
    objectOutput.close();
    byte[] binaryObject = baos.toByteArray();
    GridFSInputFile objFile = gridfs.createFile(binaryObject);
    objFile.setFilename(DigestUtils.sha256Hex(keyText));
    // it will not update the content of an existing file
    objFile.save();
}
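As the comment notes, save() does not replace an already-stored file with the same name. A caller needing upsert semantics would have to remove the old file first; a sketch under that assumption (a hypothetical variant, not the original code):

// Hypothetical upsert variant: drop any prior file for this key, then save.
String name = DigestUtils.sha256Hex(keyText);
gridfs.remove(name);
GridFSInputFile objFile = gridfs.createFile(binaryObject);
objFile.setFilename(name);
objFile.save();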
protected GridFSInputFile createNewFile(String docPath, InputStream content) {
    GridFSInputFile file = getGridFS().createFile(content, docPath);
    if (file != null) {
        file.save();
    }
    return file;
}
public long createGridFsFile(Id id, InputStream inputStream, String filename, String mimeType) {
    DB db = (DB) connections.getConnection("mongodb.dma");
    GridFS fs = new GridFS(db);
    GridFSInputFile gridFile = fs.createFile(inputStream, filename);
    gridFile.setId(id);
    gridFile.setContentType(mimeType);
    gridFile.save();
    return gridFile.getLength();
}
private long createGridFsFile(Id id, InputStream inputStream, String filename, String mimeType) {
    DB db = (DB) connections.getConnection("mongodb.dma");
    GridFS fs = new GridFS(db);
    GridFSInputFile gridFile = fs.createFile(inputStream, filename);
    gridFile.setId(id);
    gridFile.setContentType(mimeType);
    gridFile.save();
    return gridFile.getLength();
}
@SuppressWarnings("serial") @POST @Consumes(MediaType.APPLICATION_JSON) public Response insertImageInDb(String jsonRequest, @Context Request request) throws IOException { EndpointUtil.printClientInfo(request); DBObject json = ((DBObject) JSON.parse(jsonRequest)); String imageData = (String) json.get("imageData"); byte[] screenshotBytes = Base64Utils.decode(imageData); String testName = json.get(BaseScreenshotModel.TEST_NAME).toString(); String testBrowser = json.get(BaseScreenshotModel.TEST_BROWSER).toString(); String description = json.get(BaseScreenshotModel.DESCRIPTION).toString(); Type type = new TypeToken<List<Rectangle>>() { }.getType(); String ignoreZonesString = ((Object) json.get(BaseScreenshotModel.IGNORE_ZONES)).toString(); List<Rectangle> ignoreZones = new ArrayList<>(); if (ignoreZonesString != null) { ignoreZones = GsonUtil.gson.fromJson(ignoreZonesString, type); } File tmpFile = new File("tmpFile"); FileUtils.writeByteArrayToFile(tmpFile, screenshotBytes); GridFSInputFile gfsFile = GFS_PHOTO.createFile(tmpFile); gfsFile.setFilename(String.format("%s|%s|%s", testName, testBrowser, description)); gfsFile.save(); // after the file has been saved, get the id and add it into the table of base_images BaseScreenshotModel up = new BaseScreenshotModel(testName, testBrowser, description, new ObjectId(gfsFile.getId().toString()), ignoreZones); TMP_IMAGES.save(up); tmpFile.delete(); return Response.ok().entity(JSON.serialize(up)).build(); }
@Override
public String saveArtifact(DBKey dbKey, InputStream data, String contentType) {
    String resultObjectId = null;
    GridFS gfs = getGridFS(dbKey);
    GridFSInputFile file = gfs.createFile(data);
    if (file != null) {
        file.setContentType(contentType);
        file.save();
        resultObjectId = file.getId().toString();
    }
    return resultObjectId;
}
public GridFSInputFile build(IGridFSSession gridFS) throws Exception {
    GridFSInputFile _inFile = null;
    switch (__type) {
        case 1: // is File
            _inFile = gridFS.getGridFS().createFile((File) __targetObject);
            break;
        case 2: // is InputStream
            _inFile = gridFS.getGridFS().createFile((InputStream) __targetObject);
            break;
        case 3: // is Array
            _inFile = gridFS.getGridFS().createFile((byte[]) __targetObject);
    }
    if (_inFile != null) {
        _inFile.setFilename(__filename);
        _inFile.setContentType(__contentType);
        if (__chunkSize > 0) {
            _inFile.setChunkSize(__chunkSize);
        }
        if (!__attributes.isEmpty()) {
            for (Map.Entry<String, Object> _entry : __attributes.entrySet()) {
                _inFile.put(_entry.getKey(), _entry.getValue());
            }
        }
    }
    return _inFile;
}
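A possible call site for the builder above; builder and session are hypothetical names for the enclosing builder instance and an IGridFSSession.

// Hypothetical usage: build the configured file, then persist it.
GridFSInputFile inFile = builder.build(session);
if (inFile != null) {
    inFile.save();
}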
private OutputStream getFileOutputStream(String id, String contentType) {
    if (mFS != null) {
        try {
            deleteFile(id);
            GridFSInputFile dbFile = mFS.createFile(id);
            if (contentType != null) {
                dbFile.setContentType(contentType);
            }
            return dbFile.getOutputStream();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    return null;
}
/**
 * Inserts a generated TEI document using GridFS.
 */
public void insertTei(TEIFile tei, String date, String collection) {
    try {
        GridFS gfs = new GridFS(db, collection);
        gfs.remove(tei.getFileName());
        GridFSInputFile gfsFile = gfs.createFile(new ByteArrayInputStream(tei.getTei().getBytes()), true);
        gfsFile.put("uploadDate", Utilities.parseStringDate(date));
        gfsFile.setFilename(tei.getFileName());
        gfsFile.put("repositoryDocId", tei.getRepositoryDocId());
        if (collection.equals(MongoCollectionsInterface.METADATAS_TEIS)) {
            String anhalyticsID = generateAnhalyticsId(tei.getRepositoryDocId(), tei.getDoi(),
                    (tei.getPdfdocument() != null) ? tei.getPdfdocument().getUrl() : null);
            gfsFile.put("anhalyticsId", anhalyticsID);
            if (tei.getPdfdocument() != null) {
                tei.getPdfdocument().setAnhalyticsId(anhalyticsID);
            }
            for (BinaryFile annex : tei.getAnnexes()) {
                annex.setAnhalyticsId(anhalyticsID);
            }
        } else {
            gfsFile.put("anhalyticsId", tei.getAnhalyticsId());
        }
        gfsFile.put("source", tei.getSource());
        gfsFile.put("version", tei.getRepositoryDocVersion());
        gfsFile.put("documentType", tei.getDocumentType());
        gfsFile.setContentType(tei.getFileType());
        gfsFile.save();
    } catch (ParseException e) {
        logger.error(e.getMessage(), e.getCause());
    }
}
/**
 * Goes through all the embedded content, stores it to GridFS, and replaces the
 * doc embeddings with a hyperlink to the saved content.
 */
protected void embedSteps(final DBObject feature, final GridFS gridFS, final Coordinates coordinates) {
    final BasicDBList elements = (BasicDBList) feature.get("elements");
    final String featureId = (String) feature.get("_id");
    if (elements != null) {
        for (int j = 0; j < elements.size(); j++) {
            final DBObject scenario = (DBObject) elements.get(j);
            final String scenarioId = (String) scenario.get("_id");
            final BasicDBList steps = (BasicDBList) scenario.get("steps");
            if (steps != null) {
                for (int k = 0; k < steps.size(); k++) {
                    final DBObject step = (DBObject) steps.get(k);
                    final BasicDBList embeddings = (BasicDBList) step.get("embeddings");
                    if (embeddings != null) {
                        for (int l = 0; l < embeddings.size(); l++) {
                            final DBObject embedding = (DBObject) embeddings.get(l);
                            final GridFSInputFile image = gridFS
                                    .createFile(Base64.decodeBase64(((String) embedding.get("data")).getBytes()));
                            image.setFilename(guid());
                            final BasicDBObject metadata = new BasicDBObject()
                                    .append("product", coordinates.getProduct())
                                    .append("major", coordinates.getMajor())
                                    .append("minor", coordinates.getMinor())
                                    .append("servicePack", coordinates.getServicePack())
                                    .append("build", coordinates.getBuild())
                                    .append("feature", featureId)
                                    .append("scenario", scenarioId);
                            image.setMetaData(metadata);
                            image.setContentType((String) embedding.get("mime_type"));
                            image.save();
                            // Replace the inline base64 payload with the stored file's name.
                            embeddings.put(l, image.getFilename());
                        }
                    }
                }
            }
        }
    }
}
/**
 * Stores attachment content in GridFS.
 *
 * @param name the attachment name (unused here; the generated ObjectId serves as the file name)
 * @param contentType the MIME content type
 * @param attachmentContentStream the content to store
 * @return metadata describing the stored content
 */
@Override
public AttachmentContentMetadata createAttachmentContent(String name, String contentType,
        InputStream attachmentContentStream) {
    // Do not specify a bucket (so the data will be stored in fs.files and fs.chunks)
    GridFSInputFile gfsFile = gridFS.createFile(attachmentContentStream);
    ObjectId id = new ObjectId();
    gfsFile.setContentType(contentType);
    gfsFile.setId(id);
    String filename = id.toString();
    gfsFile.setFilename(filename);
    gfsFile.save();
    return new AttachmentContentMetadata(gfsFile.getFilename(), gfsFile.getLength());
}
private void saveToDataBase(Path file) throws IOException {
    // create the GridFS file from the local file
    GridFSInputFile in = model.createFile(file.toFile());
    // set the file name
    in.setFilename(file.getFileName().toString());
    // save
    in.save();
}
private void makeBloomFilter(ArrayList<String> chunk, int distributionID) {
    final String firstResource = chunk.get(0);
    final String lastResource = chunk.get(chunk.size() - 1);
    // Size the filter for at least 5000 entries to keep the false-positive
    // rate stable for small chunks.
    int chunkSize = chunk.size();
    if (chunkSize < 5000) {
        chunkSize = 5000;
    }
    BloomFilterI filter = BloomFilterFactory.newBloomFilter();
    filter.create(chunkSize, fpp);
    for (String resource : chunk) {
        filter.add(resource);
    }
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    try {
        filter.writeTo(out);
    } catch (IOException e1) {
        e1.printStackTrace();
    }
    GridFS gfs = new GridFS(DBSuperClass2.getDBInstance(), COLLECTION_NAME);
    try {
        GridFSInputFile gfsFile = gfs.createFile(new BufferedInputStream(new ByteArrayInputStream(out.toByteArray())));
        gfsFile.put(FIRST_RESOURCE, firstResource);
        gfsFile.put(LAST_RESOURCE, lastResource);
        gfsFile.put(DISTRIBUTION_ID, distributionID);
        gfsFile.save();
    } catch (Exception e) {
        System.out.println(firstResource);
        System.out.println(lastResource);
        System.out.println(distributionID);
        e.printStackTrace();
    }
}
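Reading a stored filter back would mirror writeTo; a sketch assuming the project's BloomFilterI offers matching readFrom and contains methods (both method names are guesses; the metadata field names come from the snippet):

// Hypothetical read-back: fetch the filter for a distribution and test membership.
GridFS gfs = new GridFS(DBSuperClass2.getDBInstance(), COLLECTION_NAME);
GridFSDBFile stored = gfs.findOne(new BasicDBObject(DISTRIBUTION_ID, distributionID));
BloomFilterI filter = BloomFilterFactory.newBloomFilter();
filter.readFrom(stored.getInputStream()); // assumed counterpart to writeTo
boolean maybePresent = filter.contains(resource); // membership test; API assumed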
public String update(String gridFSId, InputStream in, String database) throws FileNotFoundException {
    // Retrieve the old file, then delete it once the replacement is saved.
    final GridFSDBFile oldFile = gridFS(database).findOne(new ObjectId(gridFSId));
    if (oldFile != null) {
        GridFSInputFile updatedFile = gridFS(database).createFile(in, oldFile.getFilename());
        updatedFile.save();
        gridFS(database).remove(oldFile);
        return updatedFile.getId().toString();
    } else {
        throw new FileNotFoundException(gridFSId);
    }
}
@Override
public FileStorage.FileWriteBean createFileWriteBean(Id id, FileStorage.FileMetaBean meta) {
    meta.setLastModified(new Date());
    meta.setCreated(new Date());
    GridFS fs = new GridFS(MongoDBConnectionHelper.getConnection(id.getDatabase()), id.getCollection());
    fs.remove(id.getEntity());
    GridFSInputFile gfsFile = fs.createFile(id.getEntity());
    gfsFile.setContentType(meta.getContentType());
    gfsFile.setMetaData(MongoDBFormat.fromMap(meta.toMap()));
    GridFSFileWriteBean gridFSFileWriteBean = new GridFSFileWriteBean(id, gfsFile.getOutputStream(), meta);
    gridFSFileWriteBean.gfsFile = gfsFile;
    return gridFSFileWriteBean;
}