private static Table<String, String, String> loadSHRMapping() {
    if (!USE_SHR_EXTENSIONS) {
        // don't bother creating the table unless we need it
        return null;
    }
    Table<String, String, String> mappingTable = HashBasedTable.create();

    List<LinkedHashMap<String, String>> csvData;
    try {
        csvData = SimpleCSV.parse(Utilities.readResource("shr_mapping.csv"));
    } catch (IOException e) {
        e.printStackTrace();
        return null;
    }

    for (LinkedHashMap<String, String> line : csvData) {
        String system = line.get("SYSTEM");
        String code = line.get("CODE");
        String url = line.get("URL");
        mappingTable.put(system, code, url);
    }

    return mappingTable;
}
public static Table<String, String, Float> parseMatrix(Reader reader) throws IOException {
    Table<String, String, Float> table = HashBasedTable.create();
    try (ICsvListReader csvReader = new CsvListReader(reader, CsvPreference.STANDARD_PREFERENCE)) {
        List<String> columnHeaders = csvReader.read();
        List<String> row;
        while ((row = csvReader.read()) != null) {
            String rowHeader = row.get(0);
            for (int i = 1; i < row.size(); i++) {
                String columnHeader = columnHeaders.get(i);
                String value = row.get(i);
                table.put(rowHeader, columnHeader, value == null ? Float.NaN : Float.parseFloat(value));
            }
        }
    }
    return table;
}
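// A minimal usage sketch for parseMatrix above. The enclosing class name "MatrixCsv" and the
// sample CSV are hypothetical, not from the original source. The header row has an empty corner
// cell followed by the column labels; empty cells come back as null from Super CSV and are
// therefore stored as Float.NaN by parseMatrix.
import java.io.StringReader;
import com.google.common.collect.Table;

public class ParseMatrixExample {
    public static void main(String[] args) throws Exception {
        String csv = ",A,B\n"
                + "X,1.5,\n"     // empty trailing field -> NaN for (X, B)
                + "Y,2.0,3.5\n";
        Table<String, String, Float> m = MatrixCsv.parseMatrix(new StringReader(csv)); // hypothetical class
        System.out.println(m.get("Y", "B")); // 3.5
        System.out.println(m.get("X", "B")); // NaN
    }
}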
/**
 * Group configuration items by profile: items with the same name are overridden
 * according to the priority order of the profiles.
 *
 * @param configItemList
 * @param profileIdList
 * @return
 */
private List<BuildConfigItem> groupByProfile(List<BuildConfigItem> configItemList, List<Integer> profileIdList) {
    List<BuildConfigItem> filteredConfigItemList = new ArrayList<>();
    Table<String, Integer, List<BuildConfigItem>> configItemTable = getConfigItemTable(configItemList);
    for (String itemName : configItemTable.rowKeySet()) {
        for (int profileId : profileIdList) {
            List<BuildConfigItem> itemList = configItemTable.get(itemName, profileId);
            if (itemList != null && !itemList.isEmpty()) {
                filteredConfigItemList.addAll(itemList);
                break;
            }
        }
    }
    return filteredConfigItemList;
}
/**
 * Build a two-dimensional table from the configuration items: [config name, profile ID, config items].
 *
 * @param configItemList
 * @return
 */
private Table<String, Integer, List<BuildConfigItem>> getConfigItemTable(List<BuildConfigItem> configItemList) {
    Table<String, Integer, List<BuildConfigItem>> configItemTable = HashBasedTable.create();
    List<BuildConfigItem> listByNameAndProfile = null;
    for (BuildConfigItem configItem : configItemList) {
        listByNameAndProfile = configItemTable.get(configItem.getConfigName(), configItem.getProfileId());
        if (listByNameAndProfile == null) {
            listByNameAndProfile = new ArrayList<>();
            configItemTable.put(configItem.getConfigName(), configItem.getProfileId(), listByNameAndProfile);
        }
        listByNameAndProfile.add(configItem);
    }
    return configItemTable;
}
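// A simplified, self-contained illustration (not the original classes) of the profile-precedence
// lookup implemented by groupByProfile/getConfigItemTable above: items are keyed by
// (config name, profile id) in a Table, and the first profile in the priority list that has an
// entry for a given name wins.
import java.util.Arrays;
import java.util.List;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;

public class ProfilePrecedenceExample {
    public static void main(String[] args) {
        // row = config name, column = profile id, value = config value
        Table<String, Integer, String> items = HashBasedTable.create();
        items.put("db.url", 1, "jdbc:h2:mem:default");
        items.put("db.url", 2, "jdbc:mysql://prod/db");
        items.put("pool.size", 1, "10");

        List<Integer> profilePriority = Arrays.asList(2, 1); // profile 2 overrides profile 1
        for (String name : items.rowKeySet()) {
            for (int profileId : profilePriority) {
                String value = items.get(name, profileId);
                if (value != null) {
                    System.out.println(name + " -> " + value); // db.url resolves to the profile-2 value
                    break;
                }
            }
        }
    }
}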
public void buildPropertyValueTable(Map<Map<IProperty, Comparable>, BlockState.StateImplementation> map) {
    if (this.propertyValueTable != null) {
        throw new IllegalStateException();
    } else {
        Table<IProperty, Comparable, IBlockState> table = HashBasedTable.<IProperty, Comparable, IBlockState>create();

        for (IProperty<? extends Comparable> iproperty : this.properties.keySet()) {
            for (Comparable comparable : iproperty.getAllowedValues()) {
                if (comparable != this.properties.get(iproperty)) {
                    table.put(iproperty, comparable, map.get(this.getPropertiesWithValue(iproperty, comparable)));
                }
            }
        }

        this.propertyValueTable = ImmutableTable.<IProperty, Comparable, IBlockState>copyOf(table);
    }
}
@Override
public void storeExternalCommunicatorMessage(final ExternalCommunicatorMessage message) {
    final String[] split = message.getTransferId().split("-");
    final Long piID = Long.valueOf(split[PI_ID_INDEX]);
    final Long mfId = Long.valueOf(split[MF_ID_INDEX]);
    final Long sId = Long.valueOf(split[S_ID_INDEX]);

    final ProcessInstance processInstance = processInstanceRepository.findOne(piID);
    final MessageFlow messageFlow = Optional.ofNullable(messageFlowRepository.findOne(mfId))
            .orElseThrow(() -> new IllegalArgumentException(
                    "Could not find message flow for MF_ID [" + mfId + "]"));

    final Set<BusinessObjectInstance> businessObjectInstances =
            getBusinessObjectInstances(processInstance, messageFlow.getBusinessObjectModels());
    final Table<String, String, BusinessObjectField> records = convertToMap(message.getBusinessObjects());

    businessObjectInstances.stream()
            .forEachOrdered(objectInstance -> storeValues(records, objectInstance));

    final Subject subject = subjectRepository.findOne(sId);
    subject.getSubjectState().setToReceived(messageFlow);
    subjectRepository.save((SubjectImpl) subject);
}
private ReliableTaildirEventReader getReader(Map<String, String> filePaths,
        Table<String, String, String> headerTable, boolean addByteOffset) {
    ReliableTaildirEventReader reader;
    try {
        reader = new ReliableTaildirEventReader.Builder()
                .filePaths(filePaths)
                .headerTable(headerTable)
                .positionFilePath(posFilePath)
                .skipToEnd(false)
                .addByteOffset(addByteOffset)
                .build();
        reader.updateTailFiles();
    } catch (IOException ioe) {
        throw Throwables.propagate(ioe);
    }
    return reader;
}
/**
 * Retrieve a rating matrix from the tensor. Warning: it assumes there is at most one entry for
 * each (user, item) pair.
 *
 * @return a sparse rating matrix
 */
public SparseMatrix rateMatrix() {
    Table<Integer, Integer, Double> dataTable = HashBasedTable.create();
    Multimap<Integer, Integer> colMap = HashMultimap.create();

    for (TensorEntry te : this) {
        int u = te.key(userDimension);
        int i = te.key(itemDimension);
        dataTable.put(u, i, te.get());
        colMap.put(i, u);
    }

    return new SparseMatrix(dimensions[userDimension], dimensions[itemDimension], dataTable, colMap);
}
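// A minimal sketch of the row-table-plus-column-index pattern used by rateMatrix above, with
// plain arrays standing in for the tensor entries (the data values are made up). The Multimap
// records which rows (users) occupy each column (item), which is what a column-oriented
// structure can later be built from.
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Table;

public class RatingTableExample {
    public static void main(String[] args) {
        Table<Integer, Integer, Double> ratings = HashBasedTable.create();
        Multimap<Integer, Integer> colMap = HashMultimap.create();

        int[][] userItemPairs = { {0, 2}, {1, 2}, {1, 4} };
        double[] values = { 5.0, 3.0, 4.0 };
        for (int k = 0; k < userItemPairs.length; k++) {
            int u = userItemPairs[k][0];
            int i = userItemPairs[k][1];
            ratings.put(u, i, values[k]);
            colMap.put(i, u); // item -> users who rated it
        }

        System.out.println(ratings.column(2)); // users 0 and 1 with their ratings for item 2
        System.out.println(colMap.get(2));     // [0, 1] (order not guaranteed)
    }
}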
public static Table<String, String, String> assignmentsToTable(
        SortedMap<String, SortedSet<SingleWorkerAssignment<Step2bGoldReasonAnnotator.SentenceLabel>>> assignments) {
    TreeBasedTable<String, String, String> result = TreeBasedTable.create();

    assignments.forEach((unitID, singleWorkerAssignments) -> {
        singleWorkerAssignments.forEach(sentenceLabelSingleWorkerAssignment -> {
            String workerID = sentenceLabelSingleWorkerAssignment.getWorkerID();
            String label = sentenceLabelSingleWorkerAssignment.getLabel().toString();

            // update the table
            result.put(unitID, workerID, label);
        });
    });

    return result;
}
MediaTypeClassifierImpl(Iterable<? extends MediaType> mts) {
    Table<String, String, Set<MediaType>> typeTable =
            HashBasedTable.<String, String, Set<MediaType>>create();
    for (MediaType mt : mts) {
        String type = mt.type();
        String subtype = mt.subtype();
        Set<MediaType> typeSet = typeTable.get(type, subtype);
        if (typeSet == null) {
            typeSet = Sets.newLinkedHashSet();
            typeTable.put(type, subtype, typeSet);
        }
        typeSet.add(mt);
    }

    ImmutableTable.Builder<String, String, ImmutableSet<MediaType>> b = ImmutableTable.builder();
    for (Table.Cell<String, String, Set<MediaType>> cell : typeTable.cellSet()) {
        b.put(cell.getRowKey(), cell.getColumnKey(), ImmutableSet.copyOf(cell.getValue()));
    }
    this.types = b.build();
}
/**
 * Create a ReliableTaildirEventReader to watch the given directory. map<serverid.appid.logid, logpath>
 */
private ReliableTaildirEventReader(Map<String, LogPatternInfo> filePaths, Table<String, String, String> headerTable,
        String positionFilePath, boolean skipToEnd, boolean addByteOffset, String os) throws IOException {
    // Sanity checks
    Preconditions.checkNotNull(filePaths);
    Preconditions.checkNotNull(positionFilePath);

    // get operating system info
    if (logger.isDebugEnable()) {
        logger.debug(this, "Initializing {" + ReliableTaildirEventReader.class.getSimpleName()
                + "} with directory={" + filePaths + "}");
    }

    // tailFile
    this.tailFileTable = CacheBuilder.newBuilder().expireAfterWrite(2, TimeUnit.DAYS)
            .<String, LogPatternInfo>build();
    this.headerTable = headerTable;
    this.addByteOffset = addByteOffset;
    this.os = os;

    updatelog(filePaths);
    updateTailFiles(skipToEnd);

    logger.info(this, "tailFileTable: " + tailFileTable.toString());
    logger.info(this, "headerTable: " + headerTable.toString());
    logger.info(this, "Updating position from position file: " + positionFilePath);
    loadPositionFile(positionFilePath);
}
/**
 * Create a ReliableTaildirEventReader to watch the given directory. map<serverid.appid.logid, logpath>
 */
private ReliableTaildirEventReader(Map<String, CollectTask> tasks, Table<String, String, String> headerTable,
        boolean skipToEnd, boolean addByteOffset) throws IOException {
    Map<String, LogPatternInfo> filePaths = getFilePaths(tasks);

    // Sanity checks
    Preconditions.checkNotNull(filePaths);

    // get operating system info
    if (log.isDebugEnable()) {
        log.debug(this, "Initializing {" + ReliableTaildirEventReader.class.getSimpleName()
                + "} with directory={" + filePaths + "}");
    }

    // tailFile
    this.tailFileTable = CacheBuilder.newBuilder().expireAfterWrite(2, TimeUnit.DAYS)
            .<String, LogPatternInfo>build();
    this.headerTable = headerTable;
    this.addByteOffset = addByteOffset;
    this.os = JVMToolHelper.isWindows() ? OS_WINDOWS : null;

    updatelog(filePaths);
    updateTailFiles(skipToEnd);

    log.info(this, "tailFileTable: " + tailFileTable.toString());
    log.info(this, "headerTable: " + headerTable.toString());
}
public void buildPropertyValueTable(Map<Map<IProperty<?>, Comparable<?>>, BlockStateContainer.StateImplementation> map) {
    if (this.propertyValueTable != null) {
        throw new IllegalStateException();
    } else {
        Table<IProperty<?>, Comparable<?>, IBlockState> table =
                HashBasedTable.<IProperty<?>, Comparable<?>, IBlockState>create();

        for (Entry<IProperty<?>, Comparable<?>> entry : this.properties.entrySet()) {
            IProperty<?> iproperty = (IProperty) entry.getKey();

            for (Comparable<?> comparable : iproperty.getAllowedValues()) {
                if (comparable != entry.getValue()) {
                    table.put(iproperty, comparable, map.get(this.getPropertiesWithValue(iproperty, comparable)));
                }
            }
        }

        this.propertyValueTable = ImmutableTable.<IProperty<?>, Comparable<?>, IBlockState>copyOf(table);
    }
}
public static void main(String[] args) {
    TreeBasedTable<Integer, Integer, Integer> table = TreeBasedTable.create();
    table.put(2, 0, 6);
    table.put(3, 2, 4);
    table.put(0, 0, 5);
    table.put(0, 3, 2);
    table.put(4, 1, 2);
    table.put(4, 4, 9);

    CSRSparseMatrix csr = new CSRSparseMatrix(table, 5);
    for (Table.Cell<Integer, Integer, Integer> cell : table.cellSet()) {
        if (csr.get(cell.getRowKey(), cell.getColumnKey()) == cell.getValue()) {
            System.out.println(String.format("%d->%d = %d", cell.getRowKey(), cell.getColumnKey(), cell.getValue()));
        } else {
            System.out.println("ERROR");
        }
    }
}
public static void main(String[] args) {
    Table<String, String, String> table = TreeBasedTable.create();
    table.put("Row1", "Column1", "Data1");
    table.put("Row1", "Column2", "Data2");
    table.put("Row2", "Column1", "Data3");
    table.put("Row2", "Column2", "Data4");
    table.put("Row3", "Column1", "Data5");
    table.put("Row3", "Column2", "Data6");
    table.put("Row3", "Column3", "Data7");

    // Use a Guava MapJoiner to illustrate printing each row view of the table
    Joiner.MapJoiner mapJoiner = Joiner.on(',').withKeyValueSeparator("=");
    table.rowKeySet().forEach(r -> {
        System.out.println(r + "->" + mapJoiner.join(table.row(r)));
    });
}
@Override
protected void fillTable(int num, String[] inputs, Table<EspressoTerm, String, EspressoValue> table) {
    for (State state : stategraph.getStates()) {
        BitSet x = state.getBinaryRepresentationNormalised(stategraph.getAllSignals());
        EspressoTerm t = new EspressoTerm(BitSetHelper.formatBitset(x, num), inputs);
        for (Signal sig : stategraph.getAllSignals()) {
            if (sig.getType() == SignalType.output || sig.getType() == SignalType.internal) {
                switch (state.getStateValues().get(sig)) {
                    case high:
                    case rising:
                        table.put(t, sig.getName(), EspressoValue.one);
                        break;
                    case falling:
                    case low:
                        table.put(t, sig.getName(), EspressoValue.zero);
                        break;
                }
            }
        }
    }
}
protected EspressoTable createEspressoTable() {
    int num = stategraph.getAllSignals().size();
    if (resetname != null) {
        num++;
    }
    int i = 0;
    String[] inputs = new String[num];
    if (resetname != null) {
        inputs[i++] = resetname;
    }
    for (Signal sig : stategraph.getAllSignals()) {
        inputs[i++] = sig.getName();
    }
    Table<EspressoTerm, String, EspressoValue> table = HashBasedTable.create();
    fillTable(num, inputs, table);
    return new EspressoTable(inputs, table);
}
private Table<Integer, Material, Integer> loadChunkMaterial() throws SQLException {
    Table<Integer, Material, Integer> target = HashBasedTable.create();
    ResultSet resultSet = selectChunkMaterial.executeQuery();

    while (resultSet.next()) {
        int id = resultSet.getInt("id");
        int chunkId = resultSet.getInt("chunk_id");
        int materialId = resultSet.getInt("material_id");
        int count = resultSet.getInt("count");
        identityCache.setChunkMaterialId(chunkId, materialId, id);
        identityCache.getMaterial(materialId).ifPresent(material -> target.put(chunkId, material, count));
    }

    resultSet.close();
    return target;
}
private Table<Integer, WorthType, Double> loadChunkWorth() throws SQLException {
    Table<Integer, WorthType, Double> target = HashBasedTable.create();
    ResultSet resultSet = selectChunkWorth.executeQuery();

    while (resultSet.next()) {
        int id = resultSet.getInt("id");
        int chunkId = resultSet.getInt("chunk_id");
        int worthId = resultSet.getInt("worth_id");
        double worth = resultSet.getDouble("worth");
        identityCache.setChunkWorthId(chunkId, worthId, id);
        identityCache.getWorthType(worthId).ifPresent(worthType -> target.put(chunkId, worthType, worth));
    }

    resultSet.close();
    return target;
}
private Table<String, ResourceType, Response<Map<String, String>>> getAllFromRedis(Set<String> userIds) {
    if (userIds.size() == 0) {
        return HashBasedTable.create();
    }

    try (Jedis jedis = jedisSource.getJedis()) {
        Table<String, ResourceType, Response<Map<String, String>>> responseTable =
                ArrayTable.create(userIds, new ArrayIterator<>(ResourceType.values()));
        Pipeline p = jedis.pipelined();
        for (String userId : userIds) {
            for (ResourceType r : ResourceType.values()) {
                responseTable.put(userId, r, p.hgetAll(userKey(userId, r)));
            }
        }
        p.sync();
        return responseTable;
    } catch (Exception e) {
        log.error("Storage exception reading all entries.", e);
    }
    return null;
}
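// A small, self-contained sketch of ArrayTable (not the Redis code above): unlike HashBasedTable,
// the row and column key universes are fixed at creation time, which is why getAllFromRedis builds
// the table from the user-id set and ResourceType.values() before filling it. The keys and values
// here are made up for illustration.
import java.util.Arrays;
import com.google.common.collect.ArrayTable;
import com.google.common.collect.Table;

public class ArrayTableExample {
    public static void main(String[] args) {
        // Row and column key sets are fixed up front; every cell starts out null.
        Table<String, String, Integer> t =
                ArrayTable.<String, String, Integer>create(Arrays.asList("user1", "user2"), Arrays.asList("CPU", "MEM"));
        t.put("user1", "CPU", 42);
        System.out.println(t.get("user1", "CPU")); // 42
        System.out.println(t.get("user2", "MEM")); // null (allocated but never set)
        // t.put("user3", "CPU", 1); // would throw IllegalArgumentException: "user3" is not a row key
    }
}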
/**
 * Renews the {@link SecureStore} for all the running applications.
 *
 * @param liveApps set of running applications that need to have secure store renewal
 * @param renewer the {@link SecureStoreRenewer} for renewal
 * @param mergeCredentials {@code true} to merge with existing credentials
 * @return a {@link Multimap} containing the application runs that failed to have their secure store renewed
 */
private Multimap<String, RunId> renewSecureStore(Table<String, RunId, YarnTwillController> liveApps,
                                                 SecureStoreRenewer renewer, boolean mergeCredentials) {
    Multimap<String, RunId> failureRenews = HashMultimap.create();

    // Renew the secure store for each running application
    for (Table.Cell<String, RunId, YarnTwillController> liveApp : liveApps.cellSet()) {
        String application = liveApp.getRowKey();
        RunId runId = liveApp.getColumnKey();
        YarnTwillController controller = liveApp.getValue();

        try {
            renewer.renew(application, runId, new YarnSecureStoreWriter(application, runId, controller, mergeCredentials));
        } catch (Exception e) {
            LOG.warn("Failed to renew secure store for {}:{}", application, runId, e);
            failureRenews.put(application, runId);
        }
    }

    return failureRenews;
}
public RowObjectCreator(BeanInstantiator<T> instantiator, List<ExcelBeanField> beanFields,
                        boolean cellDataMapAttachable, Sheet sheet,
                        Table<Integer, Integer, ImageData> imageDataTable,
                        DataFormatter cellFormatter, int rowNum) {
    this.beanFields = beanFields;
    this.cellDataMapAttachable = cellDataMapAttachable;
    if (cellDataMapAttachable) cellDataMap = Maps.newHashMap();
    else cellDataMap = null;

    this.sheet = sheet;
    this.imageDataTable = imageDataTable;
    this.cellFormatter = cellFormatter;
    this.row = sheet.getRow(rowNum);
    this.object = this.row == null ? null : (T) instantiator.newInstance();
}
private static Table<Integer, Integer, ImageData> readAllCellImages(XSSFDrawing drawing, Sheet sheet) {
    val images = HashBasedTable.<Integer, Integer, ImageData>create();

    for (val shape : drawing.getShapes()) {
        if (!(shape instanceof XSSFPicture)) continue;

        val picture = (XSSFPicture) shape;
        val imageData = createImageData(picture.getPictureData());

        val axisRow = computeAxisRowIndex(sheet, picture);
        val axisCol = computeAxisColIndex(sheet, picture);

        images.put(axisRow, axisCol, imageData);
    }

    return images;
}
private AppStats getAppStats(long appId, long collectTime, Table<RedisConstant, String, Long> table,
        Map<RedisConstant, Map<String, Object>> infoMap) {
    AppStats appStats = new AppStats();
    appStats.setAppId(appId);
    appStats.setCollectTime(collectTime);
    appStats.setModifyTime(new Date());
    appStats.setUsedMemory(MapUtils.getLong(infoMap.get(RedisConstant.Memory), RedisInfoEnum.used_memory.getValue(), 0L));
    appStats.setHits(MapUtils.getLong(table.row(RedisConstant.Stats), RedisInfoEnum.keyspace_hits.getValue(), 0L));
    appStats.setMisses(MapUtils.getLong(table.row(RedisConstant.Stats), RedisInfoEnum.keyspace_misses.getValue(), 0L));
    appStats.setEvictedKeys(MapUtils.getLong(table.row(RedisConstant.Stats), RedisInfoEnum.evicted_keys.getValue(), 0L));
    appStats.setExpiredKeys(MapUtils.getLong(table.row(RedisConstant.Stats), RedisInfoEnum.expired_keys.getValue(), 0L));
    appStats.setNetInputByte(MapUtils.getLong(table.row(RedisConstant.Stats), RedisInfoEnum.total_net_input_bytes.getValue(), 0L));
    appStats.setNetOutputByte(MapUtils.getLong(table.row(RedisConstant.Stats), RedisInfoEnum.total_net_output_bytes.getValue(), 0L));
    appStats.setConnectedClients(MapUtils.getIntValue(infoMap.get(RedisConstant.Clients), RedisInfoEnum.connected_clients.getValue(), 0));
    appStats.setObjectSize(getObjectSize(infoMap));
    return appStats;
}
private List<AppCommandStats> getCommandStatsList(long appId, long collectTime, Table<RedisConstant, String, Long> table) {
    Map<String, Long> commandMap = table.row(RedisConstant.Commandstats);
    List<AppCommandStats> list = new ArrayList<AppCommandStats>();
    if (commandMap == null) {
        return list;
    }
    for (String key : commandMap.keySet()) {
        String commandName = key.replace("cmdstat_", "");
        long callCount = MapUtils.getLong(commandMap, key, 0L);
        if (callCount == 0L) {
            continue;
        }
        AppCommandStats commandStats = new AppCommandStats();
        commandStats.setAppId(appId);
        commandStats.setCollectTime(collectTime);
        commandStats.setCommandName(commandName);
        commandStats.setCommandCount(callCount);
        commandStats.setModifyTime(new Date());
        list.add(commandStats);
    }
    return list;
}
@Override
protected boolean hasValidValueId(final FhAttributeData attribute, final List<Violation> violations) {
    final Table<Optional<String>, Optional<Locale>, String> values = attribute.getValues();
    for (final Optional<String> optional : values.rowKeySet()) {
        final String key = optional == null || !optional.isPresent() ? "_absent_" : optional.get();
        if (!key.matches(VALUE_ID_PATTERN)) {
            rejectValue(attribute, violations,
                    "The \"set\" attribute valueId key \"" + key + "\" does not match the appropriate pattern.");
            return false;
        }
    }
    return true;
}
@Override
protected void validateValue(final FhAttributeData attribute,
        final List<com.fredhopper.core.connector.index.report.Violation> violations) {
    final Table<Optional<String>, Optional<Locale>, String> values = attribute.getValues();
    for (final Optional<String> valueId : values.rowKeySet()) {
        final Map<Optional<Locale>, String> valueMap = values.row(valueId);
        if (CollectionUtils.isEmpty(valueMap) || valueMap.containsKey(Optional.empty()) || valueMap.containsKey(null)) {
            rejectValue(attribute, violations, "The \"set\" attribute Locale key must be set.");
            return;
        }
        if (valueMap.containsValue(null) || valueMap.containsValue("")) {
            rejectValue(attribute, violations, "The \"set\" attribute value must not be blank.");
            return;
        }
    }
}
@Override
protected void validateValue(final FhAttributeData attribute, final List<Violation> violations) {
    final Table<Optional<String>, Optional<Locale>, String> values = attribute.getValues();
    final Map<Optional<Locale>, String> valueMap = values.row(Optional.empty());
    if (CollectionUtils.isEmpty(valueMap) || valueMap.entrySet().size() != 1 || !valueMap.containsKey(Optional.empty())) {
        rejectValue(attribute, violations, "The \"text\" attribute value cannot be localized.");
        return;
    }
    final String value = valueMap.get(Optional.empty());
    if (StringUtils.isBlank(value)) {
        rejectValue(attribute, violations, "The \"text\" attribute value must not be blank.");
    }
}
@Override
protected boolean isValidSize(final FhAttributeData attribute, final List<Violation> violations) {
    final Table<Optional<String>, Optional<Locale>, String> values = attribute.getValues();
    if (values.isEmpty()) {
        rejectValue(attribute, violations,
                "The \"list64\" attribute \"" + attribute.getAttributeId() + "\" has no values assigned.");
        return false;
    }
    if (values.rowKeySet().size() > MAX_SIZE) {
        rejectValue(attribute, violations,
                "The \"list64\" attribute \"" + attribute.getAttributeId() + "\" values total must not exceed 64.");
        return false;
    }
    return true;
}
@Override
protected boolean isValidSize(final FhAttributeData attribute, final List<Violation> violations) {
    final Table<Optional<String>, Optional<Locale>, String> values = attribute.getValues();
    if (values.isEmpty()) {
        rejectValue(attribute, violations,
                "The \"set64\" attribute \"" + attribute.getAttributeId() + "\" has no values assigned.");
        return false;
    }
    if (values.rowKeySet().size() > MAX_SIZE) {
        rejectValue(attribute, violations,
                "The \"set64\" attribute \"" + attribute.getAttributeId() + "\" values total must not exceed 64.");
        return false;
    }
    return true;
}
@Override
protected void validateValue(final FhAttributeData attribute, final List<Violation> violations) {
    final Table<Optional<String>, Optional<Locale>, String> values = attribute.getValues();
    final Map<Optional<Locale>, String> valueMap = values.row(Optional.empty());
    if (CollectionUtils.isEmpty(valueMap) || valueMap.containsKey(Optional.empty()) || valueMap.containsKey(null)) {
        rejectValue(attribute, violations, "The \"asset\" value's Locale key must be set.");
        return;
    }
    for (final Entry<Optional<Locale>, String> entry : valueMap.entrySet()) {
        if (StringUtils.isBlank(entry.getValue())) {
            rejectValue(attribute, violations, "The \"asset\" value must not be blank.");
        }
    }
}
@Override
protected void validateValue(final FhAttributeData attribute, final List<Violation> violations) {
    final Table<Optional<String>, Optional<Locale>, String> values = attribute.getValues();
    final Map<Optional<Locale>, String> valueMap = values.row(Optional.empty());
    if (CollectionUtils.isEmpty(valueMap) || valueMap.entrySet().size() != 1 || !valueMap.containsKey(Optional.empty())) {
        rejectValue(attribute, violations, "The \"int\" attribute value cannot be localized.");
        return;
    }
    final String value = valueMap.get(Optional.empty());
    try {
        if (StringUtils.isBlank(value)
                || !(Integer.parseInt(value) > 0 && Double.valueOf(Integer.MAX_VALUE).compareTo(Double.valueOf(value)) > 0)) {
            rejectValue(attribute, violations, "The \"int\" attribute value is not in the supported value range.");
        }
    } catch (final NumberFormatException ex) {
        rejectValue(attribute, violations, "The \"int\" attribute value does not have the appropriate format.");
    }
}
@Override
public Set<Pair<Unit, Abstraction>> endSummary(SootMethod m, Abstraction d3) {
    Set<Pair<Unit, Abstraction>> res = null;
    for (Unit sP : icfg.getStartPointsOf(m)) {
        Set<Table.Cell<Unit, Abstraction, EdgeFunction<IFDSSolver.BinaryDomain>>> endSum = super.endSummary(sP, d3);
        if (endSum == null || endSum.isEmpty())
            continue;
        if (res == null)
            res = new HashSet<>();
        for (Table.Cell<Unit, Abstraction, EdgeFunction<IFDSSolver.BinaryDomain>> cell : endSum)
            res.add(new Pair<>(cell.getRowKey(), cell.getColumnKey()));
    }
    return res;
}
/**
 * Combine junctions from both strands. Used for Sashimi plot.
 * Note: Flanking depth arrays are not combined.
 */
private List<SpliceJunctionFeature> combineStrandJunctionsMaps() {
    // Start with all + junctions
    Table<Integer, Integer, SpliceJunctionFeature> combinedStartEndJunctionsMap = HashBasedTable.create(posStartEndJunctionsMap);

    // Merge in - junctions
    for (Table.Cell<Integer, Integer, SpliceJunctionFeature> negJunctionCell : negStartEndJunctionsMap.cellSet()) {
        int junctionStart = negJunctionCell.getRowKey();
        int junctionEnd = negJunctionCell.getColumnKey();
        SpliceJunctionFeature negFeat = negJunctionCell.getValue();

        SpliceJunctionFeature junction = combinedStartEndJunctionsMap.get(junctionStart, junctionEnd);
        if (junction == null) {
            // No existing (+) junction here, just add the (-) one
            combinedStartEndJunctionsMap.put(junctionStart, junctionEnd, negFeat);
        } else {
            int newJunctionDepth = junction.getJunctionDepth() + negFeat.getJunctionDepth();
            junction.setJunctionDepth(newJunctionDepth);
        }
    }

    return new ArrayList<SpliceJunctionFeature>(combinedStartEndJunctionsMap.values());
}
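// A minimal sketch of the copy-and-merge pattern used by combineStrandJunctionsMaps above, with
// plain Integer depths standing in for SpliceJunctionFeature (the numbers are made up).
// HashBasedTable.create(otherTable) produces a mutable copy that the second table is merged into,
// summing values on collisions.
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;

public class MergeTablesExample {
    public static void main(String[] args) {
        Table<Integer, Integer, Integer> plus = HashBasedTable.create();
        plus.put(100, 200, 7);   // junction (100, 200) seen 7 times on the + strand

        Table<Integer, Integer, Integer> minus = HashBasedTable.create();
        minus.put(100, 200, 3);  // same junction on the - strand
        minus.put(300, 400, 5);  // junction only on the - strand

        Table<Integer, Integer, Integer> combined = HashBasedTable.create(plus);
        for (Table.Cell<Integer, Integer, Integer> cell : minus.cellSet()) {
            Integer existing = combined.get(cell.getRowKey(), cell.getColumnKey());
            int depth = (existing == null ? 0 : existing) + cell.getValue();
            combined.put(cell.getRowKey(), cell.getColumnKey(), depth);
        }
        System.out.println(combined); // (100,200)=10 and (300,400)=5
    }
}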
static String getXPath(Table<ACTIONS, LinkedList<ATTRIBUTES>, LinkedList<XPathValues>> xPathTable) {
    String xPath = "";
    for (Table.Cell<ACTIONS, LinkedList<ATTRIBUTES>, LinkedList<XPathValues>> tableCell : xPathTable.cellSet()) {
        if (tableCell.getColumnKey() == null)
            Assert.assertTrue("attributesList is null", false);
        if (tableCell.getValue() == null)
            Assert.assertTrue("listOfListValues is null", false);
        for (ATTRIBUTES attribute : tableCell.getColumnKey()) {
            for (XPathValues values : tableCell.getValue()) {
                xPath = xPath + XPathBuilder.getXPath(tableCell.getRowKey(), attribute, values);
            }
        }
    }
    return xPath;
}
/**
 * Construct a sparse matrix with CRS structures (CCS structure optional).
 *
 * @deprecated This method is not recommended: it takes extra time, and it is better to build the
 *             column structure at the same time as the row structure (i.e. when the data table is
 *             constructed). It is kept here as an example of how to build the column structure
 *             from the data table.
 */
public SparseMatrix(int rows, int cols, Table<Integer, Integer, Float> dataTable, boolean isCCSUsed) {
    numRows = rows;
    numColumns = cols;

    Multimap<Integer, Integer> colMap = null;
    if (isCCSUsed) {
        colMap = HashMultimap.create();
        for (Cell<Integer, Integer, Float> cell : dataTable.cellSet())
            colMap.put(cell.getColumnKey(), cell.getRowKey());
    }

    construct(dataTable, colMap);
}
/**
 * Catalog all concepts from the given module into the given Table.
 *
 * @param concepts Table of concepts to add to
 * @param module Module to parse for concepts and codes
 */
public static void inventoryModule(Table<String, String, String> concepts, JsonObject module) {
    JsonObject states = module.get("states").getAsJsonObject();
    for (Entry<String, JsonElement> entry : states.entrySet()) {
        JsonObject state = entry.getValue().getAsJsonObject();
        inventoryState(concepts, state);
    }
}