private Map<String, Object> getEurekaStatus() {
    Map<String, Object> stats = new HashMap<>();
    stats.put("time", new Date());
    stats.put("currentTime", StatusResource.getCurrentTimeAsString());
    stats.put("upTime", StatusInfo.getUpTime());
    stats.put("environment", ConfigurationManager.getDeploymentContext()
            .getDeploymentEnvironment());
    stats.put("datacenter", ConfigurationManager.getDeploymentContext()
            .getDeploymentDatacenter());
    PeerAwareInstanceRegistry registry = getRegistry();
    stats.put("isBelowRenewThreshold", registry.isBelowRenewThresold() == 1);
    populateInstanceInfo(stats);
    return stats;
}
private static <T extends Entity> boolean isEntityTypeValid(ICommandSender commandSender, Map<String, String> params) {
    String s = func_179651_b(params, "type");
    s = s != null && s.startsWith("!") ? s.substring(1) : s;

    if (s != null && !EntityList.isStringValidEntityName(s)) {
        ChatComponentTranslation chatcomponenttranslation =
                new ChatComponentTranslation("commands.generic.entity.invalidType", new Object[] {s});
        chatcomponenttranslation.getChatStyle().setColor(EnumChatFormatting.RED);
        commandSender.addChatMessage(chatcomponenttranslation);
        return false;
    } else {
        return true;
    }
}
public Map<K, V> unmarshall(JsonUnmarshallerContext context) throws Exception {
    Map<K, V> map = new HashMap<K, V>();
    int originalDepth = context.getCurrentDepth();

    if (context.getCurrentToken() == JsonToken.VALUE_NULL) {
        return null;
    }

    while (true) {
        JsonToken token = context.nextToken();
        if (token == null) {
            return map;
        }

        if (token == FIELD_NAME) {
            K k = keyUnmarshaller.unmarshall(context);
            token = context.nextToken();
            V v = valueUnmarshaller.unmarshall(context);
            map.put(k, v);
        } else if (token == END_ARRAY || token == END_OBJECT) {
            if (context.getCurrentDepth() <= originalDepth) {
                return map;
            }
        }
    }
}
public static void logQueueStatus() {
    for (Map.Entry<String, AbstractNulsQueue> entry : QUEUES_MAP.entrySet()) {
        try {
            AbstractNulsQueue queue = entry.getValue();
            long nowIn = queue.getStatInfo().getInCount().get();
            long nowOut = queue.getStatInfo().getOutCount().get();
            long latelyInTps = (nowIn - queue.getStatInfo().getLastInCount()) / queue.getStatInfo().getLatelySecond();
            long latelyOutTps = (nowOut - queue.getStatInfo().getLastOutCount()) / queue.getStatInfo().getLatelySecond();
            queue.getStatInfo().setLatelyInTps(latelyInTps);
            queue.getStatInfo().setLatelyOutTps(latelyOutTps);
            queue.getStatInfo().setLastInCount(nowIn);
            queue.getStatInfo().setLastOutCount(nowOut);
            Log.info(queue.getStatInfo().toString());
        } catch (Exception e) {
            // ignore failures for a single queue so the remaining queues are still logged
        }
    }
}
private String createProxy ( final ValueArchive archive, final List<String> sources )
{
    final Map<String, String> data = new HashMap<String, String> ();

    final String id = Names.makeName ( archive ) + ".proxy";

    for ( final String source : sources )
    {
        if ( source.contains ( "," ) )
        {
            throw new IllegalArgumentException ( String.format ( "Item name '%s' is invalid. Proxy sources must not contain comma (,) in their name.", source ) );
        }
    }

    Collections.sort ( sources ); // make output reproducible

    data.put ( "sources", StringHelper.join ( sources, ", " ) );

    addData ( Factories.FACTORY_DA_PROXY_SOURCE, id, data );

    return id;
}
@Override
protected void render(Block html) {
    set(TITLE, join("Angel WorkerCounterBlock", $(WORKER_ATTEMPT_ID)));
    try {
        WorkerAttemptId workerAttemptId = new WorkerAttemptId($(WORKER_ATTEMPT_ID));
        Map<String, String> metricsMap =
                amContext.getWorkerManager().getWorker(workerAttemptId.getWorkerId())
                        .getWorkerAttempt(workerAttemptId).getMetrics();
        TABLE<Hamlet> worker_metrics_table = html.table();
        html.h6($(WORKER_ATTEMPT_ID));
        worker_metrics_table.tr().th(_TH, "NAME").th(_TH, "VALUE")._();
        for (String key : metricsMap.keySet()) {
            String value = metricsMap.get(key);
            worker_metrics_table.tr().td(String.valueOf(key)).td(value)._();
        }
        worker_metrics_table._();
    } catch (UnvalidIdStrException e) {
        LOG.error("unvalid id string, ", e);
    }
}
@Test
public void testGetHT_SINE() {
    try {
        Map<LocalDate, HtSineIndicator> indicatorMap =
                technicalIndicatorOperation.getHT_SINE("VOD.L", TimeInterval.DAILY, SeriesType.high);
        assertEquals(4456, indicatorMap.size());
    } catch (InvalidApiKeyException ex1) {
        ex1.printStackTrace();
        fail("Invalid Api Key Exception");
    } catch (InvalidFunctionOptionException ex2) {
        ex2.printStackTrace();
        fail("Required Function Properties Missing or is Invalid in Exception");
    } catch (MalFormattedFunctionException ex3) {
        ex3.printStackTrace();
        fail("Invalid Function Exception");
    } catch (MissingApiKeyException ex4) {
        ex4.printStackTrace();
        fail("Missing Api Key Exception");
    } catch (UltraHighFrequencyRequestException ex5) {
        ex5.printStackTrace();
        fail("Ultra High Frequency Request Exception");
    }
}
private File createModule(String cnb, File cluster, String... attr) throws IOException {
    String dashes = cnb.replace('.', '-');
    File tmp = new File(new File(cluster, "modules"), dashes + ".jar");
    Map<String, String> attribs = new HashMap<String, String>();
    for (int i = 0; i < attr.length; i += 2) {
        attribs.put(attr[i], attr[i + 1]);
    }
    Map<String, String> files = new HashMap<String, String>();
    files.put("fake/" + cnb, cnb);
    tmp.getParentFile().mkdirs();
    SetupHid.createJar(tmp, files, attribs);
    return tmp;
}
public static ModbusMaster create ( final BundleContext context, final ScheduledExecutorService executor, final String id, final NioProcessor processor, final Map<String, String> parameters ) throws Exception
{
    final ModbusMaster device = new ModbusMaster ( context, id, executor, processor, "ModbusMaster", "modbus" );

    try
    {
        device.configure ( parameters );
    }
    catch ( final Exception e )
    {
        // dispose what was already created
        device.dispose ();
        throw e;
    }

    return device;
}
/** Looks up the model Field that corresponds to the given SQL column name. */
public static Field getClassFieldByClassName(String name, String sqlField) {
    sqlField = sqlField.replaceAll("_", "");
    if (modelSqlFieldMap.get(StringUtils.lowerCase(name)) == null) {
        if (modelFieldMap.get(StringUtils.lowerCase(name)) == null) {
            return null;
        }
        // build and cache a lower-cased field-name -> Field map for this model
        List<Field> list = modelFieldMap.get(StringUtils.lowerCase(name));
        Map<String, Field> map = new HashMap<>();
        for (Field field : list) {
            map.put(StringUtils.lowerCase(field.getName()), field);
        }
        modelSqlFieldMap.put(StringUtils.lowerCase(name), map);
    }
    return modelSqlFieldMap.get(StringUtils.lowerCase(name)).get(StringUtils.lowerCase(sqlField));
}
private Collection<Map<String, Object>> loadData(Map<String, Object> parameter, String entityQuerySql, Criteria criteria) {
    Map<String, Object> entityQueryParameters = (Map<String, Object>) parameter.get(Constants.ENTITY_QUERY_PARAMETERS);
    Map<String, Object> queryParam = retriveQueryParameters(parameter, entityQueryParameters);
    String querySql = parseQuerySql(entityQuerySql, parameter);
    ParseResult result = this.parseCriteria(criteria, true, "x");
    if (result != null) {
        querySql = "select * from (" + querySql + ") x where " + result.getAssemblySql().toString();
        if (queryParam == null) {
            queryParam = new HashMap<String, Object>();
        }
        queryParam.putAll(result.getValueMap());
    }
    if (queryParam == null) {
        return this.getJdbcTemplate().queryForList(querySql);
    } else {
        return this.getNamedParameterJdbcTemplate().queryForList(querySql, queryParam);
    }
}
private Stream<String> writeTimer(Timer timer, Map<String, DatadogMetricMetadata> metadata) {
    final long wallTime = clock.wallTime();
    final HistogramSnapshot snapshot = timer.takeSnapshot(false);
    final Stream.Builder<String> metrics = Stream.builder();

    Meter.Id id = timer.getId();
    metrics.add(writeMetric(id, "sum", wallTime, snapshot.total(getBaseTimeUnit())));
    metrics.add(writeMetric(id, "count", wallTime, snapshot.count()));
    metrics.add(writeMetric(id, "avg", wallTime, snapshot.mean(getBaseTimeUnit())));
    metrics.add(writeMetric(id, "max", wallTime, snapshot.max(getBaseTimeUnit())));

    addToMetadataList(metadata, id, "sum", Statistic.TotalTime, null);
    addToMetadataList(metadata, id, "count", Statistic.Count, "occurrence");
    addToMetadataList(metadata, id, "avg", Statistic.Value, null);
    addToMetadataList(metadata, id, "max", Statistic.Max, null);

    for (ValueAtPercentile v : snapshot.percentileValues()) {
        String suffix = DoubleFormat.toString(v.percentile() * 100) + "percentile";
        metrics.add(writeMetric(id, suffix, wallTime, v.value(getBaseTimeUnit())));
        addToMetadataList(metadata, id, suffix, Statistic.Value, null);
    }

    return metrics.build();
}
@Override
public BatchBuilder initiateService(Map configurationValues, ServiceRegistryImplementor registry) {
    final Object builder = configurationValues.get( BUILDER );
    if ( builder == null ) {
        return new BatchBuilderImpl(
                ConfigurationHelper.getInt( Environment.STATEMENT_BATCH_SIZE, configurationValues, 1 )
        );
    }

    if ( BatchBuilder.class.isInstance( builder ) ) {
        return (BatchBuilder) builder;
    }

    final String builderClassName = builder.toString();
    try {
        return (BatchBuilder) registry.getService( ClassLoaderService.class ).classForName( builderClassName ).newInstance();
    }
    catch (Exception e) {
        throw new ServiceException( "Could not build explicit BatchBuilder [" + builderClassName + "]", e );
    }
}
/**
 * @see com.it.br.gameserver.network.clientpackets.ClientBasePacket#runImpl()
 */
@Override
protected void runImpl() {
    L2PcInstance activeChar = getClient().getActiveChar();
    if (activeChar == null)
        return;

    if (_bossId != 0) {
        // should always be 0; log it if it isn't, for future research
        _log.info("C5: RequestGetBossRecord: d: " + _bossId + " ActiveChar: " + activeChar);
    }

    RaidBossPointsManager.getInstance();
    int points = RaidBossPointsManager.getPointsByOwnerId(activeChar.getObjectId());
    RaidBossPointsManager.getInstance();
    int ranking = RaidBossPointsManager.calculateRanking(activeChar.getObjectId());
    RaidBossPointsManager.getInstance();
    Map<Integer, Integer> list = RaidBossPointsManager.getList(activeChar);

    // trigger packet
    activeChar.sendPacket(new ExGetBossRecord(ranking, points, list));
}
public void testEntrySetSetValue() {
    // TODO: Investigate the extent to which, in practice, maps that support
    // put() also support Entry.setValue().
    if (!supportsPut) {
        return;
    }
    final Map<K, V> map;
    final V valueToSet;
    try {
        map = makePopulatedMap();
        valueToSet = getValueNotInPopulatedMap();
    } catch (UnsupportedOperationException e) {
        return;
    }

    Set<Entry<K, V>> entrySet = map.entrySet();
    Entry<K, V> entry = entrySet.iterator().next();
    final V oldValue = entry.getValue();
    final V returnedValue = entry.setValue(valueToSet);
    assertEquals(oldValue, returnedValue);
    assertTrue(entrySet.contains(mapEntry(entry.getKey(), valueToSet)));
    assertEquals(valueToSet, map.get(entry.getKey()));
    assertInvariants(map);
}
/**
 * Rolls back the whole transaction group.
 *
 * @param txGroupId transaction group id
 */
@Override
public void rollBack(String txGroupId) {
    try {
        txManagerService.updateTxTransactionItemStatus(txGroupId, txGroupId,
                TransactionStatusEnum.ROLLBACK.getCode(), null);
        final List<TxTransactionItem> txTransactionItems = txManagerService.listByTxGroupId(txGroupId);
        if (CollectionUtils.isNotEmpty(txTransactionItems)) {
            final Map<Boolean, List<TxTransactionItem>> listMap = filterData(txTransactionItems);
            if (Objects.isNull(listMap)) {
                LogUtil.info(LOGGER, "transaction group id: {}, rollback failed! data is incomplete", () -> txGroupId);
                return;
            }
            final List<TxTransactionItem> currentItem = listMap.get(Boolean.TRUE);
            final List<TxTransactionItem> elseItems = listMap.get(Boolean.FALSE);
            doRollBack(txGroupId, currentItem, elseItems);
        }
    } finally {
        //txManagerService.removeRedisByTxGroupId(txGroupId);
    }
}
public static LoggerConfig getOrCreateLoggerConfig(String name) {
    LoggerContext context = (LoggerContext) LogManager.getContext(false);
    Configuration config = context.getConfiguration();
    LoggerConfig logConfig = config.getLoggerConfig(name);
    boolean update = false;
    if (!logConfig.getName().equals(name)) {
        List<AppenderRef> appenderRefs = logConfig.getAppenderRefs();
        Map<Property, Boolean> properties = logConfig.getProperties();
        Set<Property> props = properties == null ? null : properties.keySet();
        logConfig = LoggerConfig.createLogger(String.valueOf(logConfig.isAdditive()),
                logConfig.getLevel(),
                name,
                String.valueOf(logConfig.isIncludeLocation()),
                appenderRefs == null ? null : appenderRefs.toArray(new AppenderRef[appenderRefs.size()]),
                props == null ? null : props.toArray(new Property[props.size()]),
                config,
                null);
        config.addLogger(name, logConfig);
        update = true;
    }
    if (update) {
        context.updateLoggers();
    }
    return logConfig;
}
private static NodeWrapper getWrapper(Map<NodeType, NodeWrapper> nodes, NodeType node) {
    if (nodes.containsKey(node)) {
        return nodes.get(node);
    }
    NodeWrapper nw = new NodeWrapper(node);
    nodes.put(node, nw);
    return nw;
}
public void testHashCodeForEmptyMap() {
    final Map<K, V> map;
    try {
        map = makeEmptyMap();
    } catch (UnsupportedOperationException e) {
        return;
    }
    assertInvariants(map);
}
@RequestMapping("sendmail-template-list") public String list(@ModelAttribute Page page, @RequestParam Map<String, Object> parameterMap, Model model) { String tenantId = tenantHolder.getTenantId(); List<PropertyFilter> propertyFilters = PropertyFilter .buildFromMap(parameterMap); propertyFilters.add(new PropertyFilter("EQS_tenantId", tenantId)); page = sendmailTemplateManager.pagedQuery(page, propertyFilters); model.addAttribute("page", page); return "sendmail/sendmail-template-list"; }
public Map<Set<TItem>, Set<TItem>> calculateAssociationRulesForColumnPositions(Map<Set<TItem>, Integer> itemSets) {
    // create association rules
    final Map<Set<TItem>, Set<TItem>> rules = new HashMap<>();

    // iterate over all frequent item sets
    for (Set<TItem> itemset : itemSets.keySet()) {
        if (itemset.size() > 1) {
            // move each item from the condition to the consequent, step by step
            for (TItem item : itemset) {
                Set<TItem> condition = new HashSet<>(itemset);
                condition.remove(item);
                Set<TItem> consequent = new HashSet<>();
                consequent.add(item);
                // double confidence = (double) itemSets.get(itemset) / (double) itemSets.get(condition);
                // if (confidence == 1.0) {
                rules.put(condition, consequent);
                // }
            }
        }
    }
    return rules;
}
/**
 * Finds node with the minimum/maximum devices from a list of nodes.
 *
 * @param min true: minimum, false: maximum
 * @param controllerDevices controller nodes to devices map
 * @return controller node with minimum/maximum devices
 */
private ControllerNode findBucket(boolean min, Map<ControllerNode, Set<DeviceId>> controllerDevices) {
    int xSize = min ? Integer.MAX_VALUE : -1;
    ControllerNode xNode = null;
    for (ControllerNode node : controllerDevices.keySet()) {
        int size = controllerDevices.get(node).size();
        if ((min && size < xSize) || (!min && size > xSize)) {
            xSize = size;
            xNode = node;
        }
    }
    return xNode;
}
public Map<String, String> getTextParams() {
    AlipayHashMap txtParams = new AlipayHashMap();
    txtParams.put("biz_content", this.bizContent);
    if (udfParams != null) {
        txtParams.putAll(this.udfParams);
    }
    return txtParams;
}
public String doAliPayReq(String channelId, JSONObject payOrder, String resKey) {
    Map<String, Object> paramMap = new HashMap<>();
    paramMap.put("payOrder", payOrder);
    String jsonParam = RpcUtil.createBaseParam(paramMap);
    Map<String, Object> result;
    switch (channelId) {
        case PayConstant.PAY_CHANNEL_ALIPAY_MOBILE:
            result = rpcCommonService.rpcPayChannel4AliService.doAliPayMobileReq(jsonParam);
            break;
        case PayConstant.PAY_CHANNEL_ALIPAY_PC:
            result = rpcCommonService.rpcPayChannel4AliService.doAliPayPcReq(jsonParam);
            break;
        case PayConstant.PAY_CHANNEL_ALIPAY_WAP:
            result = rpcCommonService.rpcPayChannel4AliService.doAliPayWapReq(jsonParam);
            break;
        case PayConstant.PAY_CHANNEL_ALIPAY_QR:
            result = rpcCommonService.rpcPayChannel4AliService.doAliPayQrReq(jsonParam);
            break;
        default:
            result = null;
            break;
    }
    String s = RpcUtil.mkRet(result);
    if (s == null) {
        return XXPayUtil.makeRetData(XXPayUtil.makeRetMap(PayConstant.RETURN_VALUE_SUCCESS, "",
                PayConstant.RETURN_VALUE_FAIL, "0111", "Alipay payment request failed"), resKey);
    }
    Map<String, Object> map = XXPayUtil.makeRetMap(PayConstant.RETURN_VALUE_SUCCESS, "",
            PayConstant.RETURN_VALUE_SUCCESS, null);
    map.putAll((Map) result.get("bizResult"));
    return XXPayUtil.makeRetData(map, resKey);
}
protected DeviceMediaRendererManual(DeviceManagerImpl _manager, Map _map) throws IOException {
    super(_manager, _map);
}
@RequestMapping(value = "/now") @ResponseBody public Map<String,String> getTime() { Map<String,String> map = new HashMap<>(); map.put("time", ""+System.currentTimeMillis()); return map; }
@Test
public void testEquals() {
    RMapCacheReactive<String, String> map = redisson.getMapCache("simple");
    sync(map.put("1", "7"));
    sync(map.put("2", "4"));
    sync(map.put("3", "5"));

    Map<String, String> testMap = new HashMap<String, String>();
    testMap.put("1", "7");
    testMap.put("2", "4");
    testMap.put("3", "5");

    Assert.assertEquals(map, testMap);
    Assert.assertEquals(testMap.hashCode(), map.hashCode());
}
public static <T> List<T> getBeans(Class<T> clazz) {
    Map<String, T> nameBeans = applicationContext.getBeansOfType(clazz);
    List<T> beans = new ArrayList<T>();
    if (nameBeans != null) {
        beans.addAll(nameBeans.values());
    }
    return beans;
}
@Override
public void actionPerformed(ActionEvent e) {
    List<TreeListNode> selectedNodes = DashboardViewer.getInstance().getSelectedNodes();
    Map<String, List<TreeListNode>> map = new HashMap<String, List<TreeListNode>>();
    for (TreeListNode treeListNode : selectedNodes) {
        List<TreeListNode> list = map.get(treeListNode.getClass().getName());
        if (list == null) {
            list = new ArrayList<TreeListNode>();
        }
        list.add(treeListNode);
        map.put(treeListNode.getClass().getName(), list);
    }
    for (String key : map.keySet()) {
        List<TreeListNode> value = map.get(key);
        Action action = null;
        if (key.equals(RepositoryNode.class.getName()) || key.equals(ClosedRepositoryNode.class.getName())) {
            action = new Actions.RemoveRepositoryAction(value.toArray(new RepositoryNode[value.size()]));
        } else if (key.equals(CategoryNode.class.getName()) || key.equals(ClosedCategoryNode.class.getName())) {
            action = new Actions.DeleteCategoryAction(value.toArray(new CategoryNode[value.size()]));
        } else if (key.equals(QueryNode.class.getName())) {
            action = new Actions.DeleteQueryAction(value.toArray(new QueryNode[value.size()]));
        } else if (key.equals(TaskNode.class.getName())) {
            action = new Actions.DeleteLocalTaskAction(value.toArray(new TaskNode[value.size()]));
        }
        if (action != null && action.isEnabled()) {
            action.actionPerformed(e);
        }
    }
}
private List<GUIGraphNodeDecorator> loadGraphNodesForGUI() throws ConfigurationException {
    List<GUIGraphNodeDecorator> nodes = new ArrayList<GUIGraphNodeDecorator>();
    for (ConfigurationNode node : config.getRoot().getChildren(NODE)) {
        GUIGraphNodeDecorator guiNode = loadGraphNodeForGUI(node);

        List<IKernelFile> sourceFiles = loadSourceFiles(node);
        guiNode.setSourceFiles(sourceFiles);

        Map<String, Object> nodeProperties = loadGraphNodeProperties(node);
        guiNode.getGraphNode().setProperties(nodeProperties);

        nodes.add(guiNode);
    }
    return nodes;
}
public AudioStream(@NotNull Fraction avg_frame_rate, long bit_rate, @NotNull String codec_long_name,
                   @NotNull String codec_name, long codec_tag, @NotNull String codec_tag_string,
                   @NotNull Fraction codec_time_base, @NotNull CodecType codec_type,
                   @NotNull Disposition disposition, float duration, long duration_ts, long index,
                   long nb_frames, long nb_read_packets, @NotNull String profile,
                   @NotNull Fraction r_frame_rate, long start_pts, float start_time,
                   @NotNull Map<String, String> tags, @NotNull Fraction time_base,
                   long bits_per_sample, @NotNull String channel_layout, long channels,
                   long max_bit_rate, @NotNull String sample_fmt, long sample_rate) {
    super(avg_frame_rate, bit_rate, codec_long_name, codec_name, codec_tag, codec_tag_string,
            codec_time_base, codec_type, disposition, duration, duration_ts, index, nb_frames,
            nb_read_packets, profile, r_frame_rate, start_pts, start_time, tags, time_base);
    this.bits_per_sample = bits_per_sample;
    this.channel_layout = channel_layout;
    this.channels = channels;
    this.max_bit_rate = max_bit_rate;
    this.sample_fmt = sample_fmt;
    this.sample_rate = sample_rate;
}
void addVisibleMethods(Map methodMap) {
    /*
     * Add methods from parent types first, so that the methods in this class
     * will overwrite them in the hash table.
     */
    Iterator iter = superinterfaces().iterator();
    while (iter.hasNext()) {
        InterfaceTypeImpl interfaze = (InterfaceTypeImpl) iter.next();
        interfaze.addVisibleMethods(methodMap);
    }
    addToMethodMap(methodMap, methods());
}
public byte[] getBody() throws AuthFailureError {
    Map<String, String> params = getParams();
    if (params == null || params.size() <= 0) {
        return null;
    }
    return encodeParameters(params, getParamsEncoding());
}
@Test
public void testSerializePointGeneratorAttribute() throws Exception {
    // BoundingBox box = new BoundingBox();
    // box.setFastAxisStart(0);
    // box.setSlowAxisStart(0);
    // box.setFastAxisLength(3);
    // box.setSlowAxisLength(3);
    //
    // GridModel gridModel = new GridModel("x", "y");
    // gridModel.setFastAxisPoints(20);
    // gridModel.setSlowAxisPoints(50);
    // gridModel.setBoundingBox(box);
    //
    // CompoundModel<?> compoundModel = new CompoundModel<>(gridModel);
    //
    // IPointGeneratorService genService = new PointGeneratorService();
    // IPointGenerator<?> gen = genService.createCompoundGenerator(compoundModel);

    // It seems that point generator attribute just stores a map, so we just test that
    Map<String, String> map = new HashMap<>();
    map.put("key", "value");
    map.put("foo", "bar");

    PointGeneratorAttribute attrib = new PointGeneratorAttribute();
    attrib.setName("pointGeneratorAttribute");
    attrib.setLabel("Point Generator Attribute");
    attrib.setDescription("Description of a point generator attribute");
    attrib.setTags(new String[] { "foo", "bar" });
    attrib.setWriteable(true);
    attrib.setValue(map);

    String json = service.marshal(attrib);
    PointGeneratorAttribute newAttrib = service.unmarshal(json, PointGeneratorAttribute.class);
    assertEquals(attrib, newAttrib);
}
/**
 * Tests choosing random nodes when a node is excluded.
 * Picks 100 random nodes outside the scope of the first data node and
 * verifies that every node except the excluded one is selected at least once.
 */
@Test
public void testChooseRandomExcludedNode() {
    String scope = "~" + NodeBase.getPath(dataNodes[0]);
    Map<Node, Integer> frequency = pickNodesAtRandom(100, scope);

    for (Node key : dataNodes) {
        // all nodes except the first should be picked more than zero times
        assertTrue(frequency.get(key) > 0 || key == dataNodes[0]);
    }
}
private okhttp3.Call deleteCall(String webId, final ProgressResponseBody.ProgressListener progressListener,
        final ProgressRequestBody.ProgressRequestListener progressRequestListener) throws ApiException {
    Object localVarPostBody = null;

    // verify the required parameter 'webId' is set
    if (webId == null)
        throw new ApiException("Missing required parameter 'webId'");

    String localVarPath = "/securityidentities/{webId}";
    Map<String, String> localVarHeaderParams = new HashMap<String, String>();
    Map<String, Object> localVarFormParams = new HashMap<String, Object>();
    List<Pair> localVarQueryParams = new ArrayList<Pair>();

    final String[] localVarAccepts = {"application/json", "text/json", "text/html", "application/x-ms-application"};
    final String localVarAccept = apiClient.selectHeaderAccept(localVarAccepts);
    if (localVarAccept != null)
        localVarHeaderParams.put("Accept", localVarAccept);

    final String[] localVarContentTypes = {"application/json", "text/json" };
    final String localVarContentType = apiClient.selectHeaderContentType(localVarContentTypes);
    localVarHeaderParams.put("Content-Type", localVarContentType);

    localVarPath = localVarPath.replaceAll("\\{webId\\}", apiClient.escapeString(webId.toString()));

    if (progressListener != null) {
        apiClient.getHttpClient().networkInterceptors().add(new okhttp3.Interceptor() {
            @Override
            public okhttp3.Response intercept(okhttp3.Interceptor.Chain chain) throws IOException {
                okhttp3.Response originalResponse = chain.proceed(chain.request());
                return originalResponse.newBuilder()
                        .body(new ProgressResponseBody(originalResponse.body(), progressListener))
                        .build();
            }
        });
    }

    String[] localVarAuthNames = new String[] {"Basic" };
    return apiClient.buildCall(localVarPath, "DELETE", localVarQueryParams, localVarPostBody,
            localVarHeaderParams, localVarFormParams, localVarAuthNames, progressRequestListener);
}
@Lazy
@Bean(name = "sql-stored-start-connector-component")
@ConditionalOnClass(CamelContext.class)
@ConditionalOnMissingBean
public SqlStoredStartConnectorComponent configureSqlStoredStartConnectorComponent() throws Exception {
    SqlStoredStartConnectorComponent connector = new SqlStoredStartConnectorComponent();
    connector.setCamelContext(camelContext);

    Map<String, Object> parameters = new HashMap<>();
    IntrospectionSupport.getProperties(configuration, parameters, null, false);
    CamelPropertiesHelper.setCamelProperties(camelContext, connector, parameters, false);
    connector.setOptions(parameters);

    if (ObjectHelper.isNotEmpty(customizers)) {
        for (ConnectorCustomizer<SqlStoredStartConnectorComponent> customizer : customizers) {
            boolean useCustomizer = (customizer instanceof HasId)
                    ? HierarchicalPropertiesEvaluator.evaluate(
                            applicationContext.getEnvironment(),
                            "camel.connector.customizer",
                            "camel.connector.sql-stored-start-connector.customizer",
                            ((HasId) customizer).getId())
                    : HierarchicalPropertiesEvaluator.evaluate(
                            applicationContext.getEnvironment(),
                            "camel.connector.customizer",
                            "camel.connector.sql-stored-start-connector.customizer");

            if (useCustomizer) {
                LOGGER.debug("Configure connector {}, with customizer {}", connector, customizer);
                customizer.customize(connector);
            }
        }
    }

    return connector;
}
public Map<String, String> getApiLinks() {
    final Map<String, String> links = Maps.newHashMap();
    final NamespaceKey datasetPath = new NamespaceKey(fullPath);
    final String dottedFullPath = datasetPath.toUrlEncodedString();
    final String fullPathString = PathUtils.toFSPath(fullPath).toString();

    links.put("jobs", this.getJobsUrl());
    switch (datasetType) {
        case VIRTUAL_DATASET:
            links.put("edit", "/dataset/" + dottedFullPath + "/version/" + datasetVersion + "/preview"); // edit dataset
            // create new dataset
            links.put("run", "/datasets/new_untitled?parentDataset=" + dottedFullPath + "&newVersion=" + DatasetVersion.newVersion());
            break;
        case PHYSICAL_DATASET_HOME_FILE:
            links.put("run", "/home/" + fullPath.get(0) + "new_untitled_from_file" + fullPathString);
            break;
        case PHYSICAL_DATASET_HOME_FOLDER:
            // Folder not supported yet
            break;
        case PHYSICAL_DATASET_SOURCE_FILE:
            links.put("run", "/source/" + fullPath.get(0) + "new_untitled_from_file" + fullPathString);
            break;
        case PHYSICAL_DATASET_SOURCE_FOLDER:
            links.put("run", "/source/" + fullPath.get(0) + "new_untitled_from_folder" + fullPathString);
            break;
        case PHYSICAL_DATASET:
            links.put("run", "/source/" + fullPath.get(0) + "new_untitled_from_physical_dataset" + fullPathString);
            break;
        default:
            break;
    }
    return links;
}
@SuppressWarnings("unchecked") public static Object getValue(Object container, VariableItemWraper varVo) { if (null == container) { return null; } if (XCO.class == container.getClass()) { return OgnlXCO.getValue((XCO) container, varVo); } else if (Map.class.isAssignableFrom(container.getClass())) { return OgnlMap.getValue((Map<String, Object>) container, varVo); } else { throw new OgnlException("Ognl.getValue不支持的类型:" + container.getClass()); } }
public static byte[] generate(String address, int size) {
    try {
        Map<EncodeHintType, Object> hintMap = new EnumMap<EncodeHintType, Object>(EncodeHintType.class);
        hintMap.put(EncodeHintType.MARGIN, 0);
        hintMap.put(EncodeHintType.ERROR_CORRECTION, ErrorCorrectionLevel.L);
        hintMap.put(EncodeHintType.CHARACTER_SET, "UTF-8");

        QRCodeWriter qrWriter = new QRCodeWriter();
        BitMatrix qrMatrix = qrWriter.encode(address, BarcodeFormat.QR_CODE, size, size, hintMap);
        int width = qrMatrix.getWidth();

        BufferedImage image = new BufferedImage(width, width, BufferedImage.TYPE_INT_RGB);
        image.createGraphics();
        Graphics2D graphics = (Graphics2D) image.getGraphics();
        graphics.setColor(Color.WHITE);
        graphics.fillRect(0, 0, width, width);
        graphics.setColor(Color.BLACK);
        for (int i = 0; i < width; i++) {
            for (int j = 0; j < width; j++) {
                if (qrMatrix.get(i, j)) {
                    graphics.fillRect(i, j, 1, 1);
                }
            }
        }

        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        ImageIO.write(image, IMAGE_FILE_TYPE, bos);
        return bos.toByteArray();
    } catch (Exception e) {
        e.printStackTrace();
    }
    return null;
}