public void setupDepartments(EditRoomDeptForm editRoomDeptForm, HttpServletRequest request, Location location) throws Exception { Collection availableDepts = new Vector(); Collection currentDepts = new HashSet(); Set<Department> departments = Department.getUserDepartments(sessionContext.getUser()); boolean hasControl = false; for (RoomDept rd: location.getRoomDepts()) { currentDepts.add(rd.getDepartment()); if (departments.contains(rd.getDepartment()) && rd.isControl()) hasControl = true; } Set<Department> set = Department.findAllBeingUsed(location.getSession().getUniqueId()); for (Department d: set) { if (hasControl || departments.contains(d) || !currentDepts.contains(d)) availableDepts.add(new LabelValueBean(d.getDeptCode() + " - " + d.getName(), d.getUniqueId().toString())); } request.setAttribute(Department.DEPT_ATTR_NAME, availableDepts); }
private void logFormRequest(final Form form) { if (LOGGER.isDebugEnabled()) { final Set<String> pairs = new HashSet<String>(); for (final String name : form.getNames()) { final StringBuilder builder = new StringBuilder(); builder.append(name); builder.append(": "); if (!"password".equalsIgnoreCase(name)) { builder.append(form.getValues(name)); } else { builder.append("*****"); } pairs.add(builder.toString()); } LOGGER.debug(StringUtils.join(pairs, ", ")); } }
/** * Returns the set of entry names and reports any duplicate entry names in the {@code result} * as errors. */ private static Set<String> checkForDuplicateEntries( List<CentralDirectoryRecord> cdRecords, Result result) { Set<String> cdEntryNames = new HashSet<>(cdRecords.size()); Set<String> duplicateCdEntryNames = null; for (CentralDirectoryRecord cdRecord : cdRecords) { String entryName = cdRecord.getName(); if (!cdEntryNames.add(entryName)) { // This is an error. Report this once per duplicate name. if (duplicateCdEntryNames == null) { duplicateCdEntryNames = new HashSet<>(); } if (duplicateCdEntryNames.add(entryName)) { result.addError(Issue.JAR_SIG_DUPLICATE_ZIP_ENTRY, entryName); } } } return cdEntryNames; }
@Override @RequestMapping(value = ApiConfig.ITEM_TYPE + "/{providerType}", method = RequestMethod.GET, headers = ApiConfig.API_HEADERS, produces = {ApiConfig.API_PRODUCES}) @ResponseBody public ItemTypeList getItemTypes(@PathVariable final String providerType, @CookieValue(value = SessionManager.SESSION_COOKIE, required = false) final String sessionId, final HttpServletResponse response) { if (rateLimiter.tryAcquire()) { modelValidator.validateProviderType(providerType); sessionManager.getSession(sessionId, response); if (LOG.isDebugEnabled()) { LOG.debug("Get all itemTypes for a provider type"); } return new ItemTypeList(new HashSet<>(itemTypeManager.getItemTypes(providerType))); } else { throw new ApiThrottlingException("Exceeded max number of requests per second"); } }
/** * Collects the ancestors of {@code v} by walking the chain of reverse edges towards the root. * @param v the vertex whose ancestors are computed * @return the set of vertices on the reverse-edge path starting at {@code v} */ private Set<Vertex> computeParentVertices(Vertex v) { HashSet<Vertex> parentVertices = new HashSet<Vertex>(); Edge reverseEdge = reverseEdges.get(v); Vertex currentVertex = v; while (reverseEdge != null) { Vertex parentVertex = reverseEdge.getOppositeVertex(currentVertex); parentVertices.add(parentVertex); currentVertex = parentVertex; reverseEdge = reverseEdges.get(currentVertex); } return parentVertices; }
public ConfusionMatrix(int[] truth, int[] prediction) { if(truth.length != prediction.length){ throw new IllegalArgumentException(String.format("The vector sizes don't match: %d != %d.", truth.length, prediction.length)); } /* labels are used directly as matrix indices, so they are assumed to be the contiguous integers 0..k-1; collect them from both vectors so a label that only occurs in the predictions is still counted */ Set<Integer> ySet = new HashSet<>(); for(int i = 0; i < truth.length; i++){ ySet.add(truth[i]); ySet.add(prediction[i]); } matrix = new int[ySet.size()][ySet.size()]; for(int i = 0; i < truth.length; i++){ matrix[truth[i]][prediction[i]] += 1; } }
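// Illustrative sketch only (hypothetical caller, not from the original source): how the constructor
// above is typically used. Because truth[i] and prediction[i] index the matrix directly, labels are
// assumed to be the contiguous integers 0..k-1.
private void confusionMatrixUsageSketch() {
    int[] truth      = {0, 0, 1, 1, 2, 2};
    int[] prediction = {0, 1, 1, 1, 2, 0};
    ConfusionMatrix cm = new ConfusionMatrix(truth, prediction);
    // the resulting 3x3 matrix has matrix[1][1] == 2 (both class-1 samples predicted correctly),
    // matrix[0][1] == 1 and matrix[2][0] == 1 for the two misclassified samples
}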
public void testRenameUnversionedFile_FO() throws Exception { // init File fromFile = new File(repositoryLocation, "fromFile"); fromFile.createNewFile(); File toFile = new File(repositoryLocation, "toFile"); // rename h.setFilesToRefresh(new HashSet<File>(Arrays.asList(fromFile, toFile))); renameFO(fromFile, toFile); assertTrue(h.waitForFilesToRefresh()); // test assertFalse(fromFile.exists()); assertTrue(toFile.exists()); assertEquals(EnumSet.of(Status.UPTODATE), getCache().getStatus(fromFile).getStatus()); assertEquals(EnumSet.of(Status.NEW_INDEX_WORKING_TREE, Status.NEW_HEAD_WORKING_TREE), getCache().getStatus(toFile).getStatus()); }
/** * testGenerateCandidates */ public void testGenerateCandidates() throws Exception { Table catalog_tbl = this.getTable(TM1Constants.TABLENAME_SUBSCRIBER); Column target_col = this.getColumn(catalog_tbl, "S_ID"); Collection<VerticalPartitionColumn> candidates = VerticalPartitionerUtil.generateCandidates(target_col, info.stats); assertNotNull(candidates); assertFalse(candidates.isEmpty()); VerticalPartitionColumn vpc = CollectionUtil.first(candidates); assertNotNull(vpc); Collection<Column> expected_cols = CollectionUtil.addAll(new HashSet<Column>(), this.getColumn(catalog_tbl, "SUB_NBR"), this.getColumn(catalog_tbl, "S_ID")); assertEquals(expected_cols.size(), vpc.getVerticalMultiColumn().size()); assertTrue(expected_cols + " <=> " + vpc.getVerticalPartitionColumns(), expected_cols.containsAll(vpc.getVerticalPartitionColumns())); Collection<Statement> expected_stmts = new HashSet<Statement>(); expected_stmts.add(this.getStatement(this.getProcedure(DeleteCallForwarding.class), "query")); expected_stmts.add(this.getStatement(this.getProcedure(InsertCallForwarding.class), "query1")); expected_stmts.add(this.getStatement(this.getProcedure(UpdateLocation.class), "getSubscriber")); assertEquals(expected_stmts.size(), vpc.getOptimizedQueries().size()); assertTrue(expected_stmts.containsAll(vpc.getOptimizedQueries())); }
/** * Parses a string into an instance of the given target class. * @param string the source string * @param clazz the target class * @return the parsed value */ private static Object parse(String string, Class<?> clazz) throws IOException { if (Integer.class.isAssignableFrom(clazz)) { return (int) Double.parseDouble(string); } if (Boolean.class.isAssignableFrom(clazz)) { return Boolean.parseBoolean(string); } if (Double.class.isAssignableFrom(clazz)) { return Double.parseDouble(string); } if (HashSet.class.isAssignableFrom(clazz)) { return Utils.toHashSet(Utils.tokenize(string, ",")); } if (File.class.isAssignableFrom(clazz)) { return new File(string); } throw new IOException("Unknown instance: " + clazz.getSimpleName()); }
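// Illustrative sketch only (hypothetical caller, not in the original source): the expected behaviour
// of the private parse helper above for a few target classes. Note that Integer targets go through
// Double.parseDouble and are truncated toward zero, so "3.7" yields 3.
private static void parseUsageSketch() throws IOException {
    Object i = parse("3.7", Integer.class);    // Integer 3
    Object b = parse("true", Boolean.class);   // Boolean.TRUE
    Object s = parse("a,b,c", HashSet.class);  // HashSet built via Utils.toHashSet(Utils.tokenize(...))
    Object f = parse("/tmp/data", File.class); // new File("/tmp/data")
}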
public void testDeleteA_RenameB2A_DO_129805() throws Exception { // init File fileA = new File(repositoryLocation, "A"); fileA.createNewFile(); File fileB = new File(repositoryLocation, "B"); fileB.createNewFile(); add(); commit(); // delete A h.setFilesToRefresh(new HashSet<File>(Arrays.asList(fileA, fileB))); delete(fileA); // rename B to A renameDO(fileB, fileA); assertTrue(h.waitForFilesToRefresh()); // test assertFalse(fileB.exists()); assertTrue(fileA.exists()); assertEquals(EnumSet.of(Status.UPTODATE), getCache().getStatus(fileA).getStatus()); }
private static Set<Object> getFactories(String serviceName) { HashSet<Object> result = new HashSet<Object>(); if ((serviceName == null) || (serviceName.length() == 0) || (serviceName.endsWith("."))) { return result; } Provider[] provs = Security.getProviders(); Object fac; for (Provider p : provs) { Iterator<Service> iter = p.getServices().iterator(); while (iter.hasNext()) { Service s = iter.next(); if (s.getType().equals(serviceName)) { try { fac = loadFactory(s); if (fac != null) { result.add(fac); } } catch (Exception ignore) { } } } } return Collections.unmodifiableSet(result); }
private void doConsume() { try { consumer.seekToEnd(new HashSet<TopicPartition>()); while (!stopped) { final ConsumerRecords<String, byte[]> records = consumer.poll(10); for (final ConsumerRecord<String, byte[]> record : records) { recordSet.add(record); LOG.info("Reading record: topic = {}, partition = {}, offset = {}, key = {}, value = {}", record.topic(), record.partition(), record.offset(), record.key(), new String(record.value())); } consumer.commitSync(); Thread.sleep(10); } } catch (final InterruptedException e) { LOG.error("interrupted", e); Thread.currentThread().interrupt(); /* restore the interrupt flag for callers */ } finally { consumer.unsubscribe(); /* must happen before close(); a closed consumer rejects further calls */ consumer.close(); consumer = null; } }
public static void registerTopic(ChannelEntity chn, String topic) { if (chn == null) { return; } if (topic == null) { return; } Set<String> topicSet = channelTopicMap.get(chn); if (topicSet == null) { topicSet = new HashSet<String>(1); } topicSet.add(topic); channelTopicMap.put(chn, topicSet); Set<ChannelEntity> channelSet = topicChannelMap.get(topic); if (channelSet == null) { channelSet = new HashSet<ChannelEntity>(1); } channelSet.add(chn); topicChannelMap.put(topic, channelSet); }
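// Hedged alternative, not from the original source: the check-then-put sequence above can lose
// updates under concurrent registration. Assuming channelTopicMap and topicChannelMap are (or can be
// made) java.util.concurrent.ConcurrentHashMap instances, computeIfAbsent makes the registration atomic.
public static void registerTopicAtomically(ChannelEntity chn, String topic) {
    if (chn == null || topic == null) {
        return;
    }
    // computeIfAbsent creates the per-key set once; newKeySet() gives a concurrent Set view
    channelTopicMap.computeIfAbsent(chn, k -> ConcurrentHashMap.newKeySet()).add(topic);
    topicChannelMap.computeIfAbsent(topic, k -> ConcurrentHashMap.newKeySet()).add(chn);
}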
private void initDebuggerManagerListeners () { synchronized (loadedListenersLock) { if (loadedListeners == null) { loadedListeners = new HashSet<LazyDebuggerManagerListener>(); listenersLookupList = lookup.lookup (null, LazyDebuggerManagerListener.class); refreshDebuggerManagerListeners(listenersLookupList); ((Customizer) listenersLookupList).addPropertyChangeListener(new PropertyChangeListener() { public void propertyChange(PropertyChangeEvent evt) { refreshDebuggerManagerListeners((List<? extends LazyDebuggerManagerListener>) evt.getSource()); } }); } } }
/** * Apply Bundle matched properties. */ public static CloudIotOptions reconfigure(CloudIotOptions original, Bundle bundle) { try { if (Log.isLoggable(TAG, Log.INFO)) { HashSet<String> valid = new HashSet<>(Arrays.asList(new String[] {"project_id", "registry_id", "device_id","cloud_region", "mqtt_bridge_hostname", "mqtt_bridge_port"})); valid.retainAll(bundle.keySet()); Log.i(TAG, "Configuring options using the following intent extras: " + valid); } CloudIotOptions result = new CloudIotOptions(); result.projectId = bundle.getString("project_id", original.projectId); result.registryId = bundle.getString("registry_id", original.registryId); result.deviceId = bundle.getString("device_id", original.deviceId); result.cloudRegion = bundle.getString("cloud_region", original.cloudRegion); result.bridgeHostname = bundle.getString("mqtt_bridge_hostname", original.bridgeHostname); result.bridgePort = (short) bundle.getInt("mqtt_bridge_port", original.bridgePort); return result; } catch (Exception e) { throw new IllegalArgumentException("While processing configuration options", e); } }
@Override protected Attribute read(ClassReader cr, int off, int len, char[] buf, int codeOff, Label[] labels) { // package count int package_count = cr.readUnsignedShort(off); off += 2; // packages Set<String> packages = new HashSet<>(); for (int i=0; i<package_count; i++) { String pkg = cr.readPackage(off, buf).replace('/', '.'); packages.add(pkg); off += 2; } return new ModulePackagesAttribute(packages); }
@Override protected Object clone() { Pixlr pixlr = null; try { pixlr = (Pixlr) super.clone(); pixlr.setUid(null); // create an empty set for the pixlrSession pixlr.pixlrSessions = new HashSet<PixlrSession>(); } catch (CloneNotSupportedException cnse) { Pixlr.log.error("Error cloning " + Pixlr.class); } return pixlr; }
@Test public void T_filter_7() throws IOException{ List<PrimitiveObject> dic = new ArrayList<PrimitiveObject>(); dic.add( new StringObj( "abc" ) ); dic.add( new StringObj( "bcd" ) ); dic.add( new StringObj( "cde" ) ); dic.add( new StringObj( "def" ) ); dic.add( new StringObj( "efg" ) ); IntBuffer buffer = IntBuffer.allocate( 100 ); for( int i = 0 ; i < 100 ; i++ ){ buffer.put( i % 5 ); } ICellIndex index = new BufferDirectSequentialStringCellIndex( new TestDicManager( dic ) , buffer ); Set<String> filterDic = new HashSet<String>(); filterDic.add( "abc" ); filterDic.add( "bcd" ); IFilter filter = new StringDictionaryFilter( filterDic ); FilterdExpressionIndex result = new FilterdExpressionIndex( index.filter( filter , new boolean[100] ) ); assertEquals( result.size() , 40 ); for( int i = 0,n=0 ; n < 100 ; i+=2,n+=5 ){ assertEquals( result.get(i) , n ); assertEquals( result.get(i+1) , n+1 ); } }
@Test public void should_generate_milestone_blog_from_template_for_tag_with_v_prefix_release() throws IOException { this.props.getPom().setBranch("vDalston.M1"); Projects projects = new Projects( new HashSet<ProjectVersion>() {{ add(new ProjectVersion("spring-cloud-sleuth", "1.0.0.M1")); add(new ProjectVersion("spring-cloud-consul", "1.0.1.M1")); }} ); File generatedBlog = new TemplateGenerator(this.props, this.handler).blog(projects); then(content(generatedBlog)) .contains("Milestone 1 (M1) of the [Spring Cloud Dalston]") .contains("The release can be found in [Spring Milestone]") .contains("### Spring Cloud Sleuth") .contains("| Spring Cloud Sleuth \t| 1.0.0.M1 \t|") .contains("<id>spring-milestones</id>") .contains("url 'http://repo.spring.io/milestone'") .contains("<version>Dalston.M1</version>") .contains("mavenBom 'org.springframework.cloud:spring-cloud-dependencies:Dalston.M1'"); }
public HashSet<String> queryTopicConsumeByWho(final String topic) { HashSet<String> groups = new HashSet<>(); Iterator<Entry<String, ConsumerGroupInfo>> it = this.consumerTable.entrySet().iterator(); while (it.hasNext()) { Entry<String, ConsumerGroupInfo> entry = it.next(); ConcurrentHashMap<String, SubscriptionData> subscriptionTable = entry.getValue().getSubscriptionTable(); if (subscriptionTable.containsKey(topic)) { groups.add(entry.getKey()); } } return groups; }
public static DepTree getValidStructureDepTree(DepTree tree, String sentence, HashSet<String> prevalidset){ // lemmatize, then mark each node as kept or sent to the dustbin List<String> sen = Datum.lemmatizeSen(sentence); for (int i = 1; i < tree.getSentence().length; i++){ tree.getSentence()[i].setWord(sen.get(i-1)); if (!prevalidset.contains(tree.getSentence()[i].getWord())){ tree.getSentence()[i].setDustbin(true); } else if(!DepSet.valid_rel.contains(tree.getSentence()[i].getReln()) || !DepSet.real_postag.contains(tree.getSentence()[i].getPostag())){ tree.getSentence()[i].setDustbin(true); // nodes with an invalid dependency relation or POS tag are also discarded } } tree.setSetDustbin(true); tree.adjustTreeLevel(); // once the dustbin flags are set, the tree node levels can be recomputed; setting the dustbin flags and adjustTreeLevel() should always be called together return tree; }
/** * Initializes the activity container ("pit") slots for a custom process. * * @param prefix xxx.xx.loader.a.Activity * @param containers collects the names of all Activity container slots * @param states the states of all container slots in the current process * @param suffix p0, p1, p2 */ private void init2(String prefix, HashSet<String> containers, ProcessStates states, String suffix) { suffix = suffix.toUpperCase(); // Standard states.mLaunchModeStates.addStates(mStates, containers, prefix + suffix, LAUNCH_MULTIPLE, true, HostConfigHelper.ACTIVITY_PIT_COUNT_TS_STANDARD); states.mLaunchModeStates.addStates(mStates, containers, prefix + suffix, LAUNCH_MULTIPLE, false, HostConfigHelper.ACTIVITY_PIT_COUNT_NTS_STANDARD); // SingleTop states.mLaunchModeStates.addStates(mStates, containers, prefix + suffix, LAUNCH_SINGLE_TOP, true, HostConfigHelper.ACTIVITY_PIT_COUNT_TS_SINGLE_TOP); states.mLaunchModeStates.addStates(mStates, containers, prefix + suffix, LAUNCH_SINGLE_TOP, false, HostConfigHelper.ACTIVITY_PIT_COUNT_NTS_SINGLE_TOP); // SingleTask states.mLaunchModeStates.addStates(mStates, containers, prefix + suffix, LAUNCH_SINGLE_TASK, true, HostConfigHelper.ACTIVITY_PIT_COUNT_TS_SINGLE_TASK); states.mLaunchModeStates.addStates(mStates, containers, prefix + suffix, LAUNCH_SINGLE_TASK, false, HostConfigHelper.ACTIVITY_PIT_COUNT_NTS_SINGLE_TASK); // SingleInstance states.mLaunchModeStates.addStates(mStates, containers, prefix + suffix, LAUNCH_SINGLE_INSTANCE, true, HostConfigHelper.ACTIVITY_PIT_COUNT_TS_SINGLE_INSTANCE); states.mLaunchModeStates.addStates(mStates, containers, prefix + suffix, LAUNCH_SINGLE_INSTANCE, false, HostConfigHelper.ACTIVITY_PIT_COUNT_NTS_SINGLE_INSTANCE); // taskAffinity states.mTaskAffinityStates.init(prefix, suffix, mStates, containers); }
void generateConfigFiles() { Set<String> autoload = new HashSet<String>(); Set<String> eager = new HashSet<String>(); Set<String> enabled = new HashSet<String>(); modules.categorize(autoload, eager, enabled); try { AntProjectCookie apc = AntScriptUtils.antProjectCookieFor(findBuildXml(project)); AntTargetExecutor.Env execenv = new AntTargetExecutor.Env(); // execenv.setLogger(new NullOutputStream()); Properties p = execenv.getProperties(); toProperty(p, "include.autoload", autoload); // NOI18N toProperty(p, "include.enabled", enabled); // NOI18N toProperty(p, "include.eager", eager); // NOI18N p.setProperty("cluster", file.getPath()); // NOI18N execenv.setProperties(p); String[] targetNames = { "clusterize" }; // NOI18N ExecutorTask t = AntTargetExecutor.createTargetExecutor(execenv).execute(apc, targetNames); t.waitFinished(); } catch (IOException e) { Util.err.notify(e); } }
public void computeRecipes(Integer count, ItemStack stack) { if (stack.getItemDamage() == 0 && stack.getItem() instanceof LOTRItemMug) { Item item = stack.getItem(); List<ItemStack> list = new ArrayList<>(); item.getSubItems(item, null, list); list.forEach(stack2 -> { if (LOTRPoisonedDrinks.canPoison(stack2)) { ItemStack result = stack2.copy(); LOTRPoisonedDrinks.setDrinkPoisoned(result, true); ItemStack ingred = stack2.copy(); ExtendedCachedShapelessRecipe recipe = new ExtendedCachedShapelessRecipe( Arrays.asList(ingred, POISON_STACK), Arrays.asList(result)); if (craftingRecipes.get(result) == null) craftingRecipes.put(result, new HashSet<>()); craftingRecipes.get(result).add(recipe); if (usageRecipes.get(ingred) == null) usageRecipes.put(ingred, new HashSet<>()); usageRecipes.get(ingred).add(recipe); if (usageRecipes.get(POISON_STACK) == null) usageRecipes.put(POISON_STACK, new HashSet<>()); usageRecipes.get(POISON_STACK).add(recipe); } }); } }
private boolean isPerson(String uri){ if(personTypes.isEmpty()){ return true; } else { // get the types of the URI Set<String> types = new HashSet<>(); try { String query = "SELECT ?type WHERE {<" + uri + "> a ?type.}"; try(QueryExecution qe = qef.createQueryExecution(query)) { ResultSet rs = qe.execSelect(); while(rs.hasNext()){ types.add(rs.next().getResource("type").getURI()); } } } catch (Exception e) { if (e.getCause() instanceof QueryExceptionHTTP) { int code = ((QueryExceptionHTTP) e.getCause()).getResponseCode(); logger.warn("SPARQL query execution failed: " + code + " - " + HttpSC.getCode(code).getMessage()); } else { logger.warn("SPARQL query execution failed.", e); } } // check for overlap between types of entity and person types return !Sets.intersection(personTypes, types).isEmpty(); } }
@Override public Result execute(AtomicVertex[] graph) { final ResultContainer result = new ResultContainer(); final boolean directPathsOnly = DIRECTLY_INDEPENDENT_OF_KEY_WORD.equals(dependencyType); final boolean dependsOnly = DependencyDefinitionParser.DEPENDENT_ONLY_ON_KEY_WORD.equals(dependencyType); for (int i = 0; i < startConditions.length; i++) { final VertexCondition startCondition = startConditions[i]; final StringPattern startSet = startSets[i]; if (dependsOnly) { final Set<AtomicVertex> invalids = new HashSet<>(); for (final AtomicVertex vertex : graph) { if (startCondition.isFulfilled(vertex)) { for (int j = 0, n = vertex.getNumberOfOutgoingArcs(); j < n; j++) { final Vertex headVertex = vertex.getHeadVertex(j); if (finalCondition.isFulfilled(headVertex) == false && startCondition.isFulfilled(headVertex) == false) { invalids.add(vertex); invalids.add((AtomicVertex) headVertex); } } } } result.add(new DependencyResult(startSet, finalSet, toString(startSet, finalSet), invalids.toArray(new AtomicVertex[0]))); } else { for (int j = 0; j < finalConditions.length; j++) { final PathsFinder finder = new PathsFinder(startCondition, finalConditions[j], renderer.onlyShortestPaths(), directPathsOnly); result.add(new DependencyResult(startSet, finalSets[j], toString(i, j), finder.findPaths(graph))); } } } return result; }
@Test public void testGetAvailablePaymentTypesFromSupplier_WithRelationNoServicePaymentTypes() throws Exception { Set<String> prodPt = new HashSet<>(); Set<String> custPt = new HashSet<>( Arrays.asList(BaseAdmUmTest.PAYMENT_TYPE_IDS_INV_CC)); // intersect prodPt and custPt Set<String> expPt = new HashSet<>(); getAvailablePaymentTypesFromSupplier(prodPt, custPt, true, expPt); }
/** * Instantiates a new Role. * * @param id the id * @param name the name * @param privileges the privileges * @param users the users * @param active the active */ @Builder public Role(final String id, final String name, final Set<Privilege> privileges, final Set<User> users, final Boolean active) { super(id); // Null safe this.name = Optional.ofNullable(name).orElse(""); this.privileges = Optional.ofNullable(privileges).orElse(new HashSet<>()); this.users = Optional.ofNullable(users).orElse(new HashSet<>()); setActive(Optional.ofNullable(active).orElse(false)); }
private static HashSet<Integer> createHashSet() { int capacity = rnd.nextInt(MAX_CAPACITY); float loadFactor = Float.MIN_VALUE + rnd.nextFloat()*MAX_LOAD_FACTOR; HashSet<Integer> hashSet = new HashSet<Integer>(capacity, loadFactor); float multiplier = 2*rnd.nextFloat(); // range [0, 2), since nextFloat() returns values in [0, 1) int size = (int)(capacity*loadFactor*multiplier); for (int i = 0; i < size; i++) { hashSet.add(rnd.nextInt()); } return hashSet; }
/** * Collects all implemented interfaces for a given class * @param clazz the IClass object to analyze * @return a set of IClass objects representing the interfaces */ public static Set<IClass> collectAllInterfaces(IClass clazz) { // do not check array classes if (clazz.isArrayClass()) return new HashSet<IClass>(); Set<IClass> interfaces = new HashSet<IClass>(clazz.getDirectInterfaces()); for (IClass c : clazz.getDirectInterfaces()) interfaces.addAll(collectAllInterfaces(c)); return interfaces; }
@Override public void editAnnotation(Annotation ann, AnnotationSet set) { this.ann = ann; this.set = set; if(ann == null) { typeCombo.setModel(new DefaultComboBoxModel<String>()); featuresEditor.setSchema(new AnnotationSchema()); // popupWindow.doLayout(); popupWindow.validate(); return; } // repopulate the types combo String annType = ann.getType(); Set<String> types = new HashSet<String>(schemasByType.keySet()); types.add(annType); types.addAll(set.getAllTypes()); java.util.List<String> typeList = new ArrayList<String>(types); Collections.sort(typeList); typeCombo.setModel(new DefaultComboBoxModel<String>(typeList.toArray(new String[typeList.size()]))); typeCombo.setSelectedItem(annType); featuresEditor.setSchema(schemasByType.get(annType)); featuresEditor.setTargetFeatures(ann.getFeatures()); setEditingEnabled(true); popupWindow.pack(); setVisible(true); if(!pinnedButton.isSelected()) { hideTimer.restart(); } }
@Test public void testRandomTestFromGitHub() throws ParseException, IOException { Set<String> excluded = new HashSet<>(); // pre-EIP155 wrong chain id (negative) String json = JSONReader.loadJSONFromCommit("TransactionTests/RandomTests/tr201506052141PYTHON.json", shacommit); GitHubJSONTestSuite.runGitHubJsonTransactionTest(json, excluded); }
public Set<RegionQueue> getQueues() { if (this.eventProcessor != null) { if (!(this.eventProcessor instanceof ConcurrentSerialGatewaySenderEventProcessor)) { Set<RegionQueue> queues = new HashSet<RegionQueue>(); queues.add(this.eventProcessor.getQueue()); return queues; } return ((ConcurrentSerialGatewaySenderEventProcessor) this.eventProcessor).getQueues(); } return null; }
public java.util.Set list() { check() ; Set result = new HashSet() ; // Obtain all the keys from the property object Enumeration theKeys = savedProps.propertyNames(); while (theKeys.hasMoreElements()) { result.add( theKeys.nextElement() ) ; } return result ; }
/** * Gets the content properties for a node. * @param node the node to process * @return the ContentData values found among the node's property values */ public static Set<ContentData> getContentData(TransferManifestNormalNode node) { Set<ContentData> content = new HashSet<ContentData>(); for(Serializable value : node.getProperties().values()) { if(value instanceof ContentData) { content.add((ContentData)value); } } return content; }
public TestBreakpointListener(JPDABreakpoint breakpoint, boolean isFiltered, List<JPDABreakpoint> allBreakpoints) { this.breakpoint = breakpoint; this.isFiltered = isFiltered; this.allBreakpoints = allBreakpoints; if (isFiltered) { toBeHitIn = new HashSet<String>(); toBeHitIn.add(APP_CLASS_NAME); toBeHitIn.add(APP2_CLASS_NAME); } else { toBeHitIn = null; } }
private Request(Parcel parcel) { String enumValue = parcel.readString(); this.loginBehavior = enumValue != null ? LoginBehavior.valueOf(enumValue) : null; ArrayList<String> permissionsList = new ArrayList<>(); parcel.readStringList(permissionsList); this.permissions = new HashSet<String>(permissionsList); enumValue = parcel.readString(); this.defaultAudience = enumValue != null ? DefaultAudience.valueOf(enumValue) : null; this.applicationId = parcel.readString(); this.authId = parcel.readString(); this.isRerequest = parcel.readByte() != 0; }
private static Set<DataObject> getModifiedFiles (Set<DataObject> openedFiles) { Set<DataObject> set = new HashSet<DataObject> (openedFiles.size ()); for (DataObject obj: openedFiles) { if (obj.isModified ()) { set.add (obj); } } return set; }
@Test public void getOrganizationRoles_PlatformOperator() { // given Set<OrganizationRoleType> orgRoles = new HashSet<OrganizationRoleType>(); orgRoles.add(OrganizationRoleType.PLATFORM_OPERATOR); VOUserDetails voUserDetails = new VOUserDetails(); voUserDetails.setOrganizationRoles(orgRoles); Set<UserRoleType> userRoles = new HashSet<UserRoleType>(); userRoles.add(UserRoleType.PLATFORM_OPERATOR); voUserDetails.setUserRoles(userRoles); given(exportBillingService.idService.getCurrentUserDetails()) .willReturn(voUserDetails); // when List<BillingSharesResultType> returnedRoles = exportBillingService .getBillingShareResultTypes(); // then assertTrue(returnedRoles .contains(BillingSharesResultType.MARKETPLACE_OWNER)); assertTrue(returnedRoles.contains(BillingSharesResultType.RESELLER)); assertTrue(returnedRoles.contains(BillingSharesResultType.BROKER)); assertTrue(returnedRoles.contains(BillingSharesResultType.SUPPLIER)); }
@NonNull public Set<String> getComponentSet() { Set<String> set = new HashSet<>(dataList.size()); for (Bean bean : dataList) { set.add(bean.getPkg() + "/" + bean.getLauncher()); } return set; }