/**
 * Setup TestNG method to create the Rapture login object and API objects.
 *
 * @param url
 *            RaptureURL passed in from the <env>_testng.xml suite file
 * @param username
 *            RaptureUser passed in from the <env>_testng.xml suite file
 * @param password
 *            RapturePassword passed in from the <env>_testng.xml suite file
 */
@BeforeMethod
@BeforeClass(groups = { "mongo" })
@Parameters({ "RaptureURL", "RaptureUser", "RapturePassword" })
public void setUp(@Optional("http://localhost:8665/rapture") String url, @Optional("rapture") String username,
        @Optional("rapture") String password) {
    // If running from Eclipse, set the env var -Penv=docker or use the following
    // url variable settings:
    // url="http://192.168.99.101:8665/rapture"; //docker
    // url="http://localhost:8665/rapture";
    System.out.println("Using url " + url);
    raptureLogin = new HttpLoginApi(url, new SimpleCredentialsProvider(username, password));
    raptureLogin.login();
    series = new HttpSeriesApi(raptureLogin);
    document = new HttpDocApi(raptureLogin);
    script = new HttpScriptApi(raptureLogin);
    event = new HttpEventApi(raptureLogin);
    fountain = new HttpIdGenApi(raptureLogin);
    blobApi = new HttpBlobApi(raptureLogin);
    Kernel.initBootstrap();
    context = ContextFactory.getKernelUser();
}
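// A minimal, hypothetical sketch (not part of the test class above) showing how the
// RaptureURL/RaptureUser/RapturePassword suite parameters consumed by setUp() could be supplied
// programmatically instead of through an <env>_testng.xml file. The parameter values are just the
// @Optional defaults used above; the method name and test class argument are illustrative only.
// Requires org.testng.TestNG, org.testng.xml.XmlSuite/XmlTest/XmlClass and java.util imports.
public static void runWithDefaultRaptureParameters(Class<?> testClass) {
    XmlSuite suite = new XmlSuite();
    Map<String, String> suiteParams = new HashMap<>();
    suiteParams.put("RaptureURL", "http://localhost:8665/rapture");
    suiteParams.put("RaptureUser", "rapture");
    suiteParams.put("RapturePassword", "rapture");
    suite.setParameters(suiteParams);

    // Register the test class in a test within the suite.
    XmlTest test = new XmlTest(suite);
    test.setXmlClasses(Collections.singletonList(new XmlClass(testClass)));

    // Run the suite; TestNG injects the parameters into the @Parameters-annotated setUp method.
    TestNG testng = new TestNG();
    testng.setXmlSuites(Collections.singletonList(suite));
    testng.run();
}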
/**
 * Setup TestNG method to create the Rapture login object and API objects.
 *
 * @param url
 *            RaptureURL passed in from the <env>_testng.xml suite file
 * @param username
 *            RaptureUser passed in from the <env>_testng.xml suite file
 * @param password
 *            RapturePassword passed in from the <env>_testng.xml suite file
 */
@BeforeClass(groups = { "smoke" })
@Parameters({ "RaptureURL", "RaptureUser", "RapturePassword" })
public void setUp(@Optional("http://localhost:8665/rapture") String url, @Optional("rapture") String username,
        @Optional("rapture") String password) {
    // If running from Eclipse, set the env var -Penv=docker or use the following
    // url variable settings:
    // url="http://192.168.99.101:8665/rapture"; //docker
    // url="http://localhost:8665/rapture";
    System.out.println("Using url " + url);
    raptureLogin = new HttpLoginApi(url, new SimpleCredentialsProvider(username, password));
    raptureLogin.login();
    series = new HttpSeriesApi(raptureLogin);
    document = new HttpDocApi(raptureLogin);
    script = new HttpScriptApi(raptureLogin);
    event = new HttpEventApi(raptureLogin);
    fountain = new HttpIdGenApi(raptureLogin);
}
/**
 * Setup TestNG method to create the Rapture login object and API objects.
 *
 * @param url
 *            RaptureURL passed in from the <env>_testng.xml suite file
 * @param username
 *            RaptureUser passed in from the <env>_testng.xml suite file
 * @param password
 *            RapturePassword passed in from the <env>_testng.xml suite file
 */
@BeforeClass(groups = { "document" })
@Parameters({ "RaptureURL", "RaptureUser", "RapturePassword" })
public void setUp(@Optional("http://localhost:8665/rapture") String url, @Optional("rapture") String username,
        @Optional("rapture") String password) {
    // If running from Eclipse, set the environment variable -Penv=docker
    // or use the following:
    // url="http://localhost:8665/rapture";
    // url="http://192.168.99.101:8665/rapture"; //docker
    Reporter.log("Using URL: " + url, true);
    raptureLogin = new HttpLoginApi(url, new SimpleCredentialsProvider(username, password));
    try {
        raptureLogin.login();
        document = new HttpDocApi(raptureLogin);
    } catch (RaptureException re) {
        Reporter.log(re.getFormattedMessage(), true);
    }
}
/**
 * Setup TestNG method to create the Rapture login object and API objects.
 *
 * @param url
 *            RaptureURL passed in from the <env>_testng.xml suite file
 * @param username
 *            RaptureUser passed in from the <env>_testng.xml suite file
 * @param password
 *            RapturePassword passed in from the <env>_testng.xml suite file
 */
@BeforeClass(groups = { "nightly" })
@Parameters({ "RaptureURL", "RaptureUser", "RapturePassword" })
public void setUp(@Optional("http://localhost:8665/rapture") String url, @Optional("rapture") String username,
        @Optional("rapture") String password) {
    // If running from Eclipse, set the env var -Penv=docker or use the following
    // url variable settings:
    // url="http://192.168.99.101:8665/rapture"; //docker
    // url="http://localhost:8665/rapture";
    helper = new IntegrationTestHelper(url, username, password);
    raptureLogin = helper.getRaptureLogin();
    docApi = helper.getDocApi();
    operationApi = helper.getOperationApi();
    callingContext = raptureLogin.getContext();
    repo = helper.getRandomAuthority(Scheme.DOCUMENT);
    helper.configureTestRepo(repo, "MEMORY");
}
/**
 * Setup TestNG method to create the Rapture login object and API objects.
 *
 * @param url
 *            RaptureURL passed in from the <env>_testng.xml suite file
 * @param username
 *            RaptureUser passed in from the <env>_testng.xml suite file
 * @param password
 *            RapturePassword passed in from the <env>_testng.xml suite file
 */
@BeforeMethod
@BeforeClass(groups = { "mongo" })
@Parameters({ "RaptureURL", "RaptureUser", "RapturePassword" })
public void setUp(@Optional("http://localhost:8665/rapture") String url, @Optional("rapture") String username,
        @Optional("rapture") String password) {
    // If running from Eclipse, set the env var -Penv=docker or use the following
    // url variable settings:
    // url="http://192.168.99.101:8665/rapture"; //docker
    // url="http://localhost:8665/rapture";
    // System.out.println("Using url " + url);
    // raptureLogin = new HttpLoginApi(url, new SimpleCredentialsProvider(username, password));
    // raptureLogin.login();
    // seriesApi = new HttpSeriesApi(raptureLogin);
    // docApi = new HttpDocApi(raptureLogin);
    // scriptApi = new HttpScriptApi(raptureLogin);
    // eventApi = new HttpEventApi(raptureLogin);
    // fountainApi = new HttpIdGenApi(raptureLogin);
    // blobApi = new HttpBlobApi(raptureLogin);
    // callingContext = raptureLogin.getContext();
    //
}
/**
 * Setup TestNG method to create the Rapture login object and API objects.
 *
 * @param url
 *            RaptureURL passed in from the <env>_testng.xml suite file
 * @param username
 *            RaptureUser passed in from the <env>_testng.xml suite file
 * @param password
 *            RapturePassword passed in from the <env>_testng.xml suite file
 */
@BeforeClass(groups = { "nightly", "search" })
@Parameters({ "RaptureURL", "RaptureUser", "RapturePassword" })
public void setUp(@Optional("http://localhost:8665/rapture") String url, @Optional("rapture") String username,
        @Optional("rapture") String password) {
    // If running from Eclipse, set the env var -Penv=docker or use the following
    // url variable settings:
    // url="http://192.168.99.101:8665/rapture"; //docker
    // url="http://localhost:8665/rapture";
    helper = new IntegrationTestHelper(url, username, password);
    raptureLogin = helper.getRaptureLogin();
    seriesApi = helper.getSeriesApi();
    scriptApi = helper.getScriptApi();
    docApi = helper.getDocApi();
    blobApi = helper.getBlobApi();
    searchApi = new HttpSearchApi(raptureLogin);
    callingContext = raptureLogin.getContext();
    forceCleanUp(username);
    if (!username.equals("rapture")) {
        forceCleanUp("rapture");
    }
}
/**
 * Drops and recreates the databaseName from the template files.
 *
 * @param skipDatabaseCreation
 *            If set to true, the databaseName creation will be skipped (Default: false).
 *
 * @throws Exception
 *             Exception.
 */
@Parameters({ "skipDatabaseCreation" })
@BeforeClass(dependsOnMethods = { "setupIntegrationTest" }, groups = GROUP_INTEGRATION_TEST_SETUP)
public void setupDatabase(@Optional("false") String skipDatabaseCreation) throws Exception {
    if (BooleanUtils.toBoolean(skipDatabaseCreation)) {
        return;
    }
    LOGGER.info("Using the following JDBC URL for the test database: " + jdbcURL);
    try {
        DatabaseUtils.recreateDatabase(jdbcTempURL, suUsername, suPassword, databaseName, databaseType, username);
        initializeDatabaseSchemaAndContent();
    } catch (Exception e) {
        LOGGER.error(e.getMessage(), e);
        throw e;
    }
}
/**
 * Setup credentials for DB access.
 *
 * @param dbUsername
 *            The username to use (Default: communote).
 * @param dbPassword
 *            The password to use for the given user (Default: communote).
 * @param dbSuUsername
 *            Name of the user to use for dropping an existing databaseName and creating a new
 *            databaseName. This user also needs to have access to the temp databaseName. If
 *            unset, dbUsername will be used.
 * @param dbSuPassword
 *            Password of the user identified by dbSuUsername. Falls back to dbPassword if
 *            dbSuUsername is blank.
 */
@Parameters({ "dbUsername", "dbPassword", "dbSuUsername", "dbSuPassword" })
@BeforeClass(groups = GROUP_INTEGRATION_TEST_SETUP)
public void setupDatabaseUser(@Optional("communote") String dbUsername, @Optional("communote") String dbPassword,
        @Optional("") String dbSuUsername, @Optional("") String dbSuPassword) {
    this.username = dbUsername;
    this.password = dbPassword;
    if (StringUtils.isBlank(dbSuUsername)) {
        this.suUsername = dbUsername;
        this.suPassword = dbPassword;
    } else {
        this.suUsername = dbSuUsername;
        this.suPassword = dbSuPassword;
    }
}
/**
 * Setup.
 *
 * @param ldifFile
 *            ldif file as classpath or file URL. Default is to load from classpath.
 * @throws Exception
 *             Exception.
 */
@Parameters({ "ldifFile" })
@BeforeClass(groups = "ldap-test-setup")
public void setup(
        @Optional("classpath:/com/communote/server/test/ldap/test_ldap.ldif") String ldifFile) throws Exception {
    server.setPort(getNextFreePort());
    server.start();
    LOG.info("Load ldif from: " + ldifFile);
    URL url;
    if (ldifFile.startsWith("classpath:")) {
        ldifFile = ldifFile.substring(10);
        url = getClass().getResource(ldifFile);
    } else {
        url = new URL(ldifFile);
    }
    try (InputStream in = url.openStream()) {
        server.importLdifFromStream(in);
    }
}
/**
 * Setup.
 *
 * @param numberOfMessages
 *            The number of messages to generate.
 *
 * @throws Exception
 *             Exception.
 */
@Parameters({ "numberOfMessages" })
@BeforeClass(dependsOnGroups = "integration-test-setup")
public void setup(@Optional("1000") String numberOfMessages) throws Exception {
    user = TestUtils.createRandomUser(false);
    blogForDeletion = TestUtils.createRandomBlog(true, true, user);
    blogForMovingFrom = TestUtils.createRandomBlog(true, true, user);
    blogForMovingTo = TestUtils.createRandomBlog(true, true, user);
    this.numberOfMessages = Integer.parseInt(numberOfMessages);
    for (int i = 1; i <= this.numberOfMessages; i++) {
        TestUtils.createAndStoreCommonNote(blogForDeletion, user.getId(), "Message " + i);
        TestUtils.createAndStoreCommonNote(blogForMovingFrom, user.getId(), "Message " + i);
    }
    blogManagement = ServiceLocator.instance().getService(BlogManagement.class);
    noteDao = ServiceLocator.findService(NoteDao.class);
    Assert.assertEquals(noteDao.getNotesForBlog(blogForDeletion.getId(), null, null).size(),
            this.numberOfMessages);
    Assert.assertEquals(noteDao.getNotesForBlog(blogForMovingFrom.getId(), null, null).size(),
            this.numberOfMessages);
}
/**
 * Sets up the group search.
 *
 * @param searchFilter
 *            The search filter for groups.
 * @param searchBase
 *            The search base for groups.
 * @param searchSubtree
 *            True, when subtrees should be searched too.
 * @param propertyMapping
 *            Mapping of properties as String.
 * @param isMemberMode
 *            True, if the mode is "member", false if "memberOf".
 */
@BeforeMethod(groups = "setupSearchBase")
@Parameters({ "groupSearchFilter", "groupSearchBase", "groupSearchSubtree", "groupPropertyMapping",
        "isMemberMode" })
public void setupGroupSearch(
        @Optional("(objectClass=group)") String searchFilter,
        String searchBase,
        @Optional("true") String searchSubtree,
        @Optional("name=name,alias=cn,membership=memberOf,description=name,uid=cn") String propertyMapping,
        @Optional("false") String isMemberMode) {
    groupSyncConfig = LdapGroupSyncConfiguration.Factory.newInstance();
    groupSyncConfig.setMemberMode(Boolean.parseBoolean(isMemberMode));
    groupSyncConfig.setGroupIdentifierIsBinary(false);
    LdapSearchConfiguration groupSearchConfiguration = LdapSearchConfiguration.Factory.newInstance();
    groupSearchConfiguration.setSearchFilter(searchFilter);
    LdapSearchBaseDefinition searchBaseDefinition = LdapSearchBaseDefinition.Factory
            .newInstance(searchBase, Boolean.parseBoolean(searchSubtree));
    groupSearchConfiguration.setSearchBases(new ArrayList<LdapSearchBaseDefinition>());
    groupSearchConfiguration.getSearchBases().add(searchBaseDefinition);
    groupSearchConfiguration.setPropertyMapping(propertyMapping);
    groupSyncConfig.setGroupSearch(groupSearchConfiguration);
}
@BeforeClass @Parameters({ "cntAuthenticationUsername", "cntAuthenticationPassword", "cntManagerAlias", "cntUserAlias", "cntManagerId", "externalGroupId" }) public void setupCommunoteParameters( @Optional("sharepoint.system") String cntAuthenticationUsername, @Optional("123456") String cntAuthenticationPassword, @Optional("kenmei") String communoteManagerAlias, @Optional("kenmei") String communoteUserAlias, @Optional String communoteManagerId, @Optional("mqTestExternalGroup") String externalGroupId) { this.cntAuthenticationUsername = cntAuthenticationUsername; this.cntAuthenticationPassword = cntAuthenticationPassword; this.setCommunoteManagerAlias(communoteManagerAlias); this.setCommunoteUserAlias(communoteUserAlias); if (communoteManagerId == null || communoteManagerId.length() == 0) { this.setCommunoteManagerId(1L); } else { this.setCommunoteManagerId(Long.parseLong(communoteManagerId)); } this.externalGroupId = externalGroupId; }
@Parameters({"useNativeCodeToSign"}) @BeforeClass @SuppressWarnings("deprecation") public void beforeClass(@Optional Boolean useNativeCodeToSign) throws IOException, NoSuchAlgorithmException { if (useNativeCodeToSign == null) { this.useNativeCodeToSign = true; } else { this.useNativeCodeToSign = useNativeCodeToSign; } this.signer = new ThreadLocalSigner(this.useNativeCodeToSign); // Removes any existing instances - so that we can reset state this.signer.remove(); this.testKeyPair = SignerTestUtil.testKeyPair("rsa_2048"); this.testKeyFingerprint = SignerTestUtil.testKeyMd5Fingerprint("rsa_2048"); credentials = new UsernamePasswordCredentials("username", testKeyFingerprint); this.authScheme = new HttpSignatureAuthScheme(testKeyPair, this.useNativeCodeToSign); this.interceptor = new HttpSignatureRequestInterceptor(authScheme, credentials, this.useNativeCodeToSign); }
@Test(groups = "perfHash") @Parameters({"readRatio", "threadMin", "threadMax", "threadIncrement", "hashTableSize", "hashTableImpl"}) public static void benchmark(String readRatio, String threadMin, String threadMax, String threadIncrement, String hashTableSize, @Optional("0") String hashTableImpl) throws Exception { String[] args = { readRatio, threadMin, threadMax, threadIncrement, hashTableSize, hashTableImpl }; main(args); }
/**
 * Sets the before-test configuration for the test.
 *
 * @param url
 *            the url
 * @param browserString
 *            the browser string
 * @param context
 *            the context
 * @throws Exception
 *             the exception
 */
@BeforeTest
@Parameters({ "selenium.url", "selenium.browser" })
public final void setUp(@Optional("http://www.google.com") final String url,
        @Optional final String browserString, final ITestContext context) throws Exception {
    Logger log = getLog();
    try {
        String browserStr = super.getBrowserString();
        if (browserStr == null || browserStr.isEmpty()) {
            super.setBrowserString(runtimeBrowserString());
            super.setUp(url, getBrowserString());
        } else {
            super.setUp(url, getBrowserString());
        }
        log.info("Execution Browser : " + browserStr);
    } catch (Exception e) {
        log.error("Exception occurred while setting up the test ", e);
    }
    super.setCaptureScreenShotOnFailure(true);
    cleanDriverServerSessions();
}
@BeforeClass
@Parameters(Env.ICE_CONFIG_LOCATION)
public void setUpBlitzClient(@Optional String iceConfigLocation)
        throws ServerError, CannotCreateSessionException, PermissionDeniedException {
    String iceConfig = System.getenv(Env.ICE_CONFIG);
    if (!empty(iceConfig) && !empty(iceConfig.trim())) {
        log.debug("Loading Ice configuration from 'ICE_CONFIG' at {}", iceConfig);
        client = new omero.client();
    } else if (!empty(iceConfigLocation) && !empty(iceConfigLocation.trim())) {
        log.debug("Loading Ice configuration from 'ice.config.location' at {}", iceConfigLocation);
        client = new omero.client(new File(iceConfigLocation));
    } else {
        fail("Run integration tests with ICE_CONFIG or ice.config.location environment variables");
    }
    session = client.createSession();
    session.detachOnDestroy();
    log.debug("Got session {} from client {} - secure: {}", session, client, client.isSecure());
    setUpAfterIceConnection(session);
}
@RunAsClient @Parameters({ "gluuConfigurationPath", "webTarget" }) @Consumes(MediaType.APPLICATION_JSON) @Test public void getConfigurationTest(String gluuConfigurationPath, @Optional @ArquillianResteasyResource("") final WebTarget webTarget) throws Exception { Response response = webTarget.path(gluuConfigurationPath).request().get(); String entity = response.readEntity(String.class); BaseTest.showResponse("UMA : TConfiguration.configuration", response, entity); assertEquals(response.getStatus(), 200, "Unexpected response code."); try { GluuConfiguration appConfiguration = ServerUtil.createJsonMapper().readValue(entity, GluuConfiguration.class); System.err.println(appConfiguration.getIdGenerationEndpoint()); assertNotNull(appConfiguration, "Meta data configuration is null"); assertNotNull(appConfiguration.getIdGenerationEndpoint()); assertNotNull(appConfiguration.getIntrospectionEndpoint()); assertNotNull(appConfiguration.getAuthLevelMapping()); assertNotNull(appConfiguration.getScopeToClaimsMapping()); } catch (IOException e) { e.printStackTrace(); fail(); } }
@Test @Parameters({"sql2", "n"}) public void testSimpleStarQuery(String sql, @Optional Integer n) throws Exception { List<Object> params = new LinkedList<Object>(); RiverMouth output = new MockRiverMouth() { @Override public void index(IndexableObject object, boolean create) throws IOException { logger.debug("object={}", object); } }; PreparedStatement statement = source.prepareQuery(sql); source.bind(statement, params); ResultSet results = source.executeQuery(statement); KeyValueStreamListener listener = new StringKeyValueStreamListener() .output(output); long rows = 0L; source.beforeRows(results, listener); while (source.nextRow(results, listener)) { rows++; } source.afterRows(results, listener); assertEquals(rows, n == null ? 5 : n); source.close(results); source.close(statement); }
@Test @Parameters("waitOn") public void testStackAndClusterStart(@Optional(NOWAIT) Boolean waitOn) throws Exception { // GIVEN IntegrationTestContext itContext = getItContext(); String stackId = itContext.getContextParam(CloudbreakITContextConstants.STACK_ID); Integer stackIntId = Integer.valueOf(stackId); String ambariUser = itContext.getContextParam(CloudbreakITContextConstants.AMBARI_USER_ID); String ambariPassword = itContext.getContextParam(CloudbreakITContextConstants.AMBARI_PASSWORD_ID); String ambariPort = itContext.getContextParam(CloudbreakITContextConstants.AMBARI_PORT_ID); // WHEN UpdateStackJson updateStackJson = new UpdateStackJson(); updateStackJson.setStatus(StatusRequest.valueOf(STARTED)); CloudbreakUtil.checkResponse("StartStack", getCloudbreakClient().stackV1Endpoint().put(Long.valueOf(stackIntId), updateStackJson)); if (Boolean.TRUE.equals(waitOn)) { CloudbreakUtil.waitAndCheckStackStatus(getCloudbreakClient(), stackId, "AVAILABLE"); } UpdateClusterJson updateClusterJson = new UpdateClusterJson(); updateClusterJson.setStatus(StatusRequest.valueOf(STARTED)); CloudbreakUtil.checkResponse("StartCluster", getCloudbreakClient().clusterEndpoint().put(Long.valueOf(stackIntId), updateClusterJson)); CloudbreakUtil.waitAndCheckClusterStatus(getCloudbreakClient(), stackId, "AVAILABLE"); // THEN CloudbreakUtil.checkClusterAvailability(getCloudbreakClient().stackV1Endpoint(), ambariPort, stackId, ambariUser, ambariPassword, true); }
@Test @Parameters("waitOn") public void testClusterAndStackStop(@Optional(NOWAIT) Boolean waitOn) throws Exception { // GIVEN IntegrationTestContext itContext = getItContext(); String stackId = itContext.getContextParam(CloudbreakITContextConstants.STACK_ID); Integer stackIntId = Integer.valueOf(stackId); String ambariUser = itContext.getContextParam(CloudbreakITContextConstants.AMBARI_USER_ID); String ambariPassword = itContext.getContextParam(CloudbreakITContextConstants.AMBARI_PASSWORD_ID); String ambariPort = itContext.getContextParam(CloudbreakITContextConstants.AMBARI_PORT_ID); // WHEN UpdateClusterJson updateClusterJson = new UpdateClusterJson(); updateClusterJson.setStatus(StatusRequest.valueOf(STOPPED)); CloudbreakUtil.checkResponse("StopCluster", getCloudbreakClient().clusterEndpoint().put(Long.valueOf(stackIntId), updateClusterJson)); if (Boolean.TRUE.equals(waitOn)) { CloudbreakUtil.waitAndCheckClusterStatus(getCloudbreakClient(), stackId, STOPPED); } UpdateStackJson updateStackJson = new UpdateStackJson(); updateStackJson.setStatus(StatusRequest.valueOf(STOPPED)); CloudbreakUtil.checkResponse("StopStack", getCloudbreakClient().stackV1Endpoint().put(Long.valueOf(stackIntId), updateStackJson)); CloudbreakUtil.waitAndCheckStackStatus(getCloudbreakClient(), stackId, STOPPED); // THEN CloudbreakUtil.checkClusterStopped(getCloudbreakClient().stackV1Endpoint(), ambariPort, stackId, ambariUser, ambariPassword); }
@Test @Parameters({ "instanceGroup", "scalingAdjustment" }) public void testStackScaling(@Optional("slave_1") String instanceGroup, int scalingAdjustment) throws Exception { // GIVEN IntegrationTestContext itContext = getItContext(); String stackId = itContext.getContextParam(CloudbreakITContextConstants.STACK_ID); int stackIntId = Integer.parseInt(stackId); StackV1Endpoint stackV1Endpoint = itContext.getContextParam(CloudbreakITContextConstants.CLOUDBREAK_CLIENT, CloudbreakClient.class).stackV1Endpoint(); int expectedNodeCount = ScalingUtil.getNodeCountStack(stackV1Endpoint, stackId) + scalingAdjustment; // WHEN UpdateStackJson updateStackJson = new UpdateStackJson(); updateStackJson.setWithClusterEvent(false); InstanceGroupAdjustmentJson instanceGroupAdjustmentJson = new InstanceGroupAdjustmentJson(); instanceGroupAdjustmentJson.setInstanceGroup(instanceGroup); instanceGroupAdjustmentJson.setScalingAdjustment(scalingAdjustment); updateStackJson.setInstanceGroupAdjustment(instanceGroupAdjustmentJson); CloudbreakUtil.checkResponse("ScalingStack", getCloudbreakClient().stackV1Endpoint().put((long) stackIntId, updateStackJson)); CloudbreakUtil.waitAndCheckStackStatus(getCloudbreakClient(), stackId, "AVAILABLE"); // THEN ScalingUtil.checkStackScaled(stackV1Endpoint, stackId, expectedNodeCount); StackResponse stackResponse = stackV1Endpoint.get(Long.valueOf(stackId), new HashSet<>()); itContext.putContextParam(CloudbreakITContextConstants.INSTANCE_COUNT, ScalingUtil.getNodeCountByHostgroup(stackResponse)); }
@Test @Parameters({ "instanceGroup", "scalingAdjustment" }) public void testClusterScaling(@Optional("slave_1") String instanceGroup, int scalingAdjustment) throws Exception { // GIVEN IntegrationTestContext itContext = getItContext(); String stackId = itContext.getContextParam(CloudbreakITContextConstants.STACK_ID); int stackIntId = Integer.parseInt(stackId); StackV1Endpoint stackV1Endpoint = itContext.getContextParam(CloudbreakITContextConstants.CLOUDBREAK_CLIENT, CloudbreakClient.class).stackV1Endpoint(); String ambariUser = itContext.getContextParam(CloudbreakITContextConstants.AMBARI_USER_ID); String ambariPassword = itContext.getContextParam(CloudbreakITContextConstants.AMBARI_PASSWORD_ID); String ambariPort = itContext.getContextParam(CloudbreakITContextConstants.AMBARI_PORT_ID); int expectedNodeCount = ScalingUtil.getNodeCountAmbari(stackV1Endpoint, ambariPort, stackId, ambariUser, ambariPassword, itContext) + scalingAdjustment; // WHEN UpdateClusterJson updateClusterJson = new UpdateClusterJson(); HostGroupAdjustmentJson hostGroupAdjustmentJson = new HostGroupAdjustmentJson(); hostGroupAdjustmentJson.setHostGroup(instanceGroup); hostGroupAdjustmentJson.setWithStackUpdate(false); hostGroupAdjustmentJson.setScalingAdjustment(scalingAdjustment); updateClusterJson.setHostGroupAdjustment(hostGroupAdjustmentJson); CloudbreakUtil.checkResponse("ScalingCluster", getCloudbreakClient().clusterEndpoint().put((long) stackIntId, updateClusterJson)); CloudbreakUtil.waitAndCheckClusterStatus(getCloudbreakClient(), stackId, "AVAILABLE"); // THEN ScalingUtil.checkClusterScaled(stackV1Endpoint, ambariPort, stackId, ambariUser, ambariPassword, expectedNodeCount, itContext); }
@BeforeClass @Parameters({"stackName", "mockPort", "sshPort"}) public void configMockServer(String stackName, @Optional("9443") int mockPort, @Optional("2020") int sshPort) { IntegrationTestContext itContext = getItContext(); List<InstanceGroup> instanceGroups = itContext.getContextParam(CloudbreakITContextConstants.TEMPLATE_ID, List.class); int numberOfServers = 0; for (InstanceGroup ig : instanceGroups) { numberOfServers += ig.getNodeCount(); } StackCreationMock stackCreationMock = (StackCreationMock) applicationContext.getBean( StackCreationMock.NAME, mockPort, sshPort, numberOfServers); stackCreationMock.addSPIEndpoints(); stackCreationMock.mockImageCatalogResponse(itContext); itContext.putContextParam(CloudbreakV2Constants.MOCK_SERVER, stackCreationMock); itContext.putContextParam(CloudbreakITContextConstants.MOCK_INSTANCE_MAP, stackCreationMock.getInstanceMap()); }
@Test @Parameters({ "networkName", "subnetCIDR" }) public void testGcpTemplateCreation(@Optional("it-mock-network") String networkName, @Optional("10.0.36.0/24") String subnetCIDR) throws Exception { // GIVEN // WHEN NetworkRequest networkRequest = new NetworkRequest(); networkRequest.setDescription("Mock network for integration testing"); networkRequest.setName(networkName); networkRequest.setSubnetCIDR(subnetCIDR); networkRequest.setCloudPlatform("MOCK"); String id = getCloudbreakClient().networkEndpoint().postPrivate(networkRequest).getId().toString(); // THEN Assert.assertNotNull(id); getItContext().putContextParam(CloudbreakITContextConstants.NETWORK_ID, id, true); }
@Test @Parameters({ "mockName", "mockInstanceType", "volumeType", "volumeCount", "volumeSize" }) public void testGcpTemplateCreation(@Optional("it-mock-template") String templateName, @Optional("small") String mockInstanceType, @Optional("magnetic") String volumeType, @Optional("1") String volumeCount, @Optional("30") String volumeSize) throws Exception { // GIVEN // WHEN TemplateRequest templateRequest = new TemplateRequest(); templateRequest.setName(templateName); templateRequest.setDescription("MOCK template for integration testing"); templateRequest.setInstanceType(mockInstanceType); templateRequest.setVolumeCount(Integer.valueOf(volumeCount)); templateRequest.setVolumeSize(Integer.valueOf(volumeSize)); templateRequest.setVolumeType(volumeType); templateRequest.setCloudPlatform("MOCK"); String id = getCloudbreakClient().templateEndpoint().postPrivate(templateRequest).getId().toString(); // THEN Assert.assertNotNull(id); additionHelper.handleTemplateAdditions(getItContext(), id, additions); }
@Test @Parameters({ "credentialName" }) public void testMockCredentialCreation(@Optional("") String credentialName) throws Exception { // GIVEN credentialName = StringUtils.hasLength(credentialName) ? credentialName : defaultName; credentialName = credentialName + UUID.randomUUID(); CredentialRequest credentialRequest = new CredentialRequest(); credentialRequest.setName(credentialName); credentialRequest.setDescription("Mock Rm credential for integrationtest"); credentialRequest.setCloudPlatform("MOCK"); // WHEN String id = getCloudbreakClient().credentialEndpoint().postPrivate(credentialRequest).getId().toString(); // THEN Assert.assertNotNull(id); getItContext().putContextParam(CloudbreakITContextConstants.CREDENTIAL_ID, id, true); getItContext().putContextParam(CloudbreakV2Constants.CREDENTIAL_NAME, credentialName); }
@BeforeClass @Parameters({"stackName", "mockPort", "sshPort"}) public void configMockServer(String stackName, @Optional("9443") int mockPort, @Optional("2020") int sshPort) { IntegrationTestContext itContext = getItContext(); Map<String, InstanceGroupV2Request> instanceGroupV2RequestMap = itContext.getContextParam(CloudbreakV2Constants.INSTANCEGROUP_MAP, Map.class); int numberOfServers = 0; for (InstanceGroupV2Request igr : instanceGroupV2RequestMap.values()) { numberOfServers += igr.getNodeCount(); } StackCreationMock stackCreationMock = (StackCreationMock) applicationContext.getBean( StackCreationMock.NAME, mockPort, sshPort, numberOfServers); stackCreationMock.addSPIEndpoints(); stackCreationMock.mockImageCatalogResponse(itContext); stackCreationMock.addSaltMappings(); stackCreationMock.addAmbariMappings(stackName); itContext.putContextParam(CloudbreakV2Constants.MOCK_SERVER, stackCreationMock); itContext.putContextParam(CloudbreakITContextConstants.MOCK_INSTANCE_MAP, stackCreationMock.getInstanceMap()); }
@BeforeClass @Parameters({"mockPort", "sshPort", "desiredCount", "hostGroup"}) public void configMockServer(@Optional("9443") int mockPort, @Optional("2020") int sshPort, int desiredCount, String hostGroup) { IntegrationTestContext itContext = getItContext(); String clusterName = itContext.getContextParam(CloudbreakV2Constants.STACK_NAME); StackResponse response = getCloudbreakClient().stackV2Endpoint().getPrivate(clusterName, null); java.util.Optional<InstanceGroupResponse> igg = response.getInstanceGroups().stream().filter(ig -> ig.getGroup().equals(hostGroup)).findFirst(); Map<String, CloudVmInstanceStatus> instanceMap = itContext.getContextParam(CloudbreakITContextConstants.MOCK_INSTANCE_MAP, Map.class); ScalingMock scalingMock = (ScalingMock) applicationContext.getBean(ScalingMock.NAME, mockPort, sshPort, instanceMap); scalingMock.addSPIEndpoints(); scalingMock.addMockEndpoints(); scalingMock.addAmbariMappings(clusterName); itContext.putContextParam(CloudbreakV2Constants.MOCK_SERVER, scalingMock); igg.ifPresent(ig -> { int scalingAdjustment = desiredCount - ig.getNodeCount(); if (scalingAdjustment > 0) { scalingMock.addInstance(scalingAdjustment); } }); }
@Test @Parameters({"credentialName", "region", "availabilityZone", "selectedKeyName"}) public void testSshKeySelection(@Optional("") String credentialName, @Optional("") String region, @Optional("") String availabilityZone, String selectedKeyName) { // GIVEN IntegrationTestContext itContext = getItContext(); credentialName = StringUtils.hasText(credentialName) ? credentialName : itContext.getContextParam(CloudbreakV2Constants.CREDENTIAL_NAME); region = StringUtils.hasText(region) ? region : itContext.getContextParam(CloudbreakV2Constants.REGION); availabilityZone = StringUtils.hasText(availabilityZone) ? availabilityZone : itContext.getContextParam(CloudbreakV2Constants.AVAILABILTYZONE); PlatformResourceRequestJson resourceRequestJson = new PlatformResourceRequestJson(); resourceRequestJson.setCredentialName(credentialName); resourceRequestJson.setRegion(region); resourceRequestJson.setAvailabilityZone(availabilityZone); // WHEN PlatformSshKeysResponse response = getCloudbreakClient().connectorV1Endpoint().getCloudSshKeys(resourceRequestJson); // THEN Set<PlatformSshKeyResponse> regionKeys = response.getSshKeys().get(region); Assert.assertNotNull(regionKeys, "keys cannot be null for " + region); java.util.Optional<PlatformSshKeyResponse> selected = regionKeys.stream().filter(rk -> rk.getName().equals(selectedKeyName)).findFirst(); Assert.assertTrue(selected.isPresent(), "the sshkey list doesn't contain [" + selectedKeyName + "]"); getItContext().putContextParam(CloudbreakV2Constants.SSH_PUBLICKEY_ID, selected.get().getName()); }
@BeforeMethod(groups = "igRequestCreation") @Parameters({"group", "nodeCount", "groupType", "recoveryMode"}) public void createInstanceGroupRequest(String group, int nodeCount, String groupType, @Optional("MANUAL") String recoveryMode) { InstanceGroupV2Request instanceGroupV2Request = new InstanceGroupV2Request(); instanceGroupV2Request.setGroup(group); instanceGroupV2Request.setNodeCount(nodeCount); instanceGroupV2Request.setType(InstanceGroupType.valueOf(groupType)); IntegrationTestContext itContext = getItContext(); Map<String, InstanceGroupV2Request> igMap; synchronized (itContext) { igMap = itContext.getContextParam(CloudbreakV2Constants.INSTANCEGROUP_MAP, Map.class); if (igMap == null) { igMap = Maps.newConcurrentMap(); itContext.putContextParam(CloudbreakV2Constants.INSTANCEGROUP_MAP, igMap); } } igMap.put(group, instanceGroupV2Request); }
@Test @Parameters({"credentialName", "region", "availabilityZone", "poolName"}) public void testIpPoolSelection(@Optional("") String credentialName, @Optional("") String region, @Optional("") String availabilityZone, String poolName) { // GIVEN IntegrationTestContext itContext = getItContext(); credentialName = StringUtils.hasText(credentialName) ? credentialName : itContext.getContextParam(CloudbreakV2Constants.CREDENTIAL_NAME); region = StringUtils.hasText(region) ? region : itContext.getContextParam(CloudbreakV2Constants.REGION); availabilityZone = StringUtils.hasText(availabilityZone) ? availabilityZone : itContext.getContextParam(CloudbreakV2Constants.AVAILABILTYZONE); PlatformResourceRequestJson resourceRequestJson = new PlatformResourceRequestJson(); resourceRequestJson.setCredentialName(credentialName); resourceRequestJson.setRegion(region); resourceRequestJson.setAvailabilityZone(availabilityZone); // WHEN PlatformIpPoolsResponse response = getCloudbreakClient().connectorV1Endpoint().getIpPoolsCredentialId(resourceRequestJson); // THEN Set<IpPoolJson> ipPools = response.getIppools().get(availabilityZone); Assert.assertNotNull(ipPools, "ippools cannot be null for " + region); java.util.Optional<IpPoolJson> selected = ipPools.stream().filter(rk -> rk.getName().equals(poolName)).findFirst(); Assert.assertTrue(selected.isPresent(), "the ippool list doesn't contain [" + poolName + "]"); getItContext().putContextParam(CloudbreakV2Constants.OPENSTACK_FLOATING_POOL, selected.get().getId()); }
@Test @Parameters({ "name", "ports", "provider" }) public void testSecurityGroupCreation(@Optional("it-restricted-ambari") String name, @Optional("22,443,9443,8080") String ports, @Optional("MOCK") String provider) throws Exception { // GIVEN // WHEN SecurityGroupRequest securityGroupRequest = new SecurityGroupRequest(); securityGroupRequest.setDescription("Security group created by IT"); securityGroupRequest.setName(name); SecurityRuleRequest securityRuleRequest = new SecurityRuleRequest(""); securityRuleRequest.setProtocol("tcp"); securityRuleRequest.setSubnet("0.0.0.0/0"); securityRuleRequest.setPorts(ports); securityGroupRequest.setSecurityRules(Collections.singletonList(securityRuleRequest)); securityGroupRequest.setCloudPlatform(provider); String id = getCloudbreakClient().securityGroupEndpoint().postPrivate(securityGroupRequest).getId().toString(); // THEN Assert.assertNotNull(id); getItContext().putContextParam(CloudbreakITContextConstants.SECURITY_GROUP_ID, id, true); }
@Test @Parameters({ "credentialName", "projectId", "serviceAccountId", "serviceAccountPrivateKeyP12File" }) public void testGCPCredentialCreation(@Optional("")String credentialName, @Optional("")String projectId, @Optional("")String serviceAccountId, @Optional("")String serviceAccountPrivateKeyP12File) throws Exception { // GIVEN credentialName = StringUtils.hasLength(credentialName) ? credentialName : defaultName; projectId = StringUtils.hasLength(projectId) ? projectId : defaultProjectId; serviceAccountId = StringUtils.hasLength(serviceAccountId) ? serviceAccountId : defaultServiceAccountId; serviceAccountPrivateKeyP12File = StringUtils.hasLength(serviceAccountPrivateKeyP12File) ? serviceAccountPrivateKeyP12File : defaultP12File; String serviceAccountPrivateKey = ResourceUtil.readBase64EncodedContentFromResource(applicationContext, serviceAccountPrivateKeyP12File); CredentialRequest credentialRequest = new CredentialRequest(); credentialRequest.setCloudPlatform("GCP"); credentialRequest.setDescription("GCP credential for integartiontest"); credentialRequest.setName(credentialName); Map<String, Object> map = new HashMap<>(); map.put("projectId", projectId); map.put("serviceAccountId", serviceAccountId); map.put("serviceAccountPrivateKey", serviceAccountPrivateKey); credentialRequest.setParameters(map); // WHEN String id = getCloudbreakClient().credentialEndpoint().postPrivate(credentialRequest).getId().toString(); // THEN Assert.assertNotNull(id); getItContext().putContextParam(CloudbreakITContextConstants.CREDENTIAL_ID, id, true); }
@Test @Parameters({ "azureTemplateName", "azureVmType", "azureVolumeCount", "azureVolumeSize" }) public void testAzureTemplateCreation(@Optional("it-azure-template") String azureTemplateName, @Optional("MEDIUM") String azureVmType, @Optional("1") String azureVolumeCount, @Optional("10") String azureVolumeSize) throws Exception { // GIVEN // WHEN TemplateRequest templateRequest = new TemplateRequest(); templateRequest.setName(azureTemplateName); templateRequest.setDescription("AZURE_RM template for integration testing"); templateRequest.setCloudPlatform("AZURE_RM"); templateRequest.setInstanceType(azureVmType); templateRequest.setVolumeType("Standard_LRS"); templateRequest.setVolumeCount(Integer.valueOf(azureVolumeCount)); templateRequest.setVolumeSize(Integer.valueOf(azureVolumeSize)); String id = getCloudbreakClient().templateEndpoint().postPrivate(templateRequest).getId().toString(); // THEN Assert.assertNotNull(id); templateAdditionHelper.handleTemplateAdditions(getItContext(), id, additions); }
@Test @Parameters({ "blueprintName", "blueprintFile" }) public void testBlueprintCreation(@Optional("it-hdp-multi-blueprint") String blueprintName, @Optional("classpath:/blueprint/hdp-multinode-default.bp") String blueprintFile) throws Exception { // GIVEN String blueprintContent = ResourceUtil.readStringFromResource(applicationContext, blueprintFile); // WHEN BlueprintRequest blueprintRequest = new BlueprintRequest(); blueprintRequest.setName(blueprintName); blueprintRequest.setDescription("Blueprint for integration testing"); blueprintRequest.setAmbariBlueprint(blueprintContent); String id = getCloudbreakClient().blueprintEndpoint().postPrivate(blueprintRequest).getId().toString(); // THEN Assert.assertNotNull(id); getItContext().putContextParam(CloudbreakITContextConstants.BLUEPRINT_ID, id, true); }
@Test @Parameters({ "azureTemplateName", "azureVmType", "azureVolumeCount", "azureVolumeSize" }) public void testAzureTemplateCreation(@Optional("it-azure-template") String azureTemplateName, @Optional("MEDIUM") String azureVmType, @Optional("1") String azureVolumeCount, @Optional("10") String azureVolumeSize) throws Exception { // GIVEN // WHEN // TODO publicInAccount TemplateRequest templateRequest = new TemplateRequest(); templateRequest.setName(azureTemplateName); templateRequest.setDescription("AZURE template for integration testing"); templateRequest.setCloudPlatform("AZURE"); templateRequest.setInstanceType(azureVmType); templateRequest.setVolumeType("Standard_LRS"); templateRequest.setVolumeCount(Integer.valueOf(azureVolumeCount)); templateRequest.setVolumeSize(Integer.valueOf(azureVolumeSize)); String id = getCloudbreakClient().templateEndpoint().postPrivate(templateRequest).getId().toString(); // THEN Assert.assertNotNull(id); templateAdditionHelper.handleTemplateAdditions(getItContext(), id, additions); }