@Override
public void removeAccessTokenUsingRefreshToken(OAuth2RefreshToken refreshToken) {
    String tokenValue = refreshToken.getValue();
    // Look up the RefreshTokenToAccessToken table to locate the access token
    RefreshTokenToAccessToken refreshTokenToAccessToken = refreshTokenToAccessTokenRepository.findOne(tokenValue);
    if (refreshTokenToAccessToken != null) {
        String accessTokenKey = refreshTokenToAccessToken.getAccessTokenKey();
        AccessToken accessToken = accessTokenRepository.findOne(accessTokenKey);
        if (accessToken == null) {
            // Guard against a dangling mapping whose access token row is already gone
            return;
        }
        String jsonOAuth2AccessToken = accessToken.getoAuth2AccessToken();
        OAuth2AccessToken oAuth2AccessToken = OAuthUtil.deserializeOAuth2AccessToken(jsonOAuth2AccessToken);
        // Delete the access token from all related tables
        List<RegularStatement> statementList = prepareRemoveAccessTokenStatements(oAuth2AccessToken);
        // Delete from the RefreshTokenToAccessToken table
        Delete refreshTokenToAccessTokenDelete = CassandraTemplate.createDeleteQuery(RefreshTokenToAccessToken.TABLE,
                refreshTokenToAccessToken, null, cassandraTemplate.getConverter());
        statementList.add(refreshTokenToAccessTokenDelete);
        Batch batch = QueryBuilder.batch(statementList.toArray(new RegularStatement[statementList.size()]));
        cassandraTemplate.execute(batch);
    }
}
/**
 * Get the name of a statement.
 *
 * @param arg0 The statement.
 * @return The name used for logging.
 */
public static String getStatementName( Statement arg0 ) {
    String returnValue = "unknown";
    if ( arg0 instanceof RegularStatement ) {
        returnValue = ( (RegularStatement) arg0 ).getQueryString();
    } else if ( arg0 instanceof BoundStatement ) {
        PreparedStatement preparedStatement = ( (BoundStatement) arg0 ).preparedStatement();
        returnValue = preparedStatement.getQueryString();
    } else if ( arg0 instanceof BatchStatement ) {
        StringBuilder value = new StringBuilder( "Batch : " );
        Collection<Statement> statements = ( (BatchStatement) arg0 ).getStatements();
        boolean first = true;
        for ( Statement statement : statements ) {
            if ( first ) {
                first = false;
            } else {
                value.append( ", " );
            }
            value.append( getStatementName( statement ) );
        }
        returnValue = value.toString();
    }
    return returnValue;
}
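// A minimal usage sketch for getStatementName above (the LOGGER, session, and wrapper
// method are illustrative assumptions, not part of the original snippet): resolve a
// loggable name for any Statement flavor before executing it.
public ResultSet executeWithLogging( Session session, Statement statement ) {
    LOGGER.debug( "Executing statement: {}", getStatementName( statement ) );
    return session.execute( statement );
}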
/**
 * Execute CQL as a PreparedStatement.
 */
private ResultSet executePreparedStatement(Session session, Object messageCql, Object[] cqlParams) {
    ResultSet resultSet;
    PreparedStatement lPreparedStatement;
    if (messageCql == null) {
        // URI CQL
        lPreparedStatement = this.preparedStatement;
    } else if (messageCql instanceof String) {
        // Message CQL
        lPreparedStatement = getEndpoint().prepareStatement((String) messageCql);
    } else if (messageCql instanceof RegularStatement) {
        // Message Statement
        lPreparedStatement = getEndpoint().getSession().prepare((RegularStatement) messageCql);
    } else {
        throw new IllegalArgumentException("Invalid " + CassandraConstants.CQL_QUERY + " header");
    }
    if (isEmpty(cqlParams)) {
        resultSet = session.execute(lPreparedStatement.bind());
    } else {
        resultSet = session.execute(lPreparedStatement.bind(cqlParams));
    }
    return resultSet;
}
private void runBatchInsert(List<Insert> insertRequest) {
    try {
        Batch batch;
        if (config.getLoggedBatch()) {
            batch = QueryBuilder.batch(insertRequest.toArray(new RegularStatement[insertRequest.size()]));
        } else {
            batch = QueryBuilder.unloggedBatch(insertRequest.toArray(new RegularStatement[insertRequest.size()]));
        }
        totalCassandraInsertRequest.addAndGet(insertRequest.size());
        ResultSetFuture future = cassandraSession.executeAsync(batch);
        CallBackListener listener = new CallBackListener(future, null);
        future.addListener(listener, pool);
        incrementBatchInsertCounter();
        pendingRequestCounter.incrementAndGet();
    } catch (Throwable ex) {
        LOGGER.error("Error publishing metrics in MetricCassandraCollector:" + ex.getMessage());
        cassandraErrorCount.increment();
        registerError(ex);
    } finally {
        insertRequest.clear();
    }
}
private void runBatchUpdate(List<Update> updateRequest) {
    try {
        Batch batch;
        if (config.getLoggedBatch()) {
            batch = QueryBuilder.batch(updateRequest.toArray(new RegularStatement[updateRequest.size()]));
        } else {
            batch = QueryBuilder.unloggedBatch(updateRequest.toArray(new RegularStatement[updateRequest.size()]));
        }
        totalCassandraUpdateRequest.addAndGet(updateRequest.size());
        ResultSetFuture future = cassandraSession.executeAsync(batch);
        CallBackListener listener = new CallBackListener(future, null);
        future.addListener(listener, pool);
        incrementBatchUpdateCounter();
        pendingRequestCounter.incrementAndGet();
    } catch (Throwable ex) {
        LOGGER.error("Error publishing metrics in MetricCassandraCollector:" + ex.getMessage());
        cassandraErrorCount.increment();
        registerError(ex);
    } finally {
        updateRequest.clear();
    }
}
@Override
public void delete(final Context context, final Resource resource) {
    final Timer.Context ctx = m_deleteTimer.time();
    final ConsistencyLevel writeConsistency = m_contextConfigurations.getWriteConsistency(context);

    final List<RegularStatement> statements = Lists.newArrayList();
    definitelyUnindexResource(statements, context, resource, writeConsistency);
    definitelyUnindexResourceAttributes(statements, context, resource, writeConsistency);
    definitelyRemoveMetricName(statements, context, resource, writeConsistency);

    try {
        if (!statements.isEmpty()) {
            m_session.execute(batch(statements.toArray(new RegularStatement[statements.size()])));
        }
        m_cache.delete(context, resource);
    } finally {
        ctx.stop();
    }
}
private String retrieveSql(Object args0) {
    String sql;
    if (args0 instanceof BoundStatement) {
        sql = ((BoundStatement) args0).preparedStatement().getQueryString();
    } else if (args0 instanceof RegularStatement) {
        sql = ((RegularStatement) args0).getQueryString();
    } else if (args0 instanceof StatementWrapper) {
        // The method to get the wrapped statement is package-private, so skip it.
        sql = null;
    } else if (args0 instanceof BatchStatement) {
        // We could unroll all the batched statements and join them with ';' if need be,
        // but the result could be too long.
        sql = null;
    } else if (args0 instanceof String) {
        sql = (String) args0;
    } else {
        sql = null;
    }
    return sql;
}
private Collection<RegularStatement> diffSet(String table, String column, Clause whereClause,
                                             Set<?> past, Set<?> present) {
    List<RegularStatement> queries = Lists.newArrayList();
    Set<?> removes = Sets.newHashSet(past);
    removes.removeAll(present);
    if (!removes.isEmpty()) {
        queries.add(QueryBuilder.update(table).with(removeAll(column, removes)).where(whereClause));
    }
    Set<?> adds = Sets.newHashSet(present);
    adds.removeAll(past);
    if (!adds.isEmpty()) {
        queries.add(QueryBuilder.update(table).with(addAll(column, adds)).where(whereClause));
    }
    return queries;
}
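// A hypothetical usage sketch for diffSet (the table, column, and key are illustrative
// assumptions): compute the minimal set mutations between two snapshots of a set column
// and apply them in a single batch.
Set<String> past = Sets.newHashSet("a", "b");
Set<String> present = Sets.newHashSet("b", "c");
Collection<RegularStatement> queries =
        diffSet("users", "tags", QueryBuilder.eq("user_id", userId), past, present);
if (!queries.isEmpty()) {
    session.execute(QueryBuilder.batch(queries.toArray(new RegularStatement[queries.size()])));
}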
@Override
public void storeRefreshToken(OAuth2RefreshToken refreshToken, OAuth2Authentication authentication) {
    List<RegularStatement> statementList = new ArrayList<RegularStatement>();

    byte[] serializedRefreshToken = SerializationUtils.serialize(refreshToken);
    ByteBuffer bufferedRefreshToken = ByteBuffer.wrap(serializedRefreshToken);

    byte[] serializedAuthentication = SerializationUtils.serialize(authentication);
    ByteBuffer bufferedAuthentication = ByteBuffer.wrap(serializedAuthentication);

    WriteOptions refreshWriteOptions = new WriteOptions();
    if (refreshToken instanceof ExpiringOAuth2RefreshToken) {
        ExpiringOAuth2RefreshToken expiringRefreshToken = (ExpiringOAuth2RefreshToken) refreshToken;
        Date expiration = expiringRefreshToken.getExpiration();
        if (expiration != null) {
            int seconds = Long.valueOf((expiration.getTime() - System.currentTimeMillis()) / 1000L).intValue();
            refreshWriteOptions.setTtl(seconds);
        }
    }

    // Insert into RefreshToken table
    Insert refreshTokenInsert = CassandraTemplate.createInsertQuery(RefreshToken.TABLE,
            new RefreshToken(refreshToken.getValue(), bufferedRefreshToken), refreshWriteOptions,
            cassandraTemplate.getConverter());
    statementList.add(refreshTokenInsert);

    // Insert into RefreshTokenAuthentication table
    Insert authInsert = CassandraTemplate.createInsertQuery(RefreshTokenAuthentication.TABLE,
            new RefreshTokenAuthentication(refreshToken.getValue(), bufferedAuthentication), refreshWriteOptions,
            cassandraTemplate.getConverter());
    statementList.add(authInsert);

    Batch batch = QueryBuilder.batch(statementList.toArray(new RegularStatement[statementList.size()]));
    cassandraTemplate.execute(batch);
}
@Override
public void removeRefreshToken(OAuth2RefreshToken token) {
    String tokenValue = token.getValue();
    List<RegularStatement> statementList = new ArrayList<RegularStatement>();
    // Delete from RefreshToken table
    statementList.add(prepareDeleteByPrimaryKeyRegularStatement(RefreshToken.class, tokenValue));
    // Delete from RefreshTokenAuthentication table
    statementList.add(prepareDeleteByPrimaryKeyRegularStatement(RefreshTokenAuthentication.class, tokenValue));
    // Delete from RefreshTokenToAccessToken table
    statementList.add(prepareDeleteByPrimaryKeyRegularStatement(RefreshTokenToAccessToken.class, tokenValue));
    Batch batch = QueryBuilder.batch(statementList.toArray(new RegularStatement[statementList.size()]));
    cassandraTemplate.execute(batch);
}
private RegularStatement prepareDeleteByPrimaryKeyRegularStatement(Class<?> repositoryClass, String primaryKeyValue) {
    RegularStatement deleteRegularStatement;
    try {
        String table = repositoryClass.getDeclaredField("TABLE").get(null).toString();
        String idColumn = cassandraMappingContext.getPersistentEntity(repositoryClass)
                .getIdProperty().getColumnName().toCql();
        deleteRegularStatement = QueryBuilder.delete().from(table)
                .where(QueryBuilder.eq(idColumn, primaryKeyValue));
    } catch (IllegalArgumentException | IllegalAccessException | NoSuchFieldException | SecurityException e) {
        logger.error("Error preparing delete statement for repository {}.", repositoryClass.getSimpleName());
        throw new RuntimeException(e);
    }
    return deleteRegularStatement;
}
@Test
public void testPrepareRegularStatement() {
    RegularStatement statement = Mockito.mock( RegularStatement.class );
    PreparedStatement prepare = session.prepare( statement );
    Mockito.verify( mock ).prepare( statement );
    assertThat( prepare, CoreMatchers.instanceOf( ProfiledPreparedStatement.class ) );
}
@Test
public void testPrepareAsyncRegularStatement() {
    RegularStatement statement = Mockito.mock( RegularStatement.class );
    ListenableFuture<PreparedStatement> prepare = session.prepareAsync( statement );
    Mockito.verify( mock ).prepareAsync( statement );
    assertThat( prepare, CoreMatchers.instanceOf( ProfiledListenableFutureForPreparedStatement.class ) );
}
protected RegularStatement addStatement(GCEvent event) {
    return (RegularStatement) QueryBuilder.insertInto(TABLE_NAME)
            .value("id", event.id() != null ? UUID.fromString(event.id()) : uuid())
            .value("parent_id", event.parentEvent().map(UUID::fromString).orElse(null))
            .value("analyse_id", UUID.fromString(event.analyseId()))
            .value("bucket_id", event.bucketId())
            .value("date", event.occurred().toString(DATE_PATTERN))
            .value("jvm_id", event.jvmId())
            .value("description", event.description())
            .value("tmstm", event.timestamp())
            .value("written_at", UUIDGen.getTimeUUID(event.occurred().getMillis()))
            .value("occurred", event.occurred().getMillis())
            .value("cause", event.cause().type())
            .value("properties", event.properties())
            .value("vm_event_type", event.vmEventType().type())
            .value("capacity", Arrays.asList(event.capacity().usedBefore(), event.capacity().usedAfter(), event.capacity().total()))
            .value("total_capacity", Arrays.asList(event.totalCapacity().usedBefore(), event.totalCapacity().usedAfter(), event.totalCapacity().total()))
            .value("pause_mu", event.pauseMu())
            .value("user_time", event.user())
            .value("sys_time", event.sys())
            .value("real_time", event.real())
            .value("phase", event.phase().type())
            .value("generations", EnumSetUtils.encode(event.generations()))
            .value("concurrency", event.concurrency().type())
            .value("gen_cap_before", processKeyMap(event.capacityByGeneration(), Generation::type, Capacity::usedBefore))
            .value("gen_cap_after", processKeyMap(event.capacityByGeneration(), Generation::type, Capacity::usedAfter))
            .value("gen_cap_total", processKeyMap(event.capacityByGeneration(), Generation::type, Capacity::total))
            .value("ext", event.ext())
            .setConsistencyLevel(ConsistencyLevel.ONE);
}
protected RegularStatement addStatement(ObjectsAges oa) {
    return QueryBuilder.insertInto(TABLE_NAME)
            .value("analyse_id", UUID.fromString(oa.analyseId()))
            .value("occurred", oa.occurred().toDate())
            .value("written_at", UUIDGen.getTimeUUID(oa.occurred().getMillis()))
            .value("jvm_id", oa.jvmId())
            .value("desired_sv_size", oa.desiredSurvivorSize())
            .value("occupied", oa.occupied())
            .value("total", oa.total())
            .value("ext", oa.ext());
}
/**
 * Apply the consistency level if one is provided; otherwise leave the statement's default.
 */
public static <T extends RegularStatement> T applyConsistencyLevel(T statement, ConsistencyLevel consistencyLevel) {
    if (consistencyLevel != null) {
        statement.setConsistencyLevel(consistencyLevel);
    }
    return statement;
}
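// Usage sketch for applyConsistencyLevel (the table, column, and session are illustrative
// assumptions): the helper returns the same statement instance, so it composes inline
// with execution.
RegularStatement delete = QueryBuilder.delete()
        .from("terms")
        .where(QueryBuilder.eq("context", contextId));
session.execute(applyConsistencyLevel(delete, ConsistencyLevel.QUORUM));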
private void definitelyUnindexResource(List<RegularStatement> statement, Context context,
                                       Resource resource, ConsistencyLevel writeConsistencyLevel) {
    for (String s : m_resourceIdSplitter.splitIdIntoElements(resource.getId())) {
        RegularStatement delete = QueryBuilder.delete()
                .from(Constants.Schema.T_TERMS)
                .where(QueryBuilder.eq(Constants.Schema.C_TERMS_CONTEXT, context.getId()))
                .and(QueryBuilder.eq(Constants.Schema.C_TERMS_FIELD, Constants.DEFAULT_TERM_FIELD))
                .and(QueryBuilder.eq(Constants.Schema.C_TERMS_VALUE, s))
                .and(QueryBuilder.eq(Constants.Schema.C_TERMS_RESOURCE, resource.getId()));
        delete.setConsistencyLevel(writeConsistencyLevel);
        statement.add(delete);
    }
    if (m_options.isHierarchicalIndexingEnabled()) {
        recursivelyUnindexResourceElements(statement, context, resource.getId(), writeConsistencyLevel);
    }
}
private void definitelyUnindexResourceAttributes(List<RegularStatement> statement, Context context,
                                                 Resource resource, ConsistencyLevel writeConsistency) {
    if (!resource.getAttributes().isPresent()) {
        return;
    }
    for (Entry<String, String> field : resource.getAttributes().get().entrySet()) {
        // Search unindexing
        RegularStatement delete = QueryBuilder.delete().from(Constants.Schema.T_TERMS)
                .where(QueryBuilder.eq(Constants.Schema.C_TERMS_CONTEXT, context.getId()))
                .and(QueryBuilder.eq(Constants.Schema.C_TERMS_FIELD, Constants.DEFAULT_TERM_FIELD))
                .and(QueryBuilder.eq(Constants.Schema.C_TERMS_VALUE, field.getValue()))
                .and(QueryBuilder.eq(Constants.Schema.C_TERMS_RESOURCE, resource.getId()));
        delete.setConsistencyLevel(writeConsistency);
        statement.add(delete);

        delete = QueryBuilder.delete().from(Constants.Schema.T_TERMS)
                .where(QueryBuilder.eq(Constants.Schema.C_TERMS_CONTEXT, context.getId()))
                .and(QueryBuilder.eq(Constants.Schema.C_TERMS_FIELD, field.getKey()))
                .and(QueryBuilder.eq(Constants.Schema.C_TERMS_VALUE, field.getValue()))
                .and(QueryBuilder.eq(Constants.Schema.C_TERMS_RESOURCE, resource.getId()));
        delete.setConsistencyLevel(writeConsistency);
        statement.add(delete);

        // Storage
        delete = QueryBuilder.delete().from(Constants.Schema.T_ATTRS)
                .where(QueryBuilder.eq(Constants.Schema.C_ATTRS_CONTEXT, context.getId()))
                .and(QueryBuilder.eq(Constants.Schema.C_ATTRS_RESOURCE, resource.getId()))
                .and(QueryBuilder.eq(Constants.Schema.C_ATTRS_ATTR, field.getKey()));
        delete.setConsistencyLevel(writeConsistency);
        statement.add(delete);
    }
}
private void definitelyRemoveMetricName(List<RegularStatement> statement, Context context,
                                        Resource resource, ConsistencyLevel writeConsistency) {
    RegularStatement delete = QueryBuilder.delete().from(Constants.Schema.T_METRICS)
            .where(QueryBuilder.eq(Constants.Schema.C_METRICS_CONTEXT, context.getId()))
            .and(QueryBuilder.eq(Constants.Schema.C_METRICS_RESOURCE, resource.getId()));
    delete.setConsistencyLevel(writeConsistency);
    statement.add(delete);
}
@Override
public RegularStatement toStatement() {
    LOG.trace("Inserting metric in context: '{}' with resource id: '{}' with name: '{}'",
            m_context, m_resourceId, m_metric);
    return insertInto(Constants.Schema.T_METRICS)
            .value(Constants.Schema.C_METRICS_CONTEXT, m_context.getId())
            .value(Constants.Schema.C_METRICS_RESOURCE, m_resourceId)
            .value(Constants.Schema.C_METRICS_NAME, m_metric)
            .using(ttl(m_ttl));
}
@Override
public RegularStatement toStatement() {
    LOG.trace("Inserting attribute in context: '{}' with resource id: '{}' with name: '{}' and value: '{}'",
            m_context, m_resourceId, m_field, m_value);
    return insertInto(Constants.Schema.T_ATTRS)
            .value(Constants.Schema.C_ATTRS_CONTEXT, m_context.getId())
            .value(Constants.Schema.C_ATTRS_RESOURCE, m_resourceId)
            .value(Constants.Schema.C_ATTRS_ATTR, m_field)
            .value(Constants.Schema.C_ATTRS_VALUE, m_value)
            .using(ttl(m_ttl));
}
private Batch prepareBatch(RegularStatement... statement) {
    Batch batch;
    if (CassandraBatchType.UNLOGGED.equals(batchType)) {
        batch = QueryBuilder.unloggedBatch(statement);
    } else {
        batch = QueryBuilder.batch(statement);
    }
    batch.setConsistencyLevel(getWriteConsistencyLevel());
    return batch;
}
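// Usage sketch for prepareBatch (the table and values are illustrative assumptions):
// build the individual statements first, then hand them to the helper so batch type and
// write consistency are applied in one place.
Insert first = QueryBuilder.insertInto("events").value("id", firstId);
Insert second = QueryBuilder.insertInto("events").value("id", secondId);
session.execute(prepareBatch(first, second));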
@Override
protected void prepareAfterTrace(Object target, Object[] args, Object result, Throwable throwable) {
    final boolean success = InterceptorUtils.isSuccess(throwable);
    if (success) {
        if (target instanceof DatabaseInfoAccessor) {
            // Set databaseInfo on the PreparedStatement only when it was generated successfully.
            DatabaseInfo databaseInfo = ((DatabaseInfoAccessor) target)._$PINPOINT$_getDatabaseInfo();
            if (databaseInfo != null) {
                if (result instanceof DatabaseInfoAccessor) {
                    ((DatabaseInfoAccessor) result)._$PINPOINT$_setDatabaseInfo(databaseInfo);
                }
            }
        }
        if (result instanceof ParsingResultAccessor) {
            String sql;
            if (args[0] instanceof RegularStatement) {
                sql = ((RegularStatement) args[0]).getQueryString();
            } else {
                // Otherwise args[0] is the raw CQL string.
                sql = (String) args[0];
            }
            ParsingResult parsingResult = traceContext.parseSql(sql);
            if (parsingResult != null) {
                ((ParsingResultAccessor) result)._$PINPOINT$_setParsingResult(parsingResult);
            } else {
                if (logger.isErrorEnabled()) {
                    logger.error("sqlParsing fail. parsingResult is null sql:{}", sql);
                }
            }
        }
    }
}
private Collection<RegularStatement> diffCollection(String table, String column, Clause whereClause,
                                                    Object past, Object present) {
    if (past instanceof Set<?>) {
        return diffSet(table, column, whereClause, (Set<?>) past, (Set<?>) present);
    } else if (past instanceof Map<?, ?>) {
        return diffMap(table, column, whereClause, (Map<?, ?>) past, (Map<?, ?>) present);
    } else {
        throw new RuntimeException("Unknown collection type: " + past.getClass().getName());
    }
}
private Collection<RegularStatement> diffMap(String table, String column, Clause whereClause,
                                             Map<?, ?> past, Map<?, ?> present) {
    List<RegularStatement> queries = Lists.newArrayList();
    Set<?> removed = Sets.newHashSet(past.keySet());
    removed.removeAll(present.keySet());
    if (!removed.isEmpty()) {
        Delete.Selection delete = QueryBuilder.delete();
        for (Object o : removed) {
            delete.mapElt(column, o);
        }
        queries.add(delete.from(table).where(whereClause));
    }
    Set<Entry<?, ?>> changed = Sets.<Entry<?, ?>>newHashSet(present.entrySet());
    changed.removeAll(past.entrySet());
    if (!changed.isEmpty()) {
        Update update = QueryBuilder.update(table);
        for (Entry<?, ?> entry : changed) {
            update.with(QueryBuilder.put(column, entry.getKey(), entry.getValue()));
        }
        queries.add(update.where(whereClause));
    }
    return queries;
}
@Before
public void setup() throws Exception {
    // Mock DatastaxIO.getSession() & Session
    PowerMockito.mockStatic( DatastaxIO.class );
    Session mockSession = mock( Session.class );
    when( DatastaxIO.getSession() ).thenReturn( mockSession );

    PreparedStatement mockPreparedStatement = mock( PreparedStatement.class );
    when( mockSession.prepare( any( RegularStatement.class ) ) ).thenReturn( mockPreparedStatement );
    when( mockSession.prepare( anyString() ) ).thenReturn( mockPreparedStatement );
    when( mockPreparedStatement.setConsistencyLevel( any( ConsistencyLevel.class ) ) ).thenReturn( mockPreparedStatement );
}
protected DatastaxCassandraOrganisationStore(
        Session session,
        ConsistencyLevel writeConsistency,
        ConsistencyLevel readConsistency,
        OrganisationUriStore uriStore,
        MetricRegistry metricRegistry,
        String metricPrefix
) {
    this.session = checkNotNull(session);
    this.readConsistency = checkNotNull(readConsistency);
    this.serializer = new OrganisationSerializer();
    this.writeConsistency = checkNotNull(writeConsistency);
    this.organisationUriStore = uriStore;

    RegularStatement statement = select(PRIMARY_KEY_COLUMN, DATA_COLUMN)
            .from(ORGANISATION_TABLE)
            .where(in(PRIMARY_KEY_COLUMN, bindMarker(KEYS)));
    statement.setFetchSize(Integer.MAX_VALUE);
    selectStatement = session.prepare(statement);

    rowUpdate = session.prepare(update(ORGANISATION_TABLE)
            .where(eq(PRIMARY_KEY_COLUMN, bindMarker(ORGANISATION_ID)))
            .with(set(DATA_COLUMN, bindMarker(DATA))))
            .setConsistencyLevel(writeConsistency);

    this.metricRegistry = metricRegistry;
    this.writeMetricPrefix = metricPrefix + "write.";
}
@Override
public void removeAccessToken(OAuth2AccessToken token) {
    List<RegularStatement> statementList = prepareRemoveAccessTokenStatements(token);
    Batch batch = QueryBuilder.batch(statementList.toArray(new RegularStatement[statementList.size()]));
    cassandraTemplate.execute(batch);
}
private List<RegularStatement> prepareRemoveAccessTokenStatements(OAuth2AccessToken token) {
    String tokenValue = token.getValue();
    String jsonOAuth2AccessToken = OAuthUtil.serializeOAuth2AccessToken(token);
    List<RegularStatement> statementList = new ArrayList<RegularStatement>();

    // Delete from AccessToken table
    RegularStatement accessTokenDelete = prepareDeleteByPrimaryKeyRegularStatement(AccessToken.class, tokenValue);
    statementList.add(accessTokenDelete);

    // Look up the Authentication table for further deletes from the AuthenticationToAccessToken table
    Authentication authentication = authenticationRepository.findOne(tokenValue);
    if (authentication != null) {
        ByteBuffer bufferedOAuth2Authentication = authentication.getoAuth2Authentication();
        byte[] serializedOAuth2Authentication = new byte[bufferedOAuth2Authentication.remaining()];
        bufferedOAuth2Authentication.get(serializedOAuth2Authentication);
        OAuth2Authentication oAuth2Authentication = SerializationUtils.deserialize(serializedOAuth2Authentication);
        String clientId = oAuth2Authentication.getOAuth2Request().getClientId();

        // Delete from Authentication table
        RegularStatement authenticationDelete = prepareDeleteByPrimaryKeyRegularStatement(Authentication.class, tokenValue);
        statementList.add(authenticationDelete);

        // Delete from AuthenticationToAccessToken table
        RegularStatement authToAccessDelete = prepareDeleteByPrimaryKeyRegularStatement(AuthenticationToAccessToken.class,
                authenticationKeyGenerator.extractKey(oAuth2Authentication));
        statementList.add(authToAccessDelete);

        // Delete from UsernameToAccessToken table
        Optional<UsernameToAccessToken> optionalUsernameToAccessToken =
                usernameToAccessTokenRepository.findByKeyAndOAuth2AccessToken(
                        OAuthUtil.getApprovalKey(clientId, oAuth2Authentication.getName()), jsonOAuth2AccessToken);
        optionalUsernameToAccessToken.ifPresent(usernameToAccessToken -> {
            Delete usernameToAccessDelete = CassandraTemplate.createDeleteQuery(UsernameToAccessToken.TABLE,
                    usernameToAccessToken, null, cassandraTemplate.getConverter());
            statementList.add(usernameToAccessDelete);
        });

        // Delete from ClientIdToAccessToken table
        Optional<ClientIdToAccessToken> optionalClientIdToAccessToken =
                clientIdToAccessTokenRepository.findByKeyAndOAuth2AccessToken(clientId, jsonOAuth2AccessToken);
        optionalClientIdToAccessToken.ifPresent(clientIdToAccessToken -> {
            Delete clientIdToAccessDelete = CassandraTemplate.createDeleteQuery(ClientIdToAccessToken.TABLE,
                    clientIdToAccessToken, null, cassandraTemplate.getConverter());
            statementList.add(clientIdToAccessDelete);
        });
    }
    return statementList;
}
@Override
public ListenableFuture<PreparedStatement> prepareAsync(RegularStatement statement) {
    return delegate.prepareAsync(statement);
}
private RegularStatement maybeUseTtl(Insert value) {
    return indexTtl == null
            ? value
            : value.using(QueryBuilder.ttl(QueryBuilder.bindMarker("ttl_")));
}
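// Usage sketch for maybeUseTtl (the table, column, and marker names besides "ttl_" are
// illustrative assumptions): when indexTtl is set, the insert carries a "ttl_" bind
// marker, so the TTL value must be bound when the prepared statement is executed.
Insert insert = QueryBuilder.insertInto("terms").value("key", QueryBuilder.bindMarker("key_"));
PreparedStatement prepared = session.prepare(maybeUseTtl(insert));
BoundStatement bound = prepared.bind().setString("key_", key);
if (indexTtl != null) {
    bound.setInt("ttl_", indexTtl);
}
session.execute(bound);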
@Override
public void add(List<T> events) {
    connector.session().execute(
            QueryBuilder.unloggedBatch(events.stream().map(this::addStatement).toArray(RegularStatement[]::new))
                    .setIdempotent(true));
}
@Override
public void addAsync(List<T> events) {
    connector.session().executeAsync(
            QueryBuilder.unloggedBatch(events.stream().map(this::addStatement).toArray(RegularStatement[]::new))
                    .setIdempotent(true));
}
public PreparedStatement prepare(RegularStatement statement) {
    return session.prepare(statement);
}
public com.google.common.util.concurrent.ListenableFuture<PreparedStatement> prepareAsync(RegularStatement statement) {
    return session.prepareAsync(statement);
}
@Override
public PreparedStatement prepare(RegularStatement statement) {
    return sessionRef.prepare(statement);
}
@Override
public ListenableFuture<PreparedStatement> prepareAsync(RegularStatement statement) {
    return sessionRef.prepareAsync(statement);
}
@Test
public void insertStatementsAreDeduplicatedWhenIndexingManySamples() {
    CassandraSession session = mock(CassandraSession.class);
    ArgumentCaptor<Statement> statementCaptor = ArgumentCaptor.forClass(Statement.class);
    when(session.executeAsync(statementCaptor.capture())).thenReturn(mock(ResultSetFuture.class));
    PreparedStatement statement = mock(PreparedStatement.class);
    BoundStatement boundStatement = mock(BoundStatement.class);
    when(session.prepare(any(RegularStatement.class))).thenReturn(statement);
    when(statement.bind()).thenReturn(boundStatement);
    when(boundStatement.setString(any(String.class), any(String.class))).thenReturn(boundStatement);

    CassandraIndexingOptions options = new CassandraIndexingOptions.Builder()
            .withHierarchicalIndexing(true)
            // Limit the batch size so we can accurately count the number of statements
            .withMaxBatchSize(1)
            .build();

    MetricRegistry registry = new MetricRegistry();
    GuavaResourceMetadataCache cache = new GuavaResourceMetadataCache(2048, registry);
    CassandraIndexer indexer = new CassandraIndexer(session, 0, cache, registry, options,
            new EscapableResourceIdSplitter(), new ContextConfigurations());

    Resource r = new Resource("snmp:1589:vmware5Cpu:2:vmware5Cpu");
    List<Sample> samples = Lists.newArrayList();
    samples.add(new Sample(Timestamp.now(), r, "CpuCostopSum", MetricType.GAUGE, new Gauge(0)));
    samples.add(new Sample(Timestamp.now(), r, "CpuIdleSum", MetricType.GAUGE, new Gauge(19299.0)));
    samples.add(new Sample(Timestamp.now(), r, "CpuMaxLdSum", MetricType.GAUGE, new Gauge(0)));
    samples.add(new Sample(Timestamp.now(), r, "CpuOverlapSum", MetricType.GAUGE, new Gauge(5.0)));
    samples.add(new Sample(Timestamp.now(), r, "CpuRdySum", MetricType.GAUGE, new Gauge(41.0)));
    samples.add(new Sample(Timestamp.now(), r, "CpuRunSum", MetricType.GAUGE, new Gauge(619.0)));
    samples.add(new Sample(Timestamp.now(), r, "CpuSpwaitSum", MetricType.GAUGE, new Gauge(0)));
    samples.add(new Sample(Timestamp.now(), r, "CpuSystemSum", MetricType.GAUGE, new Gauge(0)));
    samples.add(new Sample(Timestamp.now(), r, "CpuUsagemhzAvg", MetricType.GAUGE, new Gauge(32.0)));
    samples.add(new Sample(Timestamp.now(), r, "CpuUsedSum", MetricType.GAUGE, new Gauge(299.0)));
    samples.add(new Sample(Timestamp.now(), r, "CpuWaitSum", MetricType.GAUGE, new Gauge(19343)));

    // Index the collection of samples
    indexer.update(samples);

    // Verify the number of executeAsync calls
    verify(session, times(20)).executeAsync(any(Statement.class));
}
@Test
public void canIndexManyResources() {
    final int numResources = 20000;
    final int numSamplesPerResource = 3;

    // Setup the indexer
    ResultSetFuture future = mock(ResultSetFuture.class);
    CassandraSession session = mock(CassandraSession.class);
    when(session.executeAsync(any(Statement.class))).thenReturn(future);
    PreparedStatement preparedStatement = mock(PreparedStatement.class);
    BoundStatement boundStatement = mock(BoundStatement.class);
    when(session.prepare(any(RegularStatement.class))).thenReturn(preparedStatement);
    when(preparedStatement.bind()).thenReturn(boundStatement);
    when(boundStatement.setString(any(String.class), any(String.class))).thenReturn(boundStatement);
    ContextConfigurations contexts = new ContextConfigurations();
    MetricRegistry metrics = new MetricRegistry();
    CassandraIndexingOptions options = new CassandraIndexingOptions.Builder()
            .withHierarchicalIndexing(true).build();
    ResourceIdSplitter resourceIdSplitter = new EscapableResourceIdSplitter();
    GuavaResourceMetadataCache cache = new GuavaResourceMetadataCache(numResources * 2, metrics);
    CassandraIndexer indexer = new CassandraIndexer(session, 0, cache, metrics, options, resourceIdSplitter, contexts);

    // Generate the resources and sample sets
    Resource[] resources = new Resource[numResources];
    List<List<Sample>> sampleSets = Lists.newArrayListWithCapacity(numResources);
    System.out.println("Building sample sets...");
    for (int i = 0; i < numResources; i++) {
        resources[i] = new Resource(String.format("snmp:%d:eth0-x:ifHcInOctets", i));
        List<Sample> samples = Lists.newArrayListWithCapacity(numSamplesPerResource);
        for (int j = 0; j < numSamplesPerResource; j++) {
            samples.add(new Sample(Timestamp.now(), resources[i], "y" + j, MetricType.COUNTER, new Counter(i * j)));
        }
        sampleSets.add(samples);
    }
    System.out.println("Done building sample sets.");

    // Index the resources and associated samples several times over
    for (int k = 0; k < 3; k++) {
        System.out.println("Indexing sample sets...");
        long start = System.currentTimeMillis();
        for (List<Sample> sampleSet : sampleSets) {
            indexer.update(sampleSet);
        }
        long elapsed = System.currentTimeMillis() - start;
        System.out.println("Done indexing samples in: " + elapsed + " ms");
    }
}
public PreparedStatement prepare(RegularStatement statement) {
    try {
        return m_session.prepare(statement);
    } catch (DriverException excep) {
        throw new CassandraException(excep);
    }
}