@Override public List<E> queryByOpId(String opId) throws EventStoreException { Select select = QueryBuilder.select(CassandraEventRecorder.ENTITY_ID).from(tableName); select.where(QueryBuilder.eq(CassandraEventRecorder.OP_ID, opId)); List<Row> entityEventDatas = cassandraSession.execute(select, PagingIterable::all); Map<String, E> resultList = new HashMap<>(); for (Row entityEvent : entityEventDatas) { String entityId = entityEvent.getString(CassandraEventRecorder.ENTITY_ID); if (!resultList.containsKey(entityId)) { E value = queryEntity(entityId); if (value != null) resultList.put(entityId, value); } } return new ArrayList<>(resultList.values()); }
@Test public void testQueryPrimeSimple() throws Exception { RequestPrime prime = HttpTestUtil.createSimplePrimedQuery("Select * FROM TABLE2"); HttpTestResponse response = server.prime(prime); assertNotNull(response); RequestPrime responseQuery = om.readValue(response.body, RequestPrime.class); assertThat(responseQuery).isEqualTo(prime); String contactPoint = HttpTestUtil.getContactPointString(server.getCluster(), 0); ResultSet set = HttpTestUtil.makeNativeQuery("Select * FROM TABLE2", contactPoint); List<Row> results = set.all(); assertThat(1).isEqualTo(results.size()); Row row1 = results.get(0); String column1 = row1.getString("column1"); assertThat(column1).isEqualTo("column1"); Long column2 = row1.getLong("column2"); assertThat(column2).isEqualTo(new Long(2)); }
@Test public void testBoundStatementPositional() throws Exception { HashMap<String, String> paramTypes = new HashMap<>(); paramTypes.put("c1", "ascii"); HashMap<String, Object> params = new HashMap<>(); params.put("c1", "c1"); RequestPrime prime = HttpTestUtil.createSimpleParameterizedQuery( "SELECT table FROM foo WHERE c1=?", params, paramTypes); HttpTestResponse response = server.prime(prime); assertNotNull(response); RequestPrime responseQuery = om.readValue(response.body, RequestPrime.class); assertThat(responseQuery).isEqualTo(prime); String contactPoint = HttpTestUtil.getContactPointString(server.getCluster(), 0); ResultSet set = HttpTestUtil.makeNativeBoundQueryWithPositionalParam( "SELECT table FROM foo WHERE c1=?", contactPoint, "c1"); List<Row> results = set.all(); assertThat(1).isEqualTo(results.size()); Row row1 = results.get(0); String column1 = row1.getString("column1"); assertThat(column1).isEqualTo("column1"); Long column2 = row1.getLong("column2"); }
/** * Creates and stores a new {@link Addressbook} inside of Cassandra. {@link Contact} classes are converted using the * custom {@link example.springdata.cassandra.convert.ConverterConfiguration.PersonWriteConverter}. */ @Test public void shouldCreateAddressbook() { Addressbook addressbook = new Addressbook(); addressbook.setId("private"); addressbook.setMe(new Contact("Walter", "White")); addressbook.setFriends(Arrays.asList(new Contact("Jesse", "Pinkman"), new Contact("Saul", "Goodman"))); operations.insert(addressbook); Row row = operations.selectOne(QueryBuilder.select().from("addressbook"), Row.class); assertThat(row, is(notNullValue())); assertThat(row.getString("id"), is(equalTo("private"))); assertThat(row.getString("me"), containsString("\"firstname\":\"Walter\"")); assertThat(row.getList("friends", String.class), hasSize(2)); }
/** * {@link CassandraTemplate} allows selection of projections on template-level. All basic data types including * {@link Row} can be selected. */ @Test @SuppressWarnings("unchecked") public void selectProjections() { User user = new User(); user.setId(42L); user.setUsername("heisenberg"); user.setFirstname("Walter"); user.setLastname("White"); template.insert(user); Long id = template.selectOne(QueryBuilder.select("user_id").from("users"), Long.class); assertThat(id, is(user.getId())); Row row = template.selectOne(QueryBuilder.select("user_id").from("users"), Row.class); assertThat(row.getLong(0), is(user.getId())); Map<String, Object> map = template.selectOne(QueryBuilder.select().from("users"), Map.class); assertThat(map, hasEntry("user_id", user.getId())); assertThat(map, hasEntry("fname", "Walter")); }
@Override public DataPackage downloadPackage(long packageID) { DataPackage dataPack = new DataPackage(); try { Session session; Cluster cluster; cluster = Cluster.builder().addContactPoint("127.0.0.1").build(); session = cluster.connect(); Statement statement = QueryBuilder.select() .all() .from(DATABASE, MAIN_TABLE) .where(eq(COL_ID, packageID)); ResultSet results = session.execute(statement); for(Row row : results) { dataPack.setId(row.getLong(COL_ID)); dataPack.setDescription(row.getString(COL_DESC)); dataPack.setData(row.getBytes(COL_DATA).array()); } } catch (Exception ex) { System.out.println(ex.getMessage()); } return dataPack; }
@Override public List<DataPackage> listPackages() { List<DataPackage> dataPacks = new ArrayList<>(); try { Session session; Cluster cluster; cluster = Cluster.builder().addContactPoint("127.0.0.1").build(); session = cluster.connect(); Statement statement = QueryBuilder.select() .all() .from(DATABASE, MAIN_TABLE); ResultSet results = session.execute(statement); for(Row row : results) { DataPackage dataPack = new DataPackage(); dataPack.setId(row.getLong(COL_ID)); dataPack.setDescription(row.getString(COL_DESC)); dataPacks.add(dataPack); } } catch (Exception ex) { System.out.println(ex.getMessage()); } return dataPacks; }
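// The two DAO methods above open a new Cluster and Session on every call and never close
// them, which leaks driver connections and threads. A minimal resource-safe sketch, assuming
// the same DATABASE, MAIN_TABLE and COL_* constants; the shared cluster/session fields and
// the init()/close() lifecycle methods are hypothetical, not part of the original interface.
private Cluster cluster;
private Session session;

public void init() {
    // Build the cluster once and reuse a single session for all queries.
    cluster = Cluster.builder().addContactPoint("127.0.0.1").build();
    session = cluster.connect();
}

public DataPackage downloadPackage(long packageID) {
    DataPackage dataPack = new DataPackage();
    Statement statement = QueryBuilder.select()
            .all()
            .from(DATABASE, MAIN_TABLE)
            .where(eq(COL_ID, packageID));
    for (Row row : session.execute(statement)) {
        dataPack.setId(row.getLong(COL_ID));
        dataPack.setDescription(row.getString(COL_DESC));
        dataPack.setData(row.getBytes(COL_DATA).array());
    }
    return dataPack;
}

public void close() {
    // Releases the sockets and threads held by the driver.
    if (cluster != null) {
        cluster.close();
    }
}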
static EntityEvent convertToEntityEvent(Row entityEventData) { EventKey eventKey = new EventKey(entityEventData.getString(CassandraEventRecorder.ENTITY_ID), entityEventData.getInt(CassandraEventRecorder.VERSION)); String opId = entityEventData.getString(CassandraEventRecorder.OP_ID); String eventData = entityEventData.getString(CassandraEventRecorder.EVENT_DATA); // ObjectNode jsonNode = (ObjectNode) objectMapper.readTree(eventData); /* for (String indexedField : indexedFields) { if (entityEventData.getColumnDefinitions().contains(indexedField)) jsonNode.put(indexedField, entityEventData.getString(indexedField)); }*/ return new EntityEvent(eventKey, opId, entityEventData.getTimestamp(CassandraEventRecorder.OP_DATE), entityEventData.getString(CassandraEventRecorder.EVENT_TYPE), EventState.valueOf(entityEventData.getString(CassandraEventRecorder.STATUS)), entityEventData.getString(CassandraEventRecorder.AUDIT_INFO), eventData); }
/** * Looks up the user's last known geo coordinates. * @param userId the user id * @return the user's last geo coordinates, or null if none exist * @throws Exception */ public static GeoCoordinates getUserLastGeoCoordinates(UUID userId) throws Exception { ResultSet resultSet = UsersInfo.i().executeSyncSelectLastLocation(userId); if (!resultSet.isExhausted()) { Row row = resultSet.one(); double latitude = row.getDouble(UsersInfo.kLastLatitudeColumnName); double longitude = row.getDouble(UsersInfo.kLastLongitudeColumnName); return new GeoCoordinates(latitude, longitude); } else { return null; } }
/** * Description: queries the table names of the database [not implemented] * Date: 2017-11-15 11:29:59 * @author yi.zhang * @return the table names */ @Deprecated public List<String> queryTables(){ try { List<String> tables = new ArrayList<String>(); String useQuery = "describe tables"; ResultSet rs = session.execute(useQuery); for (Row row : rs.all()) { String table = row.getString(1); tables.add(table); } return tables; } catch (Exception e) { e.printStackTrace(); } return null; }
/** * @desc Builds a Response from the ResultSet, mapping every row to a Map<columnName, columnValue>. * @param results ResultSet * @return Response containing the list of row maps */ public static Response createResponse(ResultSet results) { Response response = new Response(); List<Row> rows = results.all(); Map<String, Object> map = null; List<Map<String, Object>> responseList = new ArrayList<>(); /* strip the leading "Columns[" and the trailing "]" from ColumnDefinitions.toString() to recover the "name(type)" pairs */ String str = results.getColumnDefinitions().toString().substring(8, results.getColumnDefinitions().toString().length() - 1); String[] keyArray = str.split("\\), "); for (Row row : rows) { map = new HashMap<>(); for (int i = 0; i < keyArray.length; i++) { int pos = keyArray[i].indexOf(Constants.OPEN_BRACE); String column = instance.getProperty(keyArray[i].substring(0, pos).trim()); map.put(column, row.getObject(column)); } responseList.add(map); } ProjectLogger.log(responseList.toString()); response.put(Constants.RESPONSE, responseList); return response; }
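// A minimal alternative sketch that reads column names directly from the driver's
// ColumnDefinitions instead of parsing its toString() output. Response, Constants.RESPONSE
// and the surrounding class are assumed to be the same as above; the property-name lookup
// done via instance.getProperty(...) is deliberately omitted, so raw column names are used
// as map keys.
public static Response createResponseFromDefinitions(ResultSet results) {
    Response response = new Response();
    List<Map<String, Object>> responseList = new ArrayList<>();
    List<ColumnDefinitions.Definition> definitions = results.getColumnDefinitions().asList();
    for (Row row : results) {
        Map<String, Object> map = new HashMap<>();
        for (ColumnDefinitions.Definition definition : definitions) {
            // getObject() converts the value to the driver's default Java type for the column.
            map.put(definition.getName(), row.getObject(definition.getName()));
        }
        responseList.add(map);
    }
    response.put(Constants.RESPONSE, responseList);
    return response;
}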
private Set<ValueGroup> getValueGroupsForRuleSet(final String ruleSetName) { return EhSupport.propagateFn(() -> { final ResultSet resultSet = this.session.execute(CQL_GET_VALUE_GROUPS_FOR_RULESET, ruleSetName); final Set<ValueGroup> groups = ConcurrentHashMap.newKeySet(); for (final Row row : resultSet) { final UUID id = row.getUUID(0); final String name = row.getString(1); final List<String> drivers = row.getList(2, String.class); final DateRange range = new DateRange(row.get(3, Instant.class), row.get(4, Instant.class)); final ValueGroup group = new ValueGroup(id, name, drivers, range); groups.add(group); } return groups; }); }
private Set<RuleChange> getRuleChangeForChange(final Row row) { final String type = row.getString("rulechangetype"); if (type != null && Type.valueOf(type) != Type.NONE) { final Builder<RuleBuilder, DecisionTreeRule> ruleBuilder = RuleBuilder.creator() .with(RuleBuilder::input, row.getList("ruledrivers", String.class)) .with(RuleBuilder::output, row.getList("ruleoutputs", String.class)) .with(RuleBuilder::setId, row.getUUID("ruleid")) .with(RuleBuilder::setCode, row.getUUID("rulecode")) .with(RuleBuilder::start, row.get("rulestart", Instant.class)) .with(RuleBuilder::end, row.get("ruleend", Instant.class)); final DecisionTreeRule rule = ruleBuilder.build(); return Collections.singleton(new RuleChange(Type.valueOf(type), rule)); } return Collections.emptySet(); }
private Set<ValueGroupChange> getValueGroupChange(final Row row) { final String type = row.getString("vgchangetype"); if (type != null && Type.valueOf(type) != Type.NONE) { final ValueGroup group = new ValueGroup(row.getUUID("vgid"), row.getString("vgname"), row.getList("vgdrivers", String.class), new DateRange(row.get("vgstart", Instant.class), row.get("vgend", Instant.class))); final String driver = row.getString("vgdrivername"); if (driver != null && !driver.isEmpty()) { group.setNewRuleData(driver, row.getList("vgrulecodes", UUID.class)); } return Collections.singleton(new ValueGroupChange(Type.valueOf(type), group)); } return Collections.emptySet(); }
/** * Counts the rows in the given table. * * @param tableName the table name * @return the row count, or -1 if the query returned no row * @throws Exception if the count query fails */ public long count( String tableName ) throws Exception { long cnt = -1; try { String selectCount = String.format("SELECT COUNT(*) FROM %s", tableName); logger.debug("selectCount={}",selectCount); long before = System.currentTimeMillis(); ResultSet rs = execute(selectCount); Row row = rs.one(); if( row != null ) { cnt = row.getLong(0); } logger.info(">>> elapsed {}",(System.currentTimeMillis()-before)); } catch( Exception e ) { logger.error(e.getLocalizedMessage(), e); throw e; } return cnt; }
@Override public boolean isCheckpointCommitted(int subtaskIdx, long checkpointId) { // Pending checkpointed buffers are committed in ascending order of their // checkpoint id. This way we can tell if a checkpointed buffer was committed // just by asking the third-party storage system for the last checkpoint id // committed by the specified subtask. Long lastCommittedCheckpoint = lastCommittedCheckpoints.get(subtaskIdx); if (lastCommittedCheckpoint == null) { String statement = String.format( "SELECT checkpoint_id FROM %s.%s where sink_id='%s' and sub_id=%d;", keySpace, table, operatorId, subtaskIdx); Iterator<Row> resultIt = session.execute(statement).iterator(); if (resultIt.hasNext()) { lastCommittedCheckpoint = resultIt.next().getLong("checkpoint_id"); lastCommittedCheckpoints.put(subtaskIdx, lastCommittedCheckpoint); } } return lastCommittedCheckpoint != null && checkpointId <= lastCommittedCheckpoint; }
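// The invariant described in the comment above only holds if the commit side overwrites a
// single (sink_id, sub_id) cell with the latest checkpoint id. A hypothetical sketch of that
// counterpart write, reusing the keySpace, table, operatorId, session and
// lastCommittedCheckpoints fields from the class above:
public void commitCheckpoint(int subtaskIdx, long checkpointId) {
    String statement = String.format(
            "UPDATE %s.%s SET checkpoint_id=%d WHERE sink_id='%s' AND sub_id=%d;",
            keySpace, table, checkpointId, operatorId, subtaskIdx);
    session.execute(statement);
    // Keep the local cache in sync so isCheckpointCommitted() can skip the read next time.
    lastCommittedCheckpoints.put(subtaskIdx, checkpointId);
}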
protected IndexCreatedEvent marshalRow(Row row) { if (row == null) { return null; } //look up index here Index index = new Index(); index.setName(row.getString(Columns.INDEX_NAME)); index.setTable(row.getString(Columns.DATABASE), row.getString(Columns.TABLE)); Index toUse; try { toUse = indexRepo.read(index.getId()); } catch (ItemNotFoundException e)//this should only happen in tests that do not have full test data established; errors will be evident if this happens in the actual app { toUse = index; } IndexCreatedEvent i = new IndexCreatedEvent(row.getUUID(Columns.ID), row.getDate(Columns.STARTED_AT), row.getDate(Columns.UPDATED_AT), toUse, row.getLong(Columns.TOTAL_RECORDS), row.getLong(Columns.RECORDS_COMPLETED)); i.setFatalError(row.getString(Columns.FATAL_ERROR)); i.setErrors(row.getList(Columns.ERRORS, String.class)); i.calculateValues(); return i; }
private RepairRun buildRepairRunFromRow(Row repairRunResult, UUID id) { LOG.trace("buildRepairRunFromRow {} / {}", id, repairRunResult); return new RepairRun.Builder( repairRunResult.getString("cluster_name"), repairRunResult.getUUID("repair_unit_id"), new DateTime(repairRunResult.getTimestamp("creation_time")), repairRunResult.getDouble("intensity"), repairRunResult.getInt("segment_count"), RepairParallelism.fromName(repairRunResult.getString("repair_parallelism"))) .cause(repairRunResult.getString("cause")) .owner(repairRunResult.getString("owner")) .endTime(new DateTime(repairRunResult.getTimestamp("end_time"))) .lastEvent(repairRunResult.getString("last_event")) .pauseTime(new DateTime(repairRunResult.getTimestamp("pause_time"))) .runState(RunState.valueOf(repairRunResult.getString("state"))) .startTime(new DateTime(repairRunResult.getTimestamp("start_time"))) .build(id); }
@Override protected Row computeNext() { if (_currentRow == null) { // First iteration, return the already fetched first row for this group _currentRow = _firstRowOfCurrentGroup; return _currentRow; } _currentRow = nextRow(); if (_currentRow == null || !Objects.equal(_currentKey, getKeyForRow(_currentRow))) { // End of the complete result set or the start of a new group setFirstRowOfNextGroup(_currentRow); // The current group has now been fully iterated _currentGroupFullyIterated = true; return endOfData(); } return _currentRow; }
private Record read(Key key, ByteBuffer rowKey, ReadConsistency consistency, DeltaPlacement placement) { checkNotNull(key, "key"); checkNotNull(consistency, "consistency"); TableDDL tableDDL = placement.getDeltaTableDDL(); Statement statement = selectFrom(tableDDL) .where(eq(tableDDL.getRowKeyColumnName(), rowKey)) .setConsistencyLevel(SorConsistencies.toCql(consistency)); // Track metrics _randomReadMeter.mark(); Iterator<Iterable<Row>> groupedRows = deltaQuery(placement, statement, true, "Failed to read record %s", key); Iterable<Row> rows; if (groupedRows.hasNext()) { rows = groupedRows.next(); } else { rows = ImmutableList.of(); } // Convert the results into a Record object, lazily fetching the rest of the columns as necessary. return newRecordFromCql(key, rows); }
@Test public void test() throws Exception { CassandraConnector connector = new CassandraConnector(); connector.setHosts(new String[] { EmbeddedCassandraServerHelper.getHost() }); connector.setPort(server.getNativePort()); connector.setKeyspace(null); connector.init(); ResultSet rs = connector.session().execute("select release_version from system.local"); Row row = rs.one(); Assert.assertEquals("3.11.0", row.getString("release_version")); connector.session().execute("CREATE KEYSPACE gcplot WITH REPLICATION = { 'class' : 'SimpleStrategy', 'replication_factor' : 1 }"); connector.destroy(); connector = new CassandraConnector(); connector.setHosts(new String[] { EmbeddedCassandraServerHelper.getHost() }); connector.setPort(server.getNativePort()); connector.setKeyspace("gcplot"); connector.init(); connector.destroy(); }
/** * Like {@link #decodeChangesFromCql(java.util.Iterator)} except filtered to only include compactions. */ private Iterator<Map.Entry<UUID, Compaction>> decodeCompactionsFromCql(final Iterator<Row> iter) { return new AbstractIterator<Map.Entry<UUID, Compaction>>() { @Override protected Map.Entry<UUID, Compaction> computeNext() { while (iter.hasNext()) { Row row = iter.next(); Compaction compaction = _changeEncoder.decodeCompaction(getValue(row)); if (compaction != null) { return Maps.immutableEntry(getChangeId(row), compaction); } } return endOfData(); } }; }
private Collection<? extends RepairRun> getRepairRunsWithStateForCluster( Collection<UUID> clusterRepairRunsId, RunState runState) { Collection<RepairRun> repairRuns = Sets.newHashSet(); List<ResultSetFuture> futures = Lists.newArrayList(); for (UUID repairRunId : clusterRepairRunsId) { futures.add(session.executeAsync(getRepairRunPrepStmt.bind(repairRunId))); } for (ResultSetFuture future : futures) { ResultSet repairRunResult = future.getUninterruptibly(); for (Row row : repairRunResult) { repairRuns.add(buildRepairRunFromRow(row, row.getUUID("id"))); } } return repairRuns.stream().filter(repairRun -> repairRun.getRunState() == runState).collect(Collectors.toSet()); }
@Override protected void verifyResultsDataDiscardingUponRestore( OneInputStreamOperatorTestHarness<Tuple3<String, Integer, Integer>, Tuple3<String, Integer, Integer>> harness, CassandraTupleWriteAheadSink<Tuple3<String, Integer, Integer>> sink) { ResultSet result = session.execute(SELECT_DATA_QUERY); ArrayList<Integer> list = new ArrayList<>(); for (int x = 1; x <= 20; x++) { list.add(x); } for (int x = 41; x <= 60; x++) { list.add(x); } for (Row s : result) { list.remove(new Integer(s.getInt("counter"))); } Assert.assertTrue("The following ID's were not found in the ResultSet: " + list.toString(), list.isEmpty()); }
private String verifyNextRow(ResultSet rs) { String rowid = null; for (int col=0; col < 137; col++) { Row row = rs.one(); assertNotNull(row); if (rowid == null) { rowid = row.get("rowid", TypeCodec.varchar()); } else { assertEquals(row.get("rowid", TypeCodec.varchar()), rowid); } assertEquals((long) row.get("col", TypeCodec.bigint()), col); assertEquals( row.get("data", TypeCodec.blob()).asIntBuffer().get(), col); } return rowid; }
@Test public void testMultipleGroupsMultipleRows() throws Exception { Row[][] expected = new Row[10][50]; for (int i=0; i < 10; i++) { for (int j=0; j < 50; j++) { addRow("a" + i, "b" + j, "c" + j); expected[i][j] = row("a" + i, "b" + j, "c" + j); } } for (int i=0; i < 10; i++) { assertTrue(_iterator.hasNext()); assertRowGroupOf(_iterator.next(), expected[i]); } assertFalse(_iterator.hasNext()); }
private void testVerifyQueryParticularCluster(Function<ClusterSpec, String> f) { HttpClient client = vertx.createHttpClient(); ClusterSpec clusterQueried = this.createMultiNodeCluster(client, "3,3"); ClusterSpec clusterUnused = this.createMultiNodeCluster(client, "3,3"); String query = "Select * FROM TABLE2_" + clusterQueried.getName(); RequestPrime prime = createSimplePrimedQuery(query); HttpTestResponse response = this.primeSimpleRequest(client, prime, "/prime" + "/" + f.apply(clusterQueried)); Iterator<NodeSpec> nodeIteratorQueried = clusterQueried.getNodes().iterator(); Iterator<NodeSpec> nodeIteratorUnused = clusterUnused.getNodes().iterator(); while (nodeIteratorQueried.hasNext()) { NodeSpec node = nodeIteratorQueried.next(); String contactPoint = HttpTestUtil.getContactPointStringByNodeID(node); ResultSet set = HttpTestUtil.makeNativeQuery(query, contactPoint); List<Row> results = set.all(); assertThat(1).isEqualTo(results.size()); } while (nodeIteratorUnused.hasNext()) { String contactPointUnused = HttpTestUtil.getContactPointStringByNodeID(nodeIteratorUnused.next()); ResultSet setUnused = HttpTestUtil.makeNativeQuery(query, contactPointUnused); List<Row> resultsUnused = setUnused.all(); assertThat(0).isEqualTo(resultsUnused.size()); } }
private void testVerifyQueryParticularDatacenter( Function<ClusterSpec, String> fc, Function<DataCenterSpec, String> fd) { HttpClient client = vertx.createHttpClient(); ClusterSpec clusterQueried = this.createMultiNodeCluster(client, "3,3"); ClusterSpec clusterUnused = this.createMultiNodeCluster(client, "3,3"); String query = "Select * FROM TABLE2_" + clusterQueried.getName(); RequestPrime prime = createSimplePrimedQuery(query); List<DataCenterSpec> datacenters = (List<DataCenterSpec>) clusterQueried.getDataCenters(); DataCenterSpec datacenterQueried = datacenters.get(0); this.primeSimpleRequest(client, prime, fc.apply(clusterQueried), fd.apply(datacenterQueried)); Iterator<NodeSpec> nodeIteratorQueried = clusterQueried.getNodes().iterator(); Iterator<NodeSpec> nodeIteratorUnused = clusterUnused.getNodes().iterator(); while (nodeIteratorQueried.hasNext()) { NodeSpec node = nodeIteratorQueried.next(); String contactPoint = HttpTestUtil.getContactPointStringByNodeID(node); ResultSet set = HttpTestUtil.makeNativeQuery(query, contactPoint); List<Row> results = set.all(); if (node.getDataCenter().equals(datacenterQueried)) { assertThat(1).isEqualTo(results.size()); } else { assertThat(0).isEqualTo(results.size()); } } while (nodeIteratorUnused.hasNext()) { String contactPointUnused = HttpTestUtil.getContactPointStringByNodeID(nodeIteratorUnused.next()); ResultSet setUnused = HttpTestUtil.makeNativeQuery(query, contactPointUnused); List<Row> resultsUnused = setUnused.all(); assertThat(0).isEqualTo(resultsUnused.size()); } }
@Test public void testQueryPositionalParamSimple() throws Exception { HashMap<String, String> paramTypes = new HashMap<>(); paramTypes.put("c1", "ascii"); HashMap<String, Object> params = new HashMap<>(); params.put("c1", "c1"); RequestPrime prime = HttpTestUtil.createSimpleParameterizedQuery( "SELECT table FROM foo WHERE c1=?", params, paramTypes); HttpTestResponse response = server.prime(prime); assertNotNull(response); RequestPrime responseQuery = om.readValue(response.body, RequestPrime.class); assertThat(responseQuery).isEqualTo(prime); String contactPoint = HttpTestUtil.getContactPointString(server.getCluster(), 0); ResultSet set = HttpTestUtil.makeNativeQueryWithPositionalParams( "SELECT table FROM foo WHERE c1=?", contactPoint, "c1"); List<Row> results = set.all(); assertThat(1).isEqualTo(results.size()); Row row1 = results.get(0); String column1 = row1.getString("column1"); assertThat(column1).isEqualTo("column1"); Long column2 = row1.getLong("column2"); // Extra Param set = HttpTestUtil.makeNativeQueryWithPositionalParams( "SELECT table FROM foo WHERE c1=?", contactPoint, "c1", "extraParam"); assertThat(set.all().size()).isEqualTo(0); // Wrong Param set = HttpTestUtil.makeNativeQueryWithPositionalParams( "SELECT table FROM foo WHERE ci1=?", contactPoint, "d1"); assertThat(set.all().size()).isEqualTo(0); }
@Test public void testBoundStatementNamed() throws Exception { HashMap<String, String> paramTypes = new HashMap<>(); paramTypes.put("id", "bigint"); paramTypes.put("id2", "bigint"); HashMap<String, Object> params = new HashMap<>(); params.put("id", new Long(1)); params.put("id2", new Long(2)); RequestPrime prime = HttpTestUtil.createSimpleParameterizedQuery( "SELECT * FROM users WHERE id = :id and id2 = :id2", params, paramTypes); HttpTestResponse response = server.prime(prime); assertNotNull(response); RequestPrime responseQuery = (RequestPrime) om.readValue(response.body, RequestPrime.class); assertThat(responseQuery).isEqualTo(prime); Map<String, Long> values = ImmutableMap.<String, Long>of("id", new Long(1), "id2", new Long(2)); String contactPoint = HttpTestUtil.getContactPointString(server.getCluster(), 0); ResultSet set = HttpTestUtil.makeNativeBoundQueryWithNameParams( "SELECT * FROM users WHERE id = :id and id2 = :id2", contactPoint, values); List<Row> results = set.all(); assertThat(1).isEqualTo(results.size()); Row row1 = results.get(0); String column1 = row1.getString("column1"); assertThat(column1).isEqualTo("column1"); Long column2 = row1.getLong("column2"); assertThat(column2).isEqualTo(new Long(2)); Map<String, String> values2 = ImmutableMap.<String, String>of("id", "1", "id2", "2"); }
private void assertResult(ResultSet set) { List<Row> results = set.all(); assertThat(1).isEqualTo(results.size()); Row row1 = results.get(0); boolean column1 = row1.getBool("applied"); assertThat(column1).isTrue(); }
private E queryEntityInternal(String entityId, Select select) throws EventStoreException { List<Row> entityEventDatas = cassandraSession.execute(select, PagingIterable::all); E initialInstance, result = null; try { initialInstance = entityType.newInstance(); } catch (InstantiationException | IllegalAccessException e) { log.error(e.getMessage(), e); throw new EventStoreException(e); } for (Row entityEventData : entityEventDatas) { EntityEvent entityEvent = convertToEntityEvent(entityEventData); if (entityEvent.getStatus() == EventState.CREATED || entityEvent.getStatus() == EventState.SUCCEDEED) { EntityFunctionSpec<E, ?> functionSpec = functionMap.get(entityEvent.getEventType()); if (functionSpec != null) { EntityEventWrapper eventWrapper = new EntityEventWrapper<>(functionSpec.getQueryType(), objectMapper, entityEvent); EntityFunction<E, ?> entityFunction = functionSpec.getEntityFunction(); result = (E) entityFunction.apply(result == null ? initialInstance : result, eventWrapper); } else log.trace("Function Spec is not available for " + entityEvent.getEventType() + " EntityId:" + entityId + " Table:" + tableName); } if (result != null) { result.setId(entityId); result.setVersion(entityEvent.getEventKey().getVersion()); } } return (result == null || result.getId() == null) ? null : result; }
/** * Get entity id by entity key. * * @param key the entity key * @param tenant tenant name * @return entity id */ public Long getIdByKey(String key, String tenant) { Select select = QueryBuilder.select(ENTITY_ID_COL).from(tenant, TABLE_KEY_ID); select.where(eq(ENTITY_KEY_COL, key)); ResultSet resultSet = session.execute(select); Row row = resultSet.one(); return row == null ? null : row.getLong(ENTITY_ID_COL); }
/** * Get entity key by entity id. * * @param entityId entity id * @param tenant tenant name * @return entity key */ public String getKeyById(Long entityId, String tenant) { Select select = QueryBuilder.select(ENTITY_KEY_COL).from(tenant, VIEW_ID_KEY); select.where(eq(ENTITY_ID_COL, entityId)); ResultSet resultSet = session.execute(select); Row row = resultSet.one(); return row == null ? null : row.getString(ENTITY_KEY_COL); }
private TimelinePageVM getPage(Select select, String page, int limit) { //If we have a 'next' page set we deserialise it and add it to the select //statement if (page != null) { select.setPagingState(PagingState.fromString(page)); } //Execute the query ResultSet resultSet = session.execute(select); //Get the next paging state PagingState newPagingState = resultSet.getExecutionInfo().getPagingState(); //The number of rows that can be read without fetching int remaining = resultSet.getAvailableWithoutFetching(); List<XmTimeline> timelines = new ArrayList<>(limit); for (Row row : resultSet) { XmTimeline timeline = TimelineMapper.createTimeline(row); timelines.add(timeline); //If we can't move to the next row without fetching we break if (--remaining == 0) { break; } } //Serialise the next paging state String serializedNewPagingState = newPagingState != null ? newPagingState.toString() : null; //Return an object with a list of timelines and the next paging state return new TimelinePageVM(timelines, serializedNewPagingState); }
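// A hypothetical caller-side sketch of the paging contract implemented above: the first call
// passes a null page token, and each subsequent call feeds back the serialized paging state
// returned in the previous TimelinePageVM. buildTimelineSelect(), handleTimeline() and the
// getTimelines()/getNext() accessors are assumed names, not part of the snippet above.
String next = null;
do {
    TimelinePageVM page = getPage(buildTimelineSelect(), next, 20);
    for (XmTimeline timeline : page.getTimelines()) {
        handleTimeline(timeline);
    }
    next = page.getNext();
} while (next != null);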
/** * Converts a Cassandra row to a timeline. * @param row the Cassandra row * @return timeline object */ public static XmTimeline createTimeline(Row row) { XmTimeline timeline = new XmTimeline(); timeline.setRid(getString(row, "rid")); timeline.setLogin(getString(row, "login")); timeline.setUserKey(getString(row, "user_key")); timeline.setTenant(getString(row, "tenant")); timeline.setMsName(getString(row, "ms_name")); timeline.setOperationName(getString(row, "operation")); timeline.setEntityId(getLong(row, "entity_id")); timeline.setEntityKey(getString(row, "entity_key")); timeline.setEntityTypeKey(getString(row, "entity_type_key")); timeline.setOperationUrl(getString(row, "operation_url")); timeline.setHttpMethod(getString(row, "http_method")); timeline.setHttpStatusCode(getInt(row, "http_status_code")); timeline.setStartDate(getInstant(row, "start_date")); timeline.setRequestBody(getString(row, "request_body")); timeline.setRequestLength(getLong(row, "request_length")); timeline.setResponseBody(getString(row, "response_body")); timeline.setResponseLength(getLong(row, "response_length")); timeline.setRequestHeaders(getMap(row, "request_headers")); timeline.setResponseHeaders(getMap(row, "response_headers")); timeline.setExecTime(getLong(row, "exec_time")); timeline.setChannelType(getString(row, "channel_type")); parseUserAgent(timeline); return timeline; }
private static Instant getInstant(Row row, String column) { if (row.getColumnDefinitions().contains(column)) { Date date = row.getTimestamp(column); return date != null ? date.toInstant() : null; } return null; }
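// A minimal sketch of the companion null-safe accessors assumed by createTimeline(Row) above;
// like getInstant they guard against columns that are absent from the result set.
private static String getString(Row row, String column) {
    return row.getColumnDefinitions().contains(column) ? row.getString(column) : null;
}

private static Long getLong(Row row, String column) {
    // isNull() distinguishes an unset value from 0, which the primitive getLong() would return.
    return row.getColumnDefinitions().contains(column) && !row.isNull(column)
            ? row.getLong(column) : null;
}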
/** {@inheritDoc} */ @Override public void process(Row row, int seqNum) { if (processed.contains(seqNum)) { return; } process(row); processed.add(seqNum); }
/** {@inheritDoc} */ @Override protected Object buildObject(Row row, PersistenceSettings settings) { if (row == null) { return null; } PersistenceStrategy stgy = settings.getStrategy(); Class clazz = settings.getJavaClass(); String col = settings.getColumn(); List<PojoField> fields = settings.getFields(); if (PersistenceStrategy.PRIMITIVE.equals(stgy)) { return isTombstone(row, col) ? ActiveCacheStore.TOMBSTONE : PropertyMappingHelper.getCassandraColumnValue(row, col, clazz, null); } if (PersistenceStrategy.BLOB.equals(stgy)) { return isTombstone(row, settings.getSerializer(), col) ? ActiveCacheStore.TOMBSTONE : settings.getSerializer().deserialize(row.getBytes(col)); } if (isTombstone(row, fields)) { return ActiveCacheStore.TOMBSTONE; } Object obj; try { obj = clazz.newInstance(); } catch (Throwable e) { throw new IgniteException("Failed to instantiate object of type '" + clazz.getName() + "' using reflection", e); } for (PojoField field : fields) { field.setValueFromRow(row, obj, settings.getSerializer()); } return obj; }