/**
 * Description: Queries the column names of a table (key: column name, value: column type name).
 * Date: 2017-11-15 11:29:32
 * @author yi.zhang
 * @param table the table name
 * @return a map of column name to column type name, or null if the query fails
 */
public Map<String, String> queryColumns(String table) {
    try {
        String sql = "select * from " + table;
        ResultSet rs = session.execute(sql);
        ColumnDefinitions rscd = rs.getColumnDefinitions();
        int count = rscd.size();
        Map<String, String> reflect = new HashMap<String, String>();
        for (int i = 0; i < count; i++) {
            String column = rscd.getName(i);
            String type = rscd.getType(i).getName().name().toLowerCase();
            reflect.put(column, type);
        }
        return reflect;
    } catch (Exception e) {
        e.printStackTrace();
    }
    return null;
}
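A minimal usage sketch of queryColumns (not from the original source): the DAO instance name "dao" and the table name "user_profile" are assumptions for illustration.

// Hypothetical caller: list each column together with its CQL type name.
Map<String, String> columns = dao.queryColumns("user_profile");
if (columns != null) {
    for (Map.Entry<String, String> entry : columns.entrySet()) {
        System.out.println(entry.getKey() + " -> " + entry.getValue());  // e.g. "user_id -> uuid"
    }
}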
private Object getValue(Row musicRow, String colname) {
    ColumnDefinitions cdef = musicRow.getColumnDefinitions();
    String type = cdef.getType(colname).getName().toString().toUpperCase();
    switch (type) {
    case "BIGINT":
        return musicRow.getLong(colname);
    case "BOOLEAN":
        return musicRow.getBool(colname);
    case "BLOB":
        return musicRow.getBytes(colname);
    case "DOUBLE":
        return musicRow.getDouble(colname);
    case "INT":
        return musicRow.getInt(colname);
    case "TIMESTAMP":
        return musicRow.getTimestamp(colname);
    default:
        logger.error("UNEXPECTED COLUMN TYPE: columname=" + colname + ", columntype=" + type);
        // fall thru
    case "VARCHAR":
        return musicRow.getString(colname);
    }
}
public SchemaStatement(Timer timer, StressSettings settings, DataSpec spec,
                       PreparedStatement statement, Integer thriftId, ConsistencyLevel cl, ValidationType validationType)
{
    super(timer, settings, spec);
    this.statement = statement;
    this.thriftId = thriftId;
    this.cl = cl;
    this.validationType = validationType;
    argumentIndex = new int[statement.getVariables().size()];
    bindBuffer = new Object[argumentIndex.length];
    int i = 0;
    for (ColumnDefinitions.Definition definition : statement.getVariables())
        argumentIndex[i++] = spec.partitionGenerator.indexOf(definition.getName());
    statement.setConsistencyLevel(JavaDriverClient.from(cl));
}
public RawNumericMetric map(Row row) {
    RawNumericMetric metricRow = new RawNumericMetric(row.getString(0), row.getString(1),
            row.getDate(2).getTime(), row.getInt(3));
    ColumnDefinitions columnDef = row.getColumnDefinitions();
    List<Definition> columnDefList = columnDef.asList();
    Map<String, String> tagMap = new HashMap<String, String>();
    for (Definition def : columnDefList) {
        if (def.getName().startsWith("tag_")) {
            tagMap.put(def.getName(), row.getString(def.getName()));
        }
    }
    if (tagMap.size() > 0) {
        metricRow.setTagMap(tagMap);
    }
    return metricRow;
}
@Test
public void testDirect() throws TranslatorException {
    CassandraExecutionFactory cef = new CassandraExecutionFactory();
    cef.setSupportsDirectQueryProcedure(true);
    String input = "call native('select $1', 'a')";
    TranslationUtility util = FakeTranslationFactory.getInstance().getExampleTranslationUtility();
    Command command = util.parseCommand(input);
    ExecutionContext ec = Mockito.mock(ExecutionContext.class);
    RuntimeMetadata rm = Mockito.mock(RuntimeMetadata.class);
    CassandraConnection connection = Mockito.mock(CassandraConnection.class);
    ResultSet rs = Mockito.mock(ResultSet.class);
    Row row = Mockito.mock(Row.class);
    ColumnDefinitions cd = Mockito.mock(ColumnDefinitions.class);
    Mockito.stub(row.getColumnDefinitions()).toReturn(cd);
    Mockito.stub(rs.one()).toReturn(row).toReturn(null);
    Mockito.stub(connection.executeQuery("select 'a'")).toReturn(rs);
    ResultSetExecution execution = (ResultSetExecution) cef.createExecution(command, ec, rm, connection);
    execution.execute();
    List<?> vals = execution.next();
    assertTrue(vals.get(0) instanceof Object[]);
}
public SchemaStatement(Timer timer, StressSettings settings, DataSpec spec,
                       PreparedStatement statement, Integer thriftId, ConsistencyLevel cl)
{
    super(timer, settings, spec);
    this.statement = statement;
    this.thriftId = thriftId;
    this.cl = cl;
    argumentIndex = new int[statement.getVariables().size()];
    bindBuffer = new Object[argumentIndex.length];
    definitions = statement.getVariables();
    int i = 0;
    for (ColumnDefinitions.Definition definition : definitions)
        argumentIndex[i++] = spec.partitionGenerator.indexOf(definition.getName());
    statement.setConsistencyLevel(JavaDriverClient.from(cl));
}
/**
 * {@inheritDoc}
 *
 * @param key
 * @param fields
 * @return
 */
@Override
public Persistent get(Object key, String[] fields) {
    if (fields == null) {
        fields = getFields();
    }
    ArrayList<String> cassandraKeys = new ArrayList<>();
    ArrayList<Object> cassandraValues = new ArrayList<>();
    AvroCassandraUtils.processKeys(mapping, key, cassandraKeys, cassandraValues);
    String cqlQuery = CassandraQueryFactory.getSelectObjectWithFieldsQuery(mapping, fields, cassandraKeys);
    SimpleStatement statement = new SimpleStatement(cqlQuery, cassandraValues.toArray());
    if (readConsistencyLevel != null) {
        statement.setConsistencyLevel(ConsistencyLevel.valueOf(readConsistencyLevel));
    }
    ResultSet resultSet = this.client.getSession().execute(statement);
    Iterator<Row> iterator = resultSet.iterator();
    ColumnDefinitions definitions = resultSet.getColumnDefinitions();
    T obj = null;
    if (iterator.hasNext()) {
        obj = cassandraDataStore.newPersistent();
        AbstractGettableData row = (AbstractGettableData) iterator.next();
        populateValuesToPersistent(row, definitions, obj, fields);
    }
    return obj;
}
/**
 * {@inheritDoc}
 *
 * @param key
 * @return
 */
@Override
public Persistent get(Object key) {
    ArrayList<String> cassandraKeys = new ArrayList<>();
    ArrayList<Object> cassandraValues = new ArrayList<>();
    AvroCassandraUtils.processKeys(mapping, key, cassandraKeys, cassandraValues);
    String cqlQuery = CassandraQueryFactory.getSelectObjectQuery(mapping, cassandraKeys);
    SimpleStatement statement = new SimpleStatement(cqlQuery, cassandraValues.toArray());
    if (readConsistencyLevel != null) {
        statement.setConsistencyLevel(ConsistencyLevel.valueOf(readConsistencyLevel));
    }
    ResultSet resultSet = client.getSession().execute(statement);
    Iterator<Row> iterator = resultSet.iterator();
    ColumnDefinitions definitions = resultSet.getColumnDefinitions();
    T obj = null;
    if (iterator.hasNext()) {
        obj = cassandraDataStore.newPersistent();
        AbstractGettableData row = (AbstractGettableData) iterator.next();
        populateValuesToPersistent(row, definitions, obj, mapping.getFieldNames());
    }
    return obj;
}
/**
 * Populates the values of a result-set row into the given Persistent object,
 * mapping each requested field through its Avro schema.
 */
private void populateValuesToPersistent(AbstractGettableData row, ColumnDefinitions columnDefinitions,
        PersistentBase base, String[] fields) {
    Object paramValue;
    for (String fieldName : fields) {
        Schema.Field avroField = base.getSchema().getField(fieldName);
        Field field = mapping.getFieldFromFieldName(fieldName);
        // ignore fields not specified in the mapping
        if (field == null || avroField == null) {
            continue;
        }
        Schema fieldSchema = avroField.schema();
        String columnName = field.getColumnName();
        paramValue = getValue(row, columnDefinitions.getType(columnName), columnName, fieldSchema);
        Object value = AvroCassandraUtils.getAvroFieldValue(paramValue, fieldSchema);
        base.put(avroField.pos(), value);
    }
}
@Override
public void runPostQuery(Object result, XMLStreamWriter xmlWriter, InternalParamCollection params, int queryLevel)
        throws DataServiceFault {
    ResultSet rs = (ResultSet) result;
    if (this.hasResult()) {
        Iterator<Row> itr = rs.iterator();
        Row row;
        DataEntry dataEntry;
        ColumnDefinitions defs = rs.getColumnDefinitions();
        while (itr.hasNext()) {
            row = itr.next();
            dataEntry = this.getDataEntryFromRow(row, defs);
            this.writeResultEntry(xmlWriter, dataEntry, params, queryLevel);
        }
    }
}
@Override
void index(Map<Row, ColumnDefinitions> rows) throws Exception {
    IndexDocument indexDocument = new IndexDocument(lang);
    FunctionUtils.forEachEx(rows, (row, columnDefinitions) -> fieldMap.mapRow(fieldMapContext, row,
            columnDefinitions, indexDocument, filePathSet));
    if (uniqueField != null && !indexDocument.hasContent(uniqueField)) {
        rwl.w.lock();
        try {
            ignoredDocumentCount++;
        } finally {
            rwl.w.unlock();
        }
        return;
    }
    indexDocumentList.add(indexDocument);
    rwl.w.lock();
    try {
        pendingIndexDocumentCount++;
    } finally {
        rwl.w.unlock();
    }
    if (DatabaseCrawlCassandraThread.this.index(indexDocumentList, bufferSize))
        setStatus(CrawlStatus.CRAWL);
}
private void execute(final Object joinColumnValue, final ComplexQuery complexQuery,
        final LinkedHashMap<Row, ColumnDefinitions> rowStack) throws Exception {
    final ResultSet resultSet = joinColumnValue == null || StringUtils.isBlank(joinColumnValue.toString())
            ? session.executeWithFetchSize(complexQuery.cql, bufferSize)
            : session.executeWithFetchSize(complexQuery.cql, bufferSize, joinColumnValue);
    if (resultSet == null)
        return;
    final ColumnDefinitions columnDefinitions = resultSet.getColumnDefinitions();
    for (final Row row : resultSet) {
        rowStack.put(row, columnDefinitions);
        if (complexQuery.join != null) {
            FunctionUtils.forEachEx(complexQuery.join, (column, queries) -> {
                final Object columnValue = row.getObject(column);
                if (queries != null)
                    for (ComplexQuery query : queries)
                        execute(columnValue, query, rowStack);
            });
        }
        if (complexQuery.index != null && complexQuery.index)
            index(rowStack);
        rowStack.remove(row);
        if (abort())
            break;
    }
}
private void handleBlob(FieldMapContext context, Row row, ColumnDefinitions columns, IndexDocument target,
        Set<String> filePathSet, String columnName, CommonFieldTarget targetField)
        throws IOException, SearchLibException, InterruptedException, ParseException, SyntaxError,
        InstantiationException, URISyntaxException, IllegalAccessException, ClassNotFoundException {
    final String filePath = columns.contains(targetField.getFilePathPrefix())
            ? row.getString(targetField.getFilePathPrefix())
            : null;
    if (StringUtils.isBlank(filePath))
        return;
    final String fileName = FilenameUtils.getName(filePath);
    Path binaryPath = null;
    try {
        binaryPath = Files.createTempFile("oss", fileName);
        File binaryFile = binaryPath.toFile();
        if (!doBlob(row, binaryFile, columnName))
            return;
        mapFieldTarget(context, targetField, true, binaryPath.toString(), target, filePathSet);
    } finally {
        if (binaryPath != null)
            Files.deleteIfExists(binaryPath);
    }
}
private String convertResultSet(ResultSet rs) {
    String colStr = "";
    String rowStr = "";
    JsonObject response = new JsonObject();
    List<Row> rows = rs.all();
    if (!rows.isEmpty() && rows.size() == 1) {
        rowStr = rows.get(0).toString();
    }
    ColumnDefinitions colDefs = rs.getColumnDefinitions();
    colStr = colDefs.toString();
    // ColumnDefinitions.toString() renders as "Columns[...]" and Row.toString() as "Row[...]";
    // strip the prefixes and the trailing bracket to keep only the comma-separated contents.
    response.putString("columns", colStr.substring(8, colStr.length() - 1));
    response.putString("values", rowStr.substring(4, rowStr.length() - 1));
    return response.toString();
}
void createOutputRowMeta(RowMetaInterface row, ResultSet rs) {
    row.clear();
    for (ColumnDefinitions.Definition d : rs.getColumnDefinitions()) {
        logDebug(d.getName() + ',' + d.getType().getName() + ',' + d.getType().asFunctionParameterString());
        ValueMetaBase valueMeta = new ValueMetaBase(d.getName(), Utils.convertDataType(d.getType()));
        valueMeta.setTrimType(0);
        row.addValueMeta(valueMeta);
    }
}
private static void syncQuorum(String key) {
    logger.info("Performing sync operation---");
    String[] splitString = key.split("\\.");
    String keyspaceName = splitString[0];
    String tableName = splitString[1];
    String primaryKeyValue = splitString[2];

    // get the primary key
    TableMetadata tableInfo = returnColumnMetadata(keyspaceName, tableName);
    String primaryKeyName = tableInfo.getPrimaryKey().get(0).getName(); // we only support a single primary key
    DataType primaryKeyType = tableInfo.getPrimaryKey().get(0).getType();
    String cqlFormattedPrimaryKeyValue = convertToCQLDataType(primaryKeyType, primaryKeyValue);

    // get the row of data from a quorum
    String selectQuery = "SELECT * FROM " + keyspaceName + "." + tableName + " WHERE "
            + primaryKeyName + "=" + cqlFormattedPrimaryKeyValue + ";";
    ResultSet results = getDSHandle().executeCriticalGet(selectQuery);

    // write it back to a quorum
    Row row = results.one();
    ColumnDefinitions colInfo = row.getColumnDefinitions();
    int totalColumns = colInfo.size();
    int counter = 1;
    String fieldValueString = "";
    for (Definition definition : colInfo) {
        String colName = definition.getName();
        if (colName.equals(primaryKeyName))
            continue;
        DataType colType = definition.getType();
        Object valueObj = getDSHandle().getColValue(row, colName, colType);
        String valueString = convertToCQLDataType(colType, valueObj);
        fieldValueString = fieldValueString + colName + "=" + valueString;
        if (counter != (totalColumns - 1))
            fieldValueString = fieldValueString + ",";
        counter = counter + 1;
    }
    String updateQuery = "UPDATE " + keyspaceName + "." + tableName + " SET " + fieldValueString
            + " WHERE " + primaryKeyName + "=" + cqlFormattedPrimaryKeyValue + ";";
    getDSHandle().executePut(updateQuery, "critical");
}
public boolean doesRowSatisfyCondition(Row row, Map<String, Object> condition) {
    ColumnDefinitions colInfo = row.getColumnDefinitions();
    for (Map.Entry<String, Object> entry : condition.entrySet()) {
        String colName = entry.getKey();
        DataType colType = colInfo.getType(colName);
        Object columnValue = getColValue(row, colName, colType);
        Object conditionValue = convertToActualDataType(colType, entry.getValue());
        if (!columnValue.equals(conditionValue))
            return false;
    }
    return true;
}
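A brief usage sketch of doesRowSatisfyCondition (hypothetical; the "dataStore" instance, the keyspace/table, and the column names and values are illustrative assumptions):

Map<String, Object> condition = new HashMap<>();
condition.put("status", "ACTIVE");
condition.put("retry_count", 0);
Row row = session.execute("SELECT * FROM ks.jobs WHERE id = 42").one();
// Proceed only when every column named in the condition map matches the fetched row.
if (row != null && dataStore.doesRowSatisfyCondition(row, condition)) {
    // apply the conditional update here
}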
public Map<String, HashMap<String, Object>> marshalData(ResultSet results) {
    Map<String, HashMap<String, Object>> resultMap = new HashMap<String, HashMap<String, Object>>();
    int counter = 0;
    for (Row row : results) {
        ColumnDefinitions colInfo = row.getColumnDefinitions();
        HashMap<String, Object> resultOutput = new HashMap<String, Object>();
        for (Definition definition : colInfo) {
            if (!definition.getName().equals("vector_ts"))
                resultOutput.put(definition.getName(), getColValue(row, definition.getName(), definition.getType()));
        }
        resultMap.put("row " + counter, resultOutput);
        counter++;
    }
    return resultMap;
}
@Deprecated
public List<Map<String, String>> OLDselectRows(String tablename, Map<String, String> cols) {
    String query = String.format("SELECT * FROM %s", tablename);
    if (cols.size() > 0) {
        // add WHERE clause
        // String[] parts = tablename.split("\\.");
        // KeyspaceMetadata ks = cluster.getMetadata().getKeyspace(parts[0]);
        // TableMetadata tableInfo = ks.getTable(parts[1]);
        String whereclause = " WHERE";
        String prefix = "";
        for (String key : cols.keySet()) {
            String val = cols.get(key);
            // DataType colType = tableInfo.getColumn(key).getType();
            whereclause = String.format("%s%s %s = '%s'", whereclause, prefix, key, val);
            prefix = " AND";
        }
        query += whereclause;
    }
    LOG.debug(query);
    ResultSet resultset = session.execute(query);
    List<Map<String, String>> results = new ArrayList<Map<String, String>>();
    for (Row row : resultset) {
        ColumnDefinitions colInfo = row.getColumnDefinitions();
        Map<String, String> map = new HashMap<String, String>();
        for (Definition definition : colInfo) {
            // map.put(definition.getName(), (String) MusicDataStore.readRow(row, definition.getName(), definition.getType()));
        }
        results.add(map);
    }
    return results;
}
@Before
public void setUp() {
    ColumnDefinitions columns = Mockito.mock( ColumnDefinitions.class );
    Mockito.when( columns.size() ).thenReturn( 3 );
    mock = Mockito.mock( PreparedStatement.class );
    Mockito.when( mock.getVariables() ).thenReturn( columns );
    statement = new ProfiledBoundStatement( mock );
}
@Before
public void setUp() {
    mock = Mockito.mock( PreparedStatement.class );
    statement = new ProfiledPreparedStatement( mock );
    ColumnDefinitions columns = Mockito.mock( ColumnDefinitions.class );
    Mockito.when( columns.size() ).thenReturn( 3 );
    Mockito.when( mock.getVariables() ).thenReturn( columns );
    PreparedId mockID = Mockito.mock( PreparedId.class );
    Mockito.when( mock.getPreparedId() ).thenReturn( mockID );
}
/**
 * @param targetClass an entity class with getter/setter method or <code>Map.class</code>
 * @param resultSet
 * @return
 */
public static DataSet extractData(final Class<?> targetClass, final ResultSet resultSet) {
    final boolean isEntity = targetClass != null && N.isEntity(targetClass);
    final boolean isMap = targetClass != null && Map.class.isAssignableFrom(targetClass);
    final ColumnDefinitions columnDefinitions = resultSet.getColumnDefinitions();
    final int columnCount = columnDefinitions.size();
    final List<Row> rowList = resultSet.all();
    final int rowCount = N.isNullOrEmpty(rowList) ? 0 : rowList.size();

    final List<String> columnNameList = new ArrayList<>(columnCount);
    final List<List<Object>> columnList = new ArrayList<>(columnCount);
    final Class<?>[] columnClasses = new Class<?>[columnCount];

    for (int i = 0; i < columnCount; i++) {
        columnNameList.add(columnDefinitions.getName(i));
        columnList.add(new ArrayList<>(rowCount));
        columnClasses[i] = isEntity ? ClassUtil.getPropGetMethod(targetClass, columnNameList.get(i)).getReturnType()
                : (isMap ? Map.class : Object[].class);
    }

    Object propValue = null;

    for (Row row : rowList) {
        for (int i = 0; i < columnCount; i++) {
            propValue = row.getObject(i);

            if (propValue instanceof Row && (columnClasses[i] == null || !columnClasses[i].isAssignableFrom(Row.class))) {
                columnList.get(i).add(readRow(columnClasses[i], (Row) propValue));
            } else if (propValue == null || targetClass == null || isMap || columnClasses[i] == null
                    || columnClasses[i].isAssignableFrom(propValue.getClass())) {
                columnList.get(i).add(propValue);
            } else {
                columnList.get(i).add(N.as(columnClasses[i], propValue));
            }
        }
    }

    return new RowDataSet(columnNameList, columnList);
}
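A hypothetical usage sketch of extractData; the enclosing utility class name (CassandraExecutor), the keyspace/table, and the session are assumptions for illustration.

// Materialize a query result into a DataSet keyed by column name.
ResultSet rs = session.execute("SELECT id, name, email FROM ks.users");
DataSet ds = CassandraExecutor.extractData(Map.class, rs);  // enclosing class name assumed
System.out.println(ds);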
@SafeVarargs
public final Stream<Object[]> stream(final String query, final Object... parameters) {
    final MutableInt columnCount = MutableInt.of(0);

    return Stream.of(execute(query, parameters).iterator()).map(new Function<Row, Object[]>() {
        @Override
        public Object[] apply(Row row) {
            if (columnCount.value() == 0) {
                final ColumnDefinitions columnDefinitions = row.getColumnDefinitions();
                columnCount.setAndGet(columnDefinitions.size());
            }

            final Object[] a = new Object[columnCount.value()];
            Object propValue = null;

            for (int i = 0, len = a.length; i < len; i++) {
                propValue = row.getObject(i);

                if (propValue instanceof Row) {
                    a[i] = readRow(Object[].class, (Row) propValue);
                } else {
                    a[i] = propValue;
                }
            }

            return a;
        }
    });
}
/**
 * Find row.
 *
 * @param pk the pk
 * @return the map
 * @throws Exception the exception
 */
public Map<String, Object> findRow(String pk) throws Exception {
    try {
        Map<String, Object> map = null;
        String select = String.format("SELECT * FROM actuator_state where login_source_sensor='%s'", pk);
        logger.debug("select={}", select);
        long before = System.currentTimeMillis();
        ResultSet rs = execute(select);
        Iterator<Row> it = rs.iterator();
        Row row = rs.one();
        if (row != null) {
            map = new HashMap<String, Object>();
            logger.info(">>> elapsed {}", (System.currentTimeMillis() - before));
            ColumnDefinitions columnDefinitions = row.getColumnDefinitions();
            for (ColumnDefinitions.Definition columnDefinition : columnDefinitions) {
                String name = columnDefinition.getName();
                Object value = row.getObject(name);
                map.put(name, value);
            }
        }
        return map;
    } catch (Exception e) {
        logger.error(e.getLocalizedMessage(), e);
        throw e;
    }
}
private void populateFieldInfosFromPojo(ColumnDefinitions rsMetaData) {
    fieldInfos = Lists.newArrayList();
    Field[] fields = pojoClass.getDeclaredFields();
    for (int i = 0; i < rsMetaData.size(); i++) {
        String columnName = rsMetaData.getName(i);
        String pojoField = getMatchingField(fields, columnName);
        if (pojoField != null && pojoField.length() != 0) {
            fieldInfos.add(new FieldInfo(columnName, pojoField, null));
        } else {
            LOG.warn("Couldn't find corresponding pojo field for column: " + columnName);
        }
    }
}
public QueryResult executeQuery(String sql) throws QueryExecutionException {
    ensureConnected();
    ResultSet rs = session.execute(sql);

    List<ColumnDefinitions.Definition> definitions = rs.getColumnDefinitions().asList();

    List<JDBCType> types = definitions.stream()
            .map(definition -> getJDBCType(definition.getType()))
            .collect(toList());

    List<String> columnNames = definitions.stream()
            .map(ColumnDefinitions.Definition::getName)
            .collect(toList());

    QueryResult.QueryResultBuilder resultBuilder = new QueryResult.QueryResultBuilder(types, columnNames);
    for (Row row : rs) {
        List<Object> builderRow = newArrayList();
        for (int i = 0; i < types.size(); ++i) {
            builderRow.add(row.getToken(i).getValue());
        }
        resultBuilder.addRow(builderRow);
    }
    return resultBuilder.build();
}
protected List<Object> asList(Row row) {
    if (row == null)
        return null;

    List<ColumnDefinitions.Definition> columns = row.getColumnDefinitions().asList();
    List<Object> list = Lists.newArrayListWithCapacity(columns.size());
    for (ColumnDefinitions.Definition column : columns)
        list.add(getObject(row, column.getName(), column.getType()));
    return list;
}
/**
 * Row as map.
 */
protected Map<String, Object> asMap(Row row) {
    if (row == null)
        return null;

    List<ColumnDefinitions.Definition> columns = row.getColumnDefinitions().asList();
    Map<String, Object> map = Maps.newHashMapWithExpectedSize(columns.size());
    for (ColumnDefinitions.Definition column : columns) {
        String name = column.getName();
        map.put(name, getObject(row, name, column.getType()));
    }
    return map;
}
public static MapperKey<DatastaxColumnKey> mapperKey(ColumnDefinitions metaData) {
    DatastaxColumnKey[] keys = new DatastaxColumnKey[metaData.size()];

    for (int i = 0; i < metaData.size(); i++) {
        keys[i] = of(metaData, i);
    }

    return new MapperKey<DatastaxColumnKey>(keys);
}
public BoundStatementMapper<T> mapTo(ColumnDefinitions variables) {
    MapperKey<DatastaxColumnKey> mapperKey = DatastaxColumnKey.mapperKey(variables);
    BoundStatementMapper<T> mapper = cache.get(mapperKey);
    if (mapper == null) {
        mapper = createMapper(mapperKey);
    }
    return mapper;
}
private void initMetaData() {
    columnsMap = new ColumnsMap();
    try {
        ColumnDefinitions columnDefinitions = resultSet.getColumnDefinitions();
        columnsCount = columnDefinitions.size();
        jdbcTypes = new String[columnsCount];
        for (int i = 1; i <= columnsCount; i++) {
            // register the column under its 1-based (JDBC-style) index; the driver's indices are 0-based
            columnsMap.registerColumn(columnDefinitions.getName(i - 1), i);
            jdbcTypes[i - 1] = columnDefinitions.getType(i - 1).getName().name(); // store column types for the converter
        }
    } catch (Exception e) {
        throw new CassandraProviderException("Unable to process result set ", e);
    }
}
public SchemaStatement(Timer timer, PartitionGenerator generator, StressSettings settings,
                       Distribution partitionCount, PreparedStatement statement, Integer thriftId,
                       ConsistencyLevel cl, ValidationType validationType)
{
    super(timer, generator, settings, partitionCount);
    this.generator = generator;
    this.statement = statement;
    this.thriftId = thriftId;
    this.cl = cl;
    this.validationType = validationType;
    argumentIndex = new int[statement.getVariables().size()];
    bindBuffer = new Object[argumentIndex.length];
    int i = 0;
    for (ColumnDefinitions.Definition definition : statement.getVariables())
        argumentIndex[i++] = generator.indexOf(definition.getName());
}
/**
 * Iterates over a ResultSet and all of its fields and values, assembling them into a JsonArray
 * with one JsonObject per row.
 *
 * @param resultSet
 *            The results to iterate over
 * @param retVals
 *            The JsonArray to add the JsonObjects processed per row
 * @return The JsonArray holding the processed rows
 */
protected JsonArray processResult(ResultSet resultSet, JsonArray retVals) {
    // Iterate the results
    for (Row row : resultSet) {
        // Row result
        JsonObject retVal = new JsonObject();

        // Get the column definitions to iterate over the different types and check
        ColumnDefinitions rowColumnDefinitions = row.getColumnDefinitions();
        for (int i = 0; i < rowColumnDefinitions.size(); i++) {
            // Skip null (empty) columns
            if (row.isNull(i)) {
                continue;
            }

            // Read the column bytes unsafe and operate on the deserialized object
            // instead of iterating over the type of the definitions
            Object columnValue = rowColumnDefinitions.getType(i).deserialize(row.getBytesUnsafe(i), PROTOCOL_VERSION);

            // Parse the returned object to a supported type
            retVal = addRow(rowColumnDefinitions.getName(i), columnValue, retVal);
        }
        // Add the row
        retVals.addObject(retVal);
    }
    return retVals;
}
@SuppressWarnings("rawtypes") private void equalsColumns(ColumnDefinitions resCols, Map<String, String> dataTableColumns) { Iterator it = dataTableColumns.entrySet().iterator(); while (it.hasNext()) { Map.Entry e = (Map.Entry) it.next(); assertThat(resCols.toString()).as("The table not contains the column.").contains(e.getKey().toString()); DataType type = resCols.getType(e.getKey().toString()); assertThat(type.getName().toString()).as("The column type is not equals.").isEqualTo(e.getValue().toString()); } }
@Override
public List<ODataEntry> readTable(String tableName) throws ODataServiceFault {
    Statement statement = new SimpleStatement("Select * from " + this.keyspace + "." + tableName);
    ResultSet resultSet = this.session.execute(statement);
    Iterator<Row> iterator = resultSet.iterator();
    List<ODataEntry> entryList = new ArrayList<>();
    ColumnDefinitions columnDefinitions = resultSet.getColumnDefinitions();
    while (iterator.hasNext()) {
        ODataEntry dataEntry = createDataEntryFromRow(tableName, iterator.next(), columnDefinitions);
        entryList.add(dataEntry);
    }
    return entryList;
}
@Override
public List<ODataEntry> readTableWithKeys(String tableName, ODataEntry keys) throws ODataServiceFault {
    List<ColumnMetadata> cassandraTableMetaData = this.session.getCluster().getMetadata().getKeyspace(this.keyspace)
            .getTable(tableName).getColumns();
    List<String> pKeys = this.primaryKeys.get(tableName);
    String query = createReadSqlWithKeys(tableName, keys);
    List<Object> values = new ArrayList<>();
    for (String column : keys.getNames()) {
        if (this.tableMetaData.get(tableName).keySet().contains(column) && pKeys.contains(column)) {
            bindParams(column, keys.getValue(column), values, cassandraTableMetaData);
        }
    }
    PreparedStatement statement = this.preparedStatementMap.get(query);
    if (statement == null) {
        statement = this.session.prepare(query);
        this.preparedStatementMap.put(query, statement);
    }
    ResultSet resultSet = this.session.execute(statement.bind(values.toArray()));
    List<ODataEntry> entryList = new ArrayList<>();
    Iterator<Row> iterator = resultSet.iterator();
    ColumnDefinitions definitions = resultSet.getColumnDefinitions();
    while (iterator.hasNext()) {
        ODataEntry dataEntry = createDataEntryFromRow(tableName, iterator.next(), definitions);
        entryList.add(dataEntry);
    }
    return entryList;
}
/**
 * Extracts the name of a ColumnDefinitions.Definition.
 *
 * @param definition the Definition object
 * @return the name of the definition, or an empty string if it has none
 */
@Override
public String transform(ColumnDefinitions.Definition definition) {
    String name = definition.getName();
    if (name == null)
        name = "";
    return name;
}
private ResultSet mockResultSet(List<Row> rows, ColumnDefinitions columnDefinitions) {
    ResultSet resultSet = mock(ResultSet.class);
    ColumnDefinitions.Definition mockDefinition = new DoubleDefinition().buildDefinitionWithName("");
    expect(resultSet.getColumnDefinitions()).andStubReturn(columnDefinitions);
    expect(resultSet.all()).andStubReturn(rows);
    replay(resultSet);
    return resultSet;
}
private ColumnDefinitions mockColumnDefinions() {
    ColumnDefinitions columnDefinions = mock(ColumnDefinitions.class);
    List<ColumnDefinitions.Definition> columnDefinitions = new ArrayList<>();
    columnDefinitions.add(new DoubleDefinition().buildDefinitionWithName(""));
    expect(columnDefinions.asList()).andStubReturn(columnDefinitions);
    replay(columnDefinions);
    return columnDefinions;
}