public Optional<List<ColumnMetadata>> getTableColMetadata(String connectorId, String dbName, String tblName)
{
    log.debug("Get list of column metadata of table " + formName(dbName, tblName));
    List<ColumnMetadata> colMetadatas = new ArrayList<>();
    MetaProto.StringListType dataTypeList = metaClient.listColumnsDataType(dbName, tblName);
    MetaProto.StringListType colNameList = metaClient.listColumns(dbName, tblName);
    if (dataTypeList.getIsEmpty() || colNameList.getIsEmpty()) {
        log.warn("No col matches!");
        return Optional.empty();
    }
    for (int i = 0; i < dataTypeList.getStrCount(); i++) {
        String dataType = dataTypeList.getStr(i);
        Type type = getType(dataType);
        ColumnMetadata metadata = new ColumnMetadata(colNameList.getStr(i), type, "", false);
        colMetadatas.add(metadata);
    }
    return Optional.of(colMetadatas);
}
@ScalarFunction("parse_agent") @Description("Returns Map, which has keys such as 'category', 'name', 'os', 'version', 'vendor' and 'os_version'") @SqlType("map<varchar,varchar>") public Block parseAgent(@TypeParameter("map<varchar,varchar>") Type mapType, @SqlType(StandardTypes.VARCHAR) Slice slice) { String argument = slice.toStringUtf8(); Map<String, String> stringMap = Classifier.parse(argument); if (pageBuilder.isFull()) { pageBuilder.reset(); } BlockBuilder blockBuilder = pageBuilder.getBlockBuilder(0); BlockBuilder singleMapBlockBuilder = blockBuilder.beginBlockEntry(); for (Map.Entry<String, String> entry : stringMap.entrySet()) { VARCHAR.writeSlice(singleMapBlockBuilder, Slices.utf8Slice(entry.getKey())); VARCHAR.writeSlice(singleMapBlockBuilder, Slices.utf8Slice(entry.getValue())); } blockBuilder.closeEntry(); pageBuilder.declarePosition(); return (Block) mapType.getObject(blockBuilder, blockBuilder.getPositionCount() - 1); }
private static Slice getSliceExpressedValue(Object value, Type type)
{
    Slice sliceValue;
    if (value instanceof String) {
        sliceValue = Slices.utf8Slice((String) value);
    }
    else if (value instanceof byte[]) {
        sliceValue = Slices.wrappedBuffer((byte[]) value);
    }
    else if (value instanceof Integer) {
        sliceValue = Slices.utf8Slice(value.toString());
    }
    else {
        throw new IllegalStateException("unsupported string field type: " + value.getClass().getName());
    }
    if (isVarcharType(type)) {
        sliceValue = truncateToLength(sliceValue, type);
    }
    if (isCharType(type)) {
        sliceValue = trimSpacesAndTruncateToLength(sliceValue, type);
    }
    return sliceValue;
}
private static void serializePrimitive(Type type, BlockBuilder builder, Object object)
{
    requireNonNull(builder, "parent builder is null");

    if (object == null) {
        builder.appendNull();
        return;
    }

    if (BOOLEAN.equals(type)) {
        BOOLEAN.writeBoolean(builder, (Boolean) object);
    }
    else if (BIGINT.equals(type) || INTEGER.equals(type) || SMALLINT.equals(type) || TINYINT.equals(type)
            || REAL.equals(type) || DATE.equals(type) || TIMESTAMP.equals(type)) {
        type.writeLong(builder, getLongExpressedValue(object));
    }
    else if (DOUBLE.equals(type)) {
        DOUBLE.writeDouble(builder, ((Number) object).doubleValue());
    }
    else if (isVarcharType(type) || VARBINARY.equals(type) || isCharType(type)) {
        type.writeSlice(builder, getSliceExpressedValue(object, type));
    }
    else {
        throw new UnsupportedOperationException("Unsupported primitive type: " + type);
    }
}
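// Aside (not from the source): a hedged sketch of driving serializePrimitive
// for a single BOOLEAN value, the one branch whose helpers are fully visible
// above. Assumes the Presto SPI types already referenced (BOOLEAN,
// BlockBuilder, BlockBuilderStatus) are in scope; the createBlockBuilder
// signature matches the call used in serializeList further below.
private static Block demoSerializeBoolean()
{
    BlockBuilder builder = BOOLEAN.createBlockBuilder(new BlockBuilderStatus(), 1);
    serializePrimitive(BOOLEAN, builder, true);
    return builder.build();
}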
public KuduRecordSet(KuduTables kuduTables, KuduClientManager kuduClientManager, KuduSplit split, List<KuduColumnHandle> columns)
{
    // The Kudu columns to be queried
    this.columns = requireNonNull(columns, "column handles is null");
    this.columnNames = columns.stream().map(kuduColumn -> kuduColumn.getColumnName()).collect(Collectors.toList());
    requireNonNull(split, "split is null");

    // The data types of the Kudu columns to be queried
    ImmutableList.Builder<Type> types = ImmutableList.builder();
    for (KuduColumnHandle column : columns) {
        types.add(column.getColumnType());
    }
    this.columnTypes = types.build();

    // this.address = Iterables.getOnlyElement(split.getAddresses());
    this.effectivePredicate = split.getEffectivePredicate();
    this.tableName = split.getTableName();
    this.kuduTables = requireNonNull(kuduTables, "kuduTables is null");
    this.kuduSplit = requireNonNull(split, "split is null");
    this.kuduClientManager = requireNonNull(kuduClientManager, "kuduClientManager is null");
}
private static LocalQueryRunner createLocalQueryRunner()
{
    Session defaultSession = testSessionBuilder()
            .setCatalog("tpch")
            .setSchema(TINY_SCHEMA_NAME)
            .build();

    LocalQueryRunner localQueryRunner = new LocalQueryRunner(defaultSession);
    InMemoryNodeManager nodeManager = localQueryRunner.getNodeManager();
    localQueryRunner.createCatalog("tpch", new TpchConnectorFactory(nodeManager, 1), ImmutableMap.<String, String>of());

    HyperLogLogPlugin plugin = new HyperLogLogPlugin();
    for (Type type : plugin.getTypes()) {
        localQueryRunner.getTypeManager().addType(type);
    }
    for (ParametricType parametricType : plugin.getParametricTypes()) {
        localQueryRunner.getTypeManager().addParametricType(parametricType);
    }
    localQueryRunner.getMetadata().addFunctions(extractFunctions(plugin.getFunctions()));

    return localQueryRunner;
}
/**
 * Sets the vector element to true if the bit is set, skipping the null values.
 */
public void getSetBits(Type type, int batchSize, BlockBuilder builder, boolean[] isNull)
        throws IOException
{
    for (int i = 0; i < batchSize; i++) {
        if (isNull[i]) {
            builder.appendNull();
        }
        else {
            // read more data if necessary
            if (bitsInData == 0) {
                readByte();
            }

            // read bit
            type.writeBoolean(builder, (data & HIGH_BIT_MASK) != 0);

            // mark bit consumed
            data <<= 1;
            bitsInData--;
        }
    }
}
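// Aside (not from the source): a standalone illustration of the MSB-first bit
// consumption used above, applied to a plain byte rather than the reader's
// buffered state. HIGH_BIT_MASK is assumed to select the top bit (0x80).
private static boolean[] demoUnpackBits(byte data)
{
    boolean[] bits = new boolean[8];
    for (int i = 0; i < 8; i++) {
        bits[i] = (data & 0x80) != 0; // same test as (data & HIGH_BIT_MASK) != 0
        data <<= 1;                   // shift out the consumed bit
    }
    return bits;
}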
public WindowFunctionSupplier getWindowFunctionImplementation(Signature signature)
{
    checkArgument(signature.getKind() == WINDOW || signature.getKind() == AGGREGATE, "%s is not a window function", signature);
    checkArgument(signature.getTypeParameterRequirements().isEmpty(), "%s has unbound type parameters", signature);

    Iterable<SqlFunction> candidates = functions.get(QualifiedName.of(signature.getName()));
    // search for exact match
    for (SqlFunction operator : candidates) {
        Type returnType = typeManager.getType(signature.getReturnType());
        List<Type> argumentTypes = resolveTypes(signature.getArgumentTypes(), typeManager);
        Map<String, Type> boundTypeParameters = operator.getSignature().bindTypeParameters(returnType, argumentTypes, false, typeManager);
        if (boundTypeParameters != null) {
            try {
                return specializedWindowCache.getUnchecked(new SpecializedFunctionKey(operator, boundTypeParameters, signature.getArgumentTypes().size()));
            }
            catch (UncheckedExecutionException e) {
                throw Throwables.propagate(e.getCause());
            }
        }
    }
    throw new PrestoException(FUNCTION_IMPLEMENTATION_MISSING, format("%s not found", signature));
}
@Override
public void configure(Binder binder)
{
    binder.bind(TypeManager.class).toInstance(typeManager);

    binder.bind(ExampleConnector.class).in(Scopes.SINGLETON);
    binder.bind(ExampleConnectorId.class).toInstance(new ExampleConnectorId(connectorId));
    binder.bind(ExampleMetadata.class).in(Scopes.SINGLETON);
    binder.bind(ExampleClient.class).in(Scopes.SINGLETON);
    binder.bind(ExampleSplitManager.class).in(Scopes.SINGLETON);
    binder.bind(ExampleRecordSetProvider.class).in(Scopes.SINGLETON);
    configBinder(binder).bindConfig(ExampleConfig.class);

    jsonBinder(binder).addDeserializerBinding(Type.class).to(TypeDeserializer.class);
    jsonCodecBinder(binder).bindMapJsonCodec(String.class, listJsonCodec(ExampleTable.class));
}
public TopNOperatorFactory(
        int operatorId,
        PlanNodeId planNodeId,
        List<? extends Type> types,
        int n,
        List<Integer> sortChannels,
        List<SortOrder> sortOrders,
        boolean partial)
{
    this.operatorId = operatorId;
    this.planNodeId = requireNonNull(planNodeId, "planNodeId is null");
    this.sourceTypes = ImmutableList.copyOf(requireNonNull(types, "types is null"));
    this.n = n;

    ImmutableList.Builder<Type> sortTypes = ImmutableList.builder();
    for (int channel : sortChannels) {
        sortTypes.add(types.get(channel));
    }
    this.sortTypes = sortTypes.build();
    this.sortChannels = ImmutableList.copyOf(requireNonNull(sortChannels, "sortChannels is null"));
    this.sortOrders = ImmutableList.copyOf(requireNonNull(sortOrders, "sortOrders is null"));
    this.partial = partial;
}
private static StreamReader[] createStreamReaders(OrcDataSource orcDataSource, List<OrcType> types, DateTimeZone hiveStorageTimeZone, Map<Integer, Type> includedColumns)
{
    List<StreamDescriptor> streamDescriptors = createStreamDescriptor("", "", 0, types, orcDataSource).getNestedStreams();

    OrcType rowType = types.get(0);
    StreamReader[] streamReaders = new StreamReader[rowType.getFieldCount()];
    for (int columnId = 0; columnId < rowType.getFieldCount(); columnId++) {
        if (includedColumns.containsKey(columnId)) {
            StreamDescriptor streamDescriptor = streamDescriptors.get(columnId);
            streamReaders[columnId] = StreamReaders.createStreamReader(streamDescriptor, hiveStorageTimeZone);
        }
    }
    return streamReaders;
}
public ScanFilterAndProjectOperatorFactory(
        int operatorId,
        PlanNodeId planNodeId,
        PlanNodeId sourceId,
        PageSourceProvider pageSourceProvider,
        CursorProcessor cursorProcessor,
        PageProcessor pageProcessor,
        Iterable<ColumnHandle> columns,
        List<Type> types)
{
    this.operatorId = operatorId;
    this.planNodeId = requireNonNull(planNodeId, "planNodeId is null");
    this.cursorProcessor = requireNonNull(cursorProcessor, "cursorProcessor is null");
    this.pageProcessor = requireNonNull(pageProcessor, "pageProcessor is null");
    this.sourceId = requireNonNull(sourceId, "sourceId is null");
    this.pageSourceProvider = requireNonNull(pageSourceProvider, "pageSourceProvider is null");
    this.columns = ImmutableList.copyOf(requireNonNull(columns, "columns is null"));
    this.types = requireNonNull(types, "types is null");
}
public RowType(List<Type> fieldTypes, Optional<List<String>> fieldNames)
{
    super(new TypeSignature(
                    ROW,
                    Lists.transform(fieldTypes, Type::getTypeSignature),
                    fieldNames.orElse(ImmutableList.of()).stream()
                            .collect(toImmutableList())),
            Block.class);

    ImmutableList.Builder<RowField> builder = ImmutableList.builder();
    for (int i = 0; i < fieldTypes.size(); i++) {
        int index = i;
        builder.add(new RowField(fieldTypes.get(i), fieldNames.map((names) -> names.get(index))));
    }
    fields = builder.build();
}
private Type instantiateParametricType(TypeSignature signature)
{
    List<TypeParameter> parameters = new ArrayList<>();

    for (TypeSignatureParameter parameter : signature.getParameters()) {
        TypeParameter typeParameter = TypeParameter.of(parameter, this);
        if (typeParameter == null) {
            return null;
        }
        parameters.add(typeParameter);
    }

    ParametricType parametricType = parametricTypes.get(signature.getBase().toLowerCase(Locale.ENGLISH));
    if (parametricType == null) {
        return null;
    }

    Type instantiatedType = parametricType.createType(parameters);
    checkState(instantiatedType.getTypeSignature().equals(signature), "Instantiated parametric type name (%s) does not match expected name (%s)", instantiatedType, signature);
    return instantiatedType;
}
@Override
public boolean equalTo(Block leftBlock, int leftPosition, Block rightBlock, int rightPosition)
{
    Block leftRow = leftBlock.getObject(leftPosition, Block.class);
    Block rightRow = rightBlock.getObject(rightPosition, Block.class);

    for (int i = 0; i < leftRow.getPositionCount(); i++) {
        checkElementNotNull(leftRow.isNull(i));
        checkElementNotNull(rightRow.isNull(i));
        Type fieldType = fields.get(i).getType();
        if (!fieldType.equalTo(leftRow, i, rightRow, i)) {
            return false;
        }
    }
    return true;
}
private static int serializedSize(List<? extends Type> types, Page expectedPage)
{
    DynamicSliceOutput sliceOutput = new DynamicSliceOutput(1024);
    writePages(blockEncodingManager, sliceOutput, expectedPage);
    Slice slice = sliceOutput.slice();

    Iterator<Page> pageIterator = readPages(blockEncodingManager, slice.getInput());
    if (pageIterator.hasNext()) {
        assertPageEquals(types, pageIterator.next(), expectedPage);
    }
    else {
        assertEquals(expectedPage.getPositionCount(), 0);
    }
    assertFalse(pageIterator.hasNext());

    return slice.length();
}
private boolean isElementType(parquet.schema.Type repeatedType, String parentName)
{
    if (repeatedType.isPrimitive() || (repeatedType.asGroupType().getFieldCount() > 1)) {
        return true;
    }
    if (repeatedType.getName().equals("array")) {
        return true; // existing avro data
    }
    if (repeatedType.getName().equals(parentName + "_tuple")) {
        return true; // existing thrift data
    }
    // false for the following cases:
    // * name is "list", which matches the spec
    // * name is "bag", which indicates existing hive or pig data
    // * ambiguous case, which should be assumed to be the 3-level structure per the spec
    return false;
}
public LookupOuterOperatorFactory(
        int operatorId,
        PlanNodeId planNodeId,
        OuterLookupSourceSupplier lookupSourceSupplier,
        List<Type> probeTypes)
{
    this.operatorId = operatorId;
    this.planNodeId = requireNonNull(planNodeId, "planNodeId is null");
    this.lookupSourceSupplier = requireNonNull(lookupSourceSupplier, "lookupSourceSupplier is null");
    this.probeTypes = ImmutableList.copyOf(requireNonNull(probeTypes, "probeTypes is null"));
    this.types = ImmutableList.<Type>builder()
            .addAll(probeTypes)
            .addAll(lookupSourceSupplier.getTypes())
            .build();
}
public static RowExpression translate(
        Expression expression,
        FunctionKind functionKind,
        IdentityHashMap<Expression, Type> types,
        FunctionRegistry functionRegistry,
        TypeManager typeManager,
        Session session,
        boolean optimize)
{
    RowExpression result = new Visitor(functionKind, types, typeManager, session.getTimeZoneKey()).process(expression, null);
    requireNonNull(result, "translated expression is null");

    if (optimize) {
        ExpressionOptimizer optimizer = new ExpressionOptimizer(functionRegistry, typeManager, session);
        return optimizer.optimize(result);
    }

    return result;
}
@JsonCreator
public HDFSColumnHandle(
        @JsonProperty("name") String name,
        @JsonProperty("type") Type type,
        @JsonProperty("comment") String comment,
        @JsonProperty("colType") ColumnType colType,
        @JsonProperty("connectorId") String connectorId)
{
    this.name = requireNonNull(name, "name is null");
    this.type = requireNonNull(type, "type is null");
    this.comment = requireNonNull(comment, "comment is null");
    this.colType = requireNonNull(colType, "colType is null");
    this.connectorId = requireNonNull(connectorId, "connectorId is null");
}
public HDFSPageSource(
        ParquetReader parquetReader,
        ParquetDataSource dataSource,
        MessageType fileSchema,
        MessageType requestedSchema,
        long totalBytes,
        List<HDFSColumnHandle> columns,
        TypeManager typeManager)
{
    checkArgument(totalBytes >= 0, "totalBytes is negative");
    this.parquetReader = requireNonNull(parquetReader, "parquetReader is null");
    this.dataSource = requireNonNull(dataSource, "dataSource is null");
    this.fileSchema = requireNonNull(fileSchema, "fileSchema is null");
    this.requestedSchema = requireNonNull(requestedSchema, "requestedSchema is null");
    this.totalBytes = totalBytes;
    this.columnSize = columns.size();
    this.constantBlocks = new Block[columnSize];

    ImmutableList.Builder<String> namesBuilder = ImmutableList.builder();
    ImmutableList.Builder<Type> typesBuilder = ImmutableList.builder();
    for (int columnIndex = 0; columnIndex < columnSize; columnIndex++) {
        HDFSColumnHandle column = columns.get(columnIndex);
        String name = column.getName();
        Type type = typeManager.getType(column.getType().getTypeSignature());
        namesBuilder.add(name);
        typesBuilder.add(type);
        // Columns absent from the file schema are filled with run-length-encoded nulls.
        if (getParquetType(column, fileSchema) == null) {
            constantBlocks[columnIndex] = RunLengthEncodedBlock.create(type, null, MAX_VECTOR_LENGTH);
        }
    }
    columnNames = namesBuilder.build();
    types = typesBuilder.build();
}
private parquet.schema.Type getParquetType(HDFSColumnHandle column, MessageType messageType)
{
    if (messageType.containsField(column.getName())) {
        return messageType.getType(column.getName());
    }
    // All HDFS column names are converted to lowercase, while Parquet preserves case,
    // so fall back to a case-insensitive scan of the Parquet fields.
    for (parquet.schema.Type type : messageType.getFields()) {
        if (type.getName().equalsIgnoreCase(column.getName())) {
            return type;
        }
    }
    return null;
}
public int getFieldIndex(MessageType fileSchema, String name)
{
    try {
        return fileSchema.getFieldIndex(name);
    }
    catch (InvalidRecordException e) {
        for (parquet.schema.Type type : fileSchema.getFields()) {
            if (type.getName().equalsIgnoreCase(name)) {
                return fileSchema.getFieldIndex(type.getName());
            }
        }
        return -1;
    }
}
/**
 * Get all column handles of the specified table.
 */
@Override
public Optional<List<HDFSColumnHandle>> getTableColumnHandle(String connectorId, String dbName, String tblName)
{
    log.debug("Get list of column handles of table " + formName(dbName, tblName));
    List<HDFSColumnHandle> columnHandles = new ArrayList<>();
    String colName;
    String colTypeName;
    String dataTypeName;
    MetaProto.StringListType listColumns = metaClient.listColumns(dbName, tblName);
    if (listColumns.getIsEmpty()) {
        log.warn("No col matches!");
        return Optional.empty();
    }
    for (int i = 0; i < listColumns.getStrCount(); i++) {
        colName = listColumns.getStr(i);
        MetaProto.ColParam colParam = metaClient.getColumn(dbName, tblName, colName);
        colTypeName = String.valueOf(colParam.getColType());
        dataTypeName = String.valueOf(colParam.getDataType());
        // Deal with col type
        HDFSColumnHandle.ColumnType colType = getColType(colTypeName);
        // Deal with data type
        Type type = getType(dataTypeName);
        columnHandles.add(new HDFSColumnHandle(colName, type, "", colType, connectorId));
    }
    return Optional.of(columnHandles);
}
private HDFSColumnHandle getColumnHandle(String connectorId, String colName, String tblName, String dbName)
{
    MetaProto.ColParam colParam = metaClient.getColumn(dbName, tblName, colName);
    if (colParam.getIsEmpty()) {
        log.error("Matched more or less than one column");
    }
    String colTypeName = colParam.getColType();
    String dataType = colParam.getDataType();
    // Deal with colType
    HDFSColumnHandle.ColumnType colType = getColType(colTypeName);
    // Deal with type
    Type type = getType(dataType);
    return new HDFSColumnHandle(colName, type, "", colType, connectorId);
}
@Override
public RecordSink getRecordSink(ConnectorTransactionHandle transactionHandle, ConnectorSession session, ConnectorInsertTableHandle connectorTableHandle)
{
    RestInsertTableHandle insertTableHandle = Types.checkType(connectorTableHandle, RestInsertTableHandle.class, "tableHandle");
    RestTableHandle tableHandle = insertTableHandle.getTableHandle();
    SchemaTableName schemaTableName = tableHandle.getSchemaTableName();
    Consumer<List> rowSink = rest.createRowSink(schemaTableName);
    List<Type> types = rest.getTypes(schemaTableName);
    return new InMemoryObjectRecordSink(types, rowSink);
}
@JsonCreator
public RestColumnHandle(
        @JsonProperty("name") String name,
        @JsonProperty("type") Type type)
{
    this.name = name;
    this.type = type;
}
@Override
public RecordSet getRecordSet(
        ConnectorTransactionHandle connectorTransactionHandle,
        ConnectorSession connectorSession,
        ConnectorSplit connectorSplit,
        List<? extends ColumnHandle> list)
{
    RestConnectorSplit split = Types.checkType(connectorSplit, RestConnectorSplit.class, "split");
    // TODO fix below cast
    List<RestColumnHandle> restColumnHandles = (List<RestColumnHandle>) list;

    SchemaTableName schemaTableName = split.getTableHandle().getSchemaTableName();
    Collection<? extends List<?>> rows = rest.getRows(schemaTableName);
    ConnectorTableMetadata tableMetadata = rest.getTableMetadata(schemaTableName);

    List<Integer> columnIndexes = restColumnHandles.stream()
            .map(column -> {
                int index = 0;
                for (ColumnMetadata columnMetadata : tableMetadata.getColumns()) {
                    if (columnMetadata.getName().equalsIgnoreCase(column.getName())) {
                        return index;
                    }
                    index++;
                }
                throw new IllegalStateException("Unknown column: " + column.getName());
            })
            .collect(toList());

    Collection<? extends List<?>> mappedRows = rows.stream()
            .map(row -> columnIndexes.stream()
                    .map(index -> row.get(index))
                    .collect(toList()))
            .collect(toList());

    List<Type> mappedTypes = restColumnHandles.stream()
            .map(RestColumnHandle::getType)
            .collect(toList());
    return new InMemoryRecordSet(mappedTypes, mappedRows);
}
@Override
public void configure(Binder binder)
{
    binder.bind(EthereumConnector.class).in(Scopes.SINGLETON);
    binder.bind(EthereumMetadata.class).in(Scopes.SINGLETON);
    binder.bind(EthereumWeb3jProvider.class).in(Scopes.SINGLETON);
    binder.bind(EthereumSplitManager.class).in(Scopes.SINGLETON);
    binder.bind(EthereumRecordSetProvider.class).in(Scopes.SINGLETON);
    configBinder(binder).bindConfig(EthereumConnectorConfig.class);

    jsonBinder(binder).addDeserializerBinding(Type.class).to(TypeDeserializer.class);
}
EthereumRecordSet(Web3j web3j, List<EthereumColumnHandle> columnHandles, EthereumSplit split)
{
    this.split = requireNonNull(split, "split is null");
    this.web3j = requireNonNull(web3j, "web3j is null");
    this.columnHandles = requireNonNull(columnHandles, "columnHandles is null");

    ImmutableList.Builder<Type> typeBuilder = ImmutableList.builder();
    for (EthereumColumnHandle handle : columnHandles) {
        typeBuilder.add(handle.getType());
    }
    this.columnTypes = typeBuilder.build();
}
private static Block serializeObject(Type type, BlockBuilder builder, Object object)
{
    if (!isStructuralType(type)) {
        serializePrimitive(type, builder, object);
        return null;
    }
    else if (isArrayType(type)) {
        return serializeList(type, builder, object);
    }
    else if (isMapType(type)) {
        return serializeMap(type, builder, object);
    }
    else if (isRowType(type)) {
        return serializeStruct(type, builder, object);
    }
    throw new RuntimeException("Unknown object type: " + type);
}
private static Block serializeList(Type type, BlockBuilder builder, Object object)
{
    List<?> list = (List) object;
    if (list == null) {
        requireNonNull(builder, "parent builder is null").appendNull();
        return null;
    }

    List<Type> typeParameters = type.getTypeParameters();
    checkArgument(typeParameters.size() == 1, "list must have exactly 1 type parameter");
    Type elementType = typeParameters.get(0);

    BlockBuilder currentBuilder;
    if (builder != null) {
        currentBuilder = builder.beginBlockEntry();
    }
    else {
        currentBuilder = elementType.createBlockBuilder(new BlockBuilderStatus(), list.size());
    }

    for (Object element : list) {
        serializeObject(elementType, currentBuilder, element);
    }

    if (builder != null) {
        builder.closeEntry();
        return null;
    }
    else {
        Block resultBlock = currentBuilder.build();
        return resultBlock;
    }
}
private static Block serializeMap(Type type, BlockBuilder builder, Object object)
{
    Map<?, ?> map = (Map) object;
    if (map == null) {
        requireNonNull(builder, "parent builder is null").appendNull();
        return null;
    }

    List<Type> typeParameters = type.getTypeParameters();
    checkArgument(typeParameters.size() == 2, "map must have exactly 2 type parameters");
    Type keyType = typeParameters.get(0);
    Type valueType = typeParameters.get(1);

    BlockBuilder currentBuilder;
    if (builder != null) {
        currentBuilder = builder.beginBlockEntry();
    }
    else {
        currentBuilder = new InterleavedBlockBuilder(typeParameters, new BlockBuilderStatus(), map.size());
    }

    for (Map.Entry<?, ?> entry : map.entrySet()) {
        // Hive skips map entries with null keys
        if (entry.getKey() != null) {
            serializeObject(keyType, currentBuilder, entry.getKey());
            serializeObject(valueType, currentBuilder, entry.getValue());
        }
    }

    if (builder != null) {
        builder.closeEntry();
        return null;
    }
    else {
        Block resultBlock = currentBuilder.build();
        return resultBlock;
    }
}
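// Aside (not from the source): a hedged sketch of the standalone path through
// serializeObject/serializeMap above (builder == null), which produces a
// self-contained map block via InterleavedBlockBuilder. mapType is assumed to
// be a map(varchar,boolean) Type obtained elsewhere (e.g. from a TypeManager),
// and ImmutableMap is Guava's; both are illustrative assumptions.
private static Block demoSerializeMap(Type mapType)
{
    return serializeObject(mapType, null, ImmutableMap.of("flag", true));
}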
@JsonCreator
public EthereumColumnHandle(
        @JsonProperty("connectorId") String connectorId,
        @JsonProperty("ordinalPosition") int ordinalPosition,
        @JsonProperty("name") String name,
        @JsonProperty("type") Type type)
{
    this.connectorId = requireNonNull(connectorId, "connectorId is null");
    this.ordinalPosition = ordinalPosition;
    this.name = requireNonNull(name, "name is null");
    this.type = requireNonNull(type, "type is null");
}
@Override
public Type getType(int i)
{
    log.info("INFORMATION: AmpoolRecordCursor getType() called.");
    checkArgument(i < columnHandles.size(), "Invalid field index");
    return columnHandles.get(i).getColumnType();
}
private void checkFieldType(int field, Type... expected)
{
    log.info("INFORMATION: AmpoolRecordCursor checkFieldType() called.");
    Type actual = getType(field);
    for (Type type : expected) {
        if (actual.equals(type)) {
            return;
        }
    }
    String expectedTypes = Joiner.on(", ").join(expected);
    throw new IllegalArgumentException(format("Expected field %s to be type %s but is %s", field, expectedTypes, actual));
}
@JsonCreator
public AmpoolColumnHandle(
        @JsonProperty("connectorId") String connectorId,
        @JsonProperty("columnName") String columnName,
        @JsonProperty("columnType") Type columnType,
        @JsonProperty("ordinalPosition") int ordinalPosition)
{
    this.connectorId = requireNonNull(connectorId, "connectorId is null");
    this.columnName = requireNonNull(columnName, "columnName is null");
    this.columnType = requireNonNull(columnType, "columnType is null");
    this.ordinalPosition = ordinalPosition;
    log.info("INFORMATION: AmpoolColumnHandle created.");
}
public AmpoolRecordSet(AmpoolSplit split, List<AmpoolColumnHandle> columnHandles, Iterator<Row> iterator)
{
    requireNonNull(split, "split is null");
    this.columnHandles = requireNonNull(columnHandles, "column handles is null");

    ImmutableList.Builder<Type> types = ImmutableList.builder();
    for (AmpoolColumnHandle column : columnHandles) {
        types.add(column.getColumnType());
    }
    this.columnTypes = types.build();
    this.iterator = iterator;
    log.info("INFORMATION: AmpoolRecordSet created successfully.");
}
@JsonCreator
public KuduColumnHandle(
        @JsonProperty("columnName") String columnName,
        @JsonProperty("columnType") Type columnType,
        @JsonProperty("ordinalPosition") int ordinalPosition)
{
    this.columnName = requireNonNull(columnName, "columnName is null");
    this.columnType = requireNonNull(columnType, "columnType is null");
    this.ordinalPosition = ordinalPosition;
}
@TypeParameter(StandardTypes.DOUBLE)
@SqlType(StandardTypes.BOOLEAN)
@Nullable
public static Boolean contains(
        @TypeParameter(StandardTypes.DOUBLE) Type elementType,
        @SqlType("array(double)") Block arrayBlock,
        @SqlType(StandardTypes.DOUBLE) double lng,
        @SqlType(StandardTypes.DOUBLE) double lat)
{
    // Flatten the array block into alternating x/y (lng/lat) coordinates.
    // Null positions are skipped and remain 0.0 in the array.
    double[] array = new double[arrayBlock.getPositionCount()];
    Polygon poly = new Polygon();
    for (int i = 0; i < arrayBlock.getPositionCount(); i++) {
        if (arrayBlock.isNull(i)) {
            continue;
        }
        array[i] = elementType.getDouble(arrayBlock, i);
    }

    // Build the polygon path from the coordinate pairs.
    poly.startPath(array[0], array[1]);
    for (int i = 2; i < array.length; i += 2) {
        poly.lineTo(array[i], array[i + 1]);
    }
    return OperatorContains.local().execute(poly, new Point(lng, lat), null, null);
}
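// Aside (not from the source): the same Esri point-in-polygon check outside
// the Presto function machinery, on a hard-coded unit square. Only the
// Polygon/Point/OperatorContains calls already used above are assumed.
private static boolean demoContainsUnitSquare(double lng, double lat)
{
    Polygon poly = new Polygon();
    poly.startPath(0, 0);
    poly.lineTo(0, 1);
    poly.lineTo(1, 1);
    poly.lineTo(1, 0);
    return OperatorContains.local().execute(poly, new Point(lng, lat), null, null);
}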