Skip to content

Commit

Permalink
rebase to apache-iceberg-1.3.1
Browse files Browse the repository at this point in the history
  • Loading branch information
duyalei committed Aug 28, 2023
1 parent 38346af commit 1aa2634
Show file tree
Hide file tree
Showing 13 changed files with 120 additions and 113 deletions.
8 changes: 0 additions & 8 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -35,11 +35,3 @@ WHERE ST_Contains(geom, ST_Point(0.5, 0.5));
## Quickstart

Check this repo [docker-spark-geolake](https://github.com/spatialx-project/docker-spark-geolake) for early access, there are some [notebooks](https://github.com/spatialx-project/docker-spark-geolake/tree/main/spark/notebooks) inside.

Source code and documentation will be released soon.

## PVLDB Artifact

We are submitting a paper titled "GeoLake: Bringing Geospatial Support to Lakehouses" to VLDB (Very Large Data Bases), and we have made the experiment-related code, data, and results available at this repository. Specially, check [parquet-benchmark](https://github.com/spatialx-project/geplake-parquet-benchmark) for Parquet-related experiments (paper's section 7.2), check [serde-benchmark](https://github.com/Kontinuation/play-with-geometry-serde) for Serde-related experiments (paper's section 7.3), check [Partition-Resolution](https://github.com/spatialx-project/docker-spark-geolake/blob/main/spark/notebooks/benchmark-portotaxi.ipynb) for Partition-related experiments (paper's section 7.4), check [end-2-end](https://github.com/spatialx-project/docker-spark-geolake/blob/main/spark/notebooks/benchmark-portotaxi.ipynb) for end-2-end experiments (paper's section 7.5).

It is noteworthy that, for Partition-related experiments and end-2-end experiments, the corresponding repository only contains code for the Portotaxi dataset. For the TIGER2018 and MSBuildings datasets, you only need to modify the logic for reading the dataset in the code.
Original file line number Diff line number Diff line change
Expand Up @@ -18,14 +18,13 @@
*/
package org.apache.iceberg.expressions;

import java.util.Comparator;
import org.apache.iceberg.DataFile;
import org.apache.iceberg.types.Comparators;
import org.apache.iceberg.types.Conversions;
import org.apache.iceberg.types.Type.PrimitiveType;
import org.apache.iceberg.types.Types;

import java.util.Comparator;

public class GeoMinMaxAggregate<T> extends ValueAggregate<T> {
private final int fieldId;

Expand Down Expand Up @@ -57,13 +56,17 @@ protected Object evaluateRef(DataFile file) {
}
Pair<Double, Double> res;
if (this.geomOp == Operation.ST_MINX || this.geomOp == Operation.ST_MINY) {
res = Conversions.fromByteBuffer(Types.GeometryBoundType.get(), safeGet(file.lowerBounds(), fieldId));
res =
Conversions.fromByteBuffer(
Types.GeometryBoundType.get(), safeGet(file.lowerBounds(), fieldId));
if (res == null) {
return null;
}
return this.geomOp == Operation.ST_MINX ? res.first() : res.second();
} else if (this.geomOp == Operation.ST_MAXX || this.geomOp == Operation.ST_MAXY) {
res = Conversions.fromByteBuffer(Types.GeometryBoundType.get(), safeGet(file.upperBounds(), fieldId));
res =
Conversions.fromByteBuffer(
Types.GeometryBoundType.get(), safeGet(file.upperBounds(), fieldId));
if (res == null) {
return null;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,8 @@ protected boolean hasValue(DataFile file) {

@Override
protected Object evaluateRef(DataFile file) {
PrimitiveType resType = type.equals(Types.GeometryType.get()) ? Types.GeometryBoundType.get() : type;
PrimitiveType resType =
type.equals(Types.GeometryType.get()) ? Types.GeometryBoundType.get() : type;
return Conversions.fromByteBuffer(resType, safeGet(file.upperBounds(), fieldId));
}

Expand Down
41 changes: 21 additions & 20 deletions api/src/main/java/org/apache/iceberg/expressions/MaxAggregator.java
Original file line number Diff line number Diff line change
@@ -1,24 +1,21 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* * Licensed to the Apache Software Foundation (ASF) under one
* * or more contributor license agreements. See the NOTICE file
* * distributed with this work for additional information
* * regarding copyright ownership. The ASF licenses this file
* * to you under the Apache License, Version 2.0 (the
* * "License"); you may not use this file except in compliance
* * with the License. You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing,
* * software distributed under the License is distributed on an
* * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* * KIND, either express or implied. See the License for the
* * specific language governing permissions and limitations
* * under the License.
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.iceberg.expressions;

import java.util.Comparator;
Expand All @@ -41,10 +38,14 @@ protected void update(T value) {
// this only happens when the value is a Pair<Double, Double> (bounds of a geometry)
Pair<Double, Double> valuePair = (Pair<Double, Double>) value;
Pair<Double, Double> maxPair = (Pair<Double, Double>) max;
this.max = (T) Pair.of(Math.max(valuePair.first(), maxPair.first()),
Math.max(valuePair.second(), maxPair.second()));
this.max =
(T)
Pair.of(
Math.max(valuePair.first(), maxPair.first()),
Math.max(valuePair.second(), maxPair.second()));
} catch (ClassCastException e) {
throw new UnsupportedOperationException("MaxAggregator only supports Pair<Double, Double> values");
throw new UnsupportedOperationException(
"MaxAggregator only supports Pair<Double, Double> values");
}
} else if (comparator.compare(value, max) > 0) {
this.max = value;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,8 @@ protected boolean hasValue(DataFile file) {

@Override
protected Object evaluateRef(DataFile file) {
PrimitiveType resType = type.equals(Types.GeometryType.get()) ? Types.GeometryBoundType.get() : type;
PrimitiveType resType =
type.equals(Types.GeometryType.get()) ? Types.GeometryBoundType.get() : type;
return Conversions.fromByteBuffer(resType, safeGet(file.lowerBounds(), fieldId));
}

Expand Down
41 changes: 21 additions & 20 deletions api/src/main/java/org/apache/iceberg/expressions/MinAggregator.java
Original file line number Diff line number Diff line change
@@ -1,24 +1,21 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* * Licensed to the Apache Software Foundation (ASF) under one
* * or more contributor license agreements. See the NOTICE file
* * distributed with this work for additional information
* * regarding copyright ownership. The ASF licenses this file
* * to you under the Apache License, Version 2.0 (the
* * "License"); you may not use this file except in compliance
* * with the License. You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing,
* * software distributed under the License is distributed on an
* * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* * KIND, either express or implied. See the License for the
* * specific language governing permissions and limitations
* * under the License.
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.iceberg.expressions;

import java.util.Comparator;
Expand All @@ -41,10 +38,14 @@ protected void update(T value) {
// this only happens when the value is a Pair<Double, Double> (bounds of a geometry)
Pair<Double, Double> valuePair = (Pair<Double, Double>) value;
Pair<Double, Double> minPair = (Pair<Double, Double>) min;
this.min = (T) Pair.of(Math.min(valuePair.first(), minPair.first()),
Math.min(valuePair.second(), minPair.second()));
this.min =
(T)
Pair.of(
Math.min(valuePair.first(), minPair.first()),
Math.min(valuePair.second(), minPair.second()));
} catch (ClassCastException e) {
throw new UnsupportedOperationException("MinAggregator only supports Pair<Double, Double> values");
throw new UnsupportedOperationException(
"MinAggregator only supports Pair<Double, Double> values");
}
} else if (comparator.compare(value, min) < 0) {
this.min = value;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -74,10 +74,10 @@ boolean hasValue(DataFile file, int fieldId) {
Long valueCount = safeGet(file.valueCounts(), fieldId);
Long nullCount = safeGet(file.nullValueCounts(), fieldId);
boolean boundAllNull =
valueCount != null
&& valueCount > 0
&& nullCount != null
&& nullCount.longValue() == valueCount.longValue();
valueCount != null
&& valueCount > 0
&& nullCount != null
&& nullCount.longValue() == valueCount.longValue();
return hasBound || boundAllNull;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,10 @@
*/
package org.apache.iceberg.expressions;

import static org.apache.iceberg.expressions.TestGeometryHelpers.MetricEvalData.*;
import static org.apache.iceberg.expressions.TestGeometryHelpers.MetricEvalData.GEOM_X_MAX;
import static org.apache.iceberg.expressions.TestGeometryHelpers.MetricEvalData.GEOM_X_MIN;
import static org.apache.iceberg.expressions.TestGeometryHelpers.MetricEvalData.GEOM_Y_MAX;
import static org.apache.iceberg.expressions.TestGeometryHelpers.MetricEvalData.GEOM_Y_MIN;
import static org.apache.iceberg.types.Conversions.toByteBuffer;
import static org.apache.iceberg.types.Types.NestedField.optional;
import static org.apache.iceberg.types.Types.NestedField.required;
Expand Down Expand Up @@ -54,7 +56,7 @@ public class TestAggregateEvaluator {
// any value counts, including nulls
ImmutableMap.of(1, 50L, 3, 50L, 4, 50L, 5, 50L),
// null value counts
ImmutableMap.of(1, 10L, 3, 50L, 4, 10L, 5,0L),
ImmutableMap.of(1, 10L, 3, 50L, 4, 10L, 5, 0L),
// nan value counts
null,
// lower bounds
Expand Down Expand Up @@ -95,68 +97,78 @@ public class TestAggregateEvaluator {
ImmutableMap.of(1, toByteBuffer(IntegerType.get(), 3333)));

private static final DataFile GEOM_FILE =
new TestDataFile(
"file.avro",
Row.of(),
50,
// any value counts, including nulls
ImmutableMap.of(1, 50L, 3, 50L, 4, 50L, 5, 50L),
// null value counts
ImmutableMap.of(1, 10L, 3, 50L, 4, 10L, 5,0L),
// nan value counts
null,
// lower bounds
ImmutableMap.of(5, toByteBuffer(Types.GeometryBoundType.get(), Pair.of(GEOM_X_MIN + 1, GEOM_Y_MIN))),
// upper bounds
ImmutableMap.of(5, toByteBuffer(Types.GeometryBoundType.get(), Pair.of(GEOM_X_MAX, GEOM_Y_MAX - 1))));
new TestDataFile(
"file.avro",
Row.of(),
50,
// any value counts, including nulls
ImmutableMap.of(1, 50L, 3, 50L, 4, 50L, 5, 50L),
// null value counts
ImmutableMap.of(1, 10L, 3, 50L, 4, 10L, 5, 0L),
// nan value counts
null,
// lower bounds
ImmutableMap.of(
5, toByteBuffer(Types.GeometryBoundType.get(), Pair.of(GEOM_X_MIN + 1, GEOM_Y_MIN))),
// upper bounds
ImmutableMap.of(
5, toByteBuffer(Types.GeometryBoundType.get(), Pair.of(GEOM_X_MAX, GEOM_Y_MAX - 1))));

private static final DataFile MISSING_SOME_NULLS_GEOM_FILE =
new TestDataFile(
"file.avro",
Row.of(),
50,
// any value counts, including nulls
ImmutableMap.of(1, 50L, 3, 50L, 4, 50L, 5, 40L),
// null value counts
ImmutableMap.of(1, 10L, 3, 50L, 4, 10L, 5,10L),
// nan value counts
null,
// lower bounds
ImmutableMap.of(5, toByteBuffer(Types.GeometryBoundType.get(), Pair.of(GEOM_X_MIN, GEOM_Y_MIN + 1))),
// upper bounds
ImmutableMap.of(5, toByteBuffer(Types.GeometryBoundType.get(), Pair.of(GEOM_X_MAX - 1, GEOM_Y_MAX))));
new TestDataFile(
"file.avro",
Row.of(),
50,
// any value counts, including nulls
ImmutableMap.of(1, 50L, 3, 50L, 4, 50L, 5, 40L),
// null value counts
ImmutableMap.of(1, 10L, 3, 50L, 4, 10L, 5, 10L),
// nan value counts
null,
// lower bounds
ImmutableMap.of(
5, toByteBuffer(Types.GeometryBoundType.get(), Pair.of(GEOM_X_MIN, GEOM_Y_MIN + 1))),
// upper bounds
ImmutableMap.of(
5, toByteBuffer(Types.GeometryBoundType.get(), Pair.of(GEOM_X_MAX - 1, GEOM_Y_MAX))));

private static final DataFile[] dataFiles = {
FILE, MISSING_SOME_NULLS_STATS_1, MISSING_SOME_NULLS_STATS_2
};

private static final DataFile[] geoDataFiles = {
GEOM_FILE, MISSING_SOME_NULLS_GEOM_FILE
};
private static final DataFile[] geoDataFiles = {GEOM_FILE, MISSING_SOME_NULLS_GEOM_FILE};

@Test
public void testGeomAggregate() {
List<Expression> list =
ImmutableList.of(
Expressions.countStar(),
Expressions.count("geom"),
Expressions.max("geom"),
Expressions.min("geom"),
Expressions.stMinX("geom"),
Expressions.stMinY("geom"),
Expressions.stMaxX("geom"),
Expressions.stMaxY("geom")
);
ImmutableList.of(
Expressions.countStar(),
Expressions.count("geom"),
Expressions.max("geom"),
Expressions.min("geom"),
Expressions.stMinX("geom"),
Expressions.stMinY("geom"),
Expressions.stMaxX("geom"),
Expressions.stMaxY("geom"));
AggregateEvaluator aggregateEvaluator = AggregateEvaluator.create(SCHEMA, list);
for (DataFile dataFile : geoDataFiles) {
aggregateEvaluator.update(dataFile);
}
Assert.assertTrue(aggregateEvaluator.allAggregatorsValid());
StructLike result = aggregateEvaluator.result();
for (int i=0; i<result.size(); i++) {
for (int i = 0; i < result.size(); i++) {
System.out.println(result.get(i, Object.class));
}
Object[] expected = {100L, 80L, Pair.of(GEOM_X_MAX, GEOM_Y_MAX), Pair.of(GEOM_X_MIN, GEOM_Y_MIN), GEOM_X_MIN, GEOM_Y_MIN, GEOM_X_MAX, GEOM_Y_MAX};
Object[] expected = {
100L,
80L,
Pair.of(GEOM_X_MAX, GEOM_Y_MAX),
Pair.of(GEOM_X_MIN, GEOM_Y_MIN),
GEOM_X_MIN,
GEOM_Y_MIN,
GEOM_X_MAX,
GEOM_Y_MAX
};
assertEvaluatorResult(result, expected);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -52,10 +52,9 @@
import static org.apache.iceberg.types.Conversions.toByteBuffer;
import static org.apache.iceberg.types.Types.NestedField.optional;
import static org.apache.iceberg.types.Types.NestedField.required;

import java.util.List;
import java.util.Random;
import java.util.stream.Stream;
import org.apache.iceberg.AssertHelpers;
import org.apache.iceberg.DataFile;
import org.apache.iceberg.Schema;
import org.apache.iceberg.TestHelpers.Row;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,8 @@
*/
package org.apache.iceberg.data;

import java.util.Map;
import java.util.Collection;
import java.util.Map;
import org.apache.iceberg.Schema;
import org.apache.iceberg.Table;
import org.apache.iceberg.TableScan;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -47,8 +47,7 @@
import org.apache.spark.sql.types.StructType;
import org.apache.spark.sql.types.TimestampNTZType$;
import org.apache.spark.sql.types.TimestampType$;
import org.apache.spark.sql.types.TimestampType;
import org.apache.spark.sql.types.UserDefinedType$;
import org.apache.spark.sql.types.UserDefinedType;

public class PruneColumnsWithoutReordering extends TypeUtil.CustomOrderSchemaVisitor<Type> {
private final StructType requestedType;
Expand Down Expand Up @@ -200,7 +199,7 @@ public Type map(Types.MapType map, Supplier<Type> keyResult, Supplier<Type> valu
public Type primitive(Type.PrimitiveType primitive) {
Set<Class<? extends DataType>> expectedType = TYPES.get(primitive.typeId());
Preconditions.checkArgument(
expectedType != null && expectedType.contains(current.getClass()),
expectedType != null && expectedType.stream().anyMatch(t -> t.isInstance(current)),
"Cannot project %s to incompatible type: %s",
primitive,
current);
Expand Down Expand Up @@ -247,6 +246,6 @@ public Type primitive(Type.PrimitiveType primitive) {
.put(TypeID.STRING, ImmutableSet.of(StringType$.class))
.put(TypeID.FIXED, ImmutableSet.of(BinaryType$.class))
.put(TypeID.BINARY, ImmutableSet.of(BinaryType$.class))
.put(TypeID.GEOMETRY, ImmutableSet.of(UserDefinedType$.class))
.put(TypeID.GEOMETRY, ImmutableSet.of(UserDefinedType.class))
.buildOrThrow();
}
Loading

0 comments on commit 1aa2634

Please sign in to comment.