Commit a8a8bb7

Merge branch 'release/0.8.3'
2 parents: 14c8dc5 + eaaaf58

68 files changed (+353, -216 lines)


build/circleci/Dockerfile

Lines changed: 20 additions & 13 deletions
```diff
@@ -1,6 +1,6 @@
 FROM circleci/openjdk:8-jdk
 
-ENV OPENJPEG_VERSION 2.3.0
+ENV OPENJPEG_VERSION 2.3.1
 ENV GDAL_VERSION 2.4.1
 ENV JAVA_HOME /usr/lib/jvm/java-8-openjdk-amd64/
 
@@ -11,26 +11,37 @@ RUN sudo apt-get update && \
     python python-minimal python2.7 python2.7-minimal \
     libpython-stdlib libpython2.7 libpython2.7-minimal libpython2.7-stdlib \
   && sudo apt-get install -y \
-    python3 \
-    python3-pip \
     pandoc \
     wget \
     gcc g++ build-essential \
+    libreadline-gplv2-dev libncursesw5-dev libssl-dev libsqlite3-dev tk-dev libgdbm-dev libc6-dev libbz2-dev \
    libcurl4-gnutls-dev \
    libproj-dev \
    libgeos-dev \
    libhdf4-alt-dev \
-    libhdf5-serial-dev \
    bash-completion \
    cmake \
    imagemagick \
    libpng-dev \
-    swig \
-    ant \
+    libffi-dev \
  && sudo apt autoremove \
-  && sudo apt-get clean all \
-  && pip3 install setuptools ipython==6.2.1 \
-  && sudo update-alternatives --install /usr/bin/python python /usr/bin/python3 1
+  && sudo apt-get clean all
+# && sudo update-alternatives --install /usr/bin/python python /usr/bin/python3 1
+# todo s
+
+RUN cd /tmp && \
+  wget https://www.python.org/ftp/python/3.7.4/Python-3.7.4.tgz && \
+  tar xzf Python-3.7.4.tgz && \
+  cd Python-3.7.4 && \
+  ./configure --with-ensurepip=install --prefix=/usr/local --enable-optimization && \
+  make && \
+  sudo make altinstall && \
+  rm -rf Python-3.7.4*
+
+RUN sudo ln -s /usr/local/bin/python3.7 /usr/local/bin/python && \
+  sudo curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py && \
+  sudo python get-pip.py && \
+  sudo pip3 install setuptools ipython==6.2.1
 
 # install OpenJPEG
 RUN cd /tmp && \
@@ -52,11 +63,9 @@ RUN cd /tmp && \
   ./configure \
     --with-curl \
     --with-hdf4 \
-    --with-hdf5 \
    --with-geos \
    --with-geotiff=internal \
    --with-hide-internal-symbols \
-    --with-java=$JAVA_HOME \
    --with-libtiff=internal \
    --with-libz=internal \
    --with-mrf \
@@ -68,7 +77,5 @@ RUN cd /tmp && \
  && \
  make -j 8 && \
  sudo make install && \
-  cd swig/java && \
-  sudo make install && \
  sudo ldconfig && \
  cd /tmp && sudo rm -Rf gdal*
```

build/circleci/Makefile

Lines changed: 2 additions & 0 deletions
```diff
@@ -0,0 +1,2 @@
+all:
+	docker build -t "s22s/rasterframes-circleci:latest" .
```

build/circleci/README.md

Lines changed: 1 addition & 1 deletion
````diff
@@ -1,6 +1,6 @@
 # CircleCI Dockerfile Build file
 
 ```bash
-docker build -t s22s/rasterframes-circleci:latest .
+make
 docker push s22s/rasterframes-circleci:latest
 ```
````

core/src/main/resources/reference.conf

Lines changed: 3 additions & 0 deletions
```diff
@@ -14,6 +14,9 @@ vlm.gdal {
   AWS_REQUEST_PAYER = "requester"
   GDAL_DISABLE_READDIR_ON_OPEN = "YES"
   CPL_VSIL_CURL_ALLOWED_EXTENSIONS = ".tif,.tiff,.jp2,.mrf,.idx,.lrc,.mrf.aux.xml,.vrt"
+  GDAL_CACHEMAX = 512
+  GDAL_PAM_ENABLED = "NO"
+  CPL_VSIL_CURL_CHUNK_SIZE = 1000000
 }
 // set this to `false` if CPL_DEBUG is `ON`
 useExceptions = true
```
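The three new keys tune GDAL behavior: the raster block cache budget, persistent auxiliary metadata (`.aux.xml` sidecar) writing, and the chunk size used for ranged curl reads. A minimal sketch of inspecting these defaults through Typesafe Config follows; the full key path is not visible in this hunk (only the enclosing `vlm.gdal {` block is shown by git), so the `vlm.gdal` path and the object name below are assumptions and may need adjusting if the keys are nested deeper (for example under an `options` block).

```scala
import com.typesafe.config.ConfigFactory

// Hypothetical helper. reference.conf ships inside the RasterFrames core jar;
// an application.conf or -D system properties can override individual keys
// via the usual Typesafe Config precedence rules.
object GdalConfigDefaults extends App {
  // Assumed path; adjust if the keys sit under a nested block.
  val gdal = ConfigFactory.load().getConfig("vlm.gdal")

  println(gdal.getInt("GDAL_CACHEMAX"))             // 512  (raster cache budget)
  println(gdal.getString("GDAL_PAM_ENABLED"))       // "NO" (no .aux.xml sidecar files)
  println(gdal.getLong("CPL_VSIL_CURL_CHUNK_SIZE")) // 1000000 (bytes per ranged read)
}
```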

core/src/main/scala/org/apache/spark/sql/rf/TileUDT.scala

Lines changed: 6 additions & 2 deletions
```diff
@@ -26,6 +26,7 @@ import org.apache.spark.sql.types.{DataType, _}
 import org.locationtech.rasterframes.encoders.CatalystSerializer
 import org.locationtech.rasterframes.encoders.CatalystSerializer._
 import org.locationtech.rasterframes.model.{Cells, TileDataContext}
+import org.locationtech.rasterframes.ref.RasterRef.RasterRefTile
 import org.locationtech.rasterframes.tiles.InternalRowTile
 
 
@@ -75,12 +76,15 @@ case object TileUDT {
   implicit def tileSerializer: CatalystSerializer[Tile] = new CatalystSerializer[Tile] {
 
     override val schema: StructType = StructType(Seq(
-      StructField("cell_context", schemaOf[TileDataContext], false),
+      StructField("cell_context", schemaOf[TileDataContext], true),
       StructField("cell_data", schemaOf[Cells], false)
     ))
 
     override def to[R](t: Tile, io: CatalystIO[R]): R = io.create(
-      io.to(TileDataContext(t)),
+      t match {
+        case _: RasterRefTile => null
+        case o => io.to(TileDataContext(o))
+      },
      io.to(Cells(t))
    )
 
```
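The schema tweak above makes `cell_context` nullable, and the serializer now writes `null` for a `RasterRefTile`, whose context is carried by the underlying raster reference rather than materialized up front. A standalone sketch of that pattern with stand-in types (none of these names are RasterFrames API):

```scala
// Stand-in types only; illustrates the null-for-reference-backed-tiles pattern above.
object NullableContextSketch extends App {
  sealed trait SketchTile
  final case class EagerTile(cols: Int, rows: Int) extends SketchTile // analogous to a realized Tile
  final case class RefTile(uri: String) extends SketchTile            // analogous to RasterRefTile

  final case class Context(cols: Int, rows: Int)                      // analogous to TileDataContext

  // Reference-backed tiles serialize no context, hence the nullable schema field.
  def contextOrNull(t: SketchTile): Context = t match {
    case _: RefTile      => null
    case EagerTile(c, r) => Context(c, r)
  }

  assert(contextOrNull(RefTile("s3://bucket/scene.tif")) == null)
  assert(contextOrNull(EagerTile(256, 256)) == Context(256, 256))
}
```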

core/src/main/scala/org/locationtech/rasterframes/expressions/BinaryLocalRasterOp.scala

Lines changed: 8 additions & 4 deletions
```diff
@@ -21,18 +21,22 @@
 
 package org.locationtech.rasterframes.expressions
 
-import org.locationtech.rasterframes.encoders.CatalystSerializer._
-import org.locationtech.rasterframes.expressions.DynamicExtractors._
-import com.typesafe.scalalogging.LazyLogging
+import com.typesafe.scalalogging.Logger
 import geotrellis.raster.Tile
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{TypeCheckFailure, TypeCheckSuccess}
 import org.apache.spark.sql.catalyst.expressions.BinaryExpression
 import org.apache.spark.sql.rf.TileUDT
 import org.apache.spark.sql.types.DataType
+import org.locationtech.rasterframes.encoders.CatalystSerializer._
+import org.locationtech.rasterframes.expressions.DynamicExtractors._
+import org.slf4j.LoggerFactory
 
 /** Operation combining two tiles or a tile and a scalar into a new tile. */
-trait BinaryLocalRasterOp extends BinaryExpression with LazyLogging {
+trait BinaryLocalRasterOp extends BinaryExpression {
+
+  @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
+
 
   override def dataType: DataType = left.dataType
 
```
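This change, repeated in BinaryRasterOp and UnaryLocalRasterOp below, swaps the `LazyLogging` mixin for an explicitly `@transient lazy` logger. A standalone sketch of the pattern follows; the trait name is hypothetical and the serialization rationale in the comments is an inference, not stated in the commit:

```scala
import com.typesafe.scalalogging.Logger
import org.slf4j.LoggerFactory

// Hypothetical trait name; mirrors the pattern added to the expression traits.
// Being @transient, the logger is not captured when the enclosing object is
// serialized (e.g. shipped to Spark executors); being lazy, it is re-created
// on first use after deserialization.
trait TransientLogging extends Serializable {
  @transient protected lazy val logger: Logger =
    Logger(LoggerFactory.getLogger(getClass.getName))
}

object TransientLoggingDemo extends App {
  class Worker extends TransientLogging {
    def run(): Unit = logger.info("logger materialized lazily on first use")
  }
  new Worker().run()
}
```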
core/src/main/scala/org/locationtech/rasterframes/expressions/BinaryRasterOp.scala

Lines changed: 6 additions & 4 deletions
```diff
@@ -21,18 +21,20 @@
 
 package org.locationtech.rasterframes.expressions
 
-import org.locationtech.rasterframes.expressions.DynamicExtractors.tileExtractor
-import org.locationtech.rasterframes.encoders.CatalystSerializer._
-import com.typesafe.scalalogging.LazyLogging
+import com.typesafe.scalalogging.Logger
 import geotrellis.raster.Tile
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{TypeCheckFailure, TypeCheckSuccess}
 import org.apache.spark.sql.catalyst.expressions.BinaryExpression
 import org.apache.spark.sql.rf.TileUDT
 import org.apache.spark.sql.types.DataType
+import org.locationtech.rasterframes.encoders.CatalystSerializer._
+import org.locationtech.rasterframes.expressions.DynamicExtractors.tileExtractor
+import org.slf4j.LoggerFactory
 
 /** Operation combining two tiles into a new tile. */
-trait BinaryRasterOp extends BinaryExpression with LazyLogging {
+trait BinaryRasterOp extends BinaryExpression {
+  @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
 
   override def dataType: DataType = left.dataType
 
```
core/src/main/scala/org/locationtech/rasterframes/expressions/UnaryLocalRasterOp.scala

Lines changed: 6 additions & 4 deletions
```diff
@@ -21,18 +21,20 @@
 
 package org.locationtech.rasterframes.expressions
 
-import org.locationtech.rasterframes.encoders.CatalystSerializer._
-import org.locationtech.rasterframes.expressions.DynamicExtractors._
-import com.typesafe.scalalogging.LazyLogging
+import com.typesafe.scalalogging.Logger
 import geotrellis.raster.Tile
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{TypeCheckFailure, TypeCheckSuccess}
 import org.apache.spark.sql.catalyst.expressions.UnaryExpression
 import org.apache.spark.sql.rf.TileUDT
 import org.apache.spark.sql.types.DataType
+import org.locationtech.rasterframes.encoders.CatalystSerializer._
+import org.locationtech.rasterframes.expressions.DynamicExtractors._
+import org.slf4j.LoggerFactory
 
 /** Operation on a tile returning a tile. */
-trait UnaryLocalRasterOp extends UnaryExpression with LazyLogging {
+trait UnaryLocalRasterOp extends UnaryExpression {
+  @transient protected lazy val logger = Logger(LoggerFactory.getLogger(getClass.getName))
 
   override def dataType: DataType = child.dataType
 
```
core/src/main/scala/org/locationtech/rasterframes/expressions/UnaryRasterAggregate.scala

Lines changed: 2 additions & 1 deletion
```diff
@@ -37,10 +37,11 @@ trait UnaryRasterAggregate extends DeclarativeAggregate {
   def children = Seq(child)
 
   protected def tileOpAsExpression[R: TypeTag](name: String, op: Tile => R): Expression => ScalaUDF =
-    udfexpr[R, Any](name, (a: Any) => op(extractTileFromAny(a)))
+    udfexpr[R, Any](name, (a: Any) => if(a == null) null.asInstanceOf[R] else op(extractTileFromAny(a)))
 
   protected val extractTileFromAny = (a: Any) => a match {
     case t: Tile => t
     case r: Row => rowTileExtractor(child.dataType)(r)._1
+    case null => null
  }
 }
```
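Both edits above make the aggregate null-tolerant: a null input yields a null result instead of falling through the extractor match. A tiny standalone sketch of that guard (the helper name is hypothetical, not RasterFrames API):

```scala
// Hypothetical helper mirroring the guard added to tileOpAsExpression:
// nulls short-circuit before the wrapped op ever sees them.
object NullGuardSketch extends App {
  def nullSafe[A <: AnyRef, R >: Null](op: A => R): A => R =
    a => if (a == null) null else op(a)

  val upper = nullSafe[String, String](_.toUpperCase)
  assert(upper("tile") == "TILE")
  assert(upper(null) == null)
}
```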

core/src/main/scala/org/locationtech/rasterframes/expressions/accessors/GetCRS.scala

Lines changed: 28 additions & 8 deletions
```diff
@@ -21,33 +21,53 @@
 
 package org.locationtech.rasterframes.expressions.accessors
 
-import org.locationtech.rasterframes.encoders.CatalystSerializer._
-import org.locationtech.rasterframes.encoders.StandardEncoders.crsEncoder
-import org.locationtech.rasterframes.expressions.OnTileContextExpression
 import geotrellis.proj4.CRS
 import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
+import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{TypeCheckFailure, TypeCheckSuccess}
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
-import org.apache.spark.sql.types.DataType
+import org.apache.spark.sql.types.{DataType, StringType}
 import org.apache.spark.sql.{Column, TypedColumn}
-import org.locationtech.rasterframes.model.TileContext
+import org.apache.spark.unsafe.types.UTF8String
+import org.locationtech.rasterframes.encoders.CatalystSerializer._
+import org.locationtech.rasterframes.encoders.StandardEncoders.crsEncoder
+import org.locationtech.rasterframes.expressions.DynamicExtractors.projectedRasterLikeExtractor
+import org.locationtech.rasterframes.model.LazyCRS
 
 /**
  * Expression to extract the CRS out of a RasterRef or ProjectedRasterTile column.
 *
 * @since 9/9/18
 */
 @ExpressionDescription(
-  usage = "_FUNC_(raster) - Fetches the CRS of a ProjectedRasterTile or RasterSource.",
+  usage = "_FUNC_(raster) - Fetches the CRS of a ProjectedRasterTile or RasterSource, or converts a proj4 string column.",
   examples = """
   Examples:
     > SELECT _FUNC_(raster);
        ....
  """)
-case class GetCRS(child: Expression) extends OnTileContextExpression with CodegenFallback {
+case class GetCRS(child: Expression) extends UnaryExpression with CodegenFallback {
   override def dataType: DataType = schemaOf[CRS]
   override def nodeName: String = "rf_crs"
-  override def eval(ctx: TileContext): InternalRow = ctx.crs.toInternalRow
+
+  override def checkInputDataTypes(): TypeCheckResult = {
+    if (child.dataType != StringType && !projectedRasterLikeExtractor.isDefinedAt(child.dataType)) {
+      TypeCheckFailure(s"Input type '${child.dataType}' does not conform to `String` or `ProjectedRasterLike`.")
+    }
+    else TypeCheckSuccess
+  }
+
+  override protected def nullSafeEval(input: Any): Any = {
+    input match {
+      case s: UTF8String => LazyCRS(s.toString).toInternalRow
+      case row: InternalRow =>
+        val prl = projectedRasterLikeExtractor(child.dataType)(row)
+        prl.crs.toInternalRow
+      case o => throw new IllegalArgumentException(s"Unsupported input type: $o")
+    }
+  }
+
 }
 
 object GetCRS {
```
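With this change, `rf_crs` accepts a proj4/CRS string column in addition to raster columns. A hedged usage sketch follows; the local session setup and the `crs_str` column name are illustrative, assuming the standard RasterFrames pattern of importing `org.locationtech.rasterframes._` and calling `withRasterFrames`:

```scala
import org.apache.spark.sql.SparkSession
import org.locationtech.rasterframes._

object RfCrsOnStrings extends App {
  // Local session for illustration; withRasterFrames registers RasterFrames types and functions.
  val spark = SparkSession.builder()
    .master("local[*]")
    .appName("rf_crs-on-strings")
    .getOrCreate()
    .withRasterFrames
  import spark.implicits._

  // Previously rf_crs required a ProjectedRasterTile or RasterSource column;
  // after this commit a plain proj4 string column works too.
  val df = Seq("+proj=longlat +datum=WGS84 +no_defs").toDF("crs_str")
  df.select(rf_crs($"crs_str")).show(false)
}
```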
