diff --git a/spark/spark-3.0/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/GeoParquetWriteSupport.scala b/spark/spark-3.0/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/GeoParquetWriteSupport.scala
index b304b28f4b..542a6e553f 100644
--- a/spark/spark-3.0/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/GeoParquetWriteSupport.scala
+++ b/spark/spark-3.0/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/GeoParquetWriteSupport.scala
@@ -139,8 +139,8 @@ class GeoParquetWriteSupport extends WriteSupport[InternalRow] with Logging {
       case version: String => Some(version)
     }
     defaultGeoParquetCrs = Option(configuration.get(GEOPARQUET_CRS_KEY)).map(parse(_))
-    configuration.getPropsWithPrefix(GEOPARQUET_CRS_KEY + ".").asScala.foreach {
-      case (key, value) => geoParquetColumnCrsMap.put(key, parse(value))
+    geometryColumnInfoMap.keys.map(schema(_).name).foreach { name =>
+      Option(configuration.get(GEOPARQUET_CRS_KEY + "." + name)).foreach(crs => geoParquetColumnCrsMap.put(name, parse(crs)))
     }
     val messageType = new SparkToParquetSchemaConverter(configuration).convert(schema)
diff --git a/spark/spark-3.4/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/GeoParquetWriteSupport.scala b/spark/spark-3.4/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/GeoParquetWriteSupport.scala
index d4a6dac71d..482beb11ff 100644
--- a/spark/spark-3.4/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/GeoParquetWriteSupport.scala
+++ b/spark/spark-3.4/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/GeoParquetWriteSupport.scala
@@ -139,8 +139,8 @@ class GeoParquetWriteSupport extends WriteSupport[InternalRow] with Logging {
       case version: String => Some(version)
     }
     defaultGeoParquetCrs = Option(configuration.get(GEOPARQUET_CRS_KEY)).map(parse(_))
-    configuration.getPropsWithPrefix(GEOPARQUET_CRS_KEY + ".").asScala.foreach {
-      case (key, value) => geoParquetColumnCrsMap.put(key, parse(value))
+    geometryColumnInfoMap.keys.map(schema(_).name).foreach { name =>
+      Option(configuration.get(GEOPARQUET_CRS_KEY + "." + name)).foreach(crs => geoParquetColumnCrsMap.put(name, parse(crs)))
     }
     val messageType = new SparkToParquetSchemaConverter(configuration).convert(schema)
diff --git a/spark/spark-3.5/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/GeoParquetWriteSupport.scala b/spark/spark-3.5/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/GeoParquetWriteSupport.scala
index 0803e9563f..6294e291ec 100644
--- a/spark/spark-3.5/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/GeoParquetWriteSupport.scala
+++ b/spark/spark-3.5/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/GeoParquetWriteSupport.scala
@@ -139,8 +139,8 @@ class GeoParquetWriteSupport extends WriteSupport[InternalRow] with Logging {
       case version: String => Some(version)
     }
     defaultGeoParquetCrs = Option(configuration.get(GEOPARQUET_CRS_KEY)).map(parse(_))
-    configuration.getPropsWithPrefix(GEOPARQUET_CRS_KEY + ".").asScala.foreach {
-      case (key, value) => geoParquetColumnCrsMap.put(key, parse(value))
+    geometryColumnInfoMap.keys.map(schema(_).name).foreach { name =>
+      Option(configuration.get(GEOPARQUET_CRS_KEY + "." + name)).foreach(crs => geoParquetColumnCrsMap.put(name, parse(crs)))
     }
     val messageType = new SparkToParquetSchemaConverter(configuration).convert(schema)