Merge branch 'sedona_master' into develop_Nilesh_Raster_Clone_Bug
iGN5117 committed Nov 15, 2023
2 parents 3e68c96 + 1bf3fea commit 97460a7
Showing 138 changed files with 864 additions and 425 deletions.
11 changes: 11 additions & 0 deletions .github/dependabot.yml
@@ -0,0 +1,11 @@
version: 2
updates:
- package-ecosystem: github-actions
directory: /
schedule:
interval: monthly

- package-ecosystem: pip
directory: /docker/sedona-spark-jupyterlab
schedule:
interval: monthly
2 changes: 1 addition & 1 deletion .github/issue_template.md
@@ -20,4 +20,4 @@ JRE version = 1.8, 1.11?

Python version = ?

Environment = Standalone, AWS EC2, EMR, Azure, Databricks?
Environment = Standalone, AWS EC2, EMR, Azure, Databricks?
4 changes: 2 additions & 2 deletions .github/workflows/docker-build.yml
@@ -33,12 +33,12 @@ jobs:
shell: bash

steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
- uses: actions/setup-java@v1
with:
java-version: 11
- name: Cache Maven packages
uses: actions/cache@v2
uses: actions/cache@v3
with:
path: ~/.m2
key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
8 changes: 4 additions & 4 deletions .github/workflows/docs.yml
@@ -11,10 +11,10 @@ jobs:
build:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
with:
fetch-depth: 0
- uses: actions/setup-python@v2
- uses: actions/setup-python@v4
with:
python-version: 3.x
- run: pip install mkdocs-material
@@ -34,7 +34,7 @@ jobs:
any::pkgdown
working-directory : './R'
- run: Rscript -e 'pkgdown::build_site(pkg = "./R", preview = FALSE, override = list(destination = "../docs/api/rdocs"))'
- uses: actions/cache@v2
- uses: actions/cache@v3
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
@@ -46,7 +46,7 @@ jobs:
- run: mike deploy --update-aliases current-snapshot
- run: mkdir staging
- run: cp -r site/* staging/
- uses: actions/upload-artifact@v2
- uses: actions/upload-artifact@v3
with:
name: generated-docs
path: staging
6 changes: 3 additions & 3 deletions .github/workflows/example.yml
@@ -13,7 +13,7 @@ jobs:
runs-on: ubuntu-22.04

steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
- uses: actions/setup-java@v1
with:
java-version: '8'
@@ -26,14 +26,14 @@ jobs:
- run: sudo apt-get update
- run: sudo apt-get install sbt
- name: Cache SBT
uses: actions/cache@v2
uses: actions/cache@v3
with:
path: |
~/.ivy2/cache
~/.sbt
key: ${{ runner.os }}-sbt-${{ hashFiles('**/build.sbt') }}
- name: Cache Maven packages
uses: actions/cache@v2
uses: actions/cache@v3
with:
path: ~/.m2
key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
8 changes: 4 additions & 4 deletions .github/workflows/java.yml
@@ -49,15 +49,15 @@ jobs:
skipTests: ''

steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
- uses: actions/setup-java@v1
with:
java-version: ${{ matrix.jdk }}
- uses: actions/setup-python@v2
- uses: actions/setup-python@v4
with:
python-version: '3.7'
- name: Cache Maven packages
uses: actions/cache@v2
uses: actions/cache@v3
with:
path: ~/.m2
key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
@@ -76,7 +76,7 @@ jobs:
- run: cp spark-shaded/target/sedona-*.jar staging
- run: |
[ -d "flink-shaded/target/" ] && cp flink-shaded/target/sedona-*.jar staging 2>/dev/null || true
- uses: actions/upload-artifact@v2
- uses: actions/upload-artifact@v3
with:
name: generated-jars
path: staging
4 changes: 2 additions & 2 deletions .github/workflows/python-extension.yml
@@ -21,8 +21,8 @@ jobs:
shell: bash

steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python }}
- name: Install pipenv
6 changes: 3 additions & 3 deletions .github/workflows/python-wheel.yml
@@ -20,14 +20,14 @@ jobs:
shell: bash

steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
- name: Set up QEMU
if: runner.os == 'Linux'
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3
with:
platforms: all
- name: Build wheels
uses: pypa/cibuildwheel@v2.12.0
uses: pypa/cibuildwheel@v2.16.2
env:
CIBW_SKIP: 'pp* *musl*'
CIBW_ARCHS_LINUX: 'x86_64 aarch64'
6 changes: 3 additions & 3 deletions .github/workflows/python.yml
@@ -52,15 +52,15 @@ jobs:
hadoop: '2.7'

steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v4
- uses: actions/setup-java@v1
with:
java-version: '8'
- uses: actions/setup-python@v2
- uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python }}
- name: Cache Maven packages
uses: actions/cache@v2
uses: actions/cache@v3
with:
path: ~/.m2
key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
4 changes: 2 additions & 2 deletions .github/workflows/r.yml
@@ -46,7 +46,7 @@ jobs:
run: |
sudo apt-get -y remove --purge default-jdk adoptopenjdk-11-hotspot || :
shell: bash
- uses: actions/checkout@v3
- uses: actions/checkout@v4
- uses: r-lib/actions/setup-r@v2
with:
r-version: ${{ matrix.r }}
@@ -112,7 +112,7 @@ jobs:
cd ./R/tests
NOT_CRAN='true' Rscript testthat.R
shell: bash
- uses: actions/upload-artifact@v1
- uses: actions/upload-artifact@v3
if: failure()
with:
name: Worker logs
3 changes: 2 additions & 1 deletion .pre-commit-config.yaml
@@ -33,4 +33,5 @@ repos:
# - id: check-yaml
- id: detect-private-key
- id: end-of-file-fixer
files: \.java$|\.scala$
files: \.(java|md|py|scala)$
exclude: ^docs/image|^spark/common/src/test/resources
22 changes: 11 additions & 11 deletions R/tests/testthat/test-data-interface.R
@@ -309,7 +309,7 @@ test_that("spark_read_geoparquet() works as expected", {
)

## Right registered name
expect_equal(geoparquet_sdf %>% dbplyr::remote_name(), dbplyr::ident(sdf_name))
expect_equal(geoparquet_sdf %>% dbplyr::remote_name(), sdf_name)

## Right schema
expect_equivalent(
@@ -442,7 +442,7 @@ test_that("spark_read_geoparquet() throws an error with plain parquet files", {

expect_error(
spark_read_geoparquet(sc, geoparquet("plain.parquet")),
regexp = "GeoParquet file does not contain valid geo"
regexp = "not contain valid geo"
)

})
@@ -463,7 +463,7 @@ test_that("spark_read_geojson() works as expected", {
)

## Right registered name
expect_equal(geojson_sdf %>% dbplyr::remote_name(), dbplyr::ident(sdf_name))
expect_equal(geojson_sdf %>% dbplyr::remote_name(), sdf_name)

})

@@ -478,7 +478,7 @@ test_that("spark_read_geojson() works as expected, no feat", {
)

## Right registered name
expect_equal(geojson_sdf %>% dbplyr::remote_name(), dbplyr::ident(sdf_name))
expect_equal(geojson_sdf %>% dbplyr::remote_name(), sdf_name)

sc %>% DBI::dbExecute(paste0("DROP TABLE ", sdf_name))

@@ -494,7 +494,7 @@ test_that("spark_read_geojson() works as expected, null values", {
)

## Right registered name
expect_equal(geojson_sdf %>% dbplyr::remote_name(), dbplyr::ident(sdf_name))
expect_equal(geojson_sdf %>% dbplyr::remote_name(), sdf_name)

sc %>% DBI::dbExecute(paste0("DROP TABLE ", sdf_name))

@@ -522,7 +522,7 @@ test_that("spark_read_geojson() works as expected, with id", {
)

## Right registered name
expect_equal(geojson_sdf %>% dbplyr::remote_name(), dbplyr::ident(sdf_name))
expect_equal(geojson_sdf %>% dbplyr::remote_name(), sdf_name)

sc %>% DBI::dbExecute(paste0("DROP TABLE ", sdf_name))

@@ -541,7 +541,7 @@ test_that("spark_read_geojson() works as expected, invalid geom", {
)

## Right registered name
expect_equal(geojson_sdf %>% dbplyr::remote_name(), dbplyr::ident(sdf_name))
expect_equal(geojson_sdf %>% dbplyr::remote_name(), sdf_name)


# Remove invalid
@@ -553,7 +553,7 @@ test_that("spark_read_geojson() works as expected, invalid geom", {
)

## Right registered name
expect_equal(geojson_sdf %>% dbplyr::remote_name(), dbplyr::ident(sdf_name))
expect_equal(geojson_sdf %>% dbplyr::remote_name(), sdf_name)

sc %>% DBI::dbExecute(paste0("DROP TABLE ", sdf_name))

@@ -676,13 +676,13 @@ test_that("spark_write_geojson() works as expected", {

## order of columns changes !
expect_equal(
names(geojson_sdf) %>% sort(),
names(geojson_2_sdf) %>% sort()
colnames(geojson_sdf) %>% sort(),
colnames(geojson_2_sdf) %>% sort()
)
expect_equal(
geojson_sdf %>% mutate(geometry = geometry %>% st_astext()) %>% collect(),
geojson_2_sdf %>% mutate(geometry = geometry %>% st_astext()) %>% collect() %>%
select(names(geojson_sdf))
select(colnames(geojson_sdf))
)


6 changes: 3 additions & 3 deletions R/tests/testthat/test-sdf-interface.R
@@ -36,7 +36,7 @@ test_that("sdf_register() works as expected for Spatial RDDs", {

)
)
expect_equal(pt_sdf %>% dbplyr::remote_name(), dbplyr::ident(sdf_name))
expect_equal(pt_sdf %>% dbplyr::remote_name(), sdf_name)

pt_sdf %>% collect()
succeed()
@@ -68,7 +68,7 @@ test_that("sdf_register() works as expected for Spatial RDDs with fieldNames", {
)
)

expect_equal(polygon_sdf %>% dbplyr::remote_name(), dbplyr::ident(sdf_name))
expect_equal(polygon_sdf %>% dbplyr::remote_name(), sdf_name)

polygon_sdf %>% collect()
succeed()
@@ -91,7 +91,7 @@ test_that("as.spark.dataframe() works as expected for Spatial RDDs with non-spat
attr_3 = list(name = "attr_3", type = "StringType")
)
)
expect_equal(pt_sdf %>% dbplyr::remote_name(), dbplyr::ident(sdf_name))
expect_equal(pt_sdf %>% dbplyr::remote_name(), sdf_name)

pt_sdf %>% collect()
succeed()
10 changes: 5 additions & 5 deletions README.md
@@ -2,12 +2,12 @@
<img alt="Apache Sedona" src="docs/image/sedona_logo.png" width="500">
</a>

[![Scala and Java build](https://github.com/apache/sedona/actions/workflows/java.yml/badge.svg)](https://github.com/apache/sedona/actions/workflows/java.yml) [![Python build](https://github.com/apache/sedona/actions/workflows/python.yml/badge.svg)](https://github.com/apache/sedona/actions/workflows/python.yml) [![R build](https://github.com/apache/sedona/actions/workflows/r.yml/badge.svg)](https://github.com/apache/sedona/actions/workflows/r.yml)[![Docker image build](https://github.com/apache/sedona/actions/workflows/docker-build.yml/badge.svg)](https://github.com/apache/sedona/actions/workflows/docker-build.yml) [![Example project build](https://github.com/apache/sedona/actions/workflows/example.yml/badge.svg)](https://github.com/apache/sedona/actions/workflows/example.yml) [![Docs build](https://github.com/apache/sedona/actions/workflows/docs.yml/badge.svg)](https://github.com/apache/sedona/actions/workflows/docs.yml)
[![Scala and Java build](https://github.com/apache/sedona/actions/workflows/java.yml/badge.svg)](https://github.com/apache/sedona/actions/workflows/java.yml) [![Python build](https://github.com/apache/sedona/actions/workflows/python.yml/badge.svg)](https://github.com/apache/sedona/actions/workflows/python.yml) [![R build](https://github.com/apache/sedona/actions/workflows/r.yml/badge.svg)](https://github.com/apache/sedona/actions/workflows/r.yml) [![Docker image build](https://github.com/apache/sedona/actions/workflows/docker-build.yml/badge.svg)](https://github.com/apache/sedona/actions/workflows/docker-build.yml) [![Example project build](https://github.com/apache/sedona/actions/workflows/example.yml/badge.svg)](https://github.com/apache/sedona/actions/workflows/example.yml) [![Docs build](https://github.com/apache/sedona/actions/workflows/docs.yml/badge.svg)](https://github.com/apache/sedona/actions/workflows/docs.yml)

|Download statistics| **Maven** | **PyPI** | **CRAN** | **DockerHub** |
|:-------------:|:------------------:|:--------------:|:---------:|:------:|
| Apache Sedona | 225k/month |[![PyPI - Downloads](https://img.shields.io/pypi/dm/apache-sedona)](https://pepy.tech/project/apache-sedona) [![Downloads](https://static.pepy.tech/personalized-badge/apache-sedona?period=total&units=international_system&left_color=black&right_color=brightgreen&left_text=total%20downloads)](https://pepy.tech/project/apache-sedona)|[![](https://cranlogs.r-pkg.org/badges/apache.sedona?color=brightgreen)](https://cran.r-project.org/package=apache.sedona) [![](https://cranlogs.r-pkg.org/badges/grand-total/apache.sedona?color=brightgreen)](https://cran.r-project.org/package=apache.sedona)|[![Docker pulls](https://img.shields.io/docker/pulls/apache/sedona?color=brightgreen)](https://hub.docker.com/r/apache/sedona)|
| Archived GeoSpark releases |10k/month|[![PyPI - Downloads](https://img.shields.io/pypi/dm/geospark)](https://pepy.tech/project/geospark)[![Downloads](https://static.pepy.tech/personalized-badge/geospark?period=total&units=international_system&left_color=black&right_color=brightgreen&left_text=total%20downloads)](https://pepy.tech/project/geospark)| | |
| Download statistics | **Maven** | **PyPI** | Conda-forge | **CRAN** | **DockerHub** |
|----------------------------|------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------|
| Apache Sedona | 225k/month | [![PyPI - Downloads](https://img.shields.io/pypi/dm/apache-sedona)](https://pepy.tech/project/apache-sedona) [![Downloads](https://static.pepy.tech/personalized-badge/apache-sedona?period=total&units=international_system&left_color=black&right_color=brightgreen&left_text=total%20downloads)](https://pepy.tech/project/apache-sedona) | [![Anaconda-Server Badge](https://anaconda.org/conda-forge/apache-sedona/badges/downloads.svg)](https://anaconda.org/conda-forge/apache-sedona) | [![](https://cranlogs.r-pkg.org/badges/apache.sedona?color=brightgreen)](https://cran.r-project.org/package=apache.sedona) [![](https://cranlogs.r-pkg.org/badges/grand-total/apache.sedona?color=brightgreen)](https://cran.r-project.org/package=apache.sedona) | [![Docker pulls](https://img.shields.io/docker/pulls/apache/sedona?color=brightgreen)](https://hub.docker.com/r/apache/sedona) |
| Archived GeoSpark releases | 10k/month | [![PyPI - Downloads](https://img.shields.io/pypi/dm/geospark)](https://pepy.tech/project/geospark)[![Downloads](https://static.pepy.tech/personalized-badge/geospark?period=total&units=international_system&left_color=black&right_color=brightgreen&left_text=total%20downloads)](https://pepy.tech/project/geospark) | | | |



2 changes: 1 addition & 1 deletion binder/utilities.py
@@ -110,4 +110,4 @@ def getConfig():
17.18305478057247,
31.1442867897876],
'mapStyles': {}}}}
return config
return config
