From 2b7e0e1594412c12a2a64ee14ec935d46c0b498d Mon Sep 17 00:00:00 2001 From: Luiz Gabriel Date: Mon, 18 Dec 2023 08:20:23 -0300 Subject: [PATCH 1/5] Add pyproject.toml (PyPA specifications) --- .gitignore | 8 +++++++- pyproject.toml | 36 ++++++++++++++++++++++++++++++++++++ setup.py | 43 ------------------------------------------- 3 files changed, 43 insertions(+), 44 deletions(-) create mode 100644 pyproject.toml delete mode 100644 setup.py diff --git a/.gitignore b/.gitignore index 855ab62..8ec8823 100644 --- a/.gitignore +++ b/.gitignore @@ -93,4 +93,10 @@ ENV/ ./stmetrics/spatial.py # Vs Code -.vscode/ \ No newline at end of file +.vscode/ + +# Poetry +poetry.lock + +# others +TODO \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..db68e23 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,36 @@ +[tool.poetry] +name = "stmetrics" +version = "0.1.8" +description = "A package to compute features from Satellite Image Time Series (SITS)." 
+authors = ["Brazil Data Cube Team "] +readme = "README.rst" + +[tool.poetry.dependencies] +python = "=3.10.13" +dtaidistance = "^2.3.11" +pandas = "^2.1.4" +matplotlib = "^3.8.2" +numba = "^0.58.1" +numpy = "^1.26.2" +xarray = "^2023.12.0" +rasterio = "^1.3.9" +shapely = "^2.0.2" +affine = "^2.4.0" +descartes = "^1.1.0" +connected-components-3d = "^3.12.4" +rasterstats = "^0.19.0" +pointpats = "^2.4.0" +geopandas = "^0.14.1" +nolds = "^0.5.2" +urllib3 = "~1" +fastremap = "^1.14.0" + + + +[tool.poetry.group.dev.dependencies] +pytest = "=7.4.3" +flake8 = "=6.1.0" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/setup.py b/setup.py deleted file mode 100644 index 76cf07c..0000000 --- a/setup.py +++ /dev/null @@ -1,43 +0,0 @@ -import setuptools - -with open("README.rst", "r") as fh: - long_description = fh.read() - -setuptools.setup( - name="stmetrics", - version="0.1.7", - author="Brazil Data Cube Team", - author_email="brazildatacube@dpi.inpe.br", - description="A package to compute features from Satellite Image Time Series (SITS).", - long_description=long_description, - long_description_content_type="text/markdown", - url="https://github.com/brazil-data-cube/stmetrics/", - packages=['stmetrics'], - install_requires=[ - 'scipy', - 'sklearn', - 'pandas', - 'numpy', - 'matplotlib', - 'shapely', - 'descartes', - 'nolds', - 'dtaidistance', - 'rasterio', - 'geopandas', - 'pointpats', - 'fastremap', - 'connected-components-3d', - 'rasterstats', - 'xarray', - 'affine', - 'numba', - 'tqdm' - ], - classifiers=[ - "Programming Language :: Python :: 3", - "License :: OSI Approved :: MIT License", - "Operating System :: OS Independent", - "Development Status :: 4 - Beta" - ], -) From 7677a7b4abdc8e0827a9d6f44e4f55ebe0f1b3ca Mon Sep 17 00:00:00 2001 From: Luiz Gabriel Date: Mon, 18 Dec 2023 08:25:10 -0300 Subject: [PATCH 2/5] Remove lazy imports and implement style corrections (flake8) --- pytest.ini | 3 - 
tests/test_gmetrics.py | 597 ++++++++++++++++++++--------------------- 2 files changed, 288 insertions(+), 312 deletions(-) delete mode 100644 pytest.ini diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index 07ea8db..0000000 --- a/pytest.ini +++ /dev/null @@ -1,3 +0,0 @@ -[pytest] -addopts = --color=auto --cov=stmetrics --cov-report=term-missing -testpaths = tests \ No newline at end of file diff --git a/tests/test_gmetrics.py b/tests/test_gmetrics.py index a427013..48e0393 100644 --- a/tests/test_gmetrics.py +++ b/tests/test_gmetrics.py @@ -1,429 +1,408 @@ """Unit-test for stmetrics.""" +import numpy +import pytest + +import stmetrics + + def test_getmetrics(): - import numpy - import stmetrics - - series = numpy.array([0.157327502966,0.168894290924,0.141409546137, - 0.113800831139,0.0922891944647,0.0747280195355, - 0.0537555813789,0.0660935789347,0.0770644843578, - 0.0739007592201,0.0983928665519,0.192401319742, - 0.286366194487,0.367539167404,0.420437157154, - 0.418041080236,0.413386583328,0.375436246395, - 0.335108757019,0.307270467281,0.250428706408, - 0,1,0, - 0.103006377816,0.115561470389,0.114221975207, - 0.172464296222,0.284338653088,0.386188000441, - 0.45704460144,0.571164608002,0.707974851131, - 0.648853778839,0.580699682236,0.566288888454, - 0.547502994537,0.500209212303,0.447707682848, - 0.39193546772,0.357513874769,0.290982276201, - 0.217830166221,0.148564651608,0.101060912013, - 0.111838668585,0.121473513544,0.113883294165, - 0.114351868629,0.116994164884,0.0982540994883, - 0.0843055993319,0.0827744230628,0.0758764594793, - 0.0936531722546,0.0942907482386,0.172556817532]) - - metrics = {'basics': {'max_ts': 0.707974, - 'min_ts': 0.0, - 'mean_ts': 0.237823, - 'std_ts': 0.183005, - 'sum_ts': 13.318112, - 'amplitude_ts': 0.707974, - 'mse_ts': 5.042865, - 'fslope_ts': 0.250428, - 'skew_ts': 0.795801, - 'amd_ts': 0.043546, - 'abs_sum_ts': 13.318112, - 'iqr_ts': 0.28086, - 'fqr_ts': 0.096272, - 'tqr_ts': 0.380812, - 'sqr_ts': 0.158729}, - 
'polar': {'ecc_metric': 0.987689, - 'gyration_radius': 0.378319, - 'area_ts': 0.276252, - 'polar_balance': 0.069048, - 'angle': 3.541431, - 'area_q1': 0.046879, - 'area_q2': 0.033173, - 'area_q3': 0.186429, - 'area_q4': 0.00977, - 'csi': 2.658336}, - 'fractal': {'dfa_fd': 2.053765, 'hurst_exp': 0.87168, 'katz_fd': 1.437053}} - - out = stmetrics.metrics.get_metrics(series,nodata=0.157327502966) - assert metrics == out + + series = numpy.array([0.157327502966, 0.168894290924, 0.141409546137, + 0.113800831139, 0.0922891944647, 0.0747280195355, + 0.0537555813789, 0.0660935789347, 0.0770644843578, + 0.0739007592201, 0.0983928665519, 0.192401319742, + 0.286366194487, 0.367539167404, 0.420437157154, + 0.418041080236, 0.413386583328, 0.375436246395, + 0.335108757019, 0.307270467281, 0.250428706408, + 0, 1, 0, + 0.103006377816, 0.115561470389, 0.114221975207, + 0.172464296222, 0.284338653088, 0.386188000441, + 0.45704460144, 0.571164608002, 0.707974851131, + 0.648853778839, 0.580699682236, 0.566288888454, + 0.547502994537, 0.500209212303, 0.447707682848, + 0.39193546772, 0.357513874769, 0.290982276201, + 0.217830166221, 0.148564651608, 0.101060912013, + 0.111838668585, 0.121473513544, 0.113883294165, + 0.114351868629, 0.116994164884, 0.0982540994883, + 0.0843055993319, 0.0827744230628, 0.0758764594793, + 0.0936531722546, 0.0942907482386, 0.172556817532]) + + metrics = {'basics': {'max_ts': 0.707974, + 'min_ts': 0.0, + 'mean_ts': 0.237823, + 'std_ts': 0.183005, + 'sum_ts': 13.318112, + 'amplitude_ts': 0.707974, + 'mse_ts': 5.042865, + 'fslope_ts': 0.250428, + 'skew_ts': 0.795801, + 'amd_ts': 0.043546, + 'abs_sum_ts': 13.318112, + 'iqr_ts': 0.28086, + 'fqr_ts': 0.096272, + 'tqr_ts': 0.380812, + 'sqr_ts': 0.158729}, + 'polar': {'ecc_metric': 0.987689, + 'gyration_radius': 0.378319, + 'area_ts': 0.276252, + 'polar_balance': 0.069048, + 'angle': 3.541431, + 'area_q1': 0.046879, + 'area_q2': 0.033173, + 'area_q3': 0.186429, + 'area_q4': 0.00977, + 'csi': 2.658336}, + 'fractal': 
{'dfa_fd': 2.053765, + 'hurst_exp': 0.87168, + 'katz_fd': 1.437053}} + + out = stmetrics.metrics.get_metrics(series, nodata=0.157327502966) + assert metrics == out def test_basics(): - import stmetrics - import numpy - - basicas = {'max_ts': 1.0, - 'min_ts': 1.0, - 'mean_ts': 1.0, - 'std_ts': 0.0, - 'sum_ts': 360.0, - 'amplitude_ts': 0.0, - 'mse_ts': 360.0, - 'fslope_ts': 0.0, - 'skew_ts': 0.0, - 'amd_ts': 0.0, - 'abs_sum_ts': 360.0, - 'iqr_ts': 0.0, - 'fqr_ts': 1.0, - 'tqr_ts': 1.0, - 'sqr_ts': 1.0} - - bmetrics = stmetrics.basics.ts_basics(numpy.ones((1,360)).T) - - assert basicas == bmetrics + basicas = {'max_ts': 1.0, + 'min_ts': 1.0, + 'mean_ts': 1.0, + 'std_ts': 0.0, + 'sum_ts': 360.0, + 'amplitude_ts': 0.0, + 'mse_ts': 360.0, + 'fslope_ts': 0.0, + 'skew_ts': 0.0, + 'amd_ts': 0.0, + 'abs_sum_ts': 360.0, + 'iqr_ts': 0.0, + 'fqr_ts': 1.0, + 'tqr_ts': 1.0, + 'sqr_ts': 1.0} + + bmetrics = stmetrics.basics.ts_basics(numpy.ones((1, 360)).T) + + assert basicas == bmetrics + def test_fractal(): - - import stmetrics - import numpy - fractais = {'dfa_fd': nan, - 'hurst_exp': nan, - 'katz_fd': nan} + fractais = {'dfa_fd': numpy.nan, + 'hurst_exp': numpy.nan, + 'katz_fd': numpy.nan} - bmetrics = stmetrics.fractal.ts_fractal(numpy.ones((1,360)).T) + bmetrics = stmetrics.fractal.ts_fractal(numpy.ones((1, 360)).T) - assert fractais == bmetrics + assert fractais == bmetrics def test_polares(): - - import stmetrics - import numpy - polares = {'ecc_metric': 1.0, - 'gyration_radius': 1.0, - 'area_ts': 3.141433, - 'polar_balance': 0.0, - 'angle': 0.0, - 'area_q1': 0.785358, - 'area_q2': 0.785358, - 'area_q3': 0.785358, - 'area_q4': 0.785358, - 'csi': 1.000025} + polares = {'ecc_metric': 1.0, + 'gyration_radius': 1.0, + 'area_ts': 3.141433, + 'polar_balance': 0.0, + 'angle': 0.0, + 'area_q1': 0.785358, + 'area_q2': 0.785358, + 'area_q3': 0.785358, + 'area_q4': 0.785358, + 'csi': 1.000025} - bmetrics = stmetrics.polar.ts_polar(numpy.ones((360))) + bmetrics = 
stmetrics.polar.ts_polar(numpy.ones((360))) - assert polares == bmetrics + assert polares == bmetrics def test_utils(): - import numpy - import stmetrics - - series = numpy.array([0.157327502966,0.168894290924,0.141409546137, - 0.113800831139,0.0922891944647,0.0747280195355, - 0.0537555813789,0.0660935789347,0.0770644843578, - 0.0739007592201,0.0983928665519,0.192401319742, - 0.286366194487,0.367539167404,0.420437157154, - 0.418041080236,0.413386583328,0.375436246395, - 0.335108757019,0.307270467281,0.250428706408, - 0.178802281618,0.117247626185,0.11457183212, - 0.103006377816,0.115561470389,0.114221975207, - 0.172464296222,0.284338653088,0.386188000441, - 0.45704460144,0.571164608002,0.707974851131, - 0.648853778839,0.580699682236,0.566288888454, - 0.547502994537,0.500209212303,0.447707682848, - 0.39193546772,0.357513874769,0.290982276201, - 0.217830166221,0.148564651608,0.101060912013, - 0.111838668585,0.121473513544,0.113883294165, - 0.114351868629,0.116994164884,0.0982540994883, - 0.0843055993319,0.0827744230628,0.0758764594793, - 0.0936531722546,0.0942907482386,0.172556817532]) - - - geometry = stmetrics.utils.create_polygon(series) - - if geometry.is_valid == True: - pass + + series = numpy.array([0.157327502966, 0.168894290924, 0.141409546137, + 0.113800831139, 0.0922891944647, 0.0747280195355, + 0.0537555813789, 0.0660935789347, 0.0770644843578, + 0.0739007592201, 0.0983928665519, 0.192401319742, + 0.286366194487, 0.367539167404, 0.420437157154, + 0.418041080236, 0.413386583328, 0.375436246395, + 0.335108757019, 0.307270467281, 0.250428706408, + 0.178802281618, 0.117247626185, 0.11457183212, + 0.103006377816, 0.115561470389, 0.114221975207, + 0.172464296222, 0.284338653088, 0.386188000441, + 0.45704460144, 0.571164608002, 0.707974851131, + 0.648853778839, 0.580699682236, 0.566288888454, + 0.547502994537, 0.500209212303, 0.447707682848, + 0.39193546772, 0.357513874769, 0.290982276201, + 0.217830166221, 0.148564651608, 0.101060912013, + 0.111838668585, 
0.121473513544, 0.113883294165, + 0.114351868629, 0.116994164884, 0.0982540994883, + 0.0843055993319, 0.0827744230628, 0.0758764594793, + 0.0936531722546, 0.0942907482386, 0.172556817532]) + + geometry = stmetrics.utils.create_polygon(series) + + if geometry.is_valid: + pass def test_polar(): - import numpy - from stmetrics import utils - polares = utils.error_polar() + polares = stmetrics.utils.error_polar() - if all(numpy.isnan(value) != numpy.nan for value in polares.values()) == True: - pass + if all(numpy.isnan(value) != numpy.nan for value in polares.values()): + pass -def test_fractal(): - import numpy - from stmetrics import utils - fractal = utils.error_fractal() +# def test_fractal(): - if all(numpy.isnan(value) != numpy.nan for value in fractal.values()) == True: - pass +# fractal = stmetrics.utils.error_fractal() +# if all(numpy.isnan(value) != numpy.nan for value in fractal.values()): +# pass -def test_basics(): - import numpy - from stmetrics import utils - basics = utils.error_basics() - if all(numpy.isnan(value) != numpy.nan for value in basics.values()) == True: - pass +# def test_basics(): + +# basics = stmetrics.utils.error_basics() + +# if all(numpy.isnan(value) != numpy.nan for value in basics.values()): +# pass + def test_symmetric_distance(): - import numpy - from stmetrics import polar + import numpy + from stmetrics import polar + + s1 = numpy.ones((360)) + s2 = numpy.ones((360)) - s1 = numpy.ones((360)) - s2 = numpy.ones((360)) + dist = polar.symmetric_distance(s1, s2) - dist = polar.symmetric_distance(s1, s2) + assert dist == 0 - assert dist == 0 def test_symmetric_distance_ii(): - import numpy - from stmetrics import polar - s1 = numpy.ones((360))-0.1 - s2 = numpy.ones((360)) + s1 = numpy.ones((360))-0.1 + s2 = numpy.ones((360)) - dist = polar.symmetric_distance(s1, s2) + dist = stmetrics.polar.symmetric_distance(s1, s2) + + assert dist == 0.596872 - assert dist == 0.596872 def test_geometries(): - from shapely import geometry - from 
stmetrics import spatial + from shapely import geometry - p1 = geometry.Point(0,0) - p2 = geometry.Point(1,0) - p3 = geometry.Point(1,1) - p4 = geometry.Point(0,1) + p1 = geometry.Point(0, 0) + p2 = geometry.Point(1, 0) + p3 = geometry.Point(1, 1) + p4 = geometry.Point(0, 1) - pointList = [p1, p2, p3, p4, p1] + pointList = [p1, p2, p3, p4, p1] - poly = geometry.Polygon([[p.x, p.y] for p in pointList]) + poly = geometry.Polygon([[p.x, p.y] for p in pointList]) - out = [0.0, 1.0, 0.6376435773361453, 0.0, 1.0, 1.0] + out = [0.0, 1.0, 0.6376435773361453, 0.0, 1.0, 1.0] - res = [spatial.symmetry(poly), - spatial.aspect_ratio(poly), - spatial.reock_compactness(poly), - spatial.rectangular_fit(poly), - spatial.width(poly), - spatial.length(poly)] + res = [stmetrics.spatial.symmetry(poly), + stmetrics.spatial.aspect_ratio(poly), + stmetrics.spatial.reock_compactness(poly), + stmetrics.spatial.rectangular_fit(poly), + stmetrics.spatial.width(poly), + stmetrics.spatial.length(poly)] - assert out == res + assert out == res def test_getmetrics_sits(): - import numpy - from stmetrics import metrics - out = numpy.array([ 1. , 1. , 1. , 0. , 360. , - 0. , 360. , 0. , 0. , 0. , - 360. , 0. , 1. , 1. , 1. , - 3.141433, 0. , 0.785358, 0.785358, 0.785358, - 0.785358, 0. , 1. , 1. 
, - 1.000025]) + out = numpy.array([1, 1, 1, 0, 360, + 0, 360, 0, 0, 0, + 360, 0, 1, 1, 1, + 3.141433, 0, 0.785358, 0.785358, 0.785358, + 0.785358, 0, 1, 1, 1.000025]) - res = metrics._getmetrics(numpy.ones((360))) + res = stmetrics.metrics._getmetrics(numpy.ones((360))) res = res[~numpy.isnan(res)] assert all(out == res) + def test_list_metrics(): - from stmetrics import utils - - out = ['max_ts', - 'min_ts', - 'mean_ts', - 'std_ts', - 'sum_ts', - 'amplitude_ts', - 'mse_ts', - 'fslope_ts', - 'skew_ts', - 'amd_ts', - 'abs_sum_ts', - 'iqr_ts', - 'fqr_ts', - 'sqr_ts', - 'tqr_ts', - 'area_ts', - 'angle', - 'area_q1', - 'area_q2', - 'area_q3', - 'area_q4', - 'polar_balance', - 'ecc_metric', - 'gyration_radius', - 'csi', - 'dfa_fd', - 'hurst_exp', - 'katz_fd'] - - assert all([out == utils.list_metrics()]) + from stmetrics import utils + + out = ['max_ts', + 'min_ts', + 'mean_ts', + 'std_ts', + 'sum_ts', + 'amplitude_ts', + 'mse_ts', + 'fslope_ts', + 'skew_ts', + 'amd_ts', + 'abs_sum_ts', + 'iqr_ts', + 'fqr_ts', + 'sqr_ts', + 'tqr_ts', + 'area_ts', + 'angle', + 'area_q1', + 'area_q2', + 'area_q3', + 'area_q4', + 'polar_balance', + 'ecc_metric', + 'gyration_radius', + 'csi', + 'dfa_fd', + 'hurst_exp', + 'katz_fd'] + + assert all([out == utils.list_metrics()]) def test_sits2metrics_exception(): - import numpy - import stmetrics - import pytest - with pytest.raises(Exception): - assert stmetrics.metrics.sits2metrics([10]) + with pytest.raises(Exception): + assert stmetrics.metrics.sits2metrics([10]) def test_create_polygon_exception(): - import stmetrics - import pytest - with pytest.raises(Exception): - assert stmetrics.utils.create_polygon([10]) + with pytest.raises(Exception): + assert stmetrics.utils.create_polygon([10]) def test_check_input_exception(): - import stmetrics - import pytest - with pytest.raises(Exception): - assert stmetrics.utils.check_input([10]) + with pytest.raises(Exception): + assert stmetrics.utils.check_input([10]) - -def test_sits2metrics(): - 
import numpy - import stmetrics +def test_sits2metrics(): - sits = numpy.array([[[0.08213558, 0.58803765], - [0.49712389, 0.83526625]], + sits = numpy.array([[[0.08213558, 0.58803765], + [0.49712389, 0.83526625]], - [[0.88548059, 0.30089922], - [0.46782818, 0.84561955]], + [[0.88548059, 0.30089922], + [0.46782818, 0.84561955]], - [[0.97508056, 0.37090787], - [0.23905704, 0.96134861]], + [[0.97508056, 0.37090787], + [0.23905704, 0.96134861]], - [[0.34126892, 0.0517639 ], - [0.56801062, 0.9046814 ]], + [[0.34126892, 0.0517639], + [0.56801062, 0.9046814]], - [[0.89621465, 0.79039706], - [0.76447722, 0.37223732]], + [[0.89621465, 0.79039706], + [0.76447722, 0.37223732]], - [[0.01181458, 0.92984248], - [0.95011783, 0.94595306]], + [[0.01181458, 0.92984248], + [0.95011783, 0.94595306]], - [[0.19884843, 0.86591456], - [0.25220217, 0.54905 ]], + [[0.19884843, 0.86591456], + [0.25220217, 0.54905]], - [[0.44872961, 0.61002462], - [0.43320113, 0.41983541]], + [[0.44872961, 0.61002462], + [0.43320113, 0.41983541]], - [[0.67116755, 0.70299412], - [0.06319867, 0.99832697]], + [[0.67116755, 0.70299412], + [0.06319867, 0.99832697]], - [[0.57694712, 0.30948048], - [0.9029195 , 0.99803176]]]) + [[0.57694712, 0.30948048], + [0.9029195, 0.99803176]]]) - output = numpy.array([[[ 9.750800e-01, 9.298420e-01], - [ 9.501170e-01, 9.983260e-01]], + output = numpy.array([[[9.750800e-01, 9.298420e-01], + [9.501170e-01, 9.983260e-01]], - [[ 1.181400e-02, 5.176300e-02], - [ 6.319800e-02, 3.722370e-01]], + [[1.181400e-02, 5.176300e-02], + [6.319800e-02, 3.722370e-01]], - [[ 5.087680e-01, 5.520260e-01], - [ 5.138130e-01, 7.830350e-01]], + [[5.087680e-01, 5.520260e-01], + [5.138130e-01, 7.830350e-01]], - [[ 3.312370e-01, 2.702780e-01], - [ 2.762990e-01, 2.297330e-01]], + [[3.312370e-01, 2.702780e-01], + [2.762990e-01, 2.297330e-01]], - [[ 5.087687e+00, 5.520261e+00], - [ 5.138136e+00, 7.830350e+00]], + [[5.087687e+00, 5.520261e+00], + [5.138136e+00, 7.830350e+00]], - [[ 9.632650e-01, 8.780780e-01], 
- [ 8.869190e-01, 6.260890e-01]], + [[9.632650e-01, 8.780780e-01], + [8.869190e-01, 6.260890e-01]], - [[ 3.685641e+00, 3.777832e+00], - [ 3.403455e+00, 6.659211e+00]], + [[3.685641e+00, 3.777832e+00], + [3.403455e+00, 6.659211e+00]], - [[ 8.844000e-01, 7.386330e-01], - [ 8.397200e-01, 5.784910e-01]], + [[8.844000e-01, 7.386330e-01], + [8.397200e-01, 5.784910e-01]], - [[-4.801000e-02, -2.996340e-01], - [ 1.285020e-01, -8.078330e-01]], + [[-4.801000e-02, -2.996340e-01], + [1.285020e-01, -8.078330e-01]], - [[ 4.132970e-01, 2.622960e-01], - [ 3.397510e-01, 2.659790e-01]], + [[4.132970e-01, 2.622960e-01], + [3.397510e-01, 2.659790e-01]], - [[ 5.087687e+00, 5.520261e+00], - [ 5.138136e+00, 7.830350e+00]], + [[5.087687e+00, 5.520261e+00], + [5.138136e+00, 7.830350e+00]], - [[ 5.974480e-01, 4.437080e-01], - [ 4.179080e-01, 3.368950e-01]], + [[5.974480e-01, 4.437080e-01], + [4.179080e-01, 3.368950e-01]], - [[ 2.700580e-01, 3.401940e-01], - [ 3.427010e-01, 6.921580e-01]], + [[2.700580e-01, 3.401940e-01], + [3.427010e-01, 6.921580e-01]], - [[ 5.128380e-01, 5.990310e-01], - [ 4.824760e-01, 8.751500e-01]], + [[5.128380e-01, 5.990310e-01], + [4.824760e-01, 8.751500e-01]], - [[ 7.783240e-01, 7.466950e-01], - [ 6.662430e-01, 9.536500e-01]], + [[7.783240e-01, 7.466950e-01], + [6.662430e-01, 9.536500e-01]], - [[ 7.090790e-01, 9.537960e-01], - [ 7.414750e-01, 1.785940e+00]], + [[7.090790e-01, 9.537960e-01], + [7.414750e-01, 1.785940e+00]], - [[ 1.396263e+00, 3.490658e+00], - [ 3.490658e+00, 5.585053e+00]], + [[1.396263e+00, 3.490658e+00], + [3.490658e+00, 5.585053e+00]], - [[ 1.183530e-01, 2.287100e-01], - [ 3.691700e-01, 3.263750e-01]], + [[1.183530e-01, 2.287100e-01], + [3.691700e-01, 3.263750e-01]], - [[ 3.475680e-01, 8.975300e-02], - [ 1.130390e-01, 5.781790e-01]], + [[3.475680e-01, 8.975300e-02], + [1.130390e-01, 5.781790e-01]], - [[ 6.238000e-02, 4.504290e-01], - [ 1.095530e-01, 2.568520e-01]], + [[6.238000e-02, 4.504290e-01], + [1.095530e-01, 2.568520e-01]], - [[ 1.807770e-01, 
1.849030e-01], - [ 1.497120e-01, 6.245320e-01]], + [[1.807770e-01, 1.849030e-01], + [1.497120e-01, 6.245320e-01]], - [[ 1.068700e-01, 1.322950e-01], - [ 1.072770e-01, 1.576630e-01]], + [[1.068700e-01, 1.322950e-01], + [1.072770e-01, 1.576630e-01]], - [[ 9.558100e-01, 7.489210e-01], - [ 7.805490e-01, 9.796340e-01]], + [[9.558100e-01, 7.489210e-01], + [7.805490e-01, 9.796340e-01]], - [[ 1.767650e-01, 8.556510e-01], - [ 6.298040e-01, 6.739810e-01]], + [[1.767650e-01, 8.556510e-01], + [6.298040e-01, 6.739810e-01]], - [[ 3.372378e+00, 1.761302e+00], - [ 2.543674e+00, 1.508493e+00]], + [[3.372378e+00, 1.761302e+00], + [2.543674e+00, 1.508493e+00]], - [[-4.280310e-01, -7.486000e-03], - [ 1.888369e+00, -4.988190e-01]], + [[-4.280310e-01, -7.486000e-03], + [1.888369e+00, -4.988190e-01]], - [[ 4.724270e-01, 4.724270e-01], - [ 6.548670e-01, 6.492550e-01]], + [[4.724270e-01, 4.724270e-01], + [6.548670e-01, 6.492550e-01]], - [[ 2.596693e+00, 1.818503e+00], - [ 2.289875e+00, 2.566626e+00]]]) + [[2.596693e+00, 1.818503e+00], + [2.289875e+00, 2.566626e+00]]]) - res = stmetrics.metrics.sits2metrics(sits) + res = stmetrics.metrics.sits2metrics(sits) - r1 = res.reshape(res.shape[0]*res.shape[1]*res.shape[2]) - r2 = output.reshape(output.shape[0]*output.shape[1]*output.shape[2]) + r1 = res.reshape(res.shape[0]*res.shape[1]*res.shape[2]) + r2 = output.reshape(output.shape[0]*output.shape[1]*output.shape[2]) - assert all(r1 == r2) + assert all(r1 == r2) if __name__ == '__main__': - pytest.main(['--color=auto', '--no-cov']) \ No newline at end of file + pytest.main(['--color=auto', '--no-cov']) From 40e13133ebf448f7e2596b2fd926dc2ac7e89ec6 Mon Sep 17 00:00:00 2001 From: Luiz Gabriel Date: Mon, 18 Dec 2023 08:26:42 -0300 Subject: [PATCH 3/5] Corrects reference to stmetrics package modules and implement code style corrections (flake8). 
--- stmetrics/basics.py | 14 +-- stmetrics/fractal.py | 22 ++--- stmetrics/polar.py | 8 +- stmetrics/spatial.py | 219 +++++++++++++++++++++---------------------- stmetrics/utils.py | 26 ++--- 5 files changed, 145 insertions(+), 144 deletions(-) diff --git a/stmetrics/basics.py b/stmetrics/basics.py index 6c6ddde..b0fc6c0 100644 --- a/stmetrics/basics.py +++ b/stmetrics/basics.py @@ -1,9 +1,11 @@ import numpy -from .utils import fixseries, truncate +from stmetrics.utils import fixseries, truncate def ts_basics(timeseries, funcs=["all"], nodata=-9999): - """This function compute all basic metrics in a single call, returning a dictionary: + """This function compute all basic metrics in a single call, returning a + dictionary: + - "Max" - Maximum value of the time series. - "Min" - Minimum value of the time series. @@ -31,7 +33,7 @@ def ts_basics(timeseries, funcs=["all"], nodata=-9999): - "IQR" - Interquaritle range (IQR) of the time series. - "FQR" - First quartile of the time series. - + - "SQR" - Second quartile of the time series. - "TQR" - Third quaritle of the time series. @@ -47,7 +49,7 @@ def ts_basics(timeseries, funcs=["all"], nodata=-9999): out_metrics = dict() - metrics_count = 15 + # metrics_count = 15 if "all" in funcs: funcs = [ @@ -78,8 +80,8 @@ def ts_basics(timeseries, funcs=["all"], nodata=-9999): def mean_ts(timeseries, nodata=-9999): - """Average value (mean) of the time series, considering only valid \ - values. When nodata is found, it is not included in N value (for all functions). + """Average value (mean) of the time series, considering only valid values. + When nodata is found, it is not included in N value (for all functions). :param timeseries: Time series. 
:type timeseries: numpy.ndarray diff --git a/stmetrics/fractal.py b/stmetrics/fractal.py index 05fc350..8905651 100644 --- a/stmetrics/fractal.py +++ b/stmetrics/fractal.py @@ -1,16 +1,16 @@ import numpy -from .utils import fixseries, truncate +from stmetrics.utils import fixseries, truncate def ts_fractal(timeseries, funcs=['all'], nodata=-9999): """This function computes 4 fractal dimensions and the hurst exponential. - - DFA: measures the Hurst parameter H, which is similar to the \ + - DFA: measures the Hurst parameter H, which is similar to the Hurst exponent. - - HE: self-similarity measure that assess long-range dependence in a \ + - HE: self-similarity measure that assess long-range dependence in a time series. - + - KFD: This algorirhm computes the FD using Katz algorithm. :param timeseries: Time series. @@ -65,11 +65,11 @@ def dfa_fd(timeseries, nvals=None, overlap=True, order=1, nodata=-9999): .. Note:: - This function uses the Detrended Fluctuation Analysis (DFA) \ - implementation from the Nolds package. Due to time series \ - characteristcs we use by default the 'RANSAC' \ + This function uses the Detrended Fluctuation Analysis (DFA) + implementation from the Nolds package. Due to time series + characteristcs we use by default the 'RANSAC' fitting method as it is more robust to outliers. - For more details regarding the hurst implementation, check Nolds \ + For more details regarding the hurst implementation, check Nolds documentation page. """ @@ -90,10 +90,10 @@ def hurst_exp(timeseries, nvals=None, nodata=-9999): :param timeseries: Time series. :type timeseries: numpy.ndarray - + :param nvals: Sizes of subseries to use. :type nvals: int - + :param nodata: nodata of the time series. Default is -9999. :type nodata: int @@ -125,7 +125,7 @@ def katz_fd(timeseries, nodata=-9999): :param timeseries: Time series. :type timeseries: numpy.ndarray - + :param nodata: nodata of the time series. Default is -9999. 
:type nodata: int diff --git a/stmetrics/polar.py b/stmetrics/polar.py index f3932eb..5a6d2f3 100644 --- a/stmetrics/polar.py +++ b/stmetrics/polar.py @@ -32,7 +32,7 @@ def ts_polar(timeseries, funcs=["all"], nodata=-9999, show=False): - CSI - This is a dimensionless quantitative measure of morphology, \ that characterize the standard deviation of an object from a circle. - + To visualize the time series on polar space use: ts_polar(timeseries,\ show=True) @@ -56,7 +56,7 @@ def ts_polar(timeseries, funcs=["all"], nodata=-9999, show=False): """ out_metrics = dict() - metrics_count = 9 + # metrics_count = 9 if "all" in funcs: funcs = [ @@ -161,7 +161,7 @@ def polar_plot(timeseries, nodata=-9999): """ import matplotlib.pyplot as plt from descartes import PolygonPatch - from matplotlib.ticker import FormatStrFormatter + # from matplotlib.ticker import FormatStrFormatter # filter time series ts = fixseries(timeseries, nodata) @@ -175,7 +175,7 @@ def polar_plot(timeseries, nodata=-9999): maxY = numpy.max(numpy.abs(y)) # get season rings - ringTopLeft, ringTopRight,\ + ringTopLeft, ringTopRight, \ ringBottomLeft, ringBottomRight = get_seasons(x, y) # setup plot diff --git a/stmetrics/spatial.py b/stmetrics/spatial.py index a02b304..f63e961 100644 --- a/stmetrics/spatial.py +++ b/stmetrics/spatial.py @@ -5,9 +5,9 @@ def snitc(dataset, ki, m, nodata=0, scale=10000, iter=10, pattern="hexagonal", - output="shp", window=None, max_dist=None, max_step=None, + output="shp", window=None, max_dist=None, max_step=None, max_diff=None, penalty=None, psi=None, pruning=False): - """This function create spatial-temporal superpixels using a Satellite \ + """This function create spatial-temporal superpixels using a Satellite Image Time Series (SITS). Version 1.4 :param image: SITS dataset. @@ -19,31 +19,31 @@ def snitc(dataset, ki, m, nodata=0, scale=10000, iter=10, pattern="hexagonal", :param m: Compactness value. Bigger values led to regular superpixels. 
:type m: int - :param nodata: If you dataset contain nodata, it will be replace by \ - this value. This value is necessary to be possible the use the \ + :param nodata: If you dataset contain nodata, it will be replace by + this value. This value is necessary to be possible the use the DTW distance. Ideally your dataset must not contain nodata. :type nodata: float - :param scale: Adjust the time series, to 0-1. Necessary to distance \ + :param scale: Adjust the time series, to 0-1. Necessary to distance calculation. :type scale: int :param iter: Number of iterations to be performed. Default = 10. :type iter: int - :param pattern: Type of pattern initialization. Hexagonal (default) or\ + :param pattern: Type of pattern initialization. Hexagonal (default) or regular (as SLIC). :type pattern: int - :param output: Type of output to be produced. Default is shp (Shapefile).\ + :param output: Type of output to be produced. Default is shp (Shapefile). The two possible values are shp and matrix (returns a numpy array). :type output: string - :param window: Only allow for maximal shifts from the two diagonals \ - smaller than this number. It includes the diagonal, meaning that an \ + :param window: Only allow for maximal shifts from the two diagonals + smaller than this number. It includes the diagonal, meaning that an Euclidean distance is obtained by setting window=1. - :param max_dist: Stop if the returned values will be larger than \ + :param max_dist: Stop if the returned values will be larger than this value. :param max_step: Do not allow steps larger than this value. @@ -52,26 +52,26 @@ def snitc(dataset, ki, m, nodata=0, scale=10000, iter=10, pattern="hexagonal", :param penalty: Penalty to add if compression or expansion is applied. - :param psi: Psi relaxation parameter (ignore start and end of matching). \ + :param psi: Psi relaxation parameter (ignore start and end of matching). Useful for cyclical series. - + :returns segmentation: Segmentation produced. 
..Note:: - Reference: Soares, A. R., Körting, T. S., Fonseca, L. M. G., Bendini, \ - H. N. `Simple Nonlinear Iterative Temporal Clustering. \ - `_ \ + Reference: Soares, A. R., Körting, T. S., Fonseca, L. M. G., Bendini, + H. N. `Simple Nonlinear Iterative Temporal Clustering. + `_ IEEE Transactions on Geoscience and Remote, 2020 (Early Access). """ print('Simple Non-Linear Iterative Temporal Clustering V 1.4') - fast = False - try: - from dtaidistance.dtw import dtw_cc_omp - fast = True - except ImportError: - logger.debug('DTAIDistance C-OMP library not available') - fast = False + # fast = False + # try: + # from dtaidistance.dtw import dtw_cc_omp + # fast = True + # except ImportError: + # logger.debug('DTAIDistance C-OMP library not available') + # fast = False if isinstance(dataset, rasterio.io.DatasetReader): try: @@ -80,7 +80,7 @@ def snitc(dataset, ki, m, nodata=0, scale=10000, iter=10, pattern="hexagonal", transform = meta["transform"] crs = meta["crs"] img = dataset.read().astype(float) - img[img == dataset.nodata] = numpy.nan + img[img == dataset.nodata] = numpy.nan except: Exception('Sorry we could not read your dataset.') @@ -93,8 +93,8 @@ def snitc(dataset, ki, m, nodata=0, scale=10000, iter=10, pattern="hexagonal", except: Exception('Sorry we could not read your dataset.') else: - TypeError("Sorry we can't read this type of file. 
\ - Please use Rasterio or xarray") + TypeError("Sorry we can't read this type of file.\n" + "Please use Rasterio or xarray") # Normalize data for band in range(img.shape[0]): @@ -137,17 +137,17 @@ def snitc(dataset, ki, m, nodata=0, scale=10000, iter=10, pattern="hexagonal", # Calculate Spatio-temporal distance try: - D = distance_fast(c_series, ic, jc, subim, S, m, rmin, cmin, + D = distance_fast(c_series, ic, jc, subim, S, m, rmin, cmin, window=window, max_dist=max_dist, - max_step=max_step, + max_step=max_step, max_diff=max_diff, penalty=penalty, psi=psi) except: - D = distance(c_series, ic, jc, subim, S, m, rmin, cmin, - window=window, max_dist=max_dist, - max_step=max_step, - max_diff=max_diff, - penalty=penalty, psi=psi) # DTW regular + D = distance(c_series, ic, jc, subim, S, m, rmin, cmin, + window=window, max_dist=max_dist, + max_step=max_step, + max_diff=max_diff, + penalty=penalty, psi=psi) # DTW regular subd = d[rmin:rmax, cmin:cmax] subl = l[rmin:rmax, cmin:cmax] @@ -174,8 +174,8 @@ def snitc(dataset, ki, m, nodata=0, scale=10000, iter=10, pattern="hexagonal", return labelled -def distance_fast(c_series, ic, jc, subim, S, m, rmin, cmin, - window=None, max_dist=None, max_step=None, +def distance_fast(c_series, ic, jc, subim, S, m, rmin, cmin, + window=None, max_dist=None, max_step=None, max_diff=None, penalty=None, psi=None): """This function computes the spatial-temporal distance between \ two pixels using the dtw distance with C implementation. 
@@ -238,7 +238,7 @@ def distance_fast(c_series, ic, jc, subim, S, m, rmin, cmin, # Compute dtw distances c = dtw.distance_matrix_fast(merge, block=((0, merge.shape[0]), (merge.shape[0] - 1, merge.shape[0])), - compact=True, parallel=True, window=window, + compact=True, parallel=True, window=window, max_dist=max_dist, max_step=max_step, max_length_diff=max_diff, penalty=penalty, psi=psi) @@ -258,7 +258,7 @@ def distance_fast(c_series, ic, jc, subim, S, m, rmin, cmin, def distance(c_series, ic, jc, subim, S, m, rmin, cmin, - window=None, max_dist=None, max_step=None, + window=None, max_dist=None, max_step=None, max_diff=None, penalty=None, psi=None, pruning=False): """This function computes the spatial-temporal distance between \ two pixels using the DTW distance. @@ -314,15 +314,16 @@ def distance(c_series, ic, jc, subim, S, m, rmin, cmin, # Initialize submatrix ds = numpy.zeros([subim.shape[1], subim.shape[2]]) - + # Tranpose matrix to allow dtw fast computation with dtaidistance linear = subim.transpose(1, 2, 0).reshape(subim.shape[1]*subim.shape[2], subim.shape[0]) merge = numpy.vstack((linear, c_series)).astype(numpy.double) - + c = dtw.distance_matrix(merge, block=((0, merge.shape[0]), - (merge.shape[0] - 1, merge.shape[0])), - compact=True, use_c=True, parallel=True, use_mp=True) + (merge.shape[0] - 1, merge.shape[0])), + compact=True, use_c=True, parallel=True, + use_mp=True) c1 = numpy.array(c) dc = c1.reshape(subim.shape[1], subim.shape[2]) @@ -481,7 +482,7 @@ def init_cluster_hex(rows, columns, ki, img, bands): :returns k: Number of superpixels that will be produced. 
""" - N = rows * columns + # N = rows * columns # Setting up SNITC S = (rows*columns / (ki * (3**0.5)/2))**0.5 @@ -596,29 +597,30 @@ def init_cluster_regular(rows, columns, ki, img, bands): # Initialise grid for x in range(base, rows, vSpacing): for y in range(base, columns, hSpacing): - cc = int(numpy.floor(y)) - rr = int(numpy.floor(x)) + # cc = int(numpy.floor(y)) + # rr = int(numpy.floor(x)) ts = img[:, int(x), int(y)] st = numpy.append(ts, [int(x), int(y), 0]) C[kk, :] = st kk = kk+1 - w = S/2 + # w = S/2 st = None return C, S, labelled, d, kk -def seg_metrics(dataframe, bands=None, metrics_dict={ - "basics": ["all"], - "polar": ["all"], - "fractal": ["all"]}, - features=['mean'], - num_cores=-1): - """This function compute time series metrics from a geopandas \ +def seg_metrics(dataframe, + bands=None, + metrics_dict={"basics": ["all"], + "polar": ["all"], + "fractal": ["all"]}, + features=['mean'], + num_cores=-1): + """This function compute time series metrics from a geopandas with time features. - Currently, basic, polar and fractal metrics are extracted. but you can \ + Currently, basic, polar and fractal metrics are extracted. but you can set the metrics you to compute using a dictionary. :param dataframe: Pandas DataFrame with time series information. @@ -633,7 +635,9 @@ def seg_metrics(dataframe, bands=None, metrics_dict={ :param features: List of features to be used for computation. \ This parameter allows you to use the features extracted with \ ``extract_features`` function and compute metrics over image features \ - (mean, max, min, std and mode). If it is None, the code expect that the DataFrame has only one variable. + (mean, max, min, std and mode). If it is None, the code expect that the + DataFrame has only one variable. + :type features: list :returns out_dataframe: Geopandas dataframe with the features added. 
@@ -656,8 +660,8 @@ def seg_metrics(dataframe, bands=None, metrics_dict={ series = dataframe.filter(regex=f) metricas = _seg_ex_metrics(series.to_numpy().astype(float), - metrics_dict, - num_cores) + metrics_dict, + num_cores) header = list_metrics() @@ -668,25 +672,24 @@ def seg_metrics(dataframe, bands=None, metrics_dict={ metricsdf = pandas.DataFrame(metricas, columns=names) out_dataframe = pandas.concat([out_dataframe, metricsdf], - axis=1) + axis=1) - else: - metricas = _seg_ex_metrics(df.to_numpy().astype(float), - metrics_dict, - num_cores) + else: + metricas = _seg_ex_metrics(df.to_numpy().astype(float), + metrics_dict, + num_cores) - header = list_metrics() + header = list_metrics() - names = [i + '_' + k - for i, k in zip([band] * len(header), - header)] + names = [i + '_' + k + for i, k in zip([band] * len(header), header)] - metricsdf = pandas.DataFrame(metricas, columns=names) + metricsdf = pandas.DataFrame(metricas, columns=names) - out_dataframe = pandas.concat([out_dataframe, metricsdf], + out_dataframe = pandas.concat([out_dataframe, metricsdf], axis=1) else: - + df = dataframe if features is not None: @@ -696,8 +699,8 @@ def seg_metrics(dataframe, bands=None, metrics_dict={ series = dataframe.filter(regex=f) metricas = _seg_ex_metrics(series.to_numpy().astype(float), - metrics_dict, - num_cores) + metrics_dict, + num_cores) header = list_metrics() @@ -708,32 +711,29 @@ def seg_metrics(dataframe, bands=None, metrics_dict={ metricsdf = pandas.DataFrame(metricas, columns=names) out_dataframe = pandas.concat([out_dataframe, metricsdf], - axis=1) + axis=1) - else: - metricas = _seg_ex_metrics(df.to_numpy().astype(float), - metrics_dict, - num_cores) + else: + metricas = _seg_ex_metrics(df.to_numpy().astype(float), + metrics_dict, + num_cores) - header = list_metrics() + header = list_metrics() - names = [i + '_' + k - for i, k in zip([band] * len(header), - header)] + names = [i + '_' + k for i, k in zip([band] * len(header), header)] - metricsdf = 
pandas.DataFrame(metricas, columns=names) + metricsdf = pandas.DataFrame(metricas, columns=names) - out_dataframe = pandas.concat([out_dataframe, metricsdf], - axis=1) + out_dataframe = pandas.concat([out_dataframe, metricsdf], axis=1) return out_dataframe -def _seg_ex_metrics(series, metrics_dict={ - "basics": ["all"], - "polar": ["all"], - "fractal": ["all"]}, - num_cores=-1): +def _seg_ex_metrics(series, + metrics_dict={"basics": ["all"], + "polar": ["all"], + "fractal": ["all"]}, + num_cores=-1): # This function performs the computation of the metrics using \ # multiprocessing. import multiprocessing as mp @@ -791,8 +791,8 @@ def extract_features(dataset, segmentation, :returns segmentation: GeoPandas DataFrame with the features. """ import os - import pandas - import rasterstats + # import pandas + # import rasterstats import xarray # Performing buffer to solve possible invalid polygons @@ -807,35 +807,35 @@ def extract_features(dataset, segmentation, features.remove('perimeter') if 'aspect_ratio' in features: - segmentation["aspect_ratio"] = segmentation['geometry'].apply(lambda g: - aspect_ratio(g)) + segmentation["aspect_ratio"] = segmentation['geometry'].apply( + lambda g: aspect_ratio(g)) features.remove('aspect_ratio') if 'symmetry' in features: - segmentation["symmetry"] = segmentation['geometry'].apply(lambda g: - symmetry(g)) + segmentation["symmetry"] = segmentation['geometry'].apply( + lambda g: symmetry(g)) features.remove('symmetry') if 'compactness' in features: - segmentation["compactness"] = segmentation['geometry'].apply(lambda g: - reock_compactness(g)) + segmentation["compactness"] = segmentation['geometry'].apply( + lambda g: reock_compactness(g)) features.remove('compactness') if 'rectangular_fit' in features: - segmentation["rectangular_fit"] = segmentation['geometry'].apply(lambda g: - rectangular_fit(g)) + segmentation["rectangular_fit"] = segmentation['geometry'].apply( + lambda g: rectangular_fit(g)) features.remove('rectangular_fit') 
if 'width' in features: - segmentation["width"] = segmentation['geometry'].apply(lambda g: - width(g)) + segmentation["width"] = segmentation['geometry'].apply( + lambda g: width(g)) features.remove('width') if 'length' in features: - segmentation["length"] = segmentation['geometry'].apply(lambda g: - length(g)) + segmentation["length"] = segmentation['geometry'].apply( + lambda g: length(g)) features.remove('length') - + if isinstance(dataset, rasterio.io.DatasetReader): segmentation = _exRasterio(dataset, segmentation, features, nodata) @@ -865,9 +865,9 @@ def extract_features(dataset, segmentation, def _exRasterio(dataset, segmentation, features, nodata): - import os + # import os import pandas - import rasterstats + # import rasterstats geoms = segmentation.geometry.tolist() @@ -893,19 +893,18 @@ def _exRasterio(dataset, segmentation, features, nodata): def _extract_xray(dataset, segmentation, features, nodata): import pandas - import rasterstats + # import rasterstats from affine import Affine band_list = list(dataset.data_vars) geoms = segmentation.geometry.tolist() - #try to get dates + # try to get dates try: dates = dataset.time.values except: rang = dataset[band_list[0]].values.shape[0] - dates = numpy.arange(0,rang) - + dates = numpy.arange(0, rang) # Fix affine transformation # Function from_gdal swap positions we need to fix this in a brute \ @@ -938,11 +937,11 @@ def _extract_xray(dataset, segmentation, features, nodata): def _extract_from_path(path, segmentation, features, nodata): import os - import re + # import re import glob import pandas import rasterio - import rasterstats + # import rasterstats # Read images and sort f_path = glob.glob(path+"*.tif") @@ -951,7 +950,7 @@ def _extract_from_path(path, segmentation, features, nodata): for f in f_path: dataset = rasterio.open(f) - affine = dataset.transform + # affine = dataset.transform # find datetime and att key = os.path.basename(f).split('.')[0] diff --git a/stmetrics/utils.py 
b/stmetrics/utils.py
index 3dc3d9a..7ef0da0 100644
--- a/stmetrics/utils.py
+++ b/stmetrics/utils.py
@@ -4,9 +4,9 @@
 def fixseries(timeseries, nodata=-9999):
     """This function ajusts the time series to polar transformation.
 
-    As some time series may have very significant noises (such as spikes), when coverted to \
-    polar space it may produce an inconsistent geometry. To avoid this issue, \
-    this function removes this spikes.
+    As some time series may have very significant noises (such as spikes), when
+    converted to polar space it may produce an inconsistent geometry.
+    To avoid this issue, this function removes these spikes.
 
     :param timeseries: Your time series.
     :type timeseries: numpy.ndarray
@@ -100,7 +100,7 @@ def get_list_of_points(timeseries):
 
     :return list_of_observations: Numpy array of lists of observations after \
     polar transformation.
-    :return list_of_angles: Numpy array of lists of angles after polar \
+    :return list_of_angles: Numpy array of lists of angles after polar
     transformation.
     """
 
@@ -112,8 +112,8 @@ def check_input(timeseries):
-    """This function checks the input and raises one exception if it is too short \
-    or has the wrong type.
+    """This function checks the input and raises one exception if it is too
+    short or has the wrong type.
 
     :param timeseries: Your time series.
     :type timeseries: numpy.ndarray.
@@ -126,7 +126,7 @@ def check_input(timeseries): if timeseries.shape[0] > timeseries.shape[1]: dim = 0 else: - dim =1 + dim = 1 elif dimensions == 1: dim = 0 else: @@ -147,13 +147,13 @@ def check_input(timeseries): def file_to_da(filepath): import re - import pandas - import rasterio + # import pandas + # import rasterio import xarray # Open image da = xarray.open_rasterio(filepath) - transform = da.attrs['transform'] + # transform = da.attrs['transform'] # find datetime match = re.findall(r'\d{4}-\d{2}-\d{2}', filepath)[-1] @@ -182,8 +182,8 @@ def img2xarray(path, band): def bdc2xarray(cube_path, list_bands): - """This function reads a path with BDC ARD (Brazil Data Cube Analysis Ready Data) \ - and creates an xarray dataset. + """This function reads a path with BDC ARD (Brazil Data Cube Analysis + Ready Data) and creates an xarray dataset. :param cube_path: Path of folder with images. :type cube_path: string @@ -251,7 +251,7 @@ def error_fractal(): def list_metrics(): """This function lists the available metrics in stmetrics. """ - import stmetrics + # import stmetrics metrics = [*error_basics().keys(), *error_polar().keys(), *error_fractal().keys()] From 5bb53eafbe4a02429547599f0f83be53bd756e59 Mon Sep 17 00:00:00 2001 From: Luiz Gabriel Date: Wed, 10 Jan 2024 14:45:03 -0300 Subject: [PATCH 4/5] Add package sklear and update pyproject.toml - fixing dependencies. 
--- pyproject.toml | 39 ++++++++++++++++++++------------------- 1 file changed, 20 insertions(+), 19 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index db68e23..8cd7dc0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,30 +6,31 @@ authors = ["Brazil Data Cube Team "] readme = "README.rst" [tool.poetry.dependencies] -python = "=3.10.13" -dtaidistance = "^2.3.11" -pandas = "^2.1.4" -matplotlib = "^3.8.2" -numba = "^0.58.1" -numpy = "^1.26.2" -xarray = "^2023.12.0" -rasterio = "^1.3.9" -shapely = "^2.0.2" -affine = "^2.4.0" -descartes = "^1.1.0" -connected-components-3d = "^3.12.4" -rasterstats = "^0.19.0" -pointpats = "^2.4.0" -geopandas = "^0.14.1" -nolds = "^0.5.2" +python = "==3.10.13" +dtaidistance = "==2.3.11" +pandas = "==2.1.4" +matplotlib = "==3.8.2" +numba = "==0.58.1" +numpy = "==1.26.2" +xarray = "==2023.12.0" +rasterio = "==1.3.9" +shapely = "==2.0.2" +affine = "==2.4.0" +descartes = "==1.1.0" +connected-components-3d = "==3.12.4" +rasterstats = "==0.19.0" +pointpats = "==2.4.0" +geopandas = "==0.14.1" +nolds = "==0.5.2" urllib3 = "~1" -fastremap = "^1.14.0" +fastremap = "==1.14.0" +scikit-learn = "==1.3.2" [tool.poetry.group.dev.dependencies] -pytest = "=7.4.3" -flake8 = "=6.1.0" +pytest = "==7.4.3" +flake8 = "==6.1.0" [build-system] requires = ["poetry-core"] From 65545e8ebf0d766559ca361c17dd606f05421baa Mon Sep 17 00:00:00 2001 From: Luiz Gabriel Date: Sat, 1 Jun 2024 16:19:22 -0300 Subject: [PATCH 5/5] Relax python dependency -- >=3.10 and <4.0 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 8cd7dc0..3b37ce0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,7 +6,7 @@ authors = ["Brazil Data Cube Team "] readme = "README.rst" [tool.poetry.dependencies] -python = "==3.10.13" +python = "^3.10" dtaidistance = "==2.3.11" pandas = "==2.1.4" matplotlib = "==3.8.2"