
Commit 5722b35
Revert "Revert "Issue #110 - Made some changes which *should* result …
Browse files Browse the repository at this point in the history
…in the API documentation being generated for all projects.""

This reverts commit 975f747.
Rob Barry committed Aug 2, 2021
1 parent 19f191d commit 5722b35
Showing 24 changed files with 1,726 additions and 602 deletions.
43 changes: 42 additions & 1 deletion Jenkinsfile
@@ -111,6 +111,40 @@ pipeline {
stash name: "wheels", includes: "**/dist/*.whl"
}
}
stage('Documentation') {
agent {
dockerfile {
args '-u root -v /var/run/docker.sock:/var/run/docker.sock'
reuseNode true
}
}
steps {
script {
dir("devtools") {
sh "pipenv run sphinx-apidoc -e -F -M -a -P -e --tocfile index.rst -d 10 -o docs devtools \"setup*\""
sh "pipenv run sphinx-build -W -b html docs docs/_build/html"
}

dir("sharedmodels") {
sh "pipenv run sphinx-apidoc -e -F -M -a -P -e --tocfile index.rst -d 10 -o docs sharedmodels \"setup*\""
sh "pipenv run sphinx-build -W -b html docs docs/_build/html"
}

dir("pmd") {
sh "pipenv run sphinx-apidoc -e -F -M -a -P -e --tocfile index.rst -d 10 -o docs pmd \"setup*\""
sh "pipenv run sphinx-build -W -b html docs docs/_build/html"
}

dir("csvqb") {
sh "pipenv run sphinx-apidoc -e -F -M -a -P -e --tocfile index.rst -d 10 -o docs csvqb \"setup*\""
sh "pipenv run sphinx-build -W -b html docs docs/_build/html"
}

stash name: "docs", includes: "**/docs/_build/html/*"
}
}
}

}
post {
always {
@@ -131,7 +165,14 @@ pipeline {
echo "wheels stash does not exist"
}

archiveArtifacts artifacts: '**/dist/*.whl', fingerprint: true
try {
unstash name: "docs"
} catch (Exception e) {
echo "docs stash does not exist"
}


archiveArtifacts artifacts: '**/dist/*.whl, **/docs/_build/html/*', fingerprint: true
}
}
}
286 changes: 229 additions & 57 deletions csvqb/Pipfile.lock

Large diffs are not rendered by default.

@@ -1,8 +1,8 @@
"""Data Structure Definitions"""
from abc import ABC, abstractmethod
import pandas as pd
from typing import List


from csvqb.models.validationerror import ValidationError


@@ -14,31 +14,37 @@ class QbDataStructureDefinition(ABC):

    @abstractmethod
    def validate(self) -> List[ValidationError]:
        """Validate this component's metadata."""
        """
        Validate this component's metadata.
        """
        pass

    @abstractmethod
    def validate_data(self, data: pd.Series) -> List[ValidationError]:
        """Validate some data against this component's definition."""
        """
        Validate some data against this component's definition.
        """
        pass

    @abstractmethod
    def __str__(self) -> str:
        """Ensure that descendents implement the to string method to help users debug their data."""
        """
        Ensure that descendents implement the to string method to help users debug their data.
        """
        pass


class ColumnarQbDataStructureDefinition(QbDataStructureDefinition, ABC):
    """
    Base class representing Qb Data Structure Definitions which can be directly attached to a pd.DataFrame column.
    Base class representing Qb Data Structure Definitions which can be directly attached to a `pd.DataFrame` column.
    """

    pass


class MultiQbDataStructureDefinition(ColumnarQbDataStructureDefinition, ABC):
    """
    Base class representing an entity which defines a group of `QbDataStructureDefinition`s
    Base class representing an entity which defines a group of `QbDataStructureDefinition` s.
    """

    @abstractmethod
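For illustration, the abstract interface above is easiest to read alongside a toy implementation. The sketch below is illustrative only; the import path for QbDataStructureDefinition is an assumption inferred from the other file paths in this diff, and because ValidationError's constructor is not shown here, the example simply returns empty lists rather than constructing errors.

# Hypothetical subclass of the abstract base class shown above (sketch only).
# The QbDataStructureDefinition import path is assumed from the paths in this diff.
from typing import List

import pandas as pd

from csvqb.models.validationerror import ValidationError
from csvqb.models.cube.csvqb.components.datastructuredefinition import (
    QbDataStructureDefinition,
)


class ExampleComponent(QbDataStructureDefinition):
    """A toy component used only to demonstrate the required methods."""

    def __init__(self, label: str):
        self.label = label

    def validate(self) -> List[ValidationError]:
        # A real component would return ValidationErrors describing problems
        # with its metadata; this toy component has no metadata rules.
        return []

    def validate_data(self, data: pd.Series) -> List[ValidationError]:
        # A real component would check each value in `data` against its definition.
        return []

    def __str__(self) -> str:
        return f"ExampleComponent('{self.label}')"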
10 changes: 1 addition & 9 deletions csvqb/csvqb/models/cube/csvqb/components/dimension.py
@@ -68,15 +68,7 @@ def from_data(
        range_uri: Optional[str] = None,
    ) -> "NewQbDimension":
        """
        Creates a new dimension and code list from the columnar data provided.
        :param label:
        :param data:
        :param description:
        :param uri_safe_identifier:
        :param parent_dimension_uri:
        :param source_uri:
        :param range_uri:
        :return: NewQbDimension
        Creates a new dimension and code list from the columnar data provided.
        """
        return NewQbDimension(
            label,
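For illustration, a hypothetical call to the from_data factory above might look like the sketch below; the DataFrame and column name are invented, only the label and data arguments are passed, and the import path is assumed from the file path shown in this diff.

# Sketch only: build a new dimension (and its code list) from a column of data.
import pandas as pd

from csvqb.models.cube.csvqb.components.dimension import NewQbDimension

df = pd.DataFrame({"Region": ["North", "South", "East"]})
region_dimension = NewQbDimension.from_data("Region", df["Region"])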
4 changes: 1 addition & 3 deletions csvqb/csvqb/models/cube/csvqb/components/unit.py
@@ -84,9 +84,7 @@ def __str__(self) -> str:
    @staticmethod
    def new_units_from_data(data: PandasDataTypes) -> "QbMultiUnits":
        """
        Automatically generates new units from a units column.
        :param data: The data column defining the full list of available units.
        :return: QbMultiUnits
        Automatically generates new units from a units column.
        """
        return QbMultiUnits(
            [NewQbUnit(u) for u in set(pandas_input_to_columnar_str(data))]
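Similarly, QbMultiUnits.new_units_from_data can be exercised as in the sketch below; the column values are invented and the import path is assumed from the file path shown in this diff.

# Sketch only: derive one NewQbUnit per distinct value in a units column.
import pandas as pd

from csvqb.models.cube.csvqb.components.unit import QbMultiUnits

df = pd.DataFrame({"Unit": ["GBP", "GBP", "Count"]})
units = QbMultiUnits.new_units_from_data(df["Unit"])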
9 changes: 4 additions & 5 deletions csvqb/csvqb/tests/unit/cube/test_cube_errorvalidation.py
@@ -44,15 +44,14 @@ def test_two_column_same_title():
"""
If cube with two columns with the same title is defined, we get an error
"""
data = pd.DataFrame({
"Some Dimension": ["A", "B", "C"],
"Some Dimension": ["A", "B", "C"]
})
data = pd.DataFrame(
{"Some Dimension": ["A", "B", "C"], "Some Dimension": ["A", "B", "C"]}
)

metadata = CatalogMetadata("Some Dataset")
columns: List[CsvColumn] = [
SuppressedCsvColumn("Some Dimension"),
SuppressedCsvColumn("Some Dimension")
SuppressedCsvColumn("Some Dimension"),
]

cube = Cube(metadata, data, columns)
7 changes: 7 additions & 0 deletions csvqb/csvqb/tests/unit/writers/test_qbwriter.py
@@ -476,6 +476,7 @@ def test_virtual_columns_generated_for_multi_meas_obs_val():
assert "cube-name.csv#unit/some-unit" == virt_unit["valueUrl"]



def test_about_url_generation():
"""
Ensuring that when an aboutUrl is defined for a non-multimeasure cube, the resulting URL
@@ -511,7 +512,13 @@ def test_about_url_generation():
    cube = Cube(metadata, data, columns)

    actual_about_url = QbWriter(cube)._get_about_url()
    expected_about_url = "some-dataset.csv#obs/{+existing_dimension}/{+local_dimension}"
    expected_about_url = (
        "./some-dataset.csv#obs/{+existing_dimension}/{+local_dimension}"
    )
    assert actual_about_url == expected_about_url


2 changes: 1 addition & 1 deletion csvqb/csvqb/writers/skoscodelistwriter.py
@@ -1,5 +1,5 @@
"""
Write `NewQbCodeList`s to CSV-Ws as `skos:ConceptScheme`s with DCAT2 metadata.
Write `NewQbCodeList`s to CSV-Ws as `skos:ConceptScheme` s with DCAT2 metadata.
"""
import datetime
import json
82 changes: 82 additions & 0 deletions csvqb/docs/conf.py
@@ -0,0 +1,82 @@
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html

# -- Path setup --------------------------------------------------------------

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys

sys.path.insert(0, "..")


# -- Project information -----------------------------------------------------

project = "csvwlib-csvqb"
copyright = "2021, Office for National Statistics"
author = "Office for National Statistics"


# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.viewcode",
    "sphinx.ext.todo",
    "sphinx.ext.intersphinx",
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]


intersphinx_mapping = {
    "python": ("https://docs.python.org/3.9", None),
    "pandas": ("https://pandas.pydata.org/pandas-docs/dev", None),
    "numpy": ("https://numpy.org/doc/stable/", None),
    "rdflib": ("https://rdflib.readthedocs.io/en/stable/", None),
    "sharedmodels": (
        "../../../../sharedmodels/docs/_build/html/",
        "../../sharedmodels/docs/_build/html/objects.inv",
    ),
}

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = "en"

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]


# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_book_theme"

autodoc_default_options = {
    "members": True,
    "member-order": "bysource",
    "private-members": True,
    "inherited-members": True,
    "show-inheritance": True,
    "special-members": "__init__",
    "undoc-members": True,
    "exclude-members": "__weakref__,__dict__,__module__,__slots__,_abc_impl,__abstractmethods__,__annotations__,__orig_bases__,__parameters__,_is_protocol,__init_subclass__,__class_getitem__",
}
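The practical effect of the intersphinx_mapping above is that docstrings in this package can link out to the pandas, numpy, rdflib and sharedmodels documentation. A hypothetical docstring (the function and names below are invented for illustration) could use the standard Sphinx cross-reference roles like this:

# Sketch only: the cross-references below resolve via the intersphinx_mapping above.
def summarise(data):
    """
    Summarise a column of observations.

    :param data: a :class:`pandas.Series` of raw values.
    :return: a :class:`pandas.DataFrame` with one row per distinct value.
    """
    return data.value_counts().to_frame("count")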
7 changes: 5 additions & 2 deletions devtools/Pipfile
@@ -4,7 +4,6 @@ url = "https://pypi.org/simple"
verify_ssl = true

[dev-packages]
black = "21.7b0"
flake8 = "*"

[packages]
@@ -13,8 +12,12 @@ behave = "*"
pytest = "*"
rdflib = "*"
python-dateutil = "*"
pipenv-setup = "*"
csvw = "*"
sphinx = "*"
sphinx-book-theme = "*"
pipenv-setup = "*"
chardet = "*"
black = "*"

[requires]
python_version = "3.9"
