diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 216449083de123..23d4091fda4513 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -553,15 +553,7 @@ Airflow dependencies
.. note::
- On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver
- might work with Apache Airflow as of 20.3.3, but it might lead to errors in installation. It might
- depend on your choice of extras. In order to install Airflow you might need to either downgrade
- pip to version 20.2.4 ``pip install --upgrade pip==20.2.4`` or, in case you use Pip 20.3,
- you need to add option ``--use-deprecated legacy-resolver`` to your pip install command.
-
- While ``pip 20.3.3`` solved most of the ``teething`` problems of 20.3, this note will remain here until we
- set ``pip 20.3`` as official version in our CI pipeline where we are testing the installation as well.
- Due to those constraints, only ``pip`` installation is currently officially supported.
+ Only ``pip`` installation is currently officially supported.
While they are some successes with using other tools like `poetry `_ or
`pip-tools `_, they do not share the same workflow as
@@ -788,15 +780,7 @@ Pinned constraint files
.. note::
- On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver
- might work with Apache Airflow as of 20.3.3, but it might lead to errors in installation. It might
- depend on your choice of extras. In order to install Airflow you might need to either downgrade
- pip to version 20.2.4 ``pip install --upgrade pip==20.2.4`` or, in case you use Pip 20.3,
- you need to add option ``--use-deprecated legacy-resolver`` to your pip install command.
-
- While ``pip 20.3.3`` solved most of the ``teething`` problems of 20.3, this note will remain here until we
- set ``pip 20.3`` as official version in our CI pipeline where we are testing the installation as well.
- Due to those constraints, only ``pip`` installation is currently officially supported.
+   Only ``pip`` installation is currently officially supported.
While they are some successes with using other tools like `poetry `_ or
`pip-tools `_, they do not share the same workflow as
diff --git a/CONTRIBUTORS_QUICK_START.rst b/CONTRIBUTORS_QUICK_START.rst
index caa9014eeb1640..ea5f547f16aaba 100644
--- a/CONTRIBUTORS_QUICK_START.rst
+++ b/CONTRIBUTORS_QUICK_START.rst
@@ -167,15 +167,7 @@ Setup Airflow with Breeze and PyCharm
.. note::
- On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver
- might work with Apache Airflow as of 20.3.3, but it might lead to errors in installation. It might
- depend on your choice of extras. In order to install Airflow you might need to either downgrade
- pip to version 20.2.4 ``pip install --upgrade pip==20.2.4`` or, in case you use Pip 20.3,
- you need to add option ``--use-deprecated legacy-resolver`` to your pip install command.
-
- While ``pip 20.3.3`` solved most of the ``teething`` problems of 20.3, this note will remain here until we
- set ``pip 20.3`` as official version in our CI pipeline where we are testing the installation as well.
- Due to those constraints, only ``pip`` installation is currently officially supported.
+ Only ``pip`` installation is currently officially supported.
While they are some successes with using other tools like `poetry `_ or
`pip-tools `_, they do not share the same workflow as
diff --git a/Dockerfile b/Dockerfile
index b86a3c84ff2a1a..a75cfb28bee574 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -44,7 +44,7 @@ ARG AIRFLOW_GID="50000"
ARG PYTHON_BASE_IMAGE="python:3.6-slim-buster"
-ARG AIRFLOW_PIP_VERSION=20.2.4
+ARG AIRFLOW_PIP_VERSION=21.1
# By default PIP has progress bar but you can disable it.
ARG PIP_PROGRESS_BAR="on"
diff --git a/Dockerfile.ci b/Dockerfile.ci
index 2c8d140e6684ad..881760fa9c143c 100644
--- a/Dockerfile.ci
+++ b/Dockerfile.ci
@@ -212,7 +212,7 @@ ARG AIRFLOW_PRE_CACHED_PIP_PACKAGES="true"
# By default in the image, we are installing all providers when installing from sources
ARG INSTALL_PROVIDERS_FROM_SOURCES="true"
ARG INSTALL_FROM_PYPI="true"
-ARG AIRFLOW_PIP_VERSION=20.2.4
+ARG AIRFLOW_PIP_VERSION=21.1
# Setup PIP
# By default PIP install run without cache to make image smaller
ARG PIP_NO_CACHE_DIR="true"
diff --git a/IMAGES.rst b/IMAGES.rst
index 7ab1ca74ee0dd3..34332e438c9a0b 100644
--- a/IMAGES.rst
+++ b/IMAGES.rst
@@ -172,15 +172,7 @@ This will build the image using command similar to:
.. note::
- On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver
- might work with Apache Airflow as of 20.3.3, but it might lead to errors in installation. It might
- depend on your choice of extras. In order to install Airflow you might need to either downgrade
- pip to version 20.2.4 ``pip install --upgrade pip==20.2.4`` or, in case you use Pip 20.3,
- you need to add option ``--use-deprecated legacy-resolver`` to your pip install command.
-
- While ``pip 20.3.3`` solved most of the ``teething`` problems of 20.3, this note will remain here until we
- set ``pip 20.3`` as official version in our CI pipeline where we are testing the installation as well.
- Due to those constraints, only ``pip`` installation is currently officially supported.
+ Only ``pip`` installation is currently officially supported.
While they are some successes with using other tools like `poetry `_ or
`pip-tools `_, they do not share the same workflow as
@@ -632,7 +624,7 @@ The following build arguments (``--build-arg`` in docker build command) can be u
| ``ADDITIONAL_RUNTIME_APT_ENV`` | | Additional env variables defined |
| | | when installing runtime deps |
+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_PIP_VERSION`` | ``20.2.4`` | PIP version used. |
+| ``AIRFLOW_PIP_VERSION`` | ``21.1`` | PIP version used. |
+------------------------------------------+------------------------------------------+------------------------------------------+
| ``PIP_PROGRESS_BAR`` | ``on`` | Progress bar for PIP installation |
+------------------------------------------+------------------------------------------+------------------------------------------+
diff --git a/INSTALL b/INSTALL
index 46d15f62aa87b9..919c4f540d83a0 100644
--- a/INSTALL
+++ b/INSTALL
@@ -28,14 +28,6 @@ java -jar apache-rat.jar -E ./.rat-excludes -d .
python3 -m venv PATH_TO_YOUR_VENV
source PATH_TO_YOUR_VENV/bin/activate
-NOTE!!
-
-On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver
-does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice
-of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
-``pip install --upgrade pip==20.2.4`` or, in case you use Pip 20.3, you need to add option
-``--use-deprecated legacy-resolver`` to your pip install command.
-
# [required] building and installing by pip (preferred)
pip install .
diff --git a/LOCAL_VIRTUALENV.rst b/LOCAL_VIRTUALENV.rst
index caf94a56658821..15f828d4627917 100644
--- a/LOCAL_VIRTUALENV.rst
+++ b/LOCAL_VIRTUALENV.rst
@@ -63,15 +63,7 @@ Extra Packages
.. note::
- On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver
- might work with Apache Airflow as of 20.3.3, but it might lead to errors in installation. It might
- depend on your choice of extras. In order to install Airflow you might need to either downgrade
- pip to version 20.2.4 ``pip install --upgrade pip==20.2.4`` or, in case you use Pip 20.3,
- you need to add option ``--use-deprecated legacy-resolver`` to your pip install command.
-
- While ``pip 20.3.3`` solved most of the ``teething`` problems of 20.3, this note will remain here until we
- set ``pip 20.3`` as official version in our CI pipeline where we are testing the installation as well.
- Due to those constraints, only ``pip`` installation is currently officially supported.
+ Only ``pip`` installation is currently officially supported.
While they are some successes with using other tools like `poetry `_ or
`pip-tools `_, they do not share the same workflow as
@@ -137,15 +129,7 @@ To create and initialize the local virtualenv:
.. note::
- On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver
- might work with Apache Airflow as of 20.3.3, but it might lead to errors in installation. It might
- depend on your choice of extras. In order to install Airflow you might need to either downgrade
- pip to version 20.2.4 ``pip install --upgrade pip==20.2.4`` or, in case you use Pip 20.3,
- you need to add option ``--use-deprecated legacy-resolver`` to your pip install command.
-
- While ``pip 20.3.3`` solved most of the ``teething`` problems of 20.3, this note will remain here until we
- set ``pip 20.3`` as official version in our CI pipeline where we are testing the installation as well.
- Due to those constraints, only ``pip`` installation is currently officially supported.
+ Only ``pip`` installation is currently officially supported.
While they are some successes with using other tools like `poetry `_ or
`pip-tools `_, they do not share the same workflow as
diff --git a/README.md b/README.md
index 0b7c50401c17fb..01a02fa507e92a 100644
--- a/README.md
+++ b/README.md
@@ -149,14 +149,7 @@ correct Airflow tag/version/branch and Python versions in the URL.
NOTE!!!
-On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver
-might work with Apache Airflow as of 20.3.3, but it might lead to errors in installation. It might
-depend on your choice of extras. In order to install Airflow reliably, you might need to either downgrade
-pip to version 20.2.4 `pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3,
-you might need to add option] `--use-deprecated legacy-resolver` to your pip install command.
-While `pip 20.3.3` solved most of the `teething` problems of 20.3, this note will remain here until we
-set `pip 20.3` as official version in our CI pipeline where we are testing the installation as well.
-Due to those constraints, only `pip` installation is currently officially supported.
+Only `pip` installation is currently officially supported.
While they are some successes with using other tools like [poetry](https://python-poetry.org) or
[pip-tools](https://pypi.org/project/pip-tools), they do not share the same workflow as
diff --git a/UPDATING.md b/UPDATING.md
index ac44862a0a6e1d..24435b5e0db9f1 100644
--- a/UPDATING.md
+++ b/UPDATING.md
@@ -1795,13 +1795,6 @@ you should use `pip install apache-airflow[apache.atlas]`.
NOTE!
-On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver
-does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice
-of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
-`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option
-`--use-deprecated legacy-resolver` to your pip install command.
-
-
If you want to install integration for Microsoft Azure, then instead of
```
diff --git a/airflow/provider.yaml.schema.json b/airflow/provider.yaml.schema.json
index 2216766b7c9616..a393461fad97c0 100644
--- a/airflow/provider.yaml.schema.json
+++ b/airflow/provider.yaml.schema.json
@@ -191,6 +191,10 @@
"items": {
"type": "string"
}
+ },
+ "additional-extras": {
+ "type": "object",
+ "description": "Additional extras that the provider should have"
}
},
"additionalProperties": false,
diff --git a/airflow/providers/apache/beam/CHANGELOG.rst b/airflow/providers/apache/beam/CHANGELOG.rst
index c3129b48e02458..b48aaf5a324f4d 100644
--- a/airflow/providers/apache/beam/CHANGELOG.rst
+++ b/airflow/providers/apache/beam/CHANGELOG.rst
@@ -19,6 +19,64 @@
Changelog
---------
+2.0.0
+.....
+
+Breaking changes
+~~~~~~~~~~~~~~~~
+
+Integration with the ``google`` provider
+````````````````````````````````````````
+
+In 2.0.0 version of the provider we've changed the way of integrating with the ``google`` provider.
+The previous versions of both providers caused conflicts when trying to install them together
+using PIP > 20.2.4. The conflict is not detected by PIP 20.2.4 and below but it was there and
+the version of ``Google BigQuery`` python client was not matching on both sides. As a result, when
+both ``apache.beam`` and ``google`` providers were installed, some features of the ``BigQuery`` operators
+might not work properly. This was caused by the ``apache-beam`` client not yet supporting the new google
+python clients when ``apache-beam[gcp]`` extra was used. The ``apache-beam[gcp]`` extra is used
+by ``Dataflow`` operators and while they might work with the newer version of the ``Google BigQuery``
+python client, it is not guaranteed.
+
+This version introduces additional extra requirement for the ``apache.beam`` extra of the ``google`` provider
+and symmetrically the additional requirement for the ``google`` extra of the ``apache.beam`` provider.
+Both ``google`` and ``apache.beam`` provider do not use those extras by default, but you can specify
+them when installing the providers. The consequence of that is that some functionality of the ``Dataflow``
+operators might not be available.
+
+Unfortunately the only ``complete`` solution to the problem is for the ``apache.beam`` to migrate to the
+new (>=2.0.0) Google Python clients.
+
+This is the extra for the ``google`` provider:
+
+.. code-block:: python
+
+ extras_require={
+ ...
+ 'apache.beam': ['apache-airflow-providers-apache-beam', 'apache-beam[gcp]'],
+ ....
+ },
+
+And likewise this is the extra for the ``apache.beam`` provider:
+
+.. code-block:: python
+
+ extras_require={'google': ['apache-airflow-providers-google', 'apache-beam[gcp]']},
+
+You can still run this with PIP version <= 20.2.4 and go back to the previous behaviour:
+
+.. code-block:: shell
+
+ pip install apache-airflow-providers-google[apache.beam]
+
+or
+
+.. code-block:: shell
+
+ pip install apache-airflow-providers-apache-beam[google]
+
+But be aware that some ``BigQuery`` operators functionality might not be available in this case.
+
1.0.1
.....
diff --git a/airflow/providers/apache/beam/README.md b/airflow/providers/apache/beam/README.md
index 206fa4e8f1f569..ea76b4777eaf41 100644
--- a/airflow/providers/apache/beam/README.md
+++ b/airflow/providers/apache/beam/README.md
@@ -41,14 +41,6 @@ are in `airflow.providers.apache.beam` python package.
## Installation
-NOTE!
-
-On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver
-does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice
-of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
-`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option
-`--use-deprecated legacy-resolver` to your pip install command.
-
You can install this package on top of an existing airflow 2.* installation via
`pip install apache-airflow-providers-apache-beam`
diff --git a/airflow/providers/apache/beam/provider.yaml b/airflow/providers/apache/beam/provider.yaml
index 8634f89c8abbf3..46807a043f6af5 100644
--- a/airflow/providers/apache/beam/provider.yaml
+++ b/airflow/providers/apache/beam/provider.yaml
@@ -22,6 +22,7 @@ description: |
`Apache Beam `__.
versions:
+ - 2.0.0
- 1.0.1
- 1.0.0
@@ -41,3 +42,6 @@ hooks:
- integration-name: Apache Beam
python-modules:
- airflow.providers.apache.beam.hooks.beam
+
+additional-extras:
+ google: apache-beam[gcp]
diff --git a/airflow/providers/apache/hive/transfers/mssql_to_hive.py b/airflow/providers/apache/hive/transfers/mssql_to_hive.py
index 5e5af8c5dbe867..090a70285af77d 100644
--- a/airflow/providers/apache/hive/transfers/mssql_to_hive.py
+++ b/airflow/providers/apache/hive/transfers/mssql_to_hive.py
@@ -15,7 +15,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-
+# pylint: disable=no-member
"""This module contains operator to move data from MSSQL to Hive."""
from collections import OrderedDict
diff --git a/airflow/providers/google/CHANGELOG.rst b/airflow/providers/google/CHANGELOG.rst
index bdc6b25d23e77f..79cc9335213051 100644
--- a/airflow/providers/google/CHANGELOG.rst
+++ b/airflow/providers/google/CHANGELOG.rst
@@ -19,6 +19,64 @@
Changelog
---------
+3.0.0
+.....
+
+Breaking changes
+~~~~~~~~~~~~~~~~
+
+Integration with the ``apache.beam`` provider
+`````````````````````````````````````````````
+
+In 3.0.0 version of the provider we've changed the way of integrating with the ``apache.beam`` provider.
+The previous versions of both providers caused conflicts when trying to install them together
+using PIP > 20.2.4. The conflict is not detected by PIP 20.2.4 and below but it was there and
+the version of ``Google BigQuery`` python client was not matching on both sides. As a result, when
+both ``apache.beam`` and ``google`` providers were installed, some features of the ``BigQuery`` operators
+might not work properly. This was caused by the ``apache-beam`` client not yet supporting the new google
+python clients when ``apache-beam[gcp]`` extra was used. The ``apache-beam[gcp]`` extra is used
+by ``Dataflow`` operators and while they might work with the newer version of the ``Google BigQuery``
+python client, it is not guaranteed.
+
+This version introduces additional extra requirement for the ``apache.beam`` extra of the ``google`` provider
+and symmetrically the additional requirement for the ``google`` extra of the ``apache.beam`` provider.
+Both ``google`` and ``apache.beam`` provider do not use those extras by default, but you can specify
+them when installing the providers. The consequence of that is that some functionality of the ``Dataflow``
+operators might not be available.
+
+Unfortunately the only ``complete`` solution to the problem is for the ``apache.beam`` to migrate to the
+new (>=2.0.0) Google Python clients.
+
+This is the extra for the ``google`` provider:
+
+.. code-block:: python
+
+ extras_require={
+ ...
+ 'apache.beam': ['apache-airflow-providers-apache-beam', 'apache-beam[gcp]'],
+ ....
+ },
+
+And likewise this is the extra for the ``apache.beam`` provider:
+
+.. code-block:: python
+
+ extras_require={'google': ['apache-airflow-providers-google', 'apache-beam[gcp]']},
+
+You can still run this with PIP version <= 20.2.4 and go back to the previous behaviour:
+
+.. code-block:: shell
+
+ pip install apache-airflow-providers-google[apache.beam]
+
+or
+
+.. code-block:: shell
+
+ pip install apache-airflow-providers-apache-beam[google]
+
+But be aware that some ``BigQuery`` operators functionality might not be available in this case.
+
2.2.0
.....
diff --git a/airflow/providers/google/provider.yaml b/airflow/providers/google/provider.yaml
index db2f2d59cd7855..644f093be22971 100644
--- a/airflow/providers/google/provider.yaml
+++ b/airflow/providers/google/provider.yaml
@@ -29,6 +29,7 @@ description: |
- `Google Workspace `__ (formerly Google Suite)
versions:
+ - 3.0.0
- 2.2.0
- 2.1.0
- 2.0.0
@@ -742,3 +743,6 @@ extra-links:
- airflow.providers.google.cloud.operators.bigquery.BigQueryConsoleLink
- airflow.providers.google.cloud.operators.bigquery.BigQueryConsoleIndexableLink
- airflow.providers.google.cloud.operators.mlengine.AIPlatformConsoleLink
+
+additional-extras:
+ apache.beam: apache-beam[gcp]
diff --git a/airflow/providers/microsoft/mssql/hooks/mssql.py b/airflow/providers/microsoft/mssql/hooks/mssql.py
index 4acdb52d186420..928d0c45d3fec1 100644
--- a/airflow/providers/microsoft/mssql/hooks/mssql.py
+++ b/airflow/providers/microsoft/mssql/hooks/mssql.py
@@ -15,6 +15,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
+# pylint: disable=no-member
"""Microsoft SQLServer hook module"""
import pymssql
diff --git a/dev/provider_packages/PROVIDER_INDEX_TEMPLATE.rst.jinja2 b/dev/provider_packages/PROVIDER_INDEX_TEMPLATE.rst.jinja2
index 21e379931bba66..a95376ba4cd5b6 100644
--- a/dev/provider_packages/PROVIDER_INDEX_TEMPLATE.rst.jinja2
+++ b/dev/provider_packages/PROVIDER_INDEX_TEMPLATE.rst.jinja2
@@ -47,15 +47,6 @@ are in ``{{FULL_PACKAGE_NAME}}`` python package.
Installation
------------
-.. note::
-
- On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver
- does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice
- of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
- ``pip install --upgrade pip==20.2.4`` or, in case you use Pip 20.3, you need to add option
- ``--use-deprecated legacy-resolver`` to your pip install command.
-
-
You can install this package on top of an existing airflow 2.* installation via
``pip install {{PACKAGE_PIP_NAME}}``
{%- if PIP_REQUIREMENTS %}
diff --git a/dev/provider_packages/PROVIDER_README_TEMPLATE.rst.jinja2 b/dev/provider_packages/PROVIDER_README_TEMPLATE.rst.jinja2
index 9975925481511b..7847d797a2a77a 100644
--- a/dev/provider_packages/PROVIDER_README_TEMPLATE.rst.jinja2
+++ b/dev/provider_packages/PROVIDER_README_TEMPLATE.rst.jinja2
@@ -44,14 +44,6 @@ in the `documentation Dict[str, List[str]]:
if cross_provider_dependencies.get(provider_package_id)
else {}
)
+ provider_yaml_dict = get_provider_yaml(provider_package_id)
+ additional_extras = provider_yaml_dict.get('additional-extras')
+ if additional_extras:
+ for key in additional_extras:
+ if key in extras_dict:
+ extras_dict[key].append(additional_extras[key])
+ else:
+                extras_dict[key] = [additional_extras[key]]
return extras_dict
@@ -1295,11 +1303,11 @@ def convert_to_provider_info(provider_info: Dict[str, Any]) -> Dict[str, Any]:
This method converts the full provider.yaml schema into the limited version needed at runtime.
"""
updated_provider_info = deepcopy(provider_info)
- expression = jsonpath_ng.parse("[hooks,operators,integrations,sensors,transfers]")
+ expression = jsonpath_ng.parse("[hooks,operators,integrations,sensors,transfers,additional-extras]")
return expression.filter(lambda x: True, updated_provider_info)
-def get_provider_info_from_provider_yaml(provider_package_id: str) -> Dict[str, Any]:
+def get_provider_yaml(provider_package_id: str) -> Dict[str, Any]:
"""
Retrieves provider info from the provider yaml file. The provider yaml file contains more information
than provider_info that is used at runtime. This method converts the full provider yaml file into
@@ -1312,6 +1320,18 @@ def get_provider_info_from_provider_yaml(provider_package_id: str) -> Dict[str,
raise Exception(f"The provider.yaml file is missing: {provider_yaml_file_name}")
with open(provider_yaml_file_name) as provider_file:
provider_yaml_dict = yaml.load(provider_file, SafeLoader) # noqa
+ return provider_yaml_dict
+
+
+def get_provider_info_from_provider_yaml(provider_package_id: str) -> Dict[str, Any]:
+ """
+ Retrieves provider info from the provider yaml file. The provider yaml file contains more information
+ than provider_info that is used at runtime. This method converts the full provider yaml file into
+ stripped-down provider info and validates it against deprecated 2.0.0 schema and runtime schema.
+ :param provider_package_id: package id to retrieve provider.yaml from
+ :return: provider_info dictionary
+ """
+ provider_yaml_dict = get_provider_yaml(provider_package_id=provider_package_id)
provider_info = convert_to_provider_info(provider_yaml_dict)
validate_provider_info_with_2_0_0_schema(provider_info)
validate_provider_info_with_runtime_schema(provider_info)
diff --git a/docs/apache-airflow/installation.rst b/docs/apache-airflow/installation.rst
index 16917a9eca4cd3..f297a348fecbbc 100644
--- a/docs/apache-airflow/installation.rst
+++ b/docs/apache-airflow/installation.rst
@@ -56,12 +56,7 @@ tested with Python 3.6, 3.7, and 3.8, but does not yet support Python 3.9.
Installation tools
''''''''''''''''''
-The official way of installing Airflow is with the ``pip`` tool.
-There was a recent (November 2020) change in resolver, so currently only 20.2.4 version is officially
-supported, although you might have a success with 20.3.3+ version (to be confirmed if all initial
-issues from ``pip`` 20.3.0 release have been fixed in 20.3.3). In order to install Airflow you need to
-either downgrade pip to version 20.2.4 ``pip install --upgrade pip==20.2.4`` or, in case you use Pip 20.3, you need to add option
-``--use-deprecated legacy-resolver`` to your pip install command.
+Only ``pip`` installation is currently officially supported.
While there are some successes with using other tools like `poetry `_ or
`pip-tools `_, they do not share the same workflow as
diff --git a/docs/apache-airflow/start/local.rst b/docs/apache-airflow/start/local.rst
index 407f18f0812150..c59a05a31ddb92 100644
--- a/docs/apache-airflow/start/local.rst
+++ b/docs/apache-airflow/start/local.rst
@@ -24,15 +24,7 @@ This quick start guide will help you bootstrap a Airflow standalone instance on
.. note::
- On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver
- might work with Apache Airflow as of 20.3.3, but it might lead to errors in installation. It might
- depend on your choice of extras. In order to install Airflow you might need to either downgrade
- pip to version 20.2.4 ``pip install --upgrade pip==20.2.4`` or, in case you use Pip 20.3,
- you need to add option ``--use-deprecated legacy-resolver`` to your pip install command.
-
- While ``pip 20.3.3`` solved most of the ``teething`` problems of 20.3, this note will remain here until we
- set ``pip 20.3`` as official version in our CI pipeline where we are testing the installation as well.
- Due to those constraints, only ``pip`` installation is currently officially supported.
+ Only ``pip`` installation is currently officially supported.
While they are some successes with using other tools like `poetry `_ or
`pip-tools `_, they do not share the same workflow as
diff --git a/docs/apache-airflow/upgrade-check.rst b/docs/apache-airflow/upgrade-check.rst
index 18bbec00945363..0c94f367abb079 100644
--- a/docs/apache-airflow/upgrade-check.rst
+++ b/docs/apache-airflow/upgrade-check.rst
@@ -38,15 +38,7 @@ time of writing: 1.10.15) and then to download this package and run the script a
.. note::
- On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver
- might work with Apache Airflow as of 20.3.3, but it might lead to errors in installation. It might
- depend on your choice of extras. In order to install Airflow you might need to either downgrade
- pip to version 20.2.4 ``pip install --upgrade pip==20.2.4`` or, in case you use Pip 20.3,
- you need to add option ``--use-deprecated legacy-resolver`` to your pip install command.
-
- While ``pip 20.3.3`` solved most of the ``teething`` problems of 20.3, this note will remain here until we
- set ``pip 20.3`` as official version in our CI pipeline where we are testing the installation as well.
- Due to those constraints, only ``pip`` installation is currently officially supported.
+ Only ``pip`` installation is currently officially supported.
While they are some successes with using other tools like `poetry `_ or
`pip-tools `_, they do not share the same workflow as
diff --git a/docs/docker-stack/build-arg-ref.rst b/docs/docker-stack/build-arg-ref.rst
index d3d35400729055..20aa77a43911a4 100644
--- a/docs/docker-stack/build-arg-ref.rst
+++ b/docs/docker-stack/build-arg-ref.rst
@@ -45,7 +45,7 @@ Those are the most common arguments that you use when you want to build a custom
+------------------------------------------+------------------------------------------+------------------------------------------+
| ``AIRFLOW_USER_HOME_DIR`` | ``/home/airflow`` | Home directory of the Airflow user. |
+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_PIP_VERSION`` | ``20.2.4`` | PIP version used. |
+| ``AIRFLOW_PIP_VERSION`` | ``21.1`` | PIP version used. |
+------------------------------------------+------------------------------------------+------------------------------------------+
| ``PIP_PROGRESS_BAR`` | ``on`` | Progress bar for PIP installation |
+------------------------------------------+------------------------------------------+------------------------------------------+
diff --git a/scripts/ci/libraries/_initialization.sh b/scripts/ci/libraries/_initialization.sh
index b034710041bfe9..1b2b7f210c07b7 100644
--- a/scripts/ci/libraries/_initialization.sh
+++ b/scripts/ci/libraries/_initialization.sh
@@ -410,8 +410,7 @@ function initialization::initialize_image_build_variables() {
export INSTALLED_PROVIDERS
export INSTALLED_EXTRAS="async,amazon,celery,cncf.kubernetes,docker,dask,elasticsearch,ftp,grpc,hashicorp,http,imap,ldap,google,microsoft.azure,mysql,postgres,redis,sendgrid,sftp,slack,ssh,statsd,virtualenv"
- # default version of PIP USED (This has to be < 20.3 until https://github.com/apache/airflow/issues/12838 is solved)
- AIRFLOW_PIP_VERSION=${AIRFLOW_PIP_VERSION:="20.2.4"}
+ AIRFLOW_PIP_VERSION=${AIRFLOW_PIP_VERSION:="21.1"}
export AIRFLOW_PIP_VERSION
# We also pin version of wheel used to get consistent builds
diff --git a/scripts/in_container/_in_container_utils.sh b/scripts/in_container/_in_container_utils.sh
index a7a37df52ac40b..8bdcc295302d4e 100644
--- a/scripts/in_container/_in_container_utils.sh
+++ b/scripts/in_container/_in_container_utils.sh
@@ -268,12 +268,6 @@ function install_airflow_from_sdist() {
pip install "${airflow_package}${extras}"
}
-function install_remaining_dependencies() {
- group_start "Installs all remaining dependencies that are not installed by '${AIRFLOW_EXTRAS}' "
- pip install apache-beam[gcp]
- group_end
-}
-
function uninstall_airflow() {
pip uninstall -y apache-airflow || true
find /root/airflow/ -type f -print0 | xargs -0 rm -f --
diff --git a/scripts/in_container/run_install_and_test_provider_packages.sh b/scripts/in_container/run_install_and_test_provider_packages.sh
index 53f4372d4a0d67..6dd19084f68430 100755
--- a/scripts/in_container/run_install_and_test_provider_packages.sh
+++ b/scripts/in_container/run_install_and_test_provider_packages.sh
@@ -196,7 +196,6 @@ function discover_all_field_behaviours() {
setup_provider_packages
verify_parameters
install_airflow_as_specified
-install_remaining_dependencies
install_provider_packages
import_all_provider_classes
diff --git a/scripts/in_container/run_prepare_provider_documentation.sh b/scripts/in_container/run_prepare_provider_documentation.sh
index 71a22917dc8b12..34d22ad268ed9b 100755
--- a/scripts/in_container/run_prepare_provider_documentation.sh
+++ b/scripts/in_container/run_prepare_provider_documentation.sh
@@ -99,11 +99,6 @@ export PYTHONPATH="${AIRFLOW_SOURCES}"
verify_suffix_versions_for_package_preparation
install_supported_pip_version
-
-# install extra packages missing in devel_ci
-# TODO: remove it when devel_all == devel_ci
-install_remaining_dependencies
-
import_all_provider_classes
verify_provider_packages_named_properly
diff --git a/scripts/in_container/run_prepare_provider_packages.sh b/scripts/in_container/run_prepare_provider_packages.sh
index 7ddcd3a328f837..4cb8d507470cb9 100755
--- a/scripts/in_container/run_prepare_provider_packages.sh
+++ b/scripts/in_container/run_prepare_provider_packages.sh
@@ -169,7 +169,6 @@ function rename_packages_if_needed() {
popd >/dev/null
}
-install_remaining_dependencies
setup_provider_packages
cd "${PROVIDER_PACKAGES_DIR}" || exit 1
diff --git a/setup.py b/setup.py
index 675c9690cbbd60..6582428c7c04e8 100644
--- a/setup.py
+++ b/setup.py
@@ -198,7 +198,7 @@ def get_sphinx_theme_version() -> str:
'watchtower~=0.7.3',
]
apache_beam = [
- 'apache-beam[gcp]',
+ 'apache-beam>=2.20.0',
]
async_packages = [
'eventlet>= 0.9.7',
@@ -502,7 +502,7 @@ def get_sphinx_theme_version() -> str:
'paramiko',
'pipdeptree',
'pre-commit',
- 'pylint>=2.7.0',
+ 'pylint~=2.7.4',
'pysftp',
'pytest~=6.0',
'pytest-cov',
@@ -756,20 +756,6 @@ def add_all_deprecated_provider_packages() -> None:
]
)
-# Those packages are excluded because they break tests and they are not needed to run our test suite.
-# This can be removed as soon as we get non-conflicting
-# requirements for the apache-beam as well.
-#
-# Currently Apache Beam has very narrow and old dependencies for 'mock' package which
-# are required only for our tests.
-# once https://github.com/apache/beam/pull/14328 is solved and new version of apache-beam is released
-# we will be able to remove this exclusion and get rid of `install_remaining_dependencies`
-# function in `scripts/in_container`.
-#
-PACKAGES_EXCLUDED_FOR_CI = [
- 'apache-beam',
-]
-
def is_package_excluded(package: str, exclusion_list: List[str]):
"""
@@ -788,13 +774,7 @@ def is_package_excluded(package: str, exclusion_list: List[str]):
if not is_package_excluded(package=package, exclusion_list=PACKAGES_EXCLUDED_FOR_ALL)
]
-devel_ci = [
- package
- for package in devel_all
- if not is_package_excluded(
- package=package, exclusion_list=PACKAGES_EXCLUDED_FOR_CI + PACKAGES_EXCLUDED_FOR_ALL
- )
-]
+devel_ci = devel_all
# Those are extras that we have to add for development purposes
diff --git a/tests/providers/apache/hive/transfers/test_mssql_to_hive.py b/tests/providers/apache/hive/transfers/test_mssql_to_hive.py
index 671cc8b855baec..953affd7cba8a7 100644
--- a/tests/providers/apache/hive/transfers/test_mssql_to_hive.py
+++ b/tests/providers/apache/hive/transfers/test_mssql_to_hive.py
@@ -15,7 +15,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
-
+# pylint: disable=no-member
import unittest
from collections import OrderedDict