Squelch more deprecation warnings (#21003)
uranusjr authored Jan 21, 2022
1 parent 754d8bc commit 506efb6
Showing 10 changed files with 73 additions and 75 deletions.
6 changes: 5 additions & 1 deletion airflow/providers/google/cloud/transfers/s3_to_gcs.py
@@ -21,9 +21,13 @@

 from airflow.exceptions import AirflowException
 from airflow.providers.amazon.aws.hooks.s3 import S3Hook
-from airflow.providers.amazon.aws.operators.s3_list import S3ListOperator
 from airflow.providers.google.cloud.hooks.gcs import GCSHook, _parse_gcs_url, gcs_object_is_directory

+try:
+    from airflow.providers.amazon.aws.operators.s3 import S3ListOperator
+except ImportError:
+    from airflow.providers.amazon.aws.operators.s3_list import S3ListOperator
+
 if TYPE_CHECKING:
     from airflow.utils.context import Context
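Note: the try/except import gives the transfer operator a graceful fallback while the Amazon provider's operators move from the `s3_list` module to the consolidated `s3` module. A minimal sketch of the same compatibility idiom, with `mypackage.new_mod` and `mypackage.old_mod` as hypothetical placeholder names:

    # Prefer the new import location; fall back to the old one so the code
    # still works against older installed versions of the dependency.
    # Module and class names here are hypothetical, for illustration only.
    try:
        from mypackage.new_mod import SomeOperator
    except ImportError:
        from mypackage.old_mod import SomeOperator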
@@ -23,7 +23,7 @@
 from datetime import datetime, timedelta

 from airflow import DAG
-from airflow.providers.tableau.operators.tableau_refresh_workbook import TableauRefreshWorkbookOperator
+from airflow.providers.tableau.operators.tableau import TableauOperator
 from airflow.providers.tableau.sensors.tableau_job_status import TableauJobStatusSensor

 with DAG(
@@ -35,15 +35,21 @@
     tags=['example'],
 ) as dag:
     # Refreshes a workbook and waits until it succeeds.
-    task_refresh_workbook_blocking = TableauRefreshWorkbookOperator(
-        workbook_name='MyWorkbook',
-        blocking=True,
+    task_refresh_workbook_blocking = TableauOperator(
+        resource='workbooks',
+        method='refresh',
+        find='MyWorkbook',
+        match_with='name',
+        blocking_refresh=True,
         task_id='refresh_tableau_workbook_blocking',
     )
     # Refreshes a workbook and does not wait until it succeeds.
-    task_refresh_workbook_non_blocking = TableauRefreshWorkbookOperator(
-        workbook_name='MyWorkbook',
-        blocking=False,
+    task_refresh_workbook_non_blocking = TableauOperator(
+        resource='workbooks',
+        method='refresh',
+        find='MyWorkbook',
+        match_with='name',
+        blocking_refresh=False,
         task_id='refresh_tableau_workbook_non_blocking',
     )
     # The following task queries the status of the workbook refresh job until it succeeds.
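Note: the deprecated TableauRefreshWorkbookOperator is replaced here by the generic TableauOperator, which names the resource and method explicitly instead of baking them into the operator class. A rough reading of the mapping, based only on the parameters shown in the example DAG above:

    # Old (deprecated):
    #   TableauRefreshWorkbookOperator(workbook_name='MyWorkbook', blocking=True, ...)
    # New, per the example DAG above:
    refresh = TableauOperator(
        resource='workbooks',      # Tableau resource type to act on
        method='refresh',          # method to invoke on the matched resource
        find='MyWorkbook',         # value to search for
        match_with='name',         # match `find` against the resource's name
        blocking_refresh=True,     # wait for the refresh job to finish
        task_id='refresh_tableau_workbook_blocking',
    )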
11 changes: 5 additions & 6 deletions tests/always/test_deprecations.py
@@ -20,16 +20,15 @@
 import sys
 import warnings
 from inspect import isabstract
-from unittest import TestCase, mock
+from unittest import mock

 import pytest
-from parameterized import parameterized

 from airflow.models.baseoperator import BaseOperator
 from tests.deprecated_classes import ALL, RENAMED_ALL


-class TestDeprecations(TestCase):
+class TestDeprecations:
     @staticmethod
     def assert_warning(msg: str, warnings):
         error = f"Text '{msg}' not in warnings"
@@ -72,7 +71,7 @@ def get_class_from_path(path_to_class, parent=False):
             return new_class
         return class_

-    @parameterized.expand(RENAMED_ALL)
+    @pytest.mark.parametrize("new_module, old_module", RENAMED_ALL)
     def test_is_class_deprecated(self, new_module, old_module):
         self.skip_test_with_mssql_in_py38(new_module, old_module)
         deprecation_warning_msg = "This class is deprecated."
@@ -92,15 +91,15 @@ def test_is_class_deprecated(self, new_module, old_module):
             assert __file__ in files, old_module
             init_mock.assert_called_once()

-    @parameterized.expand(ALL)
+    @pytest.mark.parametrize("parent_class_path, sub_class_path", ALL)
     def test_is_subclass(self, parent_class_path, sub_class_path):
         self.skip_test_with_mssql_in_py38(parent_class_path, sub_class_path)
         with mock.patch(f"{parent_class_path}.__init__"), warnings.catch_warnings(record=True):
             parent_class_path = self.get_class_from_path(parent_class_path, parent=True)
             sub_class_path = self.get_class_from_path(sub_class_path)
             self.assert_is_subclass(sub_class_path, parent_class_path)

-    @parameterized.expand(ALL)
+    @pytest.mark.parametrize("new_path, old_path", ALL)
     def test_warning_on_import(self, new_path, old_path):
         self.skip_test_with_mssql_in_py38(new_path, old_path)
         self.assert_proper_import(old_path, new_path)
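Note: the recurring pattern in this commit replaces `parameterized.expand` with pytest's built-in `pytest.mark.parametrize`, which declares parameter names explicitly. Because `pytest.mark.parametrize` does not work on `unittest.TestCase` subclasses, the `TestCase` base class is dropped at the same time. A minimal before/after sketch with made-up test code:

    import pytest

    # Before (unittest + parameterized):
    #     class TestSquare(TestCase):
    #         @parameterized.expand([(2, 4), (3, 9)])
    #         def test_square(self, base, expected):
    #             self.assertEqual(base ** 2, expected)

    # After (plain class + pytest):
    class TestSquare:
        @pytest.mark.parametrize("base, expected", [(2, 4), (3, 9)])
        def test_square(self, base, expected):
            assert base ** 2 == expected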
10 changes: 5 additions & 5 deletions tests/providers/amazon/aws/hooks/test_batch_waiters.py
@@ -79,31 +79,31 @@ class AwsClients(NamedTuple):
     log: "botocore.client.CloudWatchLogs"


-@pytest.yield_fixture(scope="module")
+@pytest.fixture(scope="module")
 def batch_client(aws_region):
     with mock_batch():
         yield boto3.client("batch", region_name=aws_region)


-@pytest.yield_fixture(scope="module")
+@pytest.fixture(scope="module")
 def ec2_client(aws_region):
     with mock_ec2():
         yield boto3.client("ec2", region_name=aws_region)


-@pytest.yield_fixture(scope="module")
+@pytest.fixture(scope="module")
 def ecs_client(aws_region):
     with mock_ecs():
         yield boto3.client("ecs", region_name=aws_region)


-@pytest.yield_fixture(scope="module")
+@pytest.fixture(scope="module")
 def iam_client(aws_region):
     with mock_iam():
         yield boto3.client("iam", region_name=aws_region)


-@pytest.yield_fixture(scope="module")
+@pytest.fixture(scope="module")
 def logs_client(aws_region):
     with mock_logs():
         yield boto3.client("logs", region_name=aws_region)
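Note: `pytest.yield_fixture` has been a deprecated alias since pytest 3.0; a plain `pytest.fixture` supports `yield` directly, with code before the `yield` acting as setup and code after it as teardown. A self-contained sketch of the same shape as the moto-backed fixtures above:

    import tempfile

    import pytest


    @pytest.fixture(scope="module")
    def scratch_dir():
        tmp = tempfile.TemporaryDirectory()  # setup, runs once per module
        yield tmp.name                       # value handed to the tests
        tmp.cleanup()                        # teardown after the last test in scope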
@@ -26,7 +26,7 @@
 from airflow.models import DAG, DagRun, TaskInstance
 from airflow.operators.dummy import DummyOperator
 from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook
-from airflow.utils.log.cloudwatch_task_handler import CloudwatchTaskHandler
+from airflow.providers.amazon.aws.log.cloudwatch_task_handler import CloudwatchTaskHandler
 from airflow.utils.state import State
 from airflow.utils.timezone import datetime
 from tests.test_utils.config import conf_vars
64 changes: 32 additions & 32 deletions tests/providers/google/cloud/operators/test_bigtable.py
@@ -16,7 +16,6 @@
 # specific language governing permissions and limitations
 # under the License.

-import unittest
 from typing import Dict, List
 from unittest import mock

@@ -25,7 +24,6 @@
 from google.cloud.bigtable.column_family import MaxVersionsGCRule
 from google.cloud.bigtable.instance import Instance
 from google.cloud.bigtable_admin_v2 import enums
-from parameterized import parameterized

 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.operators.bigtable import (
@@ -57,18 +55,24 @@
 EMPTY_COLUMN_FAMILIES = {}  # type: Dict


-class TestBigtableInstanceCreate(unittest.TestCase):
-    @parameterized.expand(
+class TestBigtableInstanceCreate:
+    @pytest.mark.parametrize(
+        "missing_attribute, project_id, instance_id, main_cluster_id, main_cluster_zone",
         [
             ('instance_id', PROJECT_ID, '', CLUSTER_ID, CLUSTER_ZONE),
             ('main_cluster_id', PROJECT_ID, INSTANCE_ID, '', CLUSTER_ZONE),
             ('main_cluster_zone', PROJECT_ID, INSTANCE_ID, CLUSTER_ID, ''),
         ],
-        testcase_func_name=lambda f, n, p: 'test_empty_attribute.empty_' + p.args[0],
     )
     @mock.patch('airflow.providers.google.cloud.operators.bigtable.BigtableHook')
     def test_empty_attribute(
-        self, missing_attribute, project_id, instance_id, main_cluster_id, main_cluster_zone, mock_hook
+        self,
+        mock_hook,
+        missing_attribute,
+        project_id,
+        instance_id,
+        main_cluster_id,
+        main_cluster_zone,
     ):
         with pytest.raises(AirflowException) as ctx:
             BigtableCreateInstanceOperator(
@@ -232,7 +236,7 @@ def test_create_instance_with_replicas_that_doesnt_exists(self, mock_hook):
         )


-class TestBigtableInstanceUpdate(unittest.TestCase):
+class TestBigtableInstanceUpdate:
     @mock.patch('airflow.providers.google.cloud.operators.bigtable.BigtableHook')
     def test_delete_execute(self, mock_hook):
         op = BigtableUpdateInstanceOperator(
@@ -284,14 +288,12 @@ def test_update_execute_empty_project_id(self, mock_hook):
             timeout=None,
         )

-    @parameterized.expand(
-        [
-            ('instance_id', PROJECT_ID, ''),
-        ],
-        testcase_func_name=lambda f, n, p: 'test_empty_attribute.empty_' + p.args[0],
+    @pytest.mark.parametrize(
+        "missing_attribute, project_id, instance_id",
+        [('instance_id', PROJECT_ID, '')],
     )
     @mock.patch('airflow.providers.google.cloud.operators.bigtable.BigtableHook')
-    def test_empty_attribute(self, missing_attribute, project_id, instance_id, mock_hook):
+    def test_empty_attribute(self, mock_hook, missing_attribute, project_id, instance_id):
         with pytest.raises(AirflowException) as ctx:
             BigtableUpdateInstanceOperator(
                 project_id=project_id,
@@ -389,17 +391,17 @@ def test_different_error_reraised(self, mock_hook):
         )


-class TestBigtableClusterUpdate(unittest.TestCase):
-    @parameterized.expand(
+class TestBigtableClusterUpdate:
+    @pytest.mark.parametrize(
+        "missing_attribute, project_id, instance_id, cluster_id, nodes",
         [
             ('instance_id', PROJECT_ID, '', CLUSTER_ID, NODES),
             ('cluster_id', PROJECT_ID, INSTANCE_ID, '', NODES),
             ('nodes', PROJECT_ID, INSTANCE_ID, CLUSTER_ID, ''),
         ],
-        testcase_func_name=lambda f, n, p: 'test_empty_attribute.empty_' + p.args[0],
     )
     @mock.patch('airflow.providers.google.cloud.operators.bigtable.BigtableHook')
-    def test_empty_attribute(self, missing_attribute, project_id, instance_id, cluster_id, nodes, mock_hook):
+    def test_empty_attribute(self, mock_hook, missing_attribute, project_id, instance_id, cluster_id, nodes):
         with pytest.raises(AirflowException) as ctx:
             BigtableUpdateClusterOperator(
                 project_id=project_id,
@@ -545,7 +547,7 @@ def test_different_error_reraised(self, mock_hook):
         )


-class TestBigtableInstanceDelete(unittest.TestCase):
+class TestBigtableInstanceDelete:
     @mock.patch('airflow.providers.google.cloud.operators.bigtable.BigtableHook')
     def test_delete_execute(self, mock_hook):
         op = BigtableDeleteInstanceOperator(
@@ -581,14 +583,12 @@ def test_delete_execute_empty_project_id(self, mock_hook):
             project_id=None, instance_id=INSTANCE_ID
         )

-    @parameterized.expand(
-        [
-            ('instance_id', PROJECT_ID, ''),
-        ],
-        testcase_func_name=lambda f, n, p: 'test_empty_attribute.empty_' + p.args[0],
+    @pytest.mark.parametrize(
+        "missing_attribute, project_id, instance_id",
+        [('instance_id', PROJECT_ID, '')],
     )
     @mock.patch('airflow.providers.google.cloud.operators.bigtable.BigtableHook')
-    def test_empty_attribute(self, missing_attribute, project_id, instance_id, mock_hook):
+    def test_empty_attribute(self, mock_hook, missing_attribute, project_id, instance_id):
         with pytest.raises(AirflowException) as ctx:
             BigtableDeleteInstanceOperator(project_id=project_id, instance_id=instance_id, task_id="id")
         err = ctx.value
@@ -661,7 +661,7 @@ def test_different_error_reraised(self, mock_hook):
         )


-class TestBigtableTableDelete(unittest.TestCase):
+class TestBigtableTableDelete:
     @mock.patch('airflow.providers.google.cloud.operators.bigtable.BigtableHook')
     def test_delete_execute(self, mock_hook):
         op = BigtableDeleteTableOperator(
@@ -681,15 +681,15 @@ def test_delete_execute(self, mock_hook):
             project_id=PROJECT_ID, instance_id=INSTANCE_ID, table_id=TABLE_ID
         )

-    @parameterized.expand(
+    @pytest.mark.parametrize(
+        "missing_attribute, project_id, instance_id, table_id",
         [
             ('instance_id', PROJECT_ID, '', TABLE_ID),
             ('table_id', PROJECT_ID, INSTANCE_ID, ''),
         ],
-        testcase_func_name=lambda f, n, p: 'test_empty_attribute.empty_' + p.args[0],
     )
     @mock.patch('airflow.providers.google.cloud.operators.bigtable.BigtableHook')
-    def test_empty_attribute(self, missing_attribute, project_id, instance_id, table_id, mock_hook):
+    def test_empty_attribute(self, mock_hook, missing_attribute, project_id, instance_id, table_id):
         with pytest.raises(AirflowException) as ctx:
             BigtableDeleteTableOperator(
                 project_id=project_id,
@@ -795,7 +795,7 @@ def test_different_error_reraised(self, mock_hook):
         )


-class TestBigtableTableCreate(unittest.TestCase):
+class TestBigtableTableCreate:
     @mock.patch('airflow.providers.google.cloud.operators.bigtable.BigtableHook')
     def test_create_execute(self, mock_hook):
         op = BigtableCreateTableOperator(
@@ -821,15 +821,15 @@ def test_create_execute(self, mock_hook):
             column_families=EMPTY_COLUMN_FAMILIES,
         )

-    @parameterized.expand(
+    @pytest.mark.parametrize(
+        "missing_attribute, project_id, instance_id, table_id",
         [
             ('instance_id', PROJECT_ID, '', TABLE_ID),
             ('table_id', PROJECT_ID, INSTANCE_ID, ''),
         ],
-        testcase_func_name=lambda f, n, p: 'test_empty_attribute.empty_' + p.args[0],
     )
     @mock.patch('airflow.providers.google.cloud.operators.bigtable.BigtableHook')
-    def test_empty_attribute(self, missing_attribute, project_id, instance_id, table_id, mock_hook):
+    def test_empty_attribute(self, mock_hook, missing_attribute, project_id, instance_id, table_id):
         with pytest.raises(AirflowException) as ctx:
             BigtableCreateTableOperator(
                 project_id=project_id,
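Note on the reordered test signatures: `pytest.mark.parametrize` passes its parameters by keyword, while a stacked `@mock.patch` decorator appends its mock positionally, so the injected mock now has to sit directly after `self` rather than last (the position `parameterized.expand` required). A minimal sketch against a real patch target, with made-up test logic:

    from unittest import mock

    import pytest


    class TestOrdering:
        # mock.patch's mock binds to the first free positional slot after
        # `self`; `value` and `expected` arrive by keyword from parametrize.
        @pytest.mark.parametrize("value, expected", [("a", "A"), ("b", "B")])
        @mock.patch("builtins.print")
        def test_upper(self, mock_print, value, expected):
            assert value.upper() == expected
            mock_print.assert_not_called()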
8 changes: 3 additions & 5 deletions tests/providers/google/cloud/sensors/test_bigtable.py
@@ -16,14 +16,12 @@
 # specific language governing permissions and limitations
 # under the License.

-import unittest
 from unittest import mock

 import google.api_core.exceptions
 import pytest
 from google.cloud.bigtable.instance import Instance
 from google.cloud.bigtable.table import ClusterState
-from parameterized import parameterized

 from airflow.exceptions import AirflowException
 from airflow.providers.google.cloud.sensors.bigtable import BigtableTableReplicationCompletedSensor
@@ -35,13 +33,13 @@
 IMPERSONATION_CHAIN = ["ACCOUNT_1", "ACCOUNT_2", "ACCOUNT_3"]


-class BigtableWaitForTableReplicationTest(unittest.TestCase):
-    @parameterized.expand(
+class BigtableWaitForTableReplicationTest:
+    @pytest.mark.parametrize(
+        "missing_attribute, project_id, instance_id, table_id",
         [
             ('instance_id', PROJECT_ID, '', TABLE_ID),
             ('table_id', PROJECT_ID, INSTANCE_ID, ''),
         ],
-        testcase_func_name=lambda f, n, p: 'test_empty_attribute.empty_' + p.args[0],
     )
     @mock.patch('airflow.providers.google.cloud.sensors.bigtable.BigtableHook')
     def test_empty_attribute(self, missing_attribute, project_id, instance_id, table_id, mock_hook):