diff --git a/airflow/configuration.py b/airflow/configuration.py
index a3e7c8f8b026a..41dc8d718e0d9 100644
--- a/airflow/configuration.py
+++ b/airflow/configuration.py
@@ -31,7 +31,6 @@
import sys
import warnings
from base64 import b64encode
-from collections import OrderedDict
from configparser import ConfigParser, NoOptionError, NoSectionError
from contextlib import contextmanager
from copy import deepcopy
@@ -1343,12 +1342,12 @@ def getsection(self, section: str) -> ConfigOptionsDictType | None:
if not self.has_section(section) and not self._default_values.has_section(section):
return None
if self._default_values.has_section(section):
- _section: ConfigOptionsDictType = OrderedDict(self._default_values.items(section))
+ _section: ConfigOptionsDictType = dict(self._default_values.items(section))
else:
- _section = OrderedDict()
+ _section = {}
if self.has_section(section):
- _section.update(OrderedDict(self.items(section)))
+ _section.update(self.items(section))
section_prefix = self._env_var_name(section, "")
for env_var in sorted(os.environ.keys()):
@@ -1499,7 +1498,7 @@ def _include_secrets(
opt = value.replace("%", "%%")
else:
opt = value
- config_sources.setdefault(section, OrderedDict()).update({key: opt})
+ config_sources.setdefault(section, {}).update({key: opt})
del config_sources[section][key + "_secret"]
def _include_commands(
@@ -1522,7 +1521,7 @@ def _include_commands(
opt_to_set = str(opt_to_set).replace("%", "%%")
if opt_to_set is not None:
dict_to_update: dict[str, str | tuple[str, str]] = {key: opt_to_set}
- config_sources.setdefault(section, OrderedDict()).update(dict_to_update)
+ config_sources.setdefault(section, {}).update(dict_to_update)
del config_sources[section][key + "_cmd"]
def _include_envs(
@@ -1560,7 +1559,7 @@ def _include_envs(
# with AIRFLOW_. Therefore, we need to make it a special case.
if section != "kubernetes_environment_variables":
key = key.lower()
- config_sources.setdefault(section, OrderedDict()).update({key: opt})
+ config_sources.setdefault(section, {}).update({key: opt})
def _filter_by_source(
self,
@@ -1721,7 +1720,7 @@ def _replace_section_config_with_display_sources(
include_cmds: bool,
include_secret: bool,
):
- sect = config_sources.setdefault(section, OrderedDict())
+ sect = config_sources.setdefault(section, {})
if isinstance(config, AirflowConfigParser):
with config.suppress_future_warnings():
items: Iterable[tuple[str, Any]] = config.items(section=section, raw=raw)
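
The configuration.py changes above rely on the language-level guarantee (Python 3.7+) that plain dicts preserve insertion order, so `OrderedDict` adds nothing here. A minimal standalone sketch of the defaults-then-overrides merge that `getsection` performs, using made-up section data:

```python
# Plain dicts keep insertion order (a language guarantee since Python 3.7), so the
# defaults-then-overrides merge used by getsection() keeps a stable key order.
defaults = {"key1": "default1", "key2": "default2"}  # hypothetical default values
overrides = {"key2": "airflow"}                      # hypothetical user config

_section = dict(defaults)    # copying keeps the defaults' order
_section.update(overrides)   # overwrites values in place; key order is unchanged

assert list(_section) == ["key1", "key2"]
assert _section == {"key1": "default1", "key2": "airflow"}
```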
diff --git a/airflow/executors/base_executor.py b/airflow/executors/base_executor.py
index 81c441b521f83..d7f08ad93536e 100644
--- a/airflow/executors/base_executor.py
+++ b/airflow/executors/base_executor.py
@@ -21,7 +21,7 @@
import logging
import sys
import warnings
-from collections import OrderedDict, defaultdict
+from collections import defaultdict
from dataclasses import dataclass, field
from datetime import datetime
from typing import TYPE_CHECKING, Any, List, Optional, Sequence, Tuple
@@ -121,7 +121,7 @@ class BaseExecutor(LoggingMixin):
def __init__(self, parallelism: int = PARALLELISM):
super().__init__()
self.parallelism: int = parallelism
- self.queued_tasks: OrderedDict[TaskInstanceKey, QueuedTaskInstanceType] = OrderedDict()
+ self.queued_tasks: dict[TaskInstanceKey, QueuedTaskInstanceType] = {}
self.running: set[TaskInstanceKey] = set()
self.event_buffer: dict[TaskInstanceKey, EventBufferValueType] = {}
self.attempts: dict[TaskInstanceKey, RunningRetryAttemptType] = defaultdict(RunningRetryAttemptType)
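
Assuming, as this change implies, that `queued_tasks` relies only on insertion order, a plain dict is equivalent. The `OrderedDict`-specific extras (`move_to_end()`, `popitem(last=False)`) are the only things lost, and FIFO access is still easy to express, as this standalone sketch shows:

```python
queued: dict[str, str] = {}
queued["task_a"] = "payload_a"
queued["task_b"] = "payload_b"

# FIFO pop without OrderedDict.popitem(last=False):
oldest_key = next(iter(queued))
assert (oldest_key, queued.pop(oldest_key)) == ("task_a", "payload_a")
```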
diff --git a/airflow/providers/apache/hive/hooks/hive.py b/airflow/providers/apache/hive/hooks/hive.py
index 0cf85c74276ce..8d32b7c48c989 100644
--- a/airflow/providers/apache/hive/hooks/hive.py
+++ b/airflow/providers/apache/hive/hooks/hive.py
@@ -24,7 +24,6 @@
import subprocess
import time
import warnings
-from collections import OrderedDict
from tempfile import NamedTemporaryFile, TemporaryDirectory
from typing import Any, Iterable, Mapping
@@ -354,7 +353,7 @@ def load_df(
:param table: target Hive table, use dot notation to target a
specific database
:param field_dict: mapping from column name to hive data type.
- Note that it must be OrderedDict so as to keep columns' order.
+            Note that Python dicts preserve insertion order, so the columns' order is kept.
:param delimiter: field delimiter in the file
:param encoding: str encoding to use when writing DataFrame to file
:param pandas_kwargs: passed to DataFrame.to_csv
@@ -375,7 +374,7 @@ def _infer_field_types_from_df(df: pd.DataFrame) -> dict[Any, Any]:
"V": "STRING", # void
}
- order_type = OrderedDict()
+ order_type = {}
for col, dtype in df.dtypes.items():
order_type[col] = dtype_kind_hive_type[dtype.kind]
return order_type
@@ -431,7 +430,7 @@ def load_file(
:param delimiter: field delimiter in the file
:param field_dict: A dictionary of the fields name in the file
as keys and their Hive types as values.
- Note that it must be OrderedDict so as to keep columns' order.
+            Note that Python dicts preserve insertion order, so the columns' order is kept.
:param create: whether to create the table if it doesn't exist
:param overwrite: whether to overwrite the data in table or partition
:param partition: target partition as a dict of partition columns
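
For context on the `_infer_field_types_from_df` change above: `df.dtypes.items()` yields columns in DataFrame order, so a plain dict built from it comes out in column order automatically. A sketch using an assumed, reduced kind-to-Hive-type table (the hook's real mapping covers more dtype kinds):

```python
import pandas as pd

# Reduced illustration of the dtype-kind table; the hook's real mapping is larger.
dtype_kind_hive_type = {"i": "BIGINT", "f": "DOUBLE", "O": "STRING"}

df = pd.DataFrame({"id": [1], "price": [9.99], "name": ["widget"]})
order_type = {col: dtype_kind_hive_type[dtype.kind] for col, dtype in df.dtypes.items()}

# The DataFrame's column order is preserved in the plain dict.
assert list(order_type) == ["id", "price", "name"]
assert order_type == {"id": "BIGINT", "price": "DOUBLE", "name": "STRING"}
```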
diff --git a/airflow/providers/apache/hive/operators/hive_stats.py b/airflow/providers/apache/hive/operators/hive_stats.py
index 43e4b4c9644e0..d816541ffa62f 100644
--- a/airflow/providers/apache/hive/operators/hive_stats.py
+++ b/airflow/providers/apache/hive/operators/hive_stats.py
@@ -19,7 +19,6 @@
import json
import warnings
-from collections import OrderedDict
from typing import TYPE_CHECKING, Any, Callable, Sequence
from airflow.exceptions import AirflowException
@@ -134,7 +133,6 @@ def execute(self, context: Context) -> None:
assign_exprs = self.get_default_exprs(col, col_type)
exprs.update(assign_exprs)
exprs.update(self.extra_exprs)
- exprs = OrderedDict(exprs)
exprs_str = ",\n ".join(f"{v} AS {k[0]}__{k[1]}" for k, v in exprs.items())
where_clause_ = [f"{k} = '{v}'" for k, v in self.partition.items()]
diff --git a/airflow/providers/apache/hive/transfers/mssql_to_hive.py b/airflow/providers/apache/hive/transfers/mssql_to_hive.py
index 4d444f1b6914b..c7ecdfd34d298 100644
--- a/airflow/providers/apache/hive/transfers/mssql_to_hive.py
+++ b/airflow/providers/apache/hive/transfers/mssql_to_hive.py
@@ -19,7 +19,6 @@
from __future__ import annotations
import csv
-from collections import OrderedDict
from tempfile import NamedTemporaryFile
from typing import TYPE_CHECKING, Sequence
@@ -117,9 +116,10 @@ def execute(self, context: Context):
cursor.execute(self.sql)
with NamedTemporaryFile(mode="w", encoding="utf-8") as tmp_file:
csv_writer = csv.writer(tmp_file, delimiter=self.delimiter)
- field_dict = OrderedDict()
- for col_count, (key, val) in enumerate(cursor.description, start=1):
- field_dict[key or f"Column{col_count}"] = self.type_map(val)
+ field_dict = {}
+ for col_count, field in enumerate(cursor.description, start=1):
+ col_position = f"Column{col_count}"
+ field_dict[col_position if field[0] == "" else field[0]] = self.type_map(field[1])
csv_writer.writerows(cursor)
tmp_file.flush()
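
The rewritten loop here (and the matching one in vertica_to_hive.py below) indexes into the raw DB-API description tuples instead of unpacking `(key, val)`. Per PEP 249, each `cursor.description` entry is a sequence whose first item is the column name and second the type code; empty names fall back to a positional `Column<n>` label. A self-contained sketch with a stubbed description and a hypothetical reduced `type_map`:

```python
# Stubbed DB-API cursor.description entries: (name, type_code, ...), per PEP 249.
description = [("id", 3), ("", 1), ("price", 5)]

def type_map(type_code: int) -> str:
    # Hypothetical reduced mapping, for illustration only.
    return {3: "INT", 5: "FLOAT"}.get(type_code, "STRING")

field_dict = {}
for col_count, field in enumerate(description, start=1):
    col_position = f"Column{col_count}"
    field_dict[col_position if field[0] == "" else field[0]] = type_map(field[1])

assert field_dict == {"id": "INT", "Column2": "STRING", "price": "FLOAT"}
```

One subtle difference from the removed code: `key or f"Column{col_count}"` also covered a `None` column name, while `field[0] == ""` only covers the empty string.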
diff --git a/airflow/providers/apache/hive/transfers/mysql_to_hive.py b/airflow/providers/apache/hive/transfers/mysql_to_hive.py
index d01433e16ed1f..ab1d8199538b2 100644
--- a/airflow/providers/apache/hive/transfers/mysql_to_hive.py
+++ b/airflow/providers/apache/hive/transfers/mysql_to_hive.py
@@ -19,7 +19,6 @@
from __future__ import annotations
import csv
-from collections import OrderedDict
from contextlib import closing
from tempfile import NamedTemporaryFile
from typing import TYPE_CHECKING, Sequence
@@ -147,7 +146,7 @@ def execute(self, context: Context):
quotechar=self.quotechar if self.quoting != csv.QUOTE_NONE else None,
escapechar=self.escapechar,
)
- field_dict = OrderedDict()
+ field_dict = {}
if cursor.description is not None:
for field in cursor.description:
field_dict[field[0]] = self.type_map(field[1])
diff --git a/airflow/providers/apache/hive/transfers/vertica_to_hive.py b/airflow/providers/apache/hive/transfers/vertica_to_hive.py
index d2b5bcf4a28e9..6f65fd35b9443 100644
--- a/airflow/providers/apache/hive/transfers/vertica_to_hive.py
+++ b/airflow/providers/apache/hive/transfers/vertica_to_hive.py
@@ -19,7 +19,6 @@
from __future__ import annotations
import csv
-from collections import OrderedDict
from tempfile import NamedTemporaryFile
from typing import TYPE_CHECKING, Any, Sequence
@@ -121,9 +120,10 @@ def execute(self, context: Context):
cursor.execute(self.sql)
with NamedTemporaryFile(mode="w", encoding="utf-8") as f:
csv_writer = csv.writer(f, delimiter=self.delimiter)
- field_dict = OrderedDict()
- for col_count, (key, val) in enumerate(cursor.description, start=1):
- field_dict[key or f"Column{col_count}"] = self.type_map(val)
+ field_dict = {}
+ for col_count, field in enumerate(cursor.description, start=1):
+ col_position = f"Column{col_count}"
+ field_dict[col_position if field[0] == "" else field[0]] = self.type_map(field[1])
csv_writer.writerows(cursor.iterate())
f.flush()
cursor.close()
diff --git a/airflow/providers_manager.py b/airflow/providers_manager.py
index febc42a592d92..deda61293e2f7 100644
--- a/airflow/providers_manager.py
+++ b/airflow/providers_manager.py
@@ -27,7 +27,6 @@
import sys
import traceback
import warnings
-from collections import OrderedDict
from dataclasses import dataclass
from functools import wraps
from time import perf_counter
@@ -443,7 +442,7 @@ def initialize_providers_list(self):
self._discover_all_airflow_builtin_providers_from_local_sources()
self._discover_all_providers_from_packages()
self._verify_all_providers_all_compatible()
- self._provider_dict = OrderedDict(sorted(self._provider_dict.items()))
+ self._provider_dict = dict(sorted(self._provider_dict.items()))
def _verify_all_providers_all_compatible(self):
from packaging import version as packaging_version
@@ -466,7 +465,7 @@ def initialize_providers_hooks(self):
"""Lazy initialization of providers hooks."""
self.initialize_providers_list()
self._discover_hooks()
- self._hook_provider_dict = OrderedDict(sorted(self._hook_provider_dict.items()))
+ self._hook_provider_dict = dict(sorted(self._hook_provider_dict.items()))
@provider_info_cache("taskflow_decorators")
def initialize_providers_taskflow_decorator(self):
@@ -782,21 +781,21 @@ def _discover_hooks(self) -> None:
provider,
provider_uses_connection_types,
)
- self._hook_provider_dict = OrderedDict(sorted(self._hook_provider_dict.items()))
+ self._hook_provider_dict = dict(sorted(self._hook_provider_dict.items()))
@provider_info_cache("import_all_hooks")
def _import_info_from_all_hooks(self):
"""Force-import all hooks and initialize the connections/fields."""
# Retrieve all hooks to make sure that all of them are imported
_ = list(self._hooks_lazy_dict.values())
- self._field_behaviours = OrderedDict(sorted(self._field_behaviours.items()))
+ self._field_behaviours = dict(sorted(self._field_behaviours.items()))
# Widgets for connection forms are currently used in two places:
        # 1. In the UI Connections form, where they are expected in the same order as defined in the Hook.
        # 2. In the CLI command `airflow providers widgets`, where they are expected in alphabetical order.
        # It is not possible to recover the original ordering after sorting, which is
        # the main reason why the original sorting was moved to the CLI part:
- # self._connection_form_widgets = OrderedDict(sorted(self._connection_form_widgets.items()))
+ # self._connection_form_widgets = dict(sorted(self._connection_form_widgets.items()))
def _discover_taskflow_decorators(self) -> None:
for name, info in self._provider_dict.items():
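
`dict(sorted(...))` is a drop-in replacement for `OrderedDict(sorted(...))` in these providers_manager.py spots: `sorted()` emits the items in key order, and the dict constructor preserves that order. A quick sketch with made-up package names and versions:

```python
providers = {
    "apache-airflow-providers-http": "4.1.0",    # hypothetical entries
    "apache-airflow-providers-amazon": "8.0.0",
}
providers = dict(sorted(providers.items()))

# Iteration order is now alphabetical by package name.
assert list(providers) == [
    "apache-airflow-providers-amazon",
    "apache-airflow-providers-http",
]
```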
diff --git a/tests/api_connexion/endpoints/test_provider_endpoint.py b/tests/api_connexion/endpoints/test_provider_endpoint.py
index 7f910bb4120ca..b970f5f47091b 100644
--- a/tests/api_connexion/endpoints/test_provider_endpoint.py
+++ b/tests/api_connexion/endpoints/test_provider_endpoint.py
@@ -16,7 +16,6 @@
# under the License.
from __future__ import annotations
-from collections import OrderedDict
from unittest import mock
import pytest
@@ -25,36 +24,28 @@
from airflow.security import permissions
from tests.test_utils.api_connexion_utils import create_user, delete_user
-MOCK_PROVIDERS = OrderedDict(
- [
- (
- "apache-airflow-providers-amazon",
- ProviderInfo(
- "1.0.0",
- {
- "package-name": "apache-airflow-providers-amazon",
- "name": "Amazon",
- "description": "`Amazon Web Services (AWS) `__.\n",
- "versions": ["1.0.0"],
- },
- "package",
- ),
- ),
- (
- "apache-airflow-providers-apache-cassandra",
- ProviderInfo(
- "1.0.0",
- {
- "package-name": "apache-airflow-providers-apache-cassandra",
- "name": "Apache Cassandra",
- "description": "`Apache Cassandra `__.\n",
- "versions": ["1.0.0"],
- },
- "package",
- ),
- ),
- ]
-)
+MOCK_PROVIDERS = {
+ "apache-airflow-providers-amazon": ProviderInfo(
+ "1.0.0",
+ {
+ "package-name": "apache-airflow-providers-amazon",
+ "name": "Amazon",
+ "description": "`Amazon Web Services (AWS) `__.\n",
+ "versions": ["1.0.0"],
+ },
+ "package",
+ ),
+ "apache-airflow-providers-apache-cassandra": ProviderInfo(
+ "1.0.0",
+ {
+ "package-name": "apache-airflow-providers-apache-cassandra",
+ "name": "Apache Cassandra",
+ "description": "`Apache Cassandra `__.\n",
+ "versions": ["1.0.0"],
+ },
+ "package",
+ ),
+}
@pytest.fixture(scope="module")
diff --git a/tests/core/test_configuration.py b/tests/core/test_configuration.py
index 6d6641a73e05c..e957df689d074 100644
--- a/tests/core/test_configuration.py
+++ b/tests/core/test_configuration.py
@@ -25,7 +25,6 @@
import tempfile
import textwrap
import warnings
-from collections import OrderedDict
from unittest import mock
from unittest.mock import patch
@@ -547,12 +546,14 @@ def test_getsection(self):
test_conf = AirflowConfigParser(default_config=parameterized_config(test_config_default))
test_conf.read_string(test_config)
- assert OrderedDict([("key1", "hello"), ("key2", "airflow")]) == test_conf.getsection("test")
- assert OrderedDict(
- [("key3", "value3"), ("testkey", "testvalue"), ("testpercent", "with%percent")]
- ) == test_conf.getsection("testsection")
+ assert {"key1": "hello", "key2": "airflow"} == test_conf.getsection("test")
+ assert {
+ "key3": "value3",
+ "testkey": "testvalue",
+ "testpercent": "with%percent",
+ } == test_conf.getsection("testsection")
- assert OrderedDict([("key", "value")]) == test_conf.getsection("new_section")
+ assert {"key": "value"} == test_conf.getsection("new_section")
assert test_conf.getsection("non_existent_section") is None
@@ -581,7 +582,7 @@ def test_kubernetes_environment_variables_section(self):
test_conf = AirflowConfigParser(default_config=parameterized_config(test_config_default))
test_conf.read_string(test_config)
- assert OrderedDict([("key1", "hello"), ("AIRFLOW_HOME", "/root/airflow")]) == test_conf.getsection(
+ assert {"key1": "hello", "AIRFLOW_HOME": "/root/airflow"} == test_conf.getsection(
"kubernetes_environment_variables"
)
diff --git a/tests/providers/amazon/aws/transfers/test_salesforce_to_s3.py b/tests/providers/amazon/aws/transfers/test_salesforce_to_s3.py
index 5dcc1890c244b..7a7d90a5328e6 100644
--- a/tests/providers/amazon/aws/transfers/test_salesforce_to_s3.py
+++ b/tests/providers/amazon/aws/transfers/test_salesforce_to_s3.py
@@ -16,7 +16,6 @@
# under the License.
from __future__ import annotations
-from collections import OrderedDict
from unittest import mock
from airflow.providers.amazon.aws.hooks.s3 import S3Hook
@@ -31,18 +30,14 @@
AWS_CONNECTION_ID = "aws_default"
SALESFORCE_RESPONSE = {
"records": [
- OrderedDict(
- [
- (
- "attributes",
- OrderedDict(
- [("type", "Lead"), ("url", "/services/data/v42.0/sobjects/Lead/00Q3t00001eJ7AnEAK")]
- ),
- ),
- ("Id", "00Q3t00001eJ7AnEAK"),
- ("Company", "Hello World Inc"),
- ]
- )
+ {
+ "attributes": {
+ "type": "Lead",
+ "url": "/services/data/v42.0/sobjects/Lead/00Q3t00001eJ7AnEAK",
+ },
+ "Id": "00Q3t00001eJ7AnEAK",
+ "Company": "Hello World Inc",
+ }
],
"totalSize": 1,
"done": True,
diff --git a/tests/providers/apache/hive/hooks/test_hive.py b/tests/providers/apache/hive/hooks/test_hive.py
index 9f55e870b7713..11658692517c9 100644
--- a/tests/providers/apache/hive/hooks/test_hive.py
+++ b/tests/providers/apache/hive/hooks/test_hive.py
@@ -29,7 +29,7 @@
import datetime
import itertools
-from collections import OrderedDict, namedtuple
+from collections import namedtuple
from unittest import mock
import pandas as pd
@@ -242,7 +242,7 @@ def test_load_file_without_create_table(self, mock_run_cli):
def test_load_file_create_table(self, mock_run_cli):
filepath = "/path/to/input/file"
table = "output_table"
- field_dict = OrderedDict([("name", "string"), ("gender", "string")])
+ field_dict = {"name": "string", "gender": "string"}
fields = ",\n ".join(f"`{k.strip('`')}` {v}" for k, v in field_dict.items())
hook = MockHiveCliHook()
@@ -281,7 +281,7 @@ def test_load_df(self, mock_to_csv, mock_load_file):
kwargs = mock_load_file.call_args.kwargs
assert kwargs["delimiter"] == delimiter
assert kwargs["field_dict"] == {"c": "STRING"}
- assert isinstance(kwargs["field_dict"], OrderedDict)
+ assert isinstance(kwargs["field_dict"], dict)
assert kwargs["table"] == table
@mock.patch("airflow.providers.apache.hive.hooks.hive.HiveCliHook.load_file")
@@ -300,17 +300,18 @@ def test_load_df_with_optional_parameters(self, mock_to_csv, mock_load_file):
@mock.patch("airflow.providers.apache.hive.hooks.hive.HiveCliHook.run_cli")
def test_load_df_with_data_types(self, mock_run_cli):
- ord_dict = OrderedDict()
- ord_dict["b"] = [True]
- ord_dict["i"] = [-1]
- ord_dict["t"] = [1]
- ord_dict["f"] = [0.0]
- ord_dict["c"] = ["c"]
- ord_dict["M"] = [datetime.datetime(2018, 1, 1)]
- ord_dict["O"] = [object()]
- ord_dict["S"] = [b"STRING"]
- ord_dict["U"] = ["STRING"]
- ord_dict["V"] = [None]
+ ord_dict = {
+ "b": [True],
+ "i": [-1],
+ "t": [1],
+ "f": [0.0],
+ "c": ["c"],
+ "M": [datetime.datetime(2018, 1, 1)],
+ "O": [object()],
+ "S": [b"STRING"],
+ "U": ["STRING"],
+ "V": [None],
+ }
df = pd.DataFrame(ord_dict)
hook = MockHiveCliHook()
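
The loosened `isinstance(kwargs["field_dict"], dict)` assertion earlier in this file works on both sides of the migration because `OrderedDict` is a subclass of `dict`. A one-liner sketch:

```python
from collections import OrderedDict

assert isinstance(OrderedDict(), dict)   # subclass: passes the loosened check
assert not isinstance({}, OrderedDict)   # the reverse does not hold
```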
diff --git a/tests/providers/apache/hive/operators/test_hive_stats.py b/tests/providers/apache/hive/operators/test_hive_stats.py
index 27a7772236a4e..e419d2da00b92 100644
--- a/tests/providers/apache/hive/operators/test_hive_stats.py
+++ b/tests/providers/apache/hive/operators/test_hive_stats.py
@@ -19,7 +19,6 @@
import os
import re
-from collections import OrderedDict
from unittest.mock import MagicMock, patch
import pytest
@@ -150,7 +149,6 @@ def test_execute(self, mock_hive_metastore_hook, mock_presto_hook, mock_mysql_ho
exprs = {("", "count"): "COUNT(*)"}
for col, col_type in list(field_types.items()):
exprs.update(hive_stats_collection_operator.get_default_exprs(col, col_type))
- exprs = OrderedDict(exprs)
rows = [
(
hive_stats_collection_operator.ds,
@@ -198,7 +196,6 @@ def assignment_func(col, _):
exprs = {("", "count"): "COUNT(*)"}
for col, col_type in list(field_types.items()):
exprs.update(hive_stats_collection_operator.assignment_func(col, col_type))
- exprs = OrderedDict(exprs)
rows = [
(
hive_stats_collection_operator.ds,
@@ -246,7 +243,6 @@ def assignment_func(_, __):
exprs = {("", "count"): "COUNT(*)"}
for col, col_type in list(field_types.items()):
exprs.update(hive_stats_collection_operator.get_default_exprs(col, col_type))
- exprs = OrderedDict(exprs)
rows = [
(
hive_stats_collection_operator.ds,
diff --git a/tests/providers/apache/hive/transfers/test_mssql_to_hive.py b/tests/providers/apache/hive/transfers/test_mssql_to_hive.py
index 693d594348b75..4f9e8700dbb73 100644
--- a/tests/providers/apache/hive/transfers/test_mssql_to_hive.py
+++ b/tests/providers/apache/hive/transfers/test_mssql_to_hive.py
@@ -17,7 +17,6 @@
# under the License.
from __future__ import annotations
-from collections import OrderedDict
from unittest.mock import Mock, PropertyMock, patch
import pytest
@@ -73,7 +72,7 @@ def test_execute(self, mock_hive_hook, mock_mssql_hook, mock_tmp_file, mock_csv)
mock_mssql_hook_cursor.return_value.execute.assert_called_once_with(mssql_to_hive_transfer.sql)
mock_tmp_file.assert_called_with(mode="w", encoding="utf-8")
mock_csv.writer.assert_called_once_with(mock_tmp_file, delimiter=mssql_to_hive_transfer.delimiter)
- field_dict = OrderedDict()
+ field_dict = {}
for field in mock_mssql_hook_cursor.return_value.description:
field_dict[field[0]] = mssql_to_hive_transfer.type_map(field[1])
mock_csv.writer.return_value.writerows.assert_called_once_with(mock_mssql_hook_cursor.return_value)
@@ -102,7 +101,7 @@ def test_execute_empty_description_field(self, mock_hive_hook, mock_mssql_hook,
mssql_to_hive_transfer = MsSqlToHiveOperator(**self.kwargs)
mssql_to_hive_transfer.execute(context={})
- field_dict = OrderedDict()
+ field_dict = {}
for col_count, field in enumerate(mock_mssql_hook_cursor.return_value.description, start=1):
col_position = f"Column{col_count}"
field_dict[col_position] = mssql_to_hive_transfer.type_map(field[1])
diff --git a/tests/providers/apache/hive/transfers/test_mysql_to_hive.py b/tests/providers/apache/hive/transfers/test_mysql_to_hive.py
index a7cf8f025b5d0..87d67f306da8e 100644
--- a/tests/providers/apache/hive/transfers/test_mysql_to_hive.py
+++ b/tests/providers/apache/hive/transfers/test_mysql_to_hive.py
@@ -19,7 +19,6 @@
import csv
import textwrap
-from collections import OrderedDict
from contextlib import closing
from unittest import mock
@@ -233,13 +232,14 @@ def test_mysql_to_hive_type_conversion(self, spy_on_hive):
op.execute({})
assert spy_on_hive.load_file.call_count == 1
- ordered_dict = OrderedDict()
- ordered_dict["c0"] = "SMALLINT"
- ordered_dict["c1"] = "INT"
- ordered_dict["c2"] = "INT"
- ordered_dict["c3"] = "BIGINT"
- ordered_dict["c4"] = "DECIMAL(38,0)"
- ordered_dict["c5"] = "TIMESTAMP"
+ ordered_dict = {
+ "c0": "SMALLINT",
+ "c1": "INT",
+ "c2": "INT",
+ "c3": "BIGINT",
+ "c4": "DECIMAL(38,0)",
+ "c5": "TIMESTAMP",
+ }
assert spy_on_hive.load_file.call_args.kwargs["field_dict"] == ordered_dict
finally:
with closing(hook.get_conn()) as conn:
diff --git a/tests/providers/apache/hive/transfers/test_s3_to_hive.py b/tests/providers/apache/hive/transfers/test_s3_to_hive.py
index 13bc2b0962862..3f674ec3fa54c 100644
--- a/tests/providers/apache/hive/transfers/test_s3_to_hive.py
+++ b/tests/providers/apache/hive/transfers/test_s3_to_hive.py
@@ -23,7 +23,6 @@
import itertools
import logging
import shutil
-from collections import OrderedDict
from gzip import GzipFile
from tempfile import NamedTemporaryFile, mkdtemp
from unittest import mock
@@ -43,7 +42,7 @@ def setup_attrs(self):
self.file_names = {}
self.task_id = "S3ToHiveTransferTest"
self.s3_key = "S32hive_test_file"
- self.field_dict = OrderedDict([("Sno", "BIGINT"), ("Some,Text", "STRING")])
+ self.field_dict = {"Sno": "BIGINT", "Some,Text": "STRING"}
self.hive_table = "S32hive_test_table"
self.delimiter = "\t"
self.create = True
@@ -168,7 +167,7 @@ def test__get_top_row_as_list(self):
        assert header_list == ["Sno\tSome", "Text"], "Top row from file doesn't match the expected value"
def test__match_headers(self):
- self.kwargs["field_dict"] = OrderedDict([("Sno", "BIGINT"), ("Some,Text", "STRING")])
+ self.kwargs["field_dict"] = {"Sno": "BIGINT", "Some,Text": "STRING"}
assert S3ToHiveOperator(**self.kwargs)._match_headers(
["Sno", "Some,Text"]
), "Header row doesn't match expected value"
diff --git a/tests/providers/apache/sqoop/hooks/test_sqoop.py b/tests/providers/apache/sqoop/hooks/test_sqoop.py
index 3c7d4fb5e8e92..f28c139509dbc 100644
--- a/tests/providers/apache/sqoop/hooks/test_sqoop.py
+++ b/tests/providers/apache/sqoop/hooks/test_sqoop.py
@@ -17,7 +17,6 @@
# under the License.
from __future__ import annotations
-import collections
import json
from io import StringIO
from unittest import mock
@@ -41,9 +40,7 @@ class TestSqoopHook:
"hcatalog_table": "hive_table",
}
_config_export_extra_options = {
- "extra_options": collections.OrderedDict(
- [("update-key", "id"), ("update-mode", "allowinsert"), ("fetch-size", 1)]
- ),
+ "extra_options": {"update-key": "id", "update-mode": "allowinsert", "fetch-size": 1},
}
_config_export = {
"table": "export_data_to",
diff --git a/tests/providers/google/cloud/transfers/test_salesforce_to_gcs.py b/tests/providers/google/cloud/transfers/test_salesforce_to_gcs.py
index 3001376ca0329..149fee29fa319 100644
--- a/tests/providers/google/cloud/transfers/test_salesforce_to_gcs.py
+++ b/tests/providers/google/cloud/transfers/test_salesforce_to_gcs.py
@@ -16,7 +16,6 @@
# under the License.
from __future__ import annotations
-from collections import OrderedDict
from unittest import mock
from airflow.providers.google.cloud.hooks.gcs import GCSHook
@@ -32,18 +31,11 @@
GCP_CONNECTION_ID = "google_cloud_default"
SALESFORCE_RESPONSE = {
"records": [
- OrderedDict(
- [
- (
- "attributes",
- OrderedDict(
- [("type", "Lead"), ("url", "/services/data/v42.0/sobjects/Lead/00Q3t00001eJ7AnEAK")]
- ),
- ),
- ("Id", "00Q3t00001eJ7AnEAK"),
- ("Company", "Hello World Inc"),
- ]
- )
+ {
+ "attributes": {"type": "Lead", "url": "/services/data/v42.0/sobjects/Lead/00Q3t00001eJ7AnEAK"},
+ "Id": "00Q3t00001eJ7AnEAK",
+ "Company": "Hello World Inc",
+ }
],
"totalSize": 1,
"done": True,
diff --git a/tests/providers/http/hooks/test_http.py b/tests/providers/http/hooks/test_http.py
index 8970c8eda8a9d..e1606aefa9f7e 100644
--- a/tests/providers/http/hooks/test_http.py
+++ b/tests/providers/http/hooks/test_http.py
@@ -21,7 +21,6 @@
import json
import logging
import os
-from collections import OrderedDict
from http import HTTPStatus
from unittest import mock
@@ -292,7 +291,7 @@ def test_verify_set_to_true_by_default(self, mock_session_send):
mock.ANY,
allow_redirects=True,
cert=None,
- proxies=OrderedDict(),
+ proxies={},
stream=False,
timeout=None,
verify=True,
@@ -310,7 +309,7 @@ def test_requests_ca_bundle_env_var(self, mock_session_send):
mock.ANY,
allow_redirects=True,
cert=None,
- proxies=OrderedDict(),
+ proxies={},
stream=False,
timeout=None,
verify="/tmp/test.crt",
@@ -328,7 +327,7 @@ def test_verify_respects_requests_ca_bundle_env_var(self, mock_session_send):
mock.ANY,
allow_redirects=True,
cert=None,
- proxies=OrderedDict(),
+ proxies={},
stream=False,
timeout=None,
verify="/tmp/test.crt",
@@ -346,7 +345,7 @@ def test_verify_false_parameter_overwrites_set_requests_ca_bundle_env_var(self,
mock.ANY,
allow_redirects=True,
cert=None,
- proxies=OrderedDict(),
+ proxies={},
stream=False,
timeout=None,
verify=False,
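
Finally, the `proxies={}` assertions in these http hook tests pass regardless of what mapping type `requests` builds internally (historically an `OrderedDict` produced by its setting-merge helpers), because mapping equality in Python ignores both insertion order and the dict subclass. A short sketch:

```python
from collections import OrderedDict

assert OrderedDict() == {}
assert OrderedDict([("a", 1), ("b", 2)]) == {"b": 2, "a": 1}

# Order only matters when two OrderedDicts are compared with each other:
assert OrderedDict([("a", 1), ("b", 2)]) != OrderedDict([("b", 2), ("a", 1)])
```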