
Commit 97e0f0c

rueiyhuangtimm4205 authored and committed
refactor: Fix pre-commit hook code formatting specified in .pre-commit-config.yaml
1 parent 171b40c · commit 97e0f0c


43 files changed · +2418 −1289 lines changed

redshift_connector/__init__.py

Lines changed: 3 additions & 5 deletions

```diff
@@ -103,6 +103,7 @@
 
 __author__ = "Mathieu Fenniak"
 
+
 def validate_keepalive_values(idle, interval, count):
     if idle is not None:
         if idle <= 0:
@@ -116,6 +117,7 @@ def validate_keepalive_values(idle, interval, count):
         if count <= 0:
            raise ValueError("tcp_keepalive_count must be positive")
 
+
 def connect(
     user: typing.Optional[str] = None,
     database: typing.Optional[str] = None,
@@ -420,11 +422,7 @@ def connect(
 
     if info.tcp_keepalive:
         try:
-            validate_keepalive_values(
-                info.tcp_keepalive_idle,
-                info.tcp_keepalive_interval,
-                info.tcp_keepalive_count
-            )
+            validate_keepalive_values(info.tcp_keepalive_idle, info.tcp_keepalive_interval, info.tcp_keepalive_count)
         except ValueError as e:
             raise InterfaceError(str(e))
```
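Note on the hunk above: the reformatter only collapses the multi-line `validate_keepalive_values(...)` call onto one line; behavior is unchanged. For readers unfamiliar with the flow, here is a minimal standalone sketch of how `connect()` uses this validation. Only the idle/count checks and the `ValueError` to `InterfaceError` wrapping are visible in the diff; the interval check and the `check_keepalive_settings` wrapper name are illustrative assumptions.

```python
import typing


class InterfaceError(Exception):
    """Stand-in for redshift_connector.InterfaceError."""


def validate_keepalive_values(
    idle: typing.Optional[int], interval: typing.Optional[int], count: typing.Optional[int]
) -> None:
    # Each option is optional; when supplied it must be a positive integer.
    if idle is not None and idle <= 0:
        raise ValueError("tcp_keepalive_idle must be positive")
    if interval is not None and interval <= 0:
        raise ValueError("tcp_keepalive_interval must be positive")
    if count is not None and count <= 0:
        raise ValueError("tcp_keepalive_count must be positive")


def check_keepalive_settings(idle=None, interval=None, count=None) -> None:
    # connect() re-raises the ValueError as the driver's InterfaceError.
    try:
        validate_keepalive_values(idle, interval, count)
    except ValueError as e:
        raise InterfaceError(str(e))
```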

redshift_connector/core.py

Lines changed: 13 additions & 12 deletions

```diff
@@ -2,7 +2,7 @@
 import os
 import socket
 import typing
-from collections import deque, OrderedDict
+from collections import OrderedDict, deque
 from copy import deepcopy
 from datetime import datetime as Datetime
 from datetime import timedelta as Timedelta
@@ -18,8 +18,8 @@
 from scramp import ScramClient  # type: ignore
 
 from redshift_connector.config import (
-    DEFAULT_PROTOCOL_VERSION,
     DEFAULT_MAX_PREPARED_STATEMENTS,
+    DEFAULT_PROTOCOL_VERSION,
     ClientProtocolVersion,
     DbApiParamstyle,
     _client_encoding,
@@ -358,6 +358,7 @@ def create_message(code: bytes, data: bytes = b"") -> bytes:
 
 arr_trans: typing.Mapping[int, typing.Optional[str]] = dict(zip(map(ord, "[] 'u"), ["{", "}", None, None, None]))
 
+
 class Connection:
     # DBAPI Extension: supply exceptions as attributes on the connection
     Warning = property(lambda self: self._getError(Warning))
@@ -569,9 +570,7 @@ def get_calling_module() -> str:
                 "BrowserIdcAuthPlugin",
             ):
                 redshift_native_auth = True
-                self.set_idc_plugins_params(
-                    init_params, credentials_provider, identity_namespace, token_type
-                )
+                self.set_idc_plugins_params(init_params, credentials_provider, identity_namespace, token_type)
 
             if redshift_native_auth and provider_name:
                 init_params["provider_name"] = provider_name
@@ -688,24 +687,24 @@ def get_calling_module() -> str:
         # Set TCP keepalive parameters if supported by platform and values are defined
         if tcp_keepalive_idle is not None:
             # Mac OS X uses TCP_KEEPALIVE instead of TCP_KEEPIDLE
-            if hasattr(socket, 'TCP_KEEPIDLE'):
+            if hasattr(socket, "TCP_KEEPIDLE"):
                 self._usock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, tcp_keepalive_idle)
                 _logger.debug(f"Set TCP_KEEPIDLE to {tcp_keepalive_idle}")
-            elif hasattr(socket, 'TCP_KEEPALIVE'):  # macOS/BSD
+            elif hasattr(socket, "TCP_KEEPALIVE"):  # macOS/BSD
                 self._usock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPALIVE, tcp_keepalive_idle)
                 _logger.debug(f"Set TCP_KEEPALIVE to {tcp_keepalive_idle}")
             else:
                 _logger.warning("Neither TCP_KEEPIDLE nor TCP_KEEPALIVE supported on this platform")
 
         if tcp_keepalive_interval is not None:
-            if hasattr(socket, 'TCP_KEEPINTVL'):
+            if hasattr(socket, "TCP_KEEPINTVL"):
                 self._usock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, tcp_keepalive_interval)
                 _logger.debug(f"Set TCP_KEEPINTVL to {tcp_keepalive_interval}")
             else:
                 _logger.warning("TCP_KEEPINTVL not supported on this platform")
 
         if tcp_keepalive_count is not None:
-            if hasattr(socket, 'TCP_KEEPCNT'):
+            if hasattr(socket, "TCP_KEEPCNT"):
                 self._usock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, tcp_keepalive_count)
                 _logger.debug(f"Set TCP_KEEPCNT to {tcp_keepalive_count}")
             else:
```
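The quote-style changes above sit inside the platform-dependent keepalive setup: Linux exposes `TCP_KEEPIDLE`, macOS/BSD expose `TCP_KEEPALIVE`, and `TCP_KEEPINTVL`/`TCP_KEEPCNT` are wrapped in `hasattr` because not every platform defines them. A self-contained sketch of the same guard pattern follows; the `apply_tcp_keepalive` helper and the explicit `SO_KEEPALIVE` toggle are illustrative, not the driver's own code.

```python
import logging
import socket

_logger = logging.getLogger(__name__)


def apply_tcp_keepalive(sock: socket.socket, idle: int = 60, interval: int = 10, count: int = 5) -> None:
    """Turn on keepalive and apply per-platform tuning options when the platform exposes them."""
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)

    if hasattr(socket, "TCP_KEEPIDLE"):  # Linux
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, idle)
    elif hasattr(socket, "TCP_KEEPALIVE"):  # macOS/BSD use TCP_KEEPALIVE instead of TCP_KEEPIDLE
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPALIVE, idle)
    else:
        _logger.warning("Neither TCP_KEEPIDLE nor TCP_KEEPALIVE supported on this platform")

    if hasattr(socket, "TCP_KEEPINTVL"):
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, interval)
    if hasattr(socket, "TCP_KEEPCNT"):
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, count)
```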
```diff
@@ -1686,7 +1685,7 @@ def execute(self: "Connection", cursor: Cursor, operation: str, vals) -> None:
             cache = param_cache[pid] = {
                 "statement": {},
                 "ps": {},
-                "statement_dict": OrderedDict() if self.max_prepared_statements > 0 else None
+                "statement_dict": OrderedDict() if self.max_prepared_statements > 0 else None,
             }
 
         try:
@@ -1709,7 +1708,7 @@ def execute(self: "Connection", cursor: Cursor, operation: str, vals) -> None:
         try:
             ps = cache["ps"][key]
             # If statement exists, move it to end of ordered dict (most recently used)
-            if self.max_prepared_statements > 0 and 'statement_dict' in cache and key in cache["statement_dict"]:
+            if self.max_prepared_statements > 0 and "statement_dict" in cache and key in cache["statement_dict"]:
                 cache["statement_dict"].move_to_end(key)
                 _logger.debug("Using cached prepared statement")
             cursor.ps = ps
@@ -2543,6 +2542,8 @@ def get_statement_name_bin(self, statement_name: str) -> bytes:
 
     def get_max_prepared_statement(self, max_prepared_statements: int) -> int:
         if max_prepared_statements < 0:
-            _logger.error("Parameter max_prepared_statements must >= 0. Using default value %d", DEFAULT_MAX_PREPARED_STATEMENTS)
+            _logger.error(
+                "Parameter max_prepared_statements must >= 0. Using default value %d", DEFAULT_MAX_PREPARED_STATEMENTS
+            )
             return DEFAULT_MAX_PREPARED_STATEMENTS
         return max_prepared_statements
```
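The trailing-comma and quote fixes above touch the prepared-statement cache, which keys an `OrderedDict` by statement and calls `move_to_end` on a hit so the least recently used entry can be evicted first; `get_max_prepared_statement` falls back to `DEFAULT_MAX_PREPARED_STATEMENTS` for negative limits. A reduced sketch of that LRU idea follows; the class name, the default value, and the eviction helper are assumptions for illustration.

```python
from collections import OrderedDict

DEFAULT_MAX_PREPARED_STATEMENTS = 1000  # illustrative default, not necessarily the driver's value


def get_max_prepared_statement(max_prepared_statements: int) -> int:
    # Negative limits are replaced with the default, mirroring the hunk above.
    if max_prepared_statements < 0:
        return DEFAULT_MAX_PREPARED_STATEMENTS
    return max_prepared_statements


class PreparedStatementCache:
    """Tiny LRU keyed by statement text; evicts the least recently used entry once full."""

    def __init__(self, max_size: int) -> None:
        self.max_size = get_max_prepared_statement(max_size)
        self._lru: "OrderedDict[str, object]" = OrderedDict()

    def get(self, key: str):
        if key in self._lru:
            self._lru.move_to_end(key)  # mark as most recently used
            return self._lru[key]
        return None

    def put(self, key: str, ps: object) -> None:
        self._lru[key] = ps
        self._lru.move_to_end(key)
        if self.max_size > 0 and len(self._lru) > self.max_size:
            self._lru.popitem(last=False)  # drop the least recently used statement
```

`OrderedDict` keeps insertion order, and `move_to_end` plus `popitem(last=False)` give constant-time LRU bookkeeping without an extra dependency, which is presumably why it was chosen here.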

redshift_connector/cursor.py

Lines changed: 74 additions & 31 deletions

```diff
@@ -4,12 +4,9 @@
 import typing
 from collections import deque
 from itertools import count, islice
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Optional
 from warnings import warn
 
-from redshift_connector.metadataServerAPIHelper import MetadataServerAPIHelper
-from redshift_connector.metadataAPIPostProcessing import MetadataAPIPostProcessing
-
 import redshift_connector
 from redshift_connector.config import (
     ClientProtocolVersion,
@@ -22,6 +19,8 @@
     InterfaceError,
     ProgrammingError,
 )
+from redshift_connector.metadataAPIPostProcessing import MetadataAPIPostProcessing
+from redshift_connector.metadataServerAPIHelper import MetadataServerAPIHelper
 
 if TYPE_CHECKING:
     from redshift_connector.core import Connection
@@ -111,11 +110,11 @@ def __init__(self: "Cursor", connection: "Connection", paramstyle=None) -> None:
 
         self._metadataServerAPIHelper = MetadataServerAPIHelper(self)
         self._metadataAPIPostProcessing = MetadataAPIPostProcessing(self)
-        self._cur_catalog: str = None
-        self._SHOW_DATABASES_Col_index: typing.Optional = None
-        self._SHOW_SCHEMAS_Col_index: typing.Optional = None
-        self._SHOW_TABLES_Col_index: typing.Optional = None
-        self._SHOW_COLUMNS_Col_index: typing.Optional = None
+        self._cur_catalog: Optional[str] = None
+        self._SHOW_DATABASES_Col_index: Optional[typing.Dict] = None
+        self._SHOW_SCHEMAS_Col_index: Optional[typing.Dict] = None
+        self._SHOW_TABLES_Col_index: Optional[typing.Dict] = None
+        self._SHOW_COLUMNS_Col_index: Optional[typing.Dict] = None
 
         # The minimum show discovery version for the following metadata api was version 2:
         # get_catalogs, get_schemas, get_tables, get_columns
```
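The annotation fixes above replace `self._cur_catalog: str = None` and bare `typing.Optional` with parameterized `Optional[...]` types: assigning `None` to a plain `str` attribute is rejected by recent mypy defaults (no implicit Optional), and `typing.Optional` without an argument leaves the inner type unspecified. A tiny sketch of the corrected style, with placeholder attribute names:

```python
import typing
from typing import Optional


class CursorState:
    def __init__(self) -> None:
        # Before: self._cur_catalog: str = None  -> flagged by mypy (no implicit Optional)
        self.cur_catalog: Optional[str] = None
        # Before: bare typing.Optional -> the contained type was never stated
        self.show_tables_col_index: Optional[typing.Dict] = None
```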
```diff
@@ -706,7 +705,9 @@ def _get_catalog_filter_conditions(
 
         catalog_filter: str = ""
         if catalog is not None and catalog != "":
-            if self._c.is_single_database_metadata is True or api_supported_only_for_connected_database is True:
+            if (
+                self._c and self._c.is_single_database_metadata is True
+            ) or api_supported_only_for_connected_database is True:
                 catalog_filter += " AND current_database() = {catalog}".format(catalog=self.__escape_quotes(catalog))
             else:
                 if database_col_name is None or database_col_name == "":
@@ -723,13 +724,19 @@ def get_schemas(
             raise InterfaceError("connection is closed")
 
         if self.supportSHOWDiscovery() >= self._MIN_SHOW_DISCOVERY_VERSION:
-            _logger.debug("Support SHOW command. get_schemas with catalog = %s, schema_pattern = %s", catalog, schema_pattern)
+            _logger.debug(
+                "Support SHOW command. get_schemas with catalog = %s, schema_pattern = %s", catalog, schema_pattern
+            )
 
             # Commented out the following line since the Driver will temporarily accept empty string but will block in near future
             # ret_empty: bool = self.is_empty(catalog) or self.is_empty(schema_pattern)
             ret_empty: bool = False
 
-            schemas: typing.Tuple = self._metadataAPIPostProcessing.get_schema_post_processing(self._metadataServerAPIHelper.get_schema_server_api(catalog, schema_pattern, ret_empty, self._c.is_single_database_metadata))
+            schemas: typing.Tuple = self._metadataAPIPostProcessing.get_schema_post_processing(
+                self._metadataServerAPIHelper.get_schema_server_api(
+                    catalog, schema_pattern, ret_empty, self._c.is_single_database_metadata if self._c else False
+                )
+            )
 
             return schemas
         else:
@@ -740,7 +747,7 @@ def get_schemas_legacy_hardcoded_query(
     ) -> tuple:
         query_args: typing.List[str] = []
         sql: str = ""
-        if self._c.is_single_database_metadata is True:
+        if self._c and self._c.is_single_database_metadata is True:
             sql = (
                 "SELECT nspname AS TABLE_SCHEM, current_database() AS TABLE_CATALOG FROM pg_catalog.pg_namespace "
                 " WHERE nspname <> 'pg_toast' AND (nspname !~ '^pg_temp_' "
@@ -853,22 +860,25 @@ def get_catalogs(self: "Cursor") -> typing.Tuple:
         if self.supportSHOWDiscovery() >= self._MIN_SHOW_DISCOVERY_VERSION:
             _logger.debug("Support SHOW command. get_catalogs")
 
-            if self._c.is_single_database_metadata is True:
+            if self._c and self._c.is_single_database_metadata is True:
                 sql = "select current_database as TABLE_CAT FROM current_database() ORDER BY TABLE_CAT"
 
                 self.execute(sql)
-                catalogs: typing.Tuple = self.fetchall()
+                result_catalogs: typing.Tuple = self.fetchall()
                 self._metadataAPIPostProcessing.set_row_description(self._metadataAPIPostProcessing._get_catalogs_col)
+                catalogs = result_catalogs
             else:
-                catalogs: typing.Tuple = self._metadataAPIPostProcessing.get_catalog_post_processing(self._metadataServerAPIHelper.get_catalog_server_api())
+                catalogs = self._metadataAPIPostProcessing.get_catalog_post_processing(
+                    self._metadataServerAPIHelper.get_catalog_server_api()
+                )
 
             return catalogs
         else:
             return self.get_catalogs_legacy_hardcoded_query()
 
     def get_catalogs_legacy_hardcoded_query(self: "Cursor") -> typing.Tuple:
         sql: str = ""
-        if self._c.is_single_database_metadata is True:
+        if self._c and self._c.is_single_database_metadata is True:
             sql = "select current_database as TABLE_CAT FROM current_database()"
         else:
             # Datasharing/federation support enable, so get databases using the new view.
@@ -906,13 +916,27 @@ def get_tables(
             types = []
 
         if self.supportSHOWDiscovery() >= self._MIN_SHOW_DISCOVERY_VERSION:
-            _logger.debug("Support SHOW command. get_tables with catalog = %s, schema_pattern = %s, table_name_pattern = %s", catalog, schema_pattern, table_name_pattern)
+            _logger.debug(
+                "Support SHOW command. get_tables with catalog = %s, schema_pattern = %s, table_name_pattern = %s",
+                catalog,
+                schema_pattern,
+                table_name_pattern,
+            )
 
             # Commented out the following line since the Driver will temporarily accept empty string but will block in near future
             # ret_empty: bool = self.is_empty(catalog) or self.is_empty(schema_pattern) or self.is_empty(table_name_pattern)
             ret_empty: bool = False
 
-            tables: typing.Tuple = self._metadataAPIPostProcessing.get_table_post_processing(self._metadataServerAPIHelper.get_table_server_api(catalog, schema_pattern, table_name_pattern, ret_empty, self._c.is_single_database_metadata), types)
+            tables: typing.Tuple = self._metadataAPIPostProcessing.get_table_post_processing(
+                self._metadataServerAPIHelper.get_table_server_api(
+                    catalog,
+                    schema_pattern,
+                    table_name_pattern,
+                    ret_empty,
+                    self._c.is_single_database_metadata if self._c else False,
+                ),
+                types,
+            )
 
             return tables
         else:
@@ -931,7 +955,7 @@ def get_tables_legacy_hardcoded_query(
         if schema_pattern_type == "LOCAL_SCHEMA_QUERY":
             sql, sql_args = self.__build_local_schema_tables_query(catalog, schema_pattern, table_name_pattern, types)
         elif schema_pattern_type == "NO_SCHEMA_UNIVERSAL_QUERY":
-            if self._c.is_single_database_metadata is True:
+            if self._c and self._c.is_single_database_metadata is True:
                 sql, sql_args = self.__build_universal_schema_tables_query(
                     catalog, schema_pattern, table_name_pattern, types
                 )
@@ -1214,24 +1238,41 @@ def get_columns(
             raise InterfaceError("connection is closed")
 
         if self.supportSHOWDiscovery() >= self._MIN_SHOW_DISCOVERY_VERSION:
-            _logger.debug("Support SHOW command. get_columns with catalog = %s, schema_pattern = %s, table_name_pattern = %s, column_name_pattern = %s", catalog, schema_pattern, tablename_pattern, columnname_pattern)
+            _logger.debug(
+                "Support SHOW command. get_columns with catalog = %s, schema_pattern = %s, table_name_pattern = %s, column_name_pattern = %s",
+                catalog,
+                schema_pattern,
+                tablename_pattern,
+                columnname_pattern,
+            )
 
             # Commented out the following line since the Driver will temporarily accept empty string but will block in near future
             # ret_empty: bool = self.is_empty(catalog) or self.is_empty(schema_pattern) or self.is_empty(tablename_pattern) or self.is_empty(columnname_pattern)
             ret_empty: bool = False
 
-            columns: typing.Tuple = self._metadataAPIPostProcessing.get_column_post_processing(self._metadataServerAPIHelper.get_column_server_api(catalog, schema_pattern, tablename_pattern, columnname_pattern, ret_empty, self._c.is_single_database_metadata))
+            columns: typing.Tuple = self._metadataAPIPostProcessing.get_column_post_processing(
+                self._metadataServerAPIHelper.get_column_server_api(
+                    catalog,
+                    schema_pattern,
+                    tablename_pattern,
+                    columnname_pattern,
+                    ret_empty,
+                    self._c.is_single_database_metadata if self._c else False,
+                )
+            )
 
             return columns
         else:
-            return self.get_columns_legacy_hardcoded_query(catalog, schema_pattern, tablename_pattern, columnname_pattern)
+            return self.get_columns_legacy_hardcoded_query(
+                catalog, schema_pattern, tablename_pattern, columnname_pattern
+            )
 
     def get_columns_legacy_hardcoded_query(
-        self: "Cursor",
-        catalog: typing.Optional[str] = None,
-        schema_pattern: typing.Optional[str] = None,
-        tablename_pattern: typing.Optional[str] = None,
-        columnname_pattern: typing.Optional[str] = None,
+        self: "Cursor",
+        catalog: typing.Optional[str] = None,
+        schema_pattern: typing.Optional[str] = None,
+        tablename_pattern: typing.Optional[str] = None,
+        columnname_pattern: typing.Optional[str] = None,
     ) -> tuple:
         sql: str = ""
         schema_pattern_type: str = self.__schema_pattern_match(schema_pattern)
@@ -1240,7 +1281,7 @@ def get_columns_legacy_hardcoded_query(
                 catalog, schema_pattern, tablename_pattern, columnname_pattern
             )
         elif schema_pattern_type == "NO_SCHEMA_UNIVERSAL_QUERY":
-            if self._c.is_single_database_metadata is True:
+            if self._c and self._c.is_single_database_metadata is True:
                 sql = self.__build_universal_schema_columns_query(
                     catalog, schema_pattern, tablename_pattern, columnname_pattern
                 )
@@ -2412,7 +2453,7 @@ def __schema_pattern_match(self: "Cursor", schema_pattern: typing.Optional[str])
         if self._c is None:
             raise InterfaceError("connection is closed")
         if schema_pattern is not None and schema_pattern != "":
-            if self._c.is_single_database_metadata is True:
+            if self._c and self._c.is_single_database_metadata is True:
                 sql: str = "select 1 from svv_external_schemas where schemaname like {schema}".format(
                     schema=self.__escape_quotes(schema_pattern)
                 )
@@ -2434,6 +2475,8 @@ def __escape_quotes(self: "Cursor", s: str) -> str:
         return "'{s}'".format(s=self.__sanitize_str(s))
 
     def supportSHOWDiscovery(self: "Cursor") -> int:
+        if self._c is None:
+            return 0
         for item in self._c.parameter_statuses:
             if item[0] == b"show_discovery":
                 try:
@@ -2462,4 +2505,4 @@ def cur_catalog(self) -> str:
             self._cur_catalog = catalogs[0][0]
         _logger.debug("current catalog: %s", self._cur_catalog)
 
-        return self._cur_catalog
+        return self._cur_catalog
```
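Most of the substantive edits in this file guard attribute access on a possibly-absent connection: `self._c.is_single_database_metadata` becomes `self._c and self._c.is_single_database_metadata` or `... if self._c else False`, and `supportSHOWDiscovery` now returns 0 when `self._c` is `None`. The pattern in reduced form; the class names and attributes below are placeholders, not the driver's API.

```python
import typing


class FakeConnection:
    """Placeholder for redshift_connector.Connection in this sketch."""

    is_single_database_metadata: bool = True
    parameter_statuses = [(b"show_discovery", b"2")]


class MiniCursor:
    def __init__(self, connection: typing.Optional[FakeConnection]) -> None:
        self._c = connection  # becomes None once the connection goes away

    def support_show_discovery(self) -> int:
        # Treat a missing connection as "no SHOW discovery support" instead of raising AttributeError.
        if self._c is None:
            return 0
        for name, value in self._c.parameter_statuses:
            if name == b"show_discovery":
                try:
                    return int(value)
                except ValueError:
                    return 0
        return 0

    def single_database_metadata(self) -> bool:
        # Guarded access: a closed connection behaves as if the flag were False.
        return self._c.is_single_database_metadata if self._c else False
```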

redshift_connector/idp_auth_helper.py

Lines changed: 2 additions & 1 deletion

```diff
@@ -43,7 +43,7 @@ def get_pkg_version(module_name: str) -> Version:
         try:
             from importlib.metadata import version as version
         except ModuleNotFoundError:  # if importlib is not present, fallback to pkg_resources
-            import pkg_resources
+            import pkg_resources  # type: ignore
 
             return Version(pkg_resources.get_distribution(module_name).version)
 
@@ -52,6 +52,7 @@ def get_pkg_version(module_name: str) -> Version:
             return Version(pkg_version)
 
         import importlib
+
         imported_module = importlib.import_module(module_name)
         return Version(imported_module.__version__)
 
```
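The last hunk only adds a `# type: ignore` and a blank line, but `get_pkg_version` itself is a useful cascade: prefer `importlib.metadata.version`, fall back to `pkg_resources` on interpreters that lack it, and as a last resort import the module and read `__version__`. A hedged sketch of that cascade follows; it is simplified relative to the driver's actual helper and assumes the `packaging` library is installed.

```python
from packaging.version import Version


def get_pkg_version(module_name: str) -> Version:
    # Preferred path: importlib.metadata, in the standard library since Python 3.8.
    try:
        from importlib.metadata import version
    except ModuleNotFoundError:  # very old interpreter: fall back to pkg_resources
        import pkg_resources  # type: ignore

        return Version(pkg_resources.get_distribution(module_name).version)

    try:
        return Version(version(module_name))
    except Exception:
        # Last resort: import the module and read its __version__ attribute.
        import importlib

        imported_module = importlib.import_module(module_name)
        return Version(imported_module.__version__)
```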