-
Notifications
You must be signed in to change notification settings - Fork 74
/
Copy pathcore.py
2640 lines (2247 loc) · 107 KB
/
core.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
import logging
import os
import socket
import typing
from collections import deque
from copy import deepcopy
from datetime import datetime as Datetime
from datetime import timedelta as Timedelta
from decimal import Decimal
from hashlib import md5
from itertools import count
from os import getpid
from struct import pack
from typing import TYPE_CHECKING
from warnings import warn
from packaging import version
from scramp import ScramClient # type: ignore
from redshift_connector.config import (
DEFAULT_PROTOCOL_VERSION,
ClientProtocolVersion,
DbApiParamstyle,
_client_encoding,
max_int2,
max_int4,
max_int8,
min_int2,
min_int4,
min_int8,
pg_array_types,
pg_to_py_encodings,
)
from redshift_connector.cursor import Cursor
from redshift_connector.error import (
ArrayContentNotHomogenousError,
ArrayContentNotSupportedError,
DatabaseError,
DataError,
Error,
IntegrityError,
InterfaceError,
InternalError,
NotSupportedError,
OperationalError,
ProgrammingError,
Warning,
)
from redshift_connector.utils import (
FC_BINARY,
FC_TEXT,
NULL,
NULL_BYTE,
DriverInfo,
array_check_dimensions,
array_dim_lengths,
array_find_first_element,
array_flatten,
array_has_null,
array_recv_binary,
array_recv_text,
bh_unpack,
cccc_unpack,
ci_unpack,
date_in,
date_recv_binary,
float_array_recv,
geographyhex_recv,
h_pack,
h_unpack,
i_pack,
i_unpack,
ihihih_unpack,
ii_pack,
iii_pack,
int_array_recv,
make_divider_block,
numeric_in,
numeric_in_binary,
numeric_to_float_binary,
numeric_to_float_in,
)
from redshift_connector.utils import py_types as PY_TYPES
from redshift_connector.utils import q_pack
from redshift_connector.utils import redshift_types as REDSHIFT_TYPES
from redshift_connector.utils import (
text_recv,
time_in,
time_recv_binary,
timetz_in,
timetz_recv_binary,
varbytehex_recv,
walk_array,
)
from redshift_connector.utils.oids import RedshiftOID
if TYPE_CHECKING:
from ssl import SSLSocket
# Copyright (c) 2007-2009, Mathieu Fenniak
# Copyright (c) The Contributors
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
__author__ = "Mathieu Fenniak"
# Module-level logger used for wire-protocol tracing throughout this module.
_logger: logging.Logger = logging.getLogger(__name__)
# Zero-length timedelta constant (presumably used for timezone/interval math
# elsewhere in the module — not referenced in this chunk).
ZERO: Timedelta = Timedelta(0)
# DB-API style BINARY type object: binary column values map to `bytes`.
BINARY: type = bytes
# The purpose of this function is to change the placeholder of original query into $1, $2
# in order to be identified by database
# example: INSERT INTO book (title) VALUES (:title) -> INSERT INTO book (title) VALUES ($1)
# also return the function: make_args()
def convert_paramstyle(style: str, query) -> typing.Tuple[str, typing.Any]:
    """
    Rewrite ``query`` from the given DB-API paramstyle into positional
    ``$1, $2, ...`` placeholders understood by the server.

    Parameters
    ----------
    style : str
        One of the DB-API 2.0 paramstyles: ``qmark``, ``numeric``, ``named``,
        ``format`` or ``pyformat``.
    query : str
        SQL text. Quoted strings, quoted identifiers and comments are scanned
        so placeholder-like text inside them is left untouched.

    Returns
    -------
    tuple
        ``(rewritten_query, make_args)`` where ``make_args`` converts the
        user-supplied parameter container into the positional sequence sent
        to the server (identity for positional styles, mapping lookup for
        named styles).
    """
    # I don't see any way to avoid scanning the query string char by char,
    # so we might as well take that careful approach and create a
    # state-based scanner. We'll use int variables for the state.
    OUTSIDE: int = 0  # outside quoted string
    INSIDE_SQ: int = 1  # inside single-quote string '...'
    INSIDE_QI: int = 2  # inside quoted identifier "..."
    INSIDE_ES: int = 3  # inside escaped single-quote string, E'...'
    INSIDE_PN: int = 4  # inside parameter name eg. :name
    INSIDE_CO: int = 5  # inside inline comment eg. --
    INSIDE_MC: int = 6  # inside multiline comment eg. /*
    in_quote_escape: bool = False
    in_param_escape: bool = False
    placeholders: typing.List[str] = []  # named-style parameter names, in order seen
    output_query: typing.List[str] = []  # rewritten query, built up char by char
    # Infinite supply of "$1", "$2", ... for positional placeholders.
    param_idx: typing.Iterator[str] = map(lambda x: "$" + str(x), count(1))
    state: int = OUTSIDE
    prev_c: typing.Optional[str] = None
    for i, c in enumerate(query):
        # One character of lookahead; None at end of string.
        if i + 1 < len(query):
            next_c = query[i + 1]
        else:
            next_c = None
        if state == OUTSIDE:
            if c == "'":
                output_query.append(c)
                if prev_c == "E":
                    state = INSIDE_ES
                else:
                    state = INSIDE_SQ
            elif c == '"':
                output_query.append(c)
                state = INSIDE_QI
            elif c == "-":
                output_query.append(c)
                if prev_c == "-":
                    state = INSIDE_CO
            elif c == "*":
                output_query.append(c)
                if prev_c == "/":
                    state = INSIDE_MC
            elif style == DbApiParamstyle.QMARK.value and c == "?":
                output_query.append(next(param_idx))
            # BUGFIX: guard next_c against None before the `in ":="` test —
            # a query ending in ":" previously raised TypeError. A trailing
            # ":" is now passed through as an ordinary character.
            elif style == DbApiParamstyle.NUMERIC.value and c == ":" and next_c is not None and next_c not in ":=" and prev_c != ":":
                # Treat : as beginning of parameter name if and only
                # if it's the only : around
                # Needed to properly process type conversions
                # i.e. sum(x)::float
                output_query.append("$")
            elif style == DbApiParamstyle.NAMED.value and c == ":" and next_c is not None and next_c not in ":=" and prev_c != ":":
                # Same logic for : as in numeric parameters
                state = INSIDE_PN
                placeholders.append("")
            elif style == DbApiParamstyle.PYFORMAT.value and c == "%" and next_c == "(":
                state = INSIDE_PN
                placeholders.append("")
            elif style in (DbApiParamstyle.FORMAT.value, DbApiParamstyle.PYFORMAT.value) and c == "%":
                style = DbApiParamstyle.FORMAT.value
                if in_param_escape:
                    # second "%" of a "%%" escape: emit a literal percent
                    in_param_escape = False
                    output_query.append(c)
                else:
                    if next_c == "%":
                        in_param_escape = True
                    elif next_c == "s":
                        state = INSIDE_PN
                        output_query.append(next(param_idx))
                    else:
                        raise InterfaceError("Only %s and %% are supported in the query.")
            else:
                output_query.append(c)
        elif state == INSIDE_SQ:
            if c == "'":
                if in_quote_escape:
                    in_quote_escape = False
                else:
                    if next_c == "'":
                        # '' inside a string is an escaped quote, stay inside
                        in_quote_escape = True
                    else:
                        state = OUTSIDE
            output_query.append(c)
        elif state == INSIDE_QI:
            if c == '"':
                state = OUTSIDE
            output_query.append(c)
        elif state == INSIDE_ES:
            if c == "'" and prev_c != "\\":
                # check for escaped single-quote
                state = OUTSIDE
            output_query.append(c)
        elif state == INSIDE_PN:
            if style == DbApiParamstyle.NAMED.value:
                placeholders[-1] += c
                if next_c is None or (not next_c.isalnum() and next_c != "_"):
                    # parameter name ends here; reuse the index of an earlier
                    # occurrence of the same name, otherwise allocate a new one
                    state = OUTSIDE
                    try:
                        pidx: int = placeholders.index(placeholders[-1], 0, -1)
                        output_query.append("$" + str(pidx + 1))
                        del placeholders[-1]
                    except ValueError:
                        output_query.append("$" + str(len(placeholders)))
            elif style == DbApiParamstyle.PYFORMAT.value:
                if prev_c == ")" and c == "s":
                    state = OUTSIDE
                    try:
                        pidx = placeholders.index(placeholders[-1], 0, -1)
                        output_query.append("$" + str(pidx + 1))
                        del placeholders[-1]
                    except ValueError:
                        output_query.append("$" + str(len(placeholders)))
                elif c in "()":
                    pass
                else:
                    placeholders[-1] += c
            elif style == DbApiParamstyle.FORMAT.value:
                # consume the trailing "s" of "%s"
                state = OUTSIDE
        elif state == INSIDE_CO:
            output_query.append(c)
            if c == "\n":
                state = OUTSIDE
        elif state == INSIDE_MC:
            output_query.append(c)
            if c == "/" and prev_c == "*":
                state = OUTSIDE
        prev_c = c
    if style in (DbApiParamstyle.NUMERIC.value, DbApiParamstyle.QMARK.value, DbApiParamstyle.FORMAT.value):
        # positional styles: the caller already supplies an ordered sequence
        def make_args(vals):
            return vals

    else:
        # named styles: pull values out of the mapping in placeholder order
        def make_args(vals):
            return tuple(vals[p] for p in placeholders)

    return "".join(output_query), make_args
# Message codes
# All communication is through a stream of messages
# Driver will send one or more messages to database,
# and database will respond one or more messages
# The first byte of a message specify the type of the message
# NOTE: some byte values repeat (e.g. b"E", b"H", b"S", b"D", b"C") because
# backend->frontend and frontend->backend message types are distinct
# namespaces on the wire; direction disambiguates them.
# --- messages received from the server (backend) ---
NOTICE_RESPONSE: bytes = b"N"
AUTHENTICATION_REQUEST: bytes = b"R"
PARAMETER_STATUS: bytes = b"S"
BACKEND_KEY_DATA: bytes = b"K"
READY_FOR_QUERY: bytes = b"Z"
ROW_DESCRIPTION: bytes = b"T"
ERROR_RESPONSE: bytes = b"E"
DATA_ROW: bytes = b"D"
COMMAND_COMPLETE: bytes = b"C"
PARSE_COMPLETE: bytes = b"1"
BIND_COMPLETE: bytes = b"2"
CLOSE_COMPLETE: bytes = b"3"
PORTAL_SUSPENDED: bytes = b"s"
NO_DATA: bytes = b"n"
PARAMETER_DESCRIPTION: bytes = b"t"
NOTIFICATION_RESPONSE: bytes = b"A"
# COPY protocol messages flow in both directions.
COPY_DONE: bytes = b"c"
COPY_DATA: bytes = b"d"
COPY_IN_RESPONSE: bytes = b"G"
COPY_OUT_RESPONSE: bytes = b"H"
EMPTY_QUERY_RESPONSE: bytes = b"I"
# --- messages sent to the server (frontend) ---
BIND: bytes = b"B"
PARSE: bytes = b"P"
EXECUTE: bytes = b"E"
FLUSH: bytes = b"H"
SYNC: bytes = b"S"
PASSWORD: bytes = b"p"
DESCRIBE: bytes = b"D"
TERMINATE: bytes = b"X"
CLOSE: bytes = b"C"
def create_message(code: bytes, data: bytes = b"") -> bytes:
    """
    Frame a wire-protocol message.

    Layout: the 1-byte message type ``code``, a 4-byte big-endian length
    covering the payload plus the length field itself (hence ``+ 4``),
    then the payload ``data``.
    """
    length_field = typing.cast(bytes, i_pack(len(data) + 4))
    return b"".join((code, length_field, data))
# Pre-built parameter-less messages, framed once at import time.
FLUSH_MSG: bytes = create_message(FLUSH)
SYNC_MSG: bytes = create_message(SYNC)
TERMINATE_MSG: bytes = create_message(TERMINATE)
COPY_DONE_MSG: bytes = create_message(COPY_DONE)
# Execute the unnamed portal (empty name) with no row limit (0 = unlimited).
EXECUTE_MSG: bytes = create_message(EXECUTE, NULL_BYTE + i_pack(0))
# DESCRIBE constants
STATEMENT: bytes = b"S"
PORTAL: bytes = b"P"
# ErrorResponse codes
RESPONSE_SEVERITY: str = "S"  # always present
# NOTE(review): this immediately overwrites the "S" assignment above, so
# RESPONSE_SEVERITY is "V" and the first assignment is dead. Confirm whether
# a second, distinct constant (e.g. RESPONSE_SEVERITY_S) was intended.
RESPONSE_SEVERITY = "V"  # always present
RESPONSE_CODE: str = "C"  # always present
RESPONSE_MSG: str = "M"  # always present
RESPONSE_DETAIL: str = "D"
RESPONSE_HINT: str = "H"
RESPONSE_POSITION: str = "P"
RESPONSE__POSITION: str = "p"
RESPONSE__QUERY: str = "q"
RESPONSE_WHERE: str = "W"
RESPONSE_FILE: str = "F"
RESPONSE_LINE: str = "L"
RESPONSE_ROUTINE: str = "R"
# ReadyForQuery backend transaction-status indicators.
IDLE: bytes = b"I"
IDLE_IN_TRANSACTION: bytes = b"T"
IDLE_IN_FAILED_TRANSACTION: bytes = b"E"
# str.translate table: "[" -> "{", "]" -> "}", and drop space, "'" and "u"
# (used when converting array literals).
arr_trans: typing.Mapping[int, typing.Optional[str]] = dict(zip(map(ord, "[] 'u"), ["{", "}", None, None, None]))
class Connection:
    """A single session with an Amazon Redshift database (DB-API 2.0 connection)."""

    # DBAPI Extension: supply exceptions as attributes on the connection
    # Each access goes through _getError, which warns that the extension is
    # being used and then returns the exception class itself.
    Warning = property(lambda self: self._getError(Warning))
    Error = property(lambda self: self._getError(Error))
    InterfaceError = property(lambda self: self._getError(InterfaceError))
    DatabaseError = property(lambda self: self._getError(DatabaseError))
    OperationalError = property(lambda self: self._getError(OperationalError))
    IntegrityError = property(lambda self: self._getError(IntegrityError))
    InternalError = property(lambda self: self._getError(InternalError))
    ProgrammingError = property(lambda self: self._getError(ProgrammingError))
    NotSupportedError = property(lambda self: self._getError(NotSupportedError))
def __enter__(self: "Connection") -> "Connection":
    """Context-manager entry: the connection itself is the managed resource."""
    return self
def __exit__(self: "Connection", exc_type, exc_value, traceback) -> None:
    """Context-manager exit: close the connection unconditionally (exceptions propagate)."""
    self.close()
def _getError(self: "Connection", error):
    """
    Back the DB-API "exceptions as connection attributes" extension.

    Emits a warning that the optional extension is in use, then hands back
    the requested exception class unchanged.
    """
    message = "DB-API extension connection.%s used" % error.__name__
    warn(message, stacklevel=3)
    return error
@property
def client_os_version(self: "Connection") -> str:
    """
    Best-effort description of the client operating system.

    Returns :func:`platform.platform`, or ``"unknown"`` if the probe fails.
    This value is only sent as start-up metadata, so failures are never
    propagated to the caller.
    """
    from platform import platform as CLIENT_PLATFORM

    try:
        os_version: str = CLIENT_PLATFORM()
    except Exception:
        # narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed
        os_version = "unknown"
    return os_version
@staticmethod
def __get_host_address_info(host: str, port: int):
    """
    Returns IPv4 address and port given a host name and port

    Raises
    ------
    InterfaceError
        If name resolution yields no results.
    """
    # https://docs.python.org/3/library/socket.html#socket.getaddrinfo
    response = socket.getaddrinfo(host=host, port=port, family=socket.AF_INET)
    _logger.debug("getaddrinfo response %s", response)
    if not response:
        # Interpolate eagerly: exception constructors do not apply
        # logging-style %s substitution, so the original call surfaced the
        # raw template plus loose args instead of a readable message.
        raise InterfaceError("Unable to determine ip for host %s port %s" % (host, port))
    # (family, type, proto, canonname, sockaddr) -> sockaddr is (ip, port)
    return response[0][4]
def __init__(
    self: "Connection",
    user: str,
    password: str,
    database: str,
    host: str = "localhost",
    port: int = 5439,
    source_address: typing.Optional[str] = None,
    unix_sock: typing.Optional[str] = None,
    ssl: bool = True,
    sslmode: str = "verify-ca",
    timeout: typing.Optional[int] = None,
    max_prepared_statements: int = 1000,
    tcp_keepalive: typing.Optional[bool] = True,
    application_name: typing.Optional[str] = None,
    replication: typing.Optional[str] = None,
    client_protocol_version: int = DEFAULT_PROTOCOL_VERSION,
    database_metadata_current_db_only: bool = True,
    credentials_provider: typing.Optional[str] = None,
    provider_name: typing.Optional[str] = None,
    web_identity_token: typing.Optional[str] = None,
    numeric_to_float: bool = False,
    identity_namespace: typing.Optional[str] = None,
    token_type: typing.Optional[str] = None,
    idc_client_display_name: typing.Optional[str] = None,
):
    """
    Creates a :class:`Connection` to an Amazon Redshift cluster. For more information on establishing a connection to an Amazon Redshift cluster using `federated API access <https://aws.amazon.com/blogs/big-data/federated-api-access-to-amazon-redshift-using-an-amazon-redshift-connector-for-python/>`_ see our examples page.
    This is the underlying :class:`Connection` constructor called from :func:`redshift_connector.connect`.
    Parameters
    ----------
    user : str
        The username to use for authentication with the Amazon Redshift cluster.
    password : str
        The password to use for authentication with the Amazon Redshift cluster.
    database : str
        The name of the database instance to connect to.
    host : str
        The hostname of the Amazon Redshift cluster.
    port : int
        The port number of the Amazon Redshift cluster. Default value is 5439.
    source_address : Optional[str]
    unix_sock : Optional[str]
    ssl : bool
        Is SSL enabled. Default value is ``True``. SSL must be enabled when authenticating using IAM.
    sslmode : str
        The security of the connection to the Amazon Redshift cluster. 'verify-ca' and 'verify-full' are supported.
    timeout : Optional[int]
        The number of seconds before the connection to the server will timeout. By default there is no timeout.
    max_prepared_statements : int
    tcp_keepalive : Optional[bool]
        Is `TCP keepalive <https://en.wikipedia.org/wiki/Keepalive#TCP_keepalive>`_ used. The default value is ``True``.
    application_name : Optional[str]
        Sets the application name. The default value is None.
    replication : Optional[str]
        Used to run in `streaming replication mode <https://www.postgresql.org/docs/12/protocol-replication.html>`_.
    client_protocol_version : int
        The requested server protocol version. The default value is 1 representing `EXTENDED_RESULT_METADATA`. If the requested server protocol cannot be satisfied, a warning will be displayed to the user.
    database_metadata_current_db_only : bool
        Is `datashare <https://docs.aws.amazon.com/redshift/latest/dg/datashare-overview.html>`_ disabled. Default value is True, implying datasharing will not be used.
    credentials_provider : Optional[str]
        The class-path of the IdP plugin used for authentication with Amazon Redshift.
    provider_name : Optional[str]
        The name of the Redshift Native Auth Provider.
    web_identity_token: Optional[str]
        A web identity token used for authentication via Redshift Native IDP Integration
    numeric_to_float: bool
        Specifies if NUMERIC datatype values will be converted from ``decimal.Decimal`` to ``float``. By default NUMERIC values are received as ``decimal.Decimal``.
    identity_namespace: Optional[str]
        The identity namespace to be used with IdC auth plugin. Default value is None.
    token_type: Optional[str]
        The token type to be used for authentication using IdP Token auth plugin
    idc_client_display_name: Optional[str]
        The client display name to be used for user consent in IdC browser auth plugin.
    """
    self.merge_socket_read = True
    # NOTE(review): this binds a *local* variable that shadows the
    # module-level `_client_encoding` imported from config; the assignment
    # has no effect outside this method. Confirm whether
    # `self._client_encoding` (or a global statement) was intended.
    _client_encoding = "utf8"
    # Command tags for which the server reports an affected-row count.
    self._commands_with_count: typing.Tuple[bytes, ...] = (
        b"INSERT",
        b"DELETE",
        b"UPDATE",
        b"MOVE",
        b"FETCH",
        b"COPY",
        b"SELECT",
    )
    # Bounded buffers: only the 100 most recent server messages are kept.
    self.notifications: deque = deque(maxlen=100)
    self.notices: deque = deque(maxlen=100)
    self.parameter_statuses: deque = deque(maxlen=100)
    self.max_prepared_statements: int = int(max_prepared_statements)
    # Internal cursor for queries the driver itself issues.
    self._run_cursor: Cursor = Cursor(self, paramstyle=DbApiParamstyle.NAMED.value)
    self._client_protocol_version: int = client_protocol_version
    self._database = database
    # Deep-copied so per-connection type-conversion overrides below do not
    # mutate the module-level registries.
    self.py_types = deepcopy(PY_TYPES)
    self.redshift_types = deepcopy(REDSHIFT_TYPES)
    self._database_metadata_current_db_only: bool = database_metadata_current_db_only
    self.numeric_to_float: bool = numeric_to_float
    # based on _client_protocol_version value, we must use different conversion functions
    # for receiving some datatypes
    self._enable_protocol_based_conversion_funcs()
    self.web_identity_token = web_identity_token
    # NOTE(review): idc_client_display_name is accepted but not referenced in
    # this constructor body — presumably consumed elsewhere; confirm.
    if user is None:
        raise InterfaceError("The 'user' connection parameter cannot be None")
    redshift_native_auth: bool = False
    if application_name is None or application_name == "":
        # Default the application name to the module that invoked connect().

        def get_calling_module() -> str:
            """Walk up the call stack to name the module that created this connection."""
            import inspect

            module_name: str = ""
            stack: typing.List[inspect.FrameInfo] = inspect.stack()
            try:
                # get_calling_module -> init -> connect -> init -> calling module
                start: int = min(4, len(stack) - 1)
                parent = stack[start][0]
                calling_module = inspect.getmodule(parent)
                if calling_module:
                    module_name = calling_module.__name__
            except:
                # best-effort only; an unnamed application is acceptable
                pass
            finally:
                # Drop frame references promptly to avoid reference cycles.
                # NOTE(review): if an exception fired before `parent` was
                # bound, this `del parent` itself raises NameError — confirm.
                del parent
                del stack
            return module_name

        application_name = get_calling_module()
    # Key/value pairs for the start-up packet; None values are pruned below.
    init_params: typing.Dict[str, typing.Optional[typing.Union[str, bytes]]] = {
        "user": "",
        "database": database,
        "application_name": application_name,
        "replication": replication,
        "client_protocol_version": str(self._client_protocol_version),
        "driver_version": DriverInfo.driver_full_name(),
        "os_version": self.client_os_version,
    }
    if credentials_provider:
        init_params["plugin_name"] = credentials_provider
        # The plugin class name (last path component) decides the IdP flavor.
        if credentials_provider.split(".")[-1] in (
            "BasicJwtCredentialsProvider",
            "BrowserAzureOAuth2CredentialsProvider",
        ):
            redshift_native_auth = True
            init_params["idp_type"] = "AzureAD"
        if credentials_provider.split(".")[-1] in (
            "IdpTokenAuthPlugin",
            "BrowserIdcAuthPlugin",
        ):
            redshift_native_auth = True
            self.set_idc_plugins_params(
                init_params, credentials_provider, identity_namespace, token_type
            )
    if redshift_native_auth and provider_name:
        init_params["provider_name"] = provider_name
    if not redshift_native_auth or user:
        init_params["user"] = user
    _logger.debug(make_divider_block())
    _logger.debug("Building Redshift wire protocol start-up packet")
    _logger.debug(init_params)
    _logger.debug(make_divider_block())
    # Encode str values to bytes and prune Nones; iterate over a snapshot
    # because the dict is mutated during the loop.
    for k, v in tuple(init_params.items()):
        if isinstance(v, str):
            init_params[k] = v.encode("utf8")
        elif v is None:
            del init_params[k]
        elif not isinstance(v, (bytes, bytearray)):
            raise InterfaceError("The parameter " + k + " can't be of type " + str(type(v)) + ".")
    if "user" in init_params:
        self.user: bytes = typing.cast(bytes, init_params["user"])
    else:
        self.user = b""
    if isinstance(password, str):
        self.password: bytes = password.encode("utf8")
    else:
        self.password = password
    self.autocommit: bool = False
    self._xid = None
    self._caches: typing.Dict = {}
    # Create the TCP/Ip socket and connect to specific database
    # if there already has a socket, it will not create new connection when run connect again
    try:
        if unix_sock is None and host is not None:
            _logger.debug("creating tcp/ip socket")
            self._usock: typing.Union[socket.socket, "SSLSocket"] = socket.socket(
                socket.AF_INET, socket.SOCK_STREAM
            )
            if source_address is not None:
                # Bind the local end; port 0 lets the OS pick one.
                self._usock.bind((source_address, 0))
        elif unix_sock is not None:
            if not hasattr(socket, "AF_UNIX"):
                raise InterfaceError("attempt to connect to unix socket on unsupported platform")
            _logger.debug("creating af_unix socket")
            self._usock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        else:
            raise ProgrammingError("one of host or unix_sock must be provided")
        if timeout is not None:
            _logger.debug("set socket timeout=%s", timeout)
            self._usock.settimeout(timeout)
        if unix_sock is None and host is not None:
            # Resolve to an IPv4 (ip, port) pair before connecting.
            hostport: typing.Tuple[str, int] = Connection.__get_host_address_info(host, port)
            _logger.debug("Attempting to create connection socket with address %s", hostport)
            self._usock.connect(hostport)
        elif unix_sock is not None:
            _logger.debug("connecting to socket with unix socket")
            self._usock.connect(unix_sock)
        _logger.debug("Connection socket established")
        # For Redshift, we the default ssl approve is True
        # create ssl connection with Redshift CA certificates and check the hostname
        if ssl is True:
            try:
                from ssl import CERT_REQUIRED, SSLContext

                # ssl_context = ssl.create_default_context()
                # Locate the bundled Redshift CA file next to this module.
                path = os.path.abspath(__file__)
                if os.name == "nt":
                    path = "\\".join(path.split("\\")[:-1]) + "\\files\\redshift-ca-bundle.crt"
                else:
                    path = "/".join(path.split("/")[:-1]) + "/files/redshift-ca-bundle.crt"
                ssl_context: SSLContext = SSLContext()
                ssl_context.verify_mode = CERT_REQUIRED
                ssl_context.load_default_certs()
                _logger.debug("try to load Redshift CA certs from location %s", path)
                ssl_context.load_verify_locations(path)
                # Int32(8) - Message length, including self.
                # Int32(80877103) - The SSL request code.
                _logger.debug("Sending SSLRequestMessage to BE")
                self._usock.sendall(ii_pack(8, 80877103))
                # Single-byte reply: b"S" means the server accepts SSL.
                resp: bytes = self._usock.recv(1)
                if resp != b"S":
                    # NOTE(review): "$s" in the log template below looks like
                    # a typo for the "%s" placeholder — resp is never
                    # interpolated. (Left unchanged here; flagging only.)
                    _logger.debug("Server response code when attempting to establish ssl connection: $s", resp)
                    raise InterfaceError("Server refuses SSL")
                if sslmode == "verify-ca":
                    _logger.debug("applying sslmode=%s to socket", sslmode)
                    self._usock = ssl_context.wrap_socket(self._usock)
                elif sslmode == "verify-full":
                    _logger.debug("applying sslmode=%s to socket and force check_hostname", sslmode)
                    ssl_context.check_hostname = True
                    self._usock = ssl_context.wrap_socket(self._usock, server_hostname=host)
                else:
                    _logger.debug("unknown sslmode=%s is ignored", sslmode)
                _logger.debug("Socket SSL details: %s", self._usock.cipher())  # type: ignore
            except ImportError:
                raise InterfaceError("SSL required but ssl module not available in this Python installation")
        # Buffered file-like wrapper used for all subsequent protocol I/O.
        self._sock = self._usock.makefile(mode="rwb")
        if tcp_keepalive:
            _logger.debug("enabling tcp keepalive on socket")
            self._usock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
    except socket.timeout as timeout_error:
        self._usock.close()
        raise OperationalError("connection time out", timeout_error)
    except socket.error as e:
        try:
            _logger.debug("Socket state: %s", self._usock.__dict__)
        except:
            # diagnostics only; never mask the original socket error
            pass
        self._usock.close()
        raise InterfaceError("communication error", e)
    # Bound-method shortcuts for the hot read/write path.
    self._flush: typing.Callable = self._sock.flush
    self._read: typing.Callable = self._sock.read
    self._write: typing.Callable = self._sock.write
    self._backend_key_data: typing.Optional[bytes] = None
    # NOTE(review): trans_tab and glbls are bound but not used in this
    # method — presumably leftovers from array-literal parsing; confirm.
    trans_tab = dict(zip(map(ord, "{}"), "[]"))
    glbls = {"Decimal": Decimal}
    # Python type -> inspection function used when sending parameters.
    self.inspect_funcs: typing.Dict[type, typing.Callable] = {
        Datetime: self.inspect_datetime,
        list: self.array_inspect,
        tuple: self.array_inspect,
        int: self.inspect_int,
    }
    # it's a dictionary whose key is type of message,
    # value is the corresponding function to process message
    self.message_types: typing.Dict[bytes, typing.Callable] = {
        NOTICE_RESPONSE: self.handle_NOTICE_RESPONSE,
        AUTHENTICATION_REQUEST: self.handle_AUTHENTICATION_REQUEST,
        PARAMETER_STATUS: self.handle_PARAMETER_STATUS,
        BACKEND_KEY_DATA: self.handle_BACKEND_KEY_DATA,
        READY_FOR_QUERY: self.handle_READY_FOR_QUERY,
        ROW_DESCRIPTION: self.handle_ROW_DESCRIPTION,
        ERROR_RESPONSE: self.handle_ERROR_RESPONSE,
        EMPTY_QUERY_RESPONSE: self.handle_EMPTY_QUERY_RESPONSE,
        DATA_ROW: self.handle_DATA_ROW,
        COMMAND_COMPLETE: self.handle_COMMAND_COMPLETE,
        PARSE_COMPLETE: self.handle_PARSE_COMPLETE,
        BIND_COMPLETE: self.handle_BIND_COMPLETE,
        CLOSE_COMPLETE: self.handle_CLOSE_COMPLETE,
        PORTAL_SUSPENDED: self.handle_PORTAL_SUSPENDED,
        NO_DATA: self.handle_NO_DATA,
        PARAMETER_DESCRIPTION: self.handle_PARAMETER_DESCRIPTION,
        NOTIFICATION_RESPONSE: self.handle_NOTIFICATION_RESPONSE,
        COPY_DONE: self.handle_COPY_DONE,
        COPY_DATA: self.handle_COPY_DATA,
        COPY_IN_RESPONSE: self.handle_COPY_IN_RESPONSE,
        COPY_OUT_RESPONSE: self.handle_COPY_OUT_RESPONSE,
    }
    # Int32 - Message length, including self.
    # Int32(196608) - Protocol version number. Version 3.0.
    # Any number of key/value pairs, terminated by a zero byte:
    # String - A parameter name (user, database, or options)
    # String - Parameter value
    # Conduct start-up communication with database
    # Message's first part is the protocol version - Int32(196608)
    protocol: int = 196608
    val: bytearray = bytearray(i_pack(protocol))
    # Message include parameters name and value (user, database, application_name, replication)
    for k, v in init_params.items():
        val.extend(k.encode("ascii") + NULL_BYTE + typing.cast(bytes, v) + NULL_BYTE)
    val.append(0)
    _logger.debug("Sending start-up parameters to BE")
    # Use write and flush function to write the content of the buffer
    # and then send the message to the database
    self._write(i_pack(len(val) + 4))
    self._write(val)
    self._flush()
    self._cursor: Cursor = self.cursor()
    code = None
    self.error: typing.Optional[Exception] = None
    _logger.debug("Awaiting BE response to start-up parameters")
    # When driver send the start-up message to database, DB will respond multi messages to driver
    # whose format is same with the message that driver sent to DB.
    while code not in (READY_FOR_QUERY, ERROR_RESPONSE):
        # Thus use a loop to process each message
        # Each time will read 5 bytes, the first byte, the code, inform the type of message
        # following 4 bytes inform the message's length
        # then can use this length to minus 4 to get the real data.
        buffer = self._read(5)
        if len(buffer) == 0:
            # Empty read means the server closed the socket mid-handshake.
            if self._usock.timeout is not None:
                raise InterfaceError(
                    "BrokenPipe: server socket closed. We noticed a timeout is set for this connection. Consider "
                    "raising the timeout or defaulting timeout to none."
                )
            else:
                raise InterfaceError(
                    "BrokenPipe: server socket closed. Please check that client side networking configurations such "
                    "as Proxies, firewalls, VPN, etc. are not affecting your network connection."
                )
        code, data_len = ci_unpack(buffer)
        _logger.debug("Wire message from BE Code=%s", code)
        # Dispatch the payload (length minus the 4-byte length field) to the
        # registered handler; handlers record errors on self.error.
        self.message_types[code](self._read(data_len - 4), None)
    if self.error is not None:
        _logger.debug("Error occurred during start up communication: %s", self.error)
        raise self.error
    # if we didn't receive a server_protocol_version from the server, default to
    # using BASE_SERVER as the server is likely lacking this functionality due to
    # being out of date
    if (
        self._client_protocol_version > ClientProtocolVersion.BASE_SERVER
        and not (b"server_protocol_version", str(self._client_protocol_version).encode()) in self.parameter_statuses
    ):
        _logger.debug("Server_protocol_version not received from server")
        self._client_protocol_version = ClientProtocolVersion.BASE_SERVER
        # Re-register conversion functions for the downgraded protocol.
        self._enable_protocol_based_conversion_funcs()
    self.in_transaction = False
    _logger.debug("Connection.__init__ completed")
def _enable_protocol_based_conversion_funcs(self: "Connection"):
    """
    Install the per-OID receive/conversion functions appropriate for the negotiated
    client protocol version.

    Binary-capable protocols (>= ``ClientProtocolVersion.BINARY``) get binary
    receivers where the server supports them; older protocols fall back to text
    receivers. When ``numeric_to_float`` is set, NUMERIC values are additionally
    converted to Python floats instead of Decimal.
    """
    if self._client_protocol_version >= ClientProtocolVersion.BINARY.value:
        _logger.debug("Enabling binary protocol data conversion functions")
        receivers = {
            RedshiftOID.NUMERIC: (FC_BINARY, numeric_in_binary),
            RedshiftOID.DATE: (FC_BINARY, date_recv_binary),
            RedshiftOID.GEOGRAPHY: (FC_BINARY, geographyhex_recv),  # GEOGRAPHY
            RedshiftOID.TIME: (FC_BINARY, time_recv_binary),
            RedshiftOID.TIMETZ: (FC_BINARY, timetz_recv_binary),
            RedshiftOID.CHAR_ARRAY: (FC_BINARY, array_recv_binary),  # CHAR[]
            RedshiftOID.SMALLINT_ARRAY: (FC_BINARY, array_recv_binary),  # INT2[]
            RedshiftOID.INTEGER_ARRAY: (FC_BINARY, array_recv_binary),  # INT4[]
            RedshiftOID.TEXT_ARRAY: (FC_BINARY, array_recv_binary),  # TEXT[]
            RedshiftOID.VARCHAR_ARRAY: (FC_BINARY, array_recv_binary),  # VARCHAR[]
            RedshiftOID.REAL_ARRAY: (FC_BINARY, array_recv_binary),  # FLOAT4[]
            RedshiftOID.OID_ARRAY: (FC_BINARY, array_recv_binary),  # OID[]
            RedshiftOID.ACLITEM_ARRAY: (FC_BINARY, array_recv_binary),  # ACLITEM[]
            RedshiftOID.VARBYTE: (FC_TEXT, text_recv),  # VARBYTE stays on the text path
        }
        if self.numeric_to_float:
            _logger.debug("Enabling numeric to float binary conversion function")
            receivers[RedshiftOID.NUMERIC] = (FC_BINARY, numeric_to_float_binary)
        self.redshift_types.update(receivers)
    else:  # text protocol
        _logger.debug("Enabling text protocol data conversion functions")
        receivers = {
            RedshiftOID.NUMERIC: (FC_TEXT, numeric_in),
            RedshiftOID.TIME: (FC_TEXT, time_in),
            RedshiftOID.DATE: (FC_TEXT, date_in),
            RedshiftOID.GEOGRAPHY: (FC_TEXT, text_recv),  # GEOGRAPHY
            # TIMETZ uses the binary receiver in both protocol branches
            RedshiftOID.TIMETZ: (FC_BINARY, timetz_recv_binary),
            RedshiftOID.CHAR_ARRAY: (FC_TEXT, array_recv_text),  # CHAR[]
            RedshiftOID.SMALLINT_ARRAY: (FC_TEXT, int_array_recv),  # INT2[]
            RedshiftOID.INTEGER_ARRAY: (FC_TEXT, int_array_recv),  # INT4[]
            RedshiftOID.TEXT_ARRAY: (FC_TEXT, array_recv_text),  # TEXT[]
            RedshiftOID.VARCHAR_ARRAY: (FC_TEXT, array_recv_text),  # VARCHAR[]
            RedshiftOID.REAL_ARRAY: (FC_TEXT, float_array_recv),  # FLOAT4[]
            RedshiftOID.OID_ARRAY: (FC_TEXT, int_array_recv),  # OID[]
            RedshiftOID.ACLITEM_ARRAY: (FC_TEXT, array_recv_text),  # ACLITEM[]
            RedshiftOID.VARBYTE: (FC_TEXT, varbytehex_recv),  # VARBYTE
        }
        if self.numeric_to_float:
            _logger.debug("Enabling numeric to float text conversion function")
            receivers[RedshiftOID.NUMERIC] = (FC_TEXT, numeric_to_float_in)
        self.redshift_types.update(receivers)
    _logger.debug("connection.redshift_types=%s", str(self.redshift_types))
@property
def _is_multi_databases_catalog_enable_in_server(self: "Connection") -> bool:
    """
    Return True if the server reported multi-database catalog support by sending the
    ``datashare_enabled`` startup parameter with value "on"; False otherwise.

    Servers lacking this capability never send the parameter, so its absence means
    "not supported".
    """
    # str("on").encode() in the original was just b"on"; membership test replaces
    # the redundant if/else returning literal True/False.
    return (b"datashare_enabled", b"on") in self.parameter_statuses
@property
def _is_cross_datasharing_enable_in_server(self: "Connection") -> bool:
    """
    Report whether cross datasharing is enabled for this session.

    Scans ``parameter_statuses`` for the first ``external_database`` entry and maps
    "on"/"off" to True/False. If the server never sent the parameter, cross
    datasharing is treated as disabled.

    :raises InterfaceError: if the server sent a value other than "on" or "off".
    """
    for parameter in self.parameter_statuses:
        if parameter[0] != b"external_database":
            continue
        if parameter[1] == b"on":
            return True
        if parameter[1] == b"off":
            return False
        raise InterfaceError(
            "Protocol error. Session setup failed. Invalid value of external_database parameter. Only on/off are valid values"
        )
    # Parameter never received from the server: disabled.
    return False
@property
def is_single_database_metadata(self):
    """
    Return whether metadata queries should be restricted to the current database.

    Always returns False when the server has cross datasharing enabled. Otherwise
    returns True if the ``database_metadata_current_db_only`` connection parameter
    is enabled, or if the server does not have the multi-database catalog enabled;
    returns False only when that parameter is disabled and the server supports the
    multi-database catalog.

    (The original docstring described the True and False cases with the same
    condition; this version states the actual logic.)
    """
    # Cross datasharing unconditionally exposes multi-database metadata.
    if self._is_cross_datasharing_enable_in_server:
        return False
    return self._database_metadata_current_db_only or not self._is_multi_databases_catalog_enable_in_server
def handle_ERROR_RESPONSE(self: "Connection", data, ps):
    """
    Handler for ErrorResponse message received via Amazon Redshift wire protocol,
    represented by b'E' code.

    The message body is a series of NULL-terminated fields, each consisting of a
    one-byte field-type code followed by a string value. The decoded fields are
    wrapped in an exception chosen from the SQLSTATE response code and stored on
    ``self.error``; the caller raises it later rather than this handler raising
    directly.

    Parameters
    ----------
    :param data: bytes:
        Message content
    :param ps: typing.Optional[typing.Dict[str, typing.Any]]:
        Prepared Statement from associated Cursor

    Returns
    -------
    None:None
    """
    msg: typing.Dict[str, str] = {}
    for field in data.split(NULL_BYTE):
        if field == b"":
            # Empty chunk is the message terminator / trailing split artifact.
            continue
        msg[field[:1].decode(_client_encoding)] = field[1:].decode(_client_encoding)
    _logger.debug("ErrorResponse received from BE: %s", msg)
    # Map well-known SQLSTATE codes to more specific exception classes;
    # everything else is surfaced as a ProgrammingError.
    exception_for_code = {
        "28000": InterfaceError,  # SQLSTATE: invalid authorization specification
        "23505": IntegrityError,  # SQLSTATE: unique violation
    }
    cls: type = exception_for_code.get(msg[RESPONSE_CODE], ProgrammingError)
    self.error = cls(msg)
def handle_EMPTY_QUERY_RESPONSE(self: "Connection", data, ps):
    """
    Handler for EmptyQueryResponse message received via Amazon Redshift wire protocol,
    represented by b'I' code.

    EmptyQueryResponse (B)
        Byte1('I') — sent in place of CommandComplete when the query string was empty.
        Int32(4) — length of message contents in bytes, including self.

    Records a ProgrammingError on ``self.error``; the caller raises it later.

    Parameters
    ----------
    :param data: bytes:
        Message content
    :param ps: typing.Optional[typing.Dict[str, typing.Any]]:
        Prepared Statement from associated Cursor

    Returns
    -------
    None:None
    """
    _logger.debug("EmptyQueryResponse received from BE")
    # An empty query string is a client-side mistake; surface it as ProgrammingError.
    self.error = ProgrammingError("query was empty")
def handle_CLOSE_COMPLETE(self: "Connection", data, ps):
    """
    Handler for CloseComplete message received via Amazon Redshift wire protocol,
    represented by b'3' code. Intentionally a no-op beyond debug logging — the
    driver requires no bookkeeping when the backend confirms a Close.

    CloseComplete (B)
        Byte1('3') — identifies the message as a Close-complete indicator.
        Int32(4) — length of message contents in bytes, including self.

    Parameters
    ----------
    :param data: bytes:
        Message content
    :param ps: typing.Optional[typing.Dict[str, typing.Any]]:
        Prepared Statement from associated Cursor

    Returns
    -------
    None:None
    """
    _logger.debug("CloseComplete received from BE")
def handle_PARSE_COMPLETE(self: "Connection", data, ps):
"""
Handler for ParseComplete message received via Amazon Redshift wire protocol, represented by b'1' code. Currently a
no-op.
ParseComplete (B)