Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

remove DataSourceCredentials #11605

Merged
merged 4 commits into from
May 22, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -193,3 +193,61 @@ def __init__(
self.synonyms = kwargs['synonyms']
self.encryption_key = kwargs.get('encryption_key', None)
self.e_tag = kwargs.get('e_tag', None)


class SearchIndexerDataSourceConnection(msrest.serialization.Model):
    """Represents a datasource connection definition, which can be used to configure an indexer.

    This is the public, flattened counterpart of the generated
    ``SearchIndexerDataSource`` model: the connection string is exposed
    directly instead of being nested inside a credentials object.

    All required parameters must be populated in order to send to Azure.

    :param name: Required. The name of the datasource connection.
    :type name: str
    :param description: The description of the datasource connection.
    :type description: str
    :param type: Required. The type of the datasource connection. Possible values include: "azuresql",
     "cosmosdb", "azureblob", "azuretable", "mysql".
    :type type: str or ~azure.search.documents.models.SearchIndexerDataSourceType
    :param connection_string: Required. The connection string for the datasource connection.
    :type connection_string: str
    :param container: Required. The data container for the datasource connection.
    :type container: ~azure.search.documents.models.SearchIndexerDataContainer
    :param data_change_detection_policy: The data change detection policy for the datasource connection.
    :type data_change_detection_policy: ~azure.search.documents.models.DataChangeDetectionPolicy
    :param data_deletion_detection_policy: The data deletion detection policy for the datasource connection.
    :type data_deletion_detection_policy:
     ~azure.search.documents.models.DataDeletionDetectionPolicy
    :param e_tag: The ETag of the data source.
    :type e_tag: str
    """

    _validation = {
        'name': {'required': True},
        'type': {'required': True},
        'connection_string': {'required': True},
        'container': {'required': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'connection_string': {'key': 'connectionString', 'type': 'str'},
        # TODO(review): consider flattening 'container' too, like connection_string —
        # accept a container name (+ kwargs) and build the SearchIndexerDataContainer internally.
        'container': {'key': 'container', 'type': 'SearchIndexerDataContainer'},
        'data_change_detection_policy': {'key': 'dataChangeDetectionPolicy', 'type': 'DataChangeDetectionPolicy'},
        'data_deletion_detection_policy': {'key': 'dataDeletionDetectionPolicy', 'type': 'DataDeletionDetectionPolicy'},
        # The '.' in the OData annotation key is escaped so msrest does not
        # treat it as nested-attribute traversal when (de)serializing.
        'e_tag': {'key': '@odata\\.etag', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(SearchIndexerDataSourceConnection, self).__init__(**kwargs)
        self.name = kwargs['name']
        self.description = kwargs.get('description', None)
        self.type = kwargs['type']
        self.connection_string = kwargs['connection_string']
        self.container = kwargs['container']
        self.data_change_detection_policy = kwargs.get('data_change_detection_policy', None)
        self.data_deletion_detection_policy = kwargs.get('data_deletion_detection_policy', None)
        self.e_tag = kwargs.get('e_tag', None)
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,12 @@

from ._generated import SearchServiceClient as _SearchServiceClient
from ._generated.models import SearchIndexerSkillset
from ._utils import get_access_conditions, normalize_endpoint
from ._utils import (
get_access_conditions,
normalize_endpoint,
pack_search_indexer_data_source,
unpack_search_indexer_data_source,
)
from ..._headers_mixin import HeadersMixin
from ..._version import SDK_MONIKER

Expand Down Expand Up @@ -251,13 +256,13 @@ def get_indexer_status(self, name, **kwargs):

@distributed_trace
def create_datasource(self, data_source, **kwargs):
    # type: (SearchIndexerDataSourceConnection, **Any) -> SearchIndexerDataSourceConnection
    """Creates a new datasource.

    :param data_source: The definition of the datasource to create.
    :type data_source: ~search.models.SearchIndexerDataSourceConnection
    :return: The created SearchIndexerDataSourceConnection
    :rtype: ~search.models.SearchIndexerDataSourceConnection
    """
    kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
    # Convert the public flattened model into the generated SearchIndexerDataSource
    # (connection string re-wrapped in DataSourceCredentials) before calling the service.
    packed_data_source = pack_search_indexer_data_source(data_source)
    result = self._client.data_sources.create(packed_data_source, **kwargs)
    # Unpack the service response back into the public model for the caller.
    return unpack_search_indexer_data_source(result)

@distributed_trace
def create_or_update_datasource(self, data_source, name=None, **kwargs):
    # type: (SearchIndexerDataSourceConnection, Optional[str], **Any) -> SearchIndexerDataSourceConnection
    """Creates a new datasource or updates a datasource if it already exists.

    :param name: The name of the datasource to create or update.
    :type name: str
    :param data_source: The definition of the datasource to create or update.
    :type data_source: ~search.models.SearchIndexerDataSourceConnection
    :keyword match_condition: The match condition to use upon the etag
    :type match_condition: ~azure.core.MatchConditions
    :return: The created SearchIndexerDataSourceConnection
    :rtype: ~search.models.SearchIndexerDataSourceConnection
    """
    # TODO(review): allow individual keyword arguments that, when present,
    # override fields of the datasource on update (see skillsets for an example).
    kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
    # Translate the optional etag match condition into the error map and
    # access-condition headers that the generated client expects.
    error_map, access_condition = get_access_conditions(
        data_source, kwargs.pop("match_condition", MatchConditions.Unconditionally)
    )
    kwargs.update(access_condition)
    # Fall back to the model's own name when no explicit name is given.
    if not name:
        name = data_source.name
    packed_data_source = pack_search_indexer_data_source(data_source)
    result = self._client.data_sources.create_or_update(
        data_source_name=name,
        data_source=packed_data_source,
        error_map=error_map,
        **kwargs
    )
    return unpack_search_indexer_data_source(result)

@distributed_trace
def get_datasource(self, name, **kwargs):
    # type: (str, **Any) -> SearchIndexerDataSourceConnection
    """Retrieves a datasource definition.

    :param name: The name of the datasource to retrieve.
    :type name: str
    :return: The SearchIndexerDataSourceConnection that is fetched.
    :rtype: ~search.models.SearchIndexerDataSourceConnection
    """
    kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
    result = self._client.data_sources.get(name, **kwargs)
    # The generated model nests the connection string inside credentials;
    # unpack it into the flattened public model for the caller.
    return unpack_search_indexer_data_source(result)

@distributed_trace
def get_datasources(self, **kwargs):
    # type: (**Any) -> Sequence[SearchIndexerDataSourceConnection]
    """Lists all datasources available for a search service.

    :return: List of all the data sources.
    :rtype: list[~search.models.SearchIndexerDataSourceConnection]
    """
    kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
    result = self._client.data_sources.list(**kwargs)
    # Unpack each generated model into the flattened public model.
    return [unpack_search_indexer_data_source(x) for x in result.data_sources]

@distributed_trace
def delete_datasource(self, data_source, **kwargs):
# type: (Union[str, SearchIndexerDataSource], **Any) -> None
# type: (Union[str, SearchIndexerDataSourceConnection], **Any) -> None
"""Deletes a datasource. To use access conditions, the Datasource model must be
provided instead of the name. It is enough to provide the name of the datasource
to delete unconditionally

:param data_source: The datasource to delete.
:type data_source: str or ~search.models.SearchIndexerDataSource
:type data_source: str or ~search.models.SearchIndexerDataSourceConnection
:keyword match_condition: The match condition to use upon the etag
:type match_condition: ~azure.core.MatchConditions
:return: None
Expand All @@ -365,7 +372,7 @@ def delete_datasource(self, data_source, **kwargs):
:end-before: [END delete_data_source]
:language: python
:dedent: 4
:caption: Delete a SearchIndexerDataSource
:caption: Delete a SearchIndexerDataSourceConnection
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
error_map, access_condition = get_access_conditions(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,8 @@
)
from ._generated.models import (
AzureActiveDirectoryApplicationCredentials,
DataSourceCredentials,
SearchIndexerDataSource as _SearchIndexerDataSource,
SearchResourceEncryptionKey as _SearchResourceEncryptionKey,
SynonymMap as _SynonymMap,
SearchIndex,
Expand All @@ -25,6 +27,7 @@
PatternAnalyzer,
PatternTokenizer,
SynonymMap,
SearchIndexerDataSourceConnection,
SearchResourceEncryptionKey,
)

Expand Down Expand Up @@ -197,6 +200,39 @@ def unpack_search_resource_encryption_key(search_resource_encryption_key):
application_secret=search_resource_encryption_key.access_credentials.application_secret
)

def pack_search_indexer_data_source(search_indexer_data_source):
    # type: (SearchIndexerDataSourceConnection) -> _SearchIndexerDataSource
    """Convert a public SearchIndexerDataSourceConnection into the generated
    SearchIndexerDataSource model, re-wrapping the flattened connection
    string in a DataSourceCredentials object.

    Returns None when given a falsy input.
    """
    if not search_indexer_data_source:
        return None
    source = search_indexer_data_source
    return _SearchIndexerDataSource(
        name=source.name,
        description=source.description,
        type=source.type,
        # The generated model carries the connection string nested in credentials.
        credentials=DataSourceCredentials(
            connection_string=source.connection_string
        ),
        container=source.container,
        data_change_detection_policy=source.data_change_detection_policy,
        data_deletion_detection_policy=source.data_deletion_detection_policy,
        e_tag=source.e_tag
    )

def unpack_search_indexer_data_source(search_indexer_data_source):
    # type: (_SearchIndexerDataSource) -> SearchIndexerDataSourceConnection
    """Convert a generated SearchIndexerDataSource into the public
    SearchIndexerDataSourceConnection model, flattening the nested
    credentials object into a plain connection string.

    Returns None when given a falsy input.
    """
    if not search_indexer_data_source:
        return None
    # NOTE(review): guard against a missing credentials object — connection
    # strings are write-only secrets and may be redacted on read; the
    # original unconditional attribute access would raise AttributeError.
    credentials = search_indexer_data_source.credentials
    connection_string = credentials.connection_string if credentials else None
    return SearchIndexerDataSourceConnection(
        name=search_indexer_data_source.name,
        description=search_indexer_data_source.description,
        type=search_indexer_data_source.type,
        connection_string=connection_string,
        container=search_indexer_data_source.container,
        data_change_detection_policy=search_indexer_data_source.data_change_detection_policy,
        data_deletion_detection_policy=search_indexer_data_source.data_deletion_detection_policy,
        e_tag=search_indexer_data_source.e_tag
    )

def get_access_conditions(model, match_condition=MatchConditions.Unconditionally):
# type: (Any, MatchConditions) -> Tuple[Dict[int, Any], Dict[str, bool]]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError}
Expand Down
Loading