Skip to content

Commit

Permalink
[qna] regen (#19827)
Browse files Browse the repository at this point in the history
* regen qna

* Model renames

* Client param order

* Updates to docs, samples, tests

* Update test trigger

* Fix live tests

* Fix live samples

Co-authored-by: antisch <[email protected]>
  • Loading branch information
iscai-msft and annatisch authored Jul 20, 2021
1 parent 1cbcc75 commit 85ce0e9
Show file tree
Hide file tree
Showing 33 changed files with 533 additions and 425 deletions.
24 changes: 12 additions & 12 deletions sdk/cognitivelanguage/azure-ai-language-questionanswering/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -70,43 +70,43 @@ The following examples show common scenarios using the `client` [created above](

### Ask a question

The only input required to ask a question using a knowledgebase is the question itself:
The only input required to ask a question using a knowledge base is the question itself:

```python
from azure.ai.language.questionanswering import models as qna

params = qna.KnowledgebaseQueryParameters(
params = qna.KnowledgeBaseQueryOptions(
question="How long should my Surface battery last?"
)

output = client.query_knowledgebase(
project_name="FAQ",
knowledgebase_query_parameters=params
knowledge_base_query_options=params
)
for candidate in output.answers:
print("({}) {}".format(candidate.confidence_score, candidate.answer))
print("Source: {}".format(candidate.source))

```

You can set additional properties on `KnowledgebaseQueryParameters` to limit the number of answers, specify a minimum confidence score, and more.
You can set additional properties on `KnowledgeBaseQueryOptions` to limit the number of answers, specify a minimum confidence score, and more.

### Ask a follow-up question

If your knowledgebase is configured for [chit-chat][questionanswering_docs_chat], you can ask a follow-up question by providing the previous question-answering ID and, optionally, the exact question the user asked:
If your knowledge base is configured for [chit-chat][questionanswering_docs_chat], you can ask a follow-up question by providing the previous question-answering ID and, optionally, the exact question the user asked:

```python
params = qna.models.KnowledgebaseQueryParameters(
params = qna.models.KnowledgeBaseQueryOptions(
    question="How long should charging take?",
context=qna.models.KnowledgebaseAnswerRequestContext(
context=qna.models.KnowledgeBaseAnswerRequestContext(
previous_user_query="How long should my Surface battery last?",
previous_qna_id=previous_answer.id
)
)

output = client.query_knowledgebase(
project_name="FAQ",
knowledgebase_query_parameters=params
knowledge_base_query_options=params
)
for candidate in output.answers:
print("({}) {}".format(candidate.confidence_score, candidate.answer))
Expand All @@ -123,13 +123,13 @@ from azure.ai.language.questionanswering import models as qna

client = QuestionAnsweringClient(endpoint, credential)

params = qna.KnowledgebaseQueryParameters(
params = qna.KnowledgeBaseQueryOptions(
question="How long should my Surface battery last?"
)

output = await client.query_knowledgebase(
project_name="FAQ",
knowledgebase_query_parameters=params
knowledge_base_query_options=params
)
```

Expand All @@ -149,8 +149,8 @@ from azure.core.exceptions import HttpResponseError

try:
client.query_knowledgebase(
project_name="invalid-knowledgebase",
knowledgebase_query_parameters=params
project_name="invalid-knowledge-base",
knowledge_base_query_options=params
)
except HttpResponseError as error:
print("Query failed: {}".format(error.message))
Expand Down
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
Original file line number Diff line number Diff line change
Expand Up @@ -10,10 +10,11 @@
from ._version import VERSION

__version__ = VERSION
__all__ = ['QuestionAnsweringClient']
__all__ = ["QuestionAnsweringClient"]

try:
from ._patch import patch_sdk # type: ignore

patch_sdk()
except ImportError:
pass
Original file line number Diff line number Diff line change
Expand Up @@ -12,22 +12,23 @@
from azure.core import PipelineClient
from msrest import Deserializer, Serializer

from . import models
from ._configuration import QuestionAnsweringClientConfiguration
from .operations import QuestionAnsweringClientOperationsMixin

if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any

from azure.core.credentials import AzureKeyCredential
from azure.core.rest import HttpRequest, HttpResponse

from ._configuration import QuestionAnsweringClientConfiguration
from .operations import QuestionAnsweringClientOperationsMixin
from . import models


class QuestionAnsweringClient(QuestionAnsweringClientOperationsMixin):
"""The language service API is a suite of natural language processing (NLP) skills built with best-in-class Microsoft machine learning algorithms. The API can be used to analyze unstructured text for tasks such as sentiment analysis, key phrase extraction, language detection and question answering. Further documentation can be found in :code:`<a href="https://docs.microsoft.com/en-us/azure/cognitive-services/text-analytics/overview">https://docs.microsoft.com/en-us/azure/cognitive-services/text-analytics/overview</a>`.
:param endpoint: Supported Cognitive Services endpoint (e.g., https://:code:`<resource-name>`.api.cognitiveservices.azure.com).
:param endpoint: Supported Cognitive Services endpoint (e.g.,
https://:code:`<resource-name>`.api.cognitiveservices.azure.com).
:type endpoint: str
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials.AzureKeyCredential
Expand All @@ -49,15 +50,19 @@ def __init__(
self._deserialize = Deserializer(client_models)
self._serialize.client_side_validation = False

def send_request(self, request, **kwargs):
# type: (HttpRequest, Any) -> HttpResponse
def send_request(
self,
request, # type: HttpRequest
**kwargs # type: Any
):
# type: (...) -> HttpResponse
"""Runs the network request through the client's chained policies.
We have helper methods to create requests specific to this service in `azure.ai.language.questionanswering.rest`.
Use these helper methods to create the request you pass to this method. See our example below:
>>> from azure.ai.language.questionanswering.rest import build_query_knowledgebase_request
>>> request = build_query_knowledgebase_request(project_name, json, content, deployment_name)
>>> request = build_query_knowledgebase_request(project_name=project_name, json=json, content=content, deployment_name=deployment_name, **kwargs)
<HttpRequest [POST], url: '/:query-knowledgebases'>
>>> response = client.send_request(request)
<HttpResponse: 200 OK>
Expand All @@ -73,6 +78,7 @@ def send_request(self, request, **kwargs):
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.rest.HttpResponse
"""

request_copy = deepcopy(request)
path_format_arguments = {
"Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,4 +8,4 @@

from ._question_answering_client import QuestionAnsweringClient

__all__ = ['QuestionAnsweringClient']
__all__ = ["QuestionAnsweringClient"]
Original file line number Diff line number Diff line change
Expand Up @@ -7,22 +7,23 @@
# --------------------------------------------------------------------------

from copy import deepcopy
from typing import Any
from typing import Any, Awaitable

from azure.core import AsyncPipelineClient
from azure.core.credentials import AzureKeyCredential
from azure.core.rest import AsyncHttpResponse, HttpRequest
from msrest import Deserializer, Serializer

from .. import models
from ._configuration import QuestionAnsweringClientConfiguration
from .operations import QuestionAnsweringClientOperationsMixin
from .. import models


class QuestionAnsweringClient(QuestionAnsweringClientOperationsMixin):
"""The language service API is a suite of natural language processing (NLP) skills built with best-in-class Microsoft machine learning algorithms. The API can be used to analyze unstructured text for tasks such as sentiment analysis, key phrase extraction, language detection and question answering. Further documentation can be found in :code:`<a href="https://docs.microsoft.com/en-us/azure/cognitive-services/text-analytics/overview">https://docs.microsoft.com/en-us/azure/cognitive-services/text-analytics/overview</a>`.
:param endpoint: Supported Cognitive Services endpoint (e.g., https://:code:`<resource-name>`.api.cognitiveservices.azure.com).
:param endpoint: Supported Cognitive Services endpoint (e.g.,
https://:code:`<resource-name>`.api.cognitiveservices.azure.com).
:type endpoint: str
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials.AzureKeyCredential
Expand All @@ -38,14 +39,14 @@ def __init__(self, endpoint: str, credential: AzureKeyCredential, **kwargs: Any)
self._deserialize = Deserializer(client_models)
self._serialize.client_side_validation = False

def send_request(self, request: HttpRequest, **kwargs: Any) -> AsyncHttpResponse:
def send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]:
"""Runs the network request through the client's chained policies.
We have helper methods to create requests specific to this service in `azure.ai.language.questionanswering.rest`.
Use these helper methods to create the request you pass to this method. See our example below:
>>> from azure.ai.language.questionanswering.rest import build_query_knowledgebase_request
>>> request = build_query_knowledgebase_request(project_name, json, content, deployment_name)
>>> request = build_query_knowledgebase_request(project_name=project_name, json=json, content=content, deployment_name=deployment_name, **kwargs)
<HttpRequest [POST], url: '/:query-knowledgebases'>
>>> response = await client.send_request(request)
<AsyncHttpResponse: 200 OK>
Expand All @@ -61,6 +62,7 @@ def send_request(self, request: HttpRequest, **kwargs: Any) -> AsyncHttpResponse
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.rest.AsyncHttpResponse
"""

request_copy = deepcopy(request)
path_format_arguments = {
"Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest

from ... import models as _models, rest
from ... import models as _models, rest as rest

T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
Expand All @@ -29,44 +29,42 @@
class QuestionAnsweringClientOperationsMixin:
async def query_knowledgebase(
self,
knowledgebase_query_parameters: "_models.KnowledgebaseQueryParameters",
knowledge_base_query_options: "_models.KnowledgeBaseQueryOptions",
*,
project_name: str,
deployment_name: Optional[str] = None,
**kwargs: Any
) -> "_models.KnowledgebaseAnswers":
"""Answers the specified question using your knowledgebase.
) -> "_models.KnowledgeBaseAnswers":
"""Answers the specified question using your knowledge base.
Answers the specified question using your knowledgebase.
Answers the specified question using your knowledge base.
:param knowledge_base_query_options: Post body of the request.
:type knowledge_base_query_options:
~azure.ai.language.questionanswering.models.KnowledgeBaseQueryOptions
:keyword project_name: The name of the project to use.
:paramtype project_name: str
:param knowledgebase_query_parameters: Post body of the request.
:type knowledgebase_query_parameters:
~azure.ai.language.questionanswering.models.KnowledgebaseQueryParameters
:keyword deployment_name: The name of the specific deployment of the project to use.
:paramtype deployment_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: KnowledgebaseAnswers, or the result of cls(response)
:rtype: ~azure.ai.language.questionanswering.models.KnowledgebaseAnswers
:return: KnowledgeBaseAnswers, or the result of cls(response)
:rtype: ~azure.ai.language.questionanswering.models.KnowledgeBaseAnswers
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop("cls", None) # type: ClsType["_models.KnowledgebaseAnswers"]
cls = kwargs.pop("cls", None) # type: ClsType["_models.KnowledgeBaseAnswers"]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))

content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]

json = self._serialize.body(knowledgebase_query_parameters, "object")
json = self._serialize.body(knowledge_base_query_options, "KnowledgeBaseQueryOptions")

request = rest.build_query_knowledgebase_request(
content_type=content_type,
project_name=project_name,
deployment_name=deployment_name,
json=json,
content_type=content_type,
template_url=self.query_knowledgebase.metadata["url"],
**kwargs
)
)._to_pipeline_transport_request()
path_format_arguments = {
"Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
Expand All @@ -82,24 +80,22 @@ async def query_knowledgebase(
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error)

deserialized = self._deserialize("KnowledgebaseAnswers", pipeline_response)
deserialized = self._deserialize("KnowledgeBaseAnswers", pipeline_response)

if cls:
return cls(PipelineResponse._convert(pipeline_response), deserialized, {})
return cls(pipeline_response, deserialized, {})

return deserialized

query_knowledgebase.metadata = {"url": "/:query-knowledgebases"} # type: ignore

async def query_text(
self, text_query_parameters: "_models.TextQueryParameters", **kwargs: Any
) -> "_models.TextAnswers":
async def query_text(self, text_query_options: "_models.TextQueryOptions", **kwargs: Any) -> "_models.TextAnswers":
"""Answers the specified question using the provided text in the body.
Answers the specified question using the provided text in the body.
:param text_query_parameters: Post body of the request.
:type text_query_parameters: ~azure.ai.language.questionanswering.models.TextQueryParameters
:param text_query_options: Post body of the request.
:type text_query_options: ~azure.ai.language.questionanswering.models.TextQueryOptions
:keyword callable cls: A custom type or function that will be passed the direct response
:return: TextAnswers, or the result of cls(response)
:rtype: ~azure.ai.language.questionanswering.models.TextAnswers
Expand All @@ -108,14 +104,15 @@ async def query_text(
cls = kwargs.pop("cls", None) # type: ClsType["_models.TextAnswers"]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}))

content_type = kwargs.pop("content_type", "application/json") # type: Optional[str]

json = self._serialize.body(text_query_parameters, "object")
json = self._serialize.body(text_query_options, "TextQueryOptions")

request = rest.build_query_text_request(
json=json, content_type=content_type, template_url=self.query_text.metadata["url"], **kwargs
)
content_type=content_type,
json=json,
template_url=self.query_text.metadata["url"],
)._to_pipeline_transport_request()
path_format_arguments = {
"Endpoint": self._serialize.url("self._config.endpoint", self._config.endpoint, "str", skip_quote=True),
}
Expand All @@ -134,7 +131,7 @@ async def query_text(
deserialized = self._deserialize("TextAnswers", pipeline_response)

if cls:
return cls(PipelineResponse._convert(pipeline_response), deserialized, {})
return cls(pipeline_response, deserialized, {})

return deserialized

Expand Down
Loading

0 comments on commit 85ce0e9

Please sign in to comment.