feat: [dialogflow-cx] exposed DataStoreConnectionSignals #5567

Merged · 3 commits · Jul 29, 2024
@@ -18,6 +18,7 @@ package google.cloud.dialogflow.cx.v3;

import "google/api/field_behavior.proto";
import "google/cloud/dialogflow/cx/v3/gcs.proto";
import "google/protobuf/duration.proto";

option cc_enable_arenas = true;
option csharp_namespace = "Google.Cloud.Dialogflow.Cx.V3";
@@ -40,6 +41,27 @@ option ruby_package = "Google::Cloud::Dialogflow::CX::V3";
//
// Hierarchy: Agent->Flow->Page->Fulfillment/Parameter.
message AdvancedSettings {
// Define behaviors of speech to text detection.
message SpeechSettings {
// Sensitivity of the speech model that detects the end of speech.
// Scale from 0 to 100.
int32 endpointer_sensitivity = 1;

// Timeout before detecting no speech.
google.protobuf.Duration no_speech_timeout = 2;

// Use timeout-based endpointing, interpreting endpointer sensitivity as
// seconds of timeout value.
bool use_timeout_based_endpointing = 3;

// Mapping from language to Speech-to-Text model. The mapped Speech-to-Text
// model will be selected for requests from its corresponding language.
// For more information, see
// [Speech
// models](https://cloud.google.com/dialogflow/cx/docs/concept/speech-models).
map<string, string> models = 5;
}

// Define behaviors for DTMF (dual tone multi frequency).
message DtmfSettings {
// If true, incoming audio is processed for DTMF (dual tone multi frequency)
@@ -55,15 +77,26 @@

// The digit that terminates a DTMF digit sequence.
string finish_digit = 3;

// Interdigit timeout setting for matching DTMF input to regex.
google.protobuf.Duration interdigit_timeout_duration = 6;

// Endpoint timeout setting for matching DTMF input to regex.
google.protobuf.Duration endpointing_timeout_duration = 7;
}

// Define behaviors on logging.
message LoggingSettings {
// Enables StackDriver logging.
bool enable_stackdriver_logging = 2;

// Enables DF Interaction logging.
bool enable_interaction_logging = 3;

// Enables consent-based end-user input redaction. If true, a pre-defined
// session parameter `$session.params.conversation-redaction` will be
// used to determine if the utterance should be redacted.
bool enable_consent_based_redaction = 4;
}
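
Since `$session.params.conversation-redaction` is an ordinary session parameter, a caller could opt a conversation into redaction per request. A minimal, illustrative Java sketch follows (assuming the standard generated `QueryParameters` class; the boolean value shape is an assumption, since the comment above does not specify it):

import com.google.cloud.dialogflow.cx.v3.QueryParameters;
import com.google.protobuf.Struct;
import com.google.protobuf.Value;

public class ConsentRedactionParamExample {
  // Session parameters travel to the agent via QueryParameters.parameters on DetectIntent.
  public static QueryParameters consentToRedaction() {
    return QueryParameters.newBuilder()
        .setParameters(
            Struct.newBuilder()
                .putFields(
                    "conversation-redaction",
                    Value.newBuilder().setBoolValue(true).build()) // value type is an assumption
                .build())
        .build();
  }
}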

// If present, incoming audio is exported by Dialogflow to the configured
@@ -73,6 +106,14 @@ message AdvancedSettings {
// - Flow level
GcsDestination audio_export_gcs_destination = 2;

// Settings for speech to text detection.
// Exposed at the following levels:
// - Agent level
// - Flow level
// - Page level
// - Parameter level
SpeechSettings speech_settings = 3;

// Settings for DTMF.
// Exposed at the following levels:
// - Agent level
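Taken together, the additions to advanced settings are plain message fields, so they can be populated through the generated builders. A rough Java sketch, not part of this PR (setter names follow standard protobuf codegen; the DtmfSettings `enabled` field and the `dtmf_settings`/`logging_settings` field names come from the published proto rather than this hunk):

import com.google.cloud.dialogflow.cx.v3.AdvancedSettings;
import com.google.protobuf.Duration;

public class AdvancedSettingsExample {
  public static AdvancedSettings build() {
    AdvancedSettings.SpeechSettings speech =
        AdvancedSettings.SpeechSettings.newBuilder()
            .setEndpointerSensitivity(30) // scale from 0 to 100
            .setNoSpeechTimeout(Duration.newBuilder().setSeconds(5).build())
            .setUseTimeoutBasedEndpointing(false)
            .putModels("en-US", "telephony") // illustrative language -> model entry
            .build();

    AdvancedSettings.DtmfSettings dtmf =
        AdvancedSettings.DtmfSettings.newBuilder()
            .setEnabled(true) // pre-existing field, assumed from the published proto
            .setFinishDigit("#")
            .setInterdigitTimeoutDuration(Duration.newBuilder().setSeconds(4).build())
            .setEndpointingTimeoutDuration(Duration.newBuilder().setSeconds(3).build())
            .build();

    AdvancedSettings.LoggingSettings logging =
        AdvancedSettings.LoggingSettings.newBuilder()
            .setEnableStackdriverLogging(true)
            .setEnableInteractionLogging(true)
            .setEnableConsentBasedRedaction(true)
            .build();

    return AdvancedSettings.newBuilder()
        .setSpeechSettings(speech)
        .setDtmfSettings(dtmf)
        .setLoggingSettings(logging)
        .build();
  }
}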
@@ -254,6 +254,20 @@ message Agent {
bool enable_answer_feedback = 1 [(google.api.field_behavior) = OPTIONAL];
}

// Settings for end user personalization.
message PersonalizationSettings {
// Optional. Default end user metadata, used when processing DetectIntent
// requests. Recommended to be filled as a template instead of a hard-coded
// value, for example { "age": "$session.params.age" }. The data will be
// merged with the
// [QueryParameters.end_user_metadata][google.cloud.dialogflow.cx.v3.QueryParameters.end_user_metadata]
// in
// [DetectIntentRequest.query_params][google.cloud.dialogflow.cx.v3.DetectIntentRequest.query_params]
// during query processing.
google.protobuf.Struct default_end_user_metadata = 1
[(google.api.field_behavior) = OPTIONAL];
}

// The unique identifier of the agent.
// Required for the
// [Agents.UpdateAgent][google.cloud.dialogflow.cx.v3.Agents.UpdateAgent]
@@ -303,8 +317,7 @@ message Agent {
// Immutable. Name of the start flow in this agent. A start flow will be
// automatically created when the agent is created, and can only be deleted by
// deleting the agent. Format: `projects/<Project ID>/locations/<Location
// ID>/agents/<Agent ID>/flows/<Flow ID>`.
string start_flow = 16 [
(google.api.field_behavior) = IMMUTABLE,
(google.api.resource_reference) = { type: "dialogflow.googleapis.com/Flow" }
@@ -328,6 +341,11 @@
// requests.
bool enable_spell_correction = 20;

// Optional. Enable training multi-lingual models for this agent. These models
// will be trained on all the languages supported by the agent.
bool enable_multi_language_training = 40
[(google.api.field_behavior) = OPTIONAL];

// Indicates whether the agent is locked for changes. If the agent is locked,
// modifications to the agent will be rejected except for [RestoreAgent][].
bool locked = 27;
@@ -349,6 +367,10 @@
// Optional. Answer feedback collection settings.
AnswerFeedbackSettings answer_feedback_settings = 38
[(google.api.field_behavior) = OPTIONAL];

// Optional. Settings for end user personalization.
PersonalizationSettings personalization_settings = 42
[(google.api.field_behavior) = OPTIONAL];
}

// The request message for
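An illustrative Java sketch of the new agent-level fields (the metadata key is hypothetical, and pushing the change back through the Agents update call with a field mask is not shown):

import com.google.cloud.dialogflow.cx.v3.Agent;
import com.google.protobuf.Struct;
import com.google.protobuf.Value;

public class AgentPersonalizationExample {
  public static Agent withNewSettings(Agent agent) {
    // Template-style default end-user metadata, as recommended in the field comment.
    Struct defaultMetadata =
        Struct.newBuilder()
            .putFields("age", Value.newBuilder().setStringValue("$session.params.age").build())
            .build();

    return agent.toBuilder()
        .setPersonalizationSettings(
            Agent.PersonalizationSettings.newBuilder()
                .setDefaultEndUserMetadata(defaultMetadata)
                .build())
        .setEnableMultiLanguageTraining(true) // train multi-lingual models for this agent
        .build();
  }
}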
@@ -16,6 +16,8 @@ syntax = "proto3";

package google.cloud.dialogflow.cx.v3;

import "google/api/field_behavior.proto";

option cc_enable_arenas = true;
option csharp_namespace = "Google.Cloud.Dialogflow.Cx.V3";
option go_package = "cloud.google.com/go/dialogflow/cx/apiv3/cxpb;cxpb";
@@ -54,3 +56,183 @@ enum DataStoreType {
// A data store that contains structured data (for example FAQ).
STRUCTURED = 3;
}

// Data store connection feature output signals.
// Might be only partially filled if processing stops before the final answer.
// Reasons for this can be, but are not limited to: empty UCS search results,
// positive RAI check outcome, grounding failure, ...
message DataStoreConnectionSignals {
// Diagnostic info related to the rewriter model call.
message RewriterModelCallSignals {
// Prompt as sent to the model.
string rendered_prompt = 1;

// Output of the generative model.
string model_output = 2;

// Name of the generative model. For example, "gemini-ultra", "gemini-pro",
// "gemini-1.5-flash" etc. Defaults to "Other" if the model is unknown.
string model = 3;
}

// Search snippet details.
message SearchSnippet {
// Title of the enclosing document.
string document_title = 1;

// URI for the document. Present if specified for the document.
string document_uri = 2;

// Text included in the prompt.
string text = 3;
}

// Diagnostic info related to the answer generation model call.
message AnswerGenerationModelCallSignals {
// Prompt as sent to the model.
string rendered_prompt = 1;

// Output of the generative model.
string model_output = 2;

// Name of the generative model. For example, "gemini-ultra", "gemini-pro",
// "gemini-1.5-flash" etc. Defaults to "Other" if the model is unknown.
string model = 3;
}

// Answer part with citation.
message AnswerPart {
// Substring of the answer.
string text = 1;

// Citations for this answer part. Indices of `search_snippets`.
repeated int32 supporting_indices = 2;
}

// Snippet cited by the answer generation model.
message CitedSnippet {
// Details of the snippet.
SearchSnippet search_snippet = 1;

// Index of the snippet in `search_snippets` field.
int32 snippet_index = 2;
}

// Grounding signals.
message GroundingSignals {
// Represents the decision of the grounding check.
enum GroundingDecision {
// Decision not specified.
GROUNDING_DECISION_UNSPECIFIED = 0;

// Grounding has accepted the answer.
ACCEPTED_BY_GROUNDING = 1;

// Grounding has rejected the answer.
REJECTED_BY_GROUNDING = 2;
}

// Grounding score buckets.
enum GroundingScoreBucket {
// Score not specified.
GROUNDING_SCORE_BUCKET_UNSPECIFIED = 0;

// We have very low confidence that the answer is grounded.
VERY_LOW = 1;

// We have low confidence that the answer is grounded.
LOW = 3;

// We have medium confidence that the answer is grounded.
MEDIUM = 4;

// We have high confidence that the answer is grounded.
HIGH = 5;

// We have very high confidence that the answer is grounded.
VERY_HIGH = 6;
}

// Represents the decision of the grounding check.
GroundingDecision decision = 1;

// Grounding score bucket setting.
GroundingScoreBucket score = 2;
}

// Safety check results.
message SafetySignals {
// Safety decision.
// All kinds of checks are incorporated into this final decision, including
// the banned phrases check.
enum SafetyDecision {
// Decision not specified.
SAFETY_DECISION_UNSPECIFIED = 0;

// No manual or automatic safety check fired.
ACCEPTED_BY_SAFETY_CHECK = 1;

// One or more safety checks fired.
REJECTED_BY_SAFETY_CHECK = 2;
}

// Specifies banned phrase match subject.
enum BannedPhraseMatch {
// No banned phrase check was executed.
BANNED_PHRASE_MATCH_UNSPECIFIED = 0;

// All banned phrase checks led to no match.
BANNED_PHRASE_MATCH_NONE = 1;

// A banned phrase matched the query.
BANNED_PHRASE_MATCH_QUERY = 2;

// A banned phrase matched the response.
BANNED_PHRASE_MATCH_RESPONSE = 3;
}

// Safety decision.
SafetyDecision decision = 1;

// Specifies banned phrase match subject.
BannedPhraseMatch banned_phrase_match = 2;

// The matched banned phrase if there was a match.
string matched_banned_phrase = 3;
}

// Optional. Diagnostic info related to the rewriter model call.
RewriterModelCallSignals rewriter_model_call_signals = 1
[(google.api.field_behavior) = OPTIONAL];

// Optional. Rewritten string query used for search.
string rewritten_query = 2 [(google.api.field_behavior) = OPTIONAL];

// Optional. Search snippets included in the answer generation prompt.
repeated SearchSnippet search_snippets = 3
[(google.api.field_behavior) = OPTIONAL];

// Optional. Diagnostic info related to the answer generation model call.
AnswerGenerationModelCallSignals answer_generation_model_call_signals = 4
[(google.api.field_behavior) = OPTIONAL];

// Optional. The final compiled answer.
string answer = 5 [(google.api.field_behavior) = OPTIONAL];

// Optional. Answer parts with relevant citations.
// Concatenation of texts should add up to the `answer` (not counting
// whitespaces).
repeated AnswerPart answer_parts = 6 [(google.api.field_behavior) = OPTIONAL];

// Optional. Snippets cited by the answer generation model from the most to
// least relevant.
repeated CitedSnippet cited_snippets = 7
[(google.api.field_behavior) = OPTIONAL];

// Optional. Grounding signals.
GroundingSignals grounding_signals = 8
[(google.api.field_behavior) = OPTIONAL];

// Optional. Safety check result.
SafetySignals safety_signals = 9 [(google.api.field_behavior) = OPTIONAL];
}
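
All of these signals are optional outputs, so a consumer typically reads whichever parts were populated. A hedged Java sketch of walking the message (standard generated accessors assumed; where the signals are surfaced on a query response is outside this diff):

import com.google.cloud.dialogflow.cx.v3.DataStoreConnectionSignals;
import com.google.cloud.dialogflow.cx.v3.DataStoreConnectionSignals.AnswerPart;
import com.google.cloud.dialogflow.cx.v3.DataStoreConnectionSignals.CitedSnippet;

public class DataStoreSignalsInspector {
  // Prints the compiled answer together with the documents backing each part.
  public static void inspect(DataStoreConnectionSignals signals) {
    System.out.println("Rewritten query: " + signals.getRewrittenQuery());
    System.out.println("Answer: " + signals.getAnswer());

    for (AnswerPart part : signals.getAnswerPartsList()) {
      // supporting_indices point into the search_snippets list.
      for (int index : part.getSupportingIndicesList()) {
        String title = signals.getSearchSnippets(index).getDocumentTitle();
        System.out.println("  \"" + part.getText() + "\" <- " + title);
      }
    }

    for (CitedSnippet cited : signals.getCitedSnippetsList()) {
      System.out.println("Cited: " + cited.getSearchSnippet().getDocumentUri());
    }

    // Grounding and safety verdicts default to *_UNSPECIFIED if the checks did not run.
    System.out.println("Grounding decision: " + signals.getGroundingSignals().getDecision());
    System.out.println("Safety decision: " + signals.getSafetySignals().getDecision());
  }
}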
@@ -256,6 +256,26 @@ message Flow {
pattern: "projects/{project}/locations/{location}/agents/{agent}/flows/{flow}"
};

// Settings for multi-lingual agents.
message MultiLanguageSettings {
// Optional. Enable multi-language detection for this flow. This can be set
// only if [agent level multi language
// setting][Agent.enable_multi_language_training] is enabled.
bool enable_multi_language_detection = 1
[(google.api.field_behavior) = OPTIONAL];

// Optional. Agent will respond in the detected language if the detected
// language code is in the supported resolved languages for this flow. This
// will be used only if multi-language training is enabled in the
// [agent][google.cloud.dialogflow.cx.v3.Agent.enable_multi_language_training]
// and multi-language detection is enabled in the
// [flow][google.cloud.dialogflow.cx.v3.Flow.MultiLanguageSettings.enable_multi_language_detection].
// The supported languages must be a subset of the languages supported by
// the agent.
repeated string supported_response_language_codes = 2
[(google.api.field_behavior) = OPTIONAL];
}

// The unique identifier of the flow.
// Format: `projects/<Project ID>/locations/<Location ID>/agents/<Agent
// ID>/flows/<Flow ID>`.
@@ -329,6 +349,14 @@
// Optional. Knowledge connector configuration.
KnowledgeConnectorSettings knowledge_connector_settings = 18
[(google.api.field_behavior) = OPTIONAL];

// Optional. Multi-lingual agent settings for this flow.
MultiLanguageSettings multi_language_settings = 28
[(google.api.field_behavior) = OPTIONAL];

// Indicates whether the flow is locked for changes. If the flow is locked,
// modifications to the flow will be rejected.
bool locked = 30;
}

// The request message for
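A small illustrative Java sketch of the new flow-level fields; as the comments above note, detection only takes effect when multi-language training is also enabled on the agent:

import com.google.cloud.dialogflow.cx.v3.Flow;

public class FlowMultiLanguageExample {
  public static Flow withMultiLanguage(Flow flow) {
    Flow.MultiLanguageSettings settings =
        Flow.MultiLanguageSettings.newBuilder()
            .setEnableMultiLanguageDetection(true)
            .addSupportedResponseLanguageCodes("en") // must be a subset of the agent's languages
            .addSupportedResponseLanguageCodes("fr")
            .build();

    return flow.toBuilder()
        .setMultiLanguageSettings(settings)
        .setLocked(false) // when true, modifications to the flow are rejected
        .build();
  }
}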
@@ -268,11 +268,17 @@ message SecuritySettings {
string audio_export_pattern = 2;

// Enable audio redaction if it is true.
// Note that this only redacts end-user audio data;
// synthesised audio from the virtual agent is not redacted.
bool enable_audio_redaction = 3;

// File format for exported audio file. Currently only used in telephony
// recordings.
AudioFormat audio_format = 4;

// Whether to store TTS audio. By default, TTS audio from the virtual agent
// is not exported.
bool store_tts_audio = 6;
}

// Settings for exporting conversations to
@@ -355,6 +361,9 @@ message SecuritySettings {
// for Agent Assist traffic); a higher value will be ignored and the default used.
// Setting a value higher than that has no effect. A missing value or
// setting to 0 also means we use default TTL.
// When data retention configuration is changed, it only applies to the data
// created after the change; the TTL of existing data created before the
// change stays intact.
int32 retention_window_days = 6;

// Specifies the retention behavior defined by
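An illustrative Java sketch of the new security-settings fields; the `audio_export_settings` field and the `AudioExportSettings` nested message are assumed from the published security_settings.proto, since they fall outside the visible hunk:

import com.google.cloud.dialogflow.cx.v3.SecuritySettings;

public class SecuritySettingsExample {
  public static SecuritySettings.Builder apply(SecuritySettings.Builder builder) {
    return builder
        .setAudioExportSettings(
            SecuritySettings.AudioExportSettings.newBuilder()
                .setEnableAudioRedaction(true) // redacts end-user audio only, not synthesised audio
                .setStoreTtsAudio(true) // opt in to exporting the agent's TTS audio
                .build())
        // Applies only to data created after the change; existing data keeps its TTL.
        .setRetentionWindowDays(30);
  }
}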