diff --git a/docs/Gemfile b/docs/Gemfile index a2f68d7d0a..85e9695254 100644 --- a/docs/Gemfile +++ b/docs/Gemfile @@ -25,6 +25,7 @@ group :jekyll_plugins do gem 'jekyll-octicons' gem 'jekyll-readme-index' gem 'jekyll-mentions' + gem 'jekyll-toc' gem 'jemoji' end diff --git a/docs/Gemfile.lock b/docs/Gemfile.lock index c940148360..223d35d626 100644 --- a/docs/Gemfile.lock +++ b/docs/Gemfile.lock @@ -188,6 +188,8 @@ GEM jekyll-seo-tag (~> 2.0) jekyll-titles-from-headings (0.5.1) jekyll (~> 3.3) + jekyll-toc (0.12.2) + nokogiri (~> 1.9) jekyll-watch (2.2.1) listen (~> 3.0) jemoji (0.10.2) @@ -255,6 +257,7 @@ DEPENDENCIES jekyll-readme-index jekyll-redirect-from jekyll-sitemap + jekyll-toc jemoji minima (~> 2.0) wdm (~> 0.1.0) diff --git a/docs/_config.yml b/docs/_config.yml index 6093f343d8..485f03d7c9 100644 --- a/docs/_config.yml +++ b/docs/_config.yml @@ -27,6 +27,7 @@ defaults: # Build settings markdown: kramdown +highlighter: rouge sass: style: :compressed @@ -41,4 +42,16 @@ plugins: - jekyll-mentions - jekyll-octicons - jekyll-sitemap + - jekyll-toc + - jekyll-rouge - jemoji + +toc: + # Default is "section-nav": + list_class: nav flex-column + # Default is no class for sublists: + sublist_class: nav flex-column + # Default is "toc-entry": + item_class: nav-item no-underline py-1 text-gray-light + # Default is "toc-": + item_prefix: nav-item- \ No newline at end of file diff --git a/docs/_data/toc.yml b/docs/_data/toc.yml index 4b3a8c8315..896713367a 100644 --- a/docs/_data/toc.yml +++ b/docs/_data/toc.yml @@ -1,19 +1,26 @@ Overview: +Virtual Assistant: +- Create +- Customize +- Deploy +- Handbook +Skills: +- Create +- Customize +- Connect to a sample +- Convert a v4 Bot +- Extend a v4 Bot +- Handbook - Samples -Tutorials: -- Create a Virtual Assistant -- Customize a Virtual Assistant -- Create a skill -- Customize a skill -- Enable Speech -- Enable Microsoft Teams -How To: -- Virtual Assistant -- Skills +Solution Accelerators: +- Assistants - Samples -Reference: -- Virtual Assistant -- Skills -- Analytics -- Samples -Help: \ No newline at end of file +- Enable proactive notifications +- View analytics using Power BI +Clients and Channels: +- Clients +- Channels +- Extend to Direct Line Speech +- Extend to Microsoft Teams +Help: +- Reference \ No newline at end of file diff --git a/docs/_docs/_samples/_default.md b/docs/_docs/_samples/_default.md index 45a28bc63a..1475801b8d 100644 --- a/docs/_docs/_samples/_default.md +++ b/docs/_docs/_samples/_default.md @@ -4,4 +4,5 @@ subcategory: language: title: order: 1 +toc: true --- diff --git a/docs/_docs/_samples/_howto.md b/docs/_docs/_samples/_howto.md deleted file mode 100644 index 2b163acfb6..0000000000 --- a/docs/_docs/_samples/_howto.md +++ /dev/null @@ -1,16 +0,0 @@ ---- -category: How To -subcategory: [Virtual Assistant or Skills] -title: [Title] -description: [Description] -order: 1 ---- - -# {{ page.title }} -{:.no_toc} - -## In this how-to -{:.no_toc} - -* -{:toc} diff --git a/docs/_docs/_samples/_reference.md b/docs/_docs/_samples/_reference.md deleted file mode 100644 index 04fbea3700..0000000000 --- a/docs/_docs/_samples/_reference.md +++ /dev/null @@ -1,16 +0,0 @@ ---- -category: Reference -subcategory: [Virtual Assistant or Skills] -title: [Title] -description: [Description] -order: 1 ---- - -# {{ page.title }} -{:.no_toc} - -## In this reference -{:.no_toc} - -* -{:toc} diff --git a/docs/_docs/_samples/_tutorial-intro.md b/docs/_docs/_samples/_tutorial-intro.md new file mode 100644 index 0000000000..89afbfa814 --- /dev/null 
+++ b/docs/_docs/_samples/_tutorial-intro.md
@@ -0,0 +1,20 @@
+---
+layout: tutorial
+category: Tutorials
+subcategory: [Tutorial Name]
+title: Intro
+language: [C#, TypeScript or None]
+order: 1
+---
+
+# Tutorial: {{page.subcategory}} ({{page.language}})
+
+## {{ page.title }}
+
+### Purpose
+
+### Prerequisites
+
+### Time To Complete
+
+### Scenario
diff --git a/docs/_docs/_samples/_tutorial.md b/docs/_docs/_samples/_tutorial.md
index e9dbb9d794..cf335f21ce 100644
--- a/docs/_docs/_samples/_tutorial.md
+++ b/docs/_docs/_samples/_tutorial.md
@@ -5,4 +5,8 @@ subcategory: [Tutorial Name]
title: Intro
language: [C#, TypeScript or None]
order: 1
----
\ No newline at end of file
+---
+
+# Tutorial: {{page.subcategory}} ({{page.language}})
+
+## {{ page.title }}
diff --git a/docs/_docs/clients-and-channels/clients/event-companion.md b/docs/_docs/clients-and-channels/clients/event-companion.md
new file mode 100644
index 0000000000..413c703df2
--- /dev/null
+++ b/docs/_docs/clients-and-channels/clients/event-companion.md
@@ -0,0 +1,71 @@
+---
+category: Clients and Channels
+subcategory: Clients
+title: Event Companion (Android)
+description: The **Event Companion** app enables you to create widgets that will respond to custom events sent from your Virtual Assistant
+order: 2
+toc: true
+---
+# {{ page.title }}
+{:.no_toc}
+{{page.description}}
+
+## Architecture
+![Virtual Assistant Client (Android) overview diagram]({{site.baseurl}}/assets/images/android-virtual-assistant-client-architecture.png)
+
+## Prerequisites
+1. Set up your own [Virtual Assistant Client]({{site.baseurl}}/clients-and-channels/clients/virtual-assistant-client).
+
+1. Download the [Event Companion app source code]({{site.repo}}/tree/next/samples/android/clients/EventCompanion).
+
+## Build and run
+### Run
+{:.no_toc}
+[Build and run your app](https://developer.android.com/studio/run) to deploy to the Android Emulator or a connected device.
+
+## Create new widgets
+
+Create sample numeric and toggle widgets.
+
+Numeric widgets and toggle widgets are currently available in the Event Companion app.
+![Event Companion app widgets]({{site.baseurl}}/assets/images/android-event-companion-widgets.jpg)
+
+### Numeric widget
+{:.no_toc}
+1. Long press on a blank area of the home screen, then select **Widgets**.
+
+1. Select a numeric widget and drag it onto the home screen.
+
+1. Configure:
+- **Label**: Widget label
+- **Event**: The name value of an event activity
+- **Icon**: Widget icon
+Predefined templates are available to populate a numeric widget for common scenarios.
+![Numeric widget]({{site.baseurl}}/assets/images/android-event-companion-numeric-widget-configuration.jpg)
+
+1. Click **Add Widget** to finish placing it on the home screen.
+
+### Toggle widget
+{:.no_toc}
+1. Long press on a blank area of the home screen, then select **Widgets**.
+
+1. Select a toggle widget and drag it onto the home screen.
+
+1. Configure:
+- **Label**: Widget label
+- **Event**: The name value of an event activity
+- **Icon**: Widget icon
+Predefined templates are available to populate a toggle widget for common scenarios.
+![Toggle widget]({{site.baseurl}}/assets/images/android-event-companion-toggle-widget-configuration.jpg)
+
+1. Click **Add Widget** to finish setting up the widget.
+
+## Manage widgets
+All created widgets can be reconfigured from the main screen of the **Event Companion** app.
+![Event companion manage widgets]({{site.baseurl}}/assets/images/android-event-companion-manage-widgets.jpg)
+
+1. Select the widget that needs to be reconfigured.
+
+1. Modify properties.
+
+1. Select **Save Widget** to apply the changes.
\ No newline at end of file
diff --git a/docs/_docs/clients-and-channels/clients/virtual-assistant-client.md b/docs/_docs/clients-and-channels/clients/virtual-assistant-client.md
new file mode 100644
index 0000000000..a8006d48d9
--- /dev/null
+++ b/docs/_docs/clients-and-channels/clients/virtual-assistant-client.md
@@ -0,0 +1,159 @@
+---
+category: Clients and Channels
+subcategory: Clients
+title: Virtual Assistant Client (Android)
+description: Chat with your Virtual Assistant using the **Virtual Assistant Client** app and set it up as the default assistant on a device.
+order: 1
+toc: true
+---
+
+# {{ page.title }}
+{:.no_toc}
+{{page.description}}
+
+## Prerequisites
+1. Install [Android Studio](https://developer.android.com/studio/).
+
+1. Download the [**Virtual Assistant Client** app source code](https://aka.ms/virtualassistantclient).
+
+1. [Create a Virtual Assistant]({{site.baseurl}}/virtual-assistant/tutorials/create-assistant/csharp/1-intro) to set up your Virtual Assistant environment.
+
+1. [Enable speech]({{site.baseurl}}/clients-and-channels/tutorials/enable-speech/1-intro) on your new Virtual Assistant.
+
+1. If you want to capture analytics, get started with [Visual Studio App Center](https://docs.microsoft.com/en-us/appcenter/sdk/getting-started/android) and register a new app.
+
+## Build and run
+
+### Add your application settings
+{:.no_toc}
+
+There are two configuration files used to provide your environment settings.
+
+#### [Direct Line Speech configuration]({{site.repo}}/blob/next/samples/android/clients/VirtualAssistantClient/directlinespeech/src/main/assets/default_configuration.json)
+{:.no_toc}
+```json
+{
+  "service_key": "SPEECH_SERVICE_SUBSCRIPTION_KEY", // Replace with your Speech Service subscription key
+  "service_region": "westus2",
+  "bot_id": "DIRECT_LINE_SPEECH_SECRET_KEY", // Replace with your Direct Line Speech secret
+  "user_id": "android",
+  "user_name": "Android",
+  "locale": "en-us",
+  "keyword": "computer"
+}
+```
+
+The **user_id** is a unique identifier for all messages generated by the user; it can be combined with the [Linked Accounts sample]({{site.baseurl}}/solution-accelerators/samples/linked-accounts/).
+
+#### [App configuration]({{site.repo}}/blob/next/samples/android/clients/VirtualAssistantClient/app/src/main/assets/default_app_configuration.json)
+{:.no_toc}
+```json
+{
+  "history_linecount": 2147483646,
+  "show_full_conversation": true,
+  "enable_dark_mode": false,
+  "keep_screen_on": true,
+  "app_center_id": "APP_CENTER_ID" // Replace with your Visual Studio App Center id
+}
+```
+
+#### Optional: [Chat colors]({{site.repo}}/blob/next/samples/android/clients/VirtualAssistantClient/app/src/main/res/values/colors.xml)
+{:.no_toc}
+```xml
+
+...
+ #000000
+ #ffffff
+ #f2f2f2
+ #3062d6
+...
+
+```
+
+### Run
+{:.no_toc}
+[Build and run your app](https://developer.android.com/studio/run) to deploy to the Android Emulator or a connected device.
+
+#### Permissions
+{:.no_toc}
+##### Record Audio
+{:.no_toc}
+Required for the user to make voice requests to a bot. Without this permission, a user can only use the keyboard.
+##### Fine Location
+{:.no_toc}
+Allow Virtual Assistant to receive the [**VA.Location** event]({{site.baseurl}}/virtual-assistant/handbook/events/) with GPS coordinates to utilize location-based skills like Point of Interest.
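+
+As a rough sketch of what this looks like on the wire (the exact value format may differ by client version; treat the coordinates below as placeholder data), the assistant receives an event activity along these lines:
+
+```json
+{
+  "type": "event",
+  "name": "VA.Location",
+  "value": "47.639620,-122.130610"
+}
+```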
+
+## Interact with a Virtual Assistant
+### Chat
+{:.no_toc}
+The main view shows a familiar chat window between the user and the assistant. Start a conversation by selecting the microphone or keyboard icons.
+
+![Widgets]({{site.baseurl}}/assets/images/android-virtual-assistant-client-chat.png)
+
+### Widget
+{:.no_toc}
+Using widgets, you can demonstrate an Assistant having a native chat experience on a device.
+
+![Widgets]({{site.baseurl}}/assets/images/android-virtual-assistant-client-widget.png)
+
+### Side menu
+{:.no_toc}
+Swipe from the left to access the menu.
+
+![Side menu]({{site.baseurl}}/assets/images/android-virtual-assistant-client-side-menu.png)
+
+### Restart conversation
+{:.no_toc}
+Restart the conversation with your Virtual Assistant using a new conversation id.
+
+### Settings
+{:.no_toc}
+Access the same settings found in the configuration files.
+
+![Settings]({{site.baseurl}}/assets/images/android-virtual-assistant-client-settings.png)
+
+### Set as default assistant
+{:.no_toc}
+
+Set your Virtual Assistant as the device's default assist app.
+
+1. Allow the **Appear on top** permission to overlay this app on Android
+![Settings]({{site.baseurl}}/assets/images/android-virtual-assistant-client-appear-on-top.png)
+
+1. Select **Device assistance app**
+![Settings]({{site.baseurl}}/assets/images/android-virtual-assistant-client-device-assistance-app-1.png)
+
+1. Select **Virtual Assistant**
+![Settings]({{site.baseurl}}/assets/images/android-virtual-assistant-client-device-assistance-app-2.png)
+
+## Events
+The **Virtual Assistant Client** is enabled to work with [events used in the sample Skills]({{site.baseurl}}/virtual-assistant/handbook/events/).
+
+### Open default apps
+{:.no_toc}
+#### [OpenDefaultApp](https://github.com/microsoft/botframework-solutions/blob/8e05d16bacaac483810807cab67b9120d07c5302/samples/android/clients/VirtualAssistantClient/app/src/main/java/com/microsoft/bot/builder/solutions/virtualassistant/service/SpeechService.java#L502)
+{:.no_toc}
+This method takes the metadata from an **OpenDefaultApp** event to open default apps on the device; a sketch of this event's shape appears at the end of this page.
+
+#### Maps
+{:.no_toc}
+Compatible with either [Waze](https://www.waze.com/) or [Google Maps](https://www.google.com/maps) (in this order).
+
+#### Phone
+{:.no_toc}
+Compatible with the default dialer.
+
+#### Music
+{:.no_toc}
+Compatible with [Spotify](https://www.spotify.com/).
+
+### Other events
+{:.no_toc}
+#### [BroadcastWidgetUpdate](https://github.com/microsoft/botframework-solutions/blob/next/samples/android/clients/VirtualAssistantClient/app/src/main/java/com/microsoft/bot/builder/solutions/virtualassistant/service/SpeechService.java#L579)
+{:.no_toc}
+This method sends the value of this event activity to any listening apps, like the [**Event Companion**]({{site.baseurl}}/clients-and-channels/clients/event-companion) app.
+
+## Next steps
+Use the [**Event Companion**]({{site.baseurl}}/clients-and-channels/clients/event-companion) app to broadcast your Virtual Assistant's metadata and prototype advanced scenarios.
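+
+Referring back to the **OpenDefaultApp** event above, a hedged sketch of the event shape follows. The property names inside **value** are illustrative assumptions, not confirmed names; check the **SpeechService.java** handler linked in the Events section for the exact metadata keys your client version expects:
+
+```json
+{
+  "type": "event",
+  "name": "OpenDefaultApp",
+  "value": {
+    "MapsUri": "geo:47.639620,-122.130610",
+    "TelephoneUri": "tel:+15555550123",
+    "MusicUri": "spotify:playlist:PLACEHOLDER_ID"
+  }
+}
```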
\ No newline at end of file
diff --git a/docs/_docs/clients-and-channels/tutorials/enable-speech/1-intro.md b/docs/_docs/clients-and-channels/tutorials/enable-speech/1-intro.md
new file mode 100644
index 0000000000..4e01e7fe02
--- /dev/null
+++ b/docs/_docs/clients-and-channels/tutorials/enable-speech/1-intro.md
@@ -0,0 +1,38 @@
+---
+layout: tutorial
+category: Clients and Channels
+subcategory: Extend to Direct Line Speech
+title: Intro
+order: 1
+---
+
+# Tutorial: {{page.subcategory}}
+
+## {{ page.title }}
+
+### Purpose
+
+The Virtual Assistant template creates and deploys a bot that is enabled for voice scenarios.
+
+This tutorial covers the steps required to connect the [Direct Line Speech](https://docs.microsoft.com/en-us/azure/bot-service/directline-speech-bot?view=azure-bot-service-4.0) channel to your assistant and build a simple application integrated with the Speech SDK to demonstrate speech interactions working end to end.
+
+### Prerequisites
+
+- [Create a Virtual Assistant]({{site.baseurl}}/virtual-assistant/tutorials/create-assistant/csharp/1-intro) to set up your environment.
+
+- Make sure the **Universal Windows Platform development** workload is available on your machine. Choose **Tools > Get Tools and Features** from the Visual Studio menu bar to open the Visual Studio installer. If this workload is already enabled, close the dialog box.
+
+  ![UWP Enablement]({{site.baseurl}}/assets/images/vs-enable-uwp-workload.png)
+
+  Otherwise, select the box next to **Universal Windows Platform development**, and select **Modify** at the lower right corner of the dialog box. Installation of the new feature takes a moment.
+
+### Time to Complete
+
+10 minutes
+
+### Scenario
+
+Run an application that enables you to speak to your Virtual Assistant on the Direct Line Speech channel.
+
+
+
diff --git a/docs/_docs/clients-and-channels/tutorials/enable-speech/2-create-speech-instance.md b/docs/_docs/clients-and-channels/tutorials/enable-speech/2-create-speech-instance.md
new file mode 100644
index 0000000000..10eff3c580
--- /dev/null
+++ b/docs/_docs/clients-and-channels/tutorials/enable-speech/2-create-speech-instance.md
@@ -0,0 +1,13 @@
+---
+layout: tutorial
+category: Clients and Channels
+subcategory: Extend to Direct Line Speech
+title: Create a Speech Service resource
+order: 2
+---
+
+# Tutorial: {{page.subcategory}}
+
+## {{page.title}}
+
+The Speech Services are the unification of speech-to-text, text-to-speech, and speech-translation into a single Azure subscription. Get started with a [Speech Service resource on Azure](https://docs.microsoft.com/en-us/azure/cognitive-services/speech-service/get-started).
\ No newline at end of file
diff --git a/docs/_docs/clients-and-channels/tutorials/enable-speech/3-add-speech-channel.md b/docs/_docs/clients-and-channels/tutorials/enable-speech/3-add-speech-channel.md
new file mode 100644
index 0000000000..fe26262bd1
--- /dev/null
+++ b/docs/_docs/clients-and-channels/tutorials/enable-speech/3-add-speech-channel.md
@@ -0,0 +1,13 @@
+---
+layout: tutorial
+category: Clients and Channels
+subcategory: Extend to Direct Line Speech
+title: Add the Direct Line Speech channel
+order: 3
+---
+
+# Tutorial: {{page.subcategory}}
+
+## {{page.title}}
+
+Direct Line Speech enables a low-latency, high-reliability connection with client applications using the Speech SDK. [Register your Virtual Assistant with the Direct Line Speech](https://docs.microsoft.com/en-us/azure/bot-service/bot-service-channel-connect-directlinespeech?view=azure-bot-service-4.0) channel.
\ No newline at end of file
diff --git a/docs/_docs/clients-and-channels/tutorials/enable-speech/4-integrate-speech-channel.md b/docs/_docs/clients-and-channels/tutorials/enable-speech/4-integrate-speech-channel.md
new file mode 100644
index 0000000000..1fc4639f23
--- /dev/null
+++ b/docs/_docs/clients-and-channels/tutorials/enable-speech/4-integrate-speech-channel.md
@@ -0,0 +1,16 @@
+---
+layout: tutorial
+category: Clients and Channels
+subcategory: Extend to Direct Line Speech
+title: Build speech sample app
+order: 4
+---
+
+# Tutorial: {{page.subcategory}}
+
+## Integrating with the Speech Channel
+
+1. Download the [latest release from the Direct Line Speech Client repository](https://github.com/Azure-Samples/Cognitive-Services-Direct-Line-Speech-Client/releases).
+1. Follow the [quickstart instructions](https://github.com/Azure-Samples/Cognitive-Services-Direct-Line-Speech-Client#quickstart) to set up your environment and connect to your Virtual Assistant.
+
+![Direct Line Speech Client Configuration]({{site.baseurl}}/assets/images/dlspeechclient.png)
\ No newline at end of file
diff --git a/docs/_docs/clients-and-channels/tutorials/enable-speech/5-changing-the-voice.md b/docs/_docs/clients-and-channels/tutorials/enable-speech/5-changing-the-voice.md
new file mode 100644
index 0000000000..b6eb13a8a7
--- /dev/null
+++ b/docs/_docs/clients-and-channels/tutorials/enable-speech/5-changing-the-voice.md
@@ -0,0 +1,26 @@
+---
+layout: tutorial
+category: Clients and Channels
+subcategory: Extend to Direct Line Speech
+title: Change the voice
+order: 5
+---
+
+# Tutorial: {{page.subcategory}}
+
+## Changing the Voice
+
+Now let's change the default voice (*Jessa24kRUS*) configured within your Virtual Assistant to a higher quality [Neural voice](https://azure.microsoft.com/en-us/blog/microsoft-s-new-neural-text-to-speech-service-helps-machines-speak-like-people/). Note that Neural voices will only work with speech subscription keys created for certain locations (regions). See the last column in the [Standard and neural voices](https://docs.microsoft.com/en-us/azure/cognitive-services/speech-service/regions#standard-and-neural-voices) table for region availability. If your bot is configured for a Neural voice and your speech subscription key is for a region not enabled for Neural voices, the Direct Line Speech channel will terminate the connection with the client with an Internal Server Error (code 500).
+
+To change your Virtual Assistant's voice:
+
+1. Open your Virtual Assistant Solution in Visual Studio.
+1. Open **DefaultWebSocketAdapter.cs** located within the **Adapters** folder.
+1. Select the voice you would like to use from [this list](https://docs.microsoft.com/en-us/azure/cognitive-services/speech-service/language-support#neural-voices), for example **Microsoft Server Speech Text to Speech Voice (en-US, JessaNeural)**.
+1. Update the following line to specify the new voice:
+```diff
+- Use(new SetSpeakMiddleware(settings.DefaultLocale ?? "en-us"));
++ Use(new SetSpeakMiddleware(settings.DefaultLocale ?? "en-us", "Microsoft Server Speech Text to Speech Voice (en-US, JessaNeural)"));
+```
+1. Build your Assistant and re-publish it to Azure so the changes are available to the Speech Channel.
+1. Repeat the tests and listen to the voice difference.
\ No newline at end of file
diff --git a/docs/_docs/clients-and-channels/tutorials/enable-speech/6-next-steps.md b/docs/_docs/clients-and-channels/tutorials/enable-speech/6-next-steps.md
new file mode 100644
index 0000000000..6a43ad4c90
--- /dev/null
+++ b/docs/_docs/clients-and-channels/tutorials/enable-speech/6-next-steps.md
@@ -0,0 +1,59 @@
+---
+layout: tutorial
+category: Clients and Channels
+subcategory: Extend to Direct Line Speech
+title: Next steps
+order: 6
+---
+
+# Tutorial: {{page.subcategory}}
+
+## Next Steps
+
+This tutorial is based on sample applications provided by the Cognitive Services Speech SDK. Learn more:
+
+- **Create a voice-first virtual assistant with the Speech SDK, UWP**: Develop a C# Universal Windows Platform (UWP) application by using the Speech SDK.
+- **Create a voice-first virtual assistant with the Speech SDK, Java**: Develop a Java console application by using the Cognitive Services Speech SDK.
+- **Create a voice-first virtual assistant in Java on Android by using the Speech SDK**: Build a voice-first virtual assistant with Java for Android using the Speech SDK.
+- **Virtual Assistant Client on Android**: Connect your Direct Line Speech-enabled bot to a sample Android application.
\ No newline at end of file diff --git a/docs/_docs/tutorials/enable-teams/1_intro.md b/docs/_docs/clients-and-channels/tutorials/enable-teams/1-intro.md similarity index 68% rename from docs/_docs/tutorials/enable-teams/1_intro.md rename to docs/_docs/clients-and-channels/tutorials/enable-teams/1-intro.md index 26a0ddb02d..172489fcc5 100644 --- a/docs/_docs/tutorials/enable-teams/1_intro.md +++ b/docs/_docs/clients-and-channels/tutorials/enable-teams/1-intro.md @@ -1,13 +1,14 @@ --- -category: Tutorials -subcategory: Enable Microsoft Teams +layout: tutorial +category: Clients and Channels +subcategory: Extend to Microsoft Teams title: Intro order: 1 --- -# Tutorial: Adding your Assistant to Microsoft Teams +# Tutorial: {{page.subcategory}} -## Intro +## {{ page.title }} ### Purpose @@ -17,7 +18,7 @@ This tutorial covers the steps required to connect your Virtual Assistant to Mic ### Prerequisites -- [Create a Virtual Assistant]({{site.baseurl}}/tutorials/csharp/create-assistant/1_intro) to setup your environment. +- [Create a Virtual Assistant]({{site.baseurl}}/virtual-assistant/tutorials/create-assistant/csharp/1-intro) to setup your environment. - Microsoft Teams installed and configured to work with your Office 365 tenant. diff --git a/docs/_docs/tutorials/enable-teams/2_add_teams_channel.md b/docs/_docs/clients-and-channels/tutorials/enable-teams/2-add-teams-channel.md similarity index 78% rename from docs/_docs/tutorials/enable-teams/2_add_teams_channel.md rename to docs/_docs/clients-and-channels/tutorials/enable-teams/2-add-teams-channel.md index dd88aab9dc..0287995f86 100644 --- a/docs/_docs/tutorials/enable-teams/2_add_teams_channel.md +++ b/docs/_docs/clients-and-channels/tutorials/enable-teams/2-add-teams-channel.md @@ -1,11 +1,12 @@ --- -category: Tutorials -subcategory: Enable Microsoft Teams +layout: tutorial +category: Clients and Channels +subcategory: Extend to Microsoft Teams title: Add Microsoft Teams channel order: 2 --- -# Tutorial: Adding your Assistant to Microsoft Teams +# Tutorial: {{page.subcategory}} ## Add the Microsoft Teams Channel diff --git a/docs/_docs/tutorials/enable-teams/3_install_app_studio.md b/docs/_docs/clients-and-channels/tutorials/enable-teams/3-install-app-studio.md similarity index 89% rename from docs/_docs/tutorials/enable-teams/3_install_app_studio.md rename to docs/_docs/clients-and-channels/tutorials/enable-teams/3-install-app-studio.md index bc842a5d41..648d1d9212 100644 --- a/docs/_docs/tutorials/enable-teams/3_install_app_studio.md +++ b/docs/_docs/clients-and-channels/tutorials/enable-teams/3-install-app-studio.md @@ -1,11 +1,12 @@ --- -category: Tutorials -subcategory: Enable Microsoft Teams +layout: tutorial +category: Clients and Channels +subcategory: Extend to Microsoft Teams title: Install App Studio order: 3 --- -# Tutorial: Adding your Assistant to Microsoft Teams +# Tutorial: {{page.subcategory}} ## Installing App Studio diff --git a/docs/_docs/tutorials/enable-teams/4_create_app_manifest.md b/docs/_docs/clients-and-channels/tutorials/enable-teams/4-create-app-manifest.md similarity index 92% rename from docs/_docs/tutorials/enable-teams/4_create_app_manifest.md rename to docs/_docs/clients-and-channels/tutorials/enable-teams/4-create-app-manifest.md index b673567c9f..2abc109bee 100644 --- a/docs/_docs/tutorials/enable-teams/4_create_app_manifest.md +++ b/docs/_docs/clients-and-channels/tutorials/enable-teams/4-create-app-manifest.md @@ -1,11 +1,12 @@ --- -category: Tutorials -subcategory: Enable Microsoft Teams +layout: 
tutorial +category: Clients and Channels +subcategory: Extend to Microsoft Teams title: Create application manifest order: 4 --- -# Tutorial: Adding your Assistant to Microsoft Teams +# Tutorial: {{page.subcategory}} ## Create the Application Manifest for Teams diff --git a/docs/_docs/tutorials/enable-teams/5_test_in_teams.md b/docs/_docs/clients-and-channels/tutorials/enable-teams/5-test-in-teams.md similarity index 72% rename from docs/_docs/tutorials/enable-teams/5_test_in_teams.md rename to docs/_docs/clients-and-channels/tutorials/enable-teams/5-test-in-teams.md index b931f713c3..8caffb406c 100644 --- a/docs/_docs/tutorials/enable-teams/5_test_in_teams.md +++ b/docs/_docs/clients-and-channels/tutorials/enable-teams/5-test-in-teams.md @@ -1,11 +1,12 @@ --- -category: Tutorials -subcategory: Enable Microsoft Teams +layout: tutorial +category: Clients and Channels +subcategory: Extend to Microsoft Teams title: Test in Teams order: 5 --- -# Tutorial: Adding your Assistant to Microsoft Teams +# Tutorial: {{page.subcategory}} ## Testing in Teams diff --git a/docs/_docs/tutorials/enable-teams/6_add_commands.md b/docs/_docs/clients-and-channels/tutorials/enable-teams/6-add-commands.md similarity index 90% rename from docs/_docs/tutorials/enable-teams/6_add_commands.md rename to docs/_docs/clients-and-channels/tutorials/enable-teams/6-add-commands.md index 347e30cc98..dbccab8406 100644 --- a/docs/_docs/tutorials/enable-teams/6_add_commands.md +++ b/docs/_docs/clients-and-channels/tutorials/enable-teams/6-add-commands.md @@ -1,11 +1,12 @@ --- -category: Tutorials -subcategory: Enable Microsoft Teams +layout: tutorial +category: Clients and Channels +subcategory: Extend to Microsoft Teams title: Add commands order: 6 --- -# Tutorial: Adding your Assistant to Microsoft Teams +# Tutorial: {{page.subcategory}} ## Adding Commands diff --git a/docs/_docs/clients-and-channels/tutorials/enable-teams/7-next-steps.md b/docs/_docs/clients-and-channels/tutorials/enable-teams/7-next-steps.md new file mode 100644 index 0000000000..8b6ca7447c --- /dev/null +++ b/docs/_docs/clients-and-channels/tutorials/enable-teams/7-next-steps.md @@ -0,0 +1,36 @@ +--- +layout: tutorial +category: Clients and Channels +subcategory: Extend to Microsoft Teams +title: Next steps +order: 7 +--- + +# Tutorial: {{page.subcategory}} + +## Next Steps + +Learn more from additional documentation provided by Microsoft Teams. + +
+- **Test and debug your Microsoft Teams bot**: When testing your bot you need to take into consideration both the context(s) you want your bot to run in, as well as any functionality you may have added to your bot that requires data specific to Microsoft Teams.
+- **Quickly develop apps with App Studio for Microsoft Teams**: App Studio makes it easy to start creating or integrating your own Microsoft Teams apps.
\ No newline at end of file
diff --git a/docs/_docs/help/faq.md b/docs/_docs/help/faq.md
index 64a9f01db7..c6ff8dec32 100644
--- a/docs/_docs/help/faq.md
+++ b/docs/_docs/help/faq.md
@@ -2,108 +2,127 @@ category: Help
title: Frequently asked questions
order: 1
+toc: true
---

# {{ page.title }}
{:.no_toc}

-## Contents
-{:.no_toc}
-
-*
-{:toc}
-
-
## Virtual Assistant

-### What is the Bot Framework Virtual Assistant solution accelerator?
-The Bot Framework Virtual Assistant solution accelerator enables you to build a conversational assistant tailored to your brand, personalized for your users, and available across a broad range of clients and devices.
-This greatly simplifies the creation of a new bot project by providing basic conversational intents, a dispatch model, Language Understanding and QnA Maker integration, Skills, and automated ARM deployment.
+### What is the Bot Framework Virtual Assistant Solution Accelerator?
+{:.no_toc}
+The Bot Framework Virtual Assistant template enables you to build a conversational assistant tailored to your brand, personalized for your users, and available across a broad range of clients and devices.
+This greatly simplifies the creation of a new bot project by providing basic conversational intents, a dispatch model, Language Understanding and QnA Maker integration, Skills, and automated ARM deployment.

### What is the architecture of a Virtual Assistant solution?
+{:.no_toc}
-Learn more about the [Virtual Assistant solution architecture]({{site.baseurl}}/reference/virtual-assistant/architecture).
+Learn more about the [Virtual Assistant solution architecture]({{site.baseurl}}/overview/virtual-assistant-solution).

### How do I create a Virtual Assistant?
+{:.no_toc}
-Follow a guided tutorial to create a Virtual Assistant (available in [C#]({{site.baseurl}}/tutorials/csharp/create-assistant/1_intro) or [Typescript]({{site.baseurl}}/tutorials/typescript/create-assistant/1_intro)).
+Follow a guided tutorial to create a Virtual Assistant (available in [C#]({{site.baseurl}}/virtual-assistant/tutorials/create-assistant/csharp/1-intro) or [Typescript]({{site.baseurl}}/virtual-assistant/tutorials/create-assistant/typescript/1-intro)).

### How do I customize a Virtual Assistant?
+{:.no_toc}
-Follow a guided tutorial to customize a Virtual Assistant (available in [C#]({{site.baseurl}}/tutorials/csharp/customize-assistant/1_intro) or [Typescript]({{site.baseurl}}/tutorials/typescript/customize-assistant/1_intro)).
+Follow a guided tutorial to customize a Virtual Assistant (available in [C#]({{site.baseurl}}/virtual-assistant/tutorials/customize-assistant/csharp/1-intro) or [Typescript]({{site.baseurl}}/virtual-assistant/tutorials/customize-assistant/typescript/1-intro)).

### How do I deploy a Virtual Assistant?
+{:.no_toc}
-Learn how to deploy a Virtual Assistant by [automated scripts]({{site.baseurl}}/reference/virtual-assistant/deploymentscripts) or [manual configuration]({{site.baseurl}}/howto/virtual-assistant/manualdeployment).
+Learn how to deploy a Virtual Assistant by [automated scripts]({{site.baseurl}}/help/reference/deployment-scripts) or [manual configuration]({{site.baseurl}}/virtual-assistant/tutorials/deploy-assistant/cli/1-intro).

### How do I test a Virtual Assistant?
+{:.no_toc}
-Learn how to [test a Virtual Assistant]({{site.baseurl}}/howto/virtual-assistant/testing).
+Learn how to [test a Virtual Assistant]({{site.baseurl}}/virtual-assistant/handbook/testing/).

### How do I pass events to a Virtual Assistant?
+{:.no_toc}
Event activities are used to pass metadata between a Bot and user without being visible to the user. The data from these activities can be processed by a Virtual Assistant to fulfill scenarios like providing a summary of the day ahead or filling semantic action slots on a Skill.
-Learn more on [sample event activities packaged with the Virtual Assistant template]({{site.baseurl}}/reference/virtual-assistant/events).
+Learn more on [sample event activities packaged with the Virtual Assistant template]({{site.baseurl}}/virtual-assistant/handbook/events/).

### How do I link user accounts to a Virtual Assistant?
+{:.no_toc}
-Learn how to [link user accounts to a Virtual Assistant]({{site.baseurl}}/howto/virtual-assistant/testing).
+Learn how to [link user accounts to a Virtual Assistant]({{site.baseurl}}/solution-accelerators/samples/linked-accounts/).

### How do I collect feedback from users for a Virtual Assistant?
+{:.no_toc}
-Learn more about using the [sample feedback middleware that enables you to capture feedback from a Virtual Assistant's users]({{site.baseurl}}/reference/virtual-assistant/events) in Application Insights telemetry.
+Learn more about using the [sample feedback middleware that enables you to capture feedback from a Virtual Assistant's users]({{site.baseurl}}/virtual-assistant/handbook/feedback/) in Application Insights telemetry.

### How does localization work for a Virtual Assistant?
+{:.no_toc}
-Learn how to [manage localization across a Virtual Assistant environment]({{site.baseurl}}/reference/virtual-assistant/localization).
+Learn how to [manage localization across a Virtual Assistant environment]({{site.baseurl}}/virtual-assistant/handbook/localization/).

### How do I send proactive messages to users?
+{:.no_toc}
-Learn how to [send proactive messages to users]({{site.baseurl}}/howto/virtual-assistant/proactivemessaging).
+Learn how to [send proactive messages to users]({{site.baseurl}}/solution-accelerators/samples/proactive-notifications/).

### How do I convert from the Enterprise Template to the Virtual Assistant Template?
+{:.no_toc}
-Learn how to [convert from the Enterprise Template to the Virtual Assistant Template]({{site.baseurl}}/howto/virtual-assistant/ettovamigration).
+Learn how to [convert from the Enterprise Template to the Virtual Assistant Template]({{site.baseurl}}/virtual-assistant/handbook/migration/).

### What happened to the Virtual Assistant solution (v0.3 and earlier)?
+{:.no_toc}
-Learn how to [update from the Virtual Assistant solution (v0.3 and earlier)]({{site.baseurl}}/howto/virtual-assistant/oldvatomigration).
+The Virtual Assistant solution from v0.3 and earlier was delivered with multiple sample Skills to support productivity and point of interest scenarios. These are now available as independent Bot Framework Skills, reusable Skills that can be added to an existing bot.

## Skills

### What is a Bot Framework Skill?
+{:.no_toc}
Bot Framework Skills are re-usable skill building blocks covering conversational use-cases, enabling you to add extensive functionality to a Bot within minutes. Skills include Language Understanding models, dialogs, and integration code, and are delivered in source code - enabling you to customize and extend as required.

### What sample Skills are available?
-The following sample Skills are available out of the box, with appropriate steps required to deploy and configure for your own use:
-- [Calendar]({{site.baseurl}}/reference/skills/productivity-calendar)
-- [Email]({{site.baseurl}}/reference/skills/productivity-email)
-- [To Do]({{site.baseurl}}/reference/skills/productivity-todo)
-- [Point of Interest]({{site.baseurl}}/reference/skills/pointofinterest)
-- [Experimental]({{site.baseurl}}/reference/skills/experimental)
+{:.no_toc}
+The following sample Skills are available out of the box, with appropriate steps required to deploy and configure for your own use:
+- [Calendar]({{site.baseurl}}/skills/samples/calendar)
+- [Email]({{site.baseurl}}/skills/samples/email)
+- [To Do]({{site.baseurl}}/skills/samples/to-do)
+- [Point of Interest]({{site.baseurl}}/skills/samples/point-of-interest)
+- [Experimental]({{site.baseurl}}/skills/samples/experimental)

### How do I create a Bot Framework Skill?
+{:.no_toc}
-Follow a guided tutorial to create a Bot Framework Skill (available in [C#]({{site.baseurl}}/tutorials/csharp/create-skill/1_intro) or [Typescript]({{site.baseurl}}/tutorials/typescript/create-skill/1_intro)).
+Follow a guided tutorial to create a Bot Framework Skill (available in [C#]({{site.baseurl}}/skills/tutorials/create-skill/csharp/1-intro) or [Typescript]({{site.baseurl}}/skills/tutorials/create-skill/typescript/1-intro)).

### How do I customize a Bot Framework Skill?
+{:.no_toc}
-Follow a guided tutorial to customize a Bot Framework Skill (available in [C#]({{site.baseurl}}/tutorials/csharp/customize-skill/1_intro) or [Typescript]({{site.baseurl}}/tutorials/typescript/customize-skill/1_intro)).
+Follow a guided tutorial to customize a Bot Framework Skill (available in [C#]({{site.baseurl}}/skills/tutorials/customize-skill/csharp/1-intro) or [Typescript]({{site.baseurl}}/skills/tutorials/customize-skill/typescript/1-intro)).

### What are the best practices when developing custom Bot Framework Skills?
+{:.no_toc}
-Learn the [best practices when developing a custom Bot Framework Skill]({{site.baseurl}}/reference/skills/bestpractices).
+Learn the [best practices when developing a custom Bot Framework Skill]({{site.baseurl}}/skills/handbook/best-practices).

### How do I add Skills to a Virtual Assistant?
+{:.no_toc}
-Learn how to [add Skills to a Virtual Assistant]({{site.baseurl}}/howto/skills/addingskills).
+Learn how to [add Skills to a Virtual Assistant]({{site.baseurl}}/skills/handbook/add-skills-to-a-virtual-assistant).

### What is a Bot Framework Skill manifest?
+{:.no_toc}
-The [Bot Framework Skill manifest]({{site.baseurl}}/reference/skills/skillmanifest) enables Skills to be self-describing in that they communicate the name and description of a Skill, it's authentication requirements (if appropriate), along with discrete actions it exposes.
+The [Bot Framework Skill manifest]({{site.baseurl}}/skills/handbook/manifest) enables Skills to be self-describing in that they communicate the name and description of a Skill, its authentication requirements (if appropriate), along with discrete actions it exposes.

-This manifest provides all of the metadata required for a calling Bot to know when to trigger invoking a Skill and what actions it provides. The manifest is used by the Botskills command line tool to configure a Bot to make use of a Skill.
+This manifest provides all of the metadata required for a calling Bot to know when to trigger invoking a Skill and what actions it provides.
The manifest is used by the Botskills command line tool to configure a Bot to make use of a Skill.

### How does Bot Framework Skill authentication work?
+{:.no_toc}
-A Skill needs to be able to authenticate the request from a Virtual Assistant, [learn how a Skill uses JWT and whitelist authentication]({{site.baseurl}}/reference/skillauthentication).
+A Skill needs to be able to authenticate the request from a Virtual Assistant, [learn how a Skill uses JWT and whitelist authentication]({{site.baseurl}}/skills/handbook/authentication).

### What is the Botskills Command Line (CLI) tool?
+{:.no_toc}
-[Botskills command line tool]({{site.baseurl}}/reference/skills/botskills) allows you to automate teh connection between a Virtual Assistant and your Skills; this includes the process of updating your dispatch models and creating authentication connections when needed.
+[Botskills command line tool]({{site.baseurl}}/help/reference/botskills) allows you to automate the connection between a Virtual Assistant and your Skills; this includes the process of updating your dispatch models and creating authentication connections when needed.

### How do I enable Bot Framework Skills on an existing v4 Bot?
+{:.no_toc}
-Learn how to [enable Bot Framework Skill support on an existing v4 Bot]({{site.baseurl}}/howto/skills/addskillsupportforv4bot).
+Learn how to [enable Bot Framework Skill support on an existing v4 Bot]({{site.baseurl}}/skills/tutorials/extend-v4-bot/csharp/1-intro).

### How do I convert an existing v4 Bot to a Bot Framework Skill?
+{:.no_toc}
-Learn how to [convert an existing v4 Bot to a Bot Framework Skill]({{site.baseurl}}/howto/skills/skillenablingav4bot).
+Learn how to [convert an existing v4 Bot to a Bot Framework Skill]({{site.baseurl}}/skills/tutorials/convert-v4-bot/csharp/1-intro).

## Analytics

### How do I enable analytics for a bot or a Virtual Assistant?
+{:.no_toc}
[Application Insights](https://azure.microsoft.com/en-us/services/application-insights/) is an Azure service which enables analytics about your applications, infrastructure and network. Bot Framework can use the built-in Application Insights telemetry to provide information about how your bot is performing and track key metrics. The Bot Framework SDK ships with several samples that demonstrate how to add telemetry to your bot and produce reports (included).

[Power BI](https://powerbi.microsoft.com/) is a business analytics service that lets you visualize your data and share insights across your organization. You can ingest data from Application Insights into live dashboards and reports.

@@ -111,22 +130,25 @@ Learn how to [convert an existing v4 Bot to a Bot Framework Skill]({{site.baseur
[Learn more]({{site.baseurl}}/overview/analytics/)

### How do I configure Application Insights for a bot or Virtual Assistant?
+{:.no_toc}
Bot Framework can use the Application Insights telemetry to provide information about how your bot is performing, and track key metrics. The Bot Framework SDK ships with several samples that demonstrate how to add telemetry to your bot and produce reports (included).

-Common queries for bot analytics are available in [Application Insights Analytics]().
-
Examples of Power BI dashboards are provided in the [Power BI Analytics sample](https://aka.ms/botPowerBiTemplate), highlighting how to gain insights on your bot's performance and quality.

### Where can I download the sample Power BI for a Virtual Assistant?
+{:.no_toc}
Examples of Power BI dashboards are provided in the [Power BI Analytics sample](https://aka.ms/botPowerBiTemplate), highlighting how to gain insights on your bot's performance and quality.

## Samples

### How do I set up Enterprise Notifications for a Virtual Assistant?
+{:.no_toc}
Learn how to [set up the Enterprise Notifications sample for a Virtual Assistant](https://aka.ms/enterprisenotificationssample).

### How do I use the Virtual Assistant Android Client?
+{:.no_toc}
Learn how to [configure your Virtual Assistant with the Virtual Assistant Android Client](https://aka.ms/bfvirtualassistantclientdocs).

### How do I use the Hospitality Assistant sample?
-The [Hospitality Aassistant sample](https://aka.ms/hospitalityassistantdocs) is a prototype of a Virtual Assistant solution that helps to conceptualize and demonstrate how an assistant could be used in a hospitality-focused scenario. It also provides a starting point for those interested in creating an assistant customized for this scenario.
+{:.no_toc}
+The [Hospitality Assistant sample](https://aka.ms/hospitalityassistantdocs) is a prototype of a Virtual Assistant solution that helps to conceptualize and demonstrate how an assistant could be used in a hospitality-focused scenario. It also provides a starting point for those interested in creating an assistant customized for this scenario.
\ No newline at end of file
diff --git a/docs/_docs/help/knownissues.md b/docs/_docs/help/known-issues.md
similarity index 97%
rename from docs/_docs/help/knownissues.md
rename to docs/_docs/help/known-issues.md
index 5870726959..5064e4c9e2 100644
--- a/docs/_docs/help/knownissues.md
+++ b/docs/_docs/help/known-issues.md
@@ -2,62 +2,12 @@ category: Help
title: Known issues
order: 2
+toc: true
---

# {{ page.title }}
{:.no_toc}

-## Contents
-{:.no_toc}
-
-*
-{:toc}
-
-## Skill dialog telemetry is not showing up in the Power BI dashboard
-In the Bot Builder SDK version 4.5.3 and below, there is a bug which causes the Activity ID and Conversation ID to be null on all telemetry logged over a web socket connection. This causes the Skill dialog telemetry to not populate properly in the [Conversational AI Power BI sample](https://aka.ms/botPowerBiTemplate). To resolve this issue, follow these steps:
-
-1. Update to the latest Microsoft.Bot.Builder packages
-    1. Add the following package source to your project: **https://botbuilder.myget.org/F/botbuilder-v4-dotnet-daily/api/v3/index.json**
-    1. Update all Microsoft.Bot.Builder packages to version **4.6.0-preview-191005-1** and above
-1. Add the following code to **Startup.cs**:
-    ```
-    // Configure telemetry
-    services.AddApplicationInsightsTelemetry();
-    services.AddSingleton();
-    services.AddSingleton();
-    services.AddSingleton();
-    services.AddSingleton();
-    services.AddSingleton();
-    ```
-1. Update your **DefaultAdapter.cs** and **DefaultWebsocketAdapter.cs** with the following:
-    ```
-    public DefaultAdapter(
-        BotSettings settings,
-        TemplateEngine templateEngine,
-        ConversationState conversationState,
-        ICredentialProvider credentialProvider,
-        TelemetryInitializerMiddleware telemetryMiddleware,
-        IBotTelemetryClient telemetryClient)
-        : base(credentialProvider)
-    {
-        ...
- - Use(telemetryMiddleware); - - // Uncomment the following line for local development without Azure Storage - // Use(new TranscriptLoggerMiddleware(new MemoryTranscriptStore())); - Use(new TranscriptLoggerMiddleware(new AzureBlobTranscriptStore(settings.BlobStorage.ConnectionString, settings.BlobStorage.Container))); - Use(new ShowTypingMiddleware()); - Use(new FeedbackMiddleware(conversationState, telemetryClient)); - Use(new SetLocaleMiddleware(settings.DefaultLocale ?? "en-us")); - Use(new EventDebuggerMiddleware()); - } - ``` - -For more information, refer to the following resources: -- [Bot Builder SDK issue](https://github.com/microsoft/botbuilder-dotnet/issues/2474) -- [Bot Builder SDK pull request](https://github.com/microsoft/botbuilder-dotnet/pull/2580) - ## My Microsoft App Registration could not be automatically provisioned Some users might experience the following error when running deployment `Could not provision Microsoft App Registration automatically. Please provide the -appId and -appPassword arguments for an existing app and try again`. In this situation, [create and register an Azure AD application](https://docs.microsoft.com/en-us/azure/bot-service/bot-builder-authentication?view=azure-bot-service-4.0&tabs=csharp%2Cbot-oauth#create-and-register-an-azure-ad-application) manually. @@ -132,7 +82,7 @@ Due to a limitation with the LUIS authoring APIs the original deployment scripts This may cause you to also experience `Forbidden` LUIS errors when testing your Bot as you may have exhausted the quota for your starter LUIS key, changing from your starter LUIS subscription key will resolve this. -This has now been resolved in the latest deployment scripts which you can update to following [these instructions]({{site.baseurl}}/reference/virtual-assistant/deploymentscripts#updating-your-deployment-scripts). If you have an existing deployment you'll have to manually perform the following steps: +This has now been resolved in the latest deployment scripts which you can update to following [these instructions]({{site.baseurl}}/help/reference/deployment-scripts/#updating-your-deployment-scripts). If you have an existing deployment you'll have to manually perform the following steps: 1. As shown below go through **each LUIS model including Dispatch**, click Assign Resoucre and locate the appropriate subscription key and then re-publish. @@ -185,7 +135,50 @@ There is a known issue in the `Botskills` CLI tool during the command's executio Example of the `connect` command with a trailing backslash in the `luisFolder` argument: ``` bash -botskills connect --localManifest "" --luisFolder "/" --ts +botskills connect --botName "" --localManifest "" --luisFolder "/" --ts ``` So, to avoid this, it's highly recommended to use `PowerShell 6` to execute the CLI tool commands. Also, you can remove the trailing backslash of the argument. + +## Skill dialog telemetry is not showing up in the Power BI dashboard +In the Bot Builder SDK version 4.5.3 and below, there is a bug which causes the Activity ID and Conversation ID to be null on all telemetry logged over a web socket connection. This causes the Skill dialog telemetry to not populate properly in the [Conversational AI Power BI sample](https://aka.ms/botPowerBiTemplate). To resolve this issue, follow these steps: + +1. Update to the latest Microsoft.Bot.Builder packages + 1. Add the following package source to your project: **https://botbuilder.myget.org/F/botbuilder-v4-dotnet-daily/api/v3/index.json** + 1. 
Update all Microsoft.Bot.Builder packages to version **4.6.0-preview-191005-1** and above +1. Add the following code to **Startup.cs**: + ``` + // Configure telemetry + services.AddApplicationInsightsTelemetry(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); + ``` +1. Update your **DefaultAdapter.cs** and **DefaultWebsocketAdapter.cs** with the following: + ``` + public DefaultAdapter( + BotSettings settings, + TemplateEngine templateEngine, + ConversationState conversationState, + ICredentialProvider credentialProvider, + TelemetryInitializerMiddleware telemetryMiddleware, + IBotTelemetryClient telemetryClient) + : base(credentialProvider) + { + ... + Use(telemetryMiddleware); + // Uncomment the following line for local development without Azure Storage + // Use(new TranscriptLoggerMiddleware(new MemoryTranscriptStore())); + Use(new TranscriptLoggerMiddleware(new AzureBlobTranscriptStore(settings.BlobStorage.ConnectionString, settings.BlobStorage.Container))); + Use(new ShowTypingMiddleware()); + Use(new FeedbackMiddleware(conversationState, telemetryClient)); + Use(new SetLocaleMiddleware(settings.DefaultLocale ?? "en-us")); + Use(new EventDebuggerMiddleware()); + } + ``` + +For more information, refer to the following resources: +- [Bot Builder SDK issue](https://github.com/microsoft/botbuilder-dotnet/issues/2474) +- [Bot Builder SDK pull request](https://github.com/microsoft/botbuilder-dotnet/pull/2580) \ No newline at end of file diff --git a/docs/_docs/reference/skills/botskills.md b/docs/_docs/help/reference/botskills.md similarity index 84% rename from docs/_docs/reference/skills/botskills.md rename to docs/_docs/help/reference/botskills.md index 2ccc137dba..559201cd71 100644 --- a/docs/_docs/reference/skills/botskills.md +++ b/docs/_docs/help/reference/botskills.md @@ -1,21 +1,15 @@ --- -category: Reference -subcategory: Skills +category: Help +subcategory: Reference title: BotSkills CLI Tool description: Details on usage and commands. -order: 1 +order: 2 +toc: true --- # {{ page.title }} {:.no_toc} -## In this reference -{:.no_toc} - -* -{:toc} - -## Overview Botskills command line tool allows you to automate the connection between the **Virtual Assistant** and your **Skills**, which includes the process of updating your dispatch models and create authentication connections where needed. The CLI performs the following operations on your behalf: 1. Retrieve the **Skill Manifest** from the remote Skill through the `/api/skill/manifest` endpoint. If it is a local Skill you should specify the path. @@ -24,7 +18,7 @@ The CLI performs the following operations on your behalf: 4. Refresh the dispatch LUIS model with the new utterances. 5. In the case of **Active Directory Authentication Providers**, an authentication connection will be added to your Bot automatically and the associated Scopes added to your Azure AD application that backs your deployed Assistant. -> Your Virtual Assistant must have been deployed using the [deployment tutorial]({{site.baseurl}}/tutorials/csharp/create-assistant/4_provision_your_azure_resources) before using the `botskills` CLI as it relies on the Dispatch models being available and a deployed Bot for authentication connection information. 
+> Your Virtual Assistant must have been deployed using the [deployment tutorial]({{site.baseurl}}/virtual-assistant/tutorials/create-assistant/csharp/4-provision-your-azure-resources) before using the `botskills` CLI as it relies on the Dispatch models being available and a deployed Bot for authentication connection information. ## Prerequisites - [Node.js](https://nodejs.org/) version 10.8 or higher @@ -42,11 +36,13 @@ The CLI performs the following operations on your behalf: For all of this commands, the tool assumes that you are running the CLI within the **Virtual Assistant project directory** and have created your Bot through the template, and therefore have a `skills.json` file present in the working folder which contains the connected skills. ### Connect Skills +{:.no_toc} + The `connect` command allows you to connect a Skill, be it local or remote, to your Virtual Assistant bot. The Skill and Virtual Assistant can be in different coding languages without problem, this is, you can connect a Skill coded in C# into a Virtual Assistant coded in TypeScript, but be sure to specify your Virtual Assistant's coding language using `--cs` or `--ts`. Here is an example: ```bash -botskills connect --remoteManifest "http://.azurewebsites.net/api/skill/manifest" --luisFolder --cs +botskills connect --botName --remoteManifest "http://.azurewebsites.net/api/skill/manifest" --luisFolder --cs ``` *Remember to re-publish your Assistant to Azure after you've added a Skill unless you plan on testing locally only* @@ -54,6 +50,8 @@ botskills connect --remoteManifest "http://.azurewebsites.n For further information, see the [Connect command documentation]({{site.repo}}/tree/master/tools/botskills/docs/connect.md). ### Disconnect Skills +{:.no_toc} + The `disconnect` command allows you to disconnect a Skill from your Virtual Assistant. You can always check the Skills already connected to your Virtual Assistant using the [`list` command](#List-Connected-Skills). Remember to specify the coding language of your Virtual Assistant using `--cs` or `--ts`. Here is an example: @@ -65,17 +63,21 @@ For further information, see the [Disconnect command documentation]({{site.repo} > Note: The id of the Skill can also be aquired using the `botskills list` command. You can check the [List command documentation]({{site.repo}}/tree/master/tools/botskills/docs/list.md). -### Update a Connected Skill +### Update a connected Skill +{:.no_toc} + The `update` command allows you to update a Skill, be it local or remote, to your Virtual Assistant bot. The Skill and Virtual Assistant can be in different coding languages without problem, this is, you can update a Skill coded in C# into a Virtual Assistant coded in TypeScript, but be sure to specify your Virtual Assistant's coding language using `--cs` or `--ts`. Here is an example: ```bash -botskills update --remoteManifest "http://.azurewebsites.net/api/skill/manifest" --luisFolder --cs +botskills update --botName --remoteManifest "http://.azurewebsites.net/api/skill/manifest" --luisFolder --cs ``` For further information, see the [Update command documentation]({{site.repo}}/tree/master/tools/botskills/docs/update.md). -### Refresh Connected Skills +### Refresh connected Skills +{:.no_toc} + The `refresh` command allows you to train and publish your existing dispatch model of your **Virtual Assistant**, specifying the Virtual Assistant's coding language using `--cs` or `--ts`. 
This functionality is mainly useful after using the `connect` or `disconnect` command with the `--noRefresh` flag. Here is an example: @@ -85,7 +87,9 @@ botskills refresh --cs For further information, see the [Refresh command documentation]({{site.repo}}/tree/master/tools/botskills/docs/refresh.md). -### List Connected Skills +### List connected Skills +{:.no_toc} + The `list` command allows you to acknowledge the Skills currently connected to your Virtual Assistant. Here is an example: diff --git a/docs/_docs/reference/virtual-assistant/deploymentscripts.md b/docs/_docs/help/reference/deployment-scripts.md similarity index 98% rename from docs/_docs/reference/virtual-assistant/deploymentscripts.md rename to docs/_docs/help/reference/deployment-scripts.md index c8c55d209d..6ab5d5f9e6 100644 --- a/docs/_docs/reference/virtual-assistant/deploymentscripts.md +++ b/docs/_docs/help/reference/deployment-scripts.md @@ -1,21 +1,15 @@ --- -category: Reference -subcategory: Virtual Assistant +category: Help +subcategory: Reference title: Deployment Scripts description: Reference for deployment tools provided in the Virtual Assistant Template. -order: 3 +order: 1 +toc: true --- # {{ page.title }} {:.no_toc} -## In this reference -{:.no_toc} - -* -{:toc} -## Intro - A number of PowerShell scripts are provided in the Virtual Assistant Template to help deploy and configure your different resources. Please find details on each script's purpose, parameters, and outputs below. ## Resources @@ -30,6 +24,7 @@ A number of PowerShell scripts are provided in the Virtual Assistant Template to ## Scripts ### deploy.ps1 +{:.no_toc} This script orchestrates the deployment of all Azure Resources and Cognitive Models to get the Virtual Assistant running. @@ -48,6 +43,7 @@ This script orchestrates the deployment of all Azure Resources and Cognitive Mod | logFile | Log file for any errors that occur during script execution. Defaults to `Deployment` folder | No | ### deploy_cognitive_models.ps1 +{:.no_toc} This script deploys all the language models found in `Deployment/Resources/LU` and the knowledgebases found in `Deployment/Resources/QnA`. Finally it creates a Dispatch model to dispatch between all cognitive models. @@ -66,6 +62,7 @@ This script deploys all the language models found in `Deployment/Resources/LU` a | logFile | Log file for any errors that occur during script execution. Defaults to `Deployment` folder | No | ### update_cognitive_models.ps1 +{:.no_toc} This script updates your hosted language models and knowledgebases based on local .lu files. Or, it can update your local .lu files based on your current models. Finally, it refreshes your dispatch model with the latest changes. @@ -80,6 +77,8 @@ This script updates your hosted language models and knowledgebases based on loca | logFile | Log file for any errors that occur during script execution. Defaults to `Deployment` folder | No | ### publish.ps1 +{:.no_toc} + This script builds and publishes your local project to your Azure. | Parameter | Description | Required? | @@ -88,9 +87,11 @@ This script builds and publishes your local project to your Azure. | resourceGroup | The resource group for the Azure Web App | Yes | | projFolder | The project folder. Defaults to | No | -## Common Questions +## Frequently asked questions ### What services are deployed by the script? +{:.no_toc} + The Virtual Assistant Template relies on a number of Azure resources to run. 
The included deployment scripts and ARM template use the following services:

Resource | Notes |
@@ -108,9 +109,13 @@ QnA Maker
Azure Search Service | Search index for your QnA Maker knowledgebases.
Content Moderator | Subscription keys for Content Moderator Cognitive Service.

### How do I reduce my Azure costs during development?
+{:.no_toc}
+
The default `parameters.template.json` file is configured to use all free service tiers to reduce the cost of testing. Provide this file in the `-parametersFile` parameter on the `deploy.ps1` script.

**Note: There are service limits associated with free tiers (e.g. Azure Search permits only 1 free tier per subscription). Free tiers should only be used for development, not for production implementations.**

### How do I customize my Azure resource deployment?
+{:.no_toc}
+
Any of the following parameters in the ARM template can be overridden with your preferred values using the `parameters.template.json` file provided in the `Deployment/Resources` folder:

| Parameters | Default Value |
@@ -166,6 +171,8 @@ Then provide the path to the file as an argument on the `deploy.ps1` script:
```

### How do I use my existing Azure resources from the same resource group?
+{:.no_toc}
+
If you want to use existing resources from the same resource group, override the parameters for the services you want in the `parameters.template.json`. Provide this file in the `-parametersFile` parameter on the `deploy.ps1` script.

#### parameters.template.json
@@ -179,6 +186,8 @@ If you want to use existing resources from the same resource group, override the
```

### How do I use my existing Azure resources from a different resource group?
+{:.no_toc}
+
If you want to use an existing resource from a different resource group, follow these steps:

#### Cosmos DB
@@ -221,7 +230,7 @@ If you want to use an existing resource from a different resource group, follow
2. Provide the appropriate configuration in `appsettings.json` from the [Azure Portal](https://portal.azure.com).

### How do I update my local deployment scripts with the latest?
-
+{:.no_toc}
Once you have created your Virtual Assistant or Skill projects using the various templates and generators, you may need to update the deployment scripts to reflect ongoing changes to these scripts over time.

#### Sample Project
diff --git a/docs/_docs/help/support.md b/docs/_docs/help/support.md
new file mode 100644
index 0000000000..189d831711
--- /dev/null
+++ b/docs/_docs/help/support.md
@@ -0,0 +1,17 @@
+---
+category: Help
+title: Support and feedback
+order: 3
+---
+
+
+# {{ page.title }}
+{:.no_toc}
+
+These resources provide additional information and support for developing a Virtual Assistant and Skills.
+
+|Source|Description|
+|-|-|
+|[GitHub repository](https://github.com/microsoft/botframework-solutions)|The Bot Framework Solutions repository|
+|[Stack Overflow](https://stackoverflow.com/questions/tagged/botframework)|Community support on Stack Overflow|
+|[Additional resources](https://docs.microsoft.com/en-us/azure/bot-service/bot-service-resources-links-help?view=azure-bot-service-4.0)|Additional support resources related to the Bot Framework|
diff --git a/docs/_docs/howto/samples/enterprisenotifications.md b/docs/_docs/howto/samples/enterprisenotifications.md
deleted file mode 100644
index 1294712e73..0000000000
--- a/docs/_docs/howto/samples/enterprisenotifications.md
+++ /dev/null
@@ -1,129 +0,0 @@
----
-category: How To
-subcategory: Samples
-title: Set up Enterprise Notifications for a Virtual Assistant
-description: Steps for configuring the Enterprise Notifications sample
-order: 1
----
-
-# {{ page.title }}
-{:.no_toc}
-
-## In this how-to
-{:.no_toc}
-
-*
-{:toc}
-
-## Prerequisites
-
-1. [Create a Virtual Assistant]({{ site.baseurl }}/tutorials/csharp/create-assistant/1_intro/) to set up your Virtual Assistant environment.
-
-1. Manually deploy the following Azure resources:
-
-   - [Create](https://ms.portal.azure.com/#create/Microsoft.EventHub) an [Azure Event Hub](https://azure.microsoft.com/en-us/services/event-hubs/) resource
-   - [Create](https://ms.portal.azure.com/#create/Microsoft.FunctionApp) an [Azure Function](https://azure.microsoft.com/en-us/services/functions/) resource
-   - [Create](https://ms.portal.azure.com/#create/Microsoft.NotificationHub) an [Azure Notification Hub](https://azure.microsoft.com/en-us/services/notification-hubs/) resource
-   - [Create](https://ms.portal.azure.com/#create/Microsoft.DocumentDB) an [Azure Cosmos DB](https://azure.microsoft.com/en-us/services/cosmos-db/) resource
-
-1. Install the [Bot Framework Emulator](https://aka.ms/botframeworkemulator) to use in testing.
-
-## Event Producer
-
-This sample includes an example [Event Producer]({{site.repo}}/samples/EnterpriseNotification/EventProducer) console application that sends an Event to the Event Hub for processing, simulating the creation of a notification.
-
-- Update `appSettings.json` with the `EventHubName` and `EventHubConnectionString`, which you can find by going to your Event Hub resource, creating an instance, and then a `Shared Access Policy`.
-
-### Azure Function - Event Handler
-This sample includes an example [EventHandler Azure Function]({{site.repo}}/Samples/EnterpriseNotification/EventHandler) which is triggered by Event delivery and handles Event processing.
-
-1. Update [Function1.cs]({{site.repo}}/samples/EnterpriseNotification/EventHandler/Function1.cs) and change the `EventHubTrigger` to reflect your Event Hub name.
-   ```csharp
-   public static async Task Run([EventHubTrigger("YourEventHubName", Connection = "EventHubConnection")] EventData[] events, ILogger log)
-   ```
-2. The Azure Functions blade in the Azure Portal provides a wide range of routes to deploy the provided code to your newly created Azure Function, including Visual Studio and VS Code. Follow this to deploy the sample EventHandler project.
-3. Once deployed, go to the Azure Function in Azure and choose Configuration.
-4. Create a new connection string called `EventHubConnection` and provide the same Event Hub connection string as in the previous section.
-5. In the `Application Settings` section, create the following settings, which are used by the Event Handler:
-   - `DirectLineSecret` - Located within the Channels section of your Azure Bot Service registration. Required to communicate with your assistant and send events.
-   - `DocumentDbEndpointUrl` - Located within the Cosmos DB Azure Portal blade. Required to access the User Preference store.
-   - `DocumentDbPrimaryKey` - Located within the Cosmos DB Azure Portal blade.
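As a point of reference, these application settings surface as environment variables inside the running function. A minimal sketch of reading them (the setting names match those above; the rest is illustrative, not the sample's exact code):

```csharp
// Sketch only: Azure Functions exposes application settings as environment variables.
var directLineSecret = Environment.GetEnvironmentVariable("DirectLineSecret");
var documentDbEndpointUrl = Environment.GetEnvironmentVariable("DocumentDbEndpointUrl");
var documentDbPrimaryKey = Environment.GetEnvironmentVariable("DocumentDbPrimaryKey");
```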
-## Virtual Assistant
-
-### ProactiveState Middleware
-
-In order to deliver messages to a conversation, the end user must already have had an interaction with the assistant. As part of this interaction, a `ConversationReference` needs to be persisted and used to resume the conversation.
-
-We provide a middleware component to perform this `ConversationReference` storage, which can be found in the Bot.Builder.Solutions package.
-
-1. Add this line to your `Startup.cs` to register the proactive state.
-   ```csharp
-   services.AddSingleton<ProactiveState>();
-   ```
-2. Within your `DefaultAdapter.cs`, add this line to the constructor:
-   ```csharp
-   ProactiveState proactiveState
-   ```
-3. Within your `DefaultAdapter.cs`, add this line:
-   ```csharp
-   Use(new ProactiveStateMiddleware(proactiveState));
-   ```
-
-### Event Handling
-
-The following code handles the `BroadcastEvent` event type sent by the Azure Function and is added to the Event Handling code. Within the Virtual Assistant this is handled by `OnEventAsync` within MainDialog.cs.
-
-The `_proactiveStateAccessor` is the state that contains a mapping between UserId and the previously persisted conversation. It retrieves the proactive state from a store previously saved by enabling the `ProactiveStateMiddleware`.
-
-Within `MainDialog.cs`, add the following changes:
-
-1. Add this variable to your `MainDialog` class.
-   ```csharp
-   private IStatePropertyAccessor<ProactiveModel> _proactiveStateAccessor;
-   ```
-2. Add this line to the constructor
-   ```csharp
-   ProactiveState proactiveState
-   ```
-   and initialize the state in the constructor:
-   ```csharp
-   _proactiveStateAccessor = proactiveState.CreateProperty<ProactiveModel>(nameof(ProactiveModel));
-   ```
-3. Add this event handler to your `OnEventAsync` handler to handle the `BroadcastEvent`:
-
-   ```csharp
-   case "BroadcastEvent":
-       // Deserialize the event payload sent by the Event Handler (UserId and Message).
-       var eventData = JsonConvert.DeserializeObject<EventData>(dc.Context.Activity.Value.ToString());
-
-       var proactiveModel = await _proactiveStateAccessor.GetAsync(dc.Context, () => new ProactiveModel());
-
-       var conversationReference = proactiveModel[MD5Util.ComputeHash(eventData.UserId)].Conversation;
-       await dc.Context.Adapter.ContinueConversationAsync(_appCredentials.MicrosoftAppId, conversationReference, ContinueConversationCallback(dc.Context, eventData.Message), cancellationToken);
-       break;
-   ```
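The `ContinueConversationCallback` method referenced above builds the delegate that sends the message into the resumed conversation. As a rough sketch only, treating the exact signature as illustrative rather than the sample's definitive implementation, it can be shaped like this using the SDK's `BotCallbackHandler` delegate:

```csharp
// Illustrative sketch, not the sample's exact code: create the callback invoked by
// ContinueConversationAsync to send the notification text into the conversation.
private BotCallbackHandler ContinueConversationCallback(ITurnContext context, string message)
{
    // The originating turn context is unused in this minimal sketch.
    return async (turnContext, cancellationToken) =>
    {
        var activity = turnContext.Activity.CreateReply(message);
        await turnContext.SendActivityAsync(activity, cancellationToken);
    };
}
```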
-## Testing and Validation
-
-Now events can be sent to a user through your Virtual Assistant in an active conversation.
-
-### Bot Framework Emulator
-
-Event generation must produce Events with the same `UserId` as the Emulator is using, so the existing conversation can be matched and notifications can be delivered.
-
-![UserId Settings]({{ site.baseurl }}/assets/images/emulator-userid.png)
-
-1. In the **Bot Framework Emulator**, navigate to **Settings** and provide a GUID to represent a simulated user ID. This will ensure any conversations with your Assistant use the same user ID.
-
-1. Begin a conversation with your Assistant to create a proactive state record for future use.
-
-## Event Producer
-
-1. Copy the user ID used in the **Bot Framework Emulator** into the `SendMessagesToEventHub` method within `Program.cs` of the **Event Producer**.
-This ensures any notifications sent are routed to your active conversation.
-
-1. Run the **Event Producer** to generate a message and observe that the message is shown within your session.
-
-![Enterprise Notification Demo]({{ site.baseurl }}/assets/images/enterprisenotification-demo.png)
\ No newline at end of file
diff --git a/docs/_docs/howto/samples/vaclient_android.md b/docs/_docs/howto/samples/vaclient_android.md
deleted file mode 100644
index 1d73138405..0000000000
--- a/docs/_docs/howto/samples/vaclient_android.md
+++ /dev/null
@@ -1,101 +0,0 @@
----
-category: How To
-subcategory: Samples
-title: Use the Virtual Assistant Client (Android)
-description: Steps for using the Virtual Assistant Client on Android
-order: 1
----
-
-# {{ page.title }}
-{:.no_toc}
-
-## In this how-to
-{:.no_toc}
-
-*
-{:toc}
-
-## Prerequisites
-
-1. Install [Android Studio](https://developer.android.com/studio/) on your PC.
-
-1. [Create a Virtual Assistant]({{ site.baseurl }}/tutorials/csharp/create-assistant/1_intro/) to set up your Virtual Assistant environment.
-
-1. [Enable speech]({{ site.baseurl }}/tutorials/enable-speech/1_intro) on your new Virtual Assistant, which enables you to:
-   - retrieve a [Microsoft Speech Cognitive Service subscription key]({{ site.baseurl }}/tutorials/enable-speech/2_create_speech_instance/)
-   - [add the Direct Line Speech channel to your Assistant]({{ site.baseurl }}/tutorials/enable-speech/3_add_speech_channel/)
-
-1. [Download](https://aka.ms/virtualassistantclient) the Virtual Assistant Client (Android) app source code.
-
-## Overview
-![Virtual Assistant Client (Android) overview diagram]({{ site.baseurl }}/assets/images/virtualassistantclient-android-overview.png)
-
-A user can interact with their Assistant using the **Virtual Assistant Client app** via widgets on the home screen or the main UI of the app.
-These bot responses can optionally be broadcast to an **Event Companion app** that doesn't need to implement the Speech SDK.
-
-## Building the project
-
-### Provide credentials to the application
-
-The following configuration values must be supplied to `DefaultConfiguration.java` to connect to the Assistant via the Direct Line Speech channel:
-* `SPEECH_SERVICE_SUBSCRIPTION_KEY`
-* `DIRECT_LINE_SPEECH_SECRET_KEY`
-* `USER_FROM_ID`
-
-```java
-public class DefaultConfiguration {
-
-    // Replace below with your own subscription key
-    public static final String SPEECH_SERVICE_SUBSCRIPTION_KEY = "YOUR_KEY_HERE";//TODO
-
-    public static final String DIRECT_LINE_SPEECH_SECRET_KEY = "YOUR_DIRECTLINE_SPEECH_KEY_HERE";//TODO
-
-    public static final String SPEECH_SERVICE_SUBSCRIPTION_KEY_REGION = "westus2";//TODO
-
-    public static final String USER_NAME = "User";
-    public static final String USER_FROM_ID = "YOUR_USER_FROM_ID_HERE";//TODO
-
-    public static final String LOCALE = "en-us";
-
-    public static final String KEYWORD = "computer";
-
-    // please note that the default colors are read from /res/values/colors.xml
-}
-```
-**Note:** There are two versions, one for the debug and one for the release build flavor.
-
-`USER_FROM_ID` is a unique identifier for all messages generated by the user; this is typically combined with [Linked Accounts]({{ site.baseurl }}/howto/virtual-assistant/linkedaccounts/).
-
-### Deploy
-1. Select the desired build flavor (debug or release) and ensure credentials are set for it.
-2. Deploy to the emulator or a device.
-
-## Using the Project
-### Permissions
-- **Record Audio**
-  - Required for the user to make voice requests to a bot. Without this, a user can only use the keyboard.
-- **Fine Location**
-  - Allow your Assistant to receive the `VA.Location` event with GPS coordinates to utilize location-based skills like Point of Interest.
-
-## Interacting with your Assistant
-### Conversation view
-![Conversation view]({{ site.baseurl }}/assets/images/virtualassistantclient-android-fullconversationview.png)
-
-To demonstrate a chat app experience with an Assistant, the main screen shows user/bot interactions in a threaded conversation.
-Trigger a conversation by selecting either:
-* the Mic button to speak to the bot
-* the Keyboard button to type to the bot
-
-### Native view
-![Native view]({{ site.baseurl }}/assets/images/virtualassistantclient-android-widgetview.png)
-
-Using widgets, you can demonstrate an Assistant having a native chat experience on a device.
-
-### Menu
-Swipe from the left to access the menu, providing the following functionality:
-* Restart conversation
-* Settings
-
-## Features
-
-Learn more about the [Virtual Assistant Client (Android) comprehensive feature set]({{ site.baseurl }}/reference/samples/vaclient_android/).
diff --git a/docs/_docs/howto/skills/addskillsupportforv4bot.md b/docs/_docs/howto/skills/addskillsupportforv4bot.md
deleted file mode 100644
index aa432df4d5..0000000000
--- a/docs/_docs/howto/skills/addskillsupportforv4bot.md
+++ /dev/null
@@ -1,210 +0,0 @@
----
-category: How To
-subcategory: Skills
-title: Enable skills on an existing v4 bot
-description: How to add Skills to an existing v4 bot (not Virtual Assistant template)
-order: 2
----
-
-# {{ page.title }}
-{:.no_toc}
-
-## In this how-to
-{:.no_toc}
-
-*
-{:toc}
-
-## Overview
-
-Creating a Bot Framework Bot through the Virtual Assistant template is the easiest way to get started with using Skills. If you have an existing v4 based Bot, the recommended approach is to take the resulting project from this template and bring across your custom dialogs to get started quickly.
-
-If, however, you have an existing v4 Bot to which you wish to add Skill capability, please follow the steps below.
-
-## Update your bot to use Bot Framework Solutions libraries
-#### C#
-Add [`Microsoft.Bot.Builder.Solutions`](https://www.nuget.org/packages/Microsoft.Bot.Builder.Solutions/) and [`Microsoft.Bot.Builder.Skills`](https://www.nuget.org/packages/Microsoft.Bot.Builder.Skills/) NuGet packages to your solution.
-
-#### TypeScript
-Add [`botbuilder-solutions`](https://www.npmjs.com/package/botbuilder-solutions) and [`botbuilder-skills`](https://www.npmjs.com/package/botbuilder-skills) npm packages to your solution.
-
-## Skill configuration
-#### C#
-The `Microsoft.Bot.Builder.Skills` package provides a `SkillManifest` type that describes a Skill. Your bot should maintain a collection of registered Skills, typically serialized into a JSON configuration file. The Virtual Assistant template uses a `skills.json` file for this purpose.
-
-As part of your Configuration processing you should construct a collection of registered Skills by deserializing this file, for example:
-
-```csharp
-public List<SkillManifest> Skills { get; set; }
-```
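For illustration only, deserializing `skills.json` into that collection could be sketched as follows, assuming Newtonsoft.Json and a top-level `skills` array (adjust to however your bot binds configuration):

```csharp
using System.Collections.Generic;
using System.IO;
using Newtonsoft.Json.Linq;

// Sketch: load skills.json (assumed shape: { "skills": [ ... ] }) into the
// collection consumed when registering SkillDialogs.
var raw = JObject.Parse(File.ReadAllText("skills.json"));
List<SkillManifest> skills = raw["skills"].ToObject<List<SkillManifest>>();
```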
-#### TypeScript
-The `botbuilder-skills` package provides an `ISkillManifest` interface that describes a Skill. Your bot should maintain a collection of registered Skills, typically serialized into a `JSON` configuration file. The Virtual Assistant template uses a `skills.json` file for this purpose that can be found in the `src` directory.
-
-That file must have the following structure:
-
-```json
-{
-    "skills": []
-}
-```
-
-As part of your Configuration processing you should construct a collection of registered Skills by deserializing this file, for example:
-
-```typescript
-import { skills as skillsRaw } from './skills.json';
-const skills: ISkillManifest[] = skillsRaw;
-```
-
-> NOTE: The `botbuilder-skills` package also provides an `IBotSettings` interface that can be used to store the keys/secrets of the services that will be used to connect services to the bot.
-
-## Skill Dialog registration
-#### C#
-In your `Startup.cs` file, register a `SkillDialog` for each registered skill as shown below; this uses the collection of Skills that you created in the previous step.
-
-```csharp
-// Register skill dialogs
-services.AddTransient<List<SkillDialog>>(sp =>
-{
-    var userState = sp.GetService<UserState>();
-    var skillDialogs = new List<SkillDialog>();
-
-    foreach (var skill in settings.Skills)
-    {
-        var authDialog = BuildAuthDialog(skill, settings);
-        var credentials = new MicrosoftAppCredentialsEx(settings.MicrosoftAppId, settings.MicrosoftAppPassword, skill.MSAappId);
-        skillDialogs.Add(new SkillDialog(skill, credentials, telemetryClient, userState, authDialog));
-    }
-
-    return skillDialogs;
-});
-```
-
-For scenarios where Skills require authentication connections, you need to create an associated `MultiProviderAuthDialog`:
-
-```csharp
-// This method creates a MultiProviderAuthDialog based on a skill manifest.
-private MultiProviderAuthDialog BuildAuthDialog(SkillManifest skill, BotSettings settings)
-{
-    if (skill.AuthenticationConnections?.Count() > 0)
-    {
-        if (settings.OAuthConnections.Any() && settings.OAuthConnections.Any(o => skill.AuthenticationConnections.Any(s => s.ServiceProviderId == o.Provider)))
-        {
-            var oauthConnections = settings.OAuthConnections.Where(o => skill.AuthenticationConnections.Any(s => s.ServiceProviderId == o.Provider)).ToList();
-            return new MultiProviderAuthDialog(oauthConnections);
-        }
-        else
-        {
-            throw new Exception($"You must configure at least one supported OAuth connection to use this skill: {skill.Name}.");
-        }
-    }
-
-    return null;
-}
-```
-
-#### TypeScript
-
-In your `index.ts` file, register a `SkillDialog` for each registered skill as shown below; this uses the collection of Skills that you created in the previous step.
-
-```typescript
-// Register skill dialogs
-const skillDialogs: SkillDialog[] = skills.map((skill: ISkillManifest) => {
-    const authDialog: MultiProviderAuthDialog|undefined = buildAuthDialog(skill, botSettings);
-    const credentials: MicrosoftAppCredentialsEx = new MicrosoftAppCredentialsEx(
-        botSettings.microsoftAppId || '',
-        botSettings.microsoftAppPassword || '',
-        skill.msAppId);
-
-    return new SkillDialog(skill, credentials, adapter.telemetryClient, skillContextAccessor, authDialog);
-});
-```
-
-For scenarios where Skills require authentication connections, you need to create an associated `MultiProviderAuthDialog`:
-
-```typescript
-// This method creates a MultiProviderAuthDialog based on a skill manifest.
-function buildAuthDialog(skill: ISkillManifest, settings: Partial<IBotSettings>): MultiProviderAuthDialog|undefined {
-    if (skill.authenticationConnections !== undefined && skill.authenticationConnections.length > 0) {
-        if (settings.oauthConnections !== undefined) {
-            const oauthConnections: IOAuthConnection[] | undefined = settings.oauthConnections.filter(
-                (oauthConnection: IOAuthConnection) => {
-                    return skill.authenticationConnections.some((authenticationConnection: IAuthenticationConnection) => {
-                        return authenticationConnection.serviceProviderId === oauthConnection.provider;
-                    });
-                });
-            if (oauthConnections !== undefined) {
-                return new MultiProviderAuthDialog(oauthConnections);
-            }
-        } else {
-            throw new Error(`You must configure at least one supported OAuth connection to use this skill: ${skill.name}.`);
-        }
-    }
-
-    return undefined;
-}
-```
-
-## Route utterances to Skills
-#### C#
-Within your Main/Router dialog, you first need to ensure the SkillDialogs registered previously are added to the dialog stack:
-
-```csharp
-foreach (var skillDialog in skillDialogs)
-{
-    AddDialog(skillDialog);
-}
-```
-
-Add the following code after your Dispatcher has executed, passing the registered Skills and the intent returned from the Dispatcher. If the `IsSkill` method returns a matching Skill, you start the appropriate SkillDialog instance, passing the Skill manifest ID and the matching intent.
-
-```csharp
-// Identify if the dispatch intent matches any Action within a Skill if so, we pass to the appropriate SkillDialog to hand-off
-var identifiedSkill = SkillRouter.IsSkill(_settings.Skills, intent.ToString());
-
-if (identifiedSkill != null)
-{
-    // We have identified a skill so initialize the skill connection with the target skill
-    // the dispatch intent is the Action ID of the Skill enabling us to resolve the specific action and identify slots
-    // Pass the activity we have
-    var result = await dc.BeginDialogAsync(identifiedSkill.Id, intent);
-
-    if (result.Status == DialogTurnStatus.Complete)
-    {
-        await CompleteAsync(dc);
-    }
-}
-else
-{
-    // Your normal intent routing logic
-}
-```
-
-#### TypeScript
-
-Within your Main/Router dialog, you first need to ensure the SkillDialogs registered previously are added to the dialog stack:
-
-```typescript
-skillDialogs.forEach((skillDialog: SkillDialog) => {
-    this.addDialog(skillDialog);
-});
-```
-
-Add the following code after your Dispatcher has executed, passing the registered Skills and the intent returned from the Dispatcher. If the `isSkill` method returns a matching Skill, you start the appropriate SkillDialog instance, passing the Skill manifest ID and the matching intent.
-
-```typescript
-// Identify if the dispatch intent matches any Action within a Skill if so, we pass to the appropriate SkillDialog to hand-off
-const identifiedSkill: ISkillManifest | undefined = SkillRouter.isSkill(this.settings.skills, intent);
-if (identifiedSkill !== undefined) {
-    // We have identified a skill so initialize the skill connection with the target skill
-    // the dispatch intent is the Action ID of the Skill enabling us to resolve the specific action and identify slots
-    // Pass the activity we have
-    const result: DialogTurnResult = await dc.beginDialog(identifiedSkill.id);
-
-    if (result.status === DialogTurnStatus.complete) {
-        await this.complete(dc);
-    }
-} else {
-    // Your normal intent routing logic
-}
-```
\ No newline at end of file
diff --git a/docs/_docs/howto/skills/manualauthsteps.md b/docs/_docs/howto/skills/manualauthsteps.md
deleted file mode 100644
index 01f52c5ef8..0000000000
--- a/docs/_docs/howto/skills/manualauthsteps.md
+++ /dev/null
@@ -1,48 +0,0 @@
----
-category: How To
-subcategory: Skills
-title: Manually configure OAuth for Productivity Skills
-description: How to configure authentication manually for skills.
-order: 3
----
-
-# {{ page.title }}
-
-If you wish to use the Calendar, Email and Task Skills standalone from the Virtual Assistant (local mode), you need to configure an Authentication Connection. This enables your Assistant to authenticate against services such as Office 365 and securely store a token, which can be retrieved when a user asks a question such as *"What does my day look like today"* and then used against an API like Microsoft Graph.
-
-> These steps are not required if you plan to use the productivity skills as part of the Virtual Assistant; they are performed automatically when you add a Skill to your assistant.
-
-The [Add Authentication to your bot](https://docs.microsoft.com/en-us/azure/bot-service/bot-builder-authentication?view=azure-bot-service-4.0&tabs=aadv1%2Ccsharp%2Cbot-oauth) section in the Azure Bot Service documentation covers more detail on how to configure Authentication. However, in this scenario, the automated deployment step for the Skill has already created the **Azure AD v2 Application** for your Bot, and you instead only need to follow these instructions:
-
-- Navigate to the Azure Portal, click Azure Active Directory and then `App Registrations`.
-- Find the Application that's been created for your Bot as part of the deployment. You can search for the application by name or Application ID, but note that search only works across the applications currently shown, and the one you need may be on a separate page.
-- Click API permissions on the left-hand navigation
-  - Select Add Permission to show the permissions pane
-  - Select `Microsoft Graph`
-  - Select Delegated Permissions and then add each of the permissions required for the Productivity Skills you are adding (see the specific documentation page for the scopes required)
-  - Click Add Permissions at the bottom to apply the changes.
-
-Next you need to create the Authentication Connection for your Bot. Within the Azure Portal, find the `Web App Bot` resource created when you deployed your Bot and choose `Settings`.
-
-- Scroll down to the OAuth Connection settings section.
-- Click `Add Setting`.
-- Type in the name of your Connection Setting, e.g. `Outlook`.
-- Choose `Azure Active Directory v2` from the Service Provider drop-down.
-- Open the `appSettings.config` file for your Skill:
-  - Copy/paste the value of `microsoftAppId` into the ClientId setting
-  - Copy/paste the value of `microsoftAppPassword` into the Client Secret setting
-  - Set Tenant Id to common
-  - Set scopes to match the ones provided in the earlier step.
-
-![Manual Auth Connection]({{site.baseurl}}/assets/images/manualauthconnection.png)
-
-Finally, open the `appSettings.config` file for your Skill and update the connection name to match the one provided in the previous step.
-
-```
-"oauthConnections": [
-    {
-      "name": "Outlook",
-      "provider": "Azure Active Directory v2"
-    }
-  ],
-```
\ No newline at end of file
diff --git a/docs/_docs/howto/skills/skillenablingav4bot.md b/docs/_docs/howto/skills/skillenablingav4bot.md
deleted file mode 100644
index 499ed83def..0000000000
--- a/docs/_docs/howto/skills/skillenablingav4bot.md
+++ /dev/null
@@ -1,162 +0,0 @@
----
-category: How To
-subcategory: Skills
-title: Convert an existing v4 bot to a skill
-description: Steps required to take an existing Bot and make it available as a skill.
-order: 3
----
-
-# {{ page.title }}
-{:.no_toc}
-
-## In this how-to
-{:.no_toc}
-
-*
-{:toc}
-
-## Overview
-
-Creating a [Bot Framework Skill]({{site.baseurl}}/overview/skills) through the [Skill template]({{site.baseurl}}/tutorials/csharp/create-skill/1_intro) is the easiest way to get started with creating a new Skill. If you have an existing v4 based Bot, we recommend you take the resulting project from this template and copy across your custom dialogs to get started quickly.
-
-If you want to manually update your existing bot into a Bot Framework Skill, you can continue below.
-
-## Update your bot to use the Bot Framework Solutions libraries
-#### C#
-
-1. Implement MVC architecture
-   - If you have an existing bot using the v4 SDK, follow the MVC approach from this [Bot Builder sample](https://github.com/Microsoft/BotBuilder-Samples/tree/master/samples/csharp_dotnetcore/05.multi-turn-prompt).
-
-1. Enable the Bot Framework Solutions packages
-   - Add [`Microsoft.Bot.Builder.Solutions`](https://www.nuget.org/packages/Microsoft.Bot.Builder.Solutions/) and [`Microsoft.Bot.Builder.Skills`](https://www.nuget.org/packages/Microsoft.Bot.Builder.Skills/) NuGet packages to your solution.
-
-2. Create a custom Skill adapter
-
-   - Create a custom adapter class that derives from the SkillAdapter and ensure the `SkillMiddleware` is added to it.
-
-   ```csharp
-   public class CustomSkillAdapter : SkillAdapter
-   {
-       public CustomSkillAdapter(
-           BotSettings settings,
-           ICredentialProvider credentialProvider,
-           BotStateSet botStateSet,
-           ResponseManager responseManager,
-           IBotTelemetryClient telemetryClient,
-           UserState userState)
-           : base(credentialProvider)
-       {
-           ...
-           Use(new SkillMiddleware(userState));
-       }
-   }
-   ```
-
-3. Add the Skill services to startup
-   - In your `startup.cs` file, add the following Transient adapters (adjust the adapter types to your own class names):
-
-   ```csharp
-   services.AddTransient<IBotFrameworkHttpAdapter, DefaultAdapter>();
-   services.AddTransient<SkillAdapter, CustomSkillAdapter>();
-   ```
-
-4. Update your BotController class
-
-   - Update your `BotController.cs` class to derive from `SkillController`:
-
-   ```csharp
-   [ApiController]
-   public class BotController : SkillController
-   {
-       public BotController(IServiceProvider serviceProvider, BotSettingsBase botSettings)
-           : base(serviceProvider, botSettings)
-       { }
-   }
-   ```
-#### TypeScript
-1. Enable the Bot Framework Solutions packages
-   - Add [`botbuilder-solutions`](https://www.npmjs.com/package/botbuilder-solutions) and [`botbuilder-skills`](https://www.npmjs.com/package/botbuilder-skills) npm packages to your solution.
-
-2. Create a custom Skill adapter
-   - Create a Custom Adapter that derives from the `SkillHttpBotAdapter` and ensure the `SkillMiddleware` is added:
-
-   ```typescript
-   export class CustomSkillAdapter extends SkillHttpBotAdapter {
-       constructor(
-           telemetryClient: TelemetryClient,
-           conversationState: ConversationState,
-           skillContextAccessor: StatePropertyAccessor<SkillContext>,
-           dialogStateAccessor: StatePropertyAccessor<DialogState>,
-           ...
-       ) {
-           super(telemetryClient);
-           [...]
-           this.use(new SkillMiddleware(conversationState, skillContextAccessor, dialogStateAccessor));
-           [...]
-       }
-   }
-   ```
-
-3. Add the Skill services to startup
-   - Add the new adapter to your `index.ts` file.
-
-   ```typescript
-   const skillBotAdapter: CustomSkillAdapter = new CustomSkillAdapter(
-       telemetryClient,
-       conversationState,
-       skillContextAccessor,
-       dialogStateAccessor,
-       ...);
-   const skillAdapter: SkillHttpAdapter = new SkillHttpAdapter(
-       skillBotAdapter
-   );
-   ```
-
-4. Add the Skill endpoint
-   - Update your `index.ts` to handle messages to interact with the bot as a skill.
-
-   ```typescript
-   // Listen for incoming assistant requests
-   server.post('/api/skill/messages', (req: restify.Request, res: restify.Response) => {
-       // Route received a request to adapter for processing
-       skillAdapter.processActivity(req, res, async (turnContext: TurnContext) => {
-           // route to bot activity handler.
-           await bot.run(turnContext);
-       });
-   });
-   ```
-
-## Add a Skill manifest
-
-Create a `manifestTemplate.json` file in the root of your Bot. Ensure that, at a minimum, the root-level `id`, `name`, `description` and action details are completed.
-
-```json
-{
-  "id": "",
-  "name": "",
-  "description": "",
-  "iconUrl": "",
-  "authenticationConnections": [ ],
-  "actions": [
-    {
-      "id": "",
-      "definition": {
-        "description": "",
-        "slots": [ ],
-        "triggers": {
-          "utteranceSources": [
-            {
-              "locale": "en",
-              "source": [
-                "luisModel#intent"
-              ]
-            }
-          ]
-        }
-      }
-    }
-  ]
-}
-```
diff --git a/docs/_docs/howto/virtual-assistant/manualdeployment.md b/docs/_docs/howto/virtual-assistant/manualdeployment.md
deleted file mode 100644
index cdf8ecd6a8..0000000000
--- a/docs/_docs/howto/virtual-assistant/manualdeployment.md
+++ /dev/null
@@ -1,267 +0,0 @@
----
-category: How To
-subcategory: Virtual Assistant
-title: Manual Deployment
-description: How to manually deploy and configure your Virtual Assistant
-order: 1
----
-
-# {{ page.title }}
-{:.no_toc}
-
-## In this how-to
-{:.no_toc}
-
-*
-{:toc}
-
-### Intro
-The Virtual Assistant comes with a set of scripts to simplify the deployment process. However, if you'd like to manually deploy and configure your assistant, you can follow these steps.
-
-### Create MSA App Registration
-#### Option 1: Create registration using Az CLI
-Run the following command to create your app registration:
-
-```
-az ad app create `
-    --display-name 'your-app-name' `
-    --password 'your-app-pw' `
-    --available-to-other-tenants `
-    --reply-urls 'https://token.botframework.com/.auth/web/redirect'
-```
-
-#### Option 2: Create registration manually in Azure Portal
-Follow the [Register an application in Azure AD](https://docs.microsoft.com/en-us/azure/bot-service/bot-builder-tutorial-authentication?view=azure-bot-service-3.0&tabs=aadv1#register-an-application-in-azure-ad) instructions.
-> Under **Supported account types** you should select either "Accounts in any organizational directory" or "Accounts in any organizational directory and personal Microsoft accounts" to ensure the Azure Bot Service can correctly expose your bot via Bot Channels.
-
-### Deploy ARM template with parameters
-#### Option 1: Deploy arm template using Az CLI
-Run the following command to deploy the Virtual Assistant ARM template:
-```
-az group deployment create `
-    --resource-group "resource-group-name" `
-    --template-file "path-to-arm-template" `
-    --parameters "path-to-arm-parameters-file" `
-    --parameters microsoftAppId='ms-app-id' microsoftAppPassword='ms-app-pw'
-```
-
-#### Option 2: Deploy arm template using Azure Portal
-1. Click on the following button to load the Virtual Assistant ARM template in the Azure Portal:
-Deploy to Azure
-1. Provide your Microsoft App Id and Microsoft App Password, and override any default parameter values as needed.
-1. Click "Purchase" to deploy.
-
-### Update appsettings.json with configuration
-After your Azure resources have been deployed, fill in the following keys and secrets in appsettings.json with the values from your deployed resources:
-```json
-{
-  "microsoftAppId": "",
-  "microsoftAppPassword": "",
-  "ApplicationInsights": {
-    "InstrumentationKey": ""
-  },
-  "blobStorage": {
-    "connectionString": "",
-    "container": "transcripts"
-  },
-  "cosmosDb": {
-    "collectionId": "botstate-collection",
-    "databaseId": "botstate-db",
-    "cosmosDBEndpoint": "",
-    "authKey": ""
-  }
-}
-```
-
-### Deploy LUIS models
-#### Option 1: Deploy with BF CLI tools
-1. Run the following command for each .lu file in `\Deployment\Resources\LU` to parse the files to .luis files that can be imported to LUIS:
-   ```
-   ludown parse toluis `
-       --in "path-to-lu-file" `
-       --luis_culture "culture-code" `
-       --out_folder "output-folder" `
-       --out "output-file-name.luis"
-   ```
-1. Run the following command to import the LUIS model into the LUIS portal.
-   ```
-   luis import application `
-       --appName "app-name" `
-       --authoringKey "luis-authoring-key" `
-       --subscriptionKey "luis-authoring-key" `
-       --region "region" `
-       --in "path-to-luis-file" `
-       --wait
-   ```
-1. Run the following command to train the LUIS model.
-   ```
-   luis train version `
-       --appId "app-id" `
-       --region "region" `
-       --authoringKey "authoring-key" `
-       --versionId "version-id" `
-       --wait
-   ```
-1. Run the following command to publish the LUIS model.
-   ```
-   luis publish version `
-       --appId "app-id" `
-       --region "region" `
-       --authoringKey "authoring-key" `
-       --versionId "version-id" `
-       --wait
-   ```
-1. Run the following command to create a .cs representation of your LUIS model.
-   ```
-   luisgen "path-to-luis-file" -cs "YourModelNameLuis" -o "path-to-output-folder"
-   ```
-1. For each LUIS model, add the following configuration to the `cognitiveModels.your-locale.languageModels` collection in cognitivemodels.json file:
-   ```json
-   {
-     "subscriptionkey": "",
-     "appid": "",
-     "id": "",
-     "version": "",
-     "region": "",
-     "name": "",
-     "authoringkey": "",
-     "authoringRegion": ""
-   }
-   ```
-
-#### Option 2: Deploy manually to LUIS portal
-1. Run the following command for each .lu file in `\Deployment\Resources\LU` to parse the files to .json files that can be imported into the LUIS portal:
-   ```
-   ludown parse toluis `
-       --in "path-to-lu-file" `
-       --luis_culture "culture-code" `
-       --out_folder "output-folder" `
-       --out "output-file-name.json"
-   ```
-1. In the LUIS portal, click "Create new app"
-1. Provide a name, culture, and description for your app.
-1. Click **Manage** > **Versions** > **Import version**
-1. Browse to your .json file, then click "Done".
-1. Train your LUIS app.
-1. Publish your LUIS app.
-1. For each LUIS model, add the following configuration to the `cognitiveModels.your-locale.languageModels` collection in cognitivemodels.json file:
-   ```json
-   {
-     "subscriptionkey": "",
-     "appid": "",
-     "id": "",
-     "version": "",
-     "region": "",
-     "name": "",
-     "authoringkey": "",
-     "authoringRegion": ""
-   }
-   ```
-
-### Deploy QnA Maker knowledgebases
-#### Option 1: Deploy with BF CLI tools
-1. Run the following command for each .lu file in `\Deployment\Resources\QnA` to parse the files to .json files that can be deployed to QnA Maker:
-   ```
-   ludown parse toqna `
-       --in "path-to-lu-file" `
-       --out_folder "output-folder" `
-       --out "output-file-name.qna"
-   ```
-1. Run the following command to import the .qna file to QnA Maker.
-   ```
-   qnamaker create kb `
-       --name "kb-name" `
-       --subscriptionKey "qna-subscription-key" `
-       --in "path-to-qna-file" `
-       --force `
-       --wait
-   ```
-1. Run the following command to publish the knowledgebase.
-   ```
-   qnamaker publish kb `
-       --kbId "kb-id" `
-       --subscriptionKey "qna-subscription-key"
-   ```
-1. For each QnA Maker knowledgebase model, add the following configuration to the `cognitiveModels.your-locale.knowledgebases` collection in cognitivemodels.json file:
-   ```json
-   {
-     "endpointKey": "",
-     "kbId": "",
-     "hostname": "",
-     "subscriptionKey": "",
-     "name": "",
-     "id": ""
-   }
-   ```
-
-#### Option 2: Deploy manually to QnA Maker portal
-The QnA Maker portal does not accept JSON files as input, so in order to deploy directly to the QnA Maker portal, you should either author new knowledgebases based on your scenario's needs directly in the portal, or import data in TSV format.
-
-After creating your knowledgebases, update the `cognitiveModels.your-locale.knowledgebases` collection in cognitivemodels.json file for each knowledgebase:
-
-```json
-{
-  "endpointKey": "",
-  "kbId": "",
-  "hostname": "",
-  "subscriptionKey": "",
-  "name": "",
-  "id": ""
-}
-```
-
-### Create Dispatch model
-1. Initialize the dispatch model.
-   ```
-   dispatch init `
-       --name "dispatch-name" `
-       --luisAuthoringKey "luis-authoring-key" `
-       --luisAuthoringRegion "luis-authoring-region" `
-       --dataFolder "path-to-output-folder"
-   ```
-1. Add LUIS and QnA Maker sources
-   - For each LUIS app, run the following command:
-   ```
-   dispatch add `
-       --type "luis" `
-       --name "luis-app-name" `
-       --id "luis-app-id" `
-       --region "luis-region" `
-       --intentName "l_luis-app-name" `
-       --dataFolder "path-to-output-folder" `
-       --dispatch "path-to-.dispatch-file"
-   ```
-
-   - For each QnA Maker knowledgebase, run the following command:
-   ```
-   dispatch add `
-       --type "qna" `
-       --name "kb-name" `
-       --id "kb-id" `
-       --key "qna-subscription-key" `
-       --intentName "q_kb-app-name" `
-       --dataFolder "path-to-output-folder" `
-       --dispatch "path-to-.dispatch-file"
-   ```
-1. Create the dispatch model.
-   ```
-   dispatch create `
-       --dispatch "path-to-.dispatch-file" `
-       --dataFolder "path-to-output-folder" `
-       --culture "dispatch-culture"
-   ```
-1. Run the luisgen tool to generate a .cs representation of your Dispatch model.
-   ```
-   luisgen "path-to-.json-file" -cs "DispatchLuis" -o "output-folder"
-   ```
-1. Add the following configuration to the `cognitiveModels.your-locale.dispatchModel` collection in cognitivemodels.json file:
-   ```json
-   "dispatchModel": {
-     "authoringkey": "",
-     "appid": "",
-     "name": "",
-     "subscriptionkey": "",
-     "region": "",
-     "authoringRegion": ""
-   }
-   ```
\ No newline at end of file
diff --git a/docs/_docs/howto/virtual-assistant/oldvatovamigration.md b/docs/_docs/howto/virtual-assistant/oldvatovamigration.md
deleted file mode 100644
index 311a36bc6e..0000000000
--- a/docs/_docs/howto/virtual-assistant/oldvatovamigration.md
+++ /dev/null
@@ -1,28 +0,0 @@
----
-category: How To
-subcategory: Virtual Assistant
-title: Migrate from Virtual Assistant Solution to Template
-description: Guidance on how to move from the original Virtual Assistant solution to the new Template
-order: 7
----
-
-# {{ page.title }}
-{:.no_toc}
-
-## In this how-to
-{:.no_toc}
-
-*
-{:toc}
-
-## Prerequisites
-
-Learn how to [migrate an Enterprise Template based bot to the Virtual Assistant Template]({{site.baseurl}}/howto/virtual-assistant/ettovamigration). After doing so you will have a Virtual Assistant project ready to add the Productivity & Point of Interest Skills.
-
-## What happened to the Virtual Assistant Skills?
-
-The Virtual Assistant (Beta Release 0.3) solution was delivered with multiple preview Skills to support productivity & point of interest scenarios. These have now been made available as [**Bot Framework Skills**]({{site.baseurl}}/overview/skills), reusable conversational skills that can be added to an existing bot. Developers can add and remove Skills with one command that incorporates all language models and configuration changes. Skills are themselves Bots, invoked remotely, and a Skill developer template (.NET, TS) is available to facilitate the creation of new Skills.
-
-## Add the Skills to your assistant
-
-The Skills previously part of the Virtual Assistant solution have been relocated to the [skills directory](https://github.com/microsoft/botframework-solutions/tree/master/skills/src/csharp). After deploying your selected Skills, continue to [add them to your Virtual Assistant]({{site.baseurl}}/howto/skills/addingskills).
\ No newline at end of file
diff --git a/docs/_docs/howto/virtual-assistant/proactivemessaging.md b/docs/_docs/howto/virtual-assistant/proactivemessaging.md
deleted file mode 100644
index 2421dda40d..0000000000
--- a/docs/_docs/howto/virtual-assistant/proactivemessaging.md
+++ /dev/null
@@ -1,139 +0,0 @@
----
-category: How To
-subcategory: Virtual Assistant
-title: Send proactive messages
-description: Adding proactive experiences to your Assistant
-order: 5
----
-
-# {{ page.title }}
-{:.no_toc}
-
-## In this how-to
-{:.no_toc}
-
-*
-{:toc}
-## Intro
-
-Proactive scenarios are a key part of ensuring a Virtual Assistant can provide more intelligent and helpful capabilities to end users. This enables a Virtual Assistant to move away from exclusively reactive (user-initiated) scenarios towards more intelligent interactions triggered by external events that are known to be of interest to the end user.
-
-Example scenarios such as the following will enable your assistant to stand out and provide unique capabilities to your users:
-
-- "You have a meeting coming up in a couple of minutes"
-- "Here's your daily briefing" (as you start your car)
-- "I know you have time in your calendar on the way home, how about picking up some items from your grocery list on your way?"
-
-## Implement a sample proactive scenario
-
-At this time, the Virtual Assistant provides one proactive scenario already implemented, which can be used as a reference. When a client device (e.g. a car) sends a `DeviceStart` event to the bot, it will query for upcoming events for the next hour. Within CalendarSkill, the dialog that handles this scenario is located here: [`UpcomingEventDialog.cs`]({{site.repo}}/blob/master/skills/src/csharp/calendarskill/calendarskill/Dialogs/UpcomingEventDialog.cs)
-
-> Note that the code samples below all come from this dialog implementation.
-
-When developing using the Bot Framework SDK, you can utilize the adapter's `ContinueConversationAsync` function to patch into a previously started conversation.
-
-It's important to note that a user must already have had an interaction with the Bot for this proactive interaction to happen, and a persistent chat canvas is required in order to send messages back to the user. WebChat, for example, will close the conversation when the browser is closed; however, a canvas such as Teams or a custom device can persist the same conversation over time.
-
-```csharp
-await sc.Context.Adapter.ContinueConversationAsync(_endpointService.AppId, proactiveModel[MD5Util.ComputeHash(userId)].Conversation, UpcomingEventContinueConversationCallback(eventModel, sc), cancellationToken);
-```
-
-Inside the callback method, you can use the scoped `turnContext` to send outgoing activities:
-
-```csharp
-return async (turnContext, token) =>
-{
-    ...
-    await turnContext.SendActivityAsync(activity);
-};
-```
-
-This way, if the previously opened conversation is still alive, the user will see the new message being sent into that conversation from the bot.
-
-In order to send a message to a previous conversation, you require a [`ConversationReference` object](https://raw.githubusercontent.com/Microsoft/botbuilder-dotnet/89817b6b8db42726c9ffcf82bf40b4e66592b84f/libraries/Microsoft.Bot.Schema/ConversationReference.cs). To retrieve this, you need to store conversation references within your solution; the `ProactiveStateMiddleware` does this for you.
-
-To make use of this middleware, you need to register it within `startup.cs` as shown below:
-
-```csharp
-options.Middleware.Add(new ProactiveStateMiddleware(proactiveState));
-```
-
-Along with declaring `proactiveState` alongside your existing state objects:
-
-```csharp
-var proactiveState = new ProactiveState(dataStore);
-```
-
-Once these steps are performed, the bot will store each user's `ConversationReference` objects after every turn. Whenever you need to send a message to an existing conversation, you can retrieve this reference. The stored conversation references are keyed by the hashed (MD5) userId, so you need to use the hashed userId to retrieve them from proactiveState:
-
-```csharp
-var userId = activity.From.Id;
-await sc.Context.Adapter.ContinueConversationAsync(_endpointService.AppId, proactiveModel[MD5Util.ComputeHash(userId)].Conversation, UpcomingEventContinueConversationCallback(eventModel, sc), cancellationToken);
-```
-
-Once retrieved, you can then start processing your proactive scenarios. Virtual Assistant gives you two ways of doing it:
-
-- BackgroundTaskQueue
-- ScheduledTask
-
-With `BackgroundTaskQueue`, you can use the `QueueBackgroundWorkItem` method to put the processing of an operation into a queue; the hosted service that handles the background tasks will retrieve this and run it in a different thread.
-
-```csharp
-_backgroundTaskQueue.QueueBackgroundWorkItem(async (token) =>
-{
-    var handler = new CheckUpcomingEventHandler
-    {
-        CalendarService = calendarService
-    };
-    await handler.Handle(UpcomingEventCallback(userId, sc, proactiveState));
-});
-```
-
-In the `UpcomingEventCallback`, use the `ContinueConversationAsync` method on `turnContext.Adapter` to initiate the proactive call, as mentioned earlier.
-
-With `ScheduledTask`, you can use the `AddScheduleTask` method to create a new schedule for a certain task. You can use an expression to represent a schedule. Please refer to [NCrontab](https://github.com/atifaziz/NCrontab) for how to define an expression.
-
-To run a task at 12 PM every Monday, you can use the following:
-
-```csharp
-_scheduledTask.AddScheduleTask(new ScheduledTaskModel {
-    ScheduleExpression = "0 12 * * Mon",
-    Task = async (ct) => { await _logger.Write("Happy Monday!"); }
-});
-```
-
-With support from these task extensions, you can easily perform operations in the background and send messages back to users whenever there's a signal to do so. Now we have a dialog that sends a proactive message back to the user in a previously opened conversation; let's explore how the request is routed to the skill.
-
-There are two approaches to trigger a proactive message scenario, just as with any reactive scenario: Events and User Utterances. For the Virtual Assistant to know the mapping between an event and the skills, a new configuration file has been introduced: skillEvents.json.
-
-This file contains the mapping between an event and the skills that could consume it. We support multiple skills for one event, enabling multiplexing. Its format is as follows:
-
-```json
-{
-    "skillEvents": [
-        {
-            "event": "DeviceStart",
-            "skillId": [ "l_Calendar" ],
-            "parameters": {}
-        }
-    ]
-}
-```
-
-The Virtual Assistant knows how to interpret this file and route the events to different skills. It's then up to the skills to implement handling for those events. For example, CalendarSkill handles the DeviceStart event inside `MainDialog.cs`, in the `OnEventAsync` function.
-
-```csharp
-case Events.DeviceStart:
-{
-    var skillOptions = new CalendarSkillDialogOptions
-    {
-        SkillMode = _skillMode,
-    };
-
-    await dc.BeginDialogAsync(nameof(UpcomingEventDialog), skillOptions);
-
-    break;
-}
-```
-
-Proactive scenarios triggered from a user utterance can be supported by training the language model with new intents and entities, refreshing the dispatcher, and creating dialogs to handle that intent/entity and perform the background processing that results in a proactive message being sent using the approach detailed above.
\ No newline at end of file
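For completeness, an external client can raise the `DeviceStart` event described above over Direct Line. The sketch below is hypothetical (the secret and user ID are placeholders) and assumes the `Microsoft.Bot.Connector.DirectLine` package:

```csharp
using Microsoft.Bot.Connector.DirectLine;

// Sketch: send a "DeviceStart" event activity so the Virtual Assistant routes it
// to the mapped skill (per skillEvents.json), triggering the proactive dialog.
var client = new DirectLineClient("<direct-line-secret>");
var conversation = await client.Conversations.StartConversationAsync();

await client.Conversations.PostActivityAsync(conversation.ConversationId, new Activity
{
    Type = ActivityTypes.Event,
    Name = "DeviceStart",
    From = new ChannelAccount("<user-id>")
});
```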
diff --git a/docs/_docs/howto/virtual-assistant/testing.md b/docs/_docs/howto/virtual-assistant/testing.md
deleted file mode 100644
index 4d94246173..0000000000
--- a/docs/_docs/howto/virtual-assistant/testing.md
+++ /dev/null
@@ -1,70 +0,0 @@
----
-category: How To
-subcategory: Virtual Assistant
-title: Test the Virtual Assistant
-description: How to test your assistant end-to-end.
-order: 4
----
-
-# {{ page.title }}
-{:.no_toc}
-
-## In this how-to
-{:.no_toc}
-
-*
-{:toc}
-
-## Intro
-Your Virtual Assistant can be tested just like any other Bot Framework Bot; the most common tools are the [Bot Framework Emulator](https://aka.ms/botframework-emulator) and [Web Chat](https://aka.ms/botframework-webchat).
-
-## Unit Testing
-
-Take advantage of the Test project that is available when you [Create a New Skill]({{site.baseurl}}/tutorials/csharp/create-skill/1_intro).
-Follow along with the Flow tests to see a basic example of how to mock activities from a user and validate the bot responses.
-If you'd like to take this further, you can explore the tests of a published skill for a deep dive on APIs, mocking LUIS, and more.
-
-## Client Testing
-
-### Bot Framework Emulator
-
-The Bot Framework Emulator can be used by opening the .bot file provided within the Project directory. You must have completed the [deployment steps]({{site.baseurl}}/tutorials/csharp/create-assistant/4_provision_your_azure_resources) first and should ensure you have the [latest emulator](https://aka.ms/botframework-emulator) installed.
-
-> Authentication scenarios cannot be fully tested within the Emulator at this time. The Web Test Harness provides a workaround for this.
-
-### Direct Line Configuration
-
-For device integration and use of the test harnesses below, you need to publish your assistant to your Azure subscription and then configure the [Direct Line](https://docs.microsoft.com/en-us/azure/bot-service/bot-service-channel-connect-directline?view=azure-bot-service-3.0) channel.
-
-- Start by deploying your assistant to Azure.
-- Then use the CLI command shown below; the key will be shown in the `key` field. It will not be accessible again, so keep it securely for the steps below.
-
-```shell
-az bot directline create -g YOUR_RESOURCE_GROUP_NAME --name YOUR_BOT_NAME
-```
-
-### Direct Line Sample
-
-A simple Console App is provided to demonstrate the base communication interaction required with a Virtual Assistant and to highlight how a device can interact with one. The Sample enables you to conduct a conversation with a Virtual Assistant and demonstrates how responses can be processed, including Adaptive Cards, along with retrieving the `Speak` property, which is the speech-friendly variation of the response.
-
-Examples are also provided on how events can be sent (device activation, for example) as well as receiving responses to perform an action locally (e.g. change the navigation system or radio station).
-
-Update the code to reflect the Direct Line secret you created previously.
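As a rough sketch of the interaction loop the Console App performs (assuming the `Microsoft.Bot.Connector.DirectLine` package; identifiers are placeholders, not the sample's exact code):

```csharp
using System;
using Microsoft.Bot.Connector.DirectLine;

var client = new DirectLineClient("<direct-line-secret>");
var conversation = await client.Conversations.StartConversationAsync();

// Send a user message to the assistant.
await client.Conversations.PostActivityAsync(conversation.ConversationId, new Activity
{
    Type = ActivityTypes.Message,
    Text = "What's my day look like?",
    From = new ChannelAccount("console-user")
});

// Read responses; the Speak property carries the speech-friendly variation.
var activitySet = await client.Conversations.GetActivitiesAsync(conversation.ConversationId);
foreach (var activity in activitySet.Activities)
{
    Console.WriteLine($"{activity.Text} (speak: {activity.Speak})");
}
```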
-
-### Web Chat Test Harness
-
-The Web Chat test harness makes use of the [Bot Framework Web Chat](https://github.com/Microsoft/BotFramework-WebChat) to provide an additional test canvas.
-The Web Chat test harness is configured against an Identity Provider (e.g. Azure Active Directory) to enable the user to sign in and retrieve a unique identifier.
-This will ensure all messages sent during testing use this identifier, enabling testing of the [Linked Accounts]({{site.baseurl}}/howto/virtual-assistant/linkedaccounts) feature.
-You must sign in to your Linked Accounts app with the same identity.
-The account you link will be automatically made available to you when testing through the Web Chat test harness, removing the need for authentication prompts.
-
-See [Authentication Configuration]({{site.baseurl}}/howto/virtual-assistant/linkedaccounts#authentication-configuration) for how to configure authentication in the application.
-Update the `AzureAd` section in `appsettings.development.config` with the above authentication information along with the Direct Line secret created previously.
-
-When opening the Assistant-WebTest project for the first time, you will be assigned a unique port number for local debugging - you can check this by right-clicking the Assistant-WebTest project in Visual Studio, choosing **Properties**, and reviewing the App URL in the **Debug** section.
-Ensure this is entered into the Reply URLs section of your Authentication configuration (e.g. `https://localhost:44320/signin-oidc`).
-
-### Additional Platforms
-
-We plan to offer additional test harnesses and integration samples for Linux and Android moving forward.
diff --git a/docs/_docs/overview/analytics.md b/docs/_docs/overview/analytics.md
deleted file mode 100644
index adecb808e7..0000000000
--- a/docs/_docs/overview/analytics.md
+++ /dev/null
@@ -1,60 +0,0 @@
----
-category: Overview
-title: Analytics
-order: 3
----
-
-# {{ page.title }}
-{:.no_toc}
-
-## Contents
-{:.no_toc}
-
-*
-{:toc}
-
-## Intro
-
-[Application Insights](https://azure.microsoft.com/en-us/services/application-insights/) is an Azure service which enables analytics about your applications, infrastructure and network. The Bot Framework can use the Application Insights telemetry to provide information about how your bot is performing, and track key metrics. The Bot Framework SDK ships with several samples that demonstrate how to add telemetry to your bot and produce reports (included).
-
-[Power BI](https://powerbi.microsoft.com/) is a business analytics service that lets you visualize your data and share insights across your organization. You can ingest data from Application Insights into live dashboards and reports.
-
-## Prerequisites
-
-The [Conversational Analytics Power BI sample](https://aka.ms/botPowerBiTemplate) is generated under the assumption you are using the latest Bot Framework SDK and telemetry middleware. You can find these (and generate the required Application Insights resource) with the following samples:
-
-- [Virtual Assistant Template]({{site.repo}}/tree/master/templates/Virtual-Assistant-Template/csharp/Sample)
-- [Skill Template]({{site.repo}}/tree/master/templates/Skill-Template/csharp/Sample)
-
-### Configuring Sentiment
-
-LUIS enables you to run a sentiment analysis on a user's utterance. This can be enabled through the [LUIS portal](https://www.luis.ai).
-Sentiment must be enabled for each application. To enable sentiment:
-
-1. Log in to the portal.
-2. Select **My Apps**.
-3. Click the application for which you want to enable sentiment.
-4. Select **Manage** on the upper menu.
-5. Select **Publish Settings** on the side menu. It should resemble the below.
-
-![Enabling Sentiment]({{site.baseurl}}/assets/images/enable_sentiment.png)
-
-6. **Enable** the *Use sentiment analysis to determine if a user's utterance is positive, negative, or neutral* checkbox.
-7. Select **Publish** and repeat for each LUIS application.
-
-### Power BI Installation
-
-The [Power BI Desktop client](https://aka.ms/pbidesktopstore) is available for Windows clients.
-Alternatively, you can use the Power BI service. If you don't have a Power BI service account, sign up for a [free 60-day trial account](https://app.powerbi.com/signupredirect?pbi_source=web) and upload the Power BI template to view the reports.
-
-## Telemetry Logging
-
-This [guide](https://docs.microsoft.com/en-us/azure/bot-service/bot-builder-telemetry?view=azure-bot-service-4.0) highlights the provided telemetry for bot and user activities, including [LUIS](https://www.luis.ai/) and [QnA Maker](https://www.qnamaker.ai/) results, and explains
how to configure your bot's telemetry, either through bot configuring or overriding the telemetry client. - -## Application Insights Analytics - -Common queries for bot analytics are available in [Applications Insights Analytics]({{site.baseurl}}/reference/analytics/applicationinsights). - -## Power BI Analytics Sample - -Examples of Power BI dashboards are provided in the [Power BI Analytics sample]({{site.baseurl}}/reference/analytics/powerbi), highlighting how to gain insights on your bot's performance and quality. \ No newline at end of file diff --git a/docs/_docs/overview/samples/enterprisenotifications.md b/docs/_docs/overview/samples/enterprisenotifications.md deleted file mode 100644 index aa4e58adad..0000000000 --- a/docs/_docs/overview/samples/enterprisenotifications.md +++ /dev/null @@ -1,100 +0,0 @@ ---- -category: Overview -subcategory: Samples -title: Enterprise Notifications -order: 4 ---- - -# {{ page.title }} -## Contents -{:.no_toc} - -* -{:toc} - -## Intro - -There are many scenarios where an enterprise-focused Assistant needs to push notifications or messages to employees. -These messages may need to be delivered on a variety of [channels](https://docs.microsoft.com/en-us/azure/bot-service/bot-service-manage-channels?view=azure-bot-service-4.0) and customized by each employees. -It's important to consider the range of channels you wish to offer to customers and whether they provide a persistent conversation over time and the channel itself supports proactive message delivery. Microsoft Teams is an example of a persistent channel enabling conversations to occur over a longer period of time and across a range of devices. This contrasts with WebChat which is only available for the life of the browser window. - -In addition to conversational canvases mobile devices are another key end user channel and these same notifications/messages should be delivered as appropriate to these devices. - -We provide this sample to demonstrate how to build a notification/broadcasting scenario using a Virtual Assistant and various Azure resources. -Each customer scenario will vary significantly, so this is an MVP (minimum viable product) to get started with. - -## Sample Capabilities - -This sample demonstrates the following capabilities: - -1. A console application that shows how a supporting system can create an event for a specific event and send for processing. -2. An Azure function that handles events and routes them to a User via a Bot (Virtual Assistant). In this same handler mobile application push notification can be added as additional custom steps. -3. A User Preference store that enables user preferences for notification delivery to be stored and is used by the Azure Function. -3. The extensions to a Bot required to display an event to a user and also store ConversationReference objects enabling proactive message delivery. - -## Sample Architecture - -![Enterprise Notifications sample architecture]({{ site.baseurl }}/assets/images/enterprisenotifications-architecture.png) - -### Event Producer - -Azure Functions are used to collect events from upstream systems and convert them into a canonical event schema before handing over to the Event Hub for centralized handling. In this sample, we simulate this event generation functionality for ease of testing by using the console application located [here](/samples/EnterpriseNotification/EventProducer). 
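For orientation, a minimal sketch of such a producer using the Microsoft.Azure.EventHubs client (the connection string and event schema below are illustrative assumptions, not the sample's exact contract):

```csharp
using System.Text;
using System.Threading.Tasks;
using Microsoft.Azure.EventHubs;
using Newtonsoft.Json;

class EventProducer
{
    static async Task Main()
    {
        // Placeholder connection string; it must include the EntityPath of your hub.
        var client = EventHubClient.CreateFromConnectionString(
            "Endpoint=sb://YOUR_NAMESPACE.servicebus.windows.net/;SharedAccessKeyName=YOUR_KEY_NAME;SharedAccessKey=YOUR_KEY;EntityPath=YOUR_HUB");

        // An assumed canonical event envelope identifying the target user and message.
        var payload = JsonConvert.SerializeObject(new
        {
            UserId = "user@contoso.com",
            EventName = "BroadcastEvent",
            EventData = "You have a new job assignment."
        });

        // Post the event to the Event Hub for centralized handling.
        await client.SendAsync(new EventData(Encoding.UTF8.GetBytes(payload)));
        await client.CloseAsync();
    }
}
```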
-
-### Azure Event Hub
-
-In this sample, the Azure Event Hub is the centralized service that manages events gathered from different parts of the system and sent through the aforementioned Azure Function. For any event to reach an end user, it has to flow into the Azure Event Hub first.
-
-### Azure Functions - Event Handler
-
-After an event is posted to the Azure Event Hub, an Azure Function service is triggered to process it. The background to the use of Azure Functions is as follows:
-
-- Azure Functions natively support triggers against a variety of Azure services, the Event Hub trigger being one of these.
-- Azure Functions scale against Event Hub services by managing locks on partitions of the Azure Event Hub internally as part of the framework.
-
-#### Notification Handler (Trigger)
-
-The triggering element of the Azure function is handled as part of the [EventHandler](/samples/EnterpriseNotification/EventHandler). The `Run` method within [Function1.cs]({{site.repo}}/samples/EnterpriseNotification/EventHandler/Function1.cs) is automatically invoked once an event is available.
-
-#### Notification Handler (Run)
-
-Following the trigger, these steps are performed as part of the same [EventHandler]({{site.repo}}/samples/EnterpriseNotification/EventHandler) example:
-
-- Unpack the event
-- Read from a UserPreference store to check the user's profile settings
-- If the user's 'SendNotificationToMobileDevice' flag is set to true, send a notification to the user's mobile device with the event content.
-- If the user's 'SendNotificationToConversation' flag is set to true, send a message to the bot with the event content.
-
-This sample uses CosmosDB as the UserPreference store but can be modified to reflect an existing store you may already have in place. The code will check if there's a record for the particular user. If not, it will then add a record with default settings of both destinations set to true.
-
-This sample doesn't include the implementation for sending a notification to mobile devices as this requires additional configuration. You can refer to [this documentation](https://docs.microsoft.com/en-us/azure/notification-hubs/notification-hubs-aspnet-backend-ios-apple-apns-notification) for more information.
-
-The message the Event Handler sends to the bot is an Activity of type `event`, with the name `BroadcastEvent` and the value set to the data received from the Event Hub.
-
-### Notification Hub
-
-[Notification Hubs](https://azure.microsoft.com/en-us/services/notification-hubs) provide the capability to deliver notifications to end user devices. Please refer to [this documentation](https://docs.microsoft.com/en-us/azure/notification-hubs/notification-hubs-aspnet-backend-ios-apple-apns-notification) for additional steps to perform this integration as needed.
-
-### Virtual Assistant
-
-The assistant is responsible for surfacing the message received from the Event Handler back to the user. An example project is located [here]({{site.repo}}/samples/EnterpriseNotification/VirtualAssistant) which has a small number of extensions compared to a normal Virtual Assistant.
-
-### Adaptive Cards and Web Dashboards
-
-When the Notification Handler processes events emitted from the Azure Event Hub, it can persist them into a user data store.
-
-This would enable a user or system administrator to review the events later from a web dashboard, where Adaptive Cards and other web components can be used to render them, providing companion experiences to the assistant.
-This part is not included in the sample implementation at this time.
-
-
-## Next Steps
-
-
-
-
-

Set up Enterprise Notifications for a Virtual Assistant

-

Steps for configuring the Enterprise Notifications sample.

-
- -
-
diff --git a/docs/_docs/overview/skills.md b/docs/_docs/overview/skills.md
index ad578ce4bc..00b330e16f 100644
--- a/docs/_docs/overview/skills.md
+++ b/docs/_docs/overview/skills.md
@@ -1,25 +1,104 @@
 ---
 category: Overview
-title: Skills
-order: 2
+title: What is a Bot Framework Skill?
+order: 4
+toc: true
 ---
 
 # {{ page.title }}
-Bot Framework Skills are re-usable conversational skill building-blocks covering conversational use-cases enabling you to add extensive functionality to a Bot within minutes. Skills include LUIS models, Dialogs and Integration code and delivered in source code form enabling you to customize and extend as required. At this time we provide Calendar, Email, To Do, Point of Interest skills and a number of other experimental skills.
+{:.no_toc}
+
+Bot Framework Skills are re-usable conversational building blocks covering common use cases, enabling you to add extensive functionality to a Bot within minutes. Skills include language understanding (LUIS) models, dialogs and integration code, and are delivered as source code enabling you to customize and extend as required. At this time we provide Calendar, Email, To Do, Point of Interest skills and a number of other experimental skills.
 
 A Skill is like a standard conversational bot but with the ability to be plugged in to a broader solution. This can be a complex Virtual Assistant or perhaps an Enterprise Bot seeking to stitch together multiple bots within an organization.
 
-Apart from some minor differences that enable this special invocation pattern, a Skill looks and behaves like a regular bot. The same protocol is maintained between two bots to ensure a consistent approach. Skills for common scenarios like productivity and navigation to be used as-is or customized however a customer prefers.
+Apart from some minor differences that enable this special invocation pattern, a Skill looks and behaves like a regular bot. The same protocol is maintained between the two bots to ensure a consistent approach. Skills for common scenarios like productivity and navigation can be used as-is or customized however a customer prefers.
 
 >The Skill implementations currently provided are in C# only but the remote invocation nature of the Skills does enable you to invoke C# based Skills from a TypeScript Bot project.
 
-## Available Skills
+
+## Available Skill samples
+
+The following Skill samples are available out of the box, each with the deployment steps required to deploy and configure Skills for your use.
+
+
+
+
-The following Skills are available out of the box, each of the documentation links below has the deployment steps required to deploy and configure Skills for your use.
+
+## Next steps
+
-- [Productivity - Calendar]({{site.baseurl}}/reference/skills/productivity-calendar)
-- [Productivity - Email]({{site.baseurl}}/reference/skills/productivity-email)
-- [Productivity - To Do]({{site.baseurl}}/reference/skills/productivity-todo)
-- [Point of Interest]({{site.baseurl}}/reference/skills/pointofinterest)
-- [Automotive]({{site.baseurl}}/reference/skills/automotive)
-- [Experimental Skills]({{site.baseurl}}/reference/skills/experimental)
+
diff --git a/docs/_docs/overview/virtual-assistant-solution.md b/docs/_docs/overview/virtual-assistant-solution.md
new file mode 100644
index 0000000000..03221b392e
--- /dev/null
+++ b/docs/_docs/overview/virtual-assistant-solution.md
@@ -0,0 +1,119 @@
+---
+category: Overview
+title: What is Virtual Assistant?
+order: 2
+toc: true
+---
+
+# {{ page.title }}
+{:.no_toc}
+
+![Virtual Assistant diagram]({{site.baseurl}}/assets/images/virtualassistant-diagram.jpg)
+
+Customers and partners have an increasing need to deliver advanced conversational assistant experiences tailored to their brand, personalized to their users, and made available across a broad range of canvases and devices. Continuing Microsoft's open-sourced approach towards the Bot Framework SDK, the open-source Virtual Assistant solution provides you with a set of core foundational capabilities and full control over the end user experience and data.
+
+At its core, the [Virtual Assistant]({{site.baseurl}}/overview/virtual-assistant-template) (available in C# and TypeScript) is a project template with the best practices for developing a bot on the Microsoft Azure platform.
+
+Common assistant scenarios are provided as reusable conversational Skills, increasing developer productivity and enabling a vibrant ecosystem of reusable scenarios. Individual skills can be added to an assistant, for example: finding a point of interest, checking off an item on a to-do list, or replying to an email. Skills are fully customizable and consist of language models for multiple languages, dialogs, and code.
+
+Owning and enriching customer relationships and insights is vital to the goals of the Virtual Assistant. This contrasts with established in-market assistants that do not enable white-labelling or customization, or place privacy in the customer's control.
+
+Further skills can be created and made available either through your own assistant or through a broader ecosystem, enabling assistant owners to curate the capabilities that make sense for their scenario and work across industries.
+
+Extend a Virtual Assistant experience with reusable [Bot Framework Skills]({{site.baseurl}}/overview/skills), fulfilling common scenarios such as finding a nearby point of interest, checking off an item on a to-do list, or replying to an email. Skills can be fully customized and samples are available, enabling you to get started with multiple languages, dialogs, and code.
+
+Review analytics captured from the out-of-the-box Application Insights telemetry using the [Virtual Assistant Analytics Power BI template]({{site.baseurl}}/virtual-assistant/tutorials/create-assistant/csharp/1-intro/).
+
+
+## Key Principles
+
+### Your data, brand, and experience
+{:.no_toc}
+
+All aspects of the end user experience are owned and controlled by you. This includes the branding, name, voice, personality, responses, and avatar.
The source code to the Virtual Assistant and supporting Skills are provided as samples for you to customize.
+
+Your Virtual Assistant will be deployed within your Azure subscription. Therefore all data generated by your assistant (questions asked, user behavior, etc.) is entirely contained within your Azure subscription. See [Cognitive Services Azure Trusted Cloud](https://www.microsoft.com/en-us/trustcenter/cloudservices/cognitiveservices) and the [Azure section of the Trust Center](https://www.microsoft.com/en-us/TrustCenter/CloudServices/Azure) for more information.
+
+### Write once, embed anywhere
+{:.no_toc}
+
+The Virtual Assistant Solution Accelerator leverages Azure Cognitive Services and can be embedded on any [Bot Framework channel](https://docs.microsoft.com/en-us/azure/bot-service/bot-service-manage-channels?view=azure-bot-service-4.0) such as Web Chat, Facebook Messenger, or [Microsoft Teams]({{site.baseurl}}/virtual-assistant/tutorials/enable-teams/1-intro/). You can use the [Direct Line](https://docs.microsoft.com/en-us/azure/bot-service/bot-service-channel-directline?view=azure-bot-service-4.0) channel to integrate a Virtual Assistant into your mobile app, webpage, or device. Focus on voice-first experiences using the [Direct Line Speech]({{site.baseurl}}/virtual-assistant/tutorials/enable-speech/1-intro/) channel.
+
+### Enterprise-grade solutions
+{:.no_toc}
+
+The Virtual Assistant Solution Accelerator is built with the Azure Bot Service, Language Understanding Cognitive Service, and Unified Speech, along with a broad set of supporting Azure components, meaning that you benefit from the [Azure global infrastructure](https://azure.microsoft.com/en-gb/global-infrastructure/) including ISO 27018, HIPAA, PCI DSS, and SOC 1, 2 and 3 certification.
+
+In addition, Language Understanding support is provided by the LUIS Cognitive Service which [supports a broad set of languages](https://docs.microsoft.com/en-us/azure/cognitive-services/luis/luis-supported-languages). The [Translator Cognitive Service](https://azure.microsoft.com/en-us/services/cognitive-services/translator-text-api/) provides machine translation capabilities to extend your Virtual Assistant to additional locales.
+
+### Flexible integration and contextually aware
+{:.no_toc}
+
+The Virtual Assistant Solution Accelerator architecture is flexible and can be integrated with existing investments made into device-based conversational AI experiences. You can integrate into existing backend systems and APIs. Your Virtual Assistant can be integrated into your device and ecosystem to enable a truly intelligent experience. Through this contextual awareness your Virtual Assistant can deliver a more personalized conversation than otherwise possible.
+
+### Extend with Bot Framework Skills
+{:.no_toc}
+
+A Virtual Assistant often needs to extend its capabilities with specialized Skills. Typically, an organization needs to manage its own language models, dialogs, API integration, and generated responses.
+This is further complicated should you require your Virtual Assistant to support multiple languages. The Solution Accelerator includes a Skill capability that enables external bots to be plugged into an existing Virtual Assistant, as sketched below.
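As a rough sketch of what plugging in a skill looks like in practice, the `botskills` command-line tool connects a deployed skill to your assistant using its manifest (the URL, folder path, and flags below are illustrative; see the botskills documentation for the full set of options):

```shell
botskills connect --remoteManifest "https://<your-skill>.azurewebsites.net/api/skill/manifest" --luisFolder "Deployment/Resources/LU" --cs
```

Running a command along these lines updates your assistant's skill configuration and refreshes the Dispatch model so that utterances can be routed to the newly connected skill.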
+
+
+## Virtual Assistant components
+
+There are four major components within Virtual Assistant:
+
+### Virtual Assistant core
+{:.no_toc}
+
+The [Virtual Assistant core template]({{site.baseurl}}/overview/virtual-assistant-template) provides the basic building blocks that bring together the virtual assistant capabilities and the Microsoft technologies required to build a Virtual Assistant, including the Bot Framework SDK, LUIS, QnA Maker, Skills registration, Linked Accounts, etc.
+
+### Skills
+{:.no_toc}
+
+Bot Framework Skills leverage the new Bot Framework SDK capabilities and provide re-usable conversational building blocks covering common use cases, enabling you to add extensive functionality to a Bot within minutes. Skills include LUIS models, dialogs and integration code, and are delivered in source code form enabling you to customize and extend as required. Provided are Calendar, Email, To Do, Point of Interest skills and a number of other experimental skills. You can find more details in the [Skills]() section of this site.
+
+### Assistant Solution Accelerators
+{:.no_toc}
+
+Microsoft has assembled these samples to help customers accelerate building a Virtual Assistant for specific industries or scenarios. These samples have pre-packaged skill configurations, additional capabilities and suggested channels that are leveraged in typical scenarios.
+- [Base Virtual Assistant]({{site.baseurl}}/overview/virtual-assistant-template)
+- [Automotive Assistant]() (In and out of Vehicle Experiences for Customers)
+- [Enterprise Assistant]({{site.baseurl}}/solution-accelerators/assistants/enterprise-assistant/) (Employee Focused Experiences)
+- [Hospitality Assistant]({{site.baseurl}}/solution-accelerators/assistants/hospitality-assistant/) (In Room Assistant for hotel guests)
+
+### Clients and Channels
+{:.no_toc}
+
+Clients and Channels are the ways that users can interact with the Virtual Assistant.
+
+Microsoft offers a variety of channels such as Facebook Messenger, Teams, Slack, WeChat, and many others. You can find more details about available channels in the [Bot Framework Channels documentation](https://docs.microsoft.com/en-us/azure/bot-service/bot-service-channels-reference?view=azure-bot-service-4.0).
+
+Sample client applications are implementations of clients that will integrate seamlessly with your Assistant and enable Speech scenarios.
+- [Virtual Assistant Client for Android]({{site.baseurl}}/clients-and-channels/clients/virtual-assistant-client/)
+- Virtual Assistant Client for PowerApps *(Coming Soon)*
+
+## Next steps
+{:.toc}
+
+
diff --git a/docs/_docs/overview/virtual-assistant-template.md b/docs/_docs/overview/virtual-assistant-template.md
new file mode 100644
index 0000000000..a81a56c7aa
--- /dev/null
+++ b/docs/_docs/overview/virtual-assistant-template.md
@@ -0,0 +1,319 @@
+---
+category: Overview
+title: What's in the Virtual Assistant template?
+description: The Virtual Assistant Template brings together many best practices identified through the building of conversational experiences and automates integration of components that we've found to be highly beneficial to Bot Framework developers. This section covers some background to key decisions to help explain why the template works the way it does, with links to detailed information where appropriate.
+order: 3
+toc: true
+---
+
+# {{ page.title }}
+{:.no_toc}
+{{ page.description }}
+
+## Your Assistant project
+
+Using the template you'll end up with your Assistant project organized in line with the recommended thinking on how a Bot project should be structured. You are free to restructure this as necessary, but bear in mind that the provided deployment scripts expect some files to be in a consistent location.
+
+To learn more about project structure, see the [Create Project]({{site.baseurl}}/virtual-assistant/tutorials/create-assistant/csharp/3-create-project/) documentation.
+
+## Language Understanding
+### .lu file format
+{:.no_toc}
+
+The [LU](https://github.com/Microsoft/botbuilder-tools/blob/next/packages/Ludown/docs/lu-file-format.md) format is similar to Markdown, enabling easy modification and source control of your LUIS models and QnA information. Virtual Assistant uses these files at its core to simplify deployment and provide an ongoing source control solution.
+
+The [LuDown](https://github.com/Microsoft/botbuilder-tools/tree/next/packages/Ludown) tool is then used to convert .LU files into LUIS models, which can then be published to your LUIS subscription either through the portal or the associated [LUIS](https://github.com/Microsoft/botbuilder-tools/tree/next/packages/LUIS) CLI (command line) tool. The same tool is used to create a QnA Maker JSON file, which the [QnA Maker](https://github.com/Microsoft/botbuilder-tools/tree/next/packages/QnAMaker) CLI (command line) tool then uses to publish items to the QnA Maker knowledgebase.
+
+All of the above is handled as part of the Deployment scripts detailed below.
+
+### LUIS
+{:.no_toc}
+
+Every Bot should handle a base level of conversational language understanding. Cancellation or Help, for example, is a basic thing every Bot should handle with ease. Typically, developers need to create these base intents and provide initial training data to get started. The Virtual Assistant template provides example LU files to get you started, avoids every project having to create these each time, and ensures a base level of capability out of the box.
+
+The LU files provide the following intents across English, Chinese, French, Italian, German, and Spanish:
+> Cancel, Confirm, Escalate, FinishTask, GoBack, Help, Reject, Repeat, SelectAny, SelectItem, SelectNone, ShowNext, ShowPrevious, StartOver, Stop
+
+You can review these within the [**Deployment\Resources**]({{site.repo}}/tree/next/templates/Virtual-Assistant-Template/csharp/Sample/VirtualAssistantSample/Deployment/Resources/LU) directory.
+
+#### LUIS strongly-typed classes
+{:.no_toc}
+
+The [LuisGen](https://github.com/microsoft/botbuilder-tools/blob/next/packages/LUISGen/src/npm/readme.md) tool enables developers to create a strongly-typed class for their LUIS models. As a result, you can easily reference the intents and entities as class instance members.
+
+You'll find a **GeneralLuis.cs** and **DispatchLuis.cs** class as part of your project within the [**Services**]({{site.repo}}/tree/next/templates/Virtual-Assistant-Template/csharp/Sample/VirtualAssistantSample/Services) folder. The DispatchLuis.cs will be re-generated if you add Skills to reflect the changes made.
+
+To learn more about LuisGen, see the [LuisGen tool](https://github.com/microsoft/botbuilder-tools/blob/next/packages/LUISGen/src/npm/readme.md) documentation.
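To illustrate the benefit, here is a short sketch of consuming such a generated class from a dialog (assuming a configured `LuisRecognizer` instance named `luisRecognizer`; the `GeneralLuis` class and `Help` intent come from the generated GeneralLuis.cs):

```csharp
// Recognize the utterance and bind the result to the generated strongly-typed class.
var result = await luisRecognizer.RecognizeAsync<GeneralLuis>(turnContext, cancellationToken);

// TopIntent() returns the highest-scoring intent as an enum member, so intent
// names are checked at compile time rather than failing at runtime.
var (intent, score) = result.TopIntent();
if (intent == GeneralLuis.Intent.Help && score > 0.5)
{
    await turnContext.SendActivityAsync("Here is what I can help with...", cancellationToken: cancellationToken);
}
```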
+
+### QnA Maker
+{:.no_toc}
+
+A key design pattern used to good effect in the first wave of conversational experiences was to leverage Language Understanding (LUIS) and QnA Maker together. LUIS would be trained with tasks that your Bot could do for an end-user, and QnA Maker would be trained with more general knowledge, also providing personality chit-chat capabilities.
+
+[QnA Maker](https://www.qnamaker.ai/) provides the ability for non-developers to curate general knowledge in the format of question and answer pairs. This knowledge can be imported from FAQ data sources and product manuals, or created interactively within the QnA Maker portal.
+
+Two example QnA Maker models localized to English, Chinese, French, Italian, German, and Spanish are provided in the [LU](https://github.com/Microsoft/botbuilder-tools/blob/next/packages/Ludown/docs/lu-file-format.md) file format within the **Deployment\Resources** folder or [here]({{site.repo}}/tree/next/templates/Virtual-Assistant-Template/csharp/Sample/VirtualAssistantSample/Deployment/Resources/QnA).
+
+#### Base Personality
+{:.no_toc}
+
+QnA Maker provides five different personality types which you can find [here](https://github.com/microsoft/BotBuilder-PersonalityChat/tree/next/CSharp/Datasets). The Virtual Assistant template includes the **Professional** personality, which has been converted into the [LU](https://github.com/Microsoft/botbuilder-tools/blob/next/packages/Ludown/docs/lu-file-format.md) format to ease source control and deployment.
+
+You can review this within the [**Deployment\Resources**]({{site.repo}}/tree/next/templates/Virtual-Assistant-Template/csharp/Sample/VirtualAssistantSample/Deployment/Resources/QnA) directory.
+
+![QnA ChitChat example]({{site.baseurl}}/assets/images/qnachitchatexample.png)
+
+### Dispatch Model
+{:.no_toc}
+
+[Dispatch](https://docs.microsoft.com/en-us/azure/bot-service/bot-builder-tutorial-dispatch?view=azure-bot-service-4.0&tabs=csaddref%2Ccsbotconfig) provides an elegant solution to bringing together LUIS models and QnA Maker knowledge bases into one experience. It does this by extracting utterances from each configured LUIS model and questions from QnA Maker, and creating a central dispatch LUIS model. This enables a Bot to quickly identify which LUIS model or component should handle a given utterance, and ensures QnA Maker data is considered at the top level of intent processing, not just through None intent processing as has been the case previously.
+
+The Dispatch tool also enables model evaluation, which highlights confusion and overlap across LUIS models and QnA Maker knowledge bases, surfacing issues before deployment.
+
+The Dispatch model is used at the core of each project created using the template. It's referenced within the **MainDialog** class to identify whether the target is a LUIS model or QnA. In the case of LUIS, the secondary LUIS model is invoked, returning the intent and entities as usual. Dispatch is also used for interruption detection and Skill processing, whereby your Dispatch model is updated each time you add a new Skill.
+
+![Dispatch Example]({{site.baseurl}}/assets/images/dispatchexample.png)
+
+## Handling activities
+### Activity processing
+{:.no_toc}
+
+1. Activities are first processed within your Bot through the DialogBot.cs class found in the **Bots** folder. **OnTurnAsync** is executed and **MainDialog** processing is started.
+
+2.
The **MainDialog** dialog provided in the template derives from a base class called [RouterDialog]({{site.repo}}/blob/next/lib/csharp/microsoft.bot.builder.solutions/microsoft.bot.builder.solutions/Dialogs/RouterDialog.cs) which can be found in the **Microsoft.Bot.Builder.Solutions** NuGet library.
+
+3. The **OnInterruptDialogAsync** handler within **MainDialog** is executed, which in turn calls LUIS to evaluate the **General** LUIS model for top intent processing. If interruption is required, it's processed at this point.
+
+4. Processing returns to RouterDialog, which will end the dialog if interruption has been requested.
+
+5. If the Activity is a message and there is an active dialog, the activity is forwarded on. If there is no active dialog, then RouteAsync on MainDialog is invoked to perform "Turn 0" processing.
+
+6. **RouteAsync** within MainDialog invokes the Dispatch model to identify whether it should hand the utterance to:
+    - A dialog (mapped to a LUIS intent)
+    - QnAMaker (Chitchat or QnA)
+    - A Skill (mapped to a Dispatcher skill intent)
+
+### Interruptions
+{:.no_toc}
+
+The **MainDialog** class provided in the template derives from a base class called [RouterDialog]({{site.repo}}/blob/next/lib/csharp/microsoft.bot.builder.solutions/microsoft.bot.builder.solutions/Dialogs/RouterDialog.cs) which can be found in the **Microsoft.Bot.Builder.Solutions** NuGet library.
+
+As part of its **OnContinueDialogAsync** handler, this RouterDialog invokes **OnInterruptDialogAsync** within your **MainDialog.cs**. This handler enables interruption logic to be processed before any utterance is processed; by default Cancel, Help, Logout and Restart are handled here, enabling top-level intent processing even when you have an active dialog.
+
+You can review this logic within [**MainDialog.cs**]({{site.repo}}/blob/next/templates/Virtual-Assistant-Template/csharp/Sample/VirtualAssistantSample/Dialogs/MainDialog.cs#L295).
+
+### Event processing
+{:.no_toc}
+
+Events play a central role in an assistant experience and we provide a central event handler as part of the **MainDialog** implementation. **OnEventAsync** provides support for two key events: **VA.Location** and **VA.Timezone**. These events are processed, validated and then stored in state, enabling dialogs and Skills to leverage them as required and adapt to the user. For example, the Point of Interest skill can decide not to prompt for your current location if a Location is present.
+
+In addition, the standard **token/response** event is handled as per the Azure Bot Service authentication feature.
+
+This event handler can be extended as required to support your specific scenarios. You can find this in [**MainDialog.cs**]({{site.repo}}/blob/next_docs/templates/Virtual-Assistant-Template/csharp/Sample/VirtualAssistantSample/Dialogs/MainDialog.cs#L208).
+
+### Fallback responses
+{:.no_toc}
+
+In situations where an utterance from a user isn't understood by Dispatch (and therefore LUIS, QnA Maker, and Skills), the typical approach is to send a message back to the user saying the bot didn't understand. However, this behavior can easily be overridden to call some form of fallback capability, where you could use another knowledge source, like a search engine, to see if there is a highly scored response that could help satisfy the user's request; see the sketch below.
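A minimal sketch of such a fallback, assuming a hypothetical `searchClient` wrapper around your chosen knowledge source (none of these names are part of the template):

```csharp
// Invoked when Dispatch (and therefore LUIS, QnA Maker, and Skills) found no confident match.
private async Task FallbackAsync(ITurnContext turnContext, CancellationToken cancellationToken)
{
    // Query a secondary knowledge source, for example a search index over your documentation.
    var answer = await searchClient.FindAnswerAsync(turnContext.Activity.Text);

    if (!string.IsNullOrEmpty(answer))
    {
        // A highly scored result can satisfy the request instead of a confused reply.
        await turnContext.SendActivityAsync(answer, cancellationToken: cancellationToken);
    }
    else
    {
        // Otherwise fall back to the usual "didn't understand" response.
        await turnContext.SendActivityAsync("Sorry, I didn't understand that.", cancellationToken: cancellationToken);
    }
}
```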
+
+### Managing global exceptions
+{:.no_toc}
+
+Whilst exceptions are typically handled at source, it's important to have a global exception handler for unexpected situations; this is defined as part of the Adapter definition within [**DefaultAdapter.cs**]({{site.repo}}/blob/next/templates/Virtual-Assistant-Template/csharp/Sample/VirtualAssistantSample/Adapters/DefaultAdapter.cs).
+
+The provided exception handler passes information on the exception as a Trace activity, enabling it to be shown within the Bot Framework Emulator if it's being used; otherwise these are suppressed. A general error message is then shown to the user and the exception is logged through Application Insights.
+
+```csharp
+OnTurnError = async (turnContext, exception) =>
+{
+    await turnContext.SendActivityAsync(new Activity(type: ActivityTypes.Trace, text: $"{exception.Message}"));
+    await turnContext.SendActivityAsync(new Activity(type: ActivityTypes.Trace, text: $"{exception.StackTrace}"));
+    await turnContext.SendActivityAsync(MainStrings.ERROR);
+    telemetryClient.TrackException(exception);
+};
+```
+
+## Managing state
+
+CosmosDB is used as the default state store through the SDK-provided `CosmosDbStorage` storage provider. This provides a production-grade, scalable storage layer for your Bot's state, along with fast disaster recovery capabilities and regional replication where required. Features like automatic time-to-live provide additional benefits around clean-up of old conversations.
+
+Within **Startup.cs** you can optionally choose to disable use of CosmosDB and switch to MemoryStorage, often used for development; ensure this is reverted ahead of production deployment.
+
+```csharp
+    // Configure storage
+    // Uncomment the following line for local development without Cosmos Db
+    // services.AddSingleton<IStorage, MemoryStorage>();
+    services.AddSingleton<IStorage>(new CosmosDbStorage(settings.CosmosDb));
+```
+
+Deployment can be customized to omit deployment of CosmosDB and is covered in the [deployment documentation]({{site.baseurl}}/help/reference/deploymentscripts.md).
+
+## Introduction card
+
+A key issue with many conversational experiences is end-users not knowing how to get started, leading to general questions that the Bot may not be best placed to answer. First impressions matter! An introduction card offers an opportunity to introduce the Bot's capabilities to an end-user and suggests a few initial questions the user can use to get started. It's also a great opportunity to surface the personality of your Bot.
+
+A simple introduction card is provided as standard, which you can adapt as needed. A returning user card is shown on subsequent interactions once a user has completed the onboarding dialog (triggered by the Get Started button on the introduction card).
+
+![Intro Card Example]({{site.baseurl}}/assets/images/vatemplateintrocard.png)
+
+## Multi-locale support
+
+Most conversational experiences need to serve users in a variety of languages, which introduces additional complexity around ensuring:
+- The user's desired language is identified on each incoming message
+- The appropriate language variant of Dispatch, LUIS, and QnA Maker is used to process the user's question
+- Responses to the user are selected from the right locale response file (language generation).
+
+The Virtual Assistant addresses all of the above capabilities, driven by the per-locale configuration sketched below, and assists with the deployment considerations for multi-language Dispatch, LUIS, and QnA Maker resources. Localized responses for built-in capabilities are also provided.
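For orientation, per-locale resources are grouped in the project's cognitive models configuration. A trimmed, illustrative sketch follows; the exact schema and values are generated by the deployment scripts, so treat the field names here as approximate:

```json
{
  "defaultLocale": "en-us",
  "cognitiveModels": {
    "en": {
      "dispatchModel": { "name": "YOUR_BOT_en_Dispatch", "appid": "YOUR_APP_ID" },
      "languageModels": [ { "id": "General", "appid": "YOUR_APP_ID", "region": "westus" } ],
      "knowledgebases": [ { "id": "Chitchat", "kbId": "YOUR_KB_ID" } ]
    },
    "de": {
      "dispatchModel": { "name": "YOUR_BOT_de_Dispatch", "appid": "YOUR_APP_ID" },
      "languageModels": [],
      "knowledgebases": []
    }
  }
}
```

At runtime, the locale of the incoming activity selects which group of models processes the message.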
+
+To learn more about how multi-locale support is added, see the [localization documentation]({{site.baseurl}}/virtual-assistant/handbook/localization.md).
+
+## Language generation and responses
+
+The Virtual Assistant has transitioned to use the new [Language Generation](https://github.com/Microsoft/BotBuilder-Samples/tree/next/experimental/language-generation#readme) capability to provide a more natural conversational experience, by being able to define multiple response variations and leverage context/memory to adapt these dynamically to end-users.
+
+Language Generation (LG) leverages a new [LG file format](https://github.com/microsoft/BotBuilder-Samples/blob/next/experimental/language-generation/docs/lg-file-format.md) which follows the same markdown approach as the LU file format mentioned earlier. This enables easy editing of responses by a broad range of roles.
+
+LG also enables Adaptive Card responses to be defined alongside text responses, further simplifying management and localization of responses.
+
+LG files for your Virtual Assistant can be found in your **responses** folder or [here]({{site.repo}}/tree/next/templates/Virtual-Assistant-Template/csharp/Sample/VirtualAssistantSample/Responses), and the Template Engine code can be found in your **Startup.cs** file.
+
+An example of LG in use can be found [here]({{site.repo}}/blob/next/templates/Virtual-Assistant-Template/csharp/Sample/VirtualAssistantSample/Dialogs/MainDialog.cs#L77) and throughout the Virtual Assistant.
+
+## Telemetry
+
+Providing insights into the user engagement of your Bot has proven to be highly valuable. This insight can help you understand the levels of user engagement and what features of the Bot they are using (intents), along with questions people are asking that the Bot isn't able to answer - highlighting gaps in the Bot's knowledge that could be addressed through new QnA Maker articles, for instance.
+
+Integration of Application Insights provides significant operational/technical insight out of the box, but it can also be used to capture specific Bot-related events - messages sent and received, along with LUIS and QnA Maker operations. Bot-level telemetry is intrinsically linked to technical and operational telemetry, enabling you to inspect how a given user question was answered and vice versa.
+
+A middleware component, combined with a wrapper class around the QnA Maker and LuisRecognizer SDK classes, provides an elegant way to collect a consistent set of events. These consistent events can then be used by the Application Insights tooling along with tools like Power BI. An example Power BI dashboard is provided as part of the Bot Framework Solutions GitHub repo and works right out of the box with every Virtual Assistant template. See the [Analytics]({{site.baseurl}}/overview/analytics) section for more information.
+
+![Analytics Example]({{site.baseurl}}/assets/images/analytics/virtual-assistant-analytics-powerbi-1.png)
+
+To learn more about Telemetry, see the [Analytics tutorial]({{site.baseurl}}/virtual-assistant/tutorials/view-analytics).
+
+## Azure resource deployment
+
+The comprehensive experience requires the following Azure resources to function properly, detailed [here]({{site.baseurl}}/help/reference/deployment-scripts/):
+
+- Azure Bot Service
+- Azure Blob Storage
+- Azure Cosmos DB
+- Azure App Service Plan
+- Azure Application Insights
+- Bot Web App
+- Language Understanding (LUIS)
+- QnA Maker
+- QnA Maker Web App
+- QnA Maker Azure Search Service
+- Content Moderator
+
+To enable you to get started quickly, we have provided an ARM template and a set of PowerShell scripts (supported cross-platform) to deploy these resources along with the required LUIS models, QnA Maker knowledge bases and Dispatch model, and to publish into Azure. In addition, the scripts can refresh the LUIS and QnA resources with any changes from your LU files.
+
+All of the steps performed by our scripts are documented [here]({{site.baseurl}}/virtual-assistant/tutorials/deploy-assistant/web/1-intro/) if you wish to review them or perform the steps manually.
+
+You can find the ARM template (template.json) in your **Deployment\Resources** folder or [here]({{site.repo}}/tree/next/templates/Virtual-Assistant-Template/csharp/Template/VA/Deployment/Resources). The PowerShell scripts can be found in your **Deployment\Scripts** folder or [here]({{site.repo}}/tree/next/templates/Virtual-Assistant-Template/csharp/Template/VA/Deployment/Scripts).
+
+## Middleware
+
+Middleware is simply a class that sits between the adapter and your bot logic, added to your adapter's middleware collection during initialization. Every activity coming into or out of your Assistant flows through your middleware.
+
+A number of middleware components have been provided to address some key scenarios; they are included in the **Microsoft.Bot.Builder.Solutions** NuGet library or in [this location]({{site.repo}}/blob/next/lib/csharp/microsoft.bot.builder.solutions/microsoft.bot.builder.solutions/Middleware).
+
+### Set Locale Middleware
+{:.no_toc}
+
+In multi-locale scenarios, it's key to understand the user's locale so you can select the appropriate language LUIS models and responses for a given user. Most channels populate the **Locale** property on an incoming Message activity, but there are many cases where this may not be present. It's therefore important to stamp a default locale on activities where this is missing, so downstream components can select the right models and responses.
+
+You can find this component within the **Microsoft.Bot.Builder.Solutions** NuGet library or in [this location]({{site.repo}}/blob/next/lib/csharp/microsoft.bot.builder.solutions/microsoft.bot.builder.solutions/Middleware/SetLocaleMiddleware.cs).
+
+### Set Speak Middleware
+{:.no_toc}
+
+For speech scenarios, providing a fully formed SSML fragment is required in order to control the voice, tone and more advanced capabilities such as pronunciation. Setting the **Speak** property on the Activity to a speech representation should be performed as part of the Language Generation step, but in cases where this is omitted we can transpose the Activity.Text property into Speak to ensure all responses have speech variations.
+
+The [**Set Speak Middleware**]({{site.repo}}/blob/next/lib/csharp/microsoft.bot.builder.solutions/microsoft.bot.builder.solutions/Middleware/SetSpeakMiddleware.cs) provides these capabilities and only executes when the Direct Line Speech channel is used. An example SSML fragment is shown below:
+
+```xml
+<speak version="1.0" xmlns="http://www.w3.org/2001/10/synthesis" xml:lang="en-US">
+  <voice name="YOUR_VOICE_FONT">
+    You have the following event on your calendar: Sync Meeting at 4PM with 2 people at Conference Room 1.
+  </voice>
+</speak>
+```
+
+### Console Output Middleware
+{:.no_toc}
+
+The [**Console Output Middleware**]({{site.repo}}/blob/next/lib/csharp/microsoft.bot.builder.solutions/microsoft.bot.builder.solutions/Middleware/ConsoleOutputMiddleware.cs) is a simple component for debugging that outputs incoming and outgoing activities to the console, enabling you to easily see the Text/Speak responses flowing through your Bot.
+
+### Event Debugger Middleware
+{:.no_toc}
+
+Event Activities can be used to pass metadata between an assistant and user without being visible to the user. These events can enable a device or application to communicate an event to an assistant (e.g. being switched on) or enable an assistant to convey an action for a device to perform, such as opening a deep link to an application or changing the temperature.
+
+It can be hard to generate these activities for testing purposes, as the Bot Framework Emulator doesn't provide the ability to send Event activities. The [**Event Debug Middleware**]({{site.repo}}/blob/next/lib/csharp/microsoft.bot.builder.solutions/microsoft.bot.builder.solutions/Middleware/EventDebuggerMiddleware.cs) provides an elegant workaround, enabling you to send messages following a specific format which are then transposed into an Event activity processed by your Assistant.
+
+For example, sending the message **/event:{ "Name": "{Event name}", "Value": "{Event value}" }** with the middleware registered would generate an Activity of type event with the appropriate name and value.
+
+### Content Moderator Middleware
+{:.no_toc}
+
+Content Moderator is an optional component that enables the detection of potential profanity and helps check for personally identifiable information (PII). This can be helpful to integrate into Bots, enabling a Bot to react to profanity or to a user sharing PII. For example, a Bot can apologize and hand off to a human, or not store telemetry records, if PII is detected.
+
+[**Content Moderator Middleware**]({{site.repo}}/blob/next/lib/csharp/microsoft.bot.builder.solutions/microsoft.bot.builder.solutions/Middleware/ContentModeratorMiddleware.cs) is provided that screens text and surfaces the output through a **TextModeratorResult** on the **TurnState** object. This middleware is not enabled by default.
+
+### Feedback Middleware
+{:.no_toc}
+
+Collecting feedback from users at the end of an interaction is a great way to measure how well your assistant is resolving end users' objectives, in addition to measuring dialog completion metrics. Forcing the user to complete feedback is highly disruptive to the overall experience, so it's important to make this optional and not get in the way of the user.
+
+The [**Feedback Middleware**]() provides a way to collect feedback leveraging suggested actions, making the request optional, and surfaces simple 👍 and 👎 options with an option to provide a text response too. This feedback is then stored through the usual Application Insights telemetry storage and surfaced via the accompanying Power BI dashboard.
+
+By default, the middleware activates at the end of every dialog, but you can customize this further to suit your scenario, for example only asking for feedback when LUIS or QnA prediction scores are low, or perhaps at random or a limited number of times in a time period.
+
+To learn more about the Feedback capability, see the [Feedback documentation]({{site.baseurl}}/virtual-assistant/handbook/feedback.md).
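Each of the middleware components above is opt-in: it takes effect once added to the adapter's middleware collection at initialization time. A condensed sketch of that registration, mirroring (not reproducing) the template's DefaultAdapter, with constructor details trimmed:

```csharp
// Startup.cs (sketch): register middleware on the adapter during initialization.
services.AddSingleton<BotFrameworkHttpAdapter>(sp =>
{
    var settings = sp.GetService<BotSettings>();
    var adapter = new BotFrameworkHttpAdapter();

    // Stamp a default locale first so later components can rely on it.
    adapter.Use(new SetLocaleMiddleware(settings.DefaultLocale ?? "en-us"));

    // Turn specially formatted chat messages into Event activities while testing.
    adapter.Use(new EventDebuggerMiddleware());

    // Echo incoming/outgoing activities to the console during local debugging.
    adapter.Use(new ConsoleOutputMiddleware());

    return adapter;
});
```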
+
+## Dialogs
+
+Beyond the core **MainDialog** dialog, two further dialogs are provided, both to deliver core scenarios and to provide examples to get you started. These are all wired up to the provided LUIS intents and work out of the box across multiple languages.
+
+### Main Dialog
+{:.no_toc}
+
+The [**MainDialog**]({{site.repo}}/tree/next/templates/Virtual-Assistant-Template/csharp/Sample/VirtualAssistantSample/Dialogs) class, as discussed earlier in this section, is the core part of the Activity processing stack and is where all activities are processed. This is also where the Help intent is handled, which returns a response as defined within the Language Generation responses. Events are also handled as part of this dialog.
+
+### Onboarding Dialog
+{:.no_toc}
+
+The [**OnboardingDialog**]({{site.repo}}/blob/next/templates/Virtual-Assistant-Template/csharp/Sample/VirtualAssistantSample/Dialogs/OnboardingDialog.cs) provides an example introduction dialog experience for users starting their first conversation. It prompts for some information which is then stored in state for future use by your assistant. This dialog demonstrates how you can use prompts and state.
+
+### Escalate Dialog
+{:.no_toc}
+
+The [**EscalateDialog**]({{site.repo}}/blob/next/templates/Virtual-Assistant-Template/csharp/Sample/VirtualAssistantSample/Dialogs/EscalateDialog.cs) demonstrates a stubbed dialog to handle a user asking to be transferred to a human. This is where you could integrate a human hand-off capability. The provided implementation returns a response with a placeholder telephone number.
+
+## Unit Testing
+
+Unit testing of dialogs is an important capability for any project. A number of example unit tests are provided as part of the Virtual Assistant and cover all capabilities provided. These can be used as a baseline to build your own additional tests.
+
+You can find these tests in a companion project to your assistant or [here]({{site.repo}}/tree/next/templates/Virtual-Assistant-Template/csharp/Sample/VirtualAssistantSample.Tests).
+
+## Skill Support
+
+The Virtual Assistant integrates Skill support for your assistant, enabling you to easily register skills through the execution of the `botskills` command-line tool. The ability to trigger skills based on utterances relies heavily on the Dispatcher, which is automatically provisioned as part of your assistant deployment.
+
+Within `MainDialog`, any dispatch intent that has been identified is matched against registered skills. If a skill is matched, then Skill invocation is started, with subsequent messages being routed to the Skill until the skill conversation is ended.
+
+## Speech support
+
+The Virtual Assistant has all of the pre-requisites required for a high-quality speech experience out of the box when using Direct Line Speech. This includes ensuring all responses have speech-friendly variations, middleware for SSML, and configuration of the Streaming Extensions adapter. The [Enabling speech tutorial]({{site.baseurl}}/tutorials/enable-speech/1_intro/) includes further configuration steps to provision Speech and get started with a test tool quickly.
+
+## Multi-provider authentication
+
+For some assistant scenarios you may have a capability or Skill that supports multiple authentication types; the Calendar Skill, for example, supports both Microsoft and Google accounts.
If a user has linked their assistant to both of these, you may need to clarify which account the user wants to use. To support this scenario, the Multi Provider Auth wraps an additional prompt around an authentication request.
+
+The Multi Provider Authentication also provides the Skill authentication protocol, whereby a Skill can request a token centrally from the Virtual Assistant rather than prompting for its own authentication.
+
+## Continuous integration and deployment
+
+An [Azure DevOps](https://azure.microsoft.com/en-us/solutions/devops/) [YAML](https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema?view=azure-devops&tabs=schema) file for Continuous Integration is included within the `pipeline` folder of your assistant and provides all the steps required to build your assistant project and generate code coverage results. This can be imported into your Azure DevOps environment to create a build.
+
+In addition, documentation to create a [release pipeline]({{site.baseurl}}/howto/virtual-assistant/continuousdeployment/) is also provided, enabling you to continuously deploy updates to your project to your Azure test environment and also update Dispatch, LUIS and QnA Maker resources with any changes to the LU files within source control.
+
diff --git a/docs/_docs/overview/virtualassistant.md b/docs/_docs/overview/virtualassistant.md
deleted file mode 100644
index 4f21b224db..0000000000
--- a/docs/_docs/overview/virtualassistant.md
+++ /dev/null
@@ -1,86 +0,0 @@
----
-category: Overview
-title: Virtual Assistant
-order: 1
----
-
-# {{ page.title }}
-{:.no_toc}
-
-## Contents
-{:.no_toc}
-
-*
-{:toc}
-
-## Intro
-![Virtual Assistant Diagram]({{site.baseurl}}/assets/images/virtualassistant-diagram.jpg)
-
-Customers and partners have a significant need to deliver a conversational assistant tailored to their brand, personalized to their users, and made available across a broad range of canvases and devices. Continuing Microsoft's open-sourced approach towards the [Bot Framework](http://aka.ms/botframework) SDK, the open-source Virtual Assistant solution provides you with a set of core foundational capabilities and full control over the end user experience.
-
-Owning and enriching customer relationships and insights is vital to the goals of the Virtual Assistant. You can modify the name, voice, and personality to suit a customer's needs. The solution simplifies the creation of a Virtual Assistant, enabling you to get started in minutes and extend with end-to-end development tooling.
-
-Common scenarios are provided as reusable conversational Skills, increasing developer productivity and enabling a vibrant ecosystem of reusable scenarios. Individual Skills can be added to an existing conversational application like: finding a point of interest, checking off an item on a to-do list, or replying to an email. Skills are fully customizable and consist of language models for multiple languages, dialogs, and code.
-
-## Example Scenarios
-
-The Virtual Assistant extends across a broad number of industry scenarios; some example scenarios are shown below for reference purposes.
-
-- Automotive Industry
-  - Voice enabled Virtual Assistant integrated into the car providing end users the ability to perform traditional car operations (e.g. navigation, radio) along with productivity focused scenarios such as moving meetings when you're running late, adding items to your task list, and proactive experiences where the car can suggest tasks to complete based on events such as starting the engine, traveling home or enabling cruise control. Adaptive Cards are rendered within the Head Unit and Speech integration is performed through Push-To-Talk or Wake Word interactions.
-
-- Hospitality
-  - Voice enabled Virtual Assistant integrated into a hotel-room device providing a broad range of Hospitality focused scenarios (e.g. extend your stay, request late checkout, room service) including concierge and the ability to find local restaurants and attractions. Optional linking to your Productivity accounts opens up more personalized experiences such as suggested alarm calls, weather warnings and learning of patterns across stays. An evolution of the current TV personalization experienced in rooms today.
-
-- Enterprise
-  - Voice and Text enabled branded Employee Assistant experiences integrated into enterprise devices and existing conversation canvases (e.g. Teams, WebChat, Slack) enabling employees to manage their calendars, find available meeting rooms, find people with specific skills or perform HR related operations.
-
-## Our Principles
-
-### Your Data, Brand, and Experience
-
-All aspects of the end user experience are owned and controlled by you. This includes the Branding, Name, Voice, Personality, Responses and Avatar. The source-code to the Virtual Assistant and supporting Skills is provided in full, enabling you to adjust as required.
-
-Your Virtual Assistant will be deployed within your Azure subscription. Therefore all data generated by your assistant (questions asked, user behavior, etc.) is entirely contained within your Azure subscription. See [Cognitive Services Azure Trusted Cloud](https://www.microsoft.com/en-us/trustcenter/cloudservices/cognitiveservices) and the [Azure section of the Trust Center](https://www.microsoft.com/en-us/TrustCenter/CloudServices/Azure) for more information.
-
-### Write Once, Embed Anywhere
-
-The Virtual Assistant leverages the Microsoft Conversational AI platform and therefore can be surfaced through any Bot Framework [channel](https://docs.microsoft.com/en-us/azure/bot-service/bot-service-manage-channels?view=azure-bot-service-4.0) - e.g. WebChat, FaceBook Messenger, Skype, etc.
-
-In addition, through the [Direct Line](https://docs.microsoft.com/en-us/azure/bot-service/rest-api/bot-framework-rest-direct-line-3-0-concepts?view=azure-bot-service-4.0) channel we can embed experiences into Desktop and Mobile Apps including devices such as Cars, Speakers, Alarm Clocks, etc.
-
-### Enterprise Grade Solutions
-
-The Virtual Assistant solution is built on the Azure Bot Service, Language Understanding Cognitive Service, Unified Speech along with a broad set of supporting Azure components meaning that you benefit from the [Azure global infrastructure](https://azure.microsoft.com/en-gb/global-infrastructure/) including ISO 27018, HIPAA, PCI DSS, SOC 1, 2 and 3 certification.
-
-In addition, Language Understanding support is provided by the LUIS Cognitive Service which supports a broad set of [languages](https://docs.microsoft.com/en-us/azure/cognitive-services/luis/luis-supported-languages).
The [Translator Cognitive Service](https://azure.microsoft.com/en-us/services/cognitive-services/translator-text-api/) provides additional Machine Translation capabilities to extend the reach of your Virtual Assistant even further.
-
-### Flexible Integration and Contextually Aware
-
-The Virtual Assistant architecture is flexible and can be integrated with existing investments you have made into device-based Speech or Natural Language processing capabilities, and can integrate with existing back-end systems and APIs.
-
-Not only does the Virtual Assistant enable you to deliver your own unique experience, but you can also hand off a user's query to their chosen Digital Assistant for certain types of questions.
-
-Your Virtual Assistant can be integrated into your device and ecosystem to enable a truly intelligent experience.
-Through this contextual awareness your Virtual Assistant can deliver a more personalized conversation than otherwise possible.
-
-### Adaptive Cards
-
-[Adaptive Cards](https://adaptivecards.io/) provide the ability for your Virtual Assistant to return user experience elements (e.g. Cards, Images, Buttons) alongside text-based responses.
-If the conversation canvas has a screen, these cards can be rendered across a broad range of devices and platforms providing supporting UX where appropriate.
-[Samples of Adaptive Cards](https://adaptivecards.io/samples/) are available to try out, as well as documentation on how to [render cards within your own application](https://docs.microsoft.com/en-us/adaptive-cards/rendering-cards/getting-started).
-
-### Skills
-
-In addition to the core Virtual Assistant, there exists a broad set of common capabilities which require each developer to build themselves.
-Productivity scenarios are a great example where each organization would need to create their own:
-
-1. Language Models (*LUIS*)
-2. Dialogs (*code-based*)
-3. API Integration (*code-based*)
-4. Language Generation (*bot responses*)
-
-This is then further complicated by the need to support multiple languages, and results in a large amount of work for any organization building their own assistant.
-Our Virtual Assistant solution includes a new Skill capability enabling new capabilities to be plugged into an existing Virtual Assistant through configuration only.
-
-All aspects of each Skill are completely customizable by developers as the full source code is provided on GitHub along with the Virtual Assistant.
diff --git a/docs/_docs/overview/whats-new.md b/docs/_docs/overview/whats-new.md
new file mode 100644
index 0000000000..5883e11f27
--- /dev/null
+++ b/docs/_docs/overview/whats-new.md
@@ -0,0 +1,153 @@
+---
+category: Overview
+title: What's new?
+order: 1
+toc: true
+---
+
+# {{ page.title }}
+{:.no_toc}
+
+![Virtual Assistant diagram]({{site.baseurl}}/assets/images/virtualassistant-diagram.jpg)
+
+Customers and partners have an increasing need to deliver advanced conversational assistant experiences tailored to their brand, personalized to their users, and made available across a broad range of canvases and devices. Continuing Microsoft's open-sourced approach towards the Bot Framework SDK, the open-source Virtual Assistant solution provides you with a set of core foundational capabilities and full control over the end user experience and data.
+
+At its core, the [Virtual Assistant]({{site.baseurl}}/overview/virtual-assistant-template) (available in C# and TypeScript) is a project template with the best practices for developing a bot on the Microsoft Azure platform.
+ +For more details on the Virtual Assistant, review [What is the Virtual Assistant Solution?]({{site.baseurl}}/overview/virtual-assistant-solution). + +## Virtual Assistant components +{:.no_toc} + +There are four major components within the Virtual Assistant: + +- Virtual Assistant Core +- Skills +- Assistant Solution Accelerators +- Clients and Channels + +The following sections cover what's new in each of these components. + +## What's new since Build 2019? +In this section, we cover the new features added to each component of the Virtual Assistant since the Build 2019 conference. These new features target the Ignite 2019 timeframe. + +### Virtual Assistant Core + +Virtual Assistant Core has gained many new features that track updates to the Bot Framework and other core components. In this section we cover the new features and supported components that make up the Virtual Assistant Core implementation. + +#### Bot Framework 4.6 Support +{:.no_toc} + +Virtual Assistant is updated to Bot Framework 4.6. Some new capabilities of the SDK are highlighted in this document, but more details of Bot Framework 4.6 can be found [here](https://github.com/microsoft/botframework#Bot-Framework-SDK-v4). + +#### Language Generation (LG) Support +{:.no_toc} + +Microsoft has added support for the new [Language Generation (LG)](https://github.com/Microsoft/BotBuilder-Samples/tree/master/experimental/language-generation) features introduced in [Bot Framework 4.6](https://github.com/microsoft/botframework#Bot-Framework-SDK-v4), allowing for richer dialogs with more dynamic, natural responses. Microsoft has also incorporated LG into the Virtual Assistant Skills. + +#### Context Switching +{:.no_toc} + +Context switching enables a developer to let the user switch to a different dialog or skill by allowing top-level intents to be evaluated within each waterfall dialog. + +#### Speech Support +{:.no_toc} + +Microsoft has added speech support to the Virtual Assistant, enabling speech-first experiences without any custom code. This includes configuration settings for WebSockets and SSML middleware. A tutorial is included in the documentation on how to configure the Direct Line Speech channel. + +#### Teams Channel Support +{:.no_toc} + +Microsoft has worked closely with the Microsoft Teams organization to incorporate the Microsoft Teams channel as a supported channel for the Virtual Assistant. As the Virtual Assistant moves forward you will continue to see enhancements that allow for smoother Teams integration. An example Manifest is provided as part of the Enterprise Assistant to simplify adding your assistant to Teams. + +#### Multi-Turn QnA Maker Support +{:.no_toc} + +Microsoft has added Multi-Turn QnA Maker support to the Virtual Assistant as a solution for more advanced FAQ experiences. Much of this comes with the support in [Bot Framework 4.6](https://github.com/microsoft/botframework#Bot-Framework-SDK-v4). For more details, review the [Multi-turn QnA Maker documentation](https://docs.microsoft.com/en-us/azure/cognitive-services/QnAMaker/how-to/multiturn-conversation). + + +### Skills + +Microsoft continues to grow its skills library and enhance the skills that are provided. In this section we cover the new skills and the enhancements to existing skills. Experimental skills should be considered samples; they typically have a simple LUIS language model and are not localized.
+ +#### Improved Conversation Flows and Capabilities +{:.no_toc} + +Microsoft has also improved the conversation flows in many of the previously announced skills, such as Calendar and Point of Interest (POI), allowing for better customer experiences. + +#### Hospitality Skill (Experimental) +{:.no_toc} + +Microsoft has released a new experimental skill focused on hospitality scenarios, allowing for experiences such as managing reservations, checking out, and requesting amenities. You can find more details on the scenarios this supports in the [Hospitality Skill documentation]({{site.baseurl}}/skills/samples/hospitality). + +#### Event Skill (Experimental) +{:.no_toc} + +Microsoft has released a new experimental skill focused on finding events from the popular platform [Eventbrite](http://www.eventbrite.com/). More details on the scenarios this supports can be found in the [Event Skill documentation]({{site.baseurl}}/skills/samples/event/). + +#### IT Service Management (ITSM) Skill (Experimental) +{:.no_toc} + +Microsoft has released a new experimental skill focused on popular IT Service Management scenarios. The IT Service Management skill provides basic ticket and knowledge base capabilities and supports [ServiceNow](http://www.servicenow.com/). More details on the ITSM skill can be found [here]({{site.baseurl}}/skills/samples/itsm/). + +#### Music Skill (Experimental) +{:.no_toc} + +Microsoft has released a new experimental skill demonstrating artist and playlist lookup for the popular music service [Spotify](https://developer.spotify.com/documentation/web-api/libraries/). Playback information is then signaled back to the device through Events, enabling native device playback. More details on the Music skill can be found [here]({{site.baseurl}}/skills/samples/music/). + + +### Assistant Solution Accelerators + +Microsoft has introduced the concept of Assistant Samples: sample implementations that bring together the skills and channels that many of our customers are looking to combine. As we continue to grow the Virtual Assistant's capabilities, we are looking to provide customers with these starting points. In this section we provide details on some of the Assistant Samples we are introducing. + +#### Base Virtual Assistant +{:.no_toc} + +This is less a new capability than the concept of an empty core assistant, allowing customers to build from a completely empty solution with no pre-installed skills. This has always been the basis of the Virtual Assistant, but we want to ensure it remains the starting point for customers assembling a solution that none of the other examples fits easily. + +#### Enterprise Assistant Sample +{:.no_toc} + +Microsoft has assembled a typical Virtual Assistant configuration, based on what we have seen working with customers, targeted at the enterprise community. You can get more details on this sample [here]({{site.baseurl}}/solution-accelerators/assistants/enterprise-assistant/). + +#### Hospitality Assistant Sample +{:.no_toc} + +Microsoft has assembled a typical Virtual Assistant configuration, based on what we have seen working with customers, targeted at the hospitality community.
You can get more details on this sample [here]({{site.baseurl}}/solution-accelerators/assistants/hospitality-assistant/). + +#### Automotive Assistant Sample +{:.no_toc} + +Microsoft has assembled a typical Virtual Assistant configuration, based on what we have seen working with customers, targeted at the automotive community. More details on this sample are coming soon. + +### Clients and Channels + +Microsoft continues to add more ways for users to connect to their Virtual Assistant through the conversational canvas of their choice. This allows developers to write their conversational experiences once and have them consumed through the key channels and clients their users demand. In this section we provide details on what has been added in this area. + +#### Android Virtual Assistant Client +{:.no_toc} + +Microsoft understands the need for devices such as phones, tablets, and other IoT devices (cars, alarm clocks, etc.) to serve as interfaces for interacting with users. Microsoft has created a base Android application that demonstrates the following capabilities: +- Render Adaptive Cards +- Provide out-of-the-box (OOB) support for Direct Line Speech +- Run as a service on an Android device +- Open and close the microphone on the device +- Consume events and engage with the local operating system for Android OS events (navigation, phone dialer, etc.) +- Run as the default assistant +- Provide threaded conversation views +- Provide widgets that customized launchers can leverage +- Configuration options that allow users to set bot endpoints +- Configuration options that allow customization of colors +- Light and dark mode support + +This sample application can be used to quickly test your Virtual Assistant or any Bot Framework bot on Android devices (8.x and greater). More details can be found [here]({{site.baseurl}}/clients-and-channels/clients/virtual-assistant-client/). + +#### Teams Channel Support +{:.no_toc} + +Microsoft has added Microsoft Teams channel support in [Bot Framework 4.6](https://github.com/microsoft/botframework#Bot-Framework-SDK-v4), and it is now supported out of the box with the Virtual Assistant. + + +## Summary + +Microsoft is committed to giving customers the ability to deliver their own unique Virtual Assistant experiences to their users, by providing the tools and control required in a world of many virtual agents. In just the time since Build 2019, Microsoft has continued to make great strides in improving these tools and capabilities. We look forward to seeing how you take these enhancements forward to enable your customers and users. As always, you can provide feedback and bug reports at [https://github.com/microsoft/botframework-solutions/issues](https://github.com/microsoft/botframework-solutions/issues). \ No newline at end of file diff --git a/docs/_docs/reference/analytics/applicationinsights.md deleted file mode 100644 index 96a275e831..0000000000 --- a/docs/_docs/reference/analytics/applicationinsights.md +++ /dev/null @@ -1,96 +0,0 @@ ---- -category: Reference -subcategory: Analytics -title: Application Insights -description: Detailed information on how Application Insights is used to collect information and power our analytics capabilities.
-order: 1 ---- - - -# {{ page.title }} -{:.no_toc} - -## In this reference -{:.no_toc} - -* -{:toc} - -## Intro -The following provides some examples of retrieving data about your bot, to better understand how your bot (and related services) are performing. - -## LUIS Intents Pie Chart - -The following demonstrates querying data that was generated by LUIS telemetry. The telemetry logs the top intent from the recognizer (in addition to some other properties), and logs an [Application Insights Custom Event](https://docs.microsoft.com/en-us/azure/application-insights/app-insights-api-custom-events-metrics). - -![Luis Pie Chart]({{site.baseurl}}/assets/images/luis_pie.png) - -``` -customEvents -| where timestamp >= ago(24h) -| where name startswith "LuisResult" -| extend intent = tostring(customDimensions.['intent']) -| summarize count() by intent -| order by count_ desc -| render piechart -``` - -## P50, P95, P99 for Services - -The following query demonstrates querying data from the Application Insights `dependencies` table, which models calls to external components. - -![Example Report]({{site.baseurl}}/assets/images/p99.png) - -CosmosDB - -``` -dependencies -| where timestamp >= now(-1d) -| where type == "Azure DocumentDB" -| summarize percentiles(duration,50,95,99) by bin (timestamp, 1m) -| render timechart -``` - -Azure Blob Storage - -``` -dependencies -| where timestamp >= now(-1d) -| where type == "Azure blob" -| summarize percentiles(duration,50,95,99) by bin (timestamp, 1m) -| render timechart -``` - -LUIS - -``` -dependencies -| where timestamp >= now(-1d) -| where type == "HTTP" and name contains "/luis" -| summarize percentiles(duration,50,95,99) by bin (timestamp, 1m) -| render timechart -``` - -## Using Application Insights in Visual Studio - -Within Visual Studio, Application Insights events can be queried in the "Application Insights Search" window. [For more details, see the Application Insights documentation](https://docs.microsoft.com/en-us/azure/application-insights/app-insights-diagnostic-search). - -![Example Visual Studio Session]({{site.baseurl}}/assets/images/visualstudio_appinsights.png) - -Clicking on "Track Operation" in the details of any event can give you a visualization of where time is being spent, using the events in the telemetry that are automatically correlated: - -![Example Track Operation]({{site.baseurl}}/assets/images/visualstudio_trackoperation.png) - -With this view, you can quickly understand where time is being spent within your bot. - -## Disabling Application Insights - -To turn off Application Insights logging for C#, open up the `Startup.cs` file and uncomment the following lines: - -```csharp - public void Configure(IApplicationBuilder app, IHostingEnvironment env) - { - // Uncomment to disable Application Insights. - // var configuration = app.ApplicationServices.GetService<TelemetryConfiguration>(); - // configuration.DisableTelemetry = true; -```
-order: 3 ---- - -# {{ page.title }} -{:.no_toc} - -## In this reference -{:.no_toc} - -* -{:toc} - -## Intro -The [Conversational Analytics Power BI sample](https://aka.ms/botPowerBiTemplate) provides a Power BI template that can be used to understand how your bot is performing. - -## Sample Overview - -|Dashboard Name|Description| |-|-| |[Overall Usage](#overall-usage)| Provides general insights like unique users, total number of messages, and number of conversations per day| |[All Dialogs Overview](#all-dialogs-overview)| All dialogs' popularity and status based off of SDK telemetry.| |[Dialog Overview](#dialog-overview)| A specific dialog's popularity, status, and average bot/user turn count.| |[LUIS Intents](#luis-intents)| A count of LUIS intents per day.| |[All Conversations Metrics](#all-conversations-metrics)| Highlights the average number of conversations per unique user and the average duration by day.| |[Conversations Drill Down](#conversations-drill-down)| Per conversation, this highlights the dialogs triggered and common utterances.| |[Transcript](#transcript)| Highlights a count of interactions, session duration, and the transcript between bot and users per unique conversation.| |[Demographics](#demographics)| Demonstrates where users are connecting to your bot and the languages they are using.| |[Word Cloud](#word-cloud)| Shows a word cloud and tree map of common uttered phrases.| |[Sentiment](#sentiment)| If sentiment analysis is enabled, shows the average sentiment by day.| |[QnA Maker Insights](#qna-maker-insights)| Shows insights on user utterance matches with QnA Maker. | - -### Sample Query using Application Insights - -Below is a sample query to create a `conversationalai` table in Power BI. This extracts bot activity, QnA results, sentiment and demographic data. - -You will need to replace `<APPLICATION_ID>` with your Application Insights Application ID, which can be obtained by: - -1. Log in to the [Azure Portal](https://portal.azure.com/). -2. Navigate to your Application Insights resource. -3. Select **API Access**. -4. Copy the Application ID.
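The **API Access** blade is shown in the screenshot below. Before wiring the query into Power BI, you can optionally smoke-test the endpoint it targets; the following check is a hypothetical illustration (the API key also comes from the **API Access** blade) and is not part of the original sample:

```
curl -X POST "https://api.loganalytics.io/v1/apps/<APPLICATION_ID>/query" \
  -H "x-api-key: <API_KEY>" \
  -H "Content-Type: application/json" \
  -d "{\"query\": \"customEvents | take 5\"}"
```

A valid Application ID and key should return a JSON `tables` payload in the same shape the Power BI query below unpacks.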
- -![AppInsightsApplicationID]({{site.baseurl}}/assets/images/appinsightsapplicationid.png) - -``` -let AnalyticsQuery = -let Source = Json.Document(Web.Contents("https://api.loganalytics.io/v1/apps/" & <APPLICATION_ID> & "/query", -[Query=[#"query"="customEvents -| project -timestamp, -Name = name, -ConversationID=session_Id, -Channel=customDimensions.channelId, -Language = customDimensions.locale, -Text = customDimensions.text, -Question = customDimensions.question, -FoundInKnowledgeSource = customDimensions.articleFound, -UserAcceptedAnswer = customDimensions.userAcceptedAnswer, -KnowledgeItemsDiscarded = customDimensions.knowledgeItemsDiscarded, -FromName = customDimensions.fromName, -Score = customDimensions.score, -Sentiment = customDimensions.sentimentScore, -client_Type, -client_City, -client_StateOrProvince, -client_CountryOrRegion, -QnAResponse=customDimensions.answer, -QnAFeedbackScore=customDimensions.feedbackScore, -QnAConfidenceScore=customDimensions.confidenceScore, -QnAQuery=customDimensions.question -| where timestamp > ago(90d) -| order by timestamp desc -",#"x-ms-app"="AAPBI"],Timeout=#duration(0,0,4,0)])), -TypeMap = #table( -{ "AnalyticsTypes", "Type" }, -{ -{ "Double", Double.Type }, -{ "Int64", Int64.Type }, -{ "Int32", Int32.Type }, -{ "Int16", Int16.Type }, -{ "UInt64", Number.Type }, -{ "UInt32", Number.Type }, -{ "UInt16", Number.Type }, -{ "Byte", Byte.Type }, -{ "Single", Single.Type }, -{ "Decimal", Decimal.Type }, -{ "TimeSpan", Duration.Type }, -{ "datetime", DateTimeZone.Type }, -{ "string", Text.Type }, -{ "boolean", Logical.Type }, -{ "SByte", Logical.Type } -}), -DataTable = Source[tables]{0}, -Columns = Table.FromRecords(DataTable[columns]), -ColumnsWithType = Table.Join(Columns, {"type"}, TypeMap , {"AnalyticsTypes"}), -Rows = Table.FromRows(DataTable[rows], Columns[name]), -Table = Table.TransformColumnTypes(Rows, Table.ToList(ColumnsWithType, (c) => { c{0}, c{3} })) -in -Table -in - AnalyticsQuery -``` - -### Overall Usage - -![Example Report]({{site.baseurl}}/assets/images/powerbi-conversationanalytics-overall.png) - -### All Dialogs Overview - -![Example Report]({{site.baseurl}}/assets/images/powerbi-conversationanalytics-alldialogsoverview.png) - -### Dialog Overview - -![Example Report]({{site.baseurl}}/assets/images/powerbi-conversationanalytics-dialogoverview.png) - -### LUIS Intents - -![Example Report]({{site.baseurl}}/assets/images/powerbi-conversationanalytics-luisintents.png) - -### All Conversations Metrics - -![Example Report]({{site.baseurl}}/assets/images/powerbi-conversationanalytics-allconversationsmetrics.png) - -### Conversations Drill Down - -![Example Report]({{site.baseurl}}/assets/images/powerbi-conversationanalytics-conversationsdrilldown.png) - -### Transcript - -![Example Report]({{site.baseurl}}/assets/images/powerbi-conversationanalytics-transcript.png) - -### Demographics - -![Example Report]({{site.baseurl}}/assets/images/powerbi-conversationanalytics-demographics.png) - -### Word Cloud - -![Example Report]({{site.baseurl}}/assets/images/powerbi-conversationanalytics-wordcloud.png) - -### Sentiment - -![Example Report]({{site.baseurl}}/assets/images/powerbi-conversationanalytics-sentimentanalysis.png) - -### QnA Maker Insights - -![Example Report]({{site.baseurl}}/assets/images/powerbi-conversationanalytics-qnamakerinsights.png) diff --git a/docs/_docs/reference/samples/hospitalitysample.md deleted file mode 100644 index 2e1f462296..0000000000 ---
a/docs/_docs/reference/samples/hospitalitysample.md +++ /dev/null @@ -1,84 +0,0 @@ ---- -category: Reference -subcategory: Samples -title: Hospitality Assistant -description: Virtual Assistant sample made for a hospitality scenario. -order: 2 ---- - -# {{ page.title }} -{:.no_toc} - -## In this reference -{:.no_toc} - -* -{:toc} - -## Overview -The [Hospitality Sample Virtual Assistant]({{site.repo}}/tree/master/samples/assistants/HospitalitySample) is a prototype of an assistant that helps to conceptualize and demonstrate how a virtual assistant could be used in a hospitality-specific scenario. It also provides a starting point for those interested in creating an assistant customized for this scenario. - -This sample works off the basis that the assistant would be integrated into a hotel-room device and would help a hotel guest with anything they might usually go to the hotel concierge about. It also provides additional capabilities that might be useful for guests, such as getting the weather forecast or showing current news articles. - -The Hospitality Sample builds off of the [Virtual Assistant Template]({{site.baseurl}}/overview/virtualassistant) with the addition of a [QnA Maker](https://www.qnamaker.ai/) knowledge base for answering common hotel FAQs and [Adaptive Cards](https://adaptivecards.io/) customized for hospitality. It also connects 7 different skills, which are [Hospitality]({{site.baseurl}}/reference/skills/experimental/#hospitality-skill), [Event]({{site.baseurl}}/reference/skills/experimental/#event-skill), [Point of Interest]({{site.baseurl}}/reference/skills/pointofinterest), [Weather]({{site.baseurl}}/reference/skills/experimental/#weather-skill), [Bing Search]({{site.baseurl}}/reference/skills/experimental/#bing-search-skill), [News]({{site.baseurl}}/reference/skills/experimental/#news-skill), and [Restaurant Booking]({{site.baseurl}}/reference/skills/experimental/#restaurant-booking-skill). - -![Hospitality Sample Diagram]({{site.baseurl}}/assets/images/hospitalitysample-diagram.png) - -The majority of the skills connected to this sample are [experimental skills]({{site.baseurl}}/reference/skills/experimental), which means they are early prototypes of Skills and are likely to have rudimentary language models, limited language support and limited testing. These skills demonstrate a variety of skill concepts and provide great examples to get you started. - -## Sample Configuration -To configure this sample, follow the steps below: -1. Clone the [Hospitality Sample from our repository]({{site.repo}}/tree/master/samples/assistants/HospitalitySample). -2. Follow the [Create your Virtual Assistant tutorial]({{site.baseurl}}/tutorials/csharp/create-assistant/1_intro/) to deploy your assistant. Use the sample project you cloned instead of the Virtual Assistant template to include the hospitality customizations in this project. -3.
Clone the following skills from our repository: - - [Hospitality Skill]({{site.repo}}/tree/master/skills/src/csharp/experimental/hospitalityskill) - - [Event Skill]({{site.repo}}/tree/master/skills/src/csharp/experimental/eventskill) - - [Point of Interest Skill]({{site.repo}}/tree/master/skills/src/csharp/pointofinterestskill/pointofinterestskill) - - [Weather Skill]({{site.repo}}/tree/master/skills/src/csharp/experimental/weatherskill) - - [Bing Search Skill]({{site.repo}}/tree/master/skills/src/csharp/experimental/bingsearchskill/bingsearchskill) - - [News Skill]({{site.repo}}/tree/master/skills/src/csharp/experimental/newsskill) - - [Restaurant Booking Skill]({{site.repo}}/tree/master/skills/src/csharp/experimental/restaurantbooking) -4. [Deploy each one of these skills]({{site.baseurl}}/tutorials/csharp/create-skill/4_provision_your_azure_resources/) separately, using the deployment script included in the skill directory. -5. [Add each skill]({{site.baseurl}}/howto/skills/addingskills/) using the botskills connect CLI tool. - -## Supported Scenarios -This sample demonstrates the following scenarios: -- Answer hotel FAQs ([QnA Maker](https://www.qnamaker.ai/) knowledge base) - - *Where is the gym?* - - *What time is breakfast?* - - *Do you allow pets?* -- Guest reservation changes ([Hospitality Skill]({{site.baseurl}}/reference/skills/experimental/#hospitality-skill)) - - *I want to extend my stay by 2 nights* - - *Can I get a late check out time?* - - *Can you check me out now* -- Room services ([Hospitality Skill]({{site.baseurl}}/reference/skills/experimental/#hospitality-skill)) - - *I want to see a room service menu* - - *Can you get me 2 croissants and a yogurt parfait?* - - *Can you bring me a toothbrush and toothpaste?* -- Get local area information ([Event]({{site.baseurl}}/reference/skills/experimental/#event-skill) and [Point of Interest]({{site.baseurl}}/reference/skills/pointofinterest) skills) - - *What's happening nearby?* - - *Find me nearby coffee shops* -- Make a restaurant reservation ([Restaurant Booking Skill]({{site.baseurl}}/reference/skills/experimental/#restaurant-booking-skill)) - - *Make a dinner reservation for tonight* -- Weather forecast ([Weather Skill]({{site.baseurl}}/reference/skills/experimental/#weather-skill)) - - *What's the weather today?* -- Find news articles ([News Skill]({{site.baseurl}}/reference/skills/experimental/#news-skill)) - - *What's the latest news on surfing?* - - *What news is currently trending?* -- Search the web ([Bing Search Skill]({{site.baseurl}}/reference/skills/experimental/#bing-search-skill)) - - *Tell me about the jurassic park movie* - - *Who is Bill Gates?* - -For a more in-depth explanation of the scenarios supported by each skill, check out the [experimental skills documentation]({{site.baseurl}}/reference/skills/experimental) and [Point of Interest Skill documentation]({{site.baseurl}}/reference/skills/pointofinterest). - -## Transcripts -Review sample conversational flows for the Hospitality Sample Assistant by downloading the following transcripts and opening them with the [Bot Framework Emulator](https://aka.ms/botframework-emulator). For more flows of specific skills, see [skills transcripts]({{site.baseurl}}/reference/skills/transcripts).
- -**Hotel FAQs**: [Download]({{site.baseurl}}/assets/transcripts/hospitalitysample-faqs.transcript) - -**Reservation changes**: [Download]({{site.baseurl}}/assets/transcripts/hospitalitysample-reservationchanges.transcript) - -**Room services**: [Download]({{site.baseurl}}/assets/transcripts/hospitalitysample-roomservices.transcript) - -**Local information**: [Download]({{site.baseurl}}/assets/transcripts/hospitalitysample-localinfo.transcript) - diff --git a/docs/_docs/reference/samples/vaclient_android.md deleted file mode 100644 index 1770313480..0000000000 --- a/docs/_docs/reference/samples/vaclient_android.md +++ /dev/null @@ -1,74 +0,0 @@ ---- -category: Reference -subcategory: Samples -title: Virtual Assistant Client (Android) comprehensive feature set -description: Detailed information on the features of the Virtual Assistant Client on Android -order: 1 ---- - - -# {{ page.title }} -{:.no_toc} - -## In this reference -{:.no_toc} - -* -{:toc} - -## Activity UI for testing and demonstrating a bot on the Direct Line Speech channel - -* Interaction - * User may send requests to bot via voice or text - * Responses from the bot can render text responses and Adaptive Card attachments (single and carousel) - * Bot responses are audible (controlled by 'Media' volume bar) - * Volume rocker adjusts 'Media' volume bar without having to select it - * Adaptive Cards may be clickable to send a response to bot - * The listening mode adapts to a bot's input hints and user's input method - * User may start a new conversation with the bot - * User may use "Show Assistant Settings" as a shortcut to the default assist app menu - -* Appearance - * Newest response is rendered at the bottom of the conversation history (scrolled automatically) - * User may elect to show a threaded conversation (by default only bot responses shown) - * User may adjust dark/light color scheme - -## Settings UI to configure a bot endpoint and experience - -* A user can configure: - * Speech Service subscription key - * Speech Service subscription key region - * Direct Line Speech secret key - * User From.Id value - * Locale - * Chat History line-count - * Timezone - * Bot Speech Bubble Background Color - * User Speech Bubble Background Color - * Bot Text Color - * User Text Color - * GPS Sent on (read-only) - * Send GPS Location (resends a `VA.Location` event activity with GPS coordinates) - - -## Widgets for demonstrating a native chat experience - - * Users may add resizable widgets to their homescreen by long-pressing and scrolling to the Virtual Assistant Client app - 1. Microphone - * Trigger the service to listen to user requests - 1. User utterance (request) - * Echo the user's request in text format - 1. Bot response (response) - * Show the text response from the bot - does not show Adaptive Cards - - -## Always-on background service - -* Stores state client-side -* Interface between clients (widget and Activity UI) and service -* Interface with plug-in apps (via AIDL and broadcasts) -* Sends GPS location periodically -* Receives bot responses and plays audio without showing the Activity UI -* Opens default apps (navigation and music) as necessary to fulfill user's request - * Navigation: attempts to open Waze first. If unavailable, opens via Google Maps. - * Music: opens Spotify.
\ No newline at end of file diff --git a/docs/_docs/reference/skills/experimental.md deleted file mode 100644 index 194bc2fb11..0000000000 --- a/docs/_docs/reference/skills/experimental.md +++ /dev/null @@ -1,200 +0,0 @@ ---- -category: Reference -subcategory: Skills -title: Experimental Skills -description: News, Search, Reservation, Weather, Music, Events, and Hospitality. -order: 12 ---- - -# {{ page.title }} -{:.no_toc} - -## In this reference -{:.no_toc} - -* -{:toc} - -## Overview - -These experimental Bot Framework Skills are early prototypes to help bring skill concepts to life for demonstrations and proofs of concept, along with providing different examples to get you started. - -These skills by their very nature are not complete, and will likely have rudimentary language models, limited language support and limited testing; hence they are located in an experimental folder to ensure this is understood before you make use of them. - -### Skill Deployment - -The Experimental Skills require the following dependencies for end-to-end operation; these are created through an ARM script which you can modify as required. - -- Azure Web App -- Azure Storage Account (Transcripts) -- Azure Application Insights (Telemetry) -- Azure CosmosDb (State) -- Azure Cognitive Services - Language Understanding - -> Review the pricing and terms for the services and adjust to suit your scenario. - -**To deploy the experimental skills using the default configuration, follow the steps in this common [deployment documentation page]({{site.baseurl}}/tutorials/csharp/create-assistant/4_provision_your_azure_resources) from the folder where you have cloned the GitHub repo.** - -## Skills - -### Bing Search Skill - -The [Bing Search Skill]({{site.repo}}/tree/master/skills/src/csharp/experimental/bingsearchskill) provides a simple Skill that integrates with the [Bing Search Cognitive Service](https://azure.microsoft.com/en-us/services/cognitive-services/bing-web-search-api/). - -This skill has a very limited LUIS model (available in English, French, Italian, German, Spanish and Chinese) and demonstrates three simple scenarios: - -- Celebrity Information: *Who is Tom Cruise?* -- Q&A: *What is the gdp of switzerland* -- Movie Information: *Tell me about the jurassic park movie* - -![Search Example]({{site.baseurl}}/assets/images/skills-experimental-bingsearch.png) - -#### Configuration - -1. Get your own [Bing Search Cognitive Services Key](https://azure.microsoft.com/en-us/services/cognitive-services/bing-web-search-api/). -1. Get your own [Project Answer Search Key](https://labs.cognitive.microsoft.com/en-us/project-answer-search). -1. Provide these values in your `appsettings.json` file. - -``` -"BingSearchKey": "{YOUR_BING_SEARCH_COGNITIVE_SERVICES_KEY}", -"BingAnswerSearchKey": "{YOUR_PROJECT_ANSWER_SEARCH_KEY}" -``` - -### Event Skill - -The [Event Skill]({{site.repo}}/tree/master/skills/src/csharp/experimental/eventskill) provides a simple skill that integrates with [Eventbrite](https://www.eventbrite.com/platform/) to show information about events happening in the specified area. - -This skill currently supports one scenario to get local event information. - -![Event Example]({{site.baseurl}}/assets/images/skills-event-transcript.png) - -#### Configuration - -1. Get your own [Eventbrite API Key](https://www.eventbrite.com/platform/api-keys). -1. Provide this value in your `appsettings.json` file.
- -``` -"eventbriteKey": "YOUR_EVENTBRITE_API_KEY" -``` - -### Hospitality Skill - -The [Hospitality Skill]({{site.repo}}/tree/master/skills/src/csharp/experimental/hospitalityskill) demonstrates experiences that would be useful in a hospitality specific scenario, such as being able to check out of a hotel, ordering room service, and requesting hotel amenities. This skill does not integrate a hotel service at this time, and is instead simulated with static data for testing purposes. - -This skill demonstrates the following scenarios: -- Show reservation: *What is my current check out date?* -- Extend reservation: *Can I extend my stay?* -- Request late check-out: *I want a late check out time* -- Request amenities: *Can you bring me a toothbrush and toothpaste?* -- Room service: *I want to see a room service menu* -- Check out: *Can I check out now?* - -![Hospitality Example]({{site.baseurl}}/assets/images/skills-hospitality-transcript.png) - -The [Hospitality Sample VA]({{site.baseurl}}/reference/samples/hospitalitysample) demonstrates this skill and a number of other skills to demonstrate a more in-depth hospitality experience. - -### IT Service Management Skill - -The [IT Service Management skill](https://github.com/microsoft/AI/tree/master/skills/src/csharp/experimental/itsmskill) provides a basic skill that provides ticket and knowledge base related capabilities and supports ServiceNow. - -This skill demonstrates the following scenarios: -- Create a ticket: *Create a ticket for my broken laptop* -- Show ticket: *What's the status of my incident* -- Update Ticket: *Change ticket's urgency to high* -- Close a ticket: *Close my ticket* -- Find Knowledgebase item: *Search knowledge articles related to error 1234* - -#### Configuration - -To test this skill you will need to follow the ServiceNow configuration steps shown below: - -1. Create a ServiceNow instance in the [ServiceNow Developer Site](https://developer.servicenow.com/app.do#!/instance). -1. Update this configuration entry in your `appsettings.json` file with your Service Now instance URL: -`"serviceNowUrl": "{YOUR_SERVICENOW_INSTANCE_URL}` -1. Create a [scripted REST API](https://docs.servicenow.com/bundle/geneva-servicenow-platform/page/integrate/custom_web_services/task/t_CreateAScriptedRESTService.html) to get current user's sys_id and please raise an issue if simpler way is found - - In System Web Services/Scripted REST APIs, click New to create an API - - In API's Resources, click New to add a resource - - In the resource, select GET for HTTP method and input `(function process(/*RESTAPIRequest*/ request, /*RESTAPIResponse*/ response) { return gs.getUserID(); })(request, response);` in Script - - Update the serviceNowGetUserId of appsetting.json: `"serviceNowGetUserId": "YOUR_API_NAMESPACE/YOUR_API_ID"` -1. Register an Application and OAuth configuration by following [these instructions](https://docs.servicenow.com/bundle/london-platform-administration/page/administer/security/task/t_CreateEndpointforExternalClients.html#t_CreateEndpointforExternalClients). Keep the generated Client ID and Client Secret to be used in the following OAuth Connection step. - - Redirect URL is https://token.botframework.com/.auth/web/redirect -1. 
Add an OAuth Connection in the Settings pane of your Web App Bot named 'ServiceNow' using Service Provider 'Generic Oauth 2' - - Set Authorization URL to the following, replacing YOUR_INSTANCE with your instance name: https://YOUR_INSTANCE.service-now.com/oauth_auth.do - - Set Token URL and Refresh URL to the following, replacing YOUR_INSTANCE with your instance name: https://YOUR_INSTANCE.service-now.com/oauth_token.do - - No Scopes are needed - - Click Test Connection to verify the connection works as expected. - -To test this skill with your Virtual Assistant, one manual step is required over and above the usual skill connection steps. - -1. Add the OAuth Connection to your Virtual Assistant manually as per the step above. This connection type cannot be automatically configured as part of botskills. - -### Music Skill - -The [Music skill]({{site.repo}}/tree/master/skills/src/csharp/experimental/musicskill) integrates with [Spotify](https://developer.spotify.com/documentation/web-api/libraries/) to look up playlists and artists and open the Spotify app via URI. -This is dependent on the [SpotifyAPI-NET](https://github.com/JohnnyCrazy/SpotifyAPI-NET) wrapper for the Spotify Web API. - -#### Configuration - -1. Get your own client ID and secret by [creating a Spotify client](https://developer.spotify.com/dashboard/). -1. Provide these values in your `appsettings.json` file. - -``` - "spotifyClientId": "{YOUR_SPOTIFY_CLIENT_ID}", - "spotifyClientSecret": "{YOUR_SPOTIFY_CLIENT_SECRET}" -``` - -#### Event Activity integration - -This Skill supports an outgoing `OpenDefaultApp` Event Activity that provides a Spotify URI for chat clients to open on their own. - -``` -{ - "type":"event", - "name":"OpenDefaultApp", - "value":{ - "MusicUri":"{SPOTIFY_URI}" - } -} -``` - - -### News Skill - -The [News skill]({{site.repo}}/tree/master/skills/src/csharp/experimental/newsskill) provides a simple Skill that integrates with the Bing News Cognitive Service to demonstrate how a news experience can be integrated into a Virtual Assistant. - -Once deployed, create a [Bing News Cognitive Services Key](https://azure.microsoft.com/en-us/services/cognitive-services/bing-news-search-api/) and update the appropriate configuration within `appsettings.json`. - -This skill supports the following scenarios: -- Find articles: *Find me news about sports* -- Trending articles: *What news is trending now?* -- Show favorite topic: *Find news for me* - -![News Example]({{site.baseurl}}/assets/images/skills-news-transcript.png) - -#### Configuration - -1. Get your own [Bing News Cognitive Services Key](https://azure.microsoft.com/en-us/services/cognitive-services/bing-news-search-api/) -1. Provide this value in your `appsettings.json` file. - -``` -"BingNewsKey": "{YOUR_BING_NEWS_COGNITIVE_SERVICES_KEY}" -``` - -### Restaurant Booking Skill - -The [Restaurant Booking skill]({{site.repo}}/tree/master/skills/src/csharp/experimental/restaurantbooking) provides a simple restaurant booking experience guiding the user through booking a table, and leverages Adaptive Cards throughout to demonstrate how Speech, Text and UX can be combined for a compelling user experience. No integration with restaurant booking services exists at this time, so it is simulated with static data for testing purposes.
- -![Restaurant Example]({{site.baseurl}}/assets/images/skills-restaurant-transcript.png) - -### Weather Skill - -The [Weather skill]({{site.repo}}/tree/master/skills/src/csharp/experimental/weatherskill) provides a basic Skill that integrates with [AccuWeather](https://developer.accuweather.com) to demonstrate how a weather experience can be integrated into a Virtual Assistant. - -#### Configuration - -1. Get your own API key by following the instructions on [AccuWeather Getting Started](https://developer.accuweather.com/getting-started). -1. Provide this value in your `appsettings.json` file. - -``` -"WeatherApiKey": "{YOUR_ACCUWEATHER_API_KEY}" -``` diff --git a/docs/_docs/reference/skills/skillauthentication.md deleted file mode 100644 index db4c1f7e9b..0000000000 --- a/docs/_docs/reference/skills/skillauthentication.md +++ /dev/null @@ -1,89 +0,0 @@ ---- -category: Reference -subcategory: Skills -title: Skill Authentication -description: Details on skill authentication approach and flow. -order: 4 ---- - -# {{ page.title }} -{:.no_toc} - -## In this reference -{:.no_toc} - -* -{:toc} - -A Skill needs to be able to authenticate a request coming from another bot (Virtual Assistant). The Skill model requires two levels of Authentication: - -## JWT Authentication - -The Virtual Assistant needs to include an Authorization header in the request. - -This is needed as a Skill needs to verify that the request comes from a properly registered bot service, and that the request was intended for the skill. Because every bot service is a Microsoft app, we can leverage AAD to obtain a JWT token as well as to verify it. - -![Skill Authentication Flow]({{site.baseurl}}/assets/images/virtualassistant-skillauthentication.png) - -Between the Virtual Assistant and the skill bot, we'll use AAD as the authority to generate and validate tokens. The token will be a JWT token. The Virtual Assistant will use this information to request a JWT token: - 1. Microsoft app id - this will become the source appid claim in the token - 2. Microsoft app password - 3. Skill bot's Microsoft app id - this will become the audience claim in the token - -The JWT token will be a 'bearer' token so it'll be part of the Authorization header. - -When the skill bot receives a request, it looks at the Authorization header to retrieve the token. Then it will rely on AAD to decrypt and validate the JWT token. Right now the skill will only verify that the audience claim is the skill's Microsoft app id. If the audience claim verification passes, then the authentication succeeds and the request moves forward into the skill bot for further processing. - -By default, a skill that's created out of a Skill Template enables JWT authentication. - -On the Virtual Assistant side, we use the SkillDialog to dispatch requests to the skills. To enable the Virtual Assistant to obtain a proper JWT token to send a request to a skill, you need to have these lines of code when you create the SkillDialog instances: - -```csharp -var credentials = new MicrosoftAppCredentialsEx(settings.MicrosoftAppId, settings.MicrosoftAppPassword, skill.MSAappId); -skillDialogs.Add(new SkillDialog(skill, credentials, telemetryClient, userState, authDialog)); -``` - -The `MicrosoftAppCredentialsEx` class provided within the Microsoft.Bot.Builder.Skills package is the central place to manage the information needed for the skill to obtain the AAD token.
Once you pass this into the SkillDialog, the SkillDialog will be able to use it to properly retrieve the AAD token. This is the default behavior if you create a Virtual Assistant out of the Virtual Assistant Template VSIX. - -## Whitelist Authentication - -After the JWT token is verified, the Skill bot needs to verify that the request comes from a bot that's previously been included in a whitelist. A Skill needs to have knowledge of its callers and give permissions to each bot explicitly, instead of allowing any bot to call the Skill. This level of authorization is enabled by default as well, making sure a Skill is well protected from public access. Developers need to do the following to implement the Whitelist mechanism: - -Declare a class `WhiteListAuthProvider` in the bot service project that implements the interface `IWhitelistAuthenticationProvider`: - -```csharp -public HashSet<string> AppsWhitelist -{ - get - { - return new HashSet<string> - { - // Add the Microsoft App IDs of the Virtual Assistants allowed to call this Skill - }; - } -} -``` - -By adding the Microsoft App id of the Virtual Assistant that's calling the Skill into the property AppsWhitelist, you are allowing the bot that's associated with that app id to invoke your skill. - -In `Startup.cs`, register a singleton of the interface with this class: - -```csharp -// Register WhiteListAuthProvider -services.AddSingleton<IWhitelistAuthenticationProvider, WhiteListAuthProvider>(); -``` - -In `BotController.cs` (derived from the `SkillController`), add the class as a new parameter to the constructor: - -```csharp -public BotController( - IBot bot, - BotSettingsBase botSettings, - IBotFrameworkHttpAdapter botFrameworkHttpAdapter, - SkillWebSocketAdapter skillWebSocketAdapter, - IWhitelistAuthenticationProvider whitelistAuthenticationProvider) - : base(bot, botSettings, botFrameworkHttpAdapter, skillWebSocketAdapter, whitelistAuthenticationProvider) -{} -``` - -With all these changes in place, you're enabling your Skill to allow bots to invoke it as long as the bot's Microsoft App id is included in the whitelist. diff --git a/docs/_docs/reference/skills/skilltokenflow.md deleted file mode 100644 index 9ed34b3405..0000000000 --- a/docs/_docs/reference/skills/skilltokenflow.md +++ /dev/null @@ -1,28 +0,0 @@ ---- -category: Reference -subcategory: Skills -title: Skill Token Flow -description: Details on how skills authenticate users. -order: 5 ---- - -# {{ page.title }} - -To ensure a standardized user experience across all Skills, the parent Bot is responsible for managing token requests. This helps to ensure that tokens common across multiple skills can be shared and the user isn't prompted to authenticate for every skill. - -The approach mirrors the eventing approach used by the Azure Bot Service today. - -When a token isn't already cached (e.g. first time use) the following flow occurs: - -- When a Skill requests a Token it asks the calling Bot for a token using an event called `tokens/request` -- The Skill then starts an EventPrompt waiting for an Event to be returned called `tokens/response` - -- The Bot then makes use of the native OAuthPrompt capabilities to surface a prompt to the user -- When a Token is retrieved it's returned to the Bot within a `tokens/response` message which is used to complete the OAuthPrompt and store the token securely. -- This same event is then forwarded to the Skill through the existing SkillDialog on the stack and provides the Token for the Skill to use (see the sketch below).
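To make the flow above concrete, the following is a minimal sketch, not the shipped `SkillDialog`/`OAuthPrompt` implementation, of how a skill might raise the `tokens/request` event and read the matching `tokens/response`; the class and method names are illustrative assumptions, only the event names and the `TokenResponse` payload come from the protocol described above:

```csharp
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Bot.Builder;
using Microsoft.Bot.Schema;
using Newtonsoft.Json.Linq;

public static class TokenFlowSketch
{
    // Ask the calling Bot (Virtual Assistant) for a token.
    public static async Task RequestTokenAsync(ITurnContext turnContext, CancellationToken cancellationToken)
    {
        var requestEvent = turnContext.Activity.CreateReply();
        requestEvent.Type = ActivityTypes.Event;
        requestEvent.Name = "tokens/request";

        // The Skill would now wait (e.g. via an EventPrompt) for the "tokens/response" event.
        await turnContext.SendActivityAsync(requestEvent, cancellationToken);
    }

    // Read the token returned by the Bot in the "tokens/response" event.
    public static TokenResponse ReadTokenResponse(Activity activity)
    {
        if (activity.Type == ActivityTypes.Event && activity.Name == "tokens/response")
        {
            // Value carries the TokenResponse issued via the Azure Bot Service.
            return (activity.Value as JObject)?.ToObject<TokenResponse>();
        }

        return null;
    }
}
```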
![Initial Authentication Flow for Skills]({{site.baseurl}}/assets/images/virtualassistant-SkillAuthInitialFlow.png) - -Subsequent activations benefit from the cache provided by the Azure Bot Service, enabling silent retrieval of a token. - -![Subsequent Authentication Flow for Skills]({{site.baseurl}}/assets/images/virtualassistant-SkillAuthSubsequentFlow.png) \ No newline at end of file diff --git a/docs/_docs/reference/skills/transcripts.md deleted file mode 100644 index c7b3d7f26b..0000000000 --- a/docs/_docs/reference/skills/transcripts.md +++ /dev/null @@ -1,26 +0,0 @@ ---- -category: Reference -subcategory: Skills -title: Transcripts -description: Bot Framework emulator transcripts for pre-built skills. -order: 13 ---- - -# {{ page.title }} -Review a sample conversational flow of each skill by downloading a transcript and opening it with the [Bot Framework Emulator](https://aka.ms/botframework-emulator). - -#### Automotive Skill: [Download]({{site.baseurl}}/assets/transcripts/skills-automotive.transcript) - -#### Calendar Skill: [Download]({{site.baseurl}}/assets/transcripts/skills-calendar.transcript) - -#### Email Skill: [Download]({{site.baseurl}}/assets/transcripts/skills-email.transcript) - -#### To Do Skill: [Download]({{site.baseurl}}/assets/transcripts/skills-todo.transcript) - -#### Point of Interest Skill: [Download]({{site.baseurl}}/assets/transcripts/skills-pointofinterest.transcript) - -#### News Skill: [Download]({{site.baseurl}}/assets/transcripts/skills-news.transcript) - -#### Reservation Skill: [Download]({{site.baseurl}}/assets/transcripts/skills-restaurantbooking.transcript) - -#### Hospitality Skill: [Download]({{site.baseurl}}/assets/transcripts/skills-hospitality.transcript) \ No newline at end of file diff --git a/docs/_docs/reference/virtual-assistant/architecture.md deleted file mode 100644 index 3783ea5117..0000000000 --- a/docs/_docs/reference/virtual-assistant/architecture.md +++ /dev/null @@ -1,72 +0,0 @@ ---- -category: Reference -subcategory: Virtual Assistant -title: Architecture -description: Detailed documentation covering what the template provides and how it works -order: 1 ---- - -# {{ page.title }} -{:.no_toc} - -## In this reference -{:.no_toc} - -* -{:toc} - -## Intro -An architecture diagram of your Virtual Assistant created through the template is shown below along with a detailed explanation. - -![Virtual Assistant Architecture]({{site.baseurl}}/assets/images/virtualassistant-architecture.jpg) - -## Client Integration - -End users can make use of the Virtual Assistant through the supported [Azure Bot Service Channels](https://docs.microsoft.com/en-us/azure/bot-service/bot-service-manage-channels?view=azure-bot-service-4.0) including WebChat, or through the Direct Line API that provides the ability to integrate your assistant directly into a device, mobile app or any other client experience. - -Device integration requires creation of a lightweight host app which runs on the device. We have successfully built native applications across multiple embedded platforms, including HTML5 applications. - -The host app is responsible for the following capabilities. These can of course be extended depending on the device capabilities.
- Opening and closing the microphone as indicated through the InputHint on messages returned by the Assistant - - Audio playback of responses created by the Text-to-Speech service - - Rendering of Adaptive Cards on the device through a broad range of renderers supplied with the Adaptive Cards SDK - - Processing events received from the Assistant, often to perform on-device operations (e.g. change navigation destination) - - Accessing the on-device secret store to store and retrieve a token for communication with the assistant - - Integration with the Unified Speech SDK where on-device speech capabilities are required - - Interface to the Direct-Line REST API or SDKs - - Authenticating the end user of the device and providing a unique userId to the Assistant. Microsoft has capabilities to help with this if needed. - -## Assistant Middleware - -The Assistant makes use of a number of Middleware Components to process incoming messages: - - Telemetry Middleware leverages Application Insights to store telemetry for incoming messages, LUIS evaluation and QnA activities. PowerBI can then use this data to surface conversational insights. - - Event Processing Middleware processes events sent by the device - - Content Moderator Middleware is an optional component that uses the Content Moderator Cognitive Service to detect inappropriate / PII content - -## Advanced Conversational Analytics - -The Assistant is configured to collect telemetry into Application Insights. This can be imported into a PowerBI dashboard to view [advanced conversational analytics](https://aka.ms/botPowerBiTemplate). - -## Dialogs - -Dialogs represent conversational topics that the Assistant can handle; the template provides a `MainDialog`, `CancelDialog`, and example `EscalateDialog` and `OnboardingDialog` dialogs. - -## Authentication - -The Assistant and associated Skills often need access to end-user authentication tokens in order to perform operations on behalf of the user. OAuth authentication providers are supported by the Azure Bot Service and provide the ability for you to configure providers such as Active Directory (for Office 365), Facebook or your own. - -Authentication connections are created on the Azure Bot Service and the Assistant makes use of these to initiate an authentication request (generating an OAuth signin card) or retrieve a token from the Azure Bot Service provided secure token store. - -Skills can request Authentication tokens for a given user when they are activated; this request is passed as an event to the Assistant, which then uses the specific Authentication connection to surface an authentication request to the user if a token isn't found in the secure store. More detail on this is available [here]({{site.baseurl}}/reference/skills/skilltokenflow). - -## Linked Accounts - -Linked Accounts is a supporting web application that demonstrates how a user can link their Assistant to their digital properties (e.g. Office 365, Google, etc.) on a companion device (mobile phone or website). This would be done as part of the on-boarding process and avoids authentication prompts during voice scenarios. - -This integrates with the Authentication capability detailed above and provides a mechanism for a user to unlink all accounts, which can be used as part of a device *forget me* feature. - -## Edge Enablement - -Many assistant scenarios require cloud connectivity to access down-stream APIs or data sources (e.g. Office 365, Navigation data, Music Services, etc.).
There is, however, a class of assistant scenarios, especially those running on devices that may have periods of poor connectivity, where pushing Speech, Language Processing and Dialog management onto the Edge (device) is needed. - -We have a number of options to address this depending on the platform and are working with initial customers to deliver this capability. \ No newline at end of file diff --git a/docs/_docs/reference/virtual-assistant/managemodels.md deleted file mode 100644 index 7b3cb8bad1..0000000000 --- a/docs/_docs/reference/virtual-assistant/managemodels.md +++ /dev/null @@ -1,80 +0,0 @@ ---- -category: Reference -subcategory: Virtual Assistant -title: Managing cognitive models across environments -description: Guidance on how to manage cognitive models across environments in a team -order: 1 ---- - -# {{ page.title }} -{:.no_toc} - -## In this reference -{:.no_toc} - -* -{:toc} - -### Intro - -When trying to develop language models in a distributed team, managing conflicts can be difficult. Refer to the following guidance for some common scenarios when managing cognitive models for a team. - -### I want to protect my production environment against conflicting changes made by multiple editors. -It is recommended that, for projects being worked on by multiple developers, you protect your production cognitive models by only deploying changes through a build pipeline. This pipeline should run the various scripts/commands needed to update your LUIS models, QnA Maker knowledgebases, and Dispatch model automatically based on your source control. Individual developers should make their changes in their own versions of the models, and push their changes into source control when they are ready to merge. - -![]({{site.baseurl}}/assets/images/model_management_flow.png) - -### I want to test changes to my LUIS models and QnA Maker knowledgebases in the portal. - -When you want to test changes to your LUIS models and QnA Maker knowledgebases in the portal, it is recommended that you deploy your own personal versions to develop with, and do not make changes directly in the production apps, to prevent conflicts with the other developers. After you have made all the changes you want in the portal, follow these steps to share your changes with your team: - -1. Run the following command from your project folder: - - ``` - .\Deployment\Scripts\update_cognitive_models.ps1 -RemoteToLocal - ``` - - > This script downloads your modified LUIS models in the .lu schema so they can be published to production by your build pipeline. If you are running this script from a Virtual Assistant project, it also runs `dispatch refresh` and `luisgen` to update your Dispatch model and DispatchLuis.cs files. - -2. Check in your updated .lu files to source control. - > Your changes should go through a peer review to validate there will be no conflicts. You can also share your LUIS app and/or transcripts of the bot conversation with your changes to help in this conversation. - -3. Run your build pipeline to deploy your updated files to your production environment. - > This pipeline should update your LUIS models, QnA Maker knowledgebases, and Dispatch model as needed. - - -### I've changed my skill LUIS model. What next? - -If you have added or removed an intent from your skill LUIS model, follow these steps to update your skill manifest: - -1. Open the manifestTemplate.json file. -2.
If you have added new intents, either add them to an existing `action` or add a new action for the intent like this: - - ```json - "actions": [ - { - "id": "toDoSkill_addToDo", - "definition": { - "description": "Add a task", - "slots": [], - "triggers": { - "utteranceSources": [ - { - "locale": "en", - "source": [ "todo#AddToDo" ] - } - ] - } - } - }, - ``` - -Once you have updated your manifest, follow these steps to update any Virtual Assistants that are using your skill: - -1. Run the following command from your project directory: - - ``` - botskills update --cs - ``` - - > This command updates your skills.json file with the latest manifest definitions for each connected skill, and runs dispatch refresh to update your dispatch model. diff --git a/docs/_docs/reference/virtual-assistant/parentchildpattern.md deleted file mode 100644 index 41d66bcdda --- /dev/null... --- a/docs/_docs/reference/virtual-assistant/parentchildpattern.md +++ /dev/null @@ -1,78 +0,0 @@ ---- -category: Reference -subcategory: Virtual Assistant -title: Virtual Assistant and Skills Pattern -description: How the Virtual Assistant and Skills can enable a parent-child pattern throughout your organization. -order: 7 ---- - -# {{ page.title }} -{:.no_toc} - -## In this reference -{:.no_toc} - -* -{:toc} -## Overview - -As you develop your Virtual Assistant, you will find that the ability to manage individual domains of a conversation (Skills) and aggregate them into a singular *parent* Assistant becomes attractive for a number of reasons: - -* **End user fatigue**: As customer adoption of this technology grows, new Bots appear owned by disparate teams, increasing the cognitive load on your end users. It is up to them to remember the right Bot to use for a given function, and they may not discover the full breadth of Bots available. -* **Monolithic Architecture**: As a Bot's complexity increases, it becomes unsustainable to house everything within a single project. -* **Centralized changes**: On the cognitive side, changes to language models, QnA knowledge bases, and dialogs are usually performed by a central team. This quickly becomes a bottleneck across an organization and highlights change-management issues over time. - -## Parent-Child pattern - -Adopting a Parent-Child pattern enables you to address the above issues and provides the following benefits: - - -* Establish a front-facing Assistant experience that your users grow familiar with. This Assistant identifies the intent best suited for a given utterance and hands off processing to a remote-hosted Skill. -* Enable different teams to own their own capabilities, packaged up in a Skill which is added to the **parent** Assistant. -* Mix programming languages between your Assistant and Skills; for example, a C# Assistant could call a TypeScript Skill and vice versa. -* Leverage Skills from third parties, including Microsoft, to quickly extend your Assistant's capabilities. - -### Example: Enterprise Assistant - -In the Enterprise Assistant example shown below, an enterprise customer establishes their global Assistant's brand and personality that all end users interact with across a broad range of [Bot Framework Channels](https://docs.microsoft.com/en-us/azure/bot-service/bot-service-manage-channels?view=azure-bot-service-4.0).
- -![Enterprise Assistant Example]({{site.baseurl}}/assets/images/parentchildpattern-enterpriseassistant.png) - -The Enterprise Assistant itself provides basic conversational capabilities and QnA and surfaces capabilities from separate HR and IT Bots, along with integrating a Calendar Skill capability. Due to the nature of handing over control to skills, it's important to ensure that the user can interrupt a conversation with a Skill to *escape*. For example, saying `cancel` would enable the user to stop an interaction with a Skill and go back to interacting with the parent-level assistant. A user may also wish to seek help or escalate to a human. - -## Key Concepts - -The following concepts are key to an effective Skill architecture. These are described at a generic level before moving into details of how this is solved as part of the Virtual Assistant. - -### Dispatching - -Taking a natural language question from a user (e.g. `What meetings do I have today`) and identifying which (if any) conversational component to hand the question to is the primary function of the Dispatching capability. - -The Dispatcher capability requires knowledge of training data for downstream Skills and Q&A in order to reason over all components and make an informed decision on which component to hand control to. A key requirement is the ability to provide scoring comparison across multiple dependencies, which is not otherwise possible as there is no common baseline. - -### Orchestrator - -Once a downstream conversational component has been identified, the triggering question is passed across and a conversation with the downstream Skill is established through an Orchestration capability. - -Follow-up questions from the user are routed to the Skill until the Skill indicates it is complete, at which point it hands back control. - -The Orchestrator is also responsible for exchanging appropriate Context from the Assistant to the Skill and vice-versa. For example, if the Assistant is already aware of the user's location this can be passed to the downstream component, removing the need for the user to be prompted again. - -Conversely, a downstream component can provide information for the Assistant to store as part of its context for use in subsequent interactions. - -In addition, depending on the scenario the Orchestrator also handles authentication-token needs of downstream Skills, maintaining authentication at the parent level and enabling tokens to be shared across Skills if needed (e.g. Office 365 across Calendar, Email and To Do skills). - -## Bot Framework Skills and Virtual Assistant - -Bot Framework Skills are a new capability enabling Parent-Child / Assistant type experiences to be created. These Skills are almost identical to normal Bot Framework based bots and can be developed and tested in the same way, ensuring a consistent and familiar approach, and the same Activity protocol is maintained. - -The main change is a different invocation approach enabling an Assistant to invoke a Skill directly (via WebSockets) rather than going via the usual Bot Framework channel infrastructure. We provide a [Dispatcher](https://docs.microsoft.com/en-us/azure/bot-service/bot-builder-tutorial-dispatch?view=azure-bot-service-4.0&tabs=cs) capability, wired up as part of the Virtual Assistant, which takes in training data from LUIS models and QnA Maker to enable effective routing; other dispatching sources can be added through flat-file import.
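To make the multi-source routing and flat-file import above concrete, here is a minimal sketch using the botbuilder-tools Dispatch CLI; the IDs, names and keys are placeholders, and exact flags may vary between CLI versions:

```bash
# Aggregate each LUIS model and QnA Maker knowledgebase into a central dispatch model
dispatch add -t luis -i "<luis-app-id>" -n "ToDoSkill" -v "0.1" -k "<luis-authoring-key>"
dispatch add -t qna -i "<qna-kb-id>" -n "FAQ" -k "<qna-subscription-key>"

# Additional dispatch sources can be imported from a flat file
dispatch add -t file -n "CustomSource" -f ./customSource.tsv

# Rebuild the central dispatch LUIS model after any change
dispatch refresh
```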
- -The [Skill Architecture]({{site.baseurl}}/reference/skills/architecture) documentation covers the role of the Dispatcher and SkillDialog in more detail. - -A Skill Template is provided to enable developers to quickly create Skills, and existing V4 BF SDK bots can be easily updated to enable them to be called as Skills. - -A supporting Skill command line tool enables Skills to be added to, removed from, and refreshed against a parent Bot with no code changes. This tool abstracts various steps including Dispatcher and Authentication configuration. - -The Virtual Assistant Template (C# and TypeScript) provides out of the box Skill support including Dispatcher configuration. With no additional code changes you can add Skills, and top-level intents such as cancellation are provided for you. - diff --git a/docs/_docs/reference/virtual-assistant/templateoutline.md b/docs/_docs/reference/virtual-assistant/templateoutline.md deleted file mode 100644 index 76473a0127..0000000000 --- a/docs/_docs/reference/virtual-assistant/templateoutline.md +++ /dev/null @@ -1,89 +0,0 @@ ---- -category: Reference -subcategory: Virtual Assistant -title: Understanding the Template -description: An outline of what the Virtual Assistant template provides -order: 2 ---- - -# {{ page.title }} -{:.no_toc} - -## In this reference -{:.no_toc} - -* -{:toc} - -## Intro - -The Virtual Assistant Template brings together a number of best practices we've identified through the building of conversational experiences and automates integration of components that we've found to be highly beneficial to Bot Framework developers. This section covers some background to key decisions to help explain why the template works the way it does. - -## Greeting Card - -A key issue with many conversational experiences is end-users not knowing how to get started, leading to general questions that the Bot may not be best placed to answer. First impressions matter! An introduction card offers an opportunity to introduce the Bot's capabilities to an end user and suggests a few initial questions the user can use to get started. It's also a great opportunity to surface the personality of your Bot. - -A simple introduction card is provided as standard, which you can adapt as needed; a returning user card is shown on subsequent interactions when a user has completed the onboarding dialog (triggered by the Get Started button on the Introduction card). - -![Intro Card Example]({{site.baseurl}}/assets/images/vatemplateintrocard.png) - -## Language Understanding - -### Basic Language Understanding (LUIS) intents - -Every Bot should handle a base level of conversational language understanding. Greetings, for example, are a basic thing every Bot should handle with ease. Typically, developers need to create these base intents and provide initial training data to get started. The Virtual Assistant template provides example LU files to get you started, avoids every project having to create these each time, and ensures a base level of capability out of the box. - -The LU files provide the following intents across English, Chinese, French, Italian, German and Spanish. - -> Cancel, Confirm, Escalate, FinishTask, GoBack, Help, Reject, Repeat, SelectAny, SelectItem, SelectNone, ShowNext, ShowPrevious, StartOver, Stop - -The [LU](https://github.com/Microsoft/botbuilder-tools/blob/master/packages/Ludown/docs/lu-file-format.md) format is similar to Markdown, enabling easy modification and source control.
The [LuDown](https://github.com/Microsoft/botbuilder-tools/tree/master/packages/Ludown) tool is then used to convert .LU files into LUIS models, which can then be published to your LUIS subscription either through the portal or the associated [LUIS](https://github.com/Microsoft/botbuilder-tools/tree/master/packages/LUIS) CLI (command line) tool. - -### Dispatch Model - -A key design pattern used to good effect in the first wave of conversational experiences was to leverage Language Understanding (LUIS) and QnA Maker. LUIS would be trained with tasks that your Bot could do for an end user and QnA Maker would be trained with more general knowledge. - -All incoming utterances (questions) would be routed to LUIS for analysis. If the intent of a given utterance was not identified, it was marked as a None intent. QnA Maker was then used to try to find an answer for the end-user. - -Whilst this pattern worked well, there were two key scenarios where problems could be experienced. - -- If utterances in the LUIS model and QnA Maker overlapped, sometimes only slightly, this could lead to strange behavior where LUIS may try to process a question when it should have been directed to QnA Maker. -- When there were two or more LUIS models a Bot would have to invoke each one and perform some form of intent evaluation comparison to identify where to send a given utterance. As there is no common baseline, score comparison across models didn't work effectively, leading to a poor user experience. - -The [Dispatch model](https://docs.microsoft.com/en-us/azure/bot-service/bot-builder-tutorial-dispatch?view=azure-bot-service-4.0&tabs=csaddref%2Ccsbotconfig) provides an elegant solution to this by extracting utterances from each configured LUIS model and questions from QnA Maker and creating a central dispatch LUIS model. - -This enables a Bot to quickly identify which LUIS model or component should handle a given utterance and ensures QnA Maker data is considered at the top level of intent processing, not just the None intent as before. - -The Dispatch tool also enables evaluation, which will highlight confusion and overlap across LUIS models and QnA Maker knowledgebases, surfacing issues before deployment. - -The Dispatch model is used at the core of each project created using the template. It's referenced within the `MainDialog` class to identify whether the target is a LUIS model or QnA. In the case of LUIS, the secondary LUIS model is invoked, returning the intent and entities as usual. Dispatcher is also used for interruption detection. - -![Dispatch Example]({{site.baseurl}}/assets/images/dispatchexample.png) - -## QnA Maker - -[QnA Maker](https://www.qnamaker.ai/) provides the ability for non-developers to curate general knowledge in the format of question and answer pairs. This knowledge can be imported from FAQ data sources and product manuals, or entered interactively within the QnA Maker portal. - -Two example QnA Maker models are provided in the [LU](https://github.com/Microsoft/botbuilder-tools/blob/master/packages/Ludown/docs/lu-file-format.md) file format within the QnA folder of CognitiveModels, one for FAQ and one for chit-chat. [LuDown](https://github.com/Microsoft/botbuilder-tools/tree/master/packages/Ludown) is then used as part of the deployment script to create a QnA Maker JSON file, which the [QnA Maker](https://github.com/Microsoft/botbuilder-tools/tree/master/packages/QnAMaker) CLI (command line) tool then uses to publish items to the QnA Maker knowledgebase.
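As an illustrative sketch of that pipeline (file and folder names are placeholders, and flags may differ between tool versions):

```bash
# Convert the chit-chat .lu source into a QnA Maker knowledgebase definition (JSON)
ludown parse toqna --in Chitchat.lu -o DeploymentOutput

# Publish the generated definition to your QnA Maker subscription
qnamaker create kb --in DeploymentOutput/Chitchat.json
```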
- -![QnA ChitChat example]({{site.baseurl}}/assets/images/qnachitchatexample.png) - -### Content Moderator - -Content Moderator is an optional component which enables detection of potential profanity and helps check for personally identifiable information (PII). This can be helpful to integrate into Bots, enabling a Bot to react to profanity or to the user sharing PII. For example, a Bot can apologise and hand off to a human, or not store telemetry records, if PII is detected. - -A middleware component is provided that screens text and surfaces the result through a `TextModeratorResult` on the TurnState object. - -## Telemetry - -Providing insights into the user engagement of your Bot has proven to be highly valuable. This insight can help you understand the levels of user engagement, what features of the Bot they are using (intents), along with questions people are asking that the Bot isn't able to answer - highlighting gaps in the Bot's knowledge that could be addressed through new QnA Maker articles, for instance. - -Integration of Application Insights provides significant operational/technical insight out of the box, but this can also be used to capture specific Bot related events - messages sent and received along with LUIS and QnA Maker operations. - -Bot level telemetry is intrinsically linked to technical and operational telemetry, enabling you to inspect how a given user question was answered and vice versa. - -A middleware component combined with a wrapper class around the QnA Maker and LuisRecognizer SDK classes provides an elegant way to collect a consistent set of events. These consistent events can then be used by the Application Insights tooling along with tools like PowerBI. - -An example PowerBI dashboard is provided as part of the Bot Framework Solutions GitHub repo and works right out of the box with every Virtual Assistant template. See the [Analytics]({{site.baseurl}}/overview/analytics) section for more information. - -![Analytics Example]({{site.baseurl}}/assets/images/powerbi-conversationanalytics-luisintents.png) \ No newline at end of file diff --git a/docs/_docs/howto/skills/addingskills.md b/docs/_docs/skills/handbook/add-skills-to-a-virtual-assistant.md similarity index 90% rename from docs/_docs/howto/skills/addingskills.md rename to docs/_docs/skills/handbook/add-skills-to-a-virtual-assistant.md index 71091f575d..e2ba903e71 100644 --- a/docs/_docs/howto/skills/addingskills.md +++ b/docs/_docs/skills/handbook/add-skills-to-a-virtual-assistant.md @@ -1,19 +1,16 @@ --- -category: How To -subcategory: Skills -title: Add skills to a Virtual Assistant +category: Skills +subcategory: Handbook +title: Add a Skill to a Virtual Assistant description: Steps for adding a skill to an assistant -order: 1 +order: 6 +toc: true --- # {{ page.title }} {:.no_toc} +{{ page.description }} -## In this how-to -{:.no_toc} - -* -{:toc} ## Prerequisites - [Node.js](https://nodejs.org/) version 10.8 or higher @@ -39,7 +36,7 @@ The `botskills` CLI can be installed using the following npm command: npm install -g botskills ``` -> Your Virtual Assistant must have been deployed using the [deployment tutorial]({{site.baseurl}}/tutorials/csharp/create-assistant/4_provision_your_azure_resources) before using the `botskills` CLI as it relies on the Dispatch models being available and a deployed Bot for authentication connection information.
+> Your Virtual Assistant must have been deployed using the [deployment tutorial]({{site.baseurl}}/virtual-assistant/tutorials/create-assistant/csharp/4-provision-your-azure-resources) before using the `botskills` CLI as it relies on the Dispatch models being available and a deployed Bot for authentication connection information. ## Skill Deployment @@ -52,7 +49,7 @@ Run the following command to add each Skill to your Virtual Assistant. This assu The `--luisFolder` parameter can be used to point the Skill CLI at the source LU files for trigger utterances. For Skills provided within this repo these can be found in the `Deployment/Resources/LU` folder of each Skill. The CLI will automatically traverse locale folder hierarchies. This can be omitted for any of the skills we provide as the LU files are provided locally. Also, you have to specify the `--cs` (for C#) or `--ts` (for TypeScript) argument to indicate the coding language of your assistant, since each language uses a different folder structure that needs to be taken into consideration. ```bash -botskills connect --remoteManifest "http://.azurewebsites.net/api/skill/manifest" --luisFolder [path] --cs +botskills connect --botName YOUR_BOT_NAME --remoteManifest "http://.azurewebsites.net/api/skill/manifest" --luisFolder [path] --cs ``` See the [Skill CLI documentation]({{site.baseurl}}/reference/skills/botskills) for detailed usage documentation. @@ -96,7 +93,7 @@ Run the following command to update a Skill to your Virtual Assistant. This assu The `--luisFolder` parameter can be used to point the Skill CLI at the source LU files for trigger utterances. For Skills provided within this repo these can be found in the `Deployment/Resources/LU` folder of each Skill. The CLI will automatically traverse locale folder hierarchies. This can be omitted for any of the skills we provide as the LU files are provided locally. Also, you have to specify the `--cs` (for C#) or `--ts` (for TypeScript) argument to indicate the coding language of your assistant, since each language uses a different folder structure that needs to be taken into consideration. ```bash -botskills update --remoteManifest "http://.azurewebsites.net/api/skill/manifest" --luisFolder [path] --cs +botskills update --botName YOUR_BOT_NAME --remoteManifest "http://.azurewebsites.net/api/skill/manifest" --luisFolder [path] --cs ``` ## Refresh Connected Skills diff --git a/docs/_docs/reference/skills/architecture.md b/docs/_docs/skills/handbook/architecture.md similarity index 63% rename from docs/_docs/reference/skills/architecture.md rename to docs/_docs/skills/handbook/architecture.md index 593b9be421..9d955cfa70 100644 --- a/docs/_docs/reference/skills/architecture.md +++ b/docs/_docs/skills/handbook/architecture.md @@ -1,21 +1,15 @@ --- -category: Reference -subcategory: Skills +category: Skills +subcategory: Handbook title: Architecture -description: Under the covers of the skill implementation. +description: Under the covers of the skill implementation order: 1 +toc: true --- # {{ page.title }} {:.no_toc} - -## In this reference -{:.no_toc} - -* -{:toc} - -## Intro +{{ page.description }} Developers can compose conversational experiences by stitching together re-usable conversational capabilities, known as Skills. @@ -23,7 +17,7 @@ Within an Enterprise, this could be creating one parent bot bringing together mu Skills are themselves Bots, invoked remotely, and a Skill developer template (.NET, TS) is available to facilitate creation of new Skills.
- -A key design goal for Skills was to maintain the consistent Activity protocol and ensure the development experience was as close to any normal V4 SDK bot as possible. To that end, a Bot simply starts a `SkillDialog` which abstracts the skill invocation mechanics. +A key design goal for Skills was to maintain the consistent Activity protocol and ensure the development experience was as close to any normal V4 SDK bot as possible. To that end, a Bot simply starts a **SkillDialog** which abstracts the skill invocation mechanics. ## Invocation Flow @@ -42,21 +36,21 @@ When the user of a Virtual Assistant asks a question, the Dispatcher will proces > When testing a Virtual Assistant using the Emulator the SkillDialog surfaces Skill invocation and slot-filling telemetry. -On start-up of a Virtual Assistant, each registered Skill results in a SkillDialog instance being created which is associated with a `SkillManifest` instance containing details about the Skill including it's endpoint, actions and slots. +On start-up of a Virtual Assistant, each registered Skill results in a SkillDialog instance being created which is associated with a **SkillManifest** instance containing details about the Skill including its endpoint, actions and slots. -All communication between a Virtual Assistant and a Skill is performed through a custom `SkillDialog`, which is started when the dispatcher identifies a Skill that maps to a users utterances. Skills are invoked through a lightweight `SkillWebSocket` or `SkillHttp` adapter, maintaining the standard Bot communication protocol and ensuring Skills can be developed using the standard Bot Framework toolkit. +All communication between a Virtual Assistant and a Skill is performed through a custom **SkillDialog**, which is started when the dispatcher identifies a Skill that maps to a user's utterances. Skills are invoked through a lightweight **SkillWebSocket** or **SkillHttp** adapter, maintaining the standard Bot communication protocol and ensuring Skills can be developed using the standard Bot Framework toolkit. -The `SkillManifest` provides the endpoint for the SkillDialog to communicate with along with action and slot information. Slots are optional and a way to pass parameters to a Skill. +The **SkillManifest** provides the endpoint for the SkillDialog to communicate with, along with action and slot information. Slots are optional and a way to pass parameters to a Skill. -When a Skill wants to terminate an ongoing dialog, it sends back an Activity with `Handoff` type to signal the completion of the current dialog. +When a Skill wants to terminate an ongoing dialog, it sends back an Activity with **Handoff** type to signal the completion of the current dialog. -See the [SkillAuthentication]({{site.baseurl}}/reference/skills/skillauthentication) section for information on how Bot->Skill invocation is secured. +See the [SkillAuthentication]({{site.baseurl}}/skills/handbook/authentication/) section for information on how Bot->Skill invocation is secured. ## Skill Middleware -The `SkillMiddleware` is used by each Skill and is configured automatically if you use the Skill Template. +The **SkillMiddleware** is used by each Skill and is configured automatically if you use the Skill Template. -The middleware consumes the `skill/cancelallskilldialogs` event, when the Skill receives it it clears out the active dialog stack on that active Skill. This is useful in interruptions - i.e.
if a user asks to cancel, a Virtual Assistant can send this event to the Skill and cancel the active dialog. +The middleware consumes the **skill/cancelallskilldialogs** event; when the Skill receives it, it clears out the active dialog stack on that active Skill. This is useful in interruptions - i.e. if a user asks to cancel, a Virtual Assistant can send this event to the Skill and cancel the active dialog. ## Interrupting Active Skills diff --git a/docs/_docs/skills/handbook/authentication.md b/docs/_docs/skills/handbook/authentication.md new file mode 100644 index 0000000000..4265a1b143 --- /dev/null +++ b/docs/_docs/skills/handbook/authentication.md @@ -0,0 +1,142 @@ +--- +category: Skills +subcategory: Handbook +title: Authentication +description: A Skill needs to be able to authenticate the request coming from another bot (Virtual Assistant). The Skill model requires two levels of Authentication +order: 4 +toc: true +--- + +# {{ page.title }} +{:.no_toc} +{{ page.description }} + + +## JWT Authentication + +The Virtual Assistant needs to include an Authorization header in the request. + +This is needed as a Skill needs to verify that the request comes from a properly registered bot service, and that the request was intended for the skill. Because every bot service is a Microsoft app, we can leverage AAD to obtain a JWT token as well as to verify it. + +![Skill Authentication Flow]({{site.baseurl}}/assets/images/virtualassistant-skillauthentication.png) + +Between the Virtual Assistant and the skill bot, we'll use AAD as the authority to generate and validate tokens. The token will be a JWT token. The Virtual Assistant will use this information to request a JWT token: + 1. Microsoft app id - this will become the source appid claim in the token + 2. Microsoft app password + 3. Skill bot's Microsoft app id - this will become the audience claim in the token + +The JWT token will be a 'bearer' token, so it'll be part of the Authorization header. + +When the skill bot receives a request, it looks at the Authorization header to retrieve the token. Then it will rely on AAD to decrypt & validate the JWT token. Right now the skill will only verify if the audience claim is the skill's Microsoft app id. If the audience claim verification passes, then the authentication succeeds and the request moves forward into the skill bot for further processing. + +By default, a skill that's created out of a Skill Template enables JWT authentication. + +On the Virtual Assistant side, we use the SkillDialog to dispatch requests to the skills. To enable the Virtual Assistant to obtain a proper JWT token to send a request to a skill, you need to have these lines of code when you create the SkillDialog instances: + +```csharp +var credentials = new MicrosoftAppCredentialsEx(settings.MicrosoftAppId, settings.MicrosoftAppPassword, skill.MSAappId); +skillDialogs.Add(new SkillDialog(skill, credentials, telemetryClient, userState, authDialog)); +``` + +The **MicrosoftAppCredentialsEx** class provided within the Microsoft.Bot.Builder.Skills package is the central place to manage the information needed for the skill to obtain the AAD token. Once you pass this into the SkillDialog, the SkillDialog will be able to use it to properly retrieve the AAD token. This is the default behavior if you create a Virtual Assistant out of the Virtual Assistant Template VSIX. + +## Whitelist Authentication + +After the JWT token is verified, the Skill bot needs to verify if the request comes from a bot that's previously included in a whitelist.
A Skill needs to have knowledge of its callers and give permission to each caller explicitly, instead of to any bot that could call the Skill. This level of authorization is enabled by default as well, making sure a Skill is well protected from public access. Developers need to do the following to implement the Whitelist mechanism: + +Declare a class **WhiteListAuthProvider** in the bot service project that implements the interface **IWhitelistAuthenticationProvider**: + +```csharp +public HashSet<string> AppsWhitelist +{ + get + { + return new HashSet<string> + { + // add AppIds of Virtual Assistant here + }; + } +} +``` + +By adding the Microsoft App id of the Virtual Assistant that's calling the Skill into the property AppsWhitelist, you are allowing the bot that's associated with that app id to invoke your skill. + +In **Startup.cs**, register a singleton of the interface with this class + +```csharp +// Register WhiteListAuthProvider +services.AddSingleton<IWhitelistAuthenticationProvider, WhiteListAuthProvider>(); +``` + +In **BotController.cs** (derived from the **SkillController**), add the class as a new parameter to the constructor + +```csharp +public BotController( + IBot bot, + BotSettingsBase botSettings, + IBotFrameworkHttpAdapter botFrameworkHttpAdapter, + SkillWebSocketAdapter skillWebSocketAdapter, + IWhitelistAuthenticationProvider whitelistAuthenticationProvider) + : base(bot, botSettings, botFrameworkHttpAdapter, skillWebSocketAdapter, whitelistAuthenticationProvider) +{} +``` + +With all these changes in place, you're enabling your Skill to allow bots to invoke it as long as the bot's Microsoft App id is included in the whitelist. + +## Token Flow + +To ensure a standardized user experience across all Skills, the parent Bot is responsible for managing token requests. This helps to ensure that tokens common across multiple Skills can be shared and the user isn't prompted to authenticate for every Skill. +When a token isn't already cached (e.g. first time use) the following flow occurs: +- When a Skill requests a token, it asks the calling Bot for a token using an event called **tokens/request** +- The Skill starts an EventPrompt waiting for an Event to be returned called **tokens/response** +- The Bot makes use of an OAuthPrompt to surface a prompt to the user +- When a token is retrieved it's returned to the Bot within a **tokens/response** activity, which is used to complete the OAuthPrompt and store the token securely +- The same event is then forwarded to the Skill through the SkillDialog on the stack and provides a token for the Skill to use + +![Initial authentication flow for Skills]({{site.baseurl}}/assets/images/virtualassistant-SkillAuthInitialFlow.png) + +Subsequent activations benefit from the Azure Bot Service provided cache, which enables silent retrieval of a token. + +![Subsequent authentication flow for Skills]({{site.baseurl}}/assets/images/virtualassistant-SkillAuthSubsequentFlow.png) + +## Manual authentication + +If you wish to make use of the Calendar, Email and Task Skills standalone from the Virtual Assistant (local mode) you need to configure an Authentication Connection enabling use of your Assistant to authenticate against services such as Office 365 and securely store a token which can be retrieved by your assistant when a user asks a question such as *"What does my day look like today"* to then use against an API like Microsoft Graph.
+ +> These steps are not required if you plan to use the productivity skills as part of the Virtual Assistant; these steps are performed automatically when you add a Skill to your assistant. + +The [Add Authentication to your bot](https://docs.microsoft.com/en-us/azure/bot-service/bot-builder-authentication?view=azure-bot-service-4.0&tabs=aadv1%2Ccsharp%2Cbot-oauth) section in the Azure Bot Service documentation covers more detail on how to configure Authentication. However, in this scenario, the automated deployment step for the Skill has already created the **Azure AD v2 Application** for your Bot and you instead only need to follow these instructions: + +- Navigate to the Azure Portal, click **Azure Active Directory** and then **App Registrations** +- Find the Application that's been created for your Bot as part of the deployment. You can search for the application by name or ApplicationID as part of the experience, but note that search only works across applications currently shown and the one you need may be on a separate page. +- Click API permissions on the left-hand navigation + - Select Add Permission to show the permissions pane + - Select **Microsoft Graph** + - Select Delegated Permissions and then add each of the permissions required for the Productivity Skills you are adding (see the specific documentation page for the scopes required) + - Click Add Permissions at the bottom to apply the changes. + +Next you need to create the Authentication Connection for your Bot. Within the Azure Portal, find the **Web App Bot** resource created when you deployed your Bot and choose **Settings**. + +- Scroll down to the OAuth Connection settings section. +- Click **Add Setting** +- Type in the name of your Connection Setting - e.g. **Outlook** + - This name will be displayed to the user in an OAuth card; ensure that it is clear what this maps to +- Choose **Azure Active Directory v2** from the Service Provider drop-down +- Open the **appSettings.config** file for your Skill + - Copy/Paste the value of **microsoftAppId** into the ClientId setting + - Copy/Paste the value of **microsoftAppPassword** into the Client Secret setting + - Set Tenant Id to common + - Set scopes to match the ones provided in the earlier step. + +![Manual Auth Connection]({{site.baseurl}}/assets/images/manualauthconnection.png) + +Finally, open the **appSettings.config** file for your Skill and update the connection name to match the one provided in the previous step. + +``` +"oauthConnections": [ + { + "name": "Outlook", + "provider": "Azure Active Directory v2" + } + ] +``` \ No newline at end of file diff --git a/docs/_docs/reference/skills/bestpractices.md b/docs/_docs/skills/handbook/best-practices.md similarity index 91% rename from docs/_docs/reference/skills/bestpractices.md rename to docs/_docs/skills/handbook/best-practices.md index 7773f2f31a..0a2a6bee91 100644 --- a/docs/_docs/reference/skills/bestpractices.md +++ b/docs/_docs/skills/handbook/best-practices.md @@ -1,28 +1,26 @@ --- -category: Reference -subcategory: Skills -title: Best Practices -description: Best practices for developing your Bot Framework Skill.
-order: 2 +category: Skills +subcategory: Handbook +title: Best practices +description: Best practices when developing a Bot Framework Skill +order: 6 +toc: true --- # {{ page.title }} {:.no_toc} - -## In this reference -{:.no_toc} - -* -{:toc} +{{ page.description }} ## Language understanding ### Best practices +{:.no_toc} A key aspect of your custom Skill's success will be its ability to extract the right data out of a user's utterance. Follow the [Best practices for building a language understanding app](https://docs.microsoft.com/en-us/azure/cognitive-services/luis/luis-concept-best-practices) to help plan your own application. ### Use the General LUIS model for common utterances +{:.no_toc} If there is an utterance that you expect would be applied to multiple Skills, take advantage of the General LUIS model provided to manage this entity at the top-level. The following intents are currently available: @@ -38,18 +36,21 @@ If there is an utterance that you expect would be applied to multiple Skills, ta * Restart ### Update LUIS model -You can update you LUIS model in LUIS portal. Or modify the `.lu` file then convert it to `.json` and upload to LUIS portal manually, or use `update_cognitive_models.ps1` +{:.no_toc} + +You can update your LUIS model in the LUIS portal. Alternatively, modify the **.lu** file, convert it to **.json**, and upload it to the LUIS portal manually, or use **update_cognitive_models.ps1** -How to convert `.json` to `.lu`: +How to convert **.json** to **.lu**: ```bash ludown refresh -i YOUR_BOT_NAME.json ``` -How to convert `.lu` to `.json`: +How to convert **.lu** to **.json**: ```bash ludown parse toluis --in YOUR_BOT_NAME.lu ``` ### Test LUIS model +{:.no_toc} The unit tests use a mock LUIS model, so if you need to test your LUIS model you can implement a test tool with the [LUIS API](https://westus.dev.cognitive.microsoft.com/docs/services/5890b47c39e2bb17b84a55ff/operations/5890b47c39e2bb052c5b9c2f) to test it automatically. @@ -60,12 +61,14 @@ Read [Design and control conversation flow](https://docs.microsoft.com/en-us/azu ## Developing a dialog ### Take advantage of multimodal clients +{:.no_toc} Consider the multiple layers of communication a user may have with a Skill on the many popular communication services available on the [Azure Bot Service Channels](https://docs.microsoft.com/en-us/azure/bot-service/bot-service-manage-channels?view=azure-bot-service-4.0). #### Speech & Text +{:.no_toc} -Speech & Text responses are stored in `.json` files, and offer the ability to provide a variety of responses and set the input hint on each Activity. +Speech & Text responses are stored in **.json** files, and offer the ability to provide a variety of responses and set the input hint on each Activity. ```json { @@ -94,15 +97,17 @@ Speech & Text responses are stored in `.json` files, and offer the ability to pr } ``` -Vary your responses. By providing additional utterances to the `replies` array, your Skill will sound more natural and provide a dynamic conversation. +Vary your responses. By providing additional utterances to the **replies** array, your Skill will sound more natural and provide a dynamic conversation. Write how people speak. A skill should only provide relevant context when read aloud. Use visual aids to offer more data to a user. #### Common string +{:.no_toc} -Some common strings shouldn't save in response file. Suggest you to save them in `.resx` file. It is easy to be localized. +Some common strings shouldn't be saved in the response files; we suggest saving them in a **.resx** file.
This makes them easy to localize. #### Visual +{:.no_toc} Use [Adaptive Cards](https://adaptivecards.io/) to deliver rich cards as visual clues to a Skill's content. @@ -354,6 +359,7 @@ protected async Task GetOverviewMeetingListResponseAsync( ``` ### Use prompts to enable smart option matching +{:.no_toc} When a Skill needs to gather information from users, it should use the prompts available in the SDK library. These enable developers to validate responses with specific data types or create custom validation rules. @@ -412,6 +418,7 @@ protected PromptOptions GetPointOfInterestChoicePromptOptions(List ChoiceValidator(PromptValidatorContext p } ``` -If you need a more complex prompt you can implement it by inheriting `Microsoft.Bot.Builder.Dialogs.Prompt`. Or read [Create your own prompts to gather user input](https://docs.microsoft.com/en-us/azure/bot-service/bot-builder-primitive-prompts?view=azure-bot-service-4.0&tabs=csharp) to learn more about custom prompt. +If you need a more complex prompt you can implement it by inheriting **Microsoft.Bot.Builder.Dialogs.Prompt**, or read [Create your own prompts to gather user input](https://docs.microsoft.com/en-us/azure/bot-service/bot-builder-primitive-prompts?view=azure-bot-service-4.0&tabs=csharp) to learn more about custom prompts. ### Enable long running tasks +{:.no_toc} [Proactive scenarios]({{site.baseurl}}/howto/virtual-assistant/proactivemessaging) are a key part of ensuring a Skill Assistant can provide more intelligent and helpful capabilities to end users. This enables a Skill to have more intelligent interactions with a user, triggered by external events. ### Handle and log errors +{:.no_toc} -Use the `HandleDialogExceptions` method in [SkillDialogBase.cs]({{site.repo}}/blob/master/templates/Skill-Template/csharp/Sample/SkillSample/Dialogs/SkillDialogBase.cs) to send a trace back to the [Bot Framework Emulator](https://aka.ms/botframework-emulator), logging the exception, and sending a friendly error response to the user. +Use the **HandleDialogExceptions** method in [SkillDialogBase.cs]({{site.repo}}/blob/master/templates/Skill-Template/csharp/Sample/SkillSample/Dialogs/SkillDialogBase.cs) to send a trace back to the [Bot Framework Emulator](https://aka.ms/botframework-emulator), logging the exception, and sending a friendly error response to the user. ```csharp protected async Task HandleDialogExceptions(WaterfallStepContext sc, Exception ex) @@ -476,12 +485,14 @@ protected async Task HandleDialogExceptions(WaterfallStepContext sc, Exception e ``` ### Manage the states +{:.no_toc} Save your data in the appropriate state scope. Read [Save user and conversation data](https://docs.microsoft.com/en-us/azure/bot-service/bot-builder-howto-v4-state?view=azure-bot-service-4.0&tabs=csharp) to learn about user and conversation state. -For dialog state, you can save your data in `stepContext.State.Dialog[YOUR_DIALOG_STATE_KEY]`. +For dialog state, you can save your data in **stepContext.State.Dialog[YOUR_DIALOG_STATE_KEY]**. ### Manage the dialogs +{:.no_toc} Use dialog options to transfer data among dialogs. Read [Create advanced conversation flow using branches and loops](https://docs.microsoft.com/en-us/azure/bot-service/bot-builder-dialog-manage-complex-conversation-flow?view=azure-bot-service-4.0&tabs=csharp) to learn more about dialog management.
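To illustrate the dialog-options pattern described above, here is a minimal sketch; **SampleDialogOptions** and **SampleDialog** are hypothetical names, not part of the template:

```csharp
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Bot.Builder.Dialogs;

// Hypothetical options type used to hand data to a child dialog.
public class SampleDialogOptions
{
    public string UserTimeZone { get; set; }
}

public class SampleDialogSteps
{
    // Parent waterfall step: pass data to the child dialog via dialog options.
    private async Task<DialogTurnResult> LaunchChildStepAsync(WaterfallStepContext sc, CancellationToken cancellationToken)
    {
        var options = new SampleDialogOptions { UserTimeZone = "Pacific Standard Time" };
        return await sc.BeginDialogAsync("SampleDialog", options, cancellationToken);
    }

    // First step of the child dialog: read the options that were passed in.
    private async Task<DialogTurnResult> ReadOptionsStepAsync(WaterfallStepContext sc, CancellationToken cancellationToken)
    {
        var options = sc.Options as SampleDialogOptions;

        // Branch the conversation based on options?.UserTimeZone here.
        return await sc.NextAsync(cancellationToken: cancellationToken);
    }
}
```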
diff --git a/docs/_docs/reference/skills/responses.md b/docs/_docs/skills/handbook/language-generation.md similarity index 97% rename from docs/_docs/reference/skills/responses.md rename to docs/_docs/skills/handbook/language-generation.md index dbba5665d8..b23112049e 100644 --- a/docs/_docs/reference/skills/responses.md +++ b/docs/_docs/skills/handbook/language-generation.md @@ -1,25 +1,24 @@ --- -category: Reference -subcategory: Skills -title: Responses +category: Skills +subcategory: Handbook +title: Language generation description: Details on how responses work in skill projects. -order: 6 +order: 5 +toc: true --- # {{ page.title }} {:.no_toc} -## In this reference -{:.no_toc} - -* -{:toc} +{{ page.description }} ## Response Files To configure responses for your skill project, you'll need a `.json` file and a text template file (`.tt`) for each collection of responses. An example of each can be found in the Skill Template project in the Responses folder. ### Json Structure +{:.no_toc} + Responses can be stored in the following format to be used in your project. Each `.json` file should have a Build Action of **EmbeddedResource** to be loaded properly at runtime. ``` "templateName": { @@ -46,10 +45,14 @@ Responses can be stored in the following format to be used in your project. Each | suggestedActions (optional) | Sets simple suggestedActions on the response. | #### Localization +{:.no_toc} + To provide localized versions of responses, add additional `.json` files for each language. The file name format should be name.locale.json (e.g. `MyResponses.de.json`). ### Text Template +{:.no_toc} + The text template (`.tt`) file auto-generates a class representing the names of the responses in the `.json` file. This allows you to reference the response names more easily. The text template and `.json` files should have the same root name and be in the same folder (e.g. `MyResponses.tt` and `MyResponses.json`). ```csharp @@ -71,6 +74,7 @@ namespace SkillSample.Responses.Sample Adaptive cards can be added in `.json` format to the Content folder to be accessed throughout your project. Each `.json` file should have a Build Action of EmbeddedResource to be loaded properly at runtime. ### Card Object +{:.no_toc} Adaptive cards are referenced via the Card type. To create a Card object from the Adaptive Card *MyCard.json*, use the following code: @@ -79,6 +83,8 @@ new Card("MyCard") ``` ### ICardData Interface +{:.no_toc} + The ICardData interface is used to replace tokens in your Adaptive Card. This allows you to provide different values into different containers in your card. If *MyCard.json* contains the following Adaptive TextBlock: @@ -128,6 +134,7 @@ services.AddSingleton(sp => new ResponseManager( Once you have created your `.json` and `.tt` files and initialized your ResponseManager in `Startup.cs`, you can use the response manager to build your responses. The following methods and overloads are available to you. ### GetResponse() +{:.no_toc} - Get a simple response from a template with Text, Speak, InputHint, and SuggestedActions set. @@ -141,6 +148,7 @@ Once you have created your `.json` and `.tt` files and initialized your Response | tokens | StringDictionary of tokens to replace in the response. | ### GetCardResponse() +{:.no_toc} - Get a response with an Adaptive Card attachment.
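As a usage sketch tying these methods together inside a dialog step (**SampleResponses** is the placeholder name of a generated response class, and the exact overloads may vary by package version):

```csharp
using System.Collections.Specialized;

// Simple text/speak response with a {Name} token replaced at runtime
var tokens = new StringDictionary { { "Name", "Megan" } };
var response = _responseManager.GetResponse(SampleResponses.NamePrompt, tokens);
await stepContext.Context.SendActivityAsync(response);

// Response carrying the MyCard Adaptive Card as an attachment
var cardResponse = _responseManager.GetCardResponse(new Card("MyCard"));
await stepContext.Context.SendActivityAsync(cardResponse);
```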
diff --git a/docs/_docs/reference/skills/skillmanifest.md b/docs/_docs/skills/handbook/manifest.md similarity index 89% rename from docs/_docs/reference/skills/skillmanifest.md rename to docs/_docs/skills/handbook/manifest.md index c42a90de36..6d33eb573a 100644 --- a/docs/_docs/reference/skills/skillmanifest.md +++ b/docs/_docs/skills/handbook/manifest.md @@ -1,27 +1,20 @@ --- -category: Reference -subcategory: Skills -title: Skill Manifest -description: Overview of the skill manifest and its role with skill registration and invocation. -order: 3 +category: Skills +subcategory: Handbook +title: Manifest +description: Overview of the Skill manifest and its role with Skill registration and invocation. +order: 2 +toc: true --- # {{ page.title }} {:.no_toc} -## In this reference -{:.no_toc} - -* -{:toc} - -## Intro - The Skill manifest enables Skills to be self-describing in that they communicate the name and description of a Skill, its authentication requirements if appropriate, along with the discrete actions that it exposes. Each action provides utterances that the caller can use to identify when an utterance should be passed across to a skill along with slots (parameters) that it can accept for slot-filling if required. This manifest provides all of the metadata required for a calling Bot to know when to trigger invoking a skill and what actions it provides. The manifest is used by the Skill command-line tool to configure a Bot to make use of a Skill. -Each skill exposes a manifest endpoint enabling easy retrieval of a manifest, this can be found on the following URI path of your skill: `/api/skill/manifest` +Each skill exposes a manifest endpoint enabling easy retrieval of a manifest; this can be found on the following URI path of your skill: **/api/skill/manifest** ## Manifest structure @@ -37,6 +30,7 @@ A manifest is made up of the following structure: - Utterance ### Manifest Header +{:.no_toc} The manifest header provides high level information relating to your skill; the table below provides more information on each item. Note that items marked as automatic should not be provided in your manifest file as they are automatically provided at runtime as part of the manifest generation. @@ -54,13 +48,7 @@ The manifest header provides high level information relating to your skill, the "name": "Calendar Skill", "description": "The Calendar skill provides calendaring related capabilities and supports Office and Google calendars.", "iconUrl": "calendarSkill.png", - "msaAppId": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", + "msaAppId": "10101010-1010-1010-1010-101010101010", "endpoint": "http://localhost:3980/api/skill/messages", ``` ### Authentication Connections +{:.no_toc} -The `authenticationConnections` section communicates which authentication providers your skill supports, if any. For example, a Calendar skill might support both Outlook and Google enabling it to function with either provider depending on the users choice. The caller can then use this information to automatically configure the Authentication connection or as required enable a manual step to be performed. +The **authenticationConnections** section communicates which authentication providers your skill supports, if any. For example, a Calendar skill might support both Outlook and Google, enabling it to function with either provider depending on the user's choice. The caller can then use this information to automatically configure the Authentication connection or as required enable a manual step to be performed.
Parameter | Description | Required --------- | ----------- | -------- @@ -84,8 +79,9 @@ The `authenticationConnections` section communicates which authentication provid ``` ### Actions +{:.no_toc} -The `actions` section describes the discrete actions (features) that a given Skill supports. Each action can optionally provide slots (parameters) that the caller may choose to pass or alternatively omit and pass the utterance for the Skill to perform it's own slot filling. Slot filling on the client side can enable a Skill to be invoked and not require any further input or turns from the end user. +The **actions** section describes the discrete actions (features) that a given Skill supports. Each action can optionally provide slots (parameters) that the caller may choose to pass or alternatively omit and pass the utterance for the Skill to perform its own slot filling. Slot filling on the client side can enable a Skill to be invoked and not require any further input or turns from the end user. Parameter | Description | Required --------- | ----------- | -------- @@ -119,6 +115,7 @@ Parameter | Description | Required ``` ### Trigger +{:.no_toc} A given action can be triggered through different mechanisms or an utterance. Example triggering utterances must be provided by a skill to enable a caller to train a natural language dispatcher so it can identify utterances that should be routed to a skill. @@ -156,9 +153,10 @@ Utterances can also be provided in-line with the skill manifest as shown below. } ``` -Both `utteranceSources` and `utterances` support multiple-locales enabling you to express the locales your Skill supports. +Both **utteranceSources** and **utterances** support multiple locales, enabling you to express the locales your Skill supports. ### Example Skill Manifest +{:.no_toc} ```json { diff --git a/docs/_docs/reference/skills/automotive.md b/docs/_docs/skills/samples/automotive.md similarity index 95% rename from docs/_docs/reference/skills/automotive.md rename to docs/_docs/skills/samples/automotive.md index 43e0190045..c91510a7b1 100644 --- a/docs/_docs/reference/skills/automotive.md +++ b/docs/_docs/skills/samples/automotive.md @@ -1,21 +1,17 @@ --- -category: Reference -subcategory: Skills +category: Skills +subcategory: Samples +language: Experimental Skills title: Automotive Skill -description: Skill for enabling car automation scenarios. -order: 7 +description: Automotive Skill provides the ability to issue commands to a vehicle to control its settings. +order: 2 +toc: true --- # {{ page.title }} {:.no_toc} -## In this reference -{:.no_toc} - -* -{:toc} -## Overview The Automotive Skill is in preview and demonstrates the first capabilities to help enable Automotive scenarios. The skill focuses on Vehicle Settings, specifically Climate Control, Safety and Basic audio controls. Media, Tuner and Phone capabilities are expected in a future release. Vehicle Control is a complicated domain; whilst there is only a limited set of car controls for climate control, there are a myriad of ways that a human can describe a given setting. For example, *I'm feeling chilly*, *My feet are cold* and *It's cold here in the back* all relate to a decrease in temperature but to different parts of the car and perhaps even different fan settings. @@ -33,6 +29,7 @@ At this time, changes to vehicle settings are supported through the `VEHICLE_SET The following vehicle setting areas are supported at this time; example utterances are provided for guidance.
In cases where the utterance results in multiple potential settings or a value isn't provided, the skill will prompt for disambiguation. Confirmation will be sought from the user if a setting is configured to require confirmation, which is important for sensitive settings such as safety. ### Climate Control +{:.no_toc} - *Set temperature to 21 degrees* - *Defog my windshield* @@ -44,12 +41,14 @@ The following vehicle setting areas are supported at this time, example utteranc - *Change climate control* ### Safety +{:.no_toc} - *Turn lane assist off* - *Enable lane change alert* - *Set park assist to alert* ### Audio +{:.no_toc} - *Adjust the equalizer* - *Increase the bass* @@ -61,7 +60,7 @@ An example transcript file demonstrating the Skill in action can be found [here] ![ Automotive Skill Transcript Example]({{site.baseurl}}/assets/images/skills-auto-transcript.png) -## Language Understanding (LUIS) +## Language Understanding LUIS models for the Skill are provided in `.lu` file format as part of the Skill. These are currently available in English with other languages to follow. @@ -73,14 +72,10 @@ The following Top Level intents are available with the main `settings` LUIS mode In addition, there are two supporting LUIS models, `settings_name` and `settings_value`; these are used for disambiguation scenarios to clarify setting names and values where the initial utterance doesn't provide clear information. ## Configuration -### Deployment -Learn how to [provision your Azure resources]({{site.baseurl}}/tutorials/csharp/create-skill/4_provision_your_azure_resources/) in the Create a Skill tutorial. -### Authentication connection settings - -> No Authentication is required for this skill ### Customizing vehicle settings +{:.no_toc} Available vehicle settings are defined in a supporting metadata file which you can find in this location: `automotiveskill/Dialogs/VehicleSettings/Resources/available_settings.yaml`. @@ -112,6 +107,7 @@ values: ``` ### Deploying the Skill in local-mode +{:.no_toc} The Automotive skill is not added by default when deploying the Virtual Assistant, as this is a domain-specific skill. @@ -125,7 +121,7 @@ You will be prompted to provide the following parameters: - Name - A name for your bot and resource group. This must be **unique**. - Location - The Azure region for your services (e.g. westus) -- LUIS Authoring Key - Refer to [this documentation page]({{site.baseurl}}/tutorials/csharp/create-assistant/1_intro) for retrieving this key. +- LUIS Authoring Key - Refer to [this documentation page]({{site.baseurl}}/virtual-assistant/tutorials/create-assistant/csharp/1-intro) for retrieving this key. The MSBot tool will outline the deployment plan including location and SKU. Ensure you review before proceeding. @@ -163,6 +159,7 @@ msbot list --bot YOURBOTFILE.bot --secret YOUR_BOT_SECRET Once you have followed the deployment instructions above, open the provided `.bot` file with the Bot Framework Emulator. ### Adding the Skill to an existing Virtual Assistant deployment +{:.no_toc} Follow the instructions below to add the Automotive Skill to an existing Virtual Assistant deployment that you have. @@ -219,6 +216,7 @@ Follow the instructions below to add the Automotive Skill to an existing Virtual 7.
In order for Adaptive Cards to render images associated with the Automotive skill, you will need to take the image assets located in the `wwwroot/images` folder of the Automotive skill, host them in an HTTP location (potentially your Bot deployment), and set the base URI path in the skill configuration `ImageAssetLocation` property. If you skip this step, Adaptive Cards will not render with images correctly. ## Events + The Automotive Skill surfaces setting changes for testing purposes through an event returned to the client. This enables easy testing and simulation; all events are prefixed with `AutomotiveSkill.`. The below event is generated as a response to `I'm feeling cold` ```json diff --git a/docs/_docs/skills/samples/bing-search.md b/docs/_docs/skills/samples/bing-search.md new file mode 100644 index 0000000000..8becef8a26 --- /dev/null +++ b/docs/_docs/skills/samples/bing-search.md @@ -0,0 +1,34 @@ +--- +category: Skills +subcategory: Samples +language: Experimental Skills +title: Bing Search Skill +description: Bing Search Skill provides the ability to use Bing to provide answers to common search questions. +order: 3 +toc: true +--- + +# {{ page.title }} +{:.no_toc} + +The [Bing Search Skill]({{site.repo}}/tree/master/skills/src/csharp/experimental/bingsearchskill) provides a simple Skill that integrates with the [Bing Search Cognitive Service](https://azure.microsoft.com/en-us/services/cognitive-services/bing-web-search-api/). + +This skill has a very limited LUIS model (available in English, French, Italian, German, Spanish and Chinese) and demonstrates three simple scenarios: + +- Celebrity Information: *Who is Bill Gates?* +- Q&A: *What's the population of China?* +- Movie Information: *Tell me about the Jurassic Park movie* + +![Search Example]({{site.baseurl}}/assets/images/skills-experimental-bingsearch.png) + +## Configuration +{:.no_toc} + +1. Get your own [Bing Search Cognitive Services Key](https://azure.microsoft.com/en-us/services/cognitive-services/bing-web-search-api/). +1. Get your own [Project Answer Search Key](https://labs.cognitive.microsoft.com/en-us/project-answer-search). +1. Provide these values in your `appsettings.json` file. + +``` +"BingSearchKey": "{YOUR_BING_SEARCH_COGNITIVE_SERVICES_KEY}", +"BingAnswerSearchKey": "{YOUR_PROJECT_ANSWER_SEARCH_KEY}" +``` \ No newline at end of file diff --git a/docs/_docs/reference/skills/productivity-calendar.md b/docs/_docs/skills/samples/calendar.md similarity index 66% rename from docs/_docs/reference/skills/productivity-calendar.md rename to docs/_docs/skills/samples/calendar.md index 1c17d42c46..ce6f5904f7 100644 --- a/docs/_docs/reference/skills/productivity-calendar.md +++ b/docs/_docs/skills/samples/calendar.md @@ -1,23 +1,19 @@ --- -category: Reference -subcategory: Skills +category: Skills +subcategory: Samples title: Calendar Skill description: Add calendar capabilities to your Assistant. Powered by Microsoft Graph and Google. -order: 8 +order: 1 +toc: true --- # {{ page.title }} {:.no_toc} -## In this reference -{:.no_toc} - -* -{:toc} -## Overview -The Calendar Skill provides calendar related capabilities to a Virtual Assistant.
+{{ page.description }} ## Supported scenarios +{:.toc} The following scenarios are currently supported by the Skill: @@ -26,7 +22,7 @@ The following scenarios are currently supported by the Skill: - *Accept the event sent by Yolanda Wong* - Change an Event - *Bring forward my 4:00 appointment two hours* - - *Reschedule my interview on Monday to 1 pm* + - *Reschedule my interview to Tuesday at 1 PM* - Connect to a Meeting - *Connect me to conference call* - *Connect me with my 2 o'clock meeting* @@ -53,11 +49,14 @@ The following scenarios are currently supported by the Skill: - *What's the duration of my 4 PM meeting?* - Time Remaining - *How long until my next meeting?* - - *How many days are there until Thanksgiving?* + - *How many minutes free do I have before next scheduled appointment?* -## Language Understanding (LUIS) +## Language Understanding +{:.toc} -LUIS models for the Skill are provided in `.lu` file format as part of the Skill. Further languages are being prioritized. +LUIS models for the Skill are provided in **.lu** file format as part of the Skill. Further languages are being prioritized. |Supported Languages| |-| @@ -69,6 +68,7 @@ LUIS models for the Skill are provided in `.lu` file format as part of the Skill |Chinese (simplified)| ### Intents +{:.no_toc} |Name|Description| |-|-| @@ -92,6 +92,7 @@ LUIS models for the Skill are provided in `.lu` file format as part of the Skill |TimeRemaining| Matches queries to get the time until a meeting begins| ### Entities +{:.no_toc} |Name|Description| |-|-| @@ -99,6 +100,7 @@ LUIS models for the Skill are provided in `.lu` file format as part of the Skill |Duration| Simple entity| +|FromDate| Simple entity| |FromTime| Simple entity| |Location| Simple entity| |MeetingRoom| Simple entity| @@ -116,32 +118,49 @@ LUIS models for the Skill are provided in `.lu` file format as part of the Skill |ordinal| Prebuilt entity| ## Configuration +{:.toc} + ### Deployment -Learn how to [provision your Azure resources]({{site.baseurl}}/tutorials/csharp/create-skill/4_provision_your_azure_resources/) in the Create a Skill tutorial. +{:.no_toc} +Learn how to [provision your Azure resources]({{site.baseurl}}/skills/tutorials/create-skill/csharp/4-provision-your-azure-resources/) in the Create a Skill tutorial. ### Supported content providers +{:.no_toc} + > Office 365 and Outlook.com through the Microsoft Graph are supported, along with support for Google accounts. To use a Google account with the skill you need to follow these steps: -1. Create your calendar API credential in [Google developers console](https://console.developers.google.com). -2. Create an OAuth connection setting in your Web App Bot. - - Connection name: `googleapi` - - Service Provider: `Google` - - Client id and secret are generated in step 1 - - Scopes: `https://www.googleapis.com/auth/calendar https://www.googleapis.com/auth/contacts`. -3. Add the connection name, client id, secret and scopes in the `appsetting.json` file. +1. Enable Calendar API in [Google API library](https://console.developers.google.com/apis/library) +1. Create your calendar API credential in [Google developers console](https://console.developers.google.com/apis/credentials). + 1.
Choose "Create credential" - "OAuth Client ID" + 1. Choose "Web Application" + 1. Set Redirect URL as **https://token.botframework.com/.auth/web/redirect** +1. Create an OAuth connection setting in your Web App Bot. + - Connection name: **googleapi** + - Service Provider: **Google** + - Client id and secret are generated in step 2 + - Scopes: **https://www.googleapis.com/auth/calendar https://www.googleapis.com/auth/contacts**. +1. Add the connection name, client id, secret and scopes in the **appsetting.json** file. ### Authentication connection settings +{:.no_toc} + If you plan to use the skill as part of a Virtual Assistant the process of registering a skill with your Virtual Assistant will create the supporting authentication connection information automatically for your Virtual Assistant. This skill uses the following authentication scopes which are registered automatically: -- `User.ReadBasic.All` -- `Calendars.ReadWrite` -- `People.Read` -- `Contacts.Read` +- **User.ReadBasic.All** +- **Calendars.ReadWrite** +- **People.Read** +- **Contacts.Read** **However**, if you wish to use the Skill directly without using a Virtual Assistant please use the following steps to manually configure Authentication for the Calendar Skill. This is **not** required when using the Skill with a Virtual Assistant. -Follow the general instructions [here]({{site.baseurl}}/howto/skills/manualauthsteps) to configure this using the scopes shown above. +Follow the general instructions [here]({{site.baseurl}}/skills/handbook/authentication#manual-authentication) to configure this using the scopes shown above. ## Events -Learn how to use [events]({{site.baseurl}}/reference/virtual-assistant/events) to send backend data to a Skill, like a user's location or time zone. \ No newline at end of file +{:.toc} +Learn how to use [events]({{site.baseurl}}/virtual-assistant/handbook/events) to send backend data to a Skill, like a user's location or time zone. + +## Download a transcript +{:.toc} + +Download diff --git a/docs/_docs/reference/skills/productivity-email.md b/docs/_docs/skills/samples/email.md similarity index 59% rename from docs/_docs/reference/skills/productivity-email.md rename to docs/_docs/skills/samples/email.md index d52bf84a36..117908c9b7 100644 --- a/docs/_docs/reference/skills/productivity-email.md +++ b/docs/_docs/skills/samples/email.md @@ -1,61 +1,51 @@ --- -category: Reference -subcategory: Skills +category: Skills +subcategory: Samples title: Email Skill description: Add email capabilities to your Assistant. Powered by Microsoft Graph and Google. -order: 9 +order: 2 +toc: true --- # {{ page.title }} {:.no_toc} -## In this reference -{:.no_toc} - -* -{:toc} - -## Overview -The Email Skill provides email related capabilities to a Virtual Assistant. 
+{{ page.description }}

 ## Supported scenarios
+{:.toc}

 The following scenarios are currently supported by the Skill:

 - Send an Email
 - *Send an email to John Smith*
-  - *Send an email*
+  - *Send an email to Harold about the team lunch this Tuesday*
 - Find Email
 - *Find email from John Smith*
 - *What email do I have*
-- Add Flag
-  - *This email needs to be flagged*
-  - *Add a flag to the email Simone Jones just sent to me*
 - Check Messages
 - *Do I have any new mail*
 - *Check my email*
 - Delete
-  - *Do I have any new mail*
-  - *Check my email*
+  - *Delete an email*
+  - *put the email in the recycle bin*
 - Forward
-  - *Forward all files from Petrina to Jim*
-  - *Could you forward this message to Cosmo my email*
-- Query Last Text
-  - *Who emailed me last*
-  - *What was the last email I got from Dad*
+  - *Forward email from megan to alex*
+  - *Could you forward this message*
 - Read Aloud
-  - *Read the last email from Philippe*
+  - *Read email from Philippe*
 - *Read unread email*
 - Reply to an Email
-  - *Reply with "I will call you back"*
-  - *Respond to my last email*
+  - *Reply with I will call you back*
+  - *Respond to my email*
 - Select an Email
 - *The third search result please*
 - *Open this one*

-## Language Understanding (LUIS)
+## Language Understanding
+{:.toc}

-LUIS models for the Skill are provided in `.lu` file format as part of the Skill. Further languages are being prioritized.
+LUIS models for the Skill are provided in **.lu** file format as part of the Skill. Further languages are being prioritized.

 |Supported Languages|
 |-|
@@ -67,6 +57,7 @@ LUIS models for the Skill are provided in **.lu** file format as part of the Skill
 |Chinese (simplified)|

 ### Intents
+{:.no_toc}

 |Name|Description|
 |-|-|
@@ -82,6 +73,7 @@ LUIS models for the Skill are provided in **.lu** file format as part of the Skill
 |SendEmail| Matches queries to send an email |

 ### Entities
+{:.no_toc}

 |Name|Description|
 |-|-|
@@ -106,32 +98,48 @@ LUIS models for the Skill are provided in **.lu** file format as part of the Skill
 |ordinal| Prebuilt entity|

 ## Configuration
+{:.toc}
+
 ### Deployment
-Learn how to [provision your Azure resources]({{site.baseurl}}/tutorials/csharp/create-skill/4_provision_your_azure_resources/) in the Create a Skill tutorial.
+{:.no_toc}
+Learn how to [provision your Azure resources]({{site.baseurl}}/skills/tutorials/create-skill/csharp/4-provision-your-azure-resources/) in the Create a Skill tutorial.

 ### Supported content providers
+{:.no_toc}
 > Office 365 and Outlook.com through the Microsoft Graph is supported along with support for Google accounts.

To use a Google account with this skill, follow these steps:
-1. Create your calendar API credential in [Google developers console](https://console.developers.google.com).
-2. Create an OAuth connection setting in your Web App Bot.
-  - Connection name: `googleapi`
-  - Service Provider: `Google`
-  - Client id and secret are generated in step 1
-  - Scopes: `https://www.googleapis.com/auth/calendar https://www.googleapis.com/auth/contacts`.
-3. Add the connection name, client id, secret and scopes in the `appsetting.json` file.
+1. Enable Gmail API in [Google API library](https://console.developers.google.com/apis/library)
+1. Create your Gmail API credential in [Google developers console](https://console.developers.google.com/apis/credentials).
+   1. Choose "Create credential" - "OAuth Client ID"
+   1. Choose "Web Application"
+   1. Set Redirect URL as **https://token.botframework.com/.auth/web/redirect**
+1. Create an OAuth connection setting in your Web App Bot.
+   - Connection name: **googleapi**
+   - Service Provider: **Google**
+   - Client id and secret are generated in step 2
+   - Scopes: **https://mail.google.com/ https://www.googleapis.com/auth/contacts**.
+1. Add the connection name, client id, secret and scopes in the **appsettings.json** file.

 ### Authentication connection Settings
+{:.no_toc}
 If you plan to use the skill as part of a Virtual Assistant, the process of registering a skill with your Virtual Assistant will create the supporting authentication connection information automatically for your Virtual Assistant. This skill uses the following authentication scopes which are registered automatically:

-- `User.ReadBasic.All`
-- `Mail.ReadWrite`
-- `Mail.Send`
-- `People.Read`
-- `Contacts.Read`
+- **User.ReadBasic.All**
+- **Mail.ReadWrite**
+- **Mail.Send**
+- **People.Read**
+- **Contacts.Read**

 **However**, if you wish to use the Skill directly without using a Virtual Assistant, please use the following steps to manually configure Authentication for the Email Skill. This is **not** required when using the Skill with a Virtual Assistant.

-Follow the general instructions [here]({{site.baseurl}}/howto/skills/manualauthsteps) to configure this using the scopes shown above.
+Follow the general instructions [here]({{site.baseurl}}/skills/handbook/authentication/#manual-authentication) to configure this using the scopes shown above.

 ## Events
-Learn how to use [events]({{site.baseurl}}/reference/virtual-assistant/events) to send backend data to a Skill, like a user's location or time zone.
\ No newline at end of file
+{:.toc}
+
+Learn how to use [events]({{site.baseurl}}/virtual-assistant/handbook/events) to send backend data to a Skill, like a user's location or time zone.
+
+## Download a transcript
+{:.toc}
+
+Download
diff --git a/docs/_docs/skills/samples/event.md b/docs/_docs/skills/samples/event.md
new file mode 100644
index 0000000000..4e66363068
--- /dev/null
+++ b/docs/_docs/skills/samples/event.md
@@ -0,0 +1,28 @@
+---
+category: Skills
+subcategory: Samples
+language: Experimental Skills
+title: Event Skill
+description: Event Skill provides the ability to search for events using Eventbrite.
+order: 4
+toc: true
+---
+
+# {{ page.title }}
+{:.no_toc}
+
+The [Event Skill]({{site.repo}}/tree/master/skills/src/csharp/experimental/eventskill) provides a simple skill that integrates with [Eventbrite](https://www.eventbrite.com/platform/) to show information about events happening in the specified area.
+
+This skill currently supports one scenario to get local event information.
+
+![Event Example]({{site.baseurl}}/assets/images/skills-event-transcript.png)
+
+## Configuration
+{:.no_toc}
+
+1. Get your own [Eventbrite API Key](https://www.eventbrite.com/platform/api-keys).
+1. Provide this value in your `appsettings.json` file.
+
+```
+"eventbriteKey": "YOUR_EVENTBRITE_API_KEY"
+```
\ No newline at end of file
diff --git a/docs/_docs/skills/samples/experimental.md b/docs/_docs/skills/samples/experimental.md
new file mode 100644
index 0000000000..5bfecda988
--- /dev/null
+++ b/docs/_docs/skills/samples/experimental.md
@@ -0,0 +1,582 @@
+---
+category: Skills
+subcategory: Samples
+language: Experimental Skills
+title: Overview
+description: These experimental Bot Framework Skills are early prototypes to help bring skill concepts to life for demonstrations and proof-of-concepts, along with providing different examples to get you started. These skills, by their very nature, are not complete, will likely have rudimentary language models, limited language support, and limited testing; hence they are located in an experimental folder to ensure this is understood before you make use of them.
+order: 1
+toc: true
+---
+
+# {{ page.title }}
+{:.no_toc}
+
+{{ page.description }}
+
+## Skill Deployment
+{:.toc}
+
+The Experimental Skills require the following dependencies for end-to-end operation. These are created through an ARM script, which you can modify as required.
+
+- Azure Web App
+- Azure Storage Account (Transcripts)
+- Azure Application Insights (Telemetry)
+- Azure CosmosDb (State)
+- Azure Cognitive Services - Language Understanding
+
+> Review the pricing and terms for the services and adjust to suit your scenario.
+
+**To deploy the experimental skills using the default configuration, follow the steps in this common [deployment documentation page]({{site.baseurl}}/virtual-assistant/tutorials/create-assistant/csharp/4-provision-your-azure-resources) from the folder where you have cloned the GitHub repo.**
+
+### Automotive Skill
+{:.toc}
+
+The Automotive Skill is in preview and demonstrates the first capabilities to help enable Automotive scenarios. The skill focuses on Vehicle Settings, specifically Climate Control, Safety and Basic audio controls. Media, Tuner and Phone capabilities are expected in a future release.
+
+Vehicle control is a complicated domain; whilst there is only a limited set of car controls for climate control, there are myriad ways that a human can describe a given setting. For example, *I'm feeling chilly*, *My feet are cold* and *It's cold here in the back* all relate to adjusting the temperature, but for different parts of the car and perhaps even different fan settings.
+
+The Skill leverages a set of LUIS models to help understand the intent and entities, then uses capabilities from our Maluuba team to match potential settings and actions against the available settings and suggest a course of action.
+
+Unlike the Productivity and PoI skills that integrate with existing services, the Automotive skill requires integration with the telematics solution in use by a given OEM, along with customization to reflect the actual car features of that OEM.
+
+To enable testing and simulation, any action identified is surfaced to the calling application as an event; this can easily be seen within the Bot Framework Emulator and will be wired up into the Web Test harness available as part of the Virtual Assistant solution.
+
+#### Supported scenarios
+{:.no_toc}
+
+At this time, changes to vehicle settings are supported through the `VEHICLE_SETTINGS_CHANGE` and `VEHICLE_SETTINGS_DECLARATIVE` intents. The former enables questions such as "change the temperature to 21 degrees" whereas the latter intent enables scenarios such as "I'm feeling cold" which require additional processing steps.
+
+The following vehicle setting areas are supported at this time; example utterances are provided for guidance. In cases where the utterance results in multiple potential settings, or a value isn't provided, the skill will prompt for disambiguation. Confirmation will be sought from the user if a setting is configured to require confirmation, which is important for sensitive settings such as safety.
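+
+As a rough illustration, utterances map onto these two intents in the **.lu** format; a minimal sketch using example utterances from this page (illustrative only, not the shipped model, which is far richer) might look like this:
+
+```
+# VEHICLE_SETTINGS_CHANGE
+- set temperature to 21 degrees
+- turn off the ac
+
+# VEHICLE_SETTINGS_DECLARATIVE
+- I'm feeling cold
+- it's feeling cold in the back
+```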
+
+##### Climate Control
+{:.no_toc}
+
+- *Set temperature to 21 degrees*
+- *Defog my windshield*
+- *Put the air on my feet*
+- *Turn off the ac*
+- *I'm feeling cold*
+- *It's feeling cold in the back*
+- *The passenger is freezing*
+- *Change climate control*
+
+##### Safety
+{:.no_toc}
+
+- *Turn lane assist off*
+- *Enable lane change alert*
+- *Set park assist to alert*
+
+##### Audio
+{:.no_toc}
+
+- *Adjust the equalizer*
+- *Increase the bass*
+- *Increase the volume*
+
+Vehicle settings can be selected through explicit entry of the vehicle setting name, numeric or ordinal (first one, last one).
+
+An example transcript file demonstrating the Skill in action can be found [here]({{site.baseurl}}/assets/transcripts/skills-automotive.transcript); you can use the Bot Framework Emulator to open transcripts.
+
+![Automotive Skill Transcript Example]({{site.baseurl}}/assets/images/skills-auto-transcript.png)
+
+#### Language Understanding
+{:.no_toc}
+
+LUIS models for the Skill are provided in `.lu` file format as part of the Skill. These are currently available in English with other languages to follow.
+
+The following top-level intents are available with the main `settings` LUIS model:
+
+- VEHICLE_SETTINGS_CHANGE
+- VEHICLE_SETTINGS_DECLARATIVE
+
+In addition, there are two supporting LUIS models, `settings_name` and `settings_value`; these are used for disambiguation scenarios to clarify setting names and values where the initial utterance doesn't provide clear information.
+
+#### Configuration
+{:.no_toc}
+
+##### Customizing vehicle settings
+{:.no_toc}
+
+Available vehicle settings are defined in a supporting metadata file, which you can find in this location: `automotiveskill/Dialogs/VehicleSettings/Resources/available_settings.yaml`.
+
+Adding a new setting, along with appropriate setting values, is easily expressed in YAML. The example below shows a new Volume control setting with the ability to Set, Increase, Decrease and Mute the volume.
+
+```
+canonicalName: Volume
+values:
+  - canonicalName: Set
+    requiresAmount: true
+  - canonicalName: Decrease
+    changesSignOfAmount: true
+  - canonicalName: Increase
+    antonym: Decrease
+  - canonicalName: Mute
+allowsAmount: true
+amounts:
+  - unit: ''
+```
+
+For key settings, such as safety settings, you may wish to prompt for confirmation. This can be specified through a `requiresConfirmation` property as shown below.
+
+```
+canonicalName: Lane Change Alert
+values:
+  - canonicalName: Off
+    requiresConfirmation: true
+  - canonicalName: On
+```
+
+##### Deploying the Skill in local-mode
+{:.no_toc}
+
+The Automotive skill is not added by default when deploying the Virtual Assistant, as this is a domain-specific skill.
+
+Run this PowerShell script to deploy your shared resources and LUIS models.
+
+```
+ pwsh.exe -ExecutionPolicy Bypass -File DeploymentScripts/deploy_bot.ps1
+```
+
+You will be prompted to provide the following parameters:
+
+- Name - A name for your bot and resource group. This must be **unique**.
+- Location - The Azure region for your services (e.g. westus)
+- LUIS Authoring Key - Refer to [this documentation page]({{site.baseurl}}/virtual-assistant/tutorials/create-assistant/csharp/1-intro) for retrieving this key.
+
+The MSBot tool will outline the deployment plan including location and SKU. Ensure you review it before proceeding.
+
+> After deployment is complete, it's **imperative** that you make a note of the .bot file secret provided as this will be required for later steps.
The secret can be found near the top of the execution output and will be in purple text. + +- Update your `appsettings.json` file with the newly created `.bot` file name and `.bot` file secret. +- Run the following command and retrieve the InstrumentationKey for your Application Insights instance and update `InstrumentationKey` in your `appsettings.json` file. + +``` +msbot list --bot YOURBOTFILE.bot --secret YOUR_BOT_SECRET +``` + +```json +{ + "botFilePath": ".//YOURBOTFILE.bot", + "botFileSecret": "YOUR_BOT_SECRET", + "ApplicationInsights": { + "InstrumentationKey": "YOUR_INSTRUMENTATION_KEY" + } +} +``` + +- Finally, add the `.bot` file paths for each of your language configurations (English only at this time). + +```json +"defaultLocale": "en-us", +"languageModels": { + "en": { + "botFilePath": ".//LocaleConfigurations//YOUR_EN_BOT_PATH.bot", + "botFileSecret": "" + } +} +``` + +Once you have followed the deployment instructions above, open the provided `.bot` file with the Bot Framework Emulator. + +##### Adding the Skill to an existing Virtual Assistant deployment +{:.no_toc} + +Follow the instructions below to add the Automotive Skill to an existing Virtual Assistant deployment that you have. + +1. Update the Virtual Assistant deployment scripts. + - Add the additional automotive skill LUIS models to the bot.recipe file located within your assistant project: `assistant/DeploymentScripts/en/bot.recipe` + + ```json + { + "type": "luis", + "id": "settings", + "name": "settings", + "luPath": "../skills/automotiveskill/automotiveskill/CognitiveModels/LUIS/en/settings.lu" + }, + { + "type": "luis", + "id": "settings_name", + "name": "settings_name", + "luPath": "../skills/automotiveskill/automotiveskill/CognitiveModels/LUIS/en/settings_name.lu" + }, + { + "type": "luis", + "id": "settings_value", + "name": "settings_value", + "luPath": "../skills/automotiveskill/automotiveskill/CognitiveModels/LUIS/en/settings_value.lu" + }, + ``` + + - Add dispatch references to the core LUIS intents for the skill within the **assistant/CognitiveModels/en/dispatch.lu** file as shown below. Only the vehicle settings model is required for dispatch. This enables the Dispatcher to understand your new capabilities and route utterances to your skill + + ``` + # l_Automotive + - [VEHICLE_SETTINGS_CHANGE](../../../../skills/automotiveskill/automotiveskill/CognitiveModels/LUIS/en/settings_dispatch.lu#VEHICLE_SETTINGS_CHANGE) + ``` + +2. Run the following script to deploy the new Automotive Skill LUIS models and to update the dispatcher. + + ``` + pwsh.exe -ExecutionPolicy Bypass -File DeploymentScripts/update_published_models.ps1 -locales "en-us" + ``` + +3. In Virtual Assistant, add the skill configuration entry (in an earlier section) to **appsettings.json**. This tells the Virtual Assistant that there is a new skill available for use. + +4. Run the LuisGen tool to update the strongly-typed Dispatch class (Dispatch.cs) to reflect the additional dispatch target. + + ``` + LUISGen DeploymentScripts/en/dispatch.luis -cs Dispatch -o Dialogs/Shared/Resources + ``` + +5. Update **MainDialog.cs** within your Assistant project with the dispatch intent for your skill (l_automotive). This can be found in the assistant/dialogs/main folder of your project. + ![Add My Skill Image]({{site.baseurl}}/assets/images/skills_maindialogupdate.jpg) + +6. Add a project reference from your Virtual Assistant project to the Automotive Skill, this will ensure the DLL housing the skill can be found at runtime for skill activation. + +7. 
For Adaptive Cards to render images associated with the Automotive skill, you will need to take the image assets located in the `wwwroot/images` folder of the Automotive skill, host them in an HTTP location (potentially your Bot deployment), and set the base URI path in the skill configuration `ImageAssetLocation` property. If you skip this step, Adaptive Cards will not render with images correctly.
+
+#### Events
+{:.no_toc}
+
+The Automotive Skill surfaces setting changes for testing purposes through an event returned to the client. This enables easy testing and simulation; all events are prefixed with `AutomotiveSkill.`. The event below is generated in response to `I'm feeling cold`:
+
+```json
+{
+  "name": "AutomotiveSkill.Temperature",
+  "type": "event",
+  "value": [
+    {
+      "Key": "valueingform",
+      "Value": "Increasing"
+    },
+    {
+      "Key": "settingname",
+      "Value": "Temperature"
+    }
+  ]
+}
+```
+
+### Bing Search Skill
+
+The [Bing Search Skill]({{site.repo}}/tree/master/skills/src/csharp/experimental/bingsearchskill) provides a simple Skill that integrates with the [Bing Search Cognitive Service](https://azure.microsoft.com/en-us/services/cognitive-services/bing-web-search-api/).
+
+This skill has a very limited LUIS model (available in English, French, Italian, German, Spanish and Chinese) and demonstrates three simple scenarios:
+
+- Celebrity Information: *Who is Bill Gates?*
+- Q&A: *what's the population of China?*
+- Movie Information: *Tell me about the jurassic park movie*
+
+![Search Example]({{site.baseurl}}/assets/images/skills-experimental-bingsearch.png)
+
+#### Configuration
+{:.no_toc}
+
+1. Get your own [Bing Search Cognitive Services Key](https://azure.microsoft.com/en-us/services/cognitive-services/bing-web-search-api/).
+1. Get your own [Project Answer Search Key](https://labs.cognitive.microsoft.com/en-us/project-answer-search).
+1. Provide these values in your `appsettings.json` file.
+
+```
+"BingSearchKey": "{YOUR_BING_SEARCH_COGNITIVE_SERVICES_KEY}",
+"BingAnswerSearchKey": "{YOUR_PROJECT_ANSWER_SEARCH_KEY}"
+```
+
+### Event Skill
+
+The [Event Skill]({{site.repo}}/tree/master/skills/src/csharp/experimental/eventskill) provides a simple skill that integrates with [Eventbrite](https://www.eventbrite.com/platform/) to show information about events happening in the specified area.
+
+This skill currently supports one scenario to get local event information.
+
+![Event Example]({{site.baseurl}}/assets/images/skills-event-transcript.png)
+
+#### Configuration
+{:.no_toc}
+
+1. Get your own [Eventbrite API Key](https://www.eventbrite.com/platform/api-keys).
+1. Provide this value in your `appsettings.json` file.
+
+```
+"eventbriteKey": "YOUR_EVENTBRITE_API_KEY"
+```
+
+### Hospitality Skill
+
+The [Hospitality Skill]({{site.repo}}/tree/master/skills/src/csharp/experimental/hospitalityskill) demonstrates experiences that would be useful in a hospitality-specific scenario, such as checking out of a hotel, ordering room service, and requesting hotel amenities. This skill does not integrate with a hotel service at this time, and is instead simulated with static data for testing purposes.
+
+This skill demonstrates the following scenarios:
+- Show reservation: *What is my current check out date?*
+- Extend reservation: *Can I extend my stay?*
+- Request late check-out: *I want a late check out time*
+- Request amenities: *Can you bring me a toothbrush and toothpaste?*
+- Room service: *I want to see a room service menu*
+- Check out: *Can I check out now?*
+
+An example transcript file demonstrating the Skill in action can be found [here]({{site.baseurl}}/assets/transcripts/skills-hospitality.transcript); you can use the Bot Framework Emulator to open transcripts.
+
+![Hospitality Example]({{site.baseurl}}/assets/images/skills-hospitality-transcript.png)
+
+The [Hospitality Sample VA]({{site.baseurl}}/reference/samples/hospitalitysample) combines this skill with a number of other skills to deliver a more in-depth hospitality experience.
+
+### IT Service Management Skill
+
+The [IT Service Management skill](https://github.com/microsoft/AI/tree/next/skills/src/csharp/experimental/itsmskill) provides a basic skill with ticket and knowledge base related capabilities, and supports ServiceNow.
+
+This skill demonstrates the following scenarios:
+- Create a ticket: *Create a ticket for my broken laptop*
+- Show ticket: *What's the status of my incident*
+- Update Ticket: *Change ticket's urgency to high*
+- Close a ticket: *Close my ticket*
+- Find Knowledgebase item: *Search knowledge articles related to error 1234*
+
+An example transcript file demonstrating the Skill in action can be found [here]({{site.baseurl}}/assets/transcripts/skills-itsm.transcript); you can use the Bot Framework Emulator to open transcripts.
+
+#### Configuration
+{:.no_toc}
+
+To test this skill, you will need to follow the ServiceNow configuration steps shown below:
+
+- Create a ServiceNow instance in the [ServiceNow Developer Site](https://developer.servicenow.com/app.do#!/instance).
+- Update this configuration entry in your `appsettings.json` file with your ServiceNow instance URL:
+`"serviceNowUrl": "{YOUR_SERVICENOW_INSTANCE_URL}"`
+- Create a [scripted REST API](https://docs.servicenow.com/bundle/geneva-servicenow-platform/page/integrate/custom_web_services/task/t_CreateAScriptedRESTService.html) to get the current user's sys_id (please raise an issue if you find a simpler way)
+  - In System Web Services/Scripted REST APIs, click New to create an API
+  - In API's Resources, click New to add a resource
+  - In the resource, select GET for HTTP method and input `(function process(/*RESTAPIRequest*/ request, /*RESTAPIResponse*/ response) { return gs.getUserID(); })(request, response);` in Script
+  - Update the serviceNowGetUserId value in appsettings.json: `"serviceNowGetUserId": "YOUR_API_NAMESPACE/YOUR_API_ID"`
+- Register an Application and OAuth configuration by following [these instructions](https://docs.servicenow.com/bundle/london-platform-administration/page/administer/security/task/t_CreateEndpointforExternalClients.html#t_CreateEndpointforExternalClients). Keep the generated Client ID and Client Secret to be used in the following OAuth Connection step.
+  - Redirect URL is https://token.botframework.com/.auth/web/redirect
+- Add an OAuth Connection named 'ServiceNow' in the Settings pane of your Web App Bot, using Service Provider 'Generic Oauth 2'
+  - Set Authorization URL to the following, replacing YOUR_INSTANCE with your instance name: https://YOUR_INSTANCE.service-now.com/oauth_auth.do
+  - Set Token URL, Refresh URL to the following, replacing YOUR_INSTANCE with your instance name: https://YOUR_INSTANCE.service-now.com/oauth_token.do
+  - No Scopes are needed
+  - Click Test Connection to verify the connection works as expected.
+
+To test this skill with your Virtual Assistant, one manual step is required over and above the usual skill connection steps.
+
+- Add the OAuth Connection to your Virtual Assistant manually as per the step above; this connection type cannot be configured automatically as part of botskills.
+
+### Music Skill
+
+The [Music skill]({{site.repo}}/tree/master/skills/src/csharp/experimental/musicskill) integrates with [Spotify](https://developer.spotify.com/documentation/web-api/libraries/) to look up playlists and artists and open the Spotify app via URI.
+This is dependent on the [SpotifyAPI-NET](https://github.com/JohnnyCrazy/SpotifyAPI-NET) wrapper for the Spotify Web API.
+
+#### Configuration
+{:.no_toc}
+
+1. Get your own client id and secret when you [create a Spotify client](https://developer.spotify.com/dashboard/).
+1. Provide these values in your `appsettings.json` file.
+
+```
+ "spotifyClientId": "{YOUR_SPOTIFY_CLIENT_ID}",
+ "spotifyClientSecret": "{YOUR_SPOTIFY_CLIENT_SECRET}"
+```
+
+#### Events
+{:.no_toc}
+
+This Skill supports an outgoing `OpenDefaultApp` Event Activity that provides a Spotify URI for chat clients to open on their own.
+
+```
+{
+   "type":"event",
+   "name":"OpenDefaultApp",
+   "value":{
+      "MusicUri":"{SPOTIFY_URI}"
+   }
+}
+```
+
+### News Skill
+
+The [News skill]({{site.repo}}/tree/master/skills/src/csharp/experimental/newsskill) provides a simple Skill that integrates with the Bing News Cognitive Service to demonstrate how a news experience can be integrated into a Virtual Assistant.
+
+Once deployed, create a [Bing News Cognitive Services Key](https://azure.microsoft.com/en-us/services/cognitive-services/bing-news-search-api/) and update the appropriate configuration in your `appsettings.json` file.
+
+This skill supports the following scenarios:
+- Find articles: *Find me news about sports*
+- Trending articles: *What news is trending now?*
+- Show favorite topic: *Find news for me*
+
+![News Example]({{site.baseurl}}/assets/images/skills-news-transcript.png)
+
+#### Configuration
+{:.no_toc}
+
+1. Get your own [Bing News Cognitive Services Key](https://azure.microsoft.com/en-us/services/cognitive-services/bing-news-search-api/)
+1. Provide this value in your `appsettings.json` file.
+
+```
+"BingNewsKey": "{YOUR_BING_NEWS_COGNITIVE_SERVICES_KEY}"
+```
+
+### Phone Skill
+
+The Phone Skill provides the capability to start phone calls to a Virtual Assistant.
+
+#### Supported scenarios
+{:.no_toc}
+
+The following scenarios are currently supported by the Skill:
+
+- Outgoing Call
+  - *Call Sanjay Narthwani*
+  - *Call 555 5555*
+  - *Make a call*
+
+The skill will automatically prompt the user for any missing information and/or to clarify ambiguous information.
+
+##### Example dialog
+{:.no_toc}
+
+Here is an example of a dialog with the Phone skill that showcases all possible prompts.
+Note that the skill may skip prompts if the corresponding information is already given.
+This example assumes that the user's contact list contains multiple contacts named "Sanjay", one of which is named "Sanjay Narthwani" and has multiple phone numbers, one of which is labelled "Mobile".
+
+|Turn| Utterance / Prompt |
+|-|-|
+|User| Make a call |
+|Skill| Who would you like to call? |
+|User| Sanjay |
+|Skill| Which Sanjay? |
+|User| Narthwani |
+|Skill| Sanjay Narthwani has multiple phone numbers. Which one? |
+|User| Mobile |
+|Skill| Calling Sanjay Narthwani on mobile. |
+
+Refer to the unit tests for further example dialogs.
+
+#### Language Understanding
+{:.no_toc}
+
+LUIS models for the Skill are provided in `.lu` file format as part of the Skill. Further languages are being prioritized.
+
+|Supported Languages|
+|-|
+|English|
+
+The LUIS model `phone` is used to understand the user's initial query as well as responses to the prompt "Who would you like to call?"
+The other LUIS models (`contactSelection` and `phoneNumberSelection`) are used to understand the user's responses to later prompts in the dialog.
+
+##### Intents
+{:.no_toc}
+
+|Name|Description|
+|-|-|
+|OutgoingCall| Matches queries to make a phone call |
+
+##### Entities
+{:.no_toc}
+
+|Name|Description|
+|-|-|
+|contactName| The name of the contact to call |
+|phoneNumber| A literal phone number specified by the user in the query, in digits |
+|phoneNumberSpelledOut| A literal phone number specified by the user in the query, in words |
+|phoneNumberType| Identifies a certain phone number of the contact by its type (for example, "home", "business", "mobile") |
+
+#### Configuration
+{:.no_toc}
+
+##### Supported content providers
+{:.no_toc}
+
+> Office 365 and Outlook.com through the Microsoft Graph is supported along with support for Google accounts.
+
+To use a Google account with the skill, follow these steps:
+1. Create your Gmail API credential in [Google developers console](https://console.developers.google.com).
+2. Create an OAuth connection setting in your Web App Bot.
+   - Connection name: `googleapi`
+   - Service Provider: `Google`
+   - Client id and secret are generated in step 1
+   - Scopes: `"https://www.googleapis.com/auth/contacts"`.
+3. Add the connection name, client id, secret and scopes in the appsettings.json file.
+
+##### Authentication connection settings
+{:.no_toc}
+
+If you plan to use the skill as part of a Virtual Assistant, the process of registering a skill with your Virtual Assistant will create the supporting authentication connection information automatically for your Virtual Assistant. This skill uses the following authentication scopes, which are registered automatically:
+- `User.ReadBasic.All`
+- `User.Read`
+- `People.Read`
+- `Contacts.Read`
+
+**However**, if you wish to use the Skill directly without using a Virtual Assistant, please use the following steps to manually configure Authentication for the Phone Skill. This is **not** required when using the Skill with a Virtual Assistant.
+
+Follow the general instructions [here]({{site.baseurl}}/skills/handbook/authentication#manual-authentication) to configure this using the scopes shown above.
+
+#### Events
+{:.no_toc}
+
+Note that the Phone skill only handles the dialog with the user about the phone call to be made, but does not place the actual phone call.
+The phone call would typically be placed by the client application communicating with the bot or skill.
+For example, if the client application is an Android app, it would communicate with the bot to allow the user to go through the dialog and, at the end, it would place the call using an Android mechanism for placing calls.
+
+The information that is required to place the call is returned from the Phone skill in the form of an event at the end of the dialog.
+This event has the name `PhoneSkill.OutgoingCall`.
+Its value is a JSON object representing an object of type `PhoneSkill.Models.OutgoingCall`.
+
+The value of the event has the following properties:
+- The property `Number` holds the phone number to be dialed as a string.
+  (Please note that this string is in the same format as it appears in the user's contact list or in the user's query.
+  If you require an RFC 3966 compliant `tel:` URI or a particular other format, we recommend using a phone number formatting library to format this string accordingly, taking into account the user's default country code and any other relevant external information.)
+- The property `Contact` is optional and holds the contact list entry that the user selected.
+  This is an object of type `PhoneSkill.Models.ContactCandidate`.
+  This information may be useful, for example, to allow the client application to show information about the contact on the screen while the phone number is being dialed.
+
+Here is an example of an event returned by the Phone skill:
+
+```
+{
+  [...]
+  "type": "event",
+  "name": "PhoneSkill.OutgoingCall",
+  "value": {
+    "Number": "555 111 1111",
+    "Contact": {
+      "CorrespondingId": "[...]",
+      "Name": "Andrew Smith",
+      "PhoneNumbers": [
+        {
+          "Number": "555 111 1111",
+          "Type": {
+            "FreeForm": "",
+            "Standardized": 1
+          }
+        }
+      ]
+    }
+  }
+}
+```
+
+### Restaurant Booking Skill
+
+The [Restaurant Booking skill]({{site.repo}}/tree/master/skills/src/csharp/experimental/restaurantbooking) provides a simple restaurant booking experience that guides the user through booking a table, leveraging Adaptive Cards throughout to demonstrate how Speech, Text and UX can be combined for a compelling user experience. No integration with restaurant booking services exists at this time, so the experience is simulated with static data for testing purposes.
+
+An example transcript file demonstrating the Skill in action can be found [here]({{site.baseurl}}/assets/transcripts/skills-restaurantbooking.transcript); you can use the Bot Framework Emulator to open transcripts.
+
+![Restaurant Example]({{site.baseurl}}/assets/images/skills-restaurant-transcript.png)
+
+### Weather Skill
+
+The [Weather skill]({{site.repo}}/tree/master/skills/src/csharp/experimental/weatherskill) provides a basic Skill that integrates with [AccuWeather](https://developer.accuweather.com) to demonstrate how a weather experience can be integrated into a Virtual Assistant.
+
+#### Configuration
+{:.no_toc}
+
+1. Get your own API Key by following the instructions on [AccuWeather Getting Started](https://developer.accuweather.com/getting-started).
+1. Provide this value in your `appsettings.json` file.
+
+```
+"WeatherApiKey": "{YOUR_ACCUWEATHER_API_KEY}"
+```
diff --git a/docs/_docs/skills/samples/hospitality.md b/docs/_docs/skills/samples/hospitality.md
new file mode 100644
index 0000000000..93aaaf6676
--- /dev/null
+++ b/docs/_docs/skills/samples/hospitality.md
@@ -0,0 +1,27 @@
+---
+category: Skills
+subcategory: Samples
+language: Experimental Skills
+title: Hospitality Skill
+description: Hospitality Skill provides the ability to do common tasks for hotel and other hospitality scenarios.
+order: 5
+---
+
+# {{ page.title }}
+{:.no_toc}
+
+The [Hospitality Skill]({{site.repo}}/tree/master/skills/src/csharp/experimental/hospitalityskill) demonstrates experiences that would be useful in a hospitality-specific scenario, such as checking out of a hotel, ordering room service, and requesting hotel amenities. This skill does not integrate with a hotel service at this time, and is instead simulated with static data for testing purposes.
+
+This skill demonstrates the following scenarios:
+- Show reservation: *What is my current check out date?*
+- Extend reservation: *Can I extend my stay?*
+- Request late check-out: *I want a late check out time*
+- Request amenities: *Can you bring me a toothbrush and toothpaste?*
+- Room service: *I want to see a room service menu*
+- Check out: *Can I check out now?*
+
+An example transcript file demonstrating the Skill in action can be found [here]({{site.baseurl}}/assets/transcripts/skills-hospitality.transcript); you can use the Bot Framework Emulator to open transcripts.
+
+![Hospitality Example]({{site.baseurl}}/assets/images/skills-hospitality-transcript.png)
+
+The [Hospitality Sample VA]({{site.baseurl}}/reference/samples/hospitalitysample) combines this skill with a number of other skills to deliver a more in-depth hospitality experience.
\ No newline at end of file
diff --git a/docs/_docs/skills/samples/itsm.md b/docs/_docs/skills/samples/itsm.md
new file mode 100644
index 0000000000..0b5f98a7ad
--- /dev/null
+++ b/docs/_docs/skills/samples/itsm.md
@@ -0,0 +1,48 @@
+---
+category: Skills
+subcategory: Samples
+language: Experimental Skills
+title: IT Service Management (ITSM) Skill
+description: IT Service Management Skill provides the ability to work with typical help desk ticketing scenarios for ServiceNow.
+order: 6
+toc: true
+---
+
+# {{ page.title }}
+{:.no_toc}
+
+The [IT Service Management skill](https://github.com/microsoft/AI/tree/next/skills/src/csharp/experimental/itsmskill) provides a basic skill with ticket and knowledge base related capabilities, and supports ServiceNow.
+
+This skill demonstrates the following scenarios:
+- Create a ticket: *Create a ticket for my broken laptop*
+- Show ticket: *What's the status of my incident*
+- Update Ticket: *Change ticket's urgency to high*
+- Close a ticket: *Close my ticket*
+- Find Knowledgebase item: *Search knowledge articles related to error 1234*
+
+An example transcript file demonstrating the Skill in action can be found [here]({{site.baseurl}}/assets/transcripts/skills-itsm.transcript); you can use the Bot Framework Emulator to open transcripts.
+
+## Configuration
+{:.no_toc}
+
+To test this skill, you will need to follow the ServiceNow configuration steps shown below:
+
+- Create a ServiceNow instance in the [ServiceNow Developer Site](https://developer.servicenow.com/app.do#!/instance).
+- Update this configuration entry in your `appsettings.json` file with your ServiceNow instance URL:
+`"serviceNowUrl": "{YOUR_SERVICENOW_INSTANCE_URL}"`
+- Create a [scripted REST API](https://docs.servicenow.com/bundle/geneva-servicenow-platform/page/integrate/custom_web_services/task/t_CreateAScriptedRESTService.html) to get the current user's sys_id (please raise an issue if you find a simpler way)
+  - In System Web Services/Scripted REST APIs, click New to create an API
+  - In API's Resources, click New to add a resource
+  - In the resource, select GET for HTTP method and input `(function process(/*RESTAPIRequest*/ request, /*RESTAPIResponse*/ response) { return gs.getUserID(); })(request, response);` in Script
+  - Update the serviceNowGetUserId value in appsettings.json: `"serviceNowGetUserId": "YOUR_API_NAMESPACE/YOUR_API_ID"`
+- Register an Application and OAuth configuration by following [these instructions](https://docs.servicenow.com/bundle/london-platform-administration/page/administer/security/task/t_CreateEndpointforExternalClients.html#t_CreateEndpointforExternalClients). Keep the generated Client ID and Client Secret to be used in the following OAuth Connection step.
+  - Redirect URL is https://token.botframework.com/.auth/web/redirect
+- Add an OAuth Connection named 'ServiceNow' in the Settings pane of your Web App Bot, using Service Provider 'Generic Oauth 2'
+  - Set Authorization URL to the following, replacing YOUR_INSTANCE with your instance name: https://YOUR_INSTANCE.service-now.com/oauth_auth.do
+  - Set Token URL, Refresh URL to the following, replacing YOUR_INSTANCE with your instance name: https://YOUR_INSTANCE.service-now.com/oauth_token.do
+  - No Scopes are needed
+  - Click Test Connection to verify the connection works as expected.
+
+To test this skill with your Virtual Assistant, one manual step is required over and above the usual skill connection steps.
+
+- Add the OAuth Connection to your Virtual Assistant manually as per the step above; this connection type cannot be configured automatically as part of botskills.
\ No newline at end of file
diff --git a/docs/_docs/skills/samples/music.md b/docs/_docs/skills/samples/music.md
new file mode 100644
index 0000000000..59277010c0
--- /dev/null
+++ b/docs/_docs/skills/samples/music.md
@@ -0,0 +1,26 @@
+---
+category: Skills
+subcategory: Samples
+language: Experimental Skills
+title: Music Skill
+description: Music Skill provides the ability to select music to be played from Spotify.
+order: 7
+toc: true
+---
+
+# {{ page.title }}
+{:.no_toc}
+
+The [Music skill]({{site.repo}}/tree/master/skills/src/csharp/experimental/musicskill) integrates with [Spotify](https://developer.spotify.com/documentation/web-api/libraries/) to look up playlists and artists and open the Spotify app via URI.
+This is dependent on the [SpotifyAPI-NET](https://github.com/JohnnyCrazy/SpotifyAPI-NET) wrapper for the Spotify Web API.
+
+## Configuration
+{:.no_toc}
+
+1. Get your own client id and secret when you [create a Spotify client](https://developer.spotify.com/dashboard/).
+1. Provide these values in your `appsettings.json` file.
+
+```
+ "spotifyClientId": "{YOUR_SPOTIFY_CLIENT_ID}",
+ "spotifyClientSecret": "{YOUR_SPOTIFY_CLIENT_SECRET}"
+```
\ No newline at end of file
diff --git a/docs/_docs/skills/samples/news.md b/docs/_docs/skills/samples/news.md
new file mode 100644
index 0000000000..b83f284a33
--- /dev/null
+++ b/docs/_docs/skills/samples/news.md
@@ -0,0 +1,33 @@
+---
+category: Skills
+subcategory: Samples
+language: Experimental Skills
+title: News Skill
+description: News Skill provides the ability to find and review news articles.
+order: 8
+toc: true
+---
+
+# {{ page.title }}
+{:.no_toc}
+
+The [News skill]({{site.repo}}/tree/master/skills/src/csharp/experimental/newsskill) provides a simple Skill that integrates with the Bing News Cognitive Service to demonstrate how a news experience can be integrated into a Virtual Assistant.
+
+Once deployed, create a [Bing News Cognitive Services Key](https://azure.microsoft.com/en-us/services/cognitive-services/bing-news-search-api/) and update the appropriate configuration in your `appsettings.json` file.
+
+This skill supports the following scenarios:
+- Find articles: *Find me news about sports*
+- Trending articles: *What news is trending now?*
+- Show favorite topic: *Find news for me*
+
+![News Example]({{site.baseurl}}/assets/images/skills-news-transcript.png)
+
+## Configuration
+{:.no_toc}
+
+1. Get your own [Bing News Cognitive Services Key](https://azure.microsoft.com/en-us/services/cognitive-services/bing-news-search-api/)
+1. Provide this value in your `appsettings.json` file.
+
+```
+"BingNewsKey": "{YOUR_BING_NEWS_COGNITIVE_SERVICES_KEY}"
+```
\ No newline at end of file
diff --git a/docs/_docs/reference/skills/phone.md b/docs/_docs/skills/samples/phone.md
similarity index 75%
rename from docs/_docs/reference/skills/phone.md
rename to docs/_docs/skills/samples/phone.md
index db3d9bb0c0..04ec3cb6a4 100644
--- a/docs/_docs/reference/skills/phone.md
+++ b/docs/_docs/skills/samples/phone.md
@@ -1,37 +1,19 @@
 ---
-category: Reference
-subcategory: Skills
+category: Skills
+subcategory: Samples
+language: Experimental Skills
 title: Phone Skill
-description: The Phone Skill provides the capability to start phone calls to a Virtual Assistant.
+description: Phone Skill provides the ability to find and call a person or a number.
 order: 9
+toc: true
 ---

 # {{ page.title }}
 {:.no_toc}

-## In this reference
-{:.no_toc}
-
-{:toc}
-
-## Overview
-
 The Phone Skill provides the capability to start phone calls to a Virtual Assistant.

-## Supported Sources
-
-> Office 365 and Outlook.com through the Microsoft Graph is supported along with support for Google accounts.
-
-To use Google account in skill you need to follow these steps:
-1. Create your Gmail API credential in [Google developers console](https://console.developers.google.com).
-2. Create an OAuth connection setting in your Web App Bot.
-  - Connection name: `googleapi`
-  - Service Provider: `Google`
-  - Client id and secret are generated in step 1
-  - Scopes: `"https://www.googleapis.com/auth/contacts"`.
-3. Add the connection name, client id, secret and scopes in appsetting.json file.
-
-## Supported Scenarios
+## Supported scenarios

 The following scenarios are currently supported by the Skill:
@@ -42,7 +24,8 @@ The following scenarios are currently supported by the Skill:

 The skill will automatically prompt the user for any missing information and/or to clarify ambiguous information.
-### Example Dialog
+### Example dialog
+{:.no_toc}

 Here is an example of a dialog with the Phone skill that showcases all possible prompts.
 Note that the skill may skip prompts if the corresponding information is already given.
@@ -59,35 +42,9 @@ This example assumes that the user's contact list contains multiple contacts nam
 |User| Mobile |
 |Skill| Calling Sanjay Narthwani on mobile. |

-Please refer to the unit tests for further example dialogs.
-
-## Skill Deployment
-
-The Phone Skill requires the following dependencies for end to end operation, which are created through an ARM script, which you can modify as required.
-
-- Azure Web App
-- Azure Storage Account (Transcripts)
-- Azure Application Insights (Telemetry)
-- Azure CosmosDb (State)
-- Azure Cognitive Services - Language Understanding
-
-> Review the pricing and terms for the services and adjust to suit your scenario.
-
-To deploy your services using the default configuration, follow the steps in this common deployment documentation page ([C#]({{site.baseurl}}/tutorials/csharp/create-skill/4_provision_your_azure_resources.md)/[TypeScript]({{site.baseurl}}/tutorials/typescript/create-skill/4_provision_your_azure_resources/.md)) from the folder where you have cloned the GitHub repo.
-
-### Authentication Connection Settings
-
-If you plan to use the skill as part of a Virtual Assistant, the process of registering a skill with your Virtual Assistant will create the supporting authentication connection information automatically for your Virtual Assistant. This skill uses the following authentication scopes, which are registered automatically:
-- `User.ReadBasic.All`
-- `User.Read`
-- `People.Read`
-- `Contacts.Read`
-
-**However**, if you wish to use the Skill directly without using a Virtual Assistant, please use the following steps to manually configure Authentication for the Phone Skill. This is **not** required when using the Skill with a Virtual Assistant.
-
-Follow the general instructions [here]({{site.baseurl}}/howto/skills/manualauthsteps.md) to configure this using the scopes shown above.
+Refer to the unit tests for further example dialogs.

-## Language Model
+## Language Understanding

 LUIS models for the Skill are provided in `.lu` file format as part of the Skill. Further languages are being prioritized.

@@ -99,12 +56,14 @@ The LUIS model `phone` is used to understand the user's initial query as well as
 The other LUIS models (`contactSelection` and `phoneNumberSelection`) are used to understand the user's responses to later prompts in the dialog.

 ### Intents
+{:.no_toc}

 |Name|Description|
 |-|-|
 |OutgoingCall| Matches queries to make a phone call |

 ### Entities
+{:.no_toc}

 |Name|Description|
 |-|-|
@@ -113,7 +72,36 @@ The other LUIS models (`contactSelection` and `phoneNumberSelection`) are used t
 |phoneNumberSpelledOut| A literal phone number specified by the user in the query, in words |
 |phoneNumberType| Identifies a certain phone number of the contact by its type (for example, "home", "business", "mobile") |

-## Integration
+## Configuration
+
+### Supported content providers
+{:.no_toc}
+
+> Office 365 and Outlook.com through the Microsoft Graph is supported along with support for Google accounts.
+
+To use a Google account with the skill, follow these steps:
+1. Create your Gmail API credential in [Google developers console](https://console.developers.google.com).
+2. Create an OAuth connection setting in your Web App Bot.
+   - Connection name: `googleapi`
+   - Service Provider: `Google`
+   - Client id and secret are generated in step 1
+   - Scopes: `"https://www.googleapis.com/auth/contacts"`.
+3. Add the connection name, client id, secret and scopes in the appsettings.json file.
+
+### Authentication connection settings
+{:.no_toc}
+
+If you plan to use the skill as part of a Virtual Assistant, the process of registering a skill with your Virtual Assistant will create the supporting authentication connection information automatically for your Virtual Assistant. This skill uses the following authentication scopes, which are registered automatically:
+- `User.ReadBasic.All`
+- `User.Read`
+- `People.Read`
+- `Contacts.Read`
+
+**However**, if you wish to use the Skill directly without using a Virtual Assistant, please use the following steps to manually configure Authentication for the Phone Skill. This is **not** required when using the Skill with a Virtual Assistant.
+
+Follow the general instructions [here]({{site.baseurl}}/skills/handbook/authentication#manual-authentication) to configure this using the scopes shown above.
+
+## Events

 Note that the Phone skill only handles the dialog with the user about the phone call to be made, but does not place the actual phone call.
 The phone call would typically be placed by the client application communicating with the bot or skill.
@@ -133,24 +121,26 @@ The value of the event has the following properties:

 Here is an example of an event returned by the Phone skill:

-    {
-      [...]
-      "type": "event",
-      "name": "PhoneSkill.OutgoingCall",
-      "value": {
-        "Number": "555 111 1111",
-        "Contact": {
-          "CorrespondingId": "[...]",
-          "Name": "Andrew Smith",
-          "PhoneNumbers": [
-            {
-              "Number": "555 111 1111",
-              "Type": {
-                "FreeForm": "",
-                "Standardized": 1
-              }
-            }
-          ]
-        }
-      }
-    }
+```
+{
+  [...]
+  "type": "event",
+  "name": "PhoneSkill.OutgoingCall",
+  "value": {
+    "Number": "555 111 1111",
+    "Contact": {
+      "CorrespondingId": "[...]",
+      "Name": "Andrew Smith",
+      "PhoneNumbers": [
+        {
+          "Number": "555 111 1111",
+          "Type": {
+            "FreeForm": "",
+            "Standardized": 1
+          }
+        }
+      ]
+    }
+  }
+}
+```
\ No newline at end of file
diff --git a/docs/_docs/reference/skills/pointofinterest.md b/docs/_docs/skills/samples/point-of-interest.md
similarity index 72%
rename from docs/_docs/reference/skills/pointofinterest.md
rename to docs/_docs/skills/samples/point-of-interest.md
index 7c5458a1a5..e4b93e36f0 100644
--- a/docs/_docs/reference/skills/pointofinterest.md
+++ b/docs/_docs/skills/samples/point-of-interest.md
@@ -1,24 +1,20 @@
 ---
-category: Reference
-subcategory: Skills
+category: Skills
+subcategory: Samples
 title: Point Of Interest Skill
 description: Find points of interest and directions. Powered by Azure Maps and FourSquare.
-order: 11
+order: 3
+toc: true
 ---

 # {{ page.title }}
 {:.no_toc}

-## In this reference
-{:.no_toc}
-
-*
-{:toc}
-
-## Overview
-The Point of Interest Skill provides point of interest and navigation related capabilities to a Virtual Assistant.
+{{ page.description }}

 ## Supported scenarios
+{:.toc}
+
 The following scenarios are currently supported by the Skill:

 - Get Directions to a Point of Interest
@@ -34,8 +30,10 @@ The following scenarios are currently supported by the Skill:
 - _Are there any pharmacies in town?_
 - _Can you recommend an affordable restaurant in Seattle?_

-## Language Understanding (LUIS)
-LUIS models are provided in `.lu` file format to support the scenarios used in this Skill.
+## Language Understanding +{:.toc} + +LUIS models are provided in **.lu** file format to support the scenarios used in this Skill. |Supported Languages| |-| @@ -47,6 +45,7 @@ LUIS models are provided in `.lu` file format to support the scenarios used in t |Chinese (simplified)| ### Intents +{:.no_toc} |Name|Description| |-|-| @@ -55,6 +54,7 @@ LUIS models are provided in `.lu` file format to support the scenarios used in t |FindParking| Matches queries searching for a parking space | ### Entities +{:.no_toc} |Name|Description| |-|-| @@ -68,31 +68,47 @@ LUIS models are provided in `.lu` file format to support the scenarios used in t |number| Prebuilt entity| ## Configuration +{:.toc} + ### Deployment +{:.no_toc} + Learn how to [provision your Azure resources]({{site.baseurl}}/tutorials/csharp/create-skill/4_provision_your_azure_resources/) in the Create a Skill tutorial. ### Supported content providers +{:.no_toc} + > **Mandatory**: [Azure Maps](https://azure.microsoft.com/en-us/services/azure-maps/) is supported for finding Points of Interest and getting route directions to a selected location. > As this is the only supported provider to get directions, this provider is required. > [Foursquare](https://developer.foursquare.com/docs/api) is supported for finding Points of Interest and related details (rating, business hours, price level, etc.). ### Authentication connection settings +{:.no_toc} + > No authentication is required for this skill ## Events -Learn how to use [events]({{site.baseurl}}/reference/virtual-assistant/events) to send backend data to a Skill, like a user's location. +{:.toc} + +Learn how to use [events]({{site.baseurl}}/virtual-assistant/handbook/events) to send backend data to a Skill, like a user's location. ### From assistant to user -This Skill supports an outgoing `OpenDefaultApp` Event Activity that provides a Geo URI for chat clients to determine how to handle navigation to a user's selected point of interest. -The [Virtual Assistant Client (Android) sample]({{ site.baseurl }}/howto/samples/vaclient_android/) demonstrates how a client may navigate to a destination using a user's preferred map application. +{:.no_toc} + +This Skill supports an outgoing **OpenDefaultApp** Event Activity that provides a [Geo URI](https://en.wikipedia.org/wiki/Geo_URI_scheme) for chat clients to determine how to handle navigation to a user's selected point of interest. +The [Virtual Assistant Client (Android) sample]({{ site.baseurl }}/clients/virtual-assistant-client/) demonstrates how a client may navigate to a destination using a user's preferred map application. ```json { "type":"event", "name":"OpenDefaultApp", "value":{ - "GeoUri":"geo:{LONGITUDE},{LATITUDE}" + "GeoUri":"geo:{LATITUDE},{LONGITUDE}" } } -``` \ No newline at end of file +``` + +## Download a transcript + +Download \ No newline at end of file diff --git a/docs/_docs/skills/samples/restaurant-booking.md b/docs/_docs/skills/samples/restaurant-booking.md new file mode 100644 index 0000000000..01254312a6 --- /dev/null +++ b/docs/_docs/skills/samples/restaurant-booking.md @@ -0,0 +1,18 @@ +--- +category: Skills +subcategory: Samples +language: Experimental Skills +title: Restaurant Booking Skill +description: Restaurant Booking Skill provides the ability to find and book a restaurant reservation. 
+order: 10
+toc: true
+---
+
+# {{ page.title }}
+{:.no_toc}
+
+The [Restaurant Booking skill]({{site.repo}}/tree/master/skills/src/csharp/experimental/restaurantbooking) provides a simple restaurant booking experience that guides the user through booking a table, leveraging Adaptive Cards throughout to demonstrate how Speech, Text and UX can be combined for a compelling user experience. No integration with restaurant booking services exists at this time, so the experience is simulated with static data for testing purposes.
+
+An example transcript file demonstrating the Skill in action can be found [here]({{site.baseurl}}/assets/transcripts/skills-restaurantbooking.transcript); you can use the Bot Framework Emulator to open transcripts.
+
+![Restaurant Example]({{site.baseurl}}/assets/images/skills-restaurant-transcript.png)
diff --git a/docs/_docs/reference/skills/productivity-todo.md b/docs/_docs/skills/samples/to-do.md
similarity index 76%
rename from docs/_docs/reference/skills/productivity-todo.md
rename to docs/_docs/skills/samples/to-do.md
index c019ac1a68..feff77e739 100644
--- a/docs/_docs/reference/skills/productivity-todo.md
+++ b/docs/_docs/skills/samples/to-do.md
@@ -1,24 +1,19 @@
 ---
-category: Reference
-subcategory: Skills
+category: Skills
+subcategory: Samples
 title: To Do Skill
 description: Add task management capabilities to your Assistant. Powered by Microsoft Graph.
-order: 10
+order: 4
+toc: true
 ---

 # {{ page.title }}
 {:.no_toc}

-## In this reference
-{:.no_toc}
-
-*
-{:toc}
-
-## Overview
-The To Do Skill provides task related capabilities to a Virtual Assistant.
+{{ page.description }}

 ## Supported scenarios
+{:.toc}

 The following scenarios are currently supported by the Skill:
@@ -31,17 +26,18 @@ The following scenarios are currently supported by the Skill:
 - *Browse my groceries*
 - *Show my to do list*
 - Delete Tasks
-  - *Remove "salad vegetables" from my grocery list*
-  - *Remove my to do to "pick up Tom at 6 AM"*
+  - *Remove salad vegetables from my grocery list*
+  - *Remove my to do to pick up Tom at 6 AM*
 - *Remove all tasks*
 - Mark Tasks as Complete
-  - *Mark the task "get some food" as complete*
-  - *Task completed "reserve a restaurant for anniversary"*
-  - *Check off "bananas" on my grocery list*
+  - *Mark the task get some food as complete*
+  - *Task completed reserve a restaurant*
+  - *Check off bananas on my grocery list*

-## Language Understanding (LUIS)
+## Language Understanding
+{:.toc}

-LUIS models for the Skill are provided in `.lu` file format as part of the Skill. Further languages are being prioritized.
+LUIS models for the Skill are provided in **.lu** file format as part of the Skill. Further languages are being prioritized.

 |Supported Languages |
 |-|
@@ -53,6 +49,7 @@ LUIS models for the Skill are provided in **.lu** file format as part of the Skill
 |Chinese (simplified)|

 ### Intents
+{:.no_toc}

 |Name|Description|
 |-|-|
@@ -62,6 +59,7 @@ LUIS models for the Skill are provided in **.lu** file format as part of the Skill
 |DeleteToDo| Matches queries to delete a To Do item |

 ### Entities
+{:.no_toc}

 |Name|Description|
 |-|-|
@@ -76,27 +74,37 @@ LUIS models for the Skill are provided in **.lu** file format as part of the Skill
 |ordinal| Prebuilt entity|

 ## Configuration
+{:.toc}
+
 ### Deployment
-Learn how to [provision your Azure resources]({{site.baseurl}}/tutorials/csharp/create-skill/4_provision_your_azure_resources/) in the Create a Skill tutorial.
+{:.no_toc}
+
+Learn how to [provision your Azure resources]({{site.baseurl}}/skills/tutorials/create-skill/csharp/4-provision-your-azure-resources/) in the Create a Skill tutorial.
 
 ### Supported content providers
+{:.no_toc}
+
 > Office 365 and Outlook.com through the Microsoft Graph are supported at this time.
 
 ### Authentication connection settings
+{:.no_toc}
+
 If you plan to use the skill as part of a Virtual Assistant, the process of registering a skill with your Virtual Assistant will create the supporting authentication connection information automatically for your Virtual Assistant. This skill uses the following authentication scopes, which are registered automatically:
-- `Notes.ReadWrite`
-- `User.Read`
-- `User.ReadBasic.All`
-- `Tasks.ReadWrite`
+- **Notes.ReadWrite**
+- **User.Read**
+- **User.ReadBasic.All**
+- **Tasks.ReadWrite**
 
 **However**, if you wish to use the Skill directly without using a Virtual Assistant, please use the following steps to manually configure Authentication for the To Do Skill. This is **not** required when using the Skill with a Virtual Assistant.
 
-Follow the general instructions [here]({{site.baseurl}}/howto/skills/manualauthsteps) to configure this using the scopes shown above.
+Follow the general instructions [here]({{site.baseurl}}/skills/handbook/authentication/#manual-authentication) to configure this using the scopes shown above.
 
 ### Add customized to do lists
+{:.no_toc}
+
 If you want to add your customized list types, for example, your homework list or movie list, please follow these steps:
 
-1. Add your list type to `appsettings.json`
+1. Add your list type to **appsettings.json**
 
 ```json
 "customizeListTypes": [
@@ -139,3 +147,8 @@ If you want to add your customized list types, for example, your homework list o
 ```
 
 4. Redeploy your To Do Skill.
+
+## Download a transcript
+{:.toc}
+
+Download
\ No newline at end of file
diff --git a/docs/_docs/skills/samples/transcripts.md b/docs/_docs/skills/samples/transcripts.md
new file mode 100644
index 0000000000..28dcad157e
--- /dev/null
+++ b/docs/_docs/skills/samples/transcripts.md
@@ -0,0 +1,45 @@
+---
+category: Skills
+subcategory: Samples
+title: Transcripts
+description: View sample conversations of the Virtual Assistants and Skills.
+order: 6
+---
+
+# {{ page.title }}
+{:.no_toc}
+
+
+{{ page.description }} Download the Bot Framework Emulator and any transcript file available below. You can open these in the emulator. +
+
+ +
+
+
+## Virtual Assistant samples
+### Enterprise Assistant
+{:.no_toc}
+
+### Hospitality Assistant
+{:.no_toc}
+Frequently asked questions
+Local info
+Reservation changes
+Room services
+
+## Skills
+Calendar
+Email
+Point of Interest
+To Do
+
+### Experimental
+{:.no_toc}
+Automotive
+Hospitality
+ITSM
+News
+Restaurant Booking
+
diff --git a/docs/_docs/skills/samples/weather.md b/docs/_docs/skills/samples/weather.md
new file mode 100644
index 0000000000..b40fa19888
--- /dev/null
+++ b/docs/_docs/skills/samples/weather.md
@@ -0,0 +1,24 @@
+---
+category: Skills
+subcategory: Samples
+language: Experimental Skills
+title: Weather Skill
+description: Weather Skill provides the ability to look up the weather for a location.
+order: 11
+toc: true
+---
+
+# {{ page.title }}
+{:.no_toc}
+
+The [Weather skill]({{site.repo}}/tree/master/skills/src/csharp/experimental/weatherskill) provides a basic Skill that integrates with [AccuWeather](https://developer.accuweather.com) to demonstrate how a weather experience can be integrated into a Virtual Assistant.
+
+## Configuration
+{:.no_toc}
+
+1. Get your own API Key by following the instructions on [AccuWeather Getting Started](https://developer.accuweather.com/getting-started).
+1. Provide this value in your `appsettings.json` file.
+
+```
+"WeatherApiKey": "{YOUR_ACCUWEATHER_API_KEY}"
+```
\ No newline at end of file
diff --git a/docs/_docs/skills/tutorials/add-prebuilt-skill/1-intro.md b/docs/_docs/skills/tutorials/add-prebuilt-skill/1-intro.md
new file mode 100644
index 0000000000..617685b60f
--- /dev/null
+++ b/docs/_docs/skills/tutorials/add-prebuilt-skill/1-intro.md
@@ -0,0 +1,27 @@
+---
+layout: tutorial
+category: Skills
+subcategory: Connect to a sample
+title: Intro
+order: 1
+---
+
+# Tutorial: {{page.subcategory}}
+
+## {{ page.title }}
+
+### Purpose
+
+Install Bot Framework development prerequisites and add one of the Skills provided as part of the Virtual Assistant.
+
+### Prerequisites
+
+- [Create a Virtual Assistant]({{site.baseurl}}/virtual-assistant/tutorials/create-assistant/csharp/1-intro) to set up your environment.
+
+### Time to Complete
+
+15 minutes
+
+### Scenario
+
+Add one of the sample Skills provided in the [Bot Framework Solutions GitHub repository]({{site.repo}}/skills/csharp) to your Virtual Assistant. The Skills are only available in C# at this time, but they can be added to a TypeScript-based assistant.
\ No newline at end of file
diff --git a/docs/_docs/tutorials/add-prebuilt-skill/2_download_and_install.md b/docs/_docs/skills/tutorials/add-prebuilt-skill/2-download-and-install.md
similarity index 88%
rename from docs/_docs/tutorials/add-prebuilt-skill/2_download_and_install.md
rename to docs/_docs/skills/tutorials/add-prebuilt-skill/2-download-and-install.md
index 3a5e6ec3fb..126124887e 100644
--- a/docs/_docs/tutorials/add-prebuilt-skill/2_download_and_install.md
+++ b/docs/_docs/skills/tutorials/add-prebuilt-skill/2-download-and-install.md
@@ -1,14 +1,14 @@
 ---
 layout: tutorial
-category: Tutorials
-subcategory: Connect a pre-built skill
+category: Skills
+subcategory: Connect to a sample
 title: Download and install
 order: 2
 ---
 
-# Tutorial: Connect a pre-built skill to your assistant
+# Tutorial: {{page.subcategory}}
 
-## Download and install
+## {{ page.title }}
 
 > It's important to ensure all of the following pre-requisites are installed on your machine prior to attempting deployment, otherwise you may run into deployment issues.
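+
+Before continuing, a quick, illustrative way to confirm common tooling is on your PATH (the authoritative prerequisite list is the one on this page):
+
+```bash
+# Sanity-check that common Bot Framework prerequisites are installed
+# (the tool list here is illustrative, not exhaustive)
+dotnet --version
+node --version
+npm ls -g botskills
+```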
diff --git a/docs/_docs/skills/tutorials/add-prebuilt-skill/3-deploy-skill-project.md b/docs/_docs/skills/tutorials/add-prebuilt-skill/3-deploy-skill-project.md
new file mode 100644
index 0000000000..c090730cdd
--- /dev/null
+++ b/docs/_docs/skills/tutorials/add-prebuilt-skill/3-deploy-skill-project.md
@@ -0,0 +1,23 @@
+---
+layout: tutorial
+category: Skills
+subcategory: Connect to a sample
+title: Deploy skill project
+order: 3
+---
+
+# Tutorial: {{page.subcategory}}
+
+## {{ page.title }}
+
+Choose which of the provided Skills you wish to add to your Virtual Assistant, then click one of the links below and follow the **`Skill Deployment`** instructions to deploy your own instance of this Skill.
+
+| Name | Description |
+| ---- | ----------- |
+|[Calendar Skill]({{site.baseurl}}/skills/samples/calendar)|Add calendar capabilities to your assistant. Powered by Microsoft Graph and Google.|
+|[Email Skill]({{site.baseurl}}/skills/samples/email)|Add email capabilities to your assistant. Powered by Microsoft Graph and Google.|
+|[To Do Skill]({{site.baseurl}}/skills/samples/to-do)|Add task management capabilities to your assistant. Powered by Microsoft Graph.|
+|[Point of Interest Skill]({{site.baseurl}}/skills/samples/point-of-interest)|Find points of interest and directions. Powered by Azure Maps and FourSquare.|
+|[Automotive Skill]({{site.baseurl}}/skills/samples/automotive)|Industry-vertical Skill showcasing car feature control.|
+|[Phone Skill]({{site.baseurl}}/skills/samples/phone)|Make phone calls. Powered by Microsoft Graph and Google.|
+|[Experimental Skills]({{site.baseurl}}/skills/samples/experimental)|News, Search, Restaurant Booking and Weather.|
diff --git a/docs/_docs/tutorials/add-prebuilt-skill/4_connect_skill.md b/docs/_docs/skills/tutorials/add-prebuilt-skill/4-connect-skill.md
similarity index 68%
rename from docs/_docs/tutorials/add-prebuilt-skill/4_connect_skill.md
rename to docs/_docs/skills/tutorials/add-prebuilt-skill/4-connect-skill.md
index 71cee9561c..d20655b3f2 100644
--- a/docs/_docs/tutorials/add-prebuilt-skill/4_connect_skill.md
+++ b/docs/_docs/skills/tutorials/add-prebuilt-skill/4-connect-skill.md
@@ -1,14 +1,14 @@
 ---
 layout: tutorial
-category: Tutorials
-subcategory: Connect a pre-built skill
+category: Skills
+subcategory: Connect to a sample
 title: Connect skill
 order: 4
 ---
 
-# Tutorial: Connect a pre-built skill to your assistant
+# Tutorial: {{page.subcategory}}
 
-## Connect your Skill to an assistant
+## {{ page.title }}
 
 Once you've deployed your Skill, you can add it to your Assistant.
 
@@ -17,7 +17,7 @@ To add your new Skill to your assistant/Bot we provide a `botskills` command lin
 Run the following command from a command prompt **within the directory of your assistant/Bot**.
 ```bash
-botskills connect --remoteManifest "http://.azurewebsites.net/api/skill/manifest" --luisFolder "\Deployment\Resources\LU\en\" --cs
+botskills connect --botName YOUR_BOT_NAME --remoteManifest "http://.azurewebsites.net/api/skill/manifest" --luisFolder "\Deployment\Resources\LU\en\" --cs
 ```
 
 **Remember to re-publish your Assistant to Azure after you've added a Skill unless you plan on testing locally only**
diff --git a/docs/_docs/skills/tutorials/add-prebuilt-skill/5-test-your-skill.md b/docs/_docs/skills/tutorials/add-prebuilt-skill/5-test-your-skill.md
new file mode 100644
index 0000000000..e755eb7c6d
--- /dev/null
+++ b/docs/_docs/skills/tutorials/add-prebuilt-skill/5-test-your-skill.md
@@ -0,0 +1,21 @@
+---
+layout: tutorial
+category: Skills
+subcategory: Connect to a sample
+title: Test skill
+order: 5
+---
+
+# Tutorial: {{page.subcategory}}
+
+## {{ page.title }}
+
+Refer to the documentation page in the table below for an example question that you can ask to validate that your Assistant now supports additional capabilities with no additional code changes.
+
+| Name | Description |
+| ---- | ----------- |
+|[Calendar Skill]({{site.baseurl}}/skills/samples/calendar)|Add calendar capabilities to your assistant. Powered by Microsoft Graph and Google.|
+|[Email Skill]({{site.baseurl}}/skills/samples/email)|Add email capabilities to your assistant. Powered by Microsoft Graph and Google.|
+|[To Do Skill]({{site.baseurl}}/skills/samples/to-do)|Add task management capabilities to your assistant. Powered by Microsoft Graph.|
+|[Point of Interest Skill]({{site.baseurl}}/skills/samples/point-of-interest)|Find points of interest and directions. Powered by Azure Maps and FourSquare.|
+|[Experimental Skills]({{site.baseurl}}/skills/samples/experimental)|News, Search, Restaurant Booking and Weather.|
diff --git a/docs/_docs/tutorials/add-prebuilt-skill/6_next_steps.md b/docs/_docs/skills/tutorials/add-prebuilt-skill/6-next-steps.md
similarity index 84%
rename from docs/_docs/tutorials/add-prebuilt-skill/6_next_steps.md
rename to docs/_docs/skills/tutorials/add-prebuilt-skill/6-next-steps.md
index 01eb370535..d3b9abd13e 100644
--- a/docs/_docs/tutorials/add-prebuilt-skill/6_next_steps.md
+++ b/docs/_docs/skills/tutorials/add-prebuilt-skill/6-next-steps.md
@@ -1,14 +1,14 @@
 ---
 layout: tutorial
-category: Tutorials
-subcategory: Connect a pre-built skill
+category: Skills
+subcategory: Connect to a sample
 title: Next steps
 order: 6
 ---
 
-# Tutorial: Connect a pre-built skill to your assistant
+# Tutorial: {{page.subcategory}}
 
-## Next steps
+## {{ page.title }}
 
 Now that you've connected a pre-built skill to your assistant, try out these other tutorials:
 
@@ -19,7 +19,7 @@ Now that you've connected a pre-built skill to your assitant, try out these othe

Build a custom skill for your scenario.

diff --git a/docs/_docs/skills/tutorials/convert-v4-bot/csharp/1-intro.md b/docs/_docs/skills/tutorials/convert-v4-bot/csharp/1-intro.md
new file mode 100644
index 0000000000..4b131cb9c4
--- /dev/null
+++ b/docs/_docs/skills/tutorials/convert-v4-bot/csharp/1-intro.md
@@ -0,0 +1,26 @@
+---
+layout: tutorial
+category: Skills
+subcategory: Convert a v4 Bot
+language: C#
+title: Intro
+order: 1
+---
+
+# Tutorial: {{page.subcategory}} ({{page.language}})
+
+## {{ page.title }}
+
+### Purpose
+
+Creating a [Bot Framework Skill]({{site.baseurl}}/overview/skills) through the [Skill template]({{site.baseurl}}/skills/tutorials/create-skill/csharp/1-intro) is the easiest way to get started with creating a new Skill. If you have an existing v4 based Bot, we recommend you take the resulting project from this template and copy across your custom dialogs to get started quickly.
+
+### Prerequisites
+
+### Time to Complete
+
+10 minutes
+
+### Scenario
+
+Convert an existing v4 Bot into a Bot Framework Skill.
\ No newline at end of file
diff --git a/docs/_docs/skills/tutorials/convert-v4-bot/csharp/2-add-packages.md b/docs/_docs/skills/tutorials/convert-v4-bot/csharp/2-add-packages.md
new file mode 100644
index 0000000000..d9090878ef
--- /dev/null
+++ b/docs/_docs/skills/tutorials/convert-v4-bot/csharp/2-add-packages.md
@@ -0,0 +1,63 @@
+---
+layout: tutorial
+category: Skills
+subcategory: Convert a v4 Bot
+language: C#
+title: Add Bot Framework Solutions packages
+order: 2
+---
+
+# Tutorial: {{page.subcategory}} ({{page.language}})
+
+## {{ page.title }}
+
+1. Implement MVC architecture
+   - If you have an existing bot using the v4 SDK, follow the MVC approach from this [Bot Builder sample](https://github.com/Microsoft/BotBuilder-Samples/tree/master/samples/csharp_dotnetcore/05.multi-turn-prompt).
+
+1. Enable the Bot Framework Solutions packages
+   - Add [`Microsoft.Bot.Builder.Solutions`](https://www.nuget.org/packages/Microsoft.Bot.Builder.Solutions/) and [`Microsoft.Bot.Builder.Skills`](https://www.nuget.org/packages/Microsoft.Bot.Builder.Skills/) NuGet packages to your solution.
+
+2. Create a custom Skill adapter
+
+   - Create a custom adapter class that derives from `SkillAdapter`, and add the `SkillMiddleware` to it.
+
+   ```csharp
+   public class CustomSkillAdapter : SkillAdapter
+   {
+       public CustomSkillAdapter(
+           BotSettings settings,
+           ICredentialProvider credentialProvider,
+           BotStateSet botStateSet,
+           ResponseManager responseManager,
+           IBotTelemetryClient telemetryClient,
+           UserState userState)
+           : base(credentialProvider)
+       {
+           ...
+           Use(new SkillMiddleware(userState));
+       }
+   }
+   ```
+
+3. Add the Skill services to startup
+   - In your `startup.cs` file, add a Transient registration for your adapter, for example the custom adapter created in the previous step:
+
+   ```csharp
+   services.AddTransient<SkillAdapter, CustomSkillAdapter>();
+   ```
+
+
+4. Update your BotController class
+
+   - Update your `BotController.cs` class to derive from `SkillController`
+
+   ```csharp
+   [ApiController]
+   public class BotController : SkillController
+   {
+       public BotController(IServiceProvider serviceProvider, BotSettingsBase botSettings)
+           : base(serviceProvider, botSettings)
+       { }
+   }
+   ```
\ No newline at end of file
diff --git a/docs/_docs/skills/tutorials/convert-v4-bot/csharp/3-skill-manifest.md b/docs/_docs/skills/tutorials/convert-v4-bot/csharp/3-skill-manifest.md
new file mode 100644
index 0000000000..71e7f3bc0b
--- /dev/null
+++ b/docs/_docs/skills/tutorials/convert-v4-bot/csharp/3-skill-manifest.md
@@ -0,0 +1,43 @@
+---
+layout: tutorial
+category: Skills
+subcategory: Convert a v4 Bot
+language: C#
+title: Add a Skill Manifest
+order: 3
+---
+
+# Tutorial: {{page.subcategory}} ({{page.language}})
+
+## {{ page.title }}
+
+Create a `manifestTemplate.json` file in the root of your Bot. Ensure, at a minimum, the root-level `id`, `name`, `description` and action details are completed.
+
+```json
+{
+  "id": "",
+  "name": "",
+  "description": "",
+  "iconUrl": "",
+  "authenticationConnections": [ ],
+  "actions": [
+    {
+      "id": "",
+      "definition": {
+        "description": "",
+        "slots": [ ],
+        "triggers": {
+          "utteranceSources": [
+            {
+              "locale": "en",
+              "source": [
+                "luisModel#intent"
+              ]
+            }
+          ]
+        }
+      }
+    }
+  ]
+}
+```
\ No newline at end of file
diff --git a/docs/_docs/skills/tutorials/convert-v4-bot/typescript/1-intro.md b/docs/_docs/skills/tutorials/convert-v4-bot/typescript/1-intro.md
new file mode 100644
index 0000000000..e0f6ecf5d6
--- /dev/null
+++ b/docs/_docs/skills/tutorials/convert-v4-bot/typescript/1-intro.md
@@ -0,0 +1,26 @@
+---
+layout: tutorial
+category: Skills
+subcategory: Convert a v4 Bot
+language: TypeScript
+title: Intro
+order: 1
+---
+
+# Tutorial: {{page.subcategory}} ({{page.language}})
+
+## {{ page.title }}
+
+### Purpose
+
+Creating a [Bot Framework Skill]({{site.baseurl}}/overview/skills) through the [Skill template]({{site.baseurl}}/skills/tutorials/create-skill/csharp/1-intro) is the easiest way to get started with creating a new Skill. If you have an existing v4 based Bot, we recommend you take the resulting project from this template and copy across your custom dialogs to get started quickly.
+
+### Prerequisites
+
+### Time to Complete
+
+10 minutes
+
+### Scenario
+
+Convert an existing v4 Bot into a Bot Framework Skill.
\ No newline at end of file
diff --git a/docs/_docs/skills/tutorials/convert-v4-bot/typescript/2-add-packages.md b/docs/_docs/skills/tutorials/convert-v4-bot/typescript/2-add-packages.md
new file mode 100644
index 0000000000..ce3212e369
--- /dev/null
+++ b/docs/_docs/skills/tutorials/convert-v4-bot/typescript/2-add-packages.md
@@ -0,0 +1,64 @@
+---
+layout: tutorial
+category: Skills
+subcategory: Convert a v4 Bot
+language: TypeScript
+title: Add Bot Framework Solutions packages
+order: 2
+---
+
+# Tutorial: {{page.subcategory}} ({{page.language}})
+
+## {{ page.title }}
+
+1. Enable the Bot Framework Solutions packages
+   - Add [`botbuilder-solutions`](https://www.npmjs.com/package/botbuilder-solutions) and [`botbuilder-skills`](https://www.npmjs.com/package/botbuilder-skills) npm packages to your solution.
+
+2. Create a custom Skill adapter
+   - Create a custom adapter that derives from `SkillHttpBotAdapter` and ensure the `SkillMiddleware` is added:
+
+   ```typescript
+   export class CustomSkillAdapter extends SkillHttpBotAdapter {
+       constructor(
+           telemetryClient: TelemetryClient,
+           conversationState: ConversationState,
+           skillContextAccessor: StatePropertyAccessor<SkillContext>,
+           dialogStateAccessor: StatePropertyAccessor<DialogState>,
+           ...
+       ) {
+           super(telemetryClient);
+           [...]
+           this.use(new SkillMiddleware(conversationState, skillContextAccessor, dialogStateAccessor));
+           [...]
+       }
+   }
+   ```
+
+3. Add the Skill services to startup
+   - Add the new adapter to your `index.ts` file.
+
+   ```typescript
+   const skillBotAdapter: CustomSkillAdapter = new CustomSkillAdapter(
+       telemetryClient,
+       conversationState,
+       skillContextAccessor,
+       dialogStateAccessor,
+       ...);
+   const skillAdapter: SkillHttpAdapter = new SkillHttpAdapter(
+       skillBotAdapter
+   );
+   ```
+
+4. Add the Skill endpoint
+   - Update your `index.ts` to handle messages and interact with the bot as a skill.
+
+   ```typescript
+   // Listen for incoming assistant requests
+   server.post('/api/skill/messages', (req: restify.Request, res: restify.Response) => {
+       // Route the received request to the adapter for processing
+       skillAdapter.processActivity(req, res, async (turnContext: TurnContext) => {
+           // Route to the bot's activity handler
+           await bot.run(turnContext);
+       });
+   });
+   ```
\ No newline at end of file
diff --git a/docs/_docs/skills/tutorials/convert-v4-bot/typescript/3-skill-manifest.md b/docs/_docs/skills/tutorials/convert-v4-bot/typescript/3-skill-manifest.md
new file mode 100644
index 0000000000..c92dba8ca5
--- /dev/null
+++ b/docs/_docs/skills/tutorials/convert-v4-bot/typescript/3-skill-manifest.md
@@ -0,0 +1,43 @@
+---
+layout: tutorial
+category: Skills
+subcategory: Convert a v4 Bot
+language: TypeScript
+title: Add a Skill Manifest
+order: 3
+---
+
+# Tutorial: {{page.subcategory}} ({{page.language}})
+
+## {{ page.title }}
+
+Create a `manifestTemplate.json` file in the root of your Bot. Ensure, at a minimum, the root-level `id`, `name`, `description` and action details are completed.
+
+```json
+{
+  "id": "",
+  "name": "",
+  "description": "",
+  "iconUrl": "",
+  "authenticationConnections": [ ],
+  "actions": [
+    {
+      "id": "",
+      "definition": {
+        "description": "",
+        "slots": [ ],
+        "triggers": {
+          "utteranceSources": [
+            {
+              "locale": "en",
+              "source": [
+                "luisModel#intent"
+              ]
+            }
+          ]
+        }
+      }
+    }
+  ]
+}
+```
\ No newline at end of file
diff --git a/docs/_docs/tutorials/csharp/create-skill/1_intro.md b/docs/_docs/skills/tutorials/create-skill/csharp/1-intro.md
similarity index 71%
rename from docs/_docs/tutorials/csharp/create-skill/1_intro.md
rename to docs/_docs/skills/tutorials/create-skill/csharp/1-intro.md
index 53d816c31a..64f168da56 100644
--- a/docs/_docs/tutorials/csharp/create-skill/1_intro.md
+++ b/docs/_docs/skills/tutorials/create-skill/csharp/1-intro.md
@@ -1,6 +1,7 @@
 ---
-category: Tutorials
-subcategory: Create a skill
+layout: tutorial
+category: Skills
+subcategory: Create
 language: C#
 title: Intro
 order: 1
@@ -8,7 +9,7 @@ order: 1
 
 # Tutorial: {{page.subcategory}} ({{page.language}})
 
-## Intro
+## {{ page.title }}
 
 ### Purpose
 
@@ -16,7 +17,7 @@ Install Bot Framework development prerequisites and create a Skill using the Bot
 
 ### Prerequisites
 
-If you haven't [created a Virtual Assistant]({{site.baseurl}}/tutorials/csharp/create-assistant/1_intro), [download and install]({{site.baseurl}}/tutorials/csharp/create-skill/2_download_and_install) the Bot Framework development prerequisites.
+If you haven't [created a Virtual Assistant]({{site.baseurl}}/virtual-assistant/tutorials/create-assistant/csharp/1-intro), [download and install]({{site.baseurl}}/skills/tutorials/create-skill/csharp/2-download-and-install) the Bot Framework development prerequisites.
 
 - Retrieve your LUIS Authoring Key
   - Review the [LUIS regions](https://docs.microsoft.com/en-us/azure/cognitive-services/luis/luis-reference-regions) documentation page for the correct LUIS portal for the region you plan to deploy to. Note that www.luis.ai refers to the US region and an authoring key retrieved from this portal will not work within a Europe deployment.
diff --git a/docs/_docs/tutorials/csharp/create-skill/2_download_and_install.md b/docs/_docs/skills/tutorials/create-skill/csharp/2-download-and-install.md
similarity index 95%
rename from docs/_docs/tutorials/csharp/create-skill/2_download_and_install.md
rename to docs/_docs/skills/tutorials/create-skill/csharp/2-download-and-install.md
index f3b8f85938..84b10a9792 100644
--- a/docs/_docs/tutorials/csharp/create-skill/2_download_and_install.md
+++ b/docs/_docs/skills/tutorials/create-skill/csharp/2-download-and-install.md
@@ -1,6 +1,7 @@
 ---
-category: Tutorials
-subcategory: Create a skill
+layout: tutorial
+category: Skills
+subcategory: Create
 language: C#
 title: Download and install
 order: 2
@@ -8,7 +9,7 @@ order: 2
 
 # Tutorial: {{page.subcategory}} ({{page.language}})
 
-## Download and install
+## {{ page.title }}
 
 > It's important to ensure all of the following pre-requisites are installed on your machine prior to attempting deployment, otherwise you may run into deployment issues.
diff --git a/docs/_docs/tutorials/csharp/create-skill/3_create_your_skill.md b/docs/_docs/skills/tutorials/create-skill/csharp/3-create-your-skill.md
similarity index 98%
rename from docs/_docs/tutorials/csharp/create-skill/3_create_your_skill.md
rename to docs/_docs/skills/tutorials/create-skill/csharp/3-create-your-skill.md
index 0799a4042e..4c1602a64e 100644
--- a/docs/_docs/tutorials/csharp/create-skill/3_create_your_skill.md
+++ b/docs/_docs/skills/tutorials/create-skill/csharp/3-create-your-skill.md
@@ -1,6 +1,7 @@
 ---
-category: Tutorials
-subcategory: Create a skill
+layout: tutorial
+category: Skills
+subcategory: Create
 language: C#
 title: Create your skill project
 order: 3
@@ -8,7 +9,7 @@ order: 3
 
 # Tutorial: {{page.subcategory}} ({{page.language}})
 
-## Create your Skill
+## {{ page.title }}
 
 1. In Visual Studio, click **File > New Project**.
 2. Under Bot, select **Skill Template**.
diff --git a/docs/_docs/tutorials/csharp/create-skill/4_provision_your_azure_resources.md b/docs/_docs/skills/tutorials/create-skill/csharp/4-provision-your-azure-resources.md
similarity index 94%
rename from docs/_docs/tutorials/csharp/create-skill/4_provision_your_azure_resources.md
rename to docs/_docs/skills/tutorials/create-skill/csharp/4-provision-your-azure-resources.md
index 79c7077a83..28d40712f7 100644
--- a/docs/_docs/tutorials/csharp/create-skill/4_provision_your_azure_resources.md
+++ b/docs/_docs/skills/tutorials/create-skill/csharp/4-provision-your-azure-resources.md
@@ -1,6 +1,7 @@
 ---
-category: Tutorials
-subcategory: Create a skill
+layout: tutorial
+category: Skills
+subcategory: Create
 language: C#
 title: Provision your Azure resources
 order: 4
@@ -8,7 +9,7 @@ order: 4
 
 # Tutorial: {{page.subcategory}} ({{page.language}})
 
-## Deploy your Skill
+## {{ page.title }}
 
 The Virtual Assistant requires the following dependencies for end-to-end operation, which are created through an ARM script that you can modify as required.
 
@@ -36,6 +37,6 @@ The Virtual Assistant require the following dependencies for end to end operatio
 `appPassword` | The password for the [Azure Active Directory App](https://ms.portal.azure.com/#blade/Microsoft_AAD_IAM/ActiveDirectoryMenuBlade/Overview) that will be used by your bot. It must be at least 16 characters long, contain at least 1 special character, and contain at least 1 numeric character. If using an existing app, this must be the existing password. | **Yes**
 `luisAuthoringKey` | The authoring key for your LUIS account. It can be found at https://www.luis.ai/user/settings or https://eu.luis.ai/user/settings | **Yes**
 
-You can find more detailed deployment steps including customization in the [Virtual Assistant and Skill Template deployment]({{site.baseurl}}/reference/virtual-assistant/deploymentscripts/) page.
+You can find more detailed deployment steps, including customization, in the [Virtual Assistant and Skill Template deployment]({{site.baseurl}}/help/reference/deployment-scripts/) page.
 
 > Note that if you choose to deploy your Skill manually or re-use an existing App-Service, please ensure that Web Sockets are enabled on the App Service configuration pane. The deployment scripts supplied as part of the Skill template will do this automatically.
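+
+If you do deploy manually, one way to enable Web Sockets is through the Azure CLI (a sketch; the resource names below are placeholders):
+
+```bash
+# Enable Web Sockets on an existing App Service (names are placeholders)
+az webapp config set --name <your-skill-app> --resource-group <your-resource-group> --web-sockets-enabled true
+```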
diff --git a/docs/_docs/tutorials/csharp/create-skill/5_run_your_skill.md b/docs/_docs/skills/tutorials/create-skill/csharp/5-run-your-skill.md
similarity index 89%
rename from docs/_docs/tutorials/csharp/create-skill/5_run_your_skill.md
rename to docs/_docs/skills/tutorials/create-skill/csharp/5-run-your-skill.md
index a559d5509e..f5333ecfe0 100644
--- a/docs/_docs/tutorials/csharp/create-skill/5_run_your_skill.md
+++ b/docs/_docs/skills/tutorials/create-skill/csharp/5-run-your-skill.md
@@ -1,6 +1,7 @@
 ---
-category: Tutorials
-subcategory: Create a skill
+layout: tutorial
+category: Skills
+subcategory: Create
 language: C#
 title: Run your skill
 order: 5
@@ -8,7 +9,7 @@ order: 5
 
 # Tutorial: {{page.subcategory}} ({{page.language}})
 
-## Test your Skill
+## {{ page.title }}
 
 Once deployment is complete, you can start debugging through the following steps:
diff --git a/docs/_docs/tutorials/csharp/create-skill/6_add_your_skill.md b/docs/_docs/skills/tutorials/create-skill/csharp/6-add-your-skill.md
similarity index 64%
rename from docs/_docs/tutorials/csharp/create-skill/6_add_your_skill.md
rename to docs/_docs/skills/tutorials/create-skill/csharp/6-add-your-skill.md
index 3634085e73..90a029a45a 100644
--- a/docs/_docs/tutorials/csharp/create-skill/6_add_your_skill.md
+++ b/docs/_docs/skills/tutorials/create-skill/csharp/6-add-your-skill.md
@@ -1,6 +1,7 @@
 ---
-category: Tutorials
-subcategory: Create a skill
+layout: tutorial
+category: Skills
+subcategory: Create
 language: C#
 title: Add your skill to a Virtual Assistant
 order: 6
@@ -8,6 +9,8 @@ order: 6
 
 # Tutorial: {{page.subcategory}} ({{page.language}})
 
+## {{ page.title }}
+
 ## Validate the Skill manifest endpoint
 
 - To validate your Skill is deployed and working, open a browser window and navigate to your deployed Skill manifest (`/api/skill/manifest` endpoint), e.g. `http://localhost:3978/api/skill/manifest`
 
@@ -19,7 +22,7 @@ To add your new Skill to your assistant/Bot we provide a `botskills` command lin
 Run the following command from a command prompt **within the directory of your assistant/Bot**.
 
 ```bash
-botskills connect --remoteManifest "http://.azurewebsites.net/api/skill/manifest" --luisFolder "/Deployment/Resources/LU/en/" --cs
+botskills connect --botName YOUR_BOT_NAME --remoteManifest "http://.azurewebsites.net/api/skill/manifest" --luisFolder "/Deployment/Resources/LU/en/" --cs
 ```
 
-See the [Adding Skills]({{site.baseurl}}/howto/skills/addingskills) for more detail on how to add skills.
\ No newline at end of file
+See the [Adding Skills]({{site.baseurl}}/skills/handbook/add-skills-to-a-virtual-assistant/) documentation for more detail on how to add skills.
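+
+A quick way to check the manifest endpoint from the command line (a sketch; the URL is a placeholder for your deployed Skill):
+
+```bash
+# Fetch the Skill manifest to confirm the endpoint responds (URL is a placeholder)
+curl -s "https://<your-skill>.azurewebsites.net/api/skill/manifest"
+```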
diff --git a/docs/_docs/tutorials/csharp/create-skill/7_invoke_your_skill.md b/docs/_docs/skills/tutorials/create-skill/csharp/7-invoke-your-skill.md
similarity index 72%
rename from docs/_docs/tutorials/csharp/create-skill/7_invoke_your_skill.md
rename to docs/_docs/skills/tutorials/create-skill/csharp/7-invoke-your-skill.md
index 1eb5d2425d..e59b3ce392 100644
--- a/docs/_docs/tutorials/csharp/create-skill/7_invoke_your_skill.md
+++ b/docs/_docs/skills/tutorials/create-skill/csharp/7-invoke-your-skill.md
@@ -1,6 +1,7 @@
 ---
-category: Tutorials
-subcategory: Create a skill
+layout: tutorial
+category: Skills
+subcategory: Create
 language: C#
 title: Invoke your skill
 order: 7
@@ -8,6 +9,6 @@ order: 7
 
 # Tutorial: {{page.subcategory}} ({{page.language}})
 
-## Testing your Skill
+## {{ page.title }}
 
 Test that your skill works in your Bot through the emulator by typing "sample dialog"
diff --git a/docs/_docs/skills/tutorials/create-skill/csharp/8-next-steps.md b/docs/_docs/skills/tutorials/create-skill/csharp/8-next-steps.md
new file mode 100644
index 0000000000..540ffbbe59
--- /dev/null
+++ b/docs/_docs/skills/tutorials/create-skill/csharp/8-next-steps.md
@@ -0,0 +1,47 @@
+---
+layout: tutorial
+category: Skills
+subcategory: Create
+language: C#
+title: Next steps
+order: 8
+---
+
+# Tutorial: {{page.subcategory}} ({{page.language}})
+
+## {{ page.title }}
+
+Now that you've created your skill, try one of these tutorials:
+
+
\ No newline at end of file
diff --git a/docs/_docs/tutorials/typescript/create-skill/1_intro.md b/docs/_docs/skills/tutorials/create-skill/typescript/1-intro.md
similarity index 93%
rename from docs/_docs/tutorials/typescript/create-skill/1_intro.md
rename to docs/_docs/skills/tutorials/create-skill/typescript/1-intro.md
index 909e0da9b9..c6c07991fe 100644
--- a/docs/_docs/tutorials/typescript/create-skill/1_intro.md
+++ b/docs/_docs/skills/tutorials/create-skill/typescript/1-intro.md
@@ -1,6 +1,7 @@
 ---
-category: Tutorials
-subcategory: Create a skill
+layout: tutorial
+category: Skills
+subcategory: Create
 language: TypeScript
 title: Intro
 order: 1
@@ -8,7 +9,7 @@ order: 1
 
 # Tutorial: {{page.subcategory}} ({{page.language}})
 
-## Intro
+## {{ page.title }}
 
 ### Purpose
 
diff --git a/docs/_docs/tutorials/typescript/create-skill/2_download_and_install.md b/docs/_docs/skills/tutorials/create-skill/typescript/2-download-and-install.md
similarity index 94%
rename from docs/_docs/tutorials/typescript/create-skill/2_download_and_install.md
rename to docs/_docs/skills/tutorials/create-skill/typescript/2-download-and-install.md
index 8cc11689e6..f379d57250 100644
--- a/docs/_docs/tutorials/typescript/create-skill/2_download_and_install.md
+++ b/docs/_docs/skills/tutorials/create-skill/typescript/2-download-and-install.md
@@ -1,6 +1,7 @@
 ---
-category: Tutorials
-subcategory: Create a skill
+layout: tutorial
+category: Skills
+subcategory: Create
 language: TypeScript
 title: Download and install
 order: 2
@@ -8,7 +9,7 @@ order: 2
 
 # Tutorial: {{page.subcategory}} ({{page.language}})
 
-## Download and install
+## {{ page.title }}
 
 > It's important to ensure all of the following prerequisites are installed on your machine prior to attempting deployment, otherwise you may run into deployment issues.
diff --git a/docs/_docs/tutorials/typescript/create-skill/3_create_your_skill.md b/docs/_docs/skills/tutorials/create-skill/typescript/3-create-your-skill.md
similarity index 97%
rename from docs/_docs/tutorials/typescript/create-skill/3_create_your_skill.md
rename to docs/_docs/skills/tutorials/create-skill/typescript/3-create-your-skill.md
index d7186bb184..0fec11adb1 100644
--- a/docs/_docs/tutorials/typescript/create-skill/3_create_your_skill.md
+++ b/docs/_docs/skills/tutorials/create-skill/typescript/3-create-your-skill.md
@@ -1,6 +1,7 @@
 ---
-category: Tutorials
-subcategory: Create a skill
+layout: tutorial
+category: Skills
+subcategory: Create
 language: TypeScript
 title: Create your skill project
 order: 3
@@ -8,7 +9,7 @@ order: 3
 
 # Tutorial: {{page.subcategory}} ({{page.language}})
 
-## Create your Skill
+## {{ page.title }}
 
 Install the botbuilder-assistant generator
diff --git a/docs/_docs/tutorials/typescript/create-skill/4_provision_your_azure_resources.md b/docs/_docs/skills/tutorials/create-skill/typescript/4-provision-your-azure-resources.md
similarity index 94%
rename from docs/_docs/tutorials/typescript/create-skill/4_provision_your_azure_resources.md
rename to docs/_docs/skills/tutorials/create-skill/typescript/4-provision-your-azure-resources.md
index e0ba7f2314..3402c3b91d 100644
--- a/docs/_docs/tutorials/typescript/create-skill/4_provision_your_azure_resources.md
+++ b/docs/_docs/skills/tutorials/create-skill/typescript/4-provision-your-azure-resources.md
@@ -1,6 +1,7 @@
 ---
-category: Tutorials
-subcategory: Create a skill
+layout: tutorial
+category: Skills
+subcategory: Create
 language: TypeScript
 title: Provision your Azure resources
 order: 4
@@ -8,7 +9,7 @@ order: 4
 
 # Tutorial: {{page.subcategory}} ({{page.language}})
 
-## Deploy your Skill
+## {{ page.title }}
 
 The Virtual Assistant requires the following dependencies for end-to-end operation, which are created through an ARM script that you can modify as required.
 
@@ -36,6 +37,6 @@ The Virtual Assistant require the following dependencies for end to end operatio
 `appPassword` | The password for the [Azure Active Directory App](https://ms.portal.azure.com/#blade/Microsoft_AAD_IAM/ActiveDirectoryMenuBlade/Overview) that will be used by your bot. It must be at least 16 characters long, contain at least 1 special character, and contain at least 1 numeric character. If using an existing app, this must be the existing password. | **Yes**
 `luisAuthoringKey` | The authoring key for your LUIS account. It can be found at https://www.luis.ai/user/settings or https://eu.luis.ai/user/settings | **Yes**
 
-You can find more detailed deployment steps including customization in the [Virtual Assistant and Skill Template deployment]({{site.baseurl}}/reference/virtual-assistant/deploymentscripts/) page.
+You can find more detailed deployment steps, including customization, in the [Virtual Assistant and Skill Template deployment]({{site.baseurl}}/help/reference/deployment-scripts/) page.
 
 > Note that if you choose to deploy your Skill manually or re-use an existing App-Service, please ensure that Web Sockets are enabled on the App Service configuration pane. The deployment scripts supplied as part of the Skill template will do this automatically.
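+
+To verify the setting afterwards, you can query the App Service configuration (a sketch; names are placeholders):
+
+```bash
+# Confirm Web Sockets are enabled on the App Service (names are placeholders)
+az webapp config show --name <your-skill-app> --resource-group <your-resource-group> --query webSocketsEnabled
+```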
diff --git a/docs/_docs/tutorials/typescript/create-skill/5_run_your_skill.md b/docs/_docs/skills/tutorials/create-skill/typescript/5-run-your-skill.md
similarity index 90%
rename from docs/_docs/tutorials/typescript/create-skill/5_run_your_skill.md
rename to docs/_docs/skills/tutorials/create-skill/typescript/5-run-your-skill.md
index 3a58e82809..14351c63fe 100644
--- a/docs/_docs/tutorials/typescript/create-skill/5_run_your_skill.md
+++ b/docs/_docs/skills/tutorials/create-skill/typescript/5-run-your-skill.md
@@ -1,6 +1,7 @@
 ---
-category: Tutorials
-subcategory: Create a skill
+layout: tutorial
+category: Skills
+subcategory: Create
 language: TypeScript
 title: Run your skill
 order: 5
@@ -8,7 +9,7 @@ order: 5
 
 # Tutorial: {{page.subcategory}} ({{page.language}})
 
-## Test your Skill
+## {{ page.title }}
 
 Once deployment is complete, you can start debugging through the following steps:
diff --git a/docs/_docs/tutorials/typescript/create-skill/6_add_your_skill.md b/docs/_docs/skills/tutorials/create-skill/typescript/6-add-your-skill.md
similarity index 65%
rename from docs/_docs/tutorials/typescript/create-skill/6_add_your_skill.md
rename to docs/_docs/skills/tutorials/create-skill/typescript/6-add-your-skill.md
index f7b11ca9c7..7bb94fdaae 100644
--- a/docs/_docs/tutorials/typescript/create-skill/6_add_your_skill.md
+++ b/docs/_docs/skills/tutorials/create-skill/typescript/6-add-your-skill.md
@@ -1,6 +1,7 @@
 ---
-category: Tutorials
-subcategory: Create a skill
+layout: tutorial
+category: Skills
+subcategory: Create
 language: TypeScript
 title: Add your skill to a Virtual Assistant
 order: 6
@@ -8,6 +9,8 @@ order: 6
 
 # Tutorial: {{page.subcategory}} ({{page.language}})
 
+## {{ page.title }}
+
 ## Validate the Skill manifest endpoint
 
 - To validate your Skill is deployed and working, open a browser window and navigate to your deployed Skill manifest (`/api/skill/manifest` endpoint), e.g. `http://localhost:3978/api/skill/manifest`
 
@@ -19,7 +22,7 @@ To add your new Skill to your assistant/Bot we provide a `botskills` command lin
 Run the following command from a command prompt **within the directory of your assistant/Bot**.
 
 ```bash
-botskills connect --remoteManifest "http://.azurewebsites.net/api/skill/manifest" --luisFolder "/Deployment/Resources/LU/en/" --ts
+botskills connect --botName YOUR_BOT_NAME --remoteManifest "http://.azurewebsites.net/api/skill/manifest" --luisFolder "/Deployment/Resources/LU/en/" --ts
 ```
 
-See the [Adding Skills]({{site.baseurl}}/howto/skills/addingskills) for more detail on how to add skills.
\ No newline at end of file
+See the [Adding Skills]({{site.baseurl}}/skills/handbook/add-skills-to-a-virtual-assistant/) documentation for more detail on how to add skills.
diff --git a/docs/_docs/tutorials/typescript/create-skill/7_invoke_your_skill.md b/docs/_docs/skills/tutorials/create-skill/typescript/7-invoke-your-skill.md
similarity index 72%
rename from docs/_docs/tutorials/typescript/create-skill/7_invoke_your_skill.md
rename to docs/_docs/skills/tutorials/create-skill/typescript/7-invoke-your-skill.md
index ccd5e87575..9a987f37ee 100644
--- a/docs/_docs/tutorials/typescript/create-skill/7_invoke_your_skill.md
+++ b/docs/_docs/skills/tutorials/create-skill/typescript/7-invoke-your-skill.md
@@ -1,6 +1,7 @@
 ---
-category: Tutorials
-subcategory: Create a skill
+layout: tutorial
+category: Skills
+subcategory: Create
 language: TypeScript
 title: Invoke your skill
 order: 7
@@ -8,6 +9,6 @@ order: 7
 
 # Tutorial: {{page.subcategory}} ({{page.language}})
 
-## Testing your Skill
+## {{ page.title }}
 
 Test that your skill works in your Bot through the emulator by typing "sample dialog"
diff --git a/docs/_docs/skills/tutorials/create-skill/typescript/8-next-steps.md b/docs/_docs/skills/tutorials/create-skill/typescript/8-next-steps.md
new file mode 100644
index 0000000000..9229486f6c
--- /dev/null
+++ b/docs/_docs/skills/tutorials/create-skill/typescript/8-next-steps.md
@@ -0,0 +1,47 @@
+---
+layout: tutorial
+category: Skills
+subcategory: Create
+language: TypeScript
+title: Next steps
+order: 8
+---
+
+# Tutorial: {{page.subcategory}} ({{page.language}})
+
+## {{ page.title }}
+
+Now that you've created your custom skill, try one of these tutorials:
+
+
\ No newline at end of file
diff --git a/docs/_docs/tutorials/csharp/customize-skill/1_intro.md b/docs/_docs/skills/tutorials/customize-skill/csharp/1-intro.md
similarity index 64%
rename from docs/_docs/tutorials/csharp/customize-skill/1_intro.md
rename to docs/_docs/skills/tutorials/customize-skill/csharp/1-intro.md
index b9e4f9ed94..c4ed0334d0 100644
--- a/docs/_docs/tutorials/csharp/customize-skill/1_intro.md
+++ b/docs/_docs/skills/tutorials/customize-skill/csharp/1-intro.md
@@ -1,6 +1,7 @@
 ---
-category: Tutorials
-subcategory: Customize a skill
+layout: tutorial
+category: Skills
+subcategory: Customize
 language: C#
 title: Intro
 order: 1
@@ -8,7 +9,7 @@ order: 1
 
 # Tutorial: {{page.subcategory}} ({{page.language}})
 
-## Intro
+## {{ page.title }}
 
 ### Purpose
 
@@ -16,7 +17,7 @@ Learn how to navigate your skill project and make common customizations.
 
 ### Prerequisites
 
-[Create a skill]({{site.baseurl}}/tutorials/csharp/create-skill/1_intro) to setup your environment.
+[Create a skill]({{site.baseurl}}/skills/tutorials/create-skill/csharp/1-intro) to set up your environment.
### Time to Complete diff --git a/docs/_docs/tutorials/csharp/customize-skill/2_edit_your_responses.md b/docs/_docs/skills/tutorials/customize-skill/csharp/2-edit-your-responses.md similarity index 88% rename from docs/_docs/tutorials/csharp/customize-skill/2_edit_your_responses.md rename to docs/_docs/skills/tutorials/customize-skill/csharp/2-edit-your-responses.md index 1b91c9d7cf..13fdee2b66 100644 --- a/docs/_docs/tutorials/csharp/customize-skill/2_edit_your_responses.md +++ b/docs/_docs/skills/tutorials/customize-skill/csharp/2-edit-your-responses.md @@ -1,6 +1,7 @@ --- -category: Tutorials -subcategory: Customize a skill +layout: tutorial +category: Skills +subcategory: Customize language: C# title: Edit responses order: 2 @@ -15,4 +16,4 @@ Edit the MainResponses.json and SharedResponses.json files in the Responses fold To add additional responses, create a new folder in the Responses directory, then copy the .tt and .json files from Responses/Sample. Rename the files to match your domain, and modify the json file as needed. Set the Build Action of the json files to `EmbeddedResource`. In the Build menu of Visual Studio, run "Transform All t4 templates" to generate the necessary .cs file. In startup, register your response class in the ResponseManager. ### Learn More -For more information, refer to the [Skill Responses reference]({{site.baseurl}}/reference/skills/responses). +For more information, refer to the [Skill Responses reference]({{site.baseurl}}/skills/handbook/language-generation). diff --git a/docs/_docs/tutorials/csharp/customize-skill/3_update_skill_manifest.md b/docs/_docs/skills/tutorials/customize-skill/csharp/3-update-skill-manifest.md similarity index 87% rename from docs/_docs/tutorials/csharp/customize-skill/3_update_skill_manifest.md rename to docs/_docs/skills/tutorials/customize-skill/csharp/3-update-skill-manifest.md index d8b26fe692..cd4ae75735 100644 --- a/docs/_docs/tutorials/csharp/customize-skill/3_update_skill_manifest.md +++ b/docs/_docs/skills/tutorials/customize-skill/csharp/3-update-skill-manifest.md @@ -1,6 +1,7 @@ --- -category: Tutorials -subcategory: Customize a skill +layout: tutorial +category: Skills +subcategory: Customize language: C# title: Update your skill manifest order: 3 diff --git a/docs/_docs/tutorials/csharp/customize-skill/4_publish_your_skill.md b/docs/_docs/skills/tutorials/customize-skill/csharp/4-publish-your-skill.md similarity index 81% rename from docs/_docs/tutorials/csharp/customize-skill/4_publish_your_skill.md rename to docs/_docs/skills/tutorials/customize-skill/csharp/4-publish-your-skill.md index 754615466a..1aeab907ac 100644 --- a/docs/_docs/tutorials/csharp/customize-skill/4_publish_your_skill.md +++ b/docs/_docs/skills/tutorials/customize-skill/csharp/4-publish-your-skill.md @@ -1,6 +1,7 @@ --- -category: Tutorials -subcategory: Customize a skill +layout: tutorial +category: Skills +subcategory: Customize language: C# title: Publish your skill order: 4 diff --git a/docs/_docs/skills/tutorials/customize-skill/csharp/5-next-steps.md b/docs/_docs/skills/tutorials/customize-skill/csharp/5-next-steps.md new file mode 100644 index 0000000000..e7f33e9bd7 --- /dev/null +++ b/docs/_docs/skills/tutorials/customize-skill/csharp/5-next-steps.md @@ -0,0 +1,36 @@ +--- +layout: tutorial +category: Skills +subcategory: Customize +language: C# +title: Next steps +order: 5 +--- + +# Tutorial: {{page.subcategory}} ({{page.language}}) + +## Next Steps +Now that you've customized your skill, try one of these tutorials: + + \ No newline 
at end of file
diff --git a/docs/_docs/tutorials/typescript/customize-skill/1_intro.md b/docs/_docs/skills/tutorials/customize-skill/typescript/1-intro.md
similarity index 64%
rename from docs/_docs/tutorials/typescript/customize-skill/1_intro.md
rename to docs/_docs/skills/tutorials/customize-skill/typescript/1-intro.md
index 4e20b7e7d9..dcb4d3a036 100644
--- a/docs/_docs/tutorials/typescript/customize-skill/1_intro.md
+++ b/docs/_docs/skills/tutorials/customize-skill/typescript/1-intro.md
@@ -1,6 +1,7 @@
 ---
-category: Tutorials
-subcategory: Customize a skill
+layout: tutorial
+category: Skills
+subcategory: Customize
 language: TypeScript
 title: Intro
 order: 1
@@ -8,7 +9,7 @@ order: 1
 
 # Tutorial: {{page.subcategory}} ({{page.language}})
 
-## Intro
+## {{ page.title }}
 
 ### Purpose
 
@@ -16,7 +17,7 @@ Learn how to navigate your skill project and make common customizations.
 
 ### Prerequisites
 
-[Create a skill]({{site.baseurl}}/tutorials/typescript/create-skill/1_intro) to setup your environment.
+[Create a skill]({{site.baseurl}}/skills/tutorials/create-skill/typescript/1-intro/) to set up your environment.
 
 ### Time to Complete
 
diff --git a/docs/_docs/tutorials/typescript/customize-skill/2_edit_your_responses.md b/docs/_docs/skills/tutorials/customize-skill/typescript/2-edit-your-responses.md
similarity index 84%
rename from docs/_docs/tutorials/typescript/customize-skill/2_edit_your_responses.md
rename to docs/_docs/skills/tutorials/customize-skill/typescript/2-edit-your-responses.md
index 3a7290b623..8235f78cc7 100644
--- a/docs/_docs/tutorials/typescript/customize-skill/2_edit_your_responses.md
+++ b/docs/_docs/skills/tutorials/customize-skill/typescript/2-edit-your-responses.md
@@ -1,6 +1,7 @@
 ---
-category: Tutorials
-subcategory: Customize a skill
+layout: tutorial
+category: Skills
+subcategory: Customize
 language: TypeScript
 title: Edit responses
 order: 2
@@ -15,4 +16,4 @@ Edit the MainResponses.json and SharedResponses.json files in the Responses fold
 To add additional responses, create a new folder in the Responses directory, then copy the files from Responses/Sample. Rename the files to match your domain, then modify the json file as needed.
 
 ### Learn More
-For more information, refer to the [Skill Responses reference]({{site.baseurl}}/reference/skills/responses).
\ No newline at end of file
+For more information, refer to the [Skill Responses reference]({{site.baseurl}}/skills/handbook/language-generation).
diff --git a/docs/_docs/tutorials/typescript/customize-skill/3_update_skill_manifest.md b/docs/_docs/skills/tutorials/customize-skill/typescript/3-update-skill-manifest.md similarity index 87% rename from docs/_docs/tutorials/typescript/customize-skill/3_update_skill_manifest.md rename to docs/_docs/skills/tutorials/customize-skill/typescript/3-update-skill-manifest.md index d5e460d7aa..3cafa68496 100644 --- a/docs/_docs/tutorials/typescript/customize-skill/3_update_skill_manifest.md +++ b/docs/_docs/skills/tutorials/customize-skill/typescript/3-update-skill-manifest.md @@ -1,6 +1,7 @@ --- -category: Tutorials -subcategory: Customize a skill +layout: tutorial +category: Skills +subcategory: Customize language: TypeScript title: Update your skill manifest order: 3 diff --git a/docs/_docs/tutorials/typescript/customize-skill/4_publish_your_skill.md b/docs/_docs/skills/tutorials/customize-skill/typescript/4-publish-your-skill.md similarity index 82% rename from docs/_docs/tutorials/typescript/customize-skill/4_publish_your_skill.md rename to docs/_docs/skills/tutorials/customize-skill/typescript/4-publish-your-skill.md index 6a9ee10432..f57f4b3edd 100644 --- a/docs/_docs/tutorials/typescript/customize-skill/4_publish_your_skill.md +++ b/docs/_docs/skills/tutorials/customize-skill/typescript/4-publish-your-skill.md @@ -1,6 +1,7 @@ --- -category: Tutorials -subcategory: Customize a skill +layout: tutorial +category: Skills +subcategory: Customize language: TypeScript title: Publish your skill order: 4 diff --git a/docs/_docs/skills/tutorials/customize-skill/typescript/5-next-steps.md b/docs/_docs/skills/tutorials/customize-skill/typescript/5-next-steps.md new file mode 100644 index 0000000000..c72f2d3586 --- /dev/null +++ b/docs/_docs/skills/tutorials/customize-skill/typescript/5-next-steps.md @@ -0,0 +1,36 @@ +--- +layout: tutorial +category: Skills +subcategory: Customize +language: TypeScript +title: Next steps +order: 5 +--- + +# Tutorial: {{page.subcategory}} ({{page.language}}) + +## Next Steps +Now that you've customized your skill, try one of these tutorials: + + \ No newline at end of file diff --git a/docs/_docs/skills/tutorials/extend-v4-bot/csharp/1-intro.md b/docs/_docs/skills/tutorials/extend-v4-bot/csharp/1-intro.md new file mode 100644 index 0000000000..3331925b37 --- /dev/null +++ b/docs/_docs/skills/tutorials/extend-v4-bot/csharp/1-intro.md @@ -0,0 +1,27 @@ +--- +layout: tutorial +category: Skills +subcategory: Extend a v4 Bot +title: Intro +language: C# +order: 1 +--- + + +# Tutorial: {{page.subcategory}} ({{page.language}}) + +## {{ page.title }} + +### Purpose + +Creating a Bot Framework Bot through the Virtual Assistant template is the easiest way to get started with using Skills. If you have an existing v4 based Bot, the recommended approach would be to take the resulting project from this template and bring across your custom dialogs to get started quickly. + +### Prerequisites + +### Time to Complete + +10 minutes + +### Scenario + +Enable Skills on an existing v4 bot. 
diff --git a/docs/_docs/skills/tutorials/extend-v4-bot/csharp/2-add-packages.md b/docs/_docs/skills/tutorials/extend-v4-bot/csharp/2-add-packages.md
new file mode 100644
index 0000000000..2e9929fa41
--- /dev/null
+++ b/docs/_docs/skills/tutorials/extend-v4-bot/csharp/2-add-packages.md
@@ -0,0 +1,14 @@
+---
+layout: tutorial
+category: Skills
+subcategory: Extend a v4 Bot
+title: Add Bot Framework Solutions packages
+language: C#
+order: 2
+---
+
+# Tutorial: {{page.subcategory}} ({{page.language}})
+
+## {{ page.title }}
+
+Add [`Microsoft.Bot.Builder.Solutions`](https://www.nuget.org/packages/Microsoft.Bot.Builder.Solutions/) and [`Microsoft.Bot.Builder.Skills`](https://www.nuget.org/packages/Microsoft.Bot.Builder.Skills/) NuGet packages to your solution.
diff --git a/docs/_docs/skills/tutorials/extend-v4-bot/csharp/3-skill-configuration.md b/docs/_docs/skills/tutorials/extend-v4-bot/csharp/3-skill-configuration.md
new file mode 100644
index 0000000000..2f7223aa72
--- /dev/null
+++ b/docs/_docs/skills/tutorials/extend-v4-bot/csharp/3-skill-configuration.md
@@ -0,0 +1,20 @@
+---
+layout: tutorial
+category: Skills
+subcategory: Extend a v4 Bot
+title: Skill configuration
+language: C#
+order: 3
+---
+
+# Tutorial: {{page.subcategory}} ({{page.language}})
+
+## {{ page.title }}
+
+The `Microsoft.Bot.Builder.Skills` package provides a `SkillManifest` type that describes a Skill. Your bot should maintain a collection of registered Skills, typically serialized into a JSON configuration file. The Virtual Assistant template uses a `skills.json` file for this purpose.
+
+As part of your Configuration processing you should construct a collection of registered Skills by deserializing this file, for example:
+
+```csharp
+public List<SkillManifest> Skills { get; set; }
+```
\ No newline at end of file
diff --git a/docs/_docs/skills/tutorials/extend-v4-bot/csharp/4-skill-dialog-registration.md b/docs/_docs/skills/tutorials/extend-v4-bot/csharp/4-skill-dialog-registration.md
new file mode 100644
index 0000000000..c7341846ce
--- /dev/null
+++ b/docs/_docs/skills/tutorials/extend-v4-bot/csharp/4-skill-dialog-registration.md
@@ -0,0 +1,55 @@
+---
+layout: tutorial
+category: Skills
+subcategory: Extend a v4 Bot
+title: Skill dialog registration
+language: C#
+order: 4
+---
+
+# Tutorial: {{page.subcategory}} ({{page.language}})
+
+## {{ page.title }}
+
+In your `Startup.cs` file, register a `SkillDialog` for each registered skill as shown below; this uses the collection of Skills that you created in the previous step.
+
+```csharp
+// Register skill dialogs
+services.AddTransient<List<SkillDialog>>(sp =>
+{
+    var userState = sp.GetService<UserState>();
+    var skillDialogs = new List<SkillDialog>();
+
+    foreach (var skill in settings.Skills)
+    {
+        var authDialog = BuildAuthDialog(skill, settings);
+        var credentials = new MicrosoftAppCredentialsEx(settings.MicrosoftAppId, settings.MicrosoftAppPassword, skill.MSAappId);
+        skillDialogs.Add(new SkillDialog(skill, credentials, telemetryClient, userState, authDialog));
+    }
+
+    return skillDialogs;
+});
+```
+
+For scenarios where Skills require authentication connections, you need to create an associated `MultiProviderAuthDialog`:
+
+```csharp
+// This method creates a MultiProviderAuthDialog based on a skill manifest.
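+// A Skill's manifest may declare authenticationConnections; the helper below matches
+// them against the host bot's configured OAuth connections and builds the auth dialog.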
+private MultiProviderAuthDialog BuildAuthDialog(SkillManifest skill, BotSettings settings)
+{
+    if (skill.AuthenticationConnections?.Count() > 0)
+    {
+        if (settings.OAuthConnections.Any() && settings.OAuthConnections.Any(o => skill.AuthenticationConnections.Any(s => s.ServiceProviderId == o.Provider)))
+        {
+            var oauthConnections = settings.OAuthConnections.Where(o => skill.AuthenticationConnections.Any(s => s.ServiceProviderId == o.Provider)).ToList();
+            return new MultiProviderAuthDialog(oauthConnections);
+        }
+        else
+        {
+            throw new Exception($"You must configure at least one supported OAuth connection to use this skill: {skill.Name}.");
+        }
+    }
+
+    return null;
+}
+```
\ No newline at end of file
diff --git a/docs/_docs/skills/tutorials/extend-v4-bot/csharp/5-forward-queries-to-skills.md b/docs/_docs/skills/tutorials/extend-v4-bot/csharp/5-forward-queries-to-skills.md
new file mode 100644
index 0000000000..7f38024188
--- /dev/null
+++ b/docs/_docs/skills/tutorials/extend-v4-bot/csharp/5-forward-queries-to-skills.md
@@ -0,0 +1,45 @@
+---
+layout: tutorial
+category: Skills
+subcategory: Extend a v4 Bot
+title: Forward queries to Skills
+language: C#
+order: 5
+---
+
+# Tutorial: {{page.subcategory}} ({{page.language}})
+
+## {{ page.title }}
+
+Within your Main/Router dialog, you first need to ensure the SkillDialogs registered previously are added to the dialog stack:
+
+```csharp
+foreach (var skillDialog in skillDialogs)
+{
+    AddDialog(skillDialog);
+}
+```
+
+Add the following code after your Dispatcher has executed, passing the registered Skills and the intent returned from the Dispatcher. If the IsSkill method returns a matching Skill, you start the appropriate SkillDialog instance, passing the Skill Manifest Id and the matching intent.
+
+```csharp
+// Identify if the dispatch intent matches any Action within a Skill; if so, we pass to the appropriate SkillDialog to hand off
+var identifiedSkill = SkillRouter.IsSkill(_settings.Skills, intent.ToString());
+
+if (identifiedSkill != null)
+{
+    // We have identified a skill, so initialize the skill connection with the target skill.
+    // The dispatch intent is the Action ID of the Skill, enabling us to resolve the specific action and identify slots.
+    // Pass the activity we have
+    var result = await dc.BeginDialogAsync(identifiedSkill.Id, intent);
+
+    if (result.Status == DialogTurnStatus.Complete)
+    {
+        await CompleteAsync(dc);
+    }
+}
+else
+{
+    // Your normal intent routing logic
+}
+```
diff --git a/docs/_docs/skills/tutorials/extend-v4-bot/typescript/1-intro.md b/docs/_docs/skills/tutorials/extend-v4-bot/typescript/1-intro.md
new file mode 100644
index 0000000000..bcec32939a
--- /dev/null
+++ b/docs/_docs/skills/tutorials/extend-v4-bot/typescript/1-intro.md
@@ -0,0 +1,26 @@
+---
+layout: tutorial
+category: Skills
+subcategory: Extend a v4 Bot
+language: TypeScript
+title: Intro
+order: 1
+---
+
+# Tutorial: {{page.subcategory}} ({{page.language}})
+
+## {{ page.title }}
+
+### Purpose
+
+Creating a Bot Framework Bot through the Virtual Assistant template is the easiest way to get started with using Skills. If you have an existing v4 based Bot, the recommended approach would be to take the resulting project from this template and bring across your custom dialogs to get started quickly.
+
+### Prerequisites
+
+### Time to Complete
+
+10 minutes
+
+### Scenario
+
+Enable Skills on an existing v4 bot.
\ No newline at end of file
diff --git a/docs/_docs/skills/tutorials/extend-v4-bot/typescript/2-add-packages.md b/docs/_docs/skills/tutorials/extend-v4-bot/typescript/2-add-packages.md
new file mode 100644
index 0000000000..7fac8f6f08
--- /dev/null
+++ b/docs/_docs/skills/tutorials/extend-v4-bot/typescript/2-add-packages.md
@@ -0,0 +1,14 @@
+---
+layout: tutorial
+category: Skills
+subcategory: Extend a v4 Bot
+language: TypeScript
+title: Add Bot Framework Solutions packages
+order: 2
+---
+
+# Tutorial: {{page.subcategory}} ({{page.language}})
+
+## {{ page.title }}
+
+Add [`botbuilder-solutions`](https://www.npmjs.com/package/botbuilder-solutions) and [`botbuilder-skills`](https://www.npmjs.com/package/botbuilder-skills) npm packages to your solution.
diff --git a/docs/_docs/skills/tutorials/extend-v4-bot/typescript/3-skill-configuration.md b/docs/_docs/skills/tutorials/extend-v4-bot/typescript/3-skill-configuration.md
new file mode 100644
index 0000000000..86e0f7f81b
--- /dev/null
+++ b/docs/_docs/skills/tutorials/extend-v4-bot/typescript/3-skill-configuration.md
@@ -0,0 +1,31 @@
+---
+layout: tutorial
+category: Skills
+subcategory: Extend a v4 Bot
+language: TypeScript
+title: Skill configuration
+order: 3
+---
+
+# Tutorial: {{page.subcategory}} ({{page.language}})
+
+## {{ page.title }}
+
+The `botbuilder-skills` package provides an `ISkillManifest` interface that describes a Skill. Your bot should maintain a collection of registered Skills, typically serialized into a JSON configuration file. The Virtual Assistant template uses a `skills.json` file for this purpose, which can be found in the `src` directory.
+
+That file must have the following structure:
+
+```json
+{
+  "skills": []
+}
+```
+
+As part of your Configuration processing you should construct a collection of registered Skills by deserializing this file, for example:
+
+```typescript
+import { skills as skillsRaw } from './skills.json';
+const skills: ISkillManifest[] = skillsRaw;
+```
+
+> NOTE: The `botbuilder-skills` package also provides an `IBotSettings` interface that can be used to store the keys/secrets of the services the bot will connect to.
diff --git a/docs/_docs/skills/tutorials/extend-v4-bot/typescript/4-skill-dialog-registration.md b/docs/_docs/skills/tutorials/extend-v4-bot/typescript/4-skill-dialog-registration.md
new file mode 100644
index 0000000000..12a5399fb8
--- /dev/null
+++ b/docs/_docs/skills/tutorials/extend-v4-bot/typescript/4-skill-dialog-registration.md
@@ -0,0 +1,52 @@
+---
+layout: tutorial
+category: Skills
+subcategory: Extend a v4 Bot
+language: TypeScript
+title: Skill dialog registration
+order: 4
+---
+
+# Tutorial: {{page.subcategory}} ({{page.language}})
+
+## {{ page.title }}
+
+In your `index.ts` file, register a `SkillDialog` for each registered skill as shown below; this uses the collection of Skills that you created in the previous step.
+ +```typescript + // Register skill dialogs +const skillDialogs: SkillDialog[] = skills.map((skill: ISkillManifest) => { + const authDialog: MultiProviderAuthDialog|undefined = buildAuthDialog(skill, botSettings); + const credentials: MicrosoftAppCredentialsEx = new MicrosoftAppCredentialsEx( + botSettings.microsoftAppId || '', + botSettings.microsoftAppPassword || '', + skill.msAppId); + + return new SkillDialog(skill, credentials, adapter.telemetryClient, skillContextAccessor, authDialog); +}); +``` + +For scenarios where Skills require authentication connections, you need to create an associated `MultiProviderAuthDialog`: + +```typescript +// This method creates a MultiProviderAuthDialog based on a skill manifest. +function buildAuthDialog(skill: ISkillManifest, settings: Partial<IBotSettings>): MultiProviderAuthDialog|undefined { + if (skill.authenticationConnections !== undefined && skill.authenticationConnections.length > 0) { + if (settings.oauthConnections !== undefined) { + const oauthConnections: IOAuthConnection[] = settings.oauthConnections.filter( + (oauthConnection: IOAuthConnection) => { + return skill.authenticationConnections.some((authenticationConnection: IAuthenticationConnection) => { + return authenticationConnection.serviceProviderId === oauthConnection.provider; + }); + }); + if (oauthConnections.length > 0) { + return new MultiProviderAuthDialog(oauthConnections); + } + } else { + throw new Error(`You must configure at least one supported OAuth connection to use this skill: ${skill.name}.`); + } + } + + return undefined; +} +``` \ No newline at end of file diff --git a/docs/_docs/skills/tutorials/extend-v4-bot/typescript/5-forward-queries-to-skills.md b/docs/_docs/skills/tutorials/extend-v4-bot/typescript/5-forward-queries-to-skills.md new file mode 100644 index 0000000000..bb4cb66c64 --- /dev/null +++ b/docs/_docs/skills/tutorials/extend-v4-bot/typescript/5-forward-queries-to-skills.md @@ -0,0 +1,39 @@ +--- +layout: tutorial +category: Skills +subcategory: Extend a v4 Bot +language: TypeScript +title: Forward queries to Skills +order: 5 +--- + +# Tutorial: {{page.subcategory}} ({{page.language}}) + +## {{ page.title }} + +Within your Main/Router dialog, first ensure the SkillDialogs registered previously are added to the dialog stack: + +```typescript +skillDialogs.forEach((skillDialog: SkillDialog) => { + this.addDialog(skillDialog); +}); +``` + +Add the following code after your Dispatcher has executed, passing the registered Skills and the intent returned from the Dispatcher. If the `isSkill` method identifies a matching Skill (it returns undefined otherwise), you start the appropriate SkillDialog instance, passing the Skill Manifest Id and the matching intent.
+ +```typescript +// Identify whether the dispatch intent matches any Action within a Skill; if so, we pass to the appropriate SkillDialog to hand off +const identifiedSkill: ISkillManifest | undefined = SkillRouter.isSkill(this.settings.skills, intent); +if (identifiedSkill !== undefined) { + // We have identified a skill so initialize the skill connection with the target skill + // the dispatch intent is the Action ID of the Skill enabling us to resolve the specific action and identify slots + // Pass the activity we have + const result: DialogTurnResult = await dc.beginDialog(identifiedSkill.id); + + if (result.status === DialogTurnStatus.complete) { + await this.complete(dc); + } +} else { + // Your normal intent routing logic +} +``` \ No newline at end of file diff --git a/docs/_docs/solution-accelerators/assistants/enterprise-assistant.md b/docs/_docs/solution-accelerators/assistants/enterprise-assistant.md new file mode 100644 index 0000000000..9d6875d656 --- /dev/null +++ b/docs/_docs/solution-accelerators/assistants/enterprise-assistant.md @@ -0,0 +1,126 @@ +--- +category: Solution Accelerators +subcategory: Assistants +title: Enterprise Assistant +order: 1 +toc: true +--- +# {{ page.title }} +{:.no_toc} +Many organizations are looking to provide a centralized conversational experience across many canvases for employees. This approach consolidates many disparate bots across the organization into a more centralized solution, where a master bot finds the right bot to handle each conversation, avoiding bot explosion through the parent bot/skills approach. This, in turn, makes users productive more quickly and allows for a true Enterprise Virtual Assistant experience. + +The [Enterprise Assistant sample]({{site.repo}}/tree/next/samples/csharp/assistants/enterprise-assistant) is an example of a Virtual Assistant that helps conceptualize and demonstrate how an assistant could be used in common enterprise scenarios. It also provides a starting point for those interested in creating an assistant customized for this scenario. + +![Enterprise Assistant Overview Drawing]({{site.baseurl}}/assets/images/EnterpriseAssistantSampleOverview.PNG) + +This sample works on the basis that the assistant would be provided through common employee channels such as Microsoft Teams, a mobile application, and Web Chat to help improve employee productivity, but also to assist them in completing work tasks such as opening an IT Service Management (ITSM) ticket. It also provides additional capabilities that might be useful for employees, like getting the weather forecast or showing current news articles. + +The Enterprise Assistant Sample is based on the [Virtual Assistant Template]({{site.baseurl}}/overview/virtual-assistant-template), with the addition of a [QnA Maker knowledge base](https://docs.microsoft.com/en-us/azure/cognitive-services/qnamaker/concepts/knowledge-base) for answering common enterprise FAQs (such as Benefits and HR Information) and customized Adaptive Cards. It also connects seven different Skills: [Calendar]({{site.baseurl}}/skills/samples/calendar), [Email]({{site.baseurl}}/skills/samples/email), and [To Do]({{site.baseurl}}/skills/samples/to-do), along with the experimental skills of [Weather]({{site.baseurl}}/skills/samples/weather), [News]({{site.baseurl}}/skills/samples/news), [Phone]({{site.baseurl}}/skills/samples/phone) and [ITSM]({{site.baseurl}}/skills/samples/itsm).
+ +In many cases, you can leverage [Azure Active Directory (AAD)](https://azure.microsoft.com/en-us/services/active-directory/) for single sign-on (SSO), though this may be limited by the channel itself and your specific requirements. + + +## Proactive notifications + +The Enterprise Assistant sample includes [proactive notifications]({{site.baseurl}}/solution-accelerators/samples/proactive-notifications), enabling scenarios such as: + +- Send notifications to your users that the Enterprise Assistant would like to start a conversation, thus allowing the user to indicate when they are ready to have this discussion + - e.g., a user receives a notification "your training is due", allowing them to initiate the conversation about what training is required + +- Initiate a proactive dialog with your users through an open channel such as Microsoft Teams + - e.g., "Benefits enrollment just opened; would you like to know more about benefits?" + + +## Supported scenarios + +The majority of the skills connected to this sample are [experimental skills]({{site.baseurl}}/skills/samples/experimental), which means they are early prototypes of Skills and are likely to have rudimentary language models, limited language support and limited testing. These skills demonstrate a variety of skill concepts and provide great examples to get you started. This sample demonstrates the following scenarios: + +#### HR FAQ +{:.no_toc} +- *I need life insurance* +- *How do I sign up for benefits?* +- *What is HSA?* + +#### [Calendar Skill]({{site.baseurl}}/skills/samples/calendar) +{:.no_toc} +##### Connect to a meeting +{:.no_toc} +- *Connect me to conference call* +- *Connect me with my 2 o’clock meeting* + +##### Create a meeting +{:.no_toc} +- *Create a meeting tomorrow at 9 AM with Lucy Chen* +- *Put anniversary on my calendar* + +##### Delete a meeting +{:.no_toc} +- *Cancel my meeting at 3 PM today* +- *Drop my appointment for Monday* + +##### Find a meeting +{:.no_toc} +- *Do I have any appointments today?* +- *Get to my next event* + +#### [Email]({{site.baseurl}}/skills/samples/email) +{:.no_toc} +##### Send an email +{:.no_toc} +- *Send an email to John Smith* +- *What are my latest messages?* + +#### [To Do Skill]({{site.baseurl}}/skills/samples/to-do) +{:.no_toc} +##### Add a task +{:.no_toc} +- *Add some items to the shopping notes* +- *Put milk on my grocery list* +- *Create task to meet Leon after 5:00 PM* + +#### [Weather Skill]({{site.baseurl}}/skills/samples/weather) +{:.no_toc} +##### Get the forecast +{:.no_toc} +- *What’s the weather today?* + +#### [News Skill]({{site.baseurl}}/skills/samples/news) +{:.no_toc} +##### Find news articles +{:.no_toc} +- *What’s the latest news on technology?* +- *What news is currently trending?* + +#### [Phone Skill]({{site.baseurl}}/skills/samples/phone) +{:.no_toc} +##### Make an outgoing call +{:.no_toc} +- *Call Sanjay Narthwani* +- *Call 867 5309* +- *Make a call* + +#### [IT Service Management (ITSM) Skill]({{site.baseurl}}/skills/samples/itsm) +{:.no_toc} +##### Create a ticket +{:.no_toc} +- *Create a ticket for my broken laptop* + +##### Show a ticket +{:.no_toc} +- *What’s the status of my incident?* + +##### Update a ticket +{:.no_toc} +- *Change ticket’s urgency to high* + +##### Close a ticket +{:.no_toc} +- *Close my ticket* + +## Deploy + +An automated deployment (including proactive notifications) will be available soon. + +## Download transcripts + +Sample transcripts for the Enterprise Assistant will be available soon.
\ No newline at end of file diff --git a/docs/_docs/solution-accelerators/assistants/hospitality-assistant.md b/docs/_docs/solution-accelerators/assistants/hospitality-assistant.md new file mode 100644 index 0000000000..6b9a4bd429 --- /dev/null +++ b/docs/_docs/solution-accelerators/assistants/hospitality-assistant.md @@ -0,0 +1,104 @@ +--- +category: Solution Accelerators +subcategory: Assistants +title: Hospitality Assistant +order: 2 +toc: true +--- + +# {{ page.title }} +{:.no_toc} + +The [Hospitality Assistant sample]({{site.repo}}/tree/next/samples/assistants/hospitality-assistant) is a prototype of an assistant that helps to conceptualize and demonstrate how a virtual assistant could be used in a hospitality-specific scenario. It also provides a starting point for those interested in creating an assistant customized for this scenario. + +This sample works on the basis that the assistant would be integrated into a hotel room device and would help a hotel guest with anything they might usually go to the hotel concierge about. It also provides additional capabilities that might be useful for guests, such as getting the weather forecast or showing current news articles. + +The Hospitality Sample builds on the [Virtual Assistant Template]({{site.baseurl}}/overview/virtual-assistant-template) with the addition of a [QnA Maker](https://www.qnamaker.ai/) knowledge base for answering common hotel FAQs and customized [Adaptive Cards](https://adaptivecards.io/). + +![Hospitality Sample Diagram]({{site.baseurl}}/assets/images/hospitalitysample-diagram.png) + +## Supported scenarios + +The majority of the skills connected to this sample are [experimental skills]({{site.baseurl}}/skills/samples/experimental), which means they are early prototypes of Skills and are likely to have rudimentary language models, limited language support and limited testing. These skills demonstrate a variety of skill concepts and provide great examples to get you started.
This sample demonstrates the following scenarios: + +#### Hotel FAQ +{:.no_toc} +- *Where is the gym?* +- *What time is breakfast?* +- *Do you allow pets?* + +#### [Bing Search Skill]({{site.baseurl}}/skills/samples/bing-search) +{:.no_toc} +##### Search the web +{:.no_toc} +- *Tell me about the jurassic park movie* +- *Who is Bill Gates?* + +#### [Event Skill]({{site.baseurl}}/skills/samples/event) +{:.no_toc} +##### Find local events +{:.no_toc} +- *What's happening nearby?* + +#### [Hospitality Skill]({{site.baseurl}}/skills/samples/hospitality) +{:.no_toc} +##### Guest reservation changes +{:.no_toc} +- *I want to extend my stay by 2 nights* +- *Can I get a late check out time?* +- *Can you check me out now* + +##### Room service +{:.no_toc} +- *I want to see a room service menu* +- *Can you get me 2 croissants and a yogurt parfait?* +- *Can you bring me a toothbrush and toothpaste?* + +#### [News Skill]({{site.baseurl}}/skills/samples/news) +{:.no_toc} +##### Find news articles +{:.no_toc} +- *What's the latest news on surfing?* +- *What news is currently trending?* + +#### [Restaurant Booking Skill]({{site.baseurl}}/skills/samples/restaurant-booking) +{:.no_toc} +##### Make a restaurant reservation +{:.no_toc} +- *Make a dinner reservation for tonight* + +#### [Point of Interest Skill]({{site.baseurl}}/skills/samples/point-of-interest) +{:.no_toc} +##### Find points of interest nearby +{:.no_toc} +- *Find me nearby coffee shops* + +#### [Weather Skill]({{site.baseurl}}/skills/samples/weather) +{:.no_toc} +##### Get the forecast +{:.no_toc} +- *What’s the weather today?* + +## Deploy +To configure this sample, follow the steps below: +1. Clone the [Hospitality Assistant sample]({{site.repo}}/tree/next/samples/csharp/assistants/hospitality-assistant). +1. Follow the [Create your Virtual Assistant tutorial]({{site.baseurl}}/virtual-assistant/tutorials/create-assistant/csharp/1-intro/) to deploy your assistant. Use the sample project you cloned instead of the Virtual Assistant template to include the hospitality customizations in this project. +1. Clone the following skills from our repository: + - [Hospitality Skill]({{site.repo}}/tree/next/skills/csharp/experimental/hospitalityskill) + - [Event Skill]({{site.repo}}/tree/next/skills/csharp/experimental/eventskill) + - [Point of Interest Skill]({{site.repo}}/tree/next/skills/csharp/pointofinterestskill) + - [Weather Skill]({{site.repo}}/tree/next/skills/csharp/experimental/weatherskill) + - [Bing Search Skill]({{site.repo}}/tree/next/skills/csharp/experimental/bingsearchskill) + - [News Skill]({{site.repo}}/tree/next/skills/csharp/experimental/newsskill) + - [Restaurant Booking Skill]({{site.repo}}/tree/next/skills/csharp/experimental/restaurantbookingskill) +1. [Deploy each one of these skills]({{site.baseurl}}/skills/tutorials/create-skill/csharp/4-provision-your-azure-resources/) separately, using the deployment script included in the skill directory. +1. [Add each skill]({{site.baseurl}}/skills/handbook/add-skills-to-a-virtual-assistant/) using the botskills connect CLI tool. + +## Download transcripts + +View sample conversations of the Hospitality Assistant solution by downloading a transcript and opening it with the [Bot Framework Emulator](https://aka.ms/botframework-emulator). For more flows of specific skills, see [transcripts]({{site.baseurl}}/reference/skills/transcripts).
+ +- Frequently asked questions +- Local info +- Reservation changes +- Room services diff --git a/docs/_docs/howto/virtual-assistant/linkedaccounts.md b/docs/_docs/solution-accelerators/samples/linked-accounts.md similarity index 78% rename from docs/_docs/howto/virtual-assistant/linkedaccounts.md rename to docs/_docs/solution-accelerators/samples/linked-accounts.md index 9706a6da27..e37c42de38 100644 --- a/docs/_docs/howto/virtual-assistant/linkedaccounts.md +++ b/docs/_docs/solution-accelerators/samples/linked-accounts.md @@ -1,21 +1,15 @@ --- -category: How To -subcategory: Virtual Assistant -title: Link Accounts to a Virtual Assistant +category: Solution Accelerators +subcategory: Samples +title: Linked accounts description: Enable users to link third party accounts (e.g. Office 365) to your Assistant. -order: 6 +order: 1 +toc: true --- # {{ page.title }} {:.no_toc} -## In this how-to -{:.no_toc} - -* -{:toc} -## Overview - Speech-led conversational scenarios require a different mindset and approach for certain scenarios, one such example is Authentication. If you take a Productivity scenario, whereby the user wants to access information in their calendar it's important for the VA Bot to have access to a security token (Office 365 for example). @@ -35,38 +29,39 @@ The Linked Accounts feature of the Virtual Assistant provides a reference sample ## Authentication Configuration -In order to perform Account Linking, the Linked Accounts web app will need the end user to login using the same account as they'll use to authenticate as a user of your Virtual Assistant, for example `darren@contosoassistant.com`. This is required to retrieve the unique identifier of the user which is used as the **key** to retrieving any linked token in the future. +In order to perform Account Linking, the Linked Accounts web app will need the end user to log in using the same account as they'll use to authenticate as a user of your Virtual Assistant, for example **darren@contosoassistant.com**. This is required to retrieve the unique identifier of the user, which is used as the **key** to retrieving any linked token in the future. -The ``appsettings.json`` file in the LinkedAccounts sample project has the following OAuth configuration entry for you to complete, the default example is for a microsoftonline.com based scenario. You can replace this with any custom authentication solution you have, what is key is ensuring the Linked Accounts feature is authenticating the user in some way and retrieving the same unique identifier which is passed to the assistant in future conversations. +The **appsettings.json** file in the LinkedAccounts sample project has the following OAuth configuration entry for you to complete; the default example is for a microsoftonline.com-based scenario. You can replace this with any custom authentication solution you have; what is key is ensuring the Linked Accounts feature authenticates the user in some way and retrieves the same unique identifier which is passed to the assistant in future conversations. ### Integrating Azure AD +{:.no_toc} 1. Sign in to the [Azure Portal](https://portal.azure.com/). 2. On the left sidebar, select **Azure Active Directory**. 3. From the sidebar within, select **App Registrations (Preview)**. 4.
Select **New registration** * **Name**: *Provide a friendly name* - * **Redirect URI**: `http://localhost:XXXX/signin-oidc` *(update with the local port of your project or replace with the address of your deployed website* + * **Redirect URI**: http://localhost:XXXX/signin-oidc *(update with the local port of your project or replace with the address of your deployed website)* * Click Register 5. Select the Authentication section of your newly created application - * Select `ID tokens` under the Implicit grant section +6. On the **Overview** page of your new app, copy the following values into your `appsettings.json` - * `Directory (tenant) ID` maps to `TenantId` - * `Application (client) ID` maps to `ClientId` + * Select **ID tokens** under the Implicit grant section +6. On the **Overview** page of your new app, copy the following values into your **appsettings.json** + * **Directory (tenant) ID** maps to **TenantId** + * **Application (client) ID** maps to **ClientId** -```json +```diff "AzureAd": { "Instance": "https://login.microsoftonline.com/", - "Domain": "[Enter the domain of your tenant, e.g. contoso.onmicrosoft.com]", - "TenantId": "[Enter 'common', or 'organizations' or the Tenant Id (Obtained from the Azure portal. Select 'Endpoints' from the 'App registrations' blade and use the GUID in any of the URLs)]", - "ClientId": "[Enter the Client Id (Application ID obtained from the Azure portal)]", ++ "Domain": "[Enter the domain of your tenant, e.g. contoso.onmicrosoft.com]", ++ "TenantId": "[Enter 'common', or 'organizations' or the Tenant Id (Obtained from the Azure portal. Select 'Endpoints' from the 'App registrations' blade and use the GUID in any of the URLs)]", ++ "ClientId": "[Enter the Client Id (Application ID obtained from the Azure portal)]", "CallbackPath": "/signin-oidc" } ``` > **Note** This should enable MSA accounts to be linked as well, but your provider may prevent that as a default. You can go to **Users** > **New guest user** to add additional accounts. -This sample uses the AD Object Identifier claim (``AadObjectidentifierClaim``) as the unique user identifier when performing token operations. This needs to be the same user identifier used by the Virtual Assistant when requesting tokens. +This sample uses the AD Object Identifier claim (**AadObjectidentifierClaim**) as the unique user identifier when performing token operations. This needs to be the same user identifier used by the Virtual Assistant when requesting tokens. In order to manage account linking and securely store authentication tokens, the web app requires access to your bot's ApplicationId and Secret which you provide through the following configuration settings. @@ -103,7 +98,7 @@ Now that you've linked your account and stored tokens you can move back to your Asking a question that triggers a user flow which requires the specified token should now not prompt for authentication. -The Bot Framework Emulator currently generates a unique UserId which can be changed to a new unique ID by clicking the down arrow next to the Restart Conversation button and choosing 'Restart with new UserId'. Unfortunately there is no current way to specify a UserId and therefore match the `AadObjectidentifierClaim` associated with your user account for use which blocks Emulator testing. At this time we have provided the ability in Linked Accounts to override the UserId enabling you to pass in the UserId currently in use by the Emulator.
You can view the User identified being used by the emulator by sending a message and clicking on the entry in the log window and retrieving the from.id value. +The Bot Framework Emulator currently generates a unique UserId which can be changed to a new unique ID by clicking the down arrow next to the Restart Conversation button and choosing 'Restart with new UserId'. Unfortunately there is no current way to specify a UserId and therefore match the **AadObjectidentifierClaim** associated with your user account for use, which blocks Emulator testing. At this time we have provided the ability in Linked Accounts to override the UserId, enabling you to pass in the UserId currently in use by the Emulator. You can view the user ID being used by the emulator by sending a message, clicking on the entry in the log window, and retrieving the from.id value. ```json "from": { diff --git a/docs/_docs/solution-accelerators/samples/proactive-notifications.md b/docs/_docs/solution-accelerators/samples/proactive-notifications.md new file mode 100644 index 0000000000..8f5bac7aa7 --- /dev/null +++ b/docs/_docs/solution-accelerators/samples/proactive-notifications.md @@ -0,0 +1,40 @@ +--- +category: Solution Accelerators +subcategory: Samples +title: Proactive notifications +order: 2 +toc: true +--- +# {{ page.title }} +{:.no_toc} + +There are many scenarios where a Virtual Assistant needs to push activities to users. It is important to consider the range of channels you may offer to users, whether they provide a persistent conversation over time, and whether the channel itself supports proactive message delivery. Microsoft Teams is an example of a persistent channel enabling conversations to occur over a longer period and across a range of devices. This contrasts with Web Chat, which is only available for the life of the browser window. + +In addition to these common channels, mobile devices are another key end-user channel, and these same notifications/messages should be delivered as appropriate to these devices. + +This sample demonstrates how to build a notification broadcast solution using a Virtual Assistant and related Azure resources. Each implementation will vary significantly, so this is available as a minimum viable product (MVP) to get started. + +This sample includes proactive notifications, enabling scenarios such as: + +- Send notifications to your users that the Virtual Assistant would like to start a conversation, thus allowing the user to choose when they are ready to have this discussion (e.g., a user receives a notification "your training is due", allowing them to initiate the conversation about what training is required) + +- Initiate a proactive dialog with your users through an open channel such as Microsoft Teams (e.g., "Benefits enrollment just opened; would you like to know more about benefits?") + +![Proactive notifications sample architecture]({{site.baseurl}}/assets/images/ProactiveNotificationsDrawing.PNG) + +## Deploy + +An automated deployment will be available in the [Enterprise Assistant sample]({{site.baseurl}}/solution-accelerators/assistants/enterprise-assistant); otherwise, you can follow the tutorial in **Next steps** to manually provision the necessary Azure resources.
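+
+Under the covers, each of these scenarios comes down to resuming a stored **ConversationReference** and sending an activity into that conversation. The following is a minimal C# sketch of that mechanism only; it is not part of the sample itself, and the `adapter`, `appId`, and `reference` values (and the `NotifyUserAsync` helper name) are illustrative assumptions:
+
+```csharp
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.Bot.Builder;
+using Microsoft.Bot.Schema;
+
+public static class ProactiveMessenger
+{
+    // Resume a conversation previously captured as a ConversationReference and push a message into it.
+    public static Task NotifyUserAsync(BotAdapter adapter, string appId, ConversationReference reference, string message, CancellationToken cancellationToken)
+    {
+        return adapter.ContinueConversationAsync(
+            appId,
+            reference,
+            // The callback runs inside the resumed conversation's turn context.
+            async (turnContext, ct) => await turnContext.SendActivityAsync(message, cancellationToken: ct),
+            cancellationToken);
+    }
+}
+```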
+ +## Next Steps + + \ No newline at end of file diff --git a/docs/_docs/solution-accelerators/tutorials/enable-proactive-notifications/1-intro.md b/docs/_docs/solution-accelerators/tutorials/enable-proactive-notifications/1-intro.md new file mode 100644 index 0000000000..6c77a70f4a --- /dev/null +++ b/docs/_docs/solution-accelerators/tutorials/enable-proactive-notifications/1-intro.md @@ -0,0 +1,48 @@ +--- +layout: tutorial +category: Solution Accelerators +subcategory: Enable proactive notifications +title: Intro +order: 1 +toc: true +--- + +# Tutorial: {{page.subcategory}} +{:.no_toc} +## {{ page.title }} +{:.no_toc} + +### Purpose + +Enable the proactive notifications sample on a Virtual Assistant, which demonstrates the following capabilities: +- A console application that sends a sample event to an **Event Hubs Instance**. +- An **Azure Function** that handles notification events and routes them to the Virtual Assistant. +- A user preference store in **Azure Cosmos DB** used by the function app to look up notification settings. +- A **Virtual Assistant** project that handles incoming notification events. + +![Proactive Notifications sample architecture]({{site.baseurl}}/assets/images/ProactiveNotificationsDrawing.PNG) + +### Prerequisites +#### Option: Using the Enterprise Assistant sample +{:.no_toc} +The [Enterprise Assistant sample]({{site.baseurl}}/solution-accelerators/assistants/enterprise-assistant) comes with a preconfigured Virtual Assistant project and deployment scripts to create all of the required Azure resources. + +#### Option: Using the core Virtual Assistant Template +{:.no_toc} + +If you are using the core Virtual Assistant Template, you must create some additional Azure resources. + +1. [Create a Virtual Assistant]({{site.baseurl}}/virtual-assistant/tutorials/create-assistant/csharp/1-intro/) to set up your Virtual Assistant environment. + +1. Manually deploy the following Azure resources: + - [Create](https://ms.portal.azure.com/#create/Microsoft.EventHub) an [Event Hub](https://azure.microsoft.com/en-us/services/event-hubs/) resource + - [Create](https://ms.portal.azure.com/#create/Microsoft.FunctionApp) an [Azure Function](https://azure.microsoft.com/en-us/services/functions/) resource + - **Optional**: [Create](https://ms.portal.azure.com/#create/Microsoft.NotificationHub) a [Notification Hub](https://azure.microsoft.com/en-us/services/notification-hubs/) resource + - This implementation is not provided in this tutorial. Learn more on how to [send push notifications to specific users using Azure Notification Hub](https://docs.microsoft.com/en-us/azure/notification-hubs/notification-hubs-aspnet-backend-ios-apple-apns-notification) + +### Time to Complete +20 minutes + +### Scenario +Create an Azure solution that enables your Virtual Assistant to send proactive notifications to users.
+ diff --git a/docs/_docs/solution-accelerators/tutorials/enable-proactive-notifications/2-event-producer.md b/docs/_docs/solution-accelerators/tutorials/enable-proactive-notifications/2-event-producer.md new file mode 100644 index 0000000000..b51ef46b34 --- /dev/null +++ b/docs/_docs/solution-accelerators/tutorials/enable-proactive-notifications/2-event-producer.md @@ -0,0 +1,37 @@ +--- +layout: tutorial +category: Solution Accelerators +subcategory: Enable proactive notifications +title: Produce a notification event +order: 2 +toc: true +--- + +# Tutorial: {{page.subcategory}} +{:.no_toc} +## {{page.title}} +{:.no_toc} + +The [**EventProducer**]({{site.repo}}) project is a console application that sends a sample message to your **Event Hubs Instance**. + +### Create an Event Hubs Instance resource +1. In your **Event Hub Namespace** resource, navigate to **Entities** > **Event Hubs** +1. Select **+ Event Hub**, provide a name and select **Create** + - Make note of the **Event Hub Name** for later +1. In your **Event Hubs Instance** resource, navigate to **Settings** > **Shared access policies** +1. Select **+ Add**, provide a name and select **Create** + - Make note of the **Connection string-primary key** for later + +### Configure the Event Producer project +Update the **appSettings.json** with the values collected in the last step, as well as a random user id that you will use to test against later. + +#### [appSettings.json]({{site.repo}}) +{:.no_toc} + +```json +{ + "EventHubName": "YOUR_EVENT_HUB_INSTANCE_NAME", + "EventHubConnectionString": "YOUR_EVENT_HUB_INSTANCE_CONNECTION_STRING", + "UserId": "YOUR_USER_ID" +} +``` \ No newline at end of file diff --git a/docs/_docs/solution-accelerators/tutorials/enable-proactive-notifications/3-event-handler.md b/docs/_docs/solution-accelerators/tutorials/enable-proactive-notifications/3-event-handler.md new file mode 100644 index 0000000000..36227fff72 --- /dev/null +++ b/docs/_docs/solution-accelerators/tutorials/enable-proactive-notifications/3-event-handler.md @@ -0,0 +1,64 @@ +--- +layout: tutorial +category: Solution Accelerators +subcategory: Enable proactive notifications +title: Handle notifications +order: 3 +toc: true +--- + +# Tutorial: {{page.subcategory}} +{:.no_toc} +## {{page.title}} +{:.no_toc} + +The [**EventHandler**]() project is a sample Azure Function application that performs the following operations, in order: +1. Listens for events from an **Event Hubs Instance**. +1. Reads from a user preference store in **Azure Cosmos DB** to check a user's settings. +1. If the **SendNotificationToConversation** flag is true, sends an event activity to a user's active conversation with the message. +1. If the **SendNotificationToMobileDevice** flag is true, sends a notification to the user's mobile device using **Azure Notification Hubs**. + - This implementation is not provided in this tutorial. Learn more on how to [send push notifications to specific users using Azure Notification Hub](https://docs.microsoft.com/en-us/azure/notification-hubs/notification-hubs-aspnet-backend-ios-apple-apns-notification) + +### Configure the Event Handler project +Update the **YOUR_EVENT_HUB_NAME** parameter of the **Function1** class with your **Event Hubs Instance** name.
+ +#### [Function1.cs]() +{:.no_toc} + +```diff +[FunctionName("EventHubTrigger")] ++ public static async Task Run([EventHubTrigger("YOUR_EVENT_HUB_NAME", Connection = "EventHubConnection")] EventData[] events, ILogger log) +{ + foreach (EventData eventData in events) + { + try + { + string messageBody = Encoding.UTF8.GetString(eventData.Body.Array, eventData.Body.Offset, eventData.Body.Count); + + var data = JsonConvert.DeserializeObject(messageBody); + await SendEventToBot(data); + await Task.Yield(); + } + catch { } + } +} +``` + +### Publish and configure the Function App + +1. Publish the **EventHandler** project to your **Function Apps** resource +1. Navigate to the resource and select **Configuration** + +#### Application settings +{:.no_toc} +Select **+ New application setting** for each of the following: +- **DirectLineSecret**: YOUR_BOT_DIRECT_LINE_SECRET + - *Located in the **Azure Bot Service** resource > **Channels*** +- **DocumentDbEndpointUrl**: YOUR_COSMOS_DB_URI +- **DocumentDbPrimaryKey**: YOUR_COSMOS_DB_PRIMARY_KEY + - *Located in the **Azure Cosmos DB account** resource > **Keys*** +#### Connection Strings +{:.no_toc} +Select **+ New connection string** for the following: +- **EventHubConnection**: YOUR_EVENT_HUB_INSTANCE_CONNECTION_STRING \ No newline at end of file diff --git a/docs/_docs/solution-accelerators/tutorials/enable-proactive-notifications/4-virtual-assistant.md b/docs/_docs/solution-accelerators/tutorials/enable-proactive-notifications/4-virtual-assistant.md new file mode 100644 index 0000000000..b65adf6ac0 --- /dev/null +++ b/docs/_docs/solution-accelerators/tutorials/enable-proactive-notifications/4-virtual-assistant.md @@ -0,0 +1,132 @@ +--- +layout: tutorial +category: Solution Accelerators +subcategory: Enable proactive notifications +title: Set up the Virtual Assistant +order: 4 +toc: true +--- + +# Tutorial: {{page.subcategory}} +{:.no_toc} +## {{page.title}} +{:.no_toc} +### Option: Using the Enterprise Assistant sample +{:.no_toc} +The Enterprise Assistant sample is already configured with the **Proactive State Middleware** and the necessary event handling. Continue to the next step. + +### Option: Using the core Virtual Assistant Template +{:.no_toc} + +#### Add the Proactive State Middleware +{:.no_toc} + +For messages to be delivered to a user's conversation, a **ConversationReference** needs to be persisted in the Virtual Assistant and used to resume the existing conversation. + +Update both the **Startup** and **DefaultAdapter** classes with references to **ProactiveState** and **ProactiveStateMiddleware**. + +#### [Startup.cs]({{site.repo}}) +{:.no_toc} + +```diff + public void ConfigureServices(IServiceCollection services) +{ +... ++ services.AddSingleton<ProactiveState>(); +...
+} +``` + +#### [DefaultAdapter.cs]({{site.repo}}) +{:.no_toc} + +```diff +public DefaultAdapter( + BotSettings settings, + TemplateEngine templateEngine, + ConversationState conversationState, + ICredentialProvider credentialProvider, + TelemetryInitializerMiddleware telemetryMiddleware, + IBotTelemetryClient telemetryClient, ++ ProactiveState proactiveState) + : base(credentialProvider) + { + OnTurnError = async (turnContext, exception) => + { + await turnContext.SendActivityAsync(new Activity(type: ActivityTypes.Trace, text: $"{exception.Message}")); + await turnContext.SendActivityAsync(new Activity(type: ActivityTypes.Trace, text: $"{exception.StackTrace}")); + await turnContext.SendActivityAsync(templateEngine.EvaluateTemplate("errorMessage")); + telemetryClient.TrackException(exception); + }; + + Use(telemetryMiddleware); + + // Uncomment the following line for local development without Azure Storage + // Use(new TranscriptLoggerMiddleware(new MemoryTranscriptStore())); + Use(new TranscriptLoggerMiddleware(new AzureBlobTranscriptStore(settings.BlobStorage.ConnectionString, settings.BlobStorage.Container))); + Use(new ShowTypingMiddleware()); + Use(new FeedbackMiddleware(conversationState, telemetryClient)); + Use(new SetLocaleMiddleware(settings.DefaultLocale ?? "en-us")); + Use(new EventDebuggerMiddleware()); ++ Use(new ProactiveStateMiddleware(proactiveState)); + } +``` + +#### Handle the **BroadcastEvent** activity + +The Event Handler sends a **BroadcastEvent** activity that must be handled by the Virtual Assistant. +The **_proactiveStateAccessor** contains the mapping between a user id and a previous conversation. + +Update the **MainDialog** class with the below changes to the constructor and **OnEventActivityAsync** method. + +#### MainDialog.cs +{:.no_toc} + +```diff +public class MainDialog : RouterDialog +{ + private BotServices _services; + private BotSettings _settings; + private TemplateEngine _templateEngine; + private ILanguageGenerator _langGenerator; + private TextActivityGenerator _activityGenerator; + private OnboardingDialog _onboardingDialog; + private IStatePropertyAccessor<SkillContext> _skillContext; + private IStatePropertyAccessor<OnboardingState> _onboardingState; + private IStatePropertyAccessor<List<Activity>> _previousResponseAccessor; ++ private MicrosoftAppCredentials _appCredentials; ++ private IStatePropertyAccessor<ProactiveModel> _proactiveStateAccessor; + + public MainDialog( + IServiceProvider serviceProvider, + IBotTelemetryClient telemetryClient, ++ MicrosoftAppCredentials appCredentials, ++ ProactiveState proactiveState) + : base(nameof(MainDialog), telemetryClient) + { + _services = serviceProvider.GetService<BotServices>(); + _settings = serviceProvider.GetService<BotSettings>(); + _templateEngine = serviceProvider.GetService<TemplateEngine>(); + _langGenerator = serviceProvider.GetService<ILanguageGenerator>(); + _activityGenerator = serviceProvider.GetService<TextActivityGenerator>(); + _previousResponseAccessor = serviceProvider.GetService<IStatePropertyAccessor<List<Activity>>>(); + TelemetryClient = telemetryClient; ++ _appCredentials = appCredentials; ++ _proactiveStateAccessor = proactiveState.CreateProperty<ProactiveModel>(nameof(ProactiveModel)); + ... + + protected override async Task OnEventActivityAsync(DialogContext innerDc, CancellationToken cancellationToken = default) + { + ...
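++ // "BroadcastEvent" matches the event activity name sent by the Event Handler function; EventData below is assumed to be the sample's notification payload model (UserId/Message).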
++ case "BroadcastEvent": ++ var eventData = JsonConvert.DeserializeObject<EventData>(dc.Context.Activity.Value.ToString()); ++ ++ var proactiveModel = await _proactiveStateAccessor.GetAsync(dc.Context, () => new ProactiveModel()); ++ ++ var conversationReference = proactiveModel[MD5Util.ComputeHash(eventData.UserId)].Conversation; ++ await dc.Context.Adapter.ContinueConversationAsync(_appCredentials.MicrosoftAppId, conversationReference, ContinueConversationCallback(dc.Context, eventData.Message), cancellationToken); ++ break; + ... + } +} +``` \ No newline at end of file diff --git a/docs/_docs/solution-accelerators/tutorials/enable-proactive-notifications/5-run-your-solution.md b/docs/_docs/solution-accelerators/tutorials/enable-proactive-notifications/5-run-your-solution.md new file mode 100644 index 0000000000..74ca9b6b10 --- /dev/null +++ b/docs/_docs/solution-accelerators/tutorials/enable-proactive-notifications/5-run-your-solution.md @@ -0,0 +1,36 @@ +--- +layout: tutorial +category: Solution Accelerators +subcategory: Enable proactive notifications +title: Run your solution +order: 5 +toc: true +--- + +# Tutorial: {{page.subcategory}} +{:.no_toc} +## {{page.title}} +{:.no_toc} + +Now events can be sent to a user through your Virtual Assistant in an active conversation. + +### Start a new conversation with your Virtual Assistant + +In order for the notification to be received, the sample event has to use the same **user id** as in an existing conversation. +![Bot Framework Emulator settings]({{site.baseurl}}/assets/images/proactive-notifications/emulator-settings.png) + +1. Open the **Bot Framework Emulator**. + +1. Navigate to **Settings** and provide the same **user id** you set in the **EventProducer**. + +1. Run your Virtual Assistant project. + +1. Start a new conversation with your Virtual Assistant to create a proactive state record for future use. + +### Send a sample notification with the Event Producer + +1. Run the **EventProducer** project to generate a sample notification message. + +1. Congratulations, you've received a proactive notification through your Virtual Assistant! + +![Demonstration of a notification received in an existing bot conversation]({{site.baseurl}}/assets/images/enterprisenotification-demo.png) \ No newline at end of file diff --git a/docs/_docs/solution-accelerators/tutorials/view-analytics/1-intro.md b/docs/_docs/solution-accelerators/tutorials/view-analytics/1-intro.md new file mode 100644 index 0000000000..db35a995f2 --- /dev/null +++ b/docs/_docs/solution-accelerators/tutorials/view-analytics/1-intro.md @@ -0,0 +1,147 @@ +--- +layout: tutorial +category: Solution Accelerators +subcategory: View analytics using Power BI +title: Intro +order: 1 +--- + +# Tutorial: {{page.subcategory}} + +## {{ page.title }} + +### Purpose +The Virtual Assistant analytics sample provides a Power BI template that can be used to understand how your bot is performing. + + + + +### Prerequisites +* [Install Power BI Desktop](https://powerbi.microsoft.com/desktop/) +* [Download the Virtual Assistant analytics Power BI template]({{site.baseurl}}/assets/analytics/virtual-assistant-analytics-sample.pbit) +* [Create a Virtual Assistant]({{site.baseurl}}/virtual-assistant/tutorials/create-assistant/csharp/1-intro) to deploy your Azure resources + +### Time To Complete +10 minutes + +### Scenario +A Power BI dashboard showing Application Insights telemetry captured from a Virtual Assistant.
\ No newline at end of file diff --git a/docs/_docs/solution-accelerators/tutorials/view-analytics/2-get-application-insights-application-id.md b/docs/_docs/solution-accelerators/tutorials/view-analytics/2-get-application-insights-application-id.md new file mode 100644 index 0000000000..c3524e5efa --- /dev/null +++ b/docs/_docs/solution-accelerators/tutorials/view-analytics/2-get-application-insights-application-id.md @@ -0,0 +1,21 @@ +--- +layout: tutorial +category: Solution Accelerators +subcategory: View analytics using Power BI +title: Get your Application Insights Application ID +order: 2 +--- + +# Tutorial: {{page.subcategory}} + +## {{ page.title }} + +1. Log in to the [Azure Portal](https://portal.azure.com/). + +1. Navigate to the Application Insights resource created from the [Create a Virtual Assistant tutorial]({{site.baseurl}}/virtual-assistant/tutorials/create-assistant/csharp/1-intro). + +1. In the sidebar, navigate to **Configure > API Access**. +![Screenshot highlighting the API Access tab in an Application Insights resource]({{site.baseurl}}/assets/images/analytics/application-insights-api-access.png) + +1. Copy the Application ID for the next step. +![Screenshot highlighting the Application ID of an Application Insights resource]({{site.baseurl}}/assets/images/analytics/application-insights-application-id.png) \ No newline at end of file diff --git a/docs/_docs/solution-accelerators/tutorials/view-analytics/3-open-template.md b/docs/_docs/solution-accelerators/tutorials/view-analytics/3-open-template.md new file mode 100644 index 0000000000..49422899f2 --- /dev/null +++ b/docs/_docs/solution-accelerators/tutorials/view-analytics/3-open-template.md @@ -0,0 +1,16 @@ +--- +layout: tutorial +category: Solution Accelerators +subcategory: View analytics using Power BI +title: Open the Power BI template +order: 3 +--- + +# Tutorial: {{page.subcategory}} + +## {{ page.title }} + +1. Open the [Virtual Assistant analytics template]({{site.baseurl}}/assets/analytics/virtual-assistant-analytics-sample.pbit) and paste your **Application Insights Application ID**. +![Screenshot of the load template view of a new Virtual Assistant analytics Power BI template]({{site.baseurl}}/assets/images/analytics/virtual-assistant-analytics-powerbi-load-template.png) + +1. After loading the tables with your populated data, you should now see insights from your Virtual Assistant. \ No newline at end of file diff --git a/docs/_docs/solution-accelerators/tutorials/view-analytics/4-next-steps.md b/docs/_docs/solution-accelerators/tutorials/view-analytics/4-next-steps.md new file mode 100644 index 0000000000..2fbba53f71 --- /dev/null +++ b/docs/_docs/solution-accelerators/tutorials/view-analytics/4-next-steps.md @@ -0,0 +1,34 @@ +--- +layout: tutorial +category: Solution Accelerators +subcategory: View analytics using Power BI +title: Next steps +order: 4 +--- + +# Tutorial: {{page.subcategory}} + +## {{ page.title }} + +Now that you've learned how to view a sample of Virtual Assistant analytics, learn more from the following resources.
+ + \ No newline at end of file diff --git a/docs/_docs/tutorials/add-prebuilt-skill/1_intro.md b/docs/_docs/tutorials/add-prebuilt-skill/1_intro.md deleted file mode 100644 index 0574edd052..0000000000 --- a/docs/_docs/tutorials/add-prebuilt-skill/1_intro.md +++ /dev/null @@ -1,27 +0,0 @@ ---- -layout: tutorial -category: Tutorials -subcategory: Connect a pre-built skill -title: Intro -order: 1 ---- - -# Tutorial: Connect a pre-built skill to your assistant - -## Intro - -### Purpose - -Install Bot Framework development prerequisites and add one of the Skills provided as part of the Virtual Assistant. - -### Prerequisites - -- [Create a Virtual Assistant](/docs/tutorials/csharp/virtualassistant.md) to setup your environment. - -### Time to Complete - -15 minutes - -### Scenario - -Add one of the skills provided in the [Bot Framework Solutions GitHub repo](https://github.com/microsoft/botframework-solutions) provide to your Virtual Assistant. The Skills are only available in C# at this time but these can be added to a Typescript based assistant. \ No newline at end of file diff --git a/docs/_docs/tutorials/add-prebuilt-skill/3_deploy_skill_project.md b/docs/_docs/tutorials/add-prebuilt-skill/3_deploy_skill_project.md deleted file mode 100644 index 0ddeab9cab..0000000000 --- a/docs/_docs/tutorials/add-prebuilt-skill/3_deploy_skill_project.md +++ /dev/null @@ -1,23 +0,0 @@ ---- -layout: tutorial -category: Tutorials -subcategory: Connect a pre-built skill -title: Deploy skill project -order: 3 ---- - -# Tutorial: Connect a pre-built skill to your assistant - -## Deploy skill project - -Choose which of the provided Skills you wish to add to your Virtual Assistant, click one of the links below and follow the **`Skill Deployment`** instructions to deploy your own instance of this Skill. - -| Name | Description | -| ---- | ----------- | -|[Calendar Skill]({{site.baseurl}}/reference/skills/productivity-calendar)|Add calendar capabilities to your assistant. Powered by Microsoft Graph and Google.| -|[Email Skill]({{site.baseurl}}/reference/skills/productivity-email)|Add email capabilities to your assistant. Powered by Microsoft Graph and Google.| -|[To Do Skill]({{site.baseurl}}/reference/skills/productivity-todo)|Add task management capabilities to your assistant. Powered by Microsoft Graph.| -|[Point of Interest Skill]({{site.baseurl}}/reference/skills/pointofinterest)|Find points of interest and directions. Powered by Azure Maps and FourSquare.| -|[Automotive Skill]({{site.baseurl}}/reference/skills/automotive)|Industry-vertical Skill for showcasing enabling car feature control.| -|[Phone Skill]({{site.baseurl}}/reference/skills/phone)|Make phone calls. 
Powered by Microsoft Graph and Google.| -|[Experimental Skills]({{site.baseurl}}/reference/skills/experimental)|News, Search, Restaurant Booking and Weather.| \ No newline at end of file diff --git a/docs/_docs/tutorials/add-prebuilt-skill/5_test_your_skill.md b/docs/_docs/tutorials/add-prebuilt-skill/5_test_your_skill.md deleted file mode 100644 index 12669df68f..0000000000 --- a/docs/_docs/tutorials/add-prebuilt-skill/5_test_your_skill.md +++ /dev/null @@ -1,23 +0,0 @@ ---- -layout: tutorial -category: Tutorials -subcategory: Connect a pre-built skill -title: Test skill -order: 5 ---- - -# Tutorial: Connect a pre-built skill to your assistant - -## Testing your Skill - -Refer to the documentation page in the table below for an example question that you can ask to validate that your Assistant can now perform additional capabilities with no additional code changes. - -| Name | Description | -| ---- | ----------- | -|[Calendar Skill]({{site.baseurl}}/reference/skills/productivity-calendar)|Add calendar capabilities to your assistant. Powered by Microsoft Graph and Google.| -|[Email Skill]({{site.baseurl}}/reference/skills/productivity-email)|Add email capabilities to your assistant. Powered by Microsoft Graph and Google.| -|[To Do Skill]({{site.baseurl}}/reference/skills/productivity-todo)|Add task management capabilities to your assistant. Powered by Microsoft Graph.| -|[Point of Interest Skill]({{site.baseurl}}/reference/skills/pointofinterest)|Find points of interest and directions. Powered by Azure Maps and FourSquare.| -|[Automotive Skill]({{site.baseurl}}/reference/skills/automotive)|Industry-vertical Skill for showcasing enabling car feature control.| -|[Phone Skill]({{site.baseurl}}/reference/skills/phone)|Make phone calls. Powered by Microsoft Graph and Google.| -|[Experimental Skills]({{site.baseurl}}/reference/skills/experimental)|News, Search, Restaurant Booking and Weather.| \ No newline at end of file diff --git a/docs/_docs/tutorials/csharp/create-assistant/3_create_project.md b/docs/_docs/tutorials/csharp/create-assistant/3_create_project.md deleted file mode 100644 index 71156b811f..0000000000 --- a/docs/_docs/tutorials/csharp/create-assistant/3_create_project.md +++ /dev/null @@ -1,76 +0,0 @@ ---- -category: Tutorials -subcategory: Create a Virtual Assistant -language: C# -title: Create your assistant -order: 3 ---- - -# Tutorial: {{page.subcategory}} ({{page.language}}) - -## Create your Virtual Assistant project - -1. In Visual Studio, replace **File > New Project**. -2. Under Bot, select **Virtual Assistant Template**. -3. Name your project and select **Create**. -4. Build your project to restore the NuGet packages. - -## What files were created? - | - Adapters // BotAdapter implementations for configuring Middleware - | - DefaultAdapter.cs // Configures basic middleware - | - DefaultWebSocketAdapter.cs // Configures middleware for web socket connection - | - Bots // IBot implementations for initializing dialog stack - | - DialogBot.cs // Initializes the dialog stack with a primary dialog (e.g. MainDialog) - | - Content // Static content used by the assistant including images and Adaptive Cards - | - NewUserGreeting.json // Adaptive Card shown to first time users - | - ReturningUserGreeting.json // Adaptive Card shown to returning users - | - Controllers // API Controllers - | - BotController.cs // API Controller for api/messages endpoint - | - Deployment // Files for deployment and provisioning - | - Resources // Resources for deployment and provisioning. 
May be excluded from source control. - | - LU // Files for deploying LUIS language models - | - General.lu // General language model (e.g. Cancel, Help, Escalate, etc.) - | - QnA // Files for deploying QnA Maker knowledgebases - | - Chitchat.lu // Chitchat knowledgebase (e.g. Hi, How are you?, What's your name?, - | - Faq.lu // FAQ knowledgebase - | - template.json // ARM Deployment template - | - parameters.template.json // ARM Deployment parameters file - | - Scripts // PowerShell scripts for deployment and provisioning - | - deploy.ps1 // Deploys and provisions Azure resources and cognitive models - | - deploy_cognitive_models.ps1 // Deploys and provisions cognitive models only - | - update_cognitive_models.ps1 // Updates existing cognitive models - | - luis_functions.ps1 // Functions used for deploying and updating LUIS models - | - qna_functions.ps1 // Functions used for deploying and updating QnA Maker knowledgebases - | - Dialogs // Bot Framework Dialogs - | - MainDialog.cs // Dialog for routing incoming messages - | - OnboardingDialog.cs // Dialog for collecting basic profile information from user - | - CancelDialog.cs // Dialog for confirming cancellation intent - | - EscalateDialog.cs // Dialog for handling user escalation - | - Models // Data models - | - OnboardingState.cs // Model for basic profile information - | - Pipeline // Files for setting up an deployment pipeline in Azure DevOps - | - Assistant.yml // Build pipeline template for Azure DevOps - | - Responses // Classes and files for representing bot responses - | - Cancel // Cancel responses - | - CancelResponses.cs // Cancel dialog response manager - | - CancelString.resx // Cancel dialog strings - | - Escalate // Escalate responses - | - EscalateResponses.cs // Escalate dialog response manager - | - EscalateString.resx // Escalate dialog strings - | - Main // Main responses - | - MainResponses.cs // Main dialog response manager - | - MainString.resx // Main dialog strings - | - Onboarding // Onboarding responses - | - OnboardingResponses.cs // Onboarding dialog response manager - | - OnboardingString.resx // Onboarding dialog strings - | - Services // Configuration for connected services and service clients - | - BotServices.cs // Class representation of service clients and recognizers - | - BotSettings.cs // Class representation of configuration files - | - DispatchLuis.cs // Class representation of LUIS result from Dispatch language model - | - GeneralLuis.cs // Class representation of LUIS result from General language model - | - appsettings.json // Configuration for application and Azure services - | - cognitivemodels.json // Configuration for language models, knowledgebases, and dispatch model - | - skills.json // Configuration for connected skills - | - Program.cs // Default Program.cs file - | - Startup.cs // Initializes dependencies - diff --git a/docs/_docs/tutorials/csharp/create-assistant/6_next_steps.md b/docs/_docs/tutorials/csharp/create-assistant/6_next_steps.md deleted file mode 100644 index 5ef724fd1a..0000000000 --- a/docs/_docs/tutorials/csharp/create-assistant/6_next_steps.md +++ /dev/null @@ -1,52 +0,0 @@ ---- -category: Tutorials -subcategory: Create a Virtual Assistant -language: C# -title: Next steps -order: 6 ---- - -# Tutorial: {{page.subcategory}} ({{page.language}}) - -## Next Steps - -Now that you've got the basics, try the one of these tutorials: - -
-    [Card: Customize a Virtual Assistant] Personalize your experience for your brand and customers.
-    [Card: Create a skill] Build a custom skill for your scenario.
-    [Card: Enable Speech] Enable the Microsoft Speech Channel for your assistant.
-    [Card: Enable Microsoft Teams] Enable the Microsoft Teams Channel for your assistant.
-
diff --git a/docs/_docs/tutorials/csharp/create-skill/8_next_steps.md b/docs/_docs/tutorials/csharp/create-skill/8_next_steps.md deleted file mode 100644 index f99b853b68..0000000000 --- a/docs/_docs/tutorials/csharp/create-skill/8_next_steps.md +++ /dev/null @@ -1,52 +0,0 @@ ---- -category: Tutorials -subcategory: Create a skill -language: C# -title: Next steps -order: 8 ---- - -# Tutorial: {{page.subcategory}} ({{page.language}}) - -## Next Steps - -Now that you've created your custom skill, try the one of these tutorials: - -
-    [Card: Customize a skill] Customize your skill for your brand and users.
-    [Card: Create a Virtual Assistant] Get up and running with the solution accelerator.
-    [Card: Enable Speech] Enable the Microsoft Speech Channel for your assistant.
-    [Card: Enable Microsoft Teams] Enable the Microsoft Teams Channel for your assistant.
-
\ No newline at end of file diff --git a/docs/_docs/tutorials/csharp/customize-assistant/5_next_steps.md b/docs/_docs/tutorials/csharp/customize-assistant/5_next_steps.md deleted file mode 100644 index 2653b5bbfd..0000000000 --- a/docs/_docs/tutorials/csharp/customize-assistant/5_next_steps.md +++ /dev/null @@ -1,43 +0,0 @@ ---- -category: Tutorials -subcategory: Customize a Virtual Assistant -language: C# -title: Next steps -order: 5 ---- - -# Tutorial: {{page.subcategory}} ({{page.language}}) - -## Next steps - -Now that you've learned learned how to personalize a Virtual Assistant, try one of these tutorials: - -
-    [Card: Create a skill] Build a custom skill for your scenario.
-    [Card: Enable Speech] Enable the Microsoft Speech Channel for your assistant.
-    [Card: Enable Microsoft Teams] Enable the Microsoft Teams Channel for your assistant.
-
\ No newline at end of file diff --git a/docs/_docs/tutorials/csharp/customize-skill/5_next_steps.md b/docs/_docs/tutorials/csharp/customize-skill/5_next_steps.md deleted file mode 100644 index 5e9c1ada1f..0000000000 --- a/docs/_docs/tutorials/csharp/customize-skill/5_next_steps.md +++ /dev/null @@ -1,42 +0,0 @@ ---- -category: Tutorials -subcategory: Customize a skill -language: C# -title: Next steps -order: 5 ---- - -# Tutorial: {{page.subcategory}} ({{page.language}}) - -## Next Steps -Now that you've customized your skill, try one of these tutorials: - -
-    [Card: Create a Virtual Assistant] Get up and running with the solution accelerator.
-    [Card: Enable Speech] Enable the Microsoft Speech Channel for your assistant.
-    [Card: Enable Microsoft Teams] Enable the Microsoft Teams Channel for your assistant.
-
\ No newline at end of file diff --git a/docs/_docs/tutorials/enable-speech/1_intro.md b/docs/_docs/tutorials/enable-speech/1_intro.md deleted file mode 100644 index 72b6cc8e60..0000000000 --- a/docs/_docs/tutorials/enable-speech/1_intro.md +++ /dev/null @@ -1,37 +0,0 @@ ---- -category: Tutorials -subcategory: Enable Speech -title: Intro -order: 1 ---- - -# Tutorial: Enable Speech for your Assistant - -## Intro - -### Purpose - -The Virtual Assistant template creates and deploys an Assistant with all speech enablement steps provided out of the box. - -This tutorial covers the steps required to connect the [Direct Line Speech channel](https://docs.microsoft.com/en-us/azure/bot-service/directline-speech-bot?view=azure-bot-service-4.0) to your assistant and build a simple application integrated with the Speech SDK to demonstrate Speech interactions working. - -### Prerequisites - -- [Create a Virtual Assistant]({{site.baseurl}}/tutorials/csharp/create-assistant/1_intro) to setup your environment. - -- Make sure the `Universal Windows Platform development` workload is available on your machine. Choose **Tools > Get Tools** and Features from the Visual Studio menu bar to open the Visual Studio installer. If this workload is already enabled, close the dialog box. - - ![UWP Enablement]({{site.baseurl}}/assets/images/vs-enable-uwp-workload.png) - - Otherwise, select the box next to .NET cross-platform development, and select Modify at the lower right corner of the dialog box. Installation of the new feature takes a moment. - -### Time to Complete - -10 minutes - -### Scenario - -Create a simple application that enables you to speak to your newly created Virtual Assistant. - - - diff --git a/docs/_docs/tutorials/enable-speech/2_create_speech_instance.md b/docs/_docs/tutorials/enable-speech/2_create_speech_instance.md deleted file mode 100644 index 6ef0467e0c..0000000000 --- a/docs/_docs/tutorials/enable-speech/2_create_speech_instance.md +++ /dev/null @@ -1,16 +0,0 @@ ---- -category: Tutorials -subcategory: Enable Speech -title: Create a Microsoft Speech instance -order: 2 ---- - -# Tutorial: Enable Speech for your Assistant - -## Create a Microsoft Speech Azure resource - -The first step is to create a Microsoft Speech Cognitive Services Azure resource to perform the Speech-To-Text and Text-To-Speech capabilities for your assistant. - -- Select an Azure region. Direct Line Speech Channel is a preview service limited to [these Azure regions](https://docs.microsoft.com/en-us/azure/cognitive-services/speech-service/regions#voice-first-virtual-assistants). For best performance (reduced round-trip time) deploy your Virtual Assistant bot and Direct Line Speech channel to the same Azure region, and one that is closest to you. To help you decide, look up exact [geographical location](https://azure.microsoft.com/en-us/global-infrastructure/locations/) for each Azure region. -- Create a Microsoft Speech Cognitive Service instance in your Azure Subscription using the [Azure Portal](https://ms.portal.azure.com/#create/Microsoft.CognitiveServicesSpeechServices). In the *Location* field specify the selected Azure region based on the above. -- Once created, retrieve one of the speech **subscription keys** and store this ready for later in this tutorial. 
\ No newline at end of file diff --git a/docs/_docs/tutorials/enable-speech/3_add_speech_channel.md b/docs/_docs/tutorials/enable-speech/3_add_speech_channel.md deleted file mode 100644 index 2727185296..0000000000 --- a/docs/_docs/tutorials/enable-speech/3_add_speech_channel.md +++ /dev/null @@ -1,17 +0,0 @@ ---- -category: Tutorials -subcategory: Enable Speech -title: Add the Direct Line Speech channel -order: 3 ---- - -# Tutorial: Enable Speech for your Assistant - -## Add the Direct Line Speech channel to your Assistant - -The next step is to add the Direct Line Speech Channel to your deployed Assistant. - -1. Go to the Azure Portal and locate the Web App Bot created for your Assistant, which is most easily found by opening the Resource Group. -2. Click `Channels` on the left-hand navigation and select `Direct Line Speech`. -3. Review the Channel introduction page and, when ready, click `Save` to add the Channel to your Assistant. -4. Retrieve the **Channel secret** key, which will be used by your application to connect to your Bot through the Direct Line Speech Channel, and store it for use later in this tutorial. diff --git a/docs/_docs/tutorials/enable-speech/4_integrate_speech_channel.md b/docs/_docs/tutorials/enable-speech/4_integrate_speech_channel.md deleted file mode 100644 index 8c1c728481..0000000000 --- a/docs/_docs/tutorials/enable-speech/4_integrate_speech_channel.md +++ /dev/null @@ -1,22 +0,0 @@ ---- -category: Tutorials -subcategory: Enable Speech -title: Build speech sample app -order: 4 ---- - -# Tutorial: Enable Speech for your Assistant - -## Integrating with the Speech Channel - -For this tutorial we'll take a pre-built C# sample to get you up and running quickly. - -1. Locate the [assistant-SimpleSpeechApp](https://github.com/microsoft/botframework-solutions/tree/master/solutions/testharnesses/csharp/assistant-SimpleSpeechApp) example application found in the [botframework-solutions github repo](https://github.com/microsoft/botframework-solutions/) and open it in Visual Studio / VSCode. -2. Open `MainPage.xaml.cs`, which you can find in your Solution by expanding `MainPage.xaml` in Solution Explorer. -3. At the top of the file you will find the following configuration properties. Update these, using the `Channel Secret` and `Speech Subscription key` that you retrieved in the previous steps. The region provided is for Direct Line Speech and should be left as `westus2` at this time. - - ``` - private const string channelSecret = "YourChannelSecret"; - private const string speechSubscriptionKey = "YourSpeechSubscriptionKey"; - ``` -4. Build your application. \ No newline at end of file diff --git a/docs/_docs/tutorials/enable-speech/5_testing_speech.md b/docs/_docs/tutorials/enable-speech/5_testing_speech.md deleted file mode 100644 index efb4757bf5..0000000000 --- a/docs/_docs/tutorials/enable-speech/5_testing_speech.md +++ /dev/null @@ -1,17 +0,0 @@ ---- -category: Tutorials -subcategory: Enable Speech -title: Test speech -order: 5 ---- - -# Tutorial: Enable Speech for your Assistant - -## Testing Speech Interactions - -1. Run the application created in the previous step. -2. Click `Enable Microphone` to ensure the application has permission to access the microphone. -3. Click `Talk to your Bot` and say `Hello`; you should hear a spoken response from your Virtual Assistant. -4. You can now interact with your Assistant (including Skills) through Speech.
*Note that follow-up questions asked by the Assistant will require you to click the button each time, as this sample application doesn't automatically open the microphone for questions*. - - ![Simple Speech App](assets/images/simplespeechapp.png) \ No newline at end of file diff --git a/docs/_docs/tutorials/enable-speech/6_changing_the_voice.md b/docs/_docs/tutorials/enable-speech/6_changing_the_voice.md deleted file mode 100644 index e8a4b7d11d..0000000000 --- a/docs/_docs/tutorials/enable-speech/6_changing_the_voice.md +++ /dev/null @@ -1,28 +0,0 @@ ---- -category: Tutorials -subcategory: Enable Speech -title: Change the voice -order: 6 ---- - -# Tutorial: Enable Speech for your Assistant - -## Changing the Voice - -Now let's change the default voice (`Jessa24kRUS`) configured within your Virtual Assistant to a higher quality [Neural voice](https://azure.microsoft.com/en-us/blog/microsoft-s-new-neural-text-to-speech-service-helps-machines-speak-like-people/). Note that Neural voices will only work with speech subscription keys created for certain locations (regions). See the last column in the [Standard and neural voices](https://docs.microsoft.com/en-us/azure/cognitive-services/speech-service/regions#standard-and-neural-voices) table for region availability. If your bot is configured for a Neural voice and your speech subscription key is for a region not enabled for Neural voices, the Direct Line Speech channel will terminate the connection with the client with an Internal Server Error (code 500). - -To switch to a Neural voice: - -1. Open your Assistant Solution in Visual Studio. -2. Open `DefaultWebSocketAdapter.cs` located within your `Adapters` folder. -3. Select the Voice you would like to use from [this list](https://docs.microsoft.com/en-us/azure/cognitive-services/speech-service/language-support#neural-voices), for example `Microsoft Server Speech Text to Speech Voice (en-US, JessaNeural)`. -4. Update the following line to specify the new voice: - ``` - Use(new SetSpeakMiddleware(settings.DefaultLocale ?? "en-us")); - ``` - To - ``` - Use(new SetSpeakMiddleware(settings.DefaultLocale ?? "en-us", "Microsoft Server Speech Text to Speech Voice (en-US, JessaNeural)")); - ``` -5. Build your Assistant and re-publish it to Azure so the changes are available to the Speech Channel. -6. Repeat the tests and listen to the voice difference. \ No newline at end of file diff --git a/docs/_docs/tutorials/enable-speech/7_next_steps.md b/docs/_docs/tutorials/enable-speech/7_next_steps.md deleted file mode 100644 index 2cd2e96ce1..0000000000 --- a/docs/_docs/tutorials/enable-speech/7_next_steps.md +++ /dev/null @@ -1,49 +0,0 @@ ---- -category: Tutorials -subcategory: Enable Speech -title: Next steps -order: 7 ---- - -# Tutorial: Enable Speech for your Assistant - -## Next Steps - -This tutorial is based on example applications provided by the Speech SDK, which you can refer to for more information and for other programming languages.
- -- [C# UWP](https://docs.microsoft.com/en-us/azure/cognitive-services/speech-service/quickstart-virtual-assistant-csharp-uwp) -- [Java (Windows, macOS, Linux)](https://docs.microsoft.com/en-us/azure/cognitive-services/speech-service/quickstart-virtual-assistant-java-jre) -- [Java (Android)](https://docs.microsoft.com/en-us/azure/cognitive-services/speech-service/quickstart-virtual-assistant-java-android) - -In addition, we provide an [Android-based Virtual Assistant Client](https://github.com/microsoft/botframework-solutions/blob/master/solutions/android/VirtualAssistantClient/readme.md) sample application that interfaces with the Virtual Assistant through the Speech Channel and renders Adaptive Cards. - -## More Tutorials -
-Enable Microsoft Teams: Enable the Microsoft Teams Channel for your assistant.
-Customize a Virtual Assistant: Personalize your experience for your brand and customers.
-Create a skill: Build a custom skill for your scenario.
\ No newline at end of file diff --git a/docs/_docs/tutorials/enable-teams/7_next_steps.md b/docs/_docs/tutorials/enable-teams/7_next_steps.md deleted file mode 100644 index e896e0ea53..0000000000 --- a/docs/_docs/tutorials/enable-teams/7_next_steps.md +++ /dev/null @@ -1,47 +0,0 @@ ---- -category: Tutorials -subcategory: Enable Microsoft Teams -title: Next steps -order: 7 ---- - -# Tutorial: Adding your Assistant to Microsoft Teams - -## Next Steps - -The Microsoft Teams documentation provides additional guidance around Microsoft Teams and bots, with two key items highlighted below. - -- [Test and debug your Microsoft Teams bot](https://docs.microsoft.com/en-us/microsoftteams/platform/concepts/bots/bots-test) -- [Quickly develop apps with App Studio for Microsoft Teams](https://docs.microsoft.com/en-us/microsoftteams/platform/get-started/get-started-app-studio) - -## More Tutorials -
-Enable Speech: Enable the Microsoft Speech Channel for your assistant.
-Customize a Virtual Assistant: Personalize your experience for your brand and customers.
-Create a skill: Build a custom skill for your scenario.
\ No newline at end of file diff --git a/docs/_docs/tutorials/typescript/create-assistant/6_next_steps.md b/docs/_docs/tutorials/typescript/create-assistant/6_next_steps.md deleted file mode 100644 index 25e5174689..0000000000 --- a/docs/_docs/tutorials/typescript/create-assistant/6_next_steps.md +++ /dev/null @@ -1,52 +0,0 @@ ---- -category: Tutorials -subcategory: Create a Virtual Assistant -language: TypeScript -title: Next steps -order: 6 ---- - -# Tutorial: {{page.subcategory}} ({{page.language}}) - -## Next Steps - -Now that you've got the basics, try one of these tutorials: -
-Customize a Virtual Assistant: Personalize your experience for your brand and customers.
-Create a skill: Build a custom skill for your scenario.
-Enable Speech: Enable the Microsoft Speech Channel for your assistant.
-Enable Microsoft Teams: Enable the Microsoft Teams Channel for your assistant.
diff --git a/docs/_docs/tutorials/typescript/create-skill/8_next_steps.md b/docs/_docs/tutorials/typescript/create-skill/8_next_steps.md deleted file mode 100644 index 542b3eaf49..0000000000 --- a/docs/_docs/tutorials/typescript/create-skill/8_next_steps.md +++ /dev/null @@ -1,52 +0,0 @@ ---- -category: Tutorials -subcategory: Create a skill -language: TypeScript -title: Next steps -order: 8 ---- - -# Tutorial: {{page.subcategory}} ({{page.language}}) - -## Next Steps - -Now that you've created your custom skill, try one of these tutorials: -
-Customize a skill: Customize your skill for your brand and users.
-Create a Virtual Assistant: Get up and running with the solution accelerator.
-Enable Speech: Enable the Microsoft Speech Channel for your assistant.
-Enable Microsoft Teams: Enable the Microsoft Teams Channel for your assistant.
\ No newline at end of file diff --git a/docs/_docs/tutorials/typescript/customize-assistant/5_next_steps.md b/docs/_docs/tutorials/typescript/customize-assistant/5_next_steps.md deleted file mode 100644 index 58dac7d918..0000000000 --- a/docs/_docs/tutorials/typescript/customize-assistant/5_next_steps.md +++ /dev/null @@ -1,43 +0,0 @@ ---- -category: Tutorials -subcategory: Customize a Virtual Assistant -language: TypeScript -title: Next steps -order: 5 ---- - -# Tutorial: {{page.subcategory}} ({{page.language}}) - -## Next steps - -Now that you've learned how to personalize a Virtual Assistant, try one of these tutorials: -
-Create a skill: Build a custom skill for your scenario.
-Enable Speech: Enable the Microsoft Speech Channel for your assistant.
-Enable Microsoft Teams: Enable the Microsoft Teams Channel for your assistant.
\ No newline at end of file diff --git a/docs/_docs/tutorials/typescript/customize-skill/5_next_steps.md b/docs/_docs/tutorials/typescript/customize-skill/5_next_steps.md deleted file mode 100644 index b0bedc9655..0000000000 --- a/docs/_docs/tutorials/typescript/customize-skill/5_next_steps.md +++ /dev/null @@ -1,42 +0,0 @@ ---- -category: Tutorials -subcategory: Customize a skill -language: TypeScript -title: Next steps -order: 5 ---- - -# Tutorial: {{page.subcategory}} ({{page.language}}) - -## Next Steps -Now that you've customized your skill, try one of these tutorials: - -
-Create a Virtual Assistant: Get up and running with the solution accelerator.
-Enable Speech: Enable the Microsoft Speech Channel for your assistant.
-Enable Microsoft Teams: Enable the Microsoft Teams Channel for your assistant.
\ No newline at end of file diff --git a/docs/_docs/howto/virtual-assistant/continuousdeployment.md b/docs/_docs/virtual-assistant/handbook/devops.md similarity index 64% rename from docs/_docs/howto/virtual-assistant/continuousdeployment.md rename to docs/_docs/virtual-assistant/handbook/devops.md index 4cc7e4e1f5..df522c8acd 100644 --- a/docs/_docs/howto/virtual-assistant/continuousdeployment.md +++ b/docs/_docs/virtual-assistant/handbook/devops.md @@ -1,19 +1,15 @@ --- -category: How To -subcategory: Virtual Assistant -title: How to create a Release Pipeline -description: Guidance on how to create and configure a Release Pipeline for your Virtual Assistant -order: 1 +category: Virtual Assistant +subcategory: Handbook +title: DevOps +description: Create an Azure DevOps release pipeline +order: 7 +toc: true --- # {{ page.title }} {:.no_toc} - -## In this how-to -{:.no_toc} - -* -{:toc} +{{ page.description }} ## Prerequisites - For background, read the Continuous Integration documentation about YAML build pipelines. @@ -21,15 +17,15 @@ order: 1 - Have a deployed bot. - Update the YAML file to generate an Artifact. -## Scenario +## Create an Azure DevOps release pipeline -Create a Release Pipeline in Azure DevOps. - -## Introduction +### Introduction +{:.no_toc} Before starting work on the Release Pipeline configuration, we need to keep a few modifications in mind. We'll generate an artifact: a compressed version of the project or solution which contains all the information necessary to form the base of the Release Pipeline configuration. -## Create a Release Pipeline +### Create a Release Pipeline +{:.no_toc} Go to the release section in your DevOps organization and select the plus icon, which will show the following options. In this case, we select **New release pipeline**. After the release has executed, you can check the log of each task added. ![Create Release Pipeline 6]({{site.baseurl}}/assets/images/create_release_pipeline_6.png) -## Configure the Release Pipeline to update the Bot Services +### Configure the Release Pipeline to update the Bot Services +{:.no_toc} 1. As a first step, to keep the Agent Job and tasks clear, add to the Variables section the pipeline variables that you will use in the Release configuration. The highlighted variables are used by the Az Login task and the others relate to the cognitivemodels.json file. @@ -151,3 +148,75 @@ After the release was executed you can check the log of each tasks added to the ``` ![Configure Release Pipeline 6]({{site.baseurl}}/assets/images/configure_release_pipeline_6.png) + + +### Introduction + +When trying to develop language models in a distributed team, managing conflicts can be difficult. Refer to the following guidance for some common scenarios when managing cognitive models for a team. + +## Manage cognitive models across environments + +### I want to protect my production environment against conflicting changes made by multiple editors. +{:.no_toc} + +For projects being worked on by multiple developers, it is recommended that you protect your production cognitive models by only deploying changes through a build pipeline. This pipeline should run the various scripts/commands needed to update your LUIS models, QnA Maker knowledgebases, and Dispatch model automatically based on your source control.
Individual developers should make their changes in their own versions of the models, and push their changes into source control when they are ready to merge. + +![]({{site.baseurl}}/assets/images/model_management_flow.png) + +### I want to test changes to my LUIS models and QnA Maker knowledgebases in the portal. +{:.no_toc} + +When you want to test changes to your LUIS models and QnA Maker knowledgebases in the portal, it is recommended that you deploy your own personal versions to develop with, and do not make changes directly in the production apps, to prevent conflicts with the other developers. After you have made all the changes you want in the portal, follow these steps to share your changes with your team: + +1. Run the following command from your project folder: + + ``` + .\Deployment\Scripts\update_cognitive_models.ps1 -RemoteToLocal + ``` + + > This script downloads your modified LUIS models in the .lu schema so they can be published to production by your build pipeline. If you are running this script from a Virtual Assistant project, it also runs `dispatch refresh` and `luisgen` to update your Dispatch model and DispatchLuis.cs files. + +2. Check in your updated .lu files to source control. + > Your changes should go through a peer review to validate there will be no conflicts. You can also share your LUIS app and/or transcripts of the bot conversation with your changes to help with this review. + +3. Run your build pipeline to deploy your updated files to your production environment. + > This pipeline should update your LUIS models, QnA Maker knowledgebases, and Dispatch model as needed. + + +### I've changed my skill LUIS model. What next? +{:.no_toc} + +If you have added or removed an intent from your skill LUIS model, follow these steps to update your skill manifest: + +1. Open the manifestTemplate.json file. +2. If you have added new intents, either add them to an existing `action` or add a new action for the intent like this: + + ```json + "actions": [ + { + "id": "toDoSkill_addToDo", + "definition": { + "description": "Add a task", + "slots": [], + "triggers": { + "utteranceSources": [ + { + "locale": "en", + "source": [ "todo#AddToDo" ] + } + ] + } + } + }, + ``` + +Once you have updated your manifest, follow these steps to update any Virtual Assistants that are using your skill: + +1. Run the following command from your project directory: + + ``` + botskills update --cs + ``` + + > This command updates your skills.json file with the latest manifest definitions for each connected skill, and runs dispatch refresh to update your dispatch model. diff --git a/docs/_docs/reference/virtual-assistant/events.md b/docs/_docs/virtual-assistant/handbook/events.md similarity index 80% rename from docs/_docs/reference/virtual-assistant/events.md rename to docs/_docs/virtual-assistant/handbook/events.md index e186e95491..507a6dbcbf 100644 --- a/docs/_docs/reference/virtual-assistant/events.md +++ b/docs/_docs/virtual-assistant/handbook/events.md @@ -1,21 +1,15 @@ --- -category: Reference -subcategory: Virtual Assistant +category: Virtual Assistant +subcategory: Handbook title: Events -description: Event activities enable contextual information to be shared between a user and assistant, without being visible to the user.
-order: 6 +description: Send events to pass context to a Virtual Assistant +order: 12 +toc: true --- # {{ page.title }} {:.no_toc} - -## In this reference -{:.no_toc} - -* -{:toc} - -## Intro +{{ page.description }} [Event Activities](https://docs.microsoft.com/en-us/azure/bot-service/dotnet/bot-builder-dotnet-activities?view=azure-bot-service-3.0#event) are used to pass metadata between a bot and user without being visible to the user. @@ -23,6 +17,7 @@ The data from these activities can be processed by an assistant to fulfill scena ## From user to assistant ### Location +{:.no_toc} You can pass a user's coordinates to an assistant using the **VA.Location** example event. @@ -41,6 +36,8 @@ You can pass a user's coordinates to an assistant using the **VA.Location** exam ``` ### Timezone +{:.no_toc} + You can pass a user's timezone to an assistant using the **VA.Timezone** example event. **Activity payload** @@ -58,6 +55,8 @@ You can pass a user's timezone to an assistant using the **VA.Timezone** example ``` ### Reset user +{:.no_toc} + You can request to remove all user state and unlink accounts by passing the **VA.ResetUser** example event. **Activity payload** @@ -75,8 +74,10 @@ You can request to remove all user state and unlink accounts by passing the **VA ## From assistant to user ### Open default applications +{:.no_toc} + To be tightly integrated with a user's messaging client, a Virtual Assistant needs to send events back to the client application. -The **OpenDefaultApp** example event is used in conjunction with the [Virtual Assistant Client (Android) sample)]({{ site.baseurl }}/howto/samples/vaclient_android/) to demonstrate samples of using metadata +The **OpenDefaultApp** example event is used in conjunction with the [Virtual Assistant Client (Android) sample]({{ site.baseurl }}/clients/virtual-assistant-client/) to demonstrate the use of metadata. **Activity payload** ```json { "type":"event", "name":"OpenDefaultApp", "value":{ "MusicUri":"{Music player link}", -"MapsUri":"geo:{Latitude},{Longitude}", +"MapsUri":"geo:{LATITUDE},{LONGITUDE}", "TelephoneUri":"{Telephone number}", "MeetingUri":"{Microsoft Teams meeting link}" } } ``` ## Add and configure the event debug middleware -Native event activities are not supported on the [Bot Framework Emulator](https://aka.ms/botframework-emulator), you can work around this using the`EventDebugMiddleware` class that comes with the Virtual Assistan template. -You can send messages with a string payload following the format: `/event:{ "Name": "{Event name}", "Value": "{Event value}" }`. The middleware tranposes these values onto an event activity to be processed. +Native event activities are not supported on the [Bot Framework Emulator](https://aka.ms/botframework-emulator), but you can work around this using the [**EventDebugMiddleware**]({{site.baseurl}}/overview/virtual-assistant-template/#middleware) class that comes with the Virtual Assistant template. +You can send messages with a string payload following the format: +**/event:{ "Name": "{Event name}", "Value": "{Event value}" }**. +The middleware transposes these values onto an event activity to be processed.
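On the receiving side, a Virtual Assistant surfaces these event activities through the standard Bot Framework activity handler. The following is a minimal sketch of consuming the **VA.Location** and **VA.Timezone** events described above; it is illustrative only, and the shape of `Value` (a simple string payload) is an assumption based on the sample payloads rather than a fixed contract.

```csharp
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Bot.Builder;
using Microsoft.Bot.Schema;

public class EventAwareBot : ActivityHandler
{
    protected override Task OnEventActivityAsync(ITurnContext<IEventActivity> turnContext, CancellationToken cancellationToken)
    {
        var activity = turnContext.Activity;

        switch (activity.Name)
        {
            case "VA.Location":
                // Assumed payload shape: "{Latitude},{Longitude}" as a string value.
                var coordinates = activity.Value?.ToString();
                // Persist to user state here so dialogs can give location-aware answers.
                break;

            case "VA.Timezone":
                // Assumed payload shape: a Windows timezone name as a string value.
                var timezone = activity.Value?.ToString();
                break;
        }

        return base.OnEventActivityAsync(turnContext, cancellationToken);
    }
}
```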
diff --git a/docs/_docs/howto/virtual-assistant/feedback.md b/docs/_docs/virtual-assistant/handbook/feedback.md similarity index 73% rename from docs/_docs/howto/virtual-assistant/feedback.md rename to docs/_docs/virtual-assistant/handbook/feedback.md index 2f3b30c27c..ac8de97572 100644 --- a/docs/_docs/howto/virtual-assistant/feedback.md +++ b/docs/_docs/virtual-assistant/handbook/feedback.md @@ -1,19 +1,15 @@ --- -category: How To -subcategory: Virtual Assistant -title: Collect Feedback -description: Describes how to implement the FeedbackMiddleware to collect user feedback. -order: 6 +category: Virtual Assistant +subcategory: Handbook +title: Feedback +description: Collect feedback from users +order: 9 +toc: true --- # {{ page.title }} {:.no_toc} - -## In this how-to -{:.no_toc} - -* -{:toc} +{{ page.description }} ## Add and configure the middleware To start collecting user feedback, add the following code block in your adapter class (DefaultAdapter.cs in the Virtual Assistant and Skill templates): @@ -26,12 +22,12 @@ This enables the FeedbackMiddleware with the following default settings: | Property | Description | Type | Default value | | -------- | ----------- | ---- |------------- | -| FeedbackActions | Feedback options shown to the user. | `List` | 👍 / 👎 | -| DismissAction | Option to dismiss request for feedback, or request for comment. | `CardAction` | *Dismiss* -| FeedbackReceivedMessage | Message to show after user has provided feedback. | `string` | *Thanks for your feedback!* | -| CommentsEnabled | Flag indicating whether the bot should prompt for free-form comments after user has provided feedback. | `bool` | false | -| CommentPrompt | Message to show after user provided feedback if CommentsEnabled is true. | `string` | *Please add any additional comments in the chat.* -| CommentReceivedMessage | Message to show after user provides a free-form comment. | `string` | *Your comment has been received.* | +| FeedbackActions | Feedback options shown to the user. | **CardAction List** | 👍 / 👎 | +| DismissAction | Option to dismiss request for feedback, or request for comment. | **CardAction** | *Dismiss* +| FeedbackReceivedMessage | Message to show after user has provided feedback. | **string** | *Thanks for your feedback!* | +| CommentsEnabled | Flag indicating whether the bot should prompt for free-form comments after user has provided feedback. | **bool** | false | +| CommentPrompt | Message to show after user provided feedback if CommentsEnabled is true. | **string** | *Please add any additional comments in the chat.* +| CommentReceivedMessage | Message to show after user provides a free-form comment. | **string** | *Your comment has been received.* | Here is an example customization with different feedback options and comments enabled: @@ -64,4 +60,4 @@ After the middleware is configured, you can request feedback as usual. ## View your feedback in Power BI You can view your **Feedback** in the Feedback tab of the Conversational AI Dashboard. -More information on Power BI and Analytics in Virtual Assistant can be found [here]({{site.repo}}/reference/analytics/powerbi/). 
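The adapter snippet that the feedback page refers to is elided from this diff as unchanged context. As a representative sketch only: the constructor arguments and option names below follow the table above and the **Microsoft.Bot.Builder.Solutions** package, but check the template source for the exact signature and namespaces.

```csharp
using Microsoft.Bot.Builder;
using Microsoft.Bot.Builder.Integration.AspNet.Core;
using Microsoft.Bot.Connector.Authentication;

public class DefaultAdapter : BotFrameworkHttpAdapter
{
    public DefaultAdapter(
        ICredentialProvider credentialProvider,
        ConversationState conversationState,
        IBotTelemetryClient telemetryClient)
        : base(credentialProvider)
    {
        // Enable feedback prompts with free-form comments after each rating.
        Use(new FeedbackMiddleware(conversationState, telemetryClient, new FeedbackOptions
        {
            CommentsEnabled = true,
            CommentPrompt = "Tell us more about your experience.",
            FeedbackReceivedMessage = "Thanks, we appreciate your feedback!",
        }));
    }
}
```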
+[Learn how to set up your own Power BI dashboard]({{site.baseurl}}/virtual-assistant/tutorials/view-analytics/1-intro/) \ No newline at end of file diff --git a/docs/_docs/reference/virtual-assistant/localization.md b/docs/_docs/virtual-assistant/handbook/localization.md similarity index 93% rename from docs/_docs/reference/virtual-assistant/localization.md rename to docs/_docs/virtual-assistant/handbook/localization.md index 18bef040f5..2656d107fb 100644 --- a/docs/_docs/reference/virtual-assistant/localization.md +++ b/docs/_docs/virtual-assistant/handbook/localization.md @@ -1,21 +1,15 @@ --- -category: Reference -subcategory: Virtual Assistant +category: Virtual Assistant +subcategory: Handbook title: Localization -description: Understanding the Virtual Assistant's approach to localizing conversations -order: 6 +description: Manage localization across a Virtual Assistant solution +order: 5 +toc: true --- -# {{ page.title }} +# {{ page.title }} : {{ page.description }} {:.no_toc} - -## In this reference -{:.no_toc} - -* -{:toc} - -## Intro +{{ page.description }} ## Getting the locale To capture the user's locale, the Virtual Assistant uses the SetLocaleMiddleware. For each message that comes in from the user, the CurrentUICulture is set equal to the Activity's locale property. If Activity.Locale is not available on the activity, the DefaultLocale from cognitivemodel.json is used instead. @@ -117,7 +111,7 @@ var cognitiveModels = _services.CognitiveModelSets[locale]; ``` ## Responses -Responses can be localized in a variety of ways. If you use resource files (.resx) the correct response will be chosen based on the CurrentUICulture. The ResponseManager class in Microsoft.Bot.Builder.Solutions can also be used to localize responses in the json format described [here]({{site.baseurl}}/reference/skills/responses). +Responses can be localized in a variety of ways. If you use resource files (.resx) the correct response will be chosen based on the CurrentUICulture. The ResponseManager class in Microsoft.Bot.Builder.Solutions can also be used to localize responses in the json format described [here]({{site.baseurl}}/skills/handbook/language-generation). ## Channel Support The localization approach is currently supported in the following channels: @@ -127,6 +121,8 @@ The localization approach is currently supported in the following channels: - Direct Line Speech ### Bot Framework Emulator +{:.no_toc} + To test your assistant with different locales, you follow these steps in the Bot Framework emulator: 1. Open the **Settings** tab. @@ -138,6 +134,8 @@ To test your assistant with different locales, you follow these steps in the Bot ![Emulator locale screenshot]({{site.baseurl}}/assets/images/emulator_locale.jpg) ### Web Chat +{:.no_toc} + To use this approach in webchat, you can set the locale of the activity by providing the **locale** parameter when you initialize your WebChat client, like so: ``` @@ -156,4 +154,6 @@ To use this approach in webchat, you can set the locale of the activity by provi ``` ### Direct Line & Direct Line Speech +{:.no_toc} + For Direct Line and Direct Line Speech, your client can pass the locale in the Activity.Locale property to enable localization scenarios. 
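To make the locale flow on the localization page concrete, here is a condensed sketch of what a locale-setting middleware does on each turn. This is an illustrative reconstruction rather than the template's exact SetLocaleMiddleware source; the default-locale constructor parameter mirrors the DefaultLocale value read from cognitivemodels.json.

```csharp
using System.Globalization;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Bot.Builder;

public class LocaleMiddlewareSketch : IMiddleware
{
    private readonly string _defaultLocale;

    public LocaleMiddlewareSketch(string defaultLocale)
    {
        _defaultLocale = defaultLocale;
    }

    public async Task OnTurnAsync(ITurnContext turnContext, NextDelegate next, CancellationToken cancellationToken = default)
    {
        // Prefer the locale on the incoming activity; fall back to the configured default.
        var locale = string.IsNullOrWhiteSpace(turnContext.Activity.Locale)
            ? _defaultLocale
            : turnContext.Activity.Locale;

        CultureInfo.CurrentUICulture = new CultureInfo(locale);

        await next(cancellationToken);
    }
}
```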
\ No newline at end of file diff --git a/docs/_docs/howto/virtual-assistant/ettovamigration.md b/docs/_docs/virtual-assistant/handbook/migration.md similarity index 70% rename from docs/_docs/howto/virtual-assistant/ettovamigration.md rename to docs/_docs/virtual-assistant/handbook/migration.md index bb02139bfa..c3d19f2f3f 100644 --- a/docs/_docs/howto/virtual-assistant/ettovamigration.md +++ b/docs/_docs/virtual-assistant/handbook/migration.md @@ -1,19 +1,15 @@ --- -category: How To -subcategory: Virtual Assistant -title: Convert from Enterprise Template to Virtual Assistant Template -description: Guidance on how to move from an Enterprise Template based Bot to the new Template -order: 6 +category: Virtual Assistant +subcategory: Handbook +title: Migration +description: Migrate from the Enterprise template +order: 13 +toc: true --- # {{ page.title }} {:.no_toc} - -## In this how-to -{:.no_toc} - -* -{:toc} +{{ page.description }} ## What happened to the Enterprise Template? @@ -21,11 +17,11 @@ The Enterprise Template, released last year, brought together the required capab Thanks to strong feedback from our customers, we are bringing the two approaches together. These complex, assistant-like conversational experiences are proving critical to digital transformation and customer/employee engagement. -The Enterprise Template is now the [Virtual Assistant Template]({{site.baseurl}}/overview/virtualassistant) and introduces the following capabilities: +The Enterprise Template is now the [Virtual Assistant Template]({{site.baseurl}}/overview/virtual-assistant-template) and introduces the following capabilities: - C# template simplified and aligned to ASP.NET MVC pattern with dependency injection - Typescript generator -- `Microsoft.Bot.Builder.Solutions` NuGet package to enable easy updating of the template core after a project is created +- **Microsoft.Bot.Builder.Solutions** NuGet package to enable easy updating of the template core after a project is created - Works out-of-box with Skills, enabling you to use re-usable conversational capabilities or hand off specific tasks to child Bots within your organization - [Adaptive Cards](https://adaptivecards.io/) that greet new and returning users - Native conversational telemetry and Power BI analytics via the Bot Builder SDK @@ -36,18 +32,21 @@ If you have an existing bot based off of the Enterprise Template, we recommend c ## Key changes to the template ### ASP.NET MVC Pattern +{:.no_toc} The Virtual Assistant template has adopted the ASP.NET Core MVC approach which has enabled us to further simplify the template code and be more familiar to .NET developers. This has resulted in significant changes to how the Bot is configured and initialized through deeper use of [Dependency Injection (DI)](https://docs.microsoft.com/en-us/aspnet/core/fundamentals/dependency-injection?view=aspnetcore-2.2) which improve extensibility and the ability to automate testing. ### Bot file deprecation +{:.no_toc} -Prior to the Bot Framework SDK 4.3 release, the Bot Framework offered the .bot file as a mechanism to manage resources. Going forward we recommend that you use `appsettings.json` (C#) or `.env` (Typescript) file for managing these resources. +Prior to the Bot Framework SDK 4.3 release, the Bot Framework offered the .bot file as a mechanism to manage resources. Going forward we recommend that you use **appsettings.json** (C#) or **.env** (Typescript) file for managing these resources. 
In line with this change to .bot files, we have migrated the template configuration across to appSettings.json for general dependencies and cognitiveModels.json to represent the Dispatch, LUIS and QnA models registered for your assistant. This also enables you to leverage standard approaches such as KeyVault. ### Folder structure +{:.no_toc} We have flattened the directory structure, primarily around the Dialogs folders which had a hierarchy enabling Dialogs to have their own resources, responses and state. Through our work building Skills and working with customers/partners it became clear this structure didn't scale and became complex. @@ -71,32 +70,37 @@ The core folder structure is shown below and key concepts such as Dialogs, Model ``` ### Solutions NuGet package +{:.no_toc} -The previous Enterprise Template had a `Microsoft.Bot.Solutions` library which contained extensions to the Bot Framework to simplify creation of advanced experiences. This is now published as the [`Microsoft.Bot.Builder.Solutions`](https://www.nuget.org/packages/Microsoft.Bot.Builder.Solutions/) is now published as an additional NuGet library enabling us to easily make updates which you can pull into your project and avoiding have to perform differential comparison with our sample Enterprise Template project. +The previous Enterprise Template had a **Microsoft.Bot.Solutions** library which contained extensions to the Bot Framework to simplify creation of advanced experiences. This is now published as the [**Microsoft.Bot.Builder.Solutions**](https://www.nuget.org/packages/Microsoft.Bot.Builder.Solutions/) NuGet library, enabling us to easily make updates which you can pull into your project, avoiding the need to perform a differential comparison with our sample Enterprise Template project. ### ARM Deployment +{:.no_toc} -Previously we used the `msbot` command line tool to automate deployment of dependent resources in Azure. This enabled us to address limitations around automated Azure deployment for some resources and ensure developers had an easy way to get started. +Previously we used the **msbot** command line tool to automate deployment of dependent resources in Azure. This enabled us to address limitations around automated Azure deployment for some resources and ensure developers had an easy way to get started. With these limitations addressed, we have now moved to an ARM template-based approach, providing the same automation but also a more familiar way to customize deployment to suit your requirements. ## How to migrate to the Virtual Assistant template ### Create a new project +{:.no_toc} [Create a new project]({{site.baseurl}}/tutorials/create-assistant/3_create_project) using the Virtual Assistant Template. ### Deployment +{:.no_toc} -It's recommended to deploy your new Virtual Assistant template using the [updated deployment approach]({{site.baseurl}}/tutorials/csharp/create-assistant/4_provision_your_azure_resources) which now support the ability for multi-locale conversational experiences and the new configuration files which replace the .bot file. This enables you to get started right away with no manual changes. +It's recommended to deploy your new Virtual Assistant template using the [updated deployment approach]({{site.baseurl}}/virtual-assistant/tutorials/create-assistant/csharp/4-provision-your-azure-resources), which now supports multi-locale conversational experiences and the new configuration files which replace the .bot file.
This enables you to get started right away with no manual changes. -Alternatively if you wish to re-use existing deployed resources, you can alternatively take your existing .bot file, [decrypt the secrets](https://docs.microsoft.com/en-us/azure/bot-service/bot-file-basics?view=azure-bot-service-4.0&tabs=csharp) and manually move across existing Azure resource information into your new `appSettings.json` and `cognitiveModels.json` files. +Alternatively if you wish to re-use existing deployed resources, you can alternatively take your existing .bot file, [decrypt the secrets](https://docs.microsoft.com/en-us/azure/bot-service/bot-file-basics?view=azure-bot-service-4.0&tabs=csharp) and manually move across existing Azure resource information into your new **appSettings.json** and **cognitiveModels.json** files. ### Migrate dialogs +{:.no_toc} -1. Copy your custom dialog class files into the `Dialogs` directory of your new project. +1. Copy your custom dialog class files into the Dialogs directory of your new project. -1. Within the `Startup.cs` file, add a Transient Service for each of your dialogs. +1. Within the **Startup.cs** file, add a Transient Service for each of your dialogs. ```csharp // Register dialogs @@ -136,21 +140,25 @@ Alternatively if you wish to re-use existing deployed resources, you can alterna ``` ### Responses +{:.no_toc} Copy Responses for each dialog into a sub-directory of the Responses folder. Focus on having a sub-directory per dialog. ### Adaptive Cards +{:.no_toc} -Copy any Adaptive Cards used by your dialogs into the `Content` directory with the sample greeting cards. +Copy any Adaptive Cards used by your dialogs into the Content directory with the sample greeting cards. ### State +{:.no_toc} -Copy any state classes you may have created into the `Models` directory. +Copy any state classes you may have created into the Models directory. ### Files generated by the LUISGen tool +{:.no_toc} -Copy any LuisGen-generated classes into the `Services` directory. +Copy any LuisGen-generated classes into the Services directory. ## Extend your assistant with Skills -If your assistant was based on the [Virtual Assistant (Beta Release 0.3) solution](https://github.com/microsoft/AI/releases/tag/0.3), continue with [adding back the Skills]({{site.baseurl}}/howto/skills/addingskills). +If your assistant was based on the [Virtual Assistant (Beta Release 0.3) solution](https://github.com/microsoft/AI/releases/tag/0.3), continue with [adding back the Skills]({{site.baseurl}}/skills/handbook/add-skills-to-a-virtual-assistant/) diff --git a/docs/_docs/reference/virtual-assistant/responses.md b/docs/_docs/virtual-assistant/handbook/responses.md similarity index 90% rename from docs/_docs/reference/virtual-assistant/responses.md rename to docs/_docs/virtual-assistant/handbook/responses.md index 4b14e23b2a..2346f2da2b 100644 --- a/docs/_docs/reference/virtual-assistant/responses.md +++ b/docs/_docs/virtual-assistant/handbook/responses.md @@ -1,20 +1,17 @@ --- -category: Reference -subcategory: Virtual Assistant -title: Responses -description: Details on how responses work in the Virtual Assistant template. 
-order: 5 +category: Virtual Assistant +subcategory: Handbook +title: Language Generation +description: How responses work in the Virtual Assistant template +order: 4 +toc: true --- # {{ page.title }} {:.no_toc} +{{ page.description }} -## In this reference -{:.no_toc} - -* -{:toc} -## Intro +## Introduction Your Virtual Assistant can respond in a variety of ways depending on the scenario and the users active device or conversation canvas. Through use of the Bot Framework Activity schema, both `Text` and `Speak` variations of a response are returned enabling the device to make the most appropriate choice. @@ -22,11 +19,11 @@ The same Activity schema supports the attachment of User Experience elements thr ## Activity schema -The [Bot Framework Activity schema](https://github.com/Microsoft/BotBuilder/blob/master/specs/botframework-activity/botframework-activity.md) for the Azure Bot Service is an application-level representation of conversational actions made by humans and bots. This schema is used for all messages, including [Events]({{site.baseurl}}/reference/virtual-assistant/events). +The [Bot Framework Activity schema](https://github.com/Microsoft/BotBuilder/blob/master/specs/botframework-activity/botframework-activity.md) for the Azure Bot Service is an application-level representation of conversational actions made by humans and bots. This schema is used for all messages, including [Events]({{site.baseurl}}/virtual-assistant/handbook/events). ## Messages and Events -Messages are a specific Type of Activity set through the `ActivityType` property and relate to Messages to and from a user that should be shown/spoken. Events are a different `ActivityType` enabling messages to be *whispered* between the client and Bot and provide an elegant mechanism for the client to trigger events within the Virtual Assistant and vice versa to perform an operation on the device. More information is in the [events]({{site.baseurl}}/reference/virtual-assistant/events) section. +Messages are a specific Type of Activity set through the `ActivityType` property and relate to Messages to and from a user that should be shown/spoken. Events are a different `ActivityType` enabling messages to be *whispered* between the client and Bot and provide an elegant mechanism for the client to trigger events within the Virtual Assistant and vice versa to perform an operation on the device. More information is in the [events]({{site.baseurl}}/virtual-assistant/handbook/events) section. ## Adaptive Cards diff --git a/docs/_docs/virtual-assistant/handbook/testing.md b/docs/_docs/virtual-assistant/handbook/testing.md new file mode 100644 index 0000000000..13af1e7543 --- /dev/null +++ b/docs/_docs/virtual-assistant/handbook/testing.md @@ -0,0 +1,49 @@ +--- +category: Virtual Assistant +subcategory: Handbook +title: Testing +description: Your Virtual Assistant can be tested just like any other Bot Framework Bot; the most common tools are the [Bot Framework Emulator](https://aka.ms/botframework-emulator) and [Web Chat](https://aka.ms/botframework-webchat). +order: 6 +toc: true +--- + +# {{ page.title }} +{:.no_toc} +{{ page.description }} + + +## Unit Testing + +Take advantage of the Test project that is available when you [Create a New Skill]({{site.baseurl}}/skills/tutorials/create-skill/csharp/1-intro). +Follow along with the Flow tests to see a basic usage of how to mock activities from a user and validate the bot responses. 
+If you'd like to take this further, you can explore the tests of a published skill for a deep dive on APIs, mocking LUIS, and more. + +## Client Testing + +### Bot Framework Emulator +{:.no_toc} + +The Bot Framework Emulator can be used by opening the .bot file provided within the Project directory. You must have completed the [deployment steps]({{site.baseurl}}/virtual-assistant/tutorials/create-assistant/csharp/4-provision-your-azure-resources) first and should ensure you have the [latest emulator](https://aka.ms/botframework-emulator) installed. + +> Authentication scenarios cannot be fully tested within the Emulator at this time. The Web Test Harness provides a workaround for this. + +### Direct Line Configuration +{:.no_toc} + +For device integration and use of the test harnesses below, you need to publish your assistant to your Azure subscription and then configure the [Direct Line](https://docs.microsoft.com/en-us/azure/bot-service/bot-service-channel-connect-directline?view=azure-bot-service-3.0) channel. + +- Start by deploying your assistant to Azure. +- Then use the CLI command shown below; the key will be shown in the `key` field. This will not be accessible again, so store it securely for the steps below. + +```shell +az bot directline create -g YOUR_RESOURCE_GROUP_NAME --name YOUR_BOT_NAME +``` + +### Direct Line Sample +{:.no_toc} + +A simple Console App is provided to demonstrate the base communication required with a Virtual Assistant and to highlight how a device can interact with it. The Sample enables you to conduct a conversation with a Virtual Assistant and demonstrates how responses can be processed, including Adaptive Cards, along with retrieving the **Speak** property, which is the speech-friendly variation of the response. + +Examples are also provided of how events can be sent (device activation, for example), as well as of receiving responses used to perform an action locally (e.g. change the navigation system or radio station). + +Update the code to reflect the Direct Line secret you created previously. \ No newline at end of file diff --git a/docs/_docs/tutorials/csharp/create-assistant/1_intro.md b/docs/_docs/virtual-assistant/tutorials/create-assistant/csharp/1-intro.md similarity index 93% rename from docs/_docs/tutorials/csharp/create-assistant/1_intro.md rename to docs/_docs/virtual-assistant/tutorials/create-assistant/csharp/1-intro.md index 1e20cd9373..476c9fc019 100644 --- a/docs/_docs/tutorials/csharp/create-assistant/1_intro.md +++ b/docs/_docs/virtual-assistant/tutorials/create-assistant/csharp/1-intro.md @@ -1,14 +1,14 @@ --- layout: tutorial -category: Tutorials -subcategory: Create a Virtual Assistant +category: Virtual Assistant +subcategory: Create language: C# title: Intro order: 1 --- # Tutorial: {{page.subcategory}} ({{page.language}}) -## Intro +## {{ page.title }} ### Purpose Install Bot Framework development prerequisites and create your first Virtual Assistant.
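Tying the Direct Line sample described above together, the following is a minimal sketch of the conversation loop such a console client runs. It assumes the **Microsoft.Bot.Connector.DirectLine** NuGet package and a placeholder secret from the az CLI step; the sample in the repository is more complete (polling with watermarks, rendering Adaptive Cards, sending events).

```csharp
using System;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.Bot.Connector.DirectLine;

class DirectLineSample
{
    static async Task Main()
    {
        // Assumed placeholder: the Direct Line secret created with the az CLI command above.
        using (var client = new DirectLineClient("YOUR_DIRECT_LINE_SECRET"))
        {
            var conversation = await client.Conversations.StartConversationAsync();

            // Send a message to the assistant.
            await client.Conversations.PostActivityAsync(conversation.ConversationId, new Activity
            {
                Type = ActivityTypes.Message,
                From = new ChannelAccount("user1"),
                Text = "Hello",
            });

            // Give the assistant a moment to reply, then read the activity stream.
            await Task.Delay(TimeSpan.FromSeconds(2));
            var activitySet = await client.Conversations.GetActivitiesAsync(conversation.ConversationId);

            foreach (var reply in activitySet.Activities.Where(a => a.From.Id != "user1"))
            {
                // Speak carries the speech-friendly variation of the response.
                Console.WriteLine($"Text: {reply.Text} | Speak: {reply.Speak}");
            }
        }
    }
}
```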
diff --git a/docs/_docs/tutorials/csharp/create-assistant/2_download_and_install.md b/docs/_docs/virtual-assistant/tutorials/create-assistant/csharp/2-download-and-install.md similarity index 96% rename from docs/_docs/tutorials/csharp/create-assistant/2_download_and_install.md rename to docs/_docs/virtual-assistant/tutorials/create-assistant/csharp/2-download-and-install.md index 7b532e9a02..3b35463b9b 100644 --- a/docs/_docs/tutorials/csharp/create-assistant/2_download_and_install.md +++ b/docs/_docs/virtual-assistant/tutorials/create-assistant/csharp/2-download-and-install.md @@ -1,6 +1,7 @@ --- -category: Tutorials -subcategory: Create a Virtual Assistant +layout: tutorial +category: Virtual Assistant +subcategory: Create language: C# title: Download and install order: 2 diff --git a/docs/_docs/virtual-assistant/tutorials/create-assistant/csharp/3-create-project.md b/docs/_docs/virtual-assistant/tutorials/create-assistant/csharp/3-create-project.md new file mode 100644 index 0000000000..b31c168ec7 --- /dev/null +++ b/docs/_docs/virtual-assistant/tutorials/create-assistant/csharp/3-create-project.md @@ -0,0 +1,78 @@ +--- +layout: tutorial +category: Virtual Assistant +subcategory: Create +language: C# +title: Create your assistant +order: 3 +--- + +# Tutorial: {{page.subcategory}} ({{page.language}}) + +## Create your Virtual Assistant project + +1. In Visual Studio, select **File > New Project**. +2. Under Bot, select **Virtual Assistant Template**. +3. Name your project and select **Create**. +4. Build your project to restore the NuGet packages. + +## What files were created? + | - Adapters // BotAdapter implementations for configuring Middleware + | - DefaultAdapter.cs // Configures basic middleware + | - DefaultWebSocketAdapter.cs // Configures middleware for web socket connection (speech) + | - Bots // IBot implementations for initializing dialog stack + | - DialogBot.cs // Initializes the dialog stack with a primary dialog (e.g. MainDialog) + | - Content // Static content used by the assistant including images and Adaptive Cards + | - NewUserGreeting.json // Adaptive Card shown to first time users + | - ReturningUserGreeting.json // Adaptive Card shown to returning users + | - Controllers // API Controllers + | - BotController.cs // API Controller for api/messages endpoint + | - Deployment // Files for deployment and provisioning + | - Resources // Resources for deployment and provisioning. + | - LU // Files for deploying LUIS language models + | - General.lu // General language model (e.g. Cancel, Help, Escalate, etc.) + | - QnA // Files for deploying QnA Maker knowledgebases + | - Chitchat.lu // Chitchat knowledgebase (e.g. Hi, How are you?, What's your name?) + | - Faq.lu // FAQ knowledgebase + | - template.json // ARM Deployment template + | - parameters.template.json // ARM Deployment parameters file + | - Scripts // PowerShell scripts for deployment and provisioning + | - deploy.ps1 // Deploys and provisions Azure resources and cognitive models + | - deploy_cognitive_models.ps1 // Deploys and provisions cognitive models only + | - update_cognitive_models.ps1 // Updates existing cognitive models + | - luis_functions.ps1 // Functions used for deploying and updating LUIS models + | - qna_functions.ps1 // Functions used for deploying and updating QnA Maker knowledgebases + | - publish.ps1 // Script to publish your Bot to Azure.
+ | - Dialogs // Bot Framework Dialogs + | - MainDialog.cs // Dialog for routing incoming messages + | - OnboardingDialog.cs // Dialog for collecting basic profile information from user + | - CancelDialog.cs // Dialog for confirming cancellation intent + | - EscalateDialog.cs // Dialog for handling user escalation + | - Models // Data models + | - OnboardingState.cs // Model for basic profile information + | - Pipeline // Files for setting up a deployment pipeline in Azure DevOps + | - Assistant.yml // Build pipeline template for Azure DevOps + | - Responses // Classes and files for representing bot responses + | - Cancel // Cancel responses + | - CancelResponses.cs // Cancel dialog response manager + | - CancelString.resx // Cancel dialog strings + | - Escalate // Escalate responses + | - EscalateResponses.cs // Escalate dialog response manager + | - EscalateString.resx // Escalate dialog strings + | - Main // Main responses + | - MainResponses.cs // Main dialog response manager + | - MainString.resx // Main dialog strings + | - Onboarding // Onboarding responses + | - OnboardingResponses.cs // Onboarding dialog response manager + | - OnboardingString.resx // Onboarding dialog strings + | - Services // Configuration for connected services and service clients + | - BotServices.cs // Class representation of service clients and recognizers + | - BotSettings.cs // Class representation of configuration files + | - DispatchLuis.cs // Class representation of LUIS result from Dispatch language model + | - GeneralLuis.cs // Class representation of LUIS result from General language model + | - appsettings.json // Configuration for application and Azure services + | - cognitivemodels.json // Configuration for language models, knowledgebases, and dispatch model + | - skills.json // Configuration for connected skills + | - Program.cs // Default Program.cs file + | - Startup.cs // Initializes dependencies + diff --git a/docs/_docs/tutorials/csharp/create-assistant/4_provision_your_azure_resources.md b/docs/_docs/virtual-assistant/tutorials/create-assistant/csharp/4-provision-your-azure-resources.md similarity index 94% rename from docs/_docs/tutorials/csharp/create-assistant/4_provision_your_azure_resources.md rename to docs/_docs/virtual-assistant/tutorials/create-assistant/csharp/4-provision-your-azure-resources.md index 07aa1d3bc7..1823f22f48 100644 --- a/docs/_docs/tutorials/csharp/create-assistant/4_provision_your_azure_resources.md +++ b/docs/_docs/virtual-assistant/tutorials/create-assistant/csharp/4-provision-your-azure-resources.md @@ -1,6 +1,7 @@ --- -category: Tutorials -subcategory: Create a Virtual Assistant +layout: tutorial +category: Virtual Assistant +subcategory: Create language: C# title: Provision your Azure resources order: 4 @@ -35,4 +36,4 @@ The Virtual Assistant requires the following Azure dependencies to run correctly appPassword | The password for the [Azure Active Directory App](https://ms.portal.azure.com/#blade/Microsoft_AAD_IAM/ActiveDirectoryMenuBlade/Overview) that will be used by your bot. It must be at least 16 characters long, contain at least 1 special character, and contain at least 1 numeric character. If using an existing app, this must be the existing password. | **Yes** luisAuthoringKey | The authoring key for your LUIS account.
It can be found at https://www.luis.ai/user/settings or https://eu.luis.ai/user/settings | **Yes** -You can find more detailed deployment steps including customization in the [Virtual Assistant and Skill Template deployment]({{site.baseurl}}/reference/virtual-assistant/deploymentscripts/) page. +You can find more detailed deployment steps including customization in the [Virtual Assistant and Skill Template deployment]({{site.baseurl}}/help/reference/deployment-scripts/) page. diff --git a/docs/_docs/tutorials/csharp/create-assistant/5_run_your_assistant.md b/docs/_docs/virtual-assistant/tutorials/create-assistant/csharp/5-run-your-assistant.md similarity index 96% rename from docs/_docs/tutorials/csharp/create-assistant/5_run_your_assistant.md rename to docs/_docs/virtual-assistant/tutorials/create-assistant/csharp/5-run-your-assistant.md index 2299ffa932..7bbb99db63 100644 --- a/docs/_docs/tutorials/csharp/create-assistant/5_run_your_assistant.md +++ b/docs/_docs/virtual-assistant/tutorials/create-assistant/csharp/5-run-your-assistant.md @@ -1,6 +1,7 @@ --- -category: Tutorials -subcategory: Create a Virtual Assistant +layout: tutorial +category: Virtual Assistant +subcategory: Create language: C# title: Run your assistant order: 5 diff --git a/docs/_docs/virtual-assistant/tutorials/create-assistant/csharp/6-next-steps.md b/docs/_docs/virtual-assistant/tutorials/create-assistant/csharp/6-next-steps.md new file mode 100644 index 0000000000..ca1a5a6f8a --- /dev/null +++ b/docs/_docs/virtual-assistant/tutorials/create-assistant/csharp/6-next-steps.md @@ -0,0 +1,57 @@ +--- +layout: tutorial +category: Virtual Assistant +subcategory: Create +language: C# +title: Next steps +order: 6 +--- + +# Tutorial: {{page.subcategory}} ({{page.language}}) + +## Next Steps + +Now that you've got the basics, try one of these tutorials: + + diff --git a/docs/_docs/tutorials/typescript/create-assistant/1_intro.md b/docs/_docs/virtual-assistant/tutorials/create-assistant/typescript/1-intro.md similarity index 93% rename from docs/_docs/tutorials/typescript/create-assistant/1_intro.md rename to docs/_docs/virtual-assistant/tutorials/create-assistant/typescript/1-intro.md index 885662d368..1bbef63b5d 100644 --- a/docs/_docs/tutorials/typescript/create-assistant/1_intro.md +++ b/docs/_docs/virtual-assistant/tutorials/create-assistant/typescript/1-intro.md @@ -1,14 +1,14 @@ --- layout: tutorial -category: Tutorials -subcategory: Create a Virtual Assistant +category: Virtual Assistant +subcategory: Create language: TypeScript title: Intro order: 1 --- # Tutorial: {{page.subcategory}} ({{page.language}}) -## Intro +## {{ page.title }} ### Purpose diff --git a/docs/_docs/tutorials/typescript/create-assistant/2_download_and_install.md b/docs/_docs/virtual-assistant/tutorials/create-assistant/typescript/2-download-and-install.md similarity index 95% rename from docs/_docs/tutorials/typescript/create-assistant/2_download_and_install.md rename to docs/_docs/virtual-assistant/tutorials/create-assistant/typescript/2-download-and-install.md index f5b6fb9a94..d78c880483 100644 --- a/docs/_docs/tutorials/typescript/create-assistant/2_download_and_install.md +++ b/docs/_docs/virtual-assistant/tutorials/create-assistant/typescript/2-download-and-install.md @@ -1,6 +1,7 @@ --- -category: Tutorials -subcategory: Create a Virtual Assistant +layout: tutorial +category: Virtual Assistant +subcategory: Create language: TypeScript title: Download and install order: 2 diff --git
a/docs/_docs/tutorials/typescript/create-assistant/3_create_project.md b/docs/_docs/virtual-assistant/tutorials/create-assistant/typescript/3-create-project.md similarity index 98% rename from docs/_docs/tutorials/typescript/create-assistant/3_create_project.md rename to docs/_docs/virtual-assistant/tutorials/create-assistant/typescript/3-create-project.md index 43dce2bcf1..608daa545c 100644 --- a/docs/_docs/tutorials/typescript/create-assistant/3_create_project.md +++ b/docs/_docs/virtual-assistant/tutorials/create-assistant/typescript/3-create-project.md @@ -1,6 +1,7 @@ --- -category: Tutorials -subcategory: Create a Virtual Assistant +layout: tutorial +category: Virtual Assistant +subcategory: Create language: TypeScript title: Create your assistant order: 3 diff --git a/docs/_docs/tutorials/typescript/create-assistant/4_provision_your_azure_resources.md b/docs/_docs/virtual-assistant/tutorials/create-assistant/typescript/4-provision-you-azure-resources.md similarity index 94% rename from docs/_docs/tutorials/typescript/create-assistant/4_provision_your_azure_resources.md rename to docs/_docs/virtual-assistant/tutorials/create-assistant/typescript/4-provision-you-azure-resources.md index c4c4a1d363..414f8955c8 100644 --- a/docs/_docs/tutorials/typescript/create-assistant/4_provision_your_azure_resources.md +++ b/docs/_docs/virtual-assistant/tutorials/create-assistant/typescript/4-provision-you-azure-resources.md @@ -1,6 +1,7 @@ --- -category: Tutorials -subcategory: Create a Virtual Assistant +layout: tutorial +category: Virtual Assistant +subcategory: Create language: TypeScript title: Provision your Azure resources order: 4 @@ -35,4 +36,4 @@ The Virtual Assistant requires the following Azure dependencies to run correctly appPassword | The password for the [Azure Active Directory App](https://ms.portal.azure.com/#blade/Microsoft_AAD_IAM/ActiveDirectoryMenuBlade/Overview) that will be used by your bot. It must be at least 16 characters long, contain at least 1 special character, and contain at least 1 numeric character. If using an existing app, this must be the existing password. | **Yes** luisAuthoringKey | The authoring key for your LUIS account. It can be found at https://www.luis.ai/user/settings or https://eu.luis.ai/user/settings | **Yes** -You can find more detailed deployment steps including customization in the [Virtual Assistant and Skill Template deployment]({{site.baseurl}}/reference/virtual-assistant/deploymentscripts/) page. +You can find more detailed deployment steps including customization in the [Virtual Assistant and Skill Template deployment]({{site.baseurl}}/help/reference/deployment-scripts/) page.
diff --git a/docs/_docs/tutorials/typescript/create-assistant/5_run_your_assistant.md b/docs/_docs/virtual-assistant/tutorials/create-assistant/typescript/5-run-your-assistant.md similarity index 95% rename from docs/_docs/tutorials/typescript/create-assistant/5_run_your_assistant.md rename to docs/_docs/virtual-assistant/tutorials/create-assistant/typescript/5-run-your-assistant.md index e3e030e9b5..ea4af959d3 100644 --- a/docs/_docs/tutorials/typescript/create-assistant/5_run_your_assistant.md +++ b/docs/_docs/virtual-assistant/tutorials/create-assistant/typescript/5-run-your-assistant.md @@ -1,6 +1,7 @@ --- -category: Tutorials -subcategory: Create a Virtual Assistant +layout: tutorial +category: Virtual Assistant +subcategory: Create language: TypeScript title: Run your assistant order: 5 diff --git a/docs/_docs/virtual-assistant/tutorials/create-assistant/typescript/6-next-steps.md b/docs/_docs/virtual-assistant/tutorials/create-assistant/typescript/6-next-steps.md new file mode 100644 index 0000000000..6c8e9aabec --- /dev/null +++ b/docs/_docs/virtual-assistant/tutorials/create-assistant/typescript/6-next-steps.md @@ -0,0 +1,57 @@ +--- +layout: tutorial +category: Virtual Assistant +subcategory: Create +language: TypeScript +title: Next steps +order: 6 +--- + +# Tutorial: {{page.subcategory}} ({{page.language}}) + +## Next Steps + +Now that you've got the basics, try one of these tutorials: + + diff --git a/docs/_docs/tutorials/csharp/customize-assistant/1_intro.md b/docs/_docs/virtual-assistant/tutorials/customize-assistant/csharp/1-intro.md similarity index 59% rename from docs/_docs/tutorials/csharp/customize-assistant/1_intro.md rename to docs/_docs/virtual-assistant/tutorials/customize-assistant/csharp/1-intro.md index 94babcda75..84fccaa78f 100644 --- a/docs/_docs/tutorials/csharp/customize-assistant/1_intro.md +++ b/docs/_docs/virtual-assistant/tutorials/customize-assistant/csharp/1-intro.md @@ -1,6 +1,7 @@ --- -category: Tutorials -subcategory: Customize a Virtual Assistant +layout: tutorial +category: Virtual Assistant +subcategory: Customize language: C# title: Intro order: 1 @@ -8,7 +9,7 @@ order: 1 # Tutorial: {{page.subcategory}} ({{page.language}}) -## Intro +## {{ page.title }} ### Purpose @@ -16,7 +17,7 @@ Learn how to navigate your assistant's project and make common customizations. ### Prerequisites -[Create a Virtual Assistant]({{site.baseurl}}/tutorials/csharp/create-assistant/1_intro) to setup your environment. +[Create a Virtual Assistant]({{site.baseurl}}/virtual-assistant/tutorials/create-assistant/csharp/1-intro) to set up your environment.
### Time to Complete diff --git a/docs/_docs/tutorials/csharp/customize-assistant/2_edit_your_greeting.md b/docs/_docs/virtual-assistant/tutorials/customize-assistant/csharp/2-edit-your-greeting.md similarity index 99% rename from docs/_docs/tutorials/csharp/customize-assistant/2_edit_your_greeting.md rename to docs/_docs/virtual-assistant/tutorials/customize-assistant/csharp/2-edit-your-greeting.md index dc5cf8836e..28d4317ced 100644 --- a/docs/_docs/tutorials/csharp/customize-assistant/2_edit_your_greeting.md +++ b/docs/_docs/virtual-assistant/tutorials/customize-assistant/csharp/2-edit-your-greeting.md @@ -1,6 +1,7 @@ --- -category: Tutorials -subcategory: Customize a Virtual Assistant +layout: tutorial +category: Virtual Assistant +subcategory: Customize language: C# title: Edit your greeting order: 2 diff --git a/docs/_docs/tutorials/csharp/customize-assistant/3_edit_responses.md b/docs/_docs/virtual-assistant/tutorials/customize-assistant/csharp/3-edit-responses.md similarity index 92% rename from docs/_docs/tutorials/csharp/customize-assistant/3_edit_responses.md rename to docs/_docs/virtual-assistant/tutorials/customize-assistant/csharp/3-edit-responses.md index 101b07d8a9..ab4146f9e4 100644 --- a/docs/_docs/tutorials/csharp/customize-assistant/3_edit_responses.md +++ b/docs/_docs/virtual-assistant/tutorials/customize-assistant/csharp/3-edit-responses.md @@ -1,6 +1,7 @@ --- -category: Tutorials -subcategory: Customize a Virtual Assistant +layout: tutorial +category: Virtual Assistant +subcategory: Customize language: C# title: Edit your responses order: 3 diff --git a/docs/_docs/tutorials/csharp/customize-assistant/4_edit_your_cognitive_models.md b/docs/_docs/virtual-assistant/tutorials/customize-assistant/csharp/4-edit-your-cognitive-models.md similarity index 98% rename from docs/_docs/tutorials/csharp/customize-assistant/4_edit_your_cognitive_models.md rename to docs/_docs/virtual-assistant/tutorials/customize-assistant/csharp/4-edit-your-cognitive-models.md index 68664a2170..8f1e8a6526 100644 --- a/docs/_docs/tutorials/csharp/customize-assistant/4_edit_your_cognitive_models.md +++ b/docs/_docs/virtual-assistant/tutorials/customize-assistant/csharp/4-edit-your-cognitive-models.md @@ -1,6 +1,7 @@ --- -category: Tutorials -subcategory: Customize a Virtual Assistant +layout: tutorial +category: Virtual Assistant +subcategory: Customize language: C# title: Edit your cognitive models order: 4 diff --git a/docs/_docs/virtual-assistant/tutorials/customize-assistant/csharp/5-next-steps.md b/docs/_docs/virtual-assistant/tutorials/customize-assistant/csharp/5-next-steps.md new file mode 100644 index 0000000000..d26e90de73 --- /dev/null +++ b/docs/_docs/virtual-assistant/tutorials/customize-assistant/csharp/5-next-steps.md @@ -0,0 +1,47 @@ +--- +layout: tutorial +category: Virtual Assistant +subcategory: Customize +language: C# +title: Next steps +order: 5 +--- + +# Tutorial: {{page.subcategory}} ({{page.language}}) + +## Next steps + +Now that you've learned how to personalize a Virtual Assistant, try one of these tutorials: + + \ No newline at end of file diff --git a/docs/_docs/tutorials/typescript/customize-assistant/1_intro.md b/docs/_docs/virtual-assistant/tutorials/customize-assistant/typescript/1-intro.md similarity index 83% rename from docs/_docs/tutorials/typescript/customize-assistant/1_intro.md rename to docs/_docs/virtual-assistant/tutorials/customize-assistant/typescript/1-intro.md index 8cb5985bc9..e1526d9cdf 100644 ---
a/docs/_docs/tutorials/typescript/customize-assistant/1_intro.md +++ b/docs/_docs/virtual-assistant/tutorials/customize-assistant/typescript/1-intro.md @@ -1,6 +1,7 @@ --- -category: Tutorials -subcategory: Customize a Virtual Assistant +layout: tutorial +category: Virtual Assistant +subcategory: Customize language: TypeScript title: Intro order: 1 @@ -8,7 +9,7 @@ order: 1 # Tutorial: {{page.subcategory}} ({{page.language}}) -## Intro +## {{ page.title }} ### Purpose diff --git a/docs/_docs/tutorials/typescript/customize-assistant/2_edit_your_greeting.md b/docs/_docs/virtual-assistant/tutorials/customize-assistant/typescript/2-edit-your-greeting.md similarity index 99% rename from docs/_docs/tutorials/typescript/customize-assistant/2_edit_your_greeting.md rename to docs/_docs/virtual-assistant/tutorials/customize-assistant/typescript/2-edit-your-greeting.md index e99d144f02..4ba34c3df0 100644 --- a/docs/_docs/tutorials/typescript/customize-assistant/2_edit_your_greeting.md +++ b/docs/_docs/virtual-assistant/tutorials/customize-assistant/typescript/2-edit-your-greeting.md @@ -1,6 +1,7 @@ --- -category: Tutorials -subcategory: Customize a Virtual Assistant +layout: tutorial +category: Virtual Assistant +subcategory: Customize language: TypeScript title: Edit your greeting order: 2 diff --git a/docs/_docs/tutorials/typescript/customize-assistant/3_edit_responses.md b/docs/_docs/virtual-assistant/tutorials/customize-assistant/typescript/3-edit-responses.md similarity index 91% rename from docs/_docs/tutorials/typescript/customize-assistant/3_edit_responses.md rename to docs/_docs/virtual-assistant/tutorials/customize-assistant/typescript/3-edit-responses.md index 70e2589e74..9a7a1c3b0f 100644 --- a/docs/_docs/tutorials/typescript/customize-assistant/3_edit_responses.md +++ b/docs/_docs/virtual-assistant/tutorials/customize-assistant/typescript/3-edit-responses.md @@ -1,6 +1,7 @@ --- -category: Tutorials -subcategory: Customize a Virtual Assistant +layout: tutorial +category: Virtual Assistant +subcategory: Customize language: TypeScript title: Edit your responses order: 3 diff --git a/docs/_docs/tutorials/typescript/customize-assistant/4_edit_your_cognitive_models.md b/docs/_docs/virtual-assistant/tutorials/customize-assistant/typescript/4-edit-your-cognitive-models.md similarity index 97% rename from docs/_docs/tutorials/typescript/customize-assistant/4_edit_your_cognitive_models.md rename to docs/_docs/virtual-assistant/tutorials/customize-assistant/typescript/4-edit-your-cognitive-models.md index afee59ed8f..bf8cd2bea9 100644 --- a/docs/_docs/tutorials/typescript/customize-assistant/4_edit_your_cognitive_models.md +++ b/docs/_docs/virtual-assistant/tutorials/customize-assistant/typescript/4-edit-your-cognitive-models.md @@ -1,6 +1,7 @@ --- -category: Tutorials -subcategory: Customize a Virtual Assistant +layout: tutorial +category: Virtual Assistant +subcategory: Customize language: TypeScript title: Edit your cognitive models order: 4 diff --git a/docs/_docs/virtual-assistant/tutorials/customize-assistant/typescript/5-next-steps.md b/docs/_docs/virtual-assistant/tutorials/customize-assistant/typescript/5-next-steps.md new file mode 100644 index 0000000000..3b6311a978 --- /dev/null +++ b/docs/_docs/virtual-assistant/tutorials/customize-assistant/typescript/5-next-steps.md @@ -0,0 +1,47 @@ +--- +layout: tutorial +category: Virtual Assistant +subcategory: Customize +language: TypeScript +title: Next steps +order: 5 +--- + +# Tutorial: {{page.subcategory}} ({{page.language}}) + +## Next steps 
+ +Now that you've learned how to personalize a Virtual Assistant, try one of these tutorials: + + \ No newline at end of file diff --git a/docs/_docs/virtual-assistant/tutorials/deploy-assistant/cli/1-intro.md b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/cli/1-intro.md new file mode 100644 index 0000000000..ceb650cf00 --- /dev/null +++ b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/cli/1-intro.md @@ -0,0 +1,23 @@ +--- +layout: tutorial +category: Virtual Assistant +subcategory: Deploy +language: Using CLI tools +title: Intro +order: 1 +--- + +# Tutorial: {{page.subcategory}} ({{page.language}}) +## {{page.title}} + +### Purpose +The Virtual Assistant comes with a set of scripts to simplify the deployment process. It is recommended that you [create a Virtual Assistant]({{site.baseurl}}/virtual-assistant/tutorials/create-assistant/csharp/1-intro) that way. However, if you'd like to manually deploy and configure your assistant, you can follow these steps. + +### Prerequisites + +### Time To Complete +10 minutes + +### Scenario + +Deploy Virtual Assistant resources manually by using CLI tools. \ No newline at end of file diff --git a/docs/_docs/virtual-assistant/tutorials/deploy-assistant/cli/2-create-msa-app-registration.md b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/cli/2-create-msa-app-registration.md new file mode 100644 index 0000000000..9838d72f0b --- /dev/null +++ b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/cli/2-create-msa-app-registration.md @@ -0,0 +1,22 @@ +--- +layout: tutorial +category: Virtual Assistant +subcategory: Deploy +language: Using CLI tools +title: Create Microsoft App registration +order: 2 +--- + +# Tutorial: {{page.subcategory}} ({{page.language}}) + +## {{page.title}} + +Run the following command to create your app registration: + +``` +az ad app create ` + --display-name 'your-app-name' ` + --password 'your-app-pw' ` + --available-to-other-tenants ` + --reply-urls 'https://token.botframework.com/.auth/web/redirect' +``` diff --git a/docs/_docs/virtual-assistant/tutorials/deploy-assistant/cli/3-deploy-arm-template.md b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/cli/3-deploy-arm-template.md new file mode 100644 index 0000000000..9ad6a47f54 --- /dev/null +++ b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/cli/3-deploy-arm-template.md @@ -0,0 +1,20 @@ +--- +layout: tutorial +category: Virtual Assistant +subcategory: Deploy +language: Using CLI tools +title: Deploy an Azure Resource Manager (ARM) template +order: 3 +--- + +# Tutorial: {{page.subcategory}} ({{page.language}}) +## {{page.title}} + +Run the following command to deploy the Virtual Assistant ARM template: +``` +az group deployment create ` + --resource-group "resource-group-name" ` + --template-file "path-to-arm-template" ` + --parameters "path-to-arm-parameters-file" ` + --parameters microsoftAppId='ms-app-id' microsoftAppPassword='ms-app-pw' +``` diff --git a/docs/_docs/virtual-assistant/tutorials/deploy-assistant/cli/4-update-application-settings.md b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/cli/4-update-application-settings.md new file mode 100644 index 0000000000..ee648087fa --- /dev/null +++ b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/cli/4-update-application-settings.md @@ -0,0 +1,32 @@ +--- +layout: tutorial +category: Virtual Assistant +subcategory: Deploy +language: Using CLI tools +title: Update application settings +order: 4 +--- + +# Tutorial: {{page.subcategory}} ({{page.language}})
+## {{page.title}} + +After your Azure resources have been deployed, fill in the following keys and secrets in appsettings.json with the values from your deployed resources: +```json +{ + "microsoftAppId": "", + "microsoftAppPassword": "", + "ApplicationInsights": { + "InstrumentationKey": "" + }, + "blobStorage": { + "connectionString": "", + "container": "transcripts" + }, + "cosmosDb": { + "collectionId": "botstate-collection", + "databaseId": "botstate-db", + "cosmosDBEndpoint": "", + "authKey": "" + } +} +``` \ No newline at end of file diff --git a/docs/_docs/virtual-assistant/tutorials/deploy-assistant/cli/5-deploy-luis-models.md b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/cli/5-deploy-luis-models.md new file mode 100644 index 0000000000..455d52407f --- /dev/null +++ b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/cli/5-deploy-luis-models.md @@ -0,0 +1,65 @@ +--- +layout: tutorial +category: Virtual Assistant +subcategory: Deploy +language: Using CLI tools +title: Deploy LUIS models +order: 5 +--- + +# Tutorial: {{page.subcategory}} ({{page.language}}) +## {{page.title}} + +1. Run the following command for each .lu file in `\Deployment\Resources\LU` to parse the files to .luis files that can be imported to LUIS: + ``` + ludown parse toluis ` + --in "path-to-lu-file" ` + --luis_culture "culture-code" ` + --out_folder "output-folder" ` + --out "output-file-name.luis" + ``` +1. Run the following command to import the LUIS model into the LUIS portal. + ``` + luis import application ` + --appName "app-name" ` + --authoringKey "luis-authoring-key" ` + --subscriptionKey "luis-authoring-key" ` + --region "region" ` + --in "path-to-luis-file" ` + --wait + ``` +1. Run the following command to train the LUIS model. + ``` + luis train version ` + --appId "app-id" ` + --region "region" ` + --authoringKey "authoring-key" ` + --versionId "version-id" ` + --wait + ``` +1. Run the following command to publish the LUIS model. + ``` + luis publish version ` + --appId "app-id" ` + --region "region" ` + --authoringKey "authoring-key" ` + --versionId "version-id" ` + --wait + ``` +1. Run the following command to create a .cs representation of your LUIS model. + ``` + luisgen "path-to-luis-file" -cs "YourModelNameLuis" -o "path-to-output-folder" + ``` +1. For each LUIS model, add the following configuration to the `cognitiveModels.your-locale.languageModels` collection in the cognitivemodels.json file: + ```json + { + "subscriptionkey": "", + "appid": "", + "id": "", + "version": "", + "region": "", + "name": "", + "authoringkey": "", + "authoringRegion": "" + } + ``` diff --git a/docs/_docs/virtual-assistant/tutorials/deploy-assistant/cli/6-deploy-qna-maker-knowledgebases.md b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/cli/6-deploy-qna-maker-knowledgebases.md new file mode 100644 index 0000000000..92b3c2afa1 --- /dev/null +++ b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/cli/6-deploy-qna-maker-knowledgebases.md @@ -0,0 +1,45 @@ +--- +layout: tutorial +category: Virtual Assistant +subcategory: Deploy +language: Using CLI tools +title: Deploy QnA Maker knowledge bases +order: 6 +--- + +# Tutorial: {{page.subcategory}} ({{page.language}}) +## {{page.title}} + +1. Run the following command for each .lu file in `\Deployment\Resources\QnA` to parse the files to .qna files that can be deployed to QnA Maker: + ``` + ludown parse toqna ` + --in "path-to-lu-file" ` + --out_folder "output-folder" ` + --out "output-file-name.qna" + ``` +1.
Run the following command to import the .qna file into QnA Maker. + ``` + qnamaker create kb ` + --name "kb-name" ` + --subscriptionKey "qna-subscription-key" ` + --in "path-to-qna-file" ` + --force ` + --wait + ``` +1. Run the following command to publish the knowledgebase. + ``` + qnamaker publish kb ` + --kbId "kb-id" ` + --subscriptionKey "qna-subscription-key" + ``` +1. For each QnA Maker knowledgebase, add the following configuration to the `cognitiveModels.your-locale.knowledgebases` collection in the cognitivemodels.json file: + ```json + { + "endpointKey": "", + "kbId": "", + "hostname": "", + "subscriptionKey": "", + "name": "", + "id": "" + } + ``` diff --git a/docs/_docs/virtual-assistant/tutorials/deploy-assistant/cli/7-create-dispatch-model.md b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/cli/7-create-dispatch-model.md new file mode 100644 index 0000000000..3e990d80fa --- /dev/null +++ b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/cli/7-create-dispatch-model.md @@ -0,0 +1,67 @@ +--- +layout: tutorial +category: Virtual Assistant +subcategory: Deploy +language: Using CLI tools +title: Create a Dispatch LUIS model +order: 7 +--- + +# Tutorial: {{page.subcategory}} ({{page.language}}) + +## {{page.title}} + +1. Initialize the dispatch model. + ``` + dispatch init ` + --name "dispatch-name" ` + --luisAuthoringKey "luis-authoring-key" ` + --luisAuthoringRegion "luis-authoring-region" ` + --dataFolder "path-to-output-folder" + ``` +1. Add LUIS and QnA Maker sources. + - For each LUIS app, run the following command: + ``` + dispatch add ` + --type "luis" ` + --name "luis-app-name" ` + --id "luis-app-id" ` + --region "luis-region" ` + --intentName "l_luis-app-name" ` + --dataFolder "path-to-output-folder" ` + --dispatch "path-to-.dispatch-file" + ``` + + - For each QnA Maker knowledgebase, run the following command: + ``` + dispatch add ` + --type "qna" ` + --name "kb-name" ` + --id "kb-id" ` + --key "qna-subscription-key" ` + --intentName "q_kb-app-name" ` + --dataFolder "path-to-output-folder" ` + --dispatch "path-to-.dispatch-file" + ``` +1. Create the dispatch model. + ``` + dispatch create ` + --dispatch "path-to-.dispatch-file" ` + --dataFolder "path-to-output-folder" ` + --culture "dispatch-culture" + ``` +1. Run the luisgen tool to generate a .cs representation of your Dispatch model. + ``` + luisgen "path-to-.json-file" -cs "DispatchLuis" -o "output-folder" + ``` +1. Add the following configuration to the `cognitiveModels.your-locale.dispatchModel` collection in the cognitivemodels.json file: + ```json + "dispatchModel": { + "authoringkey": "", + "appid": "", + "name": "", + "subscriptionkey": "", + "region": "", + "authoringRegion": "" + } + ``` \ No newline at end of file diff --git a/docs/_docs/virtual-assistant/tutorials/deploy-assistant/web/1-intro.md b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/web/1-intro.md new file mode 100644 index 0000000000..bad5ac3441 --- /dev/null +++ b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/web/1-intro.md @@ -0,0 +1,23 @@ +--- +layout: tutorial +category: Virtual Assistant +subcategory: Deploy +language: Using the web +title: Intro +order: 1 +--- + +# Tutorial: {{page.subcategory}} ({{page.language}}) +## {{page.title}} + +### Purpose +The Virtual Assistant comes with a set of scripts to simplify the deployment process. It is recommended that you [create a Virtual Assistant]({{site.baseurl}}/virtual-assistant/tutorials/create-assistant/csharp/1-intro) that way.
However, if you'd like to manually deploy and configure your assistant, you can follow these steps. + +### Prerequisites + +### Time To Complete +10 minutes + +### Scenario + +Deploy Virtual Assistant resources manually by using the web. \ No newline at end of file diff --git a/docs/_docs/virtual-assistant/tutorials/deploy-assistant/web/2-create-msa-app-registration.md b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/web/2-create-msa-app-registration.md new file mode 100644 index 0000000000..7944d5e372 --- /dev/null +++ b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/web/2-create-msa-app-registration.md @@ -0,0 +1,15 @@ +--- +layout: tutorial +category: Virtual Assistant +subcategory: Deploy +language: Using the web +title: Create Microsoft App registration +order: 2 +--- + +# Tutorial: {{page.subcategory}} ({{page.language}}) + +## {{page.title}} + +Follow the [Register an application in Azure AD](https://docs.microsoft.com/en-us/azure/bot-service/bot-builder-tutorial-authentication?view=azure-bot-service-3.0&tabs=aadv1#register-an-application-in-azure-ad) instructions. +> Under **Supported account types** you should select either "Accounts in any organizational directory" or "Accounts in any organizational directory and personal Microsoft accounts" to ensure the Azure Bot Service can correctly expose your bot via Bot Channels. diff --git a/docs/_docs/virtual-assistant/tutorials/deploy-assistant/web/3-deploy-arm-template.md b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/web/3-deploy-arm-template.md new file mode 100644 index 0000000000..65c96afbb7 --- /dev/null +++ b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/web/3-deploy-arm-template.md @@ -0,0 +1,17 @@ +--- +layout: tutorial +category: Virtual Assistant +subcategory: Deploy +language: Using the web +title: Deploy an Azure Resource Manager (ARM) template +order: 3 +--- + +# Tutorial: {{page.subcategory}} ({{page.language}}) + +## {{page.title}} + +1. Click on the following button to load the Virtual Assistant ARM template in the Azure Portal: +Deploy to Azure +1. Provide your Microsoft App Id and Microsoft App Password, and override any default parameter values as needed. +1. Click "Purchase" to deploy.
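Before clicking "Purchase", you can optionally validate your parameter values against the template from a command line. The sketch below assumes the same `az group deployment` command group used in the CLI version of this tutorial; paths and values are placeholders:

```
# Validates the template and parameters without deploying anything.
az group deployment validate `
    --resource-group "resource-group-name" `
    --template-file "path-to-arm-template" `
    --parameters microsoftAppId='ms-app-id' microsoftAppPassword='ms-app-pw'
```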
diff --git a/docs/_docs/virtual-assistant/tutorials/deploy-assistant/web/4-update-application-settings.md b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/web/4-update-application-settings.md new file mode 100644 index 0000000000..48d245f6f4 --- /dev/null +++ b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/web/4-update-application-settings.md @@ -0,0 +1,33 @@ +--- +layout: tutorial +category: Virtual Assistant +subcategory: Deploy +language: Using the web +title: Update application settings +order: 4 +--- + +# Tutorial: {{page.subcategory}} ({{page.language}}) + +## {{page.title}} + +After your Azure resources have been deployed, fill in the following keys and secrets in appsettings.json with the values from your deployed resources: +```json +{ + "microsoftAppId": "", + "microsoftAppPassword": "", + "ApplicationInsights": { + "InstrumentationKey": "" + }, + "blobStorage": { + "connectionString": "", + "container": "transcripts" + }, + "cosmosDb": { + "collectionId": "botstate-collection", + "databaseId": "botstate-db", + "cosmosDBEndpoint": "", + "authKey": "" + } +} +``` \ No newline at end of file diff --git a/docs/_docs/virtual-assistant/tutorials/deploy-assistant/web/5-deploy-luis-models.md b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/web/5-deploy-luis-models.md new file mode 100644 index 0000000000..491b053fd3 --- /dev/null +++ b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/web/5-deploy-luis-models.md @@ -0,0 +1,40 @@ +--- +layout: tutorial +category: Virtual Assistant +subcategory: Deploy +language: Using the web +title: Deploy LUIS models +order: 5 +--- + +# Tutorial: {{page.subcategory}} ({{page.language}}) + +## {{page.title}} + +1. Run the following command for each .lu file in `\Deployment\Resources\LU` to parse the files to .json files that can be imported into the LUIS portal: + ``` + ludown parse toluis ` + --in "path-to-lu-file" ` + --luis_culture "culture-code" ` + --out_folder "output-folder" ` + --out "output-file-name.json" + ``` +1. In the LUIS portal, click "Create new app". +1. Provide a name, culture, and description for your app. +1. Click **Manage** > **Versions** > **Import version**. +1. Browse to your .json file, then click "Done". +1. Train your LUIS app. +1. Publish your LUIS app. +1. For each LUIS model, add the following configuration to the `cognitiveModels.your-locale.languageModels` collection in the cognitivemodels.json file: + ```json + { + "subscriptionkey": "", + "appid": "", + "id": "", + "version": "", + "region": "", + "name": "", + "authoringkey": "", + "authoringRegion": "" + } + ``` diff --git a/docs/_docs/virtual-assistant/tutorials/deploy-assistant/web/6-deploy-qna-maker-knowledgebases.md b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/web/6-deploy-qna-maker-knowledgebases.md new file mode 100644 index 0000000000..b2ad4eff51 --- /dev/null +++ b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/web/6-deploy-qna-maker-knowledgebases.md @@ -0,0 +1,27 @@ +--- +layout: tutorial +category: Virtual Assistant +subcategory: Deploy +language: Using the web +title: Deploy QnA Maker knowledge bases +order: 6 +--- + +# Tutorial: {{page.subcategory}} ({{page.language}}) + +## {{page.title}} + +The QnA Maker portal does not accept JSON files as input. To deploy directly through the portal, either author new knowledgebases for your scenario in the portal itself or import your data in TSV format.
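If you take the TSV route, QnA Maker expects one question-answer pair per row. A minimal sketch of the column layout follows; the exact header names and metadata syntax are assumptions and should be verified against the QnA Maker import documentation:

```
Question	Answer	Source	Metadata
What are your hours?	We are open 9am to 5pm, Monday through Friday.	Editorial	category:hours
How do I reset my password?	Select "Reset password" on the account page.	Editorial	category:account
```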
+ +After creating your knowledgebases, update the `cognitiveModels.your-locale.knowledgebases` collection in the cognitivemodels.json file for each knowledgebase: + +```json +{ + "endpointKey": "", + "kbId": "", + "hostname": "", + "subscriptionKey": "", + "name": "", + "id": "" +} +``` \ No newline at end of file diff --git a/docs/_docs/virtual-assistant/tutorials/deploy-assistant/web/7-create-dispatch-model.md b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/web/7-create-dispatch-model.md new file mode 100644 index 0000000000..a0662c44d8 --- /dev/null +++ b/docs/_docs/virtual-assistant/tutorials/deploy-assistant/web/7-create-dispatch-model.md @@ -0,0 +1,67 @@ +--- +layout: tutorial +category: Virtual Assistant +subcategory: Deploy +language: Using the web +title: Create a Dispatch LUIS model +order: 7 +--- + +# Tutorial: {{page.subcategory}} ({{page.language}}) + +## {{page.title}} + +1. Initialize the dispatch model. + ``` + dispatch init ` + --name "dispatch-name" ` + --luisAuthoringKey "luis-authoring-key" ` + --luisAuthoringRegion "luis-authoring-region" ` + --dataFolder "path-to-output-folder" + ``` +1. Add LUIS and QnA Maker sources. + - For each LUIS app, run the following command: + ``` + dispatch add ` + --type "luis" ` + --name "luis-app-name" ` + --id "luis-app-id" ` + --region "luis-region" ` + --intentName "l_luis-app-name" ` + --dataFolder "path-to-output-folder" ` + --dispatch "path-to-.dispatch-file" + ``` + + - For each QnA Maker knowledgebase, run the following command: + ``` + dispatch add ` + --type "qna" ` + --name "kb-name" ` + --id "kb-id" ` + --key "qna-subscription-key" ` + --intentName "q_kb-app-name" ` + --dataFolder "path-to-output-folder" ` + --dispatch "path-to-.dispatch-file" + ``` +1. Create the dispatch model. + ``` + dispatch create ` + --dispatch "path-to-.dispatch-file" ` + --dataFolder "path-to-output-folder" ` + --culture "dispatch-culture" + ``` +1. Run the luisgen tool to generate a .cs representation of your Dispatch model. + ``` + luisgen "path-to-.json-file" -cs "DispatchLuis" -o "output-folder" + ``` +1. Add the following configuration to the `cognitiveModels.your-locale.dispatchModel` collection in the cognitivemodels.json file: + ```json + "dispatchModel": { + "authoringkey": "", + "appid": "", + "name": "", + "subscriptionkey": "", + "region": "", + "authoringRegion": "" + } + ``` \ No newline at end of file diff --git a/docs/_includes/footer.html b/docs/_includes/footer.html index 3e71b825a0..b49c1d3a97 100644 --- a/docs/_includes/footer.html +++ b/docs/_includes/footer.html @@ -1,5 +1,5 @@