-
Notifications
You must be signed in to change notification settings - Fork 1.9k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Add a C# sample for post call analytics (#2631)
* Add post call analytics sample with Azure Open AI * Add README. Cleanup * Add examples to help * Fix help * Update Program.cs * Remove byte order mark
- Loading branch information
Showing
4 changed files
with
184 additions
and
6 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
114 changes: 114 additions & 0 deletions
114
scenarios/csharp/dotnetcore/post-call-analytics/Program.cs
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,114 @@ | ||
using Azure.AI.OpenAI; | ||
using OpenAI.Chat; | ||
using System.ClientModel; | ||
using System.CommandLine; | ||
using System.Net.Http.Json; | ||
using System.Text.Json; | ||
|
||
namespace PostCallAnalytics | ||
{ | ||
/// <summary>
/// Options payload sent as the "definition" part of a Speech fast
/// transcription request (serialized to JSON by the caller).
/// </summary>
public sealed class FastTranscriptionOptions
{
    /// <summary>Locales to transcribe with; defaults to US English.</summary>
    public string[] Locales { get; set; } = ["en-US"];
}
|
||
public class Program
{
    // A single shared HttpClient: creating a new client per request
    // exhausts sockets under load (CA anti-pattern). The subscription key
    // is attached per request instead of via DefaultRequestHeaders.
    private static readonly HttpClient s_httpClient = new();

    /// <summary>
    /// Reads the audio file fully into memory and wraps the bytes as HTTP content.
    /// </summary>
    /// <param name="inputAudio">Audio file to read; assumed to exist (validated by the caller).</param>
    /// <returns>A <see cref="ByteArrayContent"/> holding the raw file bytes.</returns>
    internal static async Task<ByteArrayContent> GetAudioContentAsync(FileInfo inputAudio)
    {
        // Dispose the file stream deterministically; the previous version leaked it.
        await using var audioStream = inputAudio.OpenRead();
        using var memoryStream = new MemoryStream();
        await audioStream.CopyToAsync(memoryStream);
        return new ByteArrayContent(memoryStream.ToArray());
    }

    /// <summary>
    /// Calls the Speech fast transcription REST API and returns the combined transcript.
    /// </summary>
    /// <param name="speechKey">Cognitive Services / Speech resource key.</param>
    /// <param name="speechRegion">Speech resource region, e.g. "eastus".</param>
    /// <param name="inputAudio">Audio file to transcribe.</param>
    /// <returns>The combined transcript text, or null when the request fails.</returns>
    internal static async Task<string?> TranscribeAsync(string speechKey, string speechRegion, FileInfo inputAudio)
    {
        var speechEndpoint = $"https://{speechRegion}.api.cognitive.microsoft.com/speechtotext/transcriptions:transcribe?api-version=2024-05-15-preview";

        using var multipartFormDataContent = new MultipartFormDataContent();
        var fastTranscriptionOptions = new FastTranscriptionOptions();
        var fastTranscriptionOptionsDefinition = JsonContent.Create(fastTranscriptionOptions);
        multipartFormDataContent.Add(fastTranscriptionOptionsDefinition, "definition");

        var streamContent = await GetAudioContentAsync(inputAudio);
        streamContent.Headers.Add("Content-Type", "multipart/form-data");

        // Speech fast transcription requires a filename to be specified when sending a file.
        multipartFormDataContent.Add(streamContent, "audio", "audio.wav");

        // Attach the key per request so the shared client stays credential-free.
        using var request = new HttpRequestMessage(HttpMethod.Post, speechEndpoint)
        {
            Content = multipartFormDataContent,
        };
        request.Headers.Add("Ocp-Apim-Subscription-Key", speechKey);

        var response = await s_httpClient.SendAsync(request, CancellationToken.None);
        Console.WriteLine($"{speechEndpoint} : {response.StatusCode}");

        var content = await response.Content.ReadAsStringAsync();

        // On failure the body is an error document, not a transcription result;
        // surface it instead of failing later with a cryptic JSON lookup error.
        if (!response.IsSuccessStatusCode)
        {
            Console.WriteLine($"Error: transcription request failed: {content}");
            return null;
        }

        var json = JsonDocument.Parse(content).RootElement;

        var combinedTranscript = json.GetProperty("combinedPhrases")[0].GetProperty("text").GetString();
        return combinedTranscript;
    }

    /// <summary>
    /// Summarizes a call transcript with an Azure OpenAI chat deployment.
    /// </summary>
    /// <param name="openAiKey">Azure OpenAI resource key.</param>
    /// <param name="openAiEndpoint">Azure OpenAI resource endpoint URL.</param>
    /// <param name="deploymentOrModelName">Chat deployment (or model) name.</param>
    /// <param name="transcription">Transcript text to summarize.</param>
    /// <returns>A short summary of the conversation.</returns>
    internal static async Task<string> SummarizeAsync(string openAiKey, string openAiEndpoint, string deploymentOrModelName, string transcription)
    {
        var azureClient = new AzureOpenAIClient(new Uri(openAiEndpoint), new ApiKeyCredential(openAiKey));
        var chatClient = azureClient.GetChatClient(deploymentOrModelName);

        var completion = await chatClient.CompleteChatAsync(
            [
                new SystemChatMessage("You are an AI assistant that helps extract information from customer call center transcripts. Summarize the conversation in a couple sentences."),
                new UserChatMessage(transcription)
            ]
        );

        Console.WriteLine($"{openAiEndpoint} : {completion.GetRawResponse().Status}");

        var summary = completion.Value.Content[0].Text;
        return summary;
    }

    /// <summary>
    /// End-to-end pipeline: validate options, transcribe the audio, then summarize the transcript.
    /// </summary>
    internal static async Task AnalyzeAudioAsync(string speechKey, string speechRegion, FileInfo inputAudio, string openAiKey, string openAiEndpoint, string deploymentOrModelName)
    {
        if (string.IsNullOrEmpty(speechKey) || string.IsNullOrEmpty(speechRegion) || (inputAudio == null || !inputAudio.Exists) || string.IsNullOrEmpty(openAiKey) || string.IsNullOrEmpty(openAiEndpoint) || string.IsNullOrEmpty(deploymentOrModelName))
        {
            Console.WriteLine("Error: missing required option");
            return;
        }

        var transcription = await TranscribeAsync(speechKey, speechRegion, inputAudio);
        Console.WriteLine($"Transcription: {transcription}");

        // Don't send an empty/failed transcription to the summarizer.
        if (string.IsNullOrEmpty(transcription))
        {
            Console.WriteLine("Error: no transcription available to summarize");
            return;
        }

        var summary = await SummarizeAsync(openAiKey, openAiEndpoint, deploymentOrModelName, transcription);
        Console.WriteLine($"Summary: {summary}");
    }

    /// <summary>
    /// Entry point: parses command-line options and runs the analysis pipeline.
    /// </summary>
    /// <param name="args">Command-line arguments.</param>
    /// <returns>Process exit code from the command-line host.</returns>
    public async static Task<int> Main(string[] args)
    {
        var inputAudio = new Option<FileInfo>(name: "--inputAudio", description: "Path to the audio file. Required.");
        var speechKey = new Option<string>(name: "--speechKey", description: "Your Cognitive Services or Speech resource key. Required.");
        var speechRegion = new Option<string>(name: "--speechRegion", description: "Your Cognitive Services or Speech resource region. Example: eastus, northeurope. Required.");
        var openAiKey = new Option<string>(name: "--openAiKey", description: "Your Azure OpenAI resource key. Required.");
        var openAiEndpoint = new Option<string>(name: "--openAiEndpoint", description: "Your Azure OpenAI resource endpoint. Required. Example: https://YourResourceName.openai.azure.com");
        var openAiDeploymentName = new Option<string>(name: "--openAiDeploymentName", description: "Your Azure OpenAI deployment name. Example: my-gpt-4o-mini. Required.");

        var rootCommand = new RootCommand()
        {
            inputAudio,
            speechKey,
            speechRegion,
            openAiKey,
            openAiEndpoint,
            openAiDeploymentName
        };

        rootCommand.SetHandler(AnalyzeAudioAsync, speechKey, speechRegion, inputAudio, openAiKey, openAiEndpoint, openAiDeploymentName);

        return await rootCommand.InvokeAsync(args);
    }
}
} |
16 changes: 16 additions & 0 deletions
16
scenarios/csharp/dotnetcore/post-call-analytics/post-call-analytics.csproj
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,16 @@ | ||
<Project Sdk="Microsoft.NET.Sdk"> | ||
|
||
<PropertyGroup> | ||
<OutputType>Exe</OutputType> | ||
<TargetFramework>net8.0</TargetFramework> | ||
<Nullable>enable</Nullable> | ||
<ImplicitUsings>enable</ImplicitUsings> | ||
<RootNamespace>PostCallAnalytics</RootNamespace> | ||
</PropertyGroup> | ||
|
||
<ItemGroup> | ||
<PackageReference Include="Azure.AI.OpenAI" Version="2.0.0" /> | ||
<PackageReference Include="System.CommandLine" Version="2.0.0-beta4.22272.1" /> | ||
</ItemGroup> | ||
|
||
</Project> |
25 changes: 25 additions & 0 deletions
25
scenarios/csharp/dotnetcore/post-call-analytics/post-call-analytics.sln
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,25 @@ | ||
|
||
Microsoft Visual Studio Solution File, Format Version 12.00 | ||
# Visual Studio Version 17 | ||
VisualStudioVersion = 17.11.35303.130 | ||
MinimumVisualStudioVersion = 10.0.40219.1 | ||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "post-call-analytics", "post-call-analytics.csproj", "{20937E87-0B3F-44C6-8AD1-33E30F67A762}" | ||
EndProject | ||
Global | ||
GlobalSection(SolutionConfigurationPlatforms) = preSolution | ||
Debug|Any CPU = Debug|Any CPU | ||
Release|Any CPU = Release|Any CPU | ||
EndGlobalSection | ||
GlobalSection(ProjectConfigurationPlatforms) = postSolution | ||
{20937E87-0B3F-44C6-8AD1-33E30F67A762}.Debug|Any CPU.ActiveCfg = Debug|Any CPU | ||
{20937E87-0B3F-44C6-8AD1-33E30F67A762}.Debug|Any CPU.Build.0 = Debug|Any CPU | ||
{20937E87-0B3F-44C6-8AD1-33E30F67A762}.Release|Any CPU.ActiveCfg = Release|Any CPU | ||
{20937E87-0B3F-44C6-8AD1-33E30F67A762}.Release|Any CPU.Build.0 = Release|Any CPU | ||
EndGlobalSection | ||
GlobalSection(SolutionProperties) = preSolution | ||
HideSolutionNode = FALSE | ||
EndGlobalSection | ||
GlobalSection(ExtensibilityGlobals) = postSolution | ||
SolutionGuid = {53CEF230-04DF-4757-AFCA-CE4C02ADAAFD} | ||
EndGlobalSection | ||
EndGlobal |