As of the time of writing, there are two options for integrating the Azure OpenAI ChatGPT model with your own data in external applications: Azure AI Studio (Deploy App) and the REST API. The code below exposes the REST API through an Azure Function, making it straightforward to call the model from your own applications. By grounding the model with your own data, you can deliver dynamic conversational experiences over a simple HTTP interface.
GitHub: https://github.com/brodbor/ChatGPTCustomData
Example of website search: https://borisbrodsky.com/azure/supercharge-your-website-search-with-azure-openai-chatgpt-model/
Press release: https://azure.microsoft.com/en-us/updates/azure-open-ai-service-on-your-data/
using System;
using System.IO;
using System.Net.Http;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Newtonsoft.Json;
using Microsoft.Azure.WebJobs;
using Microsoft.Azure.WebJobs.Extensions.Http;
using Microsoft.Extensions.Logging;
public static class ChatGPTCustom
{
    [FunctionName("ChatGPTCustom")]
    public static async Task<IActionResult> Run(
        [HttpTrigger(AuthorizationLevel.Anonymous, "get", "post", Route = null)] HttpRequest req,
        ILogger log)
    {
        log.LogInformation("C# HTTP trigger function processed a request.");

        try
        {
            // Configure in Function -> Configuration -> Application Settings
            string AOAIEndpoint = Environment.GetEnvironmentVariable("AOAIEndpoint");
            string AOAIDeploymentId = Environment.GetEnvironmentVariable("AOAIDeploymentId");
            string AOAIKey = Environment.GetEnvironmentVariable("AOAIKey");
            string ChatGptUrl = Environment.GetEnvironmentVariable("ChatGptUrl");
            string ChatGptKey = Environment.GetEnvironmentVariable("ChatGptKey");
            string SearchEndpoint = Environment.GetEnvironmentVariable("SearchEndpoint");
            string SearchKey = Environment.GetEnvironmentVariable("SearchKey");
            string SearchIndex = Environment.GetEnvironmentVariable("SearchIndex");

            // Read the user's question from the query string
            string userMessage = req.Query["userMessage"];

            // Prepare the request payload: the Cognitive Search data source plus the chat messages
            var payload = new
            {
                dataSources = new[]
                {
                    new
                    {
                        type = "AzureCognitiveSearch",
                        parameters = new
                        {
                            endpoint = SearchEndpoint,
                            key = SearchKey,
                            indexName = SearchIndex
                        }
                    }
                },
                messages = new[]
                {
                    new
                    {
                        role = "user",
                        content = userMessage
                    }
                }
            };

            // Create an HttpClient instance
            using (HttpClient client = new HttpClient())
            {
                // Set the request headers
                client.DefaultRequestHeaders.Add("api-key", AOAIKey);
                client.DefaultRequestHeaders.Add("chatgpt_url", ChatGptUrl);
                client.DefaultRequestHeaders.Add("chatgpt_key", ChatGptKey);

                // Serialize the payload
                string serializedPayload = JsonConvert.SerializeObject(payload);

                // Create the request content
                StringContent content = new StringContent(serializedPayload, System.Text.Encoding.UTF8, "application/json");

                // Make the POST request to the "on your data" extensions endpoint
                HttpResponseMessage response = await client.PostAsync($"{AOAIEndpoint}/openai/deployments/{AOAIDeploymentId}/extensions/chat/completions?api-version=2023-06-01-preview", content);

                // Read the response content
                string responseContent = await response.Content.ReadAsStringAsync();

                // Return the response from the API
                return new ContentResult
                {
                    Content = responseContent,
                    ContentType = "application/json",
                    StatusCode = (int)response.StatusCode
                };
            }
        }
        catch (Exception ex)
        {
            return new ContentResult
            {
                Content = $"An error occurred: {ex.Message}",
                ContentType = "text/plain",
                StatusCode = 500
            };
        }
    }
}
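Once the function is deployed, any application can call it over plain HTTP and receive the Azure OpenAI response passed through unchanged. A minimal client sketch is shown below; the function URL is a placeholder for your own deployment (the function above uses AuthorizationLevel.Anonymous, so no function key is assumed).

using System;
using System.Net.Http;
using System.Threading.Tasks;

class FunctionClientExample
{
    static async Task Main()
    {
        // Hypothetical function URL; replace with your own Function App name
        string functionUrl = "https://<your-function-app>.azurewebsites.net/api/ChatGPTCustom";
        string question = Uri.EscapeDataString("What are your support hours?");

        using (HttpClient client = new HttpClient())
        {
            // The function reads the question from the userMessage query parameter
            HttpResponseMessage response = await client.GetAsync($"{functionUrl}?userMessage={question}");

            // The raw chat/completions JSON from Azure OpenAI is returned as-is
            string json = await response.Content.ReadAsStringAsync();
            Console.WriteLine(json);
        }
    }
}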
Thank you so much for sharing the detailed code; it really helped with my project. My requirements are the same: we use AI Search over standardized resumes stored in a Storage Account container. The issue I'm facing is that I never receive more than 5 results. I tried changing the max_tokens and top parameters, and I even looped the request to ask for more results, but I still get no more than 5, even though we have 300K+ documents in Blob storage.
Can you suggest a workaround?
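The number of search results handed to the model is controlled by the data source's topNDocuments parameter rather than by max_tokens or top_p, and the service defaults to the top 5 documents per request. A possible adjustment is sketched below; it reuses the SearchEndpoint/SearchKey/SearchIndex variables from the function above, and assumes topNDocuments is supported by the api-version you are calling (verify against the current "on your data" documentation).

// Sketch only: adds topNDocuments to the Cognitive Search data source parameters
var payload = new
{
    dataSources = new[]
    {
        new
        {
            type = "AzureCognitiveSearch",
            parameters = new
            {
                endpoint = SearchEndpoint,
                key = SearchKey,
                indexName = SearchIndex,
                topNDocuments = 20 // ask the retriever for more than the default 5 documents
            }
        }
    },
    messages = new[]
    {
        new { role = "user", content = userMessage }
    }
};

Note that the retrieved documents still have to fit in the model's context window, so very large values will be truncated regardless of this setting.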