TextResponse.ContinueAsync() Method

    Asynchronously processes the next chunk of content.

    Namespace: DevExpress.AIIntegration.Extensions

    Assembly: DevExpress.AIIntegration.v25.1.dll

    NuGet Package: DevExpress.AIIntegration

    Declaration

    public Task ContinueAsync()

    Returns

    Type        Description
    Task        A task that represents the asynchronous continuation operation.

    Remarks

    AI-powered extensions automatically split large content into manageable chunks. Use the ContinueAsync method to start processing the next chunk asynchronously.
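
    The basic pattern is a loop that processes and appends chunks until no continuation is required, as in this minimal sketch (it assumes that response is a TextResponse returned by an AI-powered extension method, such as TranslateAsync in the full example below):

    string result = response.Response;
    while (response.IsContinuationRequired) {
        // Process the next chunk and append it to the accumulated result.
        await response.ContinueAsync();
        result += response.Response;
    }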

    The following example registers an Azure OpenAI client and implements a Translate method that handles large text blocks.

    using Azure;
    using Azure.AI.OpenAI;
    using Microsoft.Extensions.AI;
    using DevExpress.AIIntegration;
    using DevExpress.AIIntegration.Extensions;
    
    SetEnvironmentVariables();
    
    // Register an Azure OpenAI client
    AIExtensionsContainerDefault defaultAIExtensionsContainer = RegisterAzureOpenAIClient(
        Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT"),
        Environment.GetEnvironmentVariable("AZURE_OPENAI_APIKEY")
    );
    
    string longText = "A long text...";
    async Task<string> Translate(string sourceText, string language) {
        TextResponse response = await defaultAIExtensionsContainer.TranslateAsync(
            new TranslateRequest(sourceText, language)
        );
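        // The entire text fit into a single chunk; return the result as is.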
        if (response.IsCompleted)
            return response.Response;
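        // The text was split into chunks: process and append the remaining chunks.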
        if (!response.IsRestrictedOrFailed) {
            string translatedText = response.Response;
            while (response.IsContinuationRequired) {
                await response.ContinueAsync();
                translatedText += response.Response;
            }
            return translatedText;
        }
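        // The request was restricted or failed: map the status to a user-friendly message.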
        switch (response.Status) {
            case ResponseStatus.MaxTokenLimitExceeded:
            case ResponseStatus.InputSizeLimitExceeded:
                return "The text you're trying to send within a request is too long and exceeds the allowed limit.";
            case ResponseStatus.ContentFiltered:
                return "Potentially harmful content was detected in your request.";
            case ResponseStatus.Error:
                return "An error occurred while processing the request.";
        }
        throw new NotSupportedException();
    }
    
    Console.WriteLine(await Translate(longText, "de"));
    
    // Creates a default AI extensions container for an Azure OpenAI chat client.
    AIExtensionsContainerDefault RegisterAzureOpenAIClient(string azureOpenAIEndpoint, string azureOpenAIKey) {
        IChatClient client = new Azure.AI.OpenAI.AzureOpenAIClient(new Uri(azureOpenAIEndpoint),
            new System.ClientModel.ApiKeyCredential(azureOpenAIKey)).GetChatClient("gpt-4o-mini").AsIChatClient();
    
        return AIExtensionsContainerConsole.CreateDefaultAIExtensionContainer(client);
    }
    
    // Placeholder values: replace with your Azure OpenAI endpoint and API key.
    void SetEnvironmentVariables() {
        Environment.SetEnvironmentVariable("AZURE_OPENAI_ENDPOINT", {SPECIFY_YOUR_AZURE_ENDPOINT});
        Environment.SetEnvironmentVariable("AZURE_OPENAI_APIKEY", {SPECIFY_YOU_AZURE_KEY});
    }
    