public class dev.langchain4j.model.azure.AzureOpenAiChatModel extends java.lang.Object implements dev.langchain4j.model.chat.ChatLanguageModel, dev.langchain4j.model.chat.TokenCountEstimator
{
// NOTE(review): this file is Jimple (Soot IR) printed from compiled bytecode; all
// locals were collapsed to the placeholder "v" by the printer, so the listing is
// informational only and cannot be re-parsed as-is.
//
// --- instance fields -------------------------------------------------------
// Underlying Azure OpenAI SDK client. Non-final: the private common constructor
// runs first, then each public constructor assigns this field afterwards.
private com.azure.ai.openai.OpenAIClient client;
// Azure deployment (model) name; defaults to "gpt-35-turbo" in the common ctor.
private final java.lang.String deploymentName;
// Used by estimateTokenCount(); may be null — no null guard is visible in this listing.
private final dev.langchain4j.model.Tokenizer tokenizer;
// Completion cap forwarded to ChatCompletionsOptions.setMaxTokens.
private final java.lang.Integer maxTokens;
// Sampling temperature; defaults to 0.7 in the common ctor.
private final java.lang.Double temperature;
// Nucleus-sampling parameter.
private final java.lang.Double topP;
// Per-token logit bias map (raw type in bytecode; generics are erased).
private final java.util.Map logitBias;
// End-user identifier forwarded to the service.
private final java.lang.String user;
// Number of completions requested per call.
private final java.lang.Integer n;
// Stop sequences.
private final java.util.List stop;
private final java.lang.Double presencePenalty;
private final java.lang.Double frequencyPenalty;
// Azure "bring your own data" sources forwarded via setDataSources.
private final java.util.List dataSources;
// Azure chat enhancements (e.g. grounding/OCR) configuration.
private final com.azure.ai.openai.models.AzureChatEnhancementConfiguration enhancements;
// Deterministic-sampling seed.
private final java.lang.Long seed;
// e.g. JSON mode; forwarded via setResponseFormat.
private final com.azure.ai.openai.models.ChatCompletionsResponseFormat responseFormat;
// Constructor taking a pre-built OpenAIClient plus all model settings.
// Delegates every setting to the private common constructor, then stores the
// caller-supplied client. (Jimple printout: the repeated "v" declarations below
// are the decompiler's register pool, not distinct source variables.)
public void <init>(com.azure.ai.openai.OpenAIClient, java.lang.String, dev.langchain4j.model.Tokenizer, java.lang.Integer, java.lang.Double, java.lang.Double, java.util.Map, java.lang.String, java.lang.Integer, java.util.List, java.lang.Double, java.lang.Double, java.util.List, com.azure.ai.openai.models.AzureChatEnhancementConfiguration, java.lang.Long, com.azure.ai.openai.models.ChatCompletionsResponseFormat)
{
com.azure.ai.openai.OpenAIClient v;
java.lang.Double v, v, v, v;
com.azure.ai.openai.models.ChatCompletionsResponseFormat v;
dev.langchain4j.model.azure.AzureOpenAiChatModel v;
java.lang.Integer v, v;
java.lang.Long v;
java.util.Map v;
com.azure.ai.openai.models.AzureChatEnhancementConfiguration v;
java.lang.String v, v;
java.util.List v, v;
dev.langchain4j.model.Tokenizer v;
v := @this: dev.langchain4j.model.azure.AzureOpenAiChatModel;
// Parameter order mirrors the signature above: client, deploymentName,
// tokenizer, maxTokens, temperature, topP, logitBias, user, n, stop,
// presencePenalty, frequencyPenalty, dataSources, enhancements, seed, responseFormat.
v := @parameter: com.azure.ai.openai.OpenAIClient;
v := @parameter: java.lang.String;
v := @parameter: dev.langchain4j.model.Tokenizer;
v := @parameter: java.lang.Integer;
v := @parameter: java.lang.Double;
v := @parameter: java.lang.Double;
v := @parameter: java.util.Map;
v := @parameter: java.lang.String;
v := @parameter: java.lang.Integer;
v := @parameter: java.util.List;
v := @parameter: java.lang.Double;
v := @parameter: java.lang.Double;
v := @parameter: java.util.List;
v := @parameter: com.azure.ai.openai.models.AzureChatEnhancementConfiguration;
v := @parameter: java.lang.Long;
v := @parameter: com.azure.ai.openai.models.ChatCompletionsResponseFormat;
// Delegate all settings (deploymentName .. responseFormat) to the private common ctor.
specialinvoke v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: void <init>(java.lang.String,dev.langchain4j.model.Tokenizer,java.lang.Integer,java.lang.Double,java.lang.Double,java.util.Map,java.lang.String,java.lang.Integer,java.util.List,java.lang.Double,java.lang.Double,java.util.List,com.azure.ai.openai.models.AzureChatEnhancementConfiguration,java.lang.Long,com.azure.ai.openai.models.ChatCompletionsResponseFormat)>(v, v, v, v, v, v, v, v, v, v, v, v, v, v, v);
// Store the caller-provided client directly; no client construction happens here.
v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: com.azure.ai.openai.OpenAIClient client> = v;
return;
}
// Constructor that builds the OpenAIClient itself from String-based credentials.
// The four leading Strings feed InternalAzureOpenAiHelper.setupOpenAIClient's
// (String,String,String,...) overload; presumably endpoint / serviceVersion /
// apiKey plus the deploymentName — TODO confirm against the helper's signature.
// The trailing Duration/Integer/ProxyOptions/boolean are HTTP client options
// (timeout, max retries, proxy, request/response logging — names not visible here).
public void <init>(java.lang.String, java.lang.String, java.lang.String, java.lang.String, dev.langchain4j.model.Tokenizer, java.lang.Integer, java.lang.Double, java.lang.Double, java.util.Map, java.lang.String, java.lang.Integer, java.util.List, java.lang.Double, java.lang.Double, java.util.List, com.azure.ai.openai.models.AzureChatEnhancementConfiguration, java.lang.Long, com.azure.ai.openai.models.ChatCompletionsResponseFormat, java.time.Duration, java.lang.Integer, com.azure.core.http.ProxyOptions, boolean)
{
com.azure.ai.openai.OpenAIClient v;
java.lang.Double v, v, v, v;
com.azure.ai.openai.models.ChatCompletionsResponseFormat v;
dev.langchain4j.model.azure.AzureOpenAiChatModel v;
java.lang.Integer v, v, v;
com.azure.core.http.ProxyOptions v;
java.lang.Long v;
java.time.Duration v;
java.util.Map v;
com.azure.ai.openai.models.AzureChatEnhancementConfiguration v;
java.lang.String v, v, v, v, v;
boolean v;
java.util.List v, v;
dev.langchain4j.model.Tokenizer v;
v := @this: dev.langchain4j.model.azure.AzureOpenAiChatModel;
v := @parameter: java.lang.String;
v := @parameter: java.lang.String;
v := @parameter: java.lang.String;
v := @parameter: java.lang.String;
v := @parameter: dev.langchain4j.model.Tokenizer;
v := @parameter: java.lang.Integer;
v := @parameter: java.lang.Double;
v := @parameter: java.lang.Double;
v := @parameter: java.util.Map;
v := @parameter: java.lang.String;
v := @parameter: java.lang.Integer;
v := @parameter: java.util.List;
v := @parameter: java.lang.Double;
v := @parameter: java.lang.Double;
v := @parameter: java.util.List;
v := @parameter: com.azure.ai.openai.models.AzureChatEnhancementConfiguration;
v := @parameter: java.lang.Long;
v := @parameter: com.azure.ai.openai.models.ChatCompletionsResponseFormat;
v := @parameter: java.time.Duration;
v := @parameter: java.lang.Integer;
v := @parameter: com.azure.core.http.ProxyOptions;
v := @parameter: boolean;
// Delegate all model settings to the private common constructor first.
specialinvoke v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: void <init>(java.lang.String,dev.langchain4j.model.Tokenizer,java.lang.Integer,java.lang.Double,java.lang.Double,java.util.Map,java.lang.String,java.lang.Integer,java.util.List,java.lang.Double,java.lang.Double,java.util.List,com.azure.ai.openai.models.AzureChatEnhancementConfiguration,java.lang.Long,com.azure.ai.openai.models.ChatCompletionsResponseFormat)>(v, v, v, v, v, v, v, v, v, v, v, v, v, v, v);
// Build the Azure SDK client from String credentials + HTTP options.
v = staticinvoke <dev.langchain4j.model.azure.InternalAzureOpenAiHelper: com.azure.ai.openai.OpenAIClient setupOpenAIClient(java.lang.String,java.lang.String,java.lang.String,java.time.Duration,java.lang.Integer,com.azure.core.http.ProxyOptions,boolean)>(v, v, v, v, v, v, v);
v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: com.azure.ai.openai.OpenAIClient client> = v;
return;
}
// Constructor variant authenticating with an Azure KeyCredential instead of a
// raw API-key String; otherwise identical in shape to the String-key constructor:
// delegate settings to the common ctor, then build the client via the
// setupOpenAIClient(String,String,KeyCredential,...) helper overload.
public void <init>(java.lang.String, java.lang.String, com.azure.core.credential.KeyCredential, java.lang.String, dev.langchain4j.model.Tokenizer, java.lang.Integer, java.lang.Double, java.lang.Double, java.util.Map, java.lang.String, java.lang.Integer, java.util.List, java.lang.Double, java.lang.Double, java.util.List, com.azure.ai.openai.models.AzureChatEnhancementConfiguration, java.lang.Long, com.azure.ai.openai.models.ChatCompletionsResponseFormat, java.time.Duration, java.lang.Integer, com.azure.core.http.ProxyOptions, boolean)
{
com.azure.ai.openai.OpenAIClient v;
java.lang.Double v, v, v, v;
com.azure.ai.openai.models.ChatCompletionsResponseFormat v;
dev.langchain4j.model.azure.AzureOpenAiChatModel v;
java.lang.Integer v, v, v;
com.azure.core.http.ProxyOptions v;
java.lang.Long v;
java.time.Duration v;
java.util.Map v;
com.azure.ai.openai.models.AzureChatEnhancementConfiguration v;
java.lang.String v, v, v, v;
boolean v;
com.azure.core.credential.KeyCredential v;
java.util.List v, v;
dev.langchain4j.model.Tokenizer v;
v := @this: dev.langchain4j.model.azure.AzureOpenAiChatModel;
v := @parameter: java.lang.String;
v := @parameter: java.lang.String;
v := @parameter: com.azure.core.credential.KeyCredential;
v := @parameter: java.lang.String;
v := @parameter: dev.langchain4j.model.Tokenizer;
v := @parameter: java.lang.Integer;
v := @parameter: java.lang.Double;
v := @parameter: java.lang.Double;
v := @parameter: java.util.Map;
v := @parameter: java.lang.String;
v := @parameter: java.lang.Integer;
v := @parameter: java.util.List;
v := @parameter: java.lang.Double;
v := @parameter: java.lang.Double;
v := @parameter: java.util.List;
v := @parameter: com.azure.ai.openai.models.AzureChatEnhancementConfiguration;
v := @parameter: java.lang.Long;
v := @parameter: com.azure.ai.openai.models.ChatCompletionsResponseFormat;
v := @parameter: java.time.Duration;
v := @parameter: java.lang.Integer;
v := @parameter: com.azure.core.http.ProxyOptions;
v := @parameter: boolean;
// Delegate all model settings to the private common constructor first.
specialinvoke v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: void <init>(java.lang.String,dev.langchain4j.model.Tokenizer,java.lang.Integer,java.lang.Double,java.lang.Double,java.util.Map,java.lang.String,java.lang.Integer,java.util.List,java.lang.Double,java.lang.Double,java.util.List,com.azure.ai.openai.models.AzureChatEnhancementConfiguration,java.lang.Long,com.azure.ai.openai.models.ChatCompletionsResponseFormat)>(v, v, v, v, v, v, v, v, v, v, v, v, v, v, v);
// Build the Azure SDK client using the KeyCredential overload.
v = staticinvoke <dev.langchain4j.model.azure.InternalAzureOpenAiHelper: com.azure.ai.openai.OpenAIClient setupOpenAIClient(java.lang.String,java.lang.String,com.azure.core.credential.KeyCredential,java.time.Duration,java.lang.Integer,com.azure.core.http.ProxyOptions,boolean)>(v, v, v, v, v, v, v);
v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: com.azure.ai.openai.OpenAIClient client> = v;
return;
}
// Constructor variant authenticating with an Azure TokenCredential (AAD / Entra
// ID style auth). Same shape as the other credential constructors: delegate
// settings to the common ctor, then build the client via the
// setupOpenAIClient(String,String,TokenCredential,...) helper overload.
public void <init>(java.lang.String, java.lang.String, com.azure.core.credential.TokenCredential, java.lang.String, dev.langchain4j.model.Tokenizer, java.lang.Integer, java.lang.Double, java.lang.Double, java.util.Map, java.lang.String, java.lang.Integer, java.util.List, java.lang.Double, java.lang.Double, java.util.List, com.azure.ai.openai.models.AzureChatEnhancementConfiguration, java.lang.Long, com.azure.ai.openai.models.ChatCompletionsResponseFormat, java.time.Duration, java.lang.Integer, com.azure.core.http.ProxyOptions, boolean)
{
com.azure.ai.openai.OpenAIClient v;
java.lang.Double v, v, v, v;
com.azure.ai.openai.models.ChatCompletionsResponseFormat v;
dev.langchain4j.model.azure.AzureOpenAiChatModel v;
java.lang.Integer v, v, v;
com.azure.core.http.ProxyOptions v;
java.lang.Long v;
java.time.Duration v;
java.util.Map v;
com.azure.ai.openai.models.AzureChatEnhancementConfiguration v;
java.lang.String v, v, v, v;
boolean v;
java.util.List v, v;
com.azure.core.credential.TokenCredential v;
dev.langchain4j.model.Tokenizer v;
v := @this: dev.langchain4j.model.azure.AzureOpenAiChatModel;
v := @parameter: java.lang.String;
v := @parameter: java.lang.String;
v := @parameter: com.azure.core.credential.TokenCredential;
v := @parameter: java.lang.String;
v := @parameter: dev.langchain4j.model.Tokenizer;
v := @parameter: java.lang.Integer;
v := @parameter: java.lang.Double;
v := @parameter: java.lang.Double;
v := @parameter: java.util.Map;
v := @parameter: java.lang.String;
v := @parameter: java.lang.Integer;
v := @parameter: java.util.List;
v := @parameter: java.lang.Double;
v := @parameter: java.lang.Double;
v := @parameter: java.util.List;
v := @parameter: com.azure.ai.openai.models.AzureChatEnhancementConfiguration;
v := @parameter: java.lang.Long;
v := @parameter: com.azure.ai.openai.models.ChatCompletionsResponseFormat;
v := @parameter: java.time.Duration;
v := @parameter: java.lang.Integer;
v := @parameter: com.azure.core.http.ProxyOptions;
v := @parameter: boolean;
// Delegate all model settings to the private common constructor first.
specialinvoke v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: void <init>(java.lang.String,dev.langchain4j.model.Tokenizer,java.lang.Integer,java.lang.Double,java.lang.Double,java.util.Map,java.lang.String,java.lang.Integer,java.util.List,java.lang.Double,java.lang.Double,java.util.List,com.azure.ai.openai.models.AzureChatEnhancementConfiguration,java.lang.Long,com.azure.ai.openai.models.ChatCompletionsResponseFormat)>(v, v, v, v, v, v, v, v, v, v, v, v, v, v, v);
// Build the Azure SDK client using the TokenCredential overload.
v = staticinvoke <dev.langchain4j.model.azure.InternalAzureOpenAiHelper: com.azure.ai.openai.OpenAIClient setupOpenAIClient(java.lang.String,java.lang.String,com.azure.core.credential.TokenCredential,java.time.Duration,java.lang.Integer,com.azure.core.http.ProxyOptions,boolean)>(v, v, v, v, v, v, v);
v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: com.azure.ai.openai.OpenAIClient client> = v;
return;
}
// Private common constructor: assigns every final settings field. Applies two
// defaults via Utils.getOrDefault: deploymentName falls back to "gpt-35-turbo"
// and temperature falls back to 0.7. All other parameters (including tokenizer)
// are stored as-is, nulls included — no validation is visible here.
// Note: this ctor does NOT touch the `client` field; each public constructor
// assigns it afterwards.
private void <init>(java.lang.String, dev.langchain4j.model.Tokenizer, java.lang.Integer, java.lang.Double, java.lang.Double, java.util.Map, java.lang.String, java.lang.Integer, java.util.List, java.lang.Double, java.lang.Double, java.util.List, com.azure.ai.openai.models.AzureChatEnhancementConfiguration, java.lang.Long, com.azure.ai.openai.models.ChatCompletionsResponseFormat)
{
java.lang.Double v, v, v, v, v;
com.azure.ai.openai.models.ChatCompletionsResponseFormat v;
dev.langchain4j.model.azure.AzureOpenAiChatModel v;
java.lang.Integer v, v;
java.lang.Long v;
java.util.Map v;
com.azure.ai.openai.models.AzureChatEnhancementConfiguration v;
java.lang.String v, v;
java.util.List v, v;
dev.langchain4j.model.Tokenizer v;
java.lang.Object v, v;
v := @this: dev.langchain4j.model.azure.AzureOpenAiChatModel;
v := @parameter: java.lang.String;
v := @parameter: dev.langchain4j.model.Tokenizer;
v := @parameter: java.lang.Integer;
v := @parameter: java.lang.Double;
v := @parameter: java.lang.Double;
v := @parameter: java.util.Map;
v := @parameter: java.lang.String;
v := @parameter: java.lang.Integer;
v := @parameter: java.util.List;
v := @parameter: java.lang.Double;
v := @parameter: java.lang.Double;
v := @parameter: java.util.List;
v := @parameter: com.azure.ai.openai.models.AzureChatEnhancementConfiguration;
v := @parameter: java.lang.Long;
v := @parameter: com.azure.ai.openai.models.ChatCompletionsResponseFormat;
specialinvoke v.<java.lang.Object: void <init>()>();
// deploymentName defaults to "gpt-35-turbo" when the caller passed null.
v = staticinvoke <dev.langchain4j.internal.Utils: java.lang.Object getOrDefault(java.lang.Object,java.lang.Object)>(v, "gpt-35-turbo");
v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: java.lang.String deploymentName> = v;
v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: dev.langchain4j.model.Tokenizer tokenizer> = v;
v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: java.lang.Integer maxTokens> = v;
// temperature defaults to 0.7 when the caller passed null.
v = staticinvoke <java.lang.Double: java.lang.Double valueOf(double)>(0.7);
v = staticinvoke <dev.langchain4j.internal.Utils: java.lang.Object getOrDefault(java.lang.Object,java.lang.Object)>(v, v);
v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: java.lang.Double temperature> = v;
// Remaining settings are stored verbatim, nulls allowed.
v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: java.lang.Double topP> = v;
v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: java.util.Map logitBias> = v;
v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: java.lang.String user> = v;
v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: java.lang.Integer n> = v;
v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: java.util.List stop> = v;
v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: java.lang.Double presencePenalty> = v;
v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: java.lang.Double frequencyPenalty> = v;
v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: java.util.List dataSources> = v;
v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: com.azure.ai.openai.models.AzureChatEnhancementConfiguration enhancements> = v;
v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: java.lang.Long seed> = v;
v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: com.azure.ai.openai.models.ChatCompletionsResponseFormat responseFormat> = v;
return;
}
// ChatLanguageModel entry point: generate a response from chat messages only.
// Delegates to the private generate(messages, toolSpecifications=null,
// toolThatMustBeExecuted=null) overload — no tools/functions are sent.
public dev.langchain4j.model.output.Response generate(java.util.List)
{
java.util.List v;
dev.langchain4j.model.output.Response v;
dev.langchain4j.model.azure.AzureOpenAiChatModel v;
v := @this: dev.langchain4j.model.azure.AzureOpenAiChatModel;
v := @parameter: java.util.List;
v = specialinvoke v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: dev.langchain4j.model.output.Response generate(java.util.List,java.util.List,dev.langchain4j.agent.tool.ToolSpecification)>(v, null, null);
return v;
}
// Generate a response with a list of available tool specifications the model MAY
// call. Delegates to the private overload with toolThatMustBeExecuted=null,
// i.e. no function is forced.
public dev.langchain4j.model.output.Response generate(java.util.List, java.util.List)
{
java.util.List v, v;
dev.langchain4j.model.output.Response v;
dev.langchain4j.model.azure.AzureOpenAiChatModel v;
v := @this: dev.langchain4j.model.azure.AzureOpenAiChatModel;
v := @parameter: java.util.List;
v := @parameter: java.util.List;
v = specialinvoke v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: dev.langchain4j.model.output.Response generate(java.util.List,java.util.List,dev.langchain4j.agent.tool.ToolSpecification)>(v, v, null);
return v;
}
// Generate a response where the single given tool MUST be executed: the spec is
// wrapped in a singleton list (so it is advertised as an available function) AND
// passed again as the third argument (so the private overload forces the model
// to call it via FunctionCallConfig).
public dev.langchain4j.model.output.Response generate(java.util.List, dev.langchain4j.agent.tool.ToolSpecification)
{
java.util.List v, v;
dev.langchain4j.agent.tool.ToolSpecification v;
dev.langchain4j.model.output.Response v;
dev.langchain4j.model.azure.AzureOpenAiChatModel v;
v := @this: dev.langchain4j.model.azure.AzureOpenAiChatModel;
v := @parameter: java.util.List;
v := @parameter: dev.langchain4j.agent.tool.ToolSpecification;
// Advertise the forced tool as the only available function.
v = staticinvoke <java.util.Collections: java.util.List singletonList(java.lang.Object)>(v);
v = specialinvoke v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: dev.langchain4j.model.output.Response generate(java.util.List,java.util.List,dev.langchain4j.agent.tool.ToolSpecification)>(v, v, v);
return v;
}
// Core request path. Builds a ChatCompletionsOptions from the langchain4j
// messages plus every stored setting, optionally attaches functions, performs a
// synchronous getChatCompletions call, and maps choice[0] of the result back to
// a langchain4j Response (AiMessage + TokenUsage + FinishReason).
//
// Notes:
//  - Uses the legacy Functions API (setFunctions / setFunctionCall /
//    FunctionCallConfig), not the newer tools API.
//  - Only the first choice is read, even when `n` > 1.
//  - Printer artifact: both branch targets below are printed as the bare token
//    "label"; in real bytecode these are two distinct labels (skip-functions /
//    skip-forced-call).
private dev.langchain4j.model.output.Response generate(java.util.List, java.util.List, dev.langchain4j.agent.tool.ToolSpecification)
{
com.azure.ai.openai.OpenAIClient v;
com.azure.ai.openai.models.FunctionCallConfig v;
dev.langchain4j.model.output.TokenUsage v;
dev.langchain4j.agent.tool.ToolSpecification v;
com.azure.ai.openai.models.CompletionsUsage v;
dev.langchain4j.model.azure.AzureOpenAiChatModel v;
java.lang.Integer v, v;
com.azure.ai.openai.models.ChatCompletionsOptions v, v, v, v, v, v, v, v, v, v, v, v, v, v, v;
java.lang.Long v;
java.util.Map v;
com.azure.ai.openai.models.AzureChatEnhancementConfiguration v;
com.azure.ai.openai.models.ChatResponseMessage v;
boolean v;
com.azure.ai.openai.models.CompletionsFinishReason v;
java.util.List v, v, v, v, v, v, v, v;
java.lang.Double v, v, v, v;
com.azure.ai.openai.models.ChatCompletionsResponseFormat v;
com.azure.ai.openai.models.ChatCompletions v;
java.lang.String v, v, v, v;
dev.langchain4j.data.message.AiMessage v;
dev.langchain4j.model.output.FinishReason v;
dev.langchain4j.model.output.Response v;
java.lang.Object v, v;
v := @this: dev.langchain4j.model.azure.AzureOpenAiChatModel;
v := @parameter: java.util.List;
v := @parameter: java.util.List;
v := @parameter: dev.langchain4j.agent.tool.ToolSpecification;
// Convert langchain4j ChatMessages to Azure SDK messages and seed the options.
v = new com.azure.ai.openai.models.ChatCompletionsOptions;
v = staticinvoke <dev.langchain4j.model.azure.InternalAzureOpenAiHelper: java.util.List toOpenAiMessages(java.util.List)>(v);
specialinvoke v.<com.azure.ai.openai.models.ChatCompletionsOptions: void <init>(java.util.List)>(v);
// Copy every stored setting onto the options (fluent setters).
v = v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: java.lang.String deploymentName>;
v = virtualinvoke v.<com.azure.ai.openai.models.ChatCompletionsOptions: com.azure.ai.openai.models.ChatCompletionsOptions setModel(java.lang.String)>(v);
v = v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: java.lang.Integer maxTokens>;
v = virtualinvoke v.<com.azure.ai.openai.models.ChatCompletionsOptions: com.azure.ai.openai.models.ChatCompletionsOptions setMaxTokens(java.lang.Integer)>(v);
v = v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: java.lang.Double temperature>;
v = virtualinvoke v.<com.azure.ai.openai.models.ChatCompletionsOptions: com.azure.ai.openai.models.ChatCompletionsOptions setTemperature(java.lang.Double)>(v);
v = v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: java.lang.Double topP>;
v = virtualinvoke v.<com.azure.ai.openai.models.ChatCompletionsOptions: com.azure.ai.openai.models.ChatCompletionsOptions setTopP(java.lang.Double)>(v);
v = v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: java.util.Map logitBias>;
v = virtualinvoke v.<com.azure.ai.openai.models.ChatCompletionsOptions: com.azure.ai.openai.models.ChatCompletionsOptions setLogitBias(java.util.Map)>(v);
v = v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: java.lang.String user>;
v = virtualinvoke v.<com.azure.ai.openai.models.ChatCompletionsOptions: com.azure.ai.openai.models.ChatCompletionsOptions setUser(java.lang.String)>(v);
v = v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: java.lang.Integer n>;
v = virtualinvoke v.<com.azure.ai.openai.models.ChatCompletionsOptions: com.azure.ai.openai.models.ChatCompletionsOptions setN(java.lang.Integer)>(v);
v = v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: java.util.List stop>;
v = virtualinvoke v.<com.azure.ai.openai.models.ChatCompletionsOptions: com.azure.ai.openai.models.ChatCompletionsOptions setStop(java.util.List)>(v);
v = v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: java.lang.Double presencePenalty>;
v = virtualinvoke v.<com.azure.ai.openai.models.ChatCompletionsOptions: com.azure.ai.openai.models.ChatCompletionsOptions setPresencePenalty(java.lang.Double)>(v);
v = v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: java.lang.Double frequencyPenalty>;
v = virtualinvoke v.<com.azure.ai.openai.models.ChatCompletionsOptions: com.azure.ai.openai.models.ChatCompletionsOptions setFrequencyPenalty(java.lang.Double)>(v);
v = v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: java.util.List dataSources>;
v = virtualinvoke v.<com.azure.ai.openai.models.ChatCompletionsOptions: com.azure.ai.openai.models.ChatCompletionsOptions setDataSources(java.util.List)>(v);
v = v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: com.azure.ai.openai.models.AzureChatEnhancementConfiguration enhancements>;
v = virtualinvoke v.<com.azure.ai.openai.models.ChatCompletionsOptions: com.azure.ai.openai.models.ChatCompletionsOptions setEnhancements(com.azure.ai.openai.models.AzureChatEnhancementConfiguration)>(v);
v = v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: java.lang.Long seed>;
v = virtualinvoke v.<com.azure.ai.openai.models.ChatCompletionsOptions: com.azure.ai.openai.models.ChatCompletionsOptions setSeed(java.lang.Long)>(v);
v = v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: com.azure.ai.openai.models.ChatCompletionsResponseFormat responseFormat>;
v = virtualinvoke v.<com.azure.ai.openai.models.ChatCompletionsOptions: com.azure.ai.openai.models.ChatCompletionsOptions setResponseFormat(com.azure.ai.openai.models.ChatCompletionsResponseFormat)>(v);
// If tool specifications were supplied (non-null, non-empty), advertise them
// as legacy OpenAI "functions".
if v == null goto label;
v = interfaceinvoke v.<java.util.List: boolean isEmpty()>();
if v != 0 goto label;
v = staticinvoke <dev.langchain4j.model.azure.InternalAzureOpenAiHelper: java.util.List toFunctions(java.util.Collection)>(v);
virtualinvoke v.<com.azure.ai.openai.models.ChatCompletionsOptions: com.azure.ai.openai.models.ChatCompletionsOptions setFunctions(java.util.List)>(v);
label:
// If a specific tool must be executed, force it by name via FunctionCallConfig.
if v == null goto label;
v = new com.azure.ai.openai.models.FunctionCallConfig;
v = virtualinvoke v.<dev.langchain4j.agent.tool.ToolSpecification: java.lang.String name()>();
specialinvoke v.<com.azure.ai.openai.models.FunctionCallConfig: void <init>(java.lang.String)>(v);
virtualinvoke v.<com.azure.ai.openai.models.ChatCompletionsOptions: com.azure.ai.openai.models.ChatCompletionsOptions setFunctionCall(com.azure.ai.openai.models.FunctionCallConfig)>(v);
label:
// Blocking call to the Azure OpenAI service against this model's deployment.
v = v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: com.azure.ai.openai.OpenAIClient client>;
v = v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: java.lang.String deploymentName>;
v = virtualinvoke v.<com.azure.ai.openai.OpenAIClient: com.azure.ai.openai.models.ChatCompletions getChatCompletions(java.lang.String,com.azure.ai.openai.models.ChatCompletionsOptions)>(v, v);
// Map choice[0] to an AiMessage; throws IndexOutOfBounds if choices is empty.
v = virtualinvoke v.<com.azure.ai.openai.models.ChatCompletions: java.util.List getChoices()>();
v = interfaceinvoke v.<java.util.List: java.lang.Object get(int)>(0);
v = virtualinvoke v.<com.azure.ai.openai.models.ChatChoice: com.azure.ai.openai.models.ChatResponseMessage getMessage()>();
v = staticinvoke <dev.langchain4j.model.azure.InternalAzureOpenAiHelper: dev.langchain4j.data.message.AiMessage aiMessageFrom(com.azure.ai.openai.models.ChatResponseMessage)>(v);
// Token accounting and finish reason, again from choice[0].
v = virtualinvoke v.<com.azure.ai.openai.models.ChatCompletions: com.azure.ai.openai.models.CompletionsUsage getUsage()>();
v = staticinvoke <dev.langchain4j.model.azure.InternalAzureOpenAiHelper: dev.langchain4j.model.output.TokenUsage tokenUsageFrom(com.azure.ai.openai.models.CompletionsUsage)>(v);
v = virtualinvoke v.<com.azure.ai.openai.models.ChatCompletions: java.util.List getChoices()>();
v = interfaceinvoke v.<java.util.List: java.lang.Object get(int)>(0);
v = virtualinvoke v.<com.azure.ai.openai.models.ChatChoice: com.azure.ai.openai.models.CompletionsFinishReason getFinishReason()>();
v = staticinvoke <dev.langchain4j.model.azure.InternalAzureOpenAiHelper: dev.langchain4j.model.output.FinishReason finishReasonFrom(com.azure.ai.openai.models.CompletionsFinishReason)>(v);
v = staticinvoke <dev.langchain4j.model.output.Response: dev.langchain4j.model.output.Response 'from'(java.lang.Object,dev.langchain4j.model.output.TokenUsage,dev.langchain4j.model.output.FinishReason)>(v, v, v);
return v;
}
// TokenCountEstimator implementation: delegates to the configured Tokenizer.
// NOTE(review): the tokenizer field may be null (the common constructor stores
// it without validation), in which case this would NPE — confirm whether callers
// guarantee a tokenizer is always supplied.
public int estimateTokenCount(java.util.List)
{
java.util.List v;
int v;
dev.langchain4j.model.Tokenizer v;
dev.langchain4j.model.azure.AzureOpenAiChatModel v;
v := @this: dev.langchain4j.model.azure.AzureOpenAiChatModel;
v := @parameter: java.util.List;
v = v.<dev.langchain4j.model.azure.AzureOpenAiChatModel: dev.langchain4j.model.Tokenizer tokenizer>;
v = interfaceinvoke v.<dev.langchain4j.model.Tokenizer: int estimateTokenCountInMessages(java.lang.Iterable)>(v);
return v;
}
// SPI-aware builder factory: loads AzureOpenAiChatModelBuilderFactory
// implementations via ServiceHelper; if at least one is registered, the FIRST
// factory's builder is returned (only a single hasNext/next pair — no loop over
// later factories). Otherwise falls back to a plain new Builder().
public static dev.langchain4j.model.azure.AzureOpenAiChatModel$Builder builder()
{
dev.langchain4j.model.azure.AzureOpenAiChatModel$Builder v;
java.util.Iterator v;
java.util.Collection v;
java.lang.Object v, v;
boolean v;
v = staticinvoke <dev.langchain4j.spi.ServiceHelper: java.util.Collection loadFactories(java.lang.Class)>(class "Ldev/langchain4j/model/azure/spi/AzureOpenAiChatModelBuilderFactory;");
v = interfaceinvoke v.<java.util.Collection: java.util.Iterator iterator()>();
v = interfaceinvoke v.<java.util.Iterator: boolean hasNext()>();
// No registered factory -> default Builder.
if v == 0 goto label;
// First registered factory wins.
v = interfaceinvoke v.<java.util.Iterator: java.lang.Object next()>();
v = interfaceinvoke v.<dev.langchain4j.model.azure.spi.AzureOpenAiChatModelBuilderFactory: java.lang.Object get()>();
return v;
label:
v = new dev.langchain4j.model.azure.AzureOpenAiChatModel$Builder;
specialinvoke v.<dev.langchain4j.model.azure.AzureOpenAiChatModel$Builder: void <init>()>();
return v;
}
}