// Decompiled Soot/Jimple IR (not Java source) for the Ollama streaming language model.
// NOTE(review): the decompiler collapsed every local variable name to "v", so the
// duplicate declarations below are mangling artifacts — the original data flow between
// statements cannot be reconstructed from this listing alone; comments describe only
// what the visible invocations establish.
public class dev.langchain4j.model.ollama.OllamaStreamingLanguageModel extends java.lang.Object implements dev.langchain4j.model.language.StreamingLanguageModel
{
// HTTP client used to talk to the Ollama server (built in the constructor).
private final dev.langchain4j.model.ollama.OllamaClient client;
// Name of the Ollama model to invoke; validated non-blank in the constructor.
private final java.lang.String modelName;
// Sampling options (temperature, topK, topP, repeatPenalty, seed, numPredict, stop).
private final dev.langchain4j.model.ollama.Options options;
// Response format string passed through to the completion request (may be null).
private final java.lang.String format;
// Constructor. Parameters (in order, per the invocations below):
//   baseUrl, modelName, temperature, topK, topP, repeatPenalty, seed,
//   numPredict, stop (List), format, timeout (Duration).
// NOTE(review): parameter-to-field mapping inferred from the builder-method names
// invoked below; the mangled locals make exact positional mapping unverifiable here.
public void <init>(java.lang.String, java.lang.String, java.lang.Double, java.lang.Integer, java.lang.Double, java.lang.Double, java.lang.Integer, java.lang.Integer, java.util.List, java.lang.String, java.time.Duration)
{
// Decompiler-mangled local declarations (all names collapsed to "v").
java.lang.Double v, v, v;
java.lang.Integer v, v, v;
java.time.Duration v, v;
dev.langchain4j.model.ollama.OllamaClient v;
dev.langchain4j.model.ollama.OllamaClient$OllamaClientBuilder v, v, v;
java.lang.String v, v, v, v;
dev.langchain4j.model.ollama.Options v;
java.util.List v;
java.lang.Object v;
dev.langchain4j.model.ollama.OllamaStreamingLanguageModel v;
dev.langchain4j.model.ollama.Options$OptionsBuilder v, v, v, v, v, v, v, v;
// Bind `this` and the eleven constructor parameters.
v := @this: dev.langchain4j.model.ollama.OllamaStreamingLanguageModel;
v := @parameter: java.lang.String;
v := @parameter: java.lang.String;
v := @parameter: java.lang.Double;
v := @parameter: java.lang.Integer;
v := @parameter: java.lang.Double;
v := @parameter: java.lang.Double;
v := @parameter: java.lang.Integer;
v := @parameter: java.lang.Integer;
v := @parameter: java.util.List;
v := @parameter: java.lang.String;
v := @parameter: java.time.Duration;
// super() call (java.lang.Object constructor).
specialinvoke v.<java.lang.Object: void <init>()>();
// Build the OllamaClient: baseUrl from a parameter, timeout defaulted to
// 60 seconds via Utils.getOrDefault when the caller passed null.
v = staticinvoke <dev.langchain4j.model.ollama.OllamaClient: dev.langchain4j.model.ollama.OllamaClient$OllamaClientBuilder builder()>();
v = virtualinvoke v.<dev.langchain4j.model.ollama.OllamaClient$OllamaClientBuilder: dev.langchain4j.model.ollama.OllamaClient$OllamaClientBuilder baseUrl(java.lang.String)>(v);
v = staticinvoke <java.time.Duration: java.time.Duration ofSeconds(long)>(60L);
v = staticinvoke <dev.langchain4j.internal.Utils: java.lang.Object getOrDefault(java.lang.Object,java.lang.Object)>(v, v);
v = virtualinvoke v.<dev.langchain4j.model.ollama.OllamaClient$OllamaClientBuilder: dev.langchain4j.model.ollama.OllamaClient$OllamaClientBuilder timeout(java.time.Duration)>(v);
v = virtualinvoke v.<dev.langchain4j.model.ollama.OllamaClient$OllamaClientBuilder: dev.langchain4j.model.ollama.OllamaClient build()>();
v.<dev.langchain4j.model.ollama.OllamaStreamingLanguageModel: dev.langchain4j.model.ollama.OllamaClient client> = v;
// Reject a blank model name (ValidationUtils.ensureNotBlank throws on failure).
v = staticinvoke <dev.langchain4j.internal.ValidationUtils: java.lang.String ensureNotBlank(java.lang.String,java.lang.String)>(v, "modelName");
v.<dev.langchain4j.model.ollama.OllamaStreamingLanguageModel: java.lang.String modelName> = v;
// Build the immutable Options value from the sampling parameters.
v = staticinvoke <dev.langchain4j.model.ollama.Options: dev.langchain4j.model.ollama.Options$OptionsBuilder builder()>();
v = virtualinvoke v.<dev.langchain4j.model.ollama.Options$OptionsBuilder: dev.langchain4j.model.ollama.Options$OptionsBuilder temperature(java.lang.Double)>(v);
v = virtualinvoke v.<dev.langchain4j.model.ollama.Options$OptionsBuilder: dev.langchain4j.model.ollama.Options$OptionsBuilder topK(java.lang.Integer)>(v);
v = virtualinvoke v.<dev.langchain4j.model.ollama.Options$OptionsBuilder: dev.langchain4j.model.ollama.Options$OptionsBuilder topP(java.lang.Double)>(v);
v = virtualinvoke v.<dev.langchain4j.model.ollama.Options$OptionsBuilder: dev.langchain4j.model.ollama.Options$OptionsBuilder repeatPenalty(java.lang.Double)>(v);
v = virtualinvoke v.<dev.langchain4j.model.ollama.Options$OptionsBuilder: dev.langchain4j.model.ollama.Options$OptionsBuilder seed(java.lang.Integer)>(v);
v = virtualinvoke v.<dev.langchain4j.model.ollama.Options$OptionsBuilder: dev.langchain4j.model.ollama.Options$OptionsBuilder numPredict(java.lang.Integer)>(v);
v = virtualinvoke v.<dev.langchain4j.model.ollama.Options$OptionsBuilder: dev.langchain4j.model.ollama.Options$OptionsBuilder stop(java.util.List)>(v);
v = virtualinvoke v.<dev.langchain4j.model.ollama.Options$OptionsBuilder: dev.langchain4j.model.ollama.Options build()>();
v.<dev.langchain4j.model.ollama.OllamaStreamingLanguageModel: dev.langchain4j.model.ollama.Options options> = v;
// Store the response format string as-is (no validation visible here).
v.<dev.langchain4j.model.ollama.OllamaStreamingLanguageModel: java.lang.String format> = v;
return;
}
// StreamingLanguageModel.generate: builds a CompletionRequest from the prompt
// plus this model's stored configuration, forces stream=true, and hands the
// request and the caller's handler to the client. Fire-and-forget: no return
// value; tokens are delivered through the StreamingResponseHandler.
public void generate(java.lang.String, dev.langchain4j.model.StreamingResponseHandler)
{
// Decompiler-mangled local declarations (all names collapsed to "v").
dev.langchain4j.model.ollama.Options v;
dev.langchain4j.model.ollama.CompletionRequest v;
dev.langchain4j.model.ollama.CompletionRequest$CompletionRequestBuilder v, v, v, v, v, v;
dev.langchain4j.model.StreamingResponseHandler v;
dev.langchain4j.model.ollama.OllamaClient v;
java.lang.Boolean v;
java.lang.String v, v, v;
dev.langchain4j.model.ollama.OllamaStreamingLanguageModel v;
// Bind `this`, the prompt string, and the streaming handler.
v := @this: dev.langchain4j.model.ollama.OllamaStreamingLanguageModel;
v := @parameter: java.lang.String;
v := @parameter: dev.langchain4j.model.StreamingResponseHandler;
// Assemble the request: model name, prompt, stored options and format.
v = staticinvoke <dev.langchain4j.model.ollama.CompletionRequest: dev.langchain4j.model.ollama.CompletionRequest$CompletionRequestBuilder builder()>();
v = v.<dev.langchain4j.model.ollama.OllamaStreamingLanguageModel: java.lang.String modelName>;
v = virtualinvoke v.<dev.langchain4j.model.ollama.CompletionRequest$CompletionRequestBuilder: dev.langchain4j.model.ollama.CompletionRequest$CompletionRequestBuilder model(java.lang.String)>(v);
v = virtualinvoke v.<dev.langchain4j.model.ollama.CompletionRequest$CompletionRequestBuilder: dev.langchain4j.model.ollama.CompletionRequest$CompletionRequestBuilder prompt(java.lang.String)>(v);
v = v.<dev.langchain4j.model.ollama.OllamaStreamingLanguageModel: dev.langchain4j.model.ollama.Options options>;
v = virtualinvoke v.<dev.langchain4j.model.ollama.CompletionRequest$CompletionRequestBuilder: dev.langchain4j.model.ollama.CompletionRequest$CompletionRequestBuilder options(dev.langchain4j.model.ollama.Options)>(v);
v = v.<dev.langchain4j.model.ollama.OllamaStreamingLanguageModel: java.lang.String format>;
v = virtualinvoke v.<dev.langchain4j.model.ollama.CompletionRequest$CompletionRequestBuilder: dev.langchain4j.model.ollama.CompletionRequest$CompletionRequestBuilder format(java.lang.String)>(v);
// stream is hard-coded to Boolean.TRUE (valueOf(1)) — this class always streams.
v = staticinvoke <java.lang.Boolean: java.lang.Boolean valueOf(boolean)>(1);
v = virtualinvoke v.<dev.langchain4j.model.ollama.CompletionRequest$CompletionRequestBuilder: dev.langchain4j.model.ollama.CompletionRequest$CompletionRequestBuilder stream(java.lang.Boolean)>(v);
v = virtualinvoke v.<dev.langchain4j.model.ollama.CompletionRequest$CompletionRequestBuilder: dev.langchain4j.model.ollama.CompletionRequest build()>();
// Dispatch: the client drives the stream and invokes the handler's callbacks.
v = v.<dev.langchain4j.model.ollama.OllamaStreamingLanguageModel: dev.langchain4j.model.ollama.OllamaClient client>;
virtualinvoke v.<dev.langchain4j.model.ollama.OllamaClient: void streamingCompletion(dev.langchain4j.model.ollama.CompletionRequest,dev.langchain4j.model.StreamingResponseHandler)>(v, v);
return;
}
// Static factory: obtains the builder through SPI. ServiceHelper.loadFactoryService
// returns a builder from a registered OllamaStreamingLanguageModelBuilderFactory if
// one is on the classpath, otherwise falls back to the default supplier produced by
// the synthetic bootstrap$ (an invokedynamic/lambda artifact of compilation).
// NOTE(review): the declared return type is the nested builder class; the raw
// Object returned by loadFactoryService is cast implicitly in this IR listing.
public static dev.langchain4j.model.ollama.OllamaStreamingLanguageModel$OllamaStreamingLanguageModelBuilder builder()
{
java.lang.Object v;
java.util.function.Supplier v;
v = staticinvoke <dev.langchain4j.model.ollama.OllamaStreamingLanguageModel$init__1: java.util.function.Supplier bootstrap$()>();
v = staticinvoke <dev.langchain4j.spi.ServiceHelper: java.lang.Object loadFactoryService(java.lang.Class,java.util.function.Supplier)>(class "Ldev/langchain4j/model/ollama/spi/OllamaStreamingLanguageModelBuilderFactory;", v);
return v;
}
}