Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
23 commits
Select commit Hold shift + click to select a range
a4f4199
Fix Vertex AI listSessions null handling
rolandkakonyi Mar 11, 2026
7005670
refactor: migrate LangChain4j to builder pattern, enhance token usage…
glaforge Mar 20, 2026
750851b
Merge pull request #1069 from glaforge:main
copybara-github Mar 20, 2026
3633a7d
fix: Removing deprecated methods from Runner
google-genai-bot Mar 20, 2026
8e9fb08
refactor: Use concatMap for sequential event persistence in Runner
google-genai-bot Mar 20, 2026
3e21e7a
fix: handle null `AiMessage.text()` to prevent NPE and add unit test …
glaforge Mar 21, 2026
f869994
Merge pull request #1071 from glaforge:main
copybara-github Mar 21, 2026
cdc5199
fix: add schema validation to SetModelResponseTool (issue #587 alread…
glaforge Mar 23, 2026
ce18dd9
Merge pull request #1074 from glaforge:main
copybara-github Mar 23, 2026
e9df447
Remove explicit SLF4J binding from city-time-weather ADK tutorial.
vorburger Mar 23, 2026
ce4b642
Fixes #490 and #1064 ToolConverter issues in the spring-ai module
ddobrin Mar 23, 2026
5640c58
Merge pull request #690 from bitmovin-engineering:main
copybara-github Mar 24, 2026
f6ab9d9
Merge pull request #1076 from ddobrin:issues
copybara-github Mar 24, 2026
8a7f816
refactor: use mock api answers for tests
google-genai-bot Mar 24, 2026
677b6d7
fix: parallel agent execution
Mar 25, 2026
5a2abbf
fix: resolve MCP tool parsing errors in Claude integration
Mar 25, 2026
6a5a55e
fix(firestore): Remove hardcoded dependency version
mohan-ganesh Feb 26, 2026
82baba1
Merge pull request #921 from mohan-ganesh:fix/firestore-version-remove
copybara-github Mar 25, 2026
8ab7f07
fix: add media/image support in Spring AI MessageConverter
prasadskarmarkar Jan 6, 2026
6e42aa7
Merge pull request #706 from prasadskarmarkar:fix/spring-ai-media-sup…
copybara-github Mar 25, 2026
3650c7f
chore: update google-genai version to 1.44.0
google-genai-bot Mar 25, 2026
84dff10
fix: Fixing tracing for function calls
google-genai-bot Mar 25, 2026
34d8c3d
Merge branch 'planner' into main
ddobrin Mar 25, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 3 additions & 2 deletions contrib/firestore-session-service/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,9 @@
See the License for the specific language governing permissions and
limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>

<parent>
Expand Down Expand Up @@ -49,7 +51,6 @@
<dependency>
<groupId>com.google.cloud</groupId>
<artifactId>google-cloud-firestore</artifactId>
<version>3.30.3</version>
</dependency>
<dependency>
<groupId>com.google.truth</groupId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,18 +23,19 @@
import com.google.adk.models.BaseLlmConnection;
import com.google.adk.models.LlmRequest;
import com.google.adk.models.LlmResponse;
import com.google.auto.value.AutoValue;
import com.google.genai.types.Blob;
import com.google.genai.types.Content;
import com.google.genai.types.FunctionCall;
import com.google.genai.types.FunctionCallingConfigMode;
import com.google.genai.types.FunctionDeclaration;
import com.google.genai.types.FunctionResponse;
import com.google.genai.types.GenerateContentConfig;
import com.google.genai.types.GenerateContentResponseUsageMetadata;
import com.google.genai.types.Part;
import com.google.genai.types.Schema;
import com.google.genai.types.ToolConfig;
import com.google.genai.types.Type;
import dev.langchain4j.Experimental;
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import dev.langchain4j.agent.tool.ToolSpecification;
import dev.langchain4j.data.audio.Audio;
Expand All @@ -52,6 +53,7 @@
import dev.langchain4j.data.pdf.PdfFile;
import dev.langchain4j.data.video.Video;
import dev.langchain4j.exception.UnsupportedFeatureException;
import dev.langchain4j.model.TokenCountEstimator;
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.chat.StreamingChatModel;
import dev.langchain4j.model.chat.request.ChatRequest;
Expand All @@ -65,128 +67,167 @@
import dev.langchain4j.model.chat.request.json.JsonStringSchema;
import dev.langchain4j.model.chat.response.ChatResponse;
import dev.langchain4j.model.chat.response.StreamingChatResponseHandler;
import dev.langchain4j.model.output.TokenUsage;
import io.reactivex.rxjava3.core.BackpressureStrategy;
import io.reactivex.rxjava3.core.Flowable;
import java.util.ArrayList;
import java.util.Base64;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.UUID;
import org.jspecify.annotations.Nullable;

@Experimental
public class LangChain4j extends BaseLlm {
@AutoValue
public abstract class LangChain4j extends BaseLlm {

private static final TypeReference<Map<String, Object>> MAP_TYPE_REFERENCE =
new TypeReference<>() {};

private final ChatModel chatModel;
private final StreamingChatModel streamingChatModel;
private final ObjectMapper objectMapper;
// Package-private no-arg constructor required so the AutoValue-generated
// subclass can extend this class. Passes an empty string to BaseLlm because
// the effective model name is carried by the abstract modelName() property
// and surfaced through model() instead of the superclass field.
LangChain4j() {
super("");
}

/** Synchronous chat model; {@code null} when only streaming is configured. */
@Nullable
public abstract ChatModel chatModel();

/** Streaming chat model; {@code null} when only synchronous chat is configured. */
@Nullable
public abstract StreamingChatModel streamingChatModel();

/** Jackson mapper used to serialize/deserialize tool-call arguments. */
public abstract ObjectMapper objectMapper();

/** Model name reported to callers via {@link #model()}. */
public abstract String modelName();

/**
 * Optional estimator used to fill usage metadata when the underlying provider
 * does not report token counts; {@code null} to rely on provider-reported
 * {@code TokenUsage} only.
 */
@Nullable
public abstract TokenCountEstimator tokenCountEstimator();

/** Returns the configured model name (delegates to the AutoValue property). */
@Override
public String model() {
return modelName();
}

/**
 * Creates a new builder pre-populated with a default {@link ObjectMapper},
 * so callers only need to supply a chat model (and/or streaming model) and
 * a model name.
 */
public static Builder builder() {
return new AutoValue_LangChain4j.Builder().objectMapper(new ObjectMapper());
}

/**
 * AutoValue builder for {@link LangChain4j}. At least one of
 * {@code chatModel} / {@code streamingChatModel} should be set; generateContent
 * fails at call time if the required model for the requested mode is absent.
 */
@AutoValue.Builder
public abstract static class Builder {
public abstract Builder chatModel(ChatModel chatModel);

public abstract Builder streamingChatModel(StreamingChatModel streamingChatModel);

public abstract Builder tokenCountEstimator(TokenCountEstimator tokenCountEstimator);

public abstract Builder objectMapper(ObjectMapper objectMapper);

public abstract Builder modelName(String modelName);

public abstract LangChain4j build();
}

public LangChain4j(ChatModel chatModel) {
super(
Objects.requireNonNull(
chatModel.defaultRequestParameters().modelName(), "chat model name cannot be null"));
this.chatModel = Objects.requireNonNull(chatModel, "chatModel cannot be null");
this.streamingChatModel = null;
this.objectMapper = new ObjectMapper();
this(chatModel, null, null, chatModel.defaultRequestParameters().modelName(), null);
}

public LangChain4j(ChatModel chatModel, String modelName) {
super(Objects.requireNonNull(modelName, "chat model name cannot be null"));
this.chatModel = Objects.requireNonNull(chatModel, "chatModel cannot be null");
this.streamingChatModel = null;
this.objectMapper = new ObjectMapper();
this(chatModel, null, null, modelName, null);
}

public LangChain4j(StreamingChatModel streamingChatModel) {
super(
Objects.requireNonNull(
streamingChatModel.defaultRequestParameters().modelName(),
"streaming chat model name cannot be null"));
this.chatModel = null;
this.streamingChatModel =
Objects.requireNonNull(streamingChatModel, "streamingChatModel cannot be null");
this.objectMapper = new ObjectMapper();
this(
null,
streamingChatModel,
null,
streamingChatModel.defaultRequestParameters().modelName(),
null);
}

public LangChain4j(StreamingChatModel streamingChatModel, String modelName) {
super(Objects.requireNonNull(modelName, "streaming chat model name cannot be null"));
this.chatModel = null;
this.streamingChatModel =
Objects.requireNonNull(streamingChatModel, "streamingChatModel cannot be null");
this.objectMapper = new ObjectMapper();
this(null, streamingChatModel, null, modelName, null);
}

public LangChain4j(ChatModel chatModel, StreamingChatModel streamingChatModel, String modelName) {
super(Objects.requireNonNull(modelName, "model name cannot be null"));
this.chatModel = Objects.requireNonNull(chatModel, "chatModel cannot be null");
this.streamingChatModel =
Objects.requireNonNull(streamingChatModel, "streamingChatModel cannot be null");
this.objectMapper = new ObjectMapper();
this(chatModel, streamingChatModel, null, modelName, null);
}

// Legacy-constructor bridge: the deprecated public constructors funnel here.
//
// NOTE(review): this looks broken as written. `this()` initializes the
// instance via the no-arg constructor (empty model name, no fields), and the
// LangChain4j.builder()...build() chain constructs a SEPARATE AutoValue
// instance whose result is immediately discarded — none of the constructor
// arguments (chatModel, streamingChatModel, modelName, ...) ever reach the
// object under construction. An abstract-class constructor cannot populate
// AutoValue properties this way; callers should migrate to
// LangChain4j.builder() directly. TODO: confirm intent and either remove the
// dead builder call or remove these constructors.
private LangChain4j(
ChatModel chatModel,
StreamingChatModel streamingChatModel,
ObjectMapper objectMapper,
String modelName,
TokenCountEstimator tokenCountEstimator) {
this();
LangChain4j.builder()
.chatModel(chatModel)
.streamingChatModel(streamingChatModel)
.objectMapper(objectMapper)
.modelName(modelName)
.tokenCountEstimator(tokenCountEstimator)
.build();
}

@Override
public Flowable<LlmResponse> generateContent(LlmRequest llmRequest, boolean stream) {
if (stream) {
if (this.streamingChatModel == null) {
if (this.streamingChatModel() == null) {
return Flowable.error(new IllegalStateException("StreamingChatModel is not configured"));
}

ChatRequest chatRequest = toChatRequest(llmRequest);

return Flowable.create(
emitter -> {
streamingChatModel.chat(
chatRequest,
new StreamingChatResponseHandler() {
@Override
public void onPartialResponse(String s) {
emitter.onNext(
LlmResponse.builder().content(Content.fromParts(Part.fromText(s))).build());
}

@Override
public void onCompleteResponse(ChatResponse chatResponse) {
if (chatResponse.aiMessage().hasToolExecutionRequests()) {
AiMessage aiMessage = chatResponse.aiMessage();
toParts(aiMessage).stream()
.map(Part::functionCall)
.forEach(
functionCall -> {
functionCall.ifPresent(
function -> {
emitter.onNext(
LlmResponse.builder()
.content(
Content.fromParts(
Part.fromFunctionCall(
function.name().orElse(""),
function.args().orElse(Map.of()))))
.build());
});
});
}
emitter.onComplete();
}

@Override
public void onError(Throwable throwable) {
emitter.onError(throwable);
}
});
streamingChatModel()
.chat(
chatRequest,
new StreamingChatResponseHandler() {
@Override
public void onPartialResponse(String s) {
emitter.onNext(
LlmResponse.builder()
.content(Content.fromParts(Part.fromText(s)))
.build());
}

@Override
public void onCompleteResponse(ChatResponse chatResponse) {
if (chatResponse.aiMessage().hasToolExecutionRequests()) {
AiMessage aiMessage = chatResponse.aiMessage();
toParts(aiMessage).stream()
.map(Part::functionCall)
.forEach(
functionCall -> {
functionCall.ifPresent(
function -> {
emitter.onNext(
LlmResponse.builder()
.content(
Content.fromParts(
Part.fromFunctionCall(
function.name().orElse(""),
function.args().orElse(Map.of()))))
.build());
});
});
}
emitter.onComplete();
}

@Override
public void onError(Throwable throwable) {
emitter.onError(throwable);
}
});
},
BackpressureStrategy.BUFFER);
} else {
if (this.chatModel == null) {
if (this.chatModel() == null) {
return Flowable.error(new IllegalStateException("ChatModel is not configured"));
}

ChatRequest chatRequest = toChatRequest(llmRequest);
ChatResponse chatResponse = chatModel.chat(chatRequest);
LlmResponse llmResponse = toLlmResponse(chatResponse);
ChatResponse chatResponse = chatModel().chat(chatRequest);
LlmResponse llmResponse = toLlmResponse(chatResponse, chatRequest);

return Flowable.just(llmResponse);
}
Expand Down Expand Up @@ -413,7 +454,7 @@ private AiMessage toAiMessage(Content content) {

private String toJson(Object object) {
try {
return objectMapper.writeValueAsString(object);
return objectMapper().writeValueAsString(object);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
Expand Down Expand Up @@ -511,11 +552,38 @@ private JsonSchemaElement toJsonSchemaElement(Schema schema) {
}
}

private LlmResponse toLlmResponse(ChatResponse chatResponse) {
private LlmResponse toLlmResponse(ChatResponse chatResponse, ChatRequest chatRequest) {
Content content =
Content.builder().role("model").parts(toParts(chatResponse.aiMessage())).build();

return LlmResponse.builder().content(content).build();
LlmResponse.Builder builder = LlmResponse.builder().content(content);
TokenUsage tokenUsage = chatResponse.tokenUsage();
if (tokenCountEstimator() != null) {
try {
int estimatedInput =
tokenCountEstimator().estimateTokenCountInMessages(chatRequest.messages());
int estimatedOutput =
tokenCountEstimator().estimateTokenCountInText(chatResponse.aiMessage().text());
int estimatedTotal = estimatedInput + estimatedOutput;
builder.usageMetadata(
GenerateContentResponseUsageMetadata.builder()
.promptTokenCount(estimatedInput)
.candidatesTokenCount(estimatedOutput)
.totalTokenCount(estimatedTotal)
.build());
} catch (Exception e) {
e.printStackTrace();
}
} else if (tokenUsage != null) {
builder.usageMetadata(
GenerateContentResponseUsageMetadata.builder()
.promptTokenCount(tokenUsage.inputTokenCount())
.candidatesTokenCount(tokenUsage.outputTokenCount())
.totalTokenCount(tokenUsage.totalTokenCount())
.build());
}

return builder.build();
}

private List<Part> toParts(AiMessage aiMessage) {
Expand All @@ -539,14 +607,17 @@ private List<Part> toParts(AiMessage aiMessage) {
});
return parts;
} else {
Part part = Part.builder().text(aiMessage.text()).build();
return List.of(part);
String text = aiMessage.text();
if (text == null) {
return List.of();
}
return List.of(Part.builder().text(text).build());
}
}

private Map<String, Object> toArgs(ToolExecutionRequest toolExecutionRequest) {
try {
return objectMapper.readValue(toolExecutionRequest.arguments(), MAP_TYPE_REFERENCE);
return objectMapper().readValue(toolExecutionRequest.arguments(), MAP_TYPE_REFERENCE);
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
Expand Down
Loading
Loading