From 45015adf3ebaef391c13b7ae062de7c44b5c13b9 Mon Sep 17 00:00:00 2001 From: Branislav Burdiliak Date: Wed, 8 May 2024 22:02:33 +0200 Subject: [PATCH 1/4] Support tool messages in Anthropic Extract OpenAIChatToolCompletionService trait providing just a single method -- createChatToolCompletion. Provide OpenAIAnthropicChatToolCompletionService. Additionally, OpenAI tool choice is translated to Anthropic AI as an additional user message with encouragement to use the tool in question, namely: Use the $toolChoice tool in your response. --- .../openaiscala/anthropic/JsonFormats.scala | 28 +++----- .../anthropic/domain/Content.scala | 12 ++++ .../anthropic/domain/ToolSpec.scala | 4 ++ .../AnthropicCreateMessageSettings.scala | 1 + .../anthropic/service/AnthropicService.scala | 39 +++++++++-- .../service/AnthropicServiceFactory.scala | 23 ++++++- .../service/impl/AnthropicServiceImpl.scala | 23 ++++++- .../anthropic/service/impl/EndPoint.scala | 1 + ...OpenAIAnthropicChatCompletionService.scala | 28 +++++--- ...AIAnthropicChatToolCompletionService.scala | 62 +++++++++++++++++ .../anthropic/service/impl/package.scala | 63 ++++++++++------- .../anthropic/JsonFormatsSpec.scala | 67 ++++++++++++++++--- ...atToolCompletionStreamedServiceExtra.scala | 35 ++++++++++ .../OpenAIScalaClientException.scala | 1 + .../OpenAIChatToolCompletionService.scala | 37 ++++++++++ .../openaiscala/service/OpenAIService.scala | 28 +------- .../AnthropicCreateChatToolCompletion.scala | 41 ++++++++++++ 17 files changed, 397 insertions(+), 96 deletions(-) create mode 100644 anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/ToolSpec.scala create mode 100644 anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/OpenAIAnthropicChatToolCompletionService.scala create mode 100644 openai-client-stream/src/main/scala/io/cequence/openaiscala/service/OpenAIChatToolCompletionStreamedServiceExtra.scala create mode 100644 
openai-core/src/main/scala/io/cequence/openaiscala/service/OpenAIChatToolCompletionService.scala create mode 100644 openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicCreateChatToolCompletion.scala diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/JsonFormats.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/JsonFormats.scala index aeea8bbf..147a221a 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/JsonFormats.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/JsonFormats.scala @@ -2,25 +2,11 @@ package io.cequence.openaiscala.anthropic import io.cequence.openaiscala.JsonUtil import io.cequence.openaiscala.anthropic.domain.Content.ContentBlock.{ImageBlock, TextBlock} -import io.cequence.openaiscala.anthropic.domain.Content.{ - ContentBlock, - ContentBlocks, - SingleString -} -import io.cequence.openaiscala.anthropic.domain.Message.{ - AssistantMessage, - AssistantMessageContent, - UserMessage, - UserMessageContent -} +import io.cequence.openaiscala.anthropic.domain.Content.{ContentBlock, ContentBlocks, SingleString} +import io.cequence.openaiscala.anthropic.domain.Message.{AssistantMessage, AssistantMessageContent, UserMessage, UserMessageContent} import io.cequence.openaiscala.anthropic.domain.response.CreateMessageResponse.UsageInfo -import io.cequence.openaiscala.anthropic.domain.response.{ - ContentBlockDelta, - CreateMessageChunkResponse, - CreateMessageResponse, - DeltaText -} -import io.cequence.openaiscala.anthropic.domain.{ChatRole, Content, Message} +import io.cequence.openaiscala.anthropic.domain.response.{ContentBlockDelta, CreateMessageChunkResponse, CreateMessageResponse, DeltaText} +import io.cequence.openaiscala.anthropic.domain.{ChatRole, Content, Message, ToolSpec} import play.api.libs.functional.syntax._ import play.api.libs.json._ @@ -135,4 +121,10 @@ trait JsonFormats { implicit val deltaTextReads: 
Reads[DeltaText] = Json.reads[DeltaText] implicit val contentBlockDeltaReads: Reads[ContentBlockDelta] = Json.reads[ContentBlockDelta] + + implicit lazy val toolSpecFormat: OFormat[ToolSpec] = { + implicit val stringAnyMapFormat: Format[Map[String, Any]] = JsonUtil.StringAnyMapFormat + implicit val config = JsonConfiguration(JsonNaming.SnakeCase) + Json.format[ToolSpec] + } } diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/Content.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/Content.scala index f5da4e0a..ba5ed2ef 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/Content.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/Content.scala @@ -16,5 +16,17 @@ object Content { mediaType: String, data: String ) extends ContentBlock + + sealed trait ToolUseBlock extends ContentBlock + // TODO: allow only for responses to createChatToolCompletion + case class ToolUseBlockSuccess( + toolUseId: String, + content: String // TODO: allow here only Text content blocks + ) extends ToolUseBlock + + case class ToolUseBlockFailure( + toolUseId: String, + content: String // TODO: allow here only Text content blocks + ) extends ToolUseBlock } } diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/ToolSpec.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/ToolSpec.scala new file mode 100644 index 00000000..4ea696b1 --- /dev/null +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/ToolSpec.scala @@ -0,0 +1,4 @@ +package io.cequence.openaiscala.anthropic.domain + + +final case class ToolSpec(name: String, description: Option[String], inputSchema: Map[String, Any]) diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/settings/AnthropicCreateMessageSettings.scala 
b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/settings/AnthropicCreateMessageSettings.scala index 7d0d496e..233ec5b7 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/settings/AnthropicCreateMessageSettings.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/settings/AnthropicCreateMessageSettings.scala @@ -5,6 +5,7 @@ final case class AnthropicCreateMessageSettings( // See [[models|https://docs.anthropic.com/claude/docs/models-overview]] for additional details and options. model: String, + // TODO: check is this the right place to provide a system prompt? shouldn't we be obtaining it from SystemMessage-s? // System prompt. // A system prompt is a way of providing context and instructions to Claude, such as specifying a particular goal or role. See our [[guide to system prompts|https://docs.anthropic.com/claude/docs/system-prompts]]. system: Option[String] = None, diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/AnthropicService.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/AnthropicService.scala index eb2acba5..0fe4b2cc 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/AnthropicService.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/AnthropicService.scala @@ -2,11 +2,8 @@ package io.cequence.openaiscala.anthropic.service import akka.NotUsed import akka.stream.scaladsl.Source -import io.cequence.openaiscala.anthropic.domain.Message -import io.cequence.openaiscala.anthropic.domain.response.{ - ContentBlockDelta, - CreateMessageResponse -} +import io.cequence.openaiscala.anthropic.domain.{Message, ToolSpec} +import io.cequence.openaiscala.anthropic.domain.response.{ContentBlockDelta, CreateMessageResponse} import io.cequence.openaiscala.anthropic.domain.settings.AnthropicCreateMessageSettings import 
io.cequence.openaiscala.service.CloseableService @@ -36,6 +33,38 @@ trait AnthropicService extends CloseableService with AnthropicServiceConsts { settings: AnthropicCreateMessageSettings = DefaultSettings.CreateMessage ): Future[CreateMessageResponse] + + // TODO: + /** + * Creates a message. + * + * Send a structured list of input messages with text and/or image content, and the model + * will generate the next message in the conversation. + * + * The Messages API can be used for either single queries or stateless multi-turn + * conversations. + * + * @param messages + * A list of messages comprising the conversation so far. + * @param tools + * [beta] Definitions of tools that the model may use. + * + * If you include tools in your API request, the model may return tool_use content blocks that represent the model's + * use of those tools. You can then run those tools using the tool input generated by the model and then optionally + * return results back to the model using tool_result content blocks. + * + * @param settings + * @return + * create message response + * @see + * Anthropic Doc + */ + def createToolMessage( + messages: Seq[Message], + tools: Seq[ToolSpec], + settings: AnthropicCreateMessageSettings = DefaultSettings.CreateMessage + ): Future[CreateMessageResponse] + /** * Creates a message (streamed version).
* diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/AnthropicServiceFactory.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/AnthropicServiceFactory.scala index 56028704..13959f6b 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/AnthropicServiceFactory.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/AnthropicServiceFactory.scala @@ -3,10 +3,16 @@ package io.cequence.openaiscala.anthropic.service import akka.stream.Materializer import io.cequence.openaiscala.anthropic.service.impl.{ AnthropicServiceImpl, - OpenAIAnthropicChatCompletionService + OpenAIAnthropicChatCompletionService, + OpenAIAnthropicChatToolCompletionService +} +import io.cequence.openaiscala.service.{ + OpenAIChatCompletionService, + OpenAIChatToolCompletionService +} +import io.cequence.openaiscala.service.StreamedServiceTypes.{ + OpenAIChatCompletionStreamedService } -import io.cequence.openaiscala.service.OpenAIChatCompletionService -import io.cequence.openaiscala.service.StreamedServiceTypes.OpenAIChatCompletionStreamedService import io.cequence.openaiscala.service.ws.Timeouts import scala.concurrent.ExecutionContext @@ -43,6 +49,17 @@ object AnthropicServiceFactory extends AnthropicServiceConsts { AnthropicServiceFactory(apiKey, timeouts) ) + def asOpenAIChatToolCompletionService( + apiKey: String = getAPIKeyFromEnv(), + timeouts: Option[Timeouts] = None + )( + implicit ec: ExecutionContext, + materializer: Materializer + ): OpenAIChatToolCompletionService = + new OpenAIAnthropicChatToolCompletionService( + AnthropicServiceFactory(apiKey, timeouts) + ) + /** * Create a new instance of the [[AnthropicService]] * diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/AnthropicServiceImpl.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/AnthropicServiceImpl.scala index 
48a75ee8..c63122de 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/AnthropicServiceImpl.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/AnthropicServiceImpl.scala @@ -10,7 +10,7 @@ import io.cequence.openaiscala.anthropic.domain.response.{ CreateMessageResponse } import io.cequence.openaiscala.anthropic.domain.settings.AnthropicCreateMessageSettings -import io.cequence.openaiscala.anthropic.domain.{ChatRole, Message} +import io.cequence.openaiscala.anthropic.domain.{ChatRole, Message, ToolSpec} import io.cequence.openaiscala.anthropic.service.AnthropicService import io.cequence.openaiscala.service.OpenAIWSRequestHelper import io.cequence.openaiscala.service.impl.OpenAIWSStreamRequestHelper @@ -40,6 +40,27 @@ private[service] trait AnthropicServiceImpl _.asSafe[CreateMessageResponse] ) + override def createToolMessage( + messages: Seq[Message], + tools: Seq[ToolSpec], + settings: AnthropicCreateMessageSettings + ): Future[CreateMessageResponse] = { + val coreParams = createBodyParamsForMessageCreation(messages, settings, stream = false) + val extraParams = jsonBodyParams( + Param.tools -> Some(tools.map(Json.toJson(_))) + ) + + execPOST( + EndPoint.messages, + bodyParams = coreParams ++ extraParams + ).map( + _.asSafe[CreateMessageResponse] + ) + } + + // TODO: somewhere override handleErrorCodes + // define Anthropic exceptions based on status codes + override def createMessageStreamed( messages: Seq[Message], settings: AnthropicCreateMessageSettings diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/EndPoint.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/EndPoint.scala index d80d61a5..a83ba366 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/EndPoint.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/EndPoint.scala @@ 
-20,6 +20,7 @@ object Param { case object stop_sequences extends Param case object stream extends Param case object temperature extends Param + case object tools extends Param case object top_p extends Param case object top_k extends Param diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/OpenAIAnthropicChatCompletionService.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/OpenAIAnthropicChatCompletionService.scala index 493d0b1a..e3134cf3 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/OpenAIAnthropicChatCompletionService.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/OpenAIAnthropicChatCompletionService.scala @@ -3,16 +3,10 @@ package io.cequence.openaiscala.anthropic.service.impl import akka.NotUsed import akka.stream.scaladsl.Source import io.cequence.openaiscala.anthropic.service.AnthropicService -import io.cequence.openaiscala.domain.BaseMessage -import io.cequence.openaiscala.domain.response.{ - ChatCompletionChunkResponse, - ChatCompletionResponse -} +import io.cequence.openaiscala.domain.{BaseMessage, ToolSpec} +import io.cequence.openaiscala.domain.response.{ChatCompletionChunkResponse, ChatCompletionResponse} import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings -import io.cequence.openaiscala.service.{ - OpenAIChatCompletionService, - OpenAIChatCompletionStreamedServiceExtra -} +import io.cequence.openaiscala.service.{OpenAIChatCompletionService, OpenAIChatCompletionStreamedServiceExtra} import scala.concurrent.{ExecutionContext, Future} @@ -46,6 +40,22 @@ private[service] class OpenAIAnthropicChatCompletionService( .map(toOpenAI) } + + // TODO: extract another trait extending OpenAIChatCompletionService with createChatToolCompletion + def createChatToolCompletion( + messages: Seq[BaseMessage], + tools: Seq[ToolSpec], + settings: CreateChatCompletionSettings + ): 
Future[ChatCompletionResponse] = { + underlying + .createToolMessage( + toAnthropic(messages), + toAnthropicToolSpecs(tools), + toAnthropic(settings, messages) + ) + .map(toOpenAI) + } + /** * Creates a completion for the chat message(s) with streamed results. * diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/OpenAIAnthropicChatToolCompletionService.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/OpenAIAnthropicChatToolCompletionService.scala new file mode 100644 index 00000000..ee8f837e --- /dev/null +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/OpenAIAnthropicChatToolCompletionService.scala @@ -0,0 +1,62 @@ +package io.cequence.openaiscala.anthropic.service.impl + +import akka.NotUsed +import akka.stream.scaladsl.Source +import io.cequence.openaiscala.anthropic.domain.Message +import io.cequence.openaiscala.anthropic.domain.Message.UserMessage +import io.cequence.openaiscala.anthropic.domain.response.CreateMessageResponse +import io.cequence.openaiscala.anthropic.service.AnthropicService +import io.cequence.openaiscala.domain.response.{ChatCompletionChunkResponse, ChatCompletionResponse, ChatToolCompletionResponse} +import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings +import io.cequence.openaiscala.domain.{BaseMessage, SystemMessage, ToolSpec} +import io.cequence.openaiscala.service.{OpenAIChatCompletionService, OpenAIChatCompletionStreamedServiceExtra, OpenAIChatToolCompletionService} + +import scala.concurrent.{ExecutionContext, Future} + +private[service] class OpenAIAnthropicChatToolCompletionService( + underlying: AnthropicService +)( + implicit executionContext: ExecutionContext +) extends OpenAIChatToolCompletionService { + + /** + * Creates a model response for the given chat conversation expecting a tool call. + * + * @param messages + * A list of messages comprising the conversation so far. 
+ * @param tools + * A list of tools the model may call. Currently, only functions are supported as a tool. + * Use this to provide a list of functions the model may generate JSON inputs for. + * @param responseToolChoice + * Controls which (if any) function/tool is called by the model. Specifying a particular + * function forces the model to call that function (must be listed in `tools`). Otherwise, + * the default "auto" mode is used where the model can pick between generating a message or + * calling a function. + * @param settings + * @return + * chat completion response + * @see + * OpenAI Doc + */ + override def createChatToolCompletion( + messages: Seq[BaseMessage], + tools: Seq[ToolSpec], + responseToolChoice: Option[String], + settings: CreateChatCompletionSettings + ): Future[ChatToolCompletionResponse] = { + val anthropicResponseF: Future[CreateMessageResponse] = underlying + .createToolMessage( + toAnthropic(messages) ++ responseToolChoice.map(toAnthropicToolUseEncouragement), + toAnthropicToolSpecs(tools), + toAnthropic(settings, messages) + ) + anthropicResponseF.map(toOpenAIChatToolCompletionResponse) + } + + // TODO: support streamed version? + + /** + * Closes the underlying ws client, and releases all its resources. 
+ */ + override def close(): Unit = underlying.close() +} diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/package.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/package.scala index e41b36ed..717c7ae1 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/package.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/package.scala @@ -1,35 +1,16 @@ package io.cequence.openaiscala.anthropic.service +//import io.cequence.openaiscala.anthropic.{domain => Anthropic} import io.cequence.openaiscala.anthropic.domain.Content.ContentBlock.TextBlock import io.cequence.openaiscala.anthropic.domain.Content.ContentBlocks -import io.cequence.openaiscala.anthropic.domain.response.{ - ContentBlockDelta, - CreateMessageResponse -} +import io.cequence.openaiscala.anthropic.domain.Message.UserMessage +import io.cequence.openaiscala.anthropic.domain.response.{ContentBlockDelta, CreateMessageResponse} import io.cequence.openaiscala.anthropic.domain.response.CreateMessageResponse.UsageInfo import io.cequence.openaiscala.anthropic.domain.settings.AnthropicCreateMessageSettings -import io.cequence.openaiscala.anthropic.domain.{Content, Message} -import io.cequence.openaiscala.domain.response.{ - ChatCompletionChoiceChunkInfo, - ChatCompletionChoiceInfo, - ChatCompletionChunkResponse, - ChatCompletionResponse, - ChunkMessageSpec, - UsageInfo => OpenAIUsageInfo -} +import io.cequence.openaiscala.anthropic.domain.{Content, Message, ToolSpec} +import io.cequence.openaiscala.domain.response.{ChatCompletionChoiceChunkInfo, ChatCompletionChoiceInfo, ChatCompletionChunkResponse, ChatCompletionResponse, ChatToolCompletionChoiceInfo, ChatToolCompletionResponse, ChunkMessageSpec, UsageInfo => OpenAIUsageInfo} import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings -import io.cequence.openaiscala.domain.{ - AssistantMessage, - 
ChatRole, - MessageSpec, - SystemMessage, - BaseMessage => OpenAIBaseMessage, - Content => OpenAIContent, - ImageURLContent => OpenAIImageContent, - TextContent => OpenAITextContent, - UserMessage => OpenAIUserMessage, - UserSeqMessage => OpenAIUserSeqMessage -} +import io.cequence.openaiscala.domain.{AssistantMessage, AssistantToolMessage, ChatRole, FunctionSpec, MessageSpec, SystemMessage, BaseMessage => OpenAIBaseMessage, Content => OpenAIContent, ImageURLContent => OpenAIImageContent, TextContent => OpenAITextContent, ToolSpec => OpenAIToolSpec, UserMessage => OpenAIUserMessage, UserSeqMessage => OpenAIUserSeqMessage} import java.{util => ju} @@ -45,6 +26,15 @@ package object impl extends AnthropicServiceConsts { Message.UserMessage(content) } + def toAnthropicToolUseEncouragement(toolChoice: String): UserMessage = + UserMessage(s"Use the $toolChoice tool in your response.") + + def toAnthropicToolSpecs(toolSpecs: Seq[OpenAIToolSpec]): Seq[ToolSpec] = { + toolSpecs.collect { + case FunctionSpec(name, description, parameters) => ToolSpec(name, description, parameters) + } + } + def toAnthropic(content: OpenAIContent): Content.ContentBlock = { content match { case OpenAITextContent(text) => TextBlock(text) @@ -101,6 +91,24 @@ package object impl extends AnthropicServiceConsts { usage = Some(toOpenAI(response.usage)) ) + + def toOpenAIChatToolCompletionResponse(createMessageResponse: CreateMessageResponse) = { + ChatToolCompletionResponse( + id = createMessageResponse.id, + created = new ju.Date(), + model = createMessageResponse.model, + system_fingerprint = createMessageResponse.stop_reason, + choices = Seq( + ChatToolCompletionChoiceInfo( + message = toOpenAIAssistantToolMessage(createMessageResponse.content), + index = 0, + finish_reason = createMessageResponse.stop_reason + ) + ), + usage = Some(toOpenAI(createMessageResponse.usage)) + ) + } + def toOpenAI(blockDelta: ContentBlockDelta): ChatCompletionChunkResponse = ChatCompletionChunkResponse( id = "", @@ 
-130,6 +138,10 @@ package object impl extends AnthropicServiceConsts { AssistantMessage(singleTextContent, name = None) } + def toOpenAIAssistantToolMessage(content: ContentBlocks): AssistantToolMessage = { + ??? + } + private def concatenateMessages(messageContent: Seq[String]): String = messageContent.mkString("\n") @@ -140,4 +152,5 @@ package object impl extends AnthropicServiceConsts { completion_tokens = Some(usageInfo.output_tokens) ) } + } diff --git a/anthropic-client/src/test/scala/io/cequence/openaiscala/anthropic/JsonFormatsSpec.scala b/anthropic-client/src/test/scala/io/cequence/openaiscala/anthropic/JsonFormatsSpec.scala index 98573cea..b7b4c0af 100644 --- a/anthropic-client/src/test/scala/io/cequence/openaiscala/anthropic/JsonFormatsSpec.scala +++ b/anthropic-client/src/test/scala/io/cequence/openaiscala/anthropic/JsonFormatsSpec.scala @@ -3,16 +3,11 @@ package io.cequence.openaiscala.anthropic import io.cequence.openaiscala.anthropic.JsonFormatsSpec.JsonPrintMode import io.cequence.openaiscala.anthropic.JsonFormatsSpec.JsonPrintMode.{Compact, Pretty} import io.cequence.openaiscala.anthropic.domain.Content.ContentBlock.{ImageBlock, TextBlock} -import io.cequence.openaiscala.anthropic.domain.Message -import io.cequence.openaiscala.anthropic.domain.Message.{ - AssistantMessage, - AssistantMessageContent, - UserMessage, - UserMessageContent -} +import io.cequence.openaiscala.anthropic.domain.{Message, ToolSpec} +import io.cequence.openaiscala.anthropic.domain.Message.{AssistantMessage, AssistantMessageContent, UserMessage, UserMessageContent} import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike -import play.api.libs.json.{Format, Json} +import play.api.libs.json.{Format, Json, Writes} object JsonFormatsSpec { sealed trait JsonPrintMode @@ -53,6 +48,47 @@ class JsonFormatsSpec extends AnyWordSpecLike with Matchers with JsonFormats { testCodec[Message](assistantMessage, json) } + // TODO: add deserialization tests 
for: + // 1. ToolUseBlock - success - flat content + // 2. ToolUseBlock - success - TextBlock content + // 3. ToolUseBlock - failure - flat content + // 4. ToolUseBlock - failure - TextBlock content + + val expectedToolSpecJson = + """{ + | "name" : "get_stock_price", + | "description" : "Get the current stock price for a given ticker symbol.", + | "input_schema" : { + | "type" : "object", + | "properties" : { + | "ticker" : { + | "type" : "string", + | "description" : "The stock ticker symbol, e.g. AAPL for Apple Inc." + | } + | }, + | "required" : [ "ticker" ] + | } + |}""".stripMargin + + "serialize tools" in { + val toolSpec = ToolSpec( + name = "get_stock_price", + description = Some("Get the current stock price for a given ticker symbol."), + inputSchema = Map( + "type" -> "object", + "properties" -> Map( + "ticker" -> Map( + "type" -> "string", + "description" -> "The stock ticker symbol, e.g. AAPL for Apple Inc." + ) + ), + "required" -> Seq("ticker") + ) + ) + + testSerialization(toolSpec, expectedToolSpecJson, Pretty) + } + val expectedImageContentJson = """{ | "role" : "user", @@ -91,4 +127,19 @@ class JsonFormatsSpec extends AnyWordSpecLike with Matchers with JsonFormats { Json.parse(json).as[A] shouldBe value } + private def testSerialization[A]( + value: A, + json: String, + printMode: JsonPrintMode = Compact + )( + implicit writes: Writes[A] + ): Unit = { + val jsValue = Json.toJson(value) + val serialized = printMode match { + case Compact => jsValue.toString() + case Pretty => Json.prettyPrint(jsValue) + } + serialized shouldBe json + } + } diff --git a/openai-client-stream/src/main/scala/io/cequence/openaiscala/service/OpenAIChatToolCompletionStreamedServiceExtra.scala b/openai-client-stream/src/main/scala/io/cequence/openaiscala/service/OpenAIChatToolCompletionStreamedServiceExtra.scala new file mode 100644 index 00000000..18918589 --- /dev/null +++ 
b/openai-client-stream/src/main/scala/io/cequence/openaiscala/service/OpenAIChatToolCompletionStreamedServiceExtra.scala @@ -0,0 +1,35 @@ +package io.cequence.openaiscala.service + +import akka.NotUsed +import akka.stream.scaladsl.Source +import io.cequence.openaiscala.domain.BaseMessage +import io.cequence.openaiscala.domain.response.ChatCompletionChunkResponse +import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings + +/** + * Service that offers ONLY a streamed version of OpenAI chat completion endpoint. + * + * @since March + * 2024 + */ +trait OpenAIChatToolCompletionStreamedServiceExtra + extends OpenAIServiceConsts + with CloseableService { + + /** + * Creates a completion for the chat message(s) with streamed results. + * + * @param messages + * A list of messages comprising the conversation so far. + * @param settings + * @return + * chat completion response + * + * @see + * OpenAI Doc + */ + def createChatToolCompletionStreamed( + messages: Seq[BaseMessage], + settings: CreateChatCompletionSettings = DefaultSettings.CreateChatCompletion + ): Source[ChatCompletionChunkResponse, NotUsed] +} diff --git a/openai-core/src/main/scala/io/cequence/openaiscala/OpenAIScalaClientException.scala b/openai-core/src/main/scala/io/cequence/openaiscala/OpenAIScalaClientException.scala index c25f7af3..2c3c99c1 100644 --- a/openai-core/src/main/scala/io/cequence/openaiscala/OpenAIScalaClientException.scala +++ b/openai-core/src/main/scala/io/cequence/openaiscala/OpenAIScalaClientException.scala @@ -6,6 +6,7 @@ object Retryable { t: OpenAIScalaClientException ): Option[OpenAIScalaClientException] = Some(t).filter(apply) + // TODO: consider retrying for Anthropic def apply(t: OpenAIScalaClientException): Boolean = t match { // we retry on these case _: OpenAIScalaClientTimeoutException => true diff --git a/openai-core/src/main/scala/io/cequence/openaiscala/service/OpenAIChatToolCompletionService.scala 
b/openai-core/src/main/scala/io/cequence/openaiscala/service/OpenAIChatToolCompletionService.scala new file mode 100644 index 00000000..341d7e1e --- /dev/null +++ b/openai-core/src/main/scala/io/cequence/openaiscala/service/OpenAIChatToolCompletionService.scala @@ -0,0 +1,37 @@ +package io.cequence.openaiscala.service + +import io.cequence.openaiscala.domain.{BaseMessage, ToolSpec} +import io.cequence.openaiscala.domain.response.ChatToolCompletionResponse +import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings + +import scala.concurrent.Future + +trait OpenAIChatToolCompletionService extends OpenAIServiceConsts with CloseableService { + + /** + * Creates a model response for the given chat conversation expecting a tool call. + * + * @param messages + * A list of messages comprising the conversation so far. + * @param tools + * A list of tools the model may call. Currently, only functions are supported as a tool. + * Use this to provide a list of functions the model may generate JSON inputs for. + * @param responseToolChoice + * Controls which (if any) function/tool is called by the model. Specifying a particular + * function forces the model to call that function (must be listed in `tools`). Otherwise, + * the default "auto" mode is used where the model can pick between generating a message or + * calling a function. 
+ * @param settings + * @return + * chat completion response + * @see + * OpenAI Doc + */ + def createChatToolCompletion( + messages: Seq[BaseMessage], + tools: Seq[ToolSpec], + responseToolChoice: Option[String] = None, + settings: CreateChatCompletionSettings = DefaultSettings.CreateChatToolCompletion + ): Future[ChatToolCompletionResponse] + +} diff --git a/openai-core/src/main/scala/io/cequence/openaiscala/service/OpenAIService.scala b/openai-core/src/main/scala/io/cequence/openaiscala/service/OpenAIService.scala index d67c8372..441b78d5 100644 --- a/openai-core/src/main/scala/io/cequence/openaiscala/service/OpenAIService.scala +++ b/openai-core/src/main/scala/io/cequence/openaiscala/service/OpenAIService.scala @@ -50,7 +50,7 @@ import scala.concurrent.Future * @since Jan * 2023 */ -trait OpenAIService extends OpenAICoreService { +trait OpenAIService extends OpenAICoreService with OpenAIChatToolCompletionService { /** * Retrieves a model instance, providing basic information about the model such as the owner @@ -93,32 +93,6 @@ trait OpenAIService extends OpenAICoreService { settings: CreateChatCompletionSettings = DefaultSettings.CreateChatFunCompletion ): Future[ChatFunCompletionResponse] - /** - * Creates a model response for the given chat conversation expecting a tool call. - * - * @param messages - * A list of messages comprising the conversation so far. - * @param tools - * A list of tools the model may call. Currently, only functions are supported as a tool. - * Use this to provide a list of functions the model may generate JSON inputs for. - * @param responseToolChoice - * Controls which (if any) function/tool is called by the model. Specifying a particular - * function forces the model to call that function (must be listed in `tools`). Otherwise, - * the default "auto" mode is used where the model can pick between generating a message or - * calling a function. 
- * @param settings - * @return - * chat completion response - * @see - * OpenAI Doc - */ - def createChatToolCompletion( - messages: Seq[BaseMessage], - tools: Seq[ToolSpec], - responseToolChoice: Option[String] = None, - settings: CreateChatCompletionSettings = DefaultSettings.CreateChatToolCompletion - ): Future[ChatToolCompletionResponse] - /** * Creates a new edit for the provided input, instruction, and parameters. * diff --git a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicCreateChatToolCompletion.scala b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicCreateChatToolCompletion.scala new file mode 100644 index 00000000..e543f322 --- /dev/null +++ b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicCreateChatToolCompletion.scala @@ -0,0 +1,41 @@ +package io.cequence.openaiscala.examples.nonopenai + +import io.cequence.openaiscala.anthropic.service.AnthropicServiceFactory +import io.cequence.openaiscala.domain.{FunctionSpec, NonOpenAIModelId, SystemMessage, UserMessage} +import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings +import io.cequence.openaiscala.examples.ExampleBase +import io.cequence.openaiscala.service.{OpenAIChatCompletionService, OpenAIChatToolCompletionService, OpenAICoreService, OpenAIService} + +import scala.concurrent.Future + +object AnthropicCreateChatToolCompletion extends ExampleBase[OpenAIChatToolCompletionService] { + + override protected val service: OpenAIChatToolCompletionService = + AnthropicServiceFactory.asOpenAIChatToolCompletionService() + + private val messages = Seq( + SystemMessage("You are a helpful assistant."), + UserMessage("What's the S&P 500 as of today?") + ) + + override protected def run: Future[_] = + service + .createChatToolCompletion( + messages = messages, + settings = CreateChatCompletionSettings(NonOpenAIModelId.claude_3_haiku_20240307), + tools = Seq(FunctionSpec( + name = 
"get_stock_price", + description = Some("Get the current stock price of a given company"), + parameters = Map( + "type" -> "object", + "properties" -> Map( + "company" -> Map( + "type" -> "string", + "description" -> "The company name, e.g. Apple Inc." + ) + ), + "required" -> Seq("company") + ) + )) + ) +} From 4b40a369733792c42ca5246086e50f0db4a7eae9 Mon Sep 17 00:00:00 2001 From: Branislav Burdiliak Date: Sat, 11 May 2024 22:22:09 +0200 Subject: [PATCH 2/4] Execute Anthropic's tool calls as beta Add beta header to tool calls: anthropic-beta: tools-2024-04-04 --- .../service/AnthropicWSRequestHelper.scala | 55 +++++++++++++++++++ .../service/impl/AnthropicServiceImpl.scala | 26 ++++++--- .../service/ws/WSRequestHelper.scala | 32 +++++++++++ 3 files changed, 105 insertions(+), 8 deletions(-) create mode 100644 anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/AnthropicWSRequestHelper.scala diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/AnthropicWSRequestHelper.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/AnthropicWSRequestHelper.scala new file mode 100644 index 00000000..17c245c7 --- /dev/null +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/AnthropicWSRequestHelper.scala @@ -0,0 +1,55 @@ +package io.cequence.openaiscala.anthropic.service + +import io.cequence.openaiscala.anthropic.service.AnthropicWSRequestHelper.AnthropicBetaHeader +import io.cequence.openaiscala.{OpenAIScalaClientException, OpenAIScalaEngineOverloadedException, OpenAIScalaRateLimitException, OpenAIScalaServerErrorException, OpenAIScalaTokenCountExceededException, OpenAIScalaUnauthorizedException} +import io.cequence.openaiscala.service.ws.WSRequestExtHelper +import play.api.libs.json.{JsObject, JsValue} + +import scala.concurrent.Future + +object AnthropicWSRequestHelper { + val AnthropicBetaHeader = "anthropic-beta" +} + +trait AnthropicWSRequestHelper extends 
WSRequestExtHelper { + + // TODO: introduce Anthropic error model + override protected def handleErrorCodes( + httpCode: Int, + message: String + ): Nothing = { + val errorMessage = s"Code ${httpCode} : ${message}" + httpCode match { + case 401 => throw new OpenAIScalaUnauthorizedException(errorMessage) + case 429 => throw new OpenAIScalaRateLimitException(errorMessage) + case 500 => throw new OpenAIScalaServerErrorException(errorMessage) + case 503 => throw new OpenAIScalaEngineOverloadedException(errorMessage) + case 400 => + if ( + message.contains("Please reduce your prompt; or completion length") || + message.contains("Please reduce the length of the messages") + ) + throw new OpenAIScalaTokenCountExceededException(errorMessage) + else + throw new OpenAIScalaClientException(errorMessage) + + case _ => throw new OpenAIScalaClientException(errorMessage) + } + } + + protected def execBetaPOSTWithStatus( + endPoint: PEP, + endPointParam: Option[String] = None, + params: Seq[(PT, Option[Any])] = Nil, + bodyParams: Seq[(PT, Option[JsValue])] = Nil, + ): Future[JsValue] = { + execPOSTWithStatusAndHeaders( + endPoint, + endPointParam, + params, + bodyParams, + headers = authHeaders ++ Seq(AnthropicBetaHeader -> "tools-2024-04-04") + ).map(handleErrorResponse) + } + +} diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/AnthropicServiceImpl.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/AnthropicServiceImpl.scala index c63122de..f6d549e5 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/AnthropicServiceImpl.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/AnthropicServiceImpl.scala @@ -11,7 +11,7 @@ import io.cequence.openaiscala.anthropic.domain.response.{ } import io.cequence.openaiscala.anthropic.domain.settings.AnthropicCreateMessageSettings import io.cequence.openaiscala.anthropic.domain.{ChatRole, 
Message, ToolSpec} -import io.cequence.openaiscala.anthropic.service.AnthropicService +import io.cequence.openaiscala.anthropic.service.{AnthropicService, AnthropicWSRequestHelper} import io.cequence.openaiscala.service.OpenAIWSRequestHelper import io.cequence.openaiscala.service.impl.OpenAIWSStreamRequestHelper import play.api.libs.json.{JsValue, Json} @@ -22,7 +22,7 @@ import scala.concurrent.Future // Shouldn't use OpenAIWSRequestHelper and OpenAIWSStreamRequestHelper private[service] trait AnthropicServiceImpl extends AnthropicService - with OpenAIWSRequestHelper + with AnthropicWSRequestHelper with OpenAIWSStreamRequestHelper with JsonFormats { @@ -50,12 +50,22 @@ private[service] trait AnthropicServiceImpl Param.tools -> Some(tools.map(Json.toJson(_))) ) - execPOST( - EndPoint.messages, - bodyParams = coreParams ++ extraParams - ).map( - _.asSafe[CreateMessageResponse] - ) + def isToolCall = tools.nonEmpty + + if (isToolCall) + execBetaPOSTWithStatus( + EndPoint.messages, + bodyParams = coreParams ++ extraParams + ).map( + _.asSafe[CreateMessageResponse] + ) + else + execPOST( + EndPoint.messages, + bodyParams = coreParams ++ extraParams + ).map( + _.asSafe[CreateMessageResponse] + ) } // TODO: somewhere override handleErrorCodes diff --git a/openai-client/src/main/scala/io/cequence/openaiscala/service/ws/WSRequestHelper.scala b/openai-client/src/main/scala/io/cequence/openaiscala/service/ws/WSRequestHelper.scala index f3c7211c..8b1d5486 100644 --- a/openai-client/src/main/scala/io/cequence/openaiscala/service/ws/WSRequestHelper.scala +++ b/openai-client/src/main/scala/io/cequence/openaiscala/service/ws/WSRequestHelper.scala @@ -340,6 +340,38 @@ trait WSRequestHelper extends HasWSClient { client.url(url) } + protected def execPOSTWithStatusAndHeaders( + endPoint: PEP, + endPointParam: Option[String] = None, + params: Seq[(PT, Option[Any])] = Nil, + bodyParams: Seq[(PT, Option[JsValue])] = Nil, + acceptableStatusCodes: Seq[Int] = defaultAcceptableStatusCodes, 
+ headers: Seq[(String, String)] = Nil + ): Future[RichJsResponse] = { + val request = getWSRequestOptionalWithHeaders(Some(endPoint), endPointParam, toStringParams(params), headers) + val bodyParamsX = bodyParams.collect { case (fieldName, Some(jsValue)) => + (fieldName.toString, jsValue) + } + + execPOSTJsonAux( + request, + JsObject(bodyParamsX), + Some(endPoint), + acceptableStatusCodes + ) + } + + protected def getWSRequestOptionalWithHeaders( + endPoint: Option[PEP], + endPointParam: Option[String], + params: Seq[(String, Option[Any])], + headers: Seq[(String, String)] + ) = { + val paramsString = paramsOptionalAsString(params) + val url = createUrl(endPoint, endPointParam) + paramsString + client.url(url).addHttpHeaders(headers: _*) + } + private def execRequestAux[T]( responseConverter: ResponseConverters.ResponseConverter[T] )( From 5261105b3e98fefafb21bb577dd450e80693605b Mon Sep 17 00:00:00 2001 From: Branislav Burdiliak Date: Sun, 12 May 2024 15:37:08 +0200 Subject: [PATCH 3/4] Allow system prompt per create message, model tool use content block System prompts are not provided from settings, instead, you can supply them per createMessage call. OpenAI adapter takes all System messages and concatenates their content to get a single Anthropic system prompt. Model Anthropic's tool use content block which can be returned as a reply to a create tool message. 
Add tests for: - deserialization of a tool_use content block - deserialization of a create message reponse with a tool_use content block --- .../openaiscala/anthropic/JsonFormats.scala | 11 +- .../anthropic/domain/Content.scala | 26 ++-- .../anthropic/domain/Message.scala | 3 +- .../response/CreateMessageResponse.scala | 3 +- .../AnthropicCreateMessageSettings.scala | 5 - .../anthropic/service/AnthropicService.scala | 19 ++- .../service/impl/AnthropicServiceImpl.scala | 12 +- ...OpenAIAnthropicChatCompletionService.scala | 3 + ...AIAnthropicChatToolCompletionService.scala | 1 + .../anthropic/service/impl/package.scala | 56 ++++++-- .../anthropic/JsonFormatsSpec.scala | 123 +++++++++++++++--- .../nonopenai/AnthropicCreateMessage.scala | 1 + .../AnthropicCreateMessageStreamed.scala | 1 + .../AnthropicCreateMessageWithImage.scala | 1 + 14 files changed, 210 insertions(+), 55 deletions(-) diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/JsonFormats.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/JsonFormats.scala index 147a221a..b12b32ad 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/JsonFormats.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/JsonFormats.scala @@ -1,7 +1,7 @@ package io.cequence.openaiscala.anthropic import io.cequence.openaiscala.JsonUtil -import io.cequence.openaiscala.anthropic.domain.Content.ContentBlock.{ImageBlock, TextBlock} +import io.cequence.openaiscala.anthropic.domain.Content.ContentBlock.{ImageBlock, TextBlock, ToolUseBlock} import io.cequence.openaiscala.anthropic.domain.Content.{ContentBlock, ContentBlocks, SingleString} import io.cequence.openaiscala.anthropic.domain.Message.{AssistantMessage, AssistantMessageContent, UserMessage, UserMessageContent} import io.cequence.openaiscala.anthropic.domain.response.CreateMessageResponse.UsageInfo @@ -54,6 +54,7 @@ trait JsonFormats { } implicit val contentBlockReads: 
Reads[ContentBlock] = new Reads[ContentBlock] { + implicit val stringAnyMapFormat: Format[Map[String, Any]] = JsonUtil.StringAnyMapFormat def reads(json: JsValue): JsResult[ContentBlock] = { (json \ "type").validate[String].flatMap { case "text" => (json \ "text").validate[String].map(TextBlock.apply) @@ -64,6 +65,12 @@ trait JsonFormats { mediaType <- (source \ "media_type").validate[String] data <- (source \ "data").validate[String] } yield ImageBlock(`type`, mediaType, data) + case "tool_use" => + for { + id <- (json \ "id").validate[String] + name <- (json \ "name").validate[String] + input <- (json \ "input").validate[Map[String, Any]] + } yield ToolUseBlock(id, name, input) case _ => JsError("Unsupported or invalid content block") } } @@ -113,7 +120,7 @@ trait JsonFormats { (__ \ "model").read[String] and (__ \ "stop_reason").readNullable[String] and (__ \ "stop_sequence").readNullable[String] and - (__ \ "usage").read[UsageInfo] + (__ \ "usage").readNullable[UsageInfo] )(CreateMessageResponse.apply _) implicit val createMessageChunkResponseReads: Reads[CreateMessageChunkResponse] = diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/Content.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/Content.scala index ba5ed2ef..fcf09c2a 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/Content.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/Content.scala @@ -17,16 +17,22 @@ object Content { data: String ) extends ContentBlock - sealed trait ToolUseBlock extends ContentBlock - // TODO: allow only for responses to createChatToolCompletion - case class ToolUseBlockSuccess( - toolUseId: String, - content: String // TODO: allow here only Text content blocks - ) extends ToolUseBlock + case class ToolUseBlock( + id: String, + name: String, + input: Map[String, Any] // TODO: allow here only Text content blocks + ) extends ContentBlock - 
case class ToolUseBlockFailure( - toolUseId: String, - content: String // TODO: allow here only Text content blocks - ) extends ToolUseBlock +// sealed trait ToolUseBlock extends ContentBlock +// // TODO: allow only for responses to createChatToolCompletion +// case class ToolUseBlockSuccess( +// toolUseId: String, +// content: String // TODO: allow here only Text content blocks +// ) extends ToolUseBlock +// +// case class ToolUseBlockFailure( +// toolUseId: String, +// content: String // TODO: allow here only Text content blocks +// ) extends ToolUseBlock } } diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/Message.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/Message.scala index e104afaa..778f75b5 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/Message.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/Message.scala @@ -13,8 +13,7 @@ sealed abstract class Message private ( object Message { - case class UserMessage(contentString: String) - extends Message(ChatRole.User, SingleString(contentString)) + case class UserMessage(contentString: String) extends Message(ChatRole.User, SingleString(contentString)) case class UserMessageContent(contentBlocks: Seq[ContentBlock]) extends Message(ChatRole.User, ContentBlocks(contentBlocks)) case class AssistantMessage(contentString: String) diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/response/CreateMessageResponse.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/response/CreateMessageResponse.scala index 154e10f2..84b2b5f5 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/response/CreateMessageResponse.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/response/CreateMessageResponse.scala @@ -12,7 +12,8 @@ final case class 
CreateMessageResponse( model: String, stop_reason: Option[String], stop_sequence: Option[String], - usage: UsageInfo + // TODO: it is required according to the API documentation, but it is not present in the response for tool calls + usage: Option[UsageInfo] ) object CreateMessageResponse { diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/settings/AnthropicCreateMessageSettings.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/settings/AnthropicCreateMessageSettings.scala index 233ec5b7..b604811e 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/settings/AnthropicCreateMessageSettings.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/settings/AnthropicCreateMessageSettings.scala @@ -5,11 +5,6 @@ final case class AnthropicCreateMessageSettings( // See [[models|https://docs.anthropic.com/claude/docs/models-overview]] for additional details and options. model: String, - // TODO: check is this the right place to provide a system prompt? shouldn't we be obtaining it from SystemMessage-s? - // System prompt. - // A system prompt is a way of providing context and instructions to Claude, such as specifying a particular goal or role. See our [[guide to system prompts|https://docs.anthropic.com/claude/docs/system-prompts]]. - system: Option[String] = None, - // The maximum number of tokens to generate before stopping. // Note that our models may stop before reaching this maximum. This parameter only specifies the absolute maximum number of tokens to generate. // Different models have different maximum values for this parameter. See [[models|https://docs.anthropic.com/claude/docs/models-overview]] for details. 
diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/AnthropicService.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/AnthropicService.scala index 0fe4b2cc..59acaad6 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/AnthropicService.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/AnthropicService.scala @@ -22,6 +22,11 @@ trait AnthropicService extends CloseableService with AnthropicServiceConsts { * * @param messages * A list of messages comprising the conversation so far. + * @param systemPrompt + * System prompt. + * + * A system prompt is a way of providing context and instructions to Claude, + * such as specifying a particular goal or role. See our guide to system prompts. * @param settings * @return * create message response @@ -30,10 +35,10 @@ trait AnthropicService extends CloseableService with AnthropicServiceConsts { */ def createMessage( messages: Seq[Message], + systemPrompt: Option[String], settings: AnthropicCreateMessageSettings = DefaultSettings.CreateMessage ): Future[CreateMessageResponse] - // TODO: /** * Creates a message. @@ -46,6 +51,11 @@ trait AnthropicService extends CloseableService with AnthropicServiceConsts { * * @param messages * A list of messages comprising the conversation so far. + * @param systemPrompt + * System prompt. + * + * A system prompt is a way of providing context and instructions to Claude, + * such as specifying a particular goal or role. See our guide to system prompts. * @param tools * [beta] Definitions of tools that the model may use. 
* @@ -61,6 +71,7 @@ trait AnthropicService extends CloseableService with AnthropicServiceConsts { */ def createToolMessage( messages: Seq[Message], + systemPrompt: Option[String], tools: Seq[ToolSpec], settings: AnthropicCreateMessageSettings = DefaultSettings.CreateMessage ): Future[CreateMessageResponse] @@ -76,6 +87,11 @@ trait AnthropicService extends CloseableService with AnthropicServiceConsts { * * @param messages * A list of messages comprising the conversation so far. + * @param systemPrompt + * System prompt. + * + * A system prompt is a way of providing context and instructions to Claude, + * such as specifying a particular goal or role. See our guide to system prompts. * @param settings * @return * create message response @@ -84,6 +100,7 @@ trait AnthropicService extends CloseableService with AnthropicServiceConsts { */ def createMessageStreamed( messages: Seq[Message], + systemPrompt: Option[String], settings: AnthropicCreateMessageSettings = DefaultSettings.CreateMessage ): Source[ContentBlockDelta, NotUsed] } diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/AnthropicServiceImpl.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/AnthropicServiceImpl.scala index f6d549e5..b4489684 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/AnthropicServiceImpl.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/AnthropicServiceImpl.scala @@ -31,21 +31,23 @@ private[service] trait AnthropicServiceImpl override def createMessage( messages: Seq[Message], + systemPrompt: Option[String], settings: AnthropicCreateMessageSettings ): Future[CreateMessageResponse] = execPOST( EndPoint.messages, - bodyParams = createBodyParamsForMessageCreation(messages, settings, stream = false) + bodyParams = createBodyParamsForMessageCreation(messages, systemPrompt, settings, stream = false) ).map( 
_.asSafe[CreateMessageResponse] ) override def createToolMessage( messages: Seq[Message], + systemPrompt: Option[String], tools: Seq[ToolSpec], settings: AnthropicCreateMessageSettings ): Future[CreateMessageResponse] = { - val coreParams = createBodyParamsForMessageCreation(messages, settings, stream = false) + val coreParams = createBodyParamsForMessageCreation(messages, systemPrompt, settings, stream = false) val extraParams = jsonBodyParams( Param.tools -> Some(tools.map(Json.toJson(_))) ) @@ -73,12 +75,13 @@ private[service] trait AnthropicServiceImpl override def createMessageStreamed( messages: Seq[Message], + systemPrompt: Option[String], settings: AnthropicCreateMessageSettings ): Source[ContentBlockDelta, NotUsed] = execJsonStreamAux( EndPoint.messages, "POST", - bodyParams = createBodyParamsForMessageCreation(messages, settings, stream = true) + bodyParams = createBodyParamsForMessageCreation(messages, systemPrompt, settings, stream = true) ).map { (json: JsValue) => (json \ "error").toOption.map { error => throw new OpenAIScalaClientException(error.toString()) @@ -101,6 +104,7 @@ private[service] trait AnthropicServiceImpl protected def createBodyParamsForMessageCreation( messages: Seq[Message], + systemPrompt: Option[String], settings: AnthropicCreateMessageSettings, stream: Boolean ): Seq[(Param, Option[JsValue])] = { @@ -112,7 +116,7 @@ private[service] trait AnthropicServiceImpl jsonBodyParams( Param.messages -> Some(messageJsons), Param.model -> Some(settings.model), - Param.system -> settings.system, + Param.system -> systemPrompt, Param.max_tokens -> Some(settings.max_tokens), Param.metadata -> { if (settings.metadata.isEmpty) None else Some(settings.metadata) }, Param.stop_sequences -> { diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/OpenAIAnthropicChatCompletionService.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/OpenAIAnthropicChatCompletionService.scala 
index e3134cf3..cab30044 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/OpenAIAnthropicChatCompletionService.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/OpenAIAnthropicChatCompletionService.scala @@ -35,6 +35,7 @@ private[service] class OpenAIAnthropicChatCompletionService( underlying .createMessage( toAnthropic(messages), + toAnthropicSystemPrompt(messages), toAnthropic(settings, messages) ) .map(toOpenAI) @@ -50,6 +51,7 @@ private[service] class OpenAIAnthropicChatCompletionService( underlying .createToolMessage( toAnthropic(messages), + toAnthropicSystemPrompt(messages), toAnthropicToolSpecs(tools), toAnthropic(settings, messages) ) @@ -74,6 +76,7 @@ private[service] class OpenAIAnthropicChatCompletionService( underlying .createMessageStreamed( toAnthropic(messages), + toAnthropicSystemPrompt(messages), toAnthropic(settings, messages) ) .map(toOpenAI) diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/OpenAIAnthropicChatToolCompletionService.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/OpenAIAnthropicChatToolCompletionService.scala index ee8f837e..52921348 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/OpenAIAnthropicChatToolCompletionService.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/OpenAIAnthropicChatToolCompletionService.scala @@ -47,6 +47,7 @@ private[service] class OpenAIAnthropicChatToolCompletionService( val anthropicResponseF: Future[CreateMessageResponse] = underlying .createToolMessage( toAnthropic(messages) ++ responseToolChoice.map(toAnthropicToolUseEncouragement), + toAnthropicSystemPrompt(messages), toAnthropicToolSpecs(tools), toAnthropic(settings, messages) ) diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/package.scala 
b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/package.scala index 717c7ae1..fcbda01e 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/package.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/package.scala @@ -1,30 +1,40 @@ package io.cequence.openaiscala.anthropic.service -//import io.cequence.openaiscala.anthropic.{domain => Anthropic} -import io.cequence.openaiscala.anthropic.domain.Content.ContentBlock.TextBlock +import io.cequence.openaiscala.JsonUtil +import io.cequence.openaiscala.anthropic.domain.Content.ContentBlock.{TextBlock, ToolUseBlock} import io.cequence.openaiscala.anthropic.domain.Content.ContentBlocks import io.cequence.openaiscala.anthropic.domain.Message.UserMessage -import io.cequence.openaiscala.anthropic.domain.response.{ContentBlockDelta, CreateMessageResponse} import io.cequence.openaiscala.anthropic.domain.response.CreateMessageResponse.UsageInfo +import io.cequence.openaiscala.anthropic.domain.response.{ContentBlockDelta, CreateMessageResponse} import io.cequence.openaiscala.anthropic.domain.settings.AnthropicCreateMessageSettings import io.cequence.openaiscala.anthropic.domain.{Content, Message, ToolSpec} import io.cequence.openaiscala.domain.response.{ChatCompletionChoiceChunkInfo, ChatCompletionChoiceInfo, ChatCompletionChunkResponse, ChatCompletionResponse, ChatToolCompletionChoiceInfo, ChatToolCompletionResponse, ChunkMessageSpec, UsageInfo => OpenAIUsageInfo} import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings -import io.cequence.openaiscala.domain.{AssistantMessage, AssistantToolMessage, ChatRole, FunctionSpec, MessageSpec, SystemMessage, BaseMessage => OpenAIBaseMessage, Content => OpenAIContent, ImageURLContent => OpenAIImageContent, TextContent => OpenAITextContent, ToolSpec => OpenAIToolSpec, UserMessage => OpenAIUserMessage, UserSeqMessage => OpenAIUserSeqMessage} +import 
io.cequence.openaiscala.domain.{AssistantMessage, AssistantToolMessage, ChatRole, FunctionCallSpec, FunctionSpec, MessageSpec, SystemMessage, ToolCallSpec, BaseMessage => OpenAIBaseMessage, Content => OpenAIContent, ImageURLContent => OpenAIImageContent, TextContent => OpenAITextContent, ToolSpec => OpenAIToolSpec, UserMessage => OpenAIUserMessage, UserSeqMessage => OpenAIUserSeqMessage} import java.{util => ju} package object impl extends AnthropicServiceConsts { - def toAnthropic(messages: Seq[OpenAIBaseMessage]): Seq[Message] = + def toAnthropic(messages: Seq[OpenAIBaseMessage]): Seq[Message] = { messages.collect { - case OpenAIUserMessage(content, _) => Message.UserMessage(content) + case OpenAIUserMessage(content, _) => + Message.UserMessage(content) case OpenAIUserSeqMessage(contents, _) => Message.UserMessageContent(contents.map(toAnthropic)) // legacy message type case MessageSpec(role, content, _) if role == ChatRole.User => Message.UserMessage(content) } + } + + def toAnthropicSystemPrompt(messages: Seq[OpenAIBaseMessage]): Option[String] = { + val systemMessagesContent = messages.collect { case SystemMessage(content, _) => + content + }.mkString("\n") + + if (systemMessagesContent.isEmpty) None else Some(systemMessagesContent) + } def toAnthropicToolUseEncouragement(toolChoice: String): UserMessage = UserMessage(s"Use the $toolChoice tool in your response.") @@ -64,7 +74,7 @@ package object impl extends AnthropicServiceConsts { AnthropicCreateMessageSettings( model = settings.model, - system = if (systemMessagesContent.isEmpty) None else Some(systemMessagesContent), + //system = if (systemMessagesContent.isEmpty) None else Some(systemMessagesContent), max_tokens = settings.max_tokens.getOrElse(DefaultSettings.CreateMessage.max_tokens), metadata = Map.empty, stop_sequences = settings.stop, @@ -88,7 +98,7 @@ package object impl extends AnthropicServiceConsts { logprobs = None ) ), - usage = Some(toOpenAI(response.usage)) + usage = 
response.usage.map(toOpenAI) ) @@ -105,7 +115,7 @@ package object impl extends AnthropicServiceConsts { finish_reason = createMessageResponse.stop_reason ) ), - usage = Some(toOpenAI(createMessageResponse.usage)) + usage = createMessageResponse.usage.map(toOpenAI) ) } @@ -139,9 +149,35 @@ package object impl extends AnthropicServiceConsts { } def toOpenAIAssistantToolMessage(content: ContentBlocks): AssistantToolMessage = { - ??? + println(content.toString) + val textMessage = singleTextMessage(content) + AssistantToolMessage( + content = Some(textMessage), + name = None, + tool_calls = content.blocks.collect { + case toolContent: ToolUseBlock => toOpenAI(toolContent) + } + ) } + def toOpenAI(toolUseBlock: ToolUseBlock): (String, ToolCallSpec) = { + val inputJson = JsonUtil.StringAnyMapFormat.writes(toolUseBlock.input).toString() + toolUseBlock.id -> FunctionCallSpec(toolUseBlock.id, inputJson) + } + +// def toOpenAI(toolContent: ToolUseBlock): (String, ToolCallSpec) = { +// toolContent match { +// case ToolUseBlockSuccess(toolUseId, content) => +// toolUseId -> FunctionCallSpec(toolUseId, content) +// case ToolUseBlockFailure(toolUseId, content) => +// // TODO: how to map fail case? +// ??? 
+// } +// } + + private def singleTextMessage(content: ContentBlocks): String = + concatenateMessages(content.blocks.collect { case TextBlock(text) => text }) + private def concatenateMessages(messageContent: Seq[String]): String = messageContent.mkString("\n") diff --git a/anthropic-client/src/test/scala/io/cequence/openaiscala/anthropic/JsonFormatsSpec.scala b/anthropic-client/src/test/scala/io/cequence/openaiscala/anthropic/JsonFormatsSpec.scala index b7b4c0af..466c5b39 100644 --- a/anthropic-client/src/test/scala/io/cequence/openaiscala/anthropic/JsonFormatsSpec.scala +++ b/anthropic-client/src/test/scala/io/cequence/openaiscala/anthropic/JsonFormatsSpec.scala @@ -2,12 +2,23 @@ package io.cequence.openaiscala.anthropic import io.cequence.openaiscala.anthropic.JsonFormatsSpec.JsonPrintMode import io.cequence.openaiscala.anthropic.JsonFormatsSpec.JsonPrintMode.{Compact, Pretty} -import io.cequence.openaiscala.anthropic.domain.Content.ContentBlock.{ImageBlock, TextBlock} -import io.cequence.openaiscala.anthropic.domain.{Message, ToolSpec} -import io.cequence.openaiscala.anthropic.domain.Message.{AssistantMessage, AssistantMessageContent, UserMessage, UserMessageContent} +import io.cequence.openaiscala.anthropic.domain.Content.ContentBlock.{ + ImageBlock, + TextBlock, + ToolUseBlock +} +import io.cequence.openaiscala.anthropic.domain.Content.{ContentBlock, ContentBlocks} +import io.cequence.openaiscala.anthropic.domain.{ChatRole, Message, ToolSpec} +import io.cequence.openaiscala.anthropic.domain.Message.{ + AssistantMessage, + AssistantMessageContent, + UserMessage, + UserMessageContent +} +import io.cequence.openaiscala.anthropic.domain.response.CreateMessageResponse import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike -import play.api.libs.json.{Format, Json, Writes} +import play.api.libs.json.{Format, Json, Reads, Writes} object JsonFormatsSpec { sealed trait JsonPrintMode @@ -48,6 +59,26 @@ class JsonFormatsSpec extends 
AnyWordSpecLike with Matchers with JsonFormats { testCodec[Message](assistantMessage, json) } + "deserialize a tool_use content block" in { + val json = + """ { + | "type": "tool_use", + | "id": "toolu_01A09q90qw90lq917835lq9", + | "name": "get_weather", + | "input": {"location": "San Francisco, CA", "unit": "celsius"} + | }""".stripMargin + + val toolUseBlock = ToolUseBlock( + id = "toolu_01A09q90qw90lq917835lq9", + name = "get_weather", + input = Map( + "location" -> "\"San Francisco, CA\"", + "unit" -> "\"celsius\"" + ) + ) + testDeserialization[ContentBlock](json, toolUseBlock) + } + // TODO: add deserialization tests for: // 1. ToolUseBlock - success - flat content // 2. ToolUseBlock - success - TextBlock content @@ -75,14 +106,14 @@ class JsonFormatsSpec extends AnyWordSpecLike with Matchers with JsonFormats { name = "get_stock_price", description = Some("Get the current stock price for a given ticker symbol."), inputSchema = Map( - "type" -> "object", - "properties" -> Map( - "ticker" -> Map( - "type" -> "string", - "description" -> "The stock ticker symbol, e.g. AAPL for Apple Inc." - ) - ), - "required" -> Seq("ticker") + "type" -> "object", + "properties" -> Map( + "ticker" -> Map( + "type" -> "string", + "description" -> "The stock ticker symbol, e.g. AAPL for Apple Inc." + ) + ), + "required" -> Seq("ticker") ) ) @@ -108,6 +139,55 @@ class JsonFormatsSpec extends AnyWordSpecLike with Matchers with JsonFormats { testCodec[Message](userMessage, expectedImageContentJson, Pretty) } + val createToolMessageResponseJson = + """{ + | "id": "msg_01Aq9w938a90dw8q", + | "model": "claude-3-opus-20240229", + | "stop_reason": "tool_use", + | "role": "assistant", + | "content": [ + | { + | "type": "text", + | "text": "I need to use the get_weather, and the user wants SF, which is likely San Francisco, CA." 
+ | }, + | { + | "type": "tool_use", + | "id": "toolu_01A09q90qw90lq917835lq9", + | "name": "get_weather", + | "input": {"location": "San Francisco, CA", "unit": "celsius"} + | } + | ] + |}""".stripMargin + + "deserialize tool use content block" in { + val toolUseResponse = CreateMessageResponse( + id = "msg_01Aq9w938a90dw8q", + role = ChatRole.Assistant, + content = ContentBlocks( + Seq( + // TODO: check, shouldn't this get to description of a tool use block? + TextBlock( + "I need to use the get_weather, and the user wants SF, which is likely San Francisco, CA." + ), + ToolUseBlock( + id = "toolu_01A09q90qw90lq917835lq9", + name = "get_weather", + input = Map( + "location" -> "\"San Francisco, CA\"", + "unit" -> "\"celsius\"" + ) + ) + ) + ), + model = "claude-3-opus-20240229", + stop_reason = Some("tool_use"), + stop_sequence = None, + usage = None + ) + testDeserialization(createToolMessageResponseJson, toolUseResponse) + + } + } private def testCodec[A]( @@ -117,14 +197,8 @@ class JsonFormatsSpec extends AnyWordSpecLike with Matchers with JsonFormats { )( implicit format: Format[A] ): Unit = { - val jsValue = Json.toJson(value) - val serialized = printMode match { - case Compact => jsValue.toString() - case Pretty => Json.prettyPrint(jsValue) - } - serialized shouldBe json - - Json.parse(json).as[A] shouldBe value + testSerialization(value, json, printMode) + testDeserialization(json, value) } private def testSerialization[A]( @@ -142,4 +216,13 @@ class JsonFormatsSpec extends AnyWordSpecLike with Matchers with JsonFormats { serialized shouldBe json } + private def testDeserialization[A]( + json: String, + value: A + )( + implicit format: Reads[A] + ): Unit = { + Json.parse(json).as[A] shouldBe value + } + } diff --git a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicCreateMessage.scala b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicCreateMessage.scala index f4d66067..b6e616bd 100644 --- 
a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicCreateMessage.scala +++ b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicCreateMessage.scala @@ -22,6 +22,7 @@ object AnthropicCreateMessage extends ExampleBase[AnthropicService] { service .createMessage( messages, + systemPrompt = None, settings = AnthropicCreateMessageSettings( model = NonOpenAIModelId.claude_3_haiku_20240307, max_tokens = 4096 diff --git a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicCreateMessageStreamed.scala b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicCreateMessageStreamed.scala index df1f4f7f..3c4219a2 100644 --- a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicCreateMessageStreamed.scala +++ b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicCreateMessageStreamed.scala @@ -21,6 +21,7 @@ object AnthropicCreateMessageStreamed extends ExampleBase[AnthropicService] { service .createMessageStreamed( messages, + systemPrompt = None, settings = AnthropicCreateMessageSettings( model = NonOpenAIModelId.claude_3_haiku_20240307, max_tokens = 4096 diff --git a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicCreateMessageWithImage.scala b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicCreateMessageWithImage.scala index 7e293af8..51c63a95 100644 --- a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicCreateMessageWithImage.scala +++ b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicCreateMessageWithImage.scala @@ -42,6 +42,7 @@ object AnthropicCreateMessageWithImage extends ExampleBase[AnthropicService] { service .createMessage( messages, + systemPrompt = None, settings = AnthropicCreateMessageSettings( model = NonOpenAIModelId.claude_3_opus_20240229, 
max_tokens = 4096 From f95c3b3ba44d4755c6739b7feb0f743812d44c9c Mon Sep 17 00:00:00 2001 From: Branislav Burdiliak Date: Sun, 12 May 2024 15:40:40 +0200 Subject: [PATCH 4/4] Reformat code --- .../openaiscala/anthropic/JsonFormats.scala | 26 ++++++++-- .../anthropic/domain/Message.scala | 3 +- .../anthropic/domain/ToolSpec.scala | 7 ++- .../anthropic/service/AnthropicService.scala | 32 +++++++------ .../service/AnthropicWSRequestHelper.scala | 27 +++++++---- .../service/impl/AnthropicServiceImpl.scala | 9 ++-- ...OpenAIAnthropicChatCompletionService.scala | 11 +++-- ...AIAnthropicChatToolCompletionService.scala | 25 ++++++---- .../anthropic/service/impl/package.scala | 45 ++++++++++++++---- .../service/ws/WSRequestHelper.scala | 19 +++++--- .../AnthropicCreateChatToolCompletion.scala | 47 ++++++++++++------- 11 files changed, 171 insertions(+), 80 deletions(-) diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/JsonFormats.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/JsonFormats.scala index b12b32ad..d53c42c2 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/JsonFormats.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/JsonFormats.scala @@ -1,11 +1,29 @@ package io.cequence.openaiscala.anthropic import io.cequence.openaiscala.JsonUtil -import io.cequence.openaiscala.anthropic.domain.Content.ContentBlock.{ImageBlock, TextBlock, ToolUseBlock} -import io.cequence.openaiscala.anthropic.domain.Content.{ContentBlock, ContentBlocks, SingleString} -import io.cequence.openaiscala.anthropic.domain.Message.{AssistantMessage, AssistantMessageContent, UserMessage, UserMessageContent} +import io.cequence.openaiscala.anthropic.domain.Content.ContentBlock.{ + ImageBlock, + TextBlock, + ToolUseBlock +} +import io.cequence.openaiscala.anthropic.domain.Content.{ + ContentBlock, + ContentBlocks, + SingleString +} +import io.cequence.openaiscala.anthropic.domain.Message.{ + 
AssistantMessage, + AssistantMessageContent, + UserMessage, + UserMessageContent +} import io.cequence.openaiscala.anthropic.domain.response.CreateMessageResponse.UsageInfo -import io.cequence.openaiscala.anthropic.domain.response.{ContentBlockDelta, CreateMessageChunkResponse, CreateMessageResponse, DeltaText} +import io.cequence.openaiscala.anthropic.domain.response.{ + ContentBlockDelta, + CreateMessageChunkResponse, + CreateMessageResponse, + DeltaText +} import io.cequence.openaiscala.anthropic.domain.{ChatRole, Content, Message, ToolSpec} import play.api.libs.functional.syntax._ import play.api.libs.json._ diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/Message.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/Message.scala index 778f75b5..e104afaa 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/Message.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/Message.scala @@ -13,7 +13,8 @@ sealed abstract class Message private ( object Message { - case class UserMessage(contentString: String) extends Message(ChatRole.User, SingleString(contentString)) + case class UserMessage(contentString: String) + extends Message(ChatRole.User, SingleString(contentString)) case class UserMessageContent(contentBlocks: Seq[ContentBlock]) extends Message(ChatRole.User, ContentBlocks(contentBlocks)) case class AssistantMessage(contentString: String) diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/ToolSpec.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/ToolSpec.scala index 4ea696b1..89ae110e 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/ToolSpec.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/domain/ToolSpec.scala @@ -1,4 +1,7 @@ package io.cequence.openaiscala.anthropic.domain - -final case class 
ToolSpec(name: String, description: Option[String], inputSchema: Map[String, Any]) +final case class ToolSpec( + name: String, + description: Option[String], + inputSchema: Map[String, Any] +) diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/AnthropicService.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/AnthropicService.scala index 59acaad6..0b2c3da9 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/AnthropicService.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/AnthropicService.scala @@ -3,7 +3,10 @@ package io.cequence.openaiscala.anthropic.service import akka.NotUsed import akka.stream.scaladsl.Source import io.cequence.openaiscala.anthropic.domain.{Message, ToolSpec} -import io.cequence.openaiscala.anthropic.domain.response.{ContentBlockDelta, CreateMessageResponse} +import io.cequence.openaiscala.anthropic.domain.response.{ + ContentBlockDelta, + CreateMessageResponse +} import io.cequence.openaiscala.anthropic.domain.settings.AnthropicCreateMessageSettings import io.cequence.openaiscala.service.CloseableService @@ -23,10 +26,10 @@ trait AnthropicService extends CloseableService with AnthropicServiceConsts { * @param messages * A list of messages comprising the conversation so far. * @param systemPrompt - * System prompt. + * System prompt. * - * A system prompt is a way of providing context and instructions to Claude, - * such as specifying a particular goal or role. See our guide to system prompts. + * A system prompt is a way of providing context and instructions to Claude, such as + * specifying a particular goal or role. See our guide to system prompts. * @param settings * @return * create message response @@ -52,16 +55,17 @@ trait AnthropicService extends CloseableService with AnthropicServiceConsts { * @param messages * A list of messages comprising the conversation so far. 
* @param systemPrompt - * System prompt. + * System prompt. * - * A system prompt is a way of providing context and instructions to Claude, - * such as specifying a particular goal or role. See our guide to system prompts. + * A system prompt is a way of providing context and instructions to Claude, such as + * specifying a particular goal or role. See our guide to system prompts. * @param tools - * [beta] Definitions of tools that the model may use. + * [beta] Definitions of tools that the model may use. * - * If you include tools in your API request, the model may return tool_use content blocks that represent the model's - * use of those tools. You can then run those tools using the tool input generated by the model and then optionally - * return results back to the model using tool_result content blocks. + * If you include tools in your API request, the model may return tool_use content blocks + * that represent the model's use of those tools. You can then run those tools using the tool + * input generated by the model and then optionally return results back to the model using + * tool_result content blocks. * * @param settings * @return @@ -88,10 +92,10 @@ trait AnthropicService extends CloseableService with AnthropicServiceConsts { * @param messages * A list of messages comprising the conversation so far. * @param systemPrompt - * System prompt. + * System prompt. * - * A system prompt is a way of providing context and instructions to Claude, - * such as specifying a particular goal or role. See our guide to system prompts. + * A system prompt is a way of providing context and instructions to Claude, such as + * specifying a particular goal or role. See our guide to system prompts. 
* @param settings * @return * create message response diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/AnthropicWSRequestHelper.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/AnthropicWSRequestHelper.scala index 17c245c7..4c5746a3 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/AnthropicWSRequestHelper.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/AnthropicWSRequestHelper.scala @@ -1,7 +1,14 @@ package io.cequence.openaiscala.anthropic.service import io.cequence.openaiscala.anthropic.service.AnthropicWSRequestHelper.AnthropicBetaHeader -import io.cequence.openaiscala.{OpenAIScalaClientException, OpenAIScalaEngineOverloadedException, OpenAIScalaRateLimitException, OpenAIScalaServerErrorException, OpenAIScalaTokenCountExceededException, OpenAIScalaUnauthorizedException} +import io.cequence.openaiscala.{ + OpenAIScalaClientException, + OpenAIScalaEngineOverloadedException, + OpenAIScalaRateLimitException, + OpenAIScalaServerErrorException, + OpenAIScalaTokenCountExceededException, + OpenAIScalaUnauthorizedException +} import io.cequence.openaiscala.service.ws.WSRequestExtHelper import play.api.libs.json.{JsObject, JsValue} @@ -15,9 +22,9 @@ trait AnthropicWSRequestHelper extends WSRequestExtHelper { // TODO: introduce Anthropic error model override protected def handleErrorCodes( - httpCode: Int, - message: String - ): Nothing = { + httpCode: Int, + message: String + ): Nothing = { val errorMessage = s"Code ${httpCode} : ${message}" httpCode match { case 401 => throw new OpenAIScalaUnauthorizedException(errorMessage) @@ -27,7 +34,7 @@ trait AnthropicWSRequestHelper extends WSRequestExtHelper { case 400 => if ( message.contains("Please reduce your prompt; or completion length") || - message.contains("Please reduce the length of the messages") + message.contains("Please reduce the length of the messages") ) throw new 
OpenAIScalaTokenCountExceededException(errorMessage) else @@ -38,11 +45,11 @@ trait AnthropicWSRequestHelper extends WSRequestExtHelper { } protected def execBetaPOSTWithStatus( - endPoint: PEP, - endPointParam: Option[String] = None, - params: Seq[(PT, Option[Any])] = Nil, - bodyParams: Seq[(PT, Option[JsValue])] = Nil, - ): Future[JsValue] = { + endPoint: PEP, + endPointParam: Option[String] = None, + params: Seq[(PT, Option[Any])] = Nil, + bodyParams: Seq[(PT, Option[JsValue])] = Nil + ): Future[JsValue] = { execPOSTWithStatusAndHeaders( endPoint, endPointParam, diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/AnthropicServiceImpl.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/AnthropicServiceImpl.scala index b4489684..48df396b 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/AnthropicServiceImpl.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/AnthropicServiceImpl.scala @@ -36,7 +36,8 @@ private[service] trait AnthropicServiceImpl ): Future[CreateMessageResponse] = execPOST( EndPoint.messages, - bodyParams = createBodyParamsForMessageCreation(messages, systemPrompt, settings, stream = false) + bodyParams = + createBodyParamsForMessageCreation(messages, systemPrompt, settings, stream = false) ).map( _.asSafe[CreateMessageResponse] ) @@ -47,7 +48,8 @@ private[service] trait AnthropicServiceImpl tools: Seq[ToolSpec], settings: AnthropicCreateMessageSettings ): Future[CreateMessageResponse] = { - val coreParams = createBodyParamsForMessageCreation(messages, systemPrompt, settings, stream = false) + val coreParams = + createBodyParamsForMessageCreation(messages, systemPrompt, settings, stream = false) val extraParams = jsonBodyParams( Param.tools -> Some(tools.map(Json.toJson(_))) ) @@ -81,7 +83,8 @@ private[service] trait AnthropicServiceImpl execJsonStreamAux( EndPoint.messages, "POST", - bodyParams 
= createBodyParamsForMessageCreation(messages, systemPrompt, settings, stream = true) + bodyParams = + createBodyParamsForMessageCreation(messages, systemPrompt, settings, stream = true) ).map { (json: JsValue) => (json \ "error").toOption.map { error => throw new OpenAIScalaClientException(error.toString()) diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/OpenAIAnthropicChatCompletionService.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/OpenAIAnthropicChatCompletionService.scala index cab30044..4cc8ee87 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/OpenAIAnthropicChatCompletionService.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/OpenAIAnthropicChatCompletionService.scala @@ -4,9 +4,15 @@ import akka.NotUsed import akka.stream.scaladsl.Source import io.cequence.openaiscala.anthropic.service.AnthropicService import io.cequence.openaiscala.domain.{BaseMessage, ToolSpec} -import io.cequence.openaiscala.domain.response.{ChatCompletionChunkResponse, ChatCompletionResponse} +import io.cequence.openaiscala.domain.response.{ + ChatCompletionChunkResponse, + ChatCompletionResponse +} import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings -import io.cequence.openaiscala.service.{OpenAIChatCompletionService, OpenAIChatCompletionStreamedServiceExtra} +import io.cequence.openaiscala.service.{ + OpenAIChatCompletionService, + OpenAIChatCompletionStreamedServiceExtra +} import scala.concurrent.{ExecutionContext, Future} @@ -41,7 +47,6 @@ private[service] class OpenAIAnthropicChatCompletionService( .map(toOpenAI) } - // TODO: extract another trait extending OpenAIChatCompletionService with createChatToolCompletion def createChatToolCompletion( messages: Seq[BaseMessage], diff --git 
a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/OpenAIAnthropicChatToolCompletionService.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/OpenAIAnthropicChatToolCompletionService.scala index 52921348..803d8845 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/OpenAIAnthropicChatToolCompletionService.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/OpenAIAnthropicChatToolCompletionService.scala @@ -6,10 +6,18 @@ import io.cequence.openaiscala.anthropic.domain.Message import io.cequence.openaiscala.anthropic.domain.Message.UserMessage import io.cequence.openaiscala.anthropic.domain.response.CreateMessageResponse import io.cequence.openaiscala.anthropic.service.AnthropicService -import io.cequence.openaiscala.domain.response.{ChatCompletionChunkResponse, ChatCompletionResponse, ChatToolCompletionResponse} +import io.cequence.openaiscala.domain.response.{ + ChatCompletionChunkResponse, + ChatCompletionResponse, + ChatToolCompletionResponse +} import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings import io.cequence.openaiscala.domain.{BaseMessage, SystemMessage, ToolSpec} -import io.cequence.openaiscala.service.{OpenAIChatCompletionService, OpenAIChatCompletionStreamedServiceExtra, OpenAIChatToolCompletionService} +import io.cequence.openaiscala.service.{ + OpenAIChatCompletionService, + OpenAIChatCompletionStreamedServiceExtra, + OpenAIChatToolCompletionService +} import scala.concurrent.{ExecutionContext, Future} @@ -44,13 +52,12 @@ private[service] class OpenAIAnthropicChatToolCompletionService( responseToolChoice: Option[String], settings: CreateChatCompletionSettings ): Future[ChatToolCompletionResponse] = { - val anthropicResponseF: Future[CreateMessageResponse] = underlying - .createToolMessage( - toAnthropic(messages) ++ responseToolChoice.map(toAnthropicToolUseEncouragement), - 
toAnthropicSystemPrompt(messages), - toAnthropicToolSpecs(tools), - toAnthropic(settings, messages) - ) + val anthropicResponseF: Future[CreateMessageResponse] = underlying.createToolMessage( + toAnthropic(messages) ++ responseToolChoice.map(toAnthropicToolUseEncouragement), + toAnthropicSystemPrompt(messages), + toAnthropicToolSpecs(tools), + toAnthropic(settings, messages) + ) anthropicResponseF.map(toOpenAIChatToolCompletionResponse) } diff --git a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/package.scala b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/package.scala index fcbda01e..f542d08d 100644 --- a/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/package.scala +++ b/anthropic-client/src/main/scala/io/cequence/openaiscala/anthropic/service/impl/package.scala @@ -5,12 +5,40 @@ import io.cequence.openaiscala.anthropic.domain.Content.ContentBlock.{TextBlock, import io.cequence.openaiscala.anthropic.domain.Content.ContentBlocks import io.cequence.openaiscala.anthropic.domain.Message.UserMessage import io.cequence.openaiscala.anthropic.domain.response.CreateMessageResponse.UsageInfo -import io.cequence.openaiscala.anthropic.domain.response.{ContentBlockDelta, CreateMessageResponse} +import io.cequence.openaiscala.anthropic.domain.response.{ + ContentBlockDelta, + CreateMessageResponse +} import io.cequence.openaiscala.anthropic.domain.settings.AnthropicCreateMessageSettings import io.cequence.openaiscala.anthropic.domain.{Content, Message, ToolSpec} -import io.cequence.openaiscala.domain.response.{ChatCompletionChoiceChunkInfo, ChatCompletionChoiceInfo, ChatCompletionChunkResponse, ChatCompletionResponse, ChatToolCompletionChoiceInfo, ChatToolCompletionResponse, ChunkMessageSpec, UsageInfo => OpenAIUsageInfo} +import io.cequence.openaiscala.domain.response.{ + ChatCompletionChoiceChunkInfo, + ChatCompletionChoiceInfo, + ChatCompletionChunkResponse, + 
ChatCompletionResponse, + ChatToolCompletionChoiceInfo, + ChatToolCompletionResponse, + ChunkMessageSpec, + UsageInfo => OpenAIUsageInfo +} import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings -import io.cequence.openaiscala.domain.{AssistantMessage, AssistantToolMessage, ChatRole, FunctionCallSpec, FunctionSpec, MessageSpec, SystemMessage, ToolCallSpec, BaseMessage => OpenAIBaseMessage, Content => OpenAIContent, ImageURLContent => OpenAIImageContent, TextContent => OpenAITextContent, ToolSpec => OpenAIToolSpec, UserMessage => OpenAIUserMessage, UserSeqMessage => OpenAIUserSeqMessage} +import io.cequence.openaiscala.domain.{ + AssistantMessage, + AssistantToolMessage, + ChatRole, + FunctionCallSpec, + FunctionSpec, + MessageSpec, + SystemMessage, + ToolCallSpec, + BaseMessage => OpenAIBaseMessage, + Content => OpenAIContent, + ImageURLContent => OpenAIImageContent, + TextContent => OpenAITextContent, + ToolSpec => OpenAIToolSpec, + UserMessage => OpenAIUserMessage, + UserSeqMessage => OpenAIUserSeqMessage +} import java.{util => ju} @@ -40,8 +68,8 @@ package object impl extends AnthropicServiceConsts { UserMessage(s"Use the $toolChoice tool in your response.") def toAnthropicToolSpecs(toolSpecs: Seq[OpenAIToolSpec]): Seq[ToolSpec] = { - toolSpecs.collect { - case FunctionSpec(name, description, parameters) => ToolSpec(name, description, parameters) + toolSpecs.collect { case FunctionSpec(name, description, parameters) => + ToolSpec(name, description, parameters) } } @@ -74,7 +102,7 @@ package object impl extends AnthropicServiceConsts { AnthropicCreateMessageSettings( model = settings.model, - //system = if (systemMessagesContent.isEmpty) None else Some(systemMessagesContent), + // system = if (systemMessagesContent.isEmpty) None else Some(systemMessagesContent), max_tokens = settings.max_tokens.getOrElse(DefaultSettings.CreateMessage.max_tokens), metadata = Map.empty, stop_sequences = settings.stop, @@ -101,7 +129,6 @@ package object impl 
extends AnthropicServiceConsts { usage = response.usage.map(toOpenAI) ) - def toOpenAIChatToolCompletionResponse(createMessageResponse: CreateMessageResponse) = { ChatToolCompletionResponse( id = createMessageResponse.id, @@ -154,8 +181,8 @@ package object impl extends AnthropicServiceConsts { AssistantToolMessage( content = Some(textMessage), name = None, - tool_calls = content.blocks.collect { - case toolContent: ToolUseBlock => toOpenAI(toolContent) + tool_calls = content.blocks.collect { case toolContent: ToolUseBlock => + toOpenAI(toolContent) } ) } diff --git a/openai-client/src/main/scala/io/cequence/openaiscala/service/ws/WSRequestHelper.scala b/openai-client/src/main/scala/io/cequence/openaiscala/service/ws/WSRequestHelper.scala index 8b1d5486..ba3042d4 100644 --- a/openai-client/src/main/scala/io/cequence/openaiscala/service/ws/WSRequestHelper.scala +++ b/openai-client/src/main/scala/io/cequence/openaiscala/service/ws/WSRequestHelper.scala @@ -341,14 +341,19 @@ trait WSRequestHelper extends HasWSClient { } protected def execPOSTWithStatusAndHeaders( - endPoint: PEP, - endPointParam: Option[String] = None, - params: Seq[(PT, Option[Any])] = Nil, - bodyParams: Seq[(PT, Option[JsValue])] = Nil, - acceptableStatusCodes: Seq[Int] = defaultAcceptableStatusCodes, - headers: Seq[(String, String)] = Nil + endPoint: PEP, + endPointParam: Option[String] = None, + params: Seq[(PT, Option[Any])] = Nil, + bodyParams: Seq[(PT, Option[JsValue])] = Nil, + acceptableStatusCodes: Seq[Int] = defaultAcceptableStatusCodes, + headers: Seq[(String, String)] = Nil ): Future[RichJsResponse] = { - val request = getWSRequestOptionalWithHeaders(Some(endPoint), endPointParam, toStringParams(params), headers) + val request = getWSRequestOptionalWithHeaders( + Some(endPoint), + endPointParam, + toStringParams(params), + headers + ) val bodyParamsX = bodyParams.collect { case (fieldName, Some(jsValue)) => (fieldName.toString, jsValue) } diff --git 
a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicCreateChatToolCompletion.scala b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicCreateChatToolCompletion.scala index e543f322..daf5a4f3 100644 --- a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicCreateChatToolCompletion.scala +++ b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicCreateChatToolCompletion.scala @@ -1,10 +1,20 @@ package io.cequence.openaiscala.examples.nonopenai import io.cequence.openaiscala.anthropic.service.AnthropicServiceFactory -import io.cequence.openaiscala.domain.{FunctionSpec, NonOpenAIModelId, SystemMessage, UserMessage} +import io.cequence.openaiscala.domain.{ + FunctionSpec, + NonOpenAIModelId, + SystemMessage, + UserMessage +} import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings import io.cequence.openaiscala.examples.ExampleBase -import io.cequence.openaiscala.service.{OpenAIChatCompletionService, OpenAIChatToolCompletionService, OpenAICoreService, OpenAIService} +import io.cequence.openaiscala.service.{ + OpenAIChatCompletionService, + OpenAIChatToolCompletionService, + OpenAICoreService, + OpenAIService +} import scala.concurrent.Future @@ -19,23 +29,24 @@ object AnthropicCreateChatToolCompletion extends ExampleBase[OpenAIChatToolCompl ) override protected def run: Future[_] = - service - .createChatToolCompletion( - messages = messages, - settings = CreateChatCompletionSettings(NonOpenAIModelId.claude_3_haiku_20240307), - tools = Seq(FunctionSpec( + service.createChatToolCompletion( + messages = messages, + settings = CreateChatCompletionSettings(NonOpenAIModelId.claude_3_haiku_20240307), + tools = Seq( + FunctionSpec( name = "get_stock_price", description = Some("Get the current stock price of a given company"), - parameters = Map( - "type" -> "object", - "properties" -> Map( - "company" -> Map( - "type" -> "string", - 
"description" -> "The company name, e.g. Apple Inc." - ) - ), - "required" -> Seq("company") - ) - )) + parameters = Map( + "type" -> "object", + "properties" -> Map( + "company" -> Map( + "type" -> "string", + "description" -> "The company name, e.g. Apple Inc." + ) + ), + "required" -> Seq("company") + ) + ) ) + ) }