
Support tool messages in Anthropic #62

Draft · wants to merge 4 commits into master
@@ -1,7 +1,11 @@
package io.cequence.openaiscala.anthropic

import io.cequence.openaiscala.JsonUtil
import io.cequence.openaiscala.anthropic.domain.Content.ContentBlock.{ImageBlock, TextBlock}
import io.cequence.openaiscala.anthropic.domain.Content.ContentBlock.{
ImageBlock,
TextBlock,
ToolUseBlock
}
import io.cequence.openaiscala.anthropic.domain.Content.{
ContentBlock,
ContentBlocks,
@@ -20,7 +24,7 @@ import io.cequence.openaiscala.anthropic.domain.response.{
CreateMessageResponse,
DeltaText
}
import io.cequence.openaiscala.anthropic.domain.{ChatRole, Content, Message}
import io.cequence.openaiscala.anthropic.domain.{ChatRole, Content, Message, ToolSpec}
import play.api.libs.functional.syntax._
import play.api.libs.json._

@@ -68,6 +72,7 @@ trait JsonFormats {
}

implicit val contentBlockReads: Reads[ContentBlock] = new Reads[ContentBlock] {
implicit val stringAnyMapFormat: Format[Map[String, Any]] = JsonUtil.StringAnyMapFormat
def reads(json: JsValue): JsResult[ContentBlock] = {
(json \ "type").validate[String].flatMap {
case "text" => (json \ "text").validate[String].map(TextBlock.apply)
@@ -78,6 +83,12 @@
mediaType <- (source \ "media_type").validate[String]
data <- (source \ "data").validate[String]
} yield ImageBlock(`type`, mediaType, data)
case "tool_use" =>
for {
id <- (json \ "id").validate[String]
name <- (json \ "name").validate[String]
input <- (json \ "input").validate[Map[String, Any]]
} yield ToolUseBlock(id, name, input)
case _ => JsError("Unsupported or invalid content block")
}
}
@@ -127,12 +138,18 @@ trait JsonFormats {
(__ \ "model").read[String] and
(__ \ "stop_reason").readNullable[String] and
(__ \ "stop_sequence").readNullable[String] and
(__ \ "usage").read[UsageInfo]
(__ \ "usage").readNullable[UsageInfo]
)(CreateMessageResponse.apply _)

implicit val createMessageChunkResponseReads: Reads[CreateMessageChunkResponse] =
Json.reads[CreateMessageChunkResponse]

implicit val deltaTextReads: Reads[DeltaText] = Json.reads[DeltaText]
implicit val contentBlockDeltaReads: Reads[ContentBlockDelta] = Json.reads[ContentBlockDelta]

implicit lazy val toolSpecFormat: OFormat[ToolSpec] = {
implicit val stringAnyMapFormat: Format[Map[String, Any]] = JsonUtil.StringAnyMapFormat
implicit val config = JsonConfiguration(JsonNaming.SnakeCase)
Json.format[ToolSpec]
}
}
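For orientation only (not part of this diff): with the contentBlockReads above in implicit scope (i.e. the JsonFormats trait mixed in), a tool_use block from the Messages API parses into the new ToolUseBlock. The id and tool name below are made up for illustration.

import play.api.libs.json.Json

// Hypothetical tool_use block as returned by Anthropic's Messages API.
val toolUseJson = Json.parse(
  """{
    |  "type": "tool_use",
    |  "id": "toolu_example_id",
    |  "name": "get_weather",
    |  "input": { "location": "San Francisco" }
    |}""".stripMargin
)

// The "tool_use" case above yields
// JsSuccess(ToolUseBlock("toolu_example_id", "get_weather", Map("location" -> "San Francisco"))).
val parsed = toolUseJson.validate[ContentBlock]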
@@ -16,5 +16,23 @@ object Content {
mediaType: String,
data: String
) extends ContentBlock

case class ToolUseBlock(
id: String,
name: String,
input: Map[String, Any] // TODO: allow here only Text content blocks
) extends ContentBlock

// sealed trait ToolUseBlock extends ContentBlock
// // TODO: allow only for responses to createChatToolCompletion
// case class ToolUseBlockSuccess(
// toolUseId: String,
// content: String // TODO: allow here only Text content blocks
// ) extends ToolUseBlock
//
// case class ToolUseBlockFailure(
// toolUseId: String,
// content: String // TODO: allow here only Text content blocks
// ) extends ToolUseBlock
}
}
@@ -0,0 +1,7 @@
package io.cequence.openaiscala.anthropic.domain

final case class ToolSpec(
name: String,
description: Option[String],
inputSchema: Map[String, Any]
)
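
For illustration (not part of this diff), a ToolSpec for a hypothetical get_weather tool can be built with a JSON-Schema-style map. Note that toolSpecFormat in JsonFormats serializes the inputSchema field as input_schema because of the SnakeCase naming configuration.

// Hypothetical tool definition; the map mirrors Anthropic's JSON Schema input format.
// How nested maps/sequences are written out depends on JsonUtil.StringAnyMapFormat.
val weatherTool = ToolSpec(
  name = "get_weather",
  description = Some("Get the current weather for a given location."),
  inputSchema = Map(
    "type" -> "object",
    "properties" -> Map(
      "location" -> Map(
        "type" -> "string",
        "description" -> "City and state, e.g. San Francisco, CA"
      )
    ),
    "required" -> Seq("location")
  )
)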
@@ -12,7 +12,8 @@ final case class CreateMessageResponse(
model: String,
stop_reason: Option[String],
stop_sequence: Option[String],
usage: UsageInfo
// TODO: it is required according to the API documentation, but it is not present in the response for tool calls
usage: Option[UsageInfo]
)

object CreateMessageResponse {
@@ -5,10 +5,6 @@ final case class AnthropicCreateMessageSettings(
// See [[models|https://docs.anthropic.com/claude/docs/models-overview]] for additional details and options.
model: String,

// System prompt.
// A system prompt is a way of providing context and instructions to Claude, such as specifying a particular goal or role. See our [[guide to system prompts|https://docs.anthropic.com/claude/docs/system-prompts]].
system: Option[String] = None,

// The maximum number of tokens to generate before stopping.
// Note that our models may stop before reaching this maximum. This parameter only specifies the absolute maximum number of tokens to generate.
// Different models have different maximum values for this parameter. See [[models|https://docs.anthropic.com/claude/docs/models-overview]] for details.
@@ -2,7 +2,7 @@ package io.cequence.openaiscala.anthropic.service

import akka.NotUsed
import akka.stream.scaladsl.Source
import io.cequence.openaiscala.anthropic.domain.Message
import io.cequence.openaiscala.anthropic.domain.{Message, ToolSpec}
import io.cequence.openaiscala.anthropic.domain.response.{
ContentBlockDelta,
CreateMessageResponse
@@ -25,6 +25,11 @@ trait AnthropicService extends CloseableService with AnthropicServiceConsts {
*
* @param messages
* A list of messages comprising the conversation so far.
* @param systemPrompt
* System prompt.
*
* A system prompt is a way of providing context and instructions to Claude, such as
* specifying a particular goal or role. See our guide to system prompts.
* @param settings
* @return
* create message response
@@ -33,6 +38,45 @@
*/
def createMessage(
messages: Seq[Message],
systemPrompt: Option[String],
settings: AnthropicCreateMessageSettings = DefaultSettings.CreateMessage
): Future[CreateMessageResponse]
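
Illustrative call shape only (not part of this diff): with this change the system prompt moves from AnthropicCreateMessageSettings to an explicit parameter, so a call now looks roughly like this.

val service: AnthropicService = ???   // obtained via AnthropicServiceFactory
val conversation: Seq[Message] = ???  // user/assistant messages, built as before this PR

val reply: Future[CreateMessageResponse] =
  service.createMessage(
    messages = conversation,
    systemPrompt = Some("You are a concise assistant.")
  ) // settings defaults to DefaultSettings.CreateMessage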

// TODO:
/**
* Creates a message.
*
* Send a structured list of input messages with text and/or image content, and the model
* will generate the next message in the conversation.
*
* The Messages API can be used for either single queries or stateless multi-turn
* conversations.
*
* @param messages
* A list of messages comprising the conversation so far.
* @param systemPrompt
* System prompt.
*
* A system prompt is a way of providing context and instructions to Claude, such as
* specifying a particular goal or role. See our guide to system prompts.
* @param tools
* [beta] Definitions of tools that the model may use.
*
* If you include tools in your API request, the model may return tool_use content blocks
* that represent the model's use of those tools. You can then run those tools using the tool
* input generated by the model and then optionally return results back to the model using
* tool_result content blocks.
*
* @param settings
* @return
* create message response
* @see
* <a href="https://docs.anthropic.com/claude/reference/messages_post">Anthropic Doc</a>
*/
def createToolMessage(
messages: Seq[Message],
systemPrompt: Option[String],
tools: Seq[ToolSpec],
settings: AnthropicCreateMessageSettings = DefaultSettings.CreateMessage
): Future[CreateMessageResponse]
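
Again purely illustrative: createToolMessage mirrors createMessage but additionally takes the tool definitions, reusing service and conversation from the previous sketch and the hypothetical weatherTool above. Tool invocations come back as tool_use content blocks (ToolUseBlock) that the caller is expected to execute.

val toolReply: Future[CreateMessageResponse] =
  service.createToolMessage(
    messages = conversation,
    systemPrompt = Some("Answer weather questions using the available tool."),
    tools = Seq(weatherTool)
  ) // settings again defaults to DefaultSettings.CreateMessage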

@@ -47,6 +91,11 @@ trait AnthropicService extends CloseableService with AnthropicServiceConsts {
*
* @param messages
* A list of messages comprising the conversation so far.
* @param systemPrompt
* System prompt.
*
* A system prompt is a way of providing context and instructions to Claude, such as
* specifying a particular goal or role. See our guide to system prompts.
* @param settings
* @return
* create message response
Expand All @@ -55,6 +104,7 @@ trait AnthropicService extends CloseableService with AnthropicServiceConsts {
*/
def createMessageStreamed(
messages: Seq[Message],
systemPrompt: Option[String],
settings: AnthropicCreateMessageSettings = DefaultSettings.CreateMessage
): Source[ContentBlockDelta, NotUsed]
}
@@ -3,10 +3,16 @@ package io.cequence.openaiscala.anthropic.service
import akka.stream.Materializer
import io.cequence.openaiscala.anthropic.service.impl.{
AnthropicServiceImpl,
OpenAIAnthropicChatCompletionService
OpenAIAnthropicChatCompletionService,
OpenAIAnthropicChatToolCompletionService
}
import io.cequence.openaiscala.service.{
OpenAIChatCompletionService,
OpenAIChatToolCompletionService
}
import io.cequence.openaiscala.service.StreamedServiceTypes.{
OpenAIChatCompletionStreamedService
}
import io.cequence.openaiscala.service.OpenAIChatCompletionService
import io.cequence.openaiscala.service.StreamedServiceTypes.OpenAIChatCompletionStreamedService
import io.cequence.openaiscala.service.ws.Timeouts

import scala.concurrent.ExecutionContext
@@ -43,6 +49,17 @@ object AnthropicServiceFactory extends AnthropicServiceConsts {
AnthropicServiceFactory(apiKey, timeouts)
)

def asOpenAIChatToolCompletionService(
apiKey: String = getAPIKeyFromEnv(),
timeouts: Option[Timeouts] = None
)(
implicit ec: ExecutionContext,
materializer: Materializer
): OpenAIChatToolCompletionService =
new OpenAIAnthropicChatToolCompletionService(
AnthropicServiceFactory(apiKey, timeouts)
)
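
A possible way to obtain the new adapter (illustrative, not part of this diff). It assumes an Akka ActorSystem is available, which supplies the implicit Materializer on Akka 2.6+, and that the API key is picked up from the environment via getAPIKeyFromEnv().

import akka.actor.ActorSystem
import scala.concurrent.ExecutionContext

implicit val system: ActorSystem = ActorSystem("anthropic-tools-example")
implicit val ec: ExecutionContext = system.dispatcher

// The Materializer is derived implicitly from the ActorSystem.
val toolCompletionService: OpenAIChatToolCompletionService =
  AnthropicServiceFactory.asOpenAIChatToolCompletionService()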

/**
* Create a new instance of the [[AnthropicService]]
*
@@ -0,0 +1,62 @@
package io.cequence.openaiscala.anthropic.service

import io.cequence.openaiscala.anthropic.service.AnthropicWSRequestHelper.AnthropicBetaHeader
import io.cequence.openaiscala.{
OpenAIScalaClientException,
OpenAIScalaEngineOverloadedException,
OpenAIScalaRateLimitException,
OpenAIScalaServerErrorException,
OpenAIScalaTokenCountExceededException,
OpenAIScalaUnauthorizedException
}
import io.cequence.openaiscala.service.ws.WSRequestExtHelper
import play.api.libs.json.{JsObject, JsValue}

import scala.concurrent.Future

object AnthropicWSRequestHelper {
val AnthropicBetaHeader = "anthropic-beta"
}

trait AnthropicWSRequestHelper extends WSRequestExtHelper {

// TODO: introduce Anthropic error model
override protected def handleErrorCodes(
httpCode: Int,
message: String
): Nothing = {
val errorMessage = s"Code ${httpCode} : ${message}"
httpCode match {
case 401 => throw new OpenAIScalaUnauthorizedException(errorMessage)
case 429 => throw new OpenAIScalaRateLimitException(errorMessage)
case 500 => throw new OpenAIScalaServerErrorException(errorMessage)
case 503 => throw new OpenAIScalaEngineOverloadedException(errorMessage)
case 400 =>
if (
message.contains("Please reduce your prompt; or completion length") ||
message.contains("Please reduce the length of the messages")
)
throw new OpenAIScalaTokenCountExceededException(errorMessage)
else
throw new OpenAIScalaClientException(errorMessage)

case _ => throw new OpenAIScalaClientException(errorMessage)
}
}

protected def execBetaPOSTWithStatus(
endPoint: PEP,
endPointParam: Option[String] = None,
params: Seq[(PT, Option[Any])] = Nil,
bodyParams: Seq[(PT, Option[JsValue])] = Nil
): Future[JsValue] = {
execPOSTWithStatusAndHeaders(
endPoint,
endPointParam,
params,
bodyParams,
headers = authHeaders ++ Seq(AnthropicBetaHeader -> "tools-2024-04-04")
).map(handleErrorResponse)
}

}