Commit: Merge branch 'main' into chore/eval-docs
mhordynski authored Dec 5, 2024
2 parents 5180c8d + 926e4b6 commit 6c77d73
Showing 27 changed files with 145 additions and 89 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/prepare_release.yml
@@ -61,4 +61,4 @@ jobs:
           gh pr create -B main --title "$COMMIT_MESSAGE" \
             --body 'Update ${{ github.event.inputs.packageName }} version from ${{ steps.packages_update.outputs.old_version }} to ${{ steps.packages_update.outputs.new_version }}'
         env:
-          GH_TOKEN: ${{ secrets.GH_TOKEN }}
\ No newline at end of file
+          GH_TOKEN: ${{ secrets.GH_TOKEN }}
2 changes: 1 addition & 1 deletion .github/workflows/push_release.yml
@@ -48,4 +48,4 @@ jobs:
           uv tool run twine upload dist/*
         env:
           TWINE_USERNAME: __token__
-          TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
\ No newline at end of file
+          TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
4 changes: 2 additions & 2 deletions docs/how-to/document_search/distributed_ingestion.md
@@ -49,7 +49,7 @@ job_id = client.submit_job(
     runtime_env={
         "working_dir": "./",
         "pip": [
-            "ragbits-core[litellm]",
+            "ragbits-core",
             "ragbits-document-search[distributed]"
         ]
     },
@@ -62,7 +62,7 @@ Ray Jobs is also available as CLI commands. You can submit a job using the follo
 ```bash
 ray job submit \
     --address http://<cluster_address>:8265 \
-    --runtime-env '{"pip": ["ragbits-core[litellm]", "ragbits-document-search[distributed]"]}'\
+    --runtime-env '{"pip": ["ragbits-core", "ragbits-document-search[distributed]"]}'\
     --working-dir . \
     -- python script.py
 ```
4 changes: 2 additions & 2 deletions docs/quickstart/quickstart1_prompts.md
@@ -7,10 +7,10 @@ In this Quickstart guide, you will learn how to define a dynamic prompt in Ragbi
 To install Ragbits, run the following command in your terminal:

 ```bash
-pip install ragbits[litellm]
+pip install ragbits
 ```

-This command will install all the popular Ragbits packages, along with [LiteLLM](https://docs.litellm.ai/docs/), which we will use in this guide for communicating with LLM APIs.
+This command will install all the popular Ragbits packages.

 ## Defining a Static Prompt
 The most standard way to define a prompt in Ragbits is to create a class that inherits from the `Prompt` class and configure it by setting values for appropriate properties. Here is an example of a simple prompt that asks the model to write a song about Ragbits:
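(The example class itself is collapsed in the diff above. A minimal sketch of such a static prompt follows; the import path and the `system_prompt`/`user_prompt` class attributes are assumptions about the `Prompt` API, not the guide's exact code.)

```python
# Hedged sketch of a static prompt; names are assumptions, not the guide's code.
from ragbits.core.prompt import Prompt


class SongPrompt(Prompt):
    """Asks the model to write a song about Ragbits."""

    system_prompt = "You are a creative songwriter."
    user_prompt = "Write a short song about the Ragbits library."
```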
2 changes: 1 addition & 1 deletion examples/apps/documents_chat.py
@@ -3,7 +3,7 @@
 # dependencies = [
 #     "gradio",
 #     "ragbits-document-search",
-#     "ragbits-core[chroma,litellm]",
+#     "ragbits-core[chroma]",
 # ]
 # ///
 from collections.abc import AsyncIterator
2 changes: 1 addition & 1 deletion examples/core/llm.py
@@ -1,7 +1,7 @@
 # /// script
 # requires-python = ">=3.10"
 # dependencies = [
-#     "ragbits-core[litellm]",
+#     "ragbits-core",
 # ]
 # ///
 import asyncio
2 changes: 1 addition & 1 deletion examples/document-search/basic.py
@@ -26,7 +26,7 @@
 # requires-python = ">=3.10"
 # dependencies = [
 #     "ragbits-document-search",
-#     "ragbits-core[litellm]",
+#     "ragbits-core",
 # ]
 # ///
2 changes: 1 addition & 1 deletion examples/document-search/chroma.py
@@ -27,7 +27,7 @@
 # requires-python = ">=3.10"
 # dependencies = [
 #     "ragbits-document-search",
-#     "ragbits-core[chroma,litellm]",
+#     "ragbits-core[chroma]",
 # ]
 # ///
2 changes: 1 addition & 1 deletion examples/document-search/chroma_otel.py
@@ -45,7 +45,7 @@
 # requires-python = ">=3.10"
 # dependencies = [
 #     "ragbits-document-search",
-#     "ragbits-core[chroma,litellm,otel]",
+#     "ragbits-core[chroma,otel]",
 # ]
 # ///
2 changes: 1 addition & 1 deletion examples/document-search/distributed.py
@@ -25,7 +25,7 @@
 # requires-python = ">=3.10"
 # dependencies = [
 #     "ragbits-document-search[distributed]",
-#     "ragbits-core[litellm]",
+#     "ragbits-core",
 # ]
 # ///
2 changes: 1 addition & 1 deletion examples/document-search/from_config.py
@@ -24,7 +24,7 @@ class to rephrase the query.
 # requires-python = ">=3.10"
 # dependencies = [
 #     "ragbits-document-search",
-#     "ragbits-core[chroma,litellm]",
+#     "ragbits-core[chroma]",
 # ]
 # ///
2 changes: 1 addition & 1 deletion examples/document-search/multimodal.py
@@ -27,7 +27,7 @@
 # requires-python = ">=3.10"
 # dependencies = [
 #     "ragbits-document-search",
-#     "ragbits-core[litellm]",
+#     "ragbits-core",
 # ]
 # ///
 import asyncio
2 changes: 1 addition & 1 deletion examples/document-search/qdrant.py
@@ -27,7 +27,7 @@
 # requires-python = ">=3.10"
 # dependencies = [
 #     "ragbits-document-search",
-#     "ragbits-core[litellm,qdrant]",
+#     "ragbits-core[qdrant]",
 # ]
 # ///
2 changes: 1 addition & 1 deletion examples/evaluation/document-search/evaluate.py
@@ -3,7 +3,7 @@
 # dependencies = [
 #     "ragbits-document-search",
 #     "ragbits-evaluate[relari]",
-#     "ragbits-core[litellm,chroma]",
+#     "ragbits-core[chroma]",
 # ]
 # ///
 import asyncio
2 changes: 1 addition & 1 deletion examples/evaluation/document-search/ingest.py
@@ -2,7 +2,7 @@
 # requires-python = ">=3.10"
 # dependencies = [
 #     "ragbits-document-search[huggingface]",
-#     "ragbits-core[litellm,chroma]",
+#     "ragbits-core[chroma]",
 #     "hydra-core~=1.3.2",
 #     "unstructured[md]>=0.15.13",
 # ]
21 changes: 19 additions & 2 deletions packages/ragbits-cli/src/ragbits/cli/__init__.py
@@ -1,12 +1,29 @@
 import importlib.util
 import pkgutil
 from pathlib import Path
+from typing import Annotated

-from typer import Typer
+import typer

 import ragbits

-app = Typer(no_args_is_help=True)
+from .app import CLI, OutputType
+
+app = CLI(no_args_is_help=True)
+
+
+@app.callback()
+def output_type(
+    # `OutputType.text.value` used as a workaround for the issue with `typer.Option` not accepting Enum values
+    output: Annotated[
+        OutputType, typer.Option("--output", "-o", help="Set the output type (text or json)")
+    ] = OutputType.text.value,  # type: ignore
+) -> None:
+    """Sets an output type for the CLI
+    Args:
+        output: type of output to be set
+    """
+    app.set_output_type(output_type=output)


 def main() -> None:
76 changes: 76 additions & 0 deletions packages/ragbits-cli/src/ragbits/cli/app.py
@@ -0,0 +1,76 @@
import json
from dataclasses import dataclass
from enum import Enum
from typing import Any

import typer
from pydantic import BaseModel
from rich.console import Console
from rich.table import Table


class OutputType(Enum):
    """Indicates a type of CLI output formatting"""

    text = "text"
    json = "json"


@dataclass()
class CliState:
    """A dataclass describing CLI state"""

    output_type: OutputType = OutputType.text


class CLI(typer.Typer):
    """A CLI class with output formatting"""

    def __init__(self, *args: Any, **kwargs: Any):  # noqa: ANN401
        super().__init__(*args, **kwargs)
        self.state: CliState = CliState()
        self.console: Console = Console()

    def set_output_type(self, output_type: OutputType) -> None:
        """
        Set the output type in the app state
        Args:
            output_type: OutputType
        """
        self.state.output_type = output_type

    def print_output(self, data: list[BaseModel] | BaseModel) -> None:
        """
        Process and display output based on the current state's output type.
        Args:
            data: a pydantic model or a list of pydantic models representing the output of a CLI function
        """
        if isinstance(data, BaseModel):
            data = [data]
        if len(data) == 0:
            self._print_empty_list()
            return
        first_el_instance = type(data[0])
        if any(not isinstance(datapoint, first_el_instance) for datapoint in data):
            raise ValueError("All the rows need to be of the same type")
        data_dicts: list[dict] = [output.model_dump(mode="python") for output in data]
        output_type = self.state.output_type
        if output_type == OutputType.json:
            print(json.dumps(data_dicts, indent=4))
        elif output_type == OutputType.text:
            table = Table(show_header=True, header_style="bold magenta")
            properties = data[0].model_json_schema()["properties"]
            for key in properties:
                table.add_column(properties[key]["title"])
            for row in data_dicts:
                table.add_row(*[str(value) for value in row.values()])
            self.console.print(table)
        else:
            raise ValueError(f"Output type: {output_type} not supported")

    def _print_empty_list(self) -> None:
        if self.state.output_type == OutputType.text:
            print("Empty data list")
        elif self.state.output_type == OutputType.json:
            print(json.dumps([]))
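(Together with the `--output` callback registered in `__init__.py` above, `print_output` gives every command a rich-table or JSON rendering. A minimal usage sketch follows; the `demo` command and `Package` model are hypothetical, added only for illustration.)

```python
# Hypothetical usage sketch; `demo` and `Package` are not part of the commit.
from pydantic import BaseModel
from typer.testing import CliRunner

from ragbits.cli import app  # the CLI instance created in __init__.py


class Package(BaseModel):
    name: str
    version: str


@app.command()
def demo() -> None:
    # Rendered as a rich table by default, or a JSON array with `--output json`.
    app.print_output([Package(name="ragbits-core", version="1.0.0")])


runner = CliRunner()
result = runner.invoke(app, ["--output", "json", "demo"])
print(result.stdout)
```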
4 changes: 1 addition & 3 deletions packages/ragbits-core/pyproject.toml
@@ -36,6 +36,7 @@ dependencies = [
     "pydantic>=2.9.1",
     "typer~=0.12.5",
     "tomli~=2.0.2",
+    "litellm~=1.46.0",
 ]

 [project.urls]
@@ -48,9 +49,6 @@ dependencies = [
 chroma = [
     "chromadb~=0.4.24",
 ]
-litellm = [
-    "litellm~=1.46.0",
-]
 local = [
     "torch~=2.2.1",
     "transformers~=4.44.2",
28 changes: 16 additions & 12 deletions packages/ragbits-core/src/ragbits/core/cli.py
@@ -6,11 +6,12 @@
 from pathlib import Path

 import typer
-from rich import print as pprint
+from pydantic import BaseModel

+from ragbits.cli.app import CLI
 from ragbits.core.config import core_config
 from ragbits.core.llms.base import LLMType
-from ragbits.core.prompt.prompt import Prompt
+from ragbits.core.prompt.prompt import ChatFormat, Prompt


 def _render(prompt_path: str, payload: str | None) -> Prompt:
@@ -25,10 +26,17 @@ def _render(prompt_path: str, payload: str | None) -> Prompt:
     return prompt_cls()


+class LLMResponseCliOutput(BaseModel):
+    """An output model for llm responses in CLI"""
+
+    question: ChatFormat
+    answer: str | BaseModel | None = None
+
+
 prompts_app = typer.Typer(no_args_is_help=True)


 def register(app: typer.Typer) -> None:
     """
     Register the CLI commands for the package.
@@ -68,9 +76,8 @@ def render(prompt_path: str, payload: str | None = None) -> None:
         Renders a prompt by loading a class from a module and initializing it with a given payload.
         """
         prompt = _render(prompt_path=prompt_path, payload=payload)
-
-        pprint("[orange3]RENDERED PROMPT:")
-        pprint(prompt.chat)
+        response = LLMResponseCliOutput(question=prompt.chat)
+        app.print_output(response)

     @prompts_app.command(name="exec")
     def execute(
@@ -92,11 +99,8 @@ def execute(
             raise ValueError("`llm_factory` must be provided")
         llm = get_llm_from_factory(llm_factory)

-        response = asyncio.run(llm.generate(prompt))
-
-        pprint("[orange3]QUESTION:")
-        pprint(prompt.chat)
-        pprint("[orange3]ANSWER:")
-        pprint(response)
+        llm_output = asyncio.run(llm.generate(prompt))
+        response = LLMResponseCliOutput(question=prompt.chat, answer=llm_output)
+        app.print_output(response)

     app.add_typer(prompts_app, name="prompts", help="Commands for managing prompts")
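(For reference, a self-contained sketch of the record the reworked commands now print. `list[dict[str, str]]` stands in for `ChatFormat`, which is assumed here to be a list of role/content message dicts, so the snippet runs on its own.)

```python
# Standalone sketch of LLMResponseCliOutput; list[dict[str, str]] stands in
# for ChatFormat, assumed to be a list of role/content messages.
from pydantic import BaseModel


class LLMResponseCliOutput(BaseModel):
    question: list[dict[str, str]]
    answer: str | None = None


record = LLMResponseCliOutput(
    question=[{"role": "user", "content": "Write a song about Ragbits."}],
    answer="La la la...",
)
print(record.model_dump(mode="python"))
# {'question': [{'role': 'user', 'content': 'Write a song about Ragbits.'}], 'answer': 'La la la...'}
```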
13 changes: 1 addition & 12 deletions packages/ragbits-core/src/ragbits/core/embeddings/litellm.py
@@ -1,9 +1,4 @@
-try:
-    import litellm
-
-    HAS_LITELLM = True
-except ImportError:
-    HAS_LITELLM = False
+import litellm

 from ragbits.core.audit import trace
 from ragbits.core.embeddings import Embeddings
@@ -40,13 +35,7 @@ def __init__(
                 for more information, follow the instructions for your specific vendor in the\
                 [LiteLLM documentation](https://docs.litellm.ai/docs/embedding/supported_embedding).
             api_version: The API version for the call.
-
-        Raises:
-            ImportError: If the 'litellm' extra requirements are not installed.
         """
-        if not HAS_LITELLM:
-            raise ImportError("You need to install the 'litellm' extra requirements to use LiteLLM embeddings models")
-
         super().__init__()
         self.model = model
         self.options = options or {}
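(Since `litellm` now ships with `ragbits-core`, the embeddings client works without the old extra. A minimal sketch; the class name `LiteLLMEmbeddings` and the async `embed_text` method are inferred from the module, not confirmed by this diff.)

```python
# Hedged sketch; LiteLLMEmbeddings and embed_text are inferred names.
import asyncio

from ragbits.core.embeddings.litellm import LiteLLMEmbeddings


async def main() -> None:
    embedder = LiteLLMEmbeddings(model="text-embedding-3-small")
    vectors = await embedder.embed_text(["Ragbits makes RAG pipelines simple."])
    print(len(vectors), len(vectors[0]))


asyncio.run(main())
```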
3 changes: 2 additions & 1 deletion packages/ragbits-core/src/ragbits/core/llms/__init__.py
@@ -3,8 +3,9 @@
 from ragbits.core.utils.config_handling import get_cls_from_config

 from .base import LLM
+from .litellm import LiteLLM

-__all__ = ["LLM"]
+__all__ = ["LLM", "LiteLLM"]

 module = sys.modules[__name__]
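(With the re-export in place, callers can import the client from the package root. A short sketch; the `model_name` parameter is an assumption about the constructor.)

```python
# Minimal sketch; model_name is an assumed constructor parameter.
from ragbits.core.llms import LiteLLM  # re-exported by this commit

llm = LiteLLM(model_name="gpt-4o-mini")
```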