Set default VLM to gpt-4o-mini
emcf committed Sep 4, 2024
1 parent 6f99c4c commit 4fc33ec
Showing 2 changed files with 2 additions and 2 deletions.
setup.py: 2 changes (1 addition & 1 deletion)
@@ -10,7 +10,7 @@ def read_git_requirements(file):

 setup(
     name='thepipe_api',
-    version='1.2.6',
+    version='1.2.7',
     author='Emmett McFarlane',
     author_email='[email protected]',
     description='AI-native extractor, powered by multimodal LLMs.',
thepipe/extract.py: 2 changes (1 addition & 1 deletion)
@@ -106,7 +106,7 @@ def extract_from_chunk(chunk: Chunk, chunk_index: int, schema: str, ai_model: str

         response_dict = {"chunk_index": chunk_index, "source": source, "error": str(e)}
     return response_dict, tokens_used

-def extract(chunks: List[Chunk], schema: Union[str, Dict], ai_model: Optional[str] = 'google/gemma-2-9b-it', multiple_extractions: Optional[bool] = False, extraction_prompt: Optional[str] = DEFAULT_EXTRACTION_PROMPT, host_images: Optional[bool] = False) -> Tuple[List[Dict], int]:
+def extract(chunks: List[Chunk], schema: Union[str, Dict], ai_model: Optional[str] = 'openai/gpt-4o-mini', multiple_extractions: Optional[bool] = False, extraction_prompt: Optional[str] = DEFAULT_EXTRACTION_PROMPT, host_images: Optional[bool] = False) -> Tuple[List[Dict], int]:
     if isinstance(schema, dict):
         schema = json.dumps(schema)
