@@ -102,19 +102,29 @@ def _parse_readme_with_openai(self, readme_content: str) -> Dict:
 
 Return only the JSON configuration, nothing else."""
 
-        response = self.openai_client.responses.parse(
-            model="gpt-4o-mini",
-            input=[
-                {"role": "system", "content": "You are a helpful assistant that extracts MCP server configuration from READMEs."},
-                {"role": "user", "content": prompt}
-            ],
-            text_format=MCPServerConfigSchema
-        )
         try:
+            response = self.openai_client.responses.parse(
+                model="gpt-4o-mini",
+                input=[
+                    {"role": "system", "content": "You are a helpful assistant that extracts MCP server configuration from READMEs."},
+                    {"role": "user", "content": prompt}
+                ],
+                text_format=MCPServerConfigSchema
+            )
+            # Validate response structure
+            if not hasattr(response, "output_parsed") or not hasattr(response.output_parsed, "model_dump"):
+                raise ValueError("Unexpected OpenAI API response format")
             config = response.output_parsed.model_dump()
             return config
-        except json.JSONDecodeError:
-            raise ValueError("Failed to parse OpenAI response as JSON")
+        except json.JSONDecodeError as e:
+            # Include the offending content in the error for debugging
+            raise ValueError(f"Failed to parse OpenAI response as JSON: {e}")
+        except openai.OpenAIError as e:
+            # Surface OpenAI-specific API errors
+            raise RuntimeError(f"OpenAI API error: {e}")
+        except Exception as e:
+            # Catch-all for any other unexpected exceptions
+            raise RuntimeError(f"Unexpected error while parsing OpenAI response: {e}")
 
 
     def add_server_from_repo(self, server_name: str, repo_url: str) -> None:
         """Add a new server configuration by analyzing its GitHub repository README."""