Core language model functionality and interfaces
The `LM` class — unified interface for all supported language models
# Basic usage: create an LM and generate a response.
from synth_ai.lm.core.main_v3 import LM

# Initialize with any supported model
lm = LM(model_name="gpt-4o-mini", temperature=0.7)

# Generate responses
response = lm.respond("Hello, world!")
print(response.raw_response)
# Configuration: via environment variable or direct constructor arguments.

# Environment-based configuration
import os

os.environ['OPENAI_API_KEY'] = 'your-key-here'

# Direct configuration
lm = LM(
    model_name="claude-3-sonnet-20240229",
    temperature=0.7,
    max_tokens=1000,
)
# Structured output: validate the model's reply against a pydantic schema.
from pydantic import BaseModel


class Response(BaseModel):
    answer: str         # the model's answer text
    confidence: float   # model-reported confidence in [0, 1]


response = lm.respond_structured(
    message="What is 2+2?",
    response_model=Response,
)
print(response.answer)      # "4"
print(response.confidence)  # 0.95