from typing import List, Optional, Dict

from pydantic import BaseModel


class ComparisonImage(BaseModel):
    """A single generated image and the settings that produced it."""
    path: str
    model: str
    config: str
    prompt_index: int
    seed: int
    lora1: Optional[str] = None
    lora2: Optional[str] = None
    prompt: Optional[str] = None  # Prompt text used to generate this image


class ComparisonPair(BaseModel):
    """Two images generated with the same config, prompt, and seed, one per model."""
    model1: ComparisonImage
    model2: ComparisonImage
    config: str
    prompt_index: int
    seed: int
    prompt: str  # The prompt used for both images


class ComparisonData(BaseModel):
    """Full set of comparison data discovered under a base path."""
    configs: List[str]  # Available config types (cloth_lora, identity_lora, dual_lora)
    prompts: Dict[str, List[str]]  # Mapping of config -> list of prompts
    seeds: List[int]  # All available seeds
    pairs: List[ComparisonPair]  # All comparison pairs with their prompts


class PathRequest(BaseModel):
    """Request model for providing the base comparison path"""
    path: str


class ConfigRequest(BaseModel):
    """Request model for fetching specific configuration data"""
    path: str
    config_name: str


class ConfigInfo(BaseModel):
    """Basic information about an available configuration"""
    name: str
    model_count: int
    prompt_count: int
    seed_count: int


class AvailableConfigs(BaseModel):
    """Response model for the fetchConfigs endpoint"""
    base_path: str
    configs: List[ConfigInfo]