// Per-model request configuration: maps a model name to its API endpoint,
// supported message roles, default request settings, and auth headers.
modelConfig = (model) => {
if (model.startsWith("claude"))
return {
model: model,
type: "chat",
api: "https://api.anthropic.com/v1/messages",
roles: ["user", "assistant"],
settings: {
temperature: 0.7,
max_tokens: viewof settings.value.max_prompt_tokens,
top_p: 1
},
headers: () => ({
"x-api-key": ANTHROPIC_API_KEY,
"anthropic-version": "2023-06-01",
"anthropic-dangerous-direct-browser-access": "true"
})
};
else if (model.startsWith("dall-e")) {
return {
model: model,
type: "image",
api: "https://api.openai.com/v1/images/generations",
settings: {
n: 1,
size: "1024x1024",
quality: "standard"
},
headers: () => ({
Authorization: `Bearer ${OPENAI_API_KEY}`
})
};
  } else if (["o1-mini", "o1-preview", "o3-mini"].includes(model)) {
    // Reasoning models without system-message support: fixed temperature
    // and max_completion_tokens instead of max_tokens.
return {
type: "chat",
api: "https://api.openai.com/v1/chat/completions",
roles: ["user", "assistant"],
settings: {
model: model,
temperature: 1,
max_completion_tokens: viewof settings.value.max_tokens,
top_p: 1,
frequency_penalty: 0,
presence_penalty: 0
},
headers: () => ({
Authorization: `Bearer ${OPENAI_API_KEY}`
})
};
} else if (model == "o1") {
return {
api: "https://api.openai.com/v1/chat/completions",
type: "chat",
roles: ["user", "system", "assistant"],
settings: {
model: model,
temperature: 1,
max_completion_tokens: viewof settings.value.max_prompt_tokens,
top_p: 1,
frequency_penalty: 0,
presence_penalty: 0
},
headers: () => ({
Authorization: `Bearer ${OPENAI_API_KEY}`
})
};
  } else {
    // Default: standard OpenAI Chat Completions models, with the
    // user-configurable temperature from the settings view.
return {
api: "https://api.openai.com/v1/chat/completions",
type: "chat",
roles: ["user", "system", "assistant"],
settings: {
model: model,
temperature: viewof settings.value.temperature,
max_tokens: viewof settings.value.max_prompt_tokens,
top_p: 1,
frequency_penalty: 0,
presence_penalty: 0
},
headers: () => ({
Authorization: `Bearer ${OPENAI_API_KEY}`
})
};
}
}
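
// Usage sketch (not part of the original notebook): a hypothetical callModel
// cell showing how a modelConfig entry could be turned into a fetch request.
// The `input` argument and the request-body shape are assumptions; the real
// caller may assemble the request differently.
callModel = async (model, input) => {
  const config = modelConfig(model);
  const body =
    config.type === "image"
      ? { model, prompt: input, ...config.settings } // image models take a prompt string
      : { model, messages: input, ...config.settings }; // chat models take a messages array
  const response = await fetch(config.api, {
    method: "POST",
    headers: { "Content-Type": "application/json", ...config.headers() },
    body: JSON.stringify(body)
  });
  if (!response.ok) throw new Error(`${config.api} returned ${response.status}`);
  return response.json();
}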