This commit is contained in:
Riccardo Giorato 2026-01-30 08:12:30 -08:00 committed by GitHub
commit 7fc93e73f5
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
16 changed files with 456 additions and 5 deletions

View File

@ -38,6 +38,7 @@ See [Venice AI](/providers/venice).
- [Qwen (OAuth)](/providers/qwen)
- [OpenRouter](/providers/openrouter)
- [Vercel AI Gateway](/providers/vercel-ai-gateway)
- [Together AI](/providers/together)
- [Moonshot AI (Kimi + Kimi Code)](/providers/moonshot)
- [OpenCode Zen](/providers/opencode)
- [Amazon Bedrock](/bedrock)

View File

@ -0,0 +1,65 @@
---
summary: "Together AI setup (auth + model selection)"
read_when:
- You want to use Together AI with Moltbot
- You need the API key env var or CLI auth choice
---
# Together AI
[Together AI](https://together.ai) provides access to leading open-source models including Llama, DeepSeek, Kimi, and more through a unified API.
- Provider: `together`
- Auth: `TOGETHER_API_KEY`
- API: OpenAI-compatible
## Quick start
1) Set the API key (recommended: store it for the Gateway):
```bash
moltbot onboard --auth-choice together-api-key
```
2) Set a default model:
```json5
{
agents: {
defaults: {
model: { primary: "together/zai-org/GLM-4.7" }
}
}
}
```
## Non-interactive example
```bash
moltbot onboard --non-interactive \
--mode local \
--auth-choice together-api-key \
--together-api-key "$TOGETHER_API_KEY"
```
This will set `together/zai-org/GLM-4.7` as the default model.
## Environment note
If the Gateway runs as a daemon (launchd/systemd), make sure `TOGETHER_API_KEY`
is available to that process (for example, in `~/.clawdbot/.env` or via
`env.shellEnv`).
## Available models
Together AI provides access to many popular open-source models:
- **GLM 4.7 Fp8** - Default model with 200K context window
- **Llama 3.3 70B Instruct Turbo** - Fast, efficient instruction following
- **Llama 4 Scout** - Vision model with image understanding
- **Llama 4 Maverick** - Advanced vision and reasoning
- **DeepSeek V3.1** - Powerful coding and reasoning model
- **DeepSeek R1** - Advanced reasoning model
- **Kimi K2 Instruct** - High-performance model with 262K context window
All models support standard chat completions and are OpenAI API compatible.

View File

@ -286,6 +286,7 @@ export function resolveEnvApiKey(provider: string): EnvApiKeyResult | null {
venice: "VENICE_API_KEY",
mistral: "MISTRAL_API_KEY",
opencode: "OPENCODE_API_KEY",
together: "TOGETHER_API_KEY",
};
const envVar = envMap[normalized];
if (!envVar) return null;

View File

@ -13,6 +13,11 @@ import {
SYNTHETIC_MODEL_CATALOG,
} from "./synthetic-models.js";
import { discoverVeniceModels, VENICE_BASE_URL } from "./venice-models.js";
import {
TOGETHER_BASE_URL,
TOGETHER_MODEL_CATALOG,
buildTogetherModelDefinition,
} from "./together-models.js";
type ModelsConfig = NonNullable<OpenClawConfig["models"]>;
export type ProviderConfig = NonNullable<ModelsConfig["providers"]>[string];
@ -388,6 +393,14 @@ async function buildOllamaProvider(): Promise<ProviderConfig> {
};
}
/** Builds the implicit Together AI provider entry from the static model catalog. */
function buildTogetherProvider(): ProviderConfig {
  const models = TOGETHER_MODEL_CATALOG.map((entry) => buildTogetherModelDefinition(entry));
  return { baseUrl: TOGETHER_BASE_URL, api: "openai-completions", models };
}
export async function resolveImplicitProviders(params: {
agentDir: string;
}): Promise<ModelsConfig["providers"]> {
@ -414,7 +427,10 @@ export async function resolveImplicitProviders(params: {
resolveEnvApiKeyVarName("kimi-code") ??
resolveApiKeyFromProfiles({ provider: "kimi-code", store: authStore });
if (kimiCodeKey) {
providers["kimi-code"] = { ...buildKimiCodeProvider(), apiKey: kimiCodeKey };
providers["kimi-code"] = {
...buildKimiCodeProvider(),
apiKey: kimiCodeKey,
};
}
const syntheticKey =
@ -454,6 +470,16 @@ export async function resolveImplicitProviders(params: {
providers.ollama = { ...(await buildOllamaProvider()), apiKey: ollamaKey };
}
const togetherKey =
resolveEnvApiKeyVarName("together") ??
resolveApiKeyFromProfiles({ provider: "together", store: authStore });
if (togetherKey) {
providers.together = {
...buildTogetherProvider(),
apiKey: togetherKey,
};
}
return providers;
}
@ -462,7 +488,9 @@ export async function resolveImplicitCopilotProvider(params: {
env?: NodeJS.ProcessEnv;
}): Promise<ProviderConfig | null> {
const env = params.env ?? process.env;
const authStore = ensureAuthProfileStore(params.agentDir, { allowKeychainPrompt: false });
const authStore = ensureAuthProfileStore(params.agentDir, {
allowKeychainPrompt: false,
});
const hasProfile = listProfilesForProvider(authStore, "github-copilot").length > 0;
const envToken = env.COPILOT_GITHUB_TOKEN ?? env.GH_TOKEN ?? env.GITHUB_TOKEN;
const githubToken = (envToken ?? "").trim();
@ -527,7 +555,10 @@ export async function resolveImplicitBedrockProvider(params: {
if (enabled !== true && !hasAwsCreds) return null;
const region = discoveryConfig?.region ?? env.AWS_REGION ?? env.AWS_DEFAULT_REGION ?? "us-east-1";
const models = await discoverBedrockModels({ region, config: discoveryConfig });
const models = await discoverBedrockModels({
region,
config: discoveryConfig,
});
if (models.length === 0) return null;
return {

View File

@ -0,0 +1,133 @@
import type { ModelDefinitionConfig } from "../config/types.models.js";
// OpenAI-compatible endpoint for Together AI chat completions.
export const TOGETHER_BASE_URL = "https://api.together.xyz/v1";

// Static catalog of Together AI models surfaced by the onboarding flow.
// Cost figures appear to be USD per 1M tokens — TODO confirm against Together's
// published pricing, which changes over time.
// NOTE(review): contextWindow values below (10M for Llama 4 Scout, 20M for
// Maverick) exceed what Together typically serves and, for Maverick, what Meta
// advertises (1M) — verify against the live Together model list.
export const TOGETHER_MODEL_CATALOG: ModelDefinitionConfig[] = [
  {
    // Default model for the Together provider (see TOGETHER_DEFAULT_MODEL_REF).
    id: "zai-org/GLM-4.7",
    name: "GLM 4.7 Fp8",
    reasoning: false,
    input: ["text"],
    contextWindow: 202752,
    maxTokens: 8192,
    cost: {
      input: 0.45,
      output: 2.0,
      cacheRead: 0.45,
      cacheWrite: 2.0,
    },
  },
  {
    id: "moonshotai/Kimi-K2.5",
    name: "Kimi K2.5",
    reasoning: true,
    input: ["text", "image"],
    cost: {
      input: 0.5,
      output: 2.8,
      cacheRead: 0.5,
      cacheWrite: 2.8,
    },
    contextWindow: 262144,
    maxTokens: 32768,
  },
  {
    id: "meta-llama/Llama-3.3-70B-Instruct-Turbo",
    name: "Llama 3.3 70B Instruct Turbo",
    reasoning: false,
    input: ["text"],
    contextWindow: 131072,
    maxTokens: 8192,
    cost: {
      input: 0.88,
      output: 0.88,
      cacheRead: 0.88,
      cacheWrite: 0.88,
    },
  },
  {
    id: "meta-llama/Llama-4-Scout-17B-16E-Instruct",
    name: "Llama 4 Scout 17B 16E Instruct",
    reasoning: false,
    input: ["text", "image"],
    contextWindow: 10000000,
    maxTokens: 32768,
    cost: {
      input: 0.18,
      output: 0.59,
      cacheRead: 0.18,
      cacheWrite: 0.18,
    },
  },
  {
    id: "meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8",
    name: "Llama 4 Maverick 17B 128E Instruct FP8",
    reasoning: false,
    input: ["text", "image"],
    contextWindow: 20000000,
    maxTokens: 32768,
    cost: {
      input: 0.27,
      output: 0.85,
      cacheRead: 0.27,
      cacheWrite: 0.27,
    },
  },
  {
    id: "deepseek-ai/DeepSeek-V3.1",
    name: "DeepSeek V3.1",
    reasoning: false,
    input: ["text"],
    contextWindow: 131072,
    maxTokens: 8192,
    cost: {
      input: 0.6,
      output: 1.25,
      cacheRead: 0.6,
      cacheWrite: 0.6,
    },
  },
  {
    id: "deepseek-ai/DeepSeek-R1",
    name: "DeepSeek R1",
    reasoning: true,
    input: ["text"],
    contextWindow: 131072,
    maxTokens: 8192,
    cost: {
      input: 3.0,
      output: 7.0,
      cacheRead: 3.0,
      cacheWrite: 3.0,
    },
  },
  {
    id: "moonshotai/Kimi-K2-Instruct-0905",
    name: "Kimi K2-Instruct 0905",
    reasoning: false,
    input: ["text"],
    contextWindow: 262144,
    maxTokens: 8192,
    cost: {
      input: 1.0,
      output: 3.0,
      cacheRead: 1.0,
      cacheWrite: 3.0,
    },
  },
];
/**
 * Normalizes a catalog entry into a full model definition, pinning the
 * OpenAI-compatible completions API that Together AI exposes.
 */
export function buildTogetherModelDefinition(
  model: (typeof TOGETHER_MODEL_CATALOG)[number],
): ModelDefinitionConfig {
  const { id, name, reasoning, input, cost, contextWindow, maxTokens } = model;
  return {
    id,
    name,
    api: "openai-completions",
    reasoning,
    // Catalog entries only ever declare text/image inputs.
    input: input as ("text" | "image")[],
    cost,
    contextWindow,
    maxTokens,
  };
}

View File

@ -164,6 +164,12 @@ describe("cli program (smoke)", () => {
key: "sk-moonshot-test",
field: "moonshotApiKey",
},
{
authChoice: "together-api-key",
flag: "--together-api-key",
key: "sk-together-test",
field: "togetherApiKey",
},
{
authChoice: "kimi-code-api-key",
flag: "--kimi-code-api-key",

View File

@ -52,7 +52,7 @@ export function registerOnboardCommand(program: Command) {
.option("--mode <mode>", "Wizard mode: local|remote")
.option(
"--auth-choice <choice>",
"Auth: setup-token|token|chutes|openai-codex|openai-api-key|openrouter-api-key|ai-gateway-api-key|moonshot-api-key|kimi-code-api-key|synthetic-api-key|venice-api-key|gemini-api-key|zai-api-key|xiaomi-api-key|apiKey|minimax-api|minimax-api-lightning|opencode-zen|skip",
"Auth: setup-token|token|chutes|openai-codex|openai-api-key|openrouter-api-key|ai-gateway-api-key|moonshot-api-key|kimi-code-api-key|synthetic-api-key|venice-api-key|gemini-api-key|zai-api-key|xiaomi-api-key|apiKey|minimax-api|minimax-api-lightning|opencode-zen|skip|together-api-key",
)
.option(
"--token-provider <id>",
@ -76,6 +76,7 @@ export function registerOnboardCommand(program: Command) {
.option("--minimax-api-key <key>", "MiniMax API key")
.option("--synthetic-api-key <key>", "Synthetic API key")
.option("--venice-api-key <key>", "Venice API key")
.option("--together-api-key <key>", "Together AI API key")
.option("--opencode-zen-api-key <key>", "OpenCode Zen API key")
.option("--gateway-port <port>", "Gateway port")
.option("--gateway-bind <mode>", "Gateway bind: loopback|tailnet|lan|auto|custom")
@ -127,6 +128,7 @@ export function registerOnboardCommand(program: Command) {
minimaxApiKey: opts.minimaxApiKey as string | undefined,
syntheticApiKey: opts.syntheticApiKey as string | undefined,
veniceApiKey: opts.veniceApiKey as string | undefined,
togetherApiKey: opts.togetherApiKey as string | undefined,
opencodeZenApiKey: opts.opencodeZenApiKey as string | undefined,
gatewayPort:
typeof gatewayPort === "number" && Number.isFinite(gatewayPort)

View File

@ -63,6 +63,7 @@ describe("buildAuthChoiceOptions", () => {
expect(options.some((opt) => opt.value === "moonshot-api-key")).toBe(true);
expect(options.some((opt) => opt.value === "kimi-code-api-key")).toBe(true);
expect(options.some((opt) => opt.value === "together-api-key")).toBe(true);
});
it("includes Vercel AI Gateway auth choice", () => {
@ -75,6 +76,18 @@ describe("buildAuthChoiceOptions", () => {
expect(options.some((opt) => opt.value === "ai-gateway-api-key")).toBe(true);
});
it("includes Together AI auth choice", () => {
const store: AuthProfileStore = { version: 1, profiles: {} };
const options = buildAuthChoiceOptions({
store,
includeSkip: false,
includeClaudeCliIfMissing: true,
platform: "darwin",
});
expect(options.some((opt) => opt.value === "together-api-key")).toBe(true);
});
it("includes Synthetic auth choice", () => {
const store: AuthProfileStore = { version: 1, profiles: {} };
const options = buildAuthChoiceOptions({

View File

@ -21,7 +21,8 @@ export type AuthChoiceGroupId =
| "minimax"
| "synthetic"
| "venice"
| "qwen";
| "qwen"
| "together";
export type AuthChoiceGroup = {
value: AuthChoiceGroupId;
@ -66,6 +67,12 @@ const AUTH_CHOICE_GROUP_DEFS: {
hint: "Anthropic-compatible (multi-model)",
choices: ["synthetic-api-key"],
},
{
value: "together",
label: "Together AI",
hint: "API key",
choices: ["together-api-key"],
},
{
value: "venice",
label: "Venice AI",
@ -154,6 +161,11 @@ export function buildAuthChoiceOptions(params: {
label: "Venice AI API key",
hint: "Privacy-focused inference (uncensored models)",
});
options.push({
value: "together-api-key",
label: "Together AI API key",
hint: "Access to Llama, DeepSeek, Qwen, and more open models",
});
options.push({
value: "github-copilot",
label: "GitHub Copilot (GitHub device login)",

View File

@ -23,6 +23,8 @@ import {
applyOpenrouterProviderConfig,
applySyntheticConfig,
applySyntheticProviderConfig,
applyTogetherConfig,
applyTogetherProviderConfig,
applyVeniceConfig,
applyVeniceProviderConfig,
applyVercelAiGatewayConfig,
@ -34,6 +36,7 @@ import {
MOONSHOT_DEFAULT_MODEL_REF,
OPENROUTER_DEFAULT_MODEL_REF,
SYNTHETIC_DEFAULT_MODEL_REF,
TOGETHER_DEFAULT_MODEL_REF,
VENICE_DEFAULT_MODEL_REF,
VERCEL_AI_GATEWAY_DEFAULT_MODEL_REF,
XIAOMI_DEFAULT_MODEL_REF,
@ -43,6 +46,7 @@ import {
setOpencodeZenApiKey,
setOpenrouterApiKey,
setSyntheticApiKey,
setTogetherApiKey,
setVeniceApiKey,
setVercelAiGatewayApiKey,
setXiaomiApiKey,
@ -89,6 +93,8 @@ export async function applyAuthChoiceApiProviders(
authChoice = "synthetic-api-key";
} else if (params.opts.tokenProvider === "venice") {
authChoice = "venice-api-key";
} else if (params.opts.tokenProvider === "together") {
authChoice = "together-api-key";
} else if (params.opts.tokenProvider === "opencode") {
authChoice = "opencode-zen";
}
@ -633,5 +639,63 @@ export async function applyAuthChoiceApiProviders(
return { config: nextConfig, agentModelOverride };
}
if (authChoice === "together-api-key") {
let hasCredential = false;
if (!hasCredential && params.opts?.token && params.opts?.tokenProvider === "together") {
await setTogetherApiKey(normalizeApiKeyInput(params.opts.token), params.agentDir);
hasCredential = true;
}
if (!hasCredential) {
await params.prompter.note(
[
"Together AI provides access to leading open-source models including Llama, DeepSeek, Qwen, and more.",
"Get your API key at: https://api.together.xyz/settings/api-keys",
].join("\n"),
"Together AI",
);
}
const envKey = resolveEnvApiKey("together");
if (envKey) {
const useExisting = await params.prompter.confirm({
message: `Use existing TOGETHER_API_KEY (${envKey.source}, ${formatApiKeyPreview(envKey.apiKey)})?`,
initialValue: true,
});
if (useExisting) {
await setTogetherApiKey(envKey.apiKey, params.agentDir);
hasCredential = true;
}
}
if (!hasCredential) {
const key = await params.prompter.text({
message: "Enter Together AI API key",
validate: validateApiKeyInput,
});
await setTogetherApiKey(normalizeApiKeyInput(String(key)), params.agentDir);
}
nextConfig = applyAuthProfileConfig(nextConfig, {
profileId: "together:default",
provider: "together",
mode: "api_key",
});
{
const applied = await applyDefaultModelChoice({
config: nextConfig,
setDefaultModel: params.setDefaultModel,
defaultModel: TOGETHER_DEFAULT_MODEL_REF,
applyDefaultConfig: applyTogetherConfig,
applyProviderConfig: applyTogetherProviderConfig,
noteDefault: TOGETHER_DEFAULT_MODEL_REF,
noteAgentModel,
prompter: params.prompter,
});
nextConfig = applied.config;
agentModelOverride = applied.agentModelOverride ?? agentModelOverride;
}
return { config: nextConfig, agentModelOverride };
}
return null;
}

View File

@ -21,6 +21,7 @@ const PREFERRED_PROVIDER_BY_AUTH_CHOICE: Partial<Record<AuthChoice, string>> = {
"xiaomi-api-key": "xiaomi",
"synthetic-api-key": "synthetic",
"venice-api-key": "venice",
"together-api-key": "together",
"github-copilot": "github-copilot",
"copilot-proxy": "copilot-proxy",
"minimax-cloud": "minimax",

View File

@ -5,6 +5,13 @@ import {
SYNTHETIC_DEFAULT_MODEL_REF,
SYNTHETIC_MODEL_CATALOG,
} from "../agents/synthetic-models.js";
import {
TOGETHER_BASE_URL,
TOGETHER_MODEL_CATALOG,
buildTogetherModelDefinition,
} from "../agents/together-models.js";
import {
buildVeniceModelDefinition,
VENICE_BASE_URL,
@ -14,6 +21,7 @@ import {
import type { OpenClawConfig } from "../config/config.js";
import {
OPENROUTER_DEFAULT_MODEL_REF,
TOGETHER_DEFAULT_MODEL_REF,
VERCEL_AI_GATEWAY_DEFAULT_MODEL_REF,
XIAOMI_DEFAULT_MODEL_REF,
ZAI_DEFAULT_MODEL_REF,
@ -484,6 +492,80 @@ export function applyVeniceConfig(cfg: OpenClawConfig): OpenClawConfig {
};
}
export function applyTogetherProviderConfig(cfg: MoltbotConfig): MoltbotConfig {
const models = { ...cfg.agents?.defaults?.models };
models[TOGETHER_DEFAULT_MODEL_REF] = {
...models[TOGETHER_DEFAULT_MODEL_REF],
alias: models[TOGETHER_DEFAULT_MODEL_REF]?.alias ?? "Together AI",
};
const providers = { ...cfg.models?.providers };
const existingProvider = providers.together;
const existingModels = Array.isArray(existingProvider?.models) ? existingProvider.models : [];
// Use static catalog only (no async operations to maintain sync interface)
const { apiKey: existingApiKey, ...existingProviderRest } = (existingProvider ?? {}) as Record<
string,
unknown
> as { apiKey?: string };
const resolvedApiKey = typeof existingApiKey === "string" ? existingApiKey : undefined;
const normalizedApiKey = resolvedApiKey?.trim();
const togetherModels = TOGETHER_MODEL_CATALOG.map(buildTogetherModelDefinition);
const mergedModels = [
...existingModels,
...togetherModels.filter(
(model) => !existingModels.some((existing) => existing.id === model.id),
),
];
providers.together = {
...existingProviderRest,
baseUrl: TOGETHER_BASE_URL,
api: "openai-completions",
...(normalizedApiKey ? { apiKey: normalizedApiKey } : {}),
models: mergedModels.length > 0 ? mergedModels : togetherModels,
};
return {
...cfg,
agents: {
...cfg.agents,
defaults: {
...cfg.agents?.defaults,
models,
},
},
models: {
mode: cfg.models?.mode ?? "merge",
providers,
},
};
}
export function applyTogetherConfig(cfg: MoltbotConfig): MoltbotConfig {
const next = applyTogetherProviderConfig(cfg);
const existingModel = next.agents?.defaults?.model;
return {
...next,
agents: {
...next.agents,
defaults: {
...next.agents?.defaults,
model: {
...(existingModel && "fallbacks" in (existingModel as Record<string, unknown>)
? {
fallbacks: (existingModel as { fallbacks?: string[] }).fallbacks,
}
: undefined),
primary: TOGETHER_DEFAULT_MODEL_REF,
},
},
},
};
}
export function applyAuthProfileConfig(
cfg: OpenClawConfig,
params: {

View File

@ -116,6 +116,7 @@ export const ZAI_DEFAULT_MODEL_REF = "zai/glm-4.7";
export const XIAOMI_DEFAULT_MODEL_REF = "xiaomi/mimo-v2-flash";
export const OPENROUTER_DEFAULT_MODEL_REF = "openrouter/auto";
export const VERCEL_AI_GATEWAY_DEFAULT_MODEL_REF = "vercel-ai-gateway/anthropic/claude-opus-4.5";
export const TOGETHER_DEFAULT_MODEL_REF = "together/zai-org/GLM-4.7";
export async function setZaiApiKey(key: string, agentDir?: string) {
// Write to resolved agent dir so gateway finds credentials on startup.
@ -177,3 +178,15 @@ export async function setOpencodeZenApiKey(key: string, agentDir?: string) {
agentDir: resolveAuthAgentDir(agentDir),
});
}
export async function setTogetherApiKey(key: string, agentDir?: string) {
upsertAuthProfile({
profileId: "together:default",
credential: {
type: "api_key",
provider: "together",
key,
},
agentDir: resolveAuthAgentDir(agentDir),
});
}

View File

@ -13,6 +13,8 @@ export {
applyOpenrouterProviderConfig,
applySyntheticConfig,
applySyntheticProviderConfig,
applyTogetherConfig,
applyTogetherProviderConfig,
applyVeniceConfig,
applyVeniceProviderConfig,
applyVercelAiGatewayConfig,
@ -44,6 +46,7 @@ export {
setOpencodeZenApiKey,
setOpenrouterApiKey,
setSyntheticApiKey,
setTogetherApiKey,
setVeniceApiKey,
setVercelAiGatewayApiKey,
setXiaomiApiKey,
@ -52,6 +55,7 @@ export {
VERCEL_AI_GATEWAY_DEFAULT_MODEL_REF,
XIAOMI_DEFAULT_MODEL_REF,
ZAI_DEFAULT_MODEL_REF,
TOGETHER_DEFAULT_MODEL_REF,
} from "./onboard-auth.credentials.js";
export {
buildKimiCodeModelDefinition,

View File

@ -16,6 +16,7 @@ import {
applyOpenrouterConfig,
applySyntheticConfig,
applyVeniceConfig,
applyTogetherConfig,
applyVercelAiGatewayConfig,
applyXiaomiConfig,
applyZaiConfig,
@ -28,6 +29,7 @@ import {
setOpenrouterApiKey,
setSyntheticApiKey,
setVeniceApiKey,
setTogetherApiKey,
setVercelAiGatewayApiKey,
setXiaomiApiKey,
setZaiApiKey,
@ -376,6 +378,25 @@ export async function applyNonInteractiveAuthChoice(params: {
return applyOpencodeZenConfig(nextConfig);
}
if (authChoice === "together-api-key") {
const resolved = await resolveNonInteractiveApiKey({
provider: "together",
cfg: baseConfig,
flagValue: opts.togetherApiKey,
flagName: "--together-api-key",
envVar: "TOGETHER_API_KEY",
runtime,
});
if (!resolved) return null;
if (resolved.source !== "profile") await setTogetherApiKey(resolved.key);
nextConfig = applyAuthProfileConfig(nextConfig, {
profileId: "together:default",
provider: "together",
mode: "api_key",
});
return applyTogetherConfig(nextConfig);
}
if (
authChoice === "oauth" ||
authChoice === "chutes" ||

View File

@ -17,6 +17,7 @@ export type AuthChoice =
| "kimi-code-api-key"
| "synthetic-api-key"
| "venice-api-key"
| "together-api-key"
| "codex-cli"
| "apiKey"
| "gemini-api-key"
@ -72,6 +73,7 @@ export type OnboardOptions = {
minimaxApiKey?: string;
syntheticApiKey?: string;
veniceApiKey?: string;
togetherApiKey?: string;
opencodeZenApiKey?: string;
gatewayPort?: number;
gatewayBind?: GatewayBind;