# see https://github.com/sigoden/aichat/blob/main/config.example.yaml

# RAG settings: embedding model plus the chunk size/overlap used when indexing documents
rag_embedding_model: ollama:nomic-embed-text
rag_chunk_size: 8192
rag_chunk_overlap: 409

# Default chat model and sampling temperature
model: ollama:qwen2.5
temperature: 0

clients:
  # Ollama exposes an OpenAI-compatible API at this local endpoint
  - type: openai-compatible
    name: ollama
    api_base: http://localhost:11434/v1
    api_key: __LLM_API_KEY__   # placeholder, presumably substituted before use; Ollama's local endpoint does not require a real key
    models:
      - name: qwen2.5
        supports_function_calling: true
        max_input_tokens: 128000
      - name: qwen2.5-coder:32b
        supports_function_calling: true
        max_input_tokens: 128000
      - name: nomic-embed-text
        type: embedding
        max_input_tokens: 200000
        max_tokens_per_chunk: 2000
        default_chunk_size: 8192
        max_batch_size: 100
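
# Usage sketch, assuming the `aichat` binary is installed and this file sits at the
# default config location (e.g. ~/.config/aichat/config.yaml); the RAG name "notes"
# below is illustrative only:
#   aichat -m ollama:qwen2.5-coder:32b "explain this error"   # pick a specific model from the client above
#   aichat --rag notes                                        # build/open a RAG using ollama:nomic-embed-text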