[ { "order": "a", "md5sum": "48de9538c774188eb25a7e9ee024bbd3", "name": "Mistral OpenOrca", "filename": "mistral-7b-openorca.Q4_0.gguf", "filesize": "4108927744", "requires": "2.5.0", "ramrequired": "8", "parameters": "7 billion", "quant": "q4_0", "type": "Mistral", "systemPrompt": " ", "description": "Best overall fast chat model
", "url": "https://gpt4all.io/models/gguf/mistral-7b-openorca.Q4_0.gguf", }, { "order": "b", "md5sum": "97463be739b50525df56d33b26b00852", "name": "Mistral Instruct", "filename": "mistral-7b-instruct-v0.1.Q4_0.gguf", "filesize": "4108916384", "requires": "2.5.0", "ramrequired": "8", "parameters": "7 billion", "quant": "q4_0", "type": "Mistral", "systemPrompt": " ", "description": "Best overall fast instruction following model
", "url": "https://gpt4all.io/models/gguf/mistral-7b-instruct-v0.1.Q4_0.gguf", "promptTemplate": "[INST] %1 [/INST]" }, { "order": "c", "md5sum": "31cb6d527bd3bfb5e73c2e9dfbc75033", "name": "GPT4All Falcon", "filename": "gpt4all-falcon-q4_0.gguf", "filesize": "4210419040", "requires": "2.5.0", "ramrequired": "8", "parameters": "7 billion", "quant": "q4_0", "type": "Falcon", "systemPrompt": " ", "description": "Very fast model with good quality
", "url": "https://gpt4all.io/models/gguf/gpt4all-falcon-q4_0.gguf", "promptTemplate": "### Instruction:\n%1\n### Response:\n" }, { "order": "e", "md5sum": "5aff90007499bce5c64b1c0760c0b186", "name": "Wizard v1.2", "filename": "wizardlm-13b-v1.2.Q4_0.gguf", "filesize": "7365834624", "requires": "2.5.0", "ramrequired": "16", "parameters": "13 billion", "quant": "q4_0", "type": "LLaMA2", "systemPrompt": " ", "description": "Best overall larger model