From ef0a67eb940a57f88172817b186a439809360e46 Mon Sep 17 00:00:00 2001
From: Jared Van Bortel
Date: Wed, 21 Feb 2024 16:18:26 -0500
Subject: [PATCH] models: remove gemma from models2.json and models3.json
 (#1995)

Signed-off-by: Jared Van Bortel
---
 gpt4all-chat/metadata/models2.json | 48 ++++++++++--------------------
 gpt4all-chat/metadata/models3.json | 48 ++++++++++--------------------
 2 files changed, 32 insertions(+), 64 deletions(-)

diff --git a/gpt4all-chat/metadata/models2.json b/gpt4all-chat/metadata/models2.json
index 903e7ad6..4d6c6a37 100644
--- a/gpt4all-chat/metadata/models2.json
+++ b/gpt4all-chat/metadata/models2.json
@@ -1,22 +1,6 @@
 [
     {
         "order": "a",
-        "md5sum": "6d1ca6e9533d177361fe2612a2c87474",
-        "name": "Gemma Instruct",
-        "filename": "gemma-7b-it.Q4_0.gguf",
-        "filesize": "4809316512",
-        "requires": "2.7.1",
-        "ramrequired": "8",
-        "parameters": "7 billion",
-        "quant": "q4_0",
-        "type": "Gemma",
-        "description": "<strong>A state-of-the-art open model from Google</strong><br>",
-        "url": "https://gpt4all.io/models/gguf/gemma-7b-it.Q4_0.gguf",
-        "promptTemplate": "<start_of_turn>user\n%1<end_of_turn>\n<start_of_turn>model\n",
-        "systemPrompt": ""
-    },
-    {
-        "order": "b",
         "md5sum": "f692417a22405d80573ac10cb0cd6c6a",
         "name": "Mistral OpenOrca",
         "filename": "mistral-7b-openorca.Q4_0.gguf2.gguf",
@@ -31,6 +15,22 @@
         "promptTemplate": "<|im_start|>user\n%1<|im_end|>\n<|im_start|>assistant\n",
         "systemPrompt": "<|im_start|>system\nYou are MistralOrca, a large language model trained by Alignment Lab AI. For multi-step problems, write out your reasoning for each step.\n<|im_end|>"
     },
+    {
+        "order": "b",
+        "md5sum": "97463be739b50525df56d33b26b00852",
+        "name": "Mistral Instruct",
+        "filename": "mistral-7b-instruct-v0.1.Q4_0.gguf",
+        "filesize": "4108916384",
+        "requires": "2.5.0",
+        "ramrequired": "8",
+        "parameters": "7 billion",
+        "quant": "q4_0",
+        "type": "Mistral",
+        "systemPrompt": " ",
+        "description": "<strong>Best overall fast instruction following model</strong><br><ul><li>Fast responses</li><li>Trained by Mistral AI</li><li>Uncensored</li><li>Licensed for commercial use</li></ul>",
+        "url": "https://gpt4all.io/models/gguf/mistral-7b-instruct-v0.1.Q4_0.gguf",
+        "promptTemplate": "[INST] %1 [/INST]"
+    },
     {
         "order": "c",
         "md5sum": "c4c78adf744d6a20f05c8751e3961b84",
@@ -47,22 +47,6 @@
         "url": "https://gpt4all.io/models/gguf/gpt4all-falcon-newbpe-q4_0.gguf",
         "promptTemplate": "### Instruction:\n%1\n### Response:\n"
     },
-    {
-        "order": "d",
-        "md5sum": "97463be739b50525df56d33b26b00852",
-        "name": "Mistral Instruct",
-        "filename": "mistral-7b-instruct-v0.1.Q4_0.gguf",
-        "filesize": "4108916384",
-        "requires": "2.5.0",
-        "ramrequired": "8",
-        "parameters": "7 billion",
-        "quant": "q4_0",
-        "type": "Mistral",
-        "systemPrompt": " ",
-        "description": "<strong>Best overall fast instruction following model</strong><br><ul><li>Fast responses</li><li>Trained by Mistral AI</li><li>Uncensored</li><li>Licensed for commercial use</li></ul>",
-        "url": "https://gpt4all.io/models/gguf/mistral-7b-instruct-v0.1.Q4_0.gguf",
-        "promptTemplate": "[INST] %1 [/INST]"
-    },
     {
         "order": "e",
         "md5sum": "00c8593ba57f5240f59662367b3ed4a5",
diff --git a/gpt4all-chat/metadata/models3.json b/gpt4all-chat/metadata/models3.json
index 5e33ca0f..df6c12eb 100644
--- a/gpt4all-chat/metadata/models3.json
+++ b/gpt4all-chat/metadata/models3.json
@@ -1,22 +1,6 @@
 [
     {
         "order": "a",
-        "md5sum": "6d1ca6e9533d177361fe2612a2c87474",
-        "name": "Gemma Instruct",
-        "filename": "gemma-7b-it.Q4_0.gguf",
-        "filesize": "4809316512",
-        "requires": "2.7.1",
-        "ramrequired": "8",
-        "parameters": "7 billion",
-        "quant": "q4_0",
-        "type": "Gemma",
-        "description": "<strong>A state-of-the-art open model from Google</strong><br><ul><li>Fast responses</li><li>Chat based model</li><li>Trained by Google</li><li>Licensed for commercial use</li><li>Gemma is provided under and subject to the Gemma Terms of Use found at ai.google.dev/gemma/terms</li></ul>",
-        "url": "https://gpt4all.io/models/gguf/gemma-7b-it.Q4_0.gguf",
-        "promptTemplate": "<start_of_turn>user\n%1<end_of_turn>\n<start_of_turn>model\n%2<end_of_turn>\n",
-        "systemPrompt": ""
-    },
-    {
-        "order": "b",
         "md5sum": "f692417a22405d80573ac10cb0cd6c6a",
         "name": "Mistral OpenOrca",
         "filename": "mistral-7b-openorca.Q4_0.gguf2.gguf",
@@ -31,6 +15,22 @@
         "promptTemplate": "<|im_start|>user\n%1<|im_end|>\n<|im_start|>assistant\n%2<|im_end|>\n",
         "systemPrompt": "<|im_start|>system\nYou are MistralOrca, a large language model trained by Alignment Lab AI. For multi-step problems, write out your reasoning for each step.\n<|im_end|>"
     },
+    {
+        "order": "b",
+        "md5sum": "97463be739b50525df56d33b26b00852",
+        "name": "Mistral Instruct",
+        "filename": "mistral-7b-instruct-v0.1.Q4_0.gguf",
+        "filesize": "4108916384",
+        "requires": "2.5.0",
+        "ramrequired": "8",
+        "parameters": "7 billion",
+        "quant": "q4_0",
+        "type": "Mistral",
+        "systemPrompt": " ",
+        "description": "<strong>Best overall fast instruction following model</strong><br><ul><li>Fast responses</li><li>Trained by Mistral AI</li><li>Uncensored</li><li>Licensed for commercial use</li></ul>",
+        "url": "https://gpt4all.io/models/gguf/mistral-7b-instruct-v0.1.Q4_0.gguf",
+        "promptTemplate": "[INST] %1 [/INST]"
+    },
     {
         "order": "c",
         "md5sum": "c4c78adf744d6a20f05c8751e3961b84",
@@ -47,22 +47,6 @@
         "url": "https://gpt4all.io/models/gguf/gpt4all-falcon-newbpe-q4_0.gguf",
         "promptTemplate": "### Instruction:\n%1\n### Response:\n"
     },
-    {
-        "order": "d",
-        "md5sum": "97463be739b50525df56d33b26b00852",
-        "name": "Mistral Instruct",
-        "filename": "mistral-7b-instruct-v0.1.Q4_0.gguf",
-        "filesize": "4108916384",
-        "requires": "2.5.0",
-        "ramrequired": "8",
-        "parameters": "7 billion",
-        "quant": "q4_0",
-        "type": "Mistral",
-        "systemPrompt": " ",
-        "description": "<strong>Best overall fast instruction following model</strong><br><ul><li>Fast responses</li><li>Trained by Mistral AI</li><li>Uncensored</li><li>Licensed for commercial use</li></ul>",
-        "url": "https://gpt4all.io/models/gguf/mistral-7b-instruct-v0.1.Q4_0.gguf",
-        "promptTemplate": "[INST] %1 [/INST]"
-    },
     {
         "order": "e",
         "md5sum": "00c8593ba57f5240f59662367b3ed4a5",