forked from mosszhd/tricode_titans_knowly
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathget_models.py
76 lines (62 loc) · 2.47 KB
/
get_models.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
import ollama
import yaml
from ollama import delete
import sys
def check_and_pull_models(model_option: int = 0):
    """
    Ensure the required Ollama models are available locally, pulling any
    that are missing, then build the Knowly variants from their Modelfiles.

    Args:
        model_option (int): Which model set to check.
            - 0 (default): gemma:2b, tinyllama:latest, llava:latest.
            - 1: all of the above plus llama2-uncensored:latest.
            Any other value behaves like 0.
    """
    models_to_check = ["gemma:2b", "tinyllama:latest", "llava:latest"]
    if model_option == 1:
        models_to_check.append("llama2-uncensored:latest")
    # Only the names of the locally installed models matter, so build a set
    # directly instead of a name -> record dict whose values are discarded.
    installed = {model["name"] for model in ollama.list()["models"]}
    models_to_pull = set(models_to_check) - installed
    if models_to_pull:
        print("Models to pull:")
        for model_name in models_to_pull:
            print(f"  - {model_name}")
            print(f"Initializing {model_name} pull..")
            print("Please be patient as it may take a while depending on the speed of your internet...")
            ollama.pull(model_name)
            print(f"{model_name} is ready..")
        print("We appreciate your patience while we get the environment ready for you.")
        print("Ollama models ready for finetuning!")
    else:
        print("All model requirements fulfilled.")
    create_knowly_models(models_to_check)
def create_knowly_models(models_list):
    """
    Create the Knowly-branded models from their Modelfiles, one for each
    recognized base model in models_list.

    Args:
        models_list (list[str]): Base model names (e.g. "gemma:2b").
    """
    # Ordered (substring, target name, Modelfile path) table; order matters
    # and mirrors the original elif chain so match precedence is preserved.
    blueprints = (
        ("gemma", "KnowlyGemma", "./modelfiles/gemma/Modelfile"),
        ("tiny", "KnowlyTinyLlama", "./modelfiles/tinyllama/Modelfile"),
        ("llava", "KnowlyLlava", "./modelfiles/llava/Modelfile"),
        ("llama2", "KnowlyLlama2", "./modelfiles/llama2/Modelfile"),
    )
    for model in models_list:
        for key, target, modelfile in blueprints:
            if key in model:
                ollama.create(model=target, path=modelfile)
                break
    print("Model finetuning complete.")
    # NOTE(review): the original had an unreachable `if False:` branch that
    # deleted each base model after finetuning; removed as dead code. If the
    # cleanup is wanted, reintroduce it behind an explicit parameter.
def delete_ollama_model(model_name: str) -> None:
    """
    Remove an Ollama model by name, reporting the outcome on stdout.

    Deletion failures are best-effort: the error is printed rather than
    propagated to the caller.

    Args:
        model_name (str): The name of the Ollama model to be deleted.
    """
    try:
        delete(model_name)
    except Exception as err:
        print(f"Error deleting model '{model_name}': {err}")
    else:
        print(f"Model '{model_name}' deleted successfully.")
if __name__ == "__main__":
    # Parse the trailing CLI argument as the model option. Previously
    # `int(sys.argv[-1])` crashed with ValueError when the script was run
    # without arguments (argv[-1] is then the script path itself); fall back
    # to the default base model set (0) in that case.
    try:
        n = int(sys.argv[-1])
    except ValueError:
        n = 0
    check_and_pull_models(model_option=n)