-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmain.py
More file actions
101 lines (74 loc) · 2.86 KB
/
main.py
File metadata and controls
101 lines (74 loc) · 2.86 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
import os
# Project-local helpers: snapshot persistence, Gemini invocation, and code collection.
from utils import save_project_code_list, load_project_code_list, invoke_gemini, get_project_code_from_path
# Model name forwarded to invoke_gemini() in main().
gemini_model = 'gemini-2.5-flash'
# Ensure the snapshot directory exists before any save/load.
# NOTE(review): presumably save_project_code_list/load_project_code_list
# read and write here -- confirm against utils.
os.makedirs("./project_code", exist_ok=True)
# Startup menu shown by main(); the trailing "-> " acts as the input cursor.
input_str = """======================== Chat with code ===========================
Enter any 1 option from below:
1. Chat with new project code (input = your project path)
2. Chat with current project code (input = nothing)
3. Chat with updated project code (input = nothing: but your previous path will be used)
4. Exit (Type 'exit' to quit)
-> """
def main():
    """Run an interactive chat session over a project's source code.

    Shows a four-option menu, loads the chosen project's code into the
    chat context, then loops on user prompts -- each prompt and model
    response is appended to the history passed to ``invoke_gemini``.

    Returns:
        The accumulated chat-history list when the user types 'exit'
        inside the chat loop, or ``None`` on early exit / invalid input.
    """
    MODEL_PROMPT_WITH_HISTORY = [{
        'role': 'system',
        'content': """You are a helpful assistant developer.
You will receive a list of code snippets from a project.
Your task is to provide helpful responses based on the code snippets and user query.
You response should be concise and informative. Only response with what is necessary and nothing extra."""
    }]

    def _attach_context(code_list):
        # Every menu branch attaches project code to the history the same way.
        MODEL_PROMPT_WITH_HISTORY.append({
            'role': 'context',
            'content': str(code_list)
        })

    choice = input(input_str)
    # Recognize 'exit' BEFORE int() conversion. The original compared
    # `choice == 'exit'` after the conversion, which could never match
    # (choice was already an int) -- it only "worked" via the broad except.
    if choice.strip().lower() == 'exit':
        print("Exiting...")
        return None
    try:
        choice = int(choice)
    except ValueError:
        # int(str) raises only ValueError; anything non-numeric exits.
        print("Exiting...")
        return None
    if choice == 1:
        project_path = input("Enter your project path: ")
        project_code_list = get_project_code_from_path(project_path)
        _attach_context(project_code_list)
        save_project_code_list(project_code_list)
        # Remember the path so option 3 can re-scan it next session.
        with open('last_project_path.txt', 'w') as file:
            file.write(project_path)
    elif choice == 2:
        _attach_context(load_project_code_list())
    elif choice == 3:
        try:
            with open('last_project_path.txt', 'r') as file:
                project_path = file.read()
        except FileNotFoundError:
            # Option 1 was never run, so there is no saved path to reuse.
            print("No previous project path found; run option 1 first.")
            return None
        _attach_context(get_project_code_from_path(project_path))
    elif choice == 4:
        print("Exiting...")
        return None
    else:
        print("Invalid choice")
        return None
    while True:
        user_prompt = input("\nUser prompt: ")
        if user_prompt == 'exit':
            print("Exiting...")
            return MODEL_PROMPT_WITH_HISTORY
        MODEL_PROMPT_WITH_HISTORY.append({
            'role': 'user',
            'content': user_prompt
        })
        response = invoke_gemini(gemini_model=gemini_model, model_prompt=MODEL_PROMPT_WITH_HISTORY)
        MODEL_PROMPT_WITH_HISTORY.append({
            'role': 'assistant',
            'content': str(response)
        })
        print("\nModel Response:", response)
if __name__ == "__main__":
    # Entry point: run the interactive chat loop. The commented-out line
    # below dumped the full history as JSON for debugging (needs `import json`).
    # print("\n\n==== Your Chat History List ====\n", json.dumps(main()))
    main()