ai_chatbot 0.1.6.3 → 0.1.6.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 4a93ff88f75370288533bd606e7705f2ad7223b56974a81fe6967c72756b8e21
-  data.tar.gz: 5966c1c3aaec715819780a3ea285ac489346007bf234e759b46824c70d846fa6
+  metadata.gz: 802b2da58aa8ef78b7b9813faf48e789db953d4c6e62a437002b1dc6017e4167
+  data.tar.gz: b3525c33bd0965aeccc2b13d67380e67df0ec6f3c502b0afe24968d694833d0f
 SHA512:
-  metadata.gz: 817bac75b5fe2949efbc8b5a4baf96fd45b85051ca2be69755c618ed4c72effc8e17174073068912651188181595e25d76f0d9b342479cb92977715a8d2c3881
-  data.tar.gz: e097a21eeb6f95098bf25c1159b00d6a1a05ad8a5e5379f9fceb25d5fe6d1baf02704b541aa109502d4aeb8147f7985c15ca781774ede5a1dd9c522440921b5b
+  metadata.gz: eff5b8d9885206a83a4f6a05ca275c925717299360ae62df2af451009a07e5d244de503031d0f8a091a4cd1ef067af85a18cadf6ded9ef994899b6d5639e4747
+  data.tar.gz: b05cd753b9e3d4db2d9e6fb86aa43b2e0e355d8cf723e3d852bfb43a23897aade4d5849232179c7338babc0f19568d05d5c5eafdda781337d00daca83b6594fc
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module AiChatbot
-  VERSION = "0.1.6.3"
+  VERSION = "0.1.6.4"
 end
data/lib/ml_model.py CHANGED
@@ -6,12 +6,15 @@ from sklearn.metrics.pairwise import cosine_similarity
 import pickle
 import os
 
+# Cache to store previously asked questions
+cache = {}
+
 # Load or initialize the dataset
 if os.path.exists("qa_model.pkl"):
     with open("qa_model.pkl", "rb") as f:
         model_data = pickle.load(f)
-        questions = model_data.get(['questions'],[])
-        answers = model_data.get(['answers'],[])
+        questions = model_data.get("questions", [])
+        answers = model_data.get("answers", [])
 else:
     questions = [
         "How to create a new model in Rails?",
@@ -28,6 +31,10 @@ else:
 model = make_pipeline(TfidfVectorizer(), MultinomialNB())
 model.fit(questions, answers)
 
+# Populate cache with known questions and answers
+for q, a in zip(questions, answers):
+    cache[q.lower()] = a
+
 # Function to predict or retrain the model
 def main(action, query=None, new_answer=None):
     if action == "predict":
@@ -37,19 +44,24 @@ def main(action, query=None, new_answer=None):
     elif action == "update_answer":
         return update_answer(query, new_answer)
     elif action == "update_or_delete_question":
-        return update_or_delete_question(query, new_answer)  # Corrected here, calling the right function
+        return update_or_delete_question(query, new_answer)
     elif action == "list_questions":
         return list_questions()
     elif action == "list_answers":
-        return list_answers()
-
+        return list_answers()
 
-# Function to predict the response with confidence check
+# Function to predict the response with caching
 def get_prediction(query):
+    query_lower = query.lower()
+
+    # **Check cache first**
+    if query_lower in cache:
+        return cache[query_lower]
+
     query_vec = model.named_steps['tfidfvectorizer'].transform([query])
     question_vecs = model.named_steps['tfidfvectorizer'].transform(questions)
 
-    # Calculate cosine similarity between query and known questions
+    # Calculate cosine similarity
     similarities = cosine_similarity(query_vec, question_vecs)
     max_similarity = similarities.max()
 
@@ -57,85 +69,93 @@ def get_prediction(query):
     if max_similarity < threshold:
         return "No good match found. Please provide the correct answer."
     else:
-        prediction = model.predict([query])
-        return prediction[0]
+        prediction = model.predict([query])[0]
+
+        # **Store in cache for faster future retrieval**
+        cache[query_lower] = prediction
+
+        return prediction
 
 # Function to train the model with a new question and answer
 def train_model(new_question, new_answer):
     global questions, answers
 
-    # Append new question-answer pair to the dataset
+    # Append new question-answer pair
     questions.append(new_question)
     answers.append(new_answer)
 
-    # Retrain the model with updated data
+    # Retrain the model
    model.fit(questions, answers)
 
-    # Save the updated model and data
+    # **Update cache**
+    cache[new_question.lower()] = new_answer
+
+    # Save the updated model
     with open("qa_model.pkl", "wb") as f:
         pickle.dump({"questions": questions, "answers": answers}, f)
 
-    return f"Model retrained with the new question: '{new_question}' and answer: '{new_answer}'"
+    return f"Model retrained with: '{new_question}' -> '{new_answer}'"
 
+# Function to update an answer
 def update_answer(existing_question, new_answer):
     global questions, answers
 
     if existing_question in questions:
-        # Find the index of the existing question
         index = questions.index(existing_question)
-        # Update the answer
         answers[index] = new_answer
-        # Retrain the model with updated data
+
+        # Retrain the model
         model.fit(questions, answers)
-        # Save the updated model and data
+
+        # **Update cache**
+        cache[existing_question.lower()] = new_answer
+
+        # Save the model
         with open("qa_model.pkl", "wb") as f:
             pickle.dump({"questions": questions, "answers": answers}, f)
-        return f"Answer updated for the question: '{existing_question}'"
-    else:
-        return "Question not found. Please provide a valid question."
+
+        return f"Answer updated for: '{existing_question}'"
+
+    return "Question not found."
 
+# Function to update or delete a question
 def update_or_delete_question(existing_question, new_question):
-    global questions  # Only 'questions' is global, not 'new_question'
-    if new_question=="None":
-        new_question = None
-
+    global questions, answers
+
     if existing_question in questions:
+        index = questions.index(existing_question)
+
         if new_question:
-            # Find the index of the existing question
-            index = questions.index(existing_question)
-            # Update the question
             questions[index] = new_question
-            # Retrain the model with updated data
-            model.fit(questions, answers)
-            # Save the updated model and data
-            with open("qa_model.pkl", "wb") as f:
-                pickle.dump({"questions": questions, "answers": answers}, f)
-            return f"Question updated from '{existing_question}' to '{new_question}'"
+            # **Update cache**
+            cache[new_question.lower()] = answers[index]
         else:
-            # Remove the question if no new question is provided
-            index = questions.index(existing_question)
+            # Delete the question
             del questions[index]
-            del answers[index]  # Ensure you also delete the corresponding answer
-            # Retrain the model with updated data
-            model.fit(questions, answers)
-            # Save the updated model and data
-            with open("qa_model.pkl", "wb") as f:
-                pickle.dump({"questions": questions, "answers": answers}, f)
-            return f"Question '{existing_question}' deleted successfully."
-    else:
-        return "Question not found. Please provide a valid question."
+            del answers[index]
+
+        # Retrain the model
+        model.fit(questions, answers)
+
+        # **Remove from cache if deleted**
+        if not new_question:
+            cache.pop(existing_question.lower(), None)
+
+        # Save the model
+        with open("qa_model.pkl", "wb") as f:
+            pickle.dump({"questions": questions, "answers": answers}, f)
+
+        return f"Updated question: '{existing_question}' -> '{new_question}'" if new_question else f"Deleted: '{existing_question}'"
 
+    return "Question not found."
 
 def list_questions():
-    global questions
     return questions
 
 def list_answers():
-    global answers
-    return answers
+    return answers
 
 if __name__ == "__main__":
-    # Expecting action (predict/train), question, and answer (if training)
     action = sys.argv[1]
     question = sys.argv[2] if len(sys.argv) > 2 else None
     answer = sys.argv[3] if len(sys.argv) > 3 else None
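
For context, the new code path in get_prediction is a simple read-through cache: the incoming query is lowercased and looked up in the module-level cache dict before any TF-IDF vectorization or cosine-similarity work, and every successful prediction (as well as every train/update/delete action) writes the answer back into the dict. Below is a minimal, self-contained sketch of that pattern; predict_uncached is a stand-in for the real TfidfVectorizer/MultinomialNB pipeline and is purely illustrative, not part of the gem.

# Minimal sketch of the read-through cache introduced in 0.1.6.4 (illustrative only;
# predict_uncached stands in for the expensive scikit-learn similarity + predict path).
cache = {}

def predict_uncached(query):
    # Placeholder for vectorizing the query and running model.predict().
    return f"answer for: {query}"

def get_prediction(query):
    key = query.lower()          # cache is keyed on the lowercased question text
    if key in cache:             # hit: skip vectorization entirely
        return cache[key]
    prediction = predict_uncached(query)
    cache[key] = prediction      # write back for future lookups
    return prediction

if __name__ == "__main__":
    print(get_prediction("How to create a new model in Rails?"))   # miss: computed
    print(get_prediction("HOW TO CREATE A NEW MODEL IN RAILS?"))   # hit: case-insensitive

Note that cache lives only in the Python process's memory: ml_model.py is invoked per action via command-line arguments (for example, something along the lines of python ml_model.py predict "How to create a new model in Rails?"), and only the questions and answers lists are pickled to qa_model.pkl, so the cache is rebuilt from the known question list on each run rather than persisted across invocations.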
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: ai_chatbot
 version: !ruby/object:Gem::Version
-  version: 0.1.6.3
+  version: 0.1.6.4
 platform: ruby
 authors:
 - Sanket
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2025-02-24 00:00:00.000000000 Z
+date: 2025-02-26 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: open3
@@ -58,5 +58,5 @@ requirements: []
 rubygems_version: 3.3.7
 signing_key:
 specification_version: 4
-summary: A chatbot for Rails integration with AI model using Python
+summary: 'Fix: Improved response caching in ChatbotService'
 test_files: []