Muthuraja18 committed
Commit 221a38b · verified · 1 Parent(s): 1fb356e
Files changed (1)
  1. app.py +69 -42
app.py CHANGED
@@ -142,56 +142,83 @@ questions_db = {
 
  # ----------------- JSON helpers ---------------
 
- def generate_ai_questions(topic, num_questions=5):
-     """
-     Generate AI-based questions for a given topic using Groq API.
-     Falls back to `questions_db` if API fails.
-     """
-     try:
-         GROQ_API_KEY ="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"
-         url = "https://api.groq.com/openai/v1/responses"  # Groq OpenAI-compatible endpoint
-
-         headers = {
-             "Authorization": f"Bearer {GROQ_API_KEY}",
-             "Content-Type": "application/json"
-         }
-
-         prompt = f"Generate {num_questions} multiple-choice questions on the topic '{topic}'. " \
-                  f"Return each question in this format:\nQuestion|Option1,Option2,Option3,Option4|Answer"
-
-         payload = {
-             "model": "openai/gpt-oss-20b",
-             "input": prompt
-         }
-
-         response = requests.post(url, headers=headers, json=payload, timeout=20)
-         response.raise_for_status()
-         data = response.json()
-
-         raw_text = data.get("output_text") or data.get("result") or ""
-         questions = []
-
-         for line in raw_text.strip().split("\n"):
-             parts = line.strip().split("|")
-             if len(parts) == 3:
-                 q_text = parts[0].strip()
-                 opts = [o.strip() for o in parts[1].split(",")]
-                 ans = parts[2].strip()
-                 questions.append({"question": q_text, "options": opts, "answer": ans})
-             if len(questions) >= num_questions:
-                 break
-
-         if not questions:
-             st.warning(f"No AI questions generated for topic '{topic}'. Using static questions if any.")
-             return questions_db.get(topic.lower(), questions_db.get("default", []))[:num_questions]
-
-         return questions
-
-     except Exception as e:
-         st.error(f"Groq API request failed: {e}")
-         st.warning(f"No AI questions generated for topic '{topic}'. Using static questions if any.")
-         return questions_db.get(topic.lower(), questions_db.get("default", []))[:num_questions]
-
+ client = Groq(api_key=st.secrets["GROQ_API_KEY"].strip())
+
+ MODEL_NAME = "openai/gpt-oss-20b"
+
+ def generate_ai_questions(topic, num_questions=5, gid=None, questions_db=None):
+     """
+     Generate MCQs using Groq AI for a topic.
+     Fallback to questions_db if AI fails or no data returned.
+     Returns list of dicts: {"question": ..., "options": [...], "answer": ...}
+     """
+     # Ensure fallback exists
+     if questions_db is None:
+         questions_db = {}
+
+     prompt = f"""
+     Generate {num_questions} high-quality MCQ questions for the topic "{topic}".
+
+     STRICT FORMAT:
+     [
+       {{
+         "topic": "{topic}",
+         "question": "Question text",
+         "options": ["Option 1", "Option 2", "Option 3", "Option 4"],
+         "answer": "Correct option text"
+       }}
+     ]
+
+     RULES:
+     - ONLY valid JSON array.
+     - No extra text.
+     - No markdown.
+     - No explanations.
+     """
+
+     try:
+         st.info(f"Generating AI questions for topic '{topic}'...")
+         response = client.chat.completions.create(
+             model=MODEL_NAME,
+             messages=[{"role": "user", "content": prompt}],
+             temperature=0.2,
+             max_tokens=800,
+         )
+
+         raw = response.choices[0].message.content.strip()
+
+         # Remove code blocks if AI adds them
+         if raw.startswith("```"):
+             raw = raw.split("```")[1].replace("json", "").strip()
+
+         data = json.loads(raw)
+
+         # Optional: save Excel if gid provided
+         if gid:
+             os.makedirs("ai_questions_excel", exist_ok=True)
+             rows = []
+             for q in data:
+                 rows.append({
+                     "topic": q.get("topic", topic),
+                     "question": q.get("question", ""),
+                     "option_1": q.get("options", [""] * 4)[0],
+                     "option_2": q.get("options", [""] * 4)[1],
+                     "option_3": q.get("options", [""] * 4)[2],
+                     "option_4": q.get("options", [""] * 4)[3],
+                     "answer": q.get("answer", "")
+                 })
+             df = pd.DataFrame(rows)
+             df.to_excel(f"ai_questions_excel/{gid}_questions.xlsx", index=False)
+
+         return data[:num_questions]
+
+     except Exception as e:
+         st.warning(f"AI generation failed for topic '{topic}': {e}")
+         st.info("Using fallback questions from questions_db if available.")
+         # Fallback to static questions_db
+         fallback = questions_db.get(topic.lower(), questions_db.get("default", []))
+         return fallback[:num_questions]
 
 
  def load_json(path, default):
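
For reference, a minimal usage sketch of how the updated helper might be called from the Streamlit UI. This is not part of commit 221a38b: it assumes app.py keeps the static questions_db dictionary referenced in the hunk header, that the Groq key is stored as GROQ_API_KEY in .streamlit/secrets.toml (which is where st.secrets reads from), and that the "Quiz topic" input, the "demo_group" gid, and the widget keys below are purely illustrative.

# Hypothetical usage sketch -- not part of commit 221a38b.
# Assumes .streamlit/secrets.toml contains: GROQ_API_KEY = "gsk_..."
# and that `questions_db` is the static dictionary defined earlier in app.py.
import streamlit as st

topic = st.text_input("Quiz topic", value="python")  # illustrative default
if st.button("Generate quiz"):
    questions = generate_ai_questions(
        topic,
        num_questions=5,
        gid="demo_group",          # hypothetical group id; triggers the Excel export
        questions_db=questions_db,
    )
    for i, q in enumerate(questions, start=1):
        st.write(f"Q{i}. {q['question']}")
        st.radio("Choose one:", q["options"], key=f"q_{i}")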