SHIKARICHACHA committed
Commit 6d9b43a (verified) · Parent: 93a0fac

Update app.py

Files changed (1)
  1. app.py +30 -13
app.py CHANGED
@@ -6,6 +6,7 @@ Generates custom musical exercises with LLM, perfectly fit to user-specified num
 AND time signature, guaranteeing exact durations in MIDI and in the UI!
 
 Major updates:
+- Added Qwen and Claude 3.5 Sonnet AI model options
 - Added duration sum display in Exercise Data tab
 - Shows total duration units (16th notes) for verification
 - Added DeepSeek AI model option
@@ -55,14 +56,18 @@ import os
 import subprocess as sp
 import base64
 import shutil
-from openai import OpenAI  # For DeepSeek API
+from openai import OpenAI  # For API models
 
 # -----------------------------------------------------------------------------
 # 3. Configuration & constants
 # -----------------------------------------------------------------------------
 MISTRAL_API_URL = "https://api.mistral.ai/v1/chat/completions"
 MISTRAL_API_KEY = "yQdfM8MLbX9uhInQ7id4iUTwN4h4pDLX"  # Replace with your key
-DEEPSEEK_API_KEY = "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"
+OPENROUTER_API_KEYS = {
+    "DeepSeek": "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",
+    "Qwen": "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",
+    "Claude": "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"
+}
 
 SOUNDFONT_URLS = {
     "Trumpet": "https://github.com/FluidSynth/fluidsynth/raw/master/sf2/Trumpet.sf2",
@@ -261,7 +266,7 @@ def get_technique_based_on_level(level: str) -> str:
     return random.choice(techniques.get(level, ["with slurs"]))
 
 # -----------------------------------------------------------------------------
-# 9. LLM Query Function (supports Mistral and DeepSeek)
+# 9. LLM Query Function (supports multiple models)
 # -----------------------------------------------------------------------------
 def query_llm(model_name: str, prompt: str, instrument: str, level: str, key: str,
               time_sig: str, measures: int) -> str:
@@ -321,19 +326,25 @@ def query_llm(model_name: str, prompt: str, instrument: str, level: str, key: st
             print(f"Error querying Mistral API: {e}")
             return get_fallback_exercise(instrument, level, key, time_sig, measures)
 
-    elif model_name == "DeepSeek":
+    elif model_name in ["DeepSeek", "Qwen", "Claude"]:
         try:
             client = OpenAI(
                 base_url="https://openrouter.ai/api/v1",
-                api_key=DEEPSEEK_API_KEY,
+                api_key=OPENROUTER_API_KEYS[model_name],
             )
 
+            model_map = {
+                "DeepSeek": "deepseek/deepseek-chat-v3-0324:free",
+                "Qwen": "qwen/qwen3-14b:free",
+                "Claude": "anthropic/claude-3.5-sonnet:beta"
+            }
+
             completion = client.chat.completions.create(
                 extra_headers={
                     "HTTP-Referer": "https://github.com/AdaptiveMusicExerciseGenerator",
                     "X-Title": "Music Exercise Generator",
                 },
-                model="deepseek/deepseek-chat-v3-0324:free",
+                model=model_map[model_name],
                 messages=[
                     {"role": "system", "content": system_prompt},
                     {"role": "user", "content": user_prompt},
@@ -347,7 +358,7 @@ def query_llm(model_name: str, prompt: str, instrument: str, level: str, key: st
             content = completion.choices[0].message.content
             return content.replace("```json","").replace("```","").strip()
         except Exception as e:
-            print(f"Error querying DeepSeek API: {e}")
+            print(f"Error querying {model_name} API: {e}")
             return get_fallback_exercise(instrument, level, key, time_sig, measures)
 
     else:
@@ -399,7 +410,7 @@ def generate_exercise(instrument: str, level: str, key: str, tempo: int, time_si
         return f"Error: {str(e)}", None, str(tempo), None, "0", time_signature, 0
 
 # -----------------------------------------------------------------------------
-# 12. Simple AI chat assistant (optional, shares LLM)
+# 12. AI chat assistant
 # -----------------------------------------------------------------------------
 def handle_chat(message: str, history: List, instrument: str, level: str, ai_model: str):
     if not message.strip():
@@ -423,19 +434,25 @@ def handle_chat(message: str, history: List, instrument: str, level: str, ai_mod
             history.append((message, f"Error: {str(e)}"))
             return "", history
 
-    elif ai_model == "DeepSeek":
+    elif ai_model in ["DeepSeek", "Qwen", "Claude"]:
         try:
             client = OpenAI(
                 base_url="https://openrouter.ai/api/v1",
-                api_key=DEEPSEEK_API_KEY,
+                api_key=OPENROUTER_API_KEYS[ai_model],
             )
 
+            model_map = {
+                "DeepSeek": "deepseek/deepseek-chat-v3-0324:free",
+                "Qwen": "qwen/qwen3-14b:free",
+                "Claude": "anthropic/claude-3.5-sonnet:beta"
+            }
+
             completion = client.chat.completions.create(
                 extra_headers={
                     "HTTP-Referer": "https://github.com/AdaptiveMusicExerciseGenerator",
                     "X-Title": "Music Exercise Generator",
                 },
-                model="deepseek/deepseek-chat-v3-0324:free",
+                model=model_map[ai_model],
                 messages=messages,
                 temperature=0.7,
                 max_tokens=500,
@@ -452,7 +469,7 @@ def handle_chat(message: str, history: List, instrument: str, level: str, ai_mod
             return "", history
 
 # -----------------------------------------------------------------------------
-# 13. Gradio user interface definition (for humans!)
+# 13. Gradio user interface definition
 # -----------------------------------------------------------------------------
 def create_ui() -> gr.Blocks:
     with gr.Blocks(title="Adaptive Music Exercise Generator", theme="soft") as demo:
@@ -466,7 +483,7 @@ def create_ui() -> gr.Blocks:
             with gr.Group(visible=True) as params_group:
                 gr.Markdown("### Exercise Parameters")
                 ai_model = gr.Radio(
-                    ["Mistral", "DeepSeek"],
+                    ["Mistral", "DeepSeek", "Qwen", "Claude"],
                     value="Mistral",
                     label="AI Model"
                 )
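The model_map dictionary and the OpenRouter client setup are now duplicated between query_llm() and handle_chat(). One possible follow-up, not part of this commit, is to factor them into a module-level map and a shared helper; the names OPENROUTER_MODELS and query_openrouter below are hypothetical:

```python
from openai import OpenAI

# Hypothetical shared map of UI model names to OpenRouter model IDs.
OPENROUTER_MODELS = {
    "DeepSeek": "deepseek/deepseek-chat-v3-0324:free",
    "Qwen": "qwen/qwen3-14b:free",
    "Claude": "anthropic/claude-3.5-sonnet:beta",
}

def query_openrouter(model_name: str, messages: list, api_key: str,
                     temperature: float = 0.7, max_tokens: int = 500) -> str:
    """Send one chat-completion request through OpenRouter and return the reply text."""
    client = OpenAI(base_url="https://openrouter.ai/api/v1", api_key=api_key)
    completion = client.chat.completions.create(
        extra_headers={
            "HTTP-Referer": "https://github.com/AdaptiveMusicExerciseGenerator",
            "X-Title": "Music Exercise Generator",
        },
        model=OPENROUTER_MODELS[model_name],
        messages=messages,
        temperature=temperature,
        max_tokens=max_tokens,
    )
    return completion.choices[0].message.content
```

Both branches could then call query_openrouter(model_name, messages, OPENROUTER_API_KEYS[model_name]) and keep their existing fallback handling.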
 