lllindsey0615 committed
Commit 4ec0a5d · 1 Parent(s): b041844

initial commit

Files changed (4)
  1. .gradio/certificate.pem +31 -0
  2. README.md +1 -1
  3. app.py +166 -0
  4. requirements.txt +14 -0
.gradio/certificate.pem ADDED
@@ -0,0 +1,31 @@
+-----BEGIN CERTIFICATE-----
+MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw
+TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
+cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4
+WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu
+ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc
+h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+
+0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U
+A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW
+T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH
+B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC
+B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv
+KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn
+OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn
+jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw
+qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI
+rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq
+hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL
+ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ
+3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK
+NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5
+ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur
+TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC
+jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq
+4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA
+mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d
+emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc=
+-----END CERTIFICATE-----
README.md CHANGED
@@ -4,7 +4,7 @@ emoji: 💻
 colorFrom: blue
 colorTo: indigo
 sdk: gradio
-sdk_version: 5.42.0
+sdk_version: 5.28.0
 app_file: app.py
 pinned: false
 short_description: ' Anticipatory Music Transformer wrapped in HARP'
app.py ADDED
@@ -0,0 +1,166 @@
+import gradio as gr
+import torch
+from dataclasses import asdict
+
+from pyharp.core import ModelCard, build_endpoint
+from pyharp.labels import LabelList
+
+from transformers import AutoModelForCausalLM
+from anticipation.sample import generate
+from anticipation.convert import events_to_midi, midi_to_events
+from anticipation import ops
+from anticipation.tokenize import extract_instruments
+
+# === Model Choices ===
+SMALL_MODEL = "stanford-crfm/music-small-800k"
+MEDIUM_MODEL = "stanford-crfm/music-medium-800k"
+LARGE_MODEL = "stanford-crfm/music-large-800k"
+
+# === HARP Model Card ===
+model_card = ModelCard(
+    name="Anticipatory Music Transformer",
+    description="Generate accompaniment for a monophonic melody using AMT. ZeroGPU + HARP-compatible.",
+    author="John Thickstun, David Hall, Chris Donahue, Percy Liang",
+    tags=["midi", "generation", "accompaniment", "transformer"]
+)
+
+# === Core AMT pipeline (ZeroGPU-friendly: loads model each request) ===
+def generate_accompaniment(
+    input_midi_path: str,
+    model_choice: str,
+    selected_midi_program: int,
+    history_length: float
+):
+    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+
+    # Load model per request (ZeroGPU ephemeral runtime)
+    model = AutoModelForCausalLM.from_pretrained(
+        model_choice,
+        torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32,
+        low_cpu_mem_usage=True
+    ).to(device)
+    model.eval()
+
+    # Parse input MIDI → events
+    events = midi_to_events(input_midi_path)
+    total_time = round(ops.max_time(events, seconds=True))
+
+    # Extract melody by MIDI program
+    events, melody = extract_instruments(events, [selected_midi_program])
+    if not melody:
+        return None, "No melody events found for the chosen MIDI program. Try another program number."
+
+    # Use initial history
+    history = ops.clip(events, 0, history_length, clip_duration=False)
+
+    # Generate accompaniment from history_length → total_time
+    accompaniment = generate(
+        model,
+        start_time=history_length,
+        end_time=total_time,
+        inputs=history,
+        controls=melody,
+        top_p=0.95,
+        debug=False
+    )
+
+    # Merge melody + accompaniment, clip to duration
+    output_events = ops.clip(
+        ops.combine(accompaniment, melody),
+        0,
+        total_time,
+        clip_duration=True
+    )
+
+    # Save to MIDI file
+    output_midi_path = "amt_output.mid"
+    mid = events_to_midi(output_events)
+    mid.save(output_midi_path)
+
+    return output_midi_path, None
+
+
+# === HARP process function ===
+def process_fn(input_midi_path, model_choice, selected_midi_program, history_length):
+    """
+    Inputs order must match the input_components order below.
+    - input_midi_path: filepath string (gr.File with type='filepath')
+    - model_choice: str
+    - selected_midi_program: int
+    - history_length: float
+    Returns:
+    - output_midi_path: str or None (gr.File expects a filepath)
+    - labels_json: dict-like (for gr.JSON)
+    """
+    output_midi_path, error_message = generate_accompaniment(
+        input_midi_path,
+        model_choice,
+        int(selected_midi_program),
+        float(history_length),
+    )
+
+    if error_message:
+        return None, {"message": error_message}
+
+    labels = LabelList()  # optional: fill with MidiLabel entries if you have them
+    return output_midi_path, asdict(labels)
+
+
+# === Gradio + HARP UI ===
+with gr.Blocks() as demo:
+    gr.Markdown("## 🎼 Anticipatory Music Transformer (ZeroGPU + HARP)")
+
+    # Inputs
+    input_midi = gr.File(
+        file_types=[".mid", ".midi"],
+        label="Input MIDI",
+        type="filepath",
+    ).harp_required(True)
+
+    model_dropdown = gr.Dropdown(
+        choices=[SMALL_MODEL, MEDIUM_MODEL, LARGE_MODEL],
+        value=MEDIUM_MODEL,
+        label="AMT Model",
+    )
+
+    midi_program_slider = gr.Slider(
+        minimum=0,
+        maximum=127,
+        step=1,
+        value=1,
+        label="Melody Instrument (MIDI Program #)"
+    )
+
+    history_slider = gr.Slider(
+        minimum=1,
+        maximum=30,
+        step=1,
+        value=5,
+        label="History Length (seconds)"
+    )
+
+    # Outputs
+    output_midi = gr.File(
+        file_types=[".mid", ".midi"],
+        label="Generated MIDI",
+        type="filepath",
+    )
+    output_labels = gr.JSON(label="Labels / Metadata")
+
+    # Build HARP endpoint
+    _ = build_endpoint(
+        model_card=model_card,
+        input_components=[
+            input_midi,
+            model_dropdown,
+            midi_program_slider,
+            history_slider,
+        ],
+        output_components=[
+            output_midi,
+            output_labels,
+        ],
+        process_fn=process_fn
+    )
+
+demo.queue().launch(show_error=True, debug=True)
requirements.txt ADDED
@@ -0,0 +1,14 @@
+# PyHARP from correct branch
+git+https://github.com/TEAMuP-dev/pyharp.git@cb/gen-inputs
+midi2audio == 0.1.1
+mido == 1.2.10
+numpy >= 1.22.4
+torch >= 2.0.1
+transformers == 4.29.2
+safetensors
+accelerate
+tqdm == 4.65.0
+
+# Additional dependencies for AMT
+git+https://github.com/jthickstun/anticipation.git
+
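
Before pushing, a quick sanity check can confirm that the pins above and the two Git installs resolve together in the target environment. The snippet below is a sketch, not part of the commit; it simply tries to import the core packages the app uses (gradio is provided by the Space's sdk_version rather than requirements.txt).

# Dependency sanity check (sketch; not part of the commit).
import importlib

for module in ("gradio", "torch", "transformers", "mido", "anticipation", "pyharp"):
    try:
        importlib.import_module(module)
        print(f"ok: {module}")
    except ImportError as exc:
        print(f"missing: {module} ({exc})")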