# NOTE: removed non-code scrape artifacts (Hugging Face Spaces page chrome,
# git blame hashes, and line-number gutter) that preceded the actual script.
import gradio as gr
import os
import allin1
import time
from pathlib import Path
# Static HTML banner rendered at the top of the demo page.
# Fix: "demonstrates the music structure analyzer predicts" was ungrammatical;
# reworded to "demonstrates what the music structure analyzer predicts".
HEADER = """
<header style="text-align: center;">
<h1>
All-In-One Music Structure Analyzer 🔮
</h1>
<p>
<a href="https://github.com/mir-aidj/all-in-one">[Python Package]</a>
<a href="https://arxiv.org/abs/2307.16425">[Paper]</a>
<a href="https://taejun.kim/music-dissector/">[Visual Demo]</a>
</p>
</header>
<main
style="display: flex; justify-content: center;"
>
<div
style="display: inline-block;"
>
<p>
This Space demonstrates what the music structure analyzer predicts:
<ul
style="padding-left: 1rem;"
>
<li>BPM</li>
<li>Beats</li>
<li>Downbeats</li>
<li>Functional segment boundaries</li>
<li>Functional segment labels (e.g. intro, verse, chorus, bridge, outro)</li>
</ul>
</p>
<p>
For more information, please visit the links above ✨🧸
</p>
</div>
</main>
"""
# Whether gr.Examples results should be pre-computed; enabled unless the
# CACHE_EXAMPLES env var is explicitly set to something other than '1'.
# (Currently only referenced by the commented-out gr.Examples section below.)
CACHE_EXAMPLES = os.getenv('CACHE_EXAMPLES', '1') == '1'
# NOTE(review): base_dir is never read anywhere in this file — presumably a
# leftover from an earlier Gradio upload-directory setup; confirm before removing.
base_dir = "/tmp/gradio/"
def analyze(path):
    """Run the all-in-one music structure analysis on an audio file.

    Parameters
    ----------
    path : str
        Filesystem path to the uploaded audio file (Gradio `filepath` input).

    Returns
    -------
    tuple
        (bpm, elapsed_time, bass_path, drums_path, other_path, vocals_path)
        where the four stem paths point at the demixed WAV files written
        under ./demix, or None if a stem was not produced.
    """
    # Measure wall-clock time for the whole inference.
    start = time.time()

    path = Path(path)
    result = allin1.analyze(
        path,
        out_dir='./struct',
        multiprocess=False,
        # keep_byproducts retains the demixed stems under ./demix so they can
        # be returned to the UI below.  TODO: remove once stems are no longer shown.
        keep_byproducts=True,
    )

    elapsed_time = time.time() - start

    # Locate the demixed stems produced by allin1 under ./demix.
    # Bug fix: previously this loop body was entirely commented out, so all
    # four paths were always returned as None and the stem players stayed empty.
    # NOTE(review): stems land in a per-track subdirectory; if multiple tracks
    # have been analyzed this picks whichever os.walk visits last — consider
    # filtering `root` by the track's stem name (path.stem).
    bass_path = drums_path = other_path = vocals_path = None
    for root, dirs, files in os.walk("./demix"):
        for file_name in files:
            file_path = os.path.join(root, file_name)
            if file_name == "bass.wav":
                bass_path = file_path
            elif file_name == "vocals.wav":
                vocals_path = file_path
            elif file_name == "other.wav":
                other_path = file_path
            elif file_name == "drums.wav":
                drums_path = file_path

    # Order must match the `outputs=` list wired to button.click below.
    return result.bpm, elapsed_time, bass_path, drums_path, other_path, vocals_path
# Build the Gradio UI.  Components instantiated inside the Blocks context
# manager are automatically registered on the page in declaration order.
with gr.Blocks() as demo:
    gr.HTML(HEADER)
    # Uploaded audio is passed to analyze() as a filesystem path.
    input_audio_path = gr.Audio(
        label='Input',
        source='upload',
        type='filepath',
        format='mp3',
        show_download_button=False,
    )
    button = gr.Button('Analyze', variant='primary')
    #output_viz = gr.Plot(label='Visualization')
    with gr.Row():
        output_bpm = gr.Textbox(label='BPM', scale=1)
        #output_sonif = gr.Audio(
        #  label='Sonification',
        #  type='filepath',
        #  format='mp3',
        #  show_download_button=False,
        #  scale=9,
        #)
        elapsed_time = gr.Textbox(label='Overall inference time', scale=1)
    # One audio player per demixed stem returned by analyze().
    with gr.Column():
        bass = gr.Audio(label='bass', show_share_button=False)
        vocals = gr.Audio(label='vocals', show_share_button=False)
        other = gr.Audio(label='other', show_share_button=False)
        drums = gr.Audio(label='drums', show_share_button=False)
    #bass_path = gr.Textbox(label='bass_path', scale=1)
    #drums_path = gr.Textbox(label='drums_path', scale=1)
    #other_path = gr.Textbox(label='other_path', scale=1)
    #vocals_path = gr.Textbox(label='vocals_path', scale=1)
    #gr.Examples(
    #  examples=[
    #    './assets/NewJeans - Super Shy.mp3',
    #    './assets/Bruno Mars - 24k Magic.mp3'
    #  ],
    #  inputs=input_audio_path,
    #  outputs=[output_bpm, output_viz, output_sonif],
    #  fn=analyze,
    #  cache_examples=CACHE_EXAMPLES,
    #)
    # NOTE: the outputs list order must match the tuple returned by analyze():
    # (bpm, elapsed_time, bass, drums, other, vocals).
    button.click(
        fn=analyze,
        inputs=input_audio_path,
        #outputs=[output_bpm, output_viz, output_sonif, elapsed_time],
        outputs=[output_bpm, elapsed_time, bass, drums, other, vocals],
        api_name='analyze',
    )

if __name__ == '__main__':
    demo.launch()