mirror of
https://github.com/JannisX11/blockbench.git
synced 2025-01-18 15:26:19 +08:00
Display sound keyframe waveform
Play keyframe audio when starting in timeline after keyframe
This commit is contained in:
parent
eec815b736
commit
2c5a015c89
@ -564,7 +564,7 @@
|
||||
z-index: 3;
|
||||
text-align: center;
|
||||
width: 13.5px;
|
||||
height: 22px;
|
||||
height: 23px;
|
||||
}
|
||||
#timeline_body .keyframe i {
|
||||
margin-top: 2px;
|
||||
@ -719,6 +719,18 @@
|
||||
flex-grow: 1;
|
||||
}
|
||||
|
||||
.keyframe .keyframe_waveform {
|
||||
height: 23px;
|
||||
width: 8000px;
|
||||
position: absolute;
|
||||
top: 0;
|
||||
}
|
||||
.keyframe .keyframe_waveform > polygon {
|
||||
fill: var(--color-grid);
|
||||
stroke: none;
|
||||
stroke-width: 0;
|
||||
}
|
||||
|
||||
|
||||
/*UV*/
|
||||
.UVEditor {
|
||||
|
@ -1048,6 +1048,9 @@
|
||||
@contextmenu.prevent="keyframe.showContextMenu($event)"
|
||||
>
|
||||
<i class="material-icons">stop</i>
|
||||
<svg class="keyframe_waveform" v-if="keyframe.channel == 'sound' && keyframe.file && waveforms[keyframe.file]" :style="{width: waveforms[keyframe.file].duration * size}">
|
||||
<polygon :points="getWaveformPoints(waveforms[keyframe.file].samples, size)"></polygon>
|
||||
</svg>
|
||||
</keyframe>
|
||||
</div>
|
||||
</div>
|
||||
|
@ -823,6 +823,31 @@ class EffectAnimator extends GeneralAnimator {
|
||||
})
|
||||
}
|
||||
}
|
||||
startPreviousSounds() {
|
||||
if (!this.muted.sound) {
|
||||
this.sound.forEach(kf => {
|
||||
if (kf.file && !kf.cooldown) {
|
||||
var diff = kf.time - Timeline.time;
|
||||
if (diff < 0 && Timeline.waveforms[kf.file] && Timeline.waveforms[kf.file].duration > -diff) {
|
||||
var media = new Audio(kf.file);
|
||||
window._media = media
|
||||
media.volume = Math.clamp(settings.volume.value/100, 0, 1);
|
||||
media.currentTime = -diff;
|
||||
media.play();
|
||||
Timeline.playing_sounds.push(media);
|
||||
media.onended = function() {
|
||||
Timeline.playing_sounds.remove(media);
|
||||
}
|
||||
|
||||
kf.cooldown = true;
|
||||
setTimeout(() => {
|
||||
delete kf.cooldown;
|
||||
}, 400)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
// Keyframe channels an EffectAnimator supports
EffectAnimator.prototype.channels = ['particle', 'sound', 'timeline']
|
||||
|
||||
|
@ -651,12 +651,14 @@ BARS.defineActions(function() {
|
||||
startpath: Timeline.selected[0].file
|
||||
}, function(files) {
|
||||
|
||||
let {path} = files[0];
|
||||
Undo.initEdit({keyframes: Timeline.selected})
|
||||
Timeline.selected.forEach((kf) => {
|
||||
if (kf.channel == 'sound') {
|
||||
kf.file = files[0].path;
|
||||
kf.file = path;
|
||||
}
|
||||
})
|
||||
Timeline.visualizeAudioFile(path);
|
||||
Undo.finishEdit('changed keyframe audio file')
|
||||
})
|
||||
}
|
||||
|
@ -488,6 +488,13 @@ const Timeline = {
|
||||
if (Animator.selected.loop == 'hold' && Timeline.time >= (Animator.selected.length||1e3)) {
|
||||
Timeline.setTime(0)
|
||||
}
|
||||
if (Timeline.time > 0) {
|
||||
Animator.animations.forEach(animation => {
|
||||
if (animation.playing && animation.animators.effects) {
|
||||
animation.animators.effects.startPreviousSounds();
|
||||
}
|
||||
})
|
||||
}
|
||||
Timeline.loop()
|
||||
},
|
||||
loop() {
|
||||
@ -526,6 +533,47 @@ const Timeline = {
|
||||
})
|
||||
Timeline.playing_sounds.empty();
|
||||
},
|
||||
|
||||
waveforms: {},
|
||||
waveform_sample_rate: 60,
|
||||
async visualizeAudioFile(path) {
|
||||
|
||||
if (!Timeline.waveforms[path]) {
|
||||
Timeline.waveforms[path] = {
|
||||
samples: [],
|
||||
duration: 0
|
||||
};
|
||||
}
|
||||
let {samples} = Timeline.waveforms[path];
|
||||
|
||||
let audioContext = new AudioContext()
|
||||
let response = await fetch(path);
|
||||
let arrayBuffer = await response.arrayBuffer();
|
||||
let audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
|
||||
let data_array = audioBuffer.getChannelData(0);
|
||||
|
||||
Timeline.waveforms[path].duration = audioBuffer.duration;
|
||||
|
||||
// Sample
|
||||
let sample_count = Math.ceil(audioBuffer.duration * Timeline.waveform_sample_rate);
|
||||
samples.splice(0, samples.length);
|
||||
for (var i = 0; i < sample_count; i++) {
|
||||
samples.push(0);
|
||||
}
|
||||
for (var i = 0; i < data_array.length; i++) {
|
||||
let sample_index = Math.floor((i / data_array.length) * sample_count);
|
||||
samples[sample_index] += Math.abs(data_array[i]);
|
||||
}
|
||||
|
||||
// Normalize
|
||||
let max = Math.max(...samples);
|
||||
samples.forEach((v, i) => samples[i] = v / max);
|
||||
|
||||
Timeline.vue.$forceUpdate();
|
||||
|
||||
return samples;
|
||||
},
|
||||
|
||||
get keyframes() {
|
||||
var keyframes = [];
|
||||
Timeline.animators.forEach(animator => {
|
||||
@ -559,6 +607,7 @@ onVueSetup(function() {
|
||||
timecodes: [],
|
||||
animators: Timeline.animators,
|
||||
markers: [],
|
||||
waveforms: Timeline.waveforms,
|
||||
focus_channel: null,
|
||||
playhead: Timeline.time
|
||||
},
|
||||
@ -572,6 +621,15 @@ onVueSetup(function() {
|
||||
getColor(index) {
|
||||
if (index == -1 || index == undefined) return;
|
||||
return markerColors[index].standard;
|
||||
},
|
||||
getWaveformPoints(samples, size) {
|
||||
let height = 23;
|
||||
let points = [`0,${height}`];
|
||||
samples.forEach((sample, i) => {
|
||||
points.push(`${(i + 0.5) / Timeline.waveform_sample_rate * size},${(1 - sample) * height}`);
|
||||
})
|
||||
points.push(`${(samples.length) / Timeline.waveform_sample_rate * size},${height}`)
|
||||
return points.join(' ');
|
||||
}
|
||||
}
|
||||
})
|
||||
|
Loading…
Reference in New Issue
Block a user