flatten midi recording code

🪞👃🪞 2024-12-28 21:40:18 +01:00
parent 198a730e33
commit 1d7d816899
5 changed files with 68 additions and 58 deletions

View file

@@ -93,7 +93,8 @@ from_edn!("sample" => |(_jack, dir): (&Arc<RwLock<JackClient>>, &str), args| ->
         start,
         end,
         channels: data,
-        rate: None
+        rate: None,
+        gain: 1.0
     }))))
 });

View file

@@ -64,19 +64,22 @@ audio!(|self: GrooveboxTui, client, scope|{
             self.editor.set_note_point(key.as_int() as usize);
         },
         MidiMessage::Controller { controller, value } => {
-            if controller == u7::from(20) {
-                if let Some(sample) = &self.sampler.mapped[self.editor.note_point()] {
-                    let mut sample = sample.write().unwrap();
-                    let percentage = value.as_int() as f64 / 127.;
-                    sample.start = (percentage * sample.end as f64) as usize;
-                }
-            } else if controller == u7::from(21) {
-                if let Some(sample) = &self.sampler.mapped[self.editor.note_point()] {
-                    let mut sample = sample.write().unwrap();
-                    let percentage = value.as_int() as f64 / 127.;
-                    let length = sample.channels[0].len();
-                    sample.end = sample.start + (percentage * (length as f64 - sample.start as f64)) as usize;
-                    sample.end = sample.end.min(length);
+            if let Some(sample) = &self.sampler.mapped[self.editor.note_point()] {
+                let mut sample = sample.write().unwrap();
+                let percentage = value.as_int() as f64 / 127.;
+                match controller.as_int() {
+                    20 => {
+                        sample.start = (percentage * sample.end as f64) as usize;
+                    },
+                    21 => {
+                        let length = sample.channels[0].len();
+                        sample.end = sample.start + (percentage * (length as f64 - sample.start as f64)) as usize;
+                        sample.end = sample.end.min(length);
+                    },
+                    24 => {
+                        sample.gain = percentage as f32 * 2.0;
+                    },
+                    _ => {}
                 }
             }
         }
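The new Controller arm above routes three CCs to the sample under the note point: CC 20 sets the start, CC 21 the end, and CC 24 the gain. A minimal standalone sketch of that mapping, using a hypothetical SampleRegion type and apply_cc helper standing in for the real Sample (names and values assumed, not part of this commit):

// Hypothetical stand-in for the Sample fields touched above.
struct SampleRegion { start: usize, end: usize, len: usize, gain: f32 }

// Same mapping as the match on controller.as_int() in the diff;
// the 0..=127 CC value is normalized to 0.0..=1.0 first.
fn apply_cc(region: &mut SampleRegion, controller: u8, value: u8) {
    let percentage = value as f64 / 127.;
    match controller {
        // CC 20: move the start point within the current end
        20 => region.start = (percentage * region.end as f64) as usize,
        // CC 21: place the end between start and the sample length, clamped
        21 => {
            region.end = region.start
                + (percentage * (region.len as f64 - region.start as f64)) as usize;
            region.end = region.end.min(region.len);
        },
        // CC 24: gain from 0.0 (CC 0) up to 2.0 (CC 127)
        24 => region.gain = percentage as f32 * 2.0,
        _ => {}
    }
}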

View file

@@ -8,46 +8,6 @@ pub trait MidiRecordApi: HasClock + HasPlayPhrase + HasMidiIns {
     fn toggle_record (&mut self) {
         *self.recording_mut() = !self.recording();
     }
-    fn record (&mut self, scope: &ProcessScope, midi_buf: &mut Vec<Vec<Vec<u8>>>) {
-        let sample0 = scope.last_frame_time() as usize;
-        // For highlighting keys and note repeat
-        let notes_in = self.notes_in().clone();
-        if self.clock().is_rolling() {
-            if let Some((started, ref phrase)) = self.play_phrase().clone() {
-                let start = started.sample.get() as usize;
-                let quant = self.clock().quant.get();
-                let timebase = self.clock().timebase().clone();
-                let monitoring = self.monitoring();
-                let recording = self.recording();
-                for input in self.midi_ins_mut().iter() {
-                    for (sample, event, bytes) in parse_midi_input(input.iter(scope)) {
-                        if let LiveEvent::Midi { message, .. } = event {
-                            if monitoring {
-                                midi_buf[sample].push(bytes.to_vec())
-                            }
-                            if recording {
-                                if let Some(phrase) = phrase {
-                                    let mut phrase = phrase.write().unwrap();
-                                    let length = phrase.length;
-                                    phrase.record_event({
-                                        let sample = (sample0 + sample - start) as f64;
-                                        let pulse = timebase.samples_to_pulse(sample);
-                                        let quantized = (pulse / quant).round() * quant;
-                                        quantized as usize % length
-                                    }, message);
-                                }
-                            }
-                            update_keys(&mut notes_in.write().unwrap(), &message);
-                        }
-                    }
-                }
-            }
-            if let Some((start_at, _clip)) = &self.next_phrase() {
-                // TODO switch to next phrase and record into it
-            }
-        }
-    }
     fn monitoring (&self) -> bool;
     fn monitoring_mut (&mut self) -> &mut bool;
     fn toggle_monitor (&mut self) {
@@ -65,7 +25,52 @@ pub trait MidiRecordApi: HasClock + HasPlayPhrase + HasMidiIns {
             }
         }
     }
+    fn record (&mut self, scope: &ProcessScope, midi_buf: &mut Vec<Vec<Vec<u8>>>) {
+        if self.monitoring() {
+            self.monitor(scope, midi_buf);
+        }
+        if !self.clock().is_rolling() {
+            return
+        }
+        if let Some((started, ref clip)) = self.play_phrase().clone() {
+            self.record_clip(scope, started, clip, midi_buf);
+        }
+        if let Some((start_at, phrase)) = &self.next_phrase() {
+            self.record_next();
+        }
+    }
+    fn record_clip (
+        &mut self,
+        scope: &ProcessScope,
+        started: Moment,
+        phrase: &Option<Arc<RwLock<MidiClip>>>,
+        midi_buf: &mut Vec<Vec<Vec<u8>>>
+    ) {
+        let sample0 = scope.last_frame_time() as usize;
+        let start = started.sample.get() as usize;
+        let recording = self.recording();
+        let timebase = self.clock().timebase().clone();
+        let quant = self.clock().quant.get();
+        if let Some(phrase) = phrase {
+            let mut phrase = phrase.write().unwrap();
+            let length = phrase.length;
+            for input in self.midi_ins_mut().iter() {
+                for (sample, event, bytes) in parse_midi_input(input.iter(scope)) {
+                    if let LiveEvent::Midi { message, .. } = event {
+                        phrase.record_event({
+                            let sample = (sample0 + sample - start) as f64;
+                            let pulse = timebase.samples_to_pulse(sample);
+                            let quantized = (pulse / quant).round() * quant;
+                            quantized as usize % length
+                        }, message);
+                    }
+                }
+            }
+        }
+    }
+    fn record_next (&mut self) {
+        // TODO switch to next clip and record into it
+    }
     fn overdub (&self) -> bool;
     fn overdub_mut (&mut self) -> &mut bool;
     fn toggle_overdub (&mut self) {
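Taken together, this file's change replaces one deeply nested record method with three smaller ones: record dispatches, record_clip quantizes incoming events into the playing clip, and record_next remains a stub. A rough sketch of the resulting control flow, with the clock, scope, and MIDI details hidden behind a placeholder trait (names here are illustrative, not the crate's real API):

// Placeholder trait capturing only the control flow of the flattened record().
// In the real code, record_clip and record_next are additionally guarded by
// Option checks on the playing and queued clip.
trait RecordFlow {
    fn monitoring(&self) -> bool;
    fn rolling(&self) -> bool;
    fn monitor(&mut self);      // pass-through of live input
    fn record_clip(&mut self);  // quantized write into the playing clip
    fn record_next(&mut self);  // TODO in the commit: switch to the queued clip

    fn record(&mut self) {
        // Monitoring happens regardless of transport state.
        if self.monitoring() {
            self.monitor();
        }
        // Nothing is recorded while the clock is stopped.
        if !self.rolling() {
            return;
        }
        // Write into the current clip, then (eventually) into the queued one.
        self.record_clip();
        self.record_next();
    }
}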

View file

@@ -9,6 +9,7 @@ pub struct Sample {
     pub end: usize,
     pub channels: Vec<Vec<f32>>,
     pub rate: Option<usize>,
+    pub gain: f32,
 }
 /// Load sample from WAV and assign to MIDI note.
@@ -24,7 +25,7 @@ pub struct Sample {
 impl Sample {
     pub fn new (name: &str, start: usize, end: usize, channels: Vec<Vec<f32>>) -> Self {
-        Self { name: name.to_string(), start, end, channels, rate: None }
+        Self { name: name.to_string(), start, end, channels, rate: None, gain: 1.0 }
     }
     pub fn play (sample: &Arc<RwLock<Self>>, after: usize, velocity: &u7) -> Voice {
         Voice {

View file

@@ -21,8 +21,8 @@ impl Iterator for Voice {
             let position = self.position;
             self.position += 1;
             return sample.channels[0].get(position).map(|_amplitude|[
-                sample.channels[0][position] * self.velocity,
-                sample.channels[0][position] * self.velocity,
+                sample.channels[0][position] * self.velocity * sample.gain,
+                sample.channels[0][position] * self.velocity * sample.gain,
            ])
         }
         None
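In the last hunk, gain now scales both output channels alongside velocity, so each frame is amplitude * velocity * gain. A tiny worked check with assumed values (not taken from the commit):

fn main() {
    let amplitude = 0.5_f32; // sample.channels[0][position]
    let velocity  = 1.0_f32; // note velocity, assumed already normalized
    let gain      = 1.5_f32; // sample.gain, set via CC 24 (0.0..=2.0)
    let frame = [amplitude * velocity * gain, amplitude * velocity * gain];
    assert_eq!(frame, [0.75, 0.75]); // both channels carry the same mono source
}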