# ChocolateBird/scripts/MusicRenderer.gd

#warning-ignore-all:shadowed_variable
extends Node
const music := preload('res://scripts/loaders/snes/music_ff5.gd')
const EventType := music.EventType
var MUSIC := music.new()
const NUM_TRACKS := 8 # TODO
const MAX_NOTE_EVENTS := 4096
class NoteEvent:
    var p_event_start: int # In pulse space
    var p_note_start: int # For tied notes, this will be earlier than p_event_start and is used for envelope calculations
    var p_end: int
    var instrument: int
    var pitch: int = 0
    var velocity: float = 0.0
    var adsr_attack_rate: int
    var adsr_decay_rate: int
    var adsr_decay_total_periods: int
    var adsr_sustain_rate: int
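    # Note: a NoteEvent with p_end < p_event_start is not a real note - it is the sentinel
    # appended for a channel that ends in a GOTO (infinite loop), with p_end holding the
    # pulse to jump back to. See the EventType.GOTO handling in render_channels() below.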
class TrackCurve: # built-in Curve class is too restrictive for this
    var default: float
    var entries: PoolVector3Array
    var baked_integrals: PoolRealArray
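    # Each entry is a Vector3 packed as (pulse, value, ramp_to_next):
    #   x = pulse position, y = value from that pulse onward,
    #   z != 0 means the value ramps linearly towards the next entry instead of holding.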
    func _init(default: float = 0.0):
        self.default = default
        self.entries = PoolVector3Array()
        self.baked_integrals = PoolRealArray()
    func add_point(pulse: int, value: float, ramp_to_next: bool = false) -> void:
        var l := len(self.entries)
        var entry := Vector3(float(pulse), value, float(ramp_to_next))
        if l == 0 or self.entries[-1].x < pulse:
            self.entries.append(entry)
        else: # Find the first entry bigger than pulse, and insert before
            for i in l:
                if self.entries[i].x == pulse:
                    self.entries[i] = entry # Replace existing, this makes sense for the VOLUME -> VOLUME_SLIDE pattern
                    break # Don't also insert a duplicate at the next (larger) entry
                elif self.entries[i].x > pulse:
                    self.entries.insert(i, entry)
                    break
    var last_pulse_block_get: int = -1 # Cache previous position for sequential lookups
    func get_pulse(pulse: float) -> float:
        var l := len(self.entries)
        if l == 0 or pulse < self.entries[0].x:
            return self.default
        if pulse >= self.entries[-1].x:
            return self.entries[-1].y
        for i in l-1: # i indexes the earlier entry of each consecutive pair
            # Find first entry beyond
            if pulse < self.entries[i+1].x:
                if self.entries[i].z > 0: # ramp_to_next
                    return range_lerp(pulse, self.entries[i].x, self.entries[i+1].x, self.entries[i].y, self.entries[i+1].y)
                else:
                    return self.entries[i].y
        return self.default # Should be unreachable
    func bake_integrals():
        # Store the starting integrated value (i.e. time for the tempo curve) of each pulse value
        self.baked_integrals.clear()
        var last_pulse := 0.0
        var last_value := self.default
        var last_integral := 0.0
        var last_ramp := false
        for entry in self.entries:
            var step_pulse = entry.x - last_pulse
            var integral := last_integral
            if last_ramp:
                # Treat it as a rectangle where the height is the average of the slanted top.
                integral += step_pulse * (last_value + entry.y)/2.0
            else:
                integral += step_pulse * last_value
            self.baked_integrals.append(integral)
            last_pulse = entry.x
            last_value = entry.y
            last_integral = integral
            last_ramp = entry.z > 0
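    # For the master tempo curve the stored values come from music.tempo_to_seconds_per_pulse(),
    # so get_integral(pulse) below yields the elapsed time in seconds at that pulse; the second
    # render pass multiplies the result by 32000 to get 32 kHz sample-frame offsets.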
    var last_integral_block_get: int = -1 # Cache previous position for sequential lookups
    func get_integral(pulse: float) -> float:
        # This is for tempo -> time. Need to bake it to have any hope of efficiency.
        if self.baked_integrals.empty():
            self.bake_integrals()
        # Search backwards for the last entry earlier than the pulse
        for i in range(len(self.entries)-1, -1, -1):
            var entry = self.entries[i]
            if pulse > entry.x:
                var integral = self.baked_integrals[i]
                var step_pulse = pulse - entry.x
                if entry.z: # Ramp to next
                    # Trapezoid from entry.x to pulse: average the starting value and the value reached at pulse.
                    # If the last entry somehow has ramp-to-next (it shouldn't), this will out-of-range error
                    var value_at_pulse = range_lerp(pulse, entry.x, self.entries[i+1].x, entry.y, self.entries[i+1].y)
                    integral += step_pulse * (entry.y + value_at_pulse)/2.0
                else:
                    integral += step_pulse * entry.y
                return integral
        return 0.0
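# Example TrackCurve usage (illustrative values):
#   var curve := TrackCurve.new(1.0)
#   curve.add_point(0, 0.5)         # step to 0.5 at pulse 0
#   curve.add_point(48, 0.5, true)  # hold 0.5 until pulse 48, then ramp towards the next point
#   curve.add_point(96, 1.0)        # ...reaching 1.0 at pulse 96
#   curve.get_pulse(72.0)           # == 0.75, halfway up the ramp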
static func get_gcd(a: int, b: int) -> int: # Greatest Common Divisor of two numbers
    # Euclidean reduction
    var c
    while b:
        c = b
        b = a % b
        a = c
    return a
static func get_lcm(a: int, b: int) -> int: # Least Common Multiple of two numbers
    return (a * b) / get_gcd(a, b)
static func get_lcm_n(values: Array) -> int: # Least Common Multiple of an array of numbers
    var lcm: int = values.pop_back()
    for value in values:
        lcm = get_lcm(lcm, value)
    return lcm
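# Example (illustrative): get_lcm_n([96, 144, 384]) == 1152.
# Note that get_lcm_n() consumes the last element of `values` via pop_back(), so pass a
# duplicate() if the caller still needs the original array.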
const LOOP_OVERSHOOT := 768
static func render_channels(tracks: Array, inst_map: Array, _debug_name := 'none') -> Array: # [data: PoolByteArray, target_time_length: float in seconds, t_loop_endpoints: Vector2 of loop start/end times in seconds]
    # Since some channels contain global events (tempo and global volume for now),
    # the strategy is to preprocess each channel in a global-state-agnostic way,
    # then, once all the global tracks are known, as well as the longest unlooped length,
    # do a second pass to generate the final events.
    # self.print_channel_events(inst_map)
    var all_adsr_decay_total_periods: PoolIntArray = SoundLoader.all_adsr_decay_total_periods
    var sample_default_adsrs = RomLoader.snes_data.sfx_adsrs + RomLoader.snes_data.bgm_instrument_adsrs # TODO: UNHARDCODE THIS
    var all_note_events = []
    var curve_master_volume := TrackCurve.new(1.0) # [0.0, 1.0] for now
    var curve_master_tempo := TrackCurve.new(120.0) # bpm is too big, need pulses per second
    var curve_channel_pans := []
    for channel in NUM_TRACKS:
        var curve_velocity := TrackCurve.new(1.0) # [0.0, 1.0] for now
        var curve_pan := TrackCurve.new() # [-1.0, 1.0] for now
        # Stored and unused for now
        var curve_fine_tuning := TrackCurve.new() # [0.0, 1.0] for now
        var curve_vibrato_on := TrackCurve.new() # [0.0, 1.0] for now
        var curve_vibrato_delay := TrackCurve.new()
        var curve_vibrato_rate := TrackCurve.new()
        var curve_vibrato_depth := TrackCurve.new()
        var curve_tremolo_on := TrackCurve.new() # [0.0, 1.0] for now
        var curve_tremolo_delay := TrackCurve.new()
        var curve_tremolo_rate := TrackCurve.new()
        var curve_tremolo_depth := TrackCurve.new()
        var curve_pan_lfo_on := TrackCurve.new() # [0.0, 1.0] for now
        var curve_pan_lfo_rate := TrackCurve.new()
        var curve_pan_lfo_depth := TrackCurve.new()
        var curve_noise_on := TrackCurve.new() # [0.0, 1.0] for now
        var curve_noise_freq := TrackCurve.new()
        var curve_pitchmod_on := TrackCurve.new() # [0.0, 1.0] for now
        var curve_echo_on := TrackCurve.new() # [0.0, 1.0] for now
        var curve_echo_volume := TrackCurve.new()
        var channel_note_events = []
        var track: Array = tracks[channel]
        var l := len(track)
        var p := 0 # current pulse
        if l == 0: # Empty channel, move on
            all_note_events.append(channel_note_events)
            curve_channel_pans.append(curve_pan)
            continue
        # var num_notes: int = 0
        var current_instrument := 0
        var current_octave := 5
        var current_transpose := 0
        # var current_velocity := 255
        var current_adsr_attack_rate := 0
        var current_adsr_decay_rate := 0
        var current_adsr_decay_total_periods := 0
        var current_adsr_sustain_rate := 0
        # First, check if it ends in a GOTO, then store the program counter of the destination
        var infinite_loop_target_program_counter = -1
        var infinite_loop_target_pulse = -1
        if track[-1][0] == EventType.GOTO:
            infinite_loop_target_program_counter = track[-1][1]
        var program_counter := 0
        var last_note_pretransform_pitch := -2
        var last_untied_note_p_start := 0
        while true: #num_notes < MAX_NOTE_EVENTS:
            if program_counter >= l:
                break
            if program_counter == infinite_loop_target_program_counter:
                infinite_loop_target_pulse = p
            var event = track[program_counter]
            program_counter += 1
            match event[0]: # Control codes
                EventType.GOTO: # This is a preprocessed event list, so GOTO is a final infinite loop marker
                    var note_event = NoteEvent.new()
                    note_event.p_event_start = p
                    note_event.p_end = infinite_loop_target_pulse # Fake final note event using p_event_start > p_end to encode the infinite jump back loop.
                    # Note that event[1] points to an Event, not a NoteEvent, not a Pulse, so we looked it up earlier
                    channel_note_events.append(note_event)
                    break
                EventType.MASTER_VOLUME:
                    curve_master_volume.add_point(p, event[1]/255.0, false)
                EventType.TEMPO:
                    var new_tempo = music.tempo_to_seconds_per_pulse(event[1])
                    curve_master_tempo.add_point(p, new_tempo, false)
                EventType.TEMPO_SLIDE:
                    var old_tempo = curve_master_tempo.get_pulse(p)
                    var new_tempo = music.tempo_to_seconds_per_pulse(event[2])
                    var slide_duration: int = event[1] # TODO: work out how this is scaled
                    curve_master_tempo.add_point(p, old_tempo, true)
                    curve_master_tempo.add_point(p + slide_duration, new_tempo, false)
                EventType.NOTE:
                    var note = event[1]
                    var duration = event[2]
                    var note_event = NoteEvent.new()
                    note_event.p_event_start = p
                    note_event.p_note_start = p
                    note_event.p_end = p + duration
                    note_event.instrument = current_instrument
                    note_event.adsr_attack_rate = current_adsr_attack_rate
                    note_event.adsr_decay_rate = current_adsr_decay_rate
                    note_event.adsr_decay_total_periods = current_adsr_decay_total_periods
                    note_event.adsr_sustain_rate = current_adsr_sustain_rate
                    if note >= 0: # Don't shift or play rests
                        last_note_pretransform_pitch = note # Ties reuse this
                        last_untied_note_p_start = p
                        note += (12 * current_octave) + current_transpose
                        note_event.pitch = note # pitch_idx #* curve_fine_tuning
                        note_event.velocity = curve_velocity.get_pulse(p) # current_velocity
                    elif note == music.NOTE_IS_TIE:
                        if last_note_pretransform_pitch >= 0:
                            note = last_note_pretransform_pitch + (12 * current_octave) + current_transpose
                            note_event.p_note_start = last_untied_note_p_start
                            note_event.pitch = note # pitch_idx #* curve_fine_tuning
                            note_event.velocity = curve_velocity.get_pulse(p) # current_velocity
                    channel_note_events.append(note_event)
                    p += duration
                EventType.VOLUME:
                    var new_velocity: float = event[1]/255.0
                    curve_velocity.add_point(p, new_velocity, false)
                EventType.VOLUME_SLIDE: # TODO: implement slides
                    var old_velocity = curve_velocity.get_pulse(p)
                    var slide_duration: int = event[1]
                    var new_velocity: float = event[2]/255.0
                    curve_velocity.add_point(p, old_velocity, true)
                    curve_velocity.add_point(p + slide_duration, new_velocity, false)
                EventType.PAN:
                    var new_pan = 1.0 - event[1]/127.5
                    curve_pan.add_point(p, new_pan, false)
                EventType.PAN_SLIDE: # TODO: implement slides
                    var old_pan = curve_pan.get_pulse(p)
                    var new_pan = 1.0 - event[2]/127.5
                    var slide_duration: int = event[1] # TODO: work out how slides are scaled
                    curve_pan.add_point(p, old_pan, true)
                    curve_pan.add_point(p + slide_duration, new_pan, false)
                EventType.PITCH_SLIDE: # TODO: implement slides
                    var slide_duration: int = event[1]
                    var target_pitch: int = event[2] # Signed
                EventType.OCTAVE:
                    current_octave = event[1]
                EventType.OCTAVE_UP:
                    current_octave += 1
                EventType.OCTAVE_DOWN:
                    current_octave -= 1
                EventType.TRANSPOSE_ABS:
                    current_transpose = event[1]
                EventType.TRANSPOSE_REL:
                    current_transpose += event[1]
                EventType.TUNING:
                    var fine_tune: int = event[1]
                    var scale: float
                    if fine_tune < 0x80:
                        scale = 1.0 + fine_tune/255.0
                    else:
                        scale = fine_tune/255.0
                    curve_fine_tuning.add_point(p, scale)
                EventType.PROGCHANGE:
                    current_instrument = event[1]
                    if current_instrument >= 0x20:
                        current_instrument = inst_map[current_instrument-0x20] - 1 + SoundLoader.SFX_NUM
                    if current_instrument < len(sample_default_adsrs) and current_instrument > 0:
                        var adsr = sample_default_adsrs[current_instrument]
                        current_adsr_attack_rate = adsr.attack_rate
                        current_adsr_decay_rate = adsr.decay_rate
                        current_adsr_sustain_rate = adsr.sustain_rate
                        current_adsr_decay_total_periods = all_adsr_decay_total_periods[adsr.sustain_level]
                EventType.ADSR_DEFAULT: # TODO - Investigate actual scaling and order
                    if current_instrument < len(sample_default_adsrs) and current_instrument > 0:
                        var adsr = sample_default_adsrs[current_instrument]
                        current_adsr_attack_rate = adsr.attack_rate
                        current_adsr_decay_rate = adsr.decay_rate
                        current_adsr_sustain_rate = adsr.sustain_rate
                        current_adsr_decay_total_periods = all_adsr_decay_total_periods[adsr.sustain_level]
                EventType.ADSR_ATTACK_RATE:
                    current_adsr_attack_rate = event[1]
                EventType.ADSR_DECAY_RATE:
                    current_adsr_decay_rate = event[1]
                EventType.ADSR_SUSTAIN_LEVEL:
                    current_adsr_decay_total_periods = all_adsr_decay_total_periods[event[1]]
                EventType.ADSR_SUSTAIN_RATE:
                    current_adsr_sustain_rate = event[1]
                EventType.VIBRATO_ON:
                    curve_vibrato_delay.add_point(p, event[1])
                    curve_vibrato_rate.add_point(p, event[2])
                    curve_vibrato_depth.add_point(p, event[3])
                    curve_vibrato_on.add_point(p, 1)
                EventType.VIBRATO_OFF:
                    curve_vibrato_on.add_point(p, 0)
                EventType.TREMOLO_ON:
                    curve_tremolo_delay.add_point(p, event[1])
                    curve_tremolo_rate.add_point(p, event[2])
                    curve_tremolo_depth.add_point(p, event[3])
                    curve_tremolo_on.add_point(p, 1)
                EventType.TREMOLO_OFF:
                    curve_tremolo_on.add_point(p, 0)
                EventType.PAN_LFO_ON:
                    curve_pan_lfo_depth.add_point(p, event[1])
                    curve_pan_lfo_rate.add_point(p, event[2])
                    curve_pan_lfo_on.add_point(p, 1)
                EventType.PAN_LFO_OFF:
                    curve_pan_lfo_on.add_point(p, 0)
                EventType.NOISE_FREQ:
                    curve_noise_freq.add_point(p, event[1])
                EventType.NOISE_ON:
                    curve_noise_on.add_point(p, 1)
                EventType.NOISE_OFF:
                    curve_noise_on.add_point(p, 0)
                EventType.PITCHMOD_ON:
                    curve_pitchmod_on.add_point(p, 1)
                EventType.PITCHMOD_OFF:
                    curve_pitchmod_on.add_point(p, 0)
                EventType.ECHO_ON:
                    curve_echo_on.add_point(p, 1)
                EventType.ECHO_OFF:
                    curve_echo_on.add_point(p, 0)
                EventType.ECHO_VOLUME:
                    curve_echo_volume.add_point(p, event[1])
                EventType.ECHO_VOLUME_SLIDE:
                    var slide_duration: int = event[1] # TODO: work out how slides are scaled
                    var old_echo_volume = curve_echo_volume.get_pulse(p)
                    var new_echo_volume = event[2]
                    curve_echo_volume.add_point(p, old_echo_volume, true)
                    curve_echo_volume.add_point(p + slide_duration, new_echo_volume)
                EventType.ECHO_FEEDBACK_FIR: # TODO
                    var feedback: int = event[1]
                    var filterIndex: int = event[2]
                EventType.END:
                    break
                _:
                    break
        # End of track
        if len(channel_note_events) > (MAX_NOTE_EVENTS-2):
            print('%s channel %d has too many note events! %d is more than %d' % [_debug_name, channel, len(channel_note_events), MAX_NOTE_EVENTS-2])
        all_note_events.append(channel_note_events)
        curve_channel_pans.append(curve_pan)
    # Integrate tempo so we can get a pulse->time mapping
    curve_master_tempo.bake_integrals()
    # Find the longest channel
    var channel_loop_p_returns := PoolIntArray()
    var channel_loop_p_lengths := PoolIntArray()
    var channel_p_ends := PoolIntArray()
    var longest_channel_idx = 0
    var longest_channel_p_end = 0
    var highest_channel_p_return = -1
    for channel in NUM_TRACKS:
        if all_note_events[channel].empty():
            channel_loop_p_returns.append(-1)
            channel_loop_p_lengths.append(0)
            channel_p_ends.append(0)
            continue
        var note_event: NoteEvent = all_note_events[channel][-1]
        var p_end = note_event.p_end
        if p_end < note_event.p_event_start:
            # Ends on infinite loop
            channel_loop_p_returns.append(p_end)
            channel_loop_p_lengths.append(note_event.p_event_start - p_end)
            if p_end > highest_channel_p_return:
                highest_channel_p_return = p_end
            p_end = note_event.p_event_start
        else:
            channel_loop_p_returns.append(-1)
            channel_loop_p_lengths.append(0)
        channel_p_ends.append(p_end)
        if p_end > longest_channel_p_end:
            longest_channel_p_end = p_end
            longest_channel_idx = channel
    var target_pulse_length = longest_channel_p_end + LOOP_OVERSHOOT
    var target_time_length = curve_master_tempo.get_integral(target_pulse_length)
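    # target_time_length is the render length in seconds: the longest channel's end pulse
    # plus LOOP_OVERSHOOT pulses of tail, converted through the integrated tempo curve.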
    # # DEBUG: calculate LCM of the loop lengths to determine required length for a true loop point
    # var unique_loop_lengths := []
    # # TODO: find common loop pulses
    # for loop_length in channel_loop_p_lengths:
    #     if loop_length > 0 and not (loop_length in unique_loop_lengths):
    #         unique_loop_lengths.append(loop_length)
    # if unique_loop_lengths:
    #     var loop_length_lcm = get_lcm_n(unique_loop_lengths)
    #     var p_overall_loop_start = highest_channel_p_return #+ LOOP_OVERSHOOT
    #     var p_overall_loop_end = p_overall_loop_start + loop_length_lcm
    #     print('%s has lcm loop length %d pulses from loop lengths %s, from %d to %d.' % [_debug_name, loop_length_lcm, channel_loop_p_lengths, p_overall_loop_start, p_overall_loop_end])
    # else:
    #     print('%s does not loop' % _debug_name)
    # Second pass - encode the notes with the now-known global tempo and volume curves
    var data := PoolByteArray()
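    # Output layout: for each of the NUM_TRACKS channels, five fixed-size blocks of
    # MAX_NOTE_EVENTS 32-bit entries are appended to `data`, in this order:
    #   1. t_event_start - event start time in 32 kHz sample frames
    #   2. t_note_start  - note start time (earlier than t_event_start for tied notes)
    #   3. packed bytes  - instrument, pitch, velocity, pan (one byte each)
    #   4. packed ADSR   - attack rate, decay rate, decay total periods, sustain rate
    #   5. reserved      - currently zero-filled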
    for channel in NUM_TRACKS:
        var events = all_note_events[channel]
        var loop_return_note_event_idx = -1
        var loop_return_p = channel_loop_p_returns[channel]
        var curve_pan: TrackCurve = curve_channel_pans[channel]
        var midi_events_bytes_t_event_start := StreamPeerBuffer.new()
        var midi_events_bytes_t_note_start := StreamPeerBuffer.new()
        # var midi_events_bytes_t_end := StreamPeerBuffer.new()
        var midi_events_bytes2 := StreamPeerBuffer.new()
        var midi_events_bytes_adsr := StreamPeerBuffer.new()
        var midi_events_bytes_adsr2 := StreamPeerBuffer.new()
        var num_notes: int = 0
        var event_ptr := 0
        var l_events := len(events)
        var loop_p_offset := 0
        for i in MAX_NOTE_EVENTS:
            if event_ptr >= l_events:
                break
            if (loop_return_p >= 0) and event_ptr == l_events-1:
                event_ptr = loop_return_note_event_idx
                loop_p_offset += channel_loop_p_lengths[channel]
            var event: NoteEvent = events[event_ptr]
            var p = event.p_event_start
            if loop_return_note_event_idx < 0 and p >= loop_return_p:
                loop_return_note_event_idx = event_ptr
            midi_events_bytes_t_event_start.put_32(int(curve_master_tempo.get_integral(p + loop_p_offset) * 32000))
            midi_events_bytes_t_note_start.put_32(int(curve_master_tempo.get_integral(event.p_note_start + loop_p_offset) * 32000))
            # midi_events_bytes_t_end.put_32(int(curve_master_tempo.get_integral(event.p_end + loop_p_offset) * 32000)) # t_end
            midi_events_bytes2.put_u8(event.instrument)
            midi_events_bytes2.put_u8(event.pitch)
            midi_events_bytes2.put_u8(int(event.velocity * curve_master_volume.get_pulse(p) * 255.0)) # velocity
            midi_events_bytes2.put_u8(int((curve_pan.get_pulse(p)+1.0) * 127.5)) # pan
            midi_events_bytes_adsr.put_u8(event.adsr_attack_rate)
            midi_events_bytes_adsr.put_u8(event.adsr_decay_rate)
            midi_events_bytes_adsr.put_u8(event.adsr_decay_total_periods)
            midi_events_bytes_adsr.put_u8(event.adsr_sustain_rate)
            midi_events_bytes_adsr2.put_32(0)
            event_ptr += 1
            num_notes += 1
        # Fill up end of notes array with dummies
        var last_note_end: int = 0
        if events:
            last_note_end = int(curve_master_tempo.get_integral(events[-1].p_end + loop_p_offset) * 32000)
        for i in range(num_notes, MAX_NOTE_EVENTS):
            midi_events_bytes_t_event_start.put_32(last_note_end)
            midi_events_bytes_t_note_start.put_32(last_note_end)
            # midi_events_bytes_t_end.put_32(0x0FFFFFFF)
            midi_events_bytes2.put_32(0)
            midi_events_bytes_adsr.put_32(0)
            midi_events_bytes_adsr2.put_32(0)
        # data += midi_events_bytes_t_event_start.data_array + midi_events_bytes_t_end.data_array + midi_events_bytes2.data_array + midi_events_bytes_adsr.data_array + midi_events_bytes_t_note_start.data_array
        data += midi_events_bytes_t_event_start.data_array + midi_events_bytes_t_note_start.data_array + midi_events_bytes2.data_array + midi_events_bytes_adsr.data_array + midi_events_bytes_adsr2.data_array
    var t_loop_endpoints := Vector2(-1, -1)
    if highest_channel_p_return >= 0:
        t_loop_endpoints = Vector2(curve_master_tempo.get_integral(highest_channel_p_return + 100), curve_master_tempo.get_integral(longest_channel_p_end + 100))
    return [data, target_time_length, t_loop_endpoints]
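# Illustrative call site (hypothetical names - the real tracks/inst_map come from the BGM loader):
#   var rendered = MusicRenderer.render_channels(bgm_tracks, bgm_inst_map, 'bgm_01')
#   var note_data: PoolByteArray = rendered[0]
#   var length_seconds: float = rendered[1]
#   var loop_endpoints: Vector2 = rendered[2] # (loop start, loop end) in seconds, (-1, -1) if the song does not loop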
static func disassemble_channel_events(channel_events: Array, inst_map: Array) -> PoolStringArray:
    var output := PoolStringArray()
    var p := 0 # current pulse
    for event in channel_events:
        var print_str := 'p=%6d : %s '%[p, EventType.keys()[event[0]]]
        var print_str2 := str(event.slice(1, -1))
        match event[0]:
            EventType.NOTE:
                var note = event[1]
                var duration = event[2]
                match note:
                    music.NOTE_IS_REST:
                        output.append('p=%6d : NOTE_REST %d pulses'%[p, duration])
                    music.NOTE_IS_TIE:
                        output.append('p=%6d : NOTE_TIE %d pulses'%[p, duration])
                    _:
                        output.append(print_str + print_str2)
                p += duration
            EventType.PROGCHANGE:
                var event_idx = event[1]
                if event_idx >= 0x20:
                    output.append(print_str + '($%02x) = instrument %02d'%[event_idx, inst_map[event_idx-0x20] - 1])
                else:
                    output.append(print_str + 'sfx %d'%event_idx)
            _:
                output.append(print_str + print_str2)
    return output
static func disassemble_bgm(tracks: Array, inst_map: Array) -> PoolStringArray:
    var output := PoolStringArray()
    var channel := 0
    for channel_events in tracks:
        output.append('================Channel %d================'%channel)
        channel += 1
        output.append_array(disassemble_channel_events(channel_events, inst_map))
    return output