|
|
|
|
|
class TampiaAI { |
|
|
constructor() {
  // Selected audio files (File objects); both must be set before the
  // analyze button is enabled.
  this.vocalFile = null;
  this.instrumentalFile = null;

  // Web Audio context; created lazily in analyzeAudio(), not here.
  this.audioContext = null;

  // Wire up DOM event handlers immediately (instantiated on DOMContentLoaded).
  this.initializeEventListeners();
}
|
|
|
|
|
// Attach 'change' handlers to both file inputs and a 'click' handler to the
// analyze button. Both uploads route through the same handler, tagged with
// which slot ('vocal' / 'instrumental') they fill.
initializeEventListeners() {
  document.getElementById('vocal-file').addEventListener('change', (e) => {
    this.handleFileUpload(e, 'vocal');
  });

  document.getElementById('instrumental-file').addEventListener('change', (e) => {
    this.handleFileUpload(e, 'instrumental');
  });

  // Analysis starts only on an explicit click; the button is gated by
  // updateAnalyzeButton().
  document.getElementById('analyze-btn').addEventListener('click', () => {
    this.analyzeAudio();
  });
}
|
|
|
|
|
handleFileUpload(event, type) { |
|
|
const file = event.target.files[0]; |
|
|
if (!file) return; |
|
|
|
|
|
if (type === 'vocal') { |
|
|
this.vocalFile = file; |
|
|
document.getElementById('vocal-name').textContent = file.name; |
|
|
} else { |
|
|
this.instrumentalFile = file; |
|
|
document.getElementById('instrumental-name').textContent = file.name; |
|
|
} |
|
|
|
|
|
this.updateAnalyzeButton(); |
|
|
} |
|
|
|
|
|
updateAnalyzeButton() { |
|
|
const button = document.getElementById('analyze-btn'); |
|
|
button.disabled = !(this.vocalFile && this.instrumentalFile); |
|
|
} |
|
|
|
|
|
async analyzeAudio() { |
|
|
this.showLoading(true); |
|
|
this.hideResults(); |
|
|
|
|
|
try { |
|
|
|
|
|
this.audioContext = new (window.AudioContext || window.webkitAudioContext)(); |
|
|
|
|
|
|
|
|
const vocalBuffer = await this.loadAudioFile(this.vocalFile); |
|
|
const instrumentalBuffer = await this.loadAudioFile(this.instrumentalFile); |
|
|
|
|
|
|
|
|
const scale = document.getElementById('scale-select').value; |
|
|
const tempo = parseInt(document.getElementById('tempo-input').value); |
|
|
const timeSignature = document.getElementById('time-signature').value; |
|
|
|
|
|
|
|
|
const analysis = await this.performAnalysis(vocalBuffer, instrumentalBuffer, { |
|
|
scale, |
|
|
tempo, |
|
|
timeSignature |
|
|
}); |
|
|
|
|
|
|
|
|
this.displayResults(analysis); |
|
|
|
|
|
} catch (error) { |
|
|
console.error('Erreur d\'analyse:', error); |
|
|
alert('Une erreur est survenue lors de l\'analyse. Veuillez vérifier vos fichiers audio.'); |
|
|
} finally { |
|
|
this.showLoading(false); |
|
|
} |
|
|
} |
|
|
|
|
|
async loadAudioFile(file) { |
|
|
return new Promise((resolve, reject) => { |
|
|
const reader = new FileReader(); |
|
|
reader.onload = async (e) => { |
|
|
try { |
|
|
const arrayBuffer = e.target.result; |
|
|
const audioBuffer = await this.audioContext.decodeAudioData(arrayBuffer); |
|
|
resolve(audioBuffer); |
|
|
} catch (error) { |
|
|
reject(error); |
|
|
} |
|
|
}; |
|
|
reader.readAsArrayBuffer(file); |
|
|
}); |
|
|
} |
|
|
|
|
|
/**
 * Core offline analysis: walks both tracks in fixed-size windows, sampling
 * vocal pitch against the declared scale and vocal/instrumental energy
 * against the declared tempo grid, then derives coaching advice.
 * @param {AudioBuffer} vocalBuffer - Decoded vocal track.
 * @param {AudioBuffer} instrumentalBuffer - Decoded backing track.
 * @param {{scale: string, tempo: number, timeSignature: string}} params
 * @returns {Promise<{duration: number, rhythmMarkers: Array, pitchMarkers: Array, advice: Array}>}
 */
async performAnalysis(vocalBuffer, instrumentalBuffer, params) {
  // Only the overlapping portion of the two tracks is analyzed.
  const duration = Math.min(vocalBuffer.duration, instrumentalBuffer.duration);
  // NOTE(review): assumes both buffers share the vocal's sample rate — confirm.
  const sampleRate = vocalBuffer.sampleRate;

  const analysis = {
    duration: duration,
    rhythmMarkers: [],
    pitchMarkers: [],
    advice: []
  };

  // Windowing parameters, expressed in original-rate samples. Work is capped
  // at 2 minutes of audio and 500 windows to keep the UI responsive.
  const windowSize = 4096;
  const hopSize = 2048;
  const maxAnalysisTime = 120;
  const analysisLength = Math.min(duration, maxAnalysisTime);
  const maxSamples = Math.floor((analysisLength * sampleRate - windowSize) / hopSize);
  const numWindows = Math.min(maxSamples, 500);

  // Decimate the first channel of each track to 16 kHz to cut per-window cost.
  const vocalData = this.downsampleAudio(vocalBuffer.getChannelData(0), sampleRate, 16000);
  const instrumentalData = this.downsampleAudio(instrumentalBuffer.getChannelData(0), sampleRate, 16000);
  const newSampleRate = 16000;

  // Reference material derived from the user's parameters.
  const scaleFrequencies = this.getScaleFrequencies(params.scale);
  const targetTempo = params.tempo;

  // Process windows in batches, yielding to the event loop between some
  // batches so the page stays interactive during long analyses.
  const batchSize = 50;
  for (let batch = 0; batch < Math.ceil(numWindows / batchSize); batch++) {
    const batchStart = batch * batchSize;
    const batchEnd = Math.min(batchStart + batchSize, numWindows);

    for (let i = batchStart; i < batchEnd; i++) {
      // Hop size re-expressed in 16 kHz samples.
      const actualHopSize = Math.floor(hopSize * 16000 / sampleRate);
      const startSample = i * actualHopSize;
      // NOTE(review): startSample / newSampleRate is already seconds; the
      // extra (sampleRate / 16000) factor looks like a unit error for any
      // source rate other than 16 kHz — confirm against marker positions.
      const timePosition = (startSample / newSampleRate) * (sampleRate / 16000);
      const timePercent = (timePosition / duration) * 100;

      // Stop once a full window no longer fits in the decimated data.
      if (startSample + windowSize > vocalData.length) break;

      const vocalWindow = vocalData.slice(startSample, startSample + windowSize);
      const instrumentalWindow = instrumentalData.slice(startSample, startSample + windowSize);

      // Pitch detection is expensive, so run it on every 8th window only.
      let vocalPitch = null;
      let pitchAccuracy = true;
      if (i % 8 === 0) {
        vocalPitch = this.detectPitchFast(vocalWindow, newSampleRate);
        if (vocalPitch) {
          const expectedPitch = this.getExpectedPitch(timePosition, scaleFrequencies);
          if (expectedPitch) {
            // Within 5% of the expected frequency counts as in tune.
            const pitchDifference = Math.abs(vocalPitch - expectedPitch);
            const tolerance = expectedPitch * 0.05;
            pitchAccuracy = pitchDifference < tolerance;
          }
        }
      }

      // Energy-based rhythm comparison runs on every window.
      const vocalEnergy = this.calculateRMSEnergy(vocalWindow);
      const instrumentalEnergy = this.calculateRMSEnergy(instrumentalWindow);

      const expectedBeatTime = this.getExpectedBeatTime(timePosition, targetTempo, params.timeSignature);
      const rhythmAccuracy = this.compareRhythmSimple(vocalEnergy, instrumentalEnergy, expectedBeatTime, timePosition);

      // Record markers only on every 16th window to keep the timeline sparse.
      if (i % 16 === 0) {
        if (vocalPitch) {
          analysis.pitchMarkers.push({
            time: timePosition,
            position: timePercent,
            isAccurate: pitchAccuracy,
            frequency: vocalPitch,
            expectedFrequency: this.getExpectedPitch(timePosition, scaleFrequencies)
          });
        }

        analysis.rhythmMarkers.push({
          time: timePosition,
          position: timePercent,
          isAccurate: rhythmAccuracy,
          isStrongBeat: expectedBeatTime.isStrongBeat,
          energy: vocalEnergy
        });
      }
    }

    // Yield to the event loop every 5th batch so rendering can proceed.
    if (batch % 5 === 0) {
      await new Promise(resolve => setTimeout(resolve, 1));
    }
  }

  // Turn the aggregated marker statistics into user-facing advice.
  analysis.advice = this.generateAdvice(analysis, params);

  return analysis;
}
|
|
|
|
|
|
|
|
downsampleAudio(audioData, originalSampleRate, targetSampleRate) { |
|
|
if (originalSampleRate === targetSampleRate) return audioData; |
|
|
|
|
|
const ratio = originalSampleRate / targetSampleRate; |
|
|
const length = Math.floor(audioData.length / ratio); |
|
|
const result = new Float32Array(length); |
|
|
|
|
|
for (let i = 0; i < length; i++) { |
|
|
result[i] = audioData[Math.floor(i * ratio)]; |
|
|
} |
|
|
|
|
|
return result; |
|
|
} |
|
|
|
|
|
|
|
|
calculateRMSEnergy(audioData) { |
|
|
let sum = 0; |
|
|
for (let i = 0; i < audioData.length; i++) { |
|
|
sum += audioData[i] * audioData[i]; |
|
|
} |
|
|
return Math.sqrt(sum / audioData.length); |
|
|
} |
|
|
|
|
|
|
|
|
/**
 * Coarse autocorrelation pitch detector tuned for speed: searches lags for
 * 80–800 Hz in steps of 2 samples, correlating at most 512 sample pairs.
 * @param {Float32Array} audioData - Analysis window.
 * @param {number} sampleRate - Sample rate of `audioData` in Hz.
 * @returns {?number} Estimated fundamental in Hz, or null when the window is
 *   too quiet or no sufficiently strong correlation peak was found.
 */
detectPitchFast(audioData, sampleRate) {
  // Vocal-range frequency bounds converted to lag (period) bounds in samples.
  const minFreq = 80;
  const maxFreq = 800;
  const minPeriod = Math.floor(sampleRate / maxFreq);
  const maxPeriod = Math.floor(sampleRate / minFreq);

  let bestCorrelation = 0;
  let bestPeriod = 0;

  // Step lags by 2 and cap the inner product at 512 terms for speed.
  for (let period = minPeriod; period < maxPeriod; period += 2) {
    let correlation = 0;
    const maxSamples = Math.min(512, audioData.length - period);

    for (let i = 0; i < maxSamples; i++) {
      correlation += audioData[i] * audioData[i + period];
    }

    if (correlation > bestCorrelation) {
      bestCorrelation = correlation;
      bestPeriod = period;
    }
  }

  // Reject quiet windows and weak peaks. NOTE(review): bestCorrelation is an
  // unnormalized sum, so the 0.3 threshold scales with signal level — confirm
  // this is intentional. When no peak exceeds 0.3 this check also prevents a
  // division by zero below (bestPeriod would still be 0).
  const energy = this.calculateRMSEnergy(audioData);
  if (energy < 0.01 || bestCorrelation < 0.3) {
    return null;
  }

  // Convert the winning lag back to a frequency.
  return sampleRate / bestPeriod;
}
|
|
|
|
|
|
|
|
compareRhythmSimple(vocalEnergy, instrumentalEnergy, expectedBeat, timePosition) { |
|
|
const energyThreshold = 0.02; |
|
|
const tolerance = 0.15; |
|
|
|
|
|
|
|
|
const hasVocalEnergy = vocalEnergy > energyThreshold; |
|
|
const hasInstrumentalEnergy = instrumentalEnergy > energyThreshold; |
|
|
|
|
|
if (hasVocalEnergy && hasInstrumentalEnergy) { |
|
|
const timeDifference = Math.abs(timePosition - expectedBeat.nextBeatTime); |
|
|
return timeDifference < tolerance; |
|
|
} |
|
|
|
|
|
return true; |
|
|
} |
|
|
|
|
|
|
|
|
/**
 * Higher-accuracy autocorrelation pitch detector (80–1000 Hz): low-pass
 * filters the window, normalizes correlations, applies a clarity test, and
 * refines the winning lag via parabolic interpolation.
 * NOTE(review): not referenced by performAnalysis (which uses
 * detectPitchFast) — presumably kept for a higher-quality mode; confirm.
 * @param {Float32Array} audioData - Analysis window.
 * @param {number} sampleRate - Sample rate in Hz.
 * @returns {?number} Fundamental in Hz, or null when the signal is too quiet
 *   or the correlation peak is too weak/ambiguous.
 */
detectPitch(audioData, sampleRate) {
  // Suppress content above the search range before correlating.
  const filteredData = this.lowPassFilter(audioData, sampleRate, 1000);

  const minFreq = 80;
  const maxFreq = 1000;
  const minPeriod = Math.floor(sampleRate / maxFreq);
  const maxPeriod = Math.floor(sampleRate / minFreq);

  let bestCorrelation = 0;
  let bestPeriod = 0;
  let secondBestCorrelation = 0;

  // Exhaustive lag search with normalized correlation.
  for (let period = minPeriod; period < maxPeriod; period++) {
    let correlation = 0;
    let norm1 = 0;
    let norm2 = 0;

    for (let i = 0; i < filteredData.length - period; i++) {
      correlation += filteredData[i] * filteredData[i + period];
      norm1 += filteredData[i] * filteredData[i];
      norm2 += filteredData[i + period] * filteredData[i + period];
    }

    // Cosine-similarity style normalization.
    const normalizedCorrelation = correlation / Math.sqrt(norm1 * norm2);

    // NOTE(review): secondBestCorrelation only updates when a new best is
    // found, so it tracks the previous best rather than the true runner-up;
    // confirm the clarity measure below is what was intended.
    if (normalizedCorrelation > bestCorrelation) {
      secondBestCorrelation = bestCorrelation;
      bestCorrelation = normalizedCorrelation;
      bestPeriod = period;
    }
  }

  // Mean-square energy of the filtered window, plus quality thresholds.
  const energy = filteredData.reduce((sum, sample) => sum + sample * sample, 0) / filteredData.length;
  const clarityThreshold = 0.4;
  const energyThreshold = 0.001;

  // The peak must clearly dominate the previous best to count as unambiguous.
  const clarity = bestCorrelation - secondBestCorrelation;

  if (energy < energyThreshold || bestCorrelation < clarityThreshold || clarity < 0.1) {
    return null;
  }

  // Sub-sample refinement of the winning lag.
  const refinedPeriod = this.parabolicInterpolation(filteredData, bestPeriod, sampleRate);

  return sampleRate / refinedPeriod;
}
|
|
|
|
|
|
|
|
lowPassFilter(data, sampleRate, cutoffFreq) { |
|
|
const RC = 1.0 / (cutoffFreq * 2 * Math.PI); |
|
|
const dt = 1.0 / sampleRate; |
|
|
const alpha = dt / (RC + dt); |
|
|
|
|
|
const filtered = new Float32Array(data.length); |
|
|
filtered[0] = data[0]; |
|
|
|
|
|
for (let i = 1; i < data.length; i++) { |
|
|
filtered[i] = alpha * data[i] + (1 - alpha) * filtered[i - 1]; |
|
|
} |
|
|
|
|
|
return filtered; |
|
|
} |
|
|
|
|
|
|
|
|
parabolicInterpolation(data, period, sampleRate) { |
|
|
if (period <= 1 || period >= data.length - 1) return period; |
|
|
|
|
|
|
|
|
const correlations = []; |
|
|
for (let p = period - 1; p <= period + 1; p++) { |
|
|
let corr = 0; |
|
|
for (let i = 0; i < data.length - p; i++) { |
|
|
corr += data[i] * data[i + p]; |
|
|
} |
|
|
correlations.push(corr); |
|
|
} |
|
|
|
|
|
|
|
|
const y1 = correlations[0]; |
|
|
const y2 = correlations[1]; |
|
|
const y3 = correlations[2]; |
|
|
|
|
|
const a = (y1 - 2*y2 + y3) / 2; |
|
|
const b = (y3 - y1) / 2; |
|
|
|
|
|
if (a === 0) return period; |
|
|
|
|
|
const offset = -b / (2 * a); |
|
|
return period + offset; |
|
|
} |
|
|
|
|
|
|
|
|
detectOnset(audioData, previousEnergy = 0) { |
|
|
|
|
|
const rmsEnergy = Math.sqrt(audioData.reduce((sum, sample) => sum + sample * sample, 0) / audioData.length); |
|
|
|
|
|
|
|
|
const spectralFlux = Math.max(0, rmsEnergy - previousEnergy); |
|
|
|
|
|
|
|
|
let maxAmplitude = 0; |
|
|
let minAmplitude = 0; |
|
|
for (let i = 0; i < audioData.length; i++) { |
|
|
maxAmplitude = Math.max(maxAmplitude, Math.abs(audioData[i])); |
|
|
minAmplitude = Math.min(minAmplitude, Math.abs(audioData[i])); |
|
|
} |
|
|
const dynamicRange = maxAmplitude - minAmplitude; |
|
|
|
|
|
|
|
|
const energyThreshold = 0.005; |
|
|
const fluxThreshold = 0.001; |
|
|
const dynamicThreshold = 0.01; |
|
|
|
|
|
|
|
|
const hasOnset = (rmsEnergy > energyThreshold) && |
|
|
(spectralFlux > fluxThreshold) && |
|
|
(dynamicRange > dynamicThreshold); |
|
|
|
|
|
return { |
|
|
hasOnset: hasOnset, |
|
|
energy: rmsEnergy, |
|
|
flux: spectralFlux, |
|
|
dynamic: dynamicRange |
|
|
}; |
|
|
} |
|
|
|
|
|
|
|
|
/**
 * Reference frequencies (Hz, 4th-octave roots) for the supported scales, one
 * entry per scale degree.
 * @param {string} scale - Key as used by the scale <select> (e.g. 'C', 'Am',
 *   'C_dorian').
 * @returns {number[]} Degree frequencies; unknown keys fall back to C major.
 */
getScaleFrequencies(scale) {
  const baseFrequencies = {
    // Major scales (7 diatonic degrees each).
    'C': [261.63, 293.66, 329.63, 349.23, 392.00, 440.00, 493.88],
    'Db': [277.18, 311.13, 349.23, 369.99, 415.30, 466.16, 523.25],
    'D': [293.66, 329.63, 369.99, 392.00, 440.00, 493.88, 554.37],
    'Eb': [311.13, 349.23, 392.00, 415.30, 466.16, 523.25, 587.33],
    'E': [329.63, 369.99, 415.30, 440.00, 493.88, 554.37, 622.25],
    'F': [349.23, 392.00, 440.00, 466.16, 523.25, 587.33, 659.25],
    'Gb': [369.99, 415.30, 466.16, 493.88, 554.37, 622.25, 698.46],
    'G': [392.00, 440.00, 493.88, 523.25, 587.33, 659.25, 739.99],
    'Ab': [415.30, 466.16, 523.25, 554.37, 622.25, 698.46, 783.99],
    'A': [440.00, 493.88, 554.37, 587.33, 659.25, 739.99, 830.61],
    'Bb': [466.16, 523.25, 587.33, 622.25, 698.46, 783.99, 880.00],
    'B': [493.88, 554.37, 622.25, 659.25, 739.99, 830.61, 932.33],

    // Natural minor scales ('m' suffix).
    'Cm': [261.63, 293.66, 311.13, 349.23, 392.00, 415.30, 466.16],
    'Dbm': [277.18, 311.13, 329.63, 369.99, 415.30, 440.00, 493.88],
    'Dm': [293.66, 329.63, 349.23, 392.00, 440.00, 466.16, 523.25],
    'Ebm': [311.13, 349.23, 369.99, 415.30, 466.16, 493.88, 554.37],
    'Em': [329.63, 369.99, 392.00, 440.00, 493.88, 523.25, 587.33],
    'Fm': [349.23, 392.00, 415.30, 466.16, 523.25, 554.37, 622.25],
    'Gbm': [369.99, 415.30, 440.00, 493.88, 554.37, 587.33, 659.25],
    'Gm': [392.00, 440.00, 466.16, 523.25, 587.33, 622.25, 698.46],
    'Abm': [415.30, 466.16, 493.88, 554.37, 622.25, 659.25, 739.99],
    'Am': [440.00, 493.88, 523.25, 587.33, 659.25, 698.46, 783.99],
    'Bbm': [466.16, 523.25, 554.37, 622.25, 698.46, 739.99, 830.61],
    'Bm': [493.88, 554.37, 587.33, 659.25, 739.99, 783.99, 880.00],

    // Modes and other scales, rooted on C (pentatonic and blues have fewer
    // degrees, which is fine — callers index modulo the array length).
    'C_dorian': [261.63, 293.66, 311.13, 349.23, 392.00, 440.00, 466.16],
    'C_phrygian': [261.63, 277.18, 311.13, 349.23, 392.00, 415.30, 466.16],
    'C_lydian': [261.63, 293.66, 329.63, 369.99, 392.00, 440.00, 493.88],
    'C_mixolydian': [261.63, 293.66, 329.63, 349.23, 392.00, 440.00, 466.16],
    'C_pentatonic': [261.63, 293.66, 329.63, 392.00, 440.00],
    'C_blues': [261.63, 311.13, 349.23, 369.99, 392.00, 466.16]
  };
  return baseFrequencies[scale] || baseFrequencies['C'];
}
|
|
|
|
|
|
|
|
getExpectedPitch(timePosition, scaleFrequencies) { |
|
|
|
|
|
|
|
|
const noteIndex = Math.floor(timePosition * 2) % scaleFrequencies.length; |
|
|
return scaleFrequencies[noteIndex]; |
|
|
} |
|
|
|
|
|
|
|
|
getExpectedBeatTime(timePosition, tempo, timeSignature) { |
|
|
const [numerator, denominator] = timeSignature.split('/').map(Number); |
|
|
|
|
|
|
|
|
const beatDuration = 60 / tempo; |
|
|
const noteDuration = beatDuration * (4 / denominator); |
|
|
const measureDuration = noteDuration * numerator; |
|
|
|
|
|
|
|
|
const positionInMeasure = timePosition % measureDuration; |
|
|
const beatInMeasure = positionInMeasure / noteDuration; |
|
|
|
|
|
return { |
|
|
measureDuration: measureDuration, |
|
|
beatInMeasure: beatInMeasure, |
|
|
isStrongBeat: this.isStrongBeat(beatInMeasure, timeSignature), |
|
|
nextBeatTime: Math.ceil(beatInMeasure) * noteDuration + Math.floor(timePosition / measureDuration) * measureDuration |
|
|
}; |
|
|
} |
|
|
|
|
|
|
|
|
isStrongBeat(beatPosition, timeSignature) { |
|
|
const [numerator, denominator] = timeSignature.split('/').map(Number); |
|
|
const beatIndex = Math.floor(beatPosition); |
|
|
|
|
|
switch (timeSignature) { |
|
|
case '4/4': |
|
|
return beatIndex === 0 || beatIndex === 2; |
|
|
case '3/4': |
|
|
return beatIndex === 0; |
|
|
case '2/4': |
|
|
return beatIndex === 0; |
|
|
case '6/8': |
|
|
return beatIndex === 0 || beatIndex === 3; |
|
|
case '9/8': |
|
|
return beatIndex === 0 || beatIndex === 3 || beatIndex === 6; |
|
|
case '12/8': |
|
|
return beatIndex === 0 || beatIndex === 3 || beatIndex === 6 || beatIndex === 9; |
|
|
case '5/4': |
|
|
return beatIndex === 0 || beatIndex === 3; |
|
|
case '7/4': |
|
|
return beatIndex === 0 || beatIndex === 4; |
|
|
case '7/8': |
|
|
return beatIndex === 0 || beatIndex === 3; |
|
|
case '5/8': |
|
|
return beatIndex === 0 || beatIndex === 3; |
|
|
default: |
|
|
return beatIndex === 0; |
|
|
} |
|
|
} |
|
|
|
|
|
|
|
|
compareRhythm(vocalOnset, instrumentalOnset, expectedBeat, timePosition) { |
|
|
const tolerance = 0.1; |
|
|
|
|
|
|
|
|
if (vocalOnset.hasOnset && instrumentalOnset.hasOnset) { |
|
|
const timeDifference = Math.abs(timePosition - expectedBeat.nextBeatTime); |
|
|
return timeDifference < tolerance; |
|
|
} |
|
|
|
|
|
|
|
|
if (instrumentalOnset.hasOnset && !vocalOnset.hasOnset && expectedBeat.isStrongBeat) { |
|
|
return false; |
|
|
} |
|
|
|
|
|
|
|
|
if (vocalOnset.hasOnset && !instrumentalOnset.hasOnset) { |
|
|
|
|
|
const timeDifference = Math.abs(timePosition - expectedBeat.nextBeatTime); |
|
|
return timeDifference < tolerance * 2; |
|
|
} |
|
|
|
|
|
|
|
|
return true; |
|
|
} |
|
|
|
|
|
/**
 * Convert marker statistics into tiered coaching advice (one rhythm entry,
 * one pitch entry, plus an optional tempo note). Messages are user-facing
 * French strings.
 * @param {{rhythmMarkers: Array, pitchMarkers: Array}} analysis
 * @param {{tempo: number}} params
 * @returns {Array<{type: string, severity: string, message: string}>}
 */
generateAdvice(analysis, params) {
  const advice = [];

  // Count inaccurate markers of each kind.
  const rhythmIssues = analysis.rhythmMarkers.filter(m => !m.isAccurate).length;
  const pitchIssues = analysis.pitchMarkers.filter(m => !m.isAccurate).length;

  const totalRhythm = analysis.rhythmMarkers.length;
  const totalPitch = analysis.pitchMarkers.length;

  // Rhythm tiers: >50% off = high, >30% = medium, otherwise praise.
  // NOTE(review): with zero markers both comparisons are 0 > 0 (false), so an
  // empty analysis falls through to the praise branch — confirm intended.
  if (rhythmIssues > totalRhythm * 0.5) {
    advice.push({
      type: 'rhythm',
      severity: 'high',
      message: 'Beaucoup de décalages rythmiques détectés. Essayez de travailler avec un métronome à un tempo plus lent d\'abord.'
    });
  } else if (rhythmIssues > totalRhythm * 0.3) {
    advice.push({
      type: 'rhythm',
      severity: 'medium',
      message: 'Quelques décalages rythmiques. Concentrez-vous sur les temps forts de la mesure.'
    });
  } else {
    advice.push({
      type: 'rhythm',
      severity: 'low',
      message: 'Bon maintien du rythme ! Continuez ainsi.'
    });
  }

  // Pitch tiers: >40% off = high, >20% = medium, otherwise praise.
  if (pitchIssues > totalPitch * 0.4) {
    advice.push({
      type: 'pitch',
      severity: 'high',
      message: 'Travaillez la justesse en chantant avec un piano ou une application d\'accordage.'
    });
  } else if (pitchIssues > totalPitch * 0.2) {
    advice.push({
      type: 'pitch',
      severity: 'medium',
      message: 'Quelques notes à ajuster. Écoutez bien l\'accompagnement instrumental.'
    });
  } else {
    advice.push({
      type: 'pitch',
      severity: 'low',
      message: 'Excellente justesse ! Votre oreille musicale est bien développée.'
    });
  }

  // Informational note for fast tempos.
  if (params.tempo > 140) {
    advice.push({
      type: 'tempo',
      severity: 'info',
      message: 'Tempo rapide détecté. Pour les tempos élevés, concentrez-vous sur la régularité plutôt que la perfection.'
    });
  }

  return advice;
}
|
|
|
|
|
displayResults(analysis) { |
|
|
const resultsDiv = document.getElementById('results'); |
|
|
const durationDiv = document.getElementById('duration-info'); |
|
|
const timelineDiv = document.getElementById('timeline'); |
|
|
const adviceDiv = document.getElementById('advice'); |
|
|
|
|
|
|
|
|
const minutes = Math.floor(analysis.duration / 60); |
|
|
const seconds = Math.floor(analysis.duration % 60); |
|
|
durationDiv.innerHTML = ` |
|
|
<strong>Durée analysée :</strong> ${minutes}:${seconds.toString().padStart(2, '0')} |
|
|
`; |
|
|
|
|
|
|
|
|
timelineDiv.innerHTML = ` |
|
|
<div class="timeline-bar" id="timeline-bar"> |
|
|
<div style="position: absolute; left: 0; top: -25px; font-size: 0.8em; color: #d63384;">0:00</div> |
|
|
<div style="position: absolute; right: 0; top: -25px; font-size: 0.8em; color: #d63384;">${minutes}:${seconds.toString().padStart(2, '0')}</div> |
|
|
</div> |
|
|
`; |
|
|
|
|
|
const timelineBar = document.getElementById('timeline-bar'); |
|
|
|
|
|
|
|
|
const fragment = document.createDocumentFragment(); |
|
|
|
|
|
|
|
|
const maxRhythmMarkers = 100; |
|
|
const rhythmStep = Math.max(1, Math.floor(analysis.rhythmMarkers.length / maxRhythmMarkers)); |
|
|
|
|
|
analysis.rhythmMarkers.forEach((marker, index) => { |
|
|
if (index % rhythmStep === 0 && (marker.energy > 0.01 || marker.isStrongBeat)) { |
|
|
const markerEl = document.createElement('div'); |
|
|
markerEl.className = `marker ${marker.isAccurate ? 'rhythm-good' : 'rhythm-bad'}`; |
|
|
markerEl.style.left = `${marker.position}%`; |
|
|
markerEl.innerHTML = marker.isAccurate ? '🟢' : '🔴'; |
|
|
markerEl.style.fontSize = marker.isStrongBeat ? '1.2em' : '0.8em'; |
|
|
|
|
|
const timeStr = this.formatTime(marker.time); |
|
|
markerEl.title = `${timeStr}: Rythme ${marker.isAccurate ? 'CALÉ' : 'DÉCALÉ'}${marker.isStrongBeat ? ' (temps fort)' : ''}`; |
|
|
fragment.appendChild(markerEl); |
|
|
} |
|
|
}); |
|
|
|
|
|
|
|
|
const maxPitchMarkers = 50; |
|
|
const pitchStep = Math.max(1, Math.floor(analysis.pitchMarkers.length / maxPitchMarkers)); |
|
|
|
|
|
analysis.pitchMarkers.forEach((marker, index) => { |
|
|
if (index % pitchStep === 0 && marker.frequency) { |
|
|
const markerEl = document.createElement('div'); |
|
|
markerEl.className = `marker ${marker.isAccurate ? 'pitch-good' : 'pitch-bad'}`; |
|
|
markerEl.style.left = `${marker.position}%`; |
|
|
markerEl.style.top = '30px'; |
|
|
markerEl.innerHTML = marker.isAccurate ? 'J' : 'F'; |
|
|
markerEl.style.fontWeight = 'bold'; |
|
|
markerEl.style.padding = '2px 4px'; |
|
|
markerEl.style.background = 'white'; |
|
|
markerEl.style.borderRadius = '3px'; |
|
|
markerEl.style.border = '1px solid #ccc'; |
|
|
|
|
|
const timeStr = this.formatTime(marker.time); |
|
|
const freqStr = Math.round(marker.frequency) + 'Hz'; |
|
|
const expectedStr = marker.expectedFrequency ? Math.round(marker.expectedFrequency) + 'Hz' : 'N/A'; |
|
|
markerEl.title = `${timeStr}: Note ${marker.isAccurate ? 'JUSTE' : 'FAUSSE'}\nDétectée: ${freqStr}\nAttendue: ${expectedStr}`; |
|
|
fragment.appendChild(markerEl); |
|
|
} |
|
|
}); |
|
|
|
|
|
|
|
|
timelineBar.appendChild(fragment); |
|
|
|
|
|
|
|
|
this.displayAdvice(analysis); |
|
|
} |
|
|
|
|
|
formatTime(seconds) { |
|
|
const mins = Math.floor(seconds / 60); |
|
|
const secs = Math.floor(seconds % 60); |
|
|
return `${mins}:${secs.toString().padStart(2, '0')}`; |
|
|
} |
|
|
|
|
|
/**
 * Render Tampia's advice list into the advice panel and reveal the results
 * section.
 * @param {{advice: Array<{type: string, severity: string, message: string}>}} analysis
 */
displayAdvice(analysis) {
  const adviceDiv = document.getElementById('advice');

  // One .advice-item per entry, prefixed with a category icon and title.
  adviceDiv.innerHTML = `
    <h4>🐱 Conseils de Tampia</h4>
    ${analysis.advice.map(advice => `
      <div class="advice-item">
        <strong>${this.getAdviceIcon(advice.type)} ${this.getAdviceTitle(advice.type)}:</strong><br>
        ${advice.message}
      </div>
    `).join('')}
  `;

  // The results container stays hidden until advice is ready to show.
  const resultsDiv = document.getElementById('results');
  resultsDiv.classList.remove('hidden');
}
|
|
|
|
|
getAdviceIcon(type) { |
|
|
const icons = { |
|
|
rhythm: '🥁', |
|
|
pitch: '🎵', |
|
|
tempo: '⏱️' |
|
|
}; |
|
|
return icons[type] || '💡'; |
|
|
} |
|
|
|
|
|
getAdviceTitle(type) { |
|
|
const titles = { |
|
|
rhythm: 'Rythme', |
|
|
pitch: 'Justesse', |
|
|
tempo: 'Tempo' |
|
|
}; |
|
|
return titles[type] || 'Conseil'; |
|
|
} |
|
|
|
|
|
showLoading(show) { |
|
|
const loadingDiv = document.getElementById('loading'); |
|
|
if (show) { |
|
|
loadingDiv.classList.remove('hidden'); |
|
|
} else { |
|
|
loadingDiv.classList.add('hidden'); |
|
|
} |
|
|
} |
|
|
|
|
|
hideResults() { |
|
|
document.getElementById('results').classList.add('hidden'); |
|
|
} |
|
|
} |
|
|
|
|
|
|
|
|
// Entry point: instantiate the app once the DOM is ready, so the
// getElementById lookups performed while wiring listeners all succeed.
document.addEventListener('DOMContentLoaded', () => {
  new TampiaAI();
});
|
|
|