// Tampia / script.js
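// Note: the surrounding HTML page (not included in this file) is assumed to provide the
// elements referenced by id below: 'vocal-file', 'instrumental-file', 'vocal-name',
// 'instrumental-name', 'analyze-btn', 'scale-select', 'tempo-input', 'time-signature',
// 'loading', 'results', 'duration-info', 'timeline' and 'advice'.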
class TampiaAI {
constructor() {
this.vocalFile = null;
this.instrumentalFile = null;
this.audioContext = null;
this.initializeEventListeners();
}
initializeEventListeners() {
// File upload handlers
document.getElementById('vocal-file').addEventListener('change', (e) => {
this.handleFileUpload(e, 'vocal');
});
document.getElementById('instrumental-file').addEventListener('change', (e) => {
this.handleFileUpload(e, 'instrumental');
});
// Analyze button
document.getElementById('analyze-btn').addEventListener('click', () => {
this.analyzeAudio();
});
}
handleFileUpload(event, type) {
const file = event.target.files[0];
if (!file) return;
if (type === 'vocal') {
this.vocalFile = file;
document.getElementById('vocal-name').textContent = file.name;
} else {
this.instrumentalFile = file;
document.getElementById('instrumental-name').textContent = file.name;
}
this.updateAnalyzeButton();
}
updateAnalyzeButton() {
const button = document.getElementById('analyze-btn');
button.disabled = !(this.vocalFile && this.instrumentalFile);
}
async analyzeAudio() {
this.showLoading(true);
this.hideResults();
try {
// Initialize audio context
this.audioContext = new (window.AudioContext || window.webkitAudioContext)();
// Load and analyze audio files
const vocalBuffer = await this.loadAudioFile(this.vocalFile);
const instrumentalBuffer = await this.loadAudioFile(this.instrumentalFile);
// Get analysis parameters
const scale = document.getElementById('scale-select').value;
const tempo = parseInt(document.getElementById('tempo-input').value);
const timeSignature = document.getElementById('time-signature').value;
// Perform analysis
const analysis = await this.performAnalysis(vocalBuffer, instrumentalBuffer, {
scale,
tempo,
timeSignature
});
// Display results
this.displayResults(analysis);
} catch (error) {
console.error('Erreur d\'analyse:', error);
alert('Une erreur est survenue lors de l\'analyse. Veuillez vérifier vos fichiers audio.');
} finally {
this.showLoading(false);
}
}
async loadAudioFile(file) {
return new Promise((resolve, reject) => {
const reader = new FileReader();
reader.onload = async (e) => {
try {
const arrayBuffer = e.target.result;
const audioBuffer = await this.audioContext.decodeAudioData(arrayBuffer);
resolve(audioBuffer);
} catch (error) {
reject(error);
}
};
reader.readAsArrayBuffer(file);
});
}
async performAnalysis(vocalBuffer, instrumentalBuffer, params) {
const duration = Math.min(vocalBuffer.duration, instrumentalBuffer.duration);
const sampleRate = vocalBuffer.sampleRate;
const analysis = {
duration: duration,
rhythmMarkers: [],
pitchMarkers: [],
advice: []
};
// Optimization: reduce the analysis granularity
const windowSize = 4096; // larger window
const hopSize = 2048; // larger hop
const maxAnalysisTime = 120; // cap the analysis at 2 minutes
const analysisLength = Math.min(duration, maxAnalysisTime);
const maxSamples = Math.floor((analysisLength * sampleRate - windowSize) / hopSize);
const numWindows = Math.min(maxSamples, 500); // cap the number of windows
// Get the audio data, downsampled for speed
const vocalData = this.downsampleAudio(vocalBuffer.getChannelData(0), sampleRate, 16000);
const instrumentalData = this.downsampleAudio(instrumentalBuffer.getChannelData(0), sampleRate, 16000);
const newSampleRate = 16000;
// Reference scale frequencies
const scaleFrequencies = this.getScaleFrequencies(params.scale);
const targetTempo = params.tempo;
// Optimized analysis with batch processing
const batchSize = 50;
for (let batch = 0; batch < Math.ceil(numWindows / batchSize); batch++) {
const batchStart = batch * batchSize;
const batchEnd = Math.min(batchStart + batchSize, numWindows);
// Process in batches to improve responsiveness
for (let i = batchStart; i < batchEnd; i++) {
const actualHopSize = Math.floor(hopSize * 16000 / sampleRate);
const startSample = i * actualHopSize;
const timePosition = startSample / newSampleRate; // time in seconds within the downsampled signal
const timePercent = (timePosition / duration) * 100;
// Stop if there is not enough data left
if (startSample + windowSize > vocalData.length) break;
// Extract the analysis windows
const vocalWindow = vocalData.slice(startSample, startSample + windowSize);
const instrumentalWindow = instrumentalData.slice(startSample, startSample + windowSize);
// Simplified pitch analysis (only on every 8th window)
let vocalPitch = null;
let pitchAccuracy = true;
if (i % 8 === 0) {
vocalPitch = this.detectPitchFast(vocalWindow, newSampleRate);
if (vocalPitch) {
const expectedPitch = this.getExpectedPitch(timePosition, scaleFrequencies);
if (expectedPitch) {
const pitchDifference = Math.abs(vocalPitch - expectedPitch);
const tolerance = expectedPitch * 0.05; // wider tolerance (5%)
pitchAccuracy = pitchDifference < tolerance;
}
}
}
// Simplified rhythm analysis
const vocalEnergy = this.calculateRMSEnergy(vocalWindow);
const instrumentalEnergy = this.calculateRMSEnergy(instrumentalWindow);
const expectedBeatTime = this.getExpectedBeatTime(timePosition, targetTempo, params.timeSignature);
const rhythmAccuracy = this.compareRhythmSimple(vocalEnergy, instrumentalEnergy, expectedBeatTime, timePosition);
// Add markers with wider spacing (every 16th window)
if (i % 16 === 0) {
if (vocalPitch) {
analysis.pitchMarkers.push({
time: timePosition,
position: timePercent,
isAccurate: pitchAccuracy,
frequency: vocalPitch,
expectedFrequency: this.getExpectedPitch(timePosition, scaleFrequencies)
});
}
analysis.rhythmMarkers.push({
time: timePosition,
position: timePercent,
isAccurate: rhythmAccuracy,
isStrongBeat: expectedBeatTime.isStrongBeat,
energy: vocalEnergy
});
}
}
// Let the browser breathe between batches
if (batch % 5 === 0) {
await new Promise(resolve => setTimeout(resolve, 1));
}
}
// Generate the advice
analysis.advice = this.generateAdvice(analysis, params);
return analysis;
}
// Downsampling to speed up the analysis
downsampleAudio(audioData, originalSampleRate, targetSampleRate) {
if (originalSampleRate === targetSampleRate) return audioData;
const ratio = originalSampleRate / targetSampleRate;
const length = Math.floor(audioData.length / ratio);
const result = new Float32Array(length);
for (let i = 0; i < length; i++) {
result[i] = audioData[Math.floor(i * ratio)];
}
return result;
}
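// Note: this is nearest-neighbor decimation (it keeps roughly every ratio-th sample) with
// no anti-aliasing filter - a deliberate speed/accuracy trade-off for this rough analysis.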
// Simple, fast RMS energy computation
calculateRMSEnergy(audioData) {
let sum = 0;
for (let i = 0; i < audioData.length; i++) {
sum += audioData[i] * audioData[i];
}
return Math.sqrt(sum / audioData.length);
}
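// For reference: a constant signal of amplitude 0.5 has an RMS of 0.5, and a full-scale
// sine wave has an RMS of about 0.707 (1/sqrt(2)).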
// Fast, simplified pitch detection
detectPitchFast(audioData, sampleRate) {
const minFreq = 80;
const maxFreq = 800;
const minPeriod = Math.floor(sampleRate / maxFreq);
const maxPeriod = Math.floor(sampleRate / minFreq);
let bestCorrelation = 0;
let bestPeriod = 0;
// Simplified autocorrelation with fewer iterations
for (let period = minPeriod; period < maxPeriod; period += 2) { // step by 2 to speed things up
let correlation = 0;
const maxSamples = Math.min(512, audioData.length - period); // limit the number of samples
for (let i = 0; i < maxSamples; i++) {
correlation += audioData[i] * audioData[i + period];
}
if (correlation > bestCorrelation) {
bestCorrelation = correlation;
bestPeriod = period;
}
}
// Basic quality check
const energy = this.calculateRMSEnergy(audioData);
if (energy < 0.01 || bestCorrelation < 0.3) {
return null;
}
return sampleRate / bestPeriod;
}
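// Illustrative example: with sampleRate = 16000 Hz, the search covers periods from
// 20 samples (800 Hz) down to 200 samples (80 Hz); a best period of 100 samples maps to
// 16000 / 100 = 160 Hz.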
// Simplified rhythm comparison
compareRhythmSimple(vocalEnergy, instrumentalEnergy, expectedBeat, timePosition) {
const energyThreshold = 0.02;
const tolerance = 0.15; // 150 ms tolerance
// Simple rule: both signals carry energy and we are close to an expected beat
const hasVocalEnergy = vocalEnergy > energyThreshold;
const hasInstrumentalEnergy = instrumentalEnergy > energyThreshold;
if (hasVocalEnergy && hasInstrumentalEnergy) {
const timeDifference = Math.abs(timePosition - expectedBeat.nextBeatTime);
return timeDifference < tolerance;
}
return true; // neutral when there is no significant energy
}
// Improved pitch detection with filtering and validation (not used by the optimized analysis path, which calls detectPitchFast)
detectPitch(audioData, sampleRate) {
// Pre-processing: low-pass filtering to reduce noise
const filteredData = this.lowPassFilter(audioData, sampleRate, 1000);
const minFreq = 80; // Hz (lower bound of the human voice)
const maxFreq = 1000; // Hz (upper bound of the human voice)
const minPeriod = Math.floor(sampleRate / maxFreq);
const maxPeriod = Math.floor(sampleRate / minFreq);
let bestCorrelation = 0;
let bestPeriod = 0;
let secondBestCorrelation = 0;
// Improved autocorrelation with normalization
for (let period = minPeriod; period < maxPeriod; period++) {
let correlation = 0;
let norm1 = 0;
let norm2 = 0;
for (let i = 0; i < filteredData.length - period; i++) {
correlation += filteredData[i] * filteredData[i + period];
norm1 += filteredData[i] * filteredData[i];
norm2 += filteredData[i + period] * filteredData[i + period];
}
// Normalize to avoid false positives
const normalizedCorrelation = correlation / Math.sqrt(norm1 * norm2);
if (normalizedCorrelation > bestCorrelation) {
secondBestCorrelation = bestCorrelation;
bestCorrelation = normalizedCorrelation;
bestPeriod = period;
} else if (normalizedCorrelation > secondBestCorrelation) {
secondBestCorrelation = normalizedCorrelation; // keep the runner-up so the clarity check below is meaningful
}
}
// Signal quality checks
const energy = filteredData.reduce((sum, sample) => sum + sample * sample, 0) / filteredData.length;
const clarityThreshold = 0.4; // pitch clarity threshold
const energyThreshold = 0.001; // minimum energy threshold
// Check pitch clarity (gap between the best and second-best correlation)
const clarity = bestCorrelation - secondBestCorrelation;
if (energy < energyThreshold || bestCorrelation < clarityThreshold || clarity < 0.1) {
return null; // signal too weak or pitch not clear enough
}
// Refine with parabolic interpolation for better precision
const refinedPeriod = this.parabolicInterpolation(filteredData, bestPeriod, sampleRate);
return sampleRate / refinedPeriod;
}
// Simple low-pass filter (first-order IIR, i.e. exponential smoothing)
lowPassFilter(data, sampleRate, cutoffFreq) {
const RC = 1.0 / (cutoffFreq * 2 * Math.PI);
const dt = 1.0 / sampleRate;
const alpha = dt / (RC + dt);
const filtered = new Float32Array(data.length);
filtered[0] = data[0];
for (let i = 1; i < data.length; i++) {
filtered[i] = alpha * data[i] + (1 - alpha) * filtered[i - 1];
}
return filtered;
}
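// Rough worked example: at a 16 kHz sample rate with cutoffFreq = 1000 Hz,
// RC = 1/(2*pi*1000) ≈ 1.59e-4 s and dt = 6.25e-5 s, so alpha ≈ 0.28 - each output sample
// blends about 28% of the new input with 72% of the previous output.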
// Parabolic interpolation to refine the period estimate
parabolicInterpolation(data, period, sampleRate) {
if (period <= 1 || period >= data.length - 1) return period;
// Compute the autocorrelation for period-1, period and period+1
const correlations = [];
for (let p = period - 1; p <= period + 1; p++) {
let corr = 0;
for (let i = 0; i < data.length - p; i++) {
corr += data[i] * data[i + p];
}
correlations.push(corr);
}
// Parabolic interpolation
const y1 = correlations[0];
const y2 = correlations[1];
const y3 = correlations[2];
const a = (y1 - 2*y2 + y3) / 2;
const b = (y3 - y1) / 2;
if (a === 0) return period;
const offset = -b / (2 * a);
return period + offset;
}
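// Math note: for three points at offsets -1, 0, +1 with values y1, y2, y3, the parabola
// through them has a = (y1 - 2*y2 + y3) / 2 and b = (y3 - y1) / 2, and its vertex sits at
// x = -b / (2*a); adding that offset to the integer period gives a sub-sample estimate.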
// Improved onset detection with spectral analysis (not used by the optimized analysis path)
detectOnset(audioData, previousEnergy = 0) {
// RMS energy
const rmsEnergy = Math.sqrt(audioData.reduce((sum, sample) => sum + sample * sample, 0) / audioData.length);
// Spectral flux (energy difference between windows)
const spectralFlux = Math.max(0, rmsEnergy - previousEnergy);
// Amplitude variation
let maxAmplitude = 0;
let minAmplitude = Infinity; // start high so the minimum is tracked correctly
for (let i = 0; i < audioData.length; i++) {
maxAmplitude = Math.max(maxAmplitude, Math.abs(audioData[i]));
minAmplitude = Math.min(minAmplitude, Math.abs(audioData[i]));
}
const dynamicRange = maxAmplitude - minAmplitude;
// Detection thresholds
const energyThreshold = 0.005;
const fluxThreshold = 0.001;
const dynamicThreshold = 0.01;
// Onset detection combining multiple criteria
const hasOnset = (rmsEnergy > energyThreshold) &&
(spectralFlux > fluxThreshold) &&
(dynamicRange > dynamicThreshold);
return {
hasOnset: hasOnset,
energy: rmsEnergy,
flux: spectralFlux,
dynamic: dynamicRange
};
}
// Get the frequencies of the selected scale
getScaleFrequencies(scale) {
const baseFrequencies = {
// Major scales
'C': [261.63, 293.66, 329.63, 349.23, 392.00, 440.00, 493.88],
'Db': [277.18, 311.13, 349.23, 369.99, 415.30, 466.16, 523.25],
'D': [293.66, 329.63, 369.99, 392.00, 440.00, 493.88, 554.37],
'Eb': [311.13, 349.23, 392.00, 415.30, 466.16, 523.25, 587.33],
'E': [329.63, 369.99, 415.30, 440.00, 493.88, 554.37, 622.25],
'F': [349.23, 392.00, 440.00, 466.16, 523.25, 587.33, 659.25],
'Gb': [369.99, 415.30, 466.16, 493.88, 554.37, 622.25, 698.46],
'G': [392.00, 440.00, 493.88, 523.25, 587.33, 659.25, 739.99],
'Ab': [415.30, 466.16, 523.25, 554.37, 622.25, 698.46, 783.99],
'A': [440.00, 493.88, 554.37, 587.33, 659.25, 739.99, 830.61],
'Bb': [466.16, 523.25, 587.33, 622.25, 698.46, 783.99, 880.00],
'B': [493.88, 554.37, 622.25, 659.25, 739.99, 830.61, 932.33],
// Natural minor scales
'Cm': [261.63, 293.66, 311.13, 349.23, 392.00, 415.30, 466.16],
'Dbm': [277.18, 311.13, 329.63, 369.99, 415.30, 440.00, 493.88],
'Dm': [293.66, 329.63, 349.23, 392.00, 440.00, 466.16, 523.25],
'Ebm': [311.13, 349.23, 369.99, 415.30, 466.16, 493.88, 554.37],
'Em': [329.63, 369.99, 392.00, 440.00, 493.88, 523.25, 587.33],
'Fm': [349.23, 392.00, 415.30, 466.16, 523.25, 554.37, 622.25],
'Gbm': [369.99, 415.30, 440.00, 493.88, 554.37, 587.33, 659.25],
'Gm': [392.00, 440.00, 466.16, 523.25, 587.33, 622.25, 698.46],
'Abm': [415.30, 466.16, 493.88, 554.37, 622.25, 659.25, 739.99],
'Am': [440.00, 493.88, 523.25, 587.33, 659.25, 698.46, 783.99],
'Bbm': [466.16, 523.25, 554.37, 622.25, 698.46, 739.99, 830.61],
'Bm': [493.88, 554.37, 587.33, 659.25, 739.99, 783.99, 880.00],
// Modes
'C_dorian': [261.63, 293.66, 311.13, 349.23, 392.00, 440.00, 466.16],
'C_phrygian': [261.63, 277.18, 311.13, 349.23, 392.00, 415.30, 466.16],
'C_lydian': [261.63, 293.66, 329.63, 369.99, 392.00, 440.00, 493.88],
'C_mixolydian': [261.63, 293.66, 329.63, 349.23, 392.00, 440.00, 466.16],
'C_pentatonic': [261.63, 293.66, 329.63, 392.00, 440.00],
'C_blues': [261.63, 311.13, 349.23, 369.99, 392.00, 466.16]
};
return baseFrequencies[scale] || baseFrequencies['C'];
}
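// These are equal-tempered frequencies referenced to A4 = 440 Hz (e.g. middle C, C4 ≈ 261.63 Hz);
// each semitone step multiplies the frequency by 2^(1/12) ≈ 1.0595.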
// Get the expected frequency at a given time
getExpectedPitch(timePosition, scaleFrequencies) {
// Simple simulation - a real implementation would analyze
// the melody of the instrumental track
const noteIndex = Math.floor(timePosition * 2) % scaleFrequencies.length;
return scaleFrequencies[noteIndex];
}
// Get the expected beat time for the given time signature
getExpectedBeatTime(timePosition, tempo, timeSignature) {
const [numerator, denominator] = timeSignature.split('/').map(Number);
// Duration of one measure in seconds
const beatDuration = 60 / tempo; // duration of a quarter note
const noteDuration = beatDuration * (4 / denominator); // duration of the signature's beat unit
const measureDuration = noteDuration * numerator;
// Position within the measure
const positionInMeasure = timePosition % measureDuration;
const beatInMeasure = positionInMeasure / noteDuration;
return {
measureDuration: measureDuration,
beatInMeasure: beatInMeasure,
isStrongBeat: this.isStrongBeat(beatInMeasure, timeSignature),
nextBeatTime: Math.ceil(beatInMeasure) * noteDuration + Math.floor(timePosition / measureDuration) * measureDuration
};
}
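// Worked example (illustrative values): at tempo = 120 BPM in 6/8, beatDuration = 0.5 s,
// noteDuration = 0.5 * (4/8) = 0.25 s (an eighth note) and measureDuration = 1.5 s;
// timePosition = 2.0 s then falls 0.5 s into the second measure, i.e. beatInMeasure = 2.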
// Determine whether a beat is a strong beat for the given time signature
isStrongBeat(beatPosition, timeSignature) {
const [numerator, denominator] = timeSignature.split('/').map(Number);
const beatIndex = Math.floor(beatPosition);
switch (timeSignature) {
case '4/4':
return beatIndex === 0 || beatIndex === 2; // beats 1 and 3
case '3/4':
return beatIndex === 0; // beat 1
case '2/4':
return beatIndex === 0; // beat 1
case '6/8':
return beatIndex === 0 || beatIndex === 3; // beats 1 and 4
case '9/8':
return beatIndex === 0 || beatIndex === 3 || beatIndex === 6; // beats 1, 4, 7
case '12/8':
return beatIndex === 0 || beatIndex === 3 || beatIndex === 6 || beatIndex === 9;
case '5/4':
return beatIndex === 0 || beatIndex === 3; // beats 1 and 4
case '7/4':
return beatIndex === 0 || beatIndex === 4; // beats 1 and 5
case '7/8':
return beatIndex === 0 || beatIndex === 3; // typical 3+2+2 grouping
case '5/8':
return beatIndex === 0 || beatIndex === 3; // typical 3+2 grouping
default:
return beatIndex === 0;
}
}
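// Example: isStrongBeat(2.4, '4/4') floors the position to beat index 2 (the third beat
// of the bar) and therefore returns true.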
// Compare the vocal rhythm with the instrumental (improved version, not used by the optimized analysis path)
compareRhythm(vocalOnset, instrumentalOnset, expectedBeat, timePosition) {
const tolerance = 0.1; // 100 ms tolerance
// If both have an onset, check the synchronization
if (vocalOnset.hasOnset && instrumentalOnset.hasOnset) {
const timeDifference = Math.abs(timePosition - expectedBeat.nextBeatTime);
return timeDifference < tolerance;
}
// If the instrumental has an onset on a strong beat but the vocal does not
if (instrumentalOnset.hasOnset && !vocalOnset.hasOnset && expectedBeat.isStrongBeat) {
return false; // an important beat was missed
}
// If the vocal has an onset but the instrumental does not
if (vocalOnset.hasOnset && !instrumentalOnset.hasOnset) {
// Check whether it is close to an expected beat
const timeDifference = Math.abs(timePosition - expectedBeat.nextBeatTime);
return timeDifference < tolerance * 2; // extra tolerance for ornaments
}
// No onset detected - neutral
return true;
}
generateAdvice(analysis, params) {
const advice = [];
// Count issues
const rhythmIssues = analysis.rhythmMarkers.filter(m => !m.isAccurate).length;
const pitchIssues = analysis.pitchMarkers.filter(m => !m.isAccurate).length;
const totalRhythm = analysis.rhythmMarkers.length;
const totalPitch = analysis.pitchMarkers.length;
// Rhythm advice
if (rhythmIssues > totalRhythm * 0.5) {
advice.push({
type: 'rhythm',
severity: 'high',
message: 'Beaucoup de décalages rythmiques détectés. Essayez de travailler avec un métronome à un tempo plus lent d\'abord.'
});
} else if (rhythmIssues > totalRhythm * 0.3) {
advice.push({
type: 'rhythm',
severity: 'medium',
message: 'Quelques décalages rythmiques. Concentrez-vous sur les temps forts de la mesure.'
});
} else {
advice.push({
type: 'rhythm',
severity: 'low',
message: 'Bon maintien du rythme ! Continuez ainsi.'
});
}
// Pitch advice
if (pitchIssues > totalPitch * 0.4) {
advice.push({
type: 'pitch',
severity: 'high',
message: 'Travaillez la justesse en chantant avec un piano ou une application d\'accordage.'
});
} else if (pitchIssues > totalPitch * 0.2) {
advice.push({
type: 'pitch',
severity: 'medium',
message: 'Quelques notes à ajuster. Écoutez bien l\'accompagnement instrumental.'
});
} else {
advice.push({
type: 'pitch',
severity: 'low',
message: 'Excellente justesse ! Votre oreille musicale est bien développée.'
});
}
// Tempo-specific advice
if (params.tempo > 140) {
advice.push({
type: 'tempo',
severity: 'info',
message: 'Tempo rapide détecté. Pour les tempos élevés, concentrez-vous sur la régularité plutôt que la perfection.'
});
}
return advice;
}
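// Example of the thresholds above: with 30 rhythm markers and 12 inaccurate ones (40%),
// the high-severity branch (more than 50%) is skipped and the medium-severity advice fires.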
displayResults(analysis) {
const resultsDiv = document.getElementById('results');
const durationDiv = document.getElementById('duration-info');
const timelineDiv = document.getElementById('timeline');
const adviceDiv = document.getElementById('advice');
// Display duration
const minutes = Math.floor(analysis.duration / 60);
const seconds = Math.floor(analysis.duration % 60);
durationDiv.innerHTML = `
<strong>Durée analysée :</strong> ${minutes}:${seconds.toString().padStart(2, '0')}
`;
// Create timeline
timelineDiv.innerHTML = `
<div class="timeline-bar" id="timeline-bar">
<div style="position: absolute; left: 0; top: -25px; font-size: 0.8em; color: #d63384;">0:00</div>
<div style="position: absolute; right: 0; top: -25px; font-size: 0.8em; color: #d63384;">${minutes}:${seconds.toString().padStart(2, '0')}</div>
</div>
`;
const timelineBar = document.getElementById('timeline-bar');
// Optimization: build the markers in a batch with a DocumentFragment
const fragment = document.createDocumentFragment();
// Add rhythm markers (limit the count for performance)
const maxRhythmMarkers = 100;
const rhythmStep = Math.max(1, Math.floor(analysis.rhythmMarkers.length / maxRhythmMarkers));
analysis.rhythmMarkers.forEach((marker, index) => {
if (index % rhythmStep === 0 && (marker.energy > 0.01 || marker.isStrongBeat)) {
const markerEl = document.createElement('div');
markerEl.className = `marker ${marker.isAccurate ? 'rhythm-good' : 'rhythm-bad'}`;
markerEl.style.left = `${marker.position}%`;
markerEl.innerHTML = marker.isAccurate ? '🟢' : '🔴';
markerEl.style.fontSize = marker.isStrongBeat ? '1.2em' : '0.8em';
const timeStr = this.formatTime(marker.time);
markerEl.title = `${timeStr}: Rythme ${marker.isAccurate ? 'CALÉ' : 'DÉCALÉ'}${marker.isStrongBeat ? ' (temps fort)' : ''}`;
fragment.appendChild(markerEl);
}
});
// Add pitch markers (also limited for performance)
const maxPitchMarkers = 50;
const pitchStep = Math.max(1, Math.floor(analysis.pitchMarkers.length / maxPitchMarkers));
analysis.pitchMarkers.forEach((marker, index) => {
if (index % pitchStep === 0 && marker.frequency) {
const markerEl = document.createElement('div');
markerEl.className = `marker ${marker.isAccurate ? 'pitch-good' : 'pitch-bad'}`;
markerEl.style.left = `${marker.position}%`;
markerEl.style.top = '30px';
markerEl.innerHTML = marker.isAccurate ? 'J' : 'F';
markerEl.style.fontWeight = 'bold';
markerEl.style.padding = '2px 4px';
markerEl.style.background = 'white';
markerEl.style.borderRadius = '3px';
markerEl.style.border = '1px solid #ccc';
const timeStr = this.formatTime(marker.time);
const freqStr = Math.round(marker.frequency) + 'Hz';
const expectedStr = marker.expectedFrequency ? Math.round(marker.expectedFrequency) + 'Hz' : 'N/A';
markerEl.title = `${timeStr}: Note ${marker.isAccurate ? 'JUSTE' : 'FAUSSE'}\nDétectée: ${freqStr}\nAttendue: ${expectedStr}`;
fragment.appendChild(markerEl);
}
});
// Append all markers at once
timelineBar.appendChild(fragment);
// Display the advice
this.displayAdvice(analysis);
}
formatTime(seconds) {
const mins = Math.floor(seconds / 60);
const secs = Math.floor(seconds % 60);
return `${mins}:${secs.toString().padStart(2, '0')}`;
}
displayAdvice(analysis) {
const adviceDiv = document.getElementById('advice');
// Display advice
adviceDiv.innerHTML = `
<h4>🐱 Conseils de Tampia</h4>
${analysis.advice.map(advice => `
<div class="advice-item">
<strong>${this.getAdviceIcon(advice.type)} ${this.getAdviceTitle(advice.type)}:</strong><br>
${advice.message}
</div>
`).join('')}
`;
const resultsDiv = document.getElementById('results');
resultsDiv.classList.remove('hidden');
}
getAdviceIcon(type) {
const icons = {
rhythm: '🥁',
pitch: '🎵',
tempo: '⏱️'
};
return icons[type] || '💡';
}
getAdviceTitle(type) {
const titles = {
rhythm: 'Rythme',
pitch: 'Justesse',
tempo: 'Tempo'
};
return titles[type] || 'Conseil';
}
showLoading(show) {
const loadingDiv = document.getElementById('loading');
if (show) {
loadingDiv.classList.remove('hidden');
} else {
loadingDiv.classList.add('hidden');
}
}
hideResults() {
document.getElementById('results').classList.add('hidden');
}
}
// Initialize Tampia AI when page loads
document.addEventListener('DOMContentLoaded', () => {
new TampiaAI();
});