MeshChatX/meshchatx/src/frontend/components/call/RingtoneEditor.vue

<template>
<div class="space-y-6">
<!-- Header -->
<div class="flex items-center justify-between">
<div>
<h3 class="text-lg font-bold text-gray-900 dark:text-white">Edit Ringtone</h3>
<p class="text-xs text-gray-500 dark:text-zinc-500">{{ ringtone.display_name }}</p>
</div>
<button
class="p-2 hover:bg-gray-100 dark:hover:bg-zinc-800 rounded-full transition-colors"
@click="$emit('close')"
>
<MaterialDesignIcon icon-name="close" class="size-6 text-gray-500" />
</button>
</div>
<!-- Waveform Container -->
<div
class="relative bg-gray-50 dark:bg-zinc-800/50 rounded-2xl p-4 border border-gray-100 dark:border-zinc-800 min-h-[200px] flex flex-col justify-center"
>
<div v-if="loading" class="flex flex-col items-center justify-center space-y-3">
<div class="size-8 border-4 border-blue-500/20 border-t-blue-500 rounded-full animate-spin"></div>
<p class="text-sm text-gray-500 dark:text-zinc-400 font-medium">Loading audio...</p>
</div>
<div v-show="!loading" class="relative">
<canvas ref="waveform" class="w-full h-40 cursor-pointer" @mousedown="handleWaveformClick"></canvas>
<!-- Playback Progress -->
<div
class="absolute top-0 bottom-0 w-0.5 bg-blue-500 z-10 pointer-events-none"
:style="{ left: progressPercent + '%' }"
></div>
<!-- Selection Overlays -->
<div
class="absolute top-0 bottom-0 bg-blue-500/10 border-x-2 border-blue-500 z-20"
:style="{ left: startPercent + '%', width: endPercent - startPercent + '%' }"
>
<!-- Handles -->
<div
class="absolute top-1/2 -left-3 -translate-y-1/2 size-6 bg-white dark:bg-zinc-700 border-2 border-blue-500 rounded-full shadow-lg cursor-ew-resize flex items-center justify-center group"
@mousedown.stop.prevent="startDragging('start')"
>
<div class="w-0.5 h-3 bg-blue-500 group-hover:h-4 transition-all"></div>
</div>
<div
class="absolute top-1/2 -right-3 -translate-y-1/2 size-6 bg-white dark:bg-zinc-700 border-2 border-blue-500 rounded-full shadow-lg cursor-ew-resize flex items-center justify-center group"
@mousedown.stop.prevent="startDragging('end')"
>
<div class="w-0.5 h-3 bg-blue-500 group-hover:h-4 transition-all"></div>
</div>
</div>
</div>
</div>
<!-- Controls -->
<div class="grid grid-cols-1 md:grid-cols-2 gap-6">
<div class="space-y-4">
<div class="flex items-center justify-between">
<span class="text-sm font-bold text-gray-700 dark:text-zinc-300">Time Range</span>
<span class="text-[10px] font-mono text-gray-500"
>{{ formatTime(startTime) }} - {{ formatTime(endTime) }} ({{
formatTime(endTime - startTime)
}})</span
>
</div>
<div class="flex gap-4">
<div class="flex-1">
<label class="block text-[10px] uppercase font-bold text-gray-400 mb-1">Start</label>
<input
v-model.number="startTime"
type="number"
step="0.01"
min="0"
:max="endTime"
class="w-full bg-gray-50 dark:bg-zinc-800 border-none rounded-lg text-sm px-3 py-2 focus:ring-2 focus:ring-blue-500 text-gray-900 dark:text-white"
/>
</div>
<div class="flex-1">
<label class="block text-[10px] uppercase font-bold text-gray-400 mb-1">End</label>
<input
v-model.number="endTime"
type="number"
step="0.01"
:min="startTime"
:max="totalDuration"
class="w-full bg-gray-50 dark:bg-zinc-800 border-none rounded-lg text-sm px-3 py-2 focus:ring-2 focus:ring-blue-500 text-gray-900 dark:text-white"
/>
</div>
</div>
</div>
<div class="flex flex-col justify-end">
<button
class="flex items-center justify-center gap-2 py-3 rounded-xl font-bold transition-all w-full"
:class="
isPlaying
? 'bg-orange-500 text-white shadow-lg shadow-orange-500/20'
: 'bg-blue-600 text-white shadow-lg shadow-blue-500/20'
"
@click="togglePlay"
>
<MaterialDesignIcon :icon-name="isPlaying ? 'pause' : 'play'" class="size-5" />
{{ isPlaying ? "Pause Selection" : "Play Selection" }}
</button>
</div>
</div>
<!-- Footer Actions -->
<div class="pt-6 border-t border-gray-100 dark:border-zinc-800 flex items-center justify-between">
<div class="flex items-center gap-2">
<input
id="saveAsNew"
v-model="saveAsNew"
type="checkbox"
class="rounded border-gray-300 text-blue-600 focus:ring-blue-500"
/>
<label for="saveAsNew" class="text-sm text-gray-600 dark:text-zinc-400 cursor-pointer"
>Save as new ringtone</label
>
</div>
<div class="flex items-center gap-3">
<button
class="px-4 py-2 text-sm font-bold text-gray-500 hover:text-gray-700 dark:hover:text-zinc-300 transition-colors"
@click="$emit('close')"
>
Cancel
</button>
<button
:disabled="saving || loading"
class="px-6 py-2 bg-zinc-900 dark:bg-white text-white dark:text-zinc-900 rounded-xl text-sm font-bold shadow-lg transition-all hover:scale-[1.02] active:scale-[0.98] disabled:opacity-50 disabled:cursor-not-allowed flex items-center gap-2"
@click="save"
>
<MaterialDesignIcon v-if="saving" icon-name="loading" class="size-4 animate-spin" />
{{ saving ? "Saving..." : "Save Audio" }}
</button>
</div>
</div>
</div>
</template>
<script>
import MaterialDesignIcon from "../MaterialDesignIcon.vue";
import ToastUtils from "../../js/ToastUtils";
export default {
name: "RingtoneEditor",
components: {
MaterialDesignIcon,
},
props: {
ringtone: {
type: Object,
required: true,
},
},
emits: ["close", "saved"],
data() {
return {
loading: true,
saving: false,
audioBuffer: null,
audioContext: null,
sourceNode: null,
startTime: 0,
endTime: 0,
totalDuration: 0,
isPlaying: false,
playbackStartTime: 0,
playbackStartOffset: 0,
progressPercent: 0,
animationFrame: null,
dragging: null,
saveAsNew: false,
};
},
computed: {
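// The "|| 0" / "|| 100" fallbacks guard against NaN (0 / 0) before the audio has loaded.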
startPercent() {
return (this.startTime / this.totalDuration) * 100 || 0;
},
endPercent() {
return (this.endTime / this.totalDuration) * 100 || 100;
},
},
mounted() {
this.loadAudio();
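// Listen on window so dragging keeps tracking the pointer even after it leaves the canvas.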
window.addEventListener("mousemove", this.handleDragging);
window.addEventListener("mouseup", this.stopDragging);
window.addEventListener("resize", this.drawWaveform);
},
beforeUnmount() {
this.stopPlayback();
window.removeEventListener("mousemove", this.handleDragging);
window.removeEventListener("mouseup", this.stopDragging);
window.removeEventListener("resize", this.drawWaveform);
if (this.audioContext) {
this.audioContext.close();
}
},
methods: {
async loadAudio() {
try {
this.loading = true;
const response = await fetch(`/api/v1/telephone/ringtones/${this.ringtone.id}/audio`);
if (!response.ok) throw new Error(`Failed to fetch ringtone audio: HTTP ${response.status}`);
const arrayBuffer = await response.arrayBuffer();
this.audioContext = new (window.AudioContext || window.webkitAudioContext)();
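// Decode the whole file into an AudioBuffer so it can be drawn and trimmed entirely client-side.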
this.audioBuffer = await this.audioContext.decodeAudioData(arrayBuffer);
this.totalDuration = this.audioBuffer.duration;
this.startTime = 0;
this.endTime = this.totalDuration;
this.$nextTick(() => {
this.drawWaveform();
});
} catch (e) {
console.error("Failed to load audio:", e);
ToastUtils.error(this.$t("call.failed_load_audio_edit"));
this.$emit("close");
} finally {
this.loading = false;
}
},
drawWaveform() {
const canvas = this.$refs.waveform;
if (!canvas || !this.audioBuffer) return;
const ctx = canvas.getContext("2d");
const width = canvas.clientWidth;
const height = canvas.clientHeight;
// Set canvas internal resolution
canvas.width = width * window.devicePixelRatio;
canvas.height = height * window.devicePixelRatio;
ctx.scale(window.devicePixelRatio, window.devicePixelRatio);
const data = this.audioBuffer.getChannelData(0);
const step = Math.ceil(data.length / width);
const amp = height / 2;
ctx.clearRect(0, 0, width, height);
ctx.beginPath();
ctx.moveTo(0, amp);
// Draw the full waveform in a muted base colour
ctx.strokeStyle = this.isDarkMode() ? "#3f3f46" : "#e4e4e7";
ctx.lineWidth = 1;
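// For each pixel column, find the min/max sample in its slice and draw a vertical line between the peaks.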
for (let i = 0; i < width; i++) {
let min = 1.0;
let max = -1.0;
for (let j = 0; j < step; j++) {
const datum = data[i * step + j];
if (datum < min) min = datum;
if (datum > max) max = datum;
}
ctx.moveTo(i, amp + min * amp);
ctx.lineTo(i, amp + max * amp);
}
ctx.stroke();
// Highlight the selected range
const startX = (this.startTime / this.totalDuration) * width;
const endX = (this.endTime / this.totalDuration) * width;
ctx.beginPath();
ctx.strokeStyle = "#3b82f6";
ctx.lineWidth = 1.5;
for (let i = Math.floor(startX); i < Math.ceil(endX); i++) {
let min = 1.0;
let max = -1.0;
for (let j = 0; j < step; j++) {
const datum = data[i * step + j];
if (datum < min) min = datum;
if (datum > max) max = datum;
}
ctx.moveTo(i, amp + min * amp);
ctx.lineTo(i, amp + max * amp);
}
ctx.stroke();
},
isDarkMode() {
return document.documentElement.classList.contains("dark");
},
formatTime(seconds) {
const mins = Math.floor(seconds / 60);
const secs = (seconds % 60).toFixed(2);
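// padStart keeps a leading zero on the seconds, so e.g. 5.25 s renders as 0:05.25.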
return `${mins}:${secs.padStart(5, "0")}`;
},
startDragging(type) {
this.dragging = type;
},
stopDragging() {
this.dragging = null;
this.drawWaveform();
},
handleDragging(e) {
if (!this.dragging) return;
const canvas = this.$refs.waveform;
const rect = canvas.getBoundingClientRect();
const x = Math.max(0, Math.min(e.clientX - rect.left, rect.width));
const time = (x / rect.width) * this.totalDuration;
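// Keep at least 0.1 s between the handles so the selection can never collapse or invert.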
if (this.dragging === "start") {
this.startTime = Math.min(time, this.endTime - 0.1);
} else if (this.dragging === "end") {
this.endTime = Math.max(time, this.startTime + 0.1);
}
},
handleWaveformClick(e) {
const canvas = this.$refs.waveform;
const rect = canvas.getBoundingClientRect();
const x = e.clientX - rect.left;
const time = (x / rect.width) * this.totalDuration;
// If click is outside selection, move nearest boundary
if (time < this.startTime) {
this.startTime = time;
} else if (time > this.endTime) {
this.endTime = time;
} else {
// A click inside the selection is left alone for now; it could later be
// used to seek the preview playback.
}
this.drawWaveform();
},
togglePlay() {
if (this.isPlaying) {
this.stopPlayback();
} else {
this.startPlayback();
}
},
startPlayback() {
if (!this.audioBuffer) return;
this.stopPlayback();
this.sourceNode = this.audioContext.createBufferSource();
this.sourceNode.buffer = this.audioBuffer;
this.sourceNode.connect(this.audioContext.destination);
this.playbackStartOffset = this.startTime;
this.playbackStartTime = this.audioContext.currentTime;
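// start(when, offset, duration) plays only the selected slice of the buffer.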
this.sourceNode.start(0, this.startTime, this.endTime - this.startTime);
this.isPlaying = true;
const source = this.sourceNode;
source.onended = () => {
// Ignore the callback if this source has already been replaced or stopped manually.
if (this.sourceNode !== source) return;
this.isPlaying = false;
this.progressPercent = 0;
this.sourceNode = null;
cancelAnimationFrame(this.animationFrame);
};
this.updateProgress();
},
stopPlayback() {
if (this.sourceNode) {
this.sourceNode.stop();
this.sourceNode = null;
}
this.isPlaying = false;
this.progressPercent = 0;
cancelAnimationFrame(this.animationFrame);
},
updateProgress() {
if (!this.isPlaying) return;
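// Derive the playhead position from the AudioContext clock rather than a wall-clock timer.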
const elapsed = this.audioContext.currentTime - this.playbackStartTime;
const currentTime = this.playbackStartOffset + elapsed;
this.progressPercent = (currentTime / this.totalDuration) * 100;
if (currentTime >= this.endTime) {
this.stopPlayback();
return;
}
this.animationFrame = requestAnimationFrame(this.updateProgress);
},
async save() {
try {
this.saving = true;
// Create a trimmed version of the audio
const sampleRate = this.audioBuffer.sampleRate;
const startSample = Math.floor(this.startTime * sampleRate);
const endSample = Math.min(Math.floor(this.endTime * sampleRate), this.audioBuffer.length);
const frameCount = endSample - startSample;
const trimmedBuffer = this.audioContext.createBuffer(
this.audioBuffer.numberOfChannels,
frameCount,
sampleRate
);
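// Copy the selected sample range for every channel into the trimmed buffer.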
for (let channel = 0; channel < this.audioBuffer.numberOfChannels; channel++) {
const data = this.audioBuffer.getChannelData(channel);
const trimmedData = trimmedBuffer.getChannelData(channel);
for (let i = 0; i < frameCount; i++) {
trimmedData[i] = data[startSample + i];
}
}
// Convert AudioBuffer to WAV blob
const blob = this.audioBufferToWav(trimmedBuffer);
const formData = new FormData();
const filename = this.saveAsNew ? `edited_${this.ringtone.filename}` : this.ringtone.filename;
formData.append("file", blob, filename);
await window.axios.post("/api/v1/telephone/ringtones/upload", formData, {
headers: { "Content-Type": "multipart/form-data" },
});
ToastUtils.success(this.$t("call.ringtone_saved"));
this.$emit("saved");
this.$emit("close");
} catch (e) {
console.error("Failed to save ringtone:", e);
ToastUtils.error(this.$t("call.failed_save_ringtone"));
} finally {
this.saving = false;
}
},
audioBufferToWav(buffer) {
const numOfChan = buffer.numberOfChannels;
const length = buffer.length * numOfChan * 2 + 44;
const buffer_arr = new ArrayBuffer(length);
const view = new DataView(buffer_arr);
const channels = [];
let i;
let sample;
let offset = 0;
let pos = 0;
const setUint16 = (data) => {
view.setUint16(pos, data, true);
pos += 2;
};
const setUint32 = (data) => {
view.setUint32(pos, data, true);
pos += 4;
};
const writeString = (string) => {
for (let i = 0; i < string.length; i++) {
view.setUint8(pos + i, string.charCodeAt(i));
}
pos += string.length;
};
writeString("RIFF");
setUint32(length - 8);
writeString("WAVE");
writeString("fmt ");
setUint32(16);
setUint16(1);
setUint16(numOfChan);
setUint32(buffer.sampleRate);
setUint32(buffer.sampleRate * 2 * numOfChan);
setUint16(numOfChan * 2);
setUint16(16);
writeString("data");
setUint32(length - pos - 4);
for (i = 0; i < buffer.numberOfChannels; i++) channels.push(buffer.getChannelData(i));
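// Interleave channels and convert each float sample to 16-bit signed PCM.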
while (pos < length) {
for (i = 0; i < numOfChan; i++) {
sample = Math.max(-1, Math.min(1, channels[i][offset]));
sample = (sample < 0 ? sample * 0x8000 : sample * 0x7fff) | 0;
view.setInt16(pos, sample, true);
pos += 2;
}
offset++;
}
return new Blob([buffer_arr], { type: "audio/wav" });
},
},
};
</script>
<style scoped>
canvas {
image-rendering: pixelated;
}
</style>