Commit f5bb479e by haojie

mp3测试

parent f21881b9
...@@ -21,6 +21,7 @@ ...@@ -21,6 +21,7 @@
"dayjs": "^1.10.6", "dayjs": "^1.10.6",
"default-passive-events": "^2.0.0", "default-passive-events": "^2.0.0",
"js-cookie": "^3.0.1", "js-cookie": "^3.0.1",
"lamejs": "^1.2.1",
"tdesign-vue-next": "^0.22.1", "tdesign-vue-next": "^0.22.1",
"uuid": "^9.0.0", "uuid": "^9.0.0",
"vue": "^3.2.31", "vue": "^3.2.31",
......
...@@ -10,8 +10,13 @@ export default function () { ...@@ -10,8 +10,13 @@ export default function () {
// 洗稿id // 洗稿id
const currentConfuseId = ref(''); const currentConfuseId = ref('');
// 是否暂停洗稿
const stopConfuse = ref(false);
const openConfuseInterval = (id: any = '') => { const openConfuseInterval = (id: any = '') => {
console.log('开启轮询,洗稿回调'); console.log('开启轮询,洗稿回调');
// 必须先清空定时器
closeConfuseInterval();
confuseInterval.value = window.setInterval(() => { confuseInterval.value = window.setInterval(() => {
currentStartConfuseBack(id ? id : currentConfuseId.value); currentStartConfuseBack(id ? id : currentConfuseId.value);
}, 3000); }, 3000);
...@@ -50,9 +55,14 @@ export default function () { ...@@ -50,9 +55,14 @@ export default function () {
task_id: id, task_id: id,
}); });
if (res.code == 0 && res.data.length) { if (res.code == 0 && res.data.length) {
// 是否为error
if (res.data[0] === 'error') {
stopConfuse.value = true;
} else {
console.log('洗稿回调成功'); console.log('洗稿回调成功');
closeConfuseInterval();
confuseList.value = res.data; confuseList.value = res.data;
}
closeConfuseInterval();
} else { } else {
console.log('洗稿还没有回调', id); console.log('洗稿还没有回调', id);
console.log(res.data); console.log(res.data);
...@@ -71,6 +81,7 @@ export default function () { ...@@ -71,6 +81,7 @@ export default function () {
}); });
return { return {
stopConfuse,
confuseList, confuseList,
currentConfuseId, currentConfuseId,
currentStartConfuse, currentStartConfuse,
......
// 低版本浏览器兼容
import './utils/polyfills';
import { createApp } from 'vue'; import { createApp } from 'vue';
// 组件库按需引入---注意--以后要加新组件,记得去文件里引入 // 组件库按需引入---注意--以后要加新组件,记得去文件里引入
import TDesign from './utils/Tdesign'; import TDesign from './utils/Tdesign';
...@@ -10,6 +13,7 @@ import '@/style/index.less'; ...@@ -10,6 +13,7 @@ import '@/style/index.less';
// 谷歌浏览器关于滚动事件警告去除 // 谷歌浏览器关于滚动事件警告去除
import 'default-passive-events'; import 'default-passive-events';
import App from './App.vue'; import App from './App.vue';
// font // font
// import '@/style/font-family.css'; // import '@/style/font-family.css';
// 全局样式 // 全局样式
......
...@@ -33,7 +33,7 @@ import { CONFUSE_STATUS } from '@/service/Live'; ...@@ -33,7 +33,7 @@ import { CONFUSE_STATUS } from '@/service/Live';
import { processTextCallback } from '@/hooks/useScript'; import { processTextCallback } from '@/hooks/useScript';
import { scriptTypeText } from '@/service/CreateLive'; import { scriptTypeText } from '@/service/CreateLive';
import { writeLog } from '@/utils/pyqt'; import { writeLog } from '@/utils/pyqt';
const { currentConfuseId, confuseList, currentStartConfuse, openConfuseInterval } = useConfuse(); const { currentConfuseId, confuseList, stopConfuse, currentStartConfuse, openConfuseInterval } = useConfuse();
const { openInterval: confuseInterval } = processTextCallback(); const { openInterval: confuseInterval } = processTextCallback();
const store = useStore(); const store = useStore();
...@@ -272,7 +272,8 @@ const currentTimeChange = (index: number, value: number) => { ...@@ -272,7 +272,8 @@ const currentTimeChange = (index: number, value: number) => {
typeof liveDetail.value.phonetic_timbres_id === 'number' && typeof liveDetail.value.phonetic_timbres_id === 'number' &&
typeof liveDetail.value.tone_id === 'number' && typeof liveDetail.value.tone_id === 'number' &&
liveDetail.value.is_disorganize && liveDetail.value.is_disorganize &&
liveDetail.value.type == scriptTypeText liveDetail.value.type == scriptTypeText &&
!stopConfuse.value
) { ) {
console.log(row.videoIndex, '当前videoIndex'); console.log(row.videoIndex, '当前videoIndex');
currentVideoRow.confuse = CONFUSE_STATUS.CONFUSE_STATUS_PROGRESS; currentVideoRow.confuse = CONFUSE_STATUS.CONFUSE_STATUS_PROGRESS;
......
...@@ -73,15 +73,8 @@ import HomeSvg from '@/assets/svg/createLive/home.svg'; ...@@ -73,15 +73,8 @@ import HomeSvg from '@/assets/svg/createLive/home.svg';
import InteractSvg from '@/assets/svg/createLive/interact.svg'; import InteractSvg from '@/assets/svg/createLive/interact.svg';
import ScriptsSvg from '@/assets/svg/createLive/scripts.svg'; import ScriptsSvg from '@/assets/svg/createLive/scripts.svg';
import { computed, onBeforeMount, ref, onBeforeUnmount, onActivated } from 'vue'; import { computed, onBeforeMount, ref, onBeforeUnmount, onActivated } from 'vue';
import { import { getElBounding, show_message, DataType, dimensionalConvert, ecursionDeepCopy, getFile } from '@/utils/tool';
getElBounding, import { getDurationOfAudioFile } from '@/utils/audio';
show_message,
DataType,
dimensionalConvert,
ecursionDeepCopy,
getDurationOfAudioFile,
getFile,
} from '@/utils/tool';
import { useStore } from 'vuex'; import { useStore } from 'vuex';
import { createLiveKeys, scriptTypeText, scriptTypePhonetics, mergeSameAudio, filterFiled } from '@/service/CreateLive'; import { createLiveKeys, scriptTypeText, scriptTypePhonetics, mergeSameAudio, filterFiled } from '@/service/CreateLive';
import { getLiveTaskInfo, createDrafts, getDraftsDetail, liveTts, createLiveTask } from '@/utils/api/userApi'; import { getLiveTaskInfo, createDrafts, getDraftsDetail, liveTts, createLiveTask } from '@/utils/api/userApi';
......
...@@ -77,7 +77,7 @@ import CustomDialog from '@/components/Dialog.vue'; ...@@ -77,7 +77,7 @@ import CustomDialog from '@/components/Dialog.vue';
import CustomInput from '@/components/input/index.vue'; import CustomInput from '@/components/input/index.vue';
import routerConfig from '@/router/tool'; import routerConfig from '@/router/tool';
import { useRouter } from 'vue-router'; import { useRouter } from 'vue-router';
import { getDigitalPeopleList } from '@/service/Common'; import { getDigitalPeopleList, uploadToAly } from '@/service/Common';
import Button from '@/components/Button.vue'; import Button from '@/components/Button.vue';
import { callPyjsInWindow } from '@/utils/pyqt'; import { callPyjsInWindow } from '@/utils/pyqt';
import { jumpToCreateLivePage } from '@/router/jump'; import { jumpToCreateLivePage } from '@/router/jump';
...@@ -193,15 +193,27 @@ const getList = async () => { ...@@ -193,15 +193,27 @@ const getList = async () => {
digitalPeopleList.loading = false; digitalPeopleList.loading = false;
} }
}; };
// Manual smoke test: merge a fixed list of remote MP3 files and log the result.
const startTest = async () => {
  const list = [
    'http://yunyi-live.oss-cn-hangzhou.aliyuncs.com/upload/1/2023-08-114e482462-6047-47be-a30c-b85a17c95665.mp3',
    'http://yunyi-live.oss-cn-hangzhou.aliyuncs.com/upload/1/2023-08-11efe3f1ea-b445-42e2-93b0-39a70fb7c6f5.mp3',
    'http://yunyi-live.oss-cn-hangzhou.aliyuncs.com/upload/1/2023-08-11dd2372d8-73ff-4526-bf59-b1618a3f218f.mp3',
    // 'http://nls-cloud-cn-shanghai.oss-cn-shanghai.aliyuncs.com/jupiter-flow/tmp/ad08fa0a70ae4ea88d11ad5e394ce045.wav?Expires=1692149777&OSSAccessKeyId=LTAIUpwNp2H7pBG5&Signature=D93qMT1DovslSOVa9oufV2cGZxE%3D',
    // 'http://nls-cloud-cn-shanghai.oss-cn-shanghai.aliyuncs.com/jupiter-flow/tmp/ad08fa0a70ae4ea88d11ad5e394ce045.wav?Expires=1692149777&OSSAccessKeyId=LTAIUpwNp2H7pBG5&Signature=D93qMT1DovslSOVa9oufV2cGZxE%3D',
  ];
  const blob = await audioMerge(list);
  console.log(blob);
  // Guard clause: bail out when the merge produced nothing usable.
  if (!blob) {
    console.log('文件错误');
    return;
  }
  // uploadToAly([blob]);
};
onMounted(() => { onMounted(() => {
// 获取我的数字人 // 获取我的数字人
getList(); getList();
// let list = [ startTest();
// 'http://m10.music.126.net/20230811094657/2ca708dd710fd76bfaa7176ab0a52a01/ymusic/5353/0f0f/0358/d99739615f8e5153d77042092f07fd77.mp3',
// 'http://m10.music.126.net/20230811094657/2ca708dd710fd76bfaa7176ab0a52a01/ymusic/5353/0f0f/0358/d99739615f8e5153d77042092f07fd77.mp3',
// ];
// audioMerge(list);
}); });
</script> </script>
......
...@@ -180,7 +180,6 @@ export const uploadToAly = async (fileList: File[]) => { ...@@ -180,7 +180,6 @@ export const uploadToAly = async (fileList: File[]) => {
}, },
]); ]);
}; };
// 每次上传前清空数组
let alyList = []; let alyList = [];
console.log('任务数'); console.log('任务数');
console.log(fileList.length); console.log(fileList.length);
......
import audiobufferToWav from 'audiobuffer-to-wav'; import audiobufferToWav from 'audiobuffer-to-wav';
import request from '@/utils/upLoadRequest';
import { writeLog } from '@/utils/pyqt'; import { writeLog } from '@/utils/pyqt';
// import lamejs from 'lamejs';
import { getFile } from './tool';
import audioConversion from '@/worker/audioConversion.js?worker';
export const createAudioContext = () => { export const createAudioContext = () => {
return new (window.AudioContext || window.webkitAudioContext)(); return new (window.AudioContext || window.webkitAudioContext)();
}; };
export async function decodeAudio(blob: Blob) { const audioContext = createAudioContext();
const audioContext = createAudioContext();
// 获取音频文件的时长
export const getDurationOfAudioFile = (file: File) => {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
const fileReader = new FileReader(); const audio = new Audio();
fileReader.onloadend = function () { audio.src = URL.createObjectURL(file);
const arrayBuffer = fileReader.result;
audioContext.decodeAudioData( audio.onloadedmetadata = () => {
arrayBuffer, resolve(audio.duration);
(audioBuffer: any) => { };
resolve(audioBuffer);
}, audio.onerror = () => {
reject, reject('无法获取音频文件的时长!');
);
}; };
fileReader.onerror = reject;
fileReader.readAsArrayBuffer(blob);
}); });
};
// 获取音频文件的类型
// Resolve the MIME type of a remote audio file by downloading it as a Blob.
export const getAudioFileType = async (url: string) => {
  const blob = await getFile(url);
  return blob.type;
};
// 将blob音频列表转换为buffer
/**
 * Decode a list of audio Blobs into AudioBuffers, preserving input order.
 * Uses the shared module-level `audioContext`; decoding is sequential so the
 * output array lines up index-for-index with the input.
 */
export const audioBlobToBuffer = async (list: Blob[]) => {
  const newList: AudioBuffer[] = [];
  for (const blob of list) {
    // Blob -> ArrayBuffer (raw encoded bytes).
    const arrayBuffer = await blob.arrayBuffer();
    // ArrayBuffer -> decoded PCM AudioBuffer.
    newList.push(await audioContext.decodeAudioData(arrayBuffer));
  }
  return newList;
};
// 下载音频列表
/**
 * Download a list of audio files in parallel.
 * Returns `{ blobList }` where the blobs are ordered the same as `filePaths`
 * (Promise.all preserves input order regardless of completion order).
 */
export const downloadAudioList = async (filePaths: string[]) => {
  const blobList = await Promise.all(filePaths.map((filePath: string) => getFile(filePath)));
  return {
    blobList: blobList,
  };
};
// Overlay-mix a list of decoded AudioBuffers into one planar Float32Array.
// NOTE(review): samples are SUMMED per index (a simultaneous mix), NOT
// concatenated end-to-end, and there is no clipping guard — mixed values can
// exceed the [-1, 1] range. Confirm this is the intended semantics; the file's
// audioMerge concatenates, whereas this helper overlays.
function mergeBuffers(bufferList) {
  // Channel count is taken from the first buffer only; assumes every input
  // shares the same channel layout — TODO confirm.
  let numberOfChannels = bufferList[0].numberOfChannels;
  console.log(numberOfChannels);
  let maxLength = Math.max.apply(
    null,
    bufferList.map(function (buffer) {
      return buffer.length;
    }),
  ); // longest input length in samples; every channel slot is padded to this size
  // Channel-major (planar) layout: [ch0 samples..., ch1 samples..., ...].
  let mergedBuffer = new Float32Array(numberOfChannels * maxLength);
  for (let channel = 0; channel < numberOfChannels; channel++) {
    let channelOffset = channel * maxLength;
    bufferList.forEach(function (buffer) {
      let channelData = buffer.getChannelData(channel);
      for (let i = 0; i < buffer.length; i++) {
        // Accumulate (mix) this buffer's samples into the shared output slot.
        mergedBuffer[channelOffset + i] += channelData[i];
      }
    });
  }
  return mergedBuffer;
} }
// mp3 worker
/**
 * Off-load MP3 encoding of `blobList` to a dedicated worker thread.
 * Resolves with the encoded Blob the worker posts back; rejects if the worker
 * errors (the original version had no error path, so a crashed worker left the
 * promise pending forever). The worker is terminated in both outcomes.
 */
export const mp3Worker = async (blobList: any[]) => {
  return new Promise<Blob>((resolve, reject) => {
    // worker
    const worker: Worker = new audioConversion();
    worker.onmessage = function (event) {
      const blob = event.data;
      // Free the thread as soon as the result arrives.
      worker.terminate();
      resolve(blob);
    };
    worker.onerror = function (event) {
      // Without this handler an encoding failure would hang the caller.
      worker.terminate();
      reject(event);
    };
    console.log('发送消息');
    worker.postMessage({
      blobList: blobList,
    });
  });
};
// 导出为 MP3
// export function exportToMp3(bufferList) {
// // 合并音频数据
// let mergedBuffer = mergeBuffers(bufferList);
// let start = performance.now();
// // 创建 MP3 编码器
// let encoder = new lamejs.Mp3Encoder(mergedBuffer.length, 44100, 128); // 使用 44100 采样率和 128kbps 比特率
// // 存储编码后的数据
// let mp3Data = [];
// // 编码音频数据
// let blockSize = 1152; // 使用默认块大小
// let bufferLength = mergedBuffer.length;
// for (let i = 0; i < bufferLength; i += blockSize) {
// let subbuffer = mergedBuffer.subarray(i, i + blockSize);
// let mp3buf = encoder.encodeBuffer(subbuffer, blockSize);
// mp3Data.push(mp3buf);
// }
// // 获取剩余的编码数据
// let mp3buf = encoder.flush();
// mp3Data.push(mp3buf);
// let end = performance.now();
// console.log(`本次合并耗时-${end - start}毫秒`);
// // 合并所有编码数据
// return new Blob(mp3Data, { type: 'audio/mpeg' });
// }
// 合并音频文件 // 合并音频文件
export async function audioMerge(filePaths) { export async function audioMerge(filePaths: string[]) {
if (!filePaths.length) {
console.log('没有要合并的音频');
return;
} else {
console.log('要合并的文件'); console.log('要合并的文件');
console.log(filePaths); console.log(filePaths);
}
try { try {
// 创建一个新的音频上下文 // 创建一个新的音频上下文
const audioContext = createAudioContext(); const audioContext = createAudioContext();
let fileType = '';
// 存储每个音频文件的解码后的音频数据 // 获取文件类型
const buffers = [];
// 使用 Promise 依次解码和添加音频数据到 buffers 数组 // 使用 Promise 依次解码和添加音频数据到 buffers 数组
await Promise.all( const { blobList } = await downloadAudioList(filePaths);
filePaths.map(async (filePath) => { // 先获取第一个音频的类型
// const response = await request.get(filePath, { responseType: 'blob' }); if (blobList[0]) {
// const blob = response; fileType = blobList[0].type.split('/')[1];
// // 获取文件类型 if (fileType == 'mpeg') {
// const fileType = blob.type; fileType = 'mp3';
// console.log(fileType); } else if (fileType.indexOf('wav') !== -1) {
fileType = 'wav';
// // 将blob转换为ArrayBuffer }
// const arrayBuffer = await blob.arrayBuffer(); }
console.log(fileType);
// // 将ArrayBuffer转换为Buffer
// const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
// return audioBuffer; // wav
const response = await request.get(filePath, { responseType: 'arraybuffer' }); if (fileType == 'wav') {
const arrayBuffer = response; // 获取buffer列表
const buffers = await audioBlobToBuffer(blobList);
const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
return audioBuffer;
}),
).then((decodedBuffers) => {
buffers.push(...decodedBuffers); // 将解码后的音频缓冲区按顺序添加到数组中
});
// 计算合并后的音频数据的长度 // 计算合并后的音频数据的长度
const totalDuration = buffers.reduce((accumulator, current) => accumulator + current.duration, 0); const totalDuration = buffers.reduce((accumulator, current) => accumulator + current.duration, 0);
const sampleRate = audioContext.sampleRate; const sampleRate = audioContext.sampleRate;
...@@ -82,12 +193,16 @@ export async function audioMerge(filePaths) { ...@@ -82,12 +193,16 @@ export async function audioMerge(filePaths) {
} }
offset += Math.round(buffer.duration * sampleRate); offset += Math.round(buffer.duration * sampleRate);
}); });
console.log(buffers);
// 导出合并后的音频数据为 WAV 文件 // 导出合并后的音频数据为 WAV 文件
const mergedData = exportBufferAsWav(mergedBuffer); const mergedData = exportBufferAsWav(mergedBuffer);
const blob = new Blob([mergedData], { type: 'audio/wav' }); const blob = new Blob([mergedData], { type: 'audio/wav' });
console.log('合并完成', blob); console.log('合并完成', blob);
return blob; return blob;
} else if (fileType == 'mp3') {
// mp3--耗时操作,放入worker中
const blob = await mp3Worker(blobList);
return blob;
}
} catch (error) { } catch (error) {
writeLog({ writeLog({
name: '音频合并失败', name: '音频合并失败',
......
// import Lame from 'lamejs/src/js/Lame';
// import Presets from 'lamejs/src/js/Presets';
// import GainAnalysis from 'lamejs/src/js/GainAnalysis';
// import QuantizePVT from 'lamejs/src/js/QuantizePVT';
// import Quantize from 'lamejs/src/js/Quantize';
// import Reservoir from 'lamejs/src/js/Reservoir';
// import Takehiro from 'lamejs/src/js/Takehiro';
// import MPEGMode from 'lamejs/src/js/MPEGMode';
// import BitStream from 'lamejs/src/js/BitStream';
// window.Lame = Lame;
// window.Presets = Presets;
// window.GainAnalysis = GainAnalysis;
// window.QuantizePVT = QuantizePVT;
// window.Quantize = Quantize;
// window.Reservoir = Reservoir;
// window.Takehiro = Takehiro;
// window.MPEGMode = MPEGMode;
// window.BitStream = BitStream;
...@@ -308,7 +308,7 @@ export const dimensionalConvert = (list: any[]) => { ...@@ -308,7 +308,7 @@ export const dimensionalConvert = (list: any[]) => {
}; };
export const getFile = (url: string) => { export const getFile = (url: string) => {
return new Promise((resolve, reject) => { return new Promise<Blob>((resolve, reject) => {
request request
.get(url, { .get(url, {
responseType: 'blob', responseType: 'blob',
...@@ -394,22 +394,6 @@ export function timeComparison() { ...@@ -394,22 +394,6 @@ export function timeComparison() {
console.log('forEach循环执行时间:' + (end - start) + ' 毫秒'); console.log('forEach循环执行时间:' + (end - start) + ' 毫秒');
} }
// 获取音频文件的时长
export const getDurationOfAudioFile = (file: File) => {
return new Promise((resolve, reject) => {
const audio = new Audio();
audio.src = URL.createObjectURL(file);
audio.onloadedmetadata = () => {
resolve(audio.duration);
};
audio.onerror = () => {
reject('无法获取音频文件的时长!');
};
});
};
// 从二维数组中合并同类 // 从二维数组中合并同类
export const mergedArray = (arr: any[], key: string = 'uuid', first: string = 'is_old') => { export const mergedArray = (arr: any[], key: string = 'uuid', first: string = 'is_old') => {
let newList = []; let newList = [];
......
import lamejs from 'lamejs';
// Entry point of the MP3-encoding worker.
// Payload posted by mp3Worker on the main thread: { blobList: Blob[] }.
self.addEventListener('message', async (event) => {
  const data = event.data;
  console.log('接收到数据了', data);
  const blobList = data.blobList;
  // Encode every blob into a single MP3 blob.
  const blob = await mergeBlobs(blobList);
  // Reply to the main thread; without this post the caller's promise
  // (mp3Worker) never resolves — the original left it commented out.
  self.postMessage(blob);
});
// const startWorker = async () => {
// await exportToMp3();
// };
// Encode a list of audio Blobs into a single 'audio/mp3' Blob with lamejs.
// Blobs are read sequentially with one FileReader; each chunk's bytes are
// reinterpreted as raw 16-bit mono PCM at 44100 Hz and fed to the encoder
// (NOTE(review): the bytes are not decoded first — confirm upstream supplies
// raw PCM, otherwise compressed input is encoded as noise).
// Resolves with the final Blob, rejects on any read failure.
// Fixes the original, which resolved after the FIRST blob only: `onloadend`
// fires after every read, so it flushed immediately, and `readNext` was never
// called with an advanced index.
function mergeBlobs(blobs) {
  return new Promise((resolve, reject) => {
    const buffers = [];
    const reader = new FileReader();
    const encoder = new lamejs.Mp3Encoder(1, 44100, 128);
    let index = 0;
    // Flush the encoder's buffered frames and hand back the assembled file.
    function finalize() {
      const mp3buffer = encoder.flush();
      if (mp3buffer.length > 0) {
        buffers.push(mp3buffer);
      }
      resolve(new Blob(buffers, { type: 'audio/mp3' }));
    }
    reader.onload = function (e) {
      // Reinterpret the raw bytes as 16-bit samples and encode them.
      const buffer = new Int16Array(e.target.result);
      const mp3buffer = encoder.encodeBuffer(buffer);
      if (mp3buffer.length > 0) {
        buffers.push(mp3buffer);
      }
      // Advance to the next blob (or finalize when done).
      index += 1;
      readNext();
    };
    reader.onerror = function () {
      reject(new Error('Failed to read audio blobs.'));
    };
    // Read the blob at `index`; once all blobs are consumed, finalize.
    function readNext() {
      if (index >= blobs.length) {
        finalize();
        return;
      }
      reader.readAsArrayBuffer(blobs[index]);
    }
    readNext();
  });
}
// Secondary message listener: debug logging only. addEventListener stacks, so
// this runs in addition to the encoding handler registered earlier.
self.addEventListener('message', (event) => {
  console.log('接收到消息:', event.data);
});
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment