Commit 8b03221a by haojie

音频合并与切割

parent 5bb5c783
<template>
  <t-checkbox class="custom-default-checkbox" v-model="checkboxValue">
    <slot />
  </t-checkbox>
</template>
<script lang="ts" setup>
import { ref, watch } from 'vue';
const props = withDefaults(
  defineProps<{
    modelValue: boolean;
  }>(),
  {},
);
const emit = defineEmits(['update:modelValue', 'change']);
// Seed from the prop: the prop watcher below is not `immediate`, so a
// hard-coded ref(false) would render an initially-checked v-model as unchecked.
const checkboxValue = ref(props.modelValue);
// Propagate user toggles to the parent (v-model update + change event).
watch(
  () => checkboxValue.value,
  (v) => {
    emit('update:modelValue', v);
    emit('change', v);
  },
);
// Keep local state in sync when the parent mutates modelValue.
watch(
  () => props.modelValue,
  (v) => {
    checkboxValue.value = v;
  },
);
</script>
<style lang="less">
@import '@/style/variables.less';
.custom-default-checkbox {
  .t-checkbox__input {
    background: transparent;
    border: 1px solid #848e9c;
    /* single radius declaration — the previous duplicate `border-radius: 0`
       was dead code, immediately overridden by this 2px value */
    border-radius: 2px;
  }
  .t-checkbox__label {
    color: white;
    font-weight: 300;
    font-size: @size-12;
  }
  &:hover {
    .t-checkbox__input {
      border-color: #00f9f9;
    }
  }
}
.custom-default-checkbox.t-is-checked {
  .t-checkbox__input {
    background: #00f9f9;
    border-color: transparent;
    &::after {
      border-color: black;
    }
  }
}
</style>
......@@ -11,7 +11,6 @@
<script lang="tsx" setup>
import { onBeforeUnmount, onMounted, ref } from 'vue';
import { show_message } from '@/utils/tool';
import { testRequest } from '@/utils/api/userApi';
import { injectWindow } from '@/utils/pyqt';
const py_text = ref('你好');
......@@ -51,8 +50,6 @@ const sendToken = (value: string) => {
onMounted(() => {
injectWindow('sendToken', sendToken);
injectWindow('py_receive', py_receive);
// 请求接口测试
testRequest();
});
</script>
......
......@@ -48,7 +48,7 @@ const confirm = () => {
titleValue.value = '';
contentValue.value = '';
} else {
// show_message('标题或内容必填');
show_message('标题或内容必填');
}
};
......
......@@ -125,11 +125,10 @@ onMounted(async () => {
<style lang="less">
@import '@/style/variables';
.chose-interact-box {
width: 550px;
flex: 1;
display: flex;
flex-direction: column;
padding: 0 4px;
padding: 0 12px;
overflow: hidden;
.all-select {
padding: 12px 4px;
......
......@@ -2,6 +2,7 @@
<div class="create-live-script-setting">
<div class="all-select">
<Select :options="scriptTypeList" v-model="currentOption" @change="scriptTypeChange"></Select>
<CheckBox v-model="isDisorganize" @change="checkboxChange">GPT洗稿</CheckBox>
<div
class="right-chose-tones"
:style="{
......@@ -132,6 +133,7 @@
<script lang="tsx" setup>
import { computed, onMounted, reactive, ref, watch } from 'vue';
import CheckBox from '@/components/CheckBox.vue';
import ConfirmDialog from '@/components/ConfirmDialog.vue';
import TextScriptDialog from './TextScriptDialog.vue';
import ScriptTemplate from '@/components/ScriptTemplate.vue';
......@@ -143,9 +145,10 @@ import { createLiveKeys, scriptTypeList, scriptTypeText, scriptTypePhonetics } f
import { useLiveInfoSubmit } from '@/hooks/useStoreCommit';
import Textarea from '@/components/textarea.vue';
import { getUploadConfig, getTonesList } from '@/service/Common';
import { v4 } from 'uuid';
import { useStore } from 'vuex';
import { useRoute } from 'vue-router';
const [commitInfo] = useLiveInfoSubmit();
const props = withDefaults(
defineProps<{
......@@ -164,6 +167,7 @@ const route = useRoute();
// 编辑信息
const editInfo = computed(() => store.getters['live/getEditLive']);
const createLiveInfo = computed(() => store.getters['live/getLiveInfo']);
// 文本脚本列表
const textScriptList = ref([]);
......@@ -175,6 +179,8 @@ const editTextInfo = ref({});
const deleteTextId = ref();
// 确认删除弹窗
const confirmDeleteVisible = ref(false);
// 是否gpt洗稿
const isDisorganize = ref(false);
const lists = reactive({
tones: [],
......@@ -219,6 +225,13 @@ const textareaValue = ref('');
const currentOption = ref(scriptTypeText);
// Handle the "GPT rewrite" checkbox toggling: persist the flag into the
// create-live store so it is submitted with the rest of the live config.
const checkboxChange = (value: boolean) => {
  const patch = { [createLiveKeys.isDisorganize]: value };
  commitInfo(patch);
};
// 新增文本脚本
const addTextScript = () => {
// 打开弹窗
......@@ -242,20 +255,37 @@ const deleteTextScript = (index: number) => {
// Confirm deletion of a text script: drop the entry previously marked by
// `deleteTextId` (an index into the list), then sync the list to the store.
const confirmDeleteText = () => {
  const removeAt = deleteTextId.value;
  textScriptList.value.splice(removeAt, 1);
  submitTextScript();
};
// Persist a text script coming back from the edit dialog.
// Requires both title and content; otherwise the submission is ignored.
const textScriptSubmit = (params: any) => {
  if (!(params.title && params.content)) {
    return;
  }
  // NOTE(review): every entry is stamped with the shared session scriptUuid
  // (not a fresh uuid per entry, despite the original comment) — the TTS
  // callback polls by this single uuid; confirm this is intended.
  params.uuid = createLiveInfo.value[createLiveKeys.scriptUuid];
  if (typeof params.index === 'number') {
    // Edit in place
    const target = textScriptList.value[params.index];
    target.title = params.title;
    target.content = params.content;
    // Editing invalidates any previously generated audio, so clear the
    // task id and the audio address.
    target.task_id = '';
    target.audio_address = '';
  } else {
    // New entry
    textScriptList.value.push(params);
  }
  submitTextScript();
};
// Push the current text script list into the create-live store.
const submitTextScript = () => {
  const update = { [createLiveKeys.textScriptList]: textScriptList.value };
  commitInfo(update);
};
// 更新对应的info
const updateTonesInfo = (tone_id: any, phonetic_timbres_id: any) => {
if (tone_id) {
......@@ -428,6 +458,14 @@ const getList = async () => {
updateTonesInfo(tonesValue.value, soundColorValue.value);
};
// Resolve the session uuid for text-script TTS generation.
// NOTE(review): dev builds reuse one hard-coded uuid — presumably for
// repeatable callback testing; confirm. Production gets a random v4 uuid.
const getUuid = () => {
  return isDev() ? 'e6e973a5-ccdd-4779-a09a-be1658ecbea2' : v4();
};
onMounted(async () => {
// 获取上传配置
ossConfig.value = await getUploadConfig();
......@@ -435,6 +473,8 @@ onMounted(async () => {
// 提交默认脚本类型
commitInfo({
[createLiveKeys.scriptType]: currentOption.value,
// 文本脚本的uuid
[createLiveKeys.scriptUuid]: getUuid(),
});
// 获取音色音调列表
......
import { computed, onBeforeUnmount, ref } from 'vue';
import { useStore } from 'vuex';
import { createLiveKeys } from '@/service/CreateLive';
import { getLiveTtsCallback } from '@/utils/api/userApi';
import { show_message } from '@/utils/tool';
import { audioMerge, splitAudio } from '@/utils/audio';
// Polls the TTS callback endpoint until every text script in the store has
// produced audio, then merges the audio files and splits the merged result
// into upload-sized chunks. Returns { openInterval } to start the polling.
export const processTextCallback = () => {
  const store = useStore();
  const interval = ref(null);
  const createLiveInfo = computed(() => store.getters['live/getLiveInfo']);
  // One poll: fetch task results keyed by the shared session uuid.
  const getTaskStatus = async () => {
    try {
      const res: any = await getLiveTtsCallback({
        task_id: createLiveInfo.value[createLiveKeys.scriptUuid],
      });
      if (res.code == 0) {
        // All scripts share one uuid, so "done" means at least one result
        // per entry in the text script list.
        if (res.data.length >= createLiveInfo.value[createLiveKeys.textScriptList].length) {
          // Stop polling before the (slow) merge work starts.
          closeInterval();
          const audio_list = [];
          res.data.forEach((item: any) => {
            const data = item.data;
            if (data && data.audio_address && data.task_id) {
              audio_list.push(data.audio_address);
            } else {
              show_message('缺少音频或id');
            }
          });
          // Merge the generated audio files into one buffer.
          const file = await audioMerge(audio_list);
          console.log(file);
          if (file) {
            // Split into 5MB chunks.
            const split_list = await splitAudio(file, 5 * 1024 * 1024);
            console.log(split_list);
          }
        }
      }
    } catch (e) {
      console.log(e);
    }
  };
  const openInterval = () => {
    // Guard: calling openInterval twice must not stack a second timer
    // (the old handle would leak and keep polling forever).
    if (interval.value !== null) {
      return;
    }
    interval.value = window.setInterval(() => {
      getTaskStatus();
    }, 3000);
  };
  const closeInterval = () => {
    // Single clear is enough — the original cleared the same handle twice.
    if (interval.value !== null) {
      window.clearInterval(interval.value);
      interval.value = null;
    }
  };
  onBeforeUnmount(() => {
    closeInterval();
  });
  return {
    openInterval,
  };
};
......@@ -70,13 +70,15 @@ import { computed, onMounted, ref, onBeforeUnmount } from 'vue';
import { getElBounding, show_message, DataType } from '@/utils/tool';
import { useStore } from 'vuex';
import { createLiveKeys, scriptTypeText, scriptTypePhonetics } from '@/service/CreateLive';
import { createLiveTask, getLiveTaskInfo, createDrafts, getDraftsDetail } from '@/utils/api/userApi';
import { createLiveTask, getLiveTaskInfo, createDrafts, getDraftsDetail, liveTts } from '@/utils/api/userApi';
import { useRoute, useRouter } from 'vue-router';
import routerConfig from '@/router/tool';
import { onUpdateLiveTask } from '@/service/Common';
import { createLiveRouteKey } from '@/constants/token';
import { callPyjsInWindow } from '@/utils/pyqt';
import { useLiveInfoSubmit } from '@/hooks/useStoreCommit';
import { processTextCallback } from './hooks/scripts';
const { openInterval } = processTextCallback();
const [commitInfo] = useLiveInfoSubmit();
const store = useStore();
......@@ -95,7 +97,7 @@ const confirmVisible = ref(false);
const publicTool = ref<HTMLElement>();
const toolHeight = ref(0);
const currentSetp = ref(2);
const currentSetp = ref(1);
// 通知子组件初始化
const initNum = ref(1);
......@@ -139,7 +141,7 @@ const setpsList = [
{
key: createLiveKeys.scriptType,
value: scriptTypeText,
require: [createLiveKeys.textTones, createLiveKeys.textScriptValue],
require: [createLiveKeys.textTones, createLiveKeys.textScriptList],
message: '音调或文本必填',
},
{
......@@ -390,7 +392,37 @@ const confirm = async () => {
show_message('请输入标题');
return;
}
let item = createLiveInfo.value;
if (item[createLiveKeys.scriptType] == scriptTypeText) {
// 文本脚本
loading.value = true;
for (let i = 0; i < item[createLiveKeys.textScriptList].length; i++) {
let params = {
// 音色
phonetic_timbres_id: item[createLiveKeys.textSoundColor],
// 音调
tone_id: item[createLiveKeys.textTones],
content: item[createLiveKeys.textScriptList][i].content,
uuid: item[createLiveKeys.textScriptList][i].uuid,
};
// 生成音频
let res: any = await liveTts(params);
if (res.code == 0) {
// 通过uuid找到对应的对象
let index = item[createLiveKeys.textScriptList].findIndex((it: any) => it.uuid == params.uuid);
if (index !== -1) {
let list = JSON.parse(JSON.stringify(item[createLiveKeys.textScriptList]));
list[i].task_id = res.data.task_id;
commitInfo({
[createLiveKeys.textScriptList]: list,
});
}
}
}
// 开启轮询
openInterval();
} else {
try {
loading.value = true;
let res: any = await createLiveTask(filterFiled());
......@@ -416,6 +448,7 @@ const confirm = async () => {
loading.value = false;
console.log(e);
}
}
};
onMounted(() => {
......
......@@ -7,10 +7,13 @@ export const createLiveKeys = {
textTones: 'tone_id', // 文本--音调
textSoundColor: 'phonetic_timbres_id', // 文本音色
textScriptValue: 'type_content', // 文字脚本的文本
textScriptList: 'type_content_list', // 文字脚本列表
phoneticsSoundColor: 'phoneticsSoundColor', // 音频 音色
phoneticsFile: 'phoneticsFile', // 音频文件
commentMethod: 'commentMethod', // 评论方式
interactiveLibrary: 'interactiveLibrary', // 互动库
isDisorganize: 'is_disorganize', // 是否洗稿
scriptUuid: 'script_uuid', // 文本脚本生成是的uuid
};
// 脚本类型
......
import { createLiveKeys } from '@/service/CreateLive';
const state = {
name: '',
createLive: {
const initParams = () => {
return {
[createLiveKeys.id]: '',
[createLiveKeys.id_type]: '',
[createLiveKeys.scriptType]: '',
......@@ -12,8 +11,16 @@ const state = {
[createLiveKeys.phoneticsSoundColor]: '',
[createLiveKeys.phoneticsFile]: '',
[createLiveKeys.commentMethod]: '',
[createLiveKeys.isDisorganize]: false,
[createLiveKeys.interactiveLibrary]: [],
},
[createLiveKeys.textScriptList]: [],
[createLiveKeys.scriptUuid]: '',
};
};
const state = {
name: '',
createLive: initParams(),
editLive: {},
liveImage: '',
liveVideoStatus: {
......@@ -32,20 +39,10 @@ const mutations = {
Object.keys(info).forEach((item: any) => {
state.createLive[item] = info[item];
});
console.log(state.createLive);
},
initLiveInfo(state: StateType) {
state.createLive = {
[createLiveKeys.id]: '',
[createLiveKeys.id_type]: '',
[createLiveKeys.scriptType]: '',
[createLiveKeys.textTones]: '',
[createLiveKeys.textSoundColor]: '',
[createLiveKeys.textScriptValue]: '',
[createLiveKeys.phoneticsSoundColor]: '',
[createLiveKeys.phoneticsFile]: '',
[createLiveKeys.commentMethod]: '',
[createLiveKeys.interactiveLibrary]: [],
};
state.createLive = initParams();
},
// 清空创建直播时的参数
clearCreateLive(state: StateType) {
......
......@@ -318,6 +318,23 @@ export const deleteDrafts = (id: any) => {
});
};
export const testRequest = () => {
return request.get('/api/users/accounts', {});
// 语音合成
export const liveTts = (data: any) => {
const header = getHeader();
return request.post(`/api/live/tts`, data, {
headers: {
...header,
},
});
};
// 获取语音返回值
export const getLiveTtsCallback = (data: any) => {
const header = getHeader();
return request.get(`/api/live/tts/task`, {
params: data,
headers: {
...header,
},
});
};
// Convert float audio samples (clamped to [-1, 1]) to little-endian 16-bit
// PCM, writing into the DataView `output` starting at byte `offset`.
const floatTo16BitPCM = (output, offset, input) => {
  let pos = offset;
  for (let i = 0; i < input.length; i++) {
    const clamped = Math.min(1, Math.max(-1, input[i]));
    // Negative range scales by 0x8000, positive by 0x7fff (asymmetric int16).
    output.setInt16(pos, clamped < 0 ? clamped * 0x8000 : clamped * 0x7fff, true);
    pos += 2;
  }
};
// Write an ASCII string byte-by-byte into the DataView at `offset`
// (used for the RIFF/WAVE header tags).
const writeString = (view, offset, string) => {
  Array.from(string).forEach((ch: any, i) => {
    view.setUint8(offset + i, ch.charCodeAt(0));
  });
};
// Serialize an AudioBuffer into a 16-bit PCM WAVE Blob.
// Fixes vs. the original: the AudioBuffer itself was passed to the PCM
// writer and indexed like a Float32Array (AudioBuffer is not indexable, so
// every sample came out as 0/garbage), and the data/RIFF sizes assumed mono
// while the header declared `numberOfChannels`. Channels are now interleaved
// frame-by-frame as the WAVE format requires.
const bufferToWave = (buffer) => {
  const numChannels = buffer.numberOfChannels;
  const dataSize = buffer.length * numChannels * 2; // 2 bytes per sample
  const arrayBuffer = new ArrayBuffer(44 + dataSize);
  const view = new DataView(arrayBuffer);
  // Local helper: write an ASCII tag into the header.
  const putString = (off, str) => {
    for (let i = 0; i < str.length; i++) {
      view.setUint8(off + i, str.charCodeAt(i));
    }
  };
  // RIFF/WAVE header
  putString(0, 'RIFF');
  view.setUint32(4, 36 + dataSize, true); // file size minus the 8-byte RIFF preamble
  putString(8, 'WAVE');
  putString(12, 'fmt ');
  view.setUint32(16, 16, true); // fmt chunk size
  view.setUint16(20, 1, true); // audio format: PCM
  view.setUint16(22, numChannels, true);
  view.setUint32(24, buffer.sampleRate, true);
  view.setUint32(28, buffer.sampleRate * numChannels * 2, true); // byte rate
  view.setUint16(32, numChannels * 2, true); // block align
  view.setUint16(34, 16, true); // bits per sample
  putString(36, 'data');
  view.setUint32(40, dataSize, true);
  // Interleave channel samples and write them as little-endian int16.
  const channels = [];
  for (let ch = 0; ch < numChannels; ch++) {
    channels.push(buffer.getChannelData(ch));
  }
  let offset = 44;
  for (let frame = 0; frame < buffer.length; frame++) {
    for (let ch = 0; ch < numChannels; ch++) {
      const s = Math.max(-1, Math.min(1, channels[ch][frame]));
      view.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7fff, true);
      offset += 2;
    }
  }
  return new Blob([view], { type: 'audio/wav' });
};
// Merge a list of audio urls into one rendered AudioBuffer.
// Downloads and decodes each url, concatenates the PCM data back-to-back,
// and renders the result through an OfflineAudioContext.
// Fixes vs. the original: the merged buffer was sized to the FIRST clip only
// (copies at `i * sourceBuffer.length` wrote past its end and were silently
// clamped/thrown), and `startRendering()` was called on a realtime
// AudioContext, which has no such method (TypeError at runtime).
export const audioMerge = async (audioUrls: string[]) => {
  const audioContext = new (window.AudioContext || window.webkitAudioContext)();
  const audioBuffers = [];
  // Fetch and decode one url.
  const loadAudioFile = async (url) => {
    const response = await fetch(url);
    const arrayBuffer = await response.arrayBuffer();
    return audioContext.decodeAudioData(arrayBuffer);
  };
  // Load sequentially so decode order matches the url order.
  for (const audioUrl of audioUrls) {
    audioBuffers.push(await loadAudioFile(audioUrl));
  }
  // Nothing to merge — callers already guard with `if (file)`.
  if (audioBuffers.length === 0) {
    return null;
  }
  // NOTE(review): assumes all clips share the first clip's sample rate and
  // channel count — confirm the TTS backend guarantees this.
  const numberOfChannels = audioBuffers[0].numberOfChannels;
  const sampleRate = audioBuffers[0].sampleRate;
  const totalLength = audioBuffers.reduce((sum, b) => sum + b.length, 0);
  const mergedBuffer = audioContext.createBuffer(numberOfChannels, totalLength, sampleRate);
  // Copy each clip's channel data at its running frame offset.
  let writeOffset = 0;
  for (const sourceBuffer of audioBuffers) {
    const channels = Math.min(numberOfChannels, sourceBuffer.numberOfChannels);
    for (let channel = 0; channel < channels; channel++) {
      mergedBuffer.getChannelData(channel).set(sourceBuffer.getChannelData(channel), writeOffset);
    }
    writeOffset += sourceBuffer.length;
  }
  // Render offline and hand back the resulting AudioBuffer.
  const offlineAudioContext = new OfflineAudioContext(numberOfChannels, totalLength, sampleRate);
  const source = offlineAudioContext.createBufferSource();
  source.buffer = mergedBuffer;
  source.connect(offlineAudioContext.destination);
  source.start();
  return offlineAudioContext.startRendering();
};
// Split audio byte data into Blob chunks of at most `blockSize` bytes.
// Fixes vs. the original: the chunks were glued straight back into a SINGLE
// Blob, which undid the split — callers (see `split_list`) expect one Blob
// per chunk, so an array is returned now.
// NOTE(review): the visible caller passes the AudioBuffer returned by
// audioMerge, which has no `.slice` — it must be serialized (e.g. via
// bufferToWave) before splitting; confirm and fix the call site.
export const splitAudio = async (audioBuffer, blockSize) => {
  // Accept any sliceable byte container (TypedArray has .length,
  // ArrayBuffer only .byteLength).
  const totalLength = audioBuffer.length ?? audioBuffer.byteLength;
  const numBlocks = Math.ceil(totalLength / blockSize);
  const audioChunks = [];
  for (let i = 0; i < numBlocks; i++) {
    const startOffset = i * blockSize;
    const endOffset = Math.min(startOffset + blockSize, totalLength);
    audioChunks.push(new Blob([audioBuffer.slice(startOffset, endOffset)], { type: 'audio/wav' }));
  }
  return audioChunks;
};
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment