From 588ba72af84ce345a1b90018911c7b590df036ac Mon Sep 17 00:00:00 2001
From: AlekPet
Date: Sun, 24 Nov 2024 15:40:17 +0300
Subject: [PATCH] update extras_node

Update design of the Speech & Recognition settings and save the recorded audio
Add abort button: cancels recognition and saves the recorded audio
Stop button: stops recognition, gets the result and saves the recorded audio
Small fix in the global utils.js
Update READMEs
Other small fixes...
---
 ExtrasNode/README.md                  |   2 +-
 ExtrasNode/css/extras_node_styles.css |  38 ++-
 ExtrasNode/js/extras_node.js          |   5 +
 ExtrasNode/js/utils.js                |  64 ++---
 ExtrasNode/lib/extras_node_widgets.js | 322 +++++++++++++++++---------
 README.md                             |   4 +
 pyproject.toml                        |   2 +-
 7 files changed, 301 insertions(+), 136 deletions(-)

diff --git a/ExtrasNode/README.md b/ExtrasNode/README.md
index 8ee4a33..5d539e6 100644
--- a/ExtrasNode/README.md
+++ b/ExtrasNode/README.md
@@ -18,7 +18,7 @@
 
 - Dispaly image size node "Preview Image"
 
-- Speech & Recognition speech
+- Speech & Recognition speech and saving the recorded audio
 
 - Preview image, video and sound select list combo
 
diff --git a/ExtrasNode/css/extras_node_styles.css b/ExtrasNode/css/extras_node_styles.css
index fa9be2c..12bcc86 100644
--- a/ExtrasNode/css/extras_node_styles.css
+++ b/ExtrasNode/css/extras_node_styles.css
@@ -11,6 +11,7 @@
   width: 45px;
   font-family: monospace;
   align-items: center;
+  margin-top: 3px;
 }
 
 .alekpet_extras_node_speechrecognition_row {
@@ -41,10 +42,10 @@
 .alekpet_extras_node_recognition_icon_box {
   display: flex;
   justify-content: center;
-  align-items: center;
-  line-height: 1;
+  align-items: baseline;
   gap: 3px;
   transition: all 0.7s;
+  margin-top: 2px;
 }
 
 .alekpet_extras_node_recognition_icon:hover {
@@ -82,8 +83,10 @@
 }
 
 .alekpet_extras_node_speech_icon {
-  font-size: 0.7em;
+  font-size: 0.6em;
   opacity: 1;
+  line-height: 1;
+  margin-top: 2px;
 }
 
 .alekpet_extras_node_speech_icon_playing {
@@ -96,3 +99,32 @@
   right: 3px;
   color: var(--error-text);
 }
+
+/* Settings */
+.alekpet_extras_node_speech_recognition_settings_props_label {
+  background: #646363;
+  color: #cdcbcb;
+  border-radius: 5px;
+  padding: 3px;
+  display: flex;
+  align-items: center;
+  justify-content: space-between;
+  flex-wrap: nowrap;
+}
+
+.alekpet_extras_node_speech_recognition_settings_props_label:hover {
+  background: darkgrey;
+  color: white;
+}
+
+.alekpet_extras_node_speech_recognition_settings_props_input {
+  max-width: 10px;
+  padding: 0;
+  margin: 0;
+  outline: 0;
+}
+
+.alekpet_extras_node_speech_recognition_settings_button {
+  display: flex;
+  font-size: 0.6rem;
+}
diff --git a/ExtrasNode/js/extras_node.js b/ExtrasNode/js/extras_node.js
index ee1ea99..e28dc57 100644
--- a/ExtrasNode/js/extras_node.js
+++ b/ExtrasNode/js/extras_node.js
@@ -16,6 +16,7 @@ import {
   createPreiviewSize,
   speechRect,
   SpeechSynthesis,
+  checkboxLSCheckedByKey,
 } from "./lib/extrasnode/extras_node_widgets.js";
 
 import { RecognationSpeechDialog } from "./lib/extrasnode/extras_node_dialogs.js";
@@ -457,6 +458,10 @@
                !!e.target.checked
              );
              SpeechAndRecognationSpeechSaveAs = !!e.target.checked;
+             checkboxLSCheckedByKey(
+               `${idExt}.SpeechAndRecognationSpeechSaveAs`,
+               ".alekpet_extras_node_recognition_saveAs"
+             );
            },
          }),
        ]
diff --git a/ExtrasNode/js/utils.js b/ExtrasNode/js/utils.js
index 2f20f89..3c0d92b 100644
--- a/ExtrasNode/js/utils.js
+++ b/ExtrasNode/js/utils.js
@@ -138,13 +138,16 @@
   });
 }
 
-function animateClick(target, opacityVal = 0.9) {
+function animateClick(target, params = {}) {
+  const { opacityVal = 0.9, callback = () 
=> {} } = params; if (target?.isAnimating) return; const hide = +target.style.opacity === 0; return animateTransitionProps(target, { opacity: hide ? opacityVal : 0, - }).then(() => showHide({ elements: [target], hide: !hide })); + }) + .then(() => showHide({ elements: [target], hide: !hide })) + .then(() => callback()); } function showHide({ elements = [], hide = null, displayProp = "block" } = {}) { @@ -448,7 +451,11 @@ function createWindowModal({ case "object": default: if (Array.isArray(text)) { - text.forEach((element) => parent.append(element)); + text.forEach( + (element) => + (element.nodeType === 1 || element.nodeType === 3) && + parent.append(element) + ); } else if (text.nodeType === 1 || text.nodeType === 3) parent.append(text); } @@ -496,34 +503,39 @@ function createWindowModal({ }); // Title - const box_settings_title = makeElement("div", { - class: ["alekpet__window__title", ...classesTitle], - }); - - Object.assign(box_settings_title.style, { - ...THEME_MODAL_WINDOW_BASE.stylesTitle, - ...stylesTitle, - }); + let box_settings_title = ""; + if (textTitle) { + box_settings_title = makeElement("div", { + class: ["alekpet__window__title", ...classesTitle], + }); - // Add text (html) to title - addText(textTitle, box_settings_title); + Object.assign(box_settings_title.style, { + ...THEME_MODAL_WINDOW_BASE.stylesTitle, + ...stylesTitle, + }); + // Add text (html) to title + addText(textTitle, box_settings_title); + } // Body - const box_settings_body = makeElement("div", { - class: ["alekpet__window__body", ...classesBody], - }); + let box_settings_body = ""; + if (textBody) { + box_settings_body = makeElement("div", { + class: ["alekpet__window__body", ...classesBody], + }); - Object.assign(box_settings_body.style, { - display: "flex", - flexDirection: "column", - alignItems: "flex-end", - gap: "5px", - textWrap: "wrap", - ...stylesBody, - }); + Object.assign(box_settings_body.style, { + display: "flex", + flexDirection: "column", + alignItems: "flex-end", + gap: "5px", + textWrap: "wrap", + ...stylesBody, + }); - // Add text (html) to body - addText(textBody, box_settings_body); + // Add text (html) to body + addText(textBody, box_settings_body); + } // Close button const close__box__button = makeElement("div", { diff --git a/ExtrasNode/lib/extras_node_widgets.js b/ExtrasNode/lib/extras_node_widgets.js index b7cd757..872d46c 100644 --- a/ExtrasNode/lib/extras_node_widgets.js +++ b/ExtrasNode/lib/extras_node_widgets.js @@ -7,7 +7,12 @@ import { api } from "../../../../scripts/api.js"; import { app } from "../../../../scripts/app.js"; import { $el } from "../../../../scripts/ui.js"; -import { rgbToHex, isValidStyle } from "../../utils.js"; +import { + rgbToHex, + isValidStyle, + createWindowModal, + animateClick, +} from "../../utils.js"; import { RecognationSpeechDialog } from "./extras_node_dialogs.js"; const idExt = "alekpet.ExtrasNode"; @@ -186,6 +191,15 @@ async function checkPremissions( .catch((e) => ({ device, state: "error", status: false })); } +// Check checkbox +function checkboxLSCheckedByKey(lsKey, selector, defaultVal = false) { + const currValue = JSON.parse(localStorage.getItem(lsKey, defaultVal)); + + Array.from(document.querySelectorAll(selector)).forEach( + (saveAs) => (saveAs.checked = currValue) + ); +} + // Set styles const setStylesAllElements = (selector, exclude = null, styles = {}) => { let elements = Array.from(document.querySelectorAll(selector)); @@ -241,18 +255,7 @@ function SpeechWidget(node, inputName, inputData, widgetsText) { ); if 
(checkboxClear) checkboxClear.checked = widget.value[1] ?? false; - - if (checkboxSave) { - const isCheckedSave = widget.value[0] ?? false; - - if (isCheckedSave) { - const premission = await checkPremissions(); - checkboxSave.checked = - premission?.status && isCheckedSave ? true : false; - } else { - checkboxSave.checked = isCheckedSave; - } - } + if (checkboxSave) checkboxSave.checked = widget.value[0] ?? false; }, onRemove() { widget.element?.remove(); @@ -311,69 +314,78 @@ function SpeechWidget(node, inputName, inputData, widgetsText) { if (speechRect.elements === null) { // Record audio if (checkboxSave.checked) { - const stream = await navigator.mediaDevices.getUserMedia({ - audio: true, - }); - mediaRecorder = new MediaRecorder(stream); + try { + const stream = await navigator.mediaDevices.getUserMedia({ + audio: true, + }); - mediaRecorder.ondataavailable = (event) => { - audioChunks.push(event.data); - }; + mediaRecorder = new MediaRecorder(stream); - mediaRecorder.onstop = async () => { - if (speechRect.isRecognitionAbort) { - speechRect.lastText = ""; - speechRect.isRecognitionAbort = false; - return; - } - - const saveAsWindow = JSON.parse( - localStorage.getItem( - `${idExt}.SpeechAndRecognationSpeechSaveAs`, - false - ) - ); + mediaRecorder.ondataavailable = (event) => { + audioChunks.push(event.data); + }; - // Filename - let nameFile = "recording.webm"; - if (speechRect?.lastText?.length) { - nameFile = `${speechRect.lastText - .slice(0, maxLenAudioFileName) - .replaceAll(regExpFileName, "_")}.webm`; - } - - // Get audio - const audioBlob = new Blob(audioChunks, { - type: "audio/webm", - }); + mediaRecorder.onstop = async () => { + if (speechRect.isRecognitionAbort) { + speechRect.lastText = ""; + speechRect.isRecognitionAbort = false; + return; + } - if (!saveAsWindow) { - const body = new FormData(); - body.append("image", audioBlob, nameFile); - body.append("overwrite", "true"); - const resp = await api.fetchApi("/upload/image", { - method: "POST", - body, + const saveAsWindow = JSON.parse( + localStorage.getItem( + `${idExt}.SpeechAndRecognationSpeechSaveAs`, + false + ) + ); + + // Filename + let nameFile = "recording.webm"; + if (speechRect?.lastText?.length) { + nameFile = `${speechRect.lastText + .slice(0, maxLenAudioFileName) + .replaceAll(regExpFileName, "_")}.webm`; + } + + // Get audio + const audioBlob = new Blob(audioChunks, { + type: "audio/webm", }); - if (resp.status !== 200) { - console.error("[ExtrasNode] Recording audio not saved!"); - return; + if (!saveAsWindow) { + const body = new FormData(); + body.append("image", audioBlob, nameFile); + body.append("overwrite", "true"); + const resp = await api.fetchApi("/upload/image", { + method: "POST", + body, + }); + + if (resp.status !== 200) { + console.error( + "[ExtrasNode] Recording audio not saved!" 
+ ); + return; + } + + console.log( + `[ExtrasNode] Recording audio "${nameFile}" saved successfully!` + ); + } else { + const audioUrl = URL.createObjectURL(audioBlob); + const linkDown = document.createElement("a"); + linkDown.href = audioUrl; + linkDown.download = nameFile; + linkDown.click(); } - console.log( - `[ExtrasNode] Recording audio "${nameFile}" saved successfully!` - ); - } else { - const audioUrl = URL.createObjectURL(audioBlob); - const linkDown = document.createElement("a"); - linkDown.href = audioUrl; - linkDown.download = nameFile; - linkDown.click(); - } - - speechRect.lastText = ""; - }; + speechRect.lastText = ""; + }; + } catch (err) { + alert( + `Device "Microphone" - ${err.message}!\n\nCheck device or allow access!` + ); + } } // end - Record audio @@ -394,46 +406,145 @@ function SpeechWidget(node, inputName, inputData, widgetsText) { } }, }), - $el( - "input.alekpet_extras_node_speech_recognition_checkbox.alekpet_extras_node_recognition_save", - { - type: "checkbox", - checked: widget.value[0] ?? false, - title: "Save in audio file after recognition", - onchange: async (e) => { - const premission = await checkPremissions(); - let checkValue = !!e.target.checked; - - if (!premission?.status && premission.state != "prompt") { - alert( - `Access to the device "${premission.device.name}" is denied!\nAllow access to the device!` + // Settings elements + $el("div.alekpet_extras_node_speech_recognition_settings", [ + $el( + "div.alekpet_extras_node_speech_recognition_settings_button.pi.pi-cog", + { + title: "Settings", + onclick: (e) => { + checkboxLSCheckedByKey( + `${idExt}.SpeechAndRecognationSpeechSaveAs`, + ".alekpet_extras_node_recognition_saveAs" ); - checkValue = false; - } - - navigator.mediaDevices - .getUserMedia({ audio: true }) - .then(() => widget?.callback([checkValue, widget.value[1]])) - .catch((e) => { - widget?.callback([false, widget.value[1]]); - alert( - `Access to the device "${premission.device.name}" is denied!\nAllow access to the device!` - ); - }); + animateClick(e.currentTarget.nextElementSibling); + }, + } + ), + createWindowModal({ + textTitle: null, + stylesClose: { + top: "-4px", + right: "4px", + width: "auto", + height: "auto", + padding: "2px", + fontSize: "0.4rem", + lineHeight: 1, }, - } - ), - $el( - "input.alekpet_extras_node_speech_recognition_checkbox.alekpet_extras_node_recognition_clear", - { - type: "checkbox", - checked: widget.value[1] ?? true, - title: "Clear text after recognition", - onchange: (e) => { - widget?.callback([widget.value[0], !!e.target.checked]); + stylesBox: { + background: "transparent", + border: 0, + padding: 0, + boxShadow: "none", }, - } - ), + stylesBody: { + display: "flex", + flexDirection: "column", + alignItems: "stretch", + gap: "3px", + textWrap: "wrap", + background: "rgb(131, 131, 131)", + color: "white", + padding: "2px", + marginTop: "2px", + borderRadius: "6px", + fontSize: "0.4rem", + minWidth: "85px", + }, + stylesWrapper: { + minWidth: "100px", + transform: "translate(0%, 0%)", + }, + textBody: [ + $el( + "label.alekpet_extras_node_speech_recognition_settings_props_label", + [ + $el( + "span.alekpet_extras_node_speech_recognition_settings_props_name", + { + textContent: "Recoding audio", + title: "Save in audio file after recognition", + } + ), + $el( + "input.alekpet_extras_node_speech_recognition_settings_props_input.alekpet_extras_node_speech_recognition_checkbox.alekpet_extras_node_recognition_save", + { + type: "checkbox", + checked: widget.value[0] ?? 
false, + onchange: (e) => + widget?.callback([!!e.target.checked, widget.value[1]]), + } + ), + ] + ), + $el( + "label.alekpet_extras_node_speech_recognition_settings_props_label", + [ + $el( + "span.alekpet_extras_node_speech_recognition_settings_props_name", + { + textContent: "Clear text", + title: "Clear text after recognition", + } + ), + $el( + "input.alekpet_extras_node_speech_recognition_settings_props_input.alekpet_extras_node_speech_recognition_checkbox.alekpet_extras_node_recognition_clear", + { + type: "checkbox", + checked: widget.value[1] ?? true, + onchange: (e) => + widget?.callback([widget.value[0], !!e.target.checked]), + } + ), + ] + ), + $el("hr", { style: { padding: 0, margin: 0 } }), + $el( + "label.alekpet_extras_node_speech_recognition_settings_props_label", + [ + $el( + "span.alekpet_extras_node_speech_recognition_settings_props_name", + { + textContent: "'Save as' window?", + title: "Show modal window when saving recorded audio.", + } + ), + $el( + "input.alekpet_extras_node_speech_recognition_settings_props_input.alekpet_extras_node_speech_recognition_checkbox.alekpet_extras_node_recognition_saveAs", + { + type: "checkbox", + checked: JSON.parse( + localStorage.getItem( + `${idExt}.SpeechAndRecognationSpeechSaveAs`, + false + ) + ), + onchange: (e) => { + const check = !!e.target.checked; + const settCheck = document.body.querySelector( + "[id$='.SpeechAndRecognationSpeech'] div input" + ); + settCheck && (settCheck.checked = check); + + localStorage.setItem( + `${idExt}.SpeechAndRecognationSpeechSaveAs`, + check + ); + + checkboxLSCheckedByKey( + `${idExt}.SpeechAndRecognationSpeechSaveAs`, + ".alekpet_extras_node_recognition_saveAs" + ); + }, + } + ), + ] + ), + ], + }), + ]), + // end -- Settings elements ]) ); } @@ -682,4 +793,5 @@ export { speechRect, SpeechSynthesis, speakSynthesisUtterance, + checkboxLSCheckedByKey, }; diff --git a/README.md b/README.md index 094dafe..06e82f6 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,10 @@ Custom nodes that extend the capabilities of [ComfyUI](https://github.com/comfyanonymous/ComfyUI) +## Supporting me 💖 + +If you enjoy my work, consider **[supporting me](https://alekpet.github.io/support)**. Your help means a lot and allows me to keep creating new and exciting projects. Thank you! + # List Nodes: | Name | Description | ComfyUI category | diff --git a/pyproject.toml b/pyproject.toml index 70f6a85..8ca6ded 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,7 @@ [project] name = "comfyui_custom_nodes_alekpet" description = "Nodes: PoseNode, PainterNode, TranslateTextNode, TranslateCLIPTextEncodeNode, DeepTranslatorTextNode, DeepTranslatorCLIPTextEncodeNode, ArgosTranslateTextNode, ArgosTranslateCLIPTextEncodeNode, ChatGLM4TranslateCLIPTextEncodeNode, ChatGLM4TranslateTextNode, PreviewTextNode, HexToHueNode, ColorsCorrectNode, IDENode." -version = "1.0.34" +version = "1.0.35" license = { file = "LICENSE" } [project.urls]